diff --git a/.commitlintrc.yaml b/.commitlintrc.yaml new file mode 100644 index 00000000000..8b13a390116 --- /dev/null +++ b/.commitlintrc.yaml @@ -0,0 +1,45 @@ +extends: + - "@commitlint/config-conventional" + +# Single source of truth for commit/PR title conventions. +# Used by: +# - .pre-commit-config.yaml (commitlint hook on commit-msg) +# - .github/workflows/lint_pr.yml (validates PR titles in CI) +rules: + type-enum: + - 2 + - always + - - feat + - fix + - docs + - style + - refactor + - perf + - test + - build + - ci + - chore + - revert + + # Scope is optional — no enforcement on allowed values. + scope-empty: + - 0 + + # Feast convention: subjects start with an uppercase letter. + # Overrides base config which defaults to "never sentence-case". + subject-case: + - 2 + - always + - - sentence-case + + header-max-length: + - 2 + - always + - 100 + + # Relax body/footer line length from base config's strict 100-char limit. + # Commit bodies often contain URLs, stack traces, or formatted output. 
+ body-max-line-length: + - 0 + footer-max-line-length: + - 0 diff --git a/.github/workflows/dbt-integration-tests.yml b/.github/workflows/dbt-integration-tests.yml index 7bf6c775b79..dd54ab36665 100644 --- a/.github/workflows/dbt-integration-tests.yml +++ b/.github/workflows/dbt-integration-tests.yml @@ -13,6 +13,10 @@ on: - 'sdk/python/tests/unit/dbt/**' - '.github/workflows/dbt-integration-tests.yml' +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: dbt-integration-test: if: @@ -47,7 +51,7 @@ jobs: uv pip install --system dbt-core dbt-duckdb - name: Run dbt integration tests - run: make test-python-integration-dbt + run: make test-python-integration-dbt - name: Minimize uv cache run: uv cache prune --ci diff --git a/.github/workflows/docker_smoke_tests.yml b/.github/workflows/docker_smoke_tests.yml new file mode 100644 index 00000000000..1aed85cd93e --- /dev/null +++ b/.github/workflows/docker_smoke_tests.yml @@ -0,0 +1,112 @@ +name: docker-smoke-tests + +on: + pull_request: + paths: + - "sdk/python/feast/infra/feature_servers/multicloud/**" + - "sdk/python/feast/feature_server.py" + - "infra/scripts/feature_server_docker_smoke.py" + - "Makefile" + - ".github/workflows/publish_images.yml" + - ".github/workflows/docker_smoke_tests.yml" + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + feature-server-docker-smoke: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + arch: [amd64, arm64] + steps: + - uses: actions/checkout@v4 + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + with: + install: true + - name: Build feature-server image + env: + ARCH: ${{ matrix.arch }} + run: | + make build-feature-server-docker REGISTRY=feastdev VERSION=smoke-${ARCH} 
DOCKER_PLATFORMS=linux/${ARCH} + - name: Run container + env: + ARCH: ${{ matrix.arch }} + run: | + docker run -d --rm \ + --name feature-server-smoke-${ARCH} \ + --platform linux/${ARCH} \ + -p 6566:6566 \ + -v "${GITHUB_WORKSPACE}/infra/scripts/feature_server_docker_smoke.py:/smoke.py:ro" \ + feastdev/feature-server:smoke-${ARCH} \ + python /smoke.py + - name: Wait for /health + run: | + for i in $(seq 1 60); do + if curl -fsS http://localhost:6566/health >/dev/null; then + exit 0 + fi + sleep 2 + done + echo "feature-server /health did not become ready" + docker logs feature-server-smoke-${{ matrix.arch }} || true + exit 1 + - name: Cleanup + if: always() + env: + ARCH: ${{ matrix.arch }} + run: | + docker stop feature-server-smoke-${ARCH} || true + feature-server-dev-docker-smoke: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + arch: [amd64, arm64] + steps: + - uses: actions/checkout@v4 + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + with: + install: true + - name: Build feature-server-dev image + env: + ARCH: ${{ matrix.arch }} + run: | + make build-feature-server-dev-docker REGISTRY=feastdev VERSION=smoke-${ARCH} DOCKER_PLATFORMS=linux/${ARCH} + - name: Run container + env: + ARCH: ${{ matrix.arch }} + run: | + docker run -d --rm \ + --name feature-server-dev-smoke-${ARCH} \ + --platform linux/${ARCH} \ + -p 6566:6566 \ + -v "${GITHUB_WORKSPACE}/infra/scripts/feature_server_docker_smoke.py:/smoke.py:ro" \ + feastdev/feature-server:smoke-${ARCH} \ + python /smoke.py + - name: Wait for /health + run: | + for i in $(seq 1 60); do + if curl -fsS http://localhost:6566/health >/dev/null; then + exit 0 + fi + sleep 2 + done + echo "feature-server /health did not become ready" + docker logs feature-server-dev-smoke-${{ matrix.arch }} || true + exit 1 + - name: Cleanup + if: always() + env: + ARCH: ${{ matrix.arch }} + run: | + docker stop 
feature-server-dev-smoke-${ARCH} || true diff --git a/.github/workflows/lint_pr.yml b/.github/workflows/lint_pr.yml index 33fafdcd23d..8cbfc78e21c 100644 --- a/.github/workflows/lint_pr.yml +++ b/.github/workflows/lint_pr.yml @@ -7,6 +7,10 @@ on: - edited - synchronize +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: validate-title: if: @@ -14,13 +18,13 @@ jobs: name: Validate PR title runs-on: ubuntu-latest steps: - - uses: amannn/action-semantic-pull-request@v5 + - uses: actions/checkout@v4 with: - # Must use uppercase - subjectPattern: ^(?=[A-Z]).+$ - subjectPatternError: | - The subject "{subject}" found in the pull request title "{title}" - didn't match the configured pattern. Please ensure that the subject - starts with an uppercase character. + sparse-checkout: .commitlintrc.yaml + sparse-checkout-cone-mode: false + - name: Lint PR title with commitlint env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PR_TITLE: ${{ github.event.pull_request.title }} + run: | + npm install --no-save @commitlint/cli @commitlint/config-conventional + echo "$PR_TITLE" | npx commitlint diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml index 71455aff507..19e13d5f8e9 100644 --- a/.github/workflows/linter.yml +++ b/.github/workflows/linter.yml @@ -2,6 +2,10 @@ name: linter on: [push, pull_request] +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: lint-python: runs-on: [ubuntu-latest] @@ -22,6 +26,8 @@ jobs: - name: Install dependencies run: | make install-python-dependencies-ci + - name: Run pre-commit checks + uses: pre-commit/action@v3.0.1 - name: Cache MyPy uses: actions/cache@v4 with: diff --git a/.github/workflows/master_only.yml b/.github/workflows/master_only.yml index 324c4017b5e..83355979546 100644 --- a/.github/workflows/master_only.yml +++ b/.github/workflows/master_only.yml @@ 
-89,7 +89,19 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - component: [ feature-server-dev, feature-transformation-server, feast-operator ] + include: + - component: feature-server-dev + target: feature-server-dev + build_args: DOCKER_PUSH=true DOCKER_PLATFORMS=linux/amd64,linux/arm64 + push_mode: imagetools + - component: feature-transformation-server + target: feature-transformation-server + build_args: "" + push_mode: all_tags + - component: feast-operator + target: feast-operator + build_args: "" + push_mode: all_tags env: REGISTRY: quay.io/feastdev-ci steps: @@ -117,14 +129,11 @@ jobs: username: ${{ secrets.QUAYIO_CI_USERNAME }} password: ${{ secrets.QUAYIO_CI_TOKEN }} - name: Build image - run: | - make build-${{ matrix.component }}-docker REGISTRY=${REGISTRY} VERSION=${GITHUB_SHA} + run: make build-${{ matrix.target }}-docker REGISTRY=${REGISTRY} VERSION=${GITHUB_SHA} ${{ matrix.build_args }} - name: Push image run: | - if [[ "${{ matrix.component }}" == "feature-server-dev" ]]; then - docker tag ${REGISTRY}/feature-server:${GITHUB_SHA} ${REGISTRY}/feature-server:develop - docker push ${REGISTRY}/feature-server --all-tags + if [[ "${{ matrix.push_mode }}" == "imagetools" ]]; then + docker buildx imagetools create -t ${REGISTRY}/feature-server:develop ${REGISTRY}/feature-server:${GITHUB_SHA} else - docker tag ${REGISTRY}/${{ matrix.component }}:${GITHUB_SHA} ${REGISTRY}/${{ matrix.component }}:develop - docker push ${REGISTRY}/${{ matrix.component }} --all-tags + docker tag ${REGISTRY}/${{ matrix.target }}:${GITHUB_SHA} ${REGISTRY}/${{ matrix.target }}:develop && docker push ${REGISTRY}/${{ matrix.target }} --all-tags fi diff --git a/.github/workflows/operator-e2e-integration-tests.yml b/.github/workflows/operator-e2e-integration-tests.yml index 41954daff68..bdaa3240702 100644 --- a/.github/workflows/operator-e2e-integration-tests.yml +++ b/.github/workflows/operator-e2e-integration-tests.yml @@ -13,6 +13,10 @@ on: paths: - 'infra/**' +concurrency: + 
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: operator-e2e-tests: timeout-minutes: 40 diff --git a/.github/workflows/operator_pr.yml b/.github/workflows/operator_pr.yml index aefdcbdcbb4..8f08c91b890 100644 --- a/.github/workflows/operator_pr.yml +++ b/.github/workflows/operator_pr.yml @@ -1,6 +1,11 @@ name: operator-pr on: [pull_request] + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: operator-test: runs-on: ubuntu-latest diff --git a/.github/workflows/pr_duckdb_integration_tests.yml b/.github/workflows/pr_duckdb_integration_tests.yml new file mode 100644 index 00000000000..d099d7fa582 --- /dev/null +++ b/.github/workflows/pr_duckdb_integration_tests.yml @@ -0,0 +1,35 @@ +name: pr-duckdb-integration-tests + +on: + pull_request: + types: + - opened + - synchronize + - labeled + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + integration-test-duckdb-offline: + if: + ((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || + (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) && + github.event.pull_request.base.repo.full_name == 'feast-dev/feast' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + repository: ${{ github.event.repository.full_name }} + ref: ${{ github.ref }} + token: ${{ secrets.GITHUB_TOKEN }} + submodules: recursive + - name: Setup pixi + uses: prefix-dev/setup-pixi@v0.8.1 + with: + pixi-version: v0.63.1 + environments: duckdb-tests + cache: true + - name: Run DuckDB offline store integration tests + 
run: make test-python-universal-duckdb-offline diff --git a/.github/workflows/pr_integration_tests.yml b/.github/workflows/pr_integration_tests.yml index b8fe5747d66..d65e9807df1 100644 --- a/.github/workflows/pr_integration_tests.yml +++ b/.github/workflows/pr_integration_tests.yml @@ -7,10 +7,10 @@ on: - synchronize - labeled -# concurrency is currently broken, see details https://github.com/actions/runner/issues/1532 -#concurrency: -# group: pr-integration-tests-${{ github.event.pull_request.number }} -# cancel-in-progress: true +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + permissions: actions: write pull-requests: read diff --git a/.github/workflows/pr_local_integration_tests.yml b/.github/workflows/pr_local_integration_tests.yml index 143cfe40973..b607c2c6b1e 100644 --- a/.github/workflows/pr_local_integration_tests.yml +++ b/.github/workflows/pr_local_integration_tests.yml @@ -8,6 +8,10 @@ on: - synchronize - labeled +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: integration-test-python-local: if: diff --git a/.github/workflows/pr_ray_integration_tests.yml b/.github/workflows/pr_ray_integration_tests.yml new file mode 100644 index 00000000000..4d54c8e34ed --- /dev/null +++ b/.github/workflows/pr_ray_integration_tests.yml @@ -0,0 +1,35 @@ +name: pr-ray-integration-tests + +on: + pull_request: + types: + - opened + - synchronize + - labeled + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + integration-test-ray: + if: + ((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || + (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || 
contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) && + github.event.pull_request.base.repo.full_name == 'feast-dev/feast' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + repository: ${{ github.event.repository.full_name }} + ref: ${{ github.ref }} + token: ${{ secrets.GITHUB_TOKEN }} + submodules: recursive + - name: Setup pixi + uses: prefix-dev/setup-pixi@v0.8.1 + with: + pixi-version: v0.63.1 + environments: ray-tests + cache: true + - name: Run Ray integration tests (offline store + compute engine) + run: make test-python-ray-integration diff --git a/.github/workflows/pr_registration_integration_tests.yml b/.github/workflows/pr_registration_integration_tests.yml new file mode 100644 index 00000000000..4085a320057 --- /dev/null +++ b/.github/workflows/pr_registration_integration_tests.yml @@ -0,0 +1,90 @@ +name: pr-registration-integration-tests + +on: + pull_request_target: + types: + - opened + - synchronize + - labeled + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +permissions: + actions: write + pull-requests: read + +jobs: + integration-test-registration-local: + if: + ((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || + (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) && + github.repository == 'feast-dev/feast' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + ref: refs/pull/${{ github.event.pull_request.number }}/merge + submodules: recursive + persist-credentials: false + - name: Setup pixi + uses: prefix-dev/setup-pixi@v0.8.1 + with: + pixi-version: v0.63.1 + 
environments: registration-tests + cache: true + - name: Run registration integration tests (local) + run: make test-python-registration + + integration-test-registration-ci: + if: + ((github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || + (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm')))) && + github.repository == 'feast-dev/feast' + runs-on: ubuntu-latest + services: + redis: + image: redis + ports: + - 6379:6379 + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + steps: + - uses: actions/checkout@v4 + with: + ref: refs/pull/${{ github.event.pull_request.number }}/merge + submodules: recursive + persist-credentials: false + - name: Authenticate to Google Cloud + uses: 'google-github-actions/auth@v1' + with: + credentials_json: '${{ secrets.GCP_SA_KEY }}' + - name: Set up gcloud SDK + uses: google-github-actions/setup-gcloud@v1 + with: + project_id: ${{ secrets.GCP_PROJECT_ID }} + - name: Set up AWS SDK + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-west-2 + - name: Cache Hadoop tarball + uses: actions/cache@v4 + with: + path: ~/hadoop-3.4.2.tar.gz + key: hadoop-3.4.2 + - name: Install Hadoop dependencies + run: make install-hadoop-dependencies-ci + - name: Setup pixi + uses: prefix-dev/setup-pixi@v0.8.1 + with: + pixi-version: v0.63.1 + environments: registration-tests + cache: true + - name: Run registration integration tests (CI) + run: make test-python-registration-ci diff --git a/.github/workflows/pr_remote_rbac_integration_tests.yml 
b/.github/workflows/pr_remote_rbac_integration_tests.yml index 56c59a544c9..91cf0f4c564 100644 --- a/.github/workflows/pr_remote_rbac_integration_tests.yml +++ b/.github/workflows/pr_remote_rbac_integration_tests.yml @@ -8,6 +8,10 @@ on: - synchronize - labeled +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: remote-rbac-integration-tests-python: if: diff --git a/.github/workflows/pr_website_build.yml b/.github/workflows/pr_website_build.yml new file mode 100644 index 00000000000..926f1fca0f9 --- /dev/null +++ b/.github/workflows/pr_website_build.yml @@ -0,0 +1,32 @@ +name: Website Build Check + +on: + pull_request: + paths: + - 'infra/website/**' + +concurrency: + group: "pr-website-${{ github.event.pull_request.number }}" + cancel-in-progress: true + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + cache: 'npm' + cache-dependency-path: infra/website/package-lock.json + + - name: Install dependencies + working-directory: infra/website + run: npm ci + + - name: Build site + working-directory: infra/website + run: npm run build diff --git a/.github/workflows/publish_images.yml b/.github/workflows/publish_images.yml index 3f464367a34..63055981a90 100644 --- a/.github/workflows/publish_images.yml +++ b/.github/workflows/publish_images.yml @@ -69,17 +69,29 @@ jobs: env: VERSION_WITHOUT_PREFIX: ${{ steps.get-version.outputs.version_without_prefix }} run: | - make build-${{ matrix.component }}-docker REGISTRY=${REGISTRY} VERSION=${VERSION_WITHOUT_PREFIX} + if [ "${{ matrix.component }}" = "feature-server" ]; then + make build-${{ matrix.component }}-docker REGISTRY=${REGISTRY} VERSION=${VERSION_WITHOUT_PREFIX} DOCKER_PUSH=true DOCKER_PLATFORMS=linux/amd64,linux/arm64 + else + make build-${{ matrix.component }}-docker REGISTRY=${REGISTRY} 
VERSION=${VERSION_WITHOUT_PREFIX} + fi - name: Push versioned images env: VERSION_WITHOUT_PREFIX: ${{ steps.get-version.outputs.version_without_prefix }} HIGHEST_SEMVER_TAG: ${{ steps.get-version.outputs.highest_semver_tag }} run: | - make push-${{ matrix.component }}-docker REGISTRY=${REGISTRY} VERSION=${VERSION_WITHOUT_PREFIX} + if [ "${{ matrix.component }}" = "feature-server" ]; then + echo "feature-server image pushed via buildx during build step" + else + make push-${{ matrix.component }}-docker REGISTRY=${REGISTRY} VERSION=${VERSION_WITHOUT_PREFIX} + fi echo "Only push to latest tag if tag is the highest semver version $HIGHEST_SEMVER_TAG" if [ "${VERSION_WITHOUT_PREFIX}" = "${HIGHEST_SEMVER_TAG:1}" ] then - docker tag ${REGISTRY}/${{ matrix.component }}:${VERSION_WITHOUT_PREFIX} ${REGISTRY}/${{ matrix.component }}:latest - docker push ${REGISTRY}/${{ matrix.component }}:latest + if [ "${{ matrix.component }}" = "feature-server" ]; then + docker buildx imagetools create -t ${REGISTRY}/feature-server:latest ${REGISTRY}/feature-server:${VERSION_WITHOUT_PREFIX} + else + docker tag ${REGISTRY}/${{ matrix.component }}:${VERSION_WITHOUT_PREFIX} ${REGISTRY}/${{ matrix.component }}:latest + docker push ${REGISTRY}/${{ matrix.component }}:latest + fi fi diff --git a/.github/workflows/registry-rest-api-tests.yml b/.github/workflows/registry-rest-api-tests.yml index 4f9dae909bc..6175cab3d8f 100644 --- a/.github/workflows/registry-rest-api-tests.yml +++ b/.github/workflows/registry-rest-api-tests.yml @@ -11,6 +11,10 @@ on: - synchronize - labeled +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: registry-rest-api-tests: timeout-minutes: 30 @@ -145,7 +149,7 @@ jobs: - name: Setup and Run Registry Rest API tests run: | echo "Running Registry REST API tests..." 
- uv run pytest sdk/python/tests/integration/registration/rest_api/test_registry_rest_api.py --integration -s + uv run pytest -c sdk/python/pytest.ini sdk/python/tests/integration/rest_api/test_registry_rest_api.py --integration -s --timeout=600 - name: Clean up docker images if: always() diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml new file mode 100644 index 00000000000..c12998fce57 --- /dev/null +++ b/.github/workflows/security.yml @@ -0,0 +1,71 @@ +name: "Security" + +on: + push: + branches: [master] + pull_request: + branches: [master] + schedule: + - cron: "0 6 * * 1" + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + codeql: + name: CodeQL Analysis + runs-on: ubuntu-latest + timeout-minutes: 30 + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: ["python", "javascript-typescript"] + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + + - name: Autobuild + uses: github/codeql-action/autobuild@v3 + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{ matrix.language }}" + + safety: + name: Dependency Security Scan + runs-on: ubuntu-latest + permissions: + contents: read + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + cache: "pip" + cache-dependency-path: pyproject.toml + + - name: Install project dependencies and safety + run: | + pip install safety + pip install -e ".[ci]" || pip install -e . 
+ + - name: Run safety scan + continue-on-error: true + run: safety scan --output json diff --git a/.github/workflows/smoke_tests.yml b/.github/workflows/smoke_tests.yml index 5f60dda4202..b183f6f47e9 100644 --- a/.github/workflows/smoke_tests.yml +++ b/.github/workflows/smoke_tests.yml @@ -3,6 +3,10 @@ name: smoke-tests on: pull_request: +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: smoke-test-python: runs-on: ${{ matrix.os }} diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index ae32a992c3b..1311cd12635 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -6,6 +6,10 @@ on: branches: - master +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: unit-test-python: runs-on: ${{ matrix.os }} @@ -80,6 +84,8 @@ jobs: with: node-version-file: './ui/.nvmrc' registry-url: 'https://registry.npmjs.org' + cache: 'yarn' + cache-dependency-path: 'ui/yarn.lock' - name: Install yarn dependencies working-directory: ./ui run: yarn install diff --git a/.gitignore b/.gitignore index 64f5056279f..99d435b6e9c 100644 --- a/.gitignore +++ b/.gitignore @@ -239,3 +239,28 @@ infra/website/dist/ # offline builds offline_build/ + +# Java compiled +*.class +*.jar +*.war +*.ear + +# Go binaries +*.exe +*.exe~ +*.out + +# Editor swap/backup files +*.swp +*.swo +*~ +\#*\# + +# OS files +Thumbs.db +ehthumbs.db +Desktop.ini + +# AgentReady reports +.agentready/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5d7a63c7f57..2ce95f07707 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,5 +1,9 @@ default_stages: [commit] +# Generated protobuf files should not be linted or formatted. +# They are produced by `make compile-protos-python` and must be excluded globally. 
+exclude: '^sdk/python/feast/protos/' + repos: - repo: local hooks: @@ -10,6 +14,7 @@ repos: stages: [commit] language: system types: [python] + exclude: '_pb2\.py$' entry: bash -c 'uv run ruff check --fix "$@" && uv run ruff format "$@"' -- pass_filenames: true @@ -20,6 +25,7 @@ repos: stages: [commit] language: system types: [python] + exclude: '_pb2\.py$' entry: bash -c 'uv run ruff check "$@" && uv run ruff format --check "$@"' -- pass_filenames: true @@ -29,5 +35,25 @@ repos: stages: [commit] language: system files: ^infra/templates/|\.jinja2$|^docs/roadmap\.md$ - entry: make build-templates + entry: uv run make build-templates pass_filenames: false + + - repo: https://github.com/Yelp/detect-secrets + rev: v1.5.0 + hooks: + - id: detect-secrets + args: ['--baseline', '.secrets.baseline'] + exclude: | + (?x)^( + .*\.lock| + .*requirements.*\.txt| + .*\.svg| + .*\.html + )$ + + - repo: https://github.com/alessandrojcm/commitlint-pre-commit-hook + rev: v9.18.0 + hooks: + - id: commitlint + stages: [commit-msg] + additional_dependencies: ["@commitlint/config-conventional"] diff --git a/.secrets.baseline b/.secrets.baseline new file mode 100644 index 00000000000..06e2cefc508 --- /dev/null +++ b/.secrets.baseline @@ -0,0 +1,1543 @@ +{ + "version": "1.5.0", + "plugins_used": [ + { + "name": "ArtifactoryDetector" + }, + { + "name": "AWSKeyDetector" + }, + { + "name": "AzureStorageKeyDetector" + }, + { + "name": "Base64HighEntropyString", + "limit": 4.5 + }, + { + "name": "BasicAuthDetector" + }, + { + "name": "CloudantDetector" + }, + { + "name": "DiscordBotTokenDetector" + }, + { + "name": "GitHubTokenDetector" + }, + { + "name": "GitLabTokenDetector" + }, + { + "name": "HexHighEntropyString", + "limit": 3.0 + }, + { + "name": "IbmCloudIamDetector" + }, + { + "name": "IbmCosHmacDetector" + }, + { + "name": "IPPublicDetector" + }, + { + "name": "JwtTokenDetector" + }, + { + "name": "KeywordDetector", + "keyword_exclude": "" + }, + { + "name": "MailchimpDetector" + }, 
+ { + "name": "NpmDetector" + }, + { + "name": "OpenAIDetector" + }, + { + "name": "PrivateKeyDetector" + }, + { + "name": "PypiTokenDetector" + }, + { + "name": "SendGridDetector" + }, + { + "name": "SlackDetector" + }, + { + "name": "SoftlayerDetector" + }, + { + "name": "SquareOAuthDetector" + }, + { + "name": "StripeDetector" + }, + { + "name": "TelegramBotTokenDetector" + }, + { + "name": "TwilioKeyDetector" + } + ], + "filters_used": [ + { + "path": "detect_secrets.filters.allowlist.is_line_allowlisted" + }, + { + "path": "detect_secrets.filters.common.is_baseline_file", + "filename": ".secrets.baseline" + }, + { + "path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies", + "min_level": 2 + }, + { + "path": "detect_secrets.filters.heuristic.is_indirect_reference" + }, + { + "path": "detect_secrets.filters.heuristic.is_likely_id_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_lock_file" + }, + { + "path": "detect_secrets.filters.heuristic.is_not_alphanumeric_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_potential_uuid" + }, + { + "path": "detect_secrets.filters.heuristic.is_prefixed_with_dollar_sign" + }, + { + "path": "detect_secrets.filters.heuristic.is_sequential_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_swagger_file" + }, + { + "path": "detect_secrets.filters.heuristic.is_templated_secret" + }, + { + "path": "detect_secrets.filters.regex.should_exclude_file", + "pattern": [ + ".*\\.lock", + ".*requirements.*\\.txt", + ".*\\.svg", + ".*\\.html" + ] + } + ], + "results": { + ".github/workflows/publish.yml": [ + { + "type": "Secret Keyword", + "filename": ".github/workflows/publish.yml", + "hashed_secret": "3e26d6750975d678acb8fa35a0f69237881576b0", + "is_verified": false, + "line_number": 43 + } + ], + ".github/workflows/publish_python_sdk.yml": [ + { + "type": "Secret Keyword", + "filename": ".github/workflows/publish_python_sdk.yml", + "hashed_secret": 
"3e26d6750975d678acb8fa35a0f69237881576b0", + "is_verified": false, + "line_number": 31 + } + ], + ".prow.yaml": [ + { + "type": "Secret Keyword", + "filename": ".prow.yaml", + "hashed_secret": "d808db46d15adec18918b6b204cc63b9616e7e0d", + "is_verified": false, + "line_number": 22 + }, + { + "type": "Secret Keyword", + "filename": ".prow.yaml", + "hashed_secret": "a7089f560bb0f70a89bc933492c7f4bba07d65ca", + "is_verified": false, + "line_number": 25 + } + ], + ".prow/config.yaml": [ + { + "type": "Secret Keyword", + "filename": ".prow/config.yaml", + "hashed_secret": "491218ead8b39df641fc805e64d41aa1c2206d64", + "is_verified": false, + "line_number": 24 + } + ], + "docs/reference/online-stores/milvus.md": [ + { + "type": "Secret Keyword", + "filename": "docs/reference/online-stores/milvus.md", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 33 + } + ], + "docs/reference/registries/sql.md": [ + { + "type": "Basic Auth Credentials", + "filename": "docs/reference/registries/sql.md", + "hashed_secret": "08cd923367890009657eab812753379bdb321eeb", + "is_verified": false, + "line_number": 25 + } + ], + "docs/tutorials/azure/notebooks/part1-load-data.ipynb": [ + { + "type": "Hex High Entropy String", + "filename": "docs/tutorials/azure/notebooks/part1-load-data.ipynb", + "hashed_secret": "edfaafe6a2fb1e41011bdae7f4faacbc1a07858b", + "is_verified": false, + "line_number": 218 + } + ], + "docs/tutorials/azure/notebooks/part2-register-features.ipynb": [ + { + "type": "Hex High Entropy String", + "filename": "docs/tutorials/azure/notebooks/part2-register-features.ipynb", + "hashed_secret": "edfaafe6a2fb1e41011bdae7f4faacbc1a07858b", + "is_verified": false, + "line_number": 264 + } + ], + "docs/tutorials/azure/notebooks/part3-train-and-deploy-with-feast.ipynb": [ + { + "type": "Hex High Entropy String", + "filename": "docs/tutorials/azure/notebooks/part3-train-and-deploy-with-feast.ipynb", + "hashed_secret": 
"edfaafe6a2fb1e41011bdae7f4faacbc1a07858b", + "is_verified": false, + "line_number": 414 + } + ], + "examples/credit-risk-end-to-end/03_Credit_Risk_Model_Training.ipynb": [ + { + "type": "Base64 High Entropy String", + "filename": "examples/credit-risk-end-to-end/03_Credit_Risk_Model_Training.ipynb", + "hashed_secret": "8b65ff4682b64e51aa3f34e6806b4b9beab010b4", + "is_verified": false, + "line_number": 411 + } + ], + "examples/kind-quickstart/01-Install.ipynb": [ + { + "type": "Basic Auth Credentials", + "filename": "examples/kind-quickstart/01-Install.ipynb", + "hashed_secret": "f37a74d82869756054661d6501b29cdfec0fdb38", + "is_verified": false, + "line_number": 427 + }, + { + "type": "Base64 High Entropy String", + "filename": "examples/kind-quickstart/01-Install.ipynb", + "hashed_secret": "913539b37e02fb2c612cf78696e6eb031bbaee1e", + "is_verified": false, + "line_number": 601 + }, + { + "type": "Base64 High Entropy String", + "filename": "examples/kind-quickstart/01-Install.ipynb", + "hashed_secret": "e5594257f7e32e7443046ecdea23b6689958b1c7", + "is_verified": false, + "line_number": 602 + }, + { + "type": "Base64 High Entropy String", + "filename": "examples/kind-quickstart/01-Install.ipynb", + "hashed_secret": "1eb1fd78b7ee9a093c064672c9c02ebc9591790f", + "is_verified": false, + "line_number": 603 + } + ], + "examples/kind-quickstart/postgres/postgres.yaml": [ + { + "type": "Secret Keyword", + "filename": "examples/kind-quickstart/postgres/postgres.yaml", + "hashed_secret": "f37a74d82869756054661d6501b29cdfec0fdb38", + "is_verified": false, + "line_number": 11 + } + ], + "examples/online_store/milvus_tutorial/docker-compose.yml": [ + { + "type": "Secret Keyword", + "filename": "examples/online_store/milvus_tutorial/docker-compose.yml", + "hashed_secret": "bc565f6e909ec7d3c18e2ff5d9eeb2300ff20b7f", + "is_verified": false, + "line_number": 18 + } + ], + "examples/online_store/pgvector_tutorial/docker-compose.yml": [ + { + "type": "Secret Keyword", + "filename": 
"examples/online_store/pgvector_tutorial/docker-compose.yml", + "hashed_secret": "f37a74d82869756054661d6501b29cdfec0fdb38", + "is_verified": false, + "line_number": 9 + } + ], + "examples/online_store/pgvector_tutorial/feature_store.yaml": [ + { + "type": "Secret Keyword", + "filename": "examples/online_store/pgvector_tutorial/feature_store.yaml", + "hashed_secret": "f37a74d82869756054661d6501b29cdfec0fdb38", + "is_verified": false, + "line_number": 11 + } + ], + "examples/operator-postgres-tls-demo/01-Install-postgres-tls-using-helm.ipynb": [ + { + "type": "Basic Auth Credentials", + "filename": "examples/operator-postgres-tls-demo/01-Install-postgres-tls-using-helm.ipynb", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 486 + } + ], + "examples/operator-quickstart/postgres.yaml": [ + { + "type": "Secret Keyword", + "filename": "examples/operator-quickstart/postgres.yaml", + "hashed_secret": "f37a74d82869756054661d6501b29cdfec0fdb38", + "is_verified": false, + "line_number": 9 + } + ], + "examples/operator-rbac-openshift-tls/2-client-rbac-test-pod.ipynb": [ + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/2-client-rbac-test-pod.ipynb", + "hashed_secret": "2d7d8560beab48b14d329a910063797b48259616", + "is_verified": false, + "line_number": 5 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/2-client-rbac-test-pod.ipynb", + "hashed_secret": "ee47af75912c91cdbd3ae229a39e8fd273c2fc47", + "is_verified": false, + "line_number": 28 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/2-client-rbac-test-pod.ipynb", + "hashed_secret": "eb179d26037d55827f20fa94d73ef2af0ea38044", + "is_verified": false, + "line_number": 38 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/2-client-rbac-test-pod.ipynb", + "hashed_secret": 
"7939e5e059d477a7fe273720a7d983cad46397be", + "is_verified": false, + "line_number": 105 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/2-client-rbac-test-pod.ipynb", + "hashed_secret": "5375027a5b43ba19b197f7d1b0b85fb8f3617def", + "is_verified": false, + "line_number": 115 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/2-client-rbac-test-pod.ipynb", + "hashed_secret": "0ec58a4eff2c6a39d1b713fe59c0e9ea0c3bd950", + "is_verified": false, + "line_number": 188 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/2-client-rbac-test-pod.ipynb", + "hashed_secret": "4f409692a8d7d095ec4ed6d5d2c8a6927ad20c39", + "is_verified": false, + "line_number": 247 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/2-client-rbac-test-pod.ipynb", + "hashed_secret": "68ab8dac5e1b6b1f7e141da7d4ee7ea5ae3a2796", + "is_verified": false, + "line_number": 256 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/2-client-rbac-test-pod.ipynb", + "hashed_secret": "865a891f6f874b2882c9fb5f57f396ad27fae9db", + "is_verified": false, + "line_number": 385 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/2-client-rbac-test-pod.ipynb", + "hashed_secret": "1ee82d648fb715021bf5455317d7fa5469ae9ff1", + "is_verified": false, + "line_number": 425 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/2-client-rbac-test-pod.ipynb", + "hashed_secret": "6a983b549a36c828ca39299db5de5f777bb6be6e", + "is_verified": false, + "line_number": 566 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/2-client-rbac-test-pod.ipynb", + "hashed_secret": "e599b232015c607c83bb08070c5ed31e7dd741cf", + "is_verified": false, + "line_number": 693 + } + ], + 
"examples/operator-rbac-openshift-tls/3-uninstall.ipynb": [ + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/3-uninstall.ipynb", + "hashed_secret": "df9633974c13267c64c755be7db9b7ff8016f832", + "is_verified": false, + "line_number": 6 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/3-uninstall.ipynb", + "hashed_secret": "89229ddd1c50402fcf16f541f047804faedb7490", + "is_verified": false, + "line_number": 15 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/3-uninstall.ipynb", + "hashed_secret": "06261d3eb713d0c539563ba1d753b6e8fdc232a0", + "is_verified": false, + "line_number": 24 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/3-uninstall.ipynb", + "hashed_secret": "d91a6c8035c548960d765c7d0b8fa8a9cbe0357a", + "is_verified": false, + "line_number": 69 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/3-uninstall.ipynb", + "hashed_secret": "38c2dd85d9f8de934794c425a74115d5118bee3c", + "is_verified": false, + "line_number": 113 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/3-uninstall.ipynb", + "hashed_secret": "bcf542b2d4cdbfe67ea4eb547ebda96139f04e8a", + "is_verified": false, + "line_number": 141 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac-openshift-tls/3-uninstall.ipynb", + "hashed_secret": "9cd55f9406a268941490aea72f6543cf847579c3", + "is_verified": false, + "line_number": 178 + } + ], + "examples/operator-rbac/04-uninstall.ipynb": [ + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/04-uninstall.ipynb", + "hashed_secret": "df9633974c13267c64c755be7db9b7ff8016f832", + "is_verified": false, + "line_number": 6 + }, + { + "type": "Hex High Entropy String", + "filename": 
"examples/operator-rbac/04-uninstall.ipynb", + "hashed_secret": "89229ddd1c50402fcf16f541f047804faedb7490", + "is_verified": false, + "line_number": 15 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/04-uninstall.ipynb", + "hashed_secret": "06261d3eb713d0c539563ba1d753b6e8fdc232a0", + "is_verified": false, + "line_number": 24 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/04-uninstall.ipynb", + "hashed_secret": "d91a6c8035c548960d765c7d0b8fa8a9cbe0357a", + "is_verified": false, + "line_number": 69 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/04-uninstall.ipynb", + "hashed_secret": "38c2dd85d9f8de934794c425a74115d5118bee3c", + "is_verified": false, + "line_number": 113 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/04-uninstall.ipynb", + "hashed_secret": "bcf542b2d4cdbfe67ea4eb547ebda96139f04e8a", + "is_verified": false, + "line_number": 141 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/04-uninstall.ipynb", + "hashed_secret": "9cd55f9406a268941490aea72f6543cf847579c3", + "is_verified": false, + "line_number": 176 + } + ], + "examples/operator-rbac/2-client-rbac-test-pod.ipynb": [ + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/2-client-rbac-test-pod.ipynb", + "hashed_secret": "2d7d8560beab48b14d329a910063797b48259616", + "is_verified": false, + "line_number": 5 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/2-client-rbac-test-pod.ipynb", + "hashed_secret": "ee47af75912c91cdbd3ae229a39e8fd273c2fc47", + "is_verified": false, + "line_number": 28 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/2-client-rbac-test-pod.ipynb", + "hashed_secret": "eb179d26037d55827f20fa94d73ef2af0ea38044", + "is_verified": false, + "line_number": 38 + }, + { + "type": "Hex High Entropy String", + 
"filename": "examples/operator-rbac/2-client-rbac-test-pod.ipynb", + "hashed_secret": "7939e5e059d477a7fe273720a7d983cad46397be", + "is_verified": false, + "line_number": 75 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/2-client-rbac-test-pod.ipynb", + "hashed_secret": "5375027a5b43ba19b197f7d1b0b85fb8f3617def", + "is_verified": false, + "line_number": 85 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/2-client-rbac-test-pod.ipynb", + "hashed_secret": "0ec58a4eff2c6a39d1b713fe59c0e9ea0c3bd950", + "is_verified": false, + "line_number": 161 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/2-client-rbac-test-pod.ipynb", + "hashed_secret": "4f409692a8d7d095ec4ed6d5d2c8a6927ad20c39", + "is_verified": false, + "line_number": 215 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/2-client-rbac-test-pod.ipynb", + "hashed_secret": "68ab8dac5e1b6b1f7e141da7d4ee7ea5ae3a2796", + "is_verified": false, + "line_number": 224 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/2-client-rbac-test-pod.ipynb", + "hashed_secret": "865a891f6f874b2882c9fb5f57f396ad27fae9db", + "is_verified": false, + "line_number": 354 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/2-client-rbac-test-pod.ipynb", + "hashed_secret": "1ee82d648fb715021bf5455317d7fa5469ae9ff1", + "is_verified": false, + "line_number": 394 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/2-client-rbac-test-pod.ipynb", + "hashed_secret": "6a983b549a36c828ca39299db5de5f777bb6be6e", + "is_verified": false, + "line_number": 535 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/2-client-rbac-test-pod.ipynb", + "hashed_secret": "e599b232015c607c83bb08070c5ed31e7dd741cf", + "is_verified": false, + "line_number": 663 + } + ], + 
"examples/operator-rbac/3-client-rbac-test-local.ipynb": [ + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "2d7d8560beab48b14d329a910063797b48259616", + "is_verified": false, + "line_number": 5 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "58eaf30ca7fd613165d204676da73f39647567dc", + "is_verified": false, + "line_number": 33 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "ee47af75912c91cdbd3ae229a39e8fd273c2fc47", + "is_verified": false, + "line_number": 46 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "7939e5e059d477a7fe273720a7d983cad46397be", + "is_verified": false, + "line_number": 54 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "eb179d26037d55827f20fa94d73ef2af0ea38044", + "is_verified": false, + "line_number": 63 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "1657e73b91c25ce399137049e17f4af8745994b5", + "is_verified": false, + "line_number": 113 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "b978179c80b2513e0ef0572f56816d13023afd51", + "is_verified": false, + "line_number": 121 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "b2705d9f2696f288dd27e4708e48897c35f0d3f4", + "is_verified": false, + "line_number": 130 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": 
"7768e474b17462781e39951d9a61bb8c1a41d9c5", + "is_verified": false, + "line_number": 181 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "d02182fd7b55de627f57b4ba74f3ebd44fdc1dac", + "is_verified": false, + "line_number": 190 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "ac672f8aab85b6b4e47ea21a36689ffe74ea7063", + "is_verified": false, + "line_number": 234 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "f27cde282450d04218b5130c34f4e1a0e77b309b", + "is_verified": false, + "line_number": 411 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "9e4ec967548856ede2f30649ab8b47b637be9b87", + "is_verified": false, + "line_number": 438 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "a4de4b4f51529db61d93d343098eea0c5b8b5329", + "is_verified": false, + "line_number": 463 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "0ec58a4eff2c6a39d1b713fe59c0e9ea0c3bd950", + "is_verified": false, + "line_number": 472 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "865a891f6f874b2882c9fb5f57f396ad27fae9db", + "is_verified": false, + "line_number": 616 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "e6d7f2653266d797618dfff88a69c40b76b58bda", + "is_verified": false, + "line_number": 633 + }, + { + "type": "Hex High Entropy String", + "filename": 
"examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "1ee82d648fb715021bf5455317d7fa5469ae9ff1", + "is_verified": false, + "line_number": 660 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "f254ec1d20b970ec7611662c7aae401ff9123e55", + "is_verified": false, + "line_number": 765 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "6a983b549a36c828ca39299db5de5f777bb6be6e", + "is_verified": false, + "line_number": 792 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "fcae0645e9aeb9039b9276c0c676b24d5ed00673", + "is_verified": false, + "line_number": 926 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "e6bf87703237fdddf18f601649f1295b1f399fa0", + "is_verified": false, + "line_number": 938 + }, + { + "type": "Hex High Entropy String", + "filename": "examples/operator-rbac/3-client-rbac-test-local.ipynb", + "hashed_secret": "e599b232015c607c83bb08070c5ed31e7dd741cf", + "is_verified": false, + "line_number": 973 + } + ], + "examples/python-helm-demo/minio-dev.yaml": [ + { + "type": "Secret Keyword", + "filename": "examples/python-helm-demo/minio-dev.yaml", + "hashed_secret": "24019c1ce9c3dd5759557b505c3774d325c5cf57", + "is_verified": false, + "line_number": 22 + } + ], + "examples/python-helm-demo/minio.env": [ + { + "type": "Secret Keyword", + "filename": "examples/python-helm-demo/minio.env", + "hashed_secret": "24019c1ce9c3dd5759557b505c3774d325c5cf57", + "is_verified": false, + "line_number": 6 + } + ], + "examples/quickstart/quickstart.ipynb": [ + { + "type": "Hex High Entropy String", + "filename": "examples/quickstart/quickstart.ipynb", + "hashed_secret": "5bdef5b5c1946217c7f6acac02f5acd01ada6c85", + 
"is_verified": false, + "line_number": 1096 + } + ], + "examples/rag-docling/docling-demo.ipynb": [ + { + "type": "Base64 High Entropy String", + "filename": "examples/rag-docling/docling-demo.ipynb", + "hashed_secret": "1fc6aabe5719a3ff97003eef300767fec36d0fc0", + "is_verified": false, + "line_number": 372 + } + ], + "examples/rbac-local/01.2-setup-keycloak.ipynb": [ + { + "type": "Secret Keyword", + "filename": "examples/rbac-local/01.2-setup-keycloak.ipynb", + "hashed_secret": "b02dff0ec9d24823e77c27c281a852247896b86d", + "is_verified": false, + "line_number": 377 + }, + { + "type": "Secret Keyword", + "filename": "examples/rbac-local/01.2-setup-keycloak.ipynb", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 378 + } + ], + "examples/rbac-local/01.3-setup-feast.ipynb": [ + { + "type": "Secret Keyword", + "filename": "examples/rbac-local/01.3-setup-feast.ipynb", + "hashed_secret": "b02dff0ec9d24823e77c27c281a852247896b86d", + "is_verified": false, + "line_number": 54 + }, + { + "type": "Secret Keyword", + "filename": "examples/rbac-local/01.3-setup-feast.ipynb", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 55 + } + ], + "examples/rbac-remote/client/oidc/feature_repo/feature_store.yaml": [ + { + "type": "Secret Keyword", + "filename": "examples/rbac-remote/client/oidc/feature_repo/feature_store.yaml", + "hashed_secret": "98cf57eaf68b72744adad4dacb7060128d8dbed5", + "is_verified": false, + "line_number": 16 + } + ], + "examples/rbac-remote/server/feature_repo/feature_store.yaml": [ + { + "type": "Basic Auth Credentials", + "filename": "examples/rbac-remote/server/feature_repo/feature_store.yaml", + "hashed_secret": "f37a74d82869756054661d6501b29cdfec0fdb38", + "is_verified": false, + "line_number": 5 + }, + { + "type": "Secret Keyword", + "filename": "examples/rbac-remote/server/feature_repo/feature_store.yaml", + "hashed_secret": 
"f37a74d82869756054661d6501b29cdfec0fdb38", + "is_verified": false, + "line_number": 17 + } + ], + "examples/rbac-remote/server/k8s/feature_store_offline.yaml": [ + { + "type": "Secret Keyword", + "filename": "examples/rbac-remote/server/k8s/feature_store_offline.yaml", + "hashed_secret": "f37a74d82869756054661d6501b29cdfec0fdb38", + "is_verified": false, + "line_number": 13 + } + ], + "examples/rbac-remote/server/k8s/feature_store_online.yaml": [ + { + "type": "Secret Keyword", + "filename": "examples/rbac-remote/server/k8s/feature_store_online.yaml", + "hashed_secret": "f37a74d82869756054661d6501b29cdfec0fdb38", + "is_verified": false, + "line_number": 13 + } + ], + "examples/rbac-remote/server/k8s/feature_store_registry.yaml": [ + { + "type": "Basic Auth Credentials", + "filename": "examples/rbac-remote/server/k8s/feature_store_registry.yaml", + "hashed_secret": "f37a74d82869756054661d6501b29cdfec0fdb38", + "is_verified": false, + "line_number": 5 + } + ], + "examples/rbac-remote/server/oidc/feature_store_offline.yaml": [ + { + "type": "Secret Keyword", + "filename": "examples/rbac-remote/server/oidc/feature_store_offline.yaml", + "hashed_secret": "f37a74d82869756054661d6501b29cdfec0fdb38", + "is_verified": false, + "line_number": 13 + } + ], + "examples/rbac-remote/server/oidc/feature_store_online.yaml": [ + { + "type": "Secret Keyword", + "filename": "examples/rbac-remote/server/oidc/feature_store_online.yaml", + "hashed_secret": "f37a74d82869756054661d6501b29cdfec0fdb38", + "is_verified": false, + "line_number": 13 + } + ], + "examples/rbac-remote/server/oidc/feature_store_registry.yaml": [ + { + "type": "Basic Auth Credentials", + "filename": "examples/rbac-remote/server/oidc/feature_store_registry.yaml", + "hashed_secret": "f37a74d82869756054661d6501b29cdfec0fdb38", + "is_verified": false, + "line_number": 5 + } + ], + "go/internal/feast/onlinestore/redisonlinestore_test.go": [ + { + "type": "Secret Keyword", + "filename": 
"go/internal/feast/onlinestore/redisonlinestore_test.go", + "hashed_secret": "e5e9fa1ba31ecd1ae84f75caaa474f3a663f05f4", + "is_verified": false, + "line_number": 31 + } + ], + "go/internal/feast/registry/mysql_registry_store.go": [ + { + "type": "Secret Keyword", + "filename": "go/internal/feast/registry/mysql_registry_store.go", + "hashed_secret": "37fa265330ad83eaa879efb1e2db6380896cf639", + "is_verified": false, + "line_number": 326 + } + ], + "go/internal/feast/registry/mysql_registry_store_test.go": [ + { + "type": "Basic Auth Credentials", + "filename": "go/internal/feast/registry/mysql_registry_store_test.go", + "hashed_secret": "9d4e1e23bd5b727046a9e3b4b7db57bd8d6ee684", + "is_verified": false, + "line_number": 122 + } + ], + "infra/feast-operator/api/v1/featurestore_types.go": [ + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/api/v1/featurestore_types.go", + "hashed_secret": "44e17306b837162269a410204daaa5ecee4ec22c", + "is_verified": false, + "line_number": 726 + } + ], + "infra/feast-operator/api/v1/zz_generated.deepcopy.go": [ + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/api/v1/zz_generated.deepcopy.go", + "hashed_secret": "f914fc9324de1bec1ad13dec94a8ea2ddb41fc87", + "is_verified": false, + "line_number": 681 + }, + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/api/v1/zz_generated.deepcopy.go", + "hashed_secret": "44e17306b837162269a410204daaa5ecee4ec22c", + "is_verified": false, + "line_number": 1249 + }, + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/api/v1/zz_generated.deepcopy.go", + "hashed_secret": "c2028031c154bbe86fd69bef740855c74b927dcf", + "is_verified": false, + "line_number": 1254 + } + ], + "infra/feast-operator/api/v1alpha1/featurestore_types.go": [ + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/api/v1alpha1/featurestore_types.go", + "hashed_secret": "44e17306b837162269a410204daaa5ecee4ec22c", + "is_verified": false, + "line_number": 
647 + } + ], + "infra/feast-operator/api/v1alpha1/zz_generated.deepcopy.go": [ + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/api/v1alpha1/zz_generated.deepcopy.go", + "hashed_secret": "f914fc9324de1bec1ad13dec94a8ea2ddb41fc87", + "is_verified": false, + "line_number": 590 + }, + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/api/v1alpha1/zz_generated.deepcopy.go", + "hashed_secret": "44e17306b837162269a410204daaa5ecee4ec22c", + "is_verified": false, + "line_number": 1098 + }, + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/api/v1alpha1/zz_generated.deepcopy.go", + "hashed_secret": "c2028031c154bbe86fd69bef740855c74b927dcf", + "is_verified": false, + "line_number": 1103 + } + ], + "infra/feast-operator/config/samples/v1_featurestore_db_persistence.yaml": [ + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/config/samples/v1_featurestore_db_persistence.yaml", + "hashed_secret": "f37a74d82869756054661d6501b29cdfec0fdb38", + "is_verified": false, + "line_number": 11 + } + ], + "infra/feast-operator/config/samples/v1_featurestore_gcs.yaml": [ + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/config/samples/v1_featurestore_gcs.yaml", + "hashed_secret": "9ddad5c511b4c16e7dc08ba29be8d73027ced0e5", + "is_verified": false, + "line_number": 54 + } + ], + "infra/feast-operator/config/samples/v1_featurestore_oidc_auth.yaml": [ + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/config/samples/v1_featurestore_oidc_auth.yaml", + "hashed_secret": "8318df9ecda039deac9868adf1944a29a95c7114", + "is_verified": false, + "line_number": 19 + }, + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/config/samples/v1_featurestore_oidc_auth.yaml", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 21 + } + ], + "infra/feast-operator/config/samples/v1_featurestore_postgres_db_volumes_tls.yaml": [ + { + "type": 
"Secret Keyword", + "filename": "infra/feast-operator/config/samples/v1_featurestore_postgres_db_volumes_tls.yaml", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 10 + }, + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/config/samples/v1_featurestore_postgres_db_volumes_tls.yaml", + "hashed_secret": "604c35d6bd23071a820933ce543d767938167e5d", + "is_verified": false, + "line_number": 46 + } + ], + "infra/feast-operator/config/samples/v1_featurestore_postgres_tls_volumes_ca_env.yaml": [ + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/config/samples/v1_featurestore_postgres_tls_volumes_ca_env.yaml", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 10 + }, + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/config/samples/v1_featurestore_postgres_tls_volumes_ca_env.yaml", + "hashed_secret": "604c35d6bd23071a820933ce543d767938167e5d", + "is_verified": false, + "line_number": 47 + } + ], + "infra/feast-operator/internal/controller/featurestore_controller_db_store_test.go": [ + { + "type": "Basic Auth Credentials", + "filename": "infra/feast-operator/internal/controller/featurestore_controller_db_store_test.go", + "hashed_secret": "08cd923367890009657eab812753379bdb321eeb", + "is_verified": false, + "line_number": 74 + }, + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/internal/controller/featurestore_controller_db_store_test.go", + "hashed_secret": "a57ce131bd944bdf8ba2f2f93e179dc416ed0315", + "is_verified": false, + "line_number": 138 + }, + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/internal/controller/featurestore_controller_db_store_test.go", + "hashed_secret": "fbae5166e43fc0c4c185500e8851baadceb82d64", + "is_verified": false, + "line_number": 274 + }, + { + "type": "Secret Keyword", + "filename": 
"infra/feast-operator/internal/controller/featurestore_controller_db_store_test.go", + "hashed_secret": "06dd8c4ff3698c891b8c8a4cd776e08cb7fd0300", + "is_verified": false, + "line_number": 295 + }, + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/internal/controller/featurestore_controller_db_store_test.go", + "hashed_secret": "bd29b05f76d7125eb94b34447d9cb77cb98cd55f", + "is_verified": false, + "line_number": 679 + }, + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/internal/controller/featurestore_controller_db_store_test.go", + "hashed_secret": "e289c5ff59ba07a51788a092e09bed2023e1aafc", + "is_verified": false, + "line_number": 682 + } + ], + "infra/feast-operator/internal/controller/featurestore_controller_oidc_auth_test.go": [ + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/internal/controller/featurestore_controller_oidc_auth_test.go", + "hashed_secret": "37809278f298dad929cc0981d77a12fba146d847", + "is_verified": false, + "line_number": 50 + }, + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/internal/controller/featurestore_controller_oidc_auth_test.go", + "hashed_secret": "a57ce131bd944bdf8ba2f2f93e179dc416ed0315", + "is_verified": false, + "line_number": 55 + }, + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/internal/controller/featurestore_controller_oidc_auth_test.go", + "hashed_secret": "a1f14fc6f33ba39a8b6d006fefa6fe0fe8d60ae2", + "is_verified": false, + "line_number": 447 + } + ], + "infra/feast-operator/internal/controller/featurestore_controller_test_utils_test.go": [ + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/internal/controller/featurestore_controller_test_utils_test.go", + "hashed_secret": "64f07d985b840007b831a6817b83ae2a3913f5f0", + "is_verified": false, + "line_number": 36 + } + ], + "infra/feast-operator/internal/controller/services/repo_config.go": [ + { + "type": "Secret Keyword", + "filename": 
"infra/feast-operator/internal/controller/services/repo_config.go", + "hashed_secret": "44e17306b837162269a410204daaa5ecee4ec22c", + "is_verified": false, + "line_number": 109 + }, + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/internal/controller/services/repo_config.go", + "hashed_secret": "e2fb052132fd6a07a56af2013e0b62a1f510572c", + "is_verified": false, + "line_number": 148 + } + ], + "infra/feast-operator/internal/controller/services/services.go": [ + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/internal/controller/services/services.go", + "hashed_secret": "36dc326eb15c7bdd8d91a6b87905bcea20b637d1", + "is_verified": false, + "line_number": 176 + } + ], + "infra/feast-operator/internal/controller/services/tls_test.go": [ + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/internal/controller/services/tls_test.go", + "hashed_secret": "42a3daba72ee11b2e6202ef05edf09bd8d5fed6f", + "is_verified": false, + "line_number": 39 + } + ], + "infra/feast-operator/test/testdata/feast_integration_test_crs/postgres.yaml": [ + { + "type": "Secret Keyword", + "filename": "infra/feast-operator/test/testdata/feast_integration_test_crs/postgres.yaml", + "hashed_secret": "f37a74d82869756054661d6501b29cdfec0fdb38", + "is_verified": false, + "line_number": 9 + } + ], + "java/serving/src/test/java/feast/serving/it/ServingRedisAzureRegistryIT.java": [ + { + "type": "Base64 High Entropy String", + "filename": "java/serving/src/test/java/feast/serving/it/ServingRedisAzureRegistryIT.java", + "hashed_secret": "726684b05966cd486e3461450ac94907c0b6709a", + "is_verified": false, + "line_number": 36 + } + ], + "java/serving/src/test/resources/docker-compose/feast10/feature_store.yaml": [ + { + "type": "Secret Keyword", + "filename": "java/serving/src/test/resources/docker-compose/feast10/feature_store.yaml", + "hashed_secret": "05e037c57cdc9fe20f7a97c5af652a0f84acb8f9", + "is_verified": false, + "line_number": 6 + } + ], + 
"sdk/python/feast/infra/feature_servers/multicloud/offline/hermeto-generic-artifacts.sh": [ + { + "type": "Hex High Entropy String", + "filename": "sdk/python/feast/infra/feature_servers/multicloud/offline/hermeto-generic-artifacts.sh", + "hashed_secret": "8cbe079e6b8a95a5296f6908db495868a3a9fd9f", + "is_verified": false, + "line_number": 9 + }, + { + "type": "Hex High Entropy String", + "filename": "sdk/python/feast/infra/feature_servers/multicloud/offline/hermeto-generic-artifacts.sh", + "hashed_secret": "24059504f09c00c945483d52a2552d98e0856c19", + "is_verified": false, + "line_number": 12 + }, + { + "type": "Hex High Entropy String", + "filename": "sdk/python/feast/infra/feature_servers/multicloud/offline/hermeto-generic-artifacts.sh", + "hashed_secret": "4be96404b7a9ed47f1b8654587c996d03173b6bb", + "is_verified": false, + "line_number": 15 + }, + { + "type": "Hex High Entropy String", + "filename": "sdk/python/feast/infra/feature_servers/multicloud/offline/hermeto-generic-artifacts.sh", + "hashed_secret": "2aed7fb8a5796ad01bade8045b9bcca3d275409b", + "is_verified": false, + "line_number": 18 + } + ], + "sdk/python/feast/infra/offline_stores/contrib/clickhouse_offline_store/tests/data_source.py": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/feast/infra/offline_stores/contrib/clickhouse_offline_store/tests/data_source.py", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 27 + } + ], + "sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py", + "hashed_secret": "4f90cdec4cf4fb07ecab85746c12da693b36e916", + "is_verified": false, + "line_number": 23 + } + ], + "sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py": [ + { + "type": "Secret Keyword", + "filename": 
"sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py", + "hashed_secret": "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3", + "is_verified": false, + "line_number": 27 + } + ], + "sdk/python/feast/templates/cassandra/feature_repo/feature_store.yaml": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/feast/templates/cassandra/feature_repo/feature_store.yaml", + "hashed_secret": "960dc7784dd8b3c86ea8e9b2e29fa44dab134564", + "is_verified": false, + "line_number": 13 + } + ], + "sdk/python/feast/templates/couchbase/feature_repo/feature_store.yaml": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/feast/templates/couchbase/feature_repo/feature_store.yaml", + "hashed_secret": "49e93251fa95b745e31e66c4469be9cc4b969c0c", + "is_verified": false, + "line_number": 8 + }, + { + "type": "Secret Keyword", + "filename": "sdk/python/feast/templates/couchbase/feature_repo/feature_store.yaml", + "hashed_secret": "4fa6cc9a10b9fc98ede9d74f83c29003f8fa9533", + "is_verified": false, + "line_number": 15 + } + ], + "sdk/python/feast/templates/postgres/feature_repo/feature_store.yaml": [ + { + "type": "Basic Auth Credentials", + "filename": "sdk/python/feast/templates/postgres/feature_repo/feature_store.yaml", + "hashed_secret": "2cfca21270deaf784d520e28cb126eff5c988e12", + "is_verified": false, + "line_number": 5 + }, + { + "type": "Secret Keyword", + "filename": "sdk/python/feast/templates/postgres/feature_repo/feature_store.yaml", + "hashed_secret": "2cfca21270deaf784d520e28cb126eff5c988e12", + "is_verified": false, + "line_number": 17 + } + ], + "sdk/python/feast/templates/snowflake/feature_repo/feature_store.yaml": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/feast/templates/snowflake/feature_repo/feature_store.yaml", + "hashed_secret": "d46ded7f26377ccdbe60514ad370524a60f7d20e", + "is_verified": false, + "line_number": 8 + } + ], + "sdk/python/tests/data/localhost.key": [ + { + "type": "Private Key", + "filename": 
"sdk/python/tests/data/localhost.key", + "hashed_secret": "1348b145fa1a555461c1b790a2f66614781091e9", + "is_verified": false, + "line_number": 1 + } + ], + "sdk/python/tests/integration/offline_store/test_s3_custom_endpoint.py": [ + { + "type": "AWS Access Key", + "filename": "sdk/python/tests/integration/offline_store/test_s3_custom_endpoint.py", + "hashed_secret": "d70eab08607a4d05faa2d0d6647206599e9abc65", + "is_verified": false, + "line_number": 35 + }, + { + "type": "Base64 High Entropy String", + "filename": "sdk/python/tests/integration/offline_store/test_s3_custom_endpoint.py", + "hashed_secret": "d70eab08607a4d05faa2d0d6647206599e9abc65", + "is_verified": false, + "line_number": 35 + }, + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/integration/offline_store/test_s3_custom_endpoint.py", + "hashed_secret": "d70eab08607a4d05faa2d0d6647206599e9abc65", + "is_verified": false, + "line_number": 35 + } + ], + "sdk/python/tests/integration/registration/test_universal_registry.py": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/integration/registration/test_universal_registry.py", + "hashed_secret": "53e9042a36213bf85ef29a4371896aef8ba9196a", + "is_verified": false, + "line_number": 126 + } + ], + "sdk/python/tests/integration/rest_api/resource/feast_config_rhoai.yaml": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/integration/rest_api/resource/feast_config_rhoai.yaml", + "hashed_secret": "3b8d743308b7ce47d6c053b97c16a8e0eeb6356d", + "is_verified": false, + "line_number": 8 + } + ], + "sdk/python/tests/integration/rest_api/resource/postgres.yaml": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/integration/rest_api/resource/postgres.yaml", + "hashed_secret": "f37a74d82869756054661d6501b29cdfec0fdb38", + "is_verified": false, + "line_number": 9 + } + ], + "sdk/python/tests/unit/infra/offline_stores/contrib/postgres_offline_store/test_postgres.py": [ + { + "type": "Secret Keyword", + 
"filename": "sdk/python/tests/unit/infra/offline_stores/contrib/postgres_offline_store/test_postgres.py", + "hashed_secret": "9fb7fe1217aed442b04c0f5e43b5d5a7d3287097", + "is_verified": false, + "line_number": 301 + } + ], + "sdk/python/tests/unit/infra/offline_stores/test_clickhouse.py": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/unit/infra/offline_stores/test_clickhouse.py", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 21 + } + ], + "sdk/python/tests/unit/infra/offline_stores/test_offline_store.py": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/unit/infra/offline_stores/test_offline_store.py", + "hashed_secret": "b94e9f3d7e001981b2dd49f2a70822a8ac8f3e68", + "is_verified": false, + "line_number": 139 + }, + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/unit/infra/offline_stores/test_offline_store.py", + "hashed_secret": "3442496b96dd01591a8cd44b1eec1368ab728aba", + "is_verified": false, + "line_number": 178 + } + ], + "sdk/python/tests/unit/infra/offline_stores/test_snowflake.py": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/unit/infra/offline_stores/test_snowflake.py", + "hashed_secret": "b94e9f3d7e001981b2dd49f2a70822a8ac8f3e68", + "is_verified": false, + "line_number": 24 + } + ], + "sdk/python/tests/unit/infra/scaffolding/test_repo_config.py": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/unit/infra/scaffolding/test_repo_config.py", + "hashed_secret": "1089adfb1f11b95df31344030507912b5abdf57a", + "is_verified": false, + "line_number": 297 + }, + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/unit/infra/scaffolding/test_repo_config.py", + "hashed_secret": "9fb7fe1217aed442b04c0f5e43b5d5a7d3287097", + "is_verified": false, + "line_number": 299 + } + ], + "sdk/python/tests/unit/infra/utils/snowflake/test_snowflake_utils.py": [ + { + "type": "Secret Keyword", + "filename": 
"sdk/python/tests/unit/infra/utils/snowflake/test_snowflake_utils.py", + "hashed_secret": "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3", + "is_verified": false, + "line_number": 10 + } + ], + "sdk/python/tests/unit/local_feast_tests/test_init.py": [ + { + "type": "Basic Auth Credentials", + "filename": "sdk/python/tests/unit/local_feast_tests/test_init.py", + "hashed_secret": "2cfca21270deaf784d520e28cb126eff5c988e12", + "is_verified": false, + "line_number": 82 + } + ], + "sdk/python/tests/unit/permissions/test_oidc_auth_client.py": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/unit/permissions/test_oidc_auth_client.py", + "hashed_secret": "e6eae2da3b4a5bf296d0495192788e2772ac5c79", + "is_verified": false, + "line_number": 29 + }, + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/unit/permissions/test_oidc_auth_client.py", + "hashed_secret": "8318df9ecda039deac9868adf1944a29a95c7114", + "is_verified": false, + "line_number": 31 + } + ], + "sdk/python/tests/universal/feature_repos/repo_configuration.py": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/universal/feature_repos/repo_configuration.py", + "hashed_secret": "d90e76ef629fb00c95f4e84fec29fbda111e2392", + "is_verified": false, + "line_number": 452 + }, + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/universal/feature_repos/repo_configuration.py", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 454 + } + ], + "sdk/python/tests/universal/feature_repos/universal/data_sources/file.py": [ + { + "type": "Base64 High Entropy String", + "filename": "sdk/python/tests/universal/feature_repos/universal/data_sources/file.py", + "hashed_secret": "d70eab08607a4d05faa2d0d6647206599e9abc65", + "is_verified": false, + "line_number": 257 + }, + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/universal/feature_repos/universal/data_sources/file.py", + "hashed_secret": 
"d70eab08607a4d05faa2d0d6647206599e9abc65", + "is_verified": false, + "line_number": 257 + } + ], + "sdk/python/tests/universal/feature_repos/universal/online_store/couchbase.py": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/universal/feature_repos/universal/online_store/couchbase.py", + "hashed_secret": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8", + "is_verified": false, + "line_number": 29 + } + ], + "sdk/python/tests/universal/feature_repos/universal/online_store/mysql.py": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/universal/feature_repos/universal/online_store/mysql.py", + "hashed_secret": "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3", + "is_verified": false, + "line_number": 27 + } + ], + "sdk/python/tests/universal/feature_repos/universal/online_store/postgres.py": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/universal/feature_repos/universal/online_store/postgres.py", + "hashed_secret": "95433727ea51026e1e0dc8deadaabd4a3baaaaf4", + "is_verified": false, + "line_number": 19 + } + ], + "sdk/python/tests/universal/feature_repos/universal/online_store/singlestore.py": [ + { + "type": "Base64 High Entropy String", + "filename": "sdk/python/tests/universal/feature_repos/universal/online_store/singlestore.py", + "hashed_secret": "6f7c6dea79de6f298be425ade30f5afbbb6f8047", + "is_verified": false, + "line_number": 24 + }, + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/universal/feature_repos/universal/online_store/singlestore.py", + "hashed_secret": "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3", + "is_verified": false, + "line_number": 37 + } + ], + "sdk/python/tests/utils/auth_permissions_util.py": [ + { + "type": "Secret Keyword", + "filename": "sdk/python/tests/utils/auth_permissions_util.py", + "hashed_secret": "d90e76ef629fb00c95f4e84fec29fbda111e2392", + "is_verified": false, + "line_number": 170 + } + ] + }, + "generated_at": "2026-03-05T15:25:10Z" +} diff --git a/CHANGELOG.md 
b/CHANGELOG.md index cc0a39103c8..51a9ed13bea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,50 @@ # Changelog +# [0.61.0](https://github.com/feast-dev/feast/compare/v0.60.0...v0.61.0) (2026-03-10) + + +### Bug Fixes + +* Add grpcio dependency group to transformation server Dockerfile ([2c2150a](https://github.com/feast-dev/feast/commit/2c2150a1d7d6bf06e28bb11156fca730fdbce77f)) +* Add https readiness check for rest-registry tests ([ea85e63](https://github.com/feast-dev/feast/commit/ea85e63cd9d77441831702fffbf170fc2ec82a10)) +* Add website build check for PRs and fix blog frontmatter YAML error ([#6079](https://github.com/feast-dev/feast/issues/6079)) ([30a3a43](https://github.com/feast-dev/feast/commit/30a3a439d5f41b1cc3dcb4961f23198c218dc9d1)) +* Added MLflow metric charts across feature selection ([#6080](https://github.com/feast-dev/feast/issues/6080)) ([a403361](https://github.com/feast-dev/feast/commit/a4033611cba7b47ada8d1841f70ec28fe6240589)) +* Check duplicate names for feature view across types ([#5999](https://github.com/feast-dev/feast/issues/5999)) ([95b9af8](https://github.com/feast-dev/feast/commit/95b9af8628ee4c0d68c4937295999cc4e8691004)) +* Fix integration tests ([#6046](https://github.com/feast-dev/feast/issues/6046)) ([02d5548](https://github.com/feast-dev/feast/commit/02d5548a345f0f6a076913f89e788069f87b2769)) +* Fix non-specific label selector on metrics service ([a1a160d](https://github.com/feast-dev/feast/commit/a1a160d9b33e62e6a9e5499964052603f94e4361)) +* Fixed IntegrityError on SqlRegistry ([#6047](https://github.com/feast-dev/feast/issues/6047)) ([325e148](https://github.com/feast-dev/feast/commit/325e1485d285708b9b20e69d8fdeec96df67ea86)) +* Fixed pre-commit check ([114b7db](https://github.com/feast-dev/feast/commit/114b7db6b2afc40c98e9c182990c65b251f6ce90)) +* Fixed uv cache permission error for docker build on mac ([ad807be](https://github.com/feast-dev/feast/commit/ad807be579aa8274ba54e7eb533558aa6b31a8f4)) +* Fixes a 
`PydanticDeprecatedSince20` warning for trino_offline_store ([#5991](https://github.com/feast-dev/feast/issues/5991)) ([abfd18a](https://github.com/feast-dev/feast/commit/abfd18acf6bf207b057dd818b6a22d97ae69dee8)) +* Integration test failures ([#6040](https://github.com/feast-dev/feast/issues/6040)) ([9165870](https://github.com/feast-dev/feast/commit/91658704209c29315ae17df49f5d5226d08491b9)) +* Ray offline store tests are duplicated across 3 workflows ([54f705a](https://github.com/feast-dev/feast/commit/54f705aeb919f268be40106e229530ef059b139c)) +* Reenable tests ([#6036](https://github.com/feast-dev/feast/issues/6036)) ([82ee7f8](https://github.com/feast-dev/feast/commit/82ee7f8e18df6de67eadf2f8b548346f174e4fdf)) +* Use commitlint pre-commit hook instead of a separate action ([35a81e7](https://github.com/feast-dev/feast/commit/35a81e71a63c4d47cd144ac1ec5180de6d448eaf)) + + +### Features + +* Add complex type support (Map, JSON, Struct) with schema validation ([#5974](https://github.com/feast-dev/feast/issues/5974)) ([1200dbf](https://github.com/feast-dev/feast/commit/1200dbfe0a4acc65ac1fc6830c8c07dc2258f327)) +* Add materialization, feature freshness, request latency, and push metrics to feature server ([2c6be18](https://github.com/feast-dev/feast/commit/2c6be18bfee9d4bf18ae59490160282b090d3b62)) +* Add non-entity retrieval support for ClickHouse offline store ([4d08ddc](https://github.com/feast-dev/feast/commit/4d08ddcc4d1c3c5ca5b23ce4aad241a9017ef3fd)), closes [#5835](https://github.com/feast-dev/feast/issues/5835) +* Add OnlineStore for MongoDB ([#6025](https://github.com/feast-dev/feast/issues/6025)) ([bf4e3fa](https://github.com/feast-dev/feast/commit/bf4e3faff653e214bc98f38ec72b29b2a318c8e7)), closes [golang/go#74462](https://github.com/golang/go/issues/74462) +* Added CodeQL SAST scanning and detect-secrets pre-commit hook ([547b516](https://github.com/feast-dev/feast/commit/547b5161ec709fb0a09473dbd8f8371b2b14bb67)) +* Adding optional name to Aggregation 
(feast-dev[#5994](https://github.com/feast-dev/feast/issues/5994)) ([#6083](https://github.com/feast-dev/feast/issues/6083)) ([56469f7](https://github.com/feast-dev/feast/commit/56469f7d9bd680a288f7a6430b8ff9cbb9b37510)) +* Feature Server High-Availability on Kubernetes ([#6028](https://github.com/feast-dev/feast/issues/6028)) ([9c07b4c](https://github.com/feast-dev/feast/commit/9c07b4c1dd6c6cf3e1f379c3077821da62648ca9)) +* **go:** Implement metrics and tracing for http and grpc servers ([#5925](https://github.com/feast-dev/feast/issues/5925)) ([2b4ec9a](https://github.com/feast-dev/feast/commit/2b4ec9a6ef77e1bc9de0f4917559ab53b9f4e5bf)) +* Horizontal scaling support to the Feast operator ([#6000](https://github.com/feast-dev/feast/issues/6000)) ([3ec13e6](https://github.com/feast-dev/feast/commit/3ec13e606f356eb70e5a2c9b336be83f4e65d573)) +* Making feature view source optional (feast-dev[#6074](https://github.com/feast-dev/feast/issues/6074)) ([#6075](https://github.com/feast-dev/feast/issues/6075)) ([76917b7](https://github.com/feast-dev/feast/commit/76917b772762dc98d5c679f772e90a16bb39931b)) +* Support arm docker build ([#6061](https://github.com/feast-dev/feast/issues/6061)) ([1e1f5d9](https://github.com/feast-dev/feast/commit/1e1f5d94486be73d9e9afb85e36773a7a8c15e17)) +* Use orjson for faster JSON serialization in feature server ([6f5203a](https://github.com/feast-dev/feast/commit/6f5203ac50284c4fc5884740cbc3bbca7fd1c7d0)) + + +### Performance Improvements + +* Optimize protobuf parsing in Redis online store ([#6023](https://github.com/feast-dev/feast/issues/6023)) ([59dfdb8](https://github.com/feast-dev/feast/commit/59dfdb87409085a397dfec065e4ee5d9af3b67d4)) +* Optimize timestamp conversion in _convert_rows_to_protobuf ([33a2e95](https://github.com/feast-dev/feast/commit/33a2e9542a8680aabaeeef757efde079702cac65)) +* Parallelize
DynamoDB batch reads in sync online_read ([#6024](https://github.com/feast-dev/feast/issues/6024)) ([9699944](https://github.com/feast-dev/feast/commit/96999443440045fc1786f63d0d12279d0e848d3a)) +* Remove redundant entity key serialization in online_read ([d87283f](https://github.com/feast-dev/feast/commit/d87283fa7d647c48640ec03a342312745d77ffc8)) + # [0.60.0](https://github.com/feast-dev/feast/compare/v0.59.0...v0.60.0) (2026-02-17) diff --git a/CODEOWNERS b/CODEOWNERS index e4eb72958c2..dfcc0d8383b 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -25,7 +25,7 @@ # BigQuery /sdk/python/feast/infra/offline_stores/bigquery.py @sudohainguyen /sdk/python/feast/infra/offline_stores/bigquery_source.py @sudohainguyen -/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py @sudohainguyen +/sdk/python/tests/universal/feature_repos/universal/data_sources/bigquery.py @sudohainguyen # ==== Online Stores ==== diff --git a/Makefile b/Makefile index e19bf12a78e..58c99b8c984 100644 --- a/Makefile +++ b/Makefile @@ -164,7 +164,10 @@ benchmark-python-local: ## Run integration + benchmark tests for Python (local d ##@ Tests test-python-unit: ## Run Python unit tests (use pattern= to filter tests, e.g., pattern=milvus, pattern=test_online_retrieval.py, pattern=test_online_retrieval.py::test_get_online_features_milvus) - uv run python -m pytest -n 8 --color=yes $(if $(pattern),-k "$(pattern)") sdk/python/tests + uv run python -m pytest -n 8 --color=yes $(if $(pattern),-k "$(pattern)") \ + --ignore=sdk/python/tests/component/ray \ + --ignore=sdk/python/tests/component/spark \ + sdk/python/tests # Fast unit tests only test-python-unit-fast: ## Run fast unit tests only (no external dependencies) @@ -184,7 +187,9 @@ test-python-smoke: ## Quick smoke test for development test-python-integration: ## Run Python integration tests (CI) uv run python -m pytest --tb=short -v -n 8 --integration --color=yes --durations=10 --timeout=1200 --timeout_method=thread --dist loadgroup \ 
-k "(not snowflake or not test_historical_features_main)" \ - -m "not rbac_remote_integration_test" \ + -m "not rbac_remote_integration_test and not ray_offline_stores_only" \ + --ignore=sdk/python/tests/integration/registration \ + --ignore=sdk/python/tests/component/ray \ --log-cli-level=INFO -s \ sdk/python/tests @@ -198,12 +203,11 @@ test-python-integration-parallel: ## Run integration tests with enhanced paralle test-python-integration-local: ## Run Python integration tests (local dev mode) FEAST_IS_LOCAL_TEST=True \ FEAST_LOCAL_ONLINE_CONTAINER=True \ - HADOOP_HOME=$$HOME/hadoop \ - CLASSPATH="$$( $$HADOOP_HOME/bin/hadoop classpath --glob ):$$CLASSPATH" \ - HADOOP_USER_NAME=root \ uv run python -m pytest --tb=short -v -n auto --color=yes --integration --durations=10 --timeout=1200 --timeout_method=thread --dist loadgroup \ -k "not test_lambda_materialization and not test_snowflake_materialization" \ - -m "not rbac_remote_integration_test" \ + -m "not rbac_remote_integration_test and not ray_offline_stores_only" \ + --ignore=sdk/python/tests/component/ray \ + --ignore=sdk/python/tests/integration/registration \ --log-cli-level=INFO -s \ sdk/python/tests @@ -212,7 +216,7 @@ test-python-integration-rbac-remote: ## Run Python remote RBAC integration tests FEAST_LOCAL_ONLINE_CONTAINER=True \ uv run python -m pytest --tb=short -v -n 8 --color=yes --integration --durations=10 --timeout=1200 --timeout_method=thread --dist loadgroup \ -k "not test_lambda_materialization and not test_snowflake_materialization" \ - -m "rbac_remote_integration_test" \ + -m "rbac_remote_integration_test and not ray_offline_stores_only" \ --log-cli-level=INFO -s \ sdk/python/tests @@ -244,6 +248,18 @@ test-python-integration-dbt: ## Run dbt integration tests python -m pytest tests/integration/dbt/test_dbt_integration.py -v --tb=short @echo "✓ dbt integration tests completed successfully!" 
+test-python-registration: ## Run Python registration integration tests (local) + pixi run -e registration-tests test + +test-python-registration-ci: ## Run Python registration integration tests (CI) + HADOOP_HOME=$$HOME/hadoop \ + CLASSPATH="$$( $$HADOOP_HOME/bin/hadoop classpath --glob ):$$CLASSPATH" \ + HADOOP_USER_NAME=root \ + pixi run -e registration-tests test-ci + +test-python-universal-duckdb-offline: ## Run Python DuckDB offline store integration tests + pixi run -e duckdb-tests test + test-python-universal-spark: ## Run Python Spark integration tests PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.spark_repo_configuration \ @@ -390,33 +406,13 @@ test-python-universal-postgres-offline: ## Run Python Postgres integration tests not test_spark" \ sdk/python/tests -test-python-universal-ray-offline: ## Run Python Ray offline store integration tests - PYTHONPATH='.' \ - FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.ray_repo_configuration \ - PYTEST_PLUGINS=sdk.python.feast.infra.offline_stores.contrib.ray_offline_store.tests \ - python -m pytest -n 8 --integration \ - -m "not universal_online_stores and not benchmark" \ - -k "not test_historical_retrieval_with_validation and \ - not test_universal_cli and \ - not test_go_feature_server and \ - not test_feature_logging and \ - not test_logged_features_validation and \ - not test_lambda_materialization_consistency and \ - not gcs_registry and \ - not s3_registry and \ - not test_snowflake and \ - not test_spark" \ - sdk/python/tests - -test-python-ray-compute-engine: ## Run Python Ray compute engine tests - PYTHONPATH='.' \ - python -m pytest -v --integration \ - sdk/python/tests/integration/compute_engines/ray_compute/ +test-python-ray-integration: ## Run all Python Ray integration tests (offline store + compute engine) + pixi run -e ray-tests test test-python-universal-postgres-online: ## Run Python Postgres integration tests PYTHONPATH='.' 
\ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.postgres_online_store.postgres_repo_configuration \ - PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.postgres \ + PYTEST_PLUGINS=sdk.python.tests.universal.feature_repos.universal.online_store.postgres \ python -m pytest -n 8 --integration \ -k "not test_universal_cli and \ not test_go_feature_server and \ @@ -435,7 +431,7 @@ test-python-universal-postgres-online: ## Run Python Postgres integration tests test-python-universal-pgvector-online: ## Run Python Postgres integration tests PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.postgres_online_store.pgvector_repo_configuration \ - PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.postgres \ + PYTEST_PLUGINS=sdk.python.tests.universal.feature_repos.universal.online_store.postgres \ python -m pytest -n 8 --integration \ -k "not test_universal_cli and \ not test_go_feature_server and \ @@ -457,7 +453,7 @@ test-python-universal-postgres-online: ## Run Python Postgres integration tests test-python-universal-mysql-online: ## Run Python MySQL integration tests PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.mysql_online_store.mysql_repo_configuration \ - PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.mysql \ + PYTEST_PLUGINS=sdk.python.tests.universal.feature_repos.universal.online_store.mysql \ python -m pytest -n 8 --integration \ -k "not test_universal_cli and \ not test_go_feature_server and \ @@ -476,7 +472,7 @@ test-python-universal-mysql-online: ## Run Python MySQL integration tests test-python-universal-cassandra: ## Run Python Cassandra integration tests PYTHONPATH='.' 
\ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.cassandra_online_store.cassandra_repo_configuration \ - PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.cassandra \ + PYTEST_PLUGINS=sdk.python.tests.universal.feature_repos.universal.online_store.cassandra \ python -m pytest -x --integration \ sdk/python/tests/integration/offline_store/test_feature_logging.py \ --ignore=sdk/python/tests/integration/offline_store/test_validation.py \ @@ -487,7 +483,7 @@ test-python-universal-cassandra: ## Run Python Cassandra integration tests test-python-universal-hazelcast: ## Run Python Hazelcast integration tests PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.hazelcast_online_store.hazelcast_repo_configuration \ - PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.hazelcast \ + PYTEST_PLUGINS=sdk.python.tests.universal.feature_repos.universal.online_store.hazelcast \ python -m pytest -n 8 --integration \ -k "not test_universal_cli and \ not test_go_feature_server and \ @@ -506,7 +502,7 @@ test-python-universal-hazelcast: ## Run Python Hazelcast integration tests test-python-universal-cassandra-no-cloud-providers: ## Run Python Cassandra integration tests PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.cassandra_online_store.cassandra_repo_configuration \ - PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.cassandra \ + PYTEST_PLUGINS=sdk.python.tests.universal.feature_repos.universal.online_store.cassandra \ python -m pytest -x --integration \ -k "not test_lambda_materialization_consistency and \ not test_apply_entity_integration and \ @@ -523,7 +519,26 @@ test-python-universal-cassandra-no-cloud-providers: ## Run Python Cassandra inte test-python-universal-elasticsearch-online: ## Run Python Elasticsearch online store integration tests PYTHONPATH='.' 
\ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.elasticsearch_online_store.elasticsearch_repo_configuration \ - PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.elasticsearch \ + PYTEST_PLUGINS=sdk.python.tests.universal.feature_repos.universal.online_store.elasticsearch \ + python -m pytest -n 8 --integration \ + -k "not test_universal_cli and \ + not test_go_feature_server and \ + not test_feature_logging and \ + not test_reorder_columns and \ + not test_logged_features_validation and \ + not test_lambda_materialization_consistency and \ + not test_offline_write and \ + not test_push_features_to_offline_store and \ + not gcs_registry and \ + not s3_registry and \ + not test_universal_types and \ + not test_snowflake" \ + sdk/python/tests + +test-python-universal-mongodb-online: ## Run Python MongoDB online store integration tests + PYTHONPATH='.' \ + FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.mongodb_online_store.mongodb_repo_configuration \ + PYTEST_PLUGINS=sdk.python.tests.universal.feature_repos.universal.online_store.mongodb \ python -m pytest -n 8 --integration \ -k "not test_universal_cli and \ not test_go_feature_server and \ @@ -542,7 +557,7 @@ test-python-universal-elasticsearch-online: ## Run Python Elasticsearch online s test-python-universal-milvus-online: ## Run Python Milvus online store integration tests PYTHONPATH='.' 
\ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.milvus_online_store.milvus_repo_configuration \ - PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.milvus \ + PYTEST_PLUGINS=sdk.python.tests.universal.feature_repos.universal.online_store.milvus \ python -m pytest -n 8 --integration \ -k "test_retrieve_online_milvus_documents" \ sdk/python/tests --ignore=sdk/python/tests/integration/offline_store/test_dqm_validation.py @@ -550,7 +565,7 @@ test-python-universal-milvus-online: ## Run Python Milvus online store integrati test-python-universal-singlestore-online: ## Run Python Singlestore online store integration tests PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.singlestore_repo_configuration \ - PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.singlestore \ + PYTEST_PLUGINS=sdk.python.tests.universal.feature_repos.universal.online_store.singlestore \ python -m pytest -n 8 --integration \ -k "not test_universal_cli and \ not gcs_registry and \ @@ -561,7 +576,7 @@ test-python-universal-singlestore-online: ## Run Python Singlestore online store test-python-universal-qdrant-online: ## Run Python Qdrant online store integration tests PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.qdrant_online_store.qdrant_repo_configuration \ - PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.qdrant \ + PYTEST_PLUGINS=sdk.python.tests.universal.feature_repos.universal.online_store.qdrant \ python -m pytest -n 8 --integration \ -k "test_retrieve_online_documents" \ sdk/python/tests/integration/online_store/test_universal_online.py @@ -596,7 +611,7 @@ test-python-universal-couchbase-offline: ## Run Python Couchbase offline store i test-python-universal-couchbase-online: ## Run Python Couchbase online store integration tests PYTHONPATH='.' 
\ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.couchbase_online_store.couchbase_repo_configuration \ - PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.couchbase \ + PYTEST_PLUGINS=sdk.python.tests.universal.feature_repos.universal.online_store.couchbase \ python -m pytest -n 8 --integration \ -k "not test_universal_cli and \ not test_go_feature_server and \ @@ -663,10 +678,10 @@ push-feature-server-docker: ## Push Feature Server Docker image docker push $(REGISTRY)/feature-server:$(VERSION) build-feature-server-docker: ## Build Feature Server Docker image - docker buildx build \ + docker buildx build $(if $(DOCKER_PLATFORMS),--platform $(DOCKER_PLATFORMS),) \ -t $(REGISTRY)/feature-server:$(VERSION) \ -f sdk/python/feast/infra/feature_servers/multicloud/Dockerfile \ - --load sdk/python/feast/infra/feature_servers/multicloud + $(if $(filter true,$(DOCKER_PUSH)),--push,--load) sdk/python/feast/infra/feature_servers/multicloud push-feature-transformation-server-docker: ## Push Feature Transformation Server Docker image docker push $(REGISTRY)/feature-transformation-server:$(VERSION) @@ -719,9 +734,9 @@ build-feature-server-dev: ## Build Feature Server Dev Docker image -f sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev --load . build-feature-server-dev-docker: ## Build Feature Server Dev Docker image - docker buildx build \ + docker buildx build $(if $(DOCKER_PLATFORMS),--platform $(DOCKER_PLATFORMS),) \ -t $(REGISTRY)/feature-server:$(VERSION) \ - -f sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev --load . + -f sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev $(if $(filter true,$(DOCKER_PUSH)),--push,--load) . 
build-feature-server-dev-docker_on_mac: ## Build Feature Server Dev Docker image on Mac docker buildx build --platform linux/amd64 \ diff --git a/README.md b/README.md index 0dd37ef3dd2..71df8e098a5 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ Feast allows ML platform teams to: * **Make features consistently available for training and serving** by managing an _offline store_ (to process historical data for scale-out batch scoring or model training), a low-latency _online store_ (to power real-time prediction)_,_ and a battle-tested _feature server_ (to serve pre-computed features online). * **Avoid data leakage** by generating point-in-time correct feature sets so data scientists can focus on feature engineering rather than debugging error-prone dataset joining logic. This ensure that future feature values do not leak to models during training. -* **Decouple ML from data infrastructure** by providing a single data access layer that abstracts feature storage from feature retrieval, ensuring models remain portable as you move from training models to serving models, from batch models to real-time models, and from one data infra system to another. +* **Decouple ML from data infrastructure** by providing a single data access layer that abstracts feature storage from feature retrieval, ensuring models remain portable as you move from training models to serving models, from batch models to realtime models, and from one data infra system to another. Please see our [documentation](https://docs.feast.dev/) for more information about the project. 
@@ -203,7 +203,6 @@ The list below contains the functionality that contributors are planning to deve * [x] [Bigtable](https://docs.feast.dev/reference/online-stores/bigtable) * [x] [SQLite](https://docs.feast.dev/reference/online-stores/sqlite) * [x] [Dragonfly](https://docs.feast.dev/reference/online-stores/dragonfly) - * [x] [IKV - Inlined Key Value Store](https://docs.feast.dev/reference/online-stores/ikv) * [x] [Azure Cache for Redis (community plugin)](https://github.com/Azure/feast-azure) * [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/online-stores/postgres) * [x] [Cassandra / AstraDB (contrib plugin)](https://docs.feast.dev/reference/online-stores/cassandra) diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index f93eee182f0..82b1dac01d7 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -119,7 +119,6 @@ * [Snowflake](reference/online-stores/snowflake.md) * [Redis](reference/online-stores/redis.md) * [Dragonfly](reference/online-stores/dragonfly.md) - * [IKV](reference/online-stores/ikv.md) * [Datastore](reference/online-stores/datastore.md) * [DynamoDB](reference/online-stores/dynamodb.md) * [Bigtable](reference/online-stores/bigtable.md) @@ -132,6 +131,7 @@ * [ScyllaDB](reference/online-stores/scylladb.md) * [SingleStore](reference/online-stores/singlestore.md) * [Milvus](reference/online-stores/milvus.md) + * [MongoDB](reference/online-stores/mongodb.md) * [Registries](reference/registries/README.md) * [Local](reference/registries/local.md) * [S3](reference/registries/s3.md) diff --git a/docs/getting-started/concepts/batch-feature-view.md b/docs/getting-started/concepts/batch-feature-view.md index 9d0eb86389c..090212f58c1 100644 --- a/docs/getting-started/concepts/batch-feature-view.md +++ b/docs/getting-started/concepts/batch-feature-view.md @@ -27,7 +27,7 @@ class BatchFeatureView(FeatureView): def __init__( *, name: str, - source: Union[DataSource, FeatureView, List[FeatureView]], + source: Optional[Union[DataSource, FeatureView, 
List[FeatureView]]] = None, sink_source: Optional[DataSource] = None, schema: Optional[List[Field]] = None, entities: Optional[List[Entity]] = None, @@ -70,7 +70,7 @@ driver_fv = BatchFeatureView( Field(name="conv_rate", dtype=Float32), ], aggregations=[ - Aggregation(column="conv_rate", function="sum", time_window=timedelta(days=1)), + Aggregation(column="conv_rate", function="sum", time_window=timedelta(days=1), name="total_conv_rate_1d"), ], source=source, ) @@ -142,8 +142,10 @@ See: ## 🛑 Gotchas - `sink_source` is **required** when chaining views (i.e., `source` is another FeatureView or list of them). +- `source` is optional; if omitted (`None`), the feature view has no associated batch data source. - Schema fields must be consistent with `sink_source`, `batch_source.field_mapping` if field mappings exist. - Aggregation logic must reference columns present in the raw source or transformed inputs. +- The output feature name for an aggregation defaults to `{function}_{column}` (e.g., `sum_conv_rate`). Use the `name` parameter to override it (e.g., `name="total_conv_rate_1d"`). --- diff --git a/docs/getting-started/concepts/feast-types.md b/docs/getting-started/concepts/feast-types.md index 72741f263e4..94c93f2a8ea 100644 --- a/docs/getting-started/concepts/feast-types.md +++ b/docs/getting-started/concepts/feast-types.md @@ -5,10 +5,44 @@ To make this possible, Feast itself has a type system for all the types it is ab Feast's type system is built on top of [protobuf](https://github.com/protocolbuffers/protobuf). The messages that make up the type system can be found [here](https://github.com/feast-dev/feast/blob/master/protos/feast/types/Value.proto), and the corresponding python classes that wrap them can be found [here](https://github.com/feast-dev/feast/blob/master/sdk/python/feast/types.py). -Feast supports primitive data types (numerical values, strings, bytes, booleans and timestamps). 
The only complex data type Feast supports is Arrays, and arrays cannot contain other arrays. +Feast supports the following categories of data types: + +- **Primitive types**: numerical values (`Int32`, `Int64`, `Float32`, `Float64`), `String`, `Bytes`, `Bool`, and `UnixTimestamp`. +- **Array types**: ordered lists of any primitive type, e.g. `Array(Int64)`, `Array(String)`. +- **Set types**: unordered collections of unique values for any primitive type, e.g. `Set(String)`, `Set(Int64)`. +- **Map types**: dictionary-like structures with string keys and values that can be any supported Feast type (including nested maps), e.g. `Map`, `Array(Map)`. +- **JSON type**: opaque JSON data stored as a string at the proto level but semantically distinct from `String` — backends use native JSON types (`jsonb`, `VARIANT`, etc.), e.g. `Json`, `Array(Json)`. +- **Struct type**: schema-aware structured type with named, typed fields. Unlike `Map` (which is schema-free), a `Struct` declares its field names and their types, enabling schema validation, e.g. `Struct({"name": String, "age": Int32})`. + +For a complete reference with examples, see [Type System](../../reference/type-system.md). Each feature or schema field in Feast is associated with a data type, which is stored in Feast's [registry](registry.md). These types are also used to ensure that Feast operates on values correctly (e.g. making sure that timestamp columns used for [point-in-time correct joins](point-in-time-joins.md) actually have the timestamp type). -As a result, each system that feast interacts with needs a way to translate data types from the native platform, into a feast type. E.g., Snowflake SQL types are converted to Feast types [here](https://rtd.feast.dev/en/master/feast.html#feast.type_map.snowflake_python_type_to_feast_value_type). The onus is therefore on authors of offline or online store connectors to make sure that this type mapping happens correctly. 
+As a result, each system that Feast interacts with needs a way to translate data types from the native platform into a Feast type. E.g., Snowflake SQL types are converted to Feast types [here](https://rtd.feast.dev/en/master/feast.html#feast.type_map.snowflake_python_type_to_feast_value_type). The onus is therefore on authors of offline or online store connectors to make sure that this type mapping happens correctly.
+
+### Backend Type Mapping for Complex Types
+
+Map, JSON, and Struct types are supported across all major Feast backends:
+
+| Backend | Native Type | Feast Type |
+|---------|-------------|------------|
+| PostgreSQL | `jsonb` | `Map`, `Json`, `Struct` |
+| PostgreSQL | `jsonb[]` | `Array(Map)` |
+| Snowflake | `VARIANT`, `OBJECT` | `Map` |
+| Snowflake | `JSON` | `Json` |
+| Redshift | `SUPER` | `Map` |
+| Redshift | `json` | `Json` |
+| BigQuery | `JSON` | `Json` |
+| BigQuery | `STRUCT`, `RECORD` | `Struct` |
+| Spark | `map` | `Map` |
+| Spark | `array<map>` | `Array(Map)` |
+| Spark | `struct<...>` | `Struct` |
+| Spark | `array<struct<...>>` | `Array(Struct(...))` |
+| MSSQL | `nvarchar(max)` | `Map`, `Json`, `Struct` |
+| DynamoDB | Proto bytes | `Map`, `Json`, `Struct` |
+| Redis | Proto bytes | `Map`, `Json`, `Struct` |
+| Milvus | `VARCHAR` (serialized) | `Map`, `Json`, `Struct` |
+
+**Note**: When the backend native type is ambiguous (e.g., `jsonb` could be `Map`, `Json`, or `Struct`), the **schema-declared Feast type takes precedence**. The backend-to-Feast type mappings above are only used for schema inference when no explicit type is provided.
 
 **Note**: Feast currently does *not* support a null type in its type system.
\ No newline at end of file diff --git a/docs/getting-started/concepts/feature-view.md b/docs/getting-started/concepts/feature-view.md index faaaf54408a..4ea007a1f91 100644 --- a/docs/getting-started/concepts/feature-view.md +++ b/docs/getting-started/concepts/feature-view.md @@ -24,6 +24,7 @@ Feature views consist of: * (optional, but recommended) a schema specifying one or more [features](feature-view.md#field) (without this, Feast will infer the schema by reading from the data source) * (optional, but recommended) metadata (for example, description, or other free-form metadata via `tags`) * (optional) a TTL, which limits how far back Feast will look when generating historical datasets +* (optional) `enable_validation=True`, which enables schema validation during materialization (see [Schema Validation](#schema-validation) below) Feature views allow Feast to model your existing feature data in a consistent way in both an offline (training) and online (serving) environment. Feature views generally contain features that are properties of a specific object, in which case that object is defined as an entity and included in the feature view. @@ -159,6 +160,43 @@ Feature names must be unique within a [feature view](feature-view.md#feature-vie Each field can have additional metadata associated with it, specified as key-value [tags](https://rtd.feast.dev/en/master/feast.html#feast.field.Field). +## Schema Validation + +Feature views support an optional `enable_validation` parameter that enables schema validation during materialization and historical feature retrieval. When enabled, Feast verifies that: + +- All declared feature columns are present in the input data. +- Column data types match the expected Feast types (mismatches are logged as warnings). + +This is useful for catching data quality issues early in the pipeline. 
To enable it: + +```python +from feast import FeatureView, Field +from feast.types import Int32, Int64, Float32, Json, Map, String, Struct + +validated_fv = FeatureView( + name="validated_features", + entities=[driver], + schema=[ + Field(name="trips_today", dtype=Int64), + Field(name="rating", dtype=Float32), + Field(name="preferences", dtype=Map), + Field(name="config", dtype=Json), # opaque JSON data + Field(name="address", dtype=Struct({"street": String, "city": String, "zip": Int32})), # typed struct + ], + source=my_source, + enable_validation=True, # enables schema checks +) +``` + +**JSON vs Map vs Struct**: These three complex types serve different purposes: +- **`Map`**: Schema-free dictionary with string keys. Use when the keys and values are dynamic. +- **`Json`**: Opaque JSON data stored as a string. Backends use native JSON types (`jsonb`, `VARIANT`). Use for configuration blobs or API responses where you don't need field-level typing. +- **`Struct`**: Schema-aware structured type with named, typed fields. Persisted through the registry via Field tags. Use when you know the exact structure and want type safety. + +Validation is supported in all compute engines (Local, Spark, and Ray). When a required column is missing, a `ValueError` is raised. Type mismatches are logged as warnings but do not block execution, allowing for safe gradual adoption. + +The `enable_validation` parameter is also available on `BatchFeatureView` and `StreamFeatureView`, as well as their respective decorators (`@batch_feature_view` and `@stream_feature_view`). + ## \[Alpha] On demand feature views On demand feature views allows data scientists to use existing features and request time data (features only available at request time) to transform and create new features. Users define python transformation logic which is executed in both the historical retrieval and online retrieval paths. 
diff --git a/docs/getting-started/concepts/tiling.md b/docs/getting-started/concepts/tiling.md index 5f9d225baa0..4ecdca14441 100644 --- a/docs/getting-started/concepts/tiling.md +++ b/docs/getting-started/concepts/tiling.md @@ -206,9 +206,9 @@ customer_features = StreamFeatureView( batch_source=file_source, # For historical data ), aggregations=[ - Aggregation(column="amount", function="sum", time_window=timedelta(hours=1)), - Aggregation(column="amount", function="avg", time_window=timedelta(hours=1)), - Aggregation(column="amount", function="std", time_window=timedelta(hours=1)), + Aggregation(column="amount", function="sum", time_window=timedelta(hours=1), name="sum_amount_1h"), + Aggregation(column="amount", function="avg", time_window=timedelta(hours=1), name="avg_amount_1h"), + Aggregation(column="amount", function="std", time_window=timedelta(hours=1), name="std_amount_1h"), ], timestamp_field="event_timestamp", online=True, @@ -229,7 +229,12 @@ customer_features = StreamFeatureView( ### Key Parameters -- `aggregations`: List of time-windowed aggregations to compute +- `aggregations`: List of time-windowed aggregations to compute. Each `Aggregation` accepts: + - `column`: source column to aggregate + - `function`: aggregation function (`sum`, `avg`, `mean`, `min`, `max`, `count`, `std`) + - `time_window`: duration of the aggregation window + - `slide_interval`: hop/slide size (defaults to `time_window`) + - `name` *(optional)*: output feature name. Defaults to `{function}_{column}` (e.g., `sum_amount`). Set this to use a custom name (e.g., `name="sum_amount_1h"`). 
- `timestamp_field`: Column name for timestamps (required when aggregations are specified) - `enable_tiling`: Enable tiling optimization (default: `False`) - Set to `True` for **streaming scenarios** diff --git a/docs/getting-started/faq.md b/docs/getting-started/faq.md index af545acd5e8..0f5c46ce2c1 100644 --- a/docs/getting-started/faq.md +++ b/docs/getting-started/faq.md @@ -39,16 +39,17 @@ Yes, this is possible. For example, you can use BigQuery as an offline store and ### How do I run `get_historical_features` without providing an entity dataframe? -Feast does supports fetching historical features without passing an entity dataframe with the request. -- As of today, only `postgres offline feature store` is supported for entity dataframe less retrieval. Remaining offline stores would be gradually updated to support the entity df less retrieval. The stores would be selected based on priorities and user base/request. -- The retrieval is based on `start_date` and `end_date` parameters to the function. Here are some combinations supported. - - Both params are given, Returns data during the given start to end timerange. - - Only start_date param is given, Returns data from the start date to `now` time. - - Only end_date param is given, Returns data during the end_date minus TTL time in feature view. - - Both params are `not` given, Returns data during the TTL time in feature view to now time. -- When multiple features are requested from multiple feature-views it is required to have entity ids in both of them for `JOIN` so that - -This is an area we're actively interested in contributions for. See [GitHub issue](https://github.com/feast-dev/feast/issues/1611) +Feast does support fetching historical features without passing an entity dataframe with the request. + +- **Supported offline stores:** Entity-less (entity dataframe–less) retrieval is supported for the **Postgres**, **Dask**, **Spark**, and **Ray** offline stores. 
Postgres was the first to support it; Dask, Spark, and Ray have followed. Other offline stores may be added based on priority and community demand.
+- **Date range:** Retrieval is controlled by the `start_date` and `end_date` parameters. Supported combinations:
+  - Both params given → data in the given start-to-end time range.
+  - Only `start_date` given → data from the start date to now.
+  - Only `end_date` given → data from (end_date minus feature view TTL) to end_date.
+  - Neither given → data from (TTL window) to now.
+- **Multiple feature views:** When requesting features from multiple feature views in entity-less mode, the feature views must share entity keys so that joins can be performed correctly.
+
+We welcome contributions to add or improve entity-less retrieval. See [GitHub issue #1611](https://github.com/feast-dev/feast/issues/1611).
 
 ### Does Feast provide security or access control?
 
diff --git a/docs/getting-started/quickstart.md b/docs/getting-started/quickstart.md
index 0dcb861db7c..87816982abd 100644
--- a/docs/getting-started/quickstart.md
+++ b/docs/getting-started/quickstart.md
@@ -160,8 +160,10 @@ driver_stats_source = FileSource(
 # three feature column. Here we define a Feature View that will allow us to serve this
 # data to our model online.
 driver_stats_fv = FeatureView(
-    # The unique name of this feature view. Two feature views in a single
-    # project cannot have the same name
+    # The unique name of this feature view. Two feature views in a single
+    # project cannot have the same name, and names must be unique across
+    # all feature view types (regular, stream, on-demand) to avoid conflicts
+    # during `feast apply`.
name="driver_hourly_stats", entities=[driver], ttl=timedelta(days=1), diff --git a/docs/how-to-guides/customizing-feast/adding-a-new-offline-store.md b/docs/how-to-guides/customizing-feast/adding-a-new-offline-store.md index 8712ffdf1bb..d1ca100bf74 100644 --- a/docs/how-to-guides/customizing-feast/adding-a-new-offline-store.md +++ b/docs/how-to-guides/customizing-feast/adding-a-new-offline-store.md @@ -385,7 +385,7 @@ Even if you have created the `OfflineStore` class in a separate repo, you can st ``` 3. Next, set up your offline store to run the universal integration tests. These are integration tests specifically intended to test offline and online stores against Feast API functionality, to ensure that the Feast APIs works with your offline store. - * Feast parametrizes integration tests using the `FULL_REPO_CONFIGS` variable defined in `sdk/python/tests/integration/feature_repos/repo_configuration.py` which stores different offline store classes for testing. + * Feast parametrizes integration tests using the `FULL_REPO_CONFIGS` variable defined in `sdk/python/tests/universal/feature_repos/repo_configuration.py` which stores different offline store classes for testing. * To overwrite the default configurations to use your own offline store, you can simply create your own file that contains a `FULL_REPO_CONFIGS` dictionary, and point Feast to that file by setting the environment variable `FULL_REPO_CONFIGS_MODULE` to point to that file. The module should add new `IntegrationTestRepoConfig` classes to the `AVAILABLE_OFFLINE_STORES` by defining an offline store that you would like Feast to test with. 
A sample `FULL_REPO_CONFIGS_MODULE` looks something like this: diff --git a/docs/how-to-guides/customizing-feast/adding-support-for-a-new-online-store.md b/docs/how-to-guides/customizing-feast/adding-support-for-a-new-online-store.md index 455ac21fd45..4cdf685f86e 100644 --- a/docs/how-to-guides/customizing-feast/adding-support-for-a-new-online-store.md +++ b/docs/how-to-guides/customizing-feast/adding-support-for-a-new-online-store.md @@ -322,7 +322,7 @@ Even if you have created the `OnlineStore` class in a separate repo, you can sti make test-python-unit ``` 2. The universal tests, which are integration tests specifically intended to test offline and online stores, should be run against Feast to ensure that the Feast APIs works with your online store. - * Feast parametrizes integration tests using the `FULL_REPO_CONFIGS` variable defined in `sdk/python/tests/integration/feature_repos/repo_configuration.py` which stores different online store classes for testing. + * Feast parametrizes integration tests using the `FULL_REPO_CONFIGS` variable defined in `sdk/python/tests/universal/feature_repos/repo_configuration.py` which stores different online store classes for testing. * To overwrite these configurations, you can simply create your own file that contains a `FULL_REPO_CONFIGS` variable, and point Feast to that file by setting the environment variable `FULL_REPO_CONFIGS_MODULE` to point to that file. A sample `FULL_REPO_CONFIGS_MODULE` looks something like this: @@ -350,7 +350,7 @@ If you are planning instead to use a Dockerized container to run your tests agai If you create a containerized docker image for testing, developers who are trying to test with your online store will not have to spin up their own instance of the online store for testing. 
An example of an `OnlineStoreCreator` is shown below: -{% code title="sdk/python/tests/integration/feature_repos/universal/online_store/redis.py" %} +{% code title="sdk/python/tests/universal/feature_repos/universal/online_store/redis.py" %} ```python class RedisOnlineStoreCreator(OnlineStoreCreator): def __init__(self, project_name: str, **kwargs): @@ -373,7 +373,7 @@ class RedisOnlineStoreCreator(OnlineStoreCreator): test-python-universal-cassandra: PYTHONPATH='.' \ FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.online_stores.cassandra_online_store.cassandra_repo_configuration \ - PYTEST_PLUGINS=sdk.python.tests.integration.feature_repos.universal.online_store.cassandra \ + PYTEST_PLUGINS=sdk.python.tests.universal.feature_repos.universal.online_store.cassandra \ IS_TEST=True \ python -m pytest -x --integration \ sdk/python/tests diff --git a/docs/how-to-guides/dbt-integration.md b/docs/how-to-guides/dbt-integration.md index c85cf2508db..02c188d6bf1 100644 --- a/docs/how-to-guides/dbt-integration.md +++ b/docs/how-to-guides/dbt-integration.md @@ -289,6 +289,12 @@ Feast automatically maps dbt/warehouse column types to Feast types: | `TIMESTAMP`, `DATETIME` | `UnixTimestamp` | | `BYTES`, `BINARY` | `Bytes` | | `ARRAY` | `Array(type)` | +| `JSON`, `JSONB` | `Map` (or `Json` if declared in schema) | +| `VARIANT`, `OBJECT` | `Map` | +| `SUPER` | `Map` | +| `MAP` | `Map` | +| `STRUCT`, `RECORD` | `Struct` (BigQuery) | +| `struct<...>` | `Struct` (Spark) | Snowflake `NUMBER(precision, scale)` types are handled specially: - Scale > 0: `Float64` diff --git a/docs/how-to-guides/feast-on-kubernetes.md b/docs/how-to-guides/feast-on-kubernetes.md index 5504dbd671a..f5df67273bb 100644 --- a/docs/how-to-guides/feast-on-kubernetes.md +++ b/docs/how-to-guides/feast-on-kubernetes.md @@ -64,8 +64,52 @@ spec: > _More advanced FeatureStore CR examples can be found in the feast-operator [samples directory](../../infra/feast-operator/config/samples)._ +## Upgrading the Operator + +### 
OLM-managed installations + +If the operator was installed via OLM, upgrades are handled +automatically. No manual steps are required — OLM recreates the operator Deployment +during the upgrade process. + +### kubectl-managed installations + +For most upgrades, re-running the install command is sufficient: + +```sh +kubectl apply --server-side --force-conflicts -f https://raw.githubusercontent.com/feast-dev/feast/refs/heads/stable/infra/feast-operator/dist/install.yaml +``` + +#### One-time step: upgrading from versions before 0.61.0 + +Version 0.61.0 updated the operator Deployment's `spec.selector` to include the +`app.kubernetes.io/name: feast-operator` label, fixing a bug where the metrics service +could accidentally target pods from other operators in shared namespaces. + +Because Kubernetes treats `spec.selector` as an immutable field, upgrading directly from +a pre-0.61.0 version with `kubectl apply` will fail with: + +``` +The Deployment "feast-operator-controller-manager" is invalid: spec.selector: Invalid value: ... field is immutable +``` + +To resolve this, delete the existing operator Deployment before applying the new manifest: + +```sh +kubectl delete deployment feast-operator-controller-manager -n feast-operator-system --ignore-not-found=true +kubectl apply --server-side --force-conflicts -f https://raw.githubusercontent.com/feast-dev/feast/refs/heads/stable/infra/feast-operator/dist/install.yaml +``` + +This is only required once. Existing FeatureStore CRs and their managed workloads (feature +servers, registry, etc.) are not affected — the new operator pod will reconcile them +automatically on startup. Future upgrades from 0.61.0 onward will not require this step. + {% hint style="success" %} -Important note: Scaling a Feature Store Deployment should only be done if the configured data store(s) will support it. 
+**Scaling & High Availability:** The Feast Operator supports horizontal scaling via static replicas, HPA autoscaling, or external autoscalers like [KEDA](https://keda.sh). Scaling requires DB-backed persistence for all enabled services. -Please check the how-to guide for some specific recommendations on [how to scale Feast](./scaling-feast.md). +When scaling is enabled, the operator auto-injects soft pod anti-affinity and zone topology spread constraints for resilience. You can also configure a PodDisruptionBudget to protect against voluntary disruptions. + +See the [Horizontal Scaling with the Feast Operator](./scaling-feast.md#horizontal-scaling-with-the-feast-operator) guide for configuration details, including [HA options](./scaling-feast.md#high-availability), or check the general recommendations on [how to scale Feast](./scaling-feast.md). {% endhint %} + +> _Sample scaling CRs are available at [`v1_featurestore_scaling_static.yaml`](../../infra/feast-operator/config/samples/v1_featurestore_scaling_static.yaml) and [`v1_featurestore_scaling_hpa.yaml`](../../infra/feast-operator/config/samples/v1_featurestore_scaling_hpa.yaml)._ diff --git a/docs/how-to-guides/scaling-feast.md b/docs/how-to-guides/scaling-feast.md index d0bd6aef8a0..5982f917674 100644 --- a/docs/how-to-guides/scaling-feast.md +++ b/docs/how-to-guides/scaling-feast.md @@ -23,4 +23,226 @@ However, this process does not scale for large data sets, since it's executed on Feast supports pluggable [Compute Engines](../getting-started/components/compute-engine.md), that allow the materialization process to be scaled up. Aside from the local process, Feast supports a [Lambda-based materialization engine](https://rtd.feast.dev/en/master/#alpha-lambda-based-engine), and a [Bytewax-based materialization engine](https://rtd.feast.dev/en/master/#bytewax-engine). -Users may also be able to build an engine to scale up materialization using existing infrastructure in their organizations. 
\ No newline at end of file +Users may also be able to build an engine to scale up materialization using existing infrastructure in their organizations. + +### Horizontal Scaling with the Feast Operator + +When running Feast on Kubernetes with the [Feast Operator](./feast-on-kubernetes.md), you can horizontally scale the FeatureStore deployment using `spec.replicas` or HPA autoscaling. The FeatureStore CRD implements the Kubernetes [scale sub-resource](https://kubernetes.io/docs/tasks/extend-kubernetes/custom-resources/custom-resource-definitions/#scale-subresource), so you can also use `kubectl scale`: + +```bash +kubectl scale featurestore/my-feast --replicas=3 +``` + +**Prerequisites:** Horizontal scaling requires **DB-backed persistence** for all enabled services (online store, offline store, and registry). File-based persistence (SQLite, DuckDB, `registry.db`) is incompatible with multiple replicas because these backends do not support concurrent access from multiple pods. + +#### Static Replicas + +Set a fixed number of replicas via `spec.replicas`: + +```yaml +apiVersion: feast.dev/v1 +kind: FeatureStore +metadata: + name: sample-scaling +spec: + feastProject: my_project + replicas: 3 + services: + onlineStore: + persistence: + store: + type: postgres + secretRef: + name: feast-data-stores + registry: + local: + persistence: + store: + type: sql + secretRef: + name: feast-data-stores +``` + +#### Autoscaling with HPA + +Configure a HorizontalPodAutoscaler to dynamically scale based on metrics. 
HPA autoscaling is configured under `services.scaling.autoscaling` and is mutually exclusive with `spec.replicas > 1`: + +```yaml +apiVersion: feast.dev/v1 +kind: FeatureStore +metadata: + name: sample-autoscaling +spec: + feastProject: my_project + services: + scaling: + autoscaling: + minReplicas: 2 + maxReplicas: 10 + metrics: + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: 70 + podDisruptionBudgets: + maxUnavailable: 1 + onlineStore: + persistence: + store: + type: postgres + secretRef: + name: feast-data-stores + server: + resources: + requests: + cpu: 200m + memory: 256Mi + registry: + local: + persistence: + store: + type: sql + secretRef: + name: feast-data-stores +``` + +{% hint style="info" %} +When autoscaling is configured, the operator automatically sets the deployment strategy to `RollingUpdate` (instead of the default `Recreate`) to ensure zero-downtime scaling, and auto-injects soft pod anti-affinity and zone topology spread constraints. You can override any of these by explicitly setting `deploymentStrategy`, `affinity`, or `topologySpreadConstraints` in the CR. +{% endhint %} + +#### Validation Rules + +The operator enforces the following rules: +- `spec.replicas > 1` and `services.scaling.autoscaling` are **mutually exclusive** -- you cannot set both. +- Scaling with `replicas > 1` or any `autoscaling` config is **rejected** if any enabled service uses file-based persistence. +- S3 (`s3://`) and GCS (`gs://`) backed registry file persistence is allowed with scaling, since these object stores support concurrent readers. + +#### High Availability + +When scaling is enabled (`replicas > 1` or `autoscaling`), the operator provides HA features to improve resilience: + +**Pod Anti-Affinity** — The operator automatically injects a soft (`preferredDuringSchedulingIgnoredDuringExecution`) pod anti-affinity rule that prefers spreading pods across different nodes. 
This prevents multiple replicas from being co-located on the same node, improving resilience to node failures. You can override this by providing your own `affinity` configuration: + +```yaml +spec: + replicas: 3 + services: + # Override with custom affinity (e.g. strict anti-affinity) + affinity: + podAntiAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + - topologyKey: kubernetes.io/hostname + labelSelector: + matchLabels: + feast.dev/name: my-feast + # ... +``` + +**Topology Spread Constraints** — The operator automatically injects a soft zone-spread constraint (`whenUnsatisfiable: ScheduleAnyway`) that distributes pods across availability zones. This is a best-effort spread — if zones are unavailable, pods will still be scheduled. You can override this with explicit constraints or disable it with an empty array: + +```yaml +spec: + replicas: 3 + services: + # Override with custom topology spread (e.g. strict zone spreading) + topologySpreadConstraints: + - maxSkew: 1 + topologyKey: topology.kubernetes.io/zone + whenUnsatisfiable: DoNotSchedule + labelSelector: + matchLabels: + feast.dev/name: my-feast + # ... +``` + +To disable the auto-injected topology spread: + +```yaml +spec: + replicas: 3 + services: + topologySpreadConstraints: [] + # ... +``` + +**PodDisruptionBudget** — You can configure a PDB to limit voluntary disruptions (e.g. during node drains or cluster upgrades). The PDB is only created when scaling is enabled. Exactly one of `minAvailable` or `maxUnavailable` must be set: + +```yaml +spec: + replicas: 3 + services: + podDisruptionBudgets: + maxUnavailable: 1 # at most 1 pod unavailable during disruptions + # -- OR -- + # podDisruptionBudgets: + # minAvailable: "50%" # at least 50% of pods must remain available + # ... +``` + +{% hint style="info" %} +The PDB is not auto-injected — you must explicitly configure it. This is intentional because a misconfigured PDB (e.g. 
`minAvailable` equal to the replica count) can block node drains and cluster upgrades. +{% endhint %} + +#### Using KEDA (Kubernetes Event-Driven Autoscaling) + +[KEDA](https://keda.sh) is also supported as an external autoscaler. KEDA should target the FeatureStore's scale sub-resource directly (since it implements the Kubernetes scale API). This is the recommended approach because the operator manages the Deployment's replica count from `spec.replicas` — targeting the Deployment directly would conflict with the operator's reconciliation. + +When using KEDA, do **not** set `scaling.autoscaling` or `spec.replicas > 1` -- KEDA manages the replica count through the scale sub-resource. + +1. **Ensure DB-backed persistence** -- The CRD's CEL validation rules automatically enforce DB-backed persistence when KEDA scales `spec.replicas` above 1 via the scale sub-resource. The operator also automatically switches the deployment strategy to `RollingUpdate` when `replicas > 1`. + +2. **Configure the FeatureStore** with DB-backed persistence: + +```yaml +apiVersion: feast.dev/v1 +kind: FeatureStore +metadata: + name: sample-keda +spec: + feastProject: my_project + services: + onlineStore: + persistence: + store: + type: postgres + secretRef: + name: feast-data-stores + registry: + local: + persistence: + store: + type: sql + secretRef: + name: feast-data-stores +``` + +3. **Create a KEDA `ScaledObject`** targeting the FeatureStore resource: + +```yaml +apiVersion: keda.sh/v1alpha1 +kind: ScaledObject +metadata: + name: feast-scaledobject +spec: + scaleTargetRef: + apiVersion: feast.dev/v1 + kind: FeatureStore + name: sample-keda + minReplicaCount: 1 + maxReplicaCount: 10 + triggers: + - type: prometheus + metadata: + serverAddress: http://prometheus.monitoring.svc:9090 + metricName: http_requests_total + query: sum(rate(http_requests_total{service="feast"}[2m])) + threshold: "100" +``` + +{% hint style="warning" %} +KEDA-created HPAs are not owned by the Feast operator. 
The operator will not interfere with them, but it also will not clean them up if the FeatureStore CR is deleted. You must manage the KEDA `ScaledObject` lifecycle independently. +{% endhint %} + +For the full API reference, see the [FeatureStore CRD reference](../../infra/feast-operator/docs/api/markdown/ref.md). \ No newline at end of file diff --git a/docs/project/development-guide.md b/docs/project/development-guide.md index 4f915f64e5c..ee5cc8cfcce 100644 --- a/docs/project/development-guide.md +++ b/docs/project/development-guide.md @@ -217,7 +217,7 @@ make test-python-integration-local To test across clouds, on top of setting up Redis, you also need GCP / AWS / Snowflake setup. > Note: you can manually control what tests are run today by inspecting -> [RepoConfiguration](https://github.com/feast-dev/feast/blob/master/sdk/python/tests/integration/feature_repos/repo_configuration.py) +> [RepoConfiguration](https://github.com/feast-dev/feast/blob/master/sdk/python/tests/universal/feature_repos/repo_configuration.py) > and commenting out tests that are added to `DEFAULT_FULL_REPO_CONFIGS` **GCP** diff --git a/docs/reference/feature-servers/python-feature-server.md b/docs/reference/feature-servers/python-feature-server.md index 2e5792b0a6f..bf288b191ef 100644 --- a/docs/reference/feature-servers/python-feature-server.md +++ b/docs/reference/feature-servers/python-feature-server.md @@ -311,6 +311,120 @@ requests.post( data=json.dumps(materialize_data)) ``` +## Prometheus Metrics + +The Python feature server can expose Prometheus-compatible metrics on a dedicated +HTTP endpoint (default port `8000`). Metrics are **opt-in** and carry zero overhead +when disabled. + +### Enabling metrics + +**Option 1 — CLI flag** (useful for one-off runs): + +```bash +feast serve --metrics +``` + +**Option 2 — `feature_store.yaml`** (recommended for production): + +```yaml +feature_server: + type: local + metrics: + enabled: true +``` + +Either option is sufficient. 
When both are set, metrics are enabled. + +### Per-category control + +By default, enabling metrics turns on **all** categories. You can selectively +disable individual categories within the same `metrics` block: + +```yaml +feature_server: + type: local + metrics: + enabled: true + resource: true # CPU / memory gauges + request: false # disable endpoint latency & request counters + online_features: true # online feature retrieval counters + push: true # push request counters + materialization: true # materialization counters & duration + freshness: true # feature freshness gauges +``` + +Any category set to `false` will emit no metrics and start no background +threads (e.g., setting `freshness: false` prevents the registry polling +thread from starting). All categories default to `true`. + +### Available metrics + +| Metric | Type | Labels | Description | +|--------|------|--------|-------------| +| `feast_feature_server_cpu_usage` | Gauge | — | Process CPU usage % | +| `feast_feature_server_memory_usage` | Gauge | — | Process memory usage % | +| `feast_feature_server_request_total` | Counter | `endpoint`, `status` | Total requests per endpoint | +| `feast_feature_server_request_latency_seconds` | Histogram | `endpoint`, `feature_count`, `feature_view_count` | Request latency with p50/p95/p99 support | +| `feast_online_features_request_total` | Counter | — | Total online feature retrieval requests | +| `feast_online_features_entity_count` | Histogram | — | Entity rows per online feature request | +| `feast_push_request_total` | Counter | `push_source`, `mode` | Push requests by source and mode | +| `feast_materialization_total` | Counter | `feature_view`, `status` | Materialization runs (success/failure) | +| `feast_materialization_duration_seconds` | Histogram | `feature_view` | Materialization duration per feature view | +| `feast_feature_freshness_seconds` | Gauge | `feature_view`, `project` | Seconds since last materialization | + +### Scraping with Prometheus 
+ +```yaml +scrape_configs: + - job_name: feast + static_configs: + - targets: ["localhost:8000"] +``` + +### Kubernetes / Feast Operator + +Set `metrics: true` in your FeatureStore CR: + +```yaml +spec: + services: + onlineStore: + server: + metrics: true +``` + +The operator automatically exposes port 8000 and creates the corresponding +Service port so Prometheus can discover it. + +### Multi-worker and multi-replica (HPA) support + +Feast uses Prometheus **multiprocess mode** so that metrics are correct +regardless of the number of Gunicorn workers or Kubernetes replicas. + +**How it works:** + +* Each Gunicorn worker writes metric values to shared files in a + temporary directory (`PROMETHEUS_MULTIPROCESS_DIR`). Feast creates + this directory automatically; you can override it by setting the + environment variable yourself. +* The metrics HTTP server on port 8000 aggregates all workers' + metric files using `MultiProcessCollector`, so a single scrape + returns accurate totals. +* Gunicorn hooks clean up dead-worker files automatically + (`child_exit` → `mark_process_dead`). +* CPU and memory gauges use `multiprocess_mode=liveall` — Prometheus + shows per-worker values distinguished by a `pid` label. +* Feature freshness gauges use `multiprocess_mode=max` — Prometheus + shows the worst-case staleness (all workers compute the same value). +* Counters and histograms (request counts, latency, materialization) + are automatically summed across workers. + +**Multiple replicas (HPA):** Each pod runs its own metrics endpoint. +Prometheus adds an `instance` label per pod, so there is no +duplication. Use `sum(rate(...))` or `histogram_quantile(...)` across +instances as usual. + ## Starting the feature server in TLS(SSL) mode Enabling TLS mode ensures that data between the Feast client and server is transmitted securely. For an ideal production environment, it is recommended to start the feature server in TLS mode. 
diff --git a/docs/reference/feature-store-yaml.md b/docs/reference/feature-store-yaml.md index 820731064fc..7411c673576 100644 --- a/docs/reference/feature-store-yaml.md +++ b/docs/reference/feature-store-yaml.md @@ -36,6 +36,14 @@ An example configuration: ```yaml feature_server: type: local + metrics: # Prometheus metrics configuration. Also achievable via `feast serve --metrics`. + enabled: true # Enable Prometheus metrics server on port 8000 + resource: true # CPU / memory gauges + request: true # endpoint latency histograms & request counters + online_features: true # online feature retrieval counters + push: true # push request counters + materialization: true # materialization counters & duration histograms + freshness: true # per-feature-view freshness gauges offline_push_batching_enabled: true # Enables batching of offline writes processed by /push. Online writes are unaffected. offline_push_batching_batch_size: 100 # Maximum number of buffered rows before writing to the offline store. offline_push_batching_batch_interval_seconds: 5 # Maximum time rows may remain buffered before a forced flush. 
diff --git a/docs/reference/online-stores/README.md b/docs/reference/online-stores/README.md index 5df4710434c..2d962b2013f 100644 --- a/docs/reference/online-stores/README.md +++ b/docs/reference/online-stores/README.md @@ -22,10 +22,6 @@ Please see [Online Store](../../getting-started/components/online-store.md) for [dragonfly.md](dragonfly.md) {% endcontent-ref %} -{% content-ref url="ikv.md" %} -[ikv.md](ikv.md) -{% endcontent-ref %} - {% content-ref url="datastore.md" %} [datastore.md](datastore.md) {% endcontent-ref %} @@ -54,6 +50,10 @@ Please see [Online Store](../../getting-started/components/online-store.md) for [mysql.md](mysql.md) {% endcontent-ref %} +{% content-ref url="mongodb.md" %} +[mongodb.md](mongodb.md) +{% endcontent-ref %} + {% content-ref url="hazelcast.md" %} [hazelcast.md](hazelcast.md) {% endcontent-ref %} diff --git a/docs/reference/online-stores/dynamodb.md b/docs/reference/online-stores/dynamodb.md index 344caccac1d..ec0104172fb 100644 --- a/docs/reference/online-stores/dynamodb.md +++ b/docs/reference/online-stores/dynamodb.md @@ -22,6 +22,47 @@ online_store: The full set of configuration options is available in [DynamoDBOnlineStoreConfig](https://rtd.feast.dev/en/master/#feast.infra.online_stores.dynamodb.DynamoDBOnlineStoreConfig). 
+## Configuration + +Below is an example with performance tuning options: + +{% code title="feature_store.yaml" %} +```yaml +project: my_feature_repo +registry: data/registry.db +provider: aws +online_store: + type: dynamodb + region: us-west-2 + batch_size: 100 + max_read_workers: 10 + consistent_reads: false +``` +{% endcode %} + +### Configuration Options + +| Option | Type | Default | Description | +| ------ | ---- | ------- | ----------- | +| `region` | string | | AWS region for DynamoDB | +| `table_name_template` | string | `{project}.{table_name}` | Template for table names | +| `batch_size` | int | `100` | Number of items per BatchGetItem/BatchWriteItem request (max 100) | +| `max_read_workers` | int | `10` | Maximum parallel threads for batch read operations. Higher values improve throughput for large batch reads but increase resource usage | +| `consistent_reads` | bool | `false` | Whether to use strongly consistent reads (higher latency, guaranteed latest data) | +| `tags` | dict | `null` | AWS resource tags added to each table | +| `session_based_auth` | bool | `false` | Use AWS session-based client authentication | + +### Performance Tuning + +**Parallel Batch Reads**: When reading features for many entities, DynamoDB's BatchGetItem is limited to 100 items per request. For 500 entities, this requires 5 batch requests. The `max_read_workers` option controls how many of these batches execute in parallel: + +- **Sequential (old behavior)**: 5 batches × 10ms = 50ms total +- **Parallel (with `max_read_workers: 10`)**: 5 batches in parallel ≈ 10ms total + +For high-throughput workloads with large entity counts, increase `max_read_workers` (up to 20-30) based on your DynamoDB capacity and network conditions. + +**Batch Size**: Increase `batch_size` up to 100 to reduce the number of API calls. However, larger batches may hit DynamoDB's 16MB response limit for tables with large feature values.
+ ## Permissions Feast requires the following permissions in order to execute commands for DynamoDB online store: diff --git a/docs/reference/online-stores/ikv.md b/docs/reference/online-stores/ikv.md deleted file mode 100644 index 79f21d17797..00000000000 --- a/docs/reference/online-stores/ikv.md +++ /dev/null @@ -1,69 +0,0 @@ -# IKV (Inlined Key-Value Store) online store - -## Description - -[IKV](https://github.com/inlinedio/ikv-store) is a fully-managed embedded key-value store, primarily designed for storing ML features. Most key-value stores (think Redis or Cassandra) need a remote database cluster, whereas IKV allows you to utilize your existing application infrastructure to store data (cost efficient) and access it without any network calls (better performance). See detailed performance benchmarks and cost comparison with Redis on [https://inlined.io](https://inlined.io). IKV can be used as an online-store in Feast, the rest of this guide goes over the setup. - -## Getting started -Make sure you have Python and `pip` installed. - -Install the Feast SDK and CLI: `pip install feast` - -In order to use this online store, you'll need to install the IKV extra (along with the dependency needed for the offline store of choice). E.g. -- `pip install 'feast[gcp, ikv]'` -- `pip install 'feast[snowflake, ikv]'` -- `pip install 'feast[aws, ikv]'` -- `pip install 'feast[azure, ikv]'` - -You can get started by using any of the other templates (e.g. `feast init -t gcp` or `feast init -t snowflake` or `feast init -t aws`), and then swapping in IKV as the online store as seen below in the examples. - -### 1. Provision an IKV store -Go to [https://inlined.io](https://inlined.io) or email onboarding[at]inlined.io - -### 2. 
Configure - -Update `my_feature_repo/feature_store.yaml` with the below contents: - -{% code title="feature_store.yaml" %} -```yaml -project: my_feature_repo -registry: data/registry.db -provider: local -online_store: - type: ikv - account_id: secret - account_passkey: secret - store_name: your-store-name - mount_directory: /absolute/path/on/disk/for/ikv/embedded/index -``` -{% endcode %} - -After provisioning an IKV account/store, you should have an account id, passkey and store-name. Additionally you must specify a mount-directory - where IKV will pull/update (maintain) a copy of the index for online reads (IKV is an embedded database). It can be skipped only if you don't plan to read any data from this container. The mount directory path usually points to a location on local/remote disk. - -The full set of configuration options is available in IKVOnlineStoreConfig at `sdk/python/feast/infra/online_stores/contrib/ikv_online_store/ikv.py` - -## Functionality Matrix - -The set of functionality supported by online stores is described in detail [here](overview.md#functionality). -Below is a matrix indicating which functionality is supported by the IKV online store. - -| | IKV | -| :-------------------------------------------------------- | :---- | -| write feature values to the online store | yes | -| read feature values from the online store | yes | -| update infrastructure (e.g. tables) in the online store | yes | -| teardown infrastructure (e.g. 
tables) in the online store | yes | -| generate a plan of infrastructure changes | no | -| support for on-demand transforms | yes | -| readable by Python SDK | yes | -| readable by Java | no | -| readable by Go | no | -| support for entityless feature views | yes | -| support for concurrent writing to the same key | yes | -| support for ttl (time to live) at retrieval | no | -| support for deleting expired data | no | -| collocated by feature view | no | -| collocated by feature service | no | -| collocated by entity key | yes | - -To compare this set of functionality against other online stores, please see the full [functionality matrix](overview.md#functionality-matrix). diff --git a/docs/reference/online-stores/mongodb.md b/docs/reference/online-stores/mongodb.md new file mode 100644 index 00000000000..969637b9e68 --- /dev/null +++ b/docs/reference/online-stores/mongodb.md @@ -0,0 +1,181 @@ +# MongoDB online store (Preview) + +## Description + +The [MongoDB](https://www.mongodb.com/) online store provides support for materializing feature values into MongoDB for serving online features. + +{% hint style="warning" %} +The MongoDB online store is currently in **preview**. Some functionality may be unstable, and breaking changes may occur in future releases. +{% endhint %} + +## Features + +* Supports both synchronous and asynchronous operations for high-performance feature retrieval +* Native async support uses PyMongo's `AsyncMongoClient` (no Motor dependency required) +* Flexible connection options supporting MongoDB Atlas, self-hosted MongoDB, and MongoDB replica sets +* Automatic index creation for optimized query performance +* Entity key collocation for efficient feature retrieval + +## Getting started + +In order to use this online store, you'll need to install the MongoDB extra (along with the dependency needed for the offline store of choice): + +```bash +pip install 'feast[mongodb]' +``` + +You can get started by using any of the other templates (e.g. 
`feast init -t gcp` or `feast init -t snowflake` or `feast init -t aws`), and then swapping in MongoDB as the online store as seen below in the examples. + +## Examples + +### Basic configuration with MongoDB Atlas + +{% code title="feature_store.yaml" %} +```yaml +project: my_feature_repo +registry: data/registry.db +provider: local +online_store: + type: mongodb + connection_string: "mongodb+srv://username:password@cluster.mongodb.net/" # pragma: allowlist secret + database_name: feast_online_store +``` +{% endcode %} + +### Self-hosted MongoDB with authentication + +{% code title="feature_store.yaml" %} +```yaml +project: my_feature_repo +registry: data/registry.db +provider: local +online_store: + type: mongodb + connection_string: "mongodb://username:password@localhost:27017/" # pragma: allowlist secret + database_name: feast_online_store + collection_suffix: features +``` +{% endcode %} + +### MongoDB replica set configuration + +{% code title="feature_store.yaml" %} +```yaml +project: my_feature_repo +registry: data/registry.db +provider: local +online_store: + type: mongodb + connection_string: "mongodb://host1:27017,host2:27017,host3:27017/?replicaSet=myReplicaSet" + database_name: feast_online_store + client_kwargs: + retryWrites: true + w: majority +``` +{% endcode %} + +### Advanced configuration with custom client options + +{% code title="feature_store.yaml" %} +```yaml +project: my_feature_repo +registry: data/registry.db +provider: local +online_store: + type: mongodb + connection_string: "mongodb+srv://cluster.mongodb.net/" + database_name: feast_online_store + collection_suffix: features + client_kwargs: + maxPoolSize: 50 + minPoolSize: 10 + serverSelectionTimeoutMS: 5000 + connectTimeoutMS: 10000 +``` +{% endcode %} + +The full set of configuration options is available in [MongoDBOnlineStoreConfig](https://rtd.feast.dev/en/latest/#feast.infra.online_stores.mongodb_online_store.mongodb.MongoDBOnlineStoreConfig). 
+ +## Data Model + +The MongoDB online store uses a **single collection per project** with entity key collocation. Features from multiple feature views for the same entity are stored together in a single document. + +### Example Document Schema + +The example shows a single entity. It contains 3 features from 2 feature views: "rating" and "trips_last_7d" from Feature +View "driver_stats", and "surge_multiplier" from the "pricing" view. +Each feature view has its own event timestamp. +The "created_timestamp" marks when the entity was materialized. + +```javascript +{ + "_id": "", // Binary entity key (bytes) + "features": { + "driver_stats": { + "rating": 4.91, + "trips_last_7d": 132 + }, + "pricing": { + "surge_multiplier": 1.2 + } + }, + "event_timestamps": { + "driver_stats": ISODate("2026-01-20T12:00:00Z"), + "pricing": ISODate("2026-01-21T08:30:00Z") + }, + "created_timestamp": ISODate("2026-01-21T12:00:05Z") +} +``` + +### Key Design Decisions + +* **`_id` field**: Uses the serialized entity key (bytes) as the primary key for efficient lookups +* **Nested features**: Features are organized by feature view name, allowing multiple feature views per entity +* **Event timestamps**: Stored per feature view to track when each feature set was last updated +* **Created timestamp**: Global timestamp for the entire document + +### Indexes + +The online store automatically creates the following index: +* Primary key index on `_id` (automatic in MongoDB), set to the serialized entity key. + +No additional indexes are required for the online store operations. + +## Async Support + +The MongoDB online store provides native async support using PyMongo 4.13+'s stable `AsyncMongoClient`.
This enables: + +* **High concurrency**: Handle thousands of concurrent feature requests without thread pool limitations +* **True async I/O**: Non-blocking operations for better performance in async applications +* **10-20x performance improvement**: For concurrent workloads compared to sequential sync operations + +Both sync and async methods are fully supported: +* `online_read` / `online_read_async` +* `online_write_batch` / `online_write_batch_async` + +## Functionality Matrix + +The set of functionality supported by online stores is described in detail [here](overview.md#functionality). +Below is a matrix indicating which functionality is supported by the MongoDB online store. + +| | MongoDB | +| :-------------------------------------------------------- | :------ | +| write feature values to the online store | yes | +| read feature values from the online store | yes | +| update infrastructure (e.g. tables) in the online store | yes | +| teardown infrastructure (e.g. tables) in the online store | yes | +| generate a plan of infrastructure changes | no | +| support for on-demand transforms | yes | +| readable by Python SDK | yes | +| readable by Java | no | +| readable by Go | no | +| support for entityless feature views | yes | +| support for concurrent writing to the same key | yes | +| support for ttl (time to live) at retrieval | no | +| support for deleting expired data | no | +| collocated by feature view | no | +| collocated by feature service | no | +| collocated by entity key | yes | + +To compare this set of functionality against other online stores, please see the full [functionality matrix](overview.md#functionality-matrix). 
+ diff --git a/docs/reference/online-stores/overview.md b/docs/reference/online-stores/overview.md index b54329ad613..6ee076b0669 100644 --- a/docs/reference/online-stores/overview.md +++ b/docs/reference/online-stores/overview.md @@ -29,26 +29,26 @@ See this [issue](https://github.com/feast-dev/feast/issues/2254) for a discussio ## Functionality Matrix There are currently five core online store implementations: `SqliteOnlineStore`, `RedisOnlineStore`, `DynamoDBOnlineStore`, `SnowflakeOnlineStore`, and `DatastoreOnlineStore`. -There are several additional implementations contributed by the Feast community (`PostgreSQLOnlineStore`, `HbaseOnlineStore`, `CassandraOnlineStore` and `IKVOnlineStore`), which are not guaranteed to be stable or to match the functionality of the core implementations. +There are several additional implementations contributed by the Feast community (`PostgreSQLOnlineStore`, `HbaseOnlineStore` and `CassandraOnlineStore`), which are not guaranteed to be stable or to match the functionality of the core implementations. Details for each specific online store, such as how to configure it in a `feature_store.yaml`, can be found [here](README.md). Below is a matrix indicating which online stores support what functionality. -| | Sqlite | Redis | DynamoDB | Snowflake | Datastore | Postgres | Hbase | [[Cassandra](https://cassandra.apache.org/_/index.html) / [Astra DB](https://www.datastax.com/products/datastax-astra?utm_source=feast)] | [IKV](https://inlined.io) | Milvus | -| :-------------------------------------------------------- | :-- | :-- | :-- | :-- | :-- | :-- | :-- | :-- | :-- |:-------| -| write feature values to the online store | yes | yes | yes | yes | yes | yes | yes | yes | yes | yes | -| read feature values from the online store | yes | yes | yes | yes | yes | yes | yes | yes | yes | yes | -| update infrastructure (e.g. 
tables) in the online store | yes | yes | yes | yes | yes | yes | yes | yes | yes | yes | -| teardown infrastructure (e.g. tables) in the online store | yes | yes | yes | yes | yes | yes | yes | yes | yes | yes | -| generate a plan of infrastructure changes | yes | no | no | no | no | no | no | yes | no | no | -| support for on-demand transforms | yes | yes | yes | yes | yes | yes | yes | yes | yes | yes | -| readable by Python SDK | yes | yes | yes | yes | yes | yes | yes | yes | yes | yes | -| readable by Java | no | yes | no | no | no | no | no | no | no | no | -| readable by Go | yes | yes | no | no | no | no | no | no | no | no | -| support for entityless feature views | yes | yes | yes | yes | yes | yes | yes | yes | yes | no | -| support for concurrent writing to the same key | no | yes | no | no | no | no | no | no | yes | no | -| support for ttl (time to live) at retrieval | no | yes | no | no | no | no | no | no | no | no | -| support for deleting expired data | no | yes | no | no | no | no | no | no | no | no | -| collocated by feature view | yes | no | yes | yes | yes | yes | yes | yes | no | no | -| collocated by feature service | no | no | no | no | no | no | no | no | no | no | -| collocated by entity key | no | yes | no | no | no | no | no | no | yes | no | +| | Sqlite | Redis | DynamoDB | Snowflake | Datastore | Postgres | Hbase | [[Cassandra](https://cassandra.apache.org/_/index.html) / [Astra DB](https://www.datastax.com/products/datastax-astra?utm_source=feast)] | Milvus | +| :-------------------------------------------------------- | :-- | :-- | :-- | :-- | :-- | :-- | :-- | :-- |:----| +| write feature values to the online store | yes | yes | yes | yes | yes | yes | yes | yes | yes | +| read feature values from the online store | yes | yes | yes | yes | yes | yes | yes | yes | yes | +| update infrastructure (e.g. tables) in the online store | yes | yes | yes | yes | yes | yes | yes | yes | yes | +| teardown infrastructure (e.g. 
tables) in the online store | yes | yes | yes | yes | yes | yes | yes | yes | yes | +| generate a plan of infrastructure changes | yes | no | no | no | no | no | no | yes | no | +| support for on-demand transforms | yes | yes | yes | yes | yes | yes | yes | yes | yes | +| readable by Python SDK | yes | yes | yes | yes | yes | yes | yes | yes | yes | +| readable by Java | no | yes | no | no | no | no | no | no | no | +| readable by Go | yes | yes | no | no | no | no | no | no | no | +| support for entityless feature views | yes | yes | yes | yes | yes | yes | yes | yes | no | +| support for concurrent writing to the same key | no | yes | no | no | no | no | no | no | no | +| support for ttl (time to live) at retrieval | no | yes | no | no | no | no | no | no | no | +| support for deleting expired data | no | yes | no | no | no | no | no | no | no | +| collocated by feature view | yes | no | yes | yes | yes | yes | yes | yes | no | +| collocated by feature service | no | no | no | no | no | no | no | no | no | +| collocated by entity key | no | yes | no | no | no | no | no | no | no | diff --git a/docs/roadmap.md b/docs/roadmap.md index b7bab598cca..eb517b6ae80 100644 --- a/docs/roadmap.md +++ b/docs/roadmap.md @@ -38,7 +38,6 @@ The list below contains the functionality that contributors are planning to deve * [x] [Bigtable](https://docs.feast.dev/reference/online-stores/bigtable) * [x] [SQLite](https://docs.feast.dev/reference/online-stores/sqlite) * [x] [Dragonfly](https://docs.feast.dev/reference/online-stores/dragonfly) - * [x] [IKV - Inlined Key Value Store](https://docs.feast.dev/reference/online-stores/ikv) * [x] [Azure Cache for Redis (community plugin)](https://github.com/Azure/feast-azure) * [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/online-stores/postgres) * [x] [Cassandra / AstraDB (contrib plugin)](https://docs.feast.dev/reference/online-stores/cassandra) diff --git a/docs/specs/offline_store_format.md
b/docs/specs/offline_store_format.md index ac829dd52f1..1b440d34c27 100644 --- a/docs/specs/offline_store_format.md +++ b/docs/specs/offline_store_format.md @@ -49,6 +49,12 @@ Here's how Feast types map to Pandas types for Feast APIs that take in or return | DOUBLE\_LIST | `list[float]`| | FLOAT\_LIST | `list[float]`| | BOOL\_LIST | `list[bool]`| +| MAP | `dict` (`Dict[str, Any]`)| +| MAP\_LIST | `list[dict]` (`List[Dict[str, Any]]`)| +| JSON | `object` (parsed Python dict/list/str)| +| JSON\_LIST | `list[object]`| +| STRUCT | `dict` (`Dict[str, Any]`)| +| STRUCT\_LIST | `list[dict]` (`List[Dict[str, Any]]`)| Note that this mapping is non-injective, that is more than one Pandas type may corresponds to one Feast type (but not vice versa). In these cases, when converting Feast values to Pandas, the **first** Pandas type in the table above is used. @@ -78,6 +84,12 @@ Here's how Feast types map to BigQuery types when using BigQuery for offline sto | DOUBLE\_LIST | `ARRAY`| | FLOAT\_LIST | `ARRAY`| | BOOL\_LIST | `ARRAY`| +| MAP | `JSON` / `STRUCT` | +| MAP\_LIST | `ARRAY` / `ARRAY` | +| JSON | `JSON` | +| JSON\_LIST | `ARRAY` | +| STRUCT | `STRUCT` / `RECORD` | +| STRUCT\_LIST | `ARRAY` | Values that are not specified by the table above will cause an error on conversion. 
@@ -94,3 +106,23 @@ https://docs.snowflake.com/en/user-guide/python-connector-pandas.html#snowflake- | INT32 | `INT8 / UINT8 / INT16 / UINT16 / INT32 / UINT32` | | INT64 | `INT64 / UINT64` | | DOUBLE | `FLOAT64` | +| MAP | `VARIANT` / `OBJECT` | +| JSON | `JSON` / `VARIANT` | + +#### Redshift Types +Here's how Feast types map to Redshift types when using Redshift for offline storage: + +| Feast Type | Redshift Type | +|-------------|--| +| Event Timestamp | `TIMESTAMP` / `TIMESTAMPTZ` | +| BYTES | `VARBYTE` | +| STRING | `VARCHAR` | +| INT32 | `INT4` / `SMALLINT` | +| INT64 | `INT8` / `BIGINT` | +| DOUBLE | `FLOAT8` / `DOUBLE PRECISION` | +| FLOAT | `FLOAT4` / `REAL` | +| BOOL | `BOOL` | +| MAP | `SUPER` | +| JSON | `json` / `SUPER` | + +Note: Redshift's `SUPER` type stores semi-structured JSON data. During materialization, Feast automatically handles `SUPER` columns that are exported as JSON strings by parsing them back into Python dictionaries before converting to `MAP` proto values. diff --git a/docs/tutorials/azure/data/data_generator.py b/docs/tutorials/azure/data/data_generator.py index 77fec082963..20af682c9a3 100644 --- a/docs/tutorials/azure/data/data_generator.py +++ b/docs/tutorials/azure/data/data_generator.py @@ -7,8 +7,7 @@ from pytz import FixedOffset, timezone, utc from random import randint from enum import Enum -from sqlalchemy import create_engine, DateTime -from datetime import datetime +from sqlalchemy import DateTime DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL = "event_timestamp" @@ -65,7 +64,8 @@ def create_orders_df( ) ] df.sort_values( - by=["e_ts", "order_id", "driver_id", "customer_id"], inplace=True, + by=["e_ts", "order_id", "driver_id", "customer_id"], + inplace=True, ) else: df[DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL] = [ @@ -208,9 +208,7 @@ def create_customer_daily_profile_df(customers, start_date, end_date) -> pd.Data def generate_entities(date, n_customers, n_drivers, order_count): end_date = date - before_start_date = end_date - 
timedelta(days=365) start_date = end_date - timedelta(days=7) - after_end_date = end_date + timedelta(days=365) customer_entities = [20000 + c_id for c_id in range(n_customers)] driver_entities = [50000 + d_id for d_id in range(n_drivers)] orders_df = create_orders_df( @@ -225,7 +223,7 @@ def generate_entities(date, n_customers, n_drivers, order_count): def save_df_to_csv(df, table_name, dtype): - df.to_csv(table_name+".csv", index=False) + df.to_csv(table_name + ".csv", index=False) if __name__ == "__main__": @@ -247,7 +245,6 @@ def save_df_to_csv(df, table_name, dtype): print(drivers_df.head()) - orders_table = "orders" driver_hourly_table = "driver_hourly" customer_profile_table = "customer_profile" @@ -257,4 +254,4 @@ def save_df_to_csv(df, table_name, dtype): print("uploading drivers") save_df_to_csv(drivers_df, driver_hourly_table, dtype={"datetime": DateTime()}) print("uploading customers") - save_df_to_csv(customer_df, customer_profile_table, dtype={"datetime": DateTime()}) \ No newline at end of file + save_df_to_csv(customer_df, customer_profile_table, dtype={"datetime": DateTime()}) diff --git a/docs/tutorials/azure/notebooks/src/score.py b/docs/tutorials/azure/notebooks/src/score.py index 7def7d2d2ad..f6281c47157 100644 --- a/docs/tutorials/azure/notebooks/src/score.py +++ b/docs/tutorials/azure/notebooks/src/score.py @@ -11,7 +11,7 @@ from feast.infra.offline_stores.contrib.mssql_offline_store.mssql import ( MsSqlServerOfflineStoreConfig, ) -from feast.infra.online_stores.redis import RedisOnlineStoreConfig, RedisOnlineStore +from feast.infra.online_stores.redis import RedisOnlineStoreConfig def init(): diff --git a/examples/java-demo/feature_repo/driver_repo.py b/examples/java-demo/feature_repo/driver_repo.py index f7dd05afff7..edb3e1e9d89 100644 --- a/examples/java-demo/feature_repo/driver_repo.py +++ b/examples/java-demo/feature_repo/driver_repo.py @@ -14,7 +14,10 @@ timestamp_field="event_timestamp", created_timestamp_column="created", ) -driver = 
Entity(name="driver_id", description="driver id",) +driver = Entity( + name="driver_id", + description="driver id", +) driver_hourly_stats_view = FeatureView( name="driver_hourly_stats", entities=[driver], @@ -58,4 +61,3 @@ def transformed_conv_rate(inputs: pd.DataFrame) -> pd.DataFrame: df["conv_rate_plus_val1"] = inputs["conv_rate"] + inputs["val_to_add"] df["conv_rate_plus_val2"] = inputs["conv_rate"] + inputs["val_to_add_2"] return df - diff --git a/examples/java-demo/feature_repo/test.py b/examples/java-demo/feature_repo/test.py index f73883019d6..9dd0c34a1f6 100644 --- a/examples/java-demo/feature_repo/test.py +++ b/examples/java-demo/feature_repo/test.py @@ -20,9 +20,13 @@ def fetch_java(): print( stub.GetOnlineFeatures( - GetOnlineFeaturesRequest(features=feature_refs, entities=entity_rows,) + GetOnlineFeaturesRequest( + features=feature_refs, + entities=entity_rows, + ) ) ) + if __name__ == "__main__": fetch_java() diff --git a/examples/java-demo/feature_repo/test_python_fetch.py b/examples/java-demo/feature_repo/test_python_fetch.py index 5e2781e1508..0edc05c6603 100644 --- a/examples/java-demo/feature_repo/test_python_fetch.py +++ b/examples/java-demo/feature_repo/test_python_fetch.py @@ -15,7 +15,7 @@ def run_demo(): }, { "driver_id": 1002, - } + }, ], ).to_dict() for key, value in sorted(features.items()): diff --git a/examples/kind-quickstart/src/utils.py b/examples/kind-quickstart/src/utils.py index ea549d7ed8c..34d674a088a 100644 --- a/examples/kind-quickstart/src/utils.py +++ b/examples/kind-quickstart/src/utils.py @@ -1,12 +1,18 @@ import subprocess -def port_forward(service, external_port, local_port=80) : - """ - Run a background process to forward port 80 of the given `service` service to the given `external_port` port. 
- Returns: the process instance - """ - command = ["kubectl", "port-forward", f"service/{service}", f"{external_port}:{local_port}"] - process = subprocess.Popen(command) - print(f"Port-forwarding {service} with process ID: {process.pid}") - return process +def port_forward(service, external_port, local_port=80): + """ + Run a background process to forward port 80 of the given `service` service to the given `external_port` port. + + Returns: the process instance + """ + command = [ + "kubectl", + "port-forward", + f"service/{service}", + f"{external_port}:{local_port}", + ] + process = subprocess.Popen(command) + print(f"Port-forwarding {service} with process ID: {process.pid}") + return process diff --git a/examples/online_store/milvus_tutorial/milvus_example.py b/examples/online_store/milvus_tutorial/milvus_example.py index dc5cb646c60..c8889efdf0f 100644 --- a/examples/online_store/milvus_tutorial/milvus_example.py +++ b/examples/online_store/milvus_tutorial/milvus_example.py @@ -32,39 +32,71 @@ def generate_sample_data(): # Sample product data products = [ - {"id": 1, "name": "Smartphone", - "description": "A high-end smartphone with advanced camera features and long battery life."}, - {"id": 2, "name": "Laptop", - "description": "Powerful laptop with fast processor and high-resolution display for professional use."}, - {"id": 3, "name": "Headphones", - "description": "Wireless noise-cancelling headphones with premium sound quality."}, - {"id": 4, "name": "Smartwatch", - "description": "Fitness tracking smartwatch with heart rate monitoring and sleep analysis."}, - {"id": 5, "name": "Tablet", - "description": "Lightweight tablet with vibrant display perfect for reading and browsing."}, - {"id": 6, "name": "Camera", - "description": "Professional digital camera with high-resolution sensor and interchangeable lenses."}, - {"id": 7, "name": "Speaker", - "description": "Bluetooth speaker with rich bass and long battery life for outdoor use."}, - {"id": 8, "name": 
"Gaming Console", - "description": "Next-generation gaming console with 4K graphics and fast loading times."}, - {"id": 9, "name": "E-reader", - "description": "E-ink display reader with backlight for comfortable reading in any lighting condition."}, - {"id": 10, "name": "Smart TV", - "description": "4K smart television with built-in streaming apps and voice control."} + { + "id": 1, + "name": "Smartphone", + "description": "A high-end smartphone with advanced camera features and long battery life.", + }, + { + "id": 2, + "name": "Laptop", + "description": "Powerful laptop with fast processor and high-resolution display for professional use.", + }, + { + "id": 3, + "name": "Headphones", + "description": "Wireless noise-cancelling headphones with premium sound quality.", + }, + { + "id": 4, + "name": "Smartwatch", + "description": "Fitness tracking smartwatch with heart rate monitoring and sleep analysis.", + }, + { + "id": 5, + "name": "Tablet", + "description": "Lightweight tablet with vibrant display perfect for reading and browsing.", + }, + { + "id": 6, + "name": "Camera", + "description": "Professional digital camera with high-resolution sensor and interchangeable lenses.", + }, + { + "id": 7, + "name": "Speaker", + "description": "Bluetooth speaker with rich bass and long battery life for outdoor use.", + }, + { + "id": 8, + "name": "Gaming Console", + "description": "Next-generation gaming console with 4K graphics and fast loading times.", + }, + { + "id": 9, + "name": "E-reader", + "description": "E-ink display reader with backlight for comfortable reading in any lighting condition.", + }, + { + "id": 10, + "name": "Smart TV", + "description": "4K smart television with built-in streaming apps and voice control.", + }, ] # Create DataFrame df = pd.DataFrame(products) # Generate embeddings using sentence-transformers - model = SentenceTransformer('all-MiniLM-L6-v2') # Small, fast model with 384-dim embeddings - embeddings = 
model.encode(df['description'].tolist()) + model = SentenceTransformer( + "all-MiniLM-L6-v2" + ) # Small, fast model with 384-dim embeddings + embeddings = model.encode(df["description"].tolist()) # Add embeddings and timestamp to DataFrame - df['embedding'] = embeddings.tolist() - df['event_timestamp'] = datetime.now() - timedelta(days=1) - df['created_timestamp'] = datetime.now() - timedelta(days=1) + df["embedding"] = embeddings.tolist() + df["event_timestamp"] = datetime.now() - timedelta(days=1) + df["created_timestamp"] = datetime.now() - timedelta(days=1) # Save to parquet file parquet_path = "data/sample_data.parquet" @@ -135,16 +167,20 @@ def perform_similarity_search(store, query_text: str, top_k: int = 3): print(f"\nPerforming similarity search for: '{query_text}'") # Generate embedding for query text - model = SentenceTransformer('all-MiniLM-L6-v2') + model = SentenceTransformer("all-MiniLM-L6-v2") query_embedding = model.encode(query_text).tolist() # Perform similarity search using vector embeddings with version 2 API try: results = store.retrieve_online_documents_v2( - features=["product_embeddings:embedding", "product_embeddings:name", "product_embeddings:description"], + features=[ + "product_embeddings:embedding", + "product_embeddings:name", + "product_embeddings:description", + ], query=query_embedding, top_k=top_k, - distance_metric="L2" + distance_metric="L2", ).to_df() # Print results @@ -184,7 +220,9 @@ def main(): perform_similarity_search(store, "portable computing device for work", top_k=3) print("\n=== Tutorial Complete ===") - print("You've successfully set up Milvus with Feast and performed vector similarity searches!") + print( + "You've successfully set up Milvus with Feast and performed vector similarity searches!" 
+ ) if __name__ == "__main__": diff --git a/examples/online_store/pgvector_tutorial/pgvector_example.py b/examples/online_store/pgvector_tutorial/pgvector_example.py index c8236ea4a7b..94c6e079952 100644 --- a/examples/online_store/pgvector_tutorial/pgvector_example.py +++ b/examples/online_store/pgvector_tutorial/pgvector_example.py @@ -4,12 +4,9 @@ # as a vector database backend for Feast. import os -import numpy as np import pandas as pd from datetime import datetime, timedelta -from typing import List, Optional import subprocess -import time # For generating embeddings try: @@ -21,53 +18,97 @@ from feast import FeatureStore, Entity, FeatureView, Field, FileSource from feast.data_format import ParquetFormat -from feast.types import Float32, Array, String, Int64 +from feast.types import Float32, Array, String from feast.value_type import ValueType # Create data directory if it doesn't exist os.makedirs("data", exist_ok=True) + # Step 1: Generate sample data with embeddings def generate_sample_data(): print("Generating sample data with embeddings...") - + # Sample product data products = [ - {"id": 1, "name": "Smartphone", "description": "A high-end smartphone with advanced camera features and long battery life."}, - {"id": 2, "name": "Laptop", "description": "Powerful laptop with fast processor and high-resolution display for professional use."}, - {"id": 3, "name": "Headphones", "description": "Wireless noise-cancelling headphones with premium sound quality."}, - {"id": 4, "name": "Smartwatch", "description": "Fitness tracking smartwatch with heart rate monitoring and sleep analysis."}, - {"id": 5, "name": "Tablet", "description": "Lightweight tablet with vibrant display perfect for reading and browsing."}, - {"id": 6, "name": "Camera", "description": "Professional digital camera with high-resolution sensor and interchangeable lenses."}, - {"id": 7, "name": "Speaker", "description": "Bluetooth speaker with rich bass and long battery life for outdoor use."}, - 
{"id": 8, "name": "Gaming Console", "description": "Next-generation gaming console with 4K graphics and fast loading times."}, - {"id": 9, "name": "E-reader", "description": "E-ink display reader with backlight for comfortable reading in any lighting condition."}, - {"id": 10, "name": "Smart TV", "description": "4K smart television with built-in streaming apps and voice control."} + { + "id": 1, + "name": "Smartphone", + "description": "A high-end smartphone with advanced camera features and long battery life.", + }, + { + "id": 2, + "name": "Laptop", + "description": "Powerful laptop with fast processor and high-resolution display for professional use.", + }, + { + "id": 3, + "name": "Headphones", + "description": "Wireless noise-cancelling headphones with premium sound quality.", + }, + { + "id": 4, + "name": "Smartwatch", + "description": "Fitness tracking smartwatch with heart rate monitoring and sleep analysis.", + }, + { + "id": 5, + "name": "Tablet", + "description": "Lightweight tablet with vibrant display perfect for reading and browsing.", + }, + { + "id": 6, + "name": "Camera", + "description": "Professional digital camera with high-resolution sensor and interchangeable lenses.", + }, + { + "id": 7, + "name": "Speaker", + "description": "Bluetooth speaker with rich bass and long battery life for outdoor use.", + }, + { + "id": 8, + "name": "Gaming Console", + "description": "Next-generation gaming console with 4K graphics and fast loading times.", + }, + { + "id": 9, + "name": "E-reader", + "description": "E-ink display reader with backlight for comfortable reading in any lighting condition.", + }, + { + "id": 10, + "name": "Smart TV", + "description": "4K smart television with built-in streaming apps and voice control.", + }, ] - + # Create DataFrame df = pd.DataFrame(products) - + # Generate embeddings using sentence-transformers - model = SentenceTransformer('all-MiniLM-L6-v2') # Small, fast model with 384-dim embeddings - embeddings = 
model.encode(df['description'].tolist()) - + model = SentenceTransformer( + "all-MiniLM-L6-v2" + ) # Small, fast model with 384-dim embeddings + embeddings = model.encode(df["description"].tolist()) + # Add embeddings and timestamp to DataFrame - df['embedding'] = embeddings.tolist() - df['event_timestamp'] = datetime.now() - timedelta(days=1) - df['created_timestamp'] = datetime.now() - timedelta(days=1) - + df["embedding"] = embeddings.tolist() + df["event_timestamp"] = datetime.now() - timedelta(days=1) + df["created_timestamp"] = datetime.now() - timedelta(days=1) + # Save to parquet file parquet_path = "data/sample_data.parquet" df.to_parquet(parquet_path, index=False) - + print(f"Sample data saved to {parquet_path}") return parquet_path + # Step 2: Define feature repository def create_feature_definitions(data_path): print("Creating feature definitions...") - + # Define entity product = Entity( name="product_id", @@ -75,7 +116,7 @@ def create_feature_definitions(data_path): join_keys=["id"], value_type=ValueType.INT64, ) - + # Define data source source = FileSource( file_format=ParquetFormat(), @@ -83,7 +124,7 @@ def create_feature_definitions(data_path): timestamp_field="event_timestamp", created_timestamp_column="created_timestamp", ) - + # Define feature view with vector embeddings product_embeddings = FeatureView( name="product_embeddings", @@ -91,10 +132,10 @@ def create_feature_definitions(data_path): ttl=timedelta(days=30), schema=[ Field( - name="embedding", - dtype=Array(Float32), + name="embedding", + dtype=Array(Float32), vector_index=True, # Mark as vector field - vector_search_metric="L2" # Use L2 distance for similarity + vector_search_metric="L2", # Use L2 distance for similarity ), Field(name="name", dtype=String), Field(name="description", dtype=String), @@ -102,106 +143,152 @@ def create_feature_definitions(data_path): source=source, online=True, ) - + return product, product_embeddings + # Step 3: Initialize and apply feature store def 
setup_feature_store(product, product_embeddings): print("Setting up feature store...") - + # Initialize feature store store = FeatureStore(repo_path=".") - + # Apply feature definitions store.apply([product, product_embeddings]) - + # Materialize features to online store store.materialize( start_date=datetime.now() - timedelta(days=2), end_date=datetime.now(), ) - + print("Feature store setup complete") return store + # Step 4: Perform vector similarity search def perform_similarity_search(store, query_text: str, top_k: int = 3): print(f"\nPerforming similarity search for: '{query_text}'") - + # Generate embedding for query text - model = SentenceTransformer('all-MiniLM-L6-v2') + model = SentenceTransformer("all-MiniLM-L6-v2") query_embedding = model.encode(query_text).tolist() - + # Perform similarity search using vector embeddings results = store.retrieve_online_documents( query=query_embedding, features=["product_embeddings:embedding"], top_k=top_k, - distance_metric="L2" + distance_metric="L2", ) - + # Extract product IDs from the results by parsing entity keys # (The entities are encoded in a way that's not directly accessible) - + print(f"\nTop {top_k} similar products:") print("Available fields:", list(results.to_dict().keys())) - + # Since we can't access the entity keys directly, let's do a manual search # to show the top similar products based on our search query - + # Get top 5 products sorted by relevance to our query (manual approach) products = [ - {"id": 3, "name": "Headphones", "description": "Wireless noise-cancelling headphones with premium sound quality."}, - {"id": 7, "name": "Speaker", "description": "Bluetooth speaker with rich bass and long battery life for outdoor use."}, - {"id": 2, "name": "Laptop", "description": "Powerful laptop with fast processor and high-resolution display for professional use."}, - {"id": 5, "name": "Tablet", "description": "Lightweight tablet with vibrant display perfect for reading and browsing."}, - {"id": 1, 
"name": "Smartphone", "description": "A high-end smartphone with advanced camera features and long battery life."}, + { + "id": 3, + "name": "Headphones", + "description": "Wireless noise-cancelling headphones with premium sound quality.", + }, + { + "id": 7, + "name": "Speaker", + "description": "Bluetooth speaker with rich bass and long battery life for outdoor use.", + }, + { + "id": 2, + "name": "Laptop", + "description": "Powerful laptop with fast processor and high-resolution display for professional use.", + }, + { + "id": 5, + "name": "Tablet", + "description": "Lightweight tablet with vibrant display perfect for reading and browsing.", + }, + { + "id": 1, + "name": "Smartphone", + "description": "A high-end smartphone with advanced camera features and long battery life.", + }, ] - + # Filter based on the search query - if "wireless" in query_text.lower() or "audio" in query_text.lower() or "sound" in query_text.lower(): - relevant = [products[0], products[1], products[4]] # Headphones, Speaker, Smartphone - elif "portable" in query_text.lower() or "computing" in query_text.lower() or "work" in query_text.lower(): + if ( + "wireless" in query_text.lower() + or "audio" in query_text.lower() + or "sound" in query_text.lower() + ): + relevant = [ + products[0], + products[1], + products[4], + ] # Headphones, Speaker, Smartphone + elif ( + "portable" in query_text.lower() + or "computing" in query_text.lower() + or "work" in query_text.lower() + ): relevant = [products[2], products[4], products[3]] # Laptop, Smartphone, Tablet else: relevant = products[:3] # Just show first 3 - + # Display results for i, product in enumerate(relevant[:top_k], 1): print(f"\n{i}. 
Name: {product['name']}") print(f" Description: {product['description']}") - + print("\nNote: Using simulated results for display purposes.") - print("The vector search is working, but the result structure in this Feast version") - print("doesn't allow easy access to the entity keys to retrieve the product details.") + print( + "The vector search is working, but the result structure in this Feast version" + ) + print( + "doesn't allow easy access to the entity keys to retrieve the product details." + ) + # Main function to run the example def main(): print("=== PGVector Tutorial with Feast ===") - + # Check if PostgreSQL with pgvector is running print("\nEnsure PostgreSQL with pgvector is running:") - print("docker run -d \\\n --name postgres-pgvector \\\n -e POSTGRES_USER=feast \\\n -e POSTGRES_PASSWORD=feast \\\n -e POSTGRES_DB=feast \\\n -p 5432:5432 \\\n pgvector/pgvector:pg16") + print( + "docker run -d \\\n --name postgres-pgvector \\\n -e POSTGRES_USER=feast \\\n -e POSTGRES_PASSWORD=feast \\\n -e POSTGRES_DB=feast \\\n -p 5432:5432 \\\n pgvector/pgvector:pg16" + ) print("\nEnsure pgvector extension is created:") - print("docker exec -it postgres-pgvector psql -U feast -c \"CREATE EXTENSION IF NOT EXISTS vector;\"") - + print( + 'docker exec -it postgres-pgvector psql -U feast -c "CREATE EXTENSION IF NOT EXISTS vector;"' + ) + input("\nPress Enter to continue once PostgreSQL with pgvector is ready...") - + # Generate sample data data_path = generate_sample_data() - + # Create feature definitions product, product_embeddings = create_feature_definitions(data_path) - + # Setup feature store store = setup_feature_store(product, product_embeddings) - + # Perform similarity searches perform_similarity_search(store, "wireless audio device with good sound", top_k=3) perform_similarity_search(store, "portable computing device for work", top_k=3) - + print("\n=== Tutorial Complete ===") - print("You've successfully set up pgvector with Feast and performed vector 
similarity searches!") + print( + "You've successfully set up pgvector with Feast and performed vector similarity searches!" + ) + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/examples/openlineage-integration/openlineage_demo.py b/examples/openlineage-integration/openlineage_demo.py index 5bcb40fb902..fb02136eeb1 100644 --- a/examples/openlineage-integration/openlineage_demo.py +++ b/examples/openlineage-integration/openlineage_demo.py @@ -33,6 +33,7 @@ from feast import Entity, FeatureService, FeatureView, FileSource, Field from feast.types import Float32, Int64 + def create_feature_store_yaml(url: str) -> str: """Create a feature_store.yaml with OpenLineage configuration.""" return f"""project: openlineage_demo @@ -68,13 +69,13 @@ def run_demo(url: str): (repo_path / "feature_store.yaml").write_text(feature_store_yaml) print(f"Created demo repository at: {repo_path}") - print(f"feature_store.yaml:") + print("feature_store.yaml:") print("-" * 50) print(feature_store_yaml) print("-" * 50) try: - import openlineage.client + import openlineage.client # noqa: F401 except ImportError: print("OpenLineage client not installed.") print("Install with: pip install openlineage-python") @@ -123,7 +124,9 @@ def run_demo(url: str): Field(name="conv_rate", dtype=Float32, description="Conversion rate"), Field(name="acc_rate", dtype=Float32, description="Acceptance rate"), Field( - name="avg_daily_trips", dtype=Int64, description="Average daily trips" + name="avg_daily_trips", + dtype=Int64, + description="Average daily trips", ), ], source=driver_stats_source, @@ -140,12 +143,21 @@ def run_demo(url: str): try: fs.apply( - [driver, driver_stats_source, driver_hourly_stats_view, driver_stats_service] + [ + driver, + driver_stats_source, + driver_hourly_stats_view, + driver_stats_service, + ] ) print("Applied entities, feature views, and feature services") print("OpenLineage events emitted automatically:") - print(" - 
feast_feature_views_openlineage_demo (DataSources → FeatureViews)") - print(" - feature_service_driver_stats_service (FeatureViews → FeatureService)") + print( + " - feast_feature_views_openlineage_demo (DataSources → FeatureViews)" + ) + print( + " - feature_service_driver_stats_service (FeatureViews → FeatureService)" + ) except Exception as e: print(f"Apply failed: {e}") diff --git a/examples/operator-rbac-openshift-tls/client/feature_repo/test.py b/examples/operator-rbac-openshift-tls/client/feature_repo/test.py index 78732327a62..88f7a0e6484 100644 --- a/examples/operator-rbac-openshift-tls/client/feature_repo/test.py +++ b/examples/operator-rbac-openshift-tls/client/feature_repo/test.py @@ -9,6 +9,7 @@ repo_path = os.getenv("FEAST_REPO_PATH", ".") store = FeatureStore(repo_path=repo_path) + def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: bool): """Fetch historical features for training or batch scoring.""" try: @@ -38,13 +39,17 @@ def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: "transformed_conv_rate:conv_rate_plus_val2", ], ).to_df() - print(f"Successfully fetched {'batch scoring' if for_batch_scoring else 'training'} historical features:\n", training_df.head()) + print( + f"Successfully fetched {'batch scoring' if for_batch_scoring else 'training'} historical features:\n", + training_df.head(), + ) except PermissionError: print("\n*** PERMISSION DENIED *** Cannot fetch historical features.") except Exception as e: print(f"Unexpected error while fetching historical features: {e}") + def fetch_online_features(store: FeatureStore, source: str = ""): """Fetch online features from the feature store.""" try: @@ -76,7 +81,9 @@ def fetch_online_features(store: FeatureStore, source: str = ""): entity_rows=entity_rows, ).to_dict() - print(f"Successfully fetched online features {'via feature service' if source else 'directly'}:\n") + print( + f"Successfully fetched online features {'via feature service' 
if source else 'directly'}:\n" + ) for key, value in sorted(returned_features.items()): print(f"{key} : {value}") @@ -85,6 +92,7 @@ def fetch_online_features(store: FeatureStore, source: str = ""): except Exception as e: print(f"Unexpected error while fetching online features: {e}") + def check_permissions(): """Check user role, test various Feast operations.""" feature_views = [] @@ -94,7 +102,9 @@ def check_permissions(): try: feature_views = store.list_feature_views() if not feature_views: - print("No feature views found. You might not have access or they haven't been created.") + print( + "No feature views found. You might not have access or they haven't been created." + ) else: print(f"Successfully listed {len(feature_views)} feature views:") for fv in feature_views: @@ -117,7 +127,9 @@ def check_permissions(): store.apply(feature_views) print("User has write access to the feature store.") except PermissionError: - print("\n*** PERMISSION DENIED *** User lacks permission to modify the feature store.") + print( + "\n*** PERMISSION DENIED *** User lacks permission to modify the feature store." 
+ ) except Exception as e: print(f"Unexpected error testing write access: {e}") @@ -151,5 +163,6 @@ def check_permissions(): except Exception as e: print(f"Unexpected error while pushing event: {e}") + if __name__ == "__main__": check_permissions() diff --git a/examples/operator-rbac-openshift-tls/permissions_apply.py b/examples/operator-rbac-openshift-tls/permissions_apply.py index 0d46ad5260a..d628fa88eb8 100644 --- a/examples/operator-rbac-openshift-tls/permissions_apply.py +++ b/examples/operator-rbac-openshift-tls/permissions_apply.py @@ -5,8 +5,10 @@ from feast.permissions.policy import RoleBasedPolicy # Define K8s roles same as created with FeatureStore CR -admin_roles = ["feast-writer"] # Full access (can create, update, delete ) Feast Resources -user_roles = ["feast-reader"] # Read-only access on Feast Resources +admin_roles = [ + "feast-writer" +] # Full access (can create, update, delete ) Feast Resources +user_roles = ["feast-reader"] # Read-only access on Feast Resources # User permissions (feast_user_permission) # - Grants read and describing Feast objects access @@ -14,7 +16,8 @@ name="feast_user_permission", types=ALL_RESOURCE_TYPES, policy=RoleBasedPolicy(roles=user_roles), - actions=[AuthzedAction.DESCRIBE] + READ # Read access (READ_ONLINE, READ_OFFLINE) + describe other Feast Resources. + actions=[AuthzedAction.DESCRIBE] + + READ, # Read access (READ_ONLINE, READ_OFFLINE) + describe other Feast Resources. 
) # Admin permissions (feast_admin_permission) @@ -23,5 +26,5 @@ name="feast_admin_permission", types=ALL_RESOURCE_TYPES, policy=RoleBasedPolicy(roles=admin_roles), - actions=ALL_ACTIONS # Full permissions: CREATE, UPDATE, DELETE, READ, WRITE + actions=ALL_ACTIONS, # Full permissions: CREATE, UPDATE, DELETE, READ, WRITE ) diff --git a/examples/operator-rbac-openshift-tls/permissions_with_groups_namespaces.py b/examples/operator-rbac-openshift-tls/permissions_with_groups_namespaces.py index 5565c612902..d5c17fe1c2b 100644 --- a/examples/operator-rbac-openshift-tls/permissions_with_groups_namespaces.py +++ b/examples/operator-rbac-openshift-tls/permissions_with_groups_namespaces.py @@ -5,28 +5,36 @@ from feast.feast_object import ALL_FEATURE_VIEW_TYPES, ALL_RESOURCE_TYPES from feast.project import Project from feast.entity import Entity -from feast.feature_view import FeatureView -from feast.on_demand_feature_view import OnDemandFeatureView -from feast.batch_feature_view import BatchFeatureView -from feast.stream_feature_view import StreamFeatureView from feast.feature_service import FeatureService from feast.data_source import DataSource from feast.saved_dataset import SavedDataset from feast.permissions.permission import Permission from feast.permissions.action import READ, AuthzedAction, ALL_ACTIONS -from feast.permissions.policy import RoleBasedPolicy, GroupBasedPolicy, NamespaceBasedPolicy, CombinedGroupNamespacePolicy +from feast.permissions.policy import ( + RoleBasedPolicy, + GroupBasedPolicy, + NamespaceBasedPolicy, + CombinedGroupNamespacePolicy, +) -# New Testing +# New Testing -WITHOUT_DATA_SOURCE = [Project, Entity, FeatureService, SavedDataset] + ALL_FEATURE_VIEW_TYPES +WITHOUT_DATA_SOURCE = [ + Project, + Entity, + FeatureService, + SavedDataset, +] + ALL_FEATURE_VIEW_TYPES ONLY_ENTITIES = [Entity] ONLY_DS = [DataSource] # Define K8s roles (existing functionality) -admin_roles = ["feast-writer"] # Full access (can create, update, delete) Feast 
Resources -user_roles = ["feast-reader"] # Read-only access on Feast Resources +admin_roles = [ + "feast-writer" +] # Full access (can create, update, delete) Feast Resources +user_roles = ["feast-reader"] # Read-only access on Feast Resources # Define groups for different teams data_team_groups = ["data-team", "ml-engineers"] @@ -41,9 +49,14 @@ name="pre_Changed", types=ONLY_ENTITIES, policy=NamespaceBasedPolicy(namespaces=prod_namespaces), - actions=[AuthzedAction.DESCRIBE] + READ + actions=[AuthzedAction.DESCRIBE] + READ, +) +only_ds = Permission( + name="entity_reader", + types=ONLY_DS, + policy=NamespaceBasedPolicy(namespaces=prod_namespaces), + actions=[AuthzedAction.DESCRIBE] + READ, ) -only_ds = Permission(name="entity_reader", types=ONLY_DS, policy=NamespaceBasedPolicy(namespaces=[prod_namespaces]), actions=[AuthzedAction.DESCRIBE] + READ) staging_namespaces = ["staging", "dev"] test_namespaces = ["test", "testing"] @@ -53,7 +66,8 @@ name="feast_user_permission", types=ALL_RESOURCE_TYPES, policy=RoleBasedPolicy(roles=user_roles), - actions=[AuthzedAction.DESCRIBE] + READ # Read access (READ_ONLINE, READ_OFFLINE) + describe other Feast Resources. + actions=[AuthzedAction.DESCRIBE] + + READ, # Read access (READ_ONLINE, READ_OFFLINE) + describe other Feast Resources. 
) # Admin permissions (existing functionality) @@ -62,7 +76,7 @@ name="feast_admin_permission", types=ALL_RESOURCE_TYPES, policy=RoleBasedPolicy(roles=admin_roles), - actions=ALL_ACTIONS # Full permissions: CREATE, UPDATE, DELETE, READ, WRITE + actions=ALL_ACTIONS, # Full permissions: CREATE, UPDATE, DELETE, READ, WRITE ) # Group-based permissions (new functionality) @@ -71,7 +85,7 @@ name="data_team_read_permission", types=ALL_RESOURCE_TYPES, policy=GroupBasedPolicy(groups=data_team_groups), - actions=[AuthzedAction.DESCRIBE] + READ + actions=[AuthzedAction.DESCRIBE] + READ, ) # - Grants full access to admin groups @@ -79,7 +93,7 @@ name="admin_group_permission", types=ALL_RESOURCE_TYPES, policy=GroupBasedPolicy(groups=admin_groups), - actions=ALL_ACTIONS + actions=ALL_ACTIONS, ) # Namespace-based permissions (new functionality) @@ -88,7 +102,7 @@ name="production_read_permission", types=ALL_RESOURCE_TYPES, policy=NamespaceBasedPolicy(namespaces=prod_namespaces), - actions=[AuthzedAction.DESCRIBE] + READ + actions=[AuthzedAction.DESCRIBE] + READ, ) # # - Grants full access to staging namespace users @@ -96,7 +110,7 @@ name="staging_full_permission", types=ALL_RESOURCE_TYPES, policy=NamespaceBasedPolicy(namespaces=staging_namespaces), - actions=ALL_ACTIONS + actions=ALL_ACTIONS, ) # # Combined permissions (using combined policy type) @@ -104,14 +118,18 @@ dev_test_perm = Permission( name="dev_test_permission", types=ALL_RESOURCE_TYPES, - policy=CombinedGroupNamespacePolicy(groups=dev_team_groups, namespaces=test_namespaces), - actions=[AuthzedAction.DESCRIBE] + READ + policy=CombinedGroupNamespacePolicy( + groups=dev_team_groups, namespaces=test_namespaces + ), + actions=[AuthzedAction.DESCRIBE] + READ, ) # # - Grants full access to data team members in staging namespaces data_staging_perm = Permission( name="data_staging_permission", types=ALL_RESOURCE_TYPES, - policy=CombinedGroupNamespacePolicy(groups=data_team_groups, namespaces=staging_namespaces), - 
actions=ALL_ACTIONS + policy=CombinedGroupNamespacePolicy( + groups=data_team_groups, namespaces=staging_namespaces + ), + actions=ALL_ACTIONS, ) diff --git a/examples/operator-rbac/client/feature_repo/test.py b/examples/operator-rbac/client/feature_repo/test.py index 78732327a62..88f7a0e6484 100644 --- a/examples/operator-rbac/client/feature_repo/test.py +++ b/examples/operator-rbac/client/feature_repo/test.py @@ -9,6 +9,7 @@ repo_path = os.getenv("FEAST_REPO_PATH", ".") store = FeatureStore(repo_path=repo_path) + def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: bool): """Fetch historical features for training or batch scoring.""" try: @@ -38,13 +39,17 @@ def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: "transformed_conv_rate:conv_rate_plus_val2", ], ).to_df() - print(f"Successfully fetched {'batch scoring' if for_batch_scoring else 'training'} historical features:\n", training_df.head()) + print( + f"Successfully fetched {'batch scoring' if for_batch_scoring else 'training'} historical features:\n", + training_df.head(), + ) except PermissionError: print("\n*** PERMISSION DENIED *** Cannot fetch historical features.") except Exception as e: print(f"Unexpected error while fetching historical features: {e}") + def fetch_online_features(store: FeatureStore, source: str = ""): """Fetch online features from the feature store.""" try: @@ -76,7 +81,9 @@ def fetch_online_features(store: FeatureStore, source: str = ""): entity_rows=entity_rows, ).to_dict() - print(f"Successfully fetched online features {'via feature service' if source else 'directly'}:\n") + print( + f"Successfully fetched online features {'via feature service' if source else 'directly'}:\n" + ) for key, value in sorted(returned_features.items()): print(f"{key} : {value}") @@ -85,6 +92,7 @@ def fetch_online_features(store: FeatureStore, source: str = ""): except Exception as e: print(f"Unexpected error while fetching online features: {e}") + 
def check_permissions(): """Check user role, test various Feast operations.""" feature_views = [] @@ -94,7 +102,9 @@ def check_permissions(): try: feature_views = store.list_feature_views() if not feature_views: - print("No feature views found. You might not have access or they haven't been created.") + print( + "No feature views found. You might not have access or they haven't been created." + ) else: print(f"Successfully listed {len(feature_views)} feature views:") for fv in feature_views: @@ -117,7 +127,9 @@ def check_permissions(): store.apply(feature_views) print("User has write access to the feature store.") except PermissionError: - print("\n*** PERMISSION DENIED *** User lacks permission to modify the feature store.") + print( + "\n*** PERMISSION DENIED *** User lacks permission to modify the feature store." + ) except Exception as e: print(f"Unexpected error testing write access: {e}") @@ -151,5 +163,6 @@ def check_permissions(): except Exception as e: print(f"Unexpected error while pushing event: {e}") + if __name__ == "__main__": check_permissions() diff --git a/examples/operator-rbac/permissions_apply.py b/examples/operator-rbac/permissions_apply.py index 0d46ad5260a..d628fa88eb8 100644 --- a/examples/operator-rbac/permissions_apply.py +++ b/examples/operator-rbac/permissions_apply.py @@ -5,8 +5,10 @@ from feast.permissions.policy import RoleBasedPolicy # Define K8s roles same as created with FeatureStore CR -admin_roles = ["feast-writer"] # Full access (can create, update, delete ) Feast Resources -user_roles = ["feast-reader"] # Read-only access on Feast Resources +admin_roles = [ + "feast-writer" +] # Full access (can create, update, delete ) Feast Resources +user_roles = ["feast-reader"] # Read-only access on Feast Resources # User permissions (feast_user_permission) # - Grants read and describing Feast objects access @@ -14,7 +16,8 @@ name="feast_user_permission", types=ALL_RESOURCE_TYPES, policy=RoleBasedPolicy(roles=user_roles), - 
actions=[AuthzedAction.DESCRIBE] + READ # Read access (READ_ONLINE, READ_OFFLINE) + describe other Feast Resources. + actions=[AuthzedAction.DESCRIBE] + + READ, # Read access (READ_ONLINE, READ_OFFLINE) + describe other Feast Resources. ) # Admin permissions (feast_admin_permission) @@ -23,5 +26,5 @@ name="feast_admin_permission", types=ALL_RESOURCE_TYPES, policy=RoleBasedPolicy(roles=admin_roles), - actions=ALL_ACTIONS # Full permissions: CREATE, UPDATE, DELETE, READ, WRITE + actions=ALL_ACTIONS, # Full permissions: CREATE, UPDATE, DELETE, READ, WRITE ) diff --git a/examples/podman_local/client/feature_repo/test.py b/examples/podman_local/client/feature_repo/test.py index 13ab2444aab..1e3b741a1b3 100644 --- a/examples/podman_local/client/feature_repo/test.py +++ b/examples/podman_local/client/feature_repo/test.py @@ -1,9 +1,9 @@ -import subprocess from datetime import datetime import pandas as pd from feast import FeatureStore from feast.data_source import PushMode + def run_demo(): try: store = FeatureStore(repo_path=".") @@ -51,6 +51,7 @@ def run_demo(): except Exception as e: print(f"An error occurred in run_demo: {e}") + def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: bool): try: entity_df = pd.DataFrame.from_dict( @@ -83,6 +84,7 @@ def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: except Exception as e: print(f"An error occurred in fetch_historical_features_entity_df: {e}") + def fetch_online_features(store, source: str = ""): try: entity_rows = [ @@ -116,6 +118,7 @@ def fetch_online_features(store, source: str = ""): except Exception as e: print(f"An error occurred in fetch_online_features: {e}") + if __name__ == "__main__": try: run_demo() diff --git a/examples/python-helm-demo/feature_repo/driver_repo.py b/examples/python-helm-demo/feature_repo/driver_repo.py index f7dd05afff7..edb3e1e9d89 100644 --- a/examples/python-helm-demo/feature_repo/driver_repo.py +++ 
b/examples/python-helm-demo/feature_repo/driver_repo.py @@ -14,7 +14,10 @@ timestamp_field="event_timestamp", created_timestamp_column="created", ) -driver = Entity(name="driver_id", description="driver id",) +driver = Entity( + name="driver_id", + description="driver id", +) driver_hourly_stats_view = FeatureView( name="driver_hourly_stats", entities=[driver], @@ -58,4 +61,3 @@ def transformed_conv_rate(inputs: pd.DataFrame) -> pd.DataFrame: df["conv_rate_plus_val1"] = inputs["conv_rate"] + inputs["val_to_add"] df["conv_rate_plus_val2"] = inputs["conv_rate"] + inputs["val_to_add_2"] return df - diff --git a/examples/python-helm-demo/test/test_python_fetch.py b/examples/python-helm-demo/test/test_python_fetch.py index 715912422f3..3412f7d4de0 100644 --- a/examples/python-helm-demo/test/test_python_fetch.py +++ b/examples/python-helm-demo/test/test_python_fetch.py @@ -18,8 +18,11 @@ def run_demo_http(): resp_data = json.loads(r.text) records = pd.DataFrame.from_records( - columns=resp_data["metadata"]["feature_names"], - data=[[r["values"][i] for r in resp_data["results"]] for i in range(len(resp_data["results"]))] + columns=resp_data["metadata"]["feature_names"], + data=[ + [r["values"][i] for r in resp_data["results"]] + for i in range(len(resp_data["results"])) + ], ) for col in sorted(records.columns): print(col, " : ", records[col].values) diff --git a/examples/rag-docling/feature_repo/example_repo.py b/examples/rag-docling/feature_repo/example_repo.py index 6ce2adab8a5..1169b1c830a 100644 --- a/examples/rag-docling/feature_repo/example_repo.py +++ b/examples/rag-docling/feature_repo/example_repo.py @@ -1,6 +1,5 @@ from datetime import timedelta -import pandas as pd from feast import ( FeatureView, Field, @@ -12,7 +11,7 @@ from feast.types import Float64, Array, String, ValueType, PdfBytes from feast.on_demand_feature_view import on_demand_feature_view from sentence_transformers import SentenceTransformer -from typing import Dict, Any, List +from typing import 
Any import hashlib from docling.datamodel.base_models import DocumentStream @@ -20,7 +19,6 @@ import io from docling.document_converter import DocumentConverter from transformers import AutoTokenizer -from sentence_transformers import SentenceTransformer from docling.chunking import HybridChunker # Load tokenizer and embedding model @@ -31,13 +29,19 @@ embedding_model = SentenceTransformer(EMBED_MODEL_ID) chunker = HybridChunker(tokenizer=tokenizer, max_tokens=MAX_TOKENS, merge_peers=True) + def embed_text(text: str) -> list[float]: """Generate an embedding for a given text.""" return embedding_model.encode([text], normalize_embeddings=True).tolist()[0] -def generate_chunk_id(file_name: str, raw_chunk_markdown: str="") -> str: + +def generate_chunk_id(file_name: str, raw_chunk_markdown: str = "") -> str: """Generate a unique chunk ID based on file_name and raw_chunk_markdown.""" - unique_string = f"{file_name}-{raw_chunk_markdown}" if raw_chunk_markdown != "" else f"{file_name}" + unique_string = ( + f"{file_name}-{raw_chunk_markdown}" + if raw_chunk_markdown != "" + else f"{file_name}" + ) return hashlib.sha256(unique_string.encode()).hexdigest() @@ -64,7 +68,7 @@ def generate_chunk_id(file_name: str, raw_chunk_markdown: str="") -> str: input_request_pdf = RequestSource( name="pdf_request_source", schema=[ - Field(name="document_id", dtype=String), + Field(name="document_id", dtype=String), Field(name="pdf_bytes", dtype=PdfBytes), Field(name="file_name", dtype=String), ], @@ -88,6 +92,7 @@ def generate_chunk_id(file_name: str, raw_chunk_markdown: str="") -> str: ttl=timedelta(hours=2), ) + @on_demand_feature_view( entities=[chunk, document], sources=[input_request_pdf], diff --git a/examples/rag/feature_repo/example_repo.py b/examples/rag/feature_repo/example_repo.py index e0a9be21452..7a37d99d495 100644 --- a/examples/rag/feature_repo/example_repo.py +++ b/examples/rag/feature_repo/example_repo.py @@ -39,4 +39,4 @@ ], source=source, ttl=timedelta(hours=2), -) \ 
No newline at end of file +) diff --git a/examples/rag/feature_repo/test_workflow.py b/examples/rag/feature_repo/test_workflow.py index 05cd554d823..8a8a8813bf9 100644 --- a/examples/rag/feature_repo/test_workflow.py +++ b/examples/rag/feature_repo/test_workflow.py @@ -33,21 +33,23 @@ def run_model(sentences, tokenizer, model): sentence_embeddings = F.normalize(sentence_embeddings, p=2, dim=1) return sentence_embeddings + def run_demo(): store = FeatureStore(repo_path=".") df = pd.read_parquet("./data/city_wikipedia_summaries_with_embeddings.parquet") - embedding_length = len(df['vector'][0]) - print(f'embedding length = {embedding_length}') + embedding_length = len(df["vector"][0]) + print(f"embedding length = {embedding_length}") store.apply([city_embeddings_feature_view, item]) - fields = [ - f.name for f in city_embeddings_feature_view.features - ] + city_embeddings_feature_view.entities + [city_embeddings_feature_view.batch_source.timestamp_field] - print('\ndata=') + fields = ( + [f.name for f in city_embeddings_feature_view.features] + + city_embeddings_feature_view.entities + + [city_embeddings_feature_view.batch_source.timestamp_field] + ) + print("\ndata=") print(df[fields].head().T) store.write_to_online_store("city_embeddings", df[fields][0:3]) - question = "the most populous city in the state of New York is New York" tokenizer = AutoTokenizer.from_pretrained(TOKENIZER) model = AutoModel.from_pretrained(MODEL) @@ -70,5 +72,6 @@ def run_demo(): print(features.to_df()) store.teardown() + if __name__ == "__main__": run_demo() diff --git a/examples/rbac-remote/client/k8s/feature_repo/test.py b/examples/rbac-remote/client/k8s/feature_repo/test.py index 6e1480bc947..96fe678d5f8 100644 --- a/examples/rbac-remote/client/k8s/feature_repo/test.py +++ b/examples/rbac-remote/client/k8s/feature_repo/test.py @@ -20,9 +20,11 @@ def run_demo(): try: print("\n--- Load features into online store/materialize_incremental ---") - feature_views= store.list_feature_views() + 
feature_views = store.list_feature_views() if not feature_views: - raise PermissionError("No access to feature-views or no feature-views available.") + raise PermissionError( + "No access to feature-views or no feature-views available." + ) store.materialize_incremental(end_date=datetime.now()) except PermissionError as pe: print(f"Permission error: {pe}") @@ -74,9 +76,7 @@ def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: # values we're using for an on-demand transformation "val_to_add": [1, 2, 3], "val_to_add_2": [10, 20, 30], - } - ) if for_batch_scoring: entity_df["event_timestamp"] = pd.to_datetime("now", utc=True) diff --git a/examples/rbac-remote/client/oidc/feature_repo/test.py b/examples/rbac-remote/client/oidc/feature_repo/test.py index 6e1480bc947..96fe678d5f8 100644 --- a/examples/rbac-remote/client/oidc/feature_repo/test.py +++ b/examples/rbac-remote/client/oidc/feature_repo/test.py @@ -20,9 +20,11 @@ def run_demo(): try: print("\n--- Load features into online store/materialize_incremental ---") - feature_views= store.list_feature_views() + feature_views = store.list_feature_views() if not feature_views: - raise PermissionError("No access to feature-views or no feature-views available.") + raise PermissionError( + "No access to feature-views or no feature-views available." 
+ ) store.materialize_incremental(end_date=datetime.now()) except PermissionError as pe: print(f"Permission error: {pe}") @@ -74,9 +76,7 @@ def fetch_historical_features_entity_df(store: FeatureStore, for_batch_scoring: # values we're using for an on-demand transformation "val_to_add": [1, 2, 3], "val_to_add_2": [10, 20, 30], - } - ) if for_batch_scoring: entity_df["event_timestamp"] = pd.to_datetime("now", utc=True) diff --git a/examples/rbac-remote/server/feature_repo/example_repo.py b/examples/rbac-remote/server/feature_repo/example_repo.py index 5b8105bb948..08914f33b73 100644 --- a/examples/rbac-remote/server/feature_repo/example_repo.py +++ b/examples/rbac-remote/server/feature_repo/example_repo.py @@ -5,7 +5,9 @@ import pandas as pd from feast import Entity, FeatureService, FeatureView, Field, PushSource, RequestSource -from feast.infra.offline_stores.contrib.postgres_offline_store.postgres_source import PostgreSQLSource +from feast.infra.offline_stores.contrib.postgres_offline_store.postgres_source import ( + PostgreSQLSource, +) from feast.on_demand_feature_view import on_demand_feature_view from feast.types import Float32, Float64, Int64 diff --git a/examples/rbac-remote/server/feature_repo/permissions_apply.py b/examples/rbac-remote/server/feature_repo/permissions_apply.py index 93bdf2ffc62..244d0bcc73f 100644 --- a/examples/rbac-remote/server/feature_repo/permissions_apply.py +++ b/examples/rbac-remote/server/feature_repo/permissions_apply.py @@ -10,12 +10,12 @@ name="feast_user_permission", types=ALL_RESOURCE_TYPES, policy=RoleBasedPolicy(roles=user_roles), - actions=[AuthzedAction.DESCRIBE] + READ + actions=[AuthzedAction.DESCRIBE] + READ, ) admin_perm = Permission( name="feast_admin_permission", types=ALL_RESOURCE_TYPES, policy=RoleBasedPolicy(roles=admin_roles), - actions=ALL_ACTIONS + actions=ALL_ACTIONS, ) diff --git a/go.mod b/go.mod index 46332e3c807..d59d6cfa4b3 100644 --- a/go.mod +++ b/go.mod @@ -11,12 +11,16 @@ require ( 
github.com/aws/aws-sdk-go-v2/config v1.29.14 github.com/aws/aws-sdk-go-v2/service/dynamodb v1.43.3 github.com/aws/aws-sdk-go-v2/service/s3 v1.79.3 + github.com/cabify/gotoprom v1.1.0 github.com/ghodss/yaml v1.0.0 github.com/go-sql-driver/mysql v1.8.1 github.com/golang/protobuf v1.5.4 github.com/google/uuid v1.6.0 + github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.1.0 + github.com/jackc/pgx/v5 v5.8.0 github.com/mattn/go-sqlite3 v1.14.23 github.com/pkg/errors v0.9.1 + github.com/prometheus/client_golang v1.23.2 github.com/redis/go-redis/v9 v9.6.1 github.com/roberson-io/mmh3 v0.0.0-20190729202758-fdfce3ba6225 github.com/rs/zerolog v1.33.0 @@ -64,6 +68,7 @@ require ( github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.1 // indirect github.com/aws/aws-sdk-go-v2/service/sts v1.33.19 // indirect github.com/aws/smithy-go v1.22.2 // indirect + github.com/beorn7/perks v1.0.1 // indirect github.com/cenkalti/backoff/v5 v5.0.3 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 // indirect @@ -81,10 +86,10 @@ require ( github.com/google/s2a-go v0.1.9 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.7 // indirect github.com/googleapis/gax-go/v2 v2.15.0 // indirect + github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.1.0 // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect - github.com/jackc/pgx/v5 v5.8.0 // indirect github.com/jackc/puddle/v2 v2.2.2 // indirect github.com/klauspost/asmfmt v1.3.2 // indirect github.com/klauspost/compress v1.18.0 // indirect @@ -93,9 +98,13 @@ require ( github.com/mattn/go-isatty v0.0.20 // indirect github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 // indirect github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 // indirect + github.com/munnerz/goautoneg 
v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/pierrec/lz4/v4 v4.1.21 // indirect github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/prometheus/client_model v0.6.2 // indirect + github.com/prometheus/common v0.66.1 // indirect + github.com/prometheus/procfs v0.16.1 // indirect github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/zeebo/errs v1.4.0 // indirect @@ -107,6 +116,7 @@ require ( go.opentelemetry.io/otel/metric v1.38.0 // indirect go.opentelemetry.io/otel/sdk/metric v1.38.0 // indirect go.opentelemetry.io/proto/otlp v1.7.1 // indirect + go.yaml.in/yaml/v2 v2.4.2 // indirect golang.org/x/crypto v0.45.0 // indirect golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 // indirect golang.org/x/mod v0.29.0 // indirect diff --git a/go.sum b/go.sum index 51d24f57197..b9112cc4961 100644 --- a/go.sum +++ b/go.sum @@ -32,6 +32,8 @@ github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapp github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.54.0/go.mod h1:Mf6O40IAyB9zR/1J8nGDDPirZQQPbYJni8Yisy7NTMc= github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c h1:RGWPOewvKIROun94nF7v2cua9qP+thov/7M50KEoeSU= github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c/go.mod h1:X0CRv0ky0k6m906ixxpzmDRLvX58TFUKS2eePweuyxk= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M= github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY= github.com/apache/arrow/go/v17 v17.0.0 h1:RRR2bdqKcdbss9Gxy2NS/hK8i4LDMh23L6BbkN5+F54= 
@@ -78,10 +80,16 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.33.19 h1:1XuUZ8mYJw9B6lzAkXhqHlJd/Xv github.com/aws/aws-sdk-go-v2/service/sts v1.33.19/go.mod h1:cQnB8CUnxbMU82JvlqjKR2HBOm3fe9pWorWBza6MBJ4= github.com/aws/smithy-go v1.22.2 h1:6D9hW43xKFrRx/tXXfAlIZc4JI+yQe6snnWcQyxSyLQ= github.com/aws/smithy-go v1.22.2/go.mod h1:irrKGvNn1InZwb2d7fkIRNucdfwR8R+Ts3wxYa/cJHg= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= +github.com/cabify/gotoprom v1.1.0 h1:IyM06IuVDPpEhBdXqSIfQK1KGrerkjGkDamrQqu8dWo= +github.com/cabify/gotoprom v1.1.0/go.mod h1:8H4gdB+iJqM8QrNneQxxbYsx4xA7m3h1BP8K7h16R4w= github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM= github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= @@ -90,6 +98,7 @@ github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= @@ -108,6 +117,8 @@ github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-jose/go-jose/v4 v4.1.2 h1:TK/7NqRQZfgAh+Td8AlsrvtPoUyiHh0LqVvokh+1vHI= github.com/go-jose/go-jose/v4 v4.1.2/go.mod h1:22cg9HWM1pOlnRiY+9cQYJ9XHmya1bYW8OeDM6Ku6Oo= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= @@ -115,9 +126,13 @@ github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA= github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= 
+github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= @@ -136,6 +151,10 @@ github.com/googleapis/enterprise-certificate-proxy v0.3.7 h1:zrn2Ee/nWmHulBx5sAV github.com/googleapis/enterprise-certificate-proxy v0.3.7/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= github.com/googleapis/gax-go/v2 v2.15.0 h1:SyjDc1mGgZU5LncH8gimWo9lW1DtIfPibOG81vgd/bo= github.com/googleapis/gax-go/v2 v2.15.0/go.mod h1:zVVkkxAQHa1RQpg9z2AUCMnKhi0Qld9rcmyfL1OZhoc= +github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.1.0 h1:QGLs/O40yoNK9vmy4rhUGBVyMf1lISBGtXRpsu/Qu/o= +github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.1.0/go.mod h1:hM2alZsMUni80N33RBe6J0e423LB+odMj7d3EMP9l20= +github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.1.0 h1:pRhl55Yx1eC7BZ1N+BBWwnKaMyD8uC+34TLdndZMAKk= +github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.1.0/go.mod h1:XKMd7iuf/RGPSMJ/U4HP0zS2Z9Fh8Ps9a+6X26m/tmI= github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 h1:8Tjv8EJ+pM1xP8mK6egEbD1OgnVTyacbefKhmbLhIhU= github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2/go.mod h1:pkJQ2tZHJ0aFOVEEot6oZmaVEZcRme73eIFmhiVuRWs= github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= @@ -146,16 +165,22 @@ github.com/jackc/pgx/v5 v5.8.0 h1:TYPDoleBBme0xGSAX3/+NujXXtpZn9HBONkQC7IEZSo= github.com/jackc/pgx/v5 v5.8.0/go.mod h1:QVeDInX2m9VyzvNeiCJVjCkNFqzsNb43204HshNSZKw= github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/json-iterator/go v1.1.6/go.mod 
h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4= github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE= github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM= github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= @@ -164,12 +189,19 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-sqlite3 v1.14.23 h1:gbShiuAP1W5j9UOksQ06aiiqPMxYecovVGwmTxWtuw0= github.com/mattn/go-sqlite3 v1.14.23/go.mod 
h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs= github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY= github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8DFdX7uMikMLXX4oubIzJF4kv/wI= github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ= github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= @@ -177,6 +209,21 @@ github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 
h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o= +github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= +github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.66.1 h1:h5E0h5/Y8niHc5DlaLlWLArTQI7tMrsfQjHV+d9ZoGs= +github.com/prometheus/common v0.66.1/go.mod h1:gcaUsgf3KfRSwHY4dIMXLPV0K/Wg1oZ8+SbZk/HH/dA= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg= +github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is= github.com/redis/go-redis/v9 v9.6.1 h1:HHDteefn6ZkTtY5fGUE8tj8uy85AHk6zP7CpzIAM0y4= github.com/redis/go-redis/v9 v9.6.1/go.mod h1:0C0c6ycQsdpVNQpxb1njEQIqkx5UcsM8FJCQLgE9+RA= github.com/roberson-io/mmh3 v0.0.0-20190729202758-fdfce3ba6225 h1:ZMsPCp7oYgjoIFt1c+sM2qojxZXotSYcMF8Ur9/LJlM= @@ -186,13 +233,17 @@ 
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7 github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8= github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE= github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= @@ -231,18 +282,26 @@ go.opentelemetry.io/proto/otlp v1.7.1 h1:gTOMpGDb0WTBOP8JaO72iL3auEZhVmAQg4ipjOV go.opentelemetry.io/proto/otlp v1.7.1/go.mod h1:b2rVh6rfI/s2pHWNlB7ILJcRALpcNDzKhACevjI+ZnE= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI= +go.yaml.in/yaml/v2 
v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk= golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY= golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA= golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= golang.org/x/oauth2 v0.33.0 h1:4Q+qn+E5z8gPRJfmRy7C2gGG3T4jIprK6aSYgTXGRpo= golang.org/x/oauth2 v0.33.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -273,9 +332,11 @@ 
google.golang.org/grpc v1.76.0 h1:UnVkv1+uMLYXoIz6o7chp59WfQUYA2ex/BXQ9rHZu7A= google.golang.org/grpc v1.76.0/go.mod h1:Ju12QI8M6iQJtbcsV+awF5a4hfJMLi4X0JLo94ULZ6c= google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE= google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/go/README.md b/go/README.md index a8e381519a4..308787cac9f 100644 --- a/go/README.md +++ b/go/README.md @@ -6,14 +6,27 @@ To build and run the Go Feature Server locally, create a feature_store.yaml file ```bash go build -o feast-go ./go/main.go - # start the http server - ./feast-go --type=http --port=8080 + # start the http server (metrics on port 9090 by default) + ./feast-go --type=http --port=8080 --metrics-port=9090 # or start the gRPC server - #./feast-go --type=grpc --port=[your-choice] + #./feast-go --type=grpc --port=[your-choice] --metrics-port=9091 ``` +## Prometheus Metrics +The server exposes Prometheus metrics at the `/metrics` endpoint on a dedicated port (default `:9090`). +- **HTTP Mode**: Metrics server runs on port `9090` (configurable via `-metrics-port`). +- **gRPC Mode**: Metrics server runs on port `9090` (configurable via `-metrics-port`). 
+ +Key metrics include: +- `http_request_duration_seconds`: Histogram of response latency. +- `http_requests_total`: Counter of HTTP requests by status, method, and path. +- Standard Go and Process metrics. + +A `/health` endpoint is available on the main application port (default `:8080`) for readiness probes. + ## OTEL based observability The OS level env variable `ENABLE_OTEL_TRACING=="true"/"false"` (string type) is used to enable/disable this service (with Tracing only). +You can also configure the service name using `OTEL_SERVICE_NAME` env variable (defaults to "FeastGoFeatureServer"). The default exporter URL is "http://localhost:4318". The default schema of sending data to collector is **HTTP**. Please refer the following two docs about the configuration of the OTEL exporter: 1. https://opentelemetry.io/docs/languages/sdk-configuration/otlp-exporter/ diff --git a/go/internal/feast/metrics/metrics.go b/go/internal/feast/metrics/metrics.go new file mode 100644 index 00000000000..804eef6fa1b --- /dev/null +++ b/go/internal/feast/metrics/metrics.go @@ -0,0 +1,66 @@ +package metrics + +import ( + "reflect" + "time" + + "github.com/cabify/gotoprom" + "github.com/cabify/gotoprom/prometheusvanilla" + "github.com/prometheus/client_golang/prometheus" +) + +var HttpMetrics struct { + Duration func(HttpLabels) TimeHistogram `name:"http_request_duration_seconds" help:"Time taken to serve HTTP requests" buckets:".005,.01,.025,.05,.1,.25,.5,1,2.5,5,10"` + + RequestsTotal func(HttpLabels) prometheus.Counter `name:"http_requests_total" help:"Total number of HTTP requests"` +} + +type HttpLabels struct { + Method string `label:"method"` + Status int `label:"status"` + Path string `label:"path"` +} + +func init() { + gotoprom.MustAddBuilder(TimeHistogramType, RegisterTimeHistogram) + gotoprom.MustInit(&HttpMetrics, "feast") +} + +var ( + TimeHistogramType = reflect.TypeOf((*TimeHistogram)(nil)).Elem() +) + + +func RegisterTimeHistogram(name, help, namespace string, labelNames 
[]string, tag reflect.StructTag) (func(prometheus.Labels) interface{}, prometheus.Collector, error) { + f, collector, err := prometheusvanilla.BuildHistogram(name, help, namespace, labelNames, tag) + if err != nil { + return nil, nil, err + } + + return func(labels prometheus.Labels) interface{} { + return timeHistogramAdapter{Histogram: f(labels).(prometheus.Histogram)} + }, collector, nil +} + +// TimeHistogram offers the basic prometheus.Histogram functionality +type TimeHistogram interface { + prometheus.Histogram + // Duration observes the duration in seconds + Duration(duration time.Duration) + // Since observes the duration in seconds since the time point provided + Since(time.Time) +} + +type timeHistogramAdapter struct { + prometheus.Histogram +} + +// Duration observes the duration in seconds +func (to timeHistogramAdapter) Duration(duration time.Duration) { + to.Observe(duration.Seconds()) +} + +// Since observes the duration in seconds since the time point provided +func (to timeHistogramAdapter) Since(duration time.Time) { + to.Duration(time.Since(duration)) +} diff --git a/go/internal/feast/server/http_server.go b/go/internal/feast/server/http_server.go index 312a0a6352e..adfd40110e7 100644 --- a/go/internal/feast/server/http_server.go +++ b/go/internal/feast/server/http_server.go @@ -17,6 +17,8 @@ import ( prototypes "github.com/feast-dev/feast/go/protos/feast/types" "github.com/feast-dev/feast/go/types" "github.com/rs/zerolog/log" + + "github.com/feast-dev/feast/go/internal/feast/metrics" ) type httpServer struct { @@ -335,10 +337,55 @@ func recoverMiddleware(next http.Handler) http.Handler { }) } +type statusWriter struct { + http.ResponseWriter + status int +} + +func (w *statusWriter) WriteHeader(status int) { + if w.status == 0 { + w.status = status + } + w.ResponseWriter.WriteHeader(status) +} + +func (w *statusWriter) Write(b []byte) (int, error) { + if w.status == 0 { + w.status = 200 + } + n, err := w.ResponseWriter.Write(b) + return n, err 
+} + +func metricsMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + t0 := time.Now() + sw := &statusWriter{ResponseWriter: w} + next.ServeHTTP(sw, r) + duration := time.Since(t0) + + if sw.status == 0 { + sw.status = 200 + } + + metrics.HttpMetrics.Duration(metrics.HttpLabels{ + Method: r.Method, + Status: sw.status, + Path: r.URL.Path, + }).Duration(duration) + + metrics.HttpMetrics.RequestsTotal(metrics.HttpLabels{ + Method: r.Method, + Status: sw.status, + Path: r.URL.Path, + }).Inc() + }) +} + func (s *httpServer) Serve(host string, port int) error { mux := http.NewServeMux() - mux.Handle("/get-online-features", recoverMiddleware(http.HandlerFunc(s.getOnlineFeatures))) - mux.HandleFunc("/health", healthCheckHandler) + mux.Handle("/get-online-features", metricsMiddleware(recoverMiddleware(http.HandlerFunc(s.getOnlineFeatures)))) + mux.Handle("/health", metricsMiddleware(http.HandlerFunc(healthCheckHandler))) s.server = &http.Server{Addr: fmt.Sprintf("%s:%d", host, port), Handler: mux, ReadTimeout: 5 * time.Second, WriteTimeout: 10 * time.Second, IdleTimeout: 15 * time.Second} err := s.server.ListenAndServe() // Don't return the error if it's caused by graceful shutdown using Stop() diff --git a/go/internal/test/feature_repo/example.py b/go/internal/test/feature_repo/example.py index a814b58913b..efd1966d03e 100644 --- a/go/internal/test/feature_repo/example.py +++ b/go/internal/test/feature_repo/example.py @@ -2,7 +2,7 @@ from datetime import timedelta -from feast import Entity, Feature, FeatureView, Field, FileSource, FeatureService, RequestSource +from feast import Entity, FeatureView, Field, FileSource, FeatureService, RequestSource from feast.feature_logging import LoggingConfig from feast.infra.offline_stores.file_source import FileLoggingDestination from feast.types import Float32, Float64, Int64, PrimitiveFeastType @@ -42,7 +42,7 @@ driver_stats_fs = FeatureService( name="test_service", 
features=[driver_hourly_stats_view], - logging_config=LoggingConfig(destination=FileLoggingDestination(path="")) + logging_config=LoggingConfig(destination=FileLoggingDestination(path="")), ) @@ -53,22 +53,20 @@ schema=[ Field(name="val_to_add", dtype=PrimitiveFeastType.INT64), Field(name="val_to_add_2", dtype=PrimitiveFeastType.INT64), - ] + ], ) + # Use the input data and feature view features to create new features @on_demand_feature_view( - sources=[ - driver_hourly_stats_view, - input_request - ], - schema=[ - Field(name='conv_rate_plus_val1', dtype=Float64), - Field(name='conv_rate_plus_val2', dtype=Float64) - ] + sources=[driver_hourly_stats_view, input_request], + schema=[ + Field(name="conv_rate_plus_val1", dtype=Float64), + Field(name="conv_rate_plus_val2", dtype=Float64), + ], ) def transformed_conv_rate(features_df: pd.DataFrame) -> pd.DataFrame: df = pd.DataFrame() - df['conv_rate_plus_val1'] = (features_df['conv_rate'] + features_df['val_to_add']) - df['conv_rate_plus_val2'] = (features_df['conv_rate'] + features_df['val_to_add_2']) + df["conv_rate_plus_val1"] = features_df["conv_rate"] + features_df["val_to_add"] + df["conv_rate_plus_val2"] = features_df["conv_rate"] + features_df["val_to_add_2"] return df diff --git a/go/main.go b/go/main.go index 77999671e07..f49a27efa46 100644 --- a/go/main.go +++ b/go/main.go @@ -5,9 +5,11 @@ import ( "flag" "fmt" "net" + "net/http" "os" "os/signal" "strings" + "sync" "syscall" "github.com/feast-dev/feast/go/internal/feast" @@ -20,6 +22,9 @@ import ( "google.golang.org/grpc/health" "google.golang.org/grpc/health/grpc_health_v1" + grpc_prometheus "github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promhttp" "go.opentelemetry.io/otel" "go.opentelemetry.io/otel/exporters/otlp/otlptrace" "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp" @@ -32,18 +37,18 @@ import ( var tracer 
trace.Tracer type ServerStarter interface { - StartHttpServer(fs *feast.FeatureStore, host string, port int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts *logging.LoggingOptions) error - StartGrpcServer(fs *feast.FeatureStore, host string, port int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts *logging.LoggingOptions) error + StartHttpServer(fs *feast.FeatureStore, host string, port int, metricsPort int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts *logging.LoggingOptions) error + StartGrpcServer(fs *feast.FeatureStore, host string, port int, metricsPort int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts *logging.LoggingOptions) error } type RealServerStarter struct{} -func (s *RealServerStarter) StartHttpServer(fs *feast.FeatureStore, host string, port int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts *logging.LoggingOptions) error { - return StartHttpServer(fs, host, port, writeLoggedFeaturesCallback, loggingOpts) +func (s *RealServerStarter) StartHttpServer(fs *feast.FeatureStore, host string, port int, metricsPort int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts *logging.LoggingOptions) error { + return StartHttpServer(fs, host, port, metricsPort, writeLoggedFeaturesCallback, loggingOpts) } -func (s *RealServerStarter) StartGrpcServer(fs *feast.FeatureStore, host string, port int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts *logging.LoggingOptions) error { - return StartGrpcServer(fs, host, port, writeLoggedFeaturesCallback, loggingOpts) +func (s *RealServerStarter) StartGrpcServer(fs *feast.FeatureStore, host string, port int, metricsPort int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts *logging.LoggingOptions) error { + return StartGrpcServer(fs, host, port, metricsPort, writeLoggedFeaturesCallback, loggingOpts) } func 
main() { @@ -51,6 +56,7 @@ func main() { serverType := "http" host := "" port := 8080 + metricsPort := 9090 server := RealServerStarter{} // Current Directory repoPath, err := os.Getwd() @@ -63,6 +69,7 @@ func main() { flag.StringVar(&host, "host", host, "Specify a host for the server") flag.IntVar(&port, "port", port, "Specify a port for the server") + flag.IntVar(&metricsPort, "metrics-port", metricsPort, "Specify a port for the metrics server") flag.Parse() // Initialize tracer @@ -109,9 +116,9 @@ func main() { // TODO: writeLoggedFeaturesCallback is defaulted to nil. write_logged_features functionality needs to be // implemented in Golang specific to OfflineStoreSink. Python Feature Server doesn't support this. if serverType == "http" { - err = server.StartHttpServer(fs, host, port, nil, loggingOptions) + err = server.StartHttpServer(fs, host, port, metricsPort, nil, loggingOptions) } else if serverType == "grpc" { - err = server.StartGrpcServer(fs, host, port, nil, loggingOptions) + err = server.StartGrpcServer(fs, host, port, metricsPort, nil, loggingOptions) } else { fmt.Println("Unknown server type. 
Please specify 'http' or 'grpc'.") } @@ -144,7 +151,7 @@ func constructLoggingService(fs *feast.FeatureStore, writeLoggedFeaturesCallback } // StartGprcServerWithLogging starts gRPC server with enabled feature logging -func StartGrpcServer(fs *feast.FeatureStore, host string, port int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts *logging.LoggingOptions) error { +func StartGrpcServer(fs *feast.FeatureStore, host string, port int, metricsPort int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts *logging.LoggingOptions) error { loggingService, err := constructLoggingService(fs, writeLoggedFeaturesCallback, loggingOpts) if err != nil { return err @@ -155,58 +162,127 @@ func StartGrpcServer(fs *feast.FeatureStore, host string, port int, writeLoggedF if err != nil { return err } - - grpcServer := grpc.NewServer() + srvMetrics := grpc_prometheus.NewServerMetrics( + grpc_prometheus.WithServerHandlingTimeHistogram( + grpc_prometheus.WithHistogramBuckets([]float64{0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1, 2.5, 5, 10}), + ), + ) + prometheus.MustRegister(srvMetrics) + grpcServer := grpc.NewServer( + grpc.UnaryInterceptor(srvMetrics.UnaryServerInterceptor()), + ) serving.RegisterServingServiceServer(grpcServer, ser) healthService := health.NewServer() grpc_health_v1.RegisterHealthServer(grpcServer, healthService) + srvMetrics.InitializeMetrics(grpcServer) + + // Start metrics server + metricsServer := &http.Server{Addr: fmt.Sprintf(":%d", metricsPort)} + go func() { + log.Info().Msgf("Starting metrics server on port %d", metricsPort) + mux := http.NewServeMux() + mux.Handle("/metrics", promhttp.Handler()) + metricsServer.Handler = mux + if err := metricsServer.ListenAndServe(); err != nil && err != http.ErrServerClosed { + log.Error().Err(err).Msg("Failed to start metrics server") + } + }() stop := make(chan os.Signal, 1) signal.Notify(stop, syscall.SIGINT, syscall.SIGTERM) + var wg sync.WaitGroup + wg.Add(1) + 
serverExited := make(chan struct{}) go func() { - // As soon as these signals are received from OS, try to gracefully stop the gRPC server - <-stop - log.Info().Msg("Stopping the gRPC server...") - grpcServer.GracefulStop() - if loggingService != nil { - loggingService.Stop() + defer wg.Done() + select { + case <-stop: + // Received SIGINT/SIGTERM. Perform graceful shutdown. + log.Info().Msg("Stopping the gRPC server...") + grpcServer.GracefulStop() + if loggingService != nil { + loggingService.Stop() + } + log.Info().Msg("Stopping metrics server...") + if err := metricsServer.Shutdown(context.Background()); err != nil { + log.Error().Err(err).Msg("Error stopping metrics server") + } + log.Info().Msg("gRPC server terminated") + case <-serverExited: + // Server exited (e.g. startup error), ensure metrics server is stopped + metricsServer.Shutdown(context.Background()) + if loggingService != nil { + loggingService.Stop() + } } - log.Info().Msg("gRPC server terminated") }() - return grpcServer.Serve(lis) + err = grpcServer.Serve(lis) + close(serverExited) + wg.Wait() + return err } // StartHttpServerWithLogging starts HTTP server with enabled feature logging // Go does not allow direct assignment to package-level functions as a way to // mock them for tests -func StartHttpServer(fs *feast.FeatureStore, host string, port int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts *logging.LoggingOptions) error { +func StartHttpServer(fs *feast.FeatureStore, host string, port int, metricsPort int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts *logging.LoggingOptions) error { loggingService, err := constructLoggingService(fs, writeLoggedFeaturesCallback, loggingOpts) if err != nil { return err } ser := server.NewHttpServer(fs, loggingService) log.Info().Msgf("Starting a HTTP server on host %s, port %d", host, port) + // Start metrics server + metricsServer := &http.Server{Addr: fmt.Sprintf(":%d", metricsPort)} + go func() 
{ + log.Info().Msgf("Starting metrics server on port %d", metricsPort) + mux := http.NewServeMux() + mux.Handle("/metrics", promhttp.Handler()) + metricsServer.Handler = mux + if err := metricsServer.ListenAndServe(); err != nil && err != http.ErrServerClosed { + log.Error().Err(err).Msg("Failed to start metrics server") + } + }() stop := make(chan os.Signal, 1) signal.Notify(stop, syscall.SIGINT, syscall.SIGTERM) + var wg sync.WaitGroup + wg.Add(1) + serverExited := make(chan struct{}) go func() { - // As soon as these signals are received from OS, try to gracefully stop the gRPC server - <-stop - log.Info().Msg("Stopping the HTTP server...") - err := ser.Stop() - if err != nil { - log.Error().Err(err).Msg("Error when stopping the HTTP server") - } - if loggingService != nil { - loggingService.Stop() + defer wg.Done() + select { + case <-stop: + // Received SIGINT/SIGTERM. Perform graceful shutdown. + log.Info().Msg("Stopping the HTTP server...") + err := ser.Stop() + if err != nil { + log.Error().Err(err).Msg("Error when stopping the HTTP server") + } + log.Info().Msg("Stopping metrics server...") + if err := metricsServer.Shutdown(context.Background()); err != nil { + log.Error().Err(err).Msg("Error stopping metrics server") + } + if loggingService != nil { + loggingService.Stop() + } + log.Info().Msg("HTTP server terminated") + case <-serverExited: + // Server exited (e.g. 
startup error), ensure metrics server is stopped + metricsServer.Shutdown(context.Background()) + if loggingService != nil { + loggingService.Stop() + } } - log.Info().Msg("HTTP server terminated") }() - return ser.Serve(host, port) + err = ser.Serve(host, port) + close(serverExited) + wg.Wait() + return err } func OTELTracingEnabled() bool { @@ -223,11 +299,15 @@ func newExporter(ctx context.Context) (*otlptrace.Exporter, error) { } func newTracerProvider(exp sdktrace.SpanExporter) (*sdktrace.TracerProvider, error) { + serviceName := os.Getenv("OTEL_SERVICE_NAME") + if serviceName == "" { + serviceName = "FeastGoFeatureServer" + } r, err := resource.Merge( resource.Default(), resource.NewWithAttributes( semconv.SchemaURL, - semconv.ServiceName("FeastGoFeatureServer"), + semconv.ServiceName(serviceName), ), ) diff --git a/go/main_test.go b/go/main_test.go index 567a6cf5af4..f1f2ae98698 100644 --- a/go/main_test.go +++ b/go/main_test.go @@ -14,13 +14,13 @@ type MockServerStarter struct { mock.Mock } -func (m *MockServerStarter) StartHttpServer(fs *feast.FeatureStore, host string, port int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts *logging.LoggingOptions) error { - args := m.Called(fs, host, port, writeLoggedFeaturesCallback, loggingOpts) +func (m *MockServerStarter) StartHttpServer(fs *feast.FeatureStore, host string, port int, metricsPort int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts *logging.LoggingOptions) error { + args := m.Called(fs, host, port, metricsPort, writeLoggedFeaturesCallback, loggingOpts) return args.Error(0) } -func (m *MockServerStarter) StartGrpcServer(fs *feast.FeatureStore, host string, port int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts *logging.LoggingOptions) error { - args := m.Called(fs, host, port, writeLoggedFeaturesCallback, loggingOpts) +func (m *MockServerStarter) StartGrpcServer(fs *feast.FeatureStore, host string, port int, 
metricsPort int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts *logging.LoggingOptions) error { + args := m.Called(fs, host, port, metricsPort, writeLoggedFeaturesCallback, loggingOpts) return args.Error(0) } @@ -34,9 +34,9 @@ func TestStartHttpServer(t *testing.T) { loggingOpts := &logging.LoggingOptions{} - mockServerStarter.On("StartHttpServer", fs, host, port, mock.AnythingOfType("logging.OfflineStoreWriteCallback"), loggingOpts).Return(nil) + mockServerStarter.On("StartHttpServer", fs, host, port, 9090, mock.AnythingOfType("logging.OfflineStoreWriteCallback"), loggingOpts).Return(nil) - err := mockServerStarter.StartHttpServer(fs, host, port, writeLoggedFeaturesCallback, loggingOpts) + err := mockServerStarter.StartHttpServer(fs, host, port, 9090, writeLoggedFeaturesCallback, loggingOpts) assert.NoError(t, err) mockServerStarter.AssertExpectations(t) } @@ -50,9 +50,9 @@ func TestStartGrpcServer(t *testing.T) { var writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback loggingOpts := &logging.LoggingOptions{} - mockServerStarter.On("StartGrpcServer", fs, host, port, mock.AnythingOfType("logging.OfflineStoreWriteCallback"), loggingOpts).Return(nil) + mockServerStarter.On("StartGrpcServer", fs, host, port, 9090, mock.AnythingOfType("logging.OfflineStoreWriteCallback"), loggingOpts).Return(nil) - err := mockServerStarter.StartGrpcServer(fs, host, port, writeLoggedFeaturesCallback, loggingOpts) + err := mockServerStarter.StartGrpcServer(fs, host, port, 9090, writeLoggedFeaturesCallback, loggingOpts) assert.NoError(t, err) mockServerStarter.AssertExpectations(t) } @@ -67,5 +67,3 @@ func TestConstructLoggingService(t *testing.T) { assert.NoError(t, err) // Further assertions can be added here based on the expected behavior of constructLoggingService } - -// Note: Additional tests can be written for other functions and error scenarios. 
diff --git a/infra/charts/feast-feature-server/Chart.yaml b/infra/charts/feast-feature-server/Chart.yaml index 1b4e503dffd..af9895f8ee8 100644 --- a/infra/charts/feast-feature-server/Chart.yaml +++ b/infra/charts/feast-feature-server/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: feast-feature-server description: Feast Feature Server in Go or Python type: application -version: 0.60.0 +version: 0.61.0 keywords: - machine learning - big data diff --git a/infra/charts/feast-feature-server/README.md b/infra/charts/feast-feature-server/README.md index 438b3de9105..da74f905aa4 100644 --- a/infra/charts/feast-feature-server/README.md +++ b/infra/charts/feast-feature-server/README.md @@ -1,6 +1,6 @@ # Feast Python / Go Feature Server Helm Charts -Current chart version is `0.60.0` +Current chart version is `0.61.0` ## Installation @@ -42,7 +42,7 @@ See [here](https://github.com/feast-dev/feast/tree/master/examples/python-helm-d | fullnameOverride | string | `""` | | | image.pullPolicy | string | `"IfNotPresent"` | | | image.repository | string | `"quay.io/feastdev/feature-server"` | Docker image for Feature Server repository | -| image.tag | string | `"0.60.0"` | The Docker image tag (can be overwritten if custom feature server deps are needed for on demand transforms) | +| image.tag | string | `"0.61.0"` | The Docker image tag (can be overwritten if custom feature server deps are needed for on demand transforms) | | imagePullSecrets | list | `[]` | | | livenessProbe.initialDelaySeconds | int | `30` | | | livenessProbe.periodSeconds | int | `30` | | diff --git a/infra/charts/feast-feature-server/values.yaml b/infra/charts/feast-feature-server/values.yaml index 9bb76d0a724..159ec432163 100644 --- a/infra/charts/feast-feature-server/values.yaml +++ b/infra/charts/feast-feature-server/values.yaml @@ -9,7 +9,7 @@ image: repository: quay.io/feastdev/feature-server pullPolicy: IfNotPresent # image.tag -- The Docker image tag (can be overwritten if custom feature server deps are 
needed for on demand transforms) - tag: 0.60.0 + tag: 0.61.0 logLevel: "WARNING" # Set log level DEBUG, INFO, WARNING, ERROR, and CRITICAL (case-insensitive) diff --git a/infra/charts/feast/Chart.yaml b/infra/charts/feast/Chart.yaml index 12e465ec052..fee9360d800 100644 --- a/infra/charts/feast/Chart.yaml +++ b/infra/charts/feast/Chart.yaml @@ -1,7 +1,7 @@ apiVersion: v1 description: Feature store for machine learning name: feast -version: 0.60.0 +version: 0.61.0 keywords: - machine learning - big data diff --git a/infra/charts/feast/README.md b/infra/charts/feast/README.md index d577e3a14ec..bbe86e319cf 100644 --- a/infra/charts/feast/README.md +++ b/infra/charts/feast/README.md @@ -8,7 +8,7 @@ This repo contains Helm charts for Feast Java components that are being installe ## Chart: Feast -Feature store for machine learning Current chart version is `0.60.0` +Feature store for machine learning Current chart version is `0.61.0` ## Installation @@ -65,8 +65,8 @@ See [here](https://github.com/feast-dev/feast/tree/master/examples/java-demo) fo | Repository | Name | Version | |------------|------|---------| | https://charts.helm.sh/stable | redis | 10.5.6 | -| https://feast-helm-charts.storage.googleapis.com | feature-server(feature-server) | 0.60.0 | -| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.60.0 | +| https://feast-helm-charts.storage.googleapis.com | feature-server(feature-server) | 0.61.0 | +| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.61.0 | ## Values diff --git a/infra/charts/feast/charts/feature-server/Chart.yaml b/infra/charts/feast/charts/feature-server/Chart.yaml index f3ea165878f..57f0e1b918a 100644 --- a/infra/charts/feast/charts/feature-server/Chart.yaml +++ b/infra/charts/feast/charts/feature-server/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: "Feast Feature Server: Online feature serving service for Feast" name: 
feature-server -version: 0.60.0 -appVersion: v0.60.0 +version: 0.61.0 +appVersion: v0.61.0 keywords: - machine learning - big data diff --git a/infra/charts/feast/charts/feature-server/README.md b/infra/charts/feast/charts/feature-server/README.md index 290965a972f..7b31a355b29 100644 --- a/infra/charts/feast/charts/feature-server/README.md +++ b/infra/charts/feast/charts/feature-server/README.md @@ -1,6 +1,6 @@ # feature-server -![Version: 0.60.0](https://img.shields.io/badge/Version-0.60.0-informational?style=flat-square) ![AppVersion: v0.60.0](https://img.shields.io/badge/AppVersion-v0.60.0-informational?style=flat-square) +![Version: 0.61.0](https://img.shields.io/badge/Version-0.61.0-informational?style=flat-square) ![AppVersion: v0.61.0](https://img.shields.io/badge/AppVersion-v0.61.0-informational?style=flat-square) Feast Feature Server: Online feature serving service for Feast @@ -17,7 +17,7 @@ Feast Feature Server: Online feature serving service for Feast | envOverrides | object | `{}` | Extra environment variables to set | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | image.repository | string | `"quay.io/feastdev/feature-server-java"` | Docker image for Feature Server repository | -| image.tag | string | `"0.60.0"` | Image tag | +| image.tag | string | `"0.61.0"` | Image tag | | ingress.grpc.annotations | object | `{}` | Extra annotations for the ingress | | ingress.grpc.auth.enabled | bool | `false` | Flag to enable auth | | ingress.grpc.class | string | `"nginx"` | Which ingress controller to use | diff --git a/infra/charts/feast/charts/feature-server/values.yaml b/infra/charts/feast/charts/feature-server/values.yaml index d5ab59d7ef7..dc0e5178125 100644 --- a/infra/charts/feast/charts/feature-server/values.yaml +++ b/infra/charts/feast/charts/feature-server/values.yaml @@ -5,7 +5,7 @@ image: # image.repository -- Docker image for Feature Server repository repository: quay.io/feastdev/feature-server-java # image.tag -- Image 
tag - tag: 0.60.0 + tag: 0.61.0 # image.pullPolicy -- Image pull policy pullPolicy: IfNotPresent diff --git a/infra/charts/feast/charts/transformation-service/Chart.yaml b/infra/charts/feast/charts/transformation-service/Chart.yaml index c18f681a85a..572a3dd84f2 100644 --- a/infra/charts/feast/charts/transformation-service/Chart.yaml +++ b/infra/charts/feast/charts/transformation-service/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: "Transformation service: to compute on-demand features" name: transformation-service -version: 0.60.0 -appVersion: v0.60.0 +version: 0.61.0 +appVersion: v0.61.0 keywords: - machine learning - big data diff --git a/infra/charts/feast/charts/transformation-service/README.md b/infra/charts/feast/charts/transformation-service/README.md index 76253c8bb0c..27be0d7f33c 100644 --- a/infra/charts/feast/charts/transformation-service/README.md +++ b/infra/charts/feast/charts/transformation-service/README.md @@ -1,6 +1,6 @@ # transformation-service -![Version: 0.60.0](https://img.shields.io/badge/Version-0.60.0-informational?style=flat-square) ![AppVersion: v0.60.0](https://img.shields.io/badge/AppVersion-v0.60.0-informational?style=flat-square) +![Version: 0.61.0](https://img.shields.io/badge/Version-0.61.0-informational?style=flat-square) ![AppVersion: v0.61.0](https://img.shields.io/badge/AppVersion-v0.61.0-informational?style=flat-square) Transformation service: to compute on-demand features @@ -13,7 +13,7 @@ Transformation service: to compute on-demand features | envOverrides | object | `{}` | Extra environment variables to set | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | image.repository | string | `"quay.io/feastdev/feature-transformation-server"` | Docker image for Transformation Server repository | -| image.tag | string | `"0.60.0"` | Image tag | +| image.tag | string | `"0.61.0"` | Image tag | | nodeSelector | object | `{}` | Node labels for pod assignment | | podLabels | object | `{}` | Labels to be 
added to Feast Serving pods | | replicaCount | int | `1` | Number of pods that will be created | diff --git a/infra/charts/feast/charts/transformation-service/values.yaml b/infra/charts/feast/charts/transformation-service/values.yaml index e00e9c4f523..dc401c66627 100644 --- a/infra/charts/feast/charts/transformation-service/values.yaml +++ b/infra/charts/feast/charts/transformation-service/values.yaml @@ -5,7 +5,7 @@ image: # image.repository -- Docker image for Transformation Server repository repository: quay.io/feastdev/feature-transformation-server # image.tag -- Image tag - tag: 0.60.0 + tag: 0.61.0 # image.pullPolicy -- Image pull policy pullPolicy: IfNotPresent diff --git a/infra/charts/feast/requirements.yaml b/infra/charts/feast/requirements.yaml index 59c3442b5a5..23bd7bc36d3 100644 --- a/infra/charts/feast/requirements.yaml +++ b/infra/charts/feast/requirements.yaml @@ -1,12 +1,12 @@ dependencies: - name: feature-server alias: feature-server - version: 0.60.0 + version: 0.61.0 condition: feature-server.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: transformation-service alias: transformation-service - version: 0.60.0 + version: 0.61.0 condition: transformation-service.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: redis diff --git a/infra/feast-operator/Makefile b/infra/feast-operator/Makefile index f017154d39d..bff19a5f979 100644 --- a/infra/feast-operator/Makefile +++ b/infra/feast-operator/Makefile @@ -3,7 +3,7 @@ # To re-generate a bundle for another specific version without changing the standard setup, you can: # - use the VERSION as arg of the bundle target (e.g make bundle VERSION=0.0.2) # - use environment variables to overwrite this value (e.g export VERSION=0.0.2) -VERSION ?= 0.60.0 +VERSION ?= 0.61.0 # CHANNELS define the bundle channels used in the bundle. # Add a new line here if you would like to change its default config. 
(E.g CHANNELS = "candidate,fast,stable") @@ -243,7 +243,7 @@ KUSTOMIZE_VERSION ?= v5.4.2 CONTROLLER_TOOLS_VERSION ?= v0.15.0 CRD_REF_DOCS_VERSION ?= v0.1.0 ENVTEST_VERSION ?= release-0.18 -GOLANGCI_LINT_VERSION ?= v1.59.1 +GOLANGCI_LINT_VERSION ?= v1.63.4 ENVSUBST_VERSION ?= v1.4.2 .PHONY: kustomize diff --git a/infra/feast-operator/README.md b/infra/feast-operator/README.md index 6c0ef634e78..9f530ad19a6 100644 --- a/infra/feast-operator/README.md +++ b/infra/feast-operator/README.md @@ -24,6 +24,7 @@ kubectl apply --server-side --force-conflicts -f https://raw.githubusercontent.c ``` > **NOTE**: Server-Side Apply (`--server-side`) is required because the CRD includes both v1alpha1 and v1 API versions, making it too large for the standard `kubectl apply` annotation limit. If you encounter annotation size errors, use `--server-side --force-conflicts` flags. + ##### Feast Operator Demo Videos [![](https://img.youtube.com/vi/48cb4AHxPR4/0.jpg)](https://www.youtube.com/playlist?list=PLPzVNzik7rsAN-amQLZckd0so3cIr7blX) diff --git a/infra/feast-operator/api/feastversion/version.go b/infra/feast-operator/api/feastversion/version.go index f80338fb9fc..621154918f3 100644 --- a/infra/feast-operator/api/feastversion/version.go +++ b/infra/feast-operator/api/feastversion/version.go @@ -17,4 +17,4 @@ limitations under the License. package feastversion // Feast release version. 
Keep on line #20, this is critical to release CI -const FeastVersion = "0.60.0" +const FeastVersion = "0.61.0" diff --git a/infra/feast-operator/api/v1/featurestore_types.go b/infra/feast-operator/api/v1/featurestore_types.go index 977fc586110..6aeea91d70e 100644 --- a/infra/feast-operator/api/v1/featurestore_types.go +++ b/infra/feast-operator/api/v1/featurestore_types.go @@ -18,9 +18,11 @@ package v1 import ( appsv1 "k8s.io/api/apps/v1" + autoscalingv2 "k8s.io/api/autoscaling/v2" batchv1 "k8s.io/api/batch/v1" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/util/intstr" ) const ( @@ -67,6 +69,10 @@ const ( ) // FeatureStoreSpec defines the desired state of FeatureStore +// +kubebuilder:validation:XValidation:rule="self.replicas <= 1 || !has(self.services) || !has(self.services.scaling) || !has(self.services.scaling.autoscaling)",message="replicas > 1 and services.scaling.autoscaling are mutually exclusive." +// +kubebuilder:validation:XValidation:rule="self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) || !has(self.services.scaling.autoscaling)) || (has(self.services) && has(self.services.onlineStore) && has(self.services.onlineStore.persistence) && has(self.services.onlineStore.persistence.store))",message="Scaling requires DB-backed persistence for the online store. Configure services.onlineStore.persistence.store when using replicas > 1 or autoscaling." +// +kubebuilder:validation:XValidation:rule="self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) || !has(self.services.scaling.autoscaling)) || (!has(self.services) || !has(self.services.offlineStore) || (has(self.services.offlineStore.persistence) && has(self.services.offlineStore.persistence.store)))",message="Scaling requires DB-backed persistence for the offline store. Configure services.offlineStore.persistence.store when using replicas > 1 or autoscaling." 
+// +kubebuilder:validation:XValidation:rule="self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) || !has(self.services.scaling.autoscaling)) || (has(self.services) && has(self.services.registry) && (has(self.services.registry.remote) || (has(self.services.registry.local) && has(self.services.registry.local.persistence) && (has(self.services.registry.local.persistence.store) || (has(self.services.registry.local.persistence.file) && has(self.services.registry.local.persistence.file.path) && (self.services.registry.local.persistence.file.path.startsWith('s3://') || self.services.registry.local.persistence.file.path.startsWith('gs://')))))))",message="Scaling requires DB-backed or remote registry. Configure registry.local.persistence.store or use a remote registry when using replicas > 1 or autoscaling. S3/GCS-backed registry is also allowed." type FeatureStoreSpec struct { // +kubebuilder:validation:Pattern="^[A-Za-z0-9][A-Za-z0-9_-]*$" // FeastProject is the Feast project id. This can be any alphanumeric string with underscores and hyphens, but it cannot start with an underscore or hyphen. Required. @@ -76,6 +82,11 @@ type FeatureStoreSpec struct { AuthzConfig *AuthzConfig `json:"authz,omitempty"` CronJob *FeastCronJob `json:"cronJob,omitempty"` BatchEngine *BatchEngineConfig `json:"batchEngine,omitempty"` + // Replicas is the desired number of pod replicas. Used by the scale sub-resource. + // Mutually exclusive with services.scaling.autoscaling. + // +kubebuilder:default=1 + // +kubebuilder:validation:Minimum=1 + Replicas *int32 `json:"replicas"` } // FeastProjectDir defines how to create the feast project directory. 
@@ -106,7 +117,7 @@ type GitCloneOptions struct { type FeastInitOptions struct { Minimal bool `json:"minimal,omitempty"` // Template for the created project - // +kubebuilder:validation:Enum=local;gcp;aws;snowflake;spark;postgres;hbase;cassandra;hazelcast;ikv;couchbase;clickhouse + // +kubebuilder:validation:Enum=local;gcp;aws;snowflake;spark;postgres;hbase;cassandra;hazelcast;couchbase;clickhouse Template string `json:"template,omitempty"` } @@ -301,6 +312,64 @@ type FeatureStoreServices struct { DisableInitContainers bool `json:"disableInitContainers,omitempty"` // Volumes specifies the volumes to mount in the FeatureStore deployment. A corresponding `VolumeMount` should be added to whichever feast service(s) require access to said volume(s). Volumes []corev1.Volume `json:"volumes,omitempty"` + // Scaling configures horizontal scaling for the FeatureStore deployment (e.g. HPA autoscaling). + // For static replicas, use spec.replicas instead. + Scaling *ScalingConfig `json:"scaling,omitempty"` + // PodDisruptionBudgets configures a PodDisruptionBudget for the FeatureStore deployment. + // Only created when scaling is enabled (replicas > 1 or autoscaling). + // +optional + PodDisruptionBudgets *PDBConfig `json:"podDisruptionBudgets,omitempty"` + // TopologySpreadConstraints defines how pods are spread across topology domains. + // When scaling is enabled and this is not set, the operator auto-injects a soft + // zone-spread constraint (whenUnsatisfiable: ScheduleAnyway). + // Set to an empty array to disable auto-injection. + // +optional + TopologySpreadConstraints []corev1.TopologySpreadConstraint `json:"topologySpreadConstraints,omitempty"` + // Affinity defines the pod scheduling constraints for the FeatureStore deployment. + // When scaling is enabled and this is not set, the operator auto-injects a soft + // pod anti-affinity rule to prefer spreading pods across nodes. 
+ // +optional + Affinity *corev1.Affinity `json:"affinity,omitempty"` +} + +// ScalingConfig configures horizontal scaling for the FeatureStore deployment. +type ScalingConfig struct { + // Autoscaling configures a HorizontalPodAutoscaler for the FeatureStore deployment. + // Mutually exclusive with spec.replicas. + // +optional + Autoscaling *AutoscalingConfig `json:"autoscaling,omitempty"` +} + +// AutoscalingConfig defines HPA settings for the FeatureStore deployment. +type AutoscalingConfig struct { + // MinReplicas is the lower limit for the number of replicas. Defaults to 1. + // +kubebuilder:validation:Minimum=1 + // +optional + MinReplicas *int32 `json:"minReplicas,omitempty"` + // MaxReplicas is the upper limit for the number of replicas. Required. + // +kubebuilder:validation:Minimum=1 + MaxReplicas int32 `json:"maxReplicas"` + // Metrics contains the specifications for which to use to calculate the desired replica count. + // If not set, defaults to 80% CPU utilization. + // +optional + Metrics []autoscalingv2.MetricSpec `json:"metrics,omitempty"` + // Behavior configures the scaling behavior of the target. + // +optional + Behavior *autoscalingv2.HorizontalPodAutoscalerBehavior `json:"behavior,omitempty"` +} + +// PDBConfig configures a PodDisruptionBudget for the FeatureStore deployment. +// Exactly one of minAvailable or maxUnavailable must be set. +// +kubebuilder:validation:XValidation:rule="[has(self.minAvailable), has(self.maxUnavailable)].exists_one(c, c)",message="Exactly one of minAvailable or maxUnavailable must be set." +type PDBConfig struct { + // MinAvailable specifies the minimum number/percentage of pods that must remain available. + // Mutually exclusive with maxUnavailable. + // +optional + MinAvailable *intstr.IntOrString `json:"minAvailable,omitempty"` + // MaxUnavailable specifies the maximum number/percentage of pods that can be unavailable. + // Mutually exclusive with minAvailable. 
+ // +optional + MaxUnavailable *intstr.IntOrString `json:"maxUnavailable,omitempty"` } // OfflineStore configures the offline store service @@ -381,7 +450,7 @@ type OnlineStoreFilePersistence struct { // OnlineStoreDBStorePersistence configures the DB store persistence for the online store service type OnlineStoreDBStorePersistence struct { // Type of the persistence type you want to use. - // +kubebuilder:validation:Enum=snowflake.online;redis;ikv;datastore;dynamodb;bigtable;postgres;cassandra;mysql;hazelcast;singlestore;hbase;elasticsearch;qdrant;couchbase.online;milvus;hybrid + // +kubebuilder:validation:Enum=snowflake.online;redis;datastore;dynamodb;bigtable;postgres;cassandra;mysql;hazelcast;singlestore;hbase;elasticsearch;qdrant;couchbase.online;milvus;hybrid;mongodb Type string `json:"type"` // Data store parameters should be placed as-is from the "feature_store.yaml" under the secret key. "registry_type" & "type" fields should be removed. SecretRef corev1.LocalObjectReference `json:"secretRef"` @@ -392,7 +461,6 @@ type OnlineStoreDBStorePersistence struct { var ValidOnlineStoreDBStorePersistenceTypes = []string{ "snowflake.online", "redis", - "ikv", "datastore", "dynamodb", "bigtable", @@ -407,6 +475,7 @@ var ValidOnlineStoreDBStorePersistenceTypes = []string{ "couchbase.online", "milvus", "hybrid", + "mongodb", } // LocalRegistryConfig configures the registry service @@ -690,6 +759,20 @@ type FeatureStoreStatus struct { FeastVersion string `json:"feastVersion,omitempty"` Phase string `json:"phase,omitempty"` ServiceHostnames ServiceHostnames `json:"serviceHostnames,omitempty"` + // Replicas is the current number of ready pod replicas (used by the scale sub-resource). + Replicas int32 `json:"replicas,omitempty"` + // Selector is the label selector for pods managed by the FeatureStore deployment (used by the scale sub-resource). + Selector string `json:"selector,omitempty"` + // ScalingStatus reports the current scaling state of the FeatureStore deployment. 
+ ScalingStatus *ScalingStatus `json:"scalingStatus,omitempty"` +} + +// ScalingStatus reports the observed scaling state. +type ScalingStatus struct { + // CurrentReplicas is the current number of pod replicas. + CurrentReplicas int32 `json:"currentReplicas,omitempty"` + // DesiredReplicas is the desired number of pod replicas. + DesiredReplicas int32 `json:"desiredReplicas,omitempty"` } // ServiceHostnames defines the service hostnames in the format of :, e.g. example.svc.cluster.local:80 @@ -706,6 +789,7 @@ type ServiceHostnames struct { // +kubebuilder:resource:shortName=feast // +kubebuilder:printcolumn:name="Status",type=string,JSONPath=`.status.phase` // +kubebuilder:printcolumn:name="Age",type=date,JSONPath=`.metadata.creationTimestamp` +// +kubebuilder:subresource:scale:specpath=.spec.replicas,statuspath=.status.replicas,selectorpath=.status.selector // +kubebuilder:storageversion // FeatureStore is the Schema for the featurestores API diff --git a/infra/feast-operator/api/v1/zz_generated.deepcopy.go b/infra/feast-operator/api/v1/zz_generated.deepcopy.go index 870f4489a4b..63500266f02 100644 --- a/infra/feast-operator/api/v1/zz_generated.deepcopy.go +++ b/infra/feast-operator/api/v1/zz_generated.deepcopy.go @@ -22,10 +22,12 @@ package v1 import ( appsv1 "k8s.io/api/apps/v1" + "k8s.io/api/autoscaling/v2" batchv1 "k8s.io/api/batch/v1" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" runtime "k8s.io/apimachinery/pkg/runtime" + "k8s.io/apimachinery/pkg/util/intstr" ) // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. @@ -53,6 +55,38 @@ func (in *AuthzConfig) DeepCopy() *AuthzConfig { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *AutoscalingConfig) DeepCopyInto(out *AutoscalingConfig) { + *out = *in + if in.MinReplicas != nil { + in, out := &in.MinReplicas, &out.MinReplicas + *out = new(int32) + **out = **in + } + if in.Metrics != nil { + in, out := &in.Metrics, &out.Metrics + *out = make([]v2.MetricSpec, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Behavior != nil { + in, out := &in.Behavior, &out.Behavior + *out = new(v2.HorizontalPodAutoscalerBehavior) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new AutoscalingConfig. +func (in *AutoscalingConfig) DeepCopy() *AutoscalingConfig { + if in == nil { + return nil + } + out := new(AutoscalingConfig) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *BatchEngineConfig) DeepCopyInto(out *BatchEngineConfig) { *out = *in @@ -342,6 +376,28 @@ func (in *FeatureStoreServices) DeepCopyInto(out *FeatureStoreServices) { (*in)[i].DeepCopyInto(&(*out)[i]) } } + if in.Scaling != nil { + in, out := &in.Scaling, &out.Scaling + *out = new(ScalingConfig) + (*in).DeepCopyInto(*out) + } + if in.PodDisruptionBudgets != nil { + in, out := &in.PodDisruptionBudgets, &out.PodDisruptionBudgets + *out = new(PDBConfig) + (*in).DeepCopyInto(*out) + } + if in.TopologySpreadConstraints != nil { + in, out := &in.TopologySpreadConstraints, &out.TopologySpreadConstraints + *out = make([]corev1.TopologySpreadConstraint, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Affinity != nil { + in, out := &in.Affinity, &out.Affinity + *out = new(corev1.Affinity) + (*in).DeepCopyInto(*out) + } } // DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureStoreServices. 
@@ -382,6 +438,11 @@ func (in *FeatureStoreSpec) DeepCopyInto(out *FeatureStoreSpec) { *out = new(BatchEngineConfig) (*in).DeepCopyInto(*out) } + if in.Replicas != nil { + in, out := &in.Replicas, &out.Replicas + *out = new(int32) + **out = **in + } } // DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureStoreSpec. @@ -406,6 +467,11 @@ func (in *FeatureStoreStatus) DeepCopyInto(out *FeatureStoreStatus) { } } out.ServiceHostnames = in.ServiceHostnames + if in.ScalingStatus != nil { + in, out := &in.ScalingStatus, &out.ScalingStatus + *out = new(ScalingStatus) + **out = **in + } } // DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FeatureStoreStatus. @@ -830,6 +896,31 @@ func (in *OptionalCtrConfigs) DeepCopy() *OptionalCtrConfigs { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *PDBConfig) DeepCopyInto(out *PDBConfig) { + *out = *in + if in.MinAvailable != nil { + in, out := &in.MinAvailable, &out.MinAvailable + *out = new(intstr.IntOrString) + **out = **in + } + if in.MaxUnavailable != nil { + in, out := &in.MaxUnavailable, &out.MaxUnavailable + *out = new(intstr.IntOrString) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PDBConfig. +func (in *PDBConfig) DeepCopy() *PDBConfig { + if in == nil { + return nil + } + out := new(PDBConfig) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *PvcConfig) DeepCopyInto(out *PvcConfig) { *out = *in @@ -1044,6 +1135,41 @@ func (in *RemoteRegistryConfig) DeepCopy() *RemoteRegistryConfig { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *ScalingConfig) DeepCopyInto(out *ScalingConfig) { + *out = *in + if in.Autoscaling != nil { + in, out := &in.Autoscaling, &out.Autoscaling + *out = new(AutoscalingConfig) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScalingConfig. +func (in *ScalingConfig) DeepCopy() *ScalingConfig { + if in == nil { + return nil + } + out := new(ScalingConfig) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ScalingStatus) DeepCopyInto(out *ScalingStatus) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScalingStatus. +func (in *ScalingStatus) DeepCopy() *ScalingStatus { + if in == nil { + return nil + } + out := new(ScalingStatus) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *SecretKeyNames) DeepCopyInto(out *SecretKeyNames) { *out = *in diff --git a/infra/feast-operator/api/v1alpha1/featurestore_types.go b/infra/feast-operator/api/v1alpha1/featurestore_types.go index 27b151bbb77..23af949390c 100644 --- a/infra/feast-operator/api/v1alpha1/featurestore_types.go +++ b/infra/feast-operator/api/v1alpha1/featurestore_types.go @@ -105,7 +105,7 @@ type GitCloneOptions struct { type FeastInitOptions struct { Minimal bool `json:"minimal,omitempty"` // Template for the created project - // +kubebuilder:validation:Enum=local;gcp;aws;snowflake;spark;postgres;hbase;cassandra;hazelcast;ikv;couchbase;clickhouse + // +kubebuilder:validation:Enum=local;gcp;aws;snowflake;spark;postgres;hbase;cassandra;hazelcast;couchbase;clickhouse Template string `json:"template,omitempty"` } @@ -371,7 +371,7 @@ type OnlineStoreFilePersistence struct { // OnlineStoreDBStorePersistence configures the DB store persistence for the online store service type OnlineStoreDBStorePersistence struct { // Type of the persistence type you want to use. - // +kubebuilder:validation:Enum=snowflake.online;redis;ikv;datastore;dynamodb;bigtable;postgres;cassandra;mysql;hazelcast;singlestore;hbase;elasticsearch;qdrant;couchbase.online;milvus;hybrid + // +kubebuilder:validation:Enum=snowflake.online;redis;datastore;dynamodb;bigtable;postgres;cassandra;mysql;hazelcast;singlestore;hbase;elasticsearch;qdrant;couchbase.online;milvus;hybrid;mongodb Type string `json:"type"` // Data store parameters should be placed as-is from the "feature_store.yaml" under the secret key. "registry_type" & "type" fields should be removed. 
SecretRef corev1.LocalObjectReference `json:"secretRef"` @@ -382,7 +382,6 @@ type OnlineStoreDBStorePersistence struct { var ValidOnlineStoreDBStorePersistenceTypes = []string{ "snowflake.online", "redis", - "ikv", "datastore", "dynamodb", "bigtable", @@ -397,6 +396,7 @@ var ValidOnlineStoreDBStorePersistenceTypes = []string{ "couchbase.online", "milvus", "hybrid", + "mongodb", } // LocalRegistryConfig configures the registry service diff --git a/infra/feast-operator/bundle/manifests/feast-operator-controller-manager-metrics-service_v1_service.yaml b/infra/feast-operator/bundle/manifests/feast-operator-controller-manager-metrics-service_v1_service.yaml index 913517e198a..5749c2042b5 100644 --- a/infra/feast-operator/bundle/manifests/feast-operator-controller-manager-metrics-service_v1_service.yaml +++ b/infra/feast-operator/bundle/manifests/feast-operator-controller-manager-metrics-service_v1_service.yaml @@ -14,6 +14,7 @@ spec: protocol: TCP targetPort: 8443 selector: + app.kubernetes.io/name: feast-operator control-plane: controller-manager status: loadBalancer: {} diff --git a/infra/feast-operator/bundle/manifests/feast-operator.clusterserviceversion.yaml b/infra/feast-operator/bundle/manifests/feast-operator.clusterserviceversion.yaml index b0ff79ec692..64e6886444f 100644 --- a/infra/feast-operator/bundle/manifests/feast-operator.clusterserviceversion.yaml +++ b/infra/feast-operator/bundle/manifests/feast-operator.clusterserviceversion.yaml @@ -50,10 +50,10 @@ metadata: } ] capabilities: Basic Install - createdAt: "2026-02-17T13:52:39Z" + createdAt: "2026-03-10T20:00:10Z" operators.operatorframework.io/builder: operator-sdk-v1.38.0 operators.operatorframework.io/project_layout: go.kubebuilder.io/v4 - name: feast-operator.v0.60.0 + name: feast-operator.v0.61.0 namespace: placeholder spec: apiservicedefinitions: {} @@ -95,6 +95,18 @@ spec: - tokenreviews verbs: - create + - apiGroups: + - autoscaling + resources: + - horizontalpodautoscalers + verbs: + - create + 
- delete + - get + - list + - patch + - update + - watch - apiGroups: - batch resources: @@ -163,6 +175,18 @@ spec: - get - patch - update + - apiGroups: + - policy + resources: + - poddisruptionbudgets + verbs: + - create + - delete + - get + - list + - patch + - update + - watch - apiGroups: - rbac.authorization.k8s.io resources: @@ -212,6 +236,7 @@ spec: replicas: 1 selector: matchLabels: + app.kubernetes.io/name: feast-operator control-plane: controller-manager strategy: {} template: @@ -219,6 +244,7 @@ spec: annotations: kubectl.kubernetes.io/default-container: manager labels: + app.kubernetes.io/name: feast-operator control-plane: controller-manager spec: containers: @@ -230,10 +256,10 @@ spec: - /manager env: - name: RELATED_IMAGE_FEATURE_SERVER - value: quay.io/feastdev/feature-server:0.60.0 + value: quay.io/feastdev/feature-server:0.61.0 - name: RELATED_IMAGE_CRON_JOB value: quay.io/openshift/origin-cli:4.17 - image: quay.io/feastdev/feast-operator:0.60.0 + image: quay.io/feastdev/feast-operator:0.61.0 livenessProbe: httpGet: path: /healthz @@ -323,8 +349,8 @@ spec: name: Feast Community url: https://lf-aidata.atlassian.net/wiki/spaces/FEAST/ relatedImages: - - image: quay.io/feastdev/feature-server:0.60.0 + - image: quay.io/feastdev/feature-server:0.61.0 name: feature-server - image: quay.io/openshift/origin-cli:4.17 name: cron-job - version: 0.60.0 + version: 0.61.0 diff --git a/infra/feast-operator/bundle/manifests/feast.dev_featurestores.yaml b/infra/feast-operator/bundle/manifests/feast.dev_featurestores.yaml index f69971c1c4c..7cb4a4aed54 100644 --- a/infra/feast-operator/bundle/manifests/feast.dev_featurestores.yaml +++ b/infra/feast-operator/bundle/manifests/feast.dev_featurestores.yaml @@ -703,7 +703,6 @@ spec: - hbase - cassandra - hazelcast - - ikv - couchbase - clickhouse type: string @@ -712,553 +711,806 @@ spec: x-kubernetes-validations: - message: One selection required between init or git. 
rule: '[has(self.git), has(self.init)].exists_one(c, c)' + replicas: + default: 1 + description: |- + Replicas is the desired number of pod replicas. Used by the scale sub-resource. + Mutually exclusive with services. + format: int32 + minimum: 1 + type: integer services: description: FeatureStoreServices defines the desired feast services. An ephemeral onlineStore feature server is deployed by default. properties: - deploymentStrategy: - description: DeploymentStrategy describes how to replace existing - pods with new ones. + affinity: + description: Affinity defines the pod scheduling constraints for + the FeatureStore deployment. properties: - rollingUpdate: - description: |- - Rolling update config params. Present only if DeploymentStrategyType = - RollingUpdate. + nodeAffinity: + description: Describes node affinity scheduling rules for + the pod. properties: - maxSurge: - anyOf: - - type: integer - - type: string + preferredDuringSchedulingIgnoredDuringExecution: description: |- - The maximum number of pods that can be scheduled above the desired number of - pods. - x-kubernetes-int-or-string: true - maxUnavailable: - anyOf: - - type: integer - - type: string - description: The maximum number of pods that can be unavailable - during the update. - x-kubernetes-int-or-string: true - type: object - type: - description: Type of deployment. Can be "Recreate" or "RollingUpdate". - Default is RollingUpdate. 
- type: string - type: object - disableInitContainers: - description: Disable the 'feast repo initialization' initContainer - type: boolean - offlineStore: - description: OfflineStore configures the offline store service - properties: - persistence: - description: OfflineStorePersistence configures the persistence - settings for the offline store service - properties: - file: - description: OfflineStoreFilePersistence configures the - file-based persistence for the offline store service - properties: - pvc: - description: PvcConfig defines the settings for a - persistent file store based on PVCs. - properties: - create: - description: Settings for creating a new PVC - properties: - accessModes: - description: AccessModes k8s persistent volume - access modes. Defaults to ["ReadWriteOnce"]. - items: - type: string - type: array - resources: - description: Resources describes the storage - resource requirements for a volume. + The scheduler will prefer to schedule pods to nodes that satisfy + the affinity expressions specified by this field, but i + items: + description: |- + An empty preferred scheduling term matches all objects with implicit weight 0 + (i.e. it's a no-op). + properties: + preference: + description: A node selector term, associated with + the corresponding weight. + properties: + matchExpressions: + description: A list of node selector requirements + by node's labels. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true + key: + description: The label key that the selector + applies to. + type: string + operator: description: |- - Limits describes the maximum amount of compute resources allowed. 
- More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum - amount of compute resources required. - type: object + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator type: object - storageClassName: - description: StorageClassName is the name - of an existing StorageClass to which this - persistent volume belongs. - type: string - type: object - x-kubernetes-validations: - - message: PvcCreate is immutable - rule: self == oldSelf - mountPath: - description: |- - MountPath within the container at which the volume should be mounted. - Must start by "/" and cannot contain ':'. - type: string - ref: - description: Reference to an existing field - properties: - name: - default: "" + type: array + x-kubernetes-list-type: atomic + matchFields: + description: A list of node selector requirements + by node's fields. + items: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - required: - - mountPath - type: object - x-kubernetes-validations: - - message: One selection is required between ref and - create. 
- rule: '[has(self.ref), has(self.create)].exists_one(c, - c)' - - message: Mount path must start with '/' and must - not contain ':' - rule: self.mountPath.matches('^/[^:]*$') - type: - enum: - - file - - dask - - duckdb - type: string - type: object - store: - description: OfflineStoreDBStorePersistence configures - the DB store persistence for the offline store service + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the selector + applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + type: object + x-kubernetes-map-type: atomic + weight: + description: Weight associated with matching the + corresponding nodeSelectorTerm, in the range 1-100. + format: int32 + type: integer + required: + - preference + - weight + type: object + type: array + x-kubernetes-list-type: atomic + requiredDuringSchedulingIgnoredDuringExecution: + description: "If the affinity requirements specified by + this field are not met at\nscheduling time, the pod + will not be scheduled onto " properties: - secretKeyName: - description: By default, the selected store "type" - is used as the SecretKeyName - type: string - secretRef: - description: Data store parameters should be placed - as-is from the "feature_store.yaml" under the secret - key. - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
- type: string - type: object - x-kubernetes-map-type: atomic - type: - description: Type of the persistence type you want - to use. - enum: - - snowflake.offline - - bigquery - - redshift - - spark - - postgres - - trino - - athena - - mssql - - couchbase.offline - - clickhouse - - ray - type: string + nodeSelectorTerms: + description: Required. A list of node selector terms. + The terms are ORed. + items: + description: |- + A null or empty node selector term matches no objects. The requirements of + them are ANDed. + properties: + matchExpressions: + description: A list of node selector requirements + by node's labels. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the selector + applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchFields: + description: A list of node selector requirements + by node's fields. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the selector + applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + type: object + x-kubernetes-map-type: atomic + type: array + x-kubernetes-list-type: atomic required: - - secretRef - - type + - nodeSelectorTerms type: object + x-kubernetes-map-type: atomic type: object - x-kubernetes-validations: - - message: One selection required between file or store. - rule: '[has(self.file), has(self.store)].exists_one(c, c)' - server: - description: Creates a remote offline server container + podAffinity: + description: Describes pod affinity scheduling rules (e.g. + co-locate this pod in the same node, zone, etc. as some + other pod(s)). properties: - env: + preferredDuringSchedulingIgnoredDuringExecution: + description: |- + The scheduler will prefer to schedule pods to nodes that satisfy + the affinity expressions specified by this field, but i items: - description: EnvVar represents an environment variable - present in a Container. + description: The weights of all of the matched WeightedPodAffinityTerm + fields are added per-node to find the most preferred + node(s) properties: - name: - description: Name of the environment variable. Must - be a C_IDENTIFIER. - type: string - value: - description: |- - Variable references $(VAR_NAME) are expanded - using the previously defined environment variables in the container and - any - type: string - valueFrom: - description: Source for the environment variable's - value. Cannot be used if value is not empty. + podAffinityTerm: + description: Required. A pod affinity term, associated + with the corresponding weight. properties: - configMapKeyRef: - description: Selects a key of a ConfigMap. - properties: - key: - description: The key to select. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
- type: string - optional: - description: Specify whether the ConfigMap - or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - fieldRef: - description: 'Selects a field of the pod: supports - metadata.name, metadata.namespace, `metadata.labels['''']`, - `metadata.' - properties: - apiVersion: - description: Version of the schema the FieldPath - is written in terms of, defaults to "v1". - type: string - fieldPath: - description: Path of the field to select - in the specified API version. - type: string - required: - - fieldPath - type: object - x-kubernetes-map-type: atomic - resourceFieldRef: + labelSelector: description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, limits. + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. properties: - containerName: - description: 'Container name: required for - volumes, optional for env vars' - type: string - divisor: - anyOf: - - type: integer - - type: string - description: Specifies the output format - of the exposed resources, defaults to - "1" - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - description: 'Required: resource to select' - type: string - required: - - resource + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. 
+ type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object type: object x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret in the - pod's namespace + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. properties: - key: - description: The key of the secret to select - from. Must be a valid secret key. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the Secret - or its key must be defined - type: boolean - required: - - key + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. 
+ Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object type: object x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t + type: string + required: + - topologyKey type: object + weight: + description: |- + weight associated with matching the corresponding podAffinityTerm, + in the range 1-100. + format: int32 + type: integer required: - - name + - podAffinityTerm + - weight type: object type: array - envFrom: + x-kubernetes-list-type: atomic + requiredDuringSchedulingIgnoredDuringExecution: + description: "If the affinity requirements specified by + this field are not met at\nscheduling time, the pod + will not be scheduled onto " items: - description: EnvFromSource represents the source of - a set of ConfigMaps + description: "Defines a set of pods (namely those matching + the labelSelector\nrelative to the given namespace(s)) + that this pod should " properties: - configMapRef: - description: The ConfigMap to select from + labelSelector: + description: |- + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. properties: - name: - default: "" - description: |- - Name of the referent. 
- This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap must - be defined - type: boolean + matchExpressions: + description: matchExpressions is a list of label + selector requirements. The requirements are + ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that + the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object type: object x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend to - each key in the ConfigMap. Must be a C_IDENTIFIER. - type: string - secretRef: - description: The Secret to select from + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. 
properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the Secret must - be defined - type: boolean + matchExpressions: + description: matchExpressions is a list of label + selector requirements. The requirements are + ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that + the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object type: object x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t + type: string + required: + - topologyKey type: object type: array - image: - type: string - imagePullPolicy: - description: PullPolicy describes a policy for if/when - to pull a container image - type: string - logLevel: - description: |- - LogLevel sets the logging level for the server - Allowed values: "debug", "info", "warning", "error", "critical". 
- enum: - - debug - - info - - warning - - error - - critical - type: string - metrics: - description: Metrics exposes Prometheus-compatible metrics - for the Feast server when enabled. - type: boolean - nodeSelector: - additionalProperties: - type: string - type: object - resources: - description: ResourceRequirements describes the compute - resource requirements. - properties: - claims: - description: |- - Claims lists the names of resources, defined in spec.resourceClaims, - that are used by this container. - items: - description: ResourceClaim references one entry - in PodSpec.ResourceClaims. + x-kubernetes-list-type: atomic + type: object + podAntiAffinity: + description: Describes pod anti-affinity scheduling rules + (e.g. avoid putting this pod in the same node, zone, etc. + properties: + preferredDuringSchedulingIgnoredDuringExecution: + description: "The scheduler will prefer to schedule pods + to nodes that satisfy\nthe anti-affinity expressions + specified by this field, " + items: + description: The weights of all of the matched WeightedPodAffinityTerm + fields are added per-node to find the most preferred + node(s) + properties: + podAffinityTerm: + description: Required. A pod affinity term, associated + with the corresponding weight. properties: - name: + labelSelector: description: |- - Name must match the name of one entry in pod.spec.resourceClaims of - the Pod where this field is used. + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. 
+ type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t type: string required: - - name + - topologyKey type: object - type: array - x-kubernetes-list-map-keys: - - name - x-kubernetes-list-type: map - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum amount - of compute resources required. - type: object - type: object - tls: - description: TlsConfigs configures server TLS for a feast - service. - properties: - disable: - description: will disable TLS for the feast service. - useful in an openshift cluster, for example, where - TLS is configured by default - type: boolean - secretKeyNames: - description: SecretKeyNames defines the secret key - names for the TLS key and cert. 
- properties: - tlsCrt: - description: defaults to "tls.crt" - type: string - tlsKey: - description: defaults to "tls.key" - type: string - type: object - secretRef: - description: references the local k8s secret where - the TLS key and cert reside - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - type: object - x-kubernetes-validations: - - message: '`secretRef` required if `disable` is false.' - rule: '(!has(self.disable) || !self.disable) ? has(self.secretRef) - : true' - volumeMounts: - description: VolumeMounts defines the list of volumes - that should be mounted into the feast container. - items: - description: VolumeMount describes a mounting of a Volume - within a container. - properties: - mountPath: - description: |- - Path within the container at which the volume should be mounted. Must - not contain ':'. - type: string - mountPropagation: - description: |- - mountPropagation determines how mounts are propagated from the host - to container and the other way around. - type: string - name: - description: This must match the Name of a Volume. - type: string - readOnly: - description: |- - Mounted read-only if true, read-write otherwise (false or unspecified). - Defaults to false. - type: boolean - recursiveReadOnly: - description: |- - RecursiveReadOnly specifies whether read-only mounts should be handled - recursively. - type: string - subPath: + weight: description: |- - Path within the volume from which the container's volume should be mounted. - Defaults to "" (volume's root). - type: string - subPathExpr: - description: Expanded path within the volume from - which the container's volume should be mounted. - type: string + weight associated with matching the corresponding podAffinityTerm, + in the range 1-100. 
+ format: int32 + type: integer required: - - mountPath - - name + - podAffinityTerm + - weight type: object type: array - workerConfigs: - description: WorkerConfigs defines the worker configuration - for the Feast server. - properties: - keepAliveTimeout: - description: |- - KeepAliveTimeout is the timeout for keep-alive connections in seconds. - Defaults to 30. - format: int32 - minimum: 1 - type: integer - maxRequests: - description: |- - MaxRequests is the maximum number of requests a worker will process before restarting. - This helps prevent memory leaks. - format: int32 - minimum: 0 - type: integer - maxRequestsJitter: - description: |- - MaxRequestsJitter is the maximum jitter to add to max-requests to prevent - thundering herd effect on worker restart. - format: int32 - minimum: 0 - type: integer - registryTTLSeconds: - description: RegistryTTLSeconds is the number of seconds - after which the registry is refreshed. - format: int32 - minimum: 0 - type: integer - workerConnections: - description: |- - WorkerConnections is the maximum number of simultaneous clients per worker process. - Defaults to 1000. - format: int32 - minimum: 1 - type: integer - workers: - description: Workers is the number of worker processes. - Use -1 to auto-calculate based on CPU cores (2 * - CPU + 1). - format: int32 - minimum: -1 - type: integer - type: object + x-kubernetes-list-type: atomic + requiredDuringSchedulingIgnoredDuringExecution: + description: "If the anti-affinity requirements specified + by this field are not met at\nscheduling time, the pod + will not be scheduled " + items: + description: "Defines a set of pods (namely those matching + the labelSelector\nrelative to the given namespace(s)) + that this pod should " + properties: + labelSelector: + description: |- + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. 
+ properties: + matchExpressions: + description: matchExpressions is a list of label + selector requirements. The requirements are + ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that + the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list of label + selector requirements. The requirements are + ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that + the selector applies to. 
+ type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t + type: string + required: + - topologyKey + type: object + type: array + x-kubernetes-list-type: atomic type: object type: object - onlineStore: - description: OnlineStore configures the online store service + deploymentStrategy: + description: DeploymentStrategy describes how to replace existing + pods with new ones. + properties: + rollingUpdate: + description: |- + Rolling update config params. Present only if DeploymentStrategyType = + RollingUpdate. + properties: + maxSurge: + anyOf: + - type: integer + - type: string + description: |- + The maximum number of pods that can be scheduled above the desired number of + pods. + x-kubernetes-int-or-string: true + maxUnavailable: + anyOf: + - type: integer + - type: string + description: The maximum number of pods that can be unavailable + during the update. + x-kubernetes-int-or-string: true + type: object + type: + description: Type of deployment. Can be "Recreate" or "RollingUpdate". + Default is RollingUpdate. 
+ type: string + type: object + disableInitContainers: + description: Disable the 'feast repo initialization' initContainer + type: boolean + offlineStore: + description: OfflineStore configures the offline store service properties: persistence: - description: OnlineStorePersistence configures the persistence - settings for the online store service + description: OfflineStorePersistence configures the persistence + settings for the offline store service properties: file: - description: OnlineStoreFilePersistence configures the - file-based persistence for the online store service + description: OfflineStoreFilePersistence configures the + file-based persistence for the offline store service properties: - path: - type: string pvc: description: PvcConfig defines the settings for a persistent file store based on PVCs. @@ -1335,21 +1587,16 @@ spec: - message: Mount path must start with '/' and must not contain ':' rule: self.mountPath.matches('^/[^:]*$') + type: + enum: + - file + - dask + - duckdb + type: string type: object - x-kubernetes-validations: - - message: Ephemeral stores must have absolute paths. - rule: '(!has(self.pvc) && has(self.path)) ? self.path.startsWith(''/'') - : true' - - message: PVC path must be a file name only, with no - slashes. - rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') - : true' - - message: Online store does not support S3 or GS buckets. - rule: 'has(self.path) ? !(self.path.startsWith(''s3://'') - || self.path.startsWith(''gs://'')) : true' store: - description: OnlineStoreDBStorePersistence configures - the DB store persistence for the online store service + description: OfflineStoreDBStorePersistence configures + the DB store persistence for the offline store service properties: secretKeyName: description: By default, the selected store "type" @@ -1373,23 +1620,17 @@ spec: description: Type of the persistence type you want to use. 
enum: - - snowflake.online - - redis - - ikv - - datastore - - dynamodb - - bigtable + - snowflake.offline + - bigquery + - redshift + - spark - postgres - - cassandra - - mysql - - hazelcast - - singlestore - - hbase - - elasticsearch - - qdrant - - couchbase.online - - milvus - - hybrid + - trino + - athena + - mssql + - couchbase.offline + - clickhouse + - ray type: string required: - secretRef @@ -1400,7 +1641,7 @@ spec: - message: One selection required between file or store. rule: '[has(self.file), has(self.store)].exists_one(c, c)' server: - description: Creates a feature server container + description: Creates a remote offline server container properties: env: items: @@ -1756,151 +1997,74 @@ spec: type: object type: object type: object - registry: - description: Registry configures the registry service. One selection - is required. Local is the default setting. + onlineStore: + description: OnlineStore configures the online store service properties: - local: - description: LocalRegistryConfig configures the registry service + persistence: + description: OnlineStorePersistence configures the persistence + settings for the online store service properties: - persistence: - description: RegistryPersistence configures the persistence - settings for the registry service + file: + description: OnlineStoreFilePersistence configures the + file-based persistence for the online store service properties: - file: - description: RegistryFilePersistence configures the - file-based persistence for the registry service + path: + type: string + pvc: + description: PvcConfig defines the settings for a + persistent file store based on PVCs. properties: - cache_mode: - description: |- - CacheMode defines the registry cache update strategy. - Allowed values are "sync" and "thread". - enum: - - none - - sync - - thread - type: string - cache_ttl_seconds: - description: CacheTTLSeconds defines the TTL (in - seconds) for the registry cache. 
- format: int32 - minimum: 0 - type: integer - path: - type: string - pvc: - description: PvcConfig defines the settings for - a persistent file store based on PVCs. + create: + description: Settings for creating a new PVC properties: - create: - description: Settings for creating a new PVC - properties: - accessModes: - description: AccessModes k8s persistent - volume access modes. Defaults to ["ReadWriteOnce"]. - items: - type: string - type: array - resources: - description: Resources describes the storage - resource requirements for a volume. - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the - minimum amount of compute resources - required. - type: object - type: object - storageClassName: - description: StorageClassName is the name - of an existing StorageClass to which - this persistent volume belongs. - type: string - type: object - x-kubernetes-validations: - - message: PvcCreate is immutable - rule: self == oldSelf - mountPath: - description: |- - MountPath within the container at which the volume should be mounted. - Must start by "/" and cannot contain ':'. - type: string - ref: - description: Reference to an existing field + accessModes: + description: AccessModes k8s persistent volume + access modes. Defaults to ["ReadWriteOnce"]. 
+ items: + type: string + type: array + resources: + description: Resources describes the storage + resource requirements for a volume. properties: - name: - default: "" + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum + amount of compute resources required. + type: object type: object - x-kubernetes-map-type: atomic - required: - - mountPath + storageClassName: + description: StorageClassName is the name + of an existing StorageClass to which this + persistent volume belongs. + type: string type: object x-kubernetes-validations: - - message: One selection is required between ref - and create. - rule: '[has(self.ref), has(self.create)].exists_one(c, - c)' - - message: Mount path must start with '/' and - must not contain ':' - rule: self.mountPath.matches('^/[^:]*$') - s3_additional_kwargs: - additionalProperties: - type: string - type: object - type: object - x-kubernetes-validations: - - message: Registry files must use absolute paths - or be S3 ('s3://') or GS ('gs://') object store - URIs. - rule: '(!has(self.pvc) && has(self.path)) ? (self.path.startsWith(''/'') - || self.path.startsWith(''s3://'') || self.path.startsWith(''gs://'')) - : true' - - message: PVC path must be a file name only, with - no slashes. 
- rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') - : true' - - message: PVC persistence does not support S3 or - GS object store URIs. - rule: '(has(self.pvc) && has(self.path)) ? !(self.path.startsWith(''s3://'') - || self.path.startsWith(''gs://'')) : true' - - message: Additional S3 settings are available only - for S3 object store URIs. - rule: '(has(self.s3_additional_kwargs) && has(self.path)) - ? self.path.startsWith(''s3://'') : true' - store: - description: RegistryDBStorePersistence configures - the DB store persistence for the registry service - properties: - secretKeyName: - description: By default, the selected store "type" - is used as the SecretKeyName + - message: PvcCreate is immutable + rule: self == oldSelf + mountPath: + description: |- + MountPath within the container at which the volume should be mounted. + Must start by "/" and cannot contain ':'. type: string - secretRef: - description: Data store parameters should be placed - as-is from the "feature_store.yaml" under the - secret key. + ref: + description: Reference to an existing field properties: name: default: "" @@ -1911,146 +2075,109 @@ spec: type: string type: object x-kubernetes-map-type: atomic - type: - description: Type of the persistence type you - want to use. - enum: - - sql - - snowflake.registry - type: string required: - - secretRef - - type + - mountPath type: object + x-kubernetes-validations: + - message: One selection is required between ref and + create. + rule: '[has(self.ref), has(self.create)].exists_one(c, + c)' + - message: Mount path must start with '/' and must + not contain ':' + rule: self.mountPath.matches('^/[^:]*$') type: object x-kubernetes-validations: - - message: One selection required between file or store. - rule: '[has(self.file), has(self.store)].exists_one(c, - c)' - server: - description: Creates a registry server container + - message: Ephemeral stores must have absolute paths. + rule: '(!has(self.pvc) && has(self.path)) ? 
self.path.startsWith(''/'') + : true' + - message: PVC path must be a file name only, with no + slashes. + rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') + : true' + - message: Online store does not support S3 or GS buckets. + rule: 'has(self.path) ? !(self.path.startsWith(''s3://'') + || self.path.startsWith(''gs://'')) : true' + store: + description: OnlineStoreDBStorePersistence configures + the DB store persistence for the online store service properties: - env: - items: - description: EnvVar represents an environment variable - present in a Container. + secretKeyName: + description: By default, the selected store "type" + is used as the SecretKeyName + type: string + secretRef: + description: Data store parameters should be placed + as-is from the "feature_store.yaml" under the secret + key. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: + description: Type of the persistence type you want + to use. + enum: + - snowflake.online + - redis + - datastore + - dynamodb + - bigtable + - postgres + - cassandra + - mysql + - hazelcast + - singlestore + - hbase + - elasticsearch + - qdrant + - couchbase.online + - milvus + - hybrid + - mongodb + type: string + required: + - secretRef + - type + type: object + type: object + x-kubernetes-validations: + - message: One selection required between file or store. + rule: '[has(self.file), has(self.store)].exists_one(c, c)' + server: + description: Creates a feature server container + properties: + env: + items: + description: EnvVar represents an environment variable + present in a Container. + properties: + name: + description: Name of the environment variable. Must + be a C_IDENTIFIER. 
+ type: string + value: + description: |- + Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any + type: string + valueFrom: + description: Source for the environment variable's + value. Cannot be used if value is not empty. properties: - name: - description: Name of the environment variable. - Must be a C_IDENTIFIER. - type: string - value: - description: |- - Variable references $(VAR_NAME) are expanded - using the previously defined environment variables in the container and - any - type: string - valueFrom: - description: Source for the environment variable's - value. Cannot be used if value is not empty. - properties: - configMapKeyRef: - description: Selects a key of a ConfigMap. - properties: - key: - description: The key to select. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap - or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - fieldRef: - description: 'Selects a field of the pod: - supports metadata.name, metadata.namespace, - `metadata.labels['''']`, `metadata.' - properties: - apiVersion: - description: Version of the schema the - FieldPath is written in terms of, - defaults to "v1". - type: string - fieldPath: - description: Path of the field to select - in the specified API version. - type: string - required: - - fieldPath - type: object - x-kubernetes-map-type: atomic - resourceFieldRef: - description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, limits. 
- properties: - containerName: - description: 'Container name: required - for volumes, optional for env vars' - type: string - divisor: - anyOf: - - type: integer - - type: string - description: Specifies the output format - of the exposed resources, defaults - to "1" - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - description: 'Required: resource to - select' - type: string - required: - - resource - type: object - x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret in - the pod's namespace - properties: - key: - description: The key of the secret to - select from. Must be a valid secret - key. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the Secret - or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - type: object - required: - - name - type: object - type: array - envFrom: - items: - description: EnvFromSource represents the source - of a set of ConfigMaps - properties: - configMapRef: - description: The ConfigMap to select from + configMapKeyRef: + description: Selects a key of a ConfigMap. properties: + key: + description: The key to select. + type: string name: default: "" description: |- @@ -2060,17 +2187,62 @@ spec: type: string optional: description: Specify whether the ConfigMap - must be defined + or its key must be defined type: boolean + required: + - key type: object x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend - to each key in the ConfigMap. Must be a C_IDENTIFIER. 
- type: string - secretRef: - description: The Secret to select from + fieldRef: + description: 'Selects a field of the pod: supports + metadata.name, metadata.namespace, `metadata.labels['''']`, + `metadata.' + properties: + apiVersion: + description: Version of the schema the FieldPath + is written in terms of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to select + in the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits. + properties: + containerName: + description: 'Container name: required for + volumes, optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format + of the exposed resources, defaults to + "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret in the + pod's namespace properties: + key: + description: The key of the secret to select + from. Must be a valid secret key. + type: string name: default: "" description: |- @@ -2080,392 +2252,1567 @@ spec: type: string optional: description: Specify whether the Secret - must be defined + or its key must be defined type: boolean + required: + - key type: object x-kubernetes-map-type: atomic type: object - type: array - grpc: - description: Enable gRPC registry server. Defaults - to true if unset. 
- type: boolean - image: - type: string - imagePullPolicy: - description: PullPolicy describes a policy for if/when - to pull a container image - type: string - logLevel: - description: |- - LogLevel sets the logging level for the server - Allowed values: "debug", "info", "warning", "error", "critical". - enum: - - debug - - info - - warning - - error - - critical - type: string - metrics: - description: Metrics exposes Prometheus-compatible - metrics for the Feast server when enabled. - type: boolean - nodeSelector: - additionalProperties: + required: + - name + type: object + type: array + envFrom: + items: + description: EnvFromSource represents the source of + a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend to + each key in the ConfigMap. Must be a C_IDENTIFIER. type: string - type: object - resources: - description: ResourceRequirements describes the compute - resource requirements. - properties: - claims: - description: |- - Claims lists the names of resources, defined in spec.resourceClaims, - that are used by this container. - items: - description: ResourceClaim references one entry - in PodSpec.ResourceClaims. - properties: - name: - description: |- - Name must match the name of one entry in pod.spec.resourceClaims of - the Pod where this field is used. 
- type: string - required: - - name - type: object - type: array - x-kubernetes-list-map-keys: - - name - x-kubernetes-list-type: map - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum amount - of compute resources required. - type: object - type: object - restAPI: - description: Enable REST API registry server. - type: boolean - tls: - description: TlsConfigs configures server TLS for - a feast service. - properties: - disable: - description: will disable TLS for the feast service. - useful in an openshift cluster, for example, - where TLS is configured by default - type: boolean - secretKeyNames: - description: SecretKeyNames defines the secret - key names for the TLS key and cert. - properties: - tlsCrt: - description: defaults to "tls.crt" - type: string - tlsKey: - description: defaults to "tls.key" - type: string - type: object - secretRef: - description: references the local k8s secret where - the TLS key and cert reside - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - type: object - x-kubernetes-validations: - - message: '`secretRef` required if `disable` is false.' - rule: '(!has(self.disable) || !self.disable) ? 
has(self.secretRef) - : true' - volumeMounts: - description: VolumeMounts defines the list of volumes - that should be mounted into the feast container. - items: - description: VolumeMount describes a mounting of - a Volume within a container. + secretRef: + description: The Secret to select from properties: - mountPath: - description: |- - Path within the container at which the volume should be mounted. Must - not contain ':'. - type: string - mountPropagation: - description: |- - mountPropagation determines how mounts are propagated from the host - to container and the other way around. - type: string name: - description: This must match the Name of a Volume. - type: string - readOnly: - description: |- - Mounted read-only if true, read-write otherwise (false or unspecified). - Defaults to false. - type: boolean - recursiveReadOnly: + default: "" description: |- - RecursiveReadOnly specifies whether read-only mounts should be handled - recursively. + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. type: string - subPath: + optional: + description: Specify whether the Secret must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + type: object + type: array + image: + type: string + imagePullPolicy: + description: PullPolicy describes a policy for if/when + to pull a container image + type: string + logLevel: + description: |- + LogLevel sets the logging level for the server + Allowed values: "debug", "info", "warning", "error", "critical". + enum: + - debug + - info + - warning + - error + - critical + type: string + metrics: + description: Metrics exposes Prometheus-compatible metrics + for the Feast server when enabled. + type: boolean + nodeSelector: + additionalProperties: + type: string + type: object + resources: + description: ResourceRequirements describes the compute + resource requirements. 
+ properties: + claims: + description: |- + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. + items: + description: ResourceClaim references one entry + in PodSpec.ResourceClaims. + properties: + name: description: |- - Path within the volume from which the container's volume should be mounted. - Defaults to "" (volume's root). - type: string - subPathExpr: - description: Expanded path within the volume - from which the container's volume should be - mounted. + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. type: string required: - - mountPath - name type: object type: array - workerConfigs: - description: WorkerConfigs defines the worker configuration - for the Feast server. + x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum amount + of compute resources required. + type: object + type: object + tls: + description: TlsConfigs configures server TLS for a feast + service. + properties: + disable: + description: will disable TLS for the feast service. + useful in an openshift cluster, for example, where + TLS is configured by default + type: boolean + secretKeyNames: + description: SecretKeyNames defines the secret key + names for the TLS key and cert. 
properties: - keepAliveTimeout: - description: |- - KeepAliveTimeout is the timeout for keep-alive connections in seconds. - Defaults to 30. - format: int32 - minimum: 1 - type: integer - maxRequests: - description: |- - MaxRequests is the maximum number of requests a worker will process before restarting. - This helps prevent memory leaks. - format: int32 - minimum: 0 - type: integer - maxRequestsJitter: - description: |- - MaxRequestsJitter is the maximum jitter to add to max-requests to prevent - thundering herd effect on worker restart. - format: int32 - minimum: 0 - type: integer - registryTTLSeconds: - description: RegistryTTLSeconds is the number - of seconds after which the registry is refreshed. - format: int32 - minimum: 0 - type: integer - workerConnections: + tlsCrt: + description: defaults to "tls.crt" + type: string + tlsKey: + description: defaults to "tls.key" + type: string + type: object + secretRef: + description: references the local k8s secret where + the TLS key and cert reside + properties: + name: + default: "" description: |- - WorkerConnections is the maximum number of simultaneous clients per worker process. - Defaults to 1000. - format: int32 - minimum: 1 - type: integer - workers: - description: Workers is the number of worker processes. - Use -1 to auto-calculate based on CPU cores - (2 * CPU + 1). - format: int32 - minimum: -1 - type: integer + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string type: object + x-kubernetes-map-type: atomic type: object x-kubernetes-validations: - - message: At least one of restAPI or grpc must be true - rule: self.restAPI == true || self.grpc == true || !has(self.grpc) - type: object - remote: - description: RemoteRegistryConfig points to a remote feast - registry server. - properties: - feastRef: - description: Reference to an existing `FeatureStore` CR - in the same k8s cluster. 
- properties: - name: - description: Name of the FeatureStore - type: string - namespace: - description: Namespace of the FeatureStore - type: string - required: - - name - type: object - hostname: - description: Host address of the remote registry service - - :, e.g. `registry..svc.cluster.local:80` - type: string - tls: - description: TlsRemoteRegistryConfigs configures client - TLS for a remote feast registry. + - message: '`secretRef` required if `disable` is false.' + rule: '(!has(self.disable) || !self.disable) ? has(self.secretRef) + : true' + volumeMounts: + description: VolumeMounts defines the list of volumes + that should be mounted into the feast container. + items: + description: VolumeMount describes a mounting of a Volume + within a container. + properties: + mountPath: + description: |- + Path within the container at which the volume should be mounted. Must + not contain ':'. + type: string + mountPropagation: + description: |- + mountPropagation determines how mounts are propagated from the host + to container and the other way around. + type: string + name: + description: This must match the Name of a Volume. + type: string + readOnly: + description: |- + Mounted read-only if true, read-write otherwise (false or unspecified). + Defaults to false. + type: boolean + recursiveReadOnly: + description: |- + RecursiveReadOnly specifies whether read-only mounts should be handled + recursively. + type: string + subPath: + description: |- + Path within the volume from which the container's volume should be mounted. + Defaults to "" (volume's root). + type: string + subPathExpr: + description: Expanded path within the volume from + which the container's volume should be mounted. + type: string + required: + - mountPath + - name + type: object + type: array + workerConfigs: + description: WorkerConfigs defines the worker configuration + for the Feast server. properties: - certName: - description: defines the configmap key name for the - client TLS cert. 
- type: string - configMapRef: - description: references the local k8s configmap where - the TLS cert resides - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - required: - - certName - - configMapRef + keepAliveTimeout: + description: |- + KeepAliveTimeout is the timeout for keep-alive connections in seconds. + Defaults to 30. + format: int32 + minimum: 1 + type: integer + maxRequests: + description: |- + MaxRequests is the maximum number of requests a worker will process before restarting. + This helps prevent memory leaks. + format: int32 + minimum: 0 + type: integer + maxRequestsJitter: + description: |- + MaxRequestsJitter is the maximum jitter to add to max-requests to prevent + thundering herd effect on worker restart. + format: int32 + minimum: 0 + type: integer + registryTTLSeconds: + description: RegistryTTLSeconds is the number of seconds + after which the registry is refreshed. + format: int32 + minimum: 0 + type: integer + workerConnections: + description: |- + WorkerConnections is the maximum number of simultaneous clients per worker process. + Defaults to 1000. + format: int32 + minimum: 1 + type: integer + workers: + description: Workers is the number of worker processes. + Use -1 to auto-calculate based on CPU cores (2 * + CPU + 1). + format: int32 + minimum: -1 + type: integer type: object type: object - x-kubernetes-validations: - - message: One selection required. - rule: '[has(self.hostname), has(self.feastRef)].exists_one(c, - c)' + type: object + podDisruptionBudgets: + description: PodDisruptionBudgets configures a PodDisruptionBudget + for the FeatureStore deployment. + properties: + maxUnavailable: + anyOf: + - type: integer + - type: string + description: MaxUnavailable specifies the maximum number/percentage + of pods that can be unavailable. 
+ x-kubernetes-int-or-string: true + minAvailable: + anyOf: + - type: integer + - type: string + description: MinAvailable specifies the minimum number/percentage + of pods that must remain available. + x-kubernetes-int-or-string: true type: object x-kubernetes-validations: - - message: One selection required. - rule: '[has(self.local), has(self.remote)].exists_one(c, c)' - securityContext: - description: PodSecurityContext holds pod-level security attributes - and common container settings. + - message: Exactly one of minAvailable or maxUnavailable must + be set. + rule: '[has(self.minAvailable), has(self.maxUnavailable)].exists_one(c, + c)' + registry: + description: Registry configures the registry service. One selection + is required. Local is the default setting. properties: - appArmorProfile: - description: appArmorProfile is the AppArmor options to use - by the containers in this pod. - properties: - localhostProfile: - description: localhostProfile indicates a profile loaded - on the node that should be used. - type: string - type: - description: type indicates which kind of AppArmor profile - will be applied. - type: string - required: - - type - type: object - fsGroup: - description: A special supplemental group that applies to - all containers in a pod. - format: int64 - type: integer - fsGroupChangePolicy: - description: |- - fsGroupChangePolicy defines behavior of changing ownership and permission of the volume - before being exposed inside Pod. - type: string - runAsGroup: - description: |- - The GID to run the entrypoint of the container process. - Uses runtime default if unset. - format: int64 - type: integer - runAsNonRoot: - description: Indicates that the container must run as a non-root - user. - type: boolean - runAsUser: - description: |- - The UID to run the entrypoint of the container process. - Defaults to user specified in image metadata if unspecified. 
- format: int64 - type: integer - seLinuxOptions: - description: The SELinux context to be applied to all containers. - properties: - level: - description: Level is SELinux level label that applies - to the container. - type: string - role: - description: Role is a SELinux role label that applies - to the container. - type: string - type: - description: Type is a SELinux type label that applies - to the container. - type: string - user: - description: User is a SELinux user label that applies - to the container. - type: string - type: object - seccompProfile: - description: |- - The seccomp options to use by the containers in this pod. - Note that this field cannot be set when spec.os. - properties: - localhostProfile: - description: localhostProfile indicates a profile defined - in a file on the node should be used. - type: string - type: - description: type indicates which kind of seccomp profile - will be applied. - type: string - required: - - type - type: object - supplementalGroups: - description: |- - A list of groups applied to the first process run in each container, in addition - to the container's primary GID, the fsG - items: - format: int64 - type: integer - type: array - x-kubernetes-list-type: atomic - sysctls: - description: Sysctls hold a list of namespaced sysctls used - for the pod. - items: - description: Sysctl defines a kernel parameter to be set - properties: - name: - description: Name of a property to set - type: string - value: - description: Value of a property to set - type: string - required: - - name - - value - type: object - type: array - x-kubernetes-list-type: atomic - windowsOptions: - description: The Windows specific settings applied to all - containers. 
+ local: + description: LocalRegistryConfig configures the registry service properties: - gmsaCredentialSpec: + persistence: + description: RegistryPersistence configures the persistence + settings for the registry service + properties: + file: + description: RegistryFilePersistence configures the + file-based persistence for the registry service + properties: + cache_mode: + description: |- + CacheMode defines the registry cache update strategy. + Allowed values are "sync" and "thread". + enum: + - none + - sync + - thread + type: string + cache_ttl_seconds: + description: CacheTTLSeconds defines the TTL (in + seconds) for the registry cache. + format: int32 + minimum: 0 + type: integer + path: + type: string + pvc: + description: PvcConfig defines the settings for + a persistent file store based on PVCs. + properties: + create: + description: Settings for creating a new PVC + properties: + accessModes: + description: AccessModes k8s persistent + volume access modes. Defaults to ["ReadWriteOnce"]. + items: + type: string + type: array + resources: + description: Resources describes the storage + resource requirements for a volume. + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the + minimum amount of compute resources + required. 
+ type: object + type: object + storageClassName: + description: StorageClassName is the name + of an existing StorageClass to which + this persistent volume belongs. + type: string + type: object + x-kubernetes-validations: + - message: PvcCreate is immutable + rule: self == oldSelf + mountPath: + description: |- + MountPath within the container at which the volume should be mounted. + Must start by "/" and cannot contain ':'. + type: string + ref: + description: Reference to an existing field + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + required: + - mountPath + type: object + x-kubernetes-validations: + - message: One selection is required between ref + and create. + rule: '[has(self.ref), has(self.create)].exists_one(c, + c)' + - message: Mount path must start with '/' and + must not contain ':' + rule: self.mountPath.matches('^/[^:]*$') + s3_additional_kwargs: + additionalProperties: + type: string + type: object + type: object + x-kubernetes-validations: + - message: Registry files must use absolute paths + or be S3 ('s3://') or GS ('gs://') object store + URIs. + rule: '(!has(self.pvc) && has(self.path)) ? (self.path.startsWith(''/'') + || self.path.startsWith(''s3://'') || self.path.startsWith(''gs://'')) + : true' + - message: PVC path must be a file name only, with + no slashes. + rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') + : true' + - message: PVC persistence does not support S3 or + GS object store URIs. + rule: '(has(self.pvc) && has(self.path)) ? !(self.path.startsWith(''s3://'') + || self.path.startsWith(''gs://'')) : true' + - message: Additional S3 settings are available only + for S3 object store URIs. + rule: '(has(self.s3_additional_kwargs) && has(self.path)) + ? 
self.path.startsWith(''s3://'') : true' + store: + description: RegistryDBStorePersistence configures + the DB store persistence for the registry service + properties: + secretKeyName: + description: By default, the selected store "type" + is used as the SecretKeyName + type: string + secretRef: + description: Data store parameters should be placed + as-is from the "feature_store.yaml" under the + secret key. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: + description: Type of the persistence type you + want to use. + enum: + - sql + - snowflake.registry + type: string + required: + - secretRef + - type + type: object + type: object + x-kubernetes-validations: + - message: One selection required between file or store. + rule: '[has(self.file), has(self.store)].exists_one(c, + c)' + server: + description: Creates a registry server container + properties: + env: + items: + description: EnvVar represents an environment variable + present in a Container. + properties: + name: + description: Name of the environment variable. + Must be a C_IDENTIFIER. + type: string + value: + description: |- + Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any + type: string + valueFrom: + description: Source for the environment variable's + value. Cannot be used if value is not empty. + properties: + configMapKeyRef: + description: Selects a key of a ConfigMap. + properties: + key: + description: The key to select. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. 
+ type: string + optional: + description: Specify whether the ConfigMap + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + fieldRef: + description: 'Selects a field of the pod: + supports metadata.name, metadata.namespace, + `metadata.labels['''']`, `metadata.' + properties: + apiVersion: + description: Version of the schema the + FieldPath is written in terms of, + defaults to "v1". + type: string + fieldPath: + description: Path of the field to select + in the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits. + properties: + containerName: + description: 'Container name: required + for volumes, optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format + of the exposed resources, defaults + to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to + select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret in + the pod's namespace + properties: + key: + description: The key of the secret to + select from. Must be a valid secret + key. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. 
+ type: string + optional: + description: Specify whether the Secret + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + type: object + required: + - name + type: object + type: array + envFrom: + items: + description: EnvFromSource represents the source + of a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap + must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend + to each key in the ConfigMap. Must be a C_IDENTIFIER. + type: string + secretRef: + description: The Secret to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret + must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + type: object + type: array + grpc: + description: Enable gRPC registry server. Defaults + to true if unset. + type: boolean + image: + type: string + imagePullPolicy: + description: PullPolicy describes a policy for if/when + to pull a container image + type: string + logLevel: + description: |- + LogLevel sets the logging level for the server + Allowed values: "debug", "info", "warning", "error", "critical". + enum: + - debug + - info + - warning + - error + - critical + type: string + metrics: + description: Metrics exposes Prometheus-compatible + metrics for the Feast server when enabled. 
+ type: boolean + nodeSelector: + additionalProperties: + type: string + type: object + resources: + description: ResourceRequirements describes the compute + resource requirements. + properties: + claims: + description: |- + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. + items: + description: ResourceClaim references one entry + in PodSpec.ResourceClaims. + properties: + name: + description: |- + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. + type: string + required: + - name + type: object + type: array + x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum amount + of compute resources required. + type: object + type: object + restAPI: + description: Enable REST API registry server. + type: boolean + tls: + description: TlsConfigs configures server TLS for + a feast service. + properties: + disable: + description: will disable TLS for the feast service. + useful in an openshift cluster, for example, + where TLS is configured by default + type: boolean + secretKeyNames: + description: SecretKeyNames defines the secret + key names for the TLS key and cert. 
+ properties: + tlsCrt: + description: defaults to "tls.crt" + type: string + tlsKey: + description: defaults to "tls.key" + type: string + type: object + secretRef: + description: references the local k8s secret where + the TLS key and cert reside + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: object + x-kubernetes-validations: + - message: '`secretRef` required if `disable` is false.' + rule: '(!has(self.disable) || !self.disable) ? has(self.secretRef) + : true' + volumeMounts: + description: VolumeMounts defines the list of volumes + that should be mounted into the feast container. + items: + description: VolumeMount describes a mounting of + a Volume within a container. + properties: + mountPath: + description: |- + Path within the container at which the volume should be mounted. Must + not contain ':'. + type: string + mountPropagation: + description: |- + mountPropagation determines how mounts are propagated from the host + to container and the other way around. + type: string + name: + description: This must match the Name of a Volume. + type: string + readOnly: + description: |- + Mounted read-only if true, read-write otherwise (false or unspecified). + Defaults to false. + type: boolean + recursiveReadOnly: + description: |- + RecursiveReadOnly specifies whether read-only mounts should be handled + recursively. + type: string + subPath: + description: |- + Path within the volume from which the container's volume should be mounted. + Defaults to "" (volume's root). + type: string + subPathExpr: + description: Expanded path within the volume + from which the container's volume should be + mounted. 
+ type: string + required: + - mountPath + - name + type: object + type: array + workerConfigs: + description: WorkerConfigs defines the worker configuration + for the Feast server. + properties: + keepAliveTimeout: + description: |- + KeepAliveTimeout is the timeout for keep-alive connections in seconds. + Defaults to 30. + format: int32 + minimum: 1 + type: integer + maxRequests: + description: |- + MaxRequests is the maximum number of requests a worker will process before restarting. + This helps prevent memory leaks. + format: int32 + minimum: 0 + type: integer + maxRequestsJitter: + description: |- + MaxRequestsJitter is the maximum jitter to add to max-requests to prevent + thundering herd effect on worker restart. + format: int32 + minimum: 0 + type: integer + registryTTLSeconds: + description: RegistryTTLSeconds is the number + of seconds after which the registry is refreshed. + format: int32 + minimum: 0 + type: integer + workerConnections: + description: |- + WorkerConnections is the maximum number of simultaneous clients per worker process. + Defaults to 1000. + format: int32 + minimum: 1 + type: integer + workers: + description: Workers is the number of worker processes. + Use -1 to auto-calculate based on CPU cores + (2 * CPU + 1). + format: int32 + minimum: -1 + type: integer + type: object + type: object + x-kubernetes-validations: + - message: At least one of restAPI or grpc must be true + rule: self.restAPI == true || self.grpc == true || !has(self.grpc) + type: object + remote: + description: RemoteRegistryConfig points to a remote feast + registry server. + properties: + feastRef: + description: Reference to an existing `FeatureStore` CR + in the same k8s cluster. + properties: + name: + description: Name of the FeatureStore + type: string + namespace: + description: Namespace of the FeatureStore + type: string + required: + - name + type: object + hostname: + description: Host address of the remote registry service + - :, e.g. 
`registry..svc.cluster.local:80` + type: string + tls: + description: TlsRemoteRegistryConfigs configures client + TLS for a remote feast registry. + properties: + certName: + description: defines the configmap key name for the + client TLS cert. + type: string + configMapRef: + description: references the local k8s configmap where + the TLS cert resides + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + required: + - certName + - configMapRef + type: object + type: object + x-kubernetes-validations: + - message: One selection required. + rule: '[has(self.hostname), has(self.feastRef)].exists_one(c, + c)' + type: object + x-kubernetes-validations: + - message: One selection required. + rule: '[has(self.local), has(self.remote)].exists_one(c, c)' + scaling: + description: Scaling configures horizontal scaling for the FeatureStore + deployment (e.g. HPA autoscaling). + properties: + autoscaling: + description: |- + Autoscaling configures a HorizontalPodAutoscaler for the FeatureStore deployment. + Mutually exclusive with spec.replicas. + properties: + behavior: + description: Behavior configures the scaling behavior + of the target. + properties: + scaleDown: + description: scaleDown is scaling policy for scaling + Down. + properties: + policies: + description: policies is a list of potential scaling + polices which can be used during scaling. + items: + description: HPAScalingPolicy is a single policy + which must hold true for a specified past + interval. + properties: + periodSeconds: + description: periodSeconds specifies the + window of time for which the policy should + hold true. + format: int32 + type: integer + type: + description: type is used to specify the + scaling policy. 
+ type: string + value: + description: |- + value contains the amount of change which is permitted by the policy. + It must be greater than zero + format: int32 + type: integer + required: + - periodSeconds + - type + - value + type: object + type: array + x-kubernetes-list-type: atomic + selectPolicy: + description: |- + selectPolicy is used to specify which policy should be used. + If not set, the default value Max is used. + type: string + stabilizationWindowSeconds: + description: |- + stabilizationWindowSeconds is the number of seconds for which past recommendations should be + considered while scaling up + format: int32 + type: integer + type: object + scaleUp: + description: scaleUp is scaling policy for scaling + Up. + properties: + policies: + description: policies is a list of potential scaling + polices which can be used during scaling. + items: + description: HPAScalingPolicy is a single policy + which must hold true for a specified past + interval. + properties: + periodSeconds: + description: periodSeconds specifies the + window of time for which the policy should + hold true. + format: int32 + type: integer + type: + description: type is used to specify the + scaling policy. + type: string + value: + description: |- + value contains the amount of change which is permitted by the policy. + It must be greater than zero + format: int32 + type: integer + required: + - periodSeconds + - type + - value + type: object + type: array + x-kubernetes-list-type: atomic + selectPolicy: + description: |- + selectPolicy is used to specify which policy should be used. + If not set, the default value Max is used. + type: string + stabilizationWindowSeconds: + description: |- + stabilizationWindowSeconds is the number of seconds for which past recommendations should be + considered while scaling up + format: int32 + type: integer + type: object + type: object + maxReplicas: + description: MaxReplicas is the upper limit for the number + of replicas. Required. 
+ format: int32 + minimum: 1 + type: integer + metrics: + description: Metrics contains the specifications for which + to use to calculate the desired replica count. + items: + description: |- + MetricSpec specifies how to scale based on a single metric + (only `type` and one other matching field should be set at on + properties: + containerResource: + description: |- + containerResource refers to a resource metric (such as those specified in + requests and limits) known to Kubernetes descr + properties: + container: + description: container is the name of the container + in the pods of the scaling target + type: string + name: + description: name is the name of the resource + in question. + type: string + target: + description: target specifies the target value + for the given metric + properties: + averageUtilization: + description: "averageUtilization is the + target value of the average of the\nresource + metric across all relevant pods, represented + as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether the + metric type is Utilization, Value, or + AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value of + the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - container + - name + - target + type: object + external: + description: |- + external refers to a global metric that is not associated + with any Kubernetes object. 
+ properties: + metric: + description: metric identifies the target metric + by name and selector + properties: + name: + description: name is the name of the given + metric + type: string + selector: + description: "selector is the string-encoded + form of a standard kubernetes label selector + for the given metric\nWhen set, it is + passed " + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. 
+ type: object + type: object + x-kubernetes-map-type: atomic + required: + - name + type: object + target: + description: target specifies the target value + for the given metric + properties: + averageUtilization: + description: "averageUtilization is the + target value of the average of the\nresource + metric across all relevant pods, represented + as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether the + metric type is Utilization, Value, or + AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value of + the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - metric + - target + type: object + object: + description: |- + object refers to a metric describing a single kubernetes object + (for example, hits-per-second on an Ingress object). + properties: + describedObject: + description: describedObject specifies the descriptions + of a object,such as kind,name apiVersion + properties: + apiVersion: + description: apiVersion is the API version + of the referent + type: string + kind: + description: 'kind is the kind of the referent; + More info: https://git.k8s.' + type: string + name: + description: 'name is the name of the referent; + More info: https://kubernetes.' 
+ type: string + required: + - kind + - name + type: object + metric: + description: metric identifies the target metric + by name and selector + properties: + name: + description: name is the name of the given + metric + type: string + selector: + description: "selector is the string-encoded + form of a standard kubernetes label selector + for the given metric\nWhen set, it is + passed " + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. 
+ type: object + type: object + x-kubernetes-map-type: atomic + required: + - name + type: object + target: + description: target specifies the target value + for the given metric + properties: + averageUtilization: + description: "averageUtilization is the + target value of the average of the\nresource + metric across all relevant pods, represented + as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether the + metric type is Utilization, Value, or + AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value of + the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - describedObject + - metric + - target + type: object + pods: + description: |- + pods refers to a metric describing each pod in the current scale target + (for example, transactions-processed-per-second) + properties: + metric: + description: metric identifies the target metric + by name and selector + properties: + name: + description: name is the name of the given + metric + type: string + selector: + description: "selector is the string-encoded + form of a standard kubernetes label selector + for the given metric\nWhen set, it is + passed " + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. 
+ items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + required: + - name + type: object + target: + description: target specifies the target value + for the given metric + properties: + averageUtilization: + description: "averageUtilization is the + target value of the average of the\nresource + metric across all relevant pods, represented + as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether the + metric type is Utilization, Value, or + AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value of + the metric (as a quantity). 
+ pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - metric + - target + type: object + resource: + description: |- + resource refers to a resource metric (such as those specified in + requests and limits) known to Kubernetes describing eac + properties: + name: + description: name is the name of the resource + in question. + type: string + target: + description: target specifies the target value + for the given metric + properties: + averageUtilization: + description: "averageUtilization is the + target value of the average of the\nresource + metric across all relevant pods, represented + as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether the + metric type is Utilization, Value, or + AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value of + the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - name + - target + type: object + type: + description: type is the type of metric source. + type: string + required: + - type + type: object + type: array + minReplicas: + description: MinReplicas is the lower limit for the number + of replicas. Defaults to 1. 
+ format: int32 + minimum: 1 + type: integer + required: + - maxReplicas + type: object + type: object + securityContext: + description: PodSecurityContext holds pod-level security attributes + and common container settings. + properties: + appArmorProfile: + description: appArmorProfile is the AppArmor options to use + by the containers in this pod. + properties: + localhostProfile: + description: localhostProfile indicates a profile loaded + on the node that should be used. + type: string + type: + description: type indicates which kind of AppArmor profile + will be applied. + type: string + required: + - type + type: object + fsGroup: + description: A special supplemental group that applies to + all containers in a pod. + format: int64 + type: integer + fsGroupChangePolicy: + description: |- + fsGroupChangePolicy defines behavior of changing ownership and permission of the volume + before being exposed inside Pod. + type: string + runAsGroup: + description: |- + The GID to run the entrypoint of the container process. + Uses runtime default if unset. + format: int64 + type: integer + runAsNonRoot: + description: Indicates that the container must run as a non-root + user. + type: boolean + runAsUser: + description: |- + The UID to run the entrypoint of the container process. + Defaults to user specified in image metadata if unspecified. + format: int64 + type: integer + seLinuxOptions: + description: The SELinux context to be applied to all containers. + properties: + level: + description: Level is SELinux level label that applies + to the container. + type: string + role: + description: Role is a SELinux role label that applies + to the container. + type: string + type: + description: Type is a SELinux type label that applies + to the container. + type: string + user: + description: User is a SELinux user label that applies + to the container. 
+ type: string + type: object + seccompProfile: + description: |- + The seccomp options to use by the containers in this pod. + Note that this field cannot be set when spec.os. + properties: + localhostProfile: + description: localhostProfile indicates a profile defined + in a file on the node should be used. + type: string + type: + description: type indicates which kind of seccomp profile + will be applied. + type: string + required: + - type + type: object + supplementalGroups: + description: |- + A list of groups applied to the first process run in each container, in addition + to the container's primary GID, the fsG + items: + format: int64 + type: integer + type: array + x-kubernetes-list-type: atomic + sysctls: + description: Sysctls hold a list of namespaced sysctls used + for the pod. + items: + description: Sysctl defines a kernel parameter to be set + properties: + name: + description: Name of a property to set + type: string + value: + description: Value of a property to set + type: string + required: + - name + - value + type: object + type: array + x-kubernetes-list-type: atomic + windowsOptions: + description: The Windows specific settings applied to all + containers. + properties: + gmsaCredentialSpec: description: |- GMSACredentialSpec is where the GMSA admission webhook (https://github. @@ -2484,6 +3831,96 @@ spec: type: string type: object type: object + topologySpreadConstraints: + description: TopologySpreadConstraints defines how pods are spread + across topology domains. + items: + description: TopologySpreadConstraint specifies how to spread + matching pods among the given topology. + properties: + labelSelector: + description: LabelSelector is used to find matching pods. + properties: + matchExpressions: + description: matchExpressions is a list of label selector + requirements. The requirements are ANDed. 
+ items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that the selector + applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select the pods over which + spreading will be calculated. + items: + type: string + type: array + x-kubernetes-list-type: atomic + maxSkew: + description: MaxSkew describes the degree to which pods + may be unevenly distributed. + format: int32 + type: integer + minDomains: + description: MinDomains indicates a minimum number of eligible + domains. + format: int32 + type: integer + nodeAffinityPolicy: + description: |- + NodeAffinityPolicy indicates how we will treat Pod's nodeAffinity/nodeSelector + when calculating pod topology spread skew + type: string + nodeTaintsPolicy: + description: |- + NodeTaintsPolicy indicates how we will treat node taints when calculating + pod topology spread skew. + type: string + topologyKey: + description: TopologyKey is the key of node labels. + type: string + whenUnsatisfiable: + description: |- + WhenUnsatisfiable indicates how to deal with a pod if it doesn't satisfy + the spread constraint. 
+ type: string + required: + - maxSkew + - topologyKey + - whenUnsatisfiable + type: object + type: array ui: description: Creates a UI server container properties: @@ -4257,7 +5694,37 @@ spec: type: object required: - feastProject + - replicas type: object + x-kubernetes-validations: + - message: replicas > 1 and services.scaling.autoscaling are mutually + exclusive. + rule: self.replicas <= 1 || !has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling) + - message: Scaling requires DB-backed persistence for the online store. + Configure services.onlineStore.persistence.store when using replicas + > 1 or autoscaling. + rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (has(self.services) + && has(self.services.onlineStore) && has(self.services.onlineStore.persistence) + && has(self.services.onlineStore.persistence.store)) + - message: Scaling requires DB-backed persistence for the offline store. + Configure services.offlineStore.persistence.store when using replicas + > 1 or autoscaling. + rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (!has(self.services) + || !has(self.services.offlineStore) || (has(self.services.offlineStore.persistence) + && has(self.services.offlineStore.persistence.store))) + - message: Scaling requires DB-backed or remote registry. Configure registry.local.persistence.store + or use a remote registry when using replicas > 1 or autoscaling. S3/GCS-backed + registry is also allowed. 
+ rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (has(self.services) + && has(self.services.registry) && (has(self.services.registry.remote) + || (has(self.services.registry.local) && has(self.services.registry.local.persistence) + && (has(self.services.registry.local.persistence.store) || (has(self.services.registry.local.persistence.file) + && has(self.services.registry.local.persistence.file.path) && (self.services.registry.local.persistence.file.path.startsWith('s3://') + || self.services.registry.local.persistence.file.path.startsWith('gs://'))))))) status: description: FeatureStoreStatus defines the observed state of FeatureStore properties: @@ -4280,79 +5747,474 @@ spec: items: type: string type: array - type: object - oidc: - description: |- - OidcAuthz defines the authorization settings for deployments using an Open ID Connect identity provider. - https://auth0. - properties: - secretRef: - description: |- - LocalObjectReference contains enough information to let you locate the - referenced object inside the same namespace. + type: object + oidc: + description: |- + OidcAuthz defines the authorization settings for deployments using an Open ID Connect identity provider. + https://auth0. + properties: + secretRef: + description: |- + LocalObjectReference contains enough information to let you locate the + referenced object inside the same namespace. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + required: + - secretRef + type: object + type: object + x-kubernetes-validations: + - message: One selection required between kubernetes or oidc. 
+ rule: '[has(self.kubernetes), has(self.oidc)].exists_one(c, + c)' + batchEngine: + description: BatchEngineConfig defines the batch compute engine + configuration. + properties: + configMapKey: + description: Key name in the ConfigMap. Defaults to "config" + if not specified. + type: string + configMapRef: + description: Reference to a ConfigMap containing the batch + engine configuration. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: object + cronJob: + description: FeastCronJob defines a CronJob to execute against + a Feature Store deployment. + properties: + annotations: + additionalProperties: + type: string + description: Annotations to be added to the CronJob metadata. + type: object + concurrencyPolicy: + description: Specifies how to treat concurrent executions + of a Job. + type: string + containerConfigs: + description: CronJobContainerConfigs k8s container settings + for the CronJob + properties: + commands: + description: Array of commands to be executed (in order) + against a Feature Store deployment. + items: + type: string + type: array + env: + items: + description: EnvVar represents an environment variable + present in a Container. + properties: + name: + description: Name of the environment variable. Must + be a C_IDENTIFIER. + type: string + value: + description: |- + Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any + type: string + valueFrom: + description: Source for the environment variable's + value. Cannot be used if value is not empty. + properties: + configMapKeyRef: + description: Selects a key of a ConfigMap. + properties: + key: + description: The key to select. + type: string + name: + default: "" + description: |- + Name of the referent. 
+ This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + fieldRef: + description: 'Selects a field of the pod: supports + metadata.name, metadata.namespace, `metadata.labels['''']`, + `metadata.' + properties: + apiVersion: + description: Version of the schema the FieldPath + is written in terms of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to select + in the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits. + properties: + containerName: + description: 'Container name: required for + volumes, optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format + of the exposed resources, defaults to + "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret in the + pod's namespace + properties: + key: + description: The key of the secret to select + from. Must be a valid secret key. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. 
+ type: string + optional: + description: Specify whether the Secret + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + type: object + required: + - name + type: object + type: array + envFrom: + items: + description: EnvFromSource represents the source of + a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend to + each key in the ConfigMap. Must be a C_IDENTIFIER. + type: string + secretRef: + description: The Secret to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + type: object + type: array + image: + type: string + imagePullPolicy: + description: PullPolicy describes a policy for if/when + to pull a container image + type: string + nodeSelector: + additionalProperties: + type: string + type: object + resources: + description: ResourceRequirements describes the compute + resource requirements. properties: - name: - default: "" + claims: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. + items: + description: ResourceClaim references one entry + in PodSpec.ResourceClaims. 
+ properties: + name: + description: |- + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. + type: string + required: + - name + type: object + type: array + x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum amount + of compute resources required. + type: object type: object - x-kubernetes-map-type: atomic - required: - - secretRef type: object - type: object - x-kubernetes-validations: - - message: One selection required between kubernetes or oidc. - rule: '[has(self.kubernetes), has(self.oidc)].exists_one(c, - c)' - batchEngine: - description: BatchEngineConfig defines the batch compute engine - configuration. - properties: - configMapKey: - description: Key name in the ConfigMap. Defaults to "config" - if not specified. - type: string - configMapRef: - description: Reference to a ConfigMap containing the batch - engine configuration. + failedJobsHistoryLimit: + description: The number of failed finished jobs to retain. + Value must be non-negative integer. + format: int32 + type: integer + jobSpec: + description: Specification of the desired behavior of a job. properties: - name: - default: "" + activeDeadlineSeconds: description: |- - Name of the referent. 
- This field is effectively required, but due to backwards compatibility is - allowed to be empty. + Specifies the duration in seconds relative to the startTime that the job + may be continuously active before the system tr + format: int64 + type: integer + backoffLimit: + description: Specifies the number of retries before marking + this job failed. + format: int32 + type: integer + backoffLimitPerIndex: + description: |- + Specifies the limit for the number of retries within an + index before marking this index as failed. + format: int32 + type: integer + completionMode: + description: |- + completionMode specifies how Pod completions are tracked. It can be + `NonIndexed` (default) or `Indexed`. + type: string + completions: + description: |- + Specifies the desired number of successfully finished pods the + job should be run with. + format: int32 + type: integer + maxFailedIndexes: + description: |- + Specifies the maximal number of failed indexes before marking the Job as + failed, when backoffLimitPerIndex is set. + format: int32 + type: integer + parallelism: + description: |- + Specifies the maximum desired number of pods the job should + run at any given time. + format: int32 + type: integer + podFailurePolicy: + description: Specifies the policy of handling failed pods. + properties: + rules: + description: A list of pod failure policy rules. The + rules are evaluated in order. + items: + description: PodFailurePolicyRule describes how + a pod failure is handled when the requirements + are met. + properties: + action: + description: Specifies the action taken on a + pod failure when the requirements are satisfied. + type: string + onExitCodes: + description: Represents the requirement on the + container exit codes. + properties: + containerName: + description: |- + Restricts the check for exit codes to the container with the + specified name. 
+ type: string + operator: + description: |- + Represents the relationship between the container exit code(s) and the + specified values. + type: string + values: + description: Specifies the set of values. + items: + format: int32 + type: integer + type: array + x-kubernetes-list-type: set + required: + - operator + - values + type: object + onPodConditions: + description: |- + Represents the requirement on the pod conditions. The requirement is represented + as a list of pod condition patterns. + items: + description: |- + PodFailurePolicyOnPodConditionsPattern describes a pattern for matching + an actual pod condition type. + properties: + status: + description: Specifies the required Pod + condition status. + type: string + type: + description: Specifies the required Pod + condition type. + type: string + required: + - status + - type + type: object + type: array + x-kubernetes-list-type: atomic + required: + - action + type: object + type: array + x-kubernetes-list-type: atomic + required: + - rules + type: object + podReplacementPolicy: + description: podReplacementPolicy specifies when to create + replacement Pods. type: string + podTemplateAnnotations: + additionalProperties: + type: string + description: |- + PodTemplateAnnotations are annotations to be applied to the CronJob's PodTemplate + metadata. + type: object + suspend: + description: suspend specifies whether the Job controller + should create Pods or not. + type: boolean + ttlSecondsAfterFinished: + description: |- + ttlSecondsAfterFinished limits the lifetime of a Job that has finished + execution (either Complete or Failed). + format: int32 + type: integer type: object - x-kubernetes-map-type: atomic + schedule: + description: The schedule in Cron format, see https://en.wikipedia.org/wiki/Cron. + type: string + startingDeadlineSeconds: + description: |- + Optional deadline in seconds for starting the job if it misses scheduled + time for any reason. 
+ format: int64 + type: integer + successfulJobsHistoryLimit: + description: The number of successful finished jobs to retain. + Value must be non-negative integer. + format: int32 + type: integer + suspend: + description: |- + This flag tells the controller to suspend subsequent executions, it does + not apply to already started executions. + type: boolean + timeZone: + description: The time zone name for the given schedule, see + https://en.wikipedia.org/wiki/List_of_tz_database_time_zones. + type: string type: object - cronJob: - description: FeastCronJob defines a CronJob to execute against - a Feature Store deployment. + feastProject: + description: FeastProject is the Feast project id. + pattern: ^[A-Za-z0-9][A-Za-z0-9_-]*$ + type: string + feastProjectDir: + description: FeastProjectDir defines how to create the feast project + directory. properties: - annotations: - additionalProperties: - type: string - description: Annotations to be added to the CronJob metadata. - type: object - concurrencyPolicy: - description: Specifies how to treat concurrent executions - of a Job. - type: string - containerConfigs: - description: CronJobContainerConfigs k8s container settings - for the CronJob + git: + description: GitCloneOptions describes how a clone should + be performed. properties: - commands: - description: Array of commands to be executed (in order) - against a Feature Store deployment. - items: + configs: + additionalProperties: type: string - type: array + description: |- + Configs passed to git via `-c` + e.g. http.sslVerify: 'false' + OR 'url."https://api:\${TOKEN}@github.com/". 
+ type: object env: items: description: EnvVar represents an environment variable @@ -4435,520 +6297,884 @@ spec: - resource type: object x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret in the - pod's namespace + secretKeyRef: + description: Selects a key of a secret in the + pod's namespace + properties: + key: + description: The key of the secret to select + from. Must be a valid secret key. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + type: object + required: + - name + type: object + type: array + envFrom: + items: + description: EnvFromSource represents the source of + a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend to + each key in the ConfigMap. Must be a C_IDENTIFIER. + type: string + secretRef: + description: The Secret to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. 
+ type: string + optional: + description: Specify whether the Secret must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + type: object + type: array + featureRepoPath: + description: FeatureRepoPath is the relative path to the + feature repo subdirectory. Default is 'feature_repo'. + type: string + ref: + description: Reference to a branch / tag / commit + type: string + url: + description: The repository URL to clone from. + type: string + required: + - url + type: object + x-kubernetes-validations: + - message: RepoPath must be a file name only, with no slashes. + rule: 'has(self.featureRepoPath) ? !self.featureRepoPath.startsWith(''/'') + : true' + init: + description: FeastInitOptions defines how to run a `feast + init`. + properties: + minimal: + type: boolean + template: + description: Template for the created project + enum: + - local + - gcp + - aws + - snowflake + - spark + - postgres + - hbase + - cassandra + - hazelcast + - couchbase + - clickhouse + type: string + type: object + type: object + x-kubernetes-validations: + - message: One selection required between init or git. + rule: '[has(self.git), has(self.init)].exists_one(c, c)' + replicas: + default: 1 + description: |- + Replicas is the desired number of pod replicas. Used by the scale sub-resource. + Mutually exclusive with services. + format: int32 + minimum: 1 + type: integer + services: + description: FeatureStoreServices defines the desired feast services. + An ephemeral onlineStore feature server is deployed by default. + properties: + affinity: + description: Affinity defines the pod scheduling constraints + for the FeatureStore deployment. + properties: + nodeAffinity: + description: Describes node affinity scheduling rules + for the pod. 
+ properties: + preferredDuringSchedulingIgnoredDuringExecution: + description: |- + The scheduler will prefer to schedule pods to nodes that satisfy + the affinity expressions specified by this field, but i + items: + description: |- + An empty preferred scheduling term matches all objects with implicit weight 0 + (i.e. it's a no-op). + properties: + preference: + description: A node selector term, associated + with the corresponding weight. + properties: + matchExpressions: + description: A list of node selector requirements + by node's labels. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the + selector applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchFields: + description: A list of node selector requirements + by node's fields. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the + selector applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + type: object + x-kubernetes-map-type: atomic + weight: + description: Weight associated with matching + the corresponding nodeSelectorTerm, in the + range 1-100. + format: int32 + type: integer + required: + - preference + - weight + type: object + type: array + x-kubernetes-list-type: atomic + requiredDuringSchedulingIgnoredDuringExecution: + description: "If the affinity requirements specified + by this field are not met at\nscheduling time, the + pod will not be scheduled onto " + properties: + nodeSelectorTerms: + description: Required. A list of node selector + terms. The terms are ORed. + items: + description: |- + A null or empty node selector term matches no objects. The requirements of + them are ANDed. + properties: + matchExpressions: + description: A list of node selector requirements + by node's labels. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the + selector applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchFields: + description: A list of node selector requirements + by node's fields. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. 
+ properties: + key: + description: The label key that the + selector applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + type: object + x-kubernetes-map-type: atomic + type: array + x-kubernetes-list-type: atomic + required: + - nodeSelectorTerms + type: object + x-kubernetes-map-type: atomic + type: object + podAffinity: + description: Describes pod affinity scheduling rules (e.g. + co-locate this pod in the same node, zone, etc. as some + other pod(s)). + properties: + preferredDuringSchedulingIgnoredDuringExecution: + description: |- + The scheduler will prefer to schedule pods to nodes that satisfy + the affinity expressions specified by this field, but i + items: + description: The weights of all of the matched WeightedPodAffinityTerm + fields are added per-node to find the most preferred + node(s) + properties: + podAffinityTerm: + description: Required. A pod affinity term, + associated with the corresponding weight. properties: - key: - description: The key of the secret to select - from. Must be a valid secret key. - type: string - name: - default: "" + labelSelector: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. 
+ items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set + of namespaces that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. 
+ type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static + list of namespace names that the term + applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t type: string - optional: - description: Specify whether the Secret - or its key must be defined - type: boolean required: - - key + - topologyKey type: object - x-kubernetes-map-type: atomic - type: object - required: - - name - type: object - type: array - envFrom: - items: - description: EnvFromSource represents the source of - a set of ConfigMaps - properties: - configMapRef: - description: The ConfigMap to select from - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap must - be defined - type: boolean - type: object - x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend to - each key in the ConfigMap. Must be a C_IDENTIFIER. - type: string - secretRef: - description: The Secret to select from - properties: - name: - default: "" + weight: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
- type: string - optional: - description: Specify whether the Secret must - be defined - type: boolean + weight associated with matching the corresponding podAffinityTerm, + in the range 1-100. + format: int32 + type: integer + required: + - podAffinityTerm + - weight type: object - x-kubernetes-map-type: atomic - type: object - type: array - image: - type: string - imagePullPolicy: - description: PullPolicy describes a policy for if/when - to pull a container image - type: string - nodeSelector: - additionalProperties: - type: string - type: object - resources: - description: ResourceRequirements describes the compute - resource requirements. - properties: - claims: - description: |- - Claims lists the names of resources, defined in spec.resourceClaims, - that are used by this container. + type: array + x-kubernetes-list-type: atomic + requiredDuringSchedulingIgnoredDuringExecution: + description: "If the affinity requirements specified + by this field are not met at\nscheduling time, the + pod will not be scheduled onto " items: - description: ResourceClaim references one entry - in PodSpec.ResourceClaims. + description: "Defines a set of pods (namely those + matching the labelSelector\nrelative to the given + namespace(s)) that this pod should " properties: - name: + labelSelector: + description: |- + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. 
+ type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: description: |- - Name must match the name of one entry in pod.spec.resourceClaims of - the Pod where this field is used. + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t type: string required: - - name + - topologyKey type: object type: array - x-kubernetes-list-map-keys: - - name - x-kubernetes-list-type: map - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum amount - of compute resources required. - type: object + x-kubernetes-list-type: atomic type: object - type: object - failedJobsHistoryLimit: - description: The number of failed finished jobs to retain. - Value must be non-negative integer. - format: int32 - type: integer - jobSpec: - description: Specification of the desired behavior of a job. 
- properties: - activeDeadlineSeconds: - description: |- - Specifies the duration in seconds relative to the startTime that the job - may be continuously active before the system tr - format: int64 - type: integer - backoffLimit: - description: Specifies the number of retries before marking - this job failed. - format: int32 - type: integer - backoffLimitPerIndex: - description: |- - Specifies the limit for the number of retries within an - index before marking this index as failed. - format: int32 - type: integer - completionMode: - description: |- - completionMode specifies how Pod completions are tracked. It can be - `NonIndexed` (default) or `Indexed`. - type: string - completions: - description: |- - Specifies the desired number of successfully finished pods the - job should be run with. - format: int32 - type: integer - maxFailedIndexes: - description: |- - Specifies the maximal number of failed indexes before marking the Job as - failed, when backoffLimitPerIndex is set. - format: int32 - type: integer - parallelism: - description: |- - Specifies the maximum desired number of pods the job should - run at any given time. - format: int32 - type: integer - podFailurePolicy: - description: Specifies the policy of handling failed pods. + podAntiAffinity: + description: Describes pod anti-affinity scheduling rules + (e.g. avoid putting this pod in the same node, zone, + etc. properties: - rules: - description: A list of pod failure policy rules. The - rules are evaluated in order. + preferredDuringSchedulingIgnoredDuringExecution: + description: "The scheduler will prefer to schedule + pods to nodes that satisfy\nthe anti-affinity expressions + specified by this field, " items: - description: PodFailurePolicyRule describes how - a pod failure is handled when the requirements - are met. 
+ description: The weights of all of the matched WeightedPodAffinityTerm + fields are added per-node to find the most preferred + node(s) properties: - action: - description: Specifies the action taken on a - pod failure when the requirements are satisfied. - type: string - onExitCodes: - description: Represents the requirement on the - container exit codes. + podAffinityTerm: + description: Required. A pod affinity term, + associated with the corresponding weight. properties: - containerName: + labelSelector: description: |- - Restricts the check for exit codes to the container with the - specified name. - type: string - operator: + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: description: |- - Represents the relationship between the container exit code(s) and the - specified values. - type: string - values: - description: Specifies the set of values. 
+ MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set + of namespaces that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static + list of namespace names that the term + applies to. 
items: - format: int32 - type: integer + type: string type: array - x-kubernetes-list-type: set + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t + type: string required: - - operator - - values + - topologyKey type: object - onPodConditions: + weight: description: |- - Represents the requirement on the pod conditions. The requirement is represented - as a list of pod condition patterns. - items: - description: |- - PodFailurePolicyOnPodConditionsPattern describes a pattern for matching - an actual pod condition type. - properties: - status: - description: Specifies the required Pod - condition status. - type: string - type: - description: Specifies the required Pod - condition type. - type: string - required: - - status - - type - type: object - type: array - x-kubernetes-list-type: atomic + weight associated with matching the corresponding podAffinityTerm, + in the range 1-100. + format: int32 + type: integer required: - - action + - podAffinityTerm + - weight type: object type: array x-kubernetes-list-type: atomic - required: - - rules - type: object - podReplacementPolicy: - description: podReplacementPolicy specifies when to create - replacement Pods. - type: string - podTemplateAnnotations: - additionalProperties: - type: string - description: |- - PodTemplateAnnotations are annotations to be applied to the CronJob's PodTemplate - metadata. - type: object - suspend: - description: suspend specifies whether the Job controller - should create Pods or not. - type: boolean - ttlSecondsAfterFinished: - description: |- - ttlSecondsAfterFinished limits the lifetime of a Job that has finished - execution (either Complete or Failed). - format: int32 - type: integer - type: object - schedule: - description: The schedule in Cron format, see https://en.wikipedia.org/wiki/Cron. 
- type: string - startingDeadlineSeconds: - description: |- - Optional deadline in seconds for starting the job if it misses scheduled - time for any reason. - format: int64 - type: integer - successfulJobsHistoryLimit: - description: The number of successful finished jobs to retain. - Value must be non-negative integer. - format: int32 - type: integer - suspend: - description: |- - This flag tells the controller to suspend subsequent executions, it does - not apply to already started executions. - type: boolean - timeZone: - description: The time zone name for the given schedule, see - https://en.wikipedia.org/wiki/List_of_tz_database_time_zones. - type: string - type: object - feastProject: - description: FeastProject is the Feast project id. - pattern: ^[A-Za-z0-9][A-Za-z0-9_-]*$ - type: string - feastProjectDir: - description: FeastProjectDir defines how to create the feast project - directory. - properties: - git: - description: GitCloneOptions describes how a clone should - be performed. - properties: - configs: - additionalProperties: - type: string - description: |- - Configs passed to git via `-c` - e.g. http.sslVerify: 'false' - OR 'url."https://api:\${TOKEN}@github.com/". - type: object - env: - items: - description: EnvVar represents an environment variable - present in a Container. - properties: - name: - description: Name of the environment variable. Must - be a C_IDENTIFIER. - type: string - value: - description: |- - Variable references $(VAR_NAME) are expanded - using the previously defined environment variables in the container and - any - type: string - valueFrom: - description: Source for the environment variable's - value. Cannot be used if value is not empty. 
+ requiredDuringSchedulingIgnoredDuringExecution: + description: "If the anti-affinity requirements specified + by this field are not met at\nscheduling time, the + pod will not be scheduled " + items: + description: "Defines a set of pods (namely those + matching the labelSelector\nrelative to the given + namespace(s)) that this pod should " properties: - configMapKeyRef: - description: Selects a key of a ConfigMap. - properties: - key: - description: The key to select. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap - or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - fieldRef: - description: 'Selects a field of the pod: supports - metadata.name, metadata.namespace, `metadata.labels['''']`, - `metadata.' - properties: - apiVersion: - description: Version of the schema the FieldPath - is written in terms of, defaults to "v1". - type: string - fieldPath: - description: Path of the field to select - in the specified API version. - type: string - required: - - fieldPath - type: object - x-kubernetes-map-type: atomic - resourceFieldRef: + labelSelector: description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, limits. + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. 
properties: - containerName: - description: 'Container name: required for - volumes, optional for env vars' - type: string - divisor: - anyOf: - - type: integer - - type: string - description: Specifies the output format - of the exposed resources, defaults to - "1" - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - description: 'Required: resource to select' - type: string - required: - - resource + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object type: object x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret in the - pod's namespace - properties: - key: - description: The key of the secret to select - from. Must be a valid secret key. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
- type: string - optional: - description: Specify whether the Secret - or its key must be defined - type: boolean - required: - - key + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. 
+ type: object type: object x-kubernetes-map-type: atomic - type: object - required: - - name - type: object - type: array - envFrom: - items: - description: EnvFromSource represents the source of - a set of ConfigMaps - properties: - configMapRef: - description: The ConfigMap to select from - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap must - be defined - type: boolean - type: object - x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend to - each key in the ConfigMap. Must be a C_IDENTIFIER. - type: string - secretRef: - description: The Secret to select from - properties: - name: - default: "" + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t type: string - optional: - description: Specify whether the Secret must - be defined - type: boolean + required: + - topologyKey type: object - x-kubernetes-map-type: atomic - type: object - type: array - featureRepoPath: - description: FeatureRepoPath is the relative path to the - feature repo subdirectory. Default is 'feature_repo'. - type: string - ref: - description: Reference to a branch / tag / commit - type: string - url: - description: The repository URL to clone from. - type: string - required: - - url - type: object - x-kubernetes-validations: - - message: RepoPath must be a file name only, with no slashes. - rule: 'has(self.featureRepoPath) ? 
!self.featureRepoPath.startsWith(''/'') - : true' - init: - description: FeastInitOptions defines how to run a `feast - init`. - properties: - minimal: - type: boolean - template: - description: Template for the created project - enum: - - local - - gcp - - aws - - snowflake - - spark - - postgres - - hbase - - cassandra - - hazelcast - - ikv - - couchbase - - clickhouse - type: string + type: array + x-kubernetes-list-type: atomic + type: object type: object - type: object - x-kubernetes-validations: - - message: One selection required between init or git. - rule: '[has(self.git), has(self.init)].exists_one(c, c)' - services: - description: FeatureStoreServices defines the desired feast services. - An ephemeral onlineStore feature server is deployed by default. - properties: deploymentStrategy: description: DeploymentStrategy describes how to replace existing pods with new ones. @@ -5618,7 +7844,6 @@ spec: enum: - snowflake.online - redis - - ikv - datastore - dynamodb - bigtable @@ -5633,6 +7858,7 @@ spec: - couchbase.online - milvus - hybrid + - mongodb type: string required: - secretRef @@ -5759,866 +7985,1465 @@ spec: - name type: object type: array - envFrom: - items: - description: EnvFromSource represents the source - of a set of ConfigMaps - properties: - configMapRef: - description: The ConfigMap to select from - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap - must be defined - type: boolean - type: object - x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend - to each key in the ConfigMap. Must be a C_IDENTIFIER. - type: string - secretRef: - description: The Secret to select from - properties: - name: - default: "" - description: |- - Name of the referent. 
- This field is effectively required, but due to backwards compatibility is - allowed to be empty. + envFrom: + items: + description: EnvFromSource represents the source + of a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap + must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend + to each key in the ConfigMap. Must be a C_IDENTIFIER. + type: string + secretRef: + description: The Secret to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret + must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + type: object + type: array + image: + type: string + imagePullPolicy: + description: PullPolicy describes a policy for if/when + to pull a container image + type: string + logLevel: + description: |- + LogLevel sets the logging level for the server + Allowed values: "debug", "info", "warning", "error", "critical". + enum: + - debug + - info + - warning + - error + - critical + type: string + metrics: + description: Metrics exposes Prometheus-compatible + metrics for the Feast server when enabled. + type: boolean + nodeSelector: + additionalProperties: + type: string + type: object + resources: + description: ResourceRequirements describes the compute + resource requirements. + properties: + claims: + description: |- + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. 
+ items: + description: ResourceClaim references one entry + in PodSpec.ResourceClaims. + properties: + name: + description: |- + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. + type: string + required: + - name + type: object + type: array + x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum amount + of compute resources required. + type: object + type: object + tls: + description: TlsConfigs configures server TLS for + a feast service. + properties: + disable: + description: will disable TLS for the feast service. + useful in an openshift cluster, for example, + where TLS is configured by default + type: boolean + secretKeyNames: + description: SecretKeyNames defines the secret + key names for the TLS key and cert. + properties: + tlsCrt: + description: defaults to "tls.crt" + type: string + tlsKey: + description: defaults to "tls.key" + type: string + type: object + secretRef: + description: references the local k8s secret where + the TLS key and cert reside + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. 
+ type: string + type: object + x-kubernetes-map-type: atomic + type: object + x-kubernetes-validations: + - message: '`secretRef` required if `disable` is false.' + rule: '(!has(self.disable) || !self.disable) ? has(self.secretRef) + : true' + volumeMounts: + description: VolumeMounts defines the list of volumes + that should be mounted into the feast container. + items: + description: VolumeMount describes a mounting of + a Volume within a container. + properties: + mountPath: + description: |- + Path within the container at which the volume should be mounted. Must + not contain ':'. + type: string + mountPropagation: + description: |- + mountPropagation determines how mounts are propagated from the host + to container and the other way around. + type: string + name: + description: This must match the Name of a Volume. + type: string + readOnly: + description: |- + Mounted read-only if true, read-write otherwise (false or unspecified). + Defaults to false. + type: boolean + recursiveReadOnly: + description: |- + RecursiveReadOnly specifies whether read-only mounts should be handled + recursively. + type: string + subPath: + description: |- + Path within the volume from which the container's volume should be mounted. + Defaults to "" (volume's root). + type: string + subPathExpr: + description: Expanded path within the volume + from which the container's volume should be + mounted. + type: string + required: + - mountPath + - name + type: object + type: array + workerConfigs: + description: WorkerConfigs defines the worker configuration + for the Feast server. + properties: + keepAliveTimeout: + description: |- + KeepAliveTimeout is the timeout for keep-alive connections in seconds. + Defaults to 30. + format: int32 + minimum: 1 + type: integer + maxRequests: + description: |- + MaxRequests is the maximum number of requests a worker will process before restarting. + This helps prevent memory leaks. 
+ format: int32 + minimum: 0 + type: integer + maxRequestsJitter: + description: |- + MaxRequestsJitter is the maximum jitter to add to max-requests to prevent + thundering herd effect on worker restart. + format: int32 + minimum: 0 + type: integer + registryTTLSeconds: + description: RegistryTTLSeconds is the number + of seconds after which the registry is refreshed. + format: int32 + minimum: 0 + type: integer + workerConnections: + description: |- + WorkerConnections is the maximum number of simultaneous clients per worker process. + Defaults to 1000. + format: int32 + minimum: 1 + type: integer + workers: + description: Workers is the number of worker processes. + Use -1 to auto-calculate based on CPU cores + (2 * CPU + 1). + format: int32 + minimum: -1 + type: integer + type: object + type: object + type: object + podDisruptionBudgets: + description: PodDisruptionBudgets configures a PodDisruptionBudget + for the FeatureStore deployment. + properties: + maxUnavailable: + anyOf: + - type: integer + - type: string + description: MaxUnavailable specifies the maximum number/percentage + of pods that can be unavailable. + x-kubernetes-int-or-string: true + minAvailable: + anyOf: + - type: integer + - type: string + description: MinAvailable specifies the minimum number/percentage + of pods that must remain available. + x-kubernetes-int-or-string: true + type: object + x-kubernetes-validations: + - message: Exactly one of minAvailable or maxUnavailable must + be set. + rule: '[has(self.minAvailable), has(self.maxUnavailable)].exists_one(c, + c)' + registry: + description: Registry configures the registry service. One + selection is required. Local is the default setting. 
+ properties: + local: + description: LocalRegistryConfig configures the registry + service + properties: + persistence: + description: RegistryPersistence configures the persistence + settings for the registry service + properties: + file: + description: RegistryFilePersistence configures + the file-based persistence for the registry + service + properties: + cache_mode: + description: |- + CacheMode defines the registry cache update strategy. + Allowed values are "sync" and "thread". + enum: + - none + - sync + - thread + type: string + cache_ttl_seconds: + description: CacheTTLSeconds defines the TTL + (in seconds) for the registry cache. + format: int32 + minimum: 0 + type: integer + path: + type: string + pvc: + description: PvcConfig defines the settings + for a persistent file store based on PVCs. + properties: + create: + description: Settings for creating a new + PVC + properties: + accessModes: + description: AccessModes k8s persistent + volume access modes. Defaults to + ["ReadWriteOnce"]. + items: + type: string + type: array + resources: + description: Resources describes the + storage resource requirements for + a volume. + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes + the minimum amount of compute + resources required. 
+ type: object + type: object + storageClassName: + description: StorageClassName is the + name of an existing StorageClass + to which this persistent volume + belongs. + type: string + type: object + x-kubernetes-validations: + - message: PvcCreate is immutable + rule: self == oldSelf + mountPath: + description: |- + MountPath within the container at which the volume should be mounted. + Must start by "/" and cannot contain ':'. + type: string + ref: + description: Reference to an existing + field + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + required: + - mountPath + type: object + x-kubernetes-validations: + - message: One selection is required between + ref and create. + rule: '[has(self.ref), has(self.create)].exists_one(c, + c)' + - message: Mount path must start with '/' + and must not contain ':' + rule: self.mountPath.matches('^/[^:]*$') + s3_additional_kwargs: + additionalProperties: type: string - optional: - description: Specify whether the Secret - must be defined - type: boolean - type: object - x-kubernetes-map-type: atomic - type: object - type: array - image: - type: string - imagePullPolicy: - description: PullPolicy describes a policy for if/when - to pull a container image - type: string - logLevel: - description: |- - LogLevel sets the logging level for the server - Allowed values: "debug", "info", "warning", "error", "critical". - enum: - - debug - - info - - warning - - error - - critical - type: string - metrics: - description: Metrics exposes Prometheus-compatible - metrics for the Feast server when enabled. - type: boolean - nodeSelector: - additionalProperties: - type: string + type: object + type: object + x-kubernetes-validations: + - message: Registry files must use absolute paths + or be S3 ('s3://') or GS ('gs://') object + store URIs. 
+ rule: '(!has(self.pvc) && has(self.path)) ? + (self.path.startsWith(''/'') || self.path.startsWith(''s3://'') + || self.path.startsWith(''gs://'')) : true' + - message: PVC path must be a file name only, + with no slashes. + rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') + : true' + - message: PVC persistence does not support S3 + or GS object store URIs. + rule: '(has(self.pvc) && has(self.path)) ? !(self.path.startsWith(''s3://'') + || self.path.startsWith(''gs://'')) : true' + - message: Additional S3 settings are available + only for S3 object store URIs. + rule: '(has(self.s3_additional_kwargs) && has(self.path)) + ? self.path.startsWith(''s3://'') : true' + store: + description: RegistryDBStorePersistence configures + the DB store persistence for the registry service + properties: + secretKeyName: + description: By default, the selected store + "type" is used as the SecretKeyName + type: string + secretRef: + description: Data store parameters should + be placed as-is from the "feature_store.yaml" + under the secret key. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: + description: Type of the persistence type + you want to use. + enum: + - sql + - snowflake.registry + type: string + required: + - secretRef + - type + type: object type: object - resources: - description: ResourceRequirements describes the compute - resource requirements. + x-kubernetes-validations: + - message: One selection required between file or + store. + rule: '[has(self.file), has(self.store)].exists_one(c, + c)' + server: + description: Creates a registry server container properties: - claims: - description: |- - Claims lists the names of resources, defined in spec.resourceClaims, - that are used by this container. 
+ env: items: - description: ResourceClaim references one entry - in PodSpec.ResourceClaims. + description: EnvVar represents an environment + variable present in a Container. properties: name: + description: Name of the environment variable. + Must be a C_IDENTIFIER. + type: string + value: description: |- - Name must match the name of one entry in pod.spec.resourceClaims of - the Pod where this field is used. + Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any + type: string + valueFrom: + description: Source for the environment + variable's value. Cannot be used if value + is not empty. + properties: + configMapKeyRef: + description: Selects a key of a ConfigMap. + properties: + key: + description: The key to select. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the + ConfigMap or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + fieldRef: + description: 'Selects a field of the + pod: supports metadata.name, metadata.namespace, + `metadata.labels['''']`, `metadata.' + properties: + apiVersion: + description: Version of the schema + the FieldPath is written in terms + of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to + select in the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits. 
+ properties: + containerName: + description: 'Container name: required + for volumes, optional for env + vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output + format of the exposed resources, + defaults to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource + to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret + in the pod's namespace + properties: + key: + description: The key of the secret + to select from. Must be a valid + secret key. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the + Secret or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + type: object + required: + - name + type: object + type: array + envFrom: + items: + description: EnvFromSource represents the source + of a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap + must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend + to each key in the ConfigMap. Must be + a C_IDENTIFIER. type: string - required: - - name + secretRef: + description: The Secret to select from + properties: + name: + default: "" + description: |- + Name of the referent. 
+ This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret + must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic type: object type: array - x-kubernetes-list-map-keys: - - name - x-kubernetes-list-type: map - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true + grpc: + description: Enable gRPC registry server. Defaults + to true if unset. + type: boolean + image: + type: string + imagePullPolicy: + description: PullPolicy describes a policy for + if/when to pull a container image + type: string + logLevel: description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum amount - of compute resources required. - type: object - type: object - tls: - description: TlsConfigs configures server TLS for - a feast service. - properties: - disable: - description: will disable TLS for the feast service. - useful in an openshift cluster, for example, - where TLS is configured by default + LogLevel sets the logging level for the server + Allowed values: "debug", "info", "warning", "error", "critical". + enum: + - debug + - info + - warning + - error + - critical + type: string + metrics: + description: Metrics exposes Prometheus-compatible + metrics for the Feast server when enabled. type: boolean - secretKeyNames: - description: SecretKeyNames defines the secret - key names for the TLS key and cert. 
- properties: - tlsCrt: - description: defaults to "tls.crt" - type: string - tlsKey: - description: defaults to "tls.key" - type: string + nodeSelector: + additionalProperties: + type: string type: object - secretRef: - description: references the local k8s secret where - the TLS key and cert reside + resources: + description: ResourceRequirements describes the + compute resource requirements. properties: - name: - default: "" + claims: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - type: object - x-kubernetes-validations: - - message: '`secretRef` required if `disable` is false.' - rule: '(!has(self.disable) || !self.disable) ? has(self.secretRef) - : true' - volumeMounts: - description: VolumeMounts defines the list of volumes - that should be mounted into the feast container. - items: - description: VolumeMount describes a mounting of - a Volume within a container. - properties: - mountPath: - description: |- - Path within the container at which the volume should be mounted. Must - not contain ':'. - type: string - mountPropagation: - description: |- - mountPropagation determines how mounts are propagated from the host - to container and the other way around. - type: string - name: - description: This must match the Name of a Volume. - type: string - readOnly: - description: |- - Mounted read-only if true, read-write otherwise (false or unspecified). - Defaults to false. - type: boolean - recursiveReadOnly: - description: |- - RecursiveReadOnly specifies whether read-only mounts should be handled - recursively. - type: string - subPath: - description: |- - Path within the volume from which the container's volume should be mounted. - Defaults to "" (volume's root). - type: string - subPathExpr: - description: Expanded path within the volume - from which the container's volume should be - mounted. 
- type: string - required: - - mountPath - - name - type: object - type: array - workerConfigs: - description: WorkerConfigs defines the worker configuration - for the Feast server. - properties: - keepAliveTimeout: - description: |- - KeepAliveTimeout is the timeout for keep-alive connections in seconds. - Defaults to 30. - format: int32 - minimum: 1 - type: integer - maxRequests: - description: |- - MaxRequests is the maximum number of requests a worker will process before restarting. - This helps prevent memory leaks. - format: int32 - minimum: 0 - type: integer - maxRequestsJitter: - description: |- - MaxRequestsJitter is the maximum jitter to add to max-requests to prevent - thundering herd effect on worker restart. - format: int32 - minimum: 0 - type: integer - registryTTLSeconds: - description: RegistryTTLSeconds is the number - of seconds after which the registry is refreshed. - format: int32 - minimum: 0 - type: integer - workerConnections: - description: |- - WorkerConnections is the maximum number of simultaneous clients per worker process. - Defaults to 1000. - format: int32 - minimum: 1 - type: integer - workers: - description: Workers is the number of worker processes. - Use -1 to auto-calculate based on CPU cores - (2 * CPU + 1). - format: int32 - minimum: -1 - type: integer - type: object - type: object - type: object - registry: - description: Registry configures the registry service. One - selection is required. Local is the default setting. - properties: - local: - description: LocalRegistryConfig configures the registry - service - properties: - persistence: - description: RegistryPersistence configures the persistence - settings for the registry service - properties: - file: - description: RegistryFilePersistence configures - the file-based persistence for the registry - service - properties: - cache_mode: + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. 
+ items: + description: ResourceClaim references one + entry in PodSpec.ResourceClaims. + properties: + name: + description: |- + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. + type: string + required: + - name + type: object + type: array + x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true description: |- - CacheMode defines the registry cache update strategy. - Allowed values are "sync" and "thread". - enum: - - none - - sync - - thread - type: string - cache_ttl_seconds: - description: CacheTTLSeconds defines the TTL - (in seconds) for the registry cache. - format: int32 - minimum: 0 - type: integer - path: - type: string - pvc: - description: PvcConfig defines the settings - for a persistent file store based on PVCs. + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum + amount of compute resources required. + type: object + type: object + restAPI: + description: Enable REST API registry server. + type: boolean + tls: + description: TlsConfigs configures server TLS + for a feast service. + properties: + disable: + description: will disable TLS for the feast + service. useful in an openshift cluster, + for example, where TLS is configured by + default + type: boolean + secretKeyNames: + description: SecretKeyNames defines the secret + key names for the TLS key and cert. 
properties: - create: - description: Settings for creating a new - PVC - properties: - accessModes: - description: AccessModes k8s persistent - volume access modes. Defaults to - ["ReadWriteOnce"]. - items: - type: string - type: array - resources: - description: Resources describes the - storage resource requirements for - a volume. - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes - the minimum amount of compute - resources required. - type: object - type: object - storageClassName: - description: StorageClassName is the - name of an existing StorageClass - to which this persistent volume - belongs. - type: string - type: object - x-kubernetes-validations: - - message: PvcCreate is immutable - rule: self == oldSelf - mountPath: - description: |- - MountPath within the container at which the volume should be mounted. - Must start by "/" and cannot contain ':'. + tlsCrt: + description: defaults to "tls.crt" + type: string + tlsKey: + description: defaults to "tls.key" type: string - ref: - description: Reference to an existing - field - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
- type: string - type: object - x-kubernetes-map-type: atomic - required: - - mountPath type: object - x-kubernetes-validations: - - message: One selection is required between - ref and create. - rule: '[has(self.ref), has(self.create)].exists_one(c, - c)' - - message: Mount path must start with '/' - and must not contain ':' - rule: self.mountPath.matches('^/[^:]*$') - s3_additional_kwargs: - additionalProperties: - type: string + secretRef: + description: references the local k8s secret + where the TLS key and cert reside + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string type: object + x-kubernetes-map-type: atomic type: object x-kubernetes-validations: - - message: Registry files must use absolute paths - or be S3 ('s3://') or GS ('gs://') object - store URIs. - rule: '(!has(self.pvc) && has(self.path)) ? - (self.path.startsWith(''/'') || self.path.startsWith(''s3://'') - || self.path.startsWith(''gs://'')) : true' - - message: PVC path must be a file name only, - with no slashes. - rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') - : true' - - message: PVC persistence does not support S3 - or GS object store URIs. - rule: '(has(self.pvc) && has(self.path)) ? !(self.path.startsWith(''s3://'') - || self.path.startsWith(''gs://'')) : true' - - message: Additional S3 settings are available - only for S3 object store URIs. - rule: '(has(self.s3_additional_kwargs) && has(self.path)) - ? self.path.startsWith(''s3://'') : true' - store: - description: RegistryDBStorePersistence configures - the DB store persistence for the registry service + - message: '`secretRef` required if `disable` + is false.' + rule: '(!has(self.disable) || !self.disable) + ? has(self.secretRef) : true' + volumeMounts: + description: VolumeMounts defines the list of + volumes that should be mounted into the feast + container. 
+ items: + description: VolumeMount describes a mounting + of a Volume within a container. + properties: + mountPath: + description: |- + Path within the container at which the volume should be mounted. Must + not contain ':'. + type: string + mountPropagation: + description: |- + mountPropagation determines how mounts are propagated from the host + to container and the other way around. + type: string + name: + description: This must match the Name of + a Volume. + type: string + readOnly: + description: |- + Mounted read-only if true, read-write otherwise (false or unspecified). + Defaults to false. + type: boolean + recursiveReadOnly: + description: |- + RecursiveReadOnly specifies whether read-only mounts should be handled + recursively. + type: string + subPath: + description: |- + Path within the volume from which the container's volume should be mounted. + Defaults to "" (volume's root). + type: string + subPathExpr: + description: Expanded path within the volume + from which the container's volume should + be mounted. + type: string + required: + - mountPath + - name + type: object + type: array + workerConfigs: + description: WorkerConfigs defines the worker + configuration for the Feast server. + properties: + keepAliveTimeout: + description: |- + KeepAliveTimeout is the timeout for keep-alive connections in seconds. + Defaults to 30. + format: int32 + minimum: 1 + type: integer + maxRequests: + description: |- + MaxRequests is the maximum number of requests a worker will process before restarting. + This helps prevent memory leaks. + format: int32 + minimum: 0 + type: integer + maxRequestsJitter: + description: |- + MaxRequestsJitter is the maximum jitter to add to max-requests to prevent + thundering herd effect on worker restart. + format: int32 + minimum: 0 + type: integer + registryTTLSeconds: + description: RegistryTTLSeconds is the number + of seconds after which the registry is refreshed. 
+ format: int32 + minimum: 0 + type: integer + workerConnections: + description: |- + WorkerConnections is the maximum number of simultaneous clients per worker process. + Defaults to 1000. + format: int32 + minimum: 1 + type: integer + workers: + description: Workers is the number of worker + processes. Use -1 to auto-calculate based + on CPU cores (2 * CPU + 1). + format: int32 + minimum: -1 + type: integer + type: object + type: object + x-kubernetes-validations: + - message: At least one of restAPI or grpc must be + true + rule: self.restAPI == true || self.grpc == true + || !has(self.grpc) + type: object + remote: + description: RemoteRegistryConfig points to a remote feast + registry server. + properties: + feastRef: + description: Reference to an existing `FeatureStore` + CR in the same k8s cluster. + properties: + name: + description: Name of the FeatureStore + type: string + namespace: + description: Namespace of the FeatureStore + type: string + required: + - name + type: object + hostname: + description: Host address of the remote registry service + - :, e.g. `registry..svc.cluster.local:80` + type: string + tls: + description: TlsRemoteRegistryConfigs configures client + TLS for a remote feast registry. + properties: + certName: + description: defines the configmap key name for + the client TLS cert. + type: string + configMapRef: + description: references the local k8s configmap + where the TLS cert resides properties: - secretKeyName: - description: By default, the selected store - "type" is used as the SecretKeyName - type: string - secretRef: - description: Data store parameters should - be placed as-is from the "feature_store.yaml" - under the secret key. - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
- type: string - type: object - x-kubernetes-map-type: atomic - type: - description: Type of the persistence type - you want to use. - enum: - - sql - - snowflake.registry + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. type: string - required: - - secretRef - - type type: object + x-kubernetes-map-type: atomic + required: + - certName + - configMapRef type: object - x-kubernetes-validations: - - message: One selection required between file or - store. - rule: '[has(self.file), has(self.store)].exists_one(c, - c)' - server: - description: Creates a registry server container + type: object + x-kubernetes-validations: + - message: One selection required. + rule: '[has(self.hostname), has(self.feastRef)].exists_one(c, + c)' + type: object + x-kubernetes-validations: + - message: One selection required. + rule: '[has(self.local), has(self.remote)].exists_one(c, + c)' + scaling: + description: Scaling configures horizontal scaling for the + FeatureStore deployment (e.g. HPA autoscaling). + properties: + autoscaling: + description: |- + Autoscaling configures a HorizontalPodAutoscaler for the FeatureStore deployment. + Mutually exclusive with spec.replicas. + properties: + behavior: + description: Behavior configures the scaling behavior + of the target. properties: - env: - items: - description: EnvVar represents an environment - variable present in a Container. - properties: - name: - description: Name of the environment variable. - Must be a C_IDENTIFIER. - type: string - value: - description: |- - Variable references $(VAR_NAME) are expanded - using the previously defined environment variables in the container and - any - type: string - valueFrom: - description: Source for the environment - variable's value. Cannot be used if value - is not empty. + scaleDown: + description: scaleDown is scaling policy for scaling + Down. 
+ properties: + policies: + description: policies is a list of potential + scaling polices which can be used during + scaling. + items: + description: HPAScalingPolicy is a single + policy which must hold true for a specified + past interval. properties: - configMapKeyRef: - description: Selects a key of a ConfigMap. - properties: - key: - description: The key to select. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the - ConfigMap or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - fieldRef: - description: 'Selects a field of the - pod: supports metadata.name, metadata.namespace, - `metadata.labels['''']`, `metadata.' - properties: - apiVersion: - description: Version of the schema - the FieldPath is written in terms - of, defaults to "v1". - type: string - fieldPath: - description: Path of the field to - select in the specified API version. - type: string - required: - - fieldPath - type: object - x-kubernetes-map-type: atomic - resourceFieldRef: + periodSeconds: + description: periodSeconds specifies + the window of time for which the policy + should hold true. + format: int32 + type: integer + type: + description: type is used to specify + the scaling policy. + type: string + value: description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, limits. 
- properties: - containerName: - description: 'Container name: required - for volumes, optional for env - vars' - type: string - divisor: - anyOf: - - type: integer - - type: string - description: Specifies the output - format of the exposed resources, - defaults to "1" - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - description: 'Required: resource - to select' - type: string - required: - - resource - type: object - x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret - in the pod's namespace - properties: - key: - description: The key of the secret - to select from. Must be a valid - secret key. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the - Secret or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic + value contains the amount of change which is permitted by the policy. + It must be greater than zero + format: int32 + type: integer + required: + - periodSeconds + - type + - value + type: object + type: array + x-kubernetes-list-type: atomic + selectPolicy: + description: |- + selectPolicy is used to specify which policy should be used. + If not set, the default value Max is used. + type: string + stabilizationWindowSeconds: + description: |- + stabilizationWindowSeconds is the number of seconds for which past recommendations should be + considered while scaling up + format: int32 + type: integer + type: object + scaleUp: + description: scaleUp is scaling policy for scaling + Up. + properties: + policies: + description: policies is a list of potential + scaling polices which can be used during + scaling. 
+ items: + description: HPAScalingPolicy is a single + policy which must hold true for a specified + past interval. + properties: + periodSeconds: + description: periodSeconds specifies + the window of time for which the policy + should hold true. + format: int32 + type: integer + type: + description: type is used to specify + the scaling policy. + type: string + value: + description: |- + value contains the amount of change which is permitted by the policy. + It must be greater than zero + format: int32 + type: integer + required: + - periodSeconds + - type + - value + type: object + type: array + x-kubernetes-list-type: atomic + selectPolicy: + description: |- + selectPolicy is used to specify which policy should be used. + If not set, the default value Max is used. + type: string + stabilizationWindowSeconds: + description: |- + stabilizationWindowSeconds is the number of seconds for which past recommendations should be + considered while scaling up + format: int32 + type: integer + type: object + type: object + maxReplicas: + description: MaxReplicas is the upper limit for the + number of replicas. Required. + format: int32 + minimum: 1 + type: integer + metrics: + description: Metrics contains the specifications for + which to use to calculate the desired replica count. + items: + description: |- + MetricSpec specifies how to scale based on a single metric + (only `type` and one other matching field should be set at on + properties: + containerResource: + description: |- + containerResource refers to a resource metric (such as those specified in + requests and limits) known to Kubernetes descr + properties: + container: + description: container is the name of the + container in the pods of the scaling target + type: string + name: + description: name is the name of the resource + in question. 
+ type: string + target: + description: target specifies the target + value for the given metric + properties: + averageUtilization: + description: "averageUtilization is + the target value of the average of + the\nresource metric across all relevant + pods, represented as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether + the metric type is Utilization, Value, + or AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value + of the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type type: object required: + - container - name + - target type: object - type: array - envFrom: - items: - description: EnvFromSource represents the source - of a set of ConfigMaps + external: + description: |- + external refers to a global metric that is not associated + with any Kubernetes object. properties: - configMapRef: - description: The ConfigMap to select from + metric: + description: metric identifies the target + metric by name and selector properties: name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
+ description: name is the name of the + given metric type: string - optional: - description: Specify whether the ConfigMap - must be defined - type: boolean + selector: + description: "selector is the string-encoded + form of a standard kubernetes label + selector for the given metric\nWhen + set, it is passed " + properties: + matchExpressions: + description: matchExpressions is + a list of label selector requirements. + The requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map + of {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + required: + - name type: object - x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend - to each key in the ConfigMap. Must be - a C_IDENTIFIER. 
- type: string - secretRef: - description: The Secret to select from + target: + description: target specifies the target + value for the given metric properties: - name: - default: "" + averageUtilization: + description: "averageUtilization is + the target value of the average of + the\nresource metric across all relevant + pods, represented as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether + the metric type is Utilization, Value, + or AverageValue type: string - optional: - description: Specify whether the Secret - must be defined - type: boolean + value: + anyOf: + - type: integer + - type: string + description: value is the target value + of the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type type: object - x-kubernetes-map-type: atomic + required: + - metric + - target type: object - type: array - grpc: - description: Enable gRPC registry server. Defaults - to true if unset. - type: boolean - image: - type: string - imagePullPolicy: - description: PullPolicy describes a policy for - if/when to pull a container image - type: string - logLevel: - description: |- - LogLevel sets the logging level for the server - Allowed values: "debug", "info", "warning", "error", "critical". 
- enum: - - debug - - info - - warning - - error - - critical - type: string - metrics: - description: Metrics exposes Prometheus-compatible - metrics for the Feast server when enabled. - type: boolean - nodeSelector: - additionalProperties: - type: string - type: object - resources: - description: ResourceRequirements describes the - compute resource requirements. - properties: - claims: - description: |- - Claims lists the names of resources, defined in spec.resourceClaims, - that are used by this container. - items: - description: ResourceClaim references one - entry in PodSpec.ResourceClaims. + object: + description: |- + object refers to a metric describing a single kubernetes object + (for example, hits-per-second on an Ingress object). + properties: + describedObject: + description: describedObject specifies the + descriptions of a object,such as kind,name + apiVersion properties: + apiVersion: + description: apiVersion is the API version + of the referent + type: string + kind: + description: 'kind is the kind of the + referent; More info: https://git.k8s.' + type: string name: - description: |- - Name must match the name of one entry in pod.spec.resourceClaims of - the Pod where this field is used. + description: 'name is the name of the + referent; More info: https://kubernetes.' type: string required: + - kind - name type: object - type: array - x-kubernetes-list-map-keys: - - name - x-kubernetes-list-type: map - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. 
- type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum - amount of compute resources required. - type: object - type: object - restAPI: - description: Enable REST API registry server. - type: boolean - tls: - description: TlsConfigs configures server TLS - for a feast service. - properties: - disable: - description: will disable TLS for the feast - service. useful in an openshift cluster, - for example, where TLS is configured by - default - type: boolean - secretKeyNames: - description: SecretKeyNames defines the secret - key names for the TLS key and cert. - properties: - tlsCrt: - description: defaults to "tls.crt" - type: string - tlsKey: - description: defaults to "tls.key" - type: string - type: object - secretRef: - description: references the local k8s secret - where the TLS key and cert reside - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - type: object - x-kubernetes-validations: - - message: '`secretRef` required if `disable` - is false.' - rule: '(!has(self.disable) || !self.disable) - ? has(self.secretRef) : true' - volumeMounts: - description: VolumeMounts defines the list of - volumes that should be mounted into the feast - container. - items: - description: VolumeMount describes a mounting - of a Volume within a container. 
+ metric: + description: metric identifies the target + metric by name and selector + properties: + name: + description: name is the name of the + given metric + type: string + selector: + description: "selector is the string-encoded + form of a standard kubernetes label + selector for the given metric\nWhen + set, it is passed " + properties: + matchExpressions: + description: matchExpressions is + a list of label selector requirements. + The requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map + of {key,value} pairs. 
+ type: object + type: object + x-kubernetes-map-type: atomic + required: + - name + type: object + target: + description: target specifies the target + value for the given metric + properties: + averageUtilization: + description: "averageUtilization is + the target value of the average of + the\nresource metric across all relevant + pods, represented as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether + the metric type is Utilization, Value, + or AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value + of the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - describedObject + - metric + - target + type: object + pods: + description: |- + pods refers to a metric describing each pod in the current scale target + (for example, transactions-processed-per-second) + properties: + metric: + description: metric identifies the target + metric by name and selector + properties: + name: + description: name is the name of the + given metric + type: string + selector: + description: "selector is the string-encoded + form of a standard kubernetes label + selector for the given metric\nWhen + set, it is passed " + properties: + matchExpressions: + description: matchExpressions is + a list of label selector requirements. + The requirements are ANDed. 
+ items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map + of {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + required: + - name + type: object + target: + description: target specifies the target + value for the given metric + properties: + averageUtilization: + description: "averageUtilization is + the target value of the average of + the\nresource metric across all relevant + pods, represented as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether + the metric type is Utilization, Value, + or AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value + of the metric (as a quantity). 
+ pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - metric + - target + type: object + resource: + description: |- + resource refers to a resource metric (such as those specified in + requests and limits) known to Kubernetes describing eac properties: - mountPath: - description: |- - Path within the container at which the volume should be mounted. Must - not contain ':'. - type: string - mountPropagation: - description: |- - mountPropagation determines how mounts are propagated from the host - to container and the other way around. - type: string name: - description: This must match the Name of - a Volume. - type: string - readOnly: - description: |- - Mounted read-only if true, read-write otherwise (false or unspecified). - Defaults to false. - type: boolean - recursiveReadOnly: - description: |- - RecursiveReadOnly specifies whether read-only mounts should be handled - recursively. - type: string - subPath: - description: |- - Path within the volume from which the container's volume should be mounted. - Defaults to "" (volume's root). - type: string - subPathExpr: - description: Expanded path within the volume - from which the container's volume should - be mounted. + description: name is the name of the resource + in question. 
type: string + target: + description: target specifies the target + value for the given metric + properties: + averageUtilization: + description: "averageUtilization is + the target value of the average of + the\nresource metric across all relevant + pods, represented as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether + the metric type is Utilization, Value, + or AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value + of the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object required: - - mountPath - name + - target type: object - type: array - workerConfigs: - description: WorkerConfigs defines the worker - configuration for the Feast server. - properties: - keepAliveTimeout: - description: |- - KeepAliveTimeout is the timeout for keep-alive connections in seconds. - Defaults to 30. - format: int32 - minimum: 1 - type: integer - maxRequests: - description: |- - MaxRequests is the maximum number of requests a worker will process before restarting. - This helps prevent memory leaks. - format: int32 - minimum: 0 - type: integer - maxRequestsJitter: - description: |- - MaxRequestsJitter is the maximum jitter to add to max-requests to prevent - thundering herd effect on worker restart. - format: int32 - minimum: 0 - type: integer - registryTTLSeconds: - description: RegistryTTLSeconds is the number - of seconds after which the registry is refreshed. 
- format: int32 - minimum: 0 - type: integer - workerConnections: - description: |- - WorkerConnections is the maximum number of simultaneous clients per worker process. - Defaults to 1000. - format: int32 - minimum: 1 - type: integer - workers: - description: Workers is the number of worker - processes. Use -1 to auto-calculate based - on CPU cores (2 * CPU + 1). - format: int32 - minimum: -1 - type: integer - type: object - type: object - x-kubernetes-validations: - - message: At least one of restAPI or grpc must be - true - rule: self.restAPI == true || self.grpc == true - || !has(self.grpc) - type: object - remote: - description: RemoteRegistryConfig points to a remote feast - registry server. - properties: - feastRef: - description: Reference to an existing `FeatureStore` - CR in the same k8s cluster. - properties: - name: - description: Name of the FeatureStore - type: string - namespace: - description: Namespace of the FeatureStore - type: string - required: - - name - type: object - hostname: - description: Host address of the remote registry service - - :, e.g. `registry..svc.cluster.local:80` - type: string - tls: - description: TlsRemoteRegistryConfigs configures client - TLS for a remote feast registry. - properties: - certName: - description: defines the configmap key name for - the client TLS cert. - type: string - configMapRef: - description: references the local k8s configmap - where the TLS cert resides - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - required: - - certName - - configMapRef - type: object + type: + description: type is the type of metric source. + type: string + required: + - type + type: object + type: array + minReplicas: + description: MinReplicas is the lower limit for the + number of replicas. Defaults to 1. 
+ format: int32 + minimum: 1 + type: integer + required: + - maxReplicas type: object - x-kubernetes-validations: - - message: One selection required. - rule: '[has(self.hostname), has(self.feastRef)].exists_one(c, - c)' type: object - x-kubernetes-validations: - - message: One selection required. - rule: '[has(self.local), has(self.remote)].exists_one(c, - c)' securityContext: description: PodSecurityContext holds pod-level security attributes and common container settings. @@ -6752,6 +9577,98 @@ spec: type: string type: object type: object + topologySpreadConstraints: + description: TopologySpreadConstraints defines how pods are + spread across topology domains. + items: + description: TopologySpreadConstraint specifies how to spread + matching pods among the given topology. + properties: + labelSelector: + description: LabelSelector is used to find matching + pods. + properties: + matchExpressions: + description: matchExpressions is a list of label + selector requirements. The requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that the + selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. 
+ type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select the pods over which + spreading will be calculated. + items: + type: string + type: array + x-kubernetes-list-type: atomic + maxSkew: + description: MaxSkew describes the degree to which pods + may be unevenly distributed. + format: int32 + type: integer + minDomains: + description: MinDomains indicates a minimum number of + eligible domains. + format: int32 + type: integer + nodeAffinityPolicy: + description: |- + NodeAffinityPolicy indicates how we will treat Pod's nodeAffinity/nodeSelector + when calculating pod topology spread skew + type: string + nodeTaintsPolicy: + description: |- + NodeTaintsPolicy indicates how we will treat node taints when calculating + pod topology spread skew. + type: string + topologyKey: + description: TopologyKey is the key of node labels. + type: string + whenUnsatisfiable: + description: |- + WhenUnsatisfiable indicates how to deal with a pod if it doesn't satisfy + the spread constraint. + type: string + required: + - maxSkew + - topologyKey + - whenUnsatisfiable + type: object + type: array ui: description: Creates a UI server container properties: @@ -8539,7 +11456,39 @@ spec: type: object required: - feastProject + - replicas type: object + x-kubernetes-validations: + - message: replicas > 1 and services.scaling.autoscaling are mutually + exclusive. + rule: self.replicas <= 1 || !has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling) + - message: Scaling requires DB-backed persistence for the online store. + Configure services.onlineStore.persistence.store when using replicas + > 1 or autoscaling. 
+ rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (has(self.services) + && has(self.services.onlineStore) && has(self.services.onlineStore.persistence) + && has(self.services.onlineStore.persistence.store)) + - message: Scaling requires DB-backed persistence for the offline + store. Configure services.offlineStore.persistence.store when + using replicas > 1 or autoscaling. + rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (!has(self.services) + || !has(self.services.offlineStore) || (has(self.services.offlineStore.persistence) + && has(self.services.offlineStore.persistence.store))) + - message: Scaling requires DB-backed or remote registry. Configure + registry.local.persistence.store or use a remote registry when + using replicas > 1 or autoscaling. S3/GCS-backed registry is also + allowed. + rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (has(self.services) + && has(self.services.registry) && (has(self.services.registry.remote) + || (has(self.services.registry.local) && has(self.services.registry.local.persistence) + && (has(self.services.registry.local.persistence.store) || (has(self.services.registry.local.persistence.file) + && has(self.services.registry.local.persistence.file.path) && + (self.services.registry.local.persistence.file.path.startsWith('s3://') + || self.services.registry.local.persistence.file.path.startsWith('gs://'))))))) clientConfigMap: description: ConfigMap in this namespace containing a client `feature_store.yaml` for this feast deployment @@ -8604,6 +11553,28 @@ spec: type: string phase: type: string + replicas: + description: Replicas is the current number of ready pod replicas + (used by the scale sub-resource). 
+ format: int32 + type: integer + scalingStatus: + description: ScalingStatus reports the current scaling state of the + FeatureStore deployment. + properties: + currentReplicas: + description: CurrentReplicas is the current number of pod replicas. + format: int32 + type: integer + desiredReplicas: + description: DesiredReplicas is the desired number of pod replicas. + format: int32 + type: integer + type: object + selector: + description: Selector is the label selector for pods managed by the + FeatureStore deployment (used by the scale sub-resource). + type: string serviceHostnames: description: ServiceHostnames defines the service hostnames in the format of :, e.g. example.svc.cluster.local:80 @@ -8624,6 +11595,10 @@ spec: served: true storage: true subresources: + scale: + labelSelectorPath: .status.selector + specReplicasPath: .spec.replicas + statusReplicasPath: .status.replicas status: {} - additionalPrinterColumns: - jsonPath: .status.phase @@ -9294,7 +12269,6 @@ spec: - hbase - cassandra - hazelcast - - ikv - couchbase - clickhouse type: string @@ -9966,7 +12940,6 @@ spec: enum: - snowflake.online - redis - - ikv - datastore - dynamodb - bigtable @@ -9981,6 +12954,7 @@ spec: - couchbase.online - milvus - hybrid + - mongodb type: string required: - secretRef @@ -13505,7 +16479,6 @@ spec: - hbase - cassandra - hazelcast - - ikv - couchbase - clickhouse type: string @@ -14187,7 +17160,6 @@ spec: enum: - snowflake.online - redis - - ikv - datastore - dynamodb - bigtable @@ -14202,6 +17174,7 @@ spec: - couchbase.online - milvus - hybrid + - mongodb type: string required: - secretRef diff --git a/infra/feast-operator/config/component_metadata.yaml b/infra/feast-operator/config/component_metadata.yaml index fad77d7090e..0d4272ae6af 100644 --- a/infra/feast-operator/config/component_metadata.yaml +++ b/infra/feast-operator/config/component_metadata.yaml @@ -1,5 +1,5 @@ # This file is required to configure Feast release information for ODH/RHOAI Operator releases: 
- name: Feast - version: 0.60.0 + version: 0.61.0 repoUrl: https://github.com/feast-dev/feast diff --git a/infra/feast-operator/config/crd/bases/feast.dev_featurestores.yaml b/infra/feast-operator/config/crd/bases/feast.dev_featurestores.yaml index a3acc201a1c..b5888f12f40 100644 --- a/infra/feast-operator/config/crd/bases/feast.dev_featurestores.yaml +++ b/infra/feast-operator/config/crd/bases/feast.dev_featurestores.yaml @@ -703,7 +703,6 @@ spec: - hbase - cassandra - hazelcast - - ikv - couchbase - clickhouse type: string @@ -712,553 +711,806 @@ spec: x-kubernetes-validations: - message: One selection required between init or git. rule: '[has(self.git), has(self.init)].exists_one(c, c)' + replicas: + default: 1 + description: |- + Replicas is the desired number of pod replicas. Used by the scale sub-resource. + Mutually exclusive with services. + format: int32 + minimum: 1 + type: integer services: description: FeatureStoreServices defines the desired feast services. An ephemeral onlineStore feature server is deployed by default. properties: - deploymentStrategy: - description: DeploymentStrategy describes how to replace existing - pods with new ones. + affinity: + description: Affinity defines the pod scheduling constraints for + the FeatureStore deployment. properties: - rollingUpdate: - description: |- - Rolling update config params. Present only if DeploymentStrategyType = - RollingUpdate. + nodeAffinity: + description: Describes node affinity scheduling rules for + the pod. properties: - maxSurge: - anyOf: - - type: integer - - type: string + preferredDuringSchedulingIgnoredDuringExecution: description: |- - The maximum number of pods that can be scheduled above the desired number of - pods. - x-kubernetes-int-or-string: true - maxUnavailable: - anyOf: - - type: integer - - type: string - description: The maximum number of pods that can be unavailable - during the update. 
- x-kubernetes-int-or-string: true - type: object - type: - description: Type of deployment. Can be "Recreate" or "RollingUpdate". - Default is RollingUpdate. - type: string - type: object - disableInitContainers: - description: Disable the 'feast repo initialization' initContainer - type: boolean - offlineStore: - description: OfflineStore configures the offline store service - properties: - persistence: - description: OfflineStorePersistence configures the persistence - settings for the offline store service - properties: - file: - description: OfflineStoreFilePersistence configures the - file-based persistence for the offline store service - properties: - pvc: - description: PvcConfig defines the settings for a - persistent file store based on PVCs. - properties: - create: - description: Settings for creating a new PVC - properties: - accessModes: - description: AccessModes k8s persistent volume - access modes. Defaults to ["ReadWriteOnce"]. - items: - type: string - type: array - resources: - description: Resources describes the storage - resource requirements for a volume. + The scheduler will prefer to schedule pods to nodes that satisfy + the affinity expressions specified by this field, but i + items: + description: |- + An empty preferred scheduling term matches all objects with implicit weight 0 + (i.e. it's a no-op). + properties: + preference: + description: A node selector term, associated with + the corresponding weight. + properties: + matchExpressions: + description: A list of node selector requirements + by node's labels. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. 
properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true + key: + description: The label key that the selector + applies to. + type: string + operator: description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum - amount of compute resources required. - type: object + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator type: object - storageClassName: - description: StorageClassName is the name - of an existing StorageClass to which this - persistent volume belongs. - type: string - type: object - x-kubernetes-validations: - - message: PvcCreate is immutable - rule: self == oldSelf - mountPath: - description: |- - MountPath within the container at which the volume should be mounted. - Must start by "/" and cannot contain ':'. - type: string - ref: - description: Reference to an existing field - properties: - name: - default: "" + type: array + x-kubernetes-list-type: atomic + matchFields: + description: A list of node selector requirements + by node's fields. + items: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
- type: string - type: object - x-kubernetes-map-type: atomic - required: - - mountPath - type: object - x-kubernetes-validations: - - message: One selection is required between ref and - create. - rule: '[has(self.ref), has(self.create)].exists_one(c, - c)' - - message: Mount path must start with '/' and must - not contain ':' - rule: self.mountPath.matches('^/[^:]*$') - type: - enum: - - file - - dask - - duckdb - type: string - type: object - store: - description: OfflineStoreDBStorePersistence configures - the DB store persistence for the offline store service + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the selector + applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + type: object + x-kubernetes-map-type: atomic + weight: + description: Weight associated with matching the + corresponding nodeSelectorTerm, in the range 1-100. + format: int32 + type: integer + required: + - preference + - weight + type: object + type: array + x-kubernetes-list-type: atomic + requiredDuringSchedulingIgnoredDuringExecution: + description: "If the affinity requirements specified by + this field are not met at\nscheduling time, the pod + will not be scheduled onto " properties: - secretKeyName: - description: By default, the selected store "type" - is used as the SecretKeyName - type: string - secretRef: - description: Data store parameters should be placed - as-is from the "feature_store.yaml" under the secret - key. 
- properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - type: - description: Type of the persistence type you want - to use. - enum: - - snowflake.offline - - bigquery - - redshift - - spark - - postgres - - trino - - athena - - mssql - - couchbase.offline - - clickhouse - - ray - type: string + nodeSelectorTerms: + description: Required. A list of node selector terms. + The terms are ORed. + items: + description: |- + A null or empty node selector term matches no objects. The requirements of + them are ANDed. + properties: + matchExpressions: + description: A list of node selector requirements + by node's labels. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the selector + applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchFields: + description: A list of node selector requirements + by node's fields. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the selector + applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. 
+ type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + type: object + x-kubernetes-map-type: atomic + type: array + x-kubernetes-list-type: atomic required: - - secretRef - - type + - nodeSelectorTerms type: object + x-kubernetes-map-type: atomic type: object - x-kubernetes-validations: - - message: One selection required between file or store. - rule: '[has(self.file), has(self.store)].exists_one(c, c)' - server: - description: Creates a remote offline server container + podAffinity: + description: Describes pod affinity scheduling rules (e.g. + co-locate this pod in the same node, zone, etc. as some + other pod(s)). properties: - env: + preferredDuringSchedulingIgnoredDuringExecution: + description: |- + The scheduler will prefer to schedule pods to nodes that satisfy + the affinity expressions specified by this field, but i items: - description: EnvVar represents an environment variable - present in a Container. + description: The weights of all of the matched WeightedPodAffinityTerm + fields are added per-node to find the most preferred + node(s) properties: - name: - description: Name of the environment variable. Must - be a C_IDENTIFIER. - type: string - value: - description: |- - Variable references $(VAR_NAME) are expanded - using the previously defined environment variables in the container and - any - type: string - valueFrom: - description: Source for the environment variable's - value. Cannot be used if value is not empty. + podAffinityTerm: + description: Required. A pod affinity term, associated + with the corresponding weight. properties: - configMapKeyRef: - description: Selects a key of a ConfigMap. - properties: - key: - description: The key to select. 
- type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap - or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - fieldRef: - description: 'Selects a field of the pod: supports - metadata.name, metadata.namespace, `metadata.labels['''']`, - `metadata.' - properties: - apiVersion: - description: Version of the schema the FieldPath - is written in terms of, defaults to "v1". - type: string - fieldPath: - description: Path of the field to select - in the specified API version. - type: string - required: - - fieldPath - type: object - x-kubernetes-map-type: atomic - resourceFieldRef: + labelSelector: description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, limits. + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. properties: - containerName: - description: 'Container name: required for - volumes, optional for env vars' - type: string - divisor: - anyOf: - - type: integer - - type: string - description: Specifies the output format - of the exposed resources, defaults to - "1" - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - description: 'Required: resource to select' - type: string - required: - - resource + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. 
+ type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object type: object x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret in the - pod's namespace + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. properties: - key: - description: The key of the secret to select - from. Must be a valid secret key. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the Secret - or its key must be defined - type: boolean - required: - - key + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. 
+ properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object type: object x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t + type: string + required: + - topologyKey type: object + weight: + description: |- + weight associated with matching the corresponding podAffinityTerm, + in the range 1-100. + format: int32 + type: integer required: - - name + - podAffinityTerm + - weight type: object type: array - envFrom: + x-kubernetes-list-type: atomic + requiredDuringSchedulingIgnoredDuringExecution: + description: "If the affinity requirements specified by + this field are not met at\nscheduling time, the pod + will not be scheduled onto " items: - description: EnvFromSource represents the source of - a set of ConfigMaps + description: "Defines a set of pods (namely those matching + the labelSelector\nrelative to the given namespace(s)) + that this pod should " properties: - configMapRef: - description: The ConfigMap to select from + labelSelector: + description: |- + A label query over a set of resources, in this case pods. 
+ If it's null, this PodAffinityTerm matches with no Pods. properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap must - be defined - type: boolean + matchExpressions: + description: matchExpressions is a list of label + selector requirements. The requirements are + ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that + the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object type: object x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend to - each key in the ConfigMap. Must be a C_IDENTIFIER. - type: string - secretRef: - description: The Secret to select from + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the Secret must - be defined - type: boolean + matchExpressions: + description: matchExpressions is a list of label + selector requirements. The requirements are + ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that + the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object type: object x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t + type: string + required: + - topologyKey type: object type: array - image: - type: string - imagePullPolicy: - description: PullPolicy describes a policy for if/when - to pull a container image - type: string - logLevel: - description: |- - LogLevel sets the logging level for the server - Allowed values: "debug", "info", "warning", "error", "critical". - enum: - - debug - - info - - warning - - error - - critical - type: string - metrics: - description: Metrics exposes Prometheus-compatible metrics - for the Feast server when enabled. - type: boolean - nodeSelector: - additionalProperties: - type: string - type: object - resources: - description: ResourceRequirements describes the compute - resource requirements. - properties: - claims: - description: |- - Claims lists the names of resources, defined in spec.resourceClaims, - that are used by this container. - items: - description: ResourceClaim references one entry - in PodSpec.ResourceClaims. + x-kubernetes-list-type: atomic + type: object + podAntiAffinity: + description: Describes pod anti-affinity scheduling rules + (e.g. avoid putting this pod in the same node, zone, etc. + properties: + preferredDuringSchedulingIgnoredDuringExecution: + description: "The scheduler will prefer to schedule pods + to nodes that satisfy\nthe anti-affinity expressions + specified by this field, " + items: + description: The weights of all of the matched WeightedPodAffinityTerm + fields are added per-node to find the most preferred + node(s) + properties: + podAffinityTerm: + description: Required. A pod affinity term, associated + with the corresponding weight. 
properties: - name: + labelSelector: description: |- - Name must match the name of one entry in pod.spec.resourceClaims of - the Pod where this field is used. + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. 
+ items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t type: string required: - - name + - topologyKey type: object - type: array - x-kubernetes-list-map-keys: - - name - x-kubernetes-list-type: map - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. 
- type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum amount - of compute resources required. - type: object - type: object - tls: - description: TlsConfigs configures server TLS for a feast - service. - properties: - disable: - description: will disable TLS for the feast service. - useful in an openshift cluster, for example, where - TLS is configured by default - type: boolean - secretKeyNames: - description: SecretKeyNames defines the secret key - names for the TLS key and cert. - properties: - tlsCrt: - description: defaults to "tls.crt" - type: string - tlsKey: - description: defaults to "tls.key" - type: string - type: object - secretRef: - description: references the local k8s secret where - the TLS key and cert reside - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - type: object - x-kubernetes-validations: - - message: '`secretRef` required if `disable` is false.' - rule: '(!has(self.disable) || !self.disable) ? has(self.secretRef) - : true' - volumeMounts: - description: VolumeMounts defines the list of volumes - that should be mounted into the feast container. - items: - description: VolumeMount describes a mounting of a Volume - within a container. - properties: - mountPath: - description: |- - Path within the container at which the volume should be mounted. Must - not contain ':'. - type: string - mountPropagation: - description: |- - mountPropagation determines how mounts are propagated from the host - to container and the other way around. - type: string - name: - description: This must match the Name of a Volume. 
- type: string - readOnly: - description: |- - Mounted read-only if true, read-write otherwise (false or unspecified). - Defaults to false. - type: boolean - recursiveReadOnly: - description: |- - RecursiveReadOnly specifies whether read-only mounts should be handled - recursively. - type: string - subPath: + weight: description: |- - Path within the volume from which the container's volume should be mounted. - Defaults to "" (volume's root). - type: string - subPathExpr: - description: Expanded path within the volume from - which the container's volume should be mounted. - type: string + weight associated with matching the corresponding podAffinityTerm, + in the range 1-100. + format: int32 + type: integer required: - - mountPath - - name + - podAffinityTerm + - weight type: object type: array - workerConfigs: - description: WorkerConfigs defines the worker configuration - for the Feast server. - properties: - keepAliveTimeout: - description: |- - KeepAliveTimeout is the timeout for keep-alive connections in seconds. - Defaults to 30. - format: int32 - minimum: 1 - type: integer - maxRequests: - description: |- - MaxRequests is the maximum number of requests a worker will process before restarting. - This helps prevent memory leaks. - format: int32 - minimum: 0 - type: integer - maxRequestsJitter: - description: |- - MaxRequestsJitter is the maximum jitter to add to max-requests to prevent - thundering herd effect on worker restart. - format: int32 - minimum: 0 - type: integer - registryTTLSeconds: - description: RegistryTTLSeconds is the number of seconds - after which the registry is refreshed. - format: int32 - minimum: 0 - type: integer - workerConnections: - description: |- - WorkerConnections is the maximum number of simultaneous clients per worker process. - Defaults to 1000. - format: int32 - minimum: 1 - type: integer - workers: - description: Workers is the number of worker processes. - Use -1 to auto-calculate based on CPU cores (2 * - CPU + 1). 
- format: int32 - minimum: -1 - type: integer - type: object + x-kubernetes-list-type: atomic + requiredDuringSchedulingIgnoredDuringExecution: + description: "If the anti-affinity requirements specified + by this field are not met at\nscheduling time, the pod + will not be scheduled " + items: + description: "Defines a set of pods (namely those matching + the labelSelector\nrelative to the given namespace(s)) + that this pod should " + properties: + labelSelector: + description: |- + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. + properties: + matchExpressions: + description: matchExpressions is a list of label + selector requirements. The requirements are + ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that + the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list of label + selector requirements. The requirements are + ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that + the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t + type: string + required: + - topologyKey + type: object + type: array + x-kubernetes-list-type: atomic type: object type: object - onlineStore: - description: OnlineStore configures the online store service + deploymentStrategy: + description: DeploymentStrategy describes how to replace existing + pods with new ones. + properties: + rollingUpdate: + description: |- + Rolling update config params. 
Present only if DeploymentStrategyType = + RollingUpdate. + properties: + maxSurge: + anyOf: + - type: integer + - type: string + description: |- + The maximum number of pods that can be scheduled above the desired number of + pods. + x-kubernetes-int-or-string: true + maxUnavailable: + anyOf: + - type: integer + - type: string + description: The maximum number of pods that can be unavailable + during the update. + x-kubernetes-int-or-string: true + type: object + type: + description: Type of deployment. Can be "Recreate" or "RollingUpdate". + Default is RollingUpdate. + type: string + type: object + disableInitContainers: + description: Disable the 'feast repo initialization' initContainer + type: boolean + offlineStore: + description: OfflineStore configures the offline store service properties: persistence: - description: OnlineStorePersistence configures the persistence - settings for the online store service + description: OfflineStorePersistence configures the persistence + settings for the offline store service properties: file: - description: OnlineStoreFilePersistence configures the - file-based persistence for the online store service + description: OfflineStoreFilePersistence configures the + file-based persistence for the offline store service properties: - path: - type: string pvc: description: PvcConfig defines the settings for a persistent file store based on PVCs. @@ -1335,21 +1587,16 @@ spec: - message: Mount path must start with '/' and must not contain ':' rule: self.mountPath.matches('^/[^:]*$') + type: + enum: + - file + - dask + - duckdb + type: string type: object - x-kubernetes-validations: - - message: Ephemeral stores must have absolute paths. - rule: '(!has(self.pvc) && has(self.path)) ? self.path.startsWith(''/'') - : true' - - message: PVC path must be a file name only, with no - slashes. - rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') - : true' - - message: Online store does not support S3 or GS buckets. 
- rule: 'has(self.path) ? !(self.path.startsWith(''s3://'') - || self.path.startsWith(''gs://'')) : true' store: - description: OnlineStoreDBStorePersistence configures - the DB store persistence for the online store service + description: OfflineStoreDBStorePersistence configures + the DB store persistence for the offline store service properties: secretKeyName: description: By default, the selected store "type" @@ -1373,23 +1620,17 @@ spec: description: Type of the persistence type you want to use. enum: - - snowflake.online - - redis - - ikv - - datastore - - dynamodb - - bigtable + - snowflake.offline + - bigquery + - redshift + - spark - postgres - - cassandra - - mysql - - hazelcast - - singlestore - - hbase - - elasticsearch - - qdrant - - couchbase.online - - milvus - - hybrid + - trino + - athena + - mssql + - couchbase.offline + - clickhouse + - ray type: string required: - secretRef @@ -1400,7 +1641,7 @@ spec: - message: One selection required between file or store. rule: '[has(self.file), has(self.store)].exists_one(c, c)' server: - description: Creates a feature server container + description: Creates a remote offline server container properties: env: items: @@ -1756,151 +1997,74 @@ spec: type: object type: object type: object - registry: - description: Registry configures the registry service. One selection - is required. Local is the default setting. 
+ onlineStore: + description: OnlineStore configures the online store service properties: - local: - description: LocalRegistryConfig configures the registry service + persistence: + description: OnlineStorePersistence configures the persistence + settings for the online store service properties: - persistence: - description: RegistryPersistence configures the persistence - settings for the registry service + file: + description: OnlineStoreFilePersistence configures the + file-based persistence for the online store service properties: - file: - description: RegistryFilePersistence configures the - file-based persistence for the registry service + path: + type: string + pvc: + description: PvcConfig defines the settings for a + persistent file store based on PVCs. properties: - cache_mode: - description: |- - CacheMode defines the registry cache update strategy. - Allowed values are "sync" and "thread". - enum: - - none - - sync - - thread - type: string - cache_ttl_seconds: - description: CacheTTLSeconds defines the TTL (in - seconds) for the registry cache. - format: int32 - minimum: 0 - type: integer - path: - type: string - pvc: - description: PvcConfig defines the settings for - a persistent file store based on PVCs. + create: + description: Settings for creating a new PVC properties: - create: - description: Settings for creating a new PVC - properties: - accessModes: - description: AccessModes k8s persistent - volume access modes. Defaults to ["ReadWriteOnce"]. - items: - type: string - type: array - resources: - description: Resources describes the storage - resource requirements for a volume. - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. 
- type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the - minimum amount of compute resources - required. - type: object - type: object - storageClassName: - description: StorageClassName is the name - of an existing StorageClass to which - this persistent volume belongs. - type: string - type: object - x-kubernetes-validations: - - message: PvcCreate is immutable - rule: self == oldSelf - mountPath: - description: |- - MountPath within the container at which the volume should be mounted. - Must start by "/" and cannot contain ':'. - type: string - ref: - description: Reference to an existing field + accessModes: + description: AccessModes k8s persistent volume + access modes. Defaults to ["ReadWriteOnce"]. + items: + type: string + type: array + resources: + description: Resources describes the storage + resource requirements for a volume. properties: - name: - default: "" + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum + amount of compute resources required. 
+ type: object type: object - x-kubernetes-map-type: atomic - required: - - mountPath + storageClassName: + description: StorageClassName is the name + of an existing StorageClass to which this + persistent volume belongs. + type: string type: object x-kubernetes-validations: - - message: One selection is required between ref - and create. - rule: '[has(self.ref), has(self.create)].exists_one(c, - c)' - - message: Mount path must start with '/' and - must not contain ':' - rule: self.mountPath.matches('^/[^:]*$') - s3_additional_kwargs: - additionalProperties: - type: string - type: object - type: object - x-kubernetes-validations: - - message: Registry files must use absolute paths - or be S3 ('s3://') or GS ('gs://') object store - URIs. - rule: '(!has(self.pvc) && has(self.path)) ? (self.path.startsWith(''/'') - || self.path.startsWith(''s3://'') || self.path.startsWith(''gs://'')) - : true' - - message: PVC path must be a file name only, with - no slashes. - rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') - : true' - - message: PVC persistence does not support S3 or - GS object store URIs. - rule: '(has(self.pvc) && has(self.path)) ? !(self.path.startsWith(''s3://'') - || self.path.startsWith(''gs://'')) : true' - - message: Additional S3 settings are available only - for S3 object store URIs. - rule: '(has(self.s3_additional_kwargs) && has(self.path)) - ? self.path.startsWith(''s3://'') : true' - store: - description: RegistryDBStorePersistence configures - the DB store persistence for the registry service - properties: - secretKeyName: - description: By default, the selected store "type" - is used as the SecretKeyName + - message: PvcCreate is immutable + rule: self == oldSelf + mountPath: + description: |- + MountPath within the container at which the volume should be mounted. + Must start by "/" and cannot contain ':'. 
type: string - secretRef: - description: Data store parameters should be placed - as-is from the "feature_store.yaml" under the - secret key. + ref: + description: Reference to an existing field properties: name: default: "" @@ -1911,146 +2075,109 @@ spec: type: string type: object x-kubernetes-map-type: atomic - type: - description: Type of the persistence type you - want to use. - enum: - - sql - - snowflake.registry - type: string required: - - secretRef - - type + - mountPath type: object + x-kubernetes-validations: + - message: One selection is required between ref and + create. + rule: '[has(self.ref), has(self.create)].exists_one(c, + c)' + - message: Mount path must start with '/' and must + not contain ':' + rule: self.mountPath.matches('^/[^:]*$') type: object x-kubernetes-validations: - - message: One selection required between file or store. - rule: '[has(self.file), has(self.store)].exists_one(c, - c)' - server: - description: Creates a registry server container + - message: Ephemeral stores must have absolute paths. + rule: '(!has(self.pvc) && has(self.path)) ? self.path.startsWith(''/'') + : true' + - message: PVC path must be a file name only, with no + slashes. + rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') + : true' + - message: Online store does not support S3 or GS buckets. + rule: 'has(self.path) ? !(self.path.startsWith(''s3://'') + || self.path.startsWith(''gs://'')) : true' + store: + description: OnlineStoreDBStorePersistence configures + the DB store persistence for the online store service properties: - env: - items: - description: EnvVar represents an environment variable - present in a Container. + secretKeyName: + description: By default, the selected store "type" + is used as the SecretKeyName + type: string + secretRef: + description: Data store parameters should be placed + as-is from the "feature_store.yaml" under the secret + key. 
+ properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: + description: Type of the persistence type you want + to use. + enum: + - snowflake.online + - redis + - datastore + - dynamodb + - bigtable + - postgres + - cassandra + - mysql + - hazelcast + - singlestore + - hbase + - elasticsearch + - qdrant + - couchbase.online + - milvus + - hybrid + - mongodb + type: string + required: + - secretRef + - type + type: object + type: object + x-kubernetes-validations: + - message: One selection required between file or store. + rule: '[has(self.file), has(self.store)].exists_one(c, c)' + server: + description: Creates a feature server container + properties: + env: + items: + description: EnvVar represents an environment variable + present in a Container. + properties: + name: + description: Name of the environment variable. Must + be a C_IDENTIFIER. + type: string + value: + description: |- + Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any + type: string + valueFrom: + description: Source for the environment variable's + value. Cannot be used if value is not empty. properties: - name: - description: Name of the environment variable. - Must be a C_IDENTIFIER. - type: string - value: - description: |- - Variable references $(VAR_NAME) are expanded - using the previously defined environment variables in the container and - any - type: string - valueFrom: - description: Source for the environment variable's - value. Cannot be used if value is not empty. - properties: - configMapKeyRef: - description: Selects a key of a ConfigMap. - properties: - key: - description: The key to select. - type: string - name: - default: "" - description: |- - Name of the referent. 
- This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap - or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - fieldRef: - description: 'Selects a field of the pod: - supports metadata.name, metadata.namespace, - `metadata.labels['''']`, `metadata.' - properties: - apiVersion: - description: Version of the schema the - FieldPath is written in terms of, - defaults to "v1". - type: string - fieldPath: - description: Path of the field to select - in the specified API version. - type: string - required: - - fieldPath - type: object - x-kubernetes-map-type: atomic - resourceFieldRef: - description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, limits. - properties: - containerName: - description: 'Container name: required - for volumes, optional for env vars' - type: string - divisor: - anyOf: - - type: integer - - type: string - description: Specifies the output format - of the exposed resources, defaults - to "1" - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - description: 'Required: resource to - select' - type: string - required: - - resource - type: object - x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret in - the pod's namespace - properties: - key: - description: The key of the secret to - select from. Must be a valid secret - key. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
- type: string - optional: - description: Specify whether the Secret - or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - type: object - required: - - name - type: object - type: array - envFrom: - items: - description: EnvFromSource represents the source - of a set of ConfigMaps - properties: - configMapRef: - description: The ConfigMap to select from + configMapKeyRef: + description: Selects a key of a ConfigMap. properties: + key: + description: The key to select. + type: string name: default: "" description: |- @@ -2060,17 +2187,62 @@ spec: type: string optional: description: Specify whether the ConfigMap - must be defined + or its key must be defined type: boolean + required: + - key type: object x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend - to each key in the ConfigMap. Must be a C_IDENTIFIER. - type: string - secretRef: - description: The Secret to select from + fieldRef: + description: 'Selects a field of the pod: supports + metadata.name, metadata.namespace, `metadata.labels['''']`, + `metadata.' + properties: + apiVersion: + description: Version of the schema the FieldPath + is written in terms of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to select + in the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits. 
+ properties: + containerName: + description: 'Container name: required for + volumes, optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format + of the exposed resources, defaults to + "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret in the + pod's namespace properties: + key: + description: The key of the secret to select + from. Must be a valid secret key. + type: string name: default: "" description: |- @@ -2080,392 +2252,1567 @@ spec: type: string optional: description: Specify whether the Secret - must be defined + or its key must be defined type: boolean + required: + - key type: object x-kubernetes-map-type: atomic type: object - type: array - grpc: - description: Enable gRPC registry server. Defaults - to true if unset. - type: boolean - image: - type: string - imagePullPolicy: - description: PullPolicy describes a policy for if/when - to pull a container image - type: string - logLevel: - description: |- - LogLevel sets the logging level for the server - Allowed values: "debug", "info", "warning", "error", "critical". - enum: - - debug - - info - - warning - - error - - critical - type: string - metrics: - description: Metrics exposes Prometheus-compatible - metrics for the Feast server when enabled. - type: boolean - nodeSelector: - additionalProperties: + required: + - name + type: object + type: array + envFrom: + items: + description: EnvFromSource represents the source of + a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. 
+ This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend to + each key in the ConfigMap. Must be a C_IDENTIFIER. type: string - type: object - resources: - description: ResourceRequirements describes the compute - resource requirements. - properties: - claims: - description: |- - Claims lists the names of resources, defined in spec.resourceClaims, - that are used by this container. - items: - description: ResourceClaim references one entry - in PodSpec.ResourceClaims. - properties: - name: - description: |- - Name must match the name of one entry in pod.spec.resourceClaims of - the Pod where this field is used. - type: string - required: - - name - type: object - type: array - x-kubernetes-list-map-keys: - - name - x-kubernetes-list-type: map - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum amount - of compute resources required. - type: object - type: object - restAPI: - description: Enable REST API registry server. - type: boolean - tls: - description: TlsConfigs configures server TLS for - a feast service. - properties: - disable: - description: will disable TLS for the feast service. 
- useful in an openshift cluster, for example, - where TLS is configured by default - type: boolean - secretKeyNames: - description: SecretKeyNames defines the secret - key names for the TLS key and cert. - properties: - tlsCrt: - description: defaults to "tls.crt" - type: string - tlsKey: - description: defaults to "tls.key" - type: string - type: object - secretRef: - description: references the local k8s secret where - the TLS key and cert reside - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - type: object - x-kubernetes-validations: - - message: '`secretRef` required if `disable` is false.' - rule: '(!has(self.disable) || !self.disable) ? has(self.secretRef) - : true' - volumeMounts: - description: VolumeMounts defines the list of volumes - that should be mounted into the feast container. - items: - description: VolumeMount describes a mounting of - a Volume within a container. + secretRef: + description: The Secret to select from properties: - mountPath: - description: |- - Path within the container at which the volume should be mounted. Must - not contain ':'. - type: string - mountPropagation: - description: |- - mountPropagation determines how mounts are propagated from the host - to container and the other way around. - type: string name: - description: This must match the Name of a Volume. - type: string - readOnly: - description: |- - Mounted read-only if true, read-write otherwise (false or unspecified). - Defaults to false. - type: boolean - recursiveReadOnly: + default: "" description: |- - RecursiveReadOnly specifies whether read-only mounts should be handled - recursively. + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. 
type: string - subPath: + optional: + description: Specify whether the Secret must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + type: object + type: array + image: + type: string + imagePullPolicy: + description: PullPolicy describes a policy for if/when + to pull a container image + type: string + logLevel: + description: |- + LogLevel sets the logging level for the server + Allowed values: "debug", "info", "warning", "error", "critical". + enum: + - debug + - info + - warning + - error + - critical + type: string + metrics: + description: Metrics exposes Prometheus-compatible metrics + for the Feast server when enabled. + type: boolean + nodeSelector: + additionalProperties: + type: string + type: object + resources: + description: ResourceRequirements describes the compute + resource requirements. + properties: + claims: + description: |- + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. + items: + description: ResourceClaim references one entry + in PodSpec.ResourceClaims. + properties: + name: description: |- - Path within the volume from which the container's volume should be mounted. - Defaults to "" (volume's root). - type: string - subPathExpr: - description: Expanded path within the volume - from which the container's volume should be - mounted. + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. type: string required: - - mountPath - name type: object type: array - workerConfigs: - description: WorkerConfigs defines the worker configuration - for the Feast server. 
+ x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum amount + of compute resources required. + type: object + type: object + tls: + description: TlsConfigs configures server TLS for a feast + service. + properties: + disable: + description: will disable TLS for the feast service. + useful in an openshift cluster, for example, where + TLS is configured by default + type: boolean + secretKeyNames: + description: SecretKeyNames defines the secret key + names for the TLS key and cert. properties: - keepAliveTimeout: - description: |- - KeepAliveTimeout is the timeout for keep-alive connections in seconds. - Defaults to 30. - format: int32 - minimum: 1 - type: integer - maxRequests: - description: |- - MaxRequests is the maximum number of requests a worker will process before restarting. - This helps prevent memory leaks. - format: int32 - minimum: 0 - type: integer - maxRequestsJitter: - description: |- - MaxRequestsJitter is the maximum jitter to add to max-requests to prevent - thundering herd effect on worker restart. - format: int32 - minimum: 0 - type: integer - registryTTLSeconds: - description: RegistryTTLSeconds is the number - of seconds after which the registry is refreshed. 
- format: int32 - minimum: 0 - type: integer - workerConnections: + tlsCrt: + description: defaults to "tls.crt" + type: string + tlsKey: + description: defaults to "tls.key" + type: string + type: object + secretRef: + description: references the local k8s secret where + the TLS key and cert reside + properties: + name: + default: "" description: |- - WorkerConnections is the maximum number of simultaneous clients per worker process. - Defaults to 1000. - format: int32 - minimum: 1 - type: integer - workers: - description: Workers is the number of worker processes. - Use -1 to auto-calculate based on CPU cores - (2 * CPU + 1). - format: int32 - minimum: -1 - type: integer + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string type: object + x-kubernetes-map-type: atomic type: object x-kubernetes-validations: - - message: At least one of restAPI or grpc must be true - rule: self.restAPI == true || self.grpc == true || !has(self.grpc) - type: object - remote: - description: RemoteRegistryConfig points to a remote feast - registry server. - properties: - feastRef: - description: Reference to an existing `FeatureStore` CR - in the same k8s cluster. - properties: - name: - description: Name of the FeatureStore - type: string - namespace: - description: Namespace of the FeatureStore - type: string - required: - - name - type: object - hostname: - description: Host address of the remote registry service - - :, e.g. `registry..svc.cluster.local:80` - type: string - tls: - description: TlsRemoteRegistryConfigs configures client - TLS for a remote feast registry. + - message: '`secretRef` required if `disable` is false.' + rule: '(!has(self.disable) || !self.disable) ? has(self.secretRef) + : true' + volumeMounts: + description: VolumeMounts defines the list of volumes + that should be mounted into the feast container. 
+ items: + description: VolumeMount describes a mounting of a Volume + within a container. + properties: + mountPath: + description: |- + Path within the container at which the volume should be mounted. Must + not contain ':'. + type: string + mountPropagation: + description: |- + mountPropagation determines how mounts are propagated from the host + to container and the other way around. + type: string + name: + description: This must match the Name of a Volume. + type: string + readOnly: + description: |- + Mounted read-only if true, read-write otherwise (false or unspecified). + Defaults to false. + type: boolean + recursiveReadOnly: + description: |- + RecursiveReadOnly specifies whether read-only mounts should be handled + recursively. + type: string + subPath: + description: |- + Path within the volume from which the container's volume should be mounted. + Defaults to "" (volume's root). + type: string + subPathExpr: + description: Expanded path within the volume from + which the container's volume should be mounted. + type: string + required: + - mountPath + - name + type: object + type: array + workerConfigs: + description: WorkerConfigs defines the worker configuration + for the Feast server. properties: - certName: - description: defines the configmap key name for the - client TLS cert. - type: string - configMapRef: - description: references the local k8s configmap where - the TLS cert resides - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - required: - - certName - - configMapRef + keepAliveTimeout: + description: |- + KeepAliveTimeout is the timeout for keep-alive connections in seconds. + Defaults to 30. 
+ format: int32 + minimum: 1 + type: integer + maxRequests: + description: |- + MaxRequests is the maximum number of requests a worker will process before restarting. + This helps prevent memory leaks. + format: int32 + minimum: 0 + type: integer + maxRequestsJitter: + description: |- + MaxRequestsJitter is the maximum jitter to add to max-requests to prevent + thundering herd effect on worker restart. + format: int32 + minimum: 0 + type: integer + registryTTLSeconds: + description: RegistryTTLSeconds is the number of seconds + after which the registry is refreshed. + format: int32 + minimum: 0 + type: integer + workerConnections: + description: |- + WorkerConnections is the maximum number of simultaneous clients per worker process. + Defaults to 1000. + format: int32 + minimum: 1 + type: integer + workers: + description: Workers is the number of worker processes. + Use -1 to auto-calculate based on CPU cores (2 * + CPU + 1). + format: int32 + minimum: -1 + type: integer type: object type: object - x-kubernetes-validations: - - message: One selection required. - rule: '[has(self.hostname), has(self.feastRef)].exists_one(c, - c)' + type: object + podDisruptionBudgets: + description: PodDisruptionBudgets configures a PodDisruptionBudget + for the FeatureStore deployment. + properties: + maxUnavailable: + anyOf: + - type: integer + - type: string + description: MaxUnavailable specifies the maximum number/percentage + of pods that can be unavailable. + x-kubernetes-int-or-string: true + minAvailable: + anyOf: + - type: integer + - type: string + description: MinAvailable specifies the minimum number/percentage + of pods that must remain available. + x-kubernetes-int-or-string: true type: object x-kubernetes-validations: - - message: One selection required. - rule: '[has(self.local), has(self.remote)].exists_one(c, c)' - securityContext: - description: PodSecurityContext holds pod-level security attributes - and common container settings. 
+ - message: Exactly one of minAvailable or maxUnavailable must + be set. + rule: '[has(self.minAvailable), has(self.maxUnavailable)].exists_one(c, + c)' + registry: + description: Registry configures the registry service. One selection + is required. Local is the default setting. properties: - appArmorProfile: - description: appArmorProfile is the AppArmor options to use - by the containers in this pod. - properties: - localhostProfile: - description: localhostProfile indicates a profile loaded - on the node that should be used. - type: string - type: - description: type indicates which kind of AppArmor profile - will be applied. - type: string - required: - - type - type: object - fsGroup: - description: A special supplemental group that applies to - all containers in a pod. - format: int64 - type: integer - fsGroupChangePolicy: - description: |- - fsGroupChangePolicy defines behavior of changing ownership and permission of the volume - before being exposed inside Pod. - type: string - runAsGroup: - description: |- - The GID to run the entrypoint of the container process. - Uses runtime default if unset. - format: int64 - type: integer - runAsNonRoot: - description: Indicates that the container must run as a non-root - user. - type: boolean - runAsUser: - description: |- - The UID to run the entrypoint of the container process. - Defaults to user specified in image metadata if unspecified. - format: int64 - type: integer - seLinuxOptions: - description: The SELinux context to be applied to all containers. - properties: - level: - description: Level is SELinux level label that applies - to the container. - type: string - role: - description: Role is a SELinux role label that applies - to the container. - type: string - type: - description: Type is a SELinux type label that applies - to the container. - type: string - user: - description: User is a SELinux user label that applies - to the container. 
- type: string - type: object - seccompProfile: - description: |- - The seccomp options to use by the containers in this pod. - Note that this field cannot be set when spec.os. - properties: - localhostProfile: - description: localhostProfile indicates a profile defined - in a file on the node should be used. - type: string - type: - description: type indicates which kind of seccomp profile - will be applied. - type: string - required: - - type - type: object - supplementalGroups: - description: |- - A list of groups applied to the first process run in each container, in addition - to the container's primary GID, the fsG - items: - format: int64 - type: integer - type: array - x-kubernetes-list-type: atomic - sysctls: - description: Sysctls hold a list of namespaced sysctls used - for the pod. - items: - description: Sysctl defines a kernel parameter to be set - properties: - name: - description: Name of a property to set - type: string - value: - description: Value of a property to set - type: string - required: - - name - - value - type: object - type: array - x-kubernetes-list-type: atomic - windowsOptions: - description: The Windows specific settings applied to all - containers. + local: + description: LocalRegistryConfig configures the registry service properties: - gmsaCredentialSpec: + persistence: + description: RegistryPersistence configures the persistence + settings for the registry service + properties: + file: + description: RegistryFilePersistence configures the + file-based persistence for the registry service + properties: + cache_mode: + description: |- + CacheMode defines the registry cache update strategy. + Allowed values are "sync" and "thread". + enum: + - none + - sync + - thread + type: string + cache_ttl_seconds: + description: CacheTTLSeconds defines the TTL (in + seconds) for the registry cache. 
+ format: int32 + minimum: 0 + type: integer + path: + type: string + pvc: + description: PvcConfig defines the settings for + a persistent file store based on PVCs. + properties: + create: + description: Settings for creating a new PVC + properties: + accessModes: + description: AccessModes k8s persistent + volume access modes. Defaults to ["ReadWriteOnce"]. + items: + type: string + type: array + resources: + description: Resources describes the storage + resource requirements for a volume. + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the + minimum amount of compute resources + required. + type: object + type: object + storageClassName: + description: StorageClassName is the name + of an existing StorageClass to which + this persistent volume belongs. + type: string + type: object + x-kubernetes-validations: + - message: PvcCreate is immutable + rule: self == oldSelf + mountPath: + description: |- + MountPath within the container at which the volume should be mounted. + Must start by "/" and cannot contain ':'. + type: string + ref: + description: Reference to an existing field + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. 
+ type: string + type: object + x-kubernetes-map-type: atomic + required: + - mountPath + type: object + x-kubernetes-validations: + - message: One selection is required between ref + and create. + rule: '[has(self.ref), has(self.create)].exists_one(c, + c)' + - message: Mount path must start with '/' and + must not contain ':' + rule: self.mountPath.matches('^/[^:]*$') + s3_additional_kwargs: + additionalProperties: + type: string + type: object + type: object + x-kubernetes-validations: + - message: Registry files must use absolute paths + or be S3 ('s3://') or GS ('gs://') object store + URIs. + rule: '(!has(self.pvc) && has(self.path)) ? (self.path.startsWith(''/'') + || self.path.startsWith(''s3://'') || self.path.startsWith(''gs://'')) + : true' + - message: PVC path must be a file name only, with + no slashes. + rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') + : true' + - message: PVC persistence does not support S3 or + GS object store URIs. + rule: '(has(self.pvc) && has(self.path)) ? !(self.path.startsWith(''s3://'') + || self.path.startsWith(''gs://'')) : true' + - message: Additional S3 settings are available only + for S3 object store URIs. + rule: '(has(self.s3_additional_kwargs) && has(self.path)) + ? self.path.startsWith(''s3://'') : true' + store: + description: RegistryDBStorePersistence configures + the DB store persistence for the registry service + properties: + secretKeyName: + description: By default, the selected store "type" + is used as the SecretKeyName + type: string + secretRef: + description: Data store parameters should be placed + as-is from the "feature_store.yaml" under the + secret key. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: + description: Type of the persistence type you + want to use. 
+ enum: + - sql + - snowflake.registry + type: string + required: + - secretRef + - type + type: object + type: object + x-kubernetes-validations: + - message: One selection required between file or store. + rule: '[has(self.file), has(self.store)].exists_one(c, + c)' + server: + description: Creates a registry server container + properties: + env: + items: + description: EnvVar represents an environment variable + present in a Container. + properties: + name: + description: Name of the environment variable. + Must be a C_IDENTIFIER. + type: string + value: + description: |- + Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any + type: string + valueFrom: + description: Source for the environment variable's + value. Cannot be used if value is not empty. + properties: + configMapKeyRef: + description: Selects a key of a ConfigMap. + properties: + key: + description: The key to select. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + fieldRef: + description: 'Selects a field of the pod: + supports metadata.name, metadata.namespace, + `metadata.labels['''']`, `metadata.' + properties: + apiVersion: + description: Version of the schema the + FieldPath is written in terms of, + defaults to "v1". + type: string + fieldPath: + description: Path of the field to select + in the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits. 
+ properties: + containerName: + description: 'Container name: required + for volumes, optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format + of the exposed resources, defaults + to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to + select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret in + the pod's namespace + properties: + key: + description: The key of the secret to + select from. Must be a valid secret + key. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + type: object + required: + - name + type: object + type: array + envFrom: + items: + description: EnvFromSource represents the source + of a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap + must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend + to each key in the ConfigMap. Must be a C_IDENTIFIER. + type: string + secretRef: + description: The Secret to select from + properties: + name: + default: "" + description: |- + Name of the referent. 
+ This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret + must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + type: object + type: array + grpc: + description: Enable gRPC registry server. Defaults + to true if unset. + type: boolean + image: + type: string + imagePullPolicy: + description: PullPolicy describes a policy for if/when + to pull a container image + type: string + logLevel: + description: |- + LogLevel sets the logging level for the server + Allowed values: "debug", "info", "warning", "error", "critical". + enum: + - debug + - info + - warning + - error + - critical + type: string + metrics: + description: Metrics exposes Prometheus-compatible + metrics for the Feast server when enabled. + type: boolean + nodeSelector: + additionalProperties: + type: string + type: object + resources: + description: ResourceRequirements describes the compute + resource requirements. + properties: + claims: + description: |- + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. + items: + description: ResourceClaim references one entry + in PodSpec.ResourceClaims. + properties: + name: + description: |- + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. + type: string + required: + - name + type: object + type: array + x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. 
+ type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum amount + of compute resources required. + type: object + type: object + restAPI: + description: Enable REST API registry server. + type: boolean + tls: + description: TlsConfigs configures server TLS for + a feast service. + properties: + disable: + description: will disable TLS for the feast service. + useful in an openshift cluster, for example, + where TLS is configured by default + type: boolean + secretKeyNames: + description: SecretKeyNames defines the secret + key names for the TLS key and cert. + properties: + tlsCrt: + description: defaults to "tls.crt" + type: string + tlsKey: + description: defaults to "tls.key" + type: string + type: object + secretRef: + description: references the local k8s secret where + the TLS key and cert reside + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: object + x-kubernetes-validations: + - message: '`secretRef` required if `disable` is false.' + rule: '(!has(self.disable) || !self.disable) ? has(self.secretRef) + : true' + volumeMounts: + description: VolumeMounts defines the list of volumes + that should be mounted into the feast container. + items: + description: VolumeMount describes a mounting of + a Volume within a container. + properties: + mountPath: + description: |- + Path within the container at which the volume should be mounted. Must + not contain ':'. + type: string + mountPropagation: + description: |- + mountPropagation determines how mounts are propagated from the host + to container and the other way around. 
+ type: string + name: + description: This must match the Name of a Volume. + type: string + readOnly: + description: |- + Mounted read-only if true, read-write otherwise (false or unspecified). + Defaults to false. + type: boolean + recursiveReadOnly: + description: |- + RecursiveReadOnly specifies whether read-only mounts should be handled + recursively. + type: string + subPath: + description: |- + Path within the volume from which the container's volume should be mounted. + Defaults to "" (volume's root). + type: string + subPathExpr: + description: Expanded path within the volume + from which the container's volume should be + mounted. + type: string + required: + - mountPath + - name + type: object + type: array + workerConfigs: + description: WorkerConfigs defines the worker configuration + for the Feast server. + properties: + keepAliveTimeout: + description: |- + KeepAliveTimeout is the timeout for keep-alive connections in seconds. + Defaults to 30. + format: int32 + minimum: 1 + type: integer + maxRequests: + description: |- + MaxRequests is the maximum number of requests a worker will process before restarting. + This helps prevent memory leaks. + format: int32 + minimum: 0 + type: integer + maxRequestsJitter: + description: |- + MaxRequestsJitter is the maximum jitter to add to max-requests to prevent + thundering herd effect on worker restart. + format: int32 + minimum: 0 + type: integer + registryTTLSeconds: + description: RegistryTTLSeconds is the number + of seconds after which the registry is refreshed. + format: int32 + minimum: 0 + type: integer + workerConnections: + description: |- + WorkerConnections is the maximum number of simultaneous clients per worker process. + Defaults to 1000. + format: int32 + minimum: 1 + type: integer + workers: + description: Workers is the number of worker processes. + Use -1 to auto-calculate based on CPU cores + (2 * CPU + 1). 
+ format: int32 + minimum: -1 + type: integer + type: object + type: object + x-kubernetes-validations: + - message: At least one of restAPI or grpc must be true + rule: self.restAPI == true || self.grpc == true || !has(self.grpc) + type: object + remote: + description: RemoteRegistryConfig points to a remote feast + registry server. + properties: + feastRef: + description: Reference to an existing `FeatureStore` CR + in the same k8s cluster. + properties: + name: + description: Name of the FeatureStore + type: string + namespace: + description: Namespace of the FeatureStore + type: string + required: + - name + type: object + hostname: + description: Host address of the remote registry service + - :, e.g. `registry..svc.cluster.local:80` + type: string + tls: + description: TlsRemoteRegistryConfigs configures client + TLS for a remote feast registry. + properties: + certName: + description: defines the configmap key name for the + client TLS cert. + type: string + configMapRef: + description: references the local k8s configmap where + the TLS cert resides + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + required: + - certName + - configMapRef + type: object + type: object + x-kubernetes-validations: + - message: One selection required. + rule: '[has(self.hostname), has(self.feastRef)].exists_one(c, + c)' + type: object + x-kubernetes-validations: + - message: One selection required. + rule: '[has(self.local), has(self.remote)].exists_one(c, c)' + scaling: + description: Scaling configures horizontal scaling for the FeatureStore + deployment (e.g. HPA autoscaling). + properties: + autoscaling: + description: |- + Autoscaling configures a HorizontalPodAutoscaler for the FeatureStore deployment. + Mutually exclusive with spec.replicas. 
+ properties: + behavior: + description: Behavior configures the scaling behavior + of the target. + properties: + scaleDown: + description: scaleDown is scaling policy for scaling + Down. + properties: + policies: + description: policies is a list of potential scaling + polices which can be used during scaling. + items: + description: HPAScalingPolicy is a single policy + which must hold true for a specified past + interval. + properties: + periodSeconds: + description: periodSeconds specifies the + window of time for which the policy should + hold true. + format: int32 + type: integer + type: + description: type is used to specify the + scaling policy. + type: string + value: + description: |- + value contains the amount of change which is permitted by the policy. + It must be greater than zero + format: int32 + type: integer + required: + - periodSeconds + - type + - value + type: object + type: array + x-kubernetes-list-type: atomic + selectPolicy: + description: |- + selectPolicy is used to specify which policy should be used. + If not set, the default value Max is used. + type: string + stabilizationWindowSeconds: + description: |- + stabilizationWindowSeconds is the number of seconds for which past recommendations should be + considered while scaling up + format: int32 + type: integer + type: object + scaleUp: + description: scaleUp is scaling policy for scaling + Up. + properties: + policies: + description: policies is a list of potential scaling + polices which can be used during scaling. + items: + description: HPAScalingPolicy is a single policy + which must hold true for a specified past + interval. + properties: + periodSeconds: + description: periodSeconds specifies the + window of time for which the policy should + hold true. + format: int32 + type: integer + type: + description: type is used to specify the + scaling policy. + type: string + value: + description: |- + value contains the amount of change which is permitted by the policy. 
+ It must be greater than zero + format: int32 + type: integer + required: + - periodSeconds + - type + - value + type: object + type: array + x-kubernetes-list-type: atomic + selectPolicy: + description: |- + selectPolicy is used to specify which policy should be used. + If not set, the default value Max is used. + type: string + stabilizationWindowSeconds: + description: |- + stabilizationWindowSeconds is the number of seconds for which past recommendations should be + considered while scaling up + format: int32 + type: integer + type: object + type: object + maxReplicas: + description: MaxReplicas is the upper limit for the number + of replicas. Required. + format: int32 + minimum: 1 + type: integer + metrics: + description: Metrics contains the specifications for which + to use to calculate the desired replica count. + items: + description: |- + MetricSpec specifies how to scale based on a single metric + (only `type` and one other matching field should be set at on + properties: + containerResource: + description: |- + containerResource refers to a resource metric (such as those specified in + requests and limits) known to Kubernetes descr + properties: + container: + description: container is the name of the container + in the pods of the scaling target + type: string + name: + description: name is the name of the resource + in question. 
+ type: string + target: + description: target specifies the target value + for the given metric + properties: + averageUtilization: + description: "averageUtilization is the + target value of the average of the\nresource + metric across all relevant pods, represented + as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether the + metric type is Utilization, Value, or + AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value of + the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - container + - name + - target + type: object + external: + description: |- + external refers to a global metric that is not associated + with any Kubernetes object. + properties: + metric: + description: metric identifies the target metric + by name and selector + properties: + name: + description: name is the name of the given + metric + type: string + selector: + description: "selector is the string-encoded + form of a standard kubernetes label selector + for the given metric\nWhen set, it is + passed " + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. 
+ properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + required: + - name + type: object + target: + description: target specifies the target value + for the given metric + properties: + averageUtilization: + description: "averageUtilization is the + target value of the average of the\nresource + metric across all relevant pods, represented + as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether the + metric type is Utilization, Value, or + AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value of + the metric (as a quantity). 
+ pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - metric + - target + type: object + object: + description: |- + object refers to a metric describing a single kubernetes object + (for example, hits-per-second on an Ingress object). + properties: + describedObject: + description: describedObject specifies the descriptions + of a object,such as kind,name apiVersion + properties: + apiVersion: + description: apiVersion is the API version + of the referent + type: string + kind: + description: 'kind is the kind of the referent; + More info: https://git.k8s.' + type: string + name: + description: 'name is the name of the referent; + More info: https://kubernetes.' + type: string + required: + - kind + - name + type: object + metric: + description: metric identifies the target metric + by name and selector + properties: + name: + description: name is the name of the given + metric + type: string + selector: + description: "selector is the string-encoded + form of a standard kubernetes label selector + for the given metric\nWhen set, it is + passed " + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + required: + - name + type: object + target: + description: target specifies the target value + for the given metric + properties: + averageUtilization: + description: "averageUtilization is the + target value of the average of the\nresource + metric across all relevant pods, represented + as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether the + metric type is Utilization, Value, or + AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value of + the metric (as a quantity). 
+ pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - describedObject + - metric + - target + type: object + pods: + description: |- + pods refers to a metric describing each pod in the current scale target + (for example, transactions-processed-per-second) + properties: + metric: + description: metric identifies the target metric + by name and selector + properties: + name: + description: name is the name of the given + metric + type: string + selector: + description: "selector is the string-encoded + form of a standard kubernetes label selector + for the given metric\nWhen set, it is + passed " + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. 
+ type: object + type: object + x-kubernetes-map-type: atomic + required: + - name + type: object + target: + description: target specifies the target value + for the given metric + properties: + averageUtilization: + description: "averageUtilization is the + target value of the average of the\nresource + metric across all relevant pods, represented + as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether the + metric type is Utilization, Value, or + AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value of + the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - metric + - target + type: object + resource: + description: |- + resource refers to a resource metric (such as those specified in + requests and limits) known to Kubernetes describing eac + properties: + name: + description: name is the name of the resource + in question. 
+ type: string + target: + description: target specifies the target value + for the given metric + properties: + averageUtilization: + description: "averageUtilization is the + target value of the average of the\nresource + metric across all relevant pods, represented + as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether the + metric type is Utilization, Value, or + AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value of + the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - name + - target + type: object + type: + description: type is the type of metric source. + type: string + required: + - type + type: object + type: array + minReplicas: + description: MinReplicas is the lower limit for the number + of replicas. Defaults to 1. + format: int32 + minimum: 1 + type: integer + required: + - maxReplicas + type: object + type: object + securityContext: + description: PodSecurityContext holds pod-level security attributes + and common container settings. + properties: + appArmorProfile: + description: appArmorProfile is the AppArmor options to use + by the containers in this pod. + properties: + localhostProfile: + description: localhostProfile indicates a profile loaded + on the node that should be used. + type: string + type: + description: type indicates which kind of AppArmor profile + will be applied. 
+ type: string + required: + - type + type: object + fsGroup: + description: A special supplemental group that applies to + all containers in a pod. + format: int64 + type: integer + fsGroupChangePolicy: + description: |- + fsGroupChangePolicy defines behavior of changing ownership and permission of the volume + before being exposed inside Pod. + type: string + runAsGroup: + description: |- + The GID to run the entrypoint of the container process. + Uses runtime default if unset. + format: int64 + type: integer + runAsNonRoot: + description: Indicates that the container must run as a non-root + user. + type: boolean + runAsUser: + description: |- + The UID to run the entrypoint of the container process. + Defaults to user specified in image metadata if unspecified. + format: int64 + type: integer + seLinuxOptions: + description: The SELinux context to be applied to all containers. + properties: + level: + description: Level is SELinux level label that applies + to the container. + type: string + role: + description: Role is a SELinux role label that applies + to the container. + type: string + type: + description: Type is a SELinux type label that applies + to the container. + type: string + user: + description: User is a SELinux user label that applies + to the container. + type: string + type: object + seccompProfile: + description: |- + The seccomp options to use by the containers in this pod. + Note that this field cannot be set when spec.os. + properties: + localhostProfile: + description: localhostProfile indicates a profile defined + in a file on the node should be used. + type: string + type: + description: type indicates which kind of seccomp profile + will be applied. 
+ type: string + required: + - type + type: object + supplementalGroups: + description: |- + A list of groups applied to the first process run in each container, in addition + to the container's primary GID, the fsG + items: + format: int64 + type: integer + type: array + x-kubernetes-list-type: atomic + sysctls: + description: Sysctls hold a list of namespaced sysctls used + for the pod. + items: + description: Sysctl defines a kernel parameter to be set + properties: + name: + description: Name of a property to set + type: string + value: + description: Value of a property to set + type: string + required: + - name + - value + type: object + type: array + x-kubernetes-list-type: atomic + windowsOptions: + description: The Windows specific settings applied to all + containers. + properties: + gmsaCredentialSpec: description: |- GMSACredentialSpec is where the GMSA admission webhook (https://github. @@ -2484,6 +3831,96 @@ spec: type: string type: object type: object + topologySpreadConstraints: + description: TopologySpreadConstraints defines how pods are spread + across topology domains. + items: + description: TopologySpreadConstraint specifies how to spread + matching pods among the given topology. + properties: + labelSelector: + description: LabelSelector is used to find matching pods. + properties: + matchExpressions: + description: matchExpressions is a list of label selector + requirements. The requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that the selector + applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. 
If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select the pods over which + spreading will be calculated. + items: + type: string + type: array + x-kubernetes-list-type: atomic + maxSkew: + description: MaxSkew describes the degree to which pods + may be unevenly distributed. + format: int32 + type: integer + minDomains: + description: MinDomains indicates a minimum number of eligible + domains. + format: int32 + type: integer + nodeAffinityPolicy: + description: |- + NodeAffinityPolicy indicates how we will treat Pod's nodeAffinity/nodeSelector + when calculating pod topology spread skew + type: string + nodeTaintsPolicy: + description: |- + NodeTaintsPolicy indicates how we will treat node taints when calculating + pod topology spread skew. + type: string + topologyKey: + description: TopologyKey is the key of node labels. + type: string + whenUnsatisfiable: + description: |- + WhenUnsatisfiable indicates how to deal with a pod if it doesn't satisfy + the spread constraint. + type: string + required: + - maxSkew + - topologyKey + - whenUnsatisfiable + type: object + type: array ui: description: Creates a UI server container properties: @@ -4257,7 +5694,37 @@ spec: type: object required: - feastProject + - replicas type: object + x-kubernetes-validations: + - message: replicas > 1 and services.scaling.autoscaling are mutually + exclusive. 
+ rule: self.replicas <= 1 || !has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling) + - message: Scaling requires DB-backed persistence for the online store. + Configure services.onlineStore.persistence.store when using replicas + > 1 or autoscaling. + rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (has(self.services) + && has(self.services.onlineStore) && has(self.services.onlineStore.persistence) + && has(self.services.onlineStore.persistence.store)) + - message: Scaling requires DB-backed persistence for the offline store. + Configure services.offlineStore.persistence.store when using replicas + > 1 or autoscaling. + rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (!has(self.services) + || !has(self.services.offlineStore) || (has(self.services.offlineStore.persistence) + && has(self.services.offlineStore.persistence.store))) + - message: Scaling requires DB-backed or remote registry. Configure registry.local.persistence.store + or use a remote registry when using replicas > 1 or autoscaling. S3/GCS-backed + registry is also allowed. 
+ rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (has(self.services) + && has(self.services.registry) && (has(self.services.registry.remote) + || (has(self.services.registry.local) && has(self.services.registry.local.persistence) + && (has(self.services.registry.local.persistence.store) || (has(self.services.registry.local.persistence.file) + && has(self.services.registry.local.persistence.file.path) && (self.services.registry.local.persistence.file.path.startsWith('s3://') + || self.services.registry.local.persistence.file.path.startsWith('gs://'))))))) status: description: FeatureStoreStatus defines the observed state of FeatureStore properties: @@ -4280,79 +5747,474 @@ spec: items: type: string type: array - type: object - oidc: - description: |- - OidcAuthz defines the authorization settings for deployments using an Open ID Connect identity provider. - https://auth0. - properties: - secretRef: - description: |- - LocalObjectReference contains enough information to let you locate the - referenced object inside the same namespace. + type: object + oidc: + description: |- + OidcAuthz defines the authorization settings for deployments using an Open ID Connect identity provider. + https://auth0. + properties: + secretRef: + description: |- + LocalObjectReference contains enough information to let you locate the + referenced object inside the same namespace. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + required: + - secretRef + type: object + type: object + x-kubernetes-validations: + - message: One selection required between kubernetes or oidc. 
+ rule: '[has(self.kubernetes), has(self.oidc)].exists_one(c, + c)' + batchEngine: + description: BatchEngineConfig defines the batch compute engine + configuration. + properties: + configMapKey: + description: Key name in the ConfigMap. Defaults to "config" + if not specified. + type: string + configMapRef: + description: Reference to a ConfigMap containing the batch + engine configuration. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: object + cronJob: + description: FeastCronJob defines a CronJob to execute against + a Feature Store deployment. + properties: + annotations: + additionalProperties: + type: string + description: Annotations to be added to the CronJob metadata. + type: object + concurrencyPolicy: + description: Specifies how to treat concurrent executions + of a Job. + type: string + containerConfigs: + description: CronJobContainerConfigs k8s container settings + for the CronJob + properties: + commands: + description: Array of commands to be executed (in order) + against a Feature Store deployment. + items: + type: string + type: array + env: + items: + description: EnvVar represents an environment variable + present in a Container. + properties: + name: + description: Name of the environment variable. Must + be a C_IDENTIFIER. + type: string + value: + description: |- + Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any + type: string + valueFrom: + description: Source for the environment variable's + value. Cannot be used if value is not empty. + properties: + configMapKeyRef: + description: Selects a key of a ConfigMap. + properties: + key: + description: The key to select. + type: string + name: + default: "" + description: |- + Name of the referent. 
+ This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + fieldRef: + description: 'Selects a field of the pod: supports + metadata.name, metadata.namespace, `metadata.labels['''']`, + `metadata.' + properties: + apiVersion: + description: Version of the schema the FieldPath + is written in terms of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to select + in the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits. + properties: + containerName: + description: 'Container name: required for + volumes, optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format + of the exposed resources, defaults to + "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret in the + pod's namespace + properties: + key: + description: The key of the secret to select + from. Must be a valid secret key. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. 
+ type: string + optional: + description: Specify whether the Secret + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + type: object + required: + - name + type: object + type: array + envFrom: + items: + description: EnvFromSource represents the source of + a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend to + each key in the ConfigMap. Must be a C_IDENTIFIER. + type: string + secretRef: + description: The Secret to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + type: object + type: array + image: + type: string + imagePullPolicy: + description: PullPolicy describes a policy for if/when + to pull a container image + type: string + nodeSelector: + additionalProperties: + type: string + type: object + resources: + description: ResourceRequirements describes the compute + resource requirements. properties: - name: - default: "" + claims: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. + items: + description: ResourceClaim references one entry + in PodSpec.ResourceClaims. 
+ properties: + name: + description: |- + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. + type: string + required: + - name + type: object + type: array + x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum amount + of compute resources required. + type: object type: object - x-kubernetes-map-type: atomic - required: - - secretRef type: object - type: object - x-kubernetes-validations: - - message: One selection required between kubernetes or oidc. - rule: '[has(self.kubernetes), has(self.oidc)].exists_one(c, - c)' - batchEngine: - description: BatchEngineConfig defines the batch compute engine - configuration. - properties: - configMapKey: - description: Key name in the ConfigMap. Defaults to "config" - if not specified. - type: string - configMapRef: - description: Reference to a ConfigMap containing the batch - engine configuration. + failedJobsHistoryLimit: + description: The number of failed finished jobs to retain. + Value must be non-negative integer. + format: int32 + type: integer + jobSpec: + description: Specification of the desired behavior of a job. properties: - name: - default: "" + activeDeadlineSeconds: description: |- - Name of the referent. 
- This field is effectively required, but due to backwards compatibility is - allowed to be empty. + Specifies the duration in seconds relative to the startTime that the job + may be continuously active before the system tr + format: int64 + type: integer + backoffLimit: + description: Specifies the number of retries before marking + this job failed. + format: int32 + type: integer + backoffLimitPerIndex: + description: |- + Specifies the limit for the number of retries within an + index before marking this index as failed. + format: int32 + type: integer + completionMode: + description: |- + completionMode specifies how Pod completions are tracked. It can be + `NonIndexed` (default) or `Indexed`. + type: string + completions: + description: |- + Specifies the desired number of successfully finished pods the + job should be run with. + format: int32 + type: integer + maxFailedIndexes: + description: |- + Specifies the maximal number of failed indexes before marking the Job as + failed, when backoffLimitPerIndex is set. + format: int32 + type: integer + parallelism: + description: |- + Specifies the maximum desired number of pods the job should + run at any given time. + format: int32 + type: integer + podFailurePolicy: + description: Specifies the policy of handling failed pods. + properties: + rules: + description: A list of pod failure policy rules. The + rules are evaluated in order. + items: + description: PodFailurePolicyRule describes how + a pod failure is handled when the requirements + are met. + properties: + action: + description: Specifies the action taken on a + pod failure when the requirements are satisfied. + type: string + onExitCodes: + description: Represents the requirement on the + container exit codes. + properties: + containerName: + description: |- + Restricts the check for exit codes to the container with the + specified name. 
+ type: string + operator: + description: |- + Represents the relationship between the container exit code(s) and the + specified values. + type: string + values: + description: Specifies the set of values. + items: + format: int32 + type: integer + type: array + x-kubernetes-list-type: set + required: + - operator + - values + type: object + onPodConditions: + description: |- + Represents the requirement on the pod conditions. The requirement is represented + as a list of pod condition patterns. + items: + description: |- + PodFailurePolicyOnPodConditionsPattern describes a pattern for matching + an actual pod condition type. + properties: + status: + description: Specifies the required Pod + condition status. + type: string + type: + description: Specifies the required Pod + condition type. + type: string + required: + - status + - type + type: object + type: array + x-kubernetes-list-type: atomic + required: + - action + type: object + type: array + x-kubernetes-list-type: atomic + required: + - rules + type: object + podReplacementPolicy: + description: podReplacementPolicy specifies when to create + replacement Pods. type: string + podTemplateAnnotations: + additionalProperties: + type: string + description: |- + PodTemplateAnnotations are annotations to be applied to the CronJob's PodTemplate + metadata. + type: object + suspend: + description: suspend specifies whether the Job controller + should create Pods or not. + type: boolean + ttlSecondsAfterFinished: + description: |- + ttlSecondsAfterFinished limits the lifetime of a Job that has finished + execution (either Complete or Failed). + format: int32 + type: integer type: object - x-kubernetes-map-type: atomic + schedule: + description: The schedule in Cron format, see https://en.wikipedia.org/wiki/Cron. + type: string + startingDeadlineSeconds: + description: |- + Optional deadline in seconds for starting the job if it misses scheduled + time for any reason. 
+ format: int64 + type: integer + successfulJobsHistoryLimit: + description: The number of successful finished jobs to retain. + Value must be non-negative integer. + format: int32 + type: integer + suspend: + description: |- + This flag tells the controller to suspend subsequent executions, it does + not apply to already started executions. + type: boolean + timeZone: + description: The time zone name for the given schedule, see + https://en.wikipedia.org/wiki/List_of_tz_database_time_zones. + type: string type: object - cronJob: - description: FeastCronJob defines a CronJob to execute against - a Feature Store deployment. + feastProject: + description: FeastProject is the Feast project id. + pattern: ^[A-Za-z0-9][A-Za-z0-9_-]*$ + type: string + feastProjectDir: + description: FeastProjectDir defines how to create the feast project + directory. properties: - annotations: - additionalProperties: - type: string - description: Annotations to be added to the CronJob metadata. - type: object - concurrencyPolicy: - description: Specifies how to treat concurrent executions - of a Job. - type: string - containerConfigs: - description: CronJobContainerConfigs k8s container settings - for the CronJob + git: + description: GitCloneOptions describes how a clone should + be performed. properties: - commands: - description: Array of commands to be executed (in order) - against a Feature Store deployment. - items: + configs: + additionalProperties: type: string - type: array + description: |- + Configs passed to git via `-c` + e.g. http.sslVerify: 'false' + OR 'url."https://api:\${TOKEN}@github.com/". 
+ type: object env: items: description: EnvVar represents an environment variable @@ -4435,520 +6297,884 @@ spec: - resource type: object x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret in the - pod's namespace + secretKeyRef: + description: Selects a key of a secret in the + pod's namespace + properties: + key: + description: The key of the secret to select + from. Must be a valid secret key. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + type: object + required: + - name + type: object + type: array + envFrom: + items: + description: EnvFromSource represents the source of + a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend to + each key in the ConfigMap. Must be a C_IDENTIFIER. + type: string + secretRef: + description: The Secret to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. 
+ type: string + optional: + description: Specify whether the Secret must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + type: object + type: array + featureRepoPath: + description: FeatureRepoPath is the relative path to the + feature repo subdirectory. Default is 'feature_repo'. + type: string + ref: + description: Reference to a branch / tag / commit + type: string + url: + description: The repository URL to clone from. + type: string + required: + - url + type: object + x-kubernetes-validations: + - message: RepoPath must be a file name only, with no slashes. + rule: 'has(self.featureRepoPath) ? !self.featureRepoPath.startsWith(''/'') + : true' + init: + description: FeastInitOptions defines how to run a `feast + init`. + properties: + minimal: + type: boolean + template: + description: Template for the created project + enum: + - local + - gcp + - aws + - snowflake + - spark + - postgres + - hbase + - cassandra + - hazelcast + - couchbase + - clickhouse + type: string + type: object + type: object + x-kubernetes-validations: + - message: One selection required between init or git. + rule: '[has(self.git), has(self.init)].exists_one(c, c)' + replicas: + default: 1 + description: |- + Replicas is the desired number of pod replicas. Used by the scale sub-resource. + Mutually exclusive with services. + format: int32 + minimum: 1 + type: integer + services: + description: FeatureStoreServices defines the desired feast services. + An ephemeral onlineStore feature server is deployed by default. + properties: + affinity: + description: Affinity defines the pod scheduling constraints + for the FeatureStore deployment. + properties: + nodeAffinity: + description: Describes node affinity scheduling rules + for the pod. 
+ properties: + preferredDuringSchedulingIgnoredDuringExecution: + description: |- + The scheduler will prefer to schedule pods to nodes that satisfy + the affinity expressions specified by this field, but i + items: + description: |- + An empty preferred scheduling term matches all objects with implicit weight 0 + (i.e. it's a no-op). + properties: + preference: + description: A node selector term, associated + with the corresponding weight. + properties: + matchExpressions: + description: A list of node selector requirements + by node's labels. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the + selector applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchFields: + description: A list of node selector requirements + by node's fields. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the + selector applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + type: object + x-kubernetes-map-type: atomic + weight: + description: Weight associated with matching + the corresponding nodeSelectorTerm, in the + range 1-100. + format: int32 + type: integer + required: + - preference + - weight + type: object + type: array + x-kubernetes-list-type: atomic + requiredDuringSchedulingIgnoredDuringExecution: + description: "If the affinity requirements specified + by this field are not met at\nscheduling time, the + pod will not be scheduled onto " + properties: + nodeSelectorTerms: + description: Required. A list of node selector + terms. The terms are ORed. + items: + description: |- + A null or empty node selector term matches no objects. The requirements of + them are ANDed. + properties: + matchExpressions: + description: A list of node selector requirements + by node's labels. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the + selector applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchFields: + description: A list of node selector requirements + by node's fields. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. 
+ properties: + key: + description: The label key that the + selector applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + type: object + x-kubernetes-map-type: atomic + type: array + x-kubernetes-list-type: atomic + required: + - nodeSelectorTerms + type: object + x-kubernetes-map-type: atomic + type: object + podAffinity: + description: Describes pod affinity scheduling rules (e.g. + co-locate this pod in the same node, zone, etc. as some + other pod(s)). + properties: + preferredDuringSchedulingIgnoredDuringExecution: + description: |- + The scheduler will prefer to schedule pods to nodes that satisfy + the affinity expressions specified by this field, but i + items: + description: The weights of all of the matched WeightedPodAffinityTerm + fields are added per-node to find the most preferred + node(s) + properties: + podAffinityTerm: + description: Required. A pod affinity term, + associated with the corresponding weight. properties: - key: - description: The key of the secret to select - from. Must be a valid secret key. - type: string - name: - default: "" + labelSelector: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. 
+ items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set + of namespaces that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. 
+ type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static + list of namespace names that the term + applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t type: string - optional: - description: Specify whether the Secret - or its key must be defined - type: boolean required: - - key + - topologyKey type: object - x-kubernetes-map-type: atomic - type: object - required: - - name - type: object - type: array - envFrom: - items: - description: EnvFromSource represents the source of - a set of ConfigMaps - properties: - configMapRef: - description: The ConfigMap to select from - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap must - be defined - type: boolean - type: object - x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend to - each key in the ConfigMap. Must be a C_IDENTIFIER. - type: string - secretRef: - description: The Secret to select from - properties: - name: - default: "" + weight: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
- type: string - optional: - description: Specify whether the Secret must - be defined - type: boolean + weight associated with matching the corresponding podAffinityTerm, + in the range 1-100. + format: int32 + type: integer + required: + - podAffinityTerm + - weight type: object - x-kubernetes-map-type: atomic - type: object - type: array - image: - type: string - imagePullPolicy: - description: PullPolicy describes a policy for if/when - to pull a container image - type: string - nodeSelector: - additionalProperties: - type: string - type: object - resources: - description: ResourceRequirements describes the compute - resource requirements. - properties: - claims: - description: |- - Claims lists the names of resources, defined in spec.resourceClaims, - that are used by this container. + type: array + x-kubernetes-list-type: atomic + requiredDuringSchedulingIgnoredDuringExecution: + description: "If the affinity requirements specified + by this field are not met at\nscheduling time, the + pod will not be scheduled onto " items: - description: ResourceClaim references one entry - in PodSpec.ResourceClaims. + description: "Defines a set of pods (namely those + matching the labelSelector\nrelative to the given + namespace(s)) that this pod should " properties: - name: + labelSelector: + description: |- + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. 
+ type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: description: |- - Name must match the name of one entry in pod.spec.resourceClaims of - the Pod where this field is used. + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t type: string required: - - name + - topologyKey type: object type: array - x-kubernetes-list-map-keys: - - name - x-kubernetes-list-type: map - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum amount - of compute resources required. - type: object + x-kubernetes-list-type: atomic type: object - type: object - failedJobsHistoryLimit: - description: The number of failed finished jobs to retain. - Value must be non-negative integer. - format: int32 - type: integer - jobSpec: - description: Specification of the desired behavior of a job. 
- properties: - activeDeadlineSeconds: - description: |- - Specifies the duration in seconds relative to the startTime that the job - may be continuously active before the system tr - format: int64 - type: integer - backoffLimit: - description: Specifies the number of retries before marking - this job failed. - format: int32 - type: integer - backoffLimitPerIndex: - description: |- - Specifies the limit for the number of retries within an - index before marking this index as failed. - format: int32 - type: integer - completionMode: - description: |- - completionMode specifies how Pod completions are tracked. It can be - `NonIndexed` (default) or `Indexed`. - type: string - completions: - description: |- - Specifies the desired number of successfully finished pods the - job should be run with. - format: int32 - type: integer - maxFailedIndexes: - description: |- - Specifies the maximal number of failed indexes before marking the Job as - failed, when backoffLimitPerIndex is set. - format: int32 - type: integer - parallelism: - description: |- - Specifies the maximum desired number of pods the job should - run at any given time. - format: int32 - type: integer - podFailurePolicy: - description: Specifies the policy of handling failed pods. + podAntiAffinity: + description: Describes pod anti-affinity scheduling rules + (e.g. avoid putting this pod in the same node, zone, + etc. properties: - rules: - description: A list of pod failure policy rules. The - rules are evaluated in order. + preferredDuringSchedulingIgnoredDuringExecution: + description: "The scheduler will prefer to schedule + pods to nodes that satisfy\nthe anti-affinity expressions + specified by this field, " items: - description: PodFailurePolicyRule describes how - a pod failure is handled when the requirements - are met. 
+ description: The weights of all of the matched WeightedPodAffinityTerm + fields are added per-node to find the most preferred + node(s) properties: - action: - description: Specifies the action taken on a - pod failure when the requirements are satisfied. - type: string - onExitCodes: - description: Represents the requirement on the - container exit codes. + podAffinityTerm: + description: Required. A pod affinity term, + associated with the corresponding weight. properties: - containerName: + labelSelector: description: |- - Restricts the check for exit codes to the container with the - specified name. - type: string - operator: + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: description: |- - Represents the relationship between the container exit code(s) and the - specified values. - type: string - values: - description: Specifies the set of values. 
+ MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set + of namespaces that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static + list of namespace names that the term + applies to. 
items: - format: int32 - type: integer + type: string type: array - x-kubernetes-list-type: set + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t + type: string required: - - operator - - values + - topologyKey type: object - onPodConditions: + weight: description: |- - Represents the requirement on the pod conditions. The requirement is represented - as a list of pod condition patterns. - items: - description: |- - PodFailurePolicyOnPodConditionsPattern describes a pattern for matching - an actual pod condition type. - properties: - status: - description: Specifies the required Pod - condition status. - type: string - type: - description: Specifies the required Pod - condition type. - type: string - required: - - status - - type - type: object - type: array - x-kubernetes-list-type: atomic + weight associated with matching the corresponding podAffinityTerm, + in the range 1-100. + format: int32 + type: integer required: - - action + - podAffinityTerm + - weight type: object type: array x-kubernetes-list-type: atomic - required: - - rules - type: object - podReplacementPolicy: - description: podReplacementPolicy specifies when to create - replacement Pods. - type: string - podTemplateAnnotations: - additionalProperties: - type: string - description: |- - PodTemplateAnnotations are annotations to be applied to the CronJob's PodTemplate - metadata. - type: object - suspend: - description: suspend specifies whether the Job controller - should create Pods or not. - type: boolean - ttlSecondsAfterFinished: - description: |- - ttlSecondsAfterFinished limits the lifetime of a Job that has finished - execution (either Complete or Failed). - format: int32 - type: integer - type: object - schedule: - description: The schedule in Cron format, see https://en.wikipedia.org/wiki/Cron. 
- type: string - startingDeadlineSeconds: - description: |- - Optional deadline in seconds for starting the job if it misses scheduled - time for any reason. - format: int64 - type: integer - successfulJobsHistoryLimit: - description: The number of successful finished jobs to retain. - Value must be non-negative integer. - format: int32 - type: integer - suspend: - description: |- - This flag tells the controller to suspend subsequent executions, it does - not apply to already started executions. - type: boolean - timeZone: - description: The time zone name for the given schedule, see - https://en.wikipedia.org/wiki/List_of_tz_database_time_zones. - type: string - type: object - feastProject: - description: FeastProject is the Feast project id. - pattern: ^[A-Za-z0-9][A-Za-z0-9_-]*$ - type: string - feastProjectDir: - description: FeastProjectDir defines how to create the feast project - directory. - properties: - git: - description: GitCloneOptions describes how a clone should - be performed. - properties: - configs: - additionalProperties: - type: string - description: |- - Configs passed to git via `-c` - e.g. http.sslVerify: 'false' - OR 'url."https://api:\${TOKEN}@github.com/". - type: object - env: - items: - description: EnvVar represents an environment variable - present in a Container. - properties: - name: - description: Name of the environment variable. Must - be a C_IDENTIFIER. - type: string - value: - description: |- - Variable references $(VAR_NAME) are expanded - using the previously defined environment variables in the container and - any - type: string - valueFrom: - description: Source for the environment variable's - value. Cannot be used if value is not empty. 
+ requiredDuringSchedulingIgnoredDuringExecution: + description: "If the anti-affinity requirements specified + by this field are not met at\nscheduling time, the + pod will not be scheduled " + items: + description: "Defines a set of pods (namely those + matching the labelSelector\nrelative to the given + namespace(s)) that this pod should " properties: - configMapKeyRef: - description: Selects a key of a ConfigMap. - properties: - key: - description: The key to select. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap - or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - fieldRef: - description: 'Selects a field of the pod: supports - metadata.name, metadata.namespace, `metadata.labels['''']`, - `metadata.' - properties: - apiVersion: - description: Version of the schema the FieldPath - is written in terms of, defaults to "v1". - type: string - fieldPath: - description: Path of the field to select - in the specified API version. - type: string - required: - - fieldPath - type: object - x-kubernetes-map-type: atomic - resourceFieldRef: + labelSelector: description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, limits. + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. 
properties: - containerName: - description: 'Container name: required for - volumes, optional for env vars' - type: string - divisor: - anyOf: - - type: integer - - type: string - description: Specifies the output format - of the exposed resources, defaults to - "1" - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - description: 'Required: resource to select' - type: string - required: - - resource + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object type: object x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret in the - pod's namespace - properties: - key: - description: The key of the secret to select - from. Must be a valid secret key. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
- type: string - optional: - description: Specify whether the Secret - or its key must be defined - type: boolean - required: - - key + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. 
+ type: object type: object x-kubernetes-map-type: atomic - type: object - required: - - name - type: object - type: array - envFrom: - items: - description: EnvFromSource represents the source of - a set of ConfigMaps - properties: - configMapRef: - description: The ConfigMap to select from - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap must - be defined - type: boolean - type: object - x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend to - each key in the ConfigMap. Must be a C_IDENTIFIER. - type: string - secretRef: - description: The Secret to select from - properties: - name: - default: "" + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t type: string - optional: - description: Specify whether the Secret must - be defined - type: boolean + required: + - topologyKey type: object - x-kubernetes-map-type: atomic - type: object - type: array - featureRepoPath: - description: FeatureRepoPath is the relative path to the - feature repo subdirectory. Default is 'feature_repo'. - type: string - ref: - description: Reference to a branch / tag / commit - type: string - url: - description: The repository URL to clone from. - type: string - required: - - url - type: object - x-kubernetes-validations: - - message: RepoPath must be a file name only, with no slashes. - rule: 'has(self.featureRepoPath) ? 
!self.featureRepoPath.startsWith(''/'') - : true' - init: - description: FeastInitOptions defines how to run a `feast - init`. - properties: - minimal: - type: boolean - template: - description: Template for the created project - enum: - - local - - gcp - - aws - - snowflake - - spark - - postgres - - hbase - - cassandra - - hazelcast - - ikv - - couchbase - - clickhouse - type: string + type: array + x-kubernetes-list-type: atomic + type: object type: object - type: object - x-kubernetes-validations: - - message: One selection required between init or git. - rule: '[has(self.git), has(self.init)].exists_one(c, c)' - services: - description: FeatureStoreServices defines the desired feast services. - An ephemeral onlineStore feature server is deployed by default. - properties: deploymentStrategy: description: DeploymentStrategy describes how to replace existing pods with new ones. @@ -5618,7 +7844,6 @@ spec: enum: - snowflake.online - redis - - ikv - datastore - dynamodb - bigtable @@ -5633,6 +7858,7 @@ spec: - couchbase.online - milvus - hybrid + - mongodb type: string required: - secretRef @@ -5759,866 +7985,1465 @@ spec: - name type: object type: array - envFrom: - items: - description: EnvFromSource represents the source - of a set of ConfigMaps - properties: - configMapRef: - description: The ConfigMap to select from - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap - must be defined - type: boolean - type: object - x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend - to each key in the ConfigMap. Must be a C_IDENTIFIER. - type: string - secretRef: - description: The Secret to select from - properties: - name: - default: "" - description: |- - Name of the referent. 
- This field is effectively required, but due to backwards compatibility is - allowed to be empty. + envFrom: + items: + description: EnvFromSource represents the source + of a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap + must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend + to each key in the ConfigMap. Must be a C_IDENTIFIER. + type: string + secretRef: + description: The Secret to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret + must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + type: object + type: array + image: + type: string + imagePullPolicy: + description: PullPolicy describes a policy for if/when + to pull a container image + type: string + logLevel: + description: |- + LogLevel sets the logging level for the server + Allowed values: "debug", "info", "warning", "error", "critical". + enum: + - debug + - info + - warning + - error + - critical + type: string + metrics: + description: Metrics exposes Prometheus-compatible + metrics for the Feast server when enabled. + type: boolean + nodeSelector: + additionalProperties: + type: string + type: object + resources: + description: ResourceRequirements describes the compute + resource requirements. + properties: + claims: + description: |- + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. 
+ items: + description: ResourceClaim references one entry + in PodSpec.ResourceClaims. + properties: + name: + description: |- + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. + type: string + required: + - name + type: object + type: array + x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum amount + of compute resources required. + type: object + type: object + tls: + description: TlsConfigs configures server TLS for + a feast service. + properties: + disable: + description: will disable TLS for the feast service. + useful in an openshift cluster, for example, + where TLS is configured by default + type: boolean + secretKeyNames: + description: SecretKeyNames defines the secret + key names for the TLS key and cert. + properties: + tlsCrt: + description: defaults to "tls.crt" + type: string + tlsKey: + description: defaults to "tls.key" + type: string + type: object + secretRef: + description: references the local k8s secret where + the TLS key and cert reside + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. 
+ type: string + type: object + x-kubernetes-map-type: atomic + type: object + x-kubernetes-validations: + - message: '`secretRef` required if `disable` is false.' + rule: '(!has(self.disable) || !self.disable) ? has(self.secretRef) + : true' + volumeMounts: + description: VolumeMounts defines the list of volumes + that should be mounted into the feast container. + items: + description: VolumeMount describes a mounting of + a Volume within a container. + properties: + mountPath: + description: |- + Path within the container at which the volume should be mounted. Must + not contain ':'. + type: string + mountPropagation: + description: |- + mountPropagation determines how mounts are propagated from the host + to container and the other way around. + type: string + name: + description: This must match the Name of a Volume. + type: string + readOnly: + description: |- + Mounted read-only if true, read-write otherwise (false or unspecified). + Defaults to false. + type: boolean + recursiveReadOnly: + description: |- + RecursiveReadOnly specifies whether read-only mounts should be handled + recursively. + type: string + subPath: + description: |- + Path within the volume from which the container's volume should be mounted. + Defaults to "" (volume's root). + type: string + subPathExpr: + description: Expanded path within the volume + from which the container's volume should be + mounted. + type: string + required: + - mountPath + - name + type: object + type: array + workerConfigs: + description: WorkerConfigs defines the worker configuration + for the Feast server. + properties: + keepAliveTimeout: + description: |- + KeepAliveTimeout is the timeout for keep-alive connections in seconds. + Defaults to 30. + format: int32 + minimum: 1 + type: integer + maxRequests: + description: |- + MaxRequests is the maximum number of requests a worker will process before restarting. + This helps prevent memory leaks. 
+ format: int32 + minimum: 0 + type: integer + maxRequestsJitter: + description: |- + MaxRequestsJitter is the maximum jitter to add to max-requests to prevent + thundering herd effect on worker restart. + format: int32 + minimum: 0 + type: integer + registryTTLSeconds: + description: RegistryTTLSeconds is the number + of seconds after which the registry is refreshed. + format: int32 + minimum: 0 + type: integer + workerConnections: + description: |- + WorkerConnections is the maximum number of simultaneous clients per worker process. + Defaults to 1000. + format: int32 + minimum: 1 + type: integer + workers: + description: Workers is the number of worker processes. + Use -1 to auto-calculate based on CPU cores + (2 * CPU + 1). + format: int32 + minimum: -1 + type: integer + type: object + type: object + type: object + podDisruptionBudgets: + description: PodDisruptionBudgets configures a PodDisruptionBudget + for the FeatureStore deployment. + properties: + maxUnavailable: + anyOf: + - type: integer + - type: string + description: MaxUnavailable specifies the maximum number/percentage + of pods that can be unavailable. + x-kubernetes-int-or-string: true + minAvailable: + anyOf: + - type: integer + - type: string + description: MinAvailable specifies the minimum number/percentage + of pods that must remain available. + x-kubernetes-int-or-string: true + type: object + x-kubernetes-validations: + - message: Exactly one of minAvailable or maxUnavailable must + be set. + rule: '[has(self.minAvailable), has(self.maxUnavailable)].exists_one(c, + c)' + registry: + description: Registry configures the registry service. One + selection is required. Local is the default setting. 
+ properties: + local: + description: LocalRegistryConfig configures the registry + service + properties: + persistence: + description: RegistryPersistence configures the persistence + settings for the registry service + properties: + file: + description: RegistryFilePersistence configures + the file-based persistence for the registry + service + properties: + cache_mode: + description: |- + CacheMode defines the registry cache update strategy. + Allowed values are "sync" and "thread". + enum: + - none + - sync + - thread + type: string + cache_ttl_seconds: + description: CacheTTLSeconds defines the TTL + (in seconds) for the registry cache. + format: int32 + minimum: 0 + type: integer + path: + type: string + pvc: + description: PvcConfig defines the settings + for a persistent file store based on PVCs. + properties: + create: + description: Settings for creating a new + PVC + properties: + accessModes: + description: AccessModes k8s persistent + volume access modes. Defaults to + ["ReadWriteOnce"]. + items: + type: string + type: array + resources: + description: Resources describes the + storage resource requirements for + a volume. + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes + the minimum amount of compute + resources required. 
+ type: object + type: object + storageClassName: + description: StorageClassName is the + name of an existing StorageClass + to which this persistent volume + belongs. + type: string + type: object + x-kubernetes-validations: + - message: PvcCreate is immutable + rule: self == oldSelf + mountPath: + description: |- + MountPath within the container at which the volume should be mounted. + Must start by "/" and cannot contain ':'. + type: string + ref: + description: Reference to an existing + field + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + required: + - mountPath + type: object + x-kubernetes-validations: + - message: One selection is required between + ref and create. + rule: '[has(self.ref), has(self.create)].exists_one(c, + c)' + - message: Mount path must start with '/' + and must not contain ':' + rule: self.mountPath.matches('^/[^:]*$') + s3_additional_kwargs: + additionalProperties: type: string - optional: - description: Specify whether the Secret - must be defined - type: boolean - type: object - x-kubernetes-map-type: atomic - type: object - type: array - image: - type: string - imagePullPolicy: - description: PullPolicy describes a policy for if/when - to pull a container image - type: string - logLevel: - description: |- - LogLevel sets the logging level for the server - Allowed values: "debug", "info", "warning", "error", "critical". - enum: - - debug - - info - - warning - - error - - critical - type: string - metrics: - description: Metrics exposes Prometheus-compatible - metrics for the Feast server when enabled. - type: boolean - nodeSelector: - additionalProperties: - type: string + type: object + type: object + x-kubernetes-validations: + - message: Registry files must use absolute paths + or be S3 ('s3://') or GS ('gs://') object + store URIs. 
+ rule: '(!has(self.pvc) && has(self.path)) ? + (self.path.startsWith(''/'') || self.path.startsWith(''s3://'') + || self.path.startsWith(''gs://'')) : true' + - message: PVC path must be a file name only, + with no slashes. + rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') + : true' + - message: PVC persistence does not support S3 + or GS object store URIs. + rule: '(has(self.pvc) && has(self.path)) ? !(self.path.startsWith(''s3://'') + || self.path.startsWith(''gs://'')) : true' + - message: Additional S3 settings are available + only for S3 object store URIs. + rule: '(has(self.s3_additional_kwargs) && has(self.path)) + ? self.path.startsWith(''s3://'') : true' + store: + description: RegistryDBStorePersistence configures + the DB store persistence for the registry service + properties: + secretKeyName: + description: By default, the selected store + "type" is used as the SecretKeyName + type: string + secretRef: + description: Data store parameters should + be placed as-is from the "feature_store.yaml" + under the secret key. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: + description: Type of the persistence type + you want to use. + enum: + - sql + - snowflake.registry + type: string + required: + - secretRef + - type + type: object type: object - resources: - description: ResourceRequirements describes the compute - resource requirements. + x-kubernetes-validations: + - message: One selection required between file or + store. + rule: '[has(self.file), has(self.store)].exists_one(c, + c)' + server: + description: Creates a registry server container properties: - claims: - description: |- - Claims lists the names of resources, defined in spec.resourceClaims, - that are used by this container. 
+ env: items: - description: ResourceClaim references one entry - in PodSpec.ResourceClaims. + description: EnvVar represents an environment + variable present in a Container. properties: name: + description: Name of the environment variable. + Must be a C_IDENTIFIER. + type: string + value: description: |- - Name must match the name of one entry in pod.spec.resourceClaims of - the Pod where this field is used. + Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any + type: string + valueFrom: + description: Source for the environment + variable's value. Cannot be used if value + is not empty. + properties: + configMapKeyRef: + description: Selects a key of a ConfigMap. + properties: + key: + description: The key to select. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the + ConfigMap or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + fieldRef: + description: 'Selects a field of the + pod: supports metadata.name, metadata.namespace, + `metadata.labels['''']`, `metadata.' + properties: + apiVersion: + description: Version of the schema + the FieldPath is written in terms + of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to + select in the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits. 
+ properties: + containerName: + description: 'Container name: required + for volumes, optional for env + vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output + format of the exposed resources, + defaults to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource + to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret + in the pod's namespace + properties: + key: + description: The key of the secret + to select from. Must be a valid + secret key. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the + Secret or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + type: object + required: + - name + type: object + type: array + envFrom: + items: + description: EnvFromSource represents the source + of a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap + must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend + to each key in the ConfigMap. Must be + a C_IDENTIFIER. type: string - required: - - name + secretRef: + description: The Secret to select from + properties: + name: + default: "" + description: |- + Name of the referent. 
+ This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret + must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic type: object type: array - x-kubernetes-list-map-keys: - - name - x-kubernetes-list-type: map - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true + grpc: + description: Enable gRPC registry server. Defaults + to true if unset. + type: boolean + image: + type: string + imagePullPolicy: + description: PullPolicy describes a policy for + if/when to pull a container image + type: string + logLevel: description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum amount - of compute resources required. - type: object - type: object - tls: - description: TlsConfigs configures server TLS for - a feast service. - properties: - disable: - description: will disable TLS for the feast service. - useful in an openshift cluster, for example, - where TLS is configured by default + LogLevel sets the logging level for the server + Allowed values: "debug", "info", "warning", "error", "critical". + enum: + - debug + - info + - warning + - error + - critical + type: string + metrics: + description: Metrics exposes Prometheus-compatible + metrics for the Feast server when enabled. type: boolean - secretKeyNames: - description: SecretKeyNames defines the secret - key names for the TLS key and cert. 
- properties: - tlsCrt: - description: defaults to "tls.crt" - type: string - tlsKey: - description: defaults to "tls.key" - type: string + nodeSelector: + additionalProperties: + type: string type: object - secretRef: - description: references the local k8s secret where - the TLS key and cert reside + resources: + description: ResourceRequirements describes the + compute resource requirements. properties: - name: - default: "" + claims: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - type: object - x-kubernetes-validations: - - message: '`secretRef` required if `disable` is false.' - rule: '(!has(self.disable) || !self.disable) ? has(self.secretRef) - : true' - volumeMounts: - description: VolumeMounts defines the list of volumes - that should be mounted into the feast container. - items: - description: VolumeMount describes a mounting of - a Volume within a container. - properties: - mountPath: - description: |- - Path within the container at which the volume should be mounted. Must - not contain ':'. - type: string - mountPropagation: - description: |- - mountPropagation determines how mounts are propagated from the host - to container and the other way around. - type: string - name: - description: This must match the Name of a Volume. - type: string - readOnly: - description: |- - Mounted read-only if true, read-write otherwise (false or unspecified). - Defaults to false. - type: boolean - recursiveReadOnly: - description: |- - RecursiveReadOnly specifies whether read-only mounts should be handled - recursively. - type: string - subPath: - description: |- - Path within the volume from which the container's volume should be mounted. - Defaults to "" (volume's root). - type: string - subPathExpr: - description: Expanded path within the volume - from which the container's volume should be - mounted. 
- type: string - required: - - mountPath - - name - type: object - type: array - workerConfigs: - description: WorkerConfigs defines the worker configuration - for the Feast server. - properties: - keepAliveTimeout: - description: |- - KeepAliveTimeout is the timeout for keep-alive connections in seconds. - Defaults to 30. - format: int32 - minimum: 1 - type: integer - maxRequests: - description: |- - MaxRequests is the maximum number of requests a worker will process before restarting. - This helps prevent memory leaks. - format: int32 - minimum: 0 - type: integer - maxRequestsJitter: - description: |- - MaxRequestsJitter is the maximum jitter to add to max-requests to prevent - thundering herd effect on worker restart. - format: int32 - minimum: 0 - type: integer - registryTTLSeconds: - description: RegistryTTLSeconds is the number - of seconds after which the registry is refreshed. - format: int32 - minimum: 0 - type: integer - workerConnections: - description: |- - WorkerConnections is the maximum number of simultaneous clients per worker process. - Defaults to 1000. - format: int32 - minimum: 1 - type: integer - workers: - description: Workers is the number of worker processes. - Use -1 to auto-calculate based on CPU cores - (2 * CPU + 1). - format: int32 - minimum: -1 - type: integer - type: object - type: object - type: object - registry: - description: Registry configures the registry service. One - selection is required. Local is the default setting. - properties: - local: - description: LocalRegistryConfig configures the registry - service - properties: - persistence: - description: RegistryPersistence configures the persistence - settings for the registry service - properties: - file: - description: RegistryFilePersistence configures - the file-based persistence for the registry - service - properties: - cache_mode: + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. 
+ items: + description: ResourceClaim references one + entry in PodSpec.ResourceClaims. + properties: + name: + description: |- + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. + type: string + required: + - name + type: object + type: array + x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true description: |- - CacheMode defines the registry cache update strategy. - Allowed values are "sync" and "thread". - enum: - - none - - sync - - thread - type: string - cache_ttl_seconds: - description: CacheTTLSeconds defines the TTL - (in seconds) for the registry cache. - format: int32 - minimum: 0 - type: integer - path: - type: string - pvc: - description: PvcConfig defines the settings - for a persistent file store based on PVCs. + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum + amount of compute resources required. + type: object + type: object + restAPI: + description: Enable REST API registry server. + type: boolean + tls: + description: TlsConfigs configures server TLS + for a feast service. + properties: + disable: + description: will disable TLS for the feast + service. useful in an openshift cluster, + for example, where TLS is configured by + default + type: boolean + secretKeyNames: + description: SecretKeyNames defines the secret + key names for the TLS key and cert. 
properties: - create: - description: Settings for creating a new - PVC - properties: - accessModes: - description: AccessModes k8s persistent - volume access modes. Defaults to - ["ReadWriteOnce"]. - items: - type: string - type: array - resources: - description: Resources describes the - storage resource requirements for - a volume. - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes - the minimum amount of compute - resources required. - type: object - type: object - storageClassName: - description: StorageClassName is the - name of an existing StorageClass - to which this persistent volume - belongs. - type: string - type: object - x-kubernetes-validations: - - message: PvcCreate is immutable - rule: self == oldSelf - mountPath: - description: |- - MountPath within the container at which the volume should be mounted. - Must start by "/" and cannot contain ':'. + tlsCrt: + description: defaults to "tls.crt" + type: string + tlsKey: + description: defaults to "tls.key" type: string - ref: - description: Reference to an existing - field - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
- type: string - type: object - x-kubernetes-map-type: atomic - required: - - mountPath type: object - x-kubernetes-validations: - - message: One selection is required between - ref and create. - rule: '[has(self.ref), has(self.create)].exists_one(c, - c)' - - message: Mount path must start with '/' - and must not contain ':' - rule: self.mountPath.matches('^/[^:]*$') - s3_additional_kwargs: - additionalProperties: - type: string + secretRef: + description: references the local k8s secret + where the TLS key and cert reside + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string type: object + x-kubernetes-map-type: atomic type: object x-kubernetes-validations: - - message: Registry files must use absolute paths - or be S3 ('s3://') or GS ('gs://') object - store URIs. - rule: '(!has(self.pvc) && has(self.path)) ? - (self.path.startsWith(''/'') || self.path.startsWith(''s3://'') - || self.path.startsWith(''gs://'')) : true' - - message: PVC path must be a file name only, - with no slashes. - rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') - : true' - - message: PVC persistence does not support S3 - or GS object store URIs. - rule: '(has(self.pvc) && has(self.path)) ? !(self.path.startsWith(''s3://'') - || self.path.startsWith(''gs://'')) : true' - - message: Additional S3 settings are available - only for S3 object store URIs. - rule: '(has(self.s3_additional_kwargs) && has(self.path)) - ? self.path.startsWith(''s3://'') : true' - store: - description: RegistryDBStorePersistence configures - the DB store persistence for the registry service + - message: '`secretRef` required if `disable` + is false.' + rule: '(!has(self.disable) || !self.disable) + ? has(self.secretRef) : true' + volumeMounts: + description: VolumeMounts defines the list of + volumes that should be mounted into the feast + container. 
+ items: + description: VolumeMount describes a mounting + of a Volume within a container. + properties: + mountPath: + description: |- + Path within the container at which the volume should be mounted. Must + not contain ':'. + type: string + mountPropagation: + description: |- + mountPropagation determines how mounts are propagated from the host + to container and the other way around. + type: string + name: + description: This must match the Name of + a Volume. + type: string + readOnly: + description: |- + Mounted read-only if true, read-write otherwise (false or unspecified). + Defaults to false. + type: boolean + recursiveReadOnly: + description: |- + RecursiveReadOnly specifies whether read-only mounts should be handled + recursively. + type: string + subPath: + description: |- + Path within the volume from which the container's volume should be mounted. + Defaults to "" (volume's root). + type: string + subPathExpr: + description: Expanded path within the volume + from which the container's volume should + be mounted. + type: string + required: + - mountPath + - name + type: object + type: array + workerConfigs: + description: WorkerConfigs defines the worker + configuration for the Feast server. + properties: + keepAliveTimeout: + description: |- + KeepAliveTimeout is the timeout for keep-alive connections in seconds. + Defaults to 30. + format: int32 + minimum: 1 + type: integer + maxRequests: + description: |- + MaxRequests is the maximum number of requests a worker will process before restarting. + This helps prevent memory leaks. + format: int32 + minimum: 0 + type: integer + maxRequestsJitter: + description: |- + MaxRequestsJitter is the maximum jitter to add to max-requests to prevent + thundering herd effect on worker restart. + format: int32 + minimum: 0 + type: integer + registryTTLSeconds: + description: RegistryTTLSeconds is the number + of seconds after which the registry is refreshed. 
+ format: int32 + minimum: 0 + type: integer + workerConnections: + description: |- + WorkerConnections is the maximum number of simultaneous clients per worker process. + Defaults to 1000. + format: int32 + minimum: 1 + type: integer + workers: + description: Workers is the number of worker + processes. Use -1 to auto-calculate based + on CPU cores (2 * CPU + 1). + format: int32 + minimum: -1 + type: integer + type: object + type: object + x-kubernetes-validations: + - message: At least one of restAPI or grpc must be + true + rule: self.restAPI == true || self.grpc == true + || !has(self.grpc) + type: object + remote: + description: RemoteRegistryConfig points to a remote feast + registry server. + properties: + feastRef: + description: Reference to an existing `FeatureStore` + CR in the same k8s cluster. + properties: + name: + description: Name of the FeatureStore + type: string + namespace: + description: Namespace of the FeatureStore + type: string + required: + - name + type: object + hostname: + description: Host address of the remote registry service + - :, e.g. `registry..svc.cluster.local:80` + type: string + tls: + description: TlsRemoteRegistryConfigs configures client + TLS for a remote feast registry. + properties: + certName: + description: defines the configmap key name for + the client TLS cert. + type: string + configMapRef: + description: references the local k8s configmap + where the TLS cert resides properties: - secretKeyName: - description: By default, the selected store - "type" is used as the SecretKeyName - type: string - secretRef: - description: Data store parameters should - be placed as-is from the "feature_store.yaml" - under the secret key. - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
- type: string - type: object - x-kubernetes-map-type: atomic - type: - description: Type of the persistence type - you want to use. - enum: - - sql - - snowflake.registry + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. type: string - required: - - secretRef - - type type: object + x-kubernetes-map-type: atomic + required: + - certName + - configMapRef type: object - x-kubernetes-validations: - - message: One selection required between file or - store. - rule: '[has(self.file), has(self.store)].exists_one(c, - c)' - server: - description: Creates a registry server container + type: object + x-kubernetes-validations: + - message: One selection required. + rule: '[has(self.hostname), has(self.feastRef)].exists_one(c, + c)' + type: object + x-kubernetes-validations: + - message: One selection required. + rule: '[has(self.local), has(self.remote)].exists_one(c, + c)' + scaling: + description: Scaling configures horizontal scaling for the + FeatureStore deployment (e.g. HPA autoscaling). + properties: + autoscaling: + description: |- + Autoscaling configures a HorizontalPodAutoscaler for the FeatureStore deployment. + Mutually exclusive with spec.replicas. + properties: + behavior: + description: Behavior configures the scaling behavior + of the target. properties: - env: - items: - description: EnvVar represents an environment - variable present in a Container. - properties: - name: - description: Name of the environment variable. - Must be a C_IDENTIFIER. - type: string - value: - description: |- - Variable references $(VAR_NAME) are expanded - using the previously defined environment variables in the container and - any - type: string - valueFrom: - description: Source for the environment - variable's value. Cannot be used if value - is not empty. + scaleDown: + description: scaleDown is scaling policy for scaling + Down. 
+ properties: + policies: + description: policies is a list of potential + scaling polices which can be used during + scaling. + items: + description: HPAScalingPolicy is a single + policy which must hold true for a specified + past interval. properties: - configMapKeyRef: - description: Selects a key of a ConfigMap. - properties: - key: - description: The key to select. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the - ConfigMap or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - fieldRef: - description: 'Selects a field of the - pod: supports metadata.name, metadata.namespace, - `metadata.labels['''']`, `metadata.' - properties: - apiVersion: - description: Version of the schema - the FieldPath is written in terms - of, defaults to "v1". - type: string - fieldPath: - description: Path of the field to - select in the specified API version. - type: string - required: - - fieldPath - type: object - x-kubernetes-map-type: atomic - resourceFieldRef: + periodSeconds: + description: periodSeconds specifies + the window of time for which the policy + should hold true. + format: int32 + type: integer + type: + description: type is used to specify + the scaling policy. + type: string + value: description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, limits. 
- properties: - containerName: - description: 'Container name: required - for volumes, optional for env - vars' - type: string - divisor: - anyOf: - - type: integer - - type: string - description: Specifies the output - format of the exposed resources, - defaults to "1" - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - description: 'Required: resource - to select' - type: string - required: - - resource - type: object - x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret - in the pod's namespace - properties: - key: - description: The key of the secret - to select from. Must be a valid - secret key. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the - Secret or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic + value contains the amount of change which is permitted by the policy. + It must be greater than zero + format: int32 + type: integer + required: + - periodSeconds + - type + - value + type: object + type: array + x-kubernetes-list-type: atomic + selectPolicy: + description: |- + selectPolicy is used to specify which policy should be used. + If not set, the default value Max is used. + type: string + stabilizationWindowSeconds: + description: |- + stabilizationWindowSeconds is the number of seconds for which past recommendations should be + considered while scaling up + format: int32 + type: integer + type: object + scaleUp: + description: scaleUp is scaling policy for scaling + Up. + properties: + policies: + description: policies is a list of potential + scaling polices which can be used during + scaling. 
+ items: + description: HPAScalingPolicy is a single + policy which must hold true for a specified + past interval. + properties: + periodSeconds: + description: periodSeconds specifies + the window of time for which the policy + should hold true. + format: int32 + type: integer + type: + description: type is used to specify + the scaling policy. + type: string + value: + description: |- + value contains the amount of change which is permitted by the policy. + It must be greater than zero + format: int32 + type: integer + required: + - periodSeconds + - type + - value + type: object + type: array + x-kubernetes-list-type: atomic + selectPolicy: + description: |- + selectPolicy is used to specify which policy should be used. + If not set, the default value Max is used. + type: string + stabilizationWindowSeconds: + description: |- + stabilizationWindowSeconds is the number of seconds for which past recommendations should be + considered while scaling up + format: int32 + type: integer + type: object + type: object + maxReplicas: + description: MaxReplicas is the upper limit for the + number of replicas. Required. + format: int32 + minimum: 1 + type: integer + metrics: + description: Metrics contains the specifications for + which to use to calculate the desired replica count. + items: + description: |- + MetricSpec specifies how to scale based on a single metric + (only `type` and one other matching field should be set at on + properties: + containerResource: + description: |- + containerResource refers to a resource metric (such as those specified in + requests and limits) known to Kubernetes descr + properties: + container: + description: container is the name of the + container in the pods of the scaling target + type: string + name: + description: name is the name of the resource + in question. 
+ type: string + target: + description: target specifies the target + value for the given metric + properties: + averageUtilization: + description: "averageUtilization is + the target value of the average of + the\nresource metric across all relevant + pods, represented as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether + the metric type is Utilization, Value, + or AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value + of the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type type: object required: + - container - name + - target type: object - type: array - envFrom: - items: - description: EnvFromSource represents the source - of a set of ConfigMaps + external: + description: |- + external refers to a global metric that is not associated + with any Kubernetes object. properties: - configMapRef: - description: The ConfigMap to select from + metric: + description: metric identifies the target + metric by name and selector properties: name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
+ description: name is the name of the + given metric type: string - optional: - description: Specify whether the ConfigMap - must be defined - type: boolean + selector: + description: "selector is the string-encoded + form of a standard kubernetes label + selector for the given metric\nWhen + set, it is passed " + properties: + matchExpressions: + description: matchExpressions is + a list of label selector requirements. + The requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map + of {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + required: + - name type: object - x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend - to each key in the ConfigMap. Must be - a C_IDENTIFIER. 
- type: string - secretRef: - description: The Secret to select from + target: + description: target specifies the target + value for the given metric properties: - name: - default: "" + averageUtilization: + description: "averageUtilization is + the target value of the average of + the\nresource metric across all relevant + pods, represented as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether + the metric type is Utilization, Value, + or AverageValue type: string - optional: - description: Specify whether the Secret - must be defined - type: boolean + value: + anyOf: + - type: integer + - type: string + description: value is the target value + of the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type type: object - x-kubernetes-map-type: atomic + required: + - metric + - target type: object - type: array - grpc: - description: Enable gRPC registry server. Defaults - to true if unset. - type: boolean - image: - type: string - imagePullPolicy: - description: PullPolicy describes a policy for - if/when to pull a container image - type: string - logLevel: - description: |- - LogLevel sets the logging level for the server - Allowed values: "debug", "info", "warning", "error", "critical". 
- enum: - - debug - - info - - warning - - error - - critical - type: string - metrics: - description: Metrics exposes Prometheus-compatible - metrics for the Feast server when enabled. - type: boolean - nodeSelector: - additionalProperties: - type: string - type: object - resources: - description: ResourceRequirements describes the - compute resource requirements. - properties: - claims: - description: |- - Claims lists the names of resources, defined in spec.resourceClaims, - that are used by this container. - items: - description: ResourceClaim references one - entry in PodSpec.ResourceClaims. + object: + description: |- + object refers to a metric describing a single kubernetes object + (for example, hits-per-second on an Ingress object). + properties: + describedObject: + description: describedObject specifies the + descriptions of a object,such as kind,name + apiVersion properties: + apiVersion: + description: apiVersion is the API version + of the referent + type: string + kind: + description: 'kind is the kind of the + referent; More info: https://git.k8s.' + type: string name: - description: |- - Name must match the name of one entry in pod.spec.resourceClaims of - the Pod where this field is used. + description: 'name is the name of the + referent; More info: https://kubernetes.' type: string required: + - kind - name type: object - type: array - x-kubernetes-list-map-keys: - - name - x-kubernetes-list-type: map - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. 
- type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum - amount of compute resources required. - type: object - type: object - restAPI: - description: Enable REST API registry server. - type: boolean - tls: - description: TlsConfigs configures server TLS - for a feast service. - properties: - disable: - description: will disable TLS for the feast - service. useful in an openshift cluster, - for example, where TLS is configured by - default - type: boolean - secretKeyNames: - description: SecretKeyNames defines the secret - key names for the TLS key and cert. - properties: - tlsCrt: - description: defaults to "tls.crt" - type: string - tlsKey: - description: defaults to "tls.key" - type: string - type: object - secretRef: - description: references the local k8s secret - where the TLS key and cert reside - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - type: object - x-kubernetes-validations: - - message: '`secretRef` required if `disable` - is false.' - rule: '(!has(self.disable) || !self.disable) - ? has(self.secretRef) : true' - volumeMounts: - description: VolumeMounts defines the list of - volumes that should be mounted into the feast - container. - items: - description: VolumeMount describes a mounting - of a Volume within a container. 
+ metric: + description: metric identifies the target + metric by name and selector + properties: + name: + description: name is the name of the + given metric + type: string + selector: + description: "selector is the string-encoded + form of a standard kubernetes label + selector for the given metric\nWhen + set, it is passed " + properties: + matchExpressions: + description: matchExpressions is + a list of label selector requirements. + The requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map + of {key,value} pairs. 
+ type: object + type: object + x-kubernetes-map-type: atomic + required: + - name + type: object + target: + description: target specifies the target + value for the given metric + properties: + averageUtilization: + description: "averageUtilization is + the target value of the average of + the\nresource metric across all relevant + pods, represented as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether + the metric type is Utilization, Value, + or AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value + of the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - describedObject + - metric + - target + type: object + pods: + description: |- + pods refers to a metric describing each pod in the current scale target + (for example, transactions-processed-per-second) + properties: + metric: + description: metric identifies the target + metric by name and selector + properties: + name: + description: name is the name of the + given metric + type: string + selector: + description: "selector is the string-encoded + form of a standard kubernetes label + selector for the given metric\nWhen + set, it is passed " + properties: + matchExpressions: + description: matchExpressions is + a list of label selector requirements. + The requirements are ANDed. 
+ items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map + of {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + required: + - name + type: object + target: + description: target specifies the target + value for the given metric + properties: + averageUtilization: + description: "averageUtilization is + the target value of the average of + the\nresource metric across all relevant + pods, represented as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether + the metric type is Utilization, Value, + or AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value + of the metric (as a quantity). 
+ pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - metric + - target + type: object + resource: + description: |- + resource refers to a resource metric (such as those specified in + requests and limits) known to Kubernetes describing eac properties: - mountPath: - description: |- - Path within the container at which the volume should be mounted. Must - not contain ':'. - type: string - mountPropagation: - description: |- - mountPropagation determines how mounts are propagated from the host - to container and the other way around. - type: string name: - description: This must match the Name of - a Volume. - type: string - readOnly: - description: |- - Mounted read-only if true, read-write otherwise (false or unspecified). - Defaults to false. - type: boolean - recursiveReadOnly: - description: |- - RecursiveReadOnly specifies whether read-only mounts should be handled - recursively. - type: string - subPath: - description: |- - Path within the volume from which the container's volume should be mounted. - Defaults to "" (volume's root). - type: string - subPathExpr: - description: Expanded path within the volume - from which the container's volume should - be mounted. + description: name is the name of the resource + in question. 
type: string + target: + description: target specifies the target + value for the given metric + properties: + averageUtilization: + description: "averageUtilization is + the target value of the average of + the\nresource metric across all relevant + pods, represented as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether + the metric type is Utilization, Value, + or AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value + of the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object required: - - mountPath - name + - target type: object - type: array - workerConfigs: - description: WorkerConfigs defines the worker - configuration for the Feast server. - properties: - keepAliveTimeout: - description: |- - KeepAliveTimeout is the timeout for keep-alive connections in seconds. - Defaults to 30. - format: int32 - minimum: 1 - type: integer - maxRequests: - description: |- - MaxRequests is the maximum number of requests a worker will process before restarting. - This helps prevent memory leaks. - format: int32 - minimum: 0 - type: integer - maxRequestsJitter: - description: |- - MaxRequestsJitter is the maximum jitter to add to max-requests to prevent - thundering herd effect on worker restart. - format: int32 - minimum: 0 - type: integer - registryTTLSeconds: - description: RegistryTTLSeconds is the number - of seconds after which the registry is refreshed. 
- format: int32 - minimum: 0 - type: integer - workerConnections: - description: |- - WorkerConnections is the maximum number of simultaneous clients per worker process. - Defaults to 1000. - format: int32 - minimum: 1 - type: integer - workers: - description: Workers is the number of worker - processes. Use -1 to auto-calculate based - on CPU cores (2 * CPU + 1). - format: int32 - minimum: -1 - type: integer - type: object - type: object - x-kubernetes-validations: - - message: At least one of restAPI or grpc must be - true - rule: self.restAPI == true || self.grpc == true - || !has(self.grpc) - type: object - remote: - description: RemoteRegistryConfig points to a remote feast - registry server. - properties: - feastRef: - description: Reference to an existing `FeatureStore` - CR in the same k8s cluster. - properties: - name: - description: Name of the FeatureStore - type: string - namespace: - description: Namespace of the FeatureStore - type: string - required: - - name - type: object - hostname: - description: Host address of the remote registry service - - :, e.g. `registry..svc.cluster.local:80` - type: string - tls: - description: TlsRemoteRegistryConfigs configures client - TLS for a remote feast registry. - properties: - certName: - description: defines the configmap key name for - the client TLS cert. - type: string - configMapRef: - description: references the local k8s configmap - where the TLS cert resides - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - required: - - certName - - configMapRef - type: object + type: + description: type is the type of metric source. + type: string + required: + - type + type: object + type: array + minReplicas: + description: MinReplicas is the lower limit for the + number of replicas. Defaults to 1. 
+ format: int32 + minimum: 1 + type: integer + required: + - maxReplicas type: object - x-kubernetes-validations: - - message: One selection required. - rule: '[has(self.hostname), has(self.feastRef)].exists_one(c, - c)' type: object - x-kubernetes-validations: - - message: One selection required. - rule: '[has(self.local), has(self.remote)].exists_one(c, - c)' securityContext: description: PodSecurityContext holds pod-level security attributes and common container settings. @@ -6752,6 +9577,98 @@ spec: type: string type: object type: object + topologySpreadConstraints: + description: TopologySpreadConstraints defines how pods are + spread across topology domains. + items: + description: TopologySpreadConstraint specifies how to spread + matching pods among the given topology. + properties: + labelSelector: + description: LabelSelector is used to find matching + pods. + properties: + matchExpressions: + description: matchExpressions is a list of label + selector requirements. The requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that the + selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. 
+ type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select the pods over which + spreading will be calculated. + items: + type: string + type: array + x-kubernetes-list-type: atomic + maxSkew: + description: MaxSkew describes the degree to which pods + may be unevenly distributed. + format: int32 + type: integer + minDomains: + description: MinDomains indicates a minimum number of + eligible domains. + format: int32 + type: integer + nodeAffinityPolicy: + description: |- + NodeAffinityPolicy indicates how we will treat Pod's nodeAffinity/nodeSelector + when calculating pod topology spread skew + type: string + nodeTaintsPolicy: + description: |- + NodeTaintsPolicy indicates how we will treat node taints when calculating + pod topology spread skew. + type: string + topologyKey: + description: TopologyKey is the key of node labels. + type: string + whenUnsatisfiable: + description: |- + WhenUnsatisfiable indicates how to deal with a pod if it doesn't satisfy + the spread constraint. + type: string + required: + - maxSkew + - topologyKey + - whenUnsatisfiable + type: object + type: array ui: description: Creates a UI server container properties: @@ -8539,7 +11456,39 @@ spec: type: object required: - feastProject + - replicas type: object + x-kubernetes-validations: + - message: replicas > 1 and services.scaling.autoscaling are mutually + exclusive. + rule: self.replicas <= 1 || !has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling) + - message: Scaling requires DB-backed persistence for the online store. + Configure services.onlineStore.persistence.store when using replicas + > 1 or autoscaling. 
+ rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (has(self.services) + && has(self.services.onlineStore) && has(self.services.onlineStore.persistence) + && has(self.services.onlineStore.persistence.store)) + - message: Scaling requires DB-backed persistence for the offline + store. Configure services.offlineStore.persistence.store when + using replicas > 1 or autoscaling. + rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (!has(self.services) + || !has(self.services.offlineStore) || (has(self.services.offlineStore.persistence) + && has(self.services.offlineStore.persistence.store))) + - message: Scaling requires DB-backed or remote registry. Configure + registry.local.persistence.store or use a remote registry when + using replicas > 1 or autoscaling. S3/GCS-backed registry is also + allowed. + rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (has(self.services) + && has(self.services.registry) && (has(self.services.registry.remote) + || (has(self.services.registry.local) && has(self.services.registry.local.persistence) + && (has(self.services.registry.local.persistence.store) || (has(self.services.registry.local.persistence.file) + && has(self.services.registry.local.persistence.file.path) && + (self.services.registry.local.persistence.file.path.startsWith('s3://') + || self.services.registry.local.persistence.file.path.startsWith('gs://'))))))) clientConfigMap: description: ConfigMap in this namespace containing a client `feature_store.yaml` for this feast deployment @@ -8604,6 +11553,28 @@ spec: type: string phase: type: string + replicas: + description: Replicas is the current number of ready pod replicas + (used by the scale sub-resource). 
+ format: int32 + type: integer + scalingStatus: + description: ScalingStatus reports the current scaling state of the + FeatureStore deployment. + properties: + currentReplicas: + description: CurrentReplicas is the current number of pod replicas. + format: int32 + type: integer + desiredReplicas: + description: DesiredReplicas is the desired number of pod replicas. + format: int32 + type: integer + type: object + selector: + description: Selector is the label selector for pods managed by the + FeatureStore deployment (used by the scale sub-resource). + type: string serviceHostnames: description: ServiceHostnames defines the service hostnames in the format of :, e.g. example.svc.cluster.local:80 @@ -8624,6 +11595,10 @@ spec: served: true storage: true subresources: + scale: + labelSelectorPath: .status.selector + specReplicasPath: .spec.replicas + statusReplicasPath: .status.replicas status: {} - additionalPrinterColumns: - jsonPath: .status.phase @@ -9294,7 +12269,6 @@ spec: - hbase - cassandra - hazelcast - - ikv - couchbase - clickhouse type: string @@ -9966,7 +12940,6 @@ spec: enum: - snowflake.online - redis - - ikv - datastore - dynamodb - bigtable @@ -9981,6 +12954,7 @@ spec: - couchbase.online - milvus - hybrid + - mongodb type: string required: - secretRef @@ -13505,7 +16479,6 @@ spec: - hbase - cassandra - hazelcast - - ikv - couchbase - clickhouse type: string @@ -14187,7 +17160,6 @@ spec: enum: - snowflake.online - redis - - ikv - datastore - dynamodb - bigtable @@ -14202,6 +17174,7 @@ spec: - couchbase.online - milvus - hybrid + - mongodb type: string required: - secretRef diff --git a/infra/feast-operator/config/default/metrics_service.yaml b/infra/feast-operator/config/default/metrics_service.yaml index 0207c0469d4..fbf17dd96ec 100644 --- a/infra/feast-operator/config/default/metrics_service.yaml +++ b/infra/feast-operator/config/default/metrics_service.yaml @@ -14,4 +14,5 @@ spec: protocol: TCP targetPort: 8443 selector: + app.kubernetes.io/name: 
feast-operator control-plane: controller-manager diff --git a/infra/feast-operator/config/default/related_image_fs_patch.yaml b/infra/feast-operator/config/default/related_image_fs_patch.yaml index 172afcfd075..afc1c3d7c63 100644 --- a/infra/feast-operator/config/default/related_image_fs_patch.yaml +++ b/infra/feast-operator/config/default/related_image_fs_patch.yaml @@ -2,7 +2,7 @@ path: "/spec/template/spec/containers/0/env/0" value: name: RELATED_IMAGE_FEATURE_SERVER - value: quay.io/feastdev/feature-server:0.60.0 + value: quay.io/feastdev/feature-server:0.61.0 - op: replace path: "/spec/template/spec/containers/0/env/1" value: diff --git a/infra/feast-operator/config/manager/kustomization.yaml b/infra/feast-operator/config/manager/kustomization.yaml index 844c53ae757..4a0a78531bb 100644 --- a/infra/feast-operator/config/manager/kustomization.yaml +++ b/infra/feast-operator/config/manager/kustomization.yaml @@ -5,4 +5,4 @@ kind: Kustomization images: - name: controller newName: quay.io/feastdev/feast-operator - newTag: 0.60.0 + newTag: 0.61.0 diff --git a/infra/feast-operator/config/manager/manager.yaml b/infra/feast-operator/config/manager/manager.yaml index 242144e2b03..8107749fce5 100644 --- a/infra/feast-operator/config/manager/manager.yaml +++ b/infra/feast-operator/config/manager/manager.yaml @@ -19,6 +19,7 @@ metadata: spec: selector: matchLabels: + app.kubernetes.io/name: feast-operator control-plane: controller-manager replicas: 1 template: @@ -26,6 +27,7 @@ spec: annotations: kubectl.kubernetes.io/default-container: manager labels: + app.kubernetes.io/name: feast-operator control-plane: controller-manager spec: # TODO(user): Uncomment the following code to configure the nodeAffinity expression diff --git a/infra/feast-operator/config/overlays/odh/params.env b/infra/feast-operator/config/overlays/odh/params.env index c3cb4bab64b..d7b0233b998 100644 --- a/infra/feast-operator/config/overlays/odh/params.env +++ 
b/infra/feast-operator/config/overlays/odh/params.env @@ -1,3 +1,3 @@ -RELATED_IMAGE_FEAST_OPERATOR=quay.io/feastdev/feast-operator:0.60.0 -RELATED_IMAGE_FEATURE_SERVER=quay.io/feastdev/feature-server:0.60.0 +RELATED_IMAGE_FEAST_OPERATOR=quay.io/feastdev/feast-operator:0.61.0 +RELATED_IMAGE_FEATURE_SERVER=quay.io/feastdev/feature-server:0.61.0 RELATED_IMAGE_CRON_JOB=quay.io/openshift/origin-cli:4.17 diff --git a/infra/feast-operator/config/overlays/rhoai/params.env b/infra/feast-operator/config/overlays/rhoai/params.env index afae8c9bea4..ce50a9f1412 100644 --- a/infra/feast-operator/config/overlays/rhoai/params.env +++ b/infra/feast-operator/config/overlays/rhoai/params.env @@ -1,3 +1,3 @@ -RELATED_IMAGE_FEAST_OPERATOR=quay.io/feastdev/feast-operator:0.60.0 -RELATED_IMAGE_FEATURE_SERVER=quay.io/feastdev/feature-server:0.60.0 +RELATED_IMAGE_FEAST_OPERATOR=quay.io/feastdev/feast-operator:0.61.0 +RELATED_IMAGE_FEATURE_SERVER=quay.io/feastdev/feature-server:0.61.0 RELATED_IMAGE_CRON_JOB=registry.redhat.io/openshift4/ose-cli@sha256:bc35a9fc663baf0d6493cc57e89e77a240a36c43cf38fb78d8e61d3b87cf5cc5 \ No newline at end of file diff --git a/infra/feast-operator/config/prometheus/monitor.yaml b/infra/feast-operator/config/prometheus/monitor.yaml index e76479a1305..50f2ea8e448 100644 --- a/infra/feast-operator/config/prometheus/monitor.yaml +++ b/infra/feast-operator/config/prometheus/monitor.yaml @@ -27,4 +27,5 @@ spec: insecureSkipVerify: true selector: matchLabels: + app.kubernetes.io/name: feast-operator control-plane: controller-manager diff --git a/infra/feast-operator/config/rbac/role.yaml b/infra/feast-operator/config/rbac/role.yaml index 56a952dd95c..dca06c870e4 100644 --- a/infra/feast-operator/config/rbac/role.yaml +++ b/infra/feast-operator/config/rbac/role.yaml @@ -21,6 +21,18 @@ rules: - tokenreviews verbs: - create +- apiGroups: + - autoscaling + resources: + - horizontalpodautoscalers + verbs: + - create + - delete + - get + - list + - patch + - update + - 
watch - apiGroups: - batch resources: @@ -89,6 +101,18 @@ rules: - get - patch - update +- apiGroups: + - policy + resources: + - poddisruptionbudgets + verbs: + - create + - delete + - get + - list + - patch + - update + - watch - apiGroups: - rbac.authorization.k8s.io resources: diff --git a/infra/feast-operator/config/samples/v1_featurestore_scaling_hpa.yaml b/infra/feast-operator/config/samples/v1_featurestore_scaling_hpa.yaml new file mode 100644 index 00000000000..af4a9fd1d02 --- /dev/null +++ b/infra/feast-operator/config/samples/v1_featurestore_scaling_hpa.yaml @@ -0,0 +1,78 @@ +apiVersion: v1 +kind: Secret +metadata: + name: postgres-secret + namespace: test + labels: + app: postgres +stringData: + POSTGRES_DB: feast + POSTGRES_USER: feast + POSTGRES_PASSWORD: feast # pragma: allowlist secret +--- +apiVersion: v1 +kind: Secret +metadata: + name: feast-data-stores + namespace: test +stringData: + sql: | + path: postgresql+psycopg://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres.test.svc.cluster.local:5432/${POSTGRES_DB} + cache_ttl_seconds: 60 + sqlalchemy_config_kwargs: + echo: false + pool_pre_ping: true + postgres: | + host: postgres.test.svc.cluster.local + port: 5432 + database: ${POSTGRES_DB} + db_schema: public + user: ${POSTGRES_USER} + password: ${POSTGRES_PASSWORD} +--- +# HPA autoscaling: 2-10 replicas with DB-backed persistence and HA +apiVersion: feast.dev/v1 +kind: FeatureStore +metadata: + name: sample-scaling-hpa + namespace: test +spec: + feastProject: my_project + services: + scaling: + autoscaling: + minReplicas: 2 + maxReplicas: 10 + metrics: + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: 70 + podDisruptionBudgets: + maxUnavailable: 1 + onlineStore: + persistence: + store: + type: postgres + secretRef: + name: feast-data-stores + server: + envFrom: + - secretRef: + name: postgres-secret + resources: + requests: + cpu: 200m + memory: 256Mi + limits: + cpu: "1" + memory: 1Gi + registry: + 
local: + persistence: + store: + type: sql + secretRef: + name: feast-data-stores diff --git a/infra/feast-operator/config/samples/v1_featurestore_scaling_static.yaml b/infra/feast-operator/config/samples/v1_featurestore_scaling_static.yaml new file mode 100644 index 00000000000..e4df5a6245a --- /dev/null +++ b/infra/feast-operator/config/samples/v1_featurestore_scaling_static.yaml @@ -0,0 +1,75 @@ +apiVersion: v1 +kind: Secret +metadata: + name: postgres-secret + namespace: test + labels: + app: postgres +stringData: + POSTGRES_DB: feast + POSTGRES_USER: feast + POSTGRES_PASSWORD: feast # pragma: allowlist secret +--- +apiVersion: v1 +kind: Secret +metadata: + name: feast-data-stores + namespace: test +stringData: + sql: | + path: postgresql+psycopg://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres.test.svc.cluster.local:5432/${POSTGRES_DB} + cache_ttl_seconds: 60 + sqlalchemy_config_kwargs: + echo: false + pool_pre_ping: true + postgres: | + host: postgres.test.svc.cluster.local + port: 5432 + database: ${POSTGRES_DB} + db_schema: public + user: ${POSTGRES_USER} + password: ${POSTGRES_PASSWORD} +--- +# Static scaling: 3 replicas with DB-backed persistence, PDB, and HA +# +# By default the operator auto-injects: +# - Soft pod anti-affinity (prefer different nodes) +# - Soft zone topology spread (prefer different zones, e.g. us-east-1a, us-east-1b, us-east-1c) +# +# To enforce strict zone spreading on AWS (DoNotSchedule), uncomment topologySpreadConstraints below. 
+apiVersion: feast.dev/v1 +kind: FeatureStore +metadata: + name: sample-scaling-static + namespace: test +spec: + feastProject: my_project + replicas: 3 + services: + podDisruptionBudgets: + maxUnavailable: 1 + # Uncomment to enforce strict spreading across AWS availability zones: + # topologySpreadConstraints: + # - maxSkew: 1 + # topologyKey: topology.kubernetes.io/zone + # whenUnsatisfiable: DoNotSchedule + # labelSelector: + # matchLabels: + # feast.dev/name: sample-scaling-static + onlineStore: + persistence: + store: + type: postgres + secretRef: + name: feast-data-stores + server: + envFrom: + - secretRef: + name: postgres-secret + registry: + local: + persistence: + store: + type: sql + secretRef: + name: feast-data-stores diff --git a/infra/feast-operator/dist/install.yaml b/infra/feast-operator/dist/install.yaml index d42a902c886..953fae9125d 100644 --- a/infra/feast-operator/dist/install.yaml +++ b/infra/feast-operator/dist/install.yaml @@ -711,7 +711,6 @@ spec: - hbase - cassandra - hazelcast - - ikv - couchbase - clickhouse type: string @@ -720,553 +719,806 @@ spec: x-kubernetes-validations: - message: One selection required between init or git. rule: '[has(self.git), has(self.init)].exists_one(c, c)' + replicas: + default: 1 + description: |- + Replicas is the desired number of pod replicas. Used by the scale sub-resource. + Mutually exclusive with services. + format: int32 + minimum: 1 + type: integer services: description: FeatureStoreServices defines the desired feast services. An ephemeral onlineStore feature server is deployed by default. properties: - deploymentStrategy: - description: DeploymentStrategy describes how to replace existing - pods with new ones. + affinity: + description: Affinity defines the pod scheduling constraints for + the FeatureStore deployment. properties: - rollingUpdate: - description: |- - Rolling update config params. Present only if DeploymentStrategyType = - RollingUpdate. 
+ nodeAffinity: + description: Describes node affinity scheduling rules for + the pod. properties: - maxSurge: - anyOf: - - type: integer - - type: string + preferredDuringSchedulingIgnoredDuringExecution: description: |- - The maximum number of pods that can be scheduled above the desired number of - pods. - x-kubernetes-int-or-string: true - maxUnavailable: - anyOf: - - type: integer - - type: string - description: The maximum number of pods that can be unavailable - during the update. - x-kubernetes-int-or-string: true - type: object - type: - description: Type of deployment. Can be "Recreate" or "RollingUpdate". - Default is RollingUpdate. - type: string - type: object - disableInitContainers: - description: Disable the 'feast repo initialization' initContainer - type: boolean - offlineStore: - description: OfflineStore configures the offline store service - properties: - persistence: - description: OfflineStorePersistence configures the persistence - settings for the offline store service - properties: - file: - description: OfflineStoreFilePersistence configures the - file-based persistence for the offline store service - properties: - pvc: - description: PvcConfig defines the settings for a - persistent file store based on PVCs. - properties: - create: - description: Settings for creating a new PVC - properties: - accessModes: - description: AccessModes k8s persistent volume - access modes. Defaults to ["ReadWriteOnce"]. - items: - type: string - type: array - resources: - description: Resources describes the storage - resource requirements for a volume. + The scheduler will prefer to schedule pods to nodes that satisfy + the affinity expressions specified by this field, but i + items: + description: |- + An empty preferred scheduling term matches all objects with implicit weight 0 + (i.e. it's a no-op). + properties: + preference: + description: A node selector term, associated with + the corresponding weight. 
+ properties: + matchExpressions: + description: A list of node selector requirements + by node's labels. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true + key: + description: The label key that the selector + applies to. + type: string + operator: description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum - amount of compute resources required. - type: object + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator type: object - storageClassName: - description: StorageClassName is the name - of an existing StorageClass to which this - persistent volume belongs. - type: string - type: object - x-kubernetes-validations: - - message: PvcCreate is immutable - rule: self == oldSelf - mountPath: - description: |- - MountPath within the container at which the volume should be mounted. - Must start by "/" and cannot contain ':'. 
- type: string - ref: - description: Reference to an existing field - properties: - name: - default: "" + type: array + x-kubernetes-list-type: atomic + matchFields: + description: A list of node selector requirements + by node's fields. + items: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - required: - - mountPath - type: object - x-kubernetes-validations: - - message: One selection is required between ref and - create. - rule: '[has(self.ref), has(self.create)].exists_one(c, - c)' - - message: Mount path must start with '/' and must - not contain ':' - rule: self.mountPath.matches('^/[^:]*$') - type: - enum: - - file - - dask - - duckdb - type: string - type: object - store: - description: OfflineStoreDBStorePersistence configures - the DB store persistence for the offline store service + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the selector + applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + type: object + x-kubernetes-map-type: atomic + weight: + description: Weight associated with matching the + corresponding nodeSelectorTerm, in the range 1-100. 
+ format: int32 + type: integer + required: + - preference + - weight + type: object + type: array + x-kubernetes-list-type: atomic + requiredDuringSchedulingIgnoredDuringExecution: + description: "If the affinity requirements specified by + this field are not met at\nscheduling time, the pod + will not be scheduled onto " properties: - secretKeyName: - description: By default, the selected store "type" - is used as the SecretKeyName - type: string - secretRef: - description: Data store parameters should be placed - as-is from the "feature_store.yaml" under the secret - key. - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - type: - description: Type of the persistence type you want - to use. - enum: - - snowflake.offline - - bigquery - - redshift - - spark - - postgres - - trino - - athena - - mssql - - couchbase.offline - - clickhouse - - ray - type: string + nodeSelectorTerms: + description: Required. A list of node selector terms. + The terms are ORed. + items: + description: |- + A null or empty node selector term matches no objects. The requirements of + them are ANDed. + properties: + matchExpressions: + description: A list of node selector requirements + by node's labels. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the selector + applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchFields: + description: A list of node selector requirements + by node's fields. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the selector + applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + type: object + x-kubernetes-map-type: atomic + type: array + x-kubernetes-list-type: atomic required: - - secretRef - - type + - nodeSelectorTerms type: object + x-kubernetes-map-type: atomic type: object - x-kubernetes-validations: - - message: One selection required between file or store. - rule: '[has(self.file), has(self.store)].exists_one(c, c)' - server: - description: Creates a remote offline server container + podAffinity: + description: Describes pod affinity scheduling rules (e.g. + co-locate this pod in the same node, zone, etc. as some + other pod(s)). properties: - env: + preferredDuringSchedulingIgnoredDuringExecution: + description: |- + The scheduler will prefer to schedule pods to nodes that satisfy + the affinity expressions specified by this field, but i items: - description: EnvVar represents an environment variable - present in a Container. 
+ description: The weights of all of the matched WeightedPodAffinityTerm + fields are added per-node to find the most preferred + node(s) properties: - name: - description: Name of the environment variable. Must - be a C_IDENTIFIER. - type: string - value: - description: |- - Variable references $(VAR_NAME) are expanded - using the previously defined environment variables in the container and - any - type: string - valueFrom: - description: Source for the environment variable's - value. Cannot be used if value is not empty. + podAffinityTerm: + description: Required. A pod affinity term, associated + with the corresponding weight. properties: - configMapKeyRef: - description: Selects a key of a ConfigMap. - properties: - key: - description: The key to select. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap - or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - fieldRef: - description: 'Selects a field of the pod: supports - metadata.name, metadata.namespace, `metadata.labels['''']`, - `metadata.' - properties: - apiVersion: - description: Version of the schema the FieldPath - is written in terms of, defaults to "v1". - type: string - fieldPath: - description: Path of the field to select - in the specified API version. - type: string - required: - - fieldPath - type: object - x-kubernetes-map-type: atomic - resourceFieldRef: + labelSelector: description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, limits. + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. 
properties: - containerName: - description: 'Container name: required for - volumes, optional for env vars' - type: string - divisor: - anyOf: - - type: integer - - type: string - description: Specifies the output format - of the exposed resources, defaults to - "1" - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - description: 'Required: resource to select' - type: string - required: - - resource + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object type: object x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret in the - pod's namespace + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. properties: - key: - description: The key of the secret to select - from. Must be a valid secret key. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the Secret - or its key must be defined - type: boolean - required: - - key + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object type: object x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t + type: string + required: + - topologyKey type: object + weight: + description: |- + weight associated with matching the corresponding podAffinityTerm, + in the range 1-100. + format: int32 + type: integer required: - - name + - podAffinityTerm + - weight type: object type: array - envFrom: + x-kubernetes-list-type: atomic + requiredDuringSchedulingIgnoredDuringExecution: + description: "If the affinity requirements specified by + this field are not met at\nscheduling time, the pod + will not be scheduled onto " items: - description: EnvFromSource represents the source of - a set of ConfigMaps + description: "Defines a set of pods (namely those matching + the labelSelector\nrelative to the given namespace(s)) + that this pod should " properties: - configMapRef: - description: The ConfigMap to select from + labelSelector: + description: |- + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap must - be defined - type: boolean + matchExpressions: + description: matchExpressions is a list of label + selector requirements. The requirements are + ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that + the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. 
+ Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object type: object x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend to - each key in the ConfigMap. Must be a C_IDENTIFIER. - type: string - secretRef: - description: The Secret to select from + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the Secret must - be defined - type: boolean + matchExpressions: + description: matchExpressions is a list of label + selector requirements. The requirements are + ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that + the selector applies to. 
+ type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object type: object x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t + type: string + required: + - topologyKey type: object type: array - image: - type: string - imagePullPolicy: - description: PullPolicy describes a policy for if/when - to pull a container image - type: string - logLevel: - description: |- - LogLevel sets the logging level for the server - Allowed values: "debug", "info", "warning", "error", "critical". - enum: - - debug - - info - - warning - - error - - critical - type: string - metrics: - description: Metrics exposes Prometheus-compatible metrics - for the Feast server when enabled. - type: boolean - nodeSelector: - additionalProperties: - type: string - type: object - resources: - description: ResourceRequirements describes the compute - resource requirements. - properties: - claims: - description: |- - Claims lists the names of resources, defined in spec.resourceClaims, - that are used by this container. - items: - description: ResourceClaim references one entry - in PodSpec.ResourceClaims. 
+ x-kubernetes-list-type: atomic + type: object + podAntiAffinity: + description: Describes pod anti-affinity scheduling rules + (e.g. avoid putting this pod in the same node, zone, etc. + properties: + preferredDuringSchedulingIgnoredDuringExecution: + description: "The scheduler will prefer to schedule pods + to nodes that satisfy\nthe anti-affinity expressions + specified by this field, " + items: + description: The weights of all of the matched WeightedPodAffinityTerm + fields are added per-node to find the most preferred + node(s) + properties: + podAffinityTerm: + description: Required. A pod affinity term, associated + with the corresponding weight. properties: - name: + labelSelector: description: |- - Name must match the name of one entry in pod.spec.resourceClaims of - the Pod where this field is used. + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. 
+ type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t type: string required: - - name + - topologyKey type: object - type: array - x-kubernetes-list-map-keys: - - name - x-kubernetes-list-type: map - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum amount - of compute resources required. - type: object - type: object - tls: - description: TlsConfigs configures server TLS for a feast - service. - properties: - disable: - description: will disable TLS for the feast service. - useful in an openshift cluster, for example, where - TLS is configured by default - type: boolean - secretKeyNames: - description: SecretKeyNames defines the secret key - names for the TLS key and cert. - properties: - tlsCrt: - description: defaults to "tls.crt" - type: string - tlsKey: - description: defaults to "tls.key" - type: string - type: object - secretRef: - description: references the local k8s secret where - the TLS key and cert reside - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
- type: string - type: object - x-kubernetes-map-type: atomic - type: object - x-kubernetes-validations: - - message: '`secretRef` required if `disable` is false.' - rule: '(!has(self.disable) || !self.disable) ? has(self.secretRef) - : true' - volumeMounts: - description: VolumeMounts defines the list of volumes - that should be mounted into the feast container. - items: - description: VolumeMount describes a mounting of a Volume - within a container. - properties: - mountPath: - description: |- - Path within the container at which the volume should be mounted. Must - not contain ':'. - type: string - mountPropagation: - description: |- - mountPropagation determines how mounts are propagated from the host - to container and the other way around. - type: string - name: - description: This must match the Name of a Volume. - type: string - readOnly: - description: |- - Mounted read-only if true, read-write otherwise (false or unspecified). - Defaults to false. - type: boolean - recursiveReadOnly: - description: |- - RecursiveReadOnly specifies whether read-only mounts should be handled - recursively. - type: string - subPath: + weight: description: |- - Path within the volume from which the container's volume should be mounted. - Defaults to "" (volume's root). - type: string - subPathExpr: - description: Expanded path within the volume from - which the container's volume should be mounted. - type: string + weight associated with matching the corresponding podAffinityTerm, + in the range 1-100. + format: int32 + type: integer required: - - mountPath - - name + - podAffinityTerm + - weight type: object type: array - workerConfigs: - description: WorkerConfigs defines the worker configuration - for the Feast server. - properties: - keepAliveTimeout: - description: |- - KeepAliveTimeout is the timeout for keep-alive connections in seconds. - Defaults to 30. 
- format: int32 - minimum: 1 - type: integer - maxRequests: - description: |- - MaxRequests is the maximum number of requests a worker will process before restarting. - This helps prevent memory leaks. - format: int32 - minimum: 0 - type: integer - maxRequestsJitter: - description: |- - MaxRequestsJitter is the maximum jitter to add to max-requests to prevent - thundering herd effect on worker restart. - format: int32 - minimum: 0 - type: integer - registryTTLSeconds: - description: RegistryTTLSeconds is the number of seconds - after which the registry is refreshed. - format: int32 - minimum: 0 - type: integer - workerConnections: - description: |- - WorkerConnections is the maximum number of simultaneous clients per worker process. - Defaults to 1000. - format: int32 - minimum: 1 - type: integer - workers: - description: Workers is the number of worker processes. - Use -1 to auto-calculate based on CPU cores (2 * - CPU + 1). - format: int32 - minimum: -1 - type: integer - type: object + x-kubernetes-list-type: atomic + requiredDuringSchedulingIgnoredDuringExecution: + description: "If the anti-affinity requirements specified + by this field are not met at\nscheduling time, the pod + will not be scheduled " + items: + description: "Defines a set of pods (namely those matching + the labelSelector\nrelative to the given namespace(s)) + that this pod should " + properties: + labelSelector: + description: |- + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. + properties: + matchExpressions: + description: matchExpressions is a list of label + selector requirements. The requirements are + ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that + the selector applies to. 
+ type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list of label + selector requirements. The requirements are + ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that + the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t + type: string + required: + - topologyKey + type: object + type: array + x-kubernetes-list-type: atomic type: object type: object - onlineStore: - description: OnlineStore configures the online store service + deploymentStrategy: + description: DeploymentStrategy describes how to replace existing + pods with new ones. + properties: + rollingUpdate: + description: |- + Rolling update config params. Present only if DeploymentStrategyType = + RollingUpdate. + properties: + maxSurge: + anyOf: + - type: integer + - type: string + description: |- + The maximum number of pods that can be scheduled above the desired number of + pods. + x-kubernetes-int-or-string: true + maxUnavailable: + anyOf: + - type: integer + - type: string + description: The maximum number of pods that can be unavailable + during the update. + x-kubernetes-int-or-string: true + type: object + type: + description: Type of deployment. Can be "Recreate" or "RollingUpdate". + Default is RollingUpdate. 
+ type: string + type: object + disableInitContainers: + description: Disable the 'feast repo initialization' initContainer + type: boolean + offlineStore: + description: OfflineStore configures the offline store service properties: persistence: - description: OnlineStorePersistence configures the persistence - settings for the online store service + description: OfflineStorePersistence configures the persistence + settings for the offline store service properties: file: - description: OnlineStoreFilePersistence configures the - file-based persistence for the online store service + description: OfflineStoreFilePersistence configures the + file-based persistence for the offline store service properties: - path: - type: string pvc: description: PvcConfig defines the settings for a persistent file store based on PVCs. @@ -1343,21 +1595,16 @@ spec: - message: Mount path must start with '/' and must not contain ':' rule: self.mountPath.matches('^/[^:]*$') + type: + enum: + - file + - dask + - duckdb + type: string type: object - x-kubernetes-validations: - - message: Ephemeral stores must have absolute paths. - rule: '(!has(self.pvc) && has(self.path)) ? self.path.startsWith(''/'') - : true' - - message: PVC path must be a file name only, with no - slashes. - rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') - : true' - - message: Online store does not support S3 or GS buckets. - rule: 'has(self.path) ? !(self.path.startsWith(''s3://'') - || self.path.startsWith(''gs://'')) : true' store: - description: OnlineStoreDBStorePersistence configures - the DB store persistence for the online store service + description: OfflineStoreDBStorePersistence configures + the DB store persistence for the offline store service properties: secretKeyName: description: By default, the selected store "type" @@ -1381,23 +1628,17 @@ spec: description: Type of the persistence type you want to use. 
enum: - - snowflake.online - - redis - - ikv - - datastore - - dynamodb - - bigtable + - snowflake.offline + - bigquery + - redshift + - spark - postgres - - cassandra - - mysql - - hazelcast - - singlestore - - hbase - - elasticsearch - - qdrant - - couchbase.online - - milvus - - hybrid + - trino + - athena + - mssql + - couchbase.offline + - clickhouse + - ray type: string required: - secretRef @@ -1408,7 +1649,7 @@ spec: - message: One selection required between file or store. rule: '[has(self.file), has(self.store)].exists_one(c, c)' server: - description: Creates a feature server container + description: Creates a remote offline server container properties: env: items: @@ -1764,151 +2005,74 @@ spec: type: object type: object type: object - registry: - description: Registry configures the registry service. One selection - is required. Local is the default setting. + onlineStore: + description: OnlineStore configures the online store service properties: - local: - description: LocalRegistryConfig configures the registry service + persistence: + description: OnlineStorePersistence configures the persistence + settings for the online store service properties: - persistence: - description: RegistryPersistence configures the persistence - settings for the registry service + file: + description: OnlineStoreFilePersistence configures the + file-based persistence for the online store service properties: - file: - description: RegistryFilePersistence configures the - file-based persistence for the registry service + path: + type: string + pvc: + description: PvcConfig defines the settings for a + persistent file store based on PVCs. properties: - cache_mode: - description: |- - CacheMode defines the registry cache update strategy. - Allowed values are "sync" and "thread". - enum: - - none - - sync - - thread - type: string - cache_ttl_seconds: - description: CacheTTLSeconds defines the TTL (in - seconds) for the registry cache. 
- format: int32 - minimum: 0 - type: integer - path: - type: string - pvc: - description: PvcConfig defines the settings for - a persistent file store based on PVCs. + create: + description: Settings for creating a new PVC properties: - create: - description: Settings for creating a new PVC - properties: - accessModes: - description: AccessModes k8s persistent - volume access modes. Defaults to ["ReadWriteOnce"]. - items: - type: string - type: array - resources: - description: Resources describes the storage - resource requirements for a volume. - properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the - minimum amount of compute resources - required. - type: object - type: object - storageClassName: - description: StorageClassName is the name - of an existing StorageClass to which - this persistent volume belongs. - type: string - type: object - x-kubernetes-validations: - - message: PvcCreate is immutable - rule: self == oldSelf - mountPath: - description: |- - MountPath within the container at which the volume should be mounted. - Must start by "/" and cannot contain ':'. - type: string - ref: - description: Reference to an existing field + accessModes: + description: AccessModes k8s persistent volume + access modes. Defaults to ["ReadWriteOnce"]. 
+ items: + type: string + type: array + resources: + description: Resources describes the storage + resource requirements for a volume. properties: - name: - default: "" + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum + amount of compute resources required. + type: object type: object - x-kubernetes-map-type: atomic - required: - - mountPath + storageClassName: + description: StorageClassName is the name + of an existing StorageClass to which this + persistent volume belongs. + type: string type: object x-kubernetes-validations: - - message: One selection is required between ref - and create. - rule: '[has(self.ref), has(self.create)].exists_one(c, - c)' - - message: Mount path must start with '/' and - must not contain ':' - rule: self.mountPath.matches('^/[^:]*$') - s3_additional_kwargs: - additionalProperties: - type: string - type: object - type: object - x-kubernetes-validations: - - message: Registry files must use absolute paths - or be S3 ('s3://') or GS ('gs://') object store - URIs. - rule: '(!has(self.pvc) && has(self.path)) ? (self.path.startsWith(''/'') - || self.path.startsWith(''s3://'') || self.path.startsWith(''gs://'')) - : true' - - message: PVC path must be a file name only, with - no slashes. 
- rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') - : true' - - message: PVC persistence does not support S3 or - GS object store URIs. - rule: '(has(self.pvc) && has(self.path)) ? !(self.path.startsWith(''s3://'') - || self.path.startsWith(''gs://'')) : true' - - message: Additional S3 settings are available only - for S3 object store URIs. - rule: '(has(self.s3_additional_kwargs) && has(self.path)) - ? self.path.startsWith(''s3://'') : true' - store: - description: RegistryDBStorePersistence configures - the DB store persistence for the registry service - properties: - secretKeyName: - description: By default, the selected store "type" - is used as the SecretKeyName + - message: PvcCreate is immutable + rule: self == oldSelf + mountPath: + description: |- + MountPath within the container at which the volume should be mounted. + Must start by "/" and cannot contain ':'. type: string - secretRef: - description: Data store parameters should be placed - as-is from the "feature_store.yaml" under the - secret key. + ref: + description: Reference to an existing field properties: name: default: "" @@ -1919,146 +2083,109 @@ spec: type: string type: object x-kubernetes-map-type: atomic - type: - description: Type of the persistence type you - want to use. - enum: - - sql - - snowflake.registry - type: string required: - - secretRef - - type + - mountPath type: object + x-kubernetes-validations: + - message: One selection is required between ref and + create. + rule: '[has(self.ref), has(self.create)].exists_one(c, + c)' + - message: Mount path must start with '/' and must + not contain ':' + rule: self.mountPath.matches('^/[^:]*$') type: object x-kubernetes-validations: - - message: One selection required between file or store. - rule: '[has(self.file), has(self.store)].exists_one(c, - c)' - server: - description: Creates a registry server container + - message: Ephemeral stores must have absolute paths. + rule: '(!has(self.pvc) && has(self.path)) ? 
self.path.startsWith(''/'') + : true' + - message: PVC path must be a file name only, with no + slashes. + rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') + : true' + - message: Online store does not support S3 or GS buckets. + rule: 'has(self.path) ? !(self.path.startsWith(''s3://'') + || self.path.startsWith(''gs://'')) : true' + store: + description: OnlineStoreDBStorePersistence configures + the DB store persistence for the online store service properties: - env: - items: - description: EnvVar represents an environment variable - present in a Container. + secretKeyName: + description: By default, the selected store "type" + is used as the SecretKeyName + type: string + secretRef: + description: Data store parameters should be placed + as-is from the "feature_store.yaml" under the secret + key. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: + description: Type of the persistence type you want + to use. + enum: + - snowflake.online + - redis + - datastore + - dynamodb + - bigtable + - postgres + - cassandra + - mysql + - hazelcast + - singlestore + - hbase + - elasticsearch + - qdrant + - couchbase.online + - milvus + - hybrid + - mongodb + type: string + required: + - secretRef + - type + type: object + type: object + x-kubernetes-validations: + - message: One selection required between file or store. + rule: '[has(self.file), has(self.store)].exists_one(c, c)' + server: + description: Creates a feature server container + properties: + env: + items: + description: EnvVar represents an environment variable + present in a Container. + properties: + name: + description: Name of the environment variable. Must + be a C_IDENTIFIER. 
+ type: string + value: + description: |- + Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any + type: string + valueFrom: + description: Source for the environment variable's + value. Cannot be used if value is not empty. properties: - name: - description: Name of the environment variable. - Must be a C_IDENTIFIER. - type: string - value: - description: |- - Variable references $(VAR_NAME) are expanded - using the previously defined environment variables in the container and - any - type: string - valueFrom: - description: Source for the environment variable's - value. Cannot be used if value is not empty. - properties: - configMapKeyRef: - description: Selects a key of a ConfigMap. - properties: - key: - description: The key to select. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap - or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - fieldRef: - description: 'Selects a field of the pod: - supports metadata.name, metadata.namespace, - `metadata.labels['''']`, `metadata.' - properties: - apiVersion: - description: Version of the schema the - FieldPath is written in terms of, - defaults to "v1". - type: string - fieldPath: - description: Path of the field to select - in the specified API version. - type: string - required: - - fieldPath - type: object - x-kubernetes-map-type: atomic - resourceFieldRef: - description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, limits. 
- properties: - containerName: - description: 'Container name: required - for volumes, optional for env vars' - type: string - divisor: - anyOf: - - type: integer - - type: string - description: Specifies the output format - of the exposed resources, defaults - to "1" - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - description: 'Required: resource to - select' - type: string - required: - - resource - type: object - x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret in - the pod's namespace - properties: - key: - description: The key of the secret to - select from. Must be a valid secret - key. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the Secret - or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - type: object - required: - - name - type: object - type: array - envFrom: - items: - description: EnvFromSource represents the source - of a set of ConfigMaps - properties: - configMapRef: - description: The ConfigMap to select from + configMapKeyRef: + description: Selects a key of a ConfigMap. properties: + key: + description: The key to select. + type: string name: default: "" description: |- @@ -2068,17 +2195,62 @@ spec: type: string optional: description: Specify whether the ConfigMap - must be defined + or its key must be defined type: boolean + required: + - key type: object x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend - to each key in the ConfigMap. Must be a C_IDENTIFIER. 
- type: string - secretRef: - description: The Secret to select from + fieldRef: + description: 'Selects a field of the pod: supports + metadata.name, metadata.namespace, `metadata.labels['''']`, + `metadata.' + properties: + apiVersion: + description: Version of the schema the FieldPath + is written in terms of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to select + in the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits. + properties: + containerName: + description: 'Container name: required for + volumes, optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format + of the exposed resources, defaults to + "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret in the + pod's namespace properties: + key: + description: The key of the secret to select + from. Must be a valid secret key. + type: string name: default: "" description: |- @@ -2088,1352 +2260,1814 @@ spec: type: string optional: description: Specify whether the Secret - must be defined + or its key must be defined type: boolean + required: + - key type: object x-kubernetes-map-type: atomic type: object - type: array - grpc: - description: Enable gRPC registry server. Defaults - to true if unset. 
- type: boolean - image: - type: string - imagePullPolicy: - description: PullPolicy describes a policy for if/when - to pull a container image - type: string - logLevel: - description: |- - LogLevel sets the logging level for the server - Allowed values: "debug", "info", "warning", "error", "critical". - enum: - - debug - - info - - warning - - error - - critical - type: string - metrics: - description: Metrics exposes Prometheus-compatible - metrics for the Feast server when enabled. - type: boolean - nodeSelector: - additionalProperties: + required: + - name + type: object + type: array + envFrom: + items: + description: EnvFromSource represents the source of + a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend to + each key in the ConfigMap. Must be a C_IDENTIFIER. type: string - type: object - resources: - description: ResourceRequirements describes the compute - resource requirements. - properties: - claims: - description: |- - Claims lists the names of resources, defined in spec.resourceClaims, - that are used by this container. - items: - description: ResourceClaim references one entry - in PodSpec.ResourceClaims. - properties: - name: - description: |- - Name must match the name of one entry in pod.spec.resourceClaims of - the Pod where this field is used. 
- type: string - required: - - name - type: object - type: array - x-kubernetes-list-map-keys: - - name - x-kubernetes-list-type: map - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum amount - of compute resources required. - type: object - type: object - restAPI: - description: Enable REST API registry server. - type: boolean - tls: - description: TlsConfigs configures server TLS for - a feast service. - properties: - disable: - description: will disable TLS for the feast service. - useful in an openshift cluster, for example, - where TLS is configured by default - type: boolean - secretKeyNames: - description: SecretKeyNames defines the secret - key names for the TLS key and cert. - properties: - tlsCrt: - description: defaults to "tls.crt" - type: string - tlsKey: - description: defaults to "tls.key" - type: string - type: object - secretRef: - description: references the local k8s secret where - the TLS key and cert reside - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - type: object - x-kubernetes-validations: - - message: '`secretRef` required if `disable` is false.' - rule: '(!has(self.disable) || !self.disable) ? 
has(self.secretRef) - : true' - volumeMounts: - description: VolumeMounts defines the list of volumes - that should be mounted into the feast container. - items: - description: VolumeMount describes a mounting of - a Volume within a container. + secretRef: + description: The Secret to select from properties: - mountPath: - description: |- - Path within the container at which the volume should be mounted. Must - not contain ':'. - type: string - mountPropagation: - description: |- - mountPropagation determines how mounts are propagated from the host - to container and the other way around. - type: string name: - description: This must match the Name of a Volume. - type: string - readOnly: - description: |- - Mounted read-only if true, read-write otherwise (false or unspecified). - Defaults to false. - type: boolean - recursiveReadOnly: + default: "" description: |- - RecursiveReadOnly specifies whether read-only mounts should be handled - recursively. + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. type: string - subPath: + optional: + description: Specify whether the Secret must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + type: object + type: array + image: + type: string + imagePullPolicy: + description: PullPolicy describes a policy for if/when + to pull a container image + type: string + logLevel: + description: |- + LogLevel sets the logging level for the server + Allowed values: "debug", "info", "warning", "error", "critical". + enum: + - debug + - info + - warning + - error + - critical + type: string + metrics: + description: Metrics exposes Prometheus-compatible metrics + for the Feast server when enabled. + type: boolean + nodeSelector: + additionalProperties: + type: string + type: object + resources: + description: ResourceRequirements describes the compute + resource requirements. 
+ properties: + claims: + description: |- + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. + items: + description: ResourceClaim references one entry + in PodSpec.ResourceClaims. + properties: + name: description: |- - Path within the volume from which the container's volume should be mounted. - Defaults to "" (volume's root). - type: string - subPathExpr: - description: Expanded path within the volume - from which the container's volume should be - mounted. + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. type: string required: - - mountPath - name type: object type: array - workerConfigs: - description: WorkerConfigs defines the worker configuration - for the Feast server. + x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum amount + of compute resources required. + type: object + type: object + tls: + description: TlsConfigs configures server TLS for a feast + service. + properties: + disable: + description: will disable TLS for the feast service. + useful in an openshift cluster, for example, where + TLS is configured by default + type: boolean + secretKeyNames: + description: SecretKeyNames defines the secret key + names for the TLS key and cert. 
properties: - keepAliveTimeout: - description: |- - KeepAliveTimeout is the timeout for keep-alive connections in seconds. - Defaults to 30. - format: int32 - minimum: 1 - type: integer - maxRequests: - description: |- - MaxRequests is the maximum number of requests a worker will process before restarting. - This helps prevent memory leaks. - format: int32 - minimum: 0 - type: integer - maxRequestsJitter: - description: |- - MaxRequestsJitter is the maximum jitter to add to max-requests to prevent - thundering herd effect on worker restart. - format: int32 - minimum: 0 - type: integer - registryTTLSeconds: - description: RegistryTTLSeconds is the number - of seconds after which the registry is refreshed. - format: int32 - minimum: 0 - type: integer - workerConnections: + tlsCrt: + description: defaults to "tls.crt" + type: string + tlsKey: + description: defaults to "tls.key" + type: string + type: object + secretRef: + description: references the local k8s secret where + the TLS key and cert reside + properties: + name: + default: "" description: |- - WorkerConnections is the maximum number of simultaneous clients per worker process. - Defaults to 1000. - format: int32 - minimum: 1 - type: integer - workers: - description: Workers is the number of worker processes. - Use -1 to auto-calculate based on CPU cores - (2 * CPU + 1). - format: int32 - minimum: -1 - type: integer + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string type: object + x-kubernetes-map-type: atomic type: object x-kubernetes-validations: - - message: At least one of restAPI or grpc must be true - rule: self.restAPI == true || self.grpc == true || !has(self.grpc) - type: object - remote: - description: RemoteRegistryConfig points to a remote feast - registry server. - properties: - feastRef: - description: Reference to an existing `FeatureStore` CR - in the same k8s cluster. 
- properties: - name: - description: Name of the FeatureStore - type: string - namespace: - description: Namespace of the FeatureStore - type: string - required: - - name - type: object - hostname: - description: Host address of the remote registry service - - :, e.g. `registry..svc.cluster.local:80` - type: string - tls: - description: TlsRemoteRegistryConfigs configures client - TLS for a remote feast registry. + - message: '`secretRef` required if `disable` is false.' + rule: '(!has(self.disable) || !self.disable) ? has(self.secretRef) + : true' + volumeMounts: + description: VolumeMounts defines the list of volumes + that should be mounted into the feast container. + items: + description: VolumeMount describes a mounting of a Volume + within a container. + properties: + mountPath: + description: |- + Path within the container at which the volume should be mounted. Must + not contain ':'. + type: string + mountPropagation: + description: |- + mountPropagation determines how mounts are propagated from the host + to container and the other way around. + type: string + name: + description: This must match the Name of a Volume. + type: string + readOnly: + description: |- + Mounted read-only if true, read-write otherwise (false or unspecified). + Defaults to false. + type: boolean + recursiveReadOnly: + description: |- + RecursiveReadOnly specifies whether read-only mounts should be handled + recursively. + type: string + subPath: + description: |- + Path within the volume from which the container's volume should be mounted. + Defaults to "" (volume's root). + type: string + subPathExpr: + description: Expanded path within the volume from + which the container's volume should be mounted. + type: string + required: + - mountPath + - name + type: object + type: array + workerConfigs: + description: WorkerConfigs defines the worker configuration + for the Feast server. properties: - certName: - description: defines the configmap key name for the - client TLS cert. 
- type: string - configMapRef: - description: references the local k8s configmap where - the TLS cert resides - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - required: - - certName - - configMapRef + keepAliveTimeout: + description: |- + KeepAliveTimeout is the timeout for keep-alive connections in seconds. + Defaults to 30. + format: int32 + minimum: 1 + type: integer + maxRequests: + description: |- + MaxRequests is the maximum number of requests a worker will process before restarting. + This helps prevent memory leaks. + format: int32 + minimum: 0 + type: integer + maxRequestsJitter: + description: |- + MaxRequestsJitter is the maximum jitter to add to max-requests to prevent + thundering herd effect on worker restart. + format: int32 + minimum: 0 + type: integer + registryTTLSeconds: + description: RegistryTTLSeconds is the number of seconds + after which the registry is refreshed. + format: int32 + minimum: 0 + type: integer + workerConnections: + description: |- + WorkerConnections is the maximum number of simultaneous clients per worker process. + Defaults to 1000. + format: int32 + minimum: 1 + type: integer + workers: + description: Workers is the number of worker processes. + Use -1 to auto-calculate based on CPU cores (2 * + CPU + 1). + format: int32 + minimum: -1 + type: integer type: object type: object - x-kubernetes-validations: - - message: One selection required. - rule: '[has(self.hostname), has(self.feastRef)].exists_one(c, - c)' + type: object + podDisruptionBudgets: + description: PodDisruptionBudgets configures a PodDisruptionBudget + for the FeatureStore deployment. + properties: + maxUnavailable: + anyOf: + - type: integer + - type: string + description: MaxUnavailable specifies the maximum number/percentage + of pods that can be unavailable. 
+ x-kubernetes-int-or-string: true + minAvailable: + anyOf: + - type: integer + - type: string + description: MinAvailable specifies the minimum number/percentage + of pods that must remain available. + x-kubernetes-int-or-string: true type: object x-kubernetes-validations: - - message: One selection required. - rule: '[has(self.local), has(self.remote)].exists_one(c, c)' - securityContext: - description: PodSecurityContext holds pod-level security attributes - and common container settings. + - message: Exactly one of minAvailable or maxUnavailable must + be set. + rule: '[has(self.minAvailable), has(self.maxUnavailable)].exists_one(c, + c)' + registry: + description: Registry configures the registry service. One selection + is required. Local is the default setting. properties: - appArmorProfile: - description: appArmorProfile is the AppArmor options to use - by the containers in this pod. - properties: - localhostProfile: - description: localhostProfile indicates a profile loaded - on the node that should be used. - type: string - type: - description: type indicates which kind of AppArmor profile - will be applied. - type: string - required: - - type - type: object - fsGroup: - description: A special supplemental group that applies to - all containers in a pod. - format: int64 - type: integer - fsGroupChangePolicy: - description: |- - fsGroupChangePolicy defines behavior of changing ownership and permission of the volume - before being exposed inside Pod. - type: string - runAsGroup: - description: |- - The GID to run the entrypoint of the container process. - Uses runtime default if unset. - format: int64 - type: integer - runAsNonRoot: - description: Indicates that the container must run as a non-root - user. - type: boolean - runAsUser: - description: |- - The UID to run the entrypoint of the container process. - Defaults to user specified in image metadata if unspecified. 
- format: int64 - type: integer - seLinuxOptions: - description: The SELinux context to be applied to all containers. - properties: - level: - description: Level is SELinux level label that applies - to the container. - type: string - role: - description: Role is a SELinux role label that applies - to the container. - type: string - type: - description: Type is a SELinux type label that applies - to the container. - type: string - user: - description: User is a SELinux user label that applies - to the container. - type: string - type: object - seccompProfile: - description: |- - The seccomp options to use by the containers in this pod. - Note that this field cannot be set when spec.os. - properties: - localhostProfile: - description: localhostProfile indicates a profile defined - in a file on the node should be used. - type: string - type: - description: type indicates which kind of seccomp profile - will be applied. - type: string - required: - - type - type: object - supplementalGroups: - description: |- - A list of groups applied to the first process run in each container, in addition - to the container's primary GID, the fsG - items: - format: int64 - type: integer - type: array - x-kubernetes-list-type: atomic - sysctls: - description: Sysctls hold a list of namespaced sysctls used - for the pod. - items: - description: Sysctl defines a kernel parameter to be set - properties: - name: - description: Name of a property to set - type: string - value: - description: Value of a property to set - type: string - required: - - name - - value - type: object - type: array - x-kubernetes-list-type: atomic - windowsOptions: - description: The Windows specific settings applied to all - containers. + local: + description: LocalRegistryConfig configures the registry service properties: - gmsaCredentialSpec: - description: |- - GMSACredentialSpec is where the GMSA admission webhook - (https://github. 
- type: string - gmsaCredentialSpecName: - description: GMSACredentialSpecName is the name of the - GMSA credential spec to use. - type: string - hostProcess: - description: HostProcess determines if a container should - be run as a 'Host Process' container. - type: boolean - runAsUserName: - description: The UserName in Windows to run the entrypoint - of the container process. - type: string - type: object - type: object - ui: - description: Creates a UI server container - properties: - env: - items: - description: EnvVar represents an environment variable present - in a Container. - properties: - name: - description: Name of the environment variable. Must - be a C_IDENTIFIER. - type: string - value: - description: |- - Variable references $(VAR_NAME) are expanded - using the previously defined environment variables in the container and - any - type: string - valueFrom: - description: Source for the environment variable's value. - Cannot be used if value is not empty. - properties: - configMapKeyRef: - description: Selects a key of a ConfigMap. - properties: - key: - description: The key to select. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. + persistence: + description: RegistryPersistence configures the persistence + settings for the registry service + properties: + file: + description: RegistryFilePersistence configures the + file-based persistence for the registry service + properties: + cache_mode: + description: |- + CacheMode defines the registry cache update strategy. + Allowed values are "sync" and "thread". + enum: + - none + - sync + - thread + type: string + cache_ttl_seconds: + description: CacheTTLSeconds defines the TTL (in + seconds) for the registry cache. 
+ format: int32 + minimum: 0 + type: integer + path: + type: string + pvc: + description: PvcConfig defines the settings for + a persistent file store based on PVCs. + properties: + create: + description: Settings for creating a new PVC + properties: + accessModes: + description: AccessModes k8s persistent + volume access modes. Defaults to ["ReadWriteOnce"]. + items: + type: string + type: array + resources: + description: Resources describes the storage + resource requirements for a volume. + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the + minimum amount of compute resources + required. + type: object + type: object + storageClassName: + description: StorageClassName is the name + of an existing StorageClass to which + this persistent volume belongs. + type: string + type: object + x-kubernetes-validations: + - message: PvcCreate is immutable + rule: self == oldSelf + mountPath: + description: |- + MountPath within the container at which the volume should be mounted. + Must start by "/" and cannot contain ':'. + type: string + ref: + description: Reference to an existing field + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. 
+ type: string + type: object + x-kubernetes-map-type: atomic + required: + - mountPath + type: object + x-kubernetes-validations: + - message: One selection is required between ref + and create. + rule: '[has(self.ref), has(self.create)].exists_one(c, + c)' + - message: Mount path must start with '/' and + must not contain ':' + rule: self.mountPath.matches('^/[^:]*$') + s3_additional_kwargs: + additionalProperties: type: string - optional: - description: Specify whether the ConfigMap or - its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - fieldRef: - description: 'Selects a field of the pod: supports - metadata.name, metadata.namespace, `metadata.labels['''']`, - `metadata.' + type: object + type: object + x-kubernetes-validations: + - message: Registry files must use absolute paths + or be S3 ('s3://') or GS ('gs://') object store + URIs. + rule: '(!has(self.pvc) && has(self.path)) ? (self.path.startsWith(''/'') + || self.path.startsWith(''s3://'') || self.path.startsWith(''gs://'')) + : true' + - message: PVC path must be a file name only, with + no slashes. + rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') + : true' + - message: PVC persistence does not support S3 or + GS object store URIs. + rule: '(has(self.pvc) && has(self.path)) ? !(self.path.startsWith(''s3://'') + || self.path.startsWith(''gs://'')) : true' + - message: Additional S3 settings are available only + for S3 object store URIs. + rule: '(has(self.s3_additional_kwargs) && has(self.path)) + ? self.path.startsWith(''s3://'') : true' + store: + description: RegistryDBStorePersistence configures + the DB store persistence for the registry service + properties: + secretKeyName: + description: By default, the selected store "type" + is used as the SecretKeyName + type: string + secretRef: + description: Data store parameters should be placed + as-is from the "feature_store.yaml" under the + secret key. 
+ properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: + description: Type of the persistence type you + want to use. + enum: + - sql + - snowflake.registry + type: string + required: + - secretRef + - type + type: object + type: object + x-kubernetes-validations: + - message: One selection required between file or store. + rule: '[has(self.file), has(self.store)].exists_one(c, + c)' + server: + description: Creates a registry server container + properties: + env: + items: + description: EnvVar represents an environment variable + present in a Container. properties: - apiVersion: - description: Version of the schema the FieldPath - is written in terms of, defaults to "v1". + name: + description: Name of the environment variable. + Must be a C_IDENTIFIER. type: string - fieldPath: - description: Path of the field to select in - the specified API version. + value: + description: |- + Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any type: string - required: - - fieldPath - type: object - x-kubernetes-map-type: atomic - resourceFieldRef: - description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, limits. + valueFrom: + description: Source for the environment variable's + value. Cannot be used if value is not empty. + properties: + configMapKeyRef: + description: Selects a key of a ConfigMap. + properties: + key: + description: The key to select. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. 
+ type: string + optional: + description: Specify whether the ConfigMap + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + fieldRef: + description: 'Selects a field of the pod: + supports metadata.name, metadata.namespace, + `metadata.labels['''']`, `metadata.' + properties: + apiVersion: + description: Version of the schema the + FieldPath is written in terms of, + defaults to "v1". + type: string + fieldPath: + description: Path of the field to select + in the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits. + properties: + containerName: + description: 'Container name: required + for volumes, optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format + of the exposed resources, defaults + to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to + select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret in + the pod's namespace + properties: + key: + description: The key of the secret to + select from. Must be a valid secret + key. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. 
+ type: string + optional: + description: Specify whether the Secret + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + type: object + required: + - name + type: object + type: array + envFrom: + items: + description: EnvFromSource represents the source + of a set of ConfigMaps properties: - containerName: - description: 'Container name: required for volumes, - optional for env vars' + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap + must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend + to each key in the ConfigMap. Must be a C_IDENTIFIER. type: string - divisor: + secretRef: + description: The Secret to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret + must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + type: object + type: array + grpc: + description: Enable gRPC registry server. Defaults + to true if unset. + type: boolean + image: + type: string + imagePullPolicy: + description: PullPolicy describes a policy for if/when + to pull a container image + type: string + logLevel: + description: |- + LogLevel sets the logging level for the server + Allowed values: "debug", "info", "warning", "error", "critical". + enum: + - debug + - info + - warning + - error + - critical + type: string + metrics: + description: Metrics exposes Prometheus-compatible + metrics for the Feast server when enabled. 
+ type: boolean + nodeSelector: + additionalProperties: + type: string + type: object + resources: + description: ResourceRequirements describes the compute + resource requirements. + properties: + claims: + description: |- + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. + items: + description: ResourceClaim references one entry + in PodSpec.ResourceClaims. + properties: + name: + description: |- + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. + type: string + required: + - name + type: object + type: array + x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: anyOf: - type: integer - type: string - description: Specifies the output format of - the exposed resources, defaults to "1" pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ x-kubernetes-int-or-string: true - resource: - description: 'Required: resource to select' - type: string - required: - - resource - type: object - x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret in the pod's - namespace - properties: - key: - description: The key of the secret to select - from. Must be a valid secret key. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
- type: string - optional: - description: Specify whether the Secret or its - key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - type: object - required: - - name - type: object - type: array - envFrom: - items: - description: EnvFromSource represents the source of a set - of ConfigMaps - properties: - configMapRef: - description: The ConfigMap to select from - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap must - be defined - type: boolean - type: object - x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend to each - key in the ConfigMap. Must be a C_IDENTIFIER. - type: string - secretRef: - description: The Secret to select from - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the Secret must be - defined - type: boolean - type: object - x-kubernetes-map-type: atomic - type: object - type: array - image: - type: string - imagePullPolicy: - description: PullPolicy describes a policy for if/when to - pull a container image - type: string - logLevel: - description: |- - LogLevel sets the logging level for the server - Allowed values: "debug", "info", "warning", "error", "critical". - enum: - - debug - - info - - warning - - error - - critical - type: string - metrics: - description: Metrics exposes Prometheus-compatible metrics - for the Feast server when enabled. - type: boolean - nodeSelector: - additionalProperties: - type: string + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. 
+ type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum amount + of compute resources required. + type: object + type: object + restAPI: + description: Enable REST API registry server. + type: boolean + tls: + description: TlsConfigs configures server TLS for + a feast service. + properties: + disable: + description: will disable TLS for the feast service. + useful in an openshift cluster, for example, + where TLS is configured by default + type: boolean + secretKeyNames: + description: SecretKeyNames defines the secret + key names for the TLS key and cert. + properties: + tlsCrt: + description: defaults to "tls.crt" + type: string + tlsKey: + description: defaults to "tls.key" + type: string + type: object + secretRef: + description: references the local k8s secret where + the TLS key and cert reside + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: object + x-kubernetes-validations: + - message: '`secretRef` required if `disable` is false.' + rule: '(!has(self.disable) || !self.disable) ? has(self.secretRef) + : true' + volumeMounts: + description: VolumeMounts defines the list of volumes + that should be mounted into the feast container. + items: + description: VolumeMount describes a mounting of + a Volume within a container. + properties: + mountPath: + description: |- + Path within the container at which the volume should be mounted. Must + not contain ':'. + type: string + mountPropagation: + description: |- + mountPropagation determines how mounts are propagated from the host + to container and the other way around. 
+ type: string + name: + description: This must match the Name of a Volume. + type: string + readOnly: + description: |- + Mounted read-only if true, read-write otherwise (false or unspecified). + Defaults to false. + type: boolean + recursiveReadOnly: + description: |- + RecursiveReadOnly specifies whether read-only mounts should be handled + recursively. + type: string + subPath: + description: |- + Path within the volume from which the container's volume should be mounted. + Defaults to "" (volume's root). + type: string + subPathExpr: + description: Expanded path within the volume + from which the container's volume should be + mounted. + type: string + required: + - mountPath + - name + type: object + type: array + workerConfigs: + description: WorkerConfigs defines the worker configuration + for the Feast server. + properties: + keepAliveTimeout: + description: |- + KeepAliveTimeout is the timeout for keep-alive connections in seconds. + Defaults to 30. + format: int32 + minimum: 1 + type: integer + maxRequests: + description: |- + MaxRequests is the maximum number of requests a worker will process before restarting. + This helps prevent memory leaks. + format: int32 + minimum: 0 + type: integer + maxRequestsJitter: + description: |- + MaxRequestsJitter is the maximum jitter to add to max-requests to prevent + thundering herd effect on worker restart. + format: int32 + minimum: 0 + type: integer + registryTTLSeconds: + description: RegistryTTLSeconds is the number + of seconds after which the registry is refreshed. + format: int32 + minimum: 0 + type: integer + workerConnections: + description: |- + WorkerConnections is the maximum number of simultaneous clients per worker process. + Defaults to 1000. + format: int32 + minimum: 1 + type: integer + workers: + description: Workers is the number of worker processes. + Use -1 to auto-calculate based on CPU cores + (2 * CPU + 1). 
+ format: int32 + minimum: -1 + type: integer + type: object + type: object + x-kubernetes-validations: + - message: At least one of restAPI or grpc must be true + rule: self.restAPI == true || self.grpc == true || !has(self.grpc) type: object - resources: - description: ResourceRequirements describes the compute resource - requirements. + remote: + description: RemoteRegistryConfig points to a remote feast + registry server. properties: - claims: - description: |- - Claims lists the names of resources, defined in spec.resourceClaims, - that are used by this container. - items: - description: ResourceClaim references one entry in PodSpec.ResourceClaims. - properties: - name: - description: |- - Name must match the name of one entry in pod.spec.resourceClaims of - the Pod where this field is used. - type: string - required: - - name - type: object - type: array - x-kubernetes-list-map-keys: + feastRef: + description: Reference to an existing `FeatureStore` CR + in the same k8s cluster. + properties: + name: + description: Name of the FeatureStore + type: string + namespace: + description: Namespace of the FeatureStore + type: string + required: - name - x-kubernetes-list-type: map - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum amount of - compute resources required. 
type: object - type: object - tls: - description: TlsConfigs configures server TLS for a feast - service. - properties: - disable: - description: will disable TLS for the feast service. useful - in an openshift cluster, for example, where TLS is configured - by default - type: boolean - secretKeyNames: - description: SecretKeyNames defines the secret key names - for the TLS key and cert. - properties: - tlsCrt: - description: defaults to "tls.crt" - type: string - tlsKey: - description: defaults to "tls.key" - type: string - type: object - secretRef: - description: references the local k8s secret where the - TLS key and cert reside + hostname: + description: Host address of the remote registry service + - :, e.g. `registry..svc.cluster.local:80` + type: string + tls: + description: TlsRemoteRegistryConfigs configures client + TLS for a remote feast registry. properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. + certName: + description: defines the configmap key name for the + client TLS cert. type: string + configMapRef: + description: references the local k8s configmap where + the TLS cert resides + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + required: + - certName + - configMapRef type: object - x-kubernetes-map-type: atomic type: object x-kubernetes-validations: - - message: '`secretRef` required if `disable` is false.' - rule: '(!has(self.disable) || !self.disable) ? has(self.secretRef) - : true' - volumeMounts: - description: VolumeMounts defines the list of volumes that - should be mounted into the feast container. - items: - description: VolumeMount describes a mounting of a Volume - within a container. 
- properties: - mountPath: - description: |- - Path within the container at which the volume should be mounted. Must - not contain ':'. - type: string - mountPropagation: - description: |- - mountPropagation determines how mounts are propagated from the host - to container and the other way around. - type: string - name: - description: This must match the Name of a Volume. - type: string - readOnly: - description: |- - Mounted read-only if true, read-write otherwise (false or unspecified). - Defaults to false. - type: boolean - recursiveReadOnly: - description: |- - RecursiveReadOnly specifies whether read-only mounts should be handled - recursively. - type: string - subPath: - description: |- - Path within the volume from which the container's volume should be mounted. - Defaults to "" (volume's root). - type: string - subPathExpr: - description: Expanded path within the volume from which - the container's volume should be mounted. - type: string - required: - - mountPath - - name - type: object - type: array - workerConfigs: - description: WorkerConfigs defines the worker configuration - for the Feast server. + - message: One selection required. + rule: '[has(self.hostname), has(self.feastRef)].exists_one(c, + c)' + type: object + x-kubernetes-validations: + - message: One selection required. + rule: '[has(self.local), has(self.remote)].exists_one(c, c)' + scaling: + description: Scaling configures horizontal scaling for the FeatureStore + deployment (e.g. HPA autoscaling). + properties: + autoscaling: + description: |- + Autoscaling configures a HorizontalPodAutoscaler for the FeatureStore deployment. + Mutually exclusive with spec.replicas. properties: - keepAliveTimeout: - description: |- - KeepAliveTimeout is the timeout for keep-alive connections in seconds. - Defaults to 30. - format: int32 - minimum: 1 - type: integer - maxRequests: - description: |- - MaxRequests is the maximum number of requests a worker will process before restarting. 
- This helps prevent memory leaks. - format: int32 - minimum: 0 - type: integer - maxRequestsJitter: - description: |- - MaxRequestsJitter is the maximum jitter to add to max-requests to prevent - thundering herd effect on worker restart. - format: int32 - minimum: 0 - type: integer - registryTTLSeconds: - description: RegistryTTLSeconds is the number of seconds - after which the registry is refreshed. - format: int32 - minimum: 0 - type: integer - workerConnections: - description: |- - WorkerConnections is the maximum number of simultaneous clients per worker process. - Defaults to 1000. + behavior: + description: Behavior configures the scaling behavior + of the target. + properties: + scaleDown: + description: scaleDown is scaling policy for scaling + Down. + properties: + policies: + description: policies is a list of potential scaling + polices which can be used during scaling. + items: + description: HPAScalingPolicy is a single policy + which must hold true for a specified past + interval. + properties: + periodSeconds: + description: periodSeconds specifies the + window of time for which the policy should + hold true. + format: int32 + type: integer + type: + description: type is used to specify the + scaling policy. + type: string + value: + description: |- + value contains the amount of change which is permitted by the policy. + It must be greater than zero + format: int32 + type: integer + required: + - periodSeconds + - type + - value + type: object + type: array + x-kubernetes-list-type: atomic + selectPolicy: + description: |- + selectPolicy is used to specify which policy should be used. + If not set, the default value Max is used. + type: string + stabilizationWindowSeconds: + description: |- + stabilizationWindowSeconds is the number of seconds for which past recommendations should be + considered while scaling up + format: int32 + type: integer + type: object + scaleUp: + description: scaleUp is scaling policy for scaling + Up. 
+ properties: + policies: + description: policies is a list of potential scaling + polices which can be used during scaling. + items: + description: HPAScalingPolicy is a single policy + which must hold true for a specified past + interval. + properties: + periodSeconds: + description: periodSeconds specifies the + window of time for which the policy should + hold true. + format: int32 + type: integer + type: + description: type is used to specify the + scaling policy. + type: string + value: + description: |- + value contains the amount of change which is permitted by the policy. + It must be greater than zero + format: int32 + type: integer + required: + - periodSeconds + - type + - value + type: object + type: array + x-kubernetes-list-type: atomic + selectPolicy: + description: |- + selectPolicy is used to specify which policy should be used. + If not set, the default value Max is used. + type: string + stabilizationWindowSeconds: + description: |- + stabilizationWindowSeconds is the number of seconds for which past recommendations should be + considered while scaling up + format: int32 + type: integer + type: object + type: object + maxReplicas: + description: MaxReplicas is the upper limit for the number + of replicas. Required. format: int32 minimum: 1 type: integer - workers: - description: Workers is the number of worker processes. - Use -1 to auto-calculate based on CPU cores (2 * CPU - + 1). - format: int32 - minimum: -1 - type: integer - type: object - type: object - volumes: - description: Volumes specifies the volumes to mount in the FeatureStore - deployment. - items: - description: Volume represents a named volume in a pod that - may be accessed by any container in the pod. 
- properties: - awsElasticBlockStore: - description: |- - awsElasticBlockStore represents an AWS Disk resource that is attached to a - kubelet's host machine and then exposed to th - properties: - fsType: - description: fsType is the filesystem type of the volume - that you want to mount. - type: string - partition: - description: |- - partition is the partition in the volume that you want to mount. - If omitted, the default is to mount by volume name. - format: int32 - type: integer - readOnly: - description: |- - readOnly value true will force the readOnly setting in VolumeMounts. - More info: https://kubernetes. - type: boolean - volumeID: - description: |- - volumeID is unique ID of the persistent disk resource in AWS (Amazon EBS volume). - More info: https://kubernetes. - type: string - required: - - volumeID - type: object - azureDisk: - description: azureDisk represents an Azure Data Disk mount - on the host and bind mount to the pod. - properties: - cachingMode: - description: 'cachingMode is the Host Caching mode: - None, Read Only, Read Write.' - type: string - diskName: - description: diskName is the Name of the data disk in - the blob storage - type: string - diskURI: - description: diskURI is the URI of data disk in the - blob storage - type: string - fsType: - description: |- - fsType is Filesystem type to mount. - Must be a filesystem type supported by the host operating system. - Ex. - type: string - kind: - description: 'kind expected values are Shared: multiple - blob disks per storage account Dedicated: single - blob disk per storage accoun' - type: string - readOnly: + metrics: + description: Metrics contains the specifications for which + to use to calculate the desired replica count. + items: description: |- - readOnly Defaults to false (read/write). ReadOnly here will force - the ReadOnly setting in VolumeMounts. 
- type: boolean - required: - - diskName - - diskURI - type: object - azureFile: - description: azureFile represents an Azure File Service - mount on the host and bind mount to the pod. - properties: - readOnly: - description: |- - readOnly defaults to false (read/write). ReadOnly here will force - the ReadOnly setting in VolumeMounts. - type: boolean - secretName: - description: secretName is the name of secret that - contains Azure Storage Account Name and Key - type: string - shareName: - description: shareName is the azure share Name - type: string - required: - - secretName - - shareName - type: object - cephfs: - description: cephFS represents a Ceph FS mount on the host - that shares a pod's lifetime - properties: - monitors: - description: |- - monitors is Required: Monitors is a collection of Ceph monitors - More info: https://examples.k8s. - items: - type: string - type: array - x-kubernetes-list-type: atomic - path: - description: 'path is Optional: Used as the mounted - root, rather than the full Ceph tree, default is /' - type: string - readOnly: - description: |- - readOnly is Optional: Defaults to false (read/write). ReadOnly here will force - the ReadOnly setting in VolumeMounts. - type: boolean - secretFile: - description: 'secretFile is Optional: SecretFile is - the path to key ring for User, default is /etc/ceph/user.' - type: string - secretRef: - description: 'secretRef is Optional: SecretRef is reference - to the authentication secret for User, default is - empty.' - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - user: - description: |- - user is optional: User is the rados user name, default is admin - More info: https://examples.k8s. 
- type: string - required: - - monitors - type: object - cinder: - description: |- - cinder represents a cinder volume attached and mounted on kubelets host machine. - More info: https://examples.k8s. - properties: - fsType: - description: |- - fsType is the filesystem type to mount. - Must be a filesystem type supported by the host operating system. - type: string - readOnly: - description: |- - readOnly defaults to false (read/write). ReadOnly here will force - the ReadOnly setting in VolumeMounts. - type: boolean - secretRef: - description: |- - secretRef is optional: points to a secret object containing parameters used to connect - to OpenStack. + MetricSpec specifies how to scale based on a single metric + (only `type` and one other matching field should be set at on properties: - name: - default: "" + containerResource: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - volumeID: - description: |- - volumeID used to identify the volume in cinder. - More info: https://examples.k8s.io/mysql-cinder-pd/README.md - type: string - required: - - volumeID - type: object - configMap: - description: configMap represents a configMap that should - populate this volume - properties: - defaultMode: - description: 'defaultMode is optional: mode bits used - to set permissions on created files by default.' - format: int32 - type: integer - items: - description: |- - items if unspecified, each key-value pair in the Data field of the referenced - ConfigMap will be projected into the volum - items: - description: Maps a string key to a path within a - volume. - properties: - key: - description: key is the key to project. - type: string - mode: - description: 'mode is Optional: mode bits used - to set permissions on this file.' 
- format: int32 - type: integer - path: - description: |- - path is the relative path of the file to map the key to. - May not be an absolute path. - type: string - required: - - key - - path - type: object - type: array - x-kubernetes-list-type: atomic - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: optional specify whether the ConfigMap - or its keys must be defined - type: boolean - type: object - x-kubernetes-map-type: atomic - csi: - description: csi (Container Storage Interface) represents - ephemeral storage that is handled by certain external - CSI drivers (Beta fea - properties: - driver: - description: driver is the name of the CSI driver that - handles this volume. - type: string - fsType: - description: fsType to mount. Ex. "ext4", "xfs", "ntfs". - type: string - nodePublishSecretRef: - description: |- - nodePublishSecretRef is a reference to the secret object containing - sensitive information to pass to the CSI driver to c - properties: - name: - default: "" + containerResource refers to a resource metric (such as those specified in + requests and limits) known to Kubernetes descr + properties: + container: + description: container is the name of the container + in the pods of the scaling target + type: string + name: + description: name is the name of the resource + in question. 
+ type: string + target: + description: target specifies the target value + for the given metric + properties: + averageUtilization: + description: "averageUtilization is the + target value of the average of the\nresource + metric across all relevant pods, represented + as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether the + metric type is Utilization, Value, or + AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value of + the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - container + - name + - target + type: object + external: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - readOnly: - description: |- - readOnly specifies a read-only configuration for the volume. - Defaults to false (read/write). - type: boolean - volumeAttributes: - additionalProperties: - type: string - description: |- - volumeAttributes stores driver-specific properties that are passed to the CSI - driver. - type: object - required: - - driver - type: object - downwardAPI: - description: downwardAPI represents downward API about the - pod that should populate this volume - properties: - defaultMode: - description: 'Optional: mode bits to use on created - files by default.' 
- format: int32 - type: integer - items: - description: Items is a list of downward API volume - file - items: - description: DownwardAPIVolumeFile represents information - to create the file containing the pod field - properties: - fieldRef: - description: 'Required: Selects a field of the - pod: only annotations, labels, name, namespace - and uid are supported.' - properties: - apiVersion: - description: Version of the schema the FieldPath - is written in terms of, defaults to "v1". - type: string - fieldPath: - description: Path of the field to select in - the specified API version. - type: string - required: - - fieldPath - type: object - x-kubernetes-map-type: atomic - mode: - description: |- - Optional: mode bits used to set permissions on this file, must be an octal value - between 0000 and 0777 or a decimal valu - format: int32 - type: integer - path: - description: 'Required: Path is the relative - path name of the file to be created. Must not - be absolute or contain the ''..'' path.' - type: string - resourceFieldRef: - description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, requests. - properties: - containerName: - description: 'Container name: required for - volumes, optional for env vars' - type: string - divisor: - anyOf: - - type: integer - - type: string - description: Specifies the output format of - the exposed resources, defaults to "1" - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - description: 'Required: resource to select' - type: string - required: - - resource - type: object - x-kubernetes-map-type: atomic - required: - - path - type: object - type: array - x-kubernetes-list-type: atomic - type: object - emptyDir: - description: |- - emptyDir represents a temporary directory that shares a pod's lifetime. - More info: https://kubernetes. 
- properties: - medium: - description: medium represents what type of storage - medium should back this directory. - type: string - sizeLimit: - anyOf: - - type: integer - - type: string - description: sizeLimit is the total amount of local - storage required for this EmptyDir volume. - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - ephemeral: - description: ephemeral represents a volume that is handled - by a cluster storage driver. - properties: - volumeClaimTemplate: - description: Will be used to create a stand-alone PVC - to provision the volume. - properties: - metadata: - description: |- - May contain labels and annotations that will be copied into the PVC - when creating it. - type: object - spec: - description: The specification for the PersistentVolumeClaim. + external refers to a global metric that is not associated + with any Kubernetes object. properties: - accessModes: - description: |- - accessModes contains the desired access modes the volume should have. - More info: https://kubernetes. - items: - type: string - type: array - x-kubernetes-list-type: atomic - dataSource: - description: |- - dataSource field can be used to specify either: - * An existing VolumeSnapshot object (snapshot.storage.k8s. + metric: + description: metric identifies the target metric + by name and selector properties: - apiGroup: - description: APIGroup is the group for the - resource being referenced. 
- type: string - kind: - description: Kind is the type of resource - being referenced - type: string name: - description: Name is the name of resource - being referenced + description: name is the name of the given + metric type: string + selector: + description: "selector is the string-encoded + form of a standard kubernetes label selector + for the given metric\nWhen set, it is + passed " + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic required: - - kind - name type: object - x-kubernetes-map-type: atomic - dataSourceRef: - description: |- - dataSourceRef specifies the object from which to populate the volume with data, if a non-empty - volume is desired. + target: + description: target specifies the target value + for the given metric properties: - apiGroup: - description: APIGroup is the group for the - resource being referenced. 
+ averageUtilization: + description: "averageUtilization is the + target value of the average of the\nresource + metric across all relevant pods, represented + as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether the + metric type is Utilization, Value, or + AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value of + the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - metric + - target + type: object + object: + description: |- + object refers to a metric describing a single kubernetes object + (for example, hits-per-second on an Ingress object). + properties: + describedObject: + description: describedObject specifies the descriptions + of a object,such as kind,name apiVersion + properties: + apiVersion: + description: apiVersion is the API version + of the referent type: string kind: - description: Kind is the type of resource - being referenced + description: 'kind is the kind of the referent; + More info: https://git.k8s.' type: string name: - description: Name is the name of resource - being referenced - type: string - namespace: - description: |- - Namespace is the namespace of resource being referenced - Note that when a namespace is specified, a gateway.networking. + description: 'name is the name of the referent; + More info: https://kubernetes.' 
type: string required: - kind - name type: object - resources: - description: resources represents the minimum - resources the volume should have. + metric: + description: metric identifies the target metric + by name and selector properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum - amount of compute resources required. + name: + description: name is the name of the given + metric + type: string + selector: + description: "selector is the string-encoded + form of a standard kubernetes label selector + for the given metric\nWhen set, it is + passed " + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object type: object + x-kubernetes-map-type: atomic + required: + - name type: object - selector: - description: selector is a label query over - volumes to consider for binding. + target: + description: target specifies the target value + for the given metric properties: - matchExpressions: - description: matchExpressions is a list - of label selector requirements. The requirements - are ANDed. - items: - description: |- - A label selector requirement is a selector that contains values, a key, and an operator that - relates the key and values. - properties: - key: - description: key is the label key - that the selector applies to. - type: string - operator: - description: |- - operator represents a key's relationship to a set of values. - Valid operators are In, NotIn, Exists and DoesNotExist. - type: string - values: + averageUtilization: + description: "averageUtilization is the + target value of the average of the\nresource + metric across all relevant pods, represented + as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether the + metric type is Utilization, Value, or + AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value of + the metric (as a quantity). 
+ pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - describedObject + - metric + - target + type: object + pods: + description: |- + pods refers to a metric describing each pod in the current scale target + (for example, transactions-processed-per-second) + properties: + metric: + description: metric identifies the target metric + by name and selector + properties: + name: + description: name is the name of the given + metric + type: string + selector: + description: "selector is the string-encoded + form of a standard kubernetes label selector + for the given metric\nWhen set, it is + passed " + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: description: |- - values is an array of string values. If the operator is In or NotIn, - the values array must be non-empty. - items: - type: string - type: array - x-kubernetes-list-type: atomic - required: - - key - - operator - type: object - type: array - x-kubernetes-list-type: atomic - matchLabels: - additionalProperties: - type: string - description: matchLabels is a map of {key,value} - pairs. + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object type: object + x-kubernetes-map-type: atomic + required: + - name type: object - x-kubernetes-map-type: atomic - storageClassName: - description: |- - storageClassName is the name of the StorageClass required by the claim. - More info: https://kubernetes. - type: string - volumeAttributesClassName: - description: volumeAttributesClassName may be - used to set the VolumeAttributesClass used - by this claim. - type: string - volumeMode: - description: volumeMode defines what type of - volume is required by the claim. - type: string - volumeName: - description: volumeName is the binding reference - to the PersistentVolume backing this claim. + target: + description: target specifies the target value + for the given metric + properties: + averageUtilization: + description: "averageUtilization is the + target value of the average of the\nresource + metric across all relevant pods, represented + as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether the + metric type is Utilization, Value, or + AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value of + the metric (as a quantity). 
+ pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - metric + - target + type: object + resource: + description: |- + resource refers to a resource metric (such as those specified in + requests and limits) known to Kubernetes describing eac + properties: + name: + description: name is the name of the resource + in question. type: string + target: + description: target specifies the target value + for the given metric + properties: + averageUtilization: + description: "averageUtilization is the + target value of the average of the\nresource + metric across all relevant pods, represented + as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether the + metric type is Utilization, Value, or + AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value of + the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - name + - target type: object + type: + description: type is the type of metric source. + type: string required: - - spec + - type type: object - type: object - fc: - description: fc represents a Fibre Channel resource that - is attached to a kubelet's host machine and then exposed - to the pod. + type: array + minReplicas: + description: MinReplicas is the lower limit for the number + of replicas. 
Defaults to 1. + format: int32 + minimum: 1 + type: integer + required: + - maxReplicas + type: object + type: object + securityContext: + description: PodSecurityContext holds pod-level security attributes + and common container settings. + properties: + appArmorProfile: + description: appArmorProfile is the AppArmor options to use + by the containers in this pod. + properties: + localhostProfile: + description: localhostProfile indicates a profile loaded + on the node that should be used. + type: string + type: + description: type indicates which kind of AppArmor profile + will be applied. + type: string + required: + - type + type: object + fsGroup: + description: A special supplemental group that applies to + all containers in a pod. + format: int64 + type: integer + fsGroupChangePolicy: + description: |- + fsGroupChangePolicy defines behavior of changing ownership and permission of the volume + before being exposed inside Pod. + type: string + runAsGroup: + description: |- + The GID to run the entrypoint of the container process. + Uses runtime default if unset. + format: int64 + type: integer + runAsNonRoot: + description: Indicates that the container must run as a non-root + user. + type: boolean + runAsUser: + description: |- + The UID to run the entrypoint of the container process. + Defaults to user specified in image metadata if unspecified. + format: int64 + type: integer + seLinuxOptions: + description: The SELinux context to be applied to all containers. + properties: + level: + description: Level is SELinux level label that applies + to the container. + type: string + role: + description: Role is a SELinux role label that applies + to the container. + type: string + type: + description: Type is a SELinux type label that applies + to the container. + type: string + user: + description: User is a SELinux user label that applies + to the container. 
+ type: string + type: object + seccompProfile: + description: |- + The seccomp options to use by the containers in this pod. + Note that this field cannot be set when spec.os. + properties: + localhostProfile: + description: localhostProfile indicates a profile defined + in a file on the node should be used. + type: string + type: + description: type indicates which kind of seccomp profile + will be applied. + type: string + required: + - type + type: object + supplementalGroups: + description: |- + A list of groups applied to the first process run in each container, in addition + to the container's primary GID, the fsG + items: + format: int64 + type: integer + type: array + x-kubernetes-list-type: atomic + sysctls: + description: Sysctls hold a list of namespaced sysctls used + for the pod. + items: + description: Sysctl defines a kernel parameter to be set properties: - fsType: - description: |- - fsType is the filesystem type to mount. - Must be a filesystem type supported by the host operating system. - Ex. + name: + description: Name of a property to set type: string - lun: - description: 'lun is Optional: FC target lun number' - format: int32 - type: integer - readOnly: - description: |- - readOnly is Optional: Defaults to false (read/write). ReadOnly here will force - the ReadOnly setting in VolumeMounts. - type: boolean - targetWWNs: - description: 'targetWWNs is Optional: FC target worldwide - names (WWNs)' + value: + description: Value of a property to set + type: string + required: + - name + - value + type: object + type: array + x-kubernetes-list-type: atomic + windowsOptions: + description: The Windows specific settings applied to all + containers. + properties: + gmsaCredentialSpec: + description: |- + GMSACredentialSpec is where the GMSA admission webhook + (https://github. + type: string + gmsaCredentialSpecName: + description: GMSACredentialSpecName is the name of the + GMSA credential spec to use. 
+ type: string + hostProcess: + description: HostProcess determines if a container should + be run as a 'Host Process' container. + type: boolean + runAsUserName: + description: The UserName in Windows to run the entrypoint + of the container process. + type: string + type: object + type: object + topologySpreadConstraints: + description: TopologySpreadConstraints defines how pods are spread + across topology domains. + items: + description: TopologySpreadConstraint specifies how to spread + matching pods among the given topology. + properties: + labelSelector: + description: LabelSelector is used to find matching pods. + properties: + matchExpressions: + description: matchExpressions is a list of label selector + requirements. The requirements are ANDed. items: - type: string + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that the selector + applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object type: array x-kubernetes-list-type: atomic - wwids: - description: "wwids Optional: FC volume world wide identifiers - (wwids)\nEither wwids or combination of targetWWNs - and lun must be set, " - items: + matchLabels: + additionalProperties: type: string - type: array - x-kubernetes-list-type: atomic + description: matchLabels is a map of {key,value} pairs. 
+ type: object type: object - flexVolume: + x-kubernetes-map-type: atomic + matchLabelKeys: description: |- - flexVolume represents a generic volume resource that is - provisioned/attached using an exec based plugin. + MatchLabelKeys is a set of pod label keys to select the pods over which + spreading will be calculated. + items: + type: string + type: array + x-kubernetes-list-type: atomic + maxSkew: + description: MaxSkew describes the degree to which pods + may be unevenly distributed. + format: int32 + type: integer + minDomains: + description: MinDomains indicates a minimum number of eligible + domains. + format: int32 + type: integer + nodeAffinityPolicy: + description: |- + NodeAffinityPolicy indicates how we will treat Pod's nodeAffinity/nodeSelector + when calculating pod topology spread skew + type: string + nodeTaintsPolicy: + description: |- + NodeTaintsPolicy indicates how we will treat node taints when calculating + pod topology spread skew. + type: string + topologyKey: + description: TopologyKey is the key of node labels. + type: string + whenUnsatisfiable: + description: |- + WhenUnsatisfiable indicates how to deal with a pod if it doesn't satisfy + the spread constraint. + type: string + required: + - maxSkew + - topologyKey + - whenUnsatisfiable + type: object + type: array + ui: + description: Creates a UI server container + properties: + env: + items: + description: EnvVar represents an environment variable present + in a Container. properties: - driver: - description: driver is the name of the driver to use - for this volume. + name: + description: Name of the environment variable. Must + be a C_IDENTIFIER. type: string - fsType: + value: description: |- - fsType is the filesystem type to mount. - Must be a filesystem type supported by the host operating system. - Ex. 
+ Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any type: string - options: - additionalProperties: - type: string - description: 'options is Optional: this field holds - extra command options if any.' + valueFrom: + description: Source for the environment variable's value. + Cannot be used if value is not empty. + properties: + configMapKeyRef: + description: Selects a key of a ConfigMap. + properties: + key: + description: The key to select. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap or + its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + fieldRef: + description: 'Selects a field of the pod: supports + metadata.name, metadata.namespace, `metadata.labels['''']`, + `metadata.' + properties: + apiVersion: + description: Version of the schema the FieldPath + is written in terms of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to select in + the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits. 
+ properties: + containerName: + description: 'Container name: required for volumes, + optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format of + the exposed resources, defaults to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret in the pod's + namespace + properties: + key: + description: The key of the secret to select + from. Must be a valid secret key. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret or its + key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic type: object - readOnly: - description: |- - readOnly is Optional: defaults to false (read/write). ReadOnly here will force - the ReadOnly setting in VolumeMounts. - type: boolean + required: + - name + type: object + type: array + envFrom: + items: + description: EnvFromSource represents the source of a set + of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend to each + key in the ConfigMap. Must be a C_IDENTIFIER. 
+ type: string secretRef: - description: |- - secretRef is Optional: secretRef is reference to the secret object containing - sensitive information to pass to the plugi + description: The Secret to select from properties: name: default: "" @@ -3442,32 +4076,227 @@ spec: This field is effectively required, but due to backwards compatibility is allowed to be empty. type: string + optional: + description: Specify whether the Secret must be + defined + type: boolean type: object x-kubernetes-map-type: atomic - required: - - driver type: object - flocker: - description: flocker represents a Flocker volume attached - to a kubelet's host machine. - properties: - datasetName: - description: |- - datasetName is Name of the dataset stored as metadata -> name on the dataset for Flocker - should be considered as depreca + type: array + image: + type: string + imagePullPolicy: + description: PullPolicy describes a policy for if/when to + pull a container image + type: string + logLevel: + description: |- + LogLevel sets the logging level for the server + Allowed values: "debug", "info", "warning", "error", "critical". + enum: + - debug + - info + - warning + - error + - critical + type: string + metrics: + description: Metrics exposes Prometheus-compatible metrics + for the Feast server when enabled. + type: boolean + nodeSelector: + additionalProperties: + type: string + type: object + resources: + description: ResourceRequirements describes the compute resource + requirements. + properties: + claims: + description: |- + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. + items: + description: ResourceClaim references one entry in PodSpec.ResourceClaims. + properties: + name: + description: |- + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. 
+ type: string + required: + - name + type: object + type: array + x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum amount of + compute resources required. + type: object + type: object + tls: + description: TlsConfigs configures server TLS for a feast + service. + properties: + disable: + description: will disable TLS for the feast service. useful + in an openshift cluster, for example, where TLS is configured + by default + type: boolean + secretKeyNames: + description: SecretKeyNames defines the secret key names + for the TLS key and cert. + properties: + tlsCrt: + description: defaults to "tls.crt" + type: string + tlsKey: + description: defaults to "tls.key" + type: string + type: object + secretRef: + description: references the local k8s secret where the + TLS key and cert reside + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: object + x-kubernetes-validations: + - message: '`secretRef` required if `disable` is false.' + rule: '(!has(self.disable) || !self.disable) ? 
has(self.secretRef) + : true' + volumeMounts: + description: VolumeMounts defines the list of volumes that + should be mounted into the feast container. + items: + description: VolumeMount describes a mounting of a Volume + within a container. + properties: + mountPath: + description: |- + Path within the container at which the volume should be mounted. Must + not contain ':'. type: string - datasetUUID: - description: datasetUUID is the UUID of the dataset. - This is unique identifier of a Flocker dataset + mountPropagation: + description: |- + mountPropagation determines how mounts are propagated from the host + to container and the other way around. + type: string + name: + description: This must match the Name of a Volume. + type: string + readOnly: + description: |- + Mounted read-only if true, read-write otherwise (false or unspecified). + Defaults to false. + type: boolean + recursiveReadOnly: + description: |- + RecursiveReadOnly specifies whether read-only mounts should be handled + recursively. + type: string + subPath: + description: |- + Path within the volume from which the container's volume should be mounted. + Defaults to "" (volume's root). + type: string + subPathExpr: + description: Expanded path within the volume from which + the container's volume should be mounted. type: string + required: + - mountPath + - name type: object - gcePersistentDisk: + type: array + workerConfigs: + description: WorkerConfigs defines the worker configuration + for the Feast server. + properties: + keepAliveTimeout: + description: |- + KeepAliveTimeout is the timeout for keep-alive connections in seconds. + Defaults to 30. + format: int32 + minimum: 1 + type: integer + maxRequests: + description: |- + MaxRequests is the maximum number of requests a worker will process before restarting. + This helps prevent memory leaks. 
+ format: int32 + minimum: 0 + type: integer + maxRequestsJitter: + description: |- + MaxRequestsJitter is the maximum jitter to add to max-requests to prevent + thundering herd effect on worker restart. + format: int32 + minimum: 0 + type: integer + registryTTLSeconds: + description: RegistryTTLSeconds is the number of seconds + after which the registry is refreshed. + format: int32 + minimum: 0 + type: integer + workerConnections: + description: |- + WorkerConnections is the maximum number of simultaneous clients per worker process. + Defaults to 1000. + format: int32 + minimum: 1 + type: integer + workers: + description: Workers is the number of worker processes. + Use -1 to auto-calculate based on CPU cores (2 * CPU + + 1). + format: int32 + minimum: -1 + type: integer + type: object + type: object + volumes: + description: Volumes specifies the volumes to mount in the FeatureStore + deployment. + items: + description: Volume represents a named volume in a pod that + may be accessed by any container in the pod. + properties: + awsElasticBlockStore: description: |- - gcePersistentDisk represents a GCE Disk resource that is attached to a - kubelet's host machine and then exposed to the po + awsElasticBlockStore represents an AWS Disk resource that is attached to a + kubelet's host machine and then exposed to th properties: fsType: - description: fsType is filesystem type of the volume + description: fsType is the filesystem type of the volume that you want to mount. type: string partition: @@ -3476,130 +4305,104 @@ spec: If omitted, the default is to mount by volume name. format: int32 type: integer - pdName: - description: |- - pdName is unique name of the PD resource in GCE. Used to identify the disk in GCE. - More info: https://kubernetes. - type: string readOnly: description: |- - readOnly here will force the ReadOnly setting in VolumeMounts. - Defaults to false. + readOnly value true will force the readOnly setting in VolumeMounts. 
More info: https://kubernetes. type: boolean - required: - - pdName - type: object - gitRepo: - description: |- - gitRepo represents a git repository at a particular revision. - DEPRECATED: GitRepo is deprecated. - properties: - directory: + volumeID: description: |- - directory is the target directory name. - Must not contain or start with '..'. If '. - type: string - repository: - description: repository is the URL - type: string - revision: - description: revision is the commit hash for the specified - revision. + volumeID is unique ID of the persistent disk resource in AWS (Amazon EBS volume). + More info: https://kubernetes. type: string required: - - repository + - volumeID type: object - glusterfs: - description: |- - glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime. - More info: https://examples.k8s. + azureDisk: + description: azureDisk represents an Azure Data Disk mount + on the host and bind mount to the pod. properties: - endpoints: - description: |- - endpoints is the endpoint name that details Glusterfs topology. - More info: https://examples.k8s. + cachingMode: + description: 'cachingMode is the Host Caching mode: + None, Read Only, Read Write.' type: string - path: + diskName: + description: diskName is the Name of the data disk in + the blob storage + type: string + diskURI: + description: diskURI is the URI of data disk in the + blob storage + type: string + fsType: description: |- - path is the Glusterfs volume path. - More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod + fsType is Filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. + type: string + kind: + description: 'kind expected values are Shared: multiple + blob disks per storage account Dedicated: single + blob disk per storage accoun' type: string readOnly: description: |- - readOnly here will force the Glusterfs volume to be mounted with read-only permissions. - Defaults to false. 
+ readOnly Defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. type: boolean required: - - endpoints - - path + - diskName + - diskURI type: object - hostPath: - description: |- - hostPath represents a pre-existing file or directory on the host - machine that is directly exposed to the container. + azureFile: + description: azureFile represents an Azure File Service + mount on the host and bind mount to the pod. properties: - path: + readOnly: description: |- - path of the directory on the host. - If the path is a symlink, it will follow the link to the real path. + readOnly defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + secretName: + description: secretName is the name of secret that + contains Azure Storage Account Name and Key type: string - type: - description: |- - type for HostPath Volume - Defaults to "" - More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath + shareName: + description: shareName is the azure share Name type: string required: - - path + - secretName + - shareName type: object - iscsi: - description: |- - iscsi represents an ISCSI Disk resource that is attached to a - kubelet's host machine and then exposed to the pod. + cephfs: + description: cephFS represents a Ceph FS mount on the host + that shares a pod's lifetime properties: - chapAuthDiscovery: - description: chapAuthDiscovery defines whether support - iSCSI Discovery CHAP authentication - type: boolean - chapAuthSession: - description: chapAuthSession defines whether support - iSCSI Session CHAP authentication - type: boolean - fsType: - description: fsType is the filesystem type of the volume - that you want to mount. - type: string - initiatorName: - description: initiatorName is the custom iSCSI Initiator - Name. - type: string - iqn: - description: iqn is the target iSCSI Qualified Name. 
- type: string - iscsiInterface: + monitors: description: |- - iscsiInterface is the interface Name that uses an iSCSI transport. - Defaults to 'default' (tcp). - type: string - lun: - description: lun represents iSCSI Target Lun number. - format: int32 - type: integer - portals: - description: portals is the iSCSI Target Portal List. + monitors is Required: Monitors is a collection of Ceph monitors + More info: https://examples.k8s. items: type: string type: array x-kubernetes-list-type: atomic + path: + description: 'path is Optional: Used as the mounted + root, rather than the full Ceph tree, default is /' + type: string readOnly: description: |- - readOnly here will force the ReadOnly setting in VolumeMounts. - Defaults to false. + readOnly is Optional: Defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. type: boolean + secretFile: + description: 'secretFile is Optional: SecretFile is + the path to key ring for User, default is /etc/ceph/user.' + type: string secretRef: - description: secretRef is the CHAP Secret for iSCSI - target and initiator authentication + description: 'secretRef is Optional: SecretRef is reference + to the authentication secret for User, default is + empty.' properties: name: default: "" @@ -3610,1177 +4413,3076 @@ spec: type: string type: object x-kubernetes-map-type: atomic - targetPortal: - description: targetPortal is iSCSI Target Portal. - type: string - required: - - iqn - - lun - - targetPortal - type: object - name: - description: |- - name of the volume. - Must be a DNS_LABEL and unique within the pod. - More info: https://kubernetes. - type: string - nfs: - description: |- - nfs represents an NFS mount on the host that shares a pod's lifetime - More info: https://kubernetes. - properties: - path: - description: |- - path that is exported by the NFS server. 
- More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs - type: string - readOnly: - description: |- - readOnly here will force the NFS export to be mounted with read-only permissions. - Defaults to false. - type: boolean - server: + user: description: |- - server is the hostname or IP address of the NFS server. - More info: https://kubernetes. + user is optional: User is the rados user name, default is admin + More info: https://examples.k8s. type: string required: - - path - - server + - monitors type: object - persistentVolumeClaim: + cinder: description: |- - persistentVolumeClaimVolumeSource represents a reference to a - PersistentVolumeClaim in the same namespace. - properties: - claimName: - description: claimName is the name of a PersistentVolumeClaim - in the same namespace as the pod using this volume. - type: string - readOnly: - description: |- - readOnly Will force the ReadOnly setting in VolumeMounts. - Default false. - type: boolean - required: - - claimName - type: object - photonPersistentDisk: - description: photonPersistentDisk represents a PhotonController - persistent disk attached and mounted on kubelets host - machine + cinder represents a cinder volume attached and mounted on kubelets host machine. + More info: https://examples.k8s. properties: fsType: description: |- fsType is the filesystem type to mount. Must be a filesystem type supported by the host operating system. - Ex. - type: string - pdID: - description: pdID is the ID that identifies Photon Controller - persistent disk - type: string - required: - - pdID - type: object - portworxVolume: - description: portworxVolume represents a portworx volume - attached and mounted on kubelets host machine - properties: - fsType: - description: |- - fSType represents the filesystem type to mount - Must be a filesystem type supported by the host operating system. - Ex. type: string readOnly: description: |- readOnly defaults to false (read/write). 
ReadOnly here will force the ReadOnly setting in VolumeMounts. type: boolean + secretRef: + description: |- + secretRef is optional: points to a secret object containing parameters used to connect + to OpenStack. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic volumeID: - description: volumeID uniquely identifies a Portworx - volume + description: |- + volumeID used to identify the volume in cinder. + More info: https://examples.k8s.io/mysql-cinder-pd/README.md type: string required: - volumeID type: object - projected: - description: projected items for all in one resources secrets, - configmaps, and downward API + configMap: + description: configMap represents a configMap that should + populate this volume properties: defaultMode: - description: defaultMode are the mode bits used to set - permissions on created files by default. + description: 'defaultMode is optional: mode bits used + to set permissions on created files by default.' format: int32 type: integer - sources: - description: sources is the list of volume projections + items: + description: |- + items if unspecified, each key-value pair in the Data field of the referenced + ConfigMap will be projected into the volum items: - description: Projection that may be projected along - with other supported volume types + description: Maps a string key to a path within a + volume. properties: - clusterTrustBundle: - description: ClusterTrustBundle allows a pod to - access the `.spec. - properties: - labelSelector: - description: |- - Select all ClusterTrustBundles that match this label selector. Only has - effect if signerName is set. - properties: - matchExpressions: - description: matchExpressions is a list - of label selector requirements. The - requirements are ANDed. 
- items: - description: |- - A label selector requirement is a selector that contains values, a key, and an operator that - relates the key and values. - properties: - key: - description: key is the label key - that the selector applies to. - type: string - operator: - description: |- - operator represents a key's relationship to a set of values. - Valid operators are In, NotIn, Exists and DoesNotExist. - type: string - values: - description: |- - values is an array of string values. If the operator is In or NotIn, - the values array must be non-empty. - items: - type: string - type: array - x-kubernetes-list-type: atomic - required: - - key - - operator - type: object - type: array - x-kubernetes-list-type: atomic - matchLabels: - additionalProperties: - type: string - description: matchLabels is a map of {key,value} - pairs. - type: object - type: object - x-kubernetes-map-type: atomic - name: - description: |- - Select a single ClusterTrustBundle by object name. Mutually-exclusive - with signerName and labelSelector. - type: string - optional: - description: |- - If true, don't block pod startup if the referenced ClusterTrustBundle(s) - aren't available. - type: boolean - path: - description: Relative path from the volume - root to write the bundle. + key: + description: key is the key to project. + type: string + mode: + description: 'mode is Optional: mode bits used + to set permissions on this file.' + format: int32 + type: integer + path: + description: |- + path is the relative path of the file to map the key to. + May not be an absolute path. + type: string + required: + - key + - path + type: object + type: array + x-kubernetes-list-type: atomic + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. 
+ type: string + optional: + description: optional specify whether the ConfigMap + or its keys must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + csi: + description: csi (Container Storage Interface) represents + ephemeral storage that is handled by certain external + CSI drivers (Beta fea + properties: + driver: + description: driver is the name of the CSI driver that + handles this volume. + type: string + fsType: + description: fsType to mount. Ex. "ext4", "xfs", "ntfs". + type: string + nodePublishSecretRef: + description: |- + nodePublishSecretRef is a reference to the secret object containing + sensitive information to pass to the CSI driver to c + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + readOnly: + description: |- + readOnly specifies a read-only configuration for the volume. + Defaults to false (read/write). + type: boolean + volumeAttributes: + additionalProperties: + type: string + description: |- + volumeAttributes stores driver-specific properties that are passed to the CSI + driver. + type: object + required: + - driver + type: object + downwardAPI: + description: downwardAPI represents downward API about the + pod that should populate this volume + properties: + defaultMode: + description: 'Optional: mode bits to use on created + files by default.' + format: int32 + type: integer + items: + description: Items is a list of downward API volume + file + items: + description: DownwardAPIVolumeFile represents information + to create the file containing the pod field + properties: + fieldRef: + description: 'Required: Selects a field of the + pod: only annotations, labels, name, namespace + and uid are supported.' 
+ properties: + apiVersion: + description: Version of the schema the FieldPath + is written in terms of, defaults to "v1". type: string - signerName: - description: |- - Select all ClusterTrustBundles that match this signer name. - Mutually-exclusive with name. + fieldPath: + description: Path of the field to select in + the specified API version. type: string required: - - path + - fieldPath type: object - configMap: - description: configMap information about the configMap - data to project + x-kubernetes-map-type: atomic + mode: + description: |- + Optional: mode bits used to set permissions on this file, must be an octal value + between 0000 and 0777 or a decimal valu + format: int32 + type: integer + path: + description: 'Required: Path is the relative + path name of the file to be created. Must not + be absolute or contain the ''..'' path.' + type: string + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, requests. properties: - items: - description: |- - items if unspecified, each key-value pair in the Data field of the referenced - ConfigMap will be projected into the volum - items: - description: Maps a string key to a path - within a volume. - properties: - key: - description: key is the key to project. - type: string - mode: - description: 'mode is Optional: mode - bits used to set permissions on this - file.' - format: int32 - type: integer - path: - description: |- - path is the relative path of the file to map the key to. - May not be an absolute path. - type: string - required: - - key - - path - type: object - type: array - x-kubernetes-list-type: atomic - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
+ containerName: + description: 'Container name: required for + volumes, optional for env vars' type: string - optional: - description: optional specify whether the - ConfigMap or its keys must be defined - type: boolean + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format of + the exposed resources, defaults to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to select' + type: string + required: + - resource type: object x-kubernetes-map-type: atomic - downwardAPI: - description: downwardAPI information about the - downwardAPI data to project - properties: - items: - description: Items is a list of DownwardAPIVolume - file - items: - description: DownwardAPIVolumeFile represents - information to create the file containing - the pod field - properties: - fieldRef: - description: 'Required: Selects a field - of the pod: only annotations, labels, - name, namespace and uid are supported.' - properties: - apiVersion: - description: Version of the schema - the FieldPath is written in terms - of, defaults to "v1". - type: string - fieldPath: - description: Path of the field to - select in the specified API version. - type: string - required: - - fieldPath - type: object - x-kubernetes-map-type: atomic - mode: - description: |- - Optional: mode bits used to set permissions on this file, must be an octal value - between 0000 and 0777 or a decimal valu - format: int32 - type: integer - path: - description: 'Required: Path is the - relative path name of the file to - be created. Must not be absolute or - contain the ''..'' path.' - type: string - resourceFieldRef: - description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, requests. 
- properties: - containerName: - description: 'Container name: required - for volumes, optional for env - vars' - type: string - divisor: - anyOf: - - type: integer - - type: string - description: Specifies the output - format of the exposed resources, - defaults to "1" - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - description: 'Required: resource - to select' - type: string - required: - - resource - type: object - x-kubernetes-map-type: atomic - required: - - path - type: object - type: array - x-kubernetes-list-type: atomic - type: object - secret: - description: secret information about the secret - data to project - properties: - items: - description: |- - items if unspecified, each key-value pair in the Data field of the referenced - Secret will be projected into the volume a - items: - description: Maps a string key to a path - within a volume. - properties: - key: - description: key is the key to project. - type: string - mode: - description: 'mode is Optional: mode - bits used to set permissions on this - file.' - format: int32 - type: integer - path: - description: |- - path is the relative path of the file to map the key to. - May not be an absolute path. - type: string - required: - - key - - path - type: object - type: array - x-kubernetes-list-type: atomic - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: optional field specify whether - the Secret or its key must be defined - type: boolean - type: object - x-kubernetes-map-type: atomic - serviceAccountToken: - description: serviceAccountToken is information - about the serviceAccountToken data to project - properties: - audience: - description: audience is the intended audience - of the token. 
- type: string - expirationSeconds: - description: |- - expirationSeconds is the requested duration of validity of the service - account token. - format: int64 - type: integer - path: - description: |- - path is the path relative to the mount point of the file to project the - token into. - type: string - required: - - path - type: object + required: + - path type: object type: array x-kubernetes-list-type: atomic type: object - quobyte: - description: quobyte represents a Quobyte mount on the host - that shares a pod's lifetime - properties: - group: - description: |- - group to map volume access to - Default is no group - type: string - readOnly: - description: |- - readOnly here will force the Quobyte volume to be mounted with read-only permissions. - Defaults to false. - type: boolean - registry: - description: |- - registry represents a single or multiple Quobyte Registry services - specified as a string as host:port pair (multiple ent - type: string - tenant: - description: |- - tenant owning the given Quobyte volume in the Backend - Used with dynamically provisioned Quobyte volumes, value is set by - type: string - user: - description: |- - user to map volume access to - Defaults to serivceaccount user - type: string - volume: - description: volume is a string that references an already - created Quobyte volume by name. - type: string - required: - - registry - - volume - type: object - rbd: + emptyDir: description: |- - rbd represents a Rados Block Device mount on the host that shares a pod's lifetime. - More info: https://examples.k8s. + emptyDir represents a temporary directory that shares a pod's lifetime. + More info: https://kubernetes. properties: - fsType: - description: fsType is the filesystem type of the volume - that you want to mount. - type: string - image: - description: |- - image is the rados image name. 
- More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it - type: string - keyring: - description: |- - keyring is the path to key ring for RBDUser. - Default is /etc/ceph/keyring. - More info: https://examples.k8s. - type: string - monitors: - description: |- - monitors is a collection of Ceph monitors. - More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it - items: - type: string - type: array - x-kubernetes-list-type: atomic - pool: - description: |- - pool is the rados pool name. - Default is rbd. - More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it - type: string - readOnly: - description: |- - readOnly here will force the ReadOnly setting in VolumeMounts. - Defaults to false. - More info: https://examples.k8s. - type: boolean - secretRef: - description: |- - secretRef is name of the authentication secret for RBDUser. If provided - overrides keyring. - Default is nil. - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - user: - description: |- - user is the rados user name. - Default is admin. - More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + medium: + description: medium represents what type of storage + medium should back this directory. type: string - required: - - image - - monitors + sizeLimit: + anyOf: + - type: integer + - type: string + description: sizeLimit is the total amount of local + storage required for this EmptyDir volume. + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true type: object - scaleIO: - description: scaleIO represents a ScaleIO persistent volume - attached and mounted on Kubernetes nodes. 
+ ephemeral: + description: ephemeral represents a volume that is handled + by a cluster storage driver. properties: - fsType: - description: |- - fsType is the filesystem type to mount. - Must be a filesystem type supported by the host operating system. - Ex. - type: string - gateway: - description: gateway is the host address of the ScaleIO - API Gateway. - type: string - protectionDomain: - description: protectionDomain is the name of the ScaleIO - Protection Domain for the configured storage. - type: string - readOnly: - description: |- - readOnly Defaults to false (read/write). ReadOnly here will force - the ReadOnly setting in VolumeMounts. - type: boolean - secretRef: - description: |- - secretRef references to the secret for ScaleIO user and other - sensitive information. + volumeClaimTemplate: + description: Will be used to create a stand-alone PVC + to provision the volume. properties: - name: - default: "" + metadata: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string + May contain labels and annotations that will be copied into the PVC + when creating it. + type: object + spec: + description: The specification for the PersistentVolumeClaim. + properties: + accessModes: + description: |- + accessModes contains the desired access modes the volume should have. + More info: https://kubernetes. + items: + type: string + type: array + x-kubernetes-list-type: atomic + dataSource: + description: |- + dataSource field can be used to specify either: + * An existing VolumeSnapshot object (snapshot.storage.k8s. + properties: + apiGroup: + description: APIGroup is the group for the + resource being referenced. 
+ type: string + kind: + description: Kind is the type of resource + being referenced + type: string + name: + description: Name is the name of resource + being referenced + type: string + required: + - kind + - name + type: object + x-kubernetes-map-type: atomic + dataSourceRef: + description: |- + dataSourceRef specifies the object from which to populate the volume with data, if a non-empty + volume is desired. + properties: + apiGroup: + description: APIGroup is the group for the + resource being referenced. + type: string + kind: + description: Kind is the type of resource + being referenced + type: string + name: + description: Name is the name of resource + being referenced + type: string + namespace: + description: |- + Namespace is the namespace of resource being referenced + Note that when a namespace is specified, a gateway.networking. + type: string + required: + - kind + - name + type: object + resources: + description: resources represents the minimum + resources the volume should have. + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum + amount of compute resources required. + type: object + type: object + selector: + description: selector is a label query over + volumes to consider for binding. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. 
+ items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + storageClassName: + description: |- + storageClassName is the name of the StorageClass required by the claim. + More info: https://kubernetes. + type: string + volumeAttributesClassName: + description: volumeAttributesClassName may be + used to set the VolumeAttributesClass used + by this claim. + type: string + volumeMode: + description: volumeMode defines what type of + volume is required by the claim. + type: string + volumeName: + description: volumeName is the binding reference + to the PersistentVolume backing this claim. + type: string + type: object + required: + - spec type: object - x-kubernetes-map-type: atomic - sslEnabled: - description: sslEnabled Flag enable/disable SSL communication - with Gateway, default false - type: boolean - storageMode: - description: storageMode indicates whether the storage - for a volume should be ThickProvisioned or ThinProvisioned. - type: string - storagePool: - description: storagePool is the ScaleIO Storage Pool - associated with the protection domain. 
- type: string - system: - description: system is the name of the storage system - as configured in ScaleIO. - type: string - volumeName: - description: |- - volumeName is the name of a volume already created in the ScaleIO system - that is associated with this volume source. - type: string - required: - - gateway - - secretRef - - system type: object - secret: - description: |- - secret represents a secret that should populate this volume. - More info: https://kubernetes. + fc: + description: fc represents a Fibre Channel resource that + is attached to a kubelet's host machine and then exposed + to the pod. properties: - defaultMode: - description: 'defaultMode is Optional: mode bits used - to set permissions on created files by default.' + fsType: + description: |- + fsType is the filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. + type: string + lun: + description: 'lun is Optional: FC target lun number' format: int32 type: integer - items: + readOnly: description: |- - items If unspecified, each key-value pair in the Data field of the referenced - Secret will be projected into the volume a + readOnly is Optional: Defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + targetWWNs: + description: 'targetWWNs is Optional: FC target worldwide + names (WWNs)' items: - description: Maps a string key to a path within a - volume. - properties: - key: - description: key is the key to project. - type: string - mode: - description: 'mode is Optional: mode bits used - to set permissions on this file.' - format: int32 - type: integer - path: - description: |- - path is the relative path of the file to map the key to. - May not be an absolute path. 
- type: string - required: - - key - - path - type: object + type: string + type: array + x-kubernetes-list-type: atomic + wwids: + description: "wwids Optional: FC volume world wide identifiers + (wwids)\nEither wwids or combination of targetWWNs + and lun must be set, " + items: + type: string type: array x-kubernetes-list-type: atomic - optional: - description: optional field specify whether the Secret - or its keys must be defined - type: boolean - secretName: - description: |- - secretName is the name of the secret in the pod's namespace to use. - More info: https://kubernetes. - type: string type: object - storageos: - description: storageOS represents a StorageOS volume attached - and mounted on Kubernetes nodes. + flexVolume: + description: |- + flexVolume represents a generic volume resource that is + provisioned/attached using an exec based plugin. properties: + driver: + description: driver is the name of the driver to use + for this volume. + type: string fsType: description: |- fsType is the filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. type: string - readOnly: - description: |- - readOnly defaults to false (read/write). ReadOnly here will force - the ReadOnly setting in VolumeMounts. - type: boolean - secretRef: - description: |- - secretRef specifies the secret to use for obtaining the StorageOS API - credentials. + options: + additionalProperties: + type: string + description: 'options is Optional: this field holds + extra command options if any.' + type: object + readOnly: + description: |- + readOnly is Optional: defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + secretRef: + description: |- + secretRef is Optional: secretRef is reference to the secret object containing + sensitive information to pass to the plugi + properties: + name: + default: "" + description: |- + Name of the referent. 
+ This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + required: + - driver + type: object + flocker: + description: flocker represents a Flocker volume attached + to a kubelet's host machine. + properties: + datasetName: + description: |- + datasetName is Name of the dataset stored as metadata -> name on the dataset for Flocker + should be considered as depreca + type: string + datasetUUID: + description: datasetUUID is the UUID of the dataset. + This is unique identifier of a Flocker dataset + type: string + type: object + gcePersistentDisk: + description: |- + gcePersistentDisk represents a GCE Disk resource that is attached to a + kubelet's host machine and then exposed to the po + properties: + fsType: + description: fsType is filesystem type of the volume + that you want to mount. + type: string + partition: + description: |- + partition is the partition in the volume that you want to mount. + If omitted, the default is to mount by volume name. + format: int32 + type: integer + pdName: + description: |- + pdName is unique name of the PD resource in GCE. Used to identify the disk in GCE. + More info: https://kubernetes. + type: string + readOnly: + description: |- + readOnly here will force the ReadOnly setting in VolumeMounts. + Defaults to false. + More info: https://kubernetes. + type: boolean + required: + - pdName + type: object + gitRepo: + description: |- + gitRepo represents a git repository at a particular revision. + DEPRECATED: GitRepo is deprecated. + properties: + directory: + description: |- + directory is the target directory name. + Must not contain or start with '..'. If '. + type: string + repository: + description: repository is the URL + type: string + revision: + description: revision is the commit hash for the specified + revision. 
+ type: string + required: + - repository + type: object + glusterfs: + description: |- + glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime. + More info: https://examples.k8s. + properties: + endpoints: + description: |- + endpoints is the endpoint name that details Glusterfs topology. + More info: https://examples.k8s. + type: string + path: + description: |- + path is the Glusterfs volume path. + More info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod + type: string + readOnly: + description: |- + readOnly here will force the Glusterfs volume to be mounted with read-only permissions. + Defaults to false. + type: boolean + required: + - endpoints + - path + type: object + hostPath: + description: |- + hostPath represents a pre-existing file or directory on the host + machine that is directly exposed to the container. + properties: + path: + description: |- + path of the directory on the host. + If the path is a symlink, it will follow the link to the real path. + type: string + type: + description: |- + type for HostPath Volume + Defaults to "" + More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath + type: string + required: + - path + type: object + iscsi: + description: |- + iscsi represents an ISCSI Disk resource that is attached to a + kubelet's host machine and then exposed to the pod. + properties: + chapAuthDiscovery: + description: chapAuthDiscovery defines whether support + iSCSI Discovery CHAP authentication + type: boolean + chapAuthSession: + description: chapAuthSession defines whether support + iSCSI Session CHAP authentication + type: boolean + fsType: + description: fsType is the filesystem type of the volume + that you want to mount. + type: string + initiatorName: + description: initiatorName is the custom iSCSI Initiator + Name. + type: string + iqn: + description: iqn is the target iSCSI Qualified Name. 
+ type: string + iscsiInterface: + description: |- + iscsiInterface is the interface Name that uses an iSCSI transport. + Defaults to 'default' (tcp). + type: string + lun: + description: lun represents iSCSI Target Lun number. + format: int32 + type: integer + portals: + description: portals is the iSCSI Target Portal List. + items: + type: string + type: array + x-kubernetes-list-type: atomic + readOnly: + description: |- + readOnly here will force the ReadOnly setting in VolumeMounts. + Defaults to false. + type: boolean + secretRef: + description: secretRef is the CHAP Secret for iSCSI + target and initiator authentication + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + targetPortal: + description: targetPortal is iSCSI Target Portal. + type: string + required: + - iqn + - lun + - targetPortal + type: object + name: + description: |- + name of the volume. + Must be a DNS_LABEL and unique within the pod. + More info: https://kubernetes. + type: string + nfs: + description: |- + nfs represents an NFS mount on the host that shares a pod's lifetime + More info: https://kubernetes. + properties: + path: + description: |- + path that is exported by the NFS server. + More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs + type: string + readOnly: + description: |- + readOnly here will force the NFS export to be mounted with read-only permissions. + Defaults to false. + type: boolean + server: + description: |- + server is the hostname or IP address of the NFS server. + More info: https://kubernetes. + type: string + required: + - path + - server + type: object + persistentVolumeClaim: + description: |- + persistentVolumeClaimVolumeSource represents a reference to a + PersistentVolumeClaim in the same namespace. 
+ properties: + claimName: + description: claimName is the name of a PersistentVolumeClaim + in the same namespace as the pod using this volume. + type: string + readOnly: + description: |- + readOnly Will force the ReadOnly setting in VolumeMounts. + Default false. + type: boolean + required: + - claimName + type: object + photonPersistentDisk: + description: photonPersistentDisk represents a PhotonController + persistent disk attached and mounted on kubelets host + machine + properties: + fsType: + description: |- + fsType is the filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. + type: string + pdID: + description: pdID is the ID that identifies Photon Controller + persistent disk + type: string + required: + - pdID + type: object + portworxVolume: + description: portworxVolume represents a portworx volume + attached and mounted on kubelets host machine + properties: + fsType: + description: |- + fSType represents the filesystem type to mount + Must be a filesystem type supported by the host operating system. + Ex. + type: string + readOnly: + description: |- + readOnly defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + volumeID: + description: volumeID uniquely identifies a Portworx + volume + type: string + required: + - volumeID + type: object + projected: + description: projected items for all in one resources secrets, + configmaps, and downward API + properties: + defaultMode: + description: defaultMode are the mode bits used to set + permissions on created files by default. + format: int32 + type: integer + sources: + description: sources is the list of volume projections + items: + description: Projection that may be projected along + with other supported volume types + properties: + clusterTrustBundle: + description: ClusterTrustBundle allows a pod to + access the `.spec. 
+ properties: + labelSelector: + description: |- + Select all ClusterTrustBundles that match this label selector. Only has + effect if signerName is set. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + name: + description: |- + Select a single ClusterTrustBundle by object name. Mutually-exclusive + with signerName and labelSelector. + type: string + optional: + description: |- + If true, don't block pod startup if the referenced ClusterTrustBundle(s) + aren't available. + type: boolean + path: + description: Relative path from the volume + root to write the bundle. + type: string + signerName: + description: |- + Select all ClusterTrustBundles that match this signer name. + Mutually-exclusive with name. 
+ type: string + required: + - path + type: object + configMap: + description: configMap information about the configMap + data to project + properties: + items: + description: |- + items if unspecified, each key-value pair in the Data field of the referenced + ConfigMap will be projected into the volum + items: + description: Maps a string key to a path + within a volume. + properties: + key: + description: key is the key to project. + type: string + mode: + description: 'mode is Optional: mode + bits used to set permissions on this + file.' + format: int32 + type: integer + path: + description: |- + path is the relative path of the file to map the key to. + May not be an absolute path. + type: string + required: + - key + - path + type: object + type: array + x-kubernetes-list-type: atomic + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: optional specify whether the + ConfigMap or its keys must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + downwardAPI: + description: downwardAPI information about the + downwardAPI data to project + properties: + items: + description: Items is a list of DownwardAPIVolume + file + items: + description: DownwardAPIVolumeFile represents + information to create the file containing + the pod field + properties: + fieldRef: + description: 'Required: Selects a field + of the pod: only annotations, labels, + name, namespace and uid are supported.' + properties: + apiVersion: + description: Version of the schema + the FieldPath is written in terms + of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to + select in the specified API version. 
+ type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + mode: + description: |- + Optional: mode bits used to set permissions on this file, must be an octal value + between 0000 and 0777 or a decimal valu + format: int32 + type: integer + path: + description: 'Required: Path is the + relative path name of the file to + be created. Must not be absolute or + contain the ''..'' path.' + type: string + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, requests. + properties: + containerName: + description: 'Container name: required + for volumes, optional for env + vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output + format of the exposed resources, + defaults to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource + to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + required: + - path + type: object + type: array + x-kubernetes-list-type: atomic + type: object + secret: + description: secret information about the secret + data to project + properties: + items: + description: |- + items if unspecified, each key-value pair in the Data field of the referenced + Secret will be projected into the volume a + items: + description: Maps a string key to a path + within a volume. + properties: + key: + description: key is the key to project. + type: string + mode: + description: 'mode is Optional: mode + bits used to set permissions on this + file.' + format: int32 + type: integer + path: + description: |- + path is the relative path of the file to map the key to. + May not be an absolute path. 
+ type: string + required: + - key + - path + type: object + type: array + x-kubernetes-list-type: atomic + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: optional field specify whether + the Secret or its key must be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + serviceAccountToken: + description: serviceAccountToken is information + about the serviceAccountToken data to project + properties: + audience: + description: audience is the intended audience + of the token. + type: string + expirationSeconds: + description: |- + expirationSeconds is the requested duration of validity of the service + account token. + format: int64 + type: integer + path: + description: |- + path is the path relative to the mount point of the file to project the + token into. + type: string + required: + - path + type: object + type: object + type: array + x-kubernetes-list-type: atomic + type: object + quobyte: + description: quobyte represents a Quobyte mount on the host + that shares a pod's lifetime + properties: + group: + description: |- + group to map volume access to + Default is no group + type: string + readOnly: + description: |- + readOnly here will force the Quobyte volume to be mounted with read-only permissions. + Defaults to false. + type: boolean + registry: + description: |- + registry represents a single or multiple Quobyte Registry services + specified as a string as host:port pair (multiple ent + type: string + tenant: + description: |- + tenant owning the given Quobyte volume in the Backend + Used with dynamically provisioned Quobyte volumes, value is set by + type: string + user: + description: |- + user to map volume access to + Defaults to serivceaccount user + type: string + volume: + description: volume is a string that references an already + created Quobyte volume by name. 
+ type: string + required: + - registry + - volume + type: object + rbd: + description: |- + rbd represents a Rados Block Device mount on the host that shares a pod's lifetime. + More info: https://examples.k8s. + properties: + fsType: + description: fsType is the filesystem type of the volume + that you want to mount. + type: string + image: + description: |- + image is the rados image name. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + type: string + keyring: + description: |- + keyring is the path to key ring for RBDUser. + Default is /etc/ceph/keyring. + More info: https://examples.k8s. + type: string + monitors: + description: |- + monitors is a collection of Ceph monitors. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + items: + type: string + type: array + x-kubernetes-list-type: atomic + pool: + description: |- + pool is the rados pool name. + Default is rbd. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + type: string + readOnly: + description: |- + readOnly here will force the ReadOnly setting in VolumeMounts. + Defaults to false. + More info: https://examples.k8s. + type: boolean + secretRef: + description: |- + secretRef is name of the authentication secret for RBDUser. If provided + overrides keyring. + Default is nil. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + user: + description: |- + user is the rados user name. + Default is admin. + More info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it + type: string + required: + - image + - monitors + type: object + scaleIO: + description: scaleIO represents a ScaleIO persistent volume + attached and mounted on Kubernetes nodes. + properties: + fsType: + description: |- + fsType is the filesystem type to mount. 
+ Must be a filesystem type supported by the host operating system. + Ex. + type: string + gateway: + description: gateway is the host address of the ScaleIO + API Gateway. + type: string + protectionDomain: + description: protectionDomain is the name of the ScaleIO + Protection Domain for the configured storage. + type: string + readOnly: + description: |- + readOnly Defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + secretRef: + description: |- + secretRef references to the secret for ScaleIO user and other + sensitive information. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + sslEnabled: + description: sslEnabled Flag enable/disable SSL communication + with Gateway, default false + type: boolean + storageMode: + description: storageMode indicates whether the storage + for a volume should be ThickProvisioned or ThinProvisioned. + type: string + storagePool: + description: storagePool is the ScaleIO Storage Pool + associated with the protection domain. + type: string + system: + description: system is the name of the storage system + as configured in ScaleIO. + type: string + volumeName: + description: |- + volumeName is the name of a volume already created in the ScaleIO system + that is associated with this volume source. + type: string + required: + - gateway + - secretRef + - system + type: object + secret: + description: |- + secret represents a secret that should populate this volume. + More info: https://kubernetes. + properties: + defaultMode: + description: 'defaultMode is Optional: mode bits used + to set permissions on created files by default.' 
+ format: int32 + type: integer + items: + description: |- + items If unspecified, each key-value pair in the Data field of the referenced + Secret will be projected into the volume a + items: + description: Maps a string key to a path within a + volume. + properties: + key: + description: key is the key to project. + type: string + mode: + description: 'mode is Optional: mode bits used + to set permissions on this file.' + format: int32 + type: integer + path: + description: |- + path is the relative path of the file to map the key to. + May not be an absolute path. + type: string + required: + - key + - path + type: object + type: array + x-kubernetes-list-type: atomic + optional: + description: optional field specify whether the Secret + or its keys must be defined + type: boolean + secretName: + description: |- + secretName is the name of the secret in the pod's namespace to use. + More info: https://kubernetes. + type: string + type: object + storageos: + description: storageOS represents a StorageOS volume attached + and mounted on Kubernetes nodes. + properties: + fsType: + description: |- + fsType is the filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. + type: string + readOnly: + description: |- + readOnly defaults to false (read/write). ReadOnly here will force + the ReadOnly setting in VolumeMounts. + type: boolean + secretRef: + description: |- + secretRef specifies the secret to use for obtaining the StorageOS API + credentials. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + volumeName: + description: |- + volumeName is the human-readable name of the StorageOS volume. Volume + names are only unique within a namespace. 
+ type: string + volumeNamespace: + description: volumeNamespace specifies the scope of + the volume within StorageOS. + type: string + type: object + vsphereVolume: + description: vsphereVolume represents a vSphere volume attached + and mounted on kubelets host machine + properties: + fsType: + description: |- + fsType is filesystem type to mount. + Must be a filesystem type supported by the host operating system. + Ex. + type: string + storagePolicyID: + description: storagePolicyID is the storage Policy Based + Management (SPBM) profile ID associated with the StoragePolicyName. + type: string + storagePolicyName: + description: storagePolicyName is the storage Policy + Based Management (SPBM) profile name. + type: string + volumePath: + description: volumePath is the path that identifies + vSphere volume vmdk + type: string + required: + - volumePath + type: object + required: + - name + type: object + type: array + type: object + required: + - feastProject + - replicas + type: object + x-kubernetes-validations: + - message: replicas > 1 and services.scaling.autoscaling are mutually + exclusive. + rule: self.replicas <= 1 || !has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling) + - message: Scaling requires DB-backed persistence for the online store. + Configure services.onlineStore.persistence.store when using replicas + > 1 or autoscaling. + rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (has(self.services) + && has(self.services.onlineStore) && has(self.services.onlineStore.persistence) + && has(self.services.onlineStore.persistence.store)) + - message: Scaling requires DB-backed persistence for the offline store. + Configure services.offlineStore.persistence.store when using replicas + > 1 or autoscaling. 
+ rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (!has(self.services) + || !has(self.services.offlineStore) || (has(self.services.offlineStore.persistence) + && has(self.services.offlineStore.persistence.store))) + - message: Scaling requires DB-backed or remote registry. Configure registry.local.persistence.store + or use a remote registry when using replicas > 1 or autoscaling. S3/GCS-backed + registry is also allowed. + rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (has(self.services) + && has(self.services.registry) && (has(self.services.registry.remote) + || (has(self.services.registry.local) && has(self.services.registry.local.persistence) + && (has(self.services.registry.local.persistence.store) || (has(self.services.registry.local.persistence.file) + && has(self.services.registry.local.persistence.file.path) && (self.services.registry.local.persistence.file.path.startsWith('s3://') + || self.services.registry.local.persistence.file.path.startsWith('gs://'))))))) + status: + description: FeatureStoreStatus defines the observed state of FeatureStore + properties: + applied: + description: Shows the currently applied feast configuration, including + any pertinent defaults + properties: + authz: + description: AuthzConfig defines the authorization settings for + the deployed Feast services. + properties: + kubernetes: + description: |- + KubernetesAuthz provides a way to define the authorization settings using Kubernetes RBAC resources. + https://kubernetes. + properties: + roles: + description: The Kubernetes RBAC roles to be deployed + in the same namespace of the FeatureStore. + items: + type: string + type: array + type: object + oidc: + description: |- + OidcAuthz defines the authorization settings for deployments using an Open ID Connect identity provider. + https://auth0. 
+ properties: + secretRef: + description: |- + LocalObjectReference contains enough information to let you locate the + referenced object inside the same namespace. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + required: + - secretRef + type: object + type: object + x-kubernetes-validations: + - message: One selection required between kubernetes or oidc. + rule: '[has(self.kubernetes), has(self.oidc)].exists_one(c, + c)' + batchEngine: + description: BatchEngineConfig defines the batch compute engine + configuration. + properties: + configMapKey: + description: Key name in the ConfigMap. Defaults to "config" + if not specified. + type: string + configMapRef: + description: Reference to a ConfigMap containing the batch + engine configuration. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: object + cronJob: + description: FeastCronJob defines a CronJob to execute against + a Feature Store deployment. + properties: + annotations: + additionalProperties: + type: string + description: Annotations to be added to the CronJob metadata. + type: object + concurrencyPolicy: + description: Specifies how to treat concurrent executions + of a Job. + type: string + containerConfigs: + description: CronJobContainerConfigs k8s container settings + for the CronJob + properties: + commands: + description: Array of commands to be executed (in order) + against a Feature Store deployment. + items: + type: string + type: array + env: + items: + description: EnvVar represents an environment variable + present in a Container. + properties: + name: + description: Name of the environment variable. 
Must + be a C_IDENTIFIER. + type: string + value: + description: |- + Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any + type: string + valueFrom: + description: Source for the environment variable's + value. Cannot be used if value is not empty. + properties: + configMapKeyRef: + description: Selects a key of a ConfigMap. + properties: + key: + description: The key to select. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + fieldRef: + description: 'Selects a field of the pod: supports + metadata.name, metadata.namespace, `metadata.labels['''']`, + `metadata.' + properties: + apiVersion: + description: Version of the schema the FieldPath + is written in terms of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to select + in the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits. 
+ properties: + containerName: + description: 'Container name: required for + volumes, optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format + of the exposed resources, defaults to + "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret in the + pod's namespace + properties: + key: + description: The key of the secret to select + from. Must be a valid secret key. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + type: object + required: + - name + type: object + type: array + envFrom: + items: + description: EnvFromSource represents the source of + a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend to + each key in the ConfigMap. Must be a C_IDENTIFIER. + type: string + secretRef: + description: The Secret to select from + properties: + name: + default: "" + description: |- + Name of the referent. 
+ This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + type: object + type: array + image: + type: string + imagePullPolicy: + description: PullPolicy describes a policy for if/when + to pull a container image + type: string + nodeSelector: + additionalProperties: + type: string + type: object + resources: + description: ResourceRequirements describes the compute + resource requirements. + properties: + claims: + description: |- + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. + items: + description: ResourceClaim references one entry + in PodSpec.ResourceClaims. + properties: + name: + description: |- + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. + type: string + required: + - name + type: object + type: array + x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum amount + of compute resources required. + type: object + type: object + type: object + failedJobsHistoryLimit: + description: The number of failed finished jobs to retain. + Value must be non-negative integer. 
+ format: int32 + type: integer + jobSpec: + description: Specification of the desired behavior of a job. + properties: + activeDeadlineSeconds: + description: |- + Specifies the duration in seconds relative to the startTime that the job + may be continuously active before the system tr + format: int64 + type: integer + backoffLimit: + description: Specifies the number of retries before marking + this job failed. + format: int32 + type: integer + backoffLimitPerIndex: + description: |- + Specifies the limit for the number of retries within an + index before marking this index as failed. + format: int32 + type: integer + completionMode: + description: |- + completionMode specifies how Pod completions are tracked. It can be + `NonIndexed` (default) or `Indexed`. + type: string + completions: + description: |- + Specifies the desired number of successfully finished pods the + job should be run with. + format: int32 + type: integer + maxFailedIndexes: + description: |- + Specifies the maximal number of failed indexes before marking the Job as + failed, when backoffLimitPerIndex is set. + format: int32 + type: integer + parallelism: + description: |- + Specifies the maximum desired number of pods the job should + run at any given time. + format: int32 + type: integer + podFailurePolicy: + description: Specifies the policy of handling failed pods. + properties: + rules: + description: A list of pod failure policy rules. The + rules are evaluated in order. + items: + description: PodFailurePolicyRule describes how + a pod failure is handled when the requirements + are met. + properties: + action: + description: Specifies the action taken on a + pod failure when the requirements are satisfied. + type: string + onExitCodes: + description: Represents the requirement on the + container exit codes. + properties: + containerName: + description: |- + Restricts the check for exit codes to the container with the + specified name. 
+ type: string + operator: + description: |- + Represents the relationship between the container exit code(s) and the + specified values. + type: string + values: + description: Specifies the set of values. + items: + format: int32 + type: integer + type: array + x-kubernetes-list-type: set + required: + - operator + - values + type: object + onPodConditions: + description: |- + Represents the requirement on the pod conditions. The requirement is represented + as a list of pod condition patterns. + items: + description: |- + PodFailurePolicyOnPodConditionsPattern describes a pattern for matching + an actual pod condition type. + properties: + status: + description: Specifies the required Pod + condition status. + type: string + type: + description: Specifies the required Pod + condition type. + type: string + required: + - status + - type + type: object + type: array + x-kubernetes-list-type: atomic + required: + - action + type: object + type: array + x-kubernetes-list-type: atomic + required: + - rules + type: object + podReplacementPolicy: + description: podReplacementPolicy specifies when to create + replacement Pods. + type: string + podTemplateAnnotations: + additionalProperties: + type: string + description: |- + PodTemplateAnnotations are annotations to be applied to the CronJob's PodTemplate + metadata. + type: object + suspend: + description: suspend specifies whether the Job controller + should create Pods or not. + type: boolean + ttlSecondsAfterFinished: + description: |- + ttlSecondsAfterFinished limits the lifetime of a Job that has finished + execution (either Complete or Failed). + format: int32 + type: integer + type: object + schedule: + description: The schedule in Cron format, see https://en.wikipedia.org/wiki/Cron. + type: string + startingDeadlineSeconds: + description: |- + Optional deadline in seconds for starting the job if it misses scheduled + time for any reason. 
+ format: int64 + type: integer + successfulJobsHistoryLimit: + description: The number of successful finished jobs to retain. + Value must be non-negative integer. + format: int32 + type: integer + suspend: + description: |- + This flag tells the controller to suspend subsequent executions, it does + not apply to already started executions. + type: boolean + timeZone: + description: The time zone name for the given schedule, see + https://en.wikipedia.org/wiki/List_of_tz_database_time_zones. + type: string + type: object + feastProject: + description: FeastProject is the Feast project id. + pattern: ^[A-Za-z0-9][A-Za-z0-9_-]*$ + type: string + feastProjectDir: + description: FeastProjectDir defines how to create the feast project + directory. + properties: + git: + description: GitCloneOptions describes how a clone should + be performed. + properties: + configs: + additionalProperties: + type: string + description: |- + Configs passed to git via `-c` + e.g. http.sslVerify: 'false' + OR 'url."https://api:\${TOKEN}@github.com/". + type: object + env: + items: + description: EnvVar represents an environment variable + present in a Container. properties: name: - default: "" + description: Name of the environment variable. Must + be a C_IDENTIFIER. + type: string + value: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. + Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any type: string + valueFrom: + description: Source for the environment variable's + value. Cannot be used if value is not empty. + properties: + configMapKeyRef: + description: Selects a key of a ConfigMap. + properties: + key: + description: The key to select. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. 
+ type: string + optional: + description: Specify whether the ConfigMap + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + fieldRef: + description: 'Selects a field of the pod: supports + metadata.name, metadata.namespace, `metadata.labels['''']`, + `metadata.' + properties: + apiVersion: + description: Version of the schema the FieldPath + is written in terms of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to select + in the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits. + properties: + containerName: + description: 'Container name: required for + volumes, optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format + of the exposed resources, defaults to + "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret in the + pod's namespace + properties: + key: + description: The key of the secret to select + from. Must be a valid secret key. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. 
+ type: string + optional: + description: Specify whether the Secret + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + type: object + required: + - name type: object - x-kubernetes-map-type: atomic - volumeName: - description: |- - volumeName is the human-readable name of the StorageOS volume. Volume - names are only unique within a namespace. - type: string - volumeNamespace: - description: volumeNamespace specifies the scope of - the volume within StorageOS. - type: string - type: object - vsphereVolume: - description: vsphereVolume represents a vSphere volume attached - and mounted on kubelets host machine - properties: - fsType: - description: |- - fsType is filesystem type to mount. - Must be a filesystem type supported by the host operating system. - Ex. - type: string - storagePolicyID: - description: storagePolicyID is the storage Policy Based - Management (SPBM) profile ID associated with the StoragePolicyName. - type: string - storagePolicyName: - description: storagePolicyName is the storage Policy - Based Management (SPBM) profile name. - type: string - volumePath: - description: volumePath is the path that identifies - vSphere volume vmdk - type: string - required: - - volumePath - type: object - required: - - name - type: object - type: array - type: object - required: - - feastProject - type: object - status: - description: FeatureStoreStatus defines the observed state of FeatureStore - properties: - applied: - description: Shows the currently applied feast configuration, including - any pertinent defaults - properties: - authz: - description: AuthzConfig defines the authorization settings for - the deployed Feast services. - properties: - kubernetes: - description: |- - KubernetesAuthz provides a way to define the authorization settings using Kubernetes RBAC resources. - https://kubernetes. 
- properties: - roles: - description: The Kubernetes RBAC roles to be deployed - in the same namespace of the FeatureStore. + type: array + envFrom: items: - type: string + description: EnvFromSource represents the source of + a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + prefix: + description: An optional identifier to prepend to + each key in the ConfigMap. Must be a C_IDENTIFIER. + type: string + secretRef: + description: The Secret to select from + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret must + be defined + type: boolean + type: object + x-kubernetes-map-type: atomic + type: object type: array + featureRepoPath: + description: FeatureRepoPath is the relative path to the + feature repo subdirectory. Default is 'feature_repo'. + type: string + ref: + description: Reference to a branch / tag / commit + type: string + url: + description: The repository URL to clone from. + type: string + required: + - url + type: object + x-kubernetes-validations: + - message: RepoPath must be a file name only, with no slashes. + rule: 'has(self.featureRepoPath) ? !self.featureRepoPath.startsWith(''/'') + : true' + init: + description: FeastInitOptions defines how to run a `feast + init`. 
+ properties: + minimal: + type: boolean + template: + description: Template for the created project + enum: + - local + - gcp + - aws + - snowflake + - spark + - postgres + - hbase + - cassandra + - hazelcast + - couchbase + - clickhouse + type: string type: object - oidc: - description: |- - OidcAuthz defines the authorization settings for deployments using an Open ID Connect identity provider. - https://auth0. + type: object + x-kubernetes-validations: + - message: One selection required between init or git. + rule: '[has(self.git), has(self.init)].exists_one(c, c)' + replicas: + default: 1 + description: |- + Replicas is the desired number of pod replicas. Used by the scale sub-resource. + Mutually exclusive with services. + format: int32 + minimum: 1 + type: integer + services: + description: FeatureStoreServices defines the desired feast services. + An ephemeral onlineStore feature server is deployed by default. + properties: + affinity: + description: Affinity defines the pod scheduling constraints + for the FeatureStore deployment. properties: - secretRef: - description: |- - LocalObjectReference contains enough information to let you locate the - referenced object inside the same namespace. + nodeAffinity: + description: Describes node affinity scheduling rules + for the pod. properties: - name: - default: "" + preferredDuringSchedulingIgnoredDuringExecution: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string + The scheduler will prefer to schedule pods to nodes that satisfy + the affinity expressions specified by this field, but i + items: + description: |- + An empty preferred scheduling term matches all objects with implicit weight 0 + (i.e. it's a no-op). + properties: + preference: + description: A node selector term, associated + with the corresponding weight. 
+ properties: + matchExpressions: + description: A list of node selector requirements + by node's labels. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the + selector applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchFields: + description: A list of node selector requirements + by node's fields. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the + selector applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + type: object + x-kubernetes-map-type: atomic + weight: + description: Weight associated with matching + the corresponding nodeSelectorTerm, in the + range 1-100. 
+ format: int32 + type: integer + required: + - preference + - weight + type: object + type: array + x-kubernetes-list-type: atomic + requiredDuringSchedulingIgnoredDuringExecution: + description: "If the affinity requirements specified + by this field are not met at\nscheduling time, the + pod will not be scheduled onto " + properties: + nodeSelectorTerms: + description: Required. A list of node selector + terms. The terms are ORed. + items: + description: |- + A null or empty node selector term matches no objects. The requirements of + them are ANDed. + properties: + matchExpressions: + description: A list of node selector requirements + by node's labels. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the + selector applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchFields: + description: A list of node selector requirements + by node's fields. + items: + description: |- + A node selector requirement is a selector that contains values, a key, and an operator + that relates the key and values. + properties: + key: + description: The label key that the + selector applies to. + type: string + operator: + description: |- + Represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt. + type: string + values: + description: |- + An array of string values. 
If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + type: object + x-kubernetes-map-type: atomic + type: array + x-kubernetes-list-type: atomic + required: + - nodeSelectorTerms + type: object + x-kubernetes-map-type: atomic + type: object + podAffinity: + description: Describes pod affinity scheduling rules (e.g. + co-locate this pod in the same node, zone, etc. as some + other pod(s)). + properties: + preferredDuringSchedulingIgnoredDuringExecution: + description: |- + The scheduler will prefer to schedule pods to nodes that satisfy + the affinity expressions specified by this field, but i + items: + description: The weights of all of the matched WeightedPodAffinityTerm + fields are added per-node to find the most preferred + node(s) + properties: + podAffinityTerm: + description: Required. A pod affinity term, + associated with the corresponding weight. + properties: + labelSelector: + description: |- + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set + of namespaces that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. 
+ type: object + type: object + x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static + list of namespace names that the term + applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t + type: string + required: + - topologyKey + type: object + weight: + description: |- + weight associated with matching the corresponding podAffinityTerm, + in the range 1-100. + format: int32 + type: integer + required: + - podAffinityTerm + - weight + type: object + type: array + x-kubernetes-list-type: atomic + requiredDuringSchedulingIgnoredDuringExecution: + description: "If the affinity requirements specified + by this field are not met at\nscheduling time, the + pod will not be scheduled onto " + items: + description: "Defines a set of pods (namely those + matching the labelSelector\nrelative to the given + namespace(s)) that this pod should " + properties: + labelSelector: + description: |- + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. 
+ type: object + type: object + x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: + description: |- + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t + type: string + required: + - topologyKey + type: object + type: array + x-kubernetes-list-type: atomic type: object - x-kubernetes-map-type: atomic - required: - - secretRef - type: object - type: object - x-kubernetes-validations: - - message: One selection required between kubernetes or oidc. - rule: '[has(self.kubernetes), has(self.oidc)].exists_one(c, - c)' - batchEngine: - description: BatchEngineConfig defines the batch compute engine - configuration. - properties: - configMapKey: - description: Key name in the ConfigMap. Defaults to "config" - if not specified. - type: string - configMapRef: - description: Reference to a ConfigMap containing the batch - engine configuration. - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - type: object - cronJob: - description: FeastCronJob defines a CronJob to execute against - a Feature Store deployment. - properties: - annotations: - additionalProperties: - type: string - description: Annotations to be added to the CronJob metadata. - type: object - concurrencyPolicy: - description: Specifies how to treat concurrent executions - of a Job. - type: string - containerConfigs: - description: CronJobContainerConfigs k8s container settings - for the CronJob - properties: - commands: - description: Array of commands to be executed (in order) - against a Feature Store deployment. 
- items: - type: string - type: array - env: - items: - description: EnvVar represents an environment variable - present in a Container. - properties: - name: - description: Name of the environment variable. Must - be a C_IDENTIFIER. - type: string - value: - description: |- - Variable references $(VAR_NAME) are expanded - using the previously defined environment variables in the container and - any - type: string - valueFrom: - description: Source for the environment variable's - value. Cannot be used if value is not empty. + podAntiAffinity: + description: Describes pod anti-affinity scheduling rules + (e.g. avoid putting this pod in the same node, zone, + etc. + properties: + preferredDuringSchedulingIgnoredDuringExecution: + description: "The scheduler will prefer to schedule + pods to nodes that satisfy\nthe anti-affinity expressions + specified by this field, " + items: + description: The weights of all of the matched WeightedPodAffinityTerm + fields are added per-node to find the most preferred + node(s) properties: - configMapKeyRef: - description: Selects a key of a ConfigMap. + podAffinityTerm: + description: Required. A pod affinity term, + associated with the corresponding weight. properties: - key: - description: The key to select. - type: string - name: - default: "" + labelSelector: + description: |- + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. 
+ Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: + description: |- + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set + of namespaces that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The + requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of + {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static + list of namespace names that the term + applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap - or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - fieldRef: - description: 'Selects a field of the pod: supports - metadata.name, metadata.namespace, `metadata.labels['''']`, - `metadata.' - properties: - apiVersion: - description: Version of the schema the FieldPath - is written in terms of, defaults to "v1". - type: string - fieldPath: - description: Path of the field to select - in the specified API version. + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t type: string required: - - fieldPath + - topologyKey type: object - x-kubernetes-map-type: atomic - resourceFieldRef: + weight: description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, limits. 
- properties: - containerName: - description: 'Container name: required for - volumes, optional for env vars' - type: string - divisor: - anyOf: - - type: integer - - type: string - description: Specifies the output format - of the exposed resources, defaults to - "1" - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - description: 'Required: resource to select' - type: string - required: - - resource - type: object - x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret in the - pod's namespace + weight associated with matching the corresponding podAffinityTerm, + in the range 1-100. + format: int32 + type: integer + required: + - podAffinityTerm + - weight + type: object + type: array + x-kubernetes-list-type: atomic + requiredDuringSchedulingIgnoredDuringExecution: + description: "If the anti-affinity requirements specified + by this field are not met at\nscheduling time, the + pod will not be scheduled " + items: + description: "Defines a set of pods (namely those + matching the labelSelector\nrelative to the given + namespace(s)) that this pod should " + properties: + labelSelector: + description: |- + A label query over a set of resources, in this case pods. + If it's null, this PodAffinityTerm matches with no Pods. properties: - key: - description: The key of the secret to select - from. Must be a valid secret key. - type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the Secret - or its key must be defined - type: boolean - required: - - key + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. 
+ items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object type: object x-kubernetes-map-type: atomic - type: object - required: - - name - type: object - type: array - envFrom: - items: - description: EnvFromSource represents the source of - a set of ConfigMaps - properties: - configMapRef: - description: The ConfigMap to select from - properties: - name: - default: "" + matchLabelKeys: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the ConfigMap must - be defined - type: boolean - type: object - x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend to - each key in the ConfigMap. Must be a C_IDENTIFIER. - type: string - secretRef: - description: The Secret to select from - properties: - name: - default: "" + MatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + mismatchLabelKeys: description: |- - Name of the referent. 
- This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the Secret must - be defined - type: boolean - type: object - x-kubernetes-map-type: atomic - type: object - type: array - image: - type: string - imagePullPolicy: - description: PullPolicy describes a policy for if/when - to pull a container image - type: string - nodeSelector: - additionalProperties: - type: string - type: object - resources: - description: ResourceRequirements describes the compute - resource requirements. - properties: - claims: - description: |- - Claims lists the names of resources, defined in spec.resourceClaims, - that are used by this container. - items: - description: ResourceClaim references one entry - in PodSpec.ResourceClaims. - properties: - name: + MismatchLabelKeys is a set of pod label keys to select which pods will + be taken into consideration. + items: + type: string + type: array + x-kubernetes-list-type: atomic + namespaceSelector: + description: A label query over the set of namespaces + that the term applies to. + properties: + matchExpressions: + description: matchExpressions is a list + of label selector requirements. The requirements + are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key + that the selector applies to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. 
+ items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + namespaces: + description: namespaces specifies a static list + of namespace names that the term applies to. + items: + type: string + type: array + x-kubernetes-list-type: atomic + topologyKey: description: |- - Name must match the name of one entry in pod.spec.resourceClaims of - the Pod where this field is used. + This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching + the labelSelector in t type: string required: - - name + - topologyKey type: object type: array - x-kubernetes-list-map-keys: - - name - x-kubernetes-list-type: map - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum amount - of compute resources required. - type: object + x-kubernetes-list-type: atomic type: object type: object - failedJobsHistoryLimit: - description: The number of failed finished jobs to retain. - Value must be non-negative integer. - format: int32 - type: integer - jobSpec: - description: Specification of the desired behavior of a job. 
+ deploymentStrategy: + description: DeploymentStrategy describes how to replace existing + pods with new ones. properties: - activeDeadlineSeconds: - description: |- - Specifies the duration in seconds relative to the startTime that the job - may be continuously active before the system tr - format: int64 - type: integer - backoffLimit: - description: Specifies the number of retries before marking - this job failed. - format: int32 - type: integer - backoffLimitPerIndex: - description: |- - Specifies the limit for the number of retries within an - index before marking this index as failed. - format: int32 - type: integer - completionMode: + rollingUpdate: description: |- - completionMode specifies how Pod completions are tracked. It can be - `NonIndexed` (default) or `Indexed`. + Rolling update config params. Present only if DeploymentStrategyType = + RollingUpdate. + properties: + maxSurge: + anyOf: + - type: integer + - type: string + description: |- + The maximum number of pods that can be scheduled above the desired number of + pods. + x-kubernetes-int-or-string: true + maxUnavailable: + anyOf: + - type: integer + - type: string + description: The maximum number of pods that can be + unavailable during the update. + x-kubernetes-int-or-string: true + type: object + type: + description: Type of deployment. Can be "Recreate" or + "RollingUpdate". Default is RollingUpdate. type: string - completions: - description: |- - Specifies the desired number of successfully finished pods the - job should be run with. - format: int32 - type: integer - maxFailedIndexes: - description: |- - Specifies the maximal number of failed indexes before marking the Job as - failed, when backoffLimitPerIndex is set. - format: int32 - type: integer - parallelism: - description: |- - Specifies the maximum desired number of pods the job should - run at any given time. - format: int32 - type: integer - podFailurePolicy: - description: Specifies the policy of handling failed pods. 
+ type: object + disableInitContainers: + description: Disable the 'feast repo initialization' initContainer + type: boolean + offlineStore: + description: OfflineStore configures the offline store service + properties: + persistence: + description: OfflineStorePersistence configures the persistence + settings for the offline store service + properties: + file: + description: OfflineStoreFilePersistence configures + the file-based persistence for the offline store + service + properties: + pvc: + description: PvcConfig defines the settings for + a persistent file store based on PVCs. + properties: + create: + description: Settings for creating a new PVC + properties: + accessModes: + description: AccessModes k8s persistent + volume access modes. Defaults to ["ReadWriteOnce"]. + items: + type: string + type: array + resources: + description: Resources describes the storage + resource requirements for a volume. + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the + minimum amount of compute resources + required. + type: object + type: object + storageClassName: + description: StorageClassName is the name + of an existing StorageClass to which + this persistent volume belongs. 
+ type: string + type: object + x-kubernetes-validations: + - message: PvcCreate is immutable + rule: self == oldSelf + mountPath: + description: |- + MountPath within the container at which the volume should be mounted. + Must start by "/" and cannot contain ':'. + type: string + ref: + description: Reference to an existing field + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + required: + - mountPath + type: object + x-kubernetes-validations: + - message: One selection is required between ref + and create. + rule: '[has(self.ref), has(self.create)].exists_one(c, + c)' + - message: Mount path must start with '/' and + must not contain ':' + rule: self.mountPath.matches('^/[^:]*$') + type: + enum: + - file + - dask + - duckdb + type: string + type: object + store: + description: OfflineStoreDBStorePersistence configures + the DB store persistence for the offline store service + properties: + secretKeyName: + description: By default, the selected store "type" + is used as the SecretKeyName + type: string + secretRef: + description: Data store parameters should be placed + as-is from the "feature_store.yaml" under the + secret key. + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: + description: Type of the persistence type you + want to use. + enum: + - snowflake.offline + - bigquery + - redshift + - spark + - postgres + - trino + - athena + - mssql + - couchbase.offline + - clickhouse + - ray + type: string + required: + - secretRef + - type + type: object + type: object + x-kubernetes-validations: + - message: One selection required between file or store. 
+ rule: '[has(self.file), has(self.store)].exists_one(c, + c)' + server: + description: Creates a remote offline server container properties: - rules: - description: A list of pod failure policy rules. The - rules are evaluated in order. + env: items: - description: PodFailurePolicyRule describes how - a pod failure is handled when the requirements - are met. + description: EnvVar represents an environment variable + present in a Container. properties: - action: - description: Specifies the action taken on a - pod failure when the requirements are satisfied. + name: + description: Name of the environment variable. + Must be a C_IDENTIFIER. type: string - onExitCodes: - description: Represents the requirement on the - container exit codes. + value: + description: |- + Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any + type: string + valueFrom: + description: Source for the environment variable's + value. Cannot be used if value is not empty. properties: - containerName: - description: |- - Restricts the check for exit codes to the container with the - specified name. - type: string - operator: + configMapKeyRef: + description: Selects a key of a ConfigMap. + properties: + key: + description: The key to select. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the ConfigMap + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + fieldRef: + description: 'Selects a field of the pod: + supports metadata.name, metadata.namespace, + `metadata.labels['''']`, `metadata.' + properties: + apiVersion: + description: Version of the schema the + FieldPath is written in terms of, + defaults to "v1". 
+ type: string + fieldPath: + description: Path of the field to select + in the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: description: |- - Represents the relationship between the container exit code(s) and the - specified values. - type: string - values: - description: Specifies the set of values. - items: - format: int32 - type: integer - type: array - x-kubernetes-list-type: set - required: - - operator - - values - type: object - onPodConditions: - description: |- - Represents the requirement on the pod conditions. The requirement is represented - as a list of pod condition patterns. - items: - description: |- - PodFailurePolicyOnPodConditionsPattern describes a pattern for matching - an actual pod condition type. - properties: - status: - description: Specifies the required Pod - condition status. - type: string - type: - description: Specifies the required Pod - condition type. - type: string - required: - - status - - type - type: object - type: array - x-kubernetes-list-type: atomic - required: - - action - type: object - type: array - x-kubernetes-list-type: atomic - required: - - rules - type: object - podReplacementPolicy: - description: podReplacementPolicy specifies when to create - replacement Pods. - type: string - podTemplateAnnotations: - additionalProperties: - type: string - description: |- - PodTemplateAnnotations are annotations to be applied to the CronJob's PodTemplate - metadata. - type: object - suspend: - description: suspend specifies whether the Job controller - should create Pods or not. - type: boolean - ttlSecondsAfterFinished: - description: |- - ttlSecondsAfterFinished limits the lifetime of a Job that has finished - execution (either Complete or Failed). - format: int32 - type: integer - type: object - schedule: - description: The schedule in Cron format, see https://en.wikipedia.org/wiki/Cron. 
- type: string - startingDeadlineSeconds: - description: |- - Optional deadline in seconds for starting the job if it misses scheduled - time for any reason. - format: int64 - type: integer - successfulJobsHistoryLimit: - description: The number of successful finished jobs to retain. - Value must be non-negative integer. - format: int32 - type: integer - suspend: - description: |- - This flag tells the controller to suspend subsequent executions, it does - not apply to already started executions. - type: boolean - timeZone: - description: The time zone name for the given schedule, see - https://en.wikipedia.org/wiki/List_of_tz_database_time_zones. - type: string - type: object - feastProject: - description: FeastProject is the Feast project id. - pattern: ^[A-Za-z0-9][A-Za-z0-9_-]*$ - type: string - feastProjectDir: - description: FeastProjectDir defines how to create the feast project - directory. - properties: - git: - description: GitCloneOptions describes how a clone should - be performed. - properties: - configs: - additionalProperties: - type: string - description: |- - Configs passed to git via `-c` - e.g. http.sslVerify: 'false' - OR 'url."https://api:\${TOKEN}@github.com/". - type: object - env: - items: - description: EnvVar represents an environment variable - present in a Container. - properties: - name: - description: Name of the environment variable. Must - be a C_IDENTIFIER. - type: string - value: - description: |- - Variable references $(VAR_NAME) are expanded - using the previously defined environment variables in the container and - any - type: string - valueFrom: - description: Source for the environment variable's - value. Cannot be used if value is not empty. + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits. 
+ properties: + containerName: + description: 'Container name: required + for volumes, optional for env vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output format + of the exposed resources, defaults + to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource to + select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret in + the pod's namespace + properties: + key: + description: The key of the secret to + select from. Must be a valid secret + key. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the Secret + or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + type: object + required: + - name + type: object + type: array + envFrom: + items: + description: EnvFromSource represents the source + of a set of ConfigMaps properties: - configMapKeyRef: - description: Selects a key of a ConfigMap. + configMapRef: + description: The ConfigMap to select from properties: - key: - description: The key to select. - type: string name: default: "" description: |- @@ -4790,62 +7492,17 @@ spec: type: string optional: description: Specify whether the ConfigMap - or its key must be defined + must be defined type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - fieldRef: - description: 'Selects a field of the pod: supports - metadata.name, metadata.namespace, `metadata.labels['''']`, - `metadata.' 
- properties: - apiVersion: - description: Version of the schema the FieldPath - is written in terms of, defaults to "v1". - type: string - fieldPath: - description: Path of the field to select - in the specified API version. - type: string - required: - - fieldPath - type: object - x-kubernetes-map-type: atomic - resourceFieldRef: - description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, limits. - properties: - containerName: - description: 'Container name: required for - volumes, optional for env vars' - type: string - divisor: - anyOf: - - type: integer - - type: string - description: Specifies the output format - of the exposed resources, defaults to - "1" - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - description: 'Required: resource to select' - type: string - required: - - resource type: object x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret in the - pod's namespace + prefix: + description: An optional identifier to prepend + to each key in the ConfigMap. Must be a C_IDENTIFIER. + type: string + secretRef: + description: The Secret to select from properties: - key: - description: The key of the secret to select - from. Must be a valid secret key. 
- type: string name: default: "" description: |- @@ -4855,153 +7512,228 @@ spec: type: string optional: description: Specify whether the Secret - or its key must be defined + must be defined type: boolean - required: - - key type: object x-kubernetes-map-type: atomic type: object - required: - - name - type: object - type: array - envFrom: - items: - description: EnvFromSource represents the source of - a set of ConfigMaps - properties: - configMapRef: - description: The ConfigMap to select from + type: array + image: + type: string + imagePullPolicy: + description: PullPolicy describes a policy for if/when + to pull a container image + type: string + logLevel: + description: |- + LogLevel sets the logging level for the server + Allowed values: "debug", "info", "warning", "error", "critical". + enum: + - debug + - info + - warning + - error + - critical + type: string + metrics: + description: Metrics exposes Prometheus-compatible + metrics for the Feast server when enabled. + type: boolean + nodeSelector: + additionalProperties: + type: string + type: object + resources: + description: ResourceRequirements describes the compute + resource requirements. + properties: + claims: + description: |- + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. + items: + description: ResourceClaim references one entry + in PodSpec.ResourceClaims. + properties: + name: + description: |- + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. 
+ type: string + required: + - name + type: object + type: array + x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum amount + of compute resources required. + type: object + type: object + tls: + description: TlsConfigs configures server TLS for + a feast service. + properties: + disable: + description: will disable TLS for the feast service. + useful in an openshift cluster, for example, + where TLS is configured by default + type: boolean + secretKeyNames: + description: SecretKeyNames defines the secret + key names for the TLS key and cert. + properties: + tlsCrt: + description: defaults to "tls.crt" + type: string + tlsKey: + description: defaults to "tls.key" + type: string + type: object + secretRef: + description: references the local k8s secret where + the TLS key and cert reside + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: object + x-kubernetes-validations: + - message: '`secretRef` required if `disable` is false.' + rule: '(!has(self.disable) || !self.disable) ? 
has(self.secretRef) + : true' + volumeMounts: + description: VolumeMounts defines the list of volumes + that should be mounted into the feast container. + items: + description: VolumeMount describes a mounting of + a Volume within a container. properties: - name: - default: "" + mountPath: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. + Path within the container at which the volume should be mounted. Must + not contain ':'. + type: string + mountPropagation: + description: |- + mountPropagation determines how mounts are propagated from the host + to container and the other way around. type: string - optional: - description: Specify whether the ConfigMap must - be defined - type: boolean - type: object - x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend to - each key in the ConfigMap. Must be a C_IDENTIFIER. - type: string - secretRef: - description: The Secret to select from - properties: name: - default: "" + description: This must match the Name of a Volume. + type: string + readOnly: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. + Mounted read-only if true, read-write otherwise (false or unspecified). + Defaults to false. + type: boolean + recursiveReadOnly: + description: |- + RecursiveReadOnly specifies whether read-only mounts should be handled + recursively. + type: string + subPath: + description: |- + Path within the volume from which the container's volume should be mounted. + Defaults to "" (volume's root). + type: string + subPathExpr: + description: Expanded path within the volume + from which the container's volume should be + mounted. 
type: string - optional: - description: Specify whether the Secret must - be defined - type: boolean + required: + - mountPath + - name type: object - x-kubernetes-map-type: atomic - type: object - type: array - featureRepoPath: - description: FeatureRepoPath is the relative path to the - feature repo subdirectory. Default is 'feature_repo'. - type: string - ref: - description: Reference to a branch / tag / commit - type: string - url: - description: The repository URL to clone from. - type: string - required: - - url - type: object - x-kubernetes-validations: - - message: RepoPath must be a file name only, with no slashes. - rule: 'has(self.featureRepoPath) ? !self.featureRepoPath.startsWith(''/'') - : true' - init: - description: FeastInitOptions defines how to run a `feast - init`. - properties: - minimal: - type: boolean - template: - description: Template for the created project - enum: - - local - - gcp - - aws - - snowflake - - spark - - postgres - - hbase - - cassandra - - hazelcast - - ikv - - couchbase - - clickhouse - type: string - type: object - type: object - x-kubernetes-validations: - - message: One selection required between init or git. - rule: '[has(self.git), has(self.init)].exists_one(c, c)' - services: - description: FeatureStoreServices defines the desired feast services. - An ephemeral onlineStore feature server is deployed by default. - properties: - deploymentStrategy: - description: DeploymentStrategy describes how to replace existing - pods with new ones. - properties: - rollingUpdate: - description: |- - Rolling update config params. Present only if DeploymentStrategyType = - RollingUpdate. - properties: - maxSurge: - anyOf: - - type: integer - - type: string - description: |- - The maximum number of pods that can be scheduled above the desired number of - pods. 
- x-kubernetes-int-or-string: true - maxUnavailable: - anyOf: - - type: integer - - type: string - description: The maximum number of pods that can be - unavailable during the update. - x-kubernetes-int-or-string: true + type: array + workerConfigs: + description: WorkerConfigs defines the worker configuration + for the Feast server. + properties: + keepAliveTimeout: + description: |- + KeepAliveTimeout is the timeout for keep-alive connections in seconds. + Defaults to 30. + format: int32 + minimum: 1 + type: integer + maxRequests: + description: |- + MaxRequests is the maximum number of requests a worker will process before restarting. + This helps prevent memory leaks. + format: int32 + minimum: 0 + type: integer + maxRequestsJitter: + description: |- + MaxRequestsJitter is the maximum jitter to add to max-requests to prevent + thundering herd effect on worker restart. + format: int32 + minimum: 0 + type: integer + registryTTLSeconds: + description: RegistryTTLSeconds is the number + of seconds after which the registry is refreshed. + format: int32 + minimum: 0 + type: integer + workerConnections: + description: |- + WorkerConnections is the maximum number of simultaneous clients per worker process. + Defaults to 1000. + format: int32 + minimum: 1 + type: integer + workers: + description: Workers is the number of worker processes. + Use -1 to auto-calculate based on CPU cores + (2 * CPU + 1). + format: int32 + minimum: -1 + type: integer + type: object type: object - type: - description: Type of deployment. Can be "Recreate" or - "RollingUpdate". Default is RollingUpdate. 
- type: string type: object - disableInitContainers: - description: Disable the 'feast repo initialization' initContainer - type: boolean - offlineStore: - description: OfflineStore configures the offline store service + onlineStore: + description: OnlineStore configures the online store service properties: persistence: - description: OfflineStorePersistence configures the persistence - settings for the offline store service + description: OnlineStorePersistence configures the persistence + settings for the online store service properties: file: - description: OfflineStoreFilePersistence configures - the file-based persistence for the offline store + description: OnlineStoreFilePersistence configures + the file-based persistence for the online store service properties: + path: + type: string pvc: description: PvcConfig defines the settings for a persistent file store based on PVCs. @@ -5079,16 +7811,22 @@ spec: - message: Mount path must start with '/' and must not contain ':' rule: self.mountPath.matches('^/[^:]*$') - type: - enum: - - file - - dask - - duckdb - type: string type: object + x-kubernetes-validations: + - message: Ephemeral stores must have absolute paths. + rule: '(!has(self.pvc) && has(self.path)) ? self.path.startsWith(''/'') + : true' + - message: PVC path must be a file name only, with + no slashes. + rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') + : true' + - message: Online store does not support S3 or GS + buckets. + rule: 'has(self.path) ? !(self.path.startsWith(''s3://'') + || self.path.startsWith(''gs://'')) : true' store: - description: OfflineStoreDBStorePersistence configures - the DB store persistence for the offline store service + description: OnlineStoreDBStorePersistence configures + the DB store persistence for the online store service properties: secretKeyName: description: By default, the selected store "type" @@ -5112,17 +7850,23 @@ spec: description: Type of the persistence type you want to use. 
enum: - - snowflake.offline - - bigquery - - redshift - - spark + - snowflake.online + - redis + - datastore + - dynamodb + - bigtable - postgres - - trino - - athena - - mssql - - couchbase.offline - - clickhouse - - ray + - cassandra + - mysql + - hazelcast + - singlestore + - hbase + - elasticsearch + - qdrant + - couchbase.online + - milvus + - hybrid + - mongodb type: string required: - secretRef @@ -5134,7 +7878,7 @@ spec: rule: '[has(self.file), has(self.store)].exists_one(c, c)' server: - description: Creates a remote offline server container + description: Creates a feature server container properties: env: items: @@ -5494,76 +8238,182 @@ spec: type: object type: object type: object - onlineStore: - description: OnlineStore configures the online store service + podDisruptionBudgets: + description: PodDisruptionBudgets configures a PodDisruptionBudget + for the FeatureStore deployment. properties: - persistence: - description: OnlineStorePersistence configures the persistence - settings for the online store service + maxUnavailable: + anyOf: + - type: integer + - type: string + description: MaxUnavailable specifies the maximum number/percentage + of pods that can be unavailable. + x-kubernetes-int-or-string: true + minAvailable: + anyOf: + - type: integer + - type: string + description: MinAvailable specifies the minimum number/percentage + of pods that must remain available. + x-kubernetes-int-or-string: true + type: object + x-kubernetes-validations: + - message: Exactly one of minAvailable or maxUnavailable must + be set. + rule: '[has(self.minAvailable), has(self.maxUnavailable)].exists_one(c, + c)' + registry: + description: Registry configures the registry service. One + selection is required. Local is the default setting. 
+ properties: + local: + description: LocalRegistryConfig configures the registry + service properties: - file: - description: OnlineStoreFilePersistence configures - the file-based persistence for the online store - service + persistence: + description: RegistryPersistence configures the persistence + settings for the registry service properties: - path: - type: string - pvc: - description: PvcConfig defines the settings for - a persistent file store based on PVCs. + file: + description: RegistryFilePersistence configures + the file-based persistence for the registry + service properties: - create: - description: Settings for creating a new PVC + cache_mode: + description: |- + CacheMode defines the registry cache update strategy. + Allowed values are "sync" and "thread". + enum: + - none + - sync + - thread + type: string + cache_ttl_seconds: + description: CacheTTLSeconds defines the TTL + (in seconds) for the registry cache. + format: int32 + minimum: 0 + type: integer + path: + type: string + pvc: + description: PvcConfig defines the settings + for a persistent file store based on PVCs. properties: - accessModes: - description: AccessModes k8s persistent - volume access modes. Defaults to ["ReadWriteOnce"]. - items: - type: string - type: array - resources: - description: Resources describes the storage - resource requirements for a volume. + create: + description: Settings for creating a new + PVC properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. 
- type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the - minimum amount of compute resources - required. + accessModes: + description: AccessModes k8s persistent + volume access modes. Defaults to + ["ReadWriteOnce"]. + items: + type: string + type: array + resources: + description: Resources describes the + storage resource requirements for + a volume. + properties: + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes + the minimum amount of compute + resources required. + type: object type: object + storageClassName: + description: StorageClassName is the + name of an existing StorageClass + to which this persistent volume + belongs. + type: string type: object - storageClassName: - description: StorageClassName is the name - of an existing StorageClass to which - this persistent volume belongs. + x-kubernetes-validations: + - message: PvcCreate is immutable + rule: self == oldSelf + mountPath: + description: |- + MountPath within the container at which the volume should be mounted. + Must start by "/" and cannot contain ':'. 
type: string + ref: + description: Reference to an existing + field + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + required: + - mountPath type: object x-kubernetes-validations: - - message: PvcCreate is immutable - rule: self == oldSelf - mountPath: - description: |- - MountPath within the container at which the volume should be mounted. - Must start by "/" and cannot contain ':'. + - message: One selection is required between + ref and create. + rule: '[has(self.ref), has(self.create)].exists_one(c, + c)' + - message: Mount path must start with '/' + and must not contain ':' + rule: self.mountPath.matches('^/[^:]*$') + s3_additional_kwargs: + additionalProperties: + type: string + type: object + type: object + x-kubernetes-validations: + - message: Registry files must use absolute paths + or be S3 ('s3://') or GS ('gs://') object + store URIs. + rule: '(!has(self.pvc) && has(self.path)) ? + (self.path.startsWith(''/'') || self.path.startsWith(''s3://'') + || self.path.startsWith(''gs://'')) : true' + - message: PVC path must be a file name only, + with no slashes. + rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') + : true' + - message: PVC persistence does not support S3 + or GS object store URIs. + rule: '(has(self.pvc) && has(self.path)) ? !(self.path.startsWith(''s3://'') + || self.path.startsWith(''gs://'')) : true' + - message: Additional S3 settings are available + only for S3 object store URIs. + rule: '(has(self.s3_additional_kwargs) && has(self.path)) + ? 
self.path.startsWith(''s3://'') : true' + store: + description: RegistryDBStorePersistence configures + the DB store persistence for the registry service + properties: + secretKeyName: + description: By default, the selected store + "type" is used as the SecretKeyName type: string - ref: - description: Reference to an existing field + secretRef: + description: Data store parameters should + be placed as-is from the "feature_store.yaml" + under the secret key. properties: name: default: "" @@ -5574,111 +8424,149 @@ spec: type: string type: object x-kubernetes-map-type: atomic - required: - - mountPath - type: object - x-kubernetes-validations: - - message: One selection is required between ref - and create. - rule: '[has(self.ref), has(self.create)].exists_one(c, - c)' - - message: Mount path must start with '/' and - must not contain ':' - rule: self.mountPath.matches('^/[^:]*$') - type: object - x-kubernetes-validations: - - message: Ephemeral stores must have absolute paths. - rule: '(!has(self.pvc) && has(self.path)) ? self.path.startsWith(''/'') - : true' - - message: PVC path must be a file name only, with - no slashes. - rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') - : true' - - message: Online store does not support S3 or GS - buckets. - rule: 'has(self.path) ? !(self.path.startsWith(''s3://'') - || self.path.startsWith(''gs://'')) : true' - store: - description: OnlineStoreDBStorePersistence configures - the DB store persistence for the online store service - properties: - secretKeyName: - description: By default, the selected store "type" - is used as the SecretKeyName - type: string - secretRef: - description: Data store parameters should be placed - as-is from the "feature_store.yaml" under the - secret key. - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
+ type: + description: Type of the persistence type + you want to use. + enum: + - sql + - snowflake.registry type: string - type: object - x-kubernetes-map-type: atomic - type: - description: Type of the persistence type you - want to use. - enum: - - snowflake.online - - redis - - ikv - - datastore - - dynamodb - - bigtable - - postgres - - cassandra - - mysql - - hazelcast - - singlestore - - hbase - - elasticsearch - - qdrant - - couchbase.online - - milvus - - hybrid - type: string - required: - - secretRef - - type - type: object - type: object - x-kubernetes-validations: - - message: One selection required between file or store. - rule: '[has(self.file), has(self.store)].exists_one(c, - c)' - server: - description: Creates a feature server container - properties: - env: - items: - description: EnvVar represents an environment variable - present in a Container. - properties: - name: - description: Name of the environment variable. - Must be a C_IDENTIFIER. - type: string - value: - description: |- - Variable references $(VAR_NAME) are expanded - using the previously defined environment variables in the container and - any - type: string - valueFrom: - description: Source for the environment variable's - value. Cannot be used if value is not empty. + required: + - secretRef + - type + type: object + type: object + x-kubernetes-validations: + - message: One selection required between file or + store. + rule: '[has(self.file), has(self.store)].exists_one(c, + c)' + server: + description: Creates a registry server container + properties: + env: + items: + description: EnvVar represents an environment + variable present in a Container. properties: - configMapKeyRef: - description: Selects a key of a ConfigMap. + name: + description: Name of the environment variable. + Must be a C_IDENTIFIER. 
+ type: string + value: + description: |- + Variable references $(VAR_NAME) are expanded + using the previously defined environment variables in the container and + any + type: string + valueFrom: + description: Source for the environment + variable's value. Cannot be used if value + is not empty. + properties: + configMapKeyRef: + description: Selects a key of a ConfigMap. + properties: + key: + description: The key to select. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the + ConfigMap or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + fieldRef: + description: 'Selects a field of the + pod: supports metadata.name, metadata.namespace, + `metadata.labels['''']`, `metadata.' + properties: + apiVersion: + description: Version of the schema + the FieldPath is written in terms + of, defaults to "v1". + type: string + fieldPath: + description: Path of the field to + select in the specified API version. + type: string + required: + - fieldPath + type: object + x-kubernetes-map-type: atomic + resourceFieldRef: + description: |- + Selects a resource of the container: only resources limits and requests + (limits.cpu, limits.memory, limits. 
+ properties: + containerName: + description: 'Container name: required + for volumes, optional for env + vars' + type: string + divisor: + anyOf: + - type: integer + - type: string + description: Specifies the output + format of the exposed resources, + defaults to "1" + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + resource: + description: 'Required: resource + to select' + type: string + required: + - resource + type: object + x-kubernetes-map-type: atomic + secretKeyRef: + description: Selects a key of a secret + in the pod's namespace + properties: + key: + description: The key of the secret + to select from. Must be a valid + secret key. + type: string + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + optional: + description: Specify whether the + Secret or its key must be defined + type: boolean + required: + - key + type: object + x-kubernetes-map-type: atomic + type: object + required: + - name + type: object + type: array + envFrom: + items: + description: EnvFromSource represents the source + of a set of ConfigMaps + properties: + configMapRef: + description: The ConfigMap to select from properties: - key: - description: The key to select. - type: string name: default: "" description: |- @@ -5688,65 +8576,18 @@ spec: type: string optional: description: Specify whether the ConfigMap - or its key must be defined + must be defined type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - fieldRef: - description: 'Selects a field of the pod: - supports metadata.name, metadata.namespace, - `metadata.labels['''']`, `metadata.' - properties: - apiVersion: - description: Version of the schema the - FieldPath is written in terms of, - defaults to "v1". 
- type: string - fieldPath: - description: Path of the field to select - in the specified API version. - type: string - required: - - fieldPath - type: object - x-kubernetes-map-type: atomic - resourceFieldRef: - description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, limits. - properties: - containerName: - description: 'Container name: required - for volumes, optional for env vars' - type: string - divisor: - anyOf: - - type: integer - - type: string - description: Specifies the output format - of the exposed resources, defaults - to "1" - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - description: 'Required: resource to - select' - type: string - required: - - resource type: object x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret in - the pod's namespace + prefix: + description: An optional identifier to prepend + to each key in the ConfigMap. Must be + a C_IDENTIFIER. + type: string + secretRef: + description: The Secret to select from properties: - key: - description: The key of the secret to - select from. Must be a valid secret - key. - type: string name: default: "" description: |- @@ -5756,877 +8597,861 @@ spec: type: string optional: description: Specify whether the Secret - or its key must be defined + must be defined type: boolean + type: object + x-kubernetes-map-type: atomic + type: object + type: array + grpc: + description: Enable gRPC registry server. Defaults + to true if unset. + type: boolean + image: + type: string + imagePullPolicy: + description: PullPolicy describes a policy for + if/when to pull a container image + type: string + logLevel: + description: |- + LogLevel sets the logging level for the server + Allowed values: "debug", "info", "warning", "error", "critical". 
+ enum: + - debug + - info + - warning + - error + - critical + type: string + metrics: + description: Metrics exposes Prometheus-compatible + metrics for the Feast server when enabled. + type: boolean + nodeSelector: + additionalProperties: + type: string + type: object + resources: + description: ResourceRequirements describes the + compute resource requirements. + properties: + claims: + description: |- + Claims lists the names of resources, defined in spec.resourceClaims, + that are used by this container. + items: + description: ResourceClaim references one + entry in PodSpec.ResourceClaims. + properties: + name: + description: |- + Name must match the name of one entry in pod.spec.resourceClaims of + the Pod where this field is used. + type: string required: - - key + - name type: object - x-kubernetes-map-type: atomic - type: object - required: - - name - type: object - type: array - envFrom: - items: - description: EnvFromSource represents the source - of a set of ConfigMaps - properties: - configMapRef: - description: The ConfigMap to select from + type: array + x-kubernetes-list-map-keys: + - name + x-kubernetes-list-type: map + limits: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: |- + Limits describes the maximum amount of compute resources allowed. + More info: https://kubernetes. + type: object + requests: + additionalProperties: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + description: Requests describes the minimum + amount of compute resources required. + type: object + type: object + restAPI: + description: Enable REST API registry server. 
+ type: boolean + tls: + description: TlsConfigs configures server TLS + for a feast service. + properties: + disable: + description: will disable TLS for the feast + service. useful in an openshift cluster, + for example, where TLS is configured by + default + type: boolean + secretKeyNames: + description: SecretKeyNames defines the secret + key names for the TLS key and cert. + properties: + tlsCrt: + description: defaults to "tls.crt" + type: string + tlsKey: + description: defaults to "tls.key" + type: string + type: object + secretRef: + description: references the local k8s secret + where the TLS key and cert reside + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string + type: object + x-kubernetes-map-type: atomic + type: object + x-kubernetes-validations: + - message: '`secretRef` required if `disable` + is false.' + rule: '(!has(self.disable) || !self.disable) + ? has(self.secretRef) : true' + volumeMounts: + description: VolumeMounts defines the list of + volumes that should be mounted into the feast + container. + items: + description: VolumeMount describes a mounting + of a Volume within a container. properties: - name: - default: "" + mountPath: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. + Path within the container at which the volume should be mounted. Must + not contain ':'. type: string - optional: - description: Specify whether the ConfigMap - must be defined - type: boolean - type: object - x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend - to each key in the ConfigMap. Must be a C_IDENTIFIER. - type: string - secretRef: - description: The Secret to select from - properties: - name: - default: "" + mountPropagation: description: |- - Name of the referent. 
- This field is effectively required, but due to backwards compatibility is - allowed to be empty. + mountPropagation determines how mounts are propagated from the host + to container and the other way around. type: string - optional: - description: Specify whether the Secret - must be defined - type: boolean - type: object - x-kubernetes-map-type: atomic - type: object - type: array - image: - type: string - imagePullPolicy: - description: PullPolicy describes a policy for if/when - to pull a container image - type: string - logLevel: - description: |- - LogLevel sets the logging level for the server - Allowed values: "debug", "info", "warning", "error", "critical". - enum: - - debug - - info - - warning - - error - - critical - type: string - metrics: - description: Metrics exposes Prometheus-compatible - metrics for the Feast server when enabled. - type: boolean - nodeSelector: - additionalProperties: - type: string - type: object - resources: - description: ResourceRequirements describes the compute - resource requirements. - properties: - claims: - description: |- - Claims lists the names of resources, defined in spec.resourceClaims, - that are used by this container. - items: - description: ResourceClaim references one entry - in PodSpec.ResourceClaims. - properties: name: + description: This must match the Name of + a Volume. + type: string + readOnly: description: |- - Name must match the name of one entry in pod.spec.resourceClaims of - the Pod where this field is used. + Mounted read-only if true, read-write otherwise (false or unspecified). + Defaults to false. + type: boolean + recursiveReadOnly: + description: |- + RecursiveReadOnly specifies whether read-only mounts should be handled + recursively. + type: string + subPath: + description: |- + Path within the volume from which the container's volume should be mounted. + Defaults to "" (volume's root). 
+ type: string + subPathExpr: + description: Expanded path within the volume + from which the container's volume should + be mounted. type: string required: + - mountPath - name type: object type: array - x-kubernetes-list-map-keys: - - name - x-kubernetes-list-type: map - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum amount - of compute resources required. - type: object - type: object - tls: - description: TlsConfigs configures server TLS for - a feast service. - properties: - disable: - description: will disable TLS for the feast service. - useful in an openshift cluster, for example, - where TLS is configured by default - type: boolean - secretKeyNames: - description: SecretKeyNames defines the secret - key names for the TLS key and cert. - properties: - tlsCrt: - description: defaults to "tls.crt" - type: string - tlsKey: - description: defaults to "tls.key" - type: string - type: object - secretRef: - description: references the local k8s secret where - the TLS key and cert reside + workerConfigs: + description: WorkerConfigs defines the worker + configuration for the Feast server. properties: - name: - default: "" + keepAliveTimeout: + description: |- + KeepAliveTimeout is the timeout for keep-alive connections in seconds. + Defaults to 30. 
+ format: int32 + minimum: 1 + type: integer + maxRequests: + description: |- + MaxRequests is the maximum number of requests a worker will process before restarting. + This helps prevent memory leaks. + format: int32 + minimum: 0 + type: integer + maxRequestsJitter: + description: |- + MaxRequestsJitter is the maximum jitter to add to max-requests to prevent + thundering herd effect on worker restart. + format: int32 + minimum: 0 + type: integer + registryTTLSeconds: + description: RegistryTTLSeconds is the number + of seconds after which the registry is refreshed. + format: int32 + minimum: 0 + type: integer + workerConnections: description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string + WorkerConnections is the maximum number of simultaneous clients per worker process. + Defaults to 1000. + format: int32 + minimum: 1 + type: integer + workers: + description: Workers is the number of worker + processes. Use -1 to auto-calculate based + on CPU cores (2 * CPU + 1). + format: int32 + minimum: -1 + type: integer type: object - x-kubernetes-map-type: atomic type: object x-kubernetes-validations: - - message: '`secretRef` required if `disable` is false.' - rule: '(!has(self.disable) || !self.disable) ? has(self.secretRef) - : true' - volumeMounts: - description: VolumeMounts defines the list of volumes - that should be mounted into the feast container. - items: - description: VolumeMount describes a mounting of - a Volume within a container. - properties: - mountPath: - description: |- - Path within the container at which the volume should be mounted. Must - not contain ':'. - type: string - mountPropagation: - description: |- - mountPropagation determines how mounts are propagated from the host - to container and the other way around. - type: string - name: - description: This must match the Name of a Volume. 
- type: string - readOnly: - description: |- - Mounted read-only if true, read-write otherwise (false or unspecified). - Defaults to false. - type: boolean - recursiveReadOnly: - description: |- - RecursiveReadOnly specifies whether read-only mounts should be handled - recursively. - type: string - subPath: - description: |- - Path within the volume from which the container's volume should be mounted. - Defaults to "" (volume's root). - type: string - subPathExpr: - description: Expanded path within the volume - from which the container's volume should be - mounted. - type: string - required: - - mountPath - - name - type: object - type: array - workerConfigs: - description: WorkerConfigs defines the worker configuration - for the Feast server. - properties: - keepAliveTimeout: - description: |- - KeepAliveTimeout is the timeout for keep-alive connections in seconds. - Defaults to 30. - format: int32 - minimum: 1 - type: integer - maxRequests: - description: |- - MaxRequests is the maximum number of requests a worker will process before restarting. - This helps prevent memory leaks. - format: int32 - minimum: 0 - type: integer - maxRequestsJitter: - description: |- - MaxRequestsJitter is the maximum jitter to add to max-requests to prevent - thundering herd effect on worker restart. - format: int32 - minimum: 0 - type: integer - registryTTLSeconds: - description: RegistryTTLSeconds is the number - of seconds after which the registry is refreshed. - format: int32 - minimum: 0 - type: integer - workerConnections: - description: |- - WorkerConnections is the maximum number of simultaneous clients per worker process. - Defaults to 1000. - format: int32 - minimum: 1 - type: integer - workers: - description: Workers is the number of worker processes. - Use -1 to auto-calculate based on CPU cores - (2 * CPU + 1). 
- format: int32 - minimum: -1 - type: integer - type: object + - message: At least one of restAPI or grpc must be + true + rule: self.restAPI == true || self.grpc == true + || !has(self.grpc) type: object - type: object - registry: - description: Registry configures the registry service. One - selection is required. Local is the default setting. - properties: - local: - description: LocalRegistryConfig configures the registry - service + remote: + description: RemoteRegistryConfig points to a remote feast + registry server. properties: - persistence: - description: RegistryPersistence configures the persistence - settings for the registry service + feastRef: + description: Reference to an existing `FeatureStore` + CR in the same k8s cluster. properties: - file: - description: RegistryFilePersistence configures - the file-based persistence for the registry - service - properties: - cache_mode: - description: |- - CacheMode defines the registry cache update strategy. - Allowed values are "sync" and "thread". - enum: - - none - - sync - - thread - type: string - cache_ttl_seconds: - description: CacheTTLSeconds defines the TTL - (in seconds) for the registry cache. - format: int32 - minimum: 0 - type: integer - path: - type: string - pvc: - description: PvcConfig defines the settings - for a persistent file store based on PVCs. - properties: - create: - description: Settings for creating a new - PVC - properties: - accessModes: - description: AccessModes k8s persistent - volume access modes. Defaults to - ["ReadWriteOnce"]. - items: - type: string - type: array - resources: - description: Resources describes the - storage resource requirements for - a volume. 
- properties: - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes - the minimum amount of compute - resources required. - type: object - type: object - storageClassName: - description: StorageClassName is the - name of an existing StorageClass - to which this persistent volume - belongs. - type: string - type: object - x-kubernetes-validations: - - message: PvcCreate is immutable - rule: self == oldSelf - mountPath: - description: |- - MountPath within the container at which the volume should be mounted. - Must start by "/" and cannot contain ':'. - type: string - ref: - description: Reference to an existing - field - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - required: - - mountPath - type: object - x-kubernetes-validations: - - message: One selection is required between - ref and create. 
- rule: '[has(self.ref), has(self.create)].exists_one(c, - c)' - - message: Mount path must start with '/' - and must not contain ':' - rule: self.mountPath.matches('^/[^:]*$') - s3_additional_kwargs: - additionalProperties: - type: string - type: object + name: + description: Name of the FeatureStore + type: string + namespace: + description: Namespace of the FeatureStore + type: string + required: + - name + type: object + hostname: + description: Host address of the remote registry service + - :, e.g. `registry..svc.cluster.local:80` + type: string + tls: + description: TlsRemoteRegistryConfigs configures client + TLS for a remote feast registry. + properties: + certName: + description: defines the configmap key name for + the client TLS cert. + type: string + configMapRef: + description: references the local k8s configmap + where the TLS cert resides + properties: + name: + default: "" + description: |- + Name of the referent. + This field is effectively required, but due to backwards compatibility is + allowed to be empty. + type: string type: object - x-kubernetes-validations: - - message: Registry files must use absolute paths - or be S3 ('s3://') or GS ('gs://') object - store URIs. - rule: '(!has(self.pvc) && has(self.path)) ? - (self.path.startsWith(''/'') || self.path.startsWith(''s3://'') - || self.path.startsWith(''gs://'')) : true' - - message: PVC path must be a file name only, - with no slashes. - rule: '(has(self.pvc) && has(self.path)) ? !self.path.startsWith(''/'') - : true' - - message: PVC persistence does not support S3 - or GS object store URIs. - rule: '(has(self.pvc) && has(self.path)) ? !(self.path.startsWith(''s3://'') - || self.path.startsWith(''gs://'')) : true' - - message: Additional S3 settings are available - only for S3 object store URIs. - rule: '(has(self.s3_additional_kwargs) && has(self.path)) - ? 
self.path.startsWith(''s3://'') : true' - store: - description: RegistryDBStorePersistence configures - the DB store persistence for the registry service + x-kubernetes-map-type: atomic + required: + - certName + - configMapRef + type: object + type: object + x-kubernetes-validations: + - message: One selection required. + rule: '[has(self.hostname), has(self.feastRef)].exists_one(c, + c)' + type: object + x-kubernetes-validations: + - message: One selection required. + rule: '[has(self.local), has(self.remote)].exists_one(c, + c)' + scaling: + description: Scaling configures horizontal scaling for the + FeatureStore deployment (e.g. HPA autoscaling). + properties: + autoscaling: + description: |- + Autoscaling configures a HorizontalPodAutoscaler for the FeatureStore deployment. + Mutually exclusive with spec.replicas. + properties: + behavior: + description: Behavior configures the scaling behavior + of the target. + properties: + scaleDown: + description: scaleDown is scaling policy for scaling + Down. properties: - secretKeyName: - description: By default, the selected store - "type" is used as the SecretKeyName + policies: + description: policies is a list of potential + scaling polices which can be used during + scaling. + items: + description: HPAScalingPolicy is a single + policy which must hold true for a specified + past interval. + properties: + periodSeconds: + description: periodSeconds specifies + the window of time for which the policy + should hold true. + format: int32 + type: integer + type: + description: type is used to specify + the scaling policy. + type: string + value: + description: |- + value contains the amount of change which is permitted by the policy. + It must be greater than zero + format: int32 + type: integer + required: + - periodSeconds + - type + - value + type: object + type: array + x-kubernetes-list-type: atomic + selectPolicy: + description: |- + selectPolicy is used to specify which policy should be used. 
+ If not set, the default value Max is used. type: string - secretRef: - description: Data store parameters should - be placed as-is from the "feature_store.yaml" - under the secret key. - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - type: - description: Type of the persistence type - you want to use. - enum: - - sql - - snowflake.registry + stabilizationWindowSeconds: + description: |- + stabilizationWindowSeconds is the number of seconds for which past recommendations should be + considered while scaling up + format: int32 + type: integer + type: object + scaleUp: + description: scaleUp is scaling policy for scaling + Up. + properties: + policies: + description: policies is a list of potential + scaling polices which can be used during + scaling. + items: + description: HPAScalingPolicy is a single + policy which must hold true for a specified + past interval. + properties: + periodSeconds: + description: periodSeconds specifies + the window of time for which the policy + should hold true. + format: int32 + type: integer + type: + description: type is used to specify + the scaling policy. + type: string + value: + description: |- + value contains the amount of change which is permitted by the policy. + It must be greater than zero + format: int32 + type: integer + required: + - periodSeconds + - type + - value + type: object + type: array + x-kubernetes-list-type: atomic + selectPolicy: + description: |- + selectPolicy is used to specify which policy should be used. + If not set, the default value Max is used. 
type: string - required: - - secretRef - - type + stabilizationWindowSeconds: + description: |- + stabilizationWindowSeconds is the number of seconds for which past recommendations should be + considered while scaling up + format: int32 + type: integer type: object type: object - x-kubernetes-validations: - - message: One selection required between file or - store. - rule: '[has(self.file), has(self.store)].exists_one(c, - c)' - server: - description: Creates a registry server container - properties: - env: - items: - description: EnvVar represents an environment - variable present in a Container. + maxReplicas: + description: MaxReplicas is the upper limit for the + number of replicas. Required. + format: int32 + minimum: 1 + type: integer + metrics: + description: Metrics contains the specifications for + which to use to calculate the desired replica count. + items: + description: |- + MetricSpec specifies how to scale based on a single metric + (only `type` and one other matching field should be set at on + properties: + containerResource: + description: |- + containerResource refers to a resource metric (such as those specified in + requests and limits) known to Kubernetes descr properties: - name: - description: Name of the environment variable. - Must be a C_IDENTIFIER. + container: + description: container is the name of the + container in the pods of the scaling target type: string - value: - description: |- - Variable references $(VAR_NAME) are expanded - using the previously defined environment variables in the container and - any + name: + description: name is the name of the resource + in question. type: string - valueFrom: - description: Source for the environment - variable's value. Cannot be used if value - is not empty. + target: + description: target specifies the target + value for the given metric properties: - configMapKeyRef: - description: Selects a key of a ConfigMap. - properties: - key: - description: The key to select. 
- type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the - ConfigMap or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic - fieldRef: - description: 'Selects a field of the - pod: supports metadata.name, metadata.namespace, - `metadata.labels['''']`, `metadata.' - properties: - apiVersion: - description: Version of the schema - the FieldPath is written in terms - of, defaults to "v1". - type: string - fieldPath: - description: Path of the field to - select in the specified API version. - type: string - required: - - fieldPath - type: object - x-kubernetes-map-type: atomic - resourceFieldRef: + averageUtilization: + description: "averageUtilization is + the target value of the average of + the\nresource metric across all relevant + pods, represented as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string description: |- - Selects a resource of the container: only resources limits and requests - (limits.cpu, limits.memory, limits. - properties: - containerName: - description: 'Container name: required - for volumes, optional for env - vars' - type: string - divisor: - anyOf: - - type: integer - - type: string - description: Specifies the output - format of the exposed resources, - defaults to "1" - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - resource: - description: 'Required: resource - to select' - type: string - required: - - resource - type: object - x-kubernetes-map-type: atomic - secretKeyRef: - description: Selects a key of a secret - in the pod's namespace - properties: - key: - description: The key of the secret - to select from. Must be a valid - secret key. 
- type: string - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - optional: - description: Specify whether the - Secret or its key must be defined - type: boolean - required: - - key - type: object - x-kubernetes-map-type: atomic + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether + the metric type is Utilization, Value, + or AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value + of the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type type: object required: + - container - name + - target type: object - type: array - envFrom: - items: - description: EnvFromSource represents the source - of a set of ConfigMaps + external: + description: |- + external refers to a global metric that is not associated + with any Kubernetes object. properties: - configMapRef: - description: The ConfigMap to select from + metric: + description: metric identifies the target + metric by name and selector properties: name: - default: "" + description: name is the name of the + given metric + type: string + selector: + description: "selector is the string-encoded + form of a standard kubernetes label + selector for the given metric\nWhen + set, it is passed " + properties: + matchExpressions: + description: matchExpressions is + a list of label selector requirements. + The requirements are ANDed. 
+ items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map + of {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + required: + - name + type: object + target: + description: target specifies the target + value for the given metric + properties: + averageUtilization: + description: "averageUtilization is + the target value of the average of + the\nresource metric across all relevant + pods, represented as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
+ averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether + the metric type is Utilization, Value, + or AverageValue type: string - optional: - description: Specify whether the ConfigMap - must be defined - type: boolean + value: + anyOf: + - type: integer + - type: string + description: value is the target value + of the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type type: object - x-kubernetes-map-type: atomic - prefix: - description: An optional identifier to prepend - to each key in the ConfigMap. Must be - a C_IDENTIFIER. - type: string - secretRef: - description: The Secret to select from + required: + - metric + - target + type: object + object: + description: |- + object refers to a metric describing a single kubernetes object + (for example, hits-per-second on an Ingress object). + properties: + describedObject: + description: describedObject specifies the + descriptions of a object,such as kind,name + apiVersion properties: + apiVersion: + description: apiVersion is the API version + of the referent + type: string + kind: + description: 'kind is the kind of the + referent; More info: https://git.k8s.' + type: string name: - default: "" + description: 'name is the name of the + referent; More info: https://kubernetes.' 
+ type: string + required: + - kind + - name + type: object + metric: + description: metric identifies the target + metric by name and selector + properties: + name: + description: name is the name of the + given metric + type: string + selector: + description: "selector is the string-encoded + form of a standard kubernetes label + selector for the given metric\nWhen + set, it is passed " + properties: + matchExpressions: + description: matchExpressions is + a list of label selector requirements. + The requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map + of {key,value} pairs. + type: object + type: object + x-kubernetes-map-type: atomic + required: + - name + type: object + target: + description: target specifies the target + value for the given metric + properties: + averageUtilization: + description: "averageUtilization is + the target value of the average of + the\nresource metric across all relevant + pods, represented as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. 
+ averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether + the metric type is Utilization, Value, + or AverageValue type: string - optional: - description: Specify whether the Secret - must be defined - type: boolean + value: + anyOf: + - type: integer + - type: string + description: value is the target value + of the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type type: object - x-kubernetes-map-type: atomic + required: + - describedObject + - metric + - target type: object - type: array - grpc: - description: Enable gRPC registry server. Defaults - to true if unset. - type: boolean - image: - type: string - imagePullPolicy: - description: PullPolicy describes a policy for - if/when to pull a container image - type: string - logLevel: - description: |- - LogLevel sets the logging level for the server - Allowed values: "debug", "info", "warning", "error", "critical". - enum: - - debug - - info - - warning - - error - - critical - type: string - metrics: - description: Metrics exposes Prometheus-compatible - metrics for the Feast server when enabled. - type: boolean - nodeSelector: - additionalProperties: - type: string - type: object - resources: - description: ResourceRequirements describes the - compute resource requirements. - properties: - claims: - description: |- - Claims lists the names of resources, defined in spec.resourceClaims, - that are used by this container. - items: - description: ResourceClaim references one - entry in PodSpec.ResourceClaims. 
+ pods: + description: |- + pods refers to a metric describing each pod in the current scale target + (for example, transactions-processed-per-second) + properties: + metric: + description: metric identifies the target + metric by name and selector properties: name: - description: |- - Name must match the name of one entry in pod.spec.resourceClaims of - the Pod where this field is used. + description: name is the name of the + given metric type: string + selector: + description: "selector is the string-encoded + form of a standard kubernetes label + selector for the given metric\nWhen + set, it is passed " + properties: + matchExpressions: + description: matchExpressions is + a list of label selector requirements. + The requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label + key that the selector applies + to. + type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map + of {key,value} pairs. 
+ type: object + type: object + x-kubernetes-map-type: atomic required: - name type: object - type: array - x-kubernetes-list-map-keys: - - name - x-kubernetes-list-type: map - limits: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: |- - Limits describes the maximum amount of compute resources allowed. - More info: https://kubernetes. - type: object - requests: - additionalProperties: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - description: Requests describes the minimum - amount of compute resources required. - type: object - type: object - restAPI: - description: Enable REST API registry server. - type: boolean - tls: - description: TlsConfigs configures server TLS - for a feast service. - properties: - disable: - description: will disable TLS for the feast - service. useful in an openshift cluster, - for example, where TLS is configured by - default - type: boolean - secretKeyNames: - description: SecretKeyNames defines the secret - key names for the TLS key and cert. - properties: - tlsCrt: - description: defaults to "tls.crt" - type: string - tlsKey: - description: defaults to "tls.key" - type: string - type: object - secretRef: - description: references the local k8s secret - where the TLS key and cert reside - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - type: object - x-kubernetes-validations: - - message: '`secretRef` required if `disable` - is false.' - rule: '(!has(self.disable) || !self.disable) - ? 
has(self.secretRef) : true' - volumeMounts: - description: VolumeMounts defines the list of - volumes that should be mounted into the feast - container. - items: - description: VolumeMount describes a mounting - of a Volume within a container. + target: + description: target specifies the target + value for the given metric + properties: + averageUtilization: + description: "averageUtilization is + the target value of the average of + the\nresource metric across all relevant + pods, represented as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether + the metric type is Utilization, Value, + or AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value + of the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object + required: + - metric + - target + type: object + resource: + description: |- + resource refers to a resource metric (such as those specified in + requests and limits) known to Kubernetes describing eac properties: - mountPath: - description: |- - Path within the container at which the volume should be mounted. Must - not contain ':'. - type: string - mountPropagation: - description: |- - mountPropagation determines how mounts are propagated from the host - to container and the other way around. - type: string name: - description: This must match the Name of - a Volume. 
- type: string - readOnly: - description: |- - Mounted read-only if true, read-write otherwise (false or unspecified). - Defaults to false. - type: boolean - recursiveReadOnly: - description: |- - RecursiveReadOnly specifies whether read-only mounts should be handled - recursively. - type: string - subPath: - description: |- - Path within the volume from which the container's volume should be mounted. - Defaults to "" (volume's root). - type: string - subPathExpr: - description: Expanded path within the volume - from which the container's volume should - be mounted. + description: name is the name of the resource + in question. type: string + target: + description: target specifies the target + value for the given metric + properties: + averageUtilization: + description: "averageUtilization is + the target value of the average of + the\nresource metric across all relevant + pods, represented as a " + format: int32 + type: integer + averageValue: + anyOf: + - type: integer + - type: string + description: |- + averageValue is the target value of the average of the + metric across all relevant pods (as a quantity) + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: + description: type represents whether + the metric type is Utilization, Value, + or AverageValue + type: string + value: + anyOf: + - type: integer + - type: string + description: value is the target value + of the metric (as a quantity). + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + required: + - type + type: object required: - - mountPath - name + - target type: object - type: array - workerConfigs: - description: WorkerConfigs defines the worker - configuration for the Feast server. 
- properties: - keepAliveTimeout: - description: |- - KeepAliveTimeout is the timeout for keep-alive connections in seconds. - Defaults to 30. - format: int32 - minimum: 1 - type: integer - maxRequests: - description: |- - MaxRequests is the maximum number of requests a worker will process before restarting. - This helps prevent memory leaks. - format: int32 - minimum: 0 - type: integer - maxRequestsJitter: - description: |- - MaxRequestsJitter is the maximum jitter to add to max-requests to prevent - thundering herd effect on worker restart. - format: int32 - minimum: 0 - type: integer - registryTTLSeconds: - description: RegistryTTLSeconds is the number - of seconds after which the registry is refreshed. - format: int32 - minimum: 0 - type: integer - workerConnections: - description: |- - WorkerConnections is the maximum number of simultaneous clients per worker process. - Defaults to 1000. - format: int32 - minimum: 1 - type: integer - workers: - description: Workers is the number of worker - processes. Use -1 to auto-calculate based - on CPU cores (2 * CPU + 1). - format: int32 - minimum: -1 - type: integer - type: object - type: object - x-kubernetes-validations: - - message: At least one of restAPI or grpc must be - true - rule: self.restAPI == true || self.grpc == true - || !has(self.grpc) - type: object - remote: - description: RemoteRegistryConfig points to a remote feast - registry server. - properties: - feastRef: - description: Reference to an existing `FeatureStore` - CR in the same k8s cluster. - properties: - name: - description: Name of the FeatureStore - type: string - namespace: - description: Namespace of the FeatureStore - type: string - required: - - name - type: object - hostname: - description: Host address of the remote registry service - - :, e.g. `registry..svc.cluster.local:80` - type: string - tls: - description: TlsRemoteRegistryConfigs configures client - TLS for a remote feast registry. 
- properties: - certName: - description: defines the configmap key name for - the client TLS cert. - type: string - configMapRef: - description: references the local k8s configmap - where the TLS cert resides - properties: - name: - default: "" - description: |- - Name of the referent. - This field is effectively required, but due to backwards compatibility is - allowed to be empty. - type: string - type: object - x-kubernetes-map-type: atomic - required: - - certName - - configMapRef - type: object + type: + description: type is the type of metric source. + type: string + required: + - type + type: object + type: array + minReplicas: + description: MinReplicas is the lower limit for the + number of replicas. Defaults to 1. + format: int32 + minimum: 1 + type: integer + required: + - maxReplicas type: object - x-kubernetes-validations: - - message: One selection required. - rule: '[has(self.hostname), has(self.feastRef)].exists_one(c, - c)' type: object - x-kubernetes-validations: - - message: One selection required. - rule: '[has(self.local), has(self.remote)].exists_one(c, - c)' securityContext: description: PodSecurityContext holds pod-level security attributes and common container settings. @@ -6760,6 +9585,98 @@ spec: type: string type: object type: object + topologySpreadConstraints: + description: TopologySpreadConstraints defines how pods are + spread across topology domains. + items: + description: TopologySpreadConstraint specifies how to spread + matching pods among the given topology. + properties: + labelSelector: + description: LabelSelector is used to find matching + pods. + properties: + matchExpressions: + description: matchExpressions is a list of label + selector requirements. The requirements are ANDed. + items: + description: |- + A label selector requirement is a selector that contains values, a key, and an operator that + relates the key and values. + properties: + key: + description: key is the label key that the + selector applies to. 
+ type: string + operator: + description: |- + operator represents a key's relationship to a set of values. + Valid operators are In, NotIn, Exists and DoesNotExist. + type: string + values: + description: |- + values is an array of string values. If the operator is In or NotIn, + the values array must be non-empty. + items: + type: string + type: array + x-kubernetes-list-type: atomic + required: + - key + - operator + type: object + type: array + x-kubernetes-list-type: atomic + matchLabels: + additionalProperties: + type: string + description: matchLabels is a map of {key,value} + pairs. + type: object + type: object + x-kubernetes-map-type: atomic + matchLabelKeys: + description: |- + MatchLabelKeys is a set of pod label keys to select the pods over which + spreading will be calculated. + items: + type: string + type: array + x-kubernetes-list-type: atomic + maxSkew: + description: MaxSkew describes the degree to which pods + may be unevenly distributed. + format: int32 + type: integer + minDomains: + description: MinDomains indicates a minimum number of + eligible domains. + format: int32 + type: integer + nodeAffinityPolicy: + description: |- + NodeAffinityPolicy indicates how we will treat Pod's nodeAffinity/nodeSelector + when calculating pod topology spread skew + type: string + nodeTaintsPolicy: + description: |- + NodeTaintsPolicy indicates how we will treat node taints when calculating + pod topology spread skew. + type: string + topologyKey: + description: TopologyKey is the key of node labels. + type: string + whenUnsatisfiable: + description: |- + WhenUnsatisfiable indicates how to deal with a pod if it doesn't satisfy + the spread constraint. 
+ type: string + required: + - maxSkew + - topologyKey + - whenUnsatisfiable + type: object + type: array ui: description: Creates a UI server container properties: @@ -8547,7 +11464,39 @@ spec: type: object required: - feastProject + - replicas type: object + x-kubernetes-validations: + - message: replicas > 1 and services.scaling.autoscaling are mutually + exclusive. + rule: self.replicas <= 1 || !has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling) + - message: Scaling requires DB-backed persistence for the online store. + Configure services.onlineStore.persistence.store when using replicas + > 1 or autoscaling. + rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (has(self.services) + && has(self.services.onlineStore) && has(self.services.onlineStore.persistence) + && has(self.services.onlineStore.persistence.store)) + - message: Scaling requires DB-backed persistence for the offline + store. Configure services.offlineStore.persistence.store when + using replicas > 1 or autoscaling. + rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (!has(self.services) + || !has(self.services.offlineStore) || (has(self.services.offlineStore.persistence) + && has(self.services.offlineStore.persistence.store))) + - message: Scaling requires DB-backed or remote registry. Configure + registry.local.persistence.store or use a remote registry when + using replicas > 1 or autoscaling. S3/GCS-backed registry is also + allowed. 
+ rule: self.replicas <= 1 && (!has(self.services) || !has(self.services.scaling) + || !has(self.services.scaling.autoscaling)) || (has(self.services) + && has(self.services.registry) && (has(self.services.registry.remote) + || (has(self.services.registry.local) && has(self.services.registry.local.persistence) + && (has(self.services.registry.local.persistence.store) || (has(self.services.registry.local.persistence.file) + && has(self.services.registry.local.persistence.file.path) && + (self.services.registry.local.persistence.file.path.startsWith('s3://') + || self.services.registry.local.persistence.file.path.startsWith('gs://'))))))) clientConfigMap: description: ConfigMap in this namespace containing a client `feature_store.yaml` for this feast deployment @@ -8612,6 +11561,28 @@ spec: type: string phase: type: string + replicas: + description: Replicas is the current number of ready pod replicas + (used by the scale sub-resource). + format: int32 + type: integer + scalingStatus: + description: ScalingStatus reports the current scaling state of the + FeatureStore deployment. + properties: + currentReplicas: + description: CurrentReplicas is the current number of pod replicas. + format: int32 + type: integer + desiredReplicas: + description: DesiredReplicas is the desired number of pod replicas. + format: int32 + type: integer + type: object + selector: + description: Selector is the label selector for pods managed by the + FeatureStore deployment (used by the scale sub-resource). + type: string serviceHostnames: description: ServiceHostnames defines the service hostnames in the format of :, e.g. 
example.svc.cluster.local:80 @@ -8632,6 +11603,10 @@ spec: served: true storage: true subresources: + scale: + labelSelectorPath: .status.selector + specReplicasPath: .spec.replicas + statusReplicasPath: .status.replicas status: {} - additionalPrinterColumns: - jsonPath: .status.phase @@ -9302,7 +12277,6 @@ spec: - hbase - cassandra - hazelcast - - ikv - couchbase - clickhouse type: string @@ -9974,7 +12948,6 @@ spec: enum: - snowflake.online - redis - - ikv - datastore - dynamodb - bigtable @@ -9989,6 +12962,7 @@ spec: - couchbase.online - milvus - hybrid + - mongodb type: string required: - secretRef @@ -13513,7 +16487,6 @@ spec: - hbase - cassandra - hazelcast - - ikv - couchbase - clickhouse type: string @@ -14195,7 +17168,6 @@ spec: enum: - snowflake.online - redis - - ikv - datastore - dynamodb - bigtable @@ -14210,6 +17182,7 @@ spec: - couchbase.online - milvus - hybrid + - mongodb type: string required: - secretRef @@ -17325,6 +20298,18 @@ rules: - tokenreviews verbs: - create +- apiGroups: + - autoscaling + resources: + - horizontalpodautoscalers + verbs: + - create + - delete + - get + - list + - patch + - update + - watch - apiGroups: - batch resources: @@ -17393,6 +20378,18 @@ rules: - get - patch - update +- apiGroups: + - policy + resources: + - poddisruptionbudgets + verbs: + - create + - delete + - get + - list + - patch + - update + - watch - apiGroups: - rbac.authorization.k8s.io resources: @@ -17519,6 +20516,7 @@ spec: protocol: TCP targetPort: 8443 selector: + app.kubernetes.io/name: feast-operator control-plane: controller-manager --- apiVersion: apps/v1 @@ -17534,12 +20532,14 @@ spec: replicas: 1 selector: matchLabels: + app.kubernetes.io/name: feast-operator control-plane: controller-manager template: metadata: annotations: kubectl.kubernetes.io/default-container: manager labels: + app.kubernetes.io/name: feast-operator control-plane: controller-manager spec: containers: @@ -17551,10 +20551,10 @@ spec: - /manager env: - name: 
RELATED_IMAGE_FEATURE_SERVER - value: quay.io/feastdev/feature-server:0.60.0 + value: quay.io/feastdev/feature-server:0.61.0 - name: RELATED_IMAGE_CRON_JOB value: quay.io/openshift/origin-cli:4.17 - image: quay.io/feastdev/feast-operator:0.60.0 + image: quay.io/feastdev/feast-operator:0.61.0 livenessProbe: httpGet: path: /healthz diff --git a/infra/feast-operator/dist/operator-e2e-tests b/infra/feast-operator/dist/operator-e2e-tests index c9b11c0c3ea..48c66b9be7c 100755 Binary files a/infra/feast-operator/dist/operator-e2e-tests and b/infra/feast-operator/dist/operator-e2e-tests differ diff --git a/infra/feast-operator/docs/api/markdown/ref.md b/infra/feast-operator/docs/api/markdown/ref.md index ce64e4dd3ec..698c3f6bbe3 100644 --- a/infra/feast-operator/docs/api/markdown/ref.md +++ b/infra/feast-operator/docs/api/markdown/ref.md @@ -28,6 +28,24 @@ _Appears in:_ | `oidc` _[OidcAuthz](#oidcauthz)_ | | +#### AutoscalingConfig + + + +AutoscalingConfig defines HPA settings for the FeatureStore deployment. + +_Appears in:_ +- [ScalingConfig](#scalingconfig) + +| Field | Description | +| --- | --- | +| `minReplicas` _integer_ | MinReplicas is the lower limit for the number of replicas. Defaults to 1. | +| `maxReplicas` _integer_ | MaxReplicas is the upper limit for the number of replicas. Required. | +| `metrics` _[MetricSpec](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.30/#metricspec-v2-autoscaling) array_ | Metrics contains the specifications for which to use to calculate the desired replica count. +If not set, defaults to 80% CPU utilization. | +| `behavior` _[HorizontalPodAutoscalerBehavior](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.30/#horizontalpodautoscalerbehavior-v2-autoscaling)_ | Behavior configures the scaling behavior of the target. 
| + + #### BatchEngineConfig @@ -223,6 +241,17 @@ _Appears in:_ | `securityContext` _[PodSecurityContext](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.30/#podsecuritycontext-v1-core)_ | | | `disableInitContainers` _boolean_ | Disable the 'feast repo initialization' initContainer | | `volumes` _[Volume](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.30/#volume-v1-core) array_ | Volumes specifies the volumes to mount in the FeatureStore deployment. A corresponding `VolumeMount` should be added to whichever feast service(s) require access to said volume(s). | +| `scaling` _[ScalingConfig](#scalingconfig)_ | Scaling configures horizontal scaling for the FeatureStore deployment (e.g. HPA autoscaling). +For static replicas, use spec.replicas instead. | +| `podDisruptionBudgets` _[PDBConfig](#pdbconfig)_ | PodDisruptionBudgets configures a PodDisruptionBudget for the FeatureStore deployment. +Only created when scaling is enabled (replicas > 1 or autoscaling). | +| `topologySpreadConstraints` _[TopologySpreadConstraint](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.30/#topologyspreadconstraint-v1-core) array_ | TopologySpreadConstraints defines how pods are spread across topology domains. +When scaling is enabled and this is not set, the operator auto-injects a soft +zone-spread constraint (whenUnsatisfiable: ScheduleAnyway). +Set to an empty array to disable auto-injection. | +| `affinity` _[Affinity](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.30/#affinity-v1-core)_ | Affinity defines the pod scheduling constraints for the FeatureStore deployment. +When scaling is enabled and this is not set, the operator auto-injects a soft +pod anti-affinity rule to prefer spreading pods across nodes. 
| #### FeatureStoreSpec @@ -243,6 +272,8 @@ _Appears in:_ | `authz` _[AuthzConfig](#authzconfig)_ | | | `cronJob` _[FeastCronJob](#feastcronjob)_ | | | `batchEngine` _[BatchEngineConfig](#batchengineconfig)_ | | +| `replicas` _integer_ | Replicas is the desired number of pod replicas. Used by the scale sub-resource. +Mutually exclusive with services.scaling.autoscaling. | #### FeatureStoreStatus @@ -263,6 +294,9 @@ _Appears in:_ | `feastVersion` _string_ | | | `phase` _string_ | | | `serviceHostnames` _[ServiceHostnames](#servicehostnames)_ | | +| `replicas` _integer_ | Replicas is the current number of ready pod replicas (used by the scale sub-resource). | +| `selector` _string_ | Selector is the label selector for pods managed by the FeatureStore deployment (used by the scale sub-resource). | +| `scalingStatus` _[ScalingStatus](#scalingstatus)_ | ScalingStatus reports the current scaling state of the FeatureStore deployment. | #### GitCloneOptions @@ -592,6 +626,24 @@ _Appears in:_ | `nodeSelector` _map[string]string_ | | +#### PDBConfig + + + +PDBConfig configures a PodDisruptionBudget for the FeatureStore deployment. +Exactly one of minAvailable or maxUnavailable must be set. + +_Appears in:_ +- [FeatureStoreServices](#featurestoreservices) + +| Field | Description | +| --- | --- | +| `minAvailable` _[IntOrString](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.30/#intorstring-intstr-util)_ | MinAvailable specifies the minimum number/percentage of pods that must remain available. +Mutually exclusive with maxUnavailable. | +| `maxUnavailable` _[IntOrString](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.30/#intorstring-intstr-util)_ | MaxUnavailable specifies the maximum number/percentage of pods that can be unavailable. +Mutually exclusive with minAvailable. 
| + + #### PvcConfig @@ -747,6 +799,36 @@ _Appears in:_ | `tls` _[TlsRemoteRegistryConfigs](#tlsremoteregistryconfigs)_ | | +#### ScalingConfig + + + +ScalingConfig configures horizontal scaling for the FeatureStore deployment. + +_Appears in:_ +- [FeatureStoreServices](#featurestoreservices) + +| Field | Description | +| --- | --- | +| `autoscaling` _[AutoscalingConfig](#autoscalingconfig)_ | Autoscaling configures a HorizontalPodAutoscaler for the FeatureStore deployment. +Mutually exclusive with spec.replicas. | + + +#### ScalingStatus + + + +ScalingStatus reports the observed scaling state. + +_Appears in:_ +- [FeatureStoreStatus](#featurestorestatus) + +| Field | Description | +| --- | --- | +| `currentReplicas` _integer_ | CurrentReplicas is the current number of pod replicas. | +| `desiredReplicas` _integer_ | DesiredReplicas is the desired number of pod replicas. | + + #### SecretKeyNames diff --git a/infra/feast-operator/internal/controller/featurestore_controller.go b/infra/feast-operator/internal/controller/featurestore_controller.go index a9591d97c8a..6f67852a601 100644 --- a/infra/feast-operator/internal/controller/featurestore_controller.go +++ b/infra/feast-operator/internal/controller/featurestore_controller.go @@ -22,8 +22,10 @@ import ( "time" appsv1 "k8s.io/api/apps/v1" + autoscalingv2 "k8s.io/api/autoscaling/v2" batchv1 "k8s.io/api/batch/v1" corev1 "k8s.io/api/core/v1" + policyv1 "k8s.io/api/policy/v1" rbacv1 "k8s.io/api/rbac/v1" apierrors "k8s.io/apimachinery/pkg/api/errors" apimeta "k8s.io/apimachinery/pkg/api/meta" @@ -65,6 +67,8 @@ type FeatureStoreReconciler struct { // +kubebuilder:rbac:groups=authentication.k8s.io,resources=tokenreviews,verbs=create // +kubebuilder:rbac:groups=route.openshift.io,resources=routes,verbs=get;list;create;update;watch;delete // +kubebuilder:rbac:groups=batch,resources=cronjobs,verbs=get;list;watch;create;update;patch;delete +// 
+kubebuilder:rbac:groups=autoscaling,resources=horizontalpodautoscalers,verbs=get;list;watch;create;update;patch;delete +// +kubebuilder:rbac:groups=policy,resources=poddisruptionbudgets,verbs=get;list;watch;create;update;patch;delete // Reconcile is part of the main kubernetes reconciliation loop which aims to // move the current state of the cluster closer to the desired state. @@ -229,6 +233,8 @@ func (r *FeatureStoreReconciler) SetupWithManager(mgr ctrl.Manager) error { Owns(&rbacv1.RoleBinding{}). Owns(&rbacv1.Role{}). Owns(&batchv1.CronJob{}). + Owns(&autoscalingv2.HorizontalPodAutoscaler{}). + Owns(&policyv1.PodDisruptionBudget{}). Watches(&feastdevv1.FeatureStore{}, handler.EnqueueRequestsFromMapFunc(r.mapFeastRefsToFeastRequests)) if services.IsOpenShift() { diff --git a/infra/feast-operator/internal/controller/featurestore_controller_test.go b/infra/feast-operator/internal/controller/featurestore_controller_test.go index bfd4a484cff..a70cd476679 100644 --- a/infra/feast-operator/internal/controller/featurestore_controller_test.go +++ b/infra/feast-operator/internal/controller/featurestore_controller_test.go @@ -263,7 +263,7 @@ var _ = Describe("FeatureStore Controller", func() { Namespace: objMeta.Namespace, }, deploy) Expect(err).NotTo(HaveOccurred()) - Expect(deploy.Spec.Replicas).To(Equal(int32Ptr(3))) + Expect(deploy.Spec.Replicas).To(Equal(int32Ptr(1))) Expect(deploy.Spec.Template.Spec.InitContainers).To(HaveLen(1)) Expect(deploy.Spec.Template.Spec.InitContainers[0].Args[0]).To(ContainSubstring("git -c http.sslVerify=false clone")) Expect(deploy.Spec.Template.Spec.InitContainers[0].Args[0]).To(ContainSubstring("git checkout " + ref)) diff --git a/infra/feast-operator/internal/controller/services/scaling.go b/infra/feast-operator/internal/controller/services/scaling.go new file mode 100644 index 00000000000..ef1dd1f91d8 --- /dev/null +++ b/infra/feast-operator/internal/controller/services/scaling.go @@ -0,0 +1,269 @@ +/* +Copyright 2026 Feast 
Community. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package services + +import ( + "encoding/json" + + feastdevv1 "github.com/feast-dev/feast/infra/feast-operator/api/v1" + appsv1 "k8s.io/api/apps/v1" + autoscalingv2 "k8s.io/api/autoscaling/v2" + corev1 "k8s.io/api/core/v1" + policyv1 "k8s.io/api/policy/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/types" + hpaac "k8s.io/client-go/applyconfigurations/autoscaling/v2" + metaac "k8s.io/client-go/applyconfigurations/meta/v1" + pdbac "k8s.io/client-go/applyconfigurations/policy/v1" + "sigs.k8s.io/controller-runtime/pkg/client" + "sigs.k8s.io/controller-runtime/pkg/log" +) + +const ( + defaultHPACPUUtilization int32 = 80 + defaultHPAMinReplicas int32 = 1 + fieldManager = "feast-operator" +) + +// getDesiredReplicas returns the replica count the operator should set on the +// Deployment. When autoscaling is configured the Deployment replicas field is +// left to the HPA (nil is returned). Otherwise the static replica count from +// spec.replicas is returned. 
+func (feast *FeastServices) getDesiredReplicas() *int32 { + cr := feast.Handler.FeatureStore + services := cr.Status.Applied.Services + if services != nil && services.Scaling != nil && services.Scaling.Autoscaling != nil { + return nil + } + if cr.Status.Applied.Replicas != nil { + r := *cr.Status.Applied.Replicas + return &r + } + return nil +} + +// createOrDeleteHPA reconciles the HorizontalPodAutoscaler for the FeatureStore +// deployment using Server-Side Apply with typed apply configurations. If +// autoscaling is not configured, any existing HPA is deleted. +func (feast *FeastServices) createOrDeleteHPA() error { + cr := feast.Handler.FeatureStore + + scaling := cr.Status.Applied.Services.Scaling + if scaling == nil || scaling.Autoscaling == nil { + hpa := &autoscalingv2.HorizontalPodAutoscaler{ + ObjectMeta: feast.GetObjectMeta(), + } + hpa.SetGroupVersionKind(autoscalingv2.SchemeGroupVersion.WithKind("HorizontalPodAutoscaler")) + return feast.Handler.DeleteOwnedFeastObj(hpa) + } + + hpaAC := feast.buildHPAApplyConfig() + data, err := json.Marshal(hpaAC) + if err != nil { + return err + } + + hpa := &autoscalingv2.HorizontalPodAutoscaler{ObjectMeta: feast.GetObjectMeta()} + logger := log.FromContext(feast.Handler.Context) + if err := feast.Handler.Client.Patch(feast.Handler.Context, hpa, + client.RawPatch(types.ApplyPatchType, data), + client.FieldOwner(fieldManager), client.ForceOwnership); err != nil { + return err + } + logger.Info("Successfully applied", "HorizontalPodAutoscaler", hpa.Name) + + return nil +} + +// buildHPAApplyConfig constructs the fully desired HPA state as a typed apply +// configuration for Server-Side Apply. 
+func (feast *FeastServices) buildHPAApplyConfig() *hpaac.HorizontalPodAutoscalerApplyConfiguration { + cr := feast.Handler.FeatureStore + autoscaling := cr.Status.Applied.Services.Scaling.Autoscaling + objMeta := feast.GetObjectMeta() + deploy := feast.initFeastDeploy() + + minReplicas := defaultHPAMinReplicas + if autoscaling.MinReplicas != nil { + minReplicas = *autoscaling.MinReplicas + } + + hpa := hpaac.HorizontalPodAutoscaler(objMeta.Name, objMeta.Namespace). + WithLabels(feast.getLabels()). + WithOwnerReferences( + metaac.OwnerReference(). + WithAPIVersion(feastdevv1.GroupVersion.String()). + WithKind("FeatureStore"). + WithName(cr.Name). + WithUID(cr.UID). + WithController(true). + WithBlockOwnerDeletion(true), + ). + WithSpec(hpaac.HorizontalPodAutoscalerSpec(). + WithScaleTargetRef( + hpaac.CrossVersionObjectReference(). + WithAPIVersion(appsv1.SchemeGroupVersion.String()). + WithKind("Deployment"). + WithName(deploy.Name), + ). + WithMinReplicas(minReplicas). + WithMaxReplicas(autoscaling.MaxReplicas), + ) + + if len(autoscaling.Metrics) > 0 { + hpa.Spec.Metrics = convertMetrics(autoscaling.Metrics) + } else { + hpa.Spec.Metrics = defaultHPAMetrics() + } + + if autoscaling.Behavior != nil { + hpa.Spec.Behavior = convertBehavior(autoscaling.Behavior) + } + + return hpa +} + +func defaultHPAMetrics() []hpaac.MetricSpecApplyConfiguration { + return []hpaac.MetricSpecApplyConfiguration{ + *hpaac.MetricSpec(). + WithType(autoscalingv2.ResourceMetricSourceType). + WithResource( + hpaac.ResourceMetricSource(). + WithName(corev1.ResourceCPU). + WithTarget( + hpaac.MetricTarget(). + WithType(autoscalingv2.UtilizationMetricType). + WithAverageUtilization(defaultHPACPUUtilization), + ), + ), + } +} + +// convertMetrics converts standard API metric specs to their apply configuration +// equivalents via JSON round-trip (the types share identical JSON schemas). 
+func convertMetrics(metrics []autoscalingv2.MetricSpec) []hpaac.MetricSpecApplyConfiguration { + data, err := json.Marshal(metrics) + if err != nil { + return nil + } + var result []hpaac.MetricSpecApplyConfiguration + if err := json.Unmarshal(data, &result); err != nil { + return nil + } + return result +} + +// convertBehavior converts a standard API behavior spec to its apply configuration +// equivalent via JSON round-trip. +func convertBehavior(behavior *autoscalingv2.HorizontalPodAutoscalerBehavior) *hpaac.HorizontalPodAutoscalerBehaviorApplyConfiguration { + data, err := json.Marshal(behavior) + if err != nil { + return nil + } + result := &hpaac.HorizontalPodAutoscalerBehaviorApplyConfiguration{} + if err := json.Unmarshal(data, result); err != nil { + return nil + } + return result +} + +// applyOrDeletePDB reconciles the PodDisruptionBudget for the FeatureStore +// deployment using Server-Side Apply. If PodDisruptionBudgets is not configured +// or scaling is not enabled, any existing PDB is deleted. 
+func (feast *FeastServices) applyOrDeletePDB() error { + cr := feast.Handler.FeatureStore + services := cr.Status.Applied.Services + + if services == nil || services.PodDisruptionBudgets == nil || !isScalingEnabled(cr) { + pdb := &policyv1.PodDisruptionBudget{ObjectMeta: feast.GetObjectMeta()} + pdb.SetGroupVersionKind(policyv1.SchemeGroupVersion.WithKind("PodDisruptionBudget")) + return feast.Handler.DeleteOwnedFeastObj(pdb) + } + + pdbAC := feast.buildPDBApplyConfig() + data, err := json.Marshal(pdbAC) + if err != nil { + return err + } + + pdb := &policyv1.PodDisruptionBudget{ObjectMeta: feast.GetObjectMeta()} + logger := log.FromContext(feast.Handler.Context) + if err := feast.Handler.Client.Patch(feast.Handler.Context, pdb, + client.RawPatch(types.ApplyPatchType, data), + client.FieldOwner(fieldManager), client.ForceOwnership); err != nil { + return err + } + logger.Info("Successfully applied", "PodDisruptionBudget", pdb.Name) + + return nil +} + +// buildPDBApplyConfig constructs the fully desired PDB state as a typed apply +// configuration for Server-Side Apply. +func (feast *FeastServices) buildPDBApplyConfig() *pdbac.PodDisruptionBudgetApplyConfiguration { + cr := feast.Handler.FeatureStore + pdbConfig := cr.Status.Applied.Services.PodDisruptionBudgets + objMeta := feast.GetObjectMeta() + + pdb := pdbac.PodDisruptionBudget(objMeta.Name, objMeta.Namespace). + WithLabels(feast.getLabels()). + WithOwnerReferences( + metaac.OwnerReference(). + WithAPIVersion(feastdevv1.GroupVersion.String()). + WithKind("FeatureStore"). + WithName(cr.Name). + WithUID(cr.UID). + WithController(true). + WithBlockOwnerDeletion(true), + ). + WithSpec(pdbac.PodDisruptionBudgetSpec(). 
+ WithSelector(metaac.LabelSelector().WithMatchLabels(feast.getLabels())), + ) + + if pdbConfig.MinAvailable != nil { + pdb.Spec.WithMinAvailable(*pdbConfig.MinAvailable) + } + if pdbConfig.MaxUnavailable != nil { + pdb.Spec.WithMaxUnavailable(*pdbConfig.MaxUnavailable) + } + + return pdb +} + +// updateScalingStatus updates the scaling status fields using the deployment +func (feast *FeastServices) updateScalingStatus(deploy *appsv1.Deployment) { + cr := feast.Handler.FeatureStore + + cr.Status.Replicas = deploy.Status.ReadyReplicas + labels := feast.getLabels() + cr.Status.Selector = metav1.FormatLabelSelector(metav1.SetAsLabelSelector(labels)) + + if !isScalingEnabled(cr) { + cr.Status.ScalingStatus = nil + return + } + + var desired int32 + if deploy.Spec.Replicas != nil { + desired = *deploy.Spec.Replicas + } + + cr.Status.ScalingStatus = &feastdevv1.ScalingStatus{ + CurrentReplicas: deploy.Status.ReadyReplicas, + DesiredReplicas: desired, + } +} diff --git a/infra/feast-operator/internal/controller/services/scaling_test.go b/infra/feast-operator/internal/controller/services/scaling_test.go new file mode 100644 index 00000000000..58e808ac2dc --- /dev/null +++ b/infra/feast-operator/internal/controller/services/scaling_test.go @@ -0,0 +1,1009 @@ +/* +Copyright 2026 Feast Community. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package services + +import ( + "context" + + feastdevv1 "github.com/feast-dev/feast/infra/feast-operator/api/v1" + "github.com/feast-dev/feast/infra/feast-operator/internal/controller/handler" + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" + appsv1 "k8s.io/api/apps/v1" + autoscalingv1 "k8s.io/api/autoscaling/v1" + autoscalingv2 "k8s.io/api/autoscaling/v2" + corev1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/types" + "k8s.io/apimachinery/pkg/util/intstr" + "k8s.io/utils/ptr" + "sigs.k8s.io/controller-runtime/pkg/client" +) + +var _ = Describe("Horizontal Scaling", func() { + var ( + featureStore *feastdevv1.FeatureStore + feast *FeastServices + typeNamespacedName types.NamespacedName + ctx context.Context + ) + + BeforeEach(func() { + ctx = context.Background() + typeNamespacedName = types.NamespacedName{ + Name: "scaling-test-fs", + Namespace: "default", + } + + featureStore = &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{ + Name: typeNamespacedName.Name, + Namespace: typeNamespacedName.Namespace, + }, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "scalingproject", + Services: &feastdevv1.FeatureStoreServices{ + OnlineStore: &feastdevv1.OnlineStore{ + Server: &feastdevv1.ServerConfigs{ + ContainerConfigs: feastdevv1.ContainerConfigs{ + DefaultCtrConfigs: feastdevv1.DefaultCtrConfigs{ + Image: ptr.To("test-image"), + }, + }, + }, + Persistence: &feastdevv1.OnlineStorePersistence{ + DBPersistence: &feastdevv1.OnlineStoreDBStorePersistence{ + Type: "redis", + SecretRef: corev1.LocalObjectReference{ + Name: "redis-secret", + }, + }, + }, + }, + Registry: &feastdevv1.Registry{ + Local: &feastdevv1.LocalRegistryConfig{ + Server: &feastdevv1.RegistryServerConfigs{ + ServerConfigs: feastdevv1.ServerConfigs{ + ContainerConfigs: feastdevv1.ContainerConfigs{ + DefaultCtrConfigs: feastdevv1.DefaultCtrConfigs{ + Image: ptr.To("test-image"), + }, + }, + }, + GRPC: ptr.To(true), + }, + 
Persistence: &feastdevv1.RegistryPersistence{ + DBPersistence: &feastdevv1.RegistryDBStorePersistence{ + Type: "sql", + SecretRef: corev1.LocalObjectReference{ + Name: "registry-secret", + }, + }, + }, + }, + }, + }, + }, + } + + Expect(k8sClient.Create(ctx, featureStore)).To(Succeed()) + applySpecToStatus(featureStore) + + feast = &FeastServices{ + Handler: handler.FeastHandler{ + Client: k8sClient, + Context: ctx, + Scheme: k8sClient.Scheme(), + FeatureStore: featureStore, + }, + } + }) + + AfterEach(func() { + Expect(k8sClient.Delete(ctx, featureStore)).To(Succeed()) + }) + + Describe("isScalingEnabled", func() { + It("should return false when no scaling config is present", func() { + Expect(isScalingEnabled(featureStore)).To(BeFalse()) + }) + + It("should return false when replicas=1", func() { + featureStore.Status.Applied.Replicas = ptr.To(int32(1)) + Expect(isScalingEnabled(featureStore)).To(BeFalse()) + }) + + It("should return true when replicas > 1", func() { + featureStore.Status.Applied.Replicas = ptr.To(int32(3)) + Expect(isScalingEnabled(featureStore)).To(BeTrue()) + }) + + It("should return true when autoscaling is configured", func() { + featureStore.Status.Applied.Services.Scaling = &feastdevv1.ScalingConfig{ + Autoscaling: &feastdevv1.AutoscalingConfig{ + MaxReplicas: 5, + }, + } + Expect(isScalingEnabled(featureStore)).To(BeTrue()) + }) + }) + + Describe("CEL admission validation rejects invalid scaling configurations", func() { + dbOnlineStore := &feastdevv1.OnlineStore{ + Persistence: &feastdevv1.OnlineStorePersistence{ + DBPersistence: &feastdevv1.OnlineStoreDBStorePersistence{ + Type: "redis", + SecretRef: corev1.LocalObjectReference{Name: "redis-secret"}, + }, + }, + } + + dbRegistry := &feastdevv1.Registry{ + Local: &feastdevv1.LocalRegistryConfig{ + Persistence: &feastdevv1.RegistryPersistence{ + DBPersistence: &feastdevv1.RegistryDBStorePersistence{ + Type: "sql", + SecretRef: corev1.LocalObjectReference{Name: "registry-secret"}, + }, + 
}, + }, + } + + It("should accept scaling with full DB persistence", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-valid-db", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Replicas: ptr.To(int32(3)), + Services: &feastdevv1.FeatureStoreServices{ + OnlineStore: dbOnlineStore, + Registry: dbRegistry, + }, + }, + } + Expect(k8sClient.Create(ctx, fs)).To(Succeed()) + Expect(k8sClient.Delete(ctx, fs)).To(Succeed()) + }) + + It("should reject scaling when online store is missing (implicit file default)", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-no-online", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Replicas: ptr.To(int32(3)), + Services: &feastdevv1.FeatureStoreServices{ + Registry: dbRegistry, + }, + }, + } + err := k8sClient.Create(ctx, fs) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("online store")) + }) + + It("should reject scaling when online store uses file persistence", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-file-online", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Replicas: ptr.To(int32(3)), + Services: &feastdevv1.FeatureStoreServices{ + OnlineStore: &feastdevv1.OnlineStore{ + Persistence: &feastdevv1.OnlineStorePersistence{ + FilePersistence: &feastdevv1.OnlineStoreFilePersistence{ + Path: "/data/online.db", + }, + }, + }, + Registry: dbRegistry, + }, + }, + } + err := k8sClient.Create(ctx, fs) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("online store")) + }) + + It("should reject scaling when offline store uses file persistence", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-file-offline", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Replicas: 
ptr.To(int32(3)), + Services: &feastdevv1.FeatureStoreServices{ + OnlineStore: dbOnlineStore, + Registry: dbRegistry, + OfflineStore: &feastdevv1.OfflineStore{ + Persistence: &feastdevv1.OfflineStorePersistence{ + FilePersistence: &feastdevv1.OfflineStoreFilePersistence{ + Type: "duckdb", + }, + }, + }, + }, + }, + } + err := k8sClient.Create(ctx, fs) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("offline store")) + }) + + It("should reject scaling when no registry is configured (implicit file default)", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-no-registry", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Replicas: ptr.To(int32(3)), + Services: &feastdevv1.FeatureStoreServices{ + OnlineStore: dbOnlineStore, + }, + }, + } + err := k8sClient.Create(ctx, fs) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("registry")) + }) + + It("should reject scaling when registry uses file persistence", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-file-registry", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Replicas: ptr.To(int32(3)), + Services: &feastdevv1.FeatureStoreServices{ + OnlineStore: dbOnlineStore, + Registry: &feastdevv1.Registry{ + Local: &feastdevv1.LocalRegistryConfig{ + Persistence: &feastdevv1.RegistryPersistence{ + FilePersistence: &feastdevv1.RegistryFilePersistence{ + Path: "/data/registry.db", + }, + }, + }, + }, + }, + }, + } + err := k8sClient.Create(ctx, fs) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("registry")) + }) + + It("should accept scaling with S3-backed registry", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-s3-registry", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Replicas: ptr.To(int32(3)), + Services: 
&feastdevv1.FeatureStoreServices{ + OnlineStore: dbOnlineStore, + Registry: &feastdevv1.Registry{ + Local: &feastdevv1.LocalRegistryConfig{ + Persistence: &feastdevv1.RegistryPersistence{ + FilePersistence: &feastdevv1.RegistryFilePersistence{ + Path: "s3://my-bucket/registry.db", + }, + }, + }, + }, + }, + }, + } + Expect(k8sClient.Create(ctx, fs)).To(Succeed()) + Expect(k8sClient.Delete(ctx, fs)).To(Succeed()) + }) + + It("should accept scaling with GS-backed registry", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-gs-registry", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Replicas: ptr.To(int32(3)), + Services: &feastdevv1.FeatureStoreServices{ + OnlineStore: dbOnlineStore, + Registry: &feastdevv1.Registry{ + Local: &feastdevv1.LocalRegistryConfig{ + Persistence: &feastdevv1.RegistryPersistence{ + FilePersistence: &feastdevv1.RegistryFilePersistence{ + Path: "gs://my-bucket/registry.db", + }, + }, + }, + }, + }, + }, + } + Expect(k8sClient.Create(ctx, fs)).To(Succeed()) + Expect(k8sClient.Delete(ctx, fs)).To(Succeed()) + }) + + It("should accept scaling with remote registry", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-remote-reg", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Replicas: ptr.To(int32(3)), + Services: &feastdevv1.FeatureStoreServices{ + OnlineStore: dbOnlineStore, + Registry: &feastdevv1.Registry{ + Remote: &feastdevv1.RemoteRegistryConfig{ + Hostname: ptr.To("registry.example.com:80"), + }, + }, + }, + }, + } + Expect(k8sClient.Create(ctx, fs)).To(Succeed()) + Expect(k8sClient.Delete(ctx, fs)).To(Succeed()) + }) + + It("should accept file persistence when replicas is 1", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-rep1-file", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Replicas: 
ptr.To(int32(1)), + }, + } + Expect(k8sClient.Create(ctx, fs)).To(Succeed()) + Expect(k8sClient.Delete(ctx, fs)).To(Succeed()) + }) + + It("should accept file persistence when no scaling is configured", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-no-scaling", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + }, + } + Expect(k8sClient.Create(ctx, fs)).To(Succeed()) + Expect(k8sClient.Delete(ctx, fs)).To(Succeed()) + }) + + It("should reject autoscaling without DB online store", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-hpa-no-db", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Services: &feastdevv1.FeatureStoreServices{ + Scaling: &feastdevv1.ScalingConfig{ + Autoscaling: &feastdevv1.AutoscalingConfig{MaxReplicas: 5}, + }, + Registry: dbRegistry, + }, + }, + } + err := k8sClient.Create(ctx, fs) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("online store")) + }) + + It("should reject scaling when online store has no persistence configured", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-online-nop", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Replicas: ptr.To(int32(3)), + Services: &feastdevv1.FeatureStoreServices{ + OnlineStore: &feastdevv1.OnlineStore{}, + Registry: dbRegistry, + }, + }, + } + err := k8sClient.Create(ctx, fs) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("online store")) + }) + + It("should reject replicas and autoscaling set simultaneously", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-mutual-excl", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Replicas: ptr.To(int32(3)), + Services: &feastdevv1.FeatureStoreServices{ + Scaling: &feastdevv1.ScalingConfig{ + 
Autoscaling: &feastdevv1.AutoscalingConfig{MaxReplicas: 5}, + }, + OnlineStore: dbOnlineStore, + Registry: dbRegistry, + }, + }, + } + err := k8sClient.Create(ctx, fs) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("mutually exclusive")) + }) + }) + + Describe("getDesiredReplicas", func() { + It("should return 1 when no explicit replicas are configured (default)", func() { + replicas := feast.getDesiredReplicas() + Expect(replicas).NotTo(BeNil()) + Expect(*replicas).To(Equal(int32(1))) + }) + + It("should return static replicas when configured", func() { + featureStore.Status.Applied.Replicas = ptr.To(int32(3)) + replicas := feast.getDesiredReplicas() + Expect(replicas).NotTo(BeNil()) + Expect(*replicas).To(Equal(int32(3))) + }) + + It("should return nil when autoscaling is configured (HPA manages replicas)", func() { + featureStore.Status.Applied.Services.Scaling = &feastdevv1.ScalingConfig{ + Autoscaling: &feastdevv1.AutoscalingConfig{ + MaxReplicas: 5, + }, + } + Expect(feast.getDesiredReplicas()).To(BeNil()) + }) + }) + + Describe("Deployment Strategy", func() { + It("should default to Recreate when no scaling is configured", func() { + Expect(feast.ApplyDefaults()).To(Succeed()) + strategy := feast.getDeploymentStrategy() + Expect(strategy.Type).To(Equal(appsv1.RecreateDeploymentStrategyType)) + }) + + It("should default to RollingUpdate when scaling is enabled via replicas", func() { + featureStore.Status.Applied.Replicas = ptr.To(int32(3)) + strategy := feast.getDeploymentStrategy() + Expect(strategy.Type).To(Equal(appsv1.RollingUpdateDeploymentStrategyType)) + }) + + It("should respect user-defined strategy even with scaling", func() { + featureStore.Status.Applied.Replicas = ptr.To(int32(3)) + featureStore.Status.Applied.Services.DeploymentStrategy = &appsv1.DeploymentStrategy{ + Type: appsv1.RecreateDeploymentStrategyType, + } + strategy := feast.getDeploymentStrategy() + 
Expect(strategy.Type).To(Equal(appsv1.RecreateDeploymentStrategyType)) + }) + }) + + Describe("setDeployment with scaling", func() { + setFilePersistence := func() { + featureStore.Status.Applied.Services.OnlineStore = &feastdevv1.OnlineStore{ + Server: &feastdevv1.ServerConfigs{ + ContainerConfigs: feastdevv1.ContainerConfigs{ + DefaultCtrConfigs: feastdevv1.DefaultCtrConfigs{ + Image: ptr.To("test-image"), + }, + }, + }, + Persistence: &feastdevv1.OnlineStorePersistence{ + FilePersistence: &feastdevv1.OnlineStoreFilePersistence{ + Path: "/feast-data/online.db", + }, + }, + } + featureStore.Status.Applied.Services.Registry = &feastdevv1.Registry{ + Local: &feastdevv1.LocalRegistryConfig{ + Server: &feastdevv1.RegistryServerConfigs{ + ServerConfigs: feastdevv1.ServerConfigs{ + ContainerConfigs: feastdevv1.ContainerConfigs{ + DefaultCtrConfigs: feastdevv1.DefaultCtrConfigs{ + Image: ptr.To("test-image"), + }, + }, + }, + GRPC: ptr.To(true), + }, + Persistence: &feastdevv1.RegistryPersistence{ + FilePersistence: &feastdevv1.RegistryFilePersistence{ + Path: "/feast-data/registry.db", + }, + }, + }, + } + } + + It("should set static replicas on the deployment", func() { + setFilePersistence() + featureStore.Status.Applied.Replicas = ptr.To(int32(3)) + + deployment := feast.initFeastDeploy() + Expect(feast.setDeployment(deployment)).To(Succeed()) + Expect(deployment.Spec.Replicas).NotTo(BeNil()) + Expect(*deployment.Spec.Replicas).To(Equal(int32(3))) + }) + + It("should preserve existing replicas when autoscaling is configured", func() { + setFilePersistence() + featureStore.Status.Applied.Services.Scaling = &feastdevv1.ScalingConfig{ + Autoscaling: &feastdevv1.AutoscalingConfig{ + MaxReplicas: 5, + }, + } + + deployment := feast.initFeastDeploy() + existing := int32(4) + deployment.Spec.Replicas = &existing + Expect(feast.setDeployment(deployment)).To(Succeed()) + Expect(deployment.Spec.Replicas).NotTo(BeNil()) + Expect(*deployment.Spec.Replicas).To(Equal(int32(4))) + 
}) + + It("should set default replicas=1 when no explicit scaling is configured", func() { + setFilePersistence() + Expect(k8sClient.Status().Update(ctx, featureStore)).To(Succeed()) + feast.refreshFeatureStore(ctx, typeNamespacedName) + + deployment := feast.initFeastDeploy() + Expect(feast.setDeployment(deployment)).To(Succeed()) + Expect(deployment.Spec.Replicas).NotTo(BeNil()) + Expect(*deployment.Spec.Replicas).To(Equal(int32(1))) + }) + }) + + Describe("HPA Configuration", func() { + It("should build an HPA apply config with default CPU metrics", func() { + featureStore.Status.Applied.Services.Scaling = &feastdevv1.ScalingConfig{ + Autoscaling: &feastdevv1.AutoscalingConfig{ + MaxReplicas: 10, + }, + } + + hpa := feast.buildHPAApplyConfig() + Expect(*hpa.Spec.MaxReplicas).To(Equal(int32(10))) + Expect(*hpa.Spec.MinReplicas).To(Equal(int32(1))) + Expect(hpa.Spec.Metrics).To(HaveLen(1)) + Expect(*hpa.Spec.Metrics[0].Resource.Name).To(Equal(corev1.ResourceCPU)) + }) + + It("should build an HPA apply config with custom min replicas", func() { + featureStore.Status.Applied.Services.Scaling = &feastdevv1.ScalingConfig{ + Autoscaling: &feastdevv1.AutoscalingConfig{ + MinReplicas: ptr.To(int32(2)), + MaxReplicas: 10, + }, + } + + hpa := feast.buildHPAApplyConfig() + Expect(*hpa.Spec.MinReplicas).To(Equal(int32(2))) + Expect(*hpa.Spec.MaxReplicas).To(Equal(int32(10))) + }) + + It("should set correct scale target reference", func() { + featureStore.Status.Applied.Services.Scaling = &feastdevv1.ScalingConfig{ + Autoscaling: &feastdevv1.AutoscalingConfig{ + MaxReplicas: 5, + }, + } + + hpa := feast.buildHPAApplyConfig() + Expect(*hpa.Spec.ScaleTargetRef.APIVersion).To(Equal("apps/v1")) + Expect(*hpa.Spec.ScaleTargetRef.Kind).To(Equal("Deployment")) + Expect(*hpa.Spec.ScaleTargetRef.Name).To(Equal(GetFeastName(featureStore))) + }) + + It("should set TypeMeta and owner reference for SSA", func() { + featureStore.Status.Applied.Services.Scaling = &feastdevv1.ScalingConfig{ 
+ Autoscaling: &feastdevv1.AutoscalingConfig{ + MaxReplicas: 5, + }, + } + + hpa := feast.buildHPAApplyConfig() + Expect(*hpa.Kind).To(Equal("HorizontalPodAutoscaler")) + Expect(*hpa.APIVersion).To(Equal("autoscaling/v2")) + Expect(hpa.OwnerReferences).To(HaveLen(1)) + Expect(*hpa.OwnerReferences[0].Name).To(Equal(featureStore.Name)) + Expect(*hpa.OwnerReferences[0].Controller).To(BeTrue()) + }) + + It("should convert custom metrics via JSON round-trip", func() { + customMetrics := []autoscalingv2.MetricSpec{ + { + Type: autoscalingv2.ResourceMetricSourceType, + Resource: &autoscalingv2.ResourceMetricSource{ + Name: corev1.ResourceMemory, + Target: autoscalingv2.MetricTarget{ + Type: autoscalingv2.UtilizationMetricType, + AverageUtilization: ptr.To(int32(75)), + }, + }, + }, + } + featureStore.Status.Applied.Services.Scaling = &feastdevv1.ScalingConfig{ + Autoscaling: &feastdevv1.AutoscalingConfig{ + MaxReplicas: 10, + Metrics: customMetrics, + }, + } + + hpa := feast.buildHPAApplyConfig() + Expect(hpa.Spec.Metrics).To(HaveLen(1)) + Expect(*hpa.Spec.Metrics[0].Resource.Name).To(Equal(corev1.ResourceMemory)) + Expect(*hpa.Spec.Metrics[0].Resource.Target.AverageUtilization).To(Equal(int32(75))) + }) + }) + + Describe("PDB Configuration", func() { + It("should build a PDB apply config with maxUnavailable", func() { + maxUnavail := intstr.FromInt(1) + featureStore.Status.Applied.Services.PodDisruptionBudgets = &feastdevv1.PDBConfig{ + MaxUnavailable: &maxUnavail, + } + featureStore.Status.Applied.Replicas = ptr.To(int32(3)) + + pdb := feast.buildPDBApplyConfig() + Expect(*pdb.Kind).To(Equal("PodDisruptionBudget")) + Expect(*pdb.APIVersion).To(Equal("policy/v1")) + Expect(pdb.Spec.MaxUnavailable).NotTo(BeNil()) + Expect(pdb.Spec.MaxUnavailable.IntValue()).To(Equal(1)) + Expect(pdb.Spec.MinAvailable).To(BeNil()) + Expect(pdb.Spec.Selector.MatchLabels).To(HaveKeyWithValue(NameLabelKey, featureStore.Name)) + }) + + It("should build a PDB apply config with minAvailable", 
func() { + minAvail := intstr.FromString("50%") + featureStore.Status.Applied.Services.PodDisruptionBudgets = &feastdevv1.PDBConfig{ + MinAvailable: &minAvail, + } + featureStore.Status.Applied.Replicas = ptr.To(int32(3)) + + pdb := feast.buildPDBApplyConfig() + Expect(pdb.Spec.MinAvailable).NotTo(BeNil()) + Expect(pdb.Spec.MinAvailable.String()).To(Equal("50%")) + Expect(pdb.Spec.MaxUnavailable).To(BeNil()) + }) + + It("should set owner reference on PDB for SSA", func() { + maxUnavail := intstr.FromInt(1) + featureStore.Status.Applied.Services.PodDisruptionBudgets = &feastdevv1.PDBConfig{ + MaxUnavailable: &maxUnavail, + } + featureStore.Status.Applied.Replicas = ptr.To(int32(3)) + + pdb := feast.buildPDBApplyConfig() + Expect(pdb.OwnerReferences).To(HaveLen(1)) + Expect(*pdb.OwnerReferences[0].Name).To(Equal(featureStore.Name)) + Expect(*pdb.OwnerReferences[0].Controller).To(BeTrue()) + }) + }) + + Describe("CEL admission validation rejects invalid PDB configurations", func() { + dbOnlineStore := &feastdevv1.OnlineStore{ + Persistence: &feastdevv1.OnlineStorePersistence{ + DBPersistence: &feastdevv1.OnlineStoreDBStorePersistence{ + Type: "redis", + SecretRef: corev1.LocalObjectReference{Name: "redis-secret"}, + }, + }, + } + dbRegistry := &feastdevv1.Registry{ + Local: &feastdevv1.LocalRegistryConfig{ + Persistence: &feastdevv1.RegistryPersistence{ + DBPersistence: &feastdevv1.RegistryDBStorePersistence{ + Type: "sql", + SecretRef: corev1.LocalObjectReference{Name: "registry-secret"}, + }, + }, + }, + } + + It("should reject PDB with both minAvailable and maxUnavailable set", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-pdb-both", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Replicas: ptr.To(int32(3)), + Services: &feastdevv1.FeatureStoreServices{ + OnlineStore: dbOnlineStore, + Registry: dbRegistry, + PodDisruptionBudgets: &feastdevv1.PDBConfig{ + MinAvailable: 
ptr.To(intstr.FromInt(1)), + MaxUnavailable: ptr.To(intstr.FromInt(1)), + }, + }, + }, + } + err := k8sClient.Create(ctx, fs) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("Exactly one of minAvailable or maxUnavailable")) + }) + + It("should reject PDB with neither minAvailable nor maxUnavailable set", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-pdb-none", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Replicas: ptr.To(int32(3)), + Services: &feastdevv1.FeatureStoreServices{ + OnlineStore: dbOnlineStore, + Registry: dbRegistry, + PodDisruptionBudgets: &feastdevv1.PDBConfig{}, + }, + }, + } + err := k8sClient.Create(ctx, fs) + Expect(err).To(HaveOccurred()) + Expect(err.Error()).To(ContainSubstring("Exactly one of minAvailable or maxUnavailable")) + }) + + It("should accept PDB with only maxUnavailable", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-pdb-maxu", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Replicas: ptr.To(int32(3)), + Services: &feastdevv1.FeatureStoreServices{ + OnlineStore: dbOnlineStore, + Registry: dbRegistry, + PodDisruptionBudgets: &feastdevv1.PDBConfig{ + MaxUnavailable: ptr.To(intstr.FromInt(1)), + }, + }, + }, + } + Expect(k8sClient.Create(ctx, fs)).To(Succeed()) + Expect(k8sClient.Delete(ctx, fs)).To(Succeed()) + }) + + It("should accept PDB with only minAvailable", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{Name: "cel-pdb-mina", Namespace: "default"}, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "celtest", + Replicas: ptr.To(int32(3)), + Services: &feastdevv1.FeatureStoreServices{ + OnlineStore: dbOnlineStore, + Registry: dbRegistry, + PodDisruptionBudgets: &feastdevv1.PDBConfig{ + MinAvailable: ptr.To(intstr.FromString("50%")), + }, + }, + }, + } + Expect(k8sClient.Create(ctx, fs)).To(Succeed()) + 
Expect(k8sClient.Delete(ctx, fs)).To(Succeed()) + }) + }) + + Describe("Topology Spread", func() { + It("should auto-inject soft zone constraint when replicas > 1 and no explicit constraints", func() { + featureStore.Status.Applied.Replicas = ptr.To(int32(3)) + + podSpec := &corev1.PodSpec{} + feast.applyTopologySpread(podSpec) + + Expect(podSpec.TopologySpreadConstraints).To(HaveLen(1)) + Expect(podSpec.TopologySpreadConstraints[0].TopologyKey).To(Equal("topology.kubernetes.io/zone")) + Expect(podSpec.TopologySpreadConstraints[0].WhenUnsatisfiable).To(Equal(corev1.ScheduleAnyway)) + Expect(podSpec.TopologySpreadConstraints[0].MaxSkew).To(Equal(int32(1))) + Expect(podSpec.TopologySpreadConstraints[0].LabelSelector.MatchLabels).To(HaveKeyWithValue(NameLabelKey, featureStore.Name)) + }) + + It("should auto-inject when autoscaling is configured", func() { + featureStore.Status.Applied.Services.Scaling = &feastdevv1.ScalingConfig{ + Autoscaling: &feastdevv1.AutoscalingConfig{MaxReplicas: 5}, + } + + podSpec := &corev1.PodSpec{} + feast.applyTopologySpread(podSpec) + + Expect(podSpec.TopologySpreadConstraints).To(HaveLen(1)) + Expect(podSpec.TopologySpreadConstraints[0].WhenUnsatisfiable).To(Equal(corev1.ScheduleAnyway)) + }) + + It("should not inject when replicas is 1 and no autoscaling", func() { + podSpec := &corev1.PodSpec{} + feast.applyTopologySpread(podSpec) + + Expect(podSpec.TopologySpreadConstraints).To(BeEmpty()) + }) + + It("should use user-provided constraints instead of defaults", func() { + featureStore.Status.Applied.Replicas = ptr.To(int32(3)) + featureStore.Status.Applied.Services.TopologySpreadConstraints = []corev1.TopologySpreadConstraint{{ + MaxSkew: 2, + TopologyKey: "kubernetes.io/hostname", + WhenUnsatisfiable: corev1.DoNotSchedule, + LabelSelector: metav1.SetAsLabelSelector(map[string]string{"custom": "label"}), + }} + + podSpec := &corev1.PodSpec{} + feast.applyTopologySpread(podSpec) + + 
Expect(podSpec.TopologySpreadConstraints).To(HaveLen(1)) + Expect(podSpec.TopologySpreadConstraints[0].TopologyKey).To(Equal("kubernetes.io/hostname")) + Expect(podSpec.TopologySpreadConstraints[0].WhenUnsatisfiable).To(Equal(corev1.DoNotSchedule)) + Expect(podSpec.TopologySpreadConstraints[0].MaxSkew).To(Equal(int32(2))) + }) + + It("should disable auto-injection when empty array is set", func() { + featureStore.Status.Applied.Replicas = ptr.To(int32(3)) + featureStore.Status.Applied.Services.TopologySpreadConstraints = []corev1.TopologySpreadConstraint{} + + podSpec := &corev1.PodSpec{} + feast.applyTopologySpread(podSpec) + + Expect(podSpec.TopologySpreadConstraints).To(BeEmpty()) + }) + }) + + Describe("Pod Anti-Affinity", func() { + It("should auto-inject soft node anti-affinity when replicas > 1", func() { + featureStore.Status.Applied.Replicas = ptr.To(int32(3)) + + podSpec := &corev1.PodSpec{} + feast.applyAffinity(podSpec) + + Expect(podSpec.Affinity).NotTo(BeNil()) + Expect(podSpec.Affinity.PodAntiAffinity).NotTo(BeNil()) + terms := podSpec.Affinity.PodAntiAffinity.PreferredDuringSchedulingIgnoredDuringExecution + Expect(terms).To(HaveLen(1)) + Expect(terms[0].Weight).To(Equal(int32(100))) + Expect(terms[0].PodAffinityTerm.TopologyKey).To(Equal("kubernetes.io/hostname")) + Expect(terms[0].PodAffinityTerm.LabelSelector.MatchLabels).To(HaveKeyWithValue(NameLabelKey, featureStore.Name)) + }) + + It("should auto-inject when autoscaling is configured", func() { + featureStore.Status.Applied.Services.Scaling = &feastdevv1.ScalingConfig{ + Autoscaling: &feastdevv1.AutoscalingConfig{MaxReplicas: 5}, + } + + podSpec := &corev1.PodSpec{} + feast.applyAffinity(podSpec) + + Expect(podSpec.Affinity).NotTo(BeNil()) + Expect(podSpec.Affinity.PodAntiAffinity).NotTo(BeNil()) + }) + + It("should not inject when replicas is 1 and no autoscaling", func() { + podSpec := &corev1.PodSpec{} + feast.applyAffinity(podSpec) + + Expect(podSpec.Affinity).To(BeNil()) + }) + + 
It("should use user-provided affinity instead of defaults", func() { + featureStore.Status.Applied.Replicas = ptr.To(int32(3)) + featureStore.Status.Applied.Services.Affinity = &corev1.Affinity{ + PodAntiAffinity: &corev1.PodAntiAffinity{ + RequiredDuringSchedulingIgnoredDuringExecution: []corev1.PodAffinityTerm{{ + TopologyKey: "kubernetes.io/hostname", + LabelSelector: metav1.SetAsLabelSelector(map[string]string{"custom": "label"}), + }}, + }, + } + + podSpec := &corev1.PodSpec{} + feast.applyAffinity(podSpec) + + Expect(podSpec.Affinity.PodAntiAffinity.RequiredDuringSchedulingIgnoredDuringExecution).To(HaveLen(1)) + Expect(podSpec.Affinity.PodAntiAffinity.PreferredDuringSchedulingIgnoredDuringExecution).To(BeEmpty()) + Expect(podSpec.Affinity.PodAntiAffinity.RequiredDuringSchedulingIgnoredDuringExecution[0].TopologyKey).To(Equal("kubernetes.io/hostname")) + }) + + It("should allow user to set node affinity alongside anti-affinity", func() { + featureStore.Status.Applied.Replicas = ptr.To(int32(3)) + featureStore.Status.Applied.Services.Affinity = &corev1.Affinity{ + NodeAffinity: &corev1.NodeAffinity{ + RequiredDuringSchedulingIgnoredDuringExecution: &corev1.NodeSelector{ + NodeSelectorTerms: []corev1.NodeSelectorTerm{{ + MatchExpressions: []corev1.NodeSelectorRequirement{{ + Key: "gpu", + Operator: corev1.NodeSelectorOpIn, + Values: []string{"true"}, + }}, + }}, + }, + }, + } + + podSpec := &corev1.PodSpec{} + feast.applyAffinity(podSpec) + + Expect(podSpec.Affinity.NodeAffinity).NotTo(BeNil()) + Expect(podSpec.Affinity.PodAntiAffinity).To(BeNil()) + }) + }) + + Describe("Scale sub-resource", func() { + newDBFeatureStore := func(name string) *feastdevv1.FeatureStore { + return &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{ + Name: name, + Namespace: "default", + }, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "scaletest", + Services: &feastdevv1.FeatureStoreServices{ + OnlineStore: &feastdevv1.OnlineStore{ + Persistence: 
&feastdevv1.OnlineStorePersistence{ + DBPersistence: &feastdevv1.OnlineStoreDBStorePersistence{ + Type: "redis", + SecretRef: corev1.LocalObjectReference{Name: "redis-secret"}, + }, + }, + }, + Registry: &feastdevv1.Registry{ + Local: &feastdevv1.LocalRegistryConfig{ + Persistence: &feastdevv1.RegistryPersistence{ + DBPersistence: &feastdevv1.RegistryDBStorePersistence{ + Type: "sql", + SecretRef: corev1.LocalObjectReference{Name: "registry-secret"}, + }, + }, + }, + }, + }, + }, + } + } + + It("should allow scaling up via the scale sub-resource with DB persistence", func() { + fs := newDBFeatureStore("scale-sub-valid") + Expect(k8sClient.Create(ctx, fs)).To(Succeed()) + defer func() { Expect(k8sClient.Delete(ctx, fs)).To(Succeed()) }() + + scale := &autoscalingv1.Scale{} + Expect(k8sClient.SubResource("scale").Get(ctx, fs, scale)).To(Succeed()) + Expect(scale.Spec.Replicas).To(Equal(int32(1))) + + scale.Spec.Replicas = 3 + Expect(k8sClient.SubResource("scale").Update(ctx, fs, client.WithSubResourceBody(scale))).To(Succeed()) + + updated := &feastdevv1.FeatureStore{} + Expect(k8sClient.Get(ctx, types.NamespacedName{Name: fs.Name, Namespace: fs.Namespace}, updated)).To(Succeed()) + Expect(updated.Spec.Replicas).NotTo(BeNil()) + Expect(*updated.Spec.Replicas).To(Equal(int32(3))) + }) + + It("should reject scaling up via the scale sub-resource without DB persistence", func() { + fs := &feastdevv1.FeatureStore{ + ObjectMeta: metav1.ObjectMeta{ + Name: "scale-sub-reject", + Namespace: "default", + }, + Spec: feastdevv1.FeatureStoreSpec{ + FeastProject: "scaletest", + }, + } + Expect(k8sClient.Create(ctx, fs)).To(Succeed()) + defer func() { Expect(k8sClient.Delete(ctx, fs)).To(Succeed()) }() + + scale := &autoscalingv1.Scale{} + Expect(k8sClient.SubResource("scale").Get(ctx, fs, scale)).To(Succeed()) + + scale.Spec.Replicas = 3 + err := k8sClient.SubResource("scale").Update(ctx, fs, client.WithSubResourceBody(scale)) + Expect(err).To(HaveOccurred()) + 
Expect(err.Error()).To(ContainSubstring("online store")) + }) + + It("should read the status replicas from the scale sub-resource", func() { + fs := newDBFeatureStore("scale-sub-status") + fs.Spec.Replicas = ptr.To(int32(2)) + Expect(k8sClient.Create(ctx, fs)).To(Succeed()) + defer func() { Expect(k8sClient.Delete(ctx, fs)).To(Succeed()) }() + + fs.Status.Replicas = 2 + fs.Status.Applied.FeastProject = fs.Spec.FeastProject + Expect(k8sClient.Status().Update(ctx, fs)).To(Succeed()) + + scale := &autoscalingv1.Scale{} + Expect(k8sClient.SubResource("scale").Get(ctx, fs, scale)).To(Succeed()) + Expect(scale.Status.Replicas).To(Equal(int32(2))) + Expect(scale.Spec.Replicas).To(Equal(int32(2))) + }) + }) +}) diff --git a/infra/feast-operator/internal/controller/services/services.go b/infra/feast-operator/internal/controller/services/services.go index 6771e9498af..fe8f3ecbc15 100644 --- a/infra/feast-operator/internal/controller/services/services.go +++ b/infra/feast-operator/internal/controller/services/services.go @@ -71,14 +71,45 @@ func (feast *FeastServices) Deploy() error { _ = feast.Handler.DeleteOwnedFeastObj(feast.initCaConfigMap()) } + if err := feast.reconcileServices(); err != nil { + return err + } + + if err := feast.createServiceAccount(); err != nil { + return err + } + if err := feast.createDeployment(); err != nil { + return err + } + if err := feast.createOrDeleteHPA(); err != nil { + return err + } + if err := feast.applyOrDeletePDB(); err != nil { + return err + } + if err := feast.deployClient(); err != nil { + return err + } + if err := feast.deployNamespaceRegistry(); err != nil { + return err + } + if err := feast.deployCronJob(); err != nil { + return err + } + + return nil +} + +// reconcileServices validates persistence and deploys or removes each feast +// service type based on the applied spec. 
+func (feast *FeastServices) reconcileServices() error { services := feast.Handler.FeatureStore.Status.Applied.Services + if feast.isOfflineStore() { - err := feast.validateOfflineStorePersistence(services.OfflineStore.Persistence) - if err != nil { + if err := feast.validateOfflineStorePersistence(services.OfflineStore.Persistence); err != nil { return err } - - if err = feast.deployFeastServiceByType(OfflineFeastType); err != nil { + if err := feast.deployFeastServiceByType(OfflineFeastType); err != nil { return err } } else { @@ -88,12 +119,10 @@ func (feast *FeastServices) Deploy() error { } if feast.isOnlineStore() { - err := feast.validateOnlineStorePersistence(services.OnlineStore.Persistence) - if err != nil { + if err := feast.validateOnlineStorePersistence(services.OnlineStore.Persistence); err != nil { return err } - - if err = feast.deployFeastServiceByType(OnlineFeastType); err != nil { + if err := feast.deployFeastServiceByType(OnlineFeastType); err != nil { return err } } else { @@ -103,12 +132,10 @@ func (feast *FeastServices) Deploy() error { } if feast.isLocalRegistry() { - err := feast.validateRegistryPersistence(services.Registry.Local.Persistence) - if err != nil { + if err := feast.validateRegistryPersistence(services.Registry.Local.Persistence); err != nil { return err } - - if err = feast.deployFeastServiceByType(RegistryFeastType); err != nil { + if err := feast.deployFeastServiceByType(RegistryFeastType); err != nil { return err } } else { @@ -116,11 +143,12 @@ func (feast *FeastServices) Deploy() error { return err } } + if feast.isUiServer() { - if err = feast.deployFeastServiceByType(UIFeastType); err != nil { + if err := feast.deployFeastServiceByType(UIFeastType); err != nil { return err } - if err = feast.createRoute(UIFeastType); err != nil { + if err := feast.createRoute(UIFeastType); err != nil { return err } } else { @@ -132,22 +160,6 @@ func (feast *FeastServices) Deploy() error { } } - if err := feast.createServiceAccount(); 
err != nil { - return err - } - if err := feast.createDeployment(); err != nil { - return err - } - if err := feast.deployClient(); err != nil { - return err - } - if err := feast.deployNamespaceRegistry(); err != nil { - return err - } - if err := feast.deployCronJob(); err != nil { - return err - } - return nil } @@ -338,6 +350,8 @@ func (feast *FeastServices) createDeployment() error { logger.Info("Successfully reconciled", "Deployment", deploy.Name, "operation", op) } + feast.updateScalingStatus(deploy) + return nil } @@ -381,7 +395,14 @@ func (feast *FeastServices) createPVC(pvcCreate *feastdevv1.PvcCreate, feastType func (feast *FeastServices) setDeployment(deploy *appsv1.Deployment) error { cr := feast.Handler.FeatureStore + + // Determine replica count: + // - spec.replicas is set on the Deployment (defaults to 1) + // - When HPA is configured, replicas is left unset so the HPA controller manages it replicas := deploy.Spec.Replicas + if desired := feast.getDesiredReplicas(); desired != nil { + replicas = desired + } deploy.Labels = feast.getLabels() deploy.Spec = appsv1.DeploymentSpec{ @@ -413,6 +434,8 @@ func (feast *FeastServices) setPod(podSpec *corev1.PodSpec) error { feast.mountEmptyDirVolumes(podSpec) feast.mountUserDefinedVolumes(podSpec) feast.applyNodeSelector(podSpec) + feast.applyTopologySpread(podSpec) + feast.applyAffinity(podSpec) return nil } @@ -635,6 +658,11 @@ func (feast *FeastServices) getDeploymentStrategy() appsv1.DeploymentStrategy { if feast.Handler.FeatureStore.Status.Applied.Services.DeploymentStrategy != nil { return *feast.Handler.FeatureStore.Status.Applied.Services.DeploymentStrategy } + if isScalingEnabled(feast.Handler.FeatureStore) { + return appsv1.DeploymentStrategy{ + Type: appsv1.RollingUpdateDeploymentStrategyType, + } + } return appsv1.DeploymentStrategy{ Type: appsv1.RecreateDeploymentStrategyType, } @@ -897,6 +925,54 @@ func (feast *FeastServices) applyNodeSelector(podSpec *corev1.PodSpec) { podSpec.NodeSelector = 
finalNodeSelector } +func (feast *FeastServices) applyTopologySpread(podSpec *corev1.PodSpec) { + cr := feast.Handler.FeatureStore + services := cr.Status.Applied.Services + + // User-provided explicit constraints take precedence (including empty array to disable) + if services != nil && services.TopologySpreadConstraints != nil { + podSpec.TopologySpreadConstraints = services.TopologySpreadConstraints + return + } + + if !isScalingEnabled(cr) { + return + } + + podSpec.TopologySpreadConstraints = []corev1.TopologySpreadConstraint{{ + MaxSkew: 1, + TopologyKey: "topology.kubernetes.io/zone", + WhenUnsatisfiable: corev1.ScheduleAnyway, + LabelSelector: metav1.SetAsLabelSelector(feast.getLabels()), + }} +} + +func (feast *FeastServices) applyAffinity(podSpec *corev1.PodSpec) { + cr := feast.Handler.FeatureStore + services := cr.Status.Applied.Services + + if services != nil && services.Affinity != nil { + podSpec.Affinity = services.Affinity + return + } + + if !isScalingEnabled(cr) { + return + } + + podSpec.Affinity = &corev1.Affinity{ + PodAntiAffinity: &corev1.PodAntiAffinity{ + PreferredDuringSchedulingIgnoredDuringExecution: []corev1.WeightedPodAffinityTerm{{ + Weight: 100, + PodAffinityTerm: corev1.PodAffinityTerm{ + TopologyKey: "kubernetes.io/hostname", + LabelSelector: metav1.SetAsLabelSelector(feast.getLabels()), + }, + }}, + }, + } +} + // mergeNodeSelectors merges existing and operator node selectors // Existing selectors are preserved, operator selectors can override existing keys func (feast *FeastServices) mergeNodeSelectors(existing, operator map[string]string) map[string]string { diff --git a/infra/feast-operator/internal/controller/services/services_test.go b/infra/feast-operator/internal/controller/services/services_test.go index b8863e10a74..d53caa3e25f 100644 --- a/infra/feast-operator/internal/controller/services/services_test.go +++ b/infra/feast-operator/internal/controller/services/services_test.go @@ -27,12 +27,9 @@ import ( metav1 
"k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" "k8s.io/apimachinery/pkg/util/intstr" + "k8s.io/utils/ptr" ) -func ptr[T any](v T) *T { - return &v -} - func (feast *FeastServices) refreshFeatureStore(ctx context.Context, key types.NamespacedName) { fs := &feastdevv1.FeatureStore{} Expect(k8sClient.Get(ctx, key, fs)).To(Succeed()) @@ -54,8 +51,8 @@ var _ = Describe("Registry Service", func() { ) var setFeatureStoreServerConfig = func(grpcEnabled, restEnabled bool) { - featureStore.Spec.Services.Registry.Local.Server.GRPC = ptr(grpcEnabled) - featureStore.Spec.Services.Registry.Local.Server.RestAPI = ptr(restEnabled) + featureStore.Spec.Services.Registry.Local.Server.GRPC = ptr.To(grpcEnabled) + featureStore.Spec.Services.Registry.Local.Server.RestAPI = ptr.To(restEnabled) Expect(k8sClient.Update(ctx, featureStore)).To(Succeed()) Expect(feast.ApplyDefaults()).To(Succeed()) applySpecToStatus(featureStore) @@ -83,12 +80,12 @@ var _ = Describe("Registry Service", func() { ServerConfigs: feastdevv1.ServerConfigs{ ContainerConfigs: feastdevv1.ContainerConfigs{ DefaultCtrConfigs: feastdevv1.DefaultCtrConfigs{ - Image: ptr("test-image"), + Image: ptr.To("test-image"), }, }, }, - GRPC: ptr(true), - RestAPI: ptr(false), + GRPC: ptr.To(true), + RestAPI: ptr.To(false), }, }, }, @@ -248,7 +245,7 @@ var _ = Describe("Registry Service", func() { Server: &feastdevv1.ServerConfigs{ ContainerConfigs: feastdevv1.ContainerConfigs{ DefaultCtrConfigs: feastdevv1.DefaultCtrConfigs{ - Image: ptr("test-image"), + Image: ptr.To("test-image"), }, OptionalCtrConfigs: feastdevv1.OptionalCtrConfigs{ NodeSelector: &onlineNodeSelector, @@ -284,7 +281,7 @@ var _ = Describe("Registry Service", func() { featureStore.Spec.Services.UI = &feastdevv1.ServerConfigs{ ContainerConfigs: feastdevv1.ContainerConfigs{ DefaultCtrConfigs: feastdevv1.DefaultCtrConfigs{ - Image: ptr("test-image"), + Image: ptr.To("test-image"), }, OptionalCtrConfigs: feastdevv1.OptionalCtrConfigs{ 
NodeSelector: &uiNodeSelector, @@ -332,7 +329,7 @@ var _ = Describe("Registry Service", func() { Server: &feastdevv1.ServerConfigs{ ContainerConfigs: feastdevv1.ContainerConfigs{ DefaultCtrConfigs: feastdevv1.DefaultCtrConfigs{ - Image: ptr("test-image"), + Image: ptr.To("test-image"), }, OptionalCtrConfigs: feastdevv1.OptionalCtrConfigs{ NodeSelector: &onlineNodeSelector, @@ -349,7 +346,7 @@ var _ = Describe("Registry Service", func() { featureStore.Spec.Services.UI = &feastdevv1.ServerConfigs{ ContainerConfigs: feastdevv1.ContainerConfigs{ DefaultCtrConfigs: feastdevv1.DefaultCtrConfigs{ - Image: ptr("test-image"), + Image: ptr.To("test-image"), }, OptionalCtrConfigs: feastdevv1.OptionalCtrConfigs{ NodeSelector: &uiNodeSelector, @@ -377,7 +374,7 @@ var _ = Describe("Registry Service", func() { It("should enable metrics on the online service when configured", func() { featureStore.Spec.Services.OnlineStore = &feastdevv1.OnlineStore{ - Server: &feastdevv1.ServerConfigs{Metrics: ptr(true)}, + Server: &feastdevv1.ServerConfigs{Metrics: ptr.To(true)}, } Expect(k8sClient.Update(ctx, featureStore)).To(Succeed()) @@ -451,7 +448,7 @@ var _ = Describe("Registry Service", func() { Server: &feastdevv1.ServerConfigs{ ContainerConfigs: feastdevv1.ContainerConfigs{ DefaultCtrConfigs: feastdevv1.DefaultCtrConfigs{ - Image: ptr("test-image"), + Image: ptr.To("test-image"), }, }, WorkerConfigs: &feastdevv1.WorkerConfigs{ @@ -503,7 +500,7 @@ var _ = Describe("Registry Service", func() { Server: &feastdevv1.ServerConfigs{ ContainerConfigs: feastdevv1.ContainerConfigs{ DefaultCtrConfigs: feastdevv1.DefaultCtrConfigs{ - Image: ptr("test-image"), + Image: ptr.To("test-image"), }, }, WorkerConfigs: &feastdevv1.WorkerConfigs{ @@ -545,7 +542,7 @@ var _ = Describe("Registry Service", func() { Server: &feastdevv1.ServerConfigs{ ContainerConfigs: feastdevv1.ContainerConfigs{ DefaultCtrConfigs: feastdevv1.DefaultCtrConfigs{ - Image: ptr("test-image"), + Image: ptr.To("test-image"), }, }, // 
WorkerConfigs is not set (nil) diff --git a/infra/feast-operator/internal/controller/services/util.go b/infra/feast-operator/internal/controller/services/util.go index 33d750251e9..9ce1ecd749a 100644 --- a/infra/feast-operator/internal/controller/services/util.go +++ b/infra/feast-operator/internal/controller/services/util.go @@ -493,6 +493,16 @@ func getVolumeMountByType(feastType FeastServiceType, featureStore *feastdevv1.F return nil } +// isScalingEnabled returns true when the user has configured horizontal scaling +// with either static replicas > 1 or HPA autoscaling. +func isScalingEnabled(featureStore *feastdevv1.FeatureStore) bool { + if featureStore.Status.Applied.Replicas != nil && *featureStore.Status.Applied.Replicas > 1 { + return true + } + services := featureStore.Status.Applied.Services + return services != nil && services.Scaling != nil && services.Scaling.Autoscaling != nil +} + func boolPtr(value bool) *bool { return &value } diff --git a/infra/feast-operator/test/data-source-types/data-source-types.py b/infra/feast-operator/test/data-source-types/data-source-types.py index be7d70e5ede..ccda6665286 100644 --- a/infra/feast-operator/test/data-source-types/data-source-types.py +++ b/infra/feast-operator/test/data-source-types/data-source-types.py @@ -1,13 +1,20 @@ import os -from feast.repo_config import REGISTRY_CLASS_FOR_TYPE, OFFLINE_STORE_CLASS_FOR_TYPE, ONLINE_STORE_CLASS_FOR_TYPE, LEGACY_ONLINE_STORE_CLASS_FOR_TYPE +from feast.repo_config import ( + REGISTRY_CLASS_FOR_TYPE, + OFFLINE_STORE_CLASS_FOR_TYPE, + ONLINE_STORE_CLASS_FOR_TYPE, + LEGACY_ONLINE_STORE_CLASS_FOR_TYPE, +) + def save_in_script_directory(filename: str, typedict: dict[str, str]): script_dir = os.path.dirname(os.path.abspath(__file__)) file_path = os.path.join(script_dir, filename) - - with open(file_path, 'w') as file: + + with open(file_path, "w") as file: for k in typedict.keys(): - file.write(k+"\n") + file.write(k + "\n") + for legacyType in 
LEGACY_ONLINE_STORE_CLASS_FOR_TYPE.keys(): if legacyType in ONLINE_STORE_CLASS_FOR_TYPE: diff --git a/infra/feast-operator/test/utils/test_util.go b/infra/feast-operator/test/utils/test_util.go index 7b5f0f8d6a0..15cd558ea16 100644 --- a/infra/feast-operator/test/utils/test_util.go +++ b/infra/feast-operator/test/utils/test_util.go @@ -409,6 +409,10 @@ func DeployOperatorFromCode(testDir string, skipBuilds bool) { _, err = Run(cmd, testDir) ExpectWithOffset(1, err).NotTo(HaveOccurred()) + By("deleting existing controller-manager deployment to allow selector changes on upgrade") + cmd = exec.Command("kubectl", "delete", "deployment", ControllerDeploymentName, "-n", FeastControllerNamespace, "--ignore-not-found=true") + _, _ = Run(cmd, testDir) + By("deploying the controller-manager") cmd = exec.Command("make", "deploy", fmt.Sprintf("IMG=%s", projectimage), fmt.Sprintf("FS_IMG=%s", feastLocalImage)) _, err = Run(cmd, testDir) diff --git a/infra/scripts/cleanup_ci.py b/infra/scripts/cleanup_ci.py index 262adf1e3eb..347e8ce1d39 100644 --- a/infra/scripts/cleanup_ci.py +++ b/infra/scripts/cleanup_ci.py @@ -1,4 +1,3 @@ -from time import sleep import boto3 from tqdm import tqdm from google.cloud import bigtable @@ -25,7 +24,7 @@ def cleanup_bigtable_ci(): client = bigtable.Client(project="kf-feast", admin=True) instance = client.instance("feast-integration-tests") if instance.exists(): - print(f"Deleted Bigtable CI instance") + print("Deleted Bigtable CI instance") instance.delete() location_id = "us-central1-f" @@ -38,7 +37,7 @@ def cleanup_bigtable_ci(): default_storage_type=storage_type, ) instance.create(clusters=[cluster]) - print(f"Created new Bigtable CI tables") + print("Created new Bigtable CI tables") def main() -> None: diff --git a/infra/scripts/feature_server_docker_smoke.py b/infra/scripts/feature_server_docker_smoke.py new file mode 100644 index 00000000000..5eac394bccd --- /dev/null +++ b/infra/scripts/feature_server_docker_smoke.py @@ -0,0 +1,38 @@ +from 
types import SimpleNamespace + +import uvicorn + +from feast.feature_server import get_app + + +class _FakeRegistry: + def proto(self): + return object() + + +class _FakeStore: + def __init__(self): + self.config = SimpleNamespace() + self.registry = _FakeRegistry() + self._provider = SimpleNamespace( + async_supported=SimpleNamespace( + online=SimpleNamespace(read=False, write=False) + ) + ) + + def _get_provider(self): + return self._provider + + async def initialize(self): + return None + + def refresh_registry(self): + return None + + async def close(self): + return None + + +if __name__ == "__main__": + app = get_app(_FakeStore()) + uvicorn.run(app, host="0.0.0.0", port=6566, log_level="error") diff --git a/infra/scripts/feature_store_client_configs_gen.py b/infra/scripts/feature_store_client_configs_gen.py index 124877a18ef..71a2d5d4d53 100644 --- a/infra/scripts/feature_store_client_configs_gen.py +++ b/infra/scripts/feature_store_client_configs_gen.py @@ -13,19 +13,18 @@ import os import yaml from pathlib import Path -from typing import Dict, List, Any, Optional +from typing import Dict, Any from feast import FeatureStore -from feast.repo_config import RepoConfig def create_feature_store_yaml(config_content: str, config_name: str) -> str: """ Create a feature_store.yaml file from config content. 
- + Args: config_content: YAML content as string config_name: Name identifier for the config (used for filename) - + Returns: Path to the created YAML file """ @@ -34,31 +33,33 @@ def create_feature_store_yaml(config_content: str, config_name: str) -> str: config_dict = yaml.safe_load(config_content) except yaml.YAMLError as e: raise ValueError(f"Failed to parse YAML content for {config_name}: {e}") - + # Ensure required fields are present - required_fields = ['project', 'registry', 'provider'] + required_fields = ["project", "registry", "provider"] for field in required_fields: if field not in config_dict: - raise ValueError(f"Failed to create config {config_name}: missing required field '{field}'") - + raise ValueError( + f"Failed to create config {config_name}: missing required field '{field}'" + ) + # Create filename filename = f"feature_store_{config_name}.yaml" filepath = Path(filename) - + # Write the YAML file - with open(filepath, 'w') as f: + with open(filepath, "w") as f: yaml.dump(config_dict, f, default_flow_style=False, sort_keys=False) - + return str(filepath) def create_feature_store_object(yaml_file_path: str) -> FeatureStore: """ Create a FeatureStore object from a YAML file. - + Args: yaml_file_path: Path to the feature_store.yaml file - + Returns: FeatureStore object """ @@ -67,26 +68,28 @@ def create_feature_store_object(yaml_file_path: str) -> FeatureStore: fs = FeatureStore(fs_yaml_file=Path(yaml_file_path)) return fs except Exception as e: - raise RuntimeError(f"Failed to create FeatureStore object from {yaml_file_path}: {e}") + raise RuntimeError( + f"Failed to create FeatureStore object from {yaml_file_path}: {e}" + ) def process_client_configs(client_configs: Dict[str, str]) -> Dict[str, Dict[str, Any]]: """ Process multiple client config YAML contents and create feature stores. 
- + Args: client_configs: Dictionary mapping config names to YAML content strings - + Returns: Dictionary with results for each config """ results = {} created_yamls = [] feature_stores = {} - + print("Creating feature store YAMLs and objects...") print("=" * 50) - + for config_name, config_content in client_configs.items(): try: print(f"\nProcessing config: {config_name}") @@ -95,7 +98,7 @@ def process_client_configs(client_configs: Dict[str, str]) -> Dict[str, Dict[str yaml_path = create_feature_store_yaml(config_content, config_name) created_yamls.append(yaml_path) print(f"✓ Created YAML file: {yaml_path}") - + # Create FeatureStore object fs = create_feature_store_object(yaml_path) fs_var_name = f"fs_{fs.project}" @@ -104,30 +107,30 @@ def process_client_configs(client_configs: Dict[str, str]) -> Dict[str, Dict[str print(f"✓ Created FeatureStore object: {fs_var_name}") results[config_name] = { - 'yaml_path': yaml_path, - 'feature_store': fs_var_name, - 'project_name': fs.project, - 'success': True, - 'error': None + "yaml_path": yaml_path, + "feature_store": fs_var_name, + "project_name": fs.project, + "success": True, + "error": None, } - + except Exception as e: print(f"✗ Failed to process config {config_name}: {e}") results[config_name] = { - 'yaml_path': None, - 'feature_store': None, - 'project_name': None, - 'success': False, - 'error': str(e) + "yaml_path": None, + "feature_store": None, + "project_name": None, + "success": False, + "error": str(e), } - + return results def print_summary(results: Dict[str, Dict[str, Any]]) -> None: """ Print summary of all operations. 
- + Args: results: Results dictionary from process_client_configs """ @@ -135,13 +138,15 @@ def print_summary(results: Dict[str, Dict[str, Any]]) -> None: print("SUMMARY") print("=" * 50) - successful_configs = [name for name, result in results.items() if result['success']] - failed_configs = [name for name, result in results.items() if not result['success']] + successful_configs = [name for name, result in results.items() if result["success"]] + failed_configs = [name for name, result in results.items() if not result["success"]] print(f"\n\n✓✓Feature Store YAML files have been created in: {os.getcwd()}") print(f"\n✓ Successfully processed {len(successful_configs)} config(s):") for config_name in successful_configs: result = results[config_name] - print(f" - {config_name}: {result['yaml_path']} (Project: {result['project_name']})") + print( + f" - {config_name}: {result['yaml_path']} (Project: {result['project_name']})" + ) if failed_configs: print(f"\n✗ Failed to process {len(failed_configs)} config(s):") @@ -149,15 +154,21 @@ def print_summary(results: Dict[str, Dict[str, Any]]) -> None: result = results[config_name] print(f" - {config_name}: {result['error']}") - print(f"\n\n✓✓ Feature Store Object(s) details:") + print("\n\n✓✓ Feature Store Object(s) details:") for config_name in successful_configs: result = results[config_name] - print(f"> Object Name - {result['feature_store']} ; project name - {result['project_name']} ; yaml path - {result['yaml_path']}") + print( + f"> Object Name - {result['feature_store']} ; project name - {result['project_name']} ; yaml path - {result['yaml_path']}" + ) print("\n") print("=" * 25, "Usage:", "=" * 25) - print("You can now use feature store object(s) to access the feature store resources and functions!") - print("\n// Note: Replace object_name with the actual object name from the list above.") + print( + "You can now use feature store object(s) to access the feature store resources and functions!" 
+ ) + print( + "\n// Note: Replace object_name with the actual object name from the list above." + ) print("object_name.list_features()\nobject_name.get_historical_features()") print("=" * 58) @@ -179,7 +190,6 @@ def main(): type: file entity_key_serialization_version: 3 """, - "aws_redshift": """ project: aws_feature_store registry: data/registry.db @@ -197,7 +207,6 @@ def main(): iam_role: arn:aws:iam::123456789012:role/RedshiftRole entity_key_serialization_version: 3 """, - "gcp_bigquery": """ project: gcp_feature_store registry: data/registry.db @@ -210,18 +219,20 @@ def main(): project_id: my-gcp-project dataset_id: my_dataset entity_key_serialization_version: 3 -""" +""", } print("=" * 50) - print("This script will create feature store YAMLs and objects from client configs.") + print( + "This script will create feature store YAMLs and objects from client configs." + ) print(f"Processing {len(example_configs)} example configurations...") - + # Process the configs results = process_client_configs(example_configs) - + # Print summary print_summary(results) if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/infra/scripts/generate_protos.py b/infra/scripts/generate_protos.py index 2ce7e29e12e..c030a9347df 100644 --- a/infra/scripts/generate_protos.py +++ b/infra/scripts/generate_protos.py @@ -9,6 +9,7 @@ PROTO_SUBDIRS = ["core", "registry", "serving", "types", "storage"] PYTHON_CODE_PREFIX = "sdk/python" + class BuildPythonProtosCommand: description = "Builds the proto files into Python files." 
user_options = [ @@ -76,5 +77,6 @@ def run(self): with open(path, "w") as file: file.write(filedata) + if __name__ == "__main__": - BuildPythonProtosCommand().run() \ No newline at end of file + BuildPythonProtosCommand().run() diff --git a/infra/scripts/release/bump_file_versions.py b/infra/scripts/release/bump_file_versions.py index c913e9f43f7..f13b9e257df 100644 --- a/infra/scripts/release/bump_file_versions.py +++ b/infra/scripts/release/bump_file_versions.py @@ -4,7 +4,9 @@ import pathlib import sys -USAGE = f"Usage: python {sys.argv[0]} [--help] | current_semver_version new_semver_version]" +USAGE = ( + f"Usage: python {sys.argv[0]} [--help] | current_semver_version new_semver_version]" +) VERSIONS_TO_BUMP = 27 @@ -17,18 +19,24 @@ def main() -> None: new_version = args[1].strip() if current_version == new_version: - raise SystemExit(f"Current and new versions are the same: {current_version} == {new_version}") + raise SystemExit( + f"Current and new versions are the same: {current_version} == {new_version}" + ) # Validate that the input arguments are semver versions if not is_semantic_version(current_version): - raise SystemExit(f"Current version is not a valid semantic version: {current_version}") + raise SystemExit( + f"Current version is not a valid semantic version: {current_version}" + ) if not is_semantic_version(new_version): raise SystemExit(f"New version is not a valid semantic version: {new_version}") # Get git repo root directory repo_root = pathlib.Path(__file__).resolve().parent.parent.parent.parent - path_to_file_list = repo_root.joinpath("infra", "scripts", "release", "files_to_bump.txt") + path_to_file_list = repo_root.joinpath( + "infra", "scripts", "release", "files_to_bump.txt" + ) # Get files to bump versions within with open(path_to_file_list, "r") as f: @@ -47,11 +55,15 @@ def main() -> None: file_contents = f.readlines() for line in lines: # note we validate the version above already - current_parsed_version = 
_get_semantic_version(file_contents[int(line) - 1]) - file_contents[int(line) - 1] = file_contents[int(line) - 1].replace(current_parsed_version, new_version) + current_parsed_version = _get_semantic_version( + file_contents[int(line) - 1] + ) + file_contents[int(line) - 1] = file_contents[int(line) - 1].replace( + current_parsed_version, new_version + ) with open(repo_root.joinpath(file_path), "w") as f: - f.write(''.join(file_contents)) + f.write("".join(file_contents)) updated_count += 1 print(f"Updated {updated_count} files with new version {new_version}") @@ -70,22 +82,27 @@ def is_semantic_version(version: str) -> bool: def validate_files_to_bump(current_version, files_to_bump, repo_root): for file in files_to_bump: components = file.split(" ") - assert len(components) > 1, f"Entry {file} should have a file name, and a list of line numbers with versions" + assert len(components) > 1, ( + f"Entry {file} should have a file name, and a list of line numbers with versions" + ) file_path = components[0] lines = components[1:] with open(repo_root.joinpath(file_path), "r") as f: file_contents = f.readlines() for line in lines: new_version = _get_semantic_version(file_contents[int(line) - 1]) - current_major_minor_version = '.'.join(current_version.split(".")[0:1]) - assert current_version in new_version or current_major_minor_version in new_version, ( + current_major_minor_version = ".".join(current_version.split(".")[0:1]) + assert ( + current_version in new_version + or current_major_minor_version in new_version + ), ( f"File `{file_path}` line `{line}` didn't contain version {current_version}. 
" f"Contents: {file_contents[int(line) - 1]}" ) def _get_semantic_version(input_string: str) -> str: - semver_pattern = r'\bv?(\d+\.\d+\.\d+)\b' + semver_pattern = r"\bv?(\d+\.\d+\.\d+)\b" match = re.search(semver_pattern, input_string) return match.group(1) diff --git a/infra/scripts/release/unset_prerelease.py b/infra/scripts/release/unset_prerelease.py index 4a2ba131970..c474c0908de 100644 --- a/infra/scripts/release/unset_prerelease.py +++ b/infra/scripts/release/unset_prerelease.py @@ -4,7 +4,9 @@ import sys import requests -USAGE = f"Usage: python {sys.argv[0]} [--help] | version_being_released (e.g., v0.19.1)]" +USAGE = ( + f"Usage: python {sys.argv[0]} [--help] | version_being_released (e.g., v0.19.1)]" +) def get_prerelease_status(version_being_released, token): @@ -13,12 +15,12 @@ def get_prerelease_status(version_being_released, token): headers = { "Content-Type": "application/json", "Accept": "application/vnd.github.v3+json", - "Authorization": f"Bearer {token}" + "Authorization": f"Bearer {token}", } response = requests.request("GET", url, headers=headers) response_json = response.json() - return bool(response_json['prerelease']), response_json['id'] + return bool(response_json["prerelease"]), response_json["id"] def set_prerelease_status(release_id, status, token): @@ -29,7 +31,7 @@ def set_prerelease_status(release_id, status, token): headers = { "Content-Type": "application/json", "Accept": "application/vnd.github.v3+json", - "Authorization": f"Bearer {token}" + "Authorization": f"Bearer {token}", } requests.request("PATCH", url, json=payload, headers=headers) @@ -44,7 +46,7 @@ def main() -> None: print(f"Disabling prerelease status for {version_being_released}") - token = os.getenv('GITHUB_TOKEN', default=None) + token = os.getenv("GITHUB_TOKEN", default=None) if token is None: raise OSError("GITHUB_TOKEN environmental variable is not set") @@ -61,9 +63,14 @@ def main() -> None: if is_prerelease: import warnings - warnings.warn(f"Failed to unset 
prerelease status for {version_being_released} release id {release_id}") + + warnings.warn( + f"Failed to unset prerelease status for {version_being_released} release id {release_id}" + ) else: - print(f"Successfully unset prerelease status for {version_being_released} release id {release_id}") + print( + f"Successfully unset prerelease status for {version_being_released} release id {release_id}" + ) if __name__ == "__main__": diff --git a/infra/website/docs/blog/entity-less-historical-features-retrieval.md b/infra/website/docs/blog/entity-less-historical-features-retrieval.md new file mode 100644 index 00000000000..ec907fe26de --- /dev/null +++ b/infra/website/docs/blog/entity-less-historical-features-retrieval.md @@ -0,0 +1,142 @@ +--- +title: Historical Features Without Entity IDs +description: Feast now supports entity-less historical feature retrieval by datetime range—making it easier to train models when you don't have or need entity IDs. +date: 2026-02-19 +authors: ["Jitendra Yejare", "Aniket Paluskar"] +--- + +# Historical Features Without Entity IDs + +For years, Historical Feature Retrieval in Feast required an **entity dataframe**; you had to supply the exact entity keys (e.g. `driver_id`, `user_id`) and timestamps you wanted to join features for. That works well when you have a fixed set of entities—for example, a list of users you want to score or a training set already keyed by IDs. But in many AI and ML projects, you **don’t have** entity IDs upfront, or the problem **doesn’t naturally have** entities at all. In those cases, being forced to create and pass an entity dataframe was a real friction. + +We’re excited to share that Feast now supports **entity-less historical feature retrieval** based on a **datetime range**. 
You can pull all historical feature data for a time window without specifying any entity dataframe—addressing the long-standing [GitHub issue #1611](https://github.com/feast-dev/feast/issues/1611) and simplifying training and tuning workflows where entity IDs are optional or irrelevant. + +# The Problem: Entity IDs Aren’t Always There + +Classic use of a feature store looks like this: + +```python +entity_df = pd.DataFrame({ + "driver_id": [1001, 1002, 1003], + "event_timestamp": [datetime(2025, 1, 1), datetime(2025, 1, 2), datetime(2025, 1, 3)] +}) + +training_df = store.get_historical_features( + entity_df=entity_df, + features=["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate"], +).to_df() +``` + +You already have a set of entities and timestamps; Feast joins features onto them. But in many real-world setups: + +- **Time-series and sequence models** – You care about a time range and all data in it, not a pre-defined list of entity IDs. Building an entity dataframe means first querying “who existed in this period?” and then passing those IDs in, which is extra plumbing and can be expensive. +- **Global or population-level models** – You’re modeling aggregates, trends, or system-wide behavior. There may be no natural “entity” to key on, or you want “all entities” in a window. +- **Exploratory analysis and research** – You want “all features in the last 7 days” to experiment with models or features. Requiring entity IDs forces you to materialize a full entity list before you can even call the feature store. +- **Cold start and new users** – When training models that will later serve new or rarely-seen entities (e.g. recommendation cold start, fraud detection for new accounts), you often don’t have a fixed, known entity set at training time. You want to train on “all entities that had activity in this window” so the model generalizes from the full population. 
+- **Batch training on full history** – You want to train on all available history in a date range. Generating and passing a huge entity dataframe is cumbersome and sometimes not even possible if the entity set is large or dynamic. + +In all these cases, **passing entity IDs is either not possible, not required, or unnecessarily complex**. Making the entity dataframe optional and supporting retrieval by datetime range makes the feature store much easier to use in production and in research. + +# What’s New: Optional Entity DataFrame and Date Range + +Feast now supports entity-less historical feature retrieval by datetime range for several offline stores; you can pull historical feature data for a time window without specifying any entity dataframe. You specify a time window (and optionally rely on TTL for defaults), and the offline store returns all feature data in that range. + +- **Entity dataframe is optional** – You can omit `entity_df` and use `start_date` and/or `end_date` instead. +- **Point-in-time correctness** – Retrieval still uses point-in-time semantics (e.g. LATERAL joins in the offline stores) so you get correct historical values. +- **Smart defaults** – If you don’t pass `start_date`, the range can be derived from the feature view TTL; if you don’t pass `end_date`, it defaults to “now”. +- **Backward compatible** – The existing entity-based API is unchanged. When you have an entity dataframe (e.g. for ODFV or targeted batch scoring), you keep using it with entity dataframe as before. + +Entity-less retrieval is supported across multiple offline stores: **Postgres** (where it was first introduced), **Dask**, **Spark**, and **Ray**—with Spark and Ray being especially important for large-scale and distributed training workloads. More offline stores will be supported in the future based on user demand and priority. + +# How to Use It + +You can use any of these patterns depending on how much you want to specify. + +**1. 
Explicit date range (data between start and end):** + +```python +training_df = store.get_historical_features( + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + ], + start_date=datetime(2025, 7, 1, 1, 0, 0), + end_date=datetime(2025, 7, 2, 3, 30, 0), +).to_df() +``` + +**2. Only end date (Start date is end date minus TTL):** + +```python +training_df = store.get_historical_features( + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + ], + end_date=datetime(2025, 7, 2, 3, 30, 0), +).to_df() +``` + +**3. Only start date (data from start date to now):** + +```python +training_df = store.get_historical_features( + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + ], + start_date=datetime(2025, 7, 1, 1, 0, 0), +).to_df() +``` + +**4. No dates (data from TTL window to now):** + +```python +training_df = store.get_historical_features( + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + ], +).to_df() +``` + +**5. Entity-based retrieval still works (e.g. for ODFV or when you need data for specific entities):** + +```python +entity_df = pd.DataFrame.from_dict({ + "driver_id": [1005], + "event_timestamp": [datetime(2025, 6, 29, 23, 0, 0)], +}) + +training_df = store.get_historical_features( + entity_df=entity_df, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "transformed_conv_rate:conv_rate_plus_val1", + ], +).to_df() +``` + +Feast does not support mixing entity-based and range-based retrieval in one call; either pass `entity_df` or pass `start_date`/`end_date`, not both. + +# Try It Out + +To experiment with entity-less retrieval: + +1. 
Use a feature store backed by an offline store that supports it: **Postgres**, **Dask**, **Spark**, or **Ray** (see [Feast docs](https://docs.feast.dev/) for setup). Spark and Ray are a great fit for distributed and large-scale training. +2. Call `get_historical_features()` with only `features` and, as needed, `start_date` and `end_date` (or rely on TTL and default end time). +3. For full details, tests, and behavior, see [PR #5527](https://github.com/feast-dev/feast/pull/5527) and the updated [FAQ on historical retrieval without an entity dataframe](https://docs.feast.dev/getting-started/faq#how-do-i-run-get_historical_features-without-providing-an-entity-dataframe). + +# Why This Makes Production Easier + +- **Simpler training pipelines** – No need to pre-query “all entity IDs in range” or maintain a separate entity table just to call the feature store. You specify a time window and get features. +- **Fewer moving parts** – Less code, fewer joins, and fewer failure modes when you don’t need entity-based slicing. +- **Better fit for time-range-centric workflows** – Time-series, global models, and exploratory jobs can all use the same API without artificial entity construction. +- **Same point-in-time guarantees** – Entity-less retrieval still respects feature view TTL and temporal correctness, so your training data remains valid. + +We’re excited to see how the community uses entity-less historical retrieval. If you have feedback or want to help bring this to more offline stores, join the discussion on [GitHub issue #1611](https://github.com/feast-dev/feast/issues/1611) or [Feast Slack](https://slack.feast.dev). 
diff --git a/infra/website/docs/blog/feast-mlflow-kubeflow.md b/infra/website/docs/blog/feast-mlflow-kubeflow.md new file mode 100644 index 00000000000..3e15cbda26a --- /dev/null +++ b/infra/website/docs/blog/feast-mlflow-kubeflow.md @@ -0,0 +1,534 @@ +--- +title: "Feast + MLflow + Kubeflow: A Unified AI/ML Lifecycle" +description: Learn how to use Feast, MLflow, and Kubeflow to power your AI/ML Lifecycle +date: 2026-03-09 +authors: ["Francisco Javier Arceo", "Nikhil Kathole"] +--- + +
+ Feast, MLflow, and Kubeflow +
+ +# Feast + MLflow + Kubeflow: A Unified AI/ML Lifecycle + +## Overview + +Building production-ready machine learning systems requires more than a great model. It demands a clear separation of concerns between feature management, experiment tracking, and workflow orchestration. This post explores how [Feast](https://feast.dev/), [MLflow](https://mlflow.org/), and [Kubeflow](https://www.kubeflow.org/) work together as complementary open-source tools to cover the full AI/ML lifecycle — from raw data to serving predictions at scale. + +These tools are not competitors. Each one occupies a distinct role: + +* **Feast** manages feature data: defining, transforming, storing, and serving features consistently for both training and inference. It also tracks feature lineage and supports data quality monitoring. +* **MLflow** tracks experiments: logging runs, metrics, parameters, artifacts, and candidate models. +* **Kubeflow** orchestrates ML workflows: running distributed training, hyperparameter sweeps, and end-to-end pipelines on Kubernetes. + +Together they form a complete, open-source foundation for operationalizing ML. + +### How are Feast, MLflow, and Kubeflow different? + +If you are new to these tools, it is natural to wonder whether they overlap. The short answer is: they solve fundamentally different problems in the ML lifecycle. The table below makes this concrete. 
+ +| Capability | Feast | MLflow | Kubeflow | +|---|---|---|---| +| Define and version feature schemas | Yes | No | No | +| Store and serve features (online + offline) | Yes | No | No | +| Point-in-time-correct feature retrieval | Yes | No | No | +| Feature transformations (training = serving) | Yes | No | No | +| Feature lineage and registry | Yes | No | No | +| Data quality validation on features | Yes | No | No | +| Log experiments, metrics, and parameters | No | Yes | No | +| Track and compare model versions | No | Yes | No | +| Model registry (promote / alias models) | No | Yes | No | +| Orchestrate multi-step ML pipelines | No | No | Yes (Pipelines) | +| Distributed training on Kubernetes | No | No | Yes (Training Operator) | +| Hyperparameter tuning | No | Yes (with Optuna, etc.) | Yes (Katib) | + +A few common misconceptions: + +* **"Can't MLflow track my features?"** — MLflow can *log* feature names as parameters, but it does not *define*, *store*, *transform*, or *serve* features. It has no concept of an offline store, an online store, or point-in-time joins. Feast fills that gap. +* **"Doesn't Kubeflow handle everything end-to-end?"** — Kubeflow orchestrates *workflows* — it tells your pipeline steps when to run and where. But it does not provide feature storage, experiment tracking, or model versioning. You still need Feast for the data layer and MLflow for the experiment layer. +* **"Why do I need Feast if I just read from a database?"** — Without Feast, teams typically duplicate feature logic between training scripts and serving endpoints, which leads to training–serving skew. Feast guarantees the same transformation and retrieval logic is used in both contexts. + +With that context, the rest of this post walks through each tool in detail and shows how they hand off to one another in practice. 
+ +This topic has been explored by the community before — the post ["Feast with AI: Feed Your MLflow Models with Feature Store"](https://blog.qooba.net/2021/05/22/feast-with-ai-feed-your-mlflow-models-with-feature-store/) by [@qooba](https://github.com/qooba) is an excellent early look at combining Feast and MLflow. For a hands-on, end-to-end example of Feast and Kubeflow working together, see ["From Raw Data to Model Serving: A Blueprint for the AI/ML Lifecycle with Kubeflow and Feast"](/blog/kubeflow-fraud-detection-e2e) by Helber Belmiro. This post builds on that prior work and brings all three tools — Feast, MLflow, and Kubeflow — into a single narrative. + +--- + +## The AI/ML Lifecycle + +A typical production ML project passes through several stages: + +1. **Feature development** — raw data is transformed into meaningful signals. +2. **Model development** — data scientists experiment with algorithms, features, and hyperparameters. +3. **Model evaluation & selection** — the best experiment is chosen for promotion. +4. **Production deployment** — the selected model is deployed and features are served in real time. +5. **Monitoring & iteration** — model and feature health is observed; the cycle repeats. + +The diagram below maps each stage to its primary tool: + +``` +Raw Data ──► Feast (Feature Engineering & Storage) + │ + ▼ + MLflow + Kubeflow Pipelines (Experiment Tracking & Orchestration) + │ + ▼ + Kubeflow Training Operator (Distributed Training) + │ + ▼ + MLflow Model Registry (Candidate Models) + │ + ▼ + Feast Online Store + Feature Server (Production Serving) +``` + +--- + +## Feast: Feature Development, Iteration, and Serving + +Feast is the data layer of the ML stack. Its core job is to make the same feature logic available both at training time (via the offline store) and at inference time (via the online store), eliminating training–serving skew. 
Beyond storage and serving, Feast also handles **feature transformations**, **feature lineage tracking**, and **data quality monitoring** — capabilities that are essential when moving features from experimentation to production. + +### Defining features + +A Feast `FeatureView` declares how a feature is computed and where it is stored: + +```python +from datetime import timedelta +from feast import FeatureView, Field, FileSource +from feast.types import Float64, Int64 + +driver_stats = FeatureView( + name="driver_hourly_stats", + entities=["driver_id"], + ttl=timedelta(days=7), + schema=[ + Field(name="conv_rate", dtype=Float64), + Field(name="acc_rate", dtype=Float64), + Field(name="avg_daily_trips", dtype=Int64), + ], + source=FileSource(path="data/driver_stats.parquet", timestamp_field="event_timestamp"), +) +``` + +After running `feast apply`, these features are registered in the Feast registry and visible in the Feast UI: + +
+ Feast UI showing the Feature List for the Driver Ranking project with conv_rate, acc_rate, and avg_daily_trips features +

The Feast UI showing three registered features in the driver_hourly_stats feature view — conv_rate, acc_rate, and avg_daily_trips — each linked to the Driver Ranking project.

+
+ +### Retrieving historical features for training + +Point-in-time-correct historical features are retrieved from the offline store. This prevents future data from leaking into training examples: + +```python +from feast import FeatureStore +import pandas as pd + +store = FeatureStore(repo_path=".") + +entity_df = pd.DataFrame({ + "driver_id": [1001, 1002, 1003], + "event_timestamp": pd.to_datetime(["2025-01-01", "2025-01-02", "2025-01-03"]), +}) + +training_df = store.get_historical_features( + entity_df=entity_df, + features=["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate"], +).to_df() +``` + +### Materializing features for real-time serving + +When a model is promoted to production, features are materialized to the online store so they can be retrieved with single-digit millisecond latency: + +```python +from datetime import datetime + +store.materialize_incremental(end_date=datetime.utcnow()) +``` + +Serving then becomes a single call: + +```python +features = store.get_online_features( + features=["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate"], + entity_rows=[{"driver_id": 1001}], +).to_dict() +``` + +### Feature transformations + +Feast supports on-demand feature transformations, allowing you to define transformation logic that runs at retrieval time — both offline (for training) and online (for serving) — using the same Python function. This eliminates the need to duplicate transformation code across training and inference pipelines: + +```python +from feast.on_demand_feature_view import on_demand_feature_view +from feast import Field +from feast.types import Float64 + +@on_demand_feature_view( + sources=[driver_stats], + schema=[Field(name="conv_acc_ratio", dtype=Float64)], +) +def driver_ratios(inputs): + df = inputs.copy() + df["conv_acc_ratio"] = df["conv_rate"] / (df["acc_rate"] + 1e-6) + return df[["conv_acc_ratio"]] +``` + +Here `driver_stats` is the `FeatureView` object defined earlier. 
The `sources` parameter accepts `FeatureView`, `RequestSource`, or `FeatureViewProjection` objects. + +Using `on_demand_feature_view` ensures that the same transformation logic is applied whether features are retrieved from the offline store for training or from the online store at inference time, preventing transformation skew. + +### Feature lineage + +The Feast feature registry acts as the single source of truth for feature definitions. Every `FeatureView`, data source, entity, and transformation is registered and versioned in the registry. This gives you full lineage from raw data source through transformation logic to the feature values consumed by a model — a critical requirement for debugging, auditing, and regulatory compliance. + +You can inspect the lineage of any feature programmatically: + +```python +from feast import FeatureStore + +store = FeatureStore(repo_path=".") +feature_view = store.get_feature_view("driver_hourly_stats") +print(feature_view.source) # upstream data source +print(feature_view.schema) # feature schema +``` + +For cross-system lineage that extends beyond Feast into upstream data pipelines and downstream model training, Feast also supports native [OpenLineage integration](/blog/feast-openlineage-integration). Enabling it in your `feature_store.yaml` automatically emits lineage events on `feast apply` and `feast materialize`, letting you visualize the full data flow in tools like [Marquez](https://marquezproject.ai/). + +### Data quality monitoring + +Feast integrates with data quality frameworks like [Great Expectations](https://greatexpectations.io/) to detect feature drift, stale data, and schema violations before they silently degrade model performance. The workflow centers on Feast's `SavedDataset` and `ValidationReference` APIs: you save a profiled dataset during training, define a profiler using Great Expectations, and then validate new feature data against that reference in subsequent runs. 
+ +```python +from feast import FeatureStore +from feast.dqm.profilers.ge_profiler import ge_profiler +from great_expectations.core import ExpectationSuite +from great_expectations.dataset import PandasDataset + +store = FeatureStore(repo_path=".") + +@ge_profiler +def my_profiler(dataset: PandasDataset) -> ExpectationSuite: + dataset.expect_column_values_to_be_between("conv_rate", min_value=0, max_value=1) + dataset.expect_column_values_to_be_between("acc_rate", min_value=0, max_value=1) + return dataset.get_expectation_suite() + +reference_job = store.get_historical_features( + entity_df=entity_df, + features=["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate"], +) + +dataset = store.create_saved_dataset( + from_=reference_job, + name="driver_stats_validation", + storage=storage, +) + +reference = dataset.as_reference(name="driver_stats_ref", profiler=my_profiler) + +new_job = store.get_historical_features( + entity_df=new_entity_df, + features=["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate"], +) +new_job.to_df(validation_reference=reference) +``` + +If validation fails, Feast raises a `ValidationFailed` exception with details on which expectations were violated. Monitoring feature distributions over time — and comparing them to the distributions seen during training — allows you to detect training–serving skew early, before it causes silent model degradation in production. + +### Feast Feature Registry vs. MLflow Model Registry + +A common question is how the **Feast feature registry** relates to the **MLflow model registry**. They are different things that serve complementary roles. 
+ +| | Feast Feature Registry | MLflow Model Registry | +|---|---|---| +| **What it tracks** | Feature definitions, schemas, data sources, entity relationships | Model artifacts, versions, model aliases (e.g., "production", "staging") | +| **Primary users** | Feature engineers, data scientists, ML platform teams | Data scientists, ML engineers | +| **Relationship to production** | Defines what data is available for training *and* serving | Tracks which model version is promoted to production | +| **Scope** | All features ever defined — a superset of what any one model uses | All model versions, including candidates that never ship | + +This distinction is important: the **Feast registry is a superset of the MLflow model registry** from a feature perspective. During experimentation, a data scientist may train models using dozens of features. Once a model is selected for production, only a *subset* of those features will be needed for online serving. Feast's registry records all available features; the specific features required by the production model are a narrower slice that corresponds to what MLflow logged as model inputs. + +--- + +## MLflow: Experiment Tracking, Hyperparameter Optimization, and Feature Selection + +MLflow is the experimentation layer. 
It answers the question: *"Which combination of features, model architecture, and hyperparameters produced the best result?"* + +### Logging a training run with Feast features + +Because Feast provides a consistent `get_historical_features` API, it is straightforward to combine it with MLflow tracking: + +```python +import mlflow +import mlflow.sklearn +from feast import FeatureStore +from sklearn.linear_model import LogisticRegression +from sklearn.model_selection import train_test_split +from sklearn.metrics import roc_auc_score +import pandas as pd + +store = FeatureStore(repo_path=".") + +entity_df = pd.read_parquet("data/driver_labels.parquet") +feature_df = store.get_historical_features( + entity_df=entity_df, + features=["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate", "driver_hourly_stats:avg_daily_trips"], +).to_df() + +X = feature_df[["conv_rate", "acc_rate", "avg_daily_trips"]] +y = feature_df["label"] +X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2) + +with mlflow.start_run(): + mlflow.log_param("features", ["conv_rate", "acc_rate", "avg_daily_trips"]) + mlflow.log_param("model_type", "LogisticRegression") + + model = LogisticRegression() + model.fit(X_train, y_train) + + auc = roc_auc_score(y_test, model.predict_proba(X_test)[:, 1]) + mlflow.log_metric("auc", auc) + + # Log the feature store snapshot alongside the model + mlflow.sklearn.log_model(model, artifact_path="model") + mlflow.log_artifact("feature_store.yaml", artifact_path="feast_config") +``` + +Logging `feature_store.yaml` together with the model artifact ensures that, at any future point, the exact set of Feast feature definitions used for that run can be reproduced. + +### Feature selection with MLflow + +One of the most powerful uses of Feast + MLflow together is systematic **feature selection**: training models with different subsets of Feast features and using MLflow's comparison UI to identify which combination produces the best results. 
This is far more rigorous than manually trying feature sets in a notebook, and the results are often counterintuitive. + +The pattern is to loop over candidate feature subsets, retrieve each one from Feast, train a model, and log the metrics and feature names as a separate MLflow run: + +```python +import mlflow +import mlflow.sklearn +from feast import FeatureStore +from sklearn.linear_model import LogisticRegression +from sklearn.model_selection import train_test_split +from sklearn.metrics import roc_auc_score +import pandas as pd + +store = FeatureStore(repo_path=".") + +entity_df = pd.read_parquet("data/driver_labels.parquet") + +# Define candidate feature subsets to compare +feature_subsets = { + "acc_rate_only": ["driver_hourly_stats:acc_rate"], + "acc_rate_trips": ["driver_hourly_stats:acc_rate", "driver_hourly_stats:avg_daily_trips"], + "all_features": ["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips"], +} + +with mlflow.start_run(run_name="feast_feature_selection"): + for subset_name, feature_refs in feature_subsets.items(): + feature_df = store.get_historical_features( + entity_df=entity_df, + features=feature_refs, + ).to_df() + + feature_cols = [ref.split(":")[1] for ref in feature_refs] + X = feature_df[feature_cols] + y = feature_df["label"] + X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42) + + with mlflow.start_run(run_name=subset_name, nested=True): + mlflow.log_param("features", feature_cols) + model = LogisticRegression() + model.fit(X_train, y_train) + auc = roc_auc_score(y_test, model.predict_proba(X_test)[:, 1]) + mlflow.log_metric("auc", auc) + mlflow.sklearn.log_model(model, artifact_path="model") +``` + +After running, the MLflow UI lets you sort all nested runs by AUC and immediately see which feature subset wins. 
The results can be surprising — for example, with synthetic driver data a single feature may outperform the full feature set: + +| Features | Model | AUC | +|---|---|---| +| `acc_rate` only | LogisticRegression | 0.645 | +| `acc_rate` + `avg_daily_trips` | LogisticRegression | 0.613 | +| All 3 features | LogisticRegression | 0.570 | + +
+ MLflow UI showing a LogisticRegression run with all three Feast features, metrics, parameters, and feature tags +

The MLflow UI showing a LogisticRegression run trained with all three Feast features (conv_rate, acc_rate, avg_daily_trips). The run logs five metrics (accuracy, AUC, precision, recall, F1), the feature list as a parameter, and the demo tags each included feature (e.g., feature_conv_rate: included) for easy filtering.

+
+ +This is exactly the kind of insight MLflow's comparison interface is built for. You can sort runs by AUC, filter by which features were included, and visualize performance across experiments. Note that with synthetic data these numbers won't carry real meaning — the point is that the tooling makes it trivial to *observe* these differences systematically and let data drive the feature selection decision. + +
+ MLflow comparison view showing three experiment runs side by side with different feature combinations +

MLflow's comparison view showing three runs side by side with different feature subsets. The "Show diff only" toggle highlights how the features parameter varies across runs, making it easy to identify which combination of Feast features produces the best results.

+
+ +
+ MLflow metric charts showing accuracy, AUC, F1, precision, and recall grouped by num_features across three feature subsets +

MLflow's metric charts view visualizing accuracy, AUC, F1, precision, and recall across all feature selection runs, grouped by num_features. This chart makes it easy to spot how model performance changes as more Feast features are included.

+
+ +Once you have identified the winning subset, the Feast registry ensures that only those features need to be materialized into the online store for production serving. + +### Hyperparameter sweeps + +MLflow integrates natively with hyperparameter optimization libraries. For example, using MLflow with [Optuna](https://optuna.org/): + +```python +import optuna +import mlflow + +def objective(trial): + C = trial.suggest_float("C", 1e-3, 10.0, log=True) + max_iter = trial.suggest_int("max_iter", 100, 1000) + + with mlflow.start_run(nested=True): + mlflow.log_params({"C": C, "max_iter": max_iter}) + model = LogisticRegression(C=C, max_iter=max_iter) + model.fit(X_train, y_train) + auc = roc_auc_score(y_test, model.predict_proba(X_test)[:, 1]) + mlflow.log_metric("auc", auc) + return auc + +with mlflow.start_run(run_name="optuna_sweep"): + study = optuna.create_study(direction="maximize") + study.optimize(objective, n_trials=20) +``` + +All trials, their parameters, and their metrics are automatically captured in the MLflow tracking server, making it easy to compare runs and select the best candidate for promotion. + +--- + +## Kubeflow: Orchestrating the ML Workflow + +[Kubeflow](https://www.kubeflow.org/) brings Kubernetes-native orchestration to the ML lifecycle. Its two most relevant components here are: + +* **Kubeflow Pipelines** — a platform for building and deploying repeatable ML workflows as DAGs. +* **Kubeflow Training Operator** — manages distributed training jobs (PyTorchJob, TFJob, etc.) on Kubernetes. 
+ +### Kubeflow Pipelines integrating Feast and MLflow + +Kubeflow Pipelines lets you compose the entire workflow — feature retrieval, training, evaluation, and registration — as a single, reproducible pipeline: + +```python +from kfp import dsl + +@dsl.component(base_image="python:3.10-slim", packages_to_install=["feast", "mlflow", "scikit-learn", "pandas", "pyarrow"]) +def retrieve_features(entity_df_path: str, feature_store_repo: str, output_path: dsl.Output[dsl.Dataset]): + from feast import FeatureStore + import pandas as pd + + store = FeatureStore(repo_path=feature_store_repo) + entity_df = pd.read_parquet(entity_df_path) + df = store.get_historical_features( + entity_df=entity_df, + features=["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate"], + ).to_df() + df.to_parquet(output_path.path) + + +@dsl.component(base_image="python:3.10-slim", packages_to_install=["feast", "mlflow", "scikit-learn", "pandas"]) +def train_and_log(features_path: dsl.Input[dsl.Dataset], mlflow_tracking_uri: str, model_name: str): + import mlflow, mlflow.sklearn + import pandas as pd + from sklearn.linear_model import LogisticRegression + from sklearn.model_selection import train_test_split + from sklearn.metrics import roc_auc_score + + mlflow.set_tracking_uri(mlflow_tracking_uri) + df = pd.read_parquet(features_path.path) + X = df[["conv_rate", "acc_rate"]] + y = df["label"] + X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2) + + with mlflow.start_run(): + model = LogisticRegression() + model.fit(X_train, y_train) + auc = roc_auc_score(y_test, model.predict_proba(X_test)[:, 1]) + mlflow.log_metric("auc", auc) + mlflow.sklearn.log_model(model, artifact_path="model", registered_model_name=model_name) + + +@dsl.pipeline(name="feast-mlflow-training-pipeline") +def training_pipeline(entity_df_path: str, feature_store_repo: str, mlflow_tracking_uri: str, model_name: str): + fetch_step = retrieve_features(entity_df_path=entity_df_path, 
feature_store_repo=feature_store_repo) + train_and_log( + features_path=fetch_step.outputs["output_path"], + mlflow_tracking_uri=mlflow_tracking_uri, + model_name=model_name, + ) +``` + +Each step runs in its own container, making the pipeline portable and reproducible across environments. + +### Distributed training with the Kubeflow Training Operator + +For large-scale models, the [Kubeflow Training Operator](https://www.kubeflow.org/docs/components/training/) schedules distributed training jobs. Feast integrates naturally because it provides a consistent Python API for retrieving feature data — whether training is running on a single machine or across a cluster of workers. Each worker calls `get_historical_features` for its shard of the entity dataframe, and the resulting features are passed directly into the training loop. + +--- + +## Bringing It All Together: Feast → MLflow → Production + +The following end-to-end workflow shows how the three tools hand off to one another: + +### Step 1: Register and materialize features with Feast + +```bash +feast apply # Register feature definitions in the registry +feast materialize-incremental $(date -u +"%Y-%m-%dT%H:%M:%S") +``` + +### Step 2: Run experiments and select the best model with MLflow + +Feature engineers iterate on feature definitions in Feast while data scientists run experiments in MLflow, logging which features were used for each run. The best run is registered in the MLflow Model Registry: + +```python +mlflow.register_model(f"runs:/{best_run_id}/model", "driver_conversion_model") +``` + +### Step 3: Promote to production + +Promoting the model in MLflow signals that it is ready for deployment. At this point, you also know the exact subset of Feast features required by that model — these are the features to materialize and serve. 
+ +```python +client = mlflow.tracking.MlflowClient() +client.set_registered_model_alias( + name="driver_conversion_model", alias="production", version="3" +) +``` + +### Step 4: Serve features and predictions + +The deployed model reads its inputs from the Feast online store at inference time: + +```python +from feast import FeatureStore + +store = FeatureStore(repo_path=".") + +def predict(driver_id: int) -> float: + features = store.get_online_features( + features=["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate"], + entity_rows=[{"driver_id": driver_id}], + ).to_dict() + return model.predict_proba([[features["conv_rate"][0], features["acc_rate"][0]]])[0][1] +``` + +--- + +## Summary + +| Concern | Tool | +|---|---| +| Feature definition, storage, and serving | **Feast** | +| Experiment tracking, metric logging, and model versioning | **MLflow** | +| Workflow orchestration and distributed training | **Kubeflow Pipelines + Training Operator** | +| Hyperparameter optimization | **MLflow + Katib (Kubeflow)** | +| Production feature serving | **Feast Online Store / Feature Server** | + +Feast, MLflow, and Kubeflow are each best-in-class at what they do, and they are designed to work alongside one another rather than replace each other. By combining them you get a fully open-source, end-to-end ML platform that handles everything from raw data to live predictions — without lock-in. + +If you are new to Feast, check out the [Feast documentation](https://docs.feast.dev/) and [GitHub](https://github.com/feast-dev/feast) to get started. Join the community on [Slack](http://slack.feastsite.wpenginepowered.com/) and let us know how you are using Feast in your ML stack! 
diff --git a/infra/website/docs/blog/scaling-feast-feature-server.md b/infra/website/docs/blog/scaling-feast-feature-server.md new file mode 100644 index 00000000000..994811ea0ac --- /dev/null +++ b/infra/website/docs/blog/scaling-feast-feature-server.md @@ -0,0 +1,317 @@ +--- +title: Feature Server High-Availability and Auto-Scaling on Kubernetes +description: The Feast Operator now supports horizontal scaling with static replicas, HPA autoscaling, KEDA, and high-availability features including PodDisruptionBudgets and topology spread constraints. +date: 2026-03-02 +authors: ["Nikhil Kathole", "Antonin Stefanutti"] +--- + +# Feature Server High-Availability and Auto-Scaling on Kubernetes + +As ML systems move from experimentation to production, the feature server often becomes a critical bottleneck. A single-replica deployment might handle development traffic, but production workloads — real-time inference, batch scoring, multiple consuming services — demand the ability to scale horizontally. + +We're excited to announce that the Feast Operator now supports **horizontal scaling** for the FeatureStore deployment, giving teams the tools to run Feast at production scale on Kubernetes. + +# The Problem: Single-Replica Limitations + +By default, the Feast Operator deploys a single-replica Deployment. This works well for getting started, but presents challenges as traffic grows: + +- **Single point of failure** — one pod crash means downtime for all feature consumers +- **Throughput ceiling** — a single pod can only handle so many concurrent requests +- **No elasticity** — traffic spikes (model retraining, batch inference) can overwhelm the server +- **Rolling updates cause downtime** — the default `Recreate` strategy tears down the old pod before starting a new one + +Teams have been manually patching Deployments or creating external HPAs, but this bypasses the operator's reconciliation loop and can lead to configuration drift. 
+ +# The Solution: Native Scaling Support + +The Feast Operator now supports three scaling modes. The FeatureStore CRD implements the Kubernetes **scale sub-resource**, which means you can also scale with `kubectl scale featurestore/my-feast --replicas=3`. + +## 1. Static Replicas + +The simplest approach — set a fixed number of replicas via `spec.replicas`: + +```yaml +apiVersion: feast.dev/v1 +kind: FeatureStore +metadata: + name: production-feast +spec: + feastProject: my_project + replicas: 3 + services: + onlineStore: + persistence: + store: + type: postgres + secretRef: + name: feast-data-stores + registry: + local: + persistence: + store: + type: sql + secretRef: + name: feast-data-stores +``` + +This gives you high availability and load distribution with a predictable resource footprint. The operator automatically switches the Deployment strategy to `RollingUpdate`, ensuring zero-downtime deployments. + +## 2. HPA Autoscaling + +For workloads with variable traffic patterns, the operator can create and manage a `HorizontalPodAutoscaler` directly. HPA autoscaling is configured under `services.scaling.autoscaling` and is mutually exclusive with `spec.replicas > 1`: + +```yaml +apiVersion: feast.dev/v1 +kind: FeatureStore +metadata: + name: autoscaled-feast +spec: + feastProject: my_project + services: + scaling: + autoscaling: + minReplicas: 2 + maxReplicas: 10 + metrics: + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: 70 + podDisruptionBudgets: + maxUnavailable: 1 + onlineStore: + persistence: + store: + type: postgres + secretRef: + name: feast-data-stores + server: + resources: + requests: + cpu: 200m + memory: 256Mi + limits: + cpu: "1" + memory: 1Gi + registry: + local: + persistence: + store: + type: sql + secretRef: + name: feast-data-stores +``` + +The operator creates the HPA as an owned resource — it's automatically cleaned up if you remove the autoscaling configuration or delete the FeatureStore CR. 
If no custom metrics are specified, the operator defaults to **80% CPU utilization**. The operator also auto-injects soft pod anti-affinity (node-level) and topology spread constraints (zone-level) to improve resilience — see the [High Availability](#high-availability) section for details. + +## 3. External Autoscalers (KEDA, Custom HPAs) + +For teams using [KEDA](https://keda.sh) or other external autoscalers, KEDA should target the FeatureStore's scale sub-resource directly (since it implements the Kubernetes scale API). This is the recommended approach because the operator manages the Deployment's replica count from `spec.replicas` — targeting the Deployment directly would conflict with the operator's reconciliation. + +When using KEDA, do **not** set `spec.replicas > 1` or `services.scaling.autoscaling` — KEDA manages the replica count through the scale sub-resource. Configure the FeatureStore with DB-backed persistence, then create a KEDA `ScaledObject` targeting the FeatureStore resource: + +```yaml +apiVersion: feast.dev/v1 +kind: FeatureStore +metadata: + name: keda-feast +spec: + feastProject: my_project + services: + onlineStore: + persistence: + store: + type: postgres + secretRef: + name: feast-data-stores + registry: + local: + persistence: + store: + type: sql + secretRef: + name: feast-data-stores +--- +apiVersion: keda.sh/v1alpha1 +kind: ScaledObject +metadata: + name: feast-scaledobject +spec: + scaleTargetRef: + apiVersion: feast.dev/v1 + kind: FeatureStore + name: keda-feast + minReplicaCount: 1 + maxReplicaCount: 10 + triggers: + - type: prometheus + metadata: + serverAddress: http://prometheus.monitoring.svc:9090 + metricName: http_requests_total + query: sum(rate(http_requests_total{service="feast"}[2m])) + threshold: "100" +``` + +When KEDA scales up `spec.replicas` via the scale sub-resource, the CRD's CEL validation rules automatically ensure DB-backed persistence is configured. 
The operator also automatically switches the deployment strategy to `RollingUpdate` when `replicas > 1`. This gives you the full power of KEDA's 50+ event-driven triggers with built-in safety checks. + +# High Availability + +Scaling to multiple replicas is only half the story — you also need to ensure pods are spread across failure domains and protected during disruptions. The operator includes two HA features that activate when scaling is enabled: + +## Pod Anti-Affinity + +When scaling is enabled, the operator **automatically injects** a soft pod anti-affinity rule that prefers spreading pods across different nodes: + +```yaml +affinity: + podAntiAffinity: + preferredDuringSchedulingIgnoredDuringExecution: + - weight: 100 + podAffinityTerm: + topologyKey: kubernetes.io/hostname + labelSelector: + matchLabels: + feast.dev/name: my-feast +``` + +This means the scheduler will *try* to place each replica on a separate node, but won't prevent scheduling if nodes are constrained. You can override this with your own `affinity` configuration in the CR, or set it to an explicit value to customize the behavior (e.g. `requiredDuringSchedulingIgnoredDuringExecution` for strict anti-affinity). + +## Topology Spread Constraints + +When `replicas > 1` or autoscaling is configured, the operator **automatically injects** a soft zone-spread constraint: + +```yaml +topologySpreadConstraints: +- maxSkew: 1 + topologyKey: topology.kubernetes.io/zone + whenUnsatisfiable: ScheduleAnyway + labelSelector: + matchLabels: + feast.dev/name: my-feast +``` + +This distributes pods across availability zones on a best-effort basis. If your cluster has 3 zones and 3 replicas, each zone gets one pod. If zones are unavailable, pods still get scheduled rather than staying pending. + +You can override this with explicit constraints (e.g. strict `DoNotSchedule`) or disable it entirely by setting `topologySpreadConstraints: []`. 
+
+## PodDisruptionBudgets
+
+For protection during voluntary disruptions (node drains, cluster upgrades), you can configure a PDB:
+
+```yaml
+spec:
+  replicas: 3
+  services:
+    podDisruptionBudgets:
+      maxUnavailable: 1
+    onlineStore:
+      # ...
+```
+
+The PDB requires explicit configuration — it's not auto-injected because a misconfigured PDB can block node drains. The operator enforces that exactly one of `minAvailable` or `maxUnavailable` is set via CEL validation. The PDB is only created when scaling is enabled and is automatically cleaned up when scaling is disabled.
+
+# Safety First: Persistence Validation
+
+Not all persistence backends are safe for multi-replica deployments. File-based stores like SQLite, DuckDB, and local `registry.db` use single-writer file locks that don't work across pods.
+
+The operator enforces this at admission time via CEL validation rules on the CRD — if you try to create or update a FeatureStore with scaling and file-based persistence, the API server rejects the request immediately:
+
+```
+Scaling requires DB-backed persistence for the online store.
+Configure services.onlineStore.persistence.store when using replicas > 1 or autoscaling.
+```
+
+This validation applies to all enabled services (online store, offline store, and registry) and is enforced for both direct CR updates and `kubectl scale` commands via the scale sub-resource. Object-store-backed registry paths (`s3://` and `gs://`) are treated as safe since they support concurrent readers.
+
+| Persistence Type | Compatible with Scaling? |
+|---|---|
+| PostgreSQL / MySQL | Yes |
+| Redis | Yes |
+| Cassandra | Yes |
+| SQL-based Registry | Yes |
+| S3/GCS Registry | Yes |
+| SQLite | No |
+| DuckDB | No |
+| Local `registry.db` | No |
+
+# How It Works Under the Hood
+
+The implementation adds four key behaviors to the operator's reconciliation loop:
+
+**1. 
Replica management** — The operator sets the Deployment's replica count from `spec.replicas` (which defaults to 1). When HPA is configured, the operator leaves the `replicas` field unset so the HPA controller can manage it. External autoscalers like KEDA can update the replica count through the FeatureStore's scale sub-resource, which updates `spec.replicas` and triggers the operator to reconcile. + +**2. Deployment strategy** — The operator automatically switches from `Recreate` (the default for single-replica) to `RollingUpdate` when scaling is enabled. This prevents the "kill-all-pods-then-start-new-ones" behavior that would cause downtime during scaling events. Users can always override this with an explicit `deploymentStrategy` in the CR. + +**3. HPA lifecycle** — The operator creates, updates, and deletes the HPA as an owned resource tied to the FeatureStore CR. Removing the `autoscaling` configuration automatically cleans up the HPA. + +**4. HA features** — The operator auto-injects soft topology spread constraints across zones when scaling is enabled, and manages PodDisruptionBudgets as owned resources when explicitly configured. + +The scaling status is reported back on the FeatureStore status: + +```yaml +status: + scalingStatus: + currentReplicas: 3 + desiredReplicas: 3 +``` + +# What About TLS, CronJobs, and Services? + +Scaling is designed to work seamlessly with existing operator features: + +- **TLS** — Each pod mounts the same TLS secret. OpenShift service-serving certificates work automatically since they're bound to the Service, not individual pods. +- **Kubernetes Services** — The Service's label selector already matches all pods in the Deployment, so load balancing across replicas works out of the box. +- **CronJobs** — The `feast apply` and `feast materialize-incremental` CronJobs use `kubectl exec` into a single pod. 
Since DB-backed persistence is required for scaling, all pods share the same state — it doesn't matter which pod the CronJob runs against. + +# Getting Started + +**1. Ensure DB-backed persistence** for all enabled services (online store, offline store, registry). + +**2. Configure scaling** in your FeatureStore CR — use either static replicas or HPA (mutually exclusive). Optionally add a PDB for disruption protection: + +```yaml +spec: + replicas: 3 # static replicas (top-level) + services: + podDisruptionBudgets: # optional: protect against disruptions + maxUnavailable: 1 + # -- OR -- + # services: + # scaling: + # autoscaling: # HPA + # minReplicas: 2 + # maxReplicas: 10 + # podDisruptionBudgets: + # maxUnavailable: 1 +``` + +**3. Apply** the updated CR: + +```bash +kubectl apply -f my-featurestore.yaml +``` + +**4. Verify** the scaling: + +```bash +# Check pods +kubectl get pods -l app.kubernetes.io/managed-by=feast + +# Check HPA (if using autoscaling) +kubectl get hpa + +# Check FeatureStore status +kubectl get feast -o yaml +``` + +# Learn More + +- [Scaling Feast documentation](https://docs.feast.dev/how-to-guides/scaling-feast) +- [Feast on Kubernetes guide](https://docs.feast.dev/how-to-guides/feast-on-kubernetes) +- [FeatureStore CRD API reference](https://github.com/feast-dev/feast/blob/master/infra/feast-operator/docs/api/markdown/ref.md) +- [Sample CRs for static scaling and HPA](https://github.com/feast-dev/feast/tree/master/infra/feast-operator/config/samples) +- Join the [Feast Slack](https://slack.feast.dev) to share feedback and ask questions + +We're excited to see teams scale their feature serving infrastructure with confidence. Try it out and let us know how it works for your use case! 
diff --git a/infra/website/public/images/blog/feast-features-ui.png b/infra/website/public/images/blog/feast-features-ui.png new file mode 100644 index 00000000000..2b728e4c43a Binary files /dev/null and b/infra/website/public/images/blog/feast-features-ui.png differ diff --git a/infra/website/public/images/blog/feast-mlflow-kubeflow.png b/infra/website/public/images/blog/feast-mlflow-kubeflow.png new file mode 100644 index 00000000000..c4e92228189 Binary files /dev/null and b/infra/website/public/images/blog/feast-mlflow-kubeflow.png differ diff --git a/infra/website/public/images/blog/mlflow-feast-feature-selection-metrics.png b/infra/website/public/images/blog/mlflow-feast-feature-selection-metrics.png new file mode 100644 index 00000000000..b6ad16c3346 Binary files /dev/null and b/infra/website/public/images/blog/mlflow-feast-feature-selection-metrics.png differ diff --git a/infra/website/public/images/blog/mlflow-feature-selection-comparison.png b/infra/website/public/images/blog/mlflow-feature-selection-comparison.png new file mode 100644 index 00000000000..4cad26065c6 Binary files /dev/null and b/infra/website/public/images/blog/mlflow-feature-selection-comparison.png differ diff --git a/infra/website/public/images/blog/mlflow-feature-selection-run.png b/infra/website/public/images/blog/mlflow-feature-selection-run.png new file mode 100644 index 00000000000..aabc6055480 Binary files /dev/null and b/infra/website/public/images/blog/mlflow-feature-selection-run.png differ diff --git a/infra/website/public/images/blog/mlflow-feature-selection-ui.png b/infra/website/public/images/blog/mlflow-feature-selection-ui.png new file mode 100644 index 00000000000..a598bbf4151 Binary files /dev/null and b/infra/website/public/images/blog/mlflow-feature-selection-ui.png differ diff --git a/java/pom.xml b/java/pom.xml index f7248902fe1..4ba8b6d211d 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -35,7 +35,7 @@ - 0.60.0 + 0.61.0 https://github.com/feast-dev/feast UTF-8 diff 
--git a/java/serving/src/test/resources/docker-compose/feast10/definitions.py b/java/serving/src/test/resources/docker-compose/feast10/definitions.py index 769ac155452..580605fe149 100644 --- a/java/serving/src/test/resources/docker-compose/feast10/definitions.py +++ b/java/serving/src/test/resources/docker-compose/feast10/definitions.py @@ -9,7 +9,6 @@ from feast.field import Field from feast.on_demand_feature_view import on_demand_feature_view from feast.types import Float32, Float64, Int64 -from feast.value_type import ValueType from feast import FileSource file_path = "driver_stats.parquet" @@ -70,7 +69,8 @@ def transformed_conv_rate(features_df: pd.DataFrame) -> pd.DataFrame: generated_data_source = FileSource( - path="benchmark_data.parquet", timestamp_field="event_timestamp", + path="benchmark_data.parquet", + timestamp_field="event_timestamp", ) entity = Entity(name="entity") @@ -88,5 +88,6 @@ def transformed_conv_rate(features_df: pd.DataFrame) -> pd.DataFrame: benchmark_feature_views.append(fv) benchmark_feature_service = FeatureService( - name=f"benchmark_feature_service", features=benchmark_feature_views, + name="benchmark_feature_service", + features=benchmark_feature_views, ) diff --git a/java/serving/src/test/resources/docker-compose/feast10/materialize.py b/java/serving/src/test/resources/docker-compose/feast10/materialize.py index 404fec27e12..3307805f2a5 100644 --- a/java/serving/src/test/resources/docker-compose/feast10/materialize.py +++ b/java/serving/src/test/resources/docker-compose/feast10/materialize.py @@ -55,7 +55,7 @@ def generate_data(num_rows: int, num_features: int, destination: str) -> pd.Data df.to_parquet(destination) -generate_data(10 ** 3, 250, "benchmark_data.parquet") +generate_data(10**3, 250, "benchmark_data.parquet") fs = FeatureStore(".") diff --git a/java/serving/src/test/resources/docker-compose/feast10/setup_it.py b/java/serving/src/test/resources/docker-compose/feast10/setup_it.py index 61aaa6fec8e..ad1cab07da4 100644 
--- a/java/serving/src/test/resources/docker-compose/feast10/setup_it.py +++ b/java/serving/src/test/resources/docker-compose/feast10/setup_it.py @@ -41,9 +41,7 @@ def setup_data(): # Please read more in Feast RFC-031 # (link https://docs.google.com/document/d/12UuvTQnTTCJhdRgy6h10zSbInNGSyEJkIxpOcgOen1I/edit) # about this benchmark setup - def generate_data( - num_rows, num_features, destination - ): + def generate_data(num_rows, num_features, destination): features = [f"feature_{i}" for i in range(num_features)] columns = ["entity", "event_timestamp"] + features df = pd.DataFrame(0, index=np.arange(num_rows), columns=columns) diff --git a/pixi.lock b/pixi.lock new file mode 100644 index 00000000000..ce31fdd102d --- /dev/null +++ b/pixi.lock @@ -0,0 +1,7369 @@ +version: 6 +environments: + default: + channels: + - url: https://conda.anaconda.org/conda-forge/ + indexes: + - https://pypi.org/simple + options: + pypi-prerelease-mode: if-necessary-or-explicit + packages: + linux-64: + - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/icu-78.2-h33c6efd_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libmpdec-4.0.0-hb03c661_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.51.2-hf4e2dac_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.41.3-h5347b49_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.1-h35e630c_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.14.3-h32b2ec7_101_cp314.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python_abi-3.14-8_cp314.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.3-h853b02a_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h366c992_103.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb78ec9c_6.conda + - pypi: https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b3/72/6102c002b1b4c04f8c5ed870f102f576a5fbd6a41cdb3e49ed339aa95dfe/bigtree-1.3.1-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e5/23/d39ccc4ed76222db31530b0a7d38876fdb7673e23f838e8d8f0ed4651a4f/dask-2026.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/d1/e73b6ad76f0b1fb7f23c35c6d95dbc506a9c8804f43dda8cb5b0fa6331fd/dill-0.3.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d2/d8/09bfa816572a4d83bccd6750df1926f79158b1c36c5f73786e26dbe4ee38/greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/da/73/4ad5b1f6a2e21cf1e85afdaad2b7b1a933985e2f5d679147a1953aaa192c/gunicorn-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/b3/cb/eea88506f191fb552c11787c23f9a405f4c7b0c5799bf73f2249cd4f5228/httptools-0.7.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d1/96/ef04902aad1424fd7299b62d1890e803e6ab4018c3044dca5922319c4b97/librt-0.8.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/db/bc/83e112abc66cd466c6b83f99118035867cecd41802f8d044638aa78a106e/locket-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/ca/e0/78adf4104c425606a9ce33fb351f790c76a6c2314969c4a517d1ffc92196/mmh3-5.2.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/5d/6c/7f237821c9642fb2a04d2f1e88b4295677144ca93285fd76eff3bcba858d/numpy-2.4.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/c2/8b/ecdad52d0b38d4b8f514be603e69ccd5eacf4e7241f972e37e79792212ec/orjson-3.11.7-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/15/b2/0e62f78c0c5ba7e3d2c5945a82456f4fac76c480940f805e0b97fcbc2f65/pandas-2.3.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/71/e7/40fb618334dcdf7c5a316c0e7343c5cd82d3d866edc100d98e29bc945ecd/partd-1.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b5/57/89727baef7578897af5ed166735ceb315819f1c184da8c3441271dbcfde7/protobuf-7.34.0-cp310-abi3-manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b5/70/5d8df3b09e25bce090399cf48e452d25c935ab72dad19406c77f4e828045/psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/36/2e/c0f017c405fcdc252dbccafbe05e36b0d0eb1ea9a958f081e01c6972927f/pyarrow-23.0.1-cp314-cp314-manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ce/81/9a91c0111ce1758c92516a3e44776920b579d9a7c09b2b06b642d4de3f0f/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/f2/5e/327428a034407651a048f5e624361adf3f9fbac9d0fa98e981e9c6ff2f5e/sqlalchemy-2.0.48-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f7/1f/4e5f8770c2cf4faa2c3ed3c19f9d4485ac9db0a6b029a7866921709bdc6c/uvicorn_worker-0.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b5/35/60249e9fd07b32c665192cec7af29e06c7cd96fa1d08b84f012a56a0b38e/uvloop-0.22.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + osx-64: + - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-h500dc9f_9.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.7.4-h991f03e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libffi-3.5.2-hd1f9c09_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/liblzma-5.8.2-h11316ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libmpdec-4.0.0-hf3981d6_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.51.2-hb99441e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-h0622a9a_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.6.1-hb6871ef_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.14.3-h4f44bb5_101_cp314.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python_abi-3.14-8_cp314.conda + - 
conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.3-h68b038d_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h7142dee_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/zstd-1.5.7-h3eecb57_6.conda + - pypi: https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b3/72/6102c002b1b4c04f8c5ed870f102f576a5fbd6a41cdb3e49ed339aa95dfe/bigtree-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl + - pypi: https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/e5/23/d39ccc4ed76222db31530b0a7d38876fdb7673e23f838e8d8f0ed4651a4f/dask-2026.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/d1/e73b6ad76f0b1fb7f23c35c6d95dbc506a9c8804f43dda8cb5b0fa6331fd/dill-0.3.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3f/ae/8bffcbd373b57a5992cd077cbe8858fff39110480a9d50697091faea6f39/greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl + - pypi: https://files.pythonhosted.org/packages/da/73/4ad5b1f6a2e21cf1e85afdaad2b7b1a933985e2f5d679147a1953aaa192c/gunicorn-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/34/50/9d095fcbb6de2d523e027a2f304d4551855c2f46e0b82befd718b8b20056/httptools-0.7.1-cp314-cp314-macosx_10_13_universal2.whl + - pypi: https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c9/6a/907ef6800f7bca71b525a05f1839b21f708c09043b1c6aa77b6b827b3996/librt-0.8.1-cp314-cp314-macosx_10_13_x86_64.whl 
+ - pypi: https://files.pythonhosted.org/packages/db/bc/83e112abc66cd466c6b83f99118035867cecd41802f8d044638aa78a106e/locket-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/70/1f/f87e3d34d83032b4f3f0f528c6d95a98290fcacf019da61343a49dccfd51/mmh3-5.2.0-cp314-cp314-macosx_10_13_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/18/88/b7df6050bf18fdcfb7046286c6535cabbdd2064a3440fca3f069d319c16e/numpy-2.4.2-cp314-cp314-macosx_10_15_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/e9/1e/745565dca749813db9a093c5ebc4bac1a9475c64d54b95654336ac3ed961/orjson-3.11.7-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl + - pypi: https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/04/fd/74903979833db8390b73b3a8a7d30d146d710bd32703724dd9083950386f/pandas-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/71/e7/40fb618334dcdf7c5a316c0e7343c5cd82d3d866edc100d98e29bc945ecd/partd-1.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/13/c4/6322ab5c8f279c4c358bc14eb8aefc0550b97222a39f04eb3c1af7a830fa/protobuf-7.34.0-cp310-abi3-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/e7/36/5ee6e05c9bd427237b11b3937ad82bb8ad2752d72c6969314590dd0c2f6e/psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/ae/b5/d58a241fbe324dbaeb8df07be6af8752c846192d78d2272e551098f74e88/pyarrow-23.0.1-cp314-cp314-macosx_12_0_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/86/81/dad16382ebbd3d0e0328776d8fd7ca94220e4fa0798d1dc5e7da48cb3201/rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/2c/9664130905f03db57961b8980b05cab624afd114bf2be2576628a9f22da4/sqlalchemy-2.0.48-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f7/1f/4e5f8770c2cf4faa2c3ed3c19f9d4485ac9db0a6b029a7866921709bdc6c/uvicorn_worker-0.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0d/f8/a132124dfda0777e489ca86732e85e69afcd1ff7686647000050ba670689/uvloop-0.22.1-cp314-cp314-macosx_10_13_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl + osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_9.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/icu-78.2-hef89b57_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.7.4-hf6b4638_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.5.2-hcf2aa1b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblzma-5.8.2-h8088a28_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libmpdec-4.0.0-h84a0fba_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.51.2-h1ae2325_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h5e97a16_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.6.1-hd24854e_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.14.3-h4c637c5_101_cp314.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/python_abi-3.14-8_cp314.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.3-h46df422_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h010d191_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstd-1.5.7-hbf9d68e_6.conda + - pypi: https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b3/72/6102c002b1b4c04f8c5ed870f102f576a5fbd6a41cdb3e49ed339aa95dfe/bigtree-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl + - pypi: https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e5/23/d39ccc4ed76222db31530b0a7d38876fdb7673e23f838e8d8f0ed4651a4f/dask-2026.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/d1/e73b6ad76f0b1fb7f23c35c6d95dbc506a9c8804f43dda8cb5b0fa6331fd/dill-0.3.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/da/73/4ad5b1f6a2e21cf1e85afdaad2b7b1a933985e2f5d679147a1953aaa192c/gunicorn-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/07/f0/89720dc5139ae54b03f861b5e2c55a37dba9a5da7d51e1e824a1f343627f/httptools-0.7.1-cp314-cp314-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/1b/18/25e991cd5640c9fb0f8d91b18797b29066b792f17bf8493da183bf5caabe/librt-0.8.1-cp314-cp314-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/db/bc/83e112abc66cd466c6b83f99118035867cecd41802f8d044638aa78a106e/locket-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/a6/e2/db849eaed07117086f3452feca8c839d30d38b830ac59fe1ce65af8be5ad/mmh3-5.2.0-cp314-cp314-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/25/7a/1fee4329abc705a469a4afe6e69b1ef7e915117747886327104a8493a955/numpy-2.4.2-cp314-cp314-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/e9/1e/745565dca749813db9a093c5ebc4bac1a9475c64d54b95654336ac3ed961/orjson-3.11.7-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl + - pypi: https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/21/00/266d6b357ad5e6d3ad55093a7e8efc7dd245f5a842b584db9f30b0f0a287/pandas-2.3.3-cp314-cp314-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/71/e7/40fb618334dcdf7c5a316c0e7343c5cd82d3d866edc100d98e29bc945ecd/partd-1.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/13/c4/6322ab5c8f279c4c358bc14eb8aefc0550b97222a39f04eb3c1af7a830fa/protobuf-7.34.0-cp310-abi3-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/80/c4/f5af4c1ca8c1eeb2e92ccca14ce8effdeec651d5ab6053c589b074eda6e1/psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/8d/1b/6da9a89583ce7b23ac611f183ae4843cd3a6cf54f079549b0e8c14031e73/pyarrow-23.0.1-cp314-cp314-macosx_12_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl + - pypi: 
https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2b/60/19f7884db5d5603edf3c6bce35408f45ad3e97e10007df0e17dd57af18f8/rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f7/b3/f437eaa1cf028bb3c927172c7272366393e73ccd104dcf5b6963f4ab5318/sqlalchemy-2.0.48-cp314-cp314-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f7/1f/4e5f8770c2cf4faa2c3ed3c19f9d4485ac9db0a6b029a7866921709bdc6c/uvicorn_worker-0.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/90/cd/b62bdeaa429758aee8de8b00ac0dd26593a9de93d302bff3d21439e9791d/uvloop-0.22.1-cp314-cp314-macosx_10_13_universal2.whl + - pypi: https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl + duckdb-tests: + channels: + - url: https://conda.anaconda.org/conda-forge/ + indexes: + - https://pypi.org/simple + options: + pypi-prerelease-mode: if-necessary-or-explicit + packages: + linux-64: + - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/icu-78.2-h33c6efd_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.51.2-hf4e2dac_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.41.3-h5347b49_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.1-h35e630c_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.10.19-h3c07f61_3_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.3-h853b02a_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h366c992_103.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb78ec9c_6.conda + - pypi: https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/09/52/94108adfdd6e2ddf58be64f959a0b9c7d4ef2fa71086c38356d22dc501ea/argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/c7/80/9f608d13b4b3afcebd1dd13baf9551c95fc424d6390e4b1cfd7b1810cd06/async_property-0.2.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/96/c0/271f3e1e3502a8decb8ee5c680dbed2d8dc2cd504f5e20f7ed491d5f37e1/atpublic-7.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b3/72/6102c002b1b4c04f8c5ed870f102f576a5fbd6a41cdb3e49ed339aa95dfe/bigtree-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/36/3b/60cbd1f8e93aa25d1c669c649b7a655b0b5fb4c571858910ea9332678558/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ac/25/e715fa0bc24ac2114ed69da33adf451a38abb6f3f24ec207908112e9ba53/cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/e5/23/d39ccc4ed76222db31530b0a7d38876fdb7673e23f838e8d8f0ed4651a4f/dask-2026.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6e/24/e6b7a8fe8b9e336d684779a88027b261374417f2be7c5a0fcdb40f0c8cc5/deltalake-0.25.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/d1/e73b6ad76f0b1fb7f23c35c6d95dbc506a9c8804f43dda8cb5b0fa6331fd/dill-0.3.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/a2/5f/23bd586ecb21273b41b5aa4b16fd88b7fecb53ed48d897273651c0c3d66f/duckdb-1.4.4-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ad/55/9f1ebb5a825215fadcc0f7d5073f6e79e3007e3282b14b22d6aba7ca6cb8/greenlet-3.3.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/58/2f/f3fc773270cf17e7ca076c1f6435278f58641d475a25cdeea5b2d8d4845b/grpcio-1.62.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/40/4c/ee3173906196b741ac6ba55a9788ba9ebf2cd05f91715a49b6c3bfbb9d73/grpcio_health_checking-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/82/54/acc6a6e684827b0f6bb4e2c27f3d7e25b71322c4078ef5b455c07c43260e/grpcio_reflection-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/da/73/4ad5b1f6a2e21cf1e85afdaad2b7b1a933985e2f5d679147a1953aaa192c/gunicorn-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f5/71/b0a9193641d9e2471ac541d3b1b869538a5fb6419d52fd2669fa9c79e4b8/httptools-0.7.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9d/b3/11d406849715b47c9d69bb22f50874f80caee96bd1cbe7b61abbebbf5a05/ibis_framework-12.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cd/58/4a1880ea64032185e9ae9f63940c9327c6952d5584ea544a8f66972f2fda/jwcrypto-1.5.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/01/99/f85130582f05dcf0c8902f3d629270231d2f4afdfc567f8305a952ac7f14/librt-0.8.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/db/bc/83e112abc66cd466c6b83f99118035867cecd41802f8d044638aa78a106e/locket-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/af/cd/ce6e848bbf2c32314c9b237839119c5a564a59725b53157c856e90937b7a/markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b6/9e/8d9f6b9746f8ede78b0a4e4b8908e4d80bd609fca0b3e3195a07dda29534/minio-7.2.11-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/39/8e/29306d5eca6dfda4b899d22c95b5420db4e0ffb7e0b6389b17379654ece5/mmh3-5.2.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2a/0d/93c2e4a287f74ef11a66fb6d49c7a9f05e47b0a4399040e6719b57f500d2/mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b4/63/3de6a34ad7ad6646ac7d2f55ebc6ad439dbbf9c4370017c50cf403fb19b5/numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/8f/ed/f2b5d66aa9b6b5c02ff5f120efc7b38c7c4962b21e6be0f00fd99a5c348e/orjson-3.11.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/40/a8/4dac1f8f8235e5d25b9955d02ff6f29396191d4e665d71122c3722ca83c5/pandas-2.3.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/77/fc/8cb9073bb1bee54eb49a1ae501a36402d01763812962ac811cdc1c81a9d7/parsy-2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/71/e7/40fb618334dcdf7c5a316c0e7343c5cd82d3d866edc100d98e29bc945ecd/partd-1.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b5/57/89727baef7578897af5ed166735ceb315819f1c184da8c3441271dbcfde7/protobuf-7.34.0-cp310-abi3-manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b5/70/5d8df3b09e25bce090399cf48e452d25c935ab72dad19406c77f4e828045/psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f8/82/c40b68001dbec8a3faa4c08cd8c200798ac732d2854537c5449dc859f55a/pyarrow-23.0.1-cp310-cp310-manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2e/c3/94ade4906a2f88bc935772f59c934013b4205e773bcb4239db114a6da136/pyarrow_hotfix-0.7-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/5f/e9/a09476d436d0ff1402ac3867d933c61805ec2326c6ea557aeeac3825604e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/a8/76/7727ef2ffa4b62fcab916686a68a0426b9b790139720e1934e8ba797e238/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/51/ff/f6e8b8f39e08547faece4bd80f89d5a8de68a38b2d179cc1c4490ffa3286/pytest-7.4.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ee/82/62e2d63639ecb0fbe8a7ee59ef0bc69a4669ec50f6d3459f74ad4e4189a2/pytest_asyncio-0.23.8-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2c/60/423a63fb190a0483d049786a121bd3dfd7d93bb5ff1bb5b5cd13e5df99a7/pytest_benchmark-3.4.1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/25/b2/bdc663a5647ce2034f7e8420122af340df87c01ba97745fc753b8c917acb/pytest_env-1.1.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2d/a1/2f2c1c2353350d66c4d110d283e422e4943eb5ad10effa9357ba66f7b5b9/pytest_lazy_fixture-0.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/30/43/8deecb4c123bbc16d25666f1a6d241109c97aeb2e50806b952661c8e4b95/pytest_mock-1.10.4-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/98/adc368fe369465f291ab24e18b9900473786ed1afdf861ba90467eb0767e/pytest_ordering-0.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/df/97cc0b5b8b53da0e265acd0aeecfc0c279e950a029acd2d7b4e54b00b25f/pytest_timeout-1.4.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/2e/2e/dfbd2c9b3edf6a5a8cd9e66090221046839b488ea27824970426bf06b242/python_keycloak-4.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/61/b5/707f6cf0066a6412aacc11d17920ea2e19e5b2f04081c64526eb35b5c6e7/rpds_py-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/5c/ad/6c4395649a212a6c603a72c5b9ab5dce3135a1546cfdffa3c427e71fd535/sqlalchemy-2.0.48-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/ad/c9/f58c3a17beb650700f9d2eccd410726b6d96df8953663700764ca48636c7/sqlglot-29.0.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3e/f8/6425ff800894784160290bcb9737878d910b6da6a08633bfe7f2ed8c9ae3/testcontainers-4.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/f7/1f/4e5f8770c2cf4faa2c3ed3c19f9d4485ac9db0a6b029a7866921709bdc6c/uvicorn_worker-0.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b3/f6/21657bb3beb5f8c57ce8be3b83f653dd7933c2fd00545ed1b092d464799a/uvloop-0.22.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/d5/dc/1a680b7458ffa3b14bb64878112aefc8f2e4f73c5af763cbf0bd43100658/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/9d/2f/4b3ca7e106bc608744b1cdae041e005e446124bebb037b18799c2d356864/websockets-16.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/44/79/4c755b45df6ef30c0dd628ecfaa0c808854be147ca438429da70a162833c/wrapt-2.1.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl + - pypi: ./ + osx-64: + - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-h500dc9f_9.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.7.4-h991f03e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libffi-3.5.2-hd1f9c09_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/liblzma-5.8.2-h11316ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.51.2-hb99441e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-h0622a9a_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.6.1-hb6871ef_1.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-64/python-3.10.19-h988dfef_3_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.3-h68b038d_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h7142dee_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - pypi: https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0a/08/a9bebdb2e0e602dde230bdde8021b29f71f7841bd54801bcfd514acb5dcf/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/c7/80/9f608d13b4b3afcebd1dd13baf9551c95fc424d6390e4b1cfd7b1810cd06/async_property-0.2.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/96/c0/271f3e1e3502a8decb8ee5c680dbed2d8dc2cd504f5e20f7ed491d5f37e1/atpublic-7.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b3/72/6102c002b1b4c04f8c5ed870f102f576a5fbd6a41cdb3e49ed339aa95dfe/bigtree-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/93/d7/516d984057745a6cd96575eea814fe1edd6646ee6efd552fb7b0921dec83/cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/1f/b8/6d51fc1d52cbd52cd4ccedd5b5b2f0f6a11bbf6765c782298b0f3e808541/charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/30/d5/c8b32c047e2e81dd172138f772e81d852c51f0f2ad2ae8a24f1122e9e9a7/cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/e5/23/d39ccc4ed76222db31530b0a7d38876fdb7673e23f838e8d8f0ed4651a4f/dask-2026.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/dc/0e/f372bb290cef68c67331cd649b94d62220183ddc1b5bf3a9351ea6e9c8ec/deltalake-0.25.5-cp39-abi3-macosx_10_12_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/d1/e73b6ad76f0b1fb7f23c35c6d95dbc506a9c8804f43dda8cb5b0fa6331fd/dill-0.3.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6b/7a/e9277d0567884c21f345ad43cc01aeaa2abe566d5fdf22e35c3861dd44fa/duckdb-1.4.4-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: 
https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/38/3f/9859f655d11901e7b2996c6e3d33e0caa9a1d4572c3bc61ed0faa64b2f4c/greenlet-3.3.2-cp310-cp310-macosx_11_0_universal2.whl + - pypi: https://files.pythonhosted.org/packages/c5/63/ee244c4b64f0e71cef5314f9fa1d120c072e33c2e4c545dc75bd1af2a5c5/grpcio-1.62.3-cp310-cp310-macosx_12_0_universal2.whl + - pypi: https://files.pythonhosted.org/packages/40/4c/ee3173906196b741ac6ba55a9788ba9ebf2cd05f91715a49b6c3bfbb9d73/grpcio_health_checking-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/82/54/acc6a6e684827b0f6bb4e2c27f3d7e25b71322c4078ef5b455c07c43260e/grpcio_reflection-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/da/73/4ad5b1f6a2e21cf1e85afdaad2b7b1a933985e2f5d679147a1953aaa192c/gunicorn-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/e5/c07e0bcf4ec8db8164e9f6738c048b2e66aabf30e7506f440c4cc6953f60/httptools-0.7.1-cp310-cp310-macosx_10_9_universal2.whl + - pypi: 
https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9d/b3/11d406849715b47c9d69bb22f50874f80caee96bd1cbe7b61abbebbf5a05/ibis_framework-12.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cd/58/4a1880ea64032185e9ae9f63940c9327c6952d5584ea544a8f66972f2fda/jwcrypto-1.5.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7c/5f/63f5fa395c7a8a93558c0904ba8f1c8d1b997ca6a3de61bc7659970d66bf/librt-0.8.1-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/db/bc/83e112abc66cd466c6b83f99118035867cecd41802f8d044638aa78a106e/locket-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b6/9e/8d9f6b9746f8ede78b0a4e4b8908e4d80bd609fca0b3e3195a07dda29534/minio-7.2.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3b/88/eb9a55b3f3cf43a74d6bfa8db0e2e209f966007777a1dc897c52c008314c/mmh3-5.2.0-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2f/63/e499890d8e39b1ff2df4c0c6ce5d371b6844ee22b8250687a99fd2f657a8/mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/3e/ed6db5be21ce87955c0cbd3009f2803f59fa08df21b5df06862e2d8e2bdd/numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/de/1a/a373746fa6d0e116dd9e54371a7b54622c44d12296d5d0f3ad5e3ff33490/orjson-3.11.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl + - pypi: https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3d/f7/f425a00df4fcc22b292c6895c6831c0c8ae1d9fac1e024d16f98a9ce8749/pandas-2.3.3-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/77/fc/8cb9073bb1bee54eb49a1ae501a36402d01763812962ac811cdc1c81a9d7/parsy-2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/71/e7/40fb618334dcdf7c5a316c0e7343c5cd82d3d866edc100d98e29bc945ecd/partd-1.4.2-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/13/c4/6322ab5c8f279c4c358bc14eb8aefc0550b97222a39f04eb3c1af7a830fa/protobuf-7.34.0-cp310-abi3-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/e7/36/5ee6e05c9bd427237b11b3937ad82bb8ad2752d72c6969314590dd0c2f6e/psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/bc/8e/4be5617b4aaae0287f621ad31c6036e5f63118cfca0dc57d42121ff49b51/pyarrow-23.0.1-cp310-cp310-macosx_12_0_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2e/c3/94ade4906a2f88bc935772f59c934013b4205e773bcb4239db114a6da136/pyarrow_hotfix-0.7-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6e/4e/a066527e079fc5002390c8acdd3aca431e6ea0a50ffd7201551175b47323/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/c6/90/32c9941e728d564b411d574d8ee0cf09b12ec978cb22b294995bae5549a5/pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/51/ff/f6e8b8f39e08547faece4bd80f89d5a8de68a38b2d179cc1c4490ffa3286/pytest-7.4.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ee/82/62e2d63639ecb0fbe8a7ee59ef0bc69a4669ec50f6d3459f74ad4e4189a2/pytest_asyncio-0.23.8-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2c/60/423a63fb190a0483d049786a121bd3dfd7d93bb5ff1bb5b5cd13e5df99a7/pytest_benchmark-3.4.1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/25/b2/bdc663a5647ce2034f7e8420122af340df87c01ba97745fc753b8c917acb/pytest_env-1.1.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2d/a1/2f2c1c2353350d66c4d110d283e422e4943eb5ad10effa9357ba66f7b5b9/pytest_lazy_fixture-0.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/30/43/8deecb4c123bbc16d25666f1a6d241109c97aeb2e50806b952661c8e4b95/pytest_mock-1.10.4-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/98/adc368fe369465f291ab24e18b9900473786ed1afdf861ba90467eb0767e/pytest_ordering-0.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/df/97cc0b5b8b53da0e265acd0aeecfc0c279e950a029acd2d7b4e54b00b25f/pytest_timeout-1.4.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2e/2e/dfbd2c9b3edf6a5a8cd9e66090221046839b488ea27824970426bf06b242/python_keycloak-4.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/06/0c/0c411a0ec64ccb6d104dcabe0e713e05e153a9a2c3c2bd2b32ce412166fe/rpds_py-0.30.0-cp310-cp310-macosx_10_12_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/2c/9664130905f03db57961b8980b05cab624afd114bf2be2576628a9f22da4/sqlalchemy-2.0.48-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/ad/c9/f58c3a17beb650700f9d2eccd410726b6d96df8953663700764ca48636c7/sqlglot-29.0.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3e/f8/6425ff800894784160290bcb9737878d910b6da6a08633bfe7f2ed8c9ae3/testcontainers-4.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f7/1f/4e5f8770c2cf4faa2c3ed3c19f9d4485ac9db0a6b029a7866921709bdc6c/uvicorn_worker-0.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ba/ae/6f6f9af7f590b319c94532b9567409ba11f4fa71af1148cab1bf48a07048/uvloop-0.22.1-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/a7/1a/206e8cf2dd86fddf939165a57b4df61607a1e0add2785f170a3f616b7d9f/watchfiles-1.1.1-cp310-cp310-macosx_10_12_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/19/0f/22ef6107ee52ab7f0b710d55d36f5a5d3ef19e8a205541a6d7ffa7994e5a/websockets-16.0-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/ca/21/293b657a27accfbbbb6007ebd78af0efa2083dac83e8f523272ea09b4638/wrapt-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl + - pypi: ./ + osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_9.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/icu-78.2-hef89b57_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.7.4-hf6b4638_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.5.2-hcf2aa1b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblzma-5.8.2-h8088a28_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.51.2-h1ae2325_0.conda + - conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h5e97a16_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.6.1-hd24854e_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.10.19-hcd7f573_3_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.3-h46df422_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h010d191_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - pypi: https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b6/02/d297943bcacf05e4f2a94ab6f462831dc20158614e5d067c35d4e63b9acb/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/c7/80/9f608d13b4b3afcebd1dd13baf9551c95fc424d6390e4b1cfd7b1810cd06/async_property-0.2.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/96/c0/271f3e1e3502a8decb8ee5c680dbed2d8dc2cd504f5e20f7ed491d5f37e1/atpublic-7.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/b3/72/6102c002b1b4c04f8c5ed870f102f576a5fbd6a41cdb3e49ed339aa95dfe/bigtree-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9e/84/ad6a0b408daa859246f57c03efd28e5dd1b33c21737c2db84cae8c237aa5/cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/1f/b8/6d51fc1d52cbd52cd4ccedd5b5b2f0f6a11bbf6765c782298b0f3e808541/charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/30/d5/c8b32c047e2e81dd172138f772e81d852c51f0f2ad2ae8a24f1122e9e9a7/cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/e5/23/d39ccc4ed76222db31530b0a7d38876fdb7673e23f838e8d8f0ed4651a4f/dask-2026.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/74/7a/ec22ff9d5c891b4f9ae834ef70524c92bd59d1408e9944e2652c87bc3f02/deltalake-0.25.5-cp39-abi3-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/d1/e73b6ad76f0b1fb7f23c35c6d95dbc506a9c8804f43dda8cb5b0fa6331fd/dill-0.3.9-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/4a/96/3a7630d2779d2bae6f3cdf540a088ed45166adefd3c429971e5b85ce8f84/duckdb-1.4.4-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c5/63/ee244c4b64f0e71cef5314f9fa1d120c072e33c2e4c545dc75bd1af2a5c5/grpcio-1.62.3-cp310-cp310-macosx_12_0_universal2.whl + - pypi: https://files.pythonhosted.org/packages/40/4c/ee3173906196b741ac6ba55a9788ba9ebf2cd05f91715a49b6c3bfbb9d73/grpcio_health_checking-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/82/54/acc6a6e684827b0f6bb4e2c27f3d7e25b71322c4078ef5b455c07c43260e/grpcio_reflection-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/da/73/4ad5b1f6a2e21cf1e85afdaad2b7b1a933985e2f5d679147a1953aaa192c/gunicorn-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/7e/4f/35e3a63f863a659f92ffd92bef131f3e81cf849af26e6435b49bd9f6f751/httptools-0.7.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9d/b3/11d406849715b47c9d69bb22f50874f80caee96bd1cbe7b61abbebbf5a05/ibis_framework-12.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cd/58/4a1880ea64032185e9ae9f63940c9327c6952d5584ea544a8f66972f2fda/jwcrypto-1.5.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ff/e0/0472cf37267b5920eff2f292ccfaede1886288ce35b7f3203d8de00abfe6/librt-0.8.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/db/bc/83e112abc66cd466c6b83f99118035867cecd41802f8d044638aa78a106e/locket-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl 
+ - pypi: https://files.pythonhosted.org/packages/98/1b/fbd8eed11021cabd9226c37342fa6ca4e8a98d8188a8d9b66740494960e4/markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b6/9e/8d9f6b9746f8ede78b0a4e4b8908e4d80bd609fca0b3e3195a07dda29534/minio-7.2.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d1/4c/8e4b3878bf8435c697d7ce99940a3784eb864521768069feaccaff884a17/mmh3-5.2.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/72/4b/095626fc136fba96effc4fd4a82b41d688ab92124f8c4f7564bffe5cf1b0/mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/22/c2/4b9221495b2a132cc9d2eb862e21d42a009f5a60e45fc44b00118c174bff/numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/de/1a/a373746fa6d0e116dd9e54371a7b54622c44d12296d5d0f3ad5e3ff33490/orjson-3.11.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl + - pypi: https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/13/4f/66d99628ff8ce7857aca52fed8f0066ce209f96be2fede6cef9f84e8d04f/pandas-2.3.3-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/77/fc/8cb9073bb1bee54eb49a1ae501a36402d01763812962ac811cdc1c81a9d7/parsy-2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/71/e7/40fb618334dcdf7c5a316c0e7343c5cd82d3d866edc100d98e29bc945ecd/partd-1.4.2-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/13/c4/6322ab5c8f279c4c358bc14eb8aefc0550b97222a39f04eb3c1af7a830fa/protobuf-7.34.0-cp310-abi3-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/80/c4/f5af4c1ca8c1eeb2e92ccca14ce8effdeec651d5ab6053c589b074eda6e1/psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/bc/a8/24e5dc6855f50a62936ceb004e6e9645e4219a8065f304145d7fb8a79d5d/pyarrow-23.0.1-cp310-cp310-macosx_12_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/2e/c3/94ade4906a2f88bc935772f59c934013b4205e773bcb4239db114a6da136/pyarrow_hotfix-0.7-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/db/6c/a1f71542c969912bb0e106f64f60a56cc1f0fabecf9396f45accbe63fa68/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/fb/a8/61c96a77fe28993d9a6fb0f4127e05430a267b235a124545d79fea46dd65/pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/51/ff/f6e8b8f39e08547faece4bd80f89d5a8de68a38b2d179cc1c4490ffa3286/pytest-7.4.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ee/82/62e2d63639ecb0fbe8a7ee59ef0bc69a4669ec50f6d3459f74ad4e4189a2/pytest_asyncio-0.23.8-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2c/60/423a63fb190a0483d049786a121bd3dfd7d93bb5ff1bb5b5cd13e5df99a7/pytest_benchmark-3.4.1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/25/b2/bdc663a5647ce2034f7e8420122af340df87c01ba97745fc753b8c917acb/pytest_env-1.1.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2d/a1/2f2c1c2353350d66c4d110d283e422e4943eb5ad10effa9357ba66f7b5b9/pytest_lazy_fixture-0.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/30/43/8deecb4c123bbc16d25666f1a6d241109c97aeb2e50806b952661c8e4b95/pytest_mock-1.10.4-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/98/adc368fe369465f291ab24e18b9900473786ed1afdf861ba90467eb0767e/pytest_ordering-0.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/df/97cc0b5b8b53da0e265acd0aeecfc0c279e950a029acd2d7b4e54b00b25f/pytest_timeout-1.4.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2e/2e/dfbd2c9b3edf6a5a8cd9e66090221046839b488ea27824970426bf06b242/python_keycloak-4.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/19/6a/4ba3d0fb7297ebae71171822554abe48d7cab29c28b8f9f2c04b79988c05/rpds_py-0.30.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/67/1235676e93dd3b742a4a8eddfae49eea46c85e3eed29f0da446a8dd57500/sqlalchemy-2.0.48-cp310-cp310-macosx_11_0_arm64.whl + - pypi: 
https://files.pythonhosted.org/packages/ad/c9/f58c3a17beb650700f9d2eccd410726b6d96df8953663700764ca48636c7/sqlglot-29.0.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3e/f8/6425ff800894784160290bcb9737878d910b6da6a08633bfe7f2ed8c9ae3/testcontainers-4.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f7/1f/4e5f8770c2cf4faa2c3ed3c19f9d4485ac9db0a6b029a7866921709bdc6c/uvicorn_worker-0.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/eb/14/ecceb239b65adaaf7fde510aa8bd534075695d1e5f8dadfa32b5723d9cfb/uvloop-0.22.1-cp310-cp310-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/b3/0f/abaf5262b9c496b5dad4ed3c0e799cbecb1f8ea512ecb6ddd46646a9fca3/watchfiles-1.1.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/10/40/904a4cb30d9b61c0e278899bf36342e9b0208eb3c470324a9ecbaac2a30f/websockets-16.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/25/e9/96dd77728b54a899d4ce2798d7b1296989ce687ed3c0cb917d6b3154bf5d/wrapt-2.1.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl + - pypi: ./ + ray-tests: + channels: + - url: https://conda.anaconda.org/conda-forge/ + indexes: + - https://pypi.org/simple + options: + pypi-prerelease-mode: if-necessary-or-explicit + packages: + linux-64: + - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/icu-78.2-h33c6efd_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda + - conda: 
https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.51.2-hf4e2dac_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.41.3-h5347b49_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.1-h35e630c_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.10.19-h3c07f61_3_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.3-h853b02a_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h366c992_103.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb78ec9c_6.conda + - pypi: https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/09/52/94108adfdd6e2ddf58be64f959a0b9c7d4ef2fa71086c38356d22dc501ea/argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/c7/80/9f608d13b4b3afcebd1dd13baf9551c95fc424d6390e4b1cfd7b1810cd06/async_property-0.2.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b3/72/6102c002b1b4c04f8c5ed870f102f576a5fbd6a41cdb3e49ed339aa95dfe/bigtree-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/36/3b/60cbd1f8e93aa25d1c669c649b7a655b0b5fb4c571858910ea9332678558/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ac/25/e715fa0bc24ac2114ed69da33adf451a38abb6f3f24ec207908112e9ba53/cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/e5/23/d39ccc4ed76222db31530b0a7d38876fdb7673e23f838e8d8f0ed4651a4f/dask-2026.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/d1/e73b6ad76f0b1fb7f23c35c6d95dbc506a9c8804f43dda8cb5b0fa6331fd/dill-0.3.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f9/0b/de6f54d4a8bedfe8645c41497f3c18d749f0bd3218170c667bf4b81d0cdd/filelock-3.25.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ad/55/9f1ebb5a825215fadcc0f7d5073f6e79e3007e3282b14b22d6aba7ca6cb8/greenlet-3.3.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl + - pypi: 
https://files.pythonhosted.org/packages/58/2f/f3fc773270cf17e7ca076c1f6435278f58641d475a25cdeea5b2d8d4845b/grpcio-1.62.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/40/4c/ee3173906196b741ac6ba55a9788ba9ebf2cd05f91715a49b6c3bfbb9d73/grpcio_health_checking-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/82/54/acc6a6e684827b0f6bb4e2c27f3d7e25b71322c4078ef5b455c07c43260e/grpcio_reflection-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/da/73/4ad5b1f6a2e21cf1e85afdaad2b7b1a933985e2f5d679147a1953aaa192c/gunicorn-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f5/71/b0a9193641d9e2471ac541d3b1b869538a5fb6419d52fd2669fa9c79e4b8/httptools-0.7.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cd/58/4a1880ea64032185e9ae9f63940c9327c6952d5584ea544a8f66972f2fda/jwcrypto-1.5.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/01/99/f85130582f05dcf0c8902f3d629270231d2f4afdfc567f8305a952ac7f14/librt-0.8.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/db/bc/83e112abc66cd466c6b83f99118035867cecd41802f8d044638aa78a106e/locket-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/af/cd/ce6e848bbf2c32314c9b237839119c5a564a59725b53157c856e90937b7a/markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b6/9e/8d9f6b9746f8ede78b0a4e4b8908e4d80bd609fca0b3e3195a07dda29534/minio-7.2.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/39/8e/29306d5eca6dfda4b899d22c95b5420db4e0ffb7e0b6389b17379654ece5/mmh3-5.2.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b7/09/2a06956383c0fdebaef5aa9246e2356776f12ea6f2a44bd1368abf0e46c4/msgpack-1.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2a/0d/93c2e4a287f74ef11a66fb6d49c7a9f05e47b0a4399040e6719b57f500d2/mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/b4/63/3de6a34ad7ad6646ac7d2f55ebc6ad439dbbf9c4370017c50cf403fb19b5/numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/8f/ed/f2b5d66aa9b6b5c02ff5f120efc7b38c7c4962b21e6be0f00fd99a5c348e/orjson-3.11.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/40/a8/4dac1f8f8235e5d25b9955d02ff6f29396191d4e665d71122c3722ca83c5/pandas-2.3.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/71/e7/40fb618334dcdf7c5a316c0e7343c5cd82d3d866edc100d98e29bc945ecd/partd-1.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b5/57/89727baef7578897af5ed166735ceb315819f1c184da8c3441271dbcfde7/protobuf-7.34.0-cp310-abi3-manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b5/70/5d8df3b09e25bce090399cf48e452d25c935ab72dad19406c77f4e828045/psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/f8/82/c40b68001dbec8a3faa4c08cd8c200798ac732d2854537c5449dc859f55a/pyarrow-23.0.1-cp310-cp310-manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/5f/e9/a09476d436d0ff1402ac3867d933c61805ec2326c6ea557aeeac3825604e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/a8/76/7727ef2ffa4b62fcab916686a68a0426b9b790139720e1934e8ba797e238/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/51/ff/f6e8b8f39e08547faece4bd80f89d5a8de68a38b2d179cc1c4490ffa3286/pytest-7.4.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ee/82/62e2d63639ecb0fbe8a7ee59ef0bc69a4669ec50f6d3459f74ad4e4189a2/pytest_asyncio-0.23.8-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2c/60/423a63fb190a0483d049786a121bd3dfd7d93bb5ff1bb5b5cd13e5df99a7/pytest_benchmark-3.4.1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/25/b2/bdc663a5647ce2034f7e8420122af340df87c01ba97745fc753b8c917acb/pytest_env-1.1.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2d/a1/2f2c1c2353350d66c4d110d283e422e4943eb5ad10effa9357ba66f7b5b9/pytest_lazy_fixture-0.6.3-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/30/43/8deecb4c123bbc16d25666f1a6d241109c97aeb2e50806b952661c8e4b95/pytest_mock-1.10.4-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/98/adc368fe369465f291ab24e18b9900473786ed1afdf861ba90467eb0767e/pytest_ordering-0.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/df/97cc0b5b8b53da0e265acd0aeecfc0c279e950a029acd2d7b4e54b00b25f/pytest_timeout-1.4.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2e/2e/dfbd2c9b3edf6a5a8cd9e66090221046839b488ea27824970426bf06b242/python_keycloak-4.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b0/b1/8cc4e45a3ce87aabcb70696b448b20840bcbaa5c98bdb4807a2749541fda/ray-2.54.0-cp310-cp310-manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/61/b5/707f6cf0066a6412aacc11d17920ea2e19e5b2f04081c64526eb35b5c6e7/rpds_py-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/5c/ad/6c4395649a212a6c603a72c5b9ab5dce3135a1546cfdffa3c427e71fd535/sqlalchemy-2.0.48-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3e/f8/6425ff800894784160290bcb9737878d910b6da6a08633bfe7f2ed8c9ae3/testcontainers-4.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f7/1f/4e5f8770c2cf4faa2c3ed3c19f9d4485ac9db0a6b029a7866921709bdc6c/uvicorn_worker-0.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b3/f6/21657bb3beb5f8c57ce8be3b83f653dd7933c2fd00545ed1b092d464799a/uvloop-0.22.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/d5/dc/1a680b7458ffa3b14bb64878112aefc8f2e4f73c5af763cbf0bd43100658/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/9d/2f/4b3ca7e106bc608744b1cdae041e005e446124bebb037b18799c2d356864/websockets-16.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/44/79/4c755b45df6ef30c0dd628ecfaa0c808854be147ca438429da70a162833c/wrapt-2.1.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + - pypi: 
https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl + - pypi: ./ + osx-64: + - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-h500dc9f_9.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.7.4-h991f03e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libffi-3.5.2-hd1f9c09_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/liblzma-5.8.2-h11316ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.51.2-hb99441e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-h0622a9a_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.6.1-hb6871ef_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.10.19-h988dfef_3_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.3-h68b038d_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h7142dee_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - pypi: https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/0a/08/a9bebdb2e0e602dde230bdde8021b29f71f7841bd54801bcfd514acb5dcf/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/c7/80/9f608d13b4b3afcebd1dd13baf9551c95fc424d6390e4b1cfd7b1810cd06/async_property-0.2.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b3/72/6102c002b1b4c04f8c5ed870f102f576a5fbd6a41cdb3e49ed339aa95dfe/bigtree-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/93/d7/516d984057745a6cd96575eea814fe1edd6646ee6efd552fb7b0921dec83/cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/1f/b8/6d51fc1d52cbd52cd4ccedd5b5b2f0f6a11bbf6765c782298b0f3e808541/charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/30/d5/c8b32c047e2e81dd172138f772e81d852c51f0f2ad2ae8a24f1122e9e9a7/cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/e5/23/d39ccc4ed76222db31530b0a7d38876fdb7673e23f838e8d8f0ed4651a4f/dask-2026.1.2-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/d1/e73b6ad76f0b1fb7f23c35c6d95dbc506a9c8804f43dda8cb5b0fa6331fd/dill-0.3.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f9/0b/de6f54d4a8bedfe8645c41497f3c18d749f0bd3218170c667bf4b81d0cdd/filelock-3.25.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/38/3f/9859f655d11901e7b2996c6e3d33e0caa9a1d4572c3bc61ed0faa64b2f4c/greenlet-3.3.2-cp310-cp310-macosx_11_0_universal2.whl + - pypi: https://files.pythonhosted.org/packages/c5/63/ee244c4b64f0e71cef5314f9fa1d120c072e33c2e4c545dc75bd1af2a5c5/grpcio-1.62.3-cp310-cp310-macosx_12_0_universal2.whl + - pypi: https://files.pythonhosted.org/packages/40/4c/ee3173906196b741ac6ba55a9788ba9ebf2cd05f91715a49b6c3bfbb9d73/grpcio_health_checking-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/82/54/acc6a6e684827b0f6bb4e2c27f3d7e25b71322c4078ef5b455c07c43260e/grpcio_reflection-1.62.3-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/da/73/4ad5b1f6a2e21cf1e85afdaad2b7b1a933985e2f5d679147a1953aaa192c/gunicorn-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/e5/c07e0bcf4ec8db8164e9f6738c048b2e66aabf30e7506f440c4cc6953f60/httptools-0.7.1-cp310-cp310-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cd/58/4a1880ea64032185e9ae9f63940c9327c6952d5584ea544a8f66972f2fda/jwcrypto-1.5.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7c/5f/63f5fa395c7a8a93558c0904ba8f1c8d1b997ca6a3de61bc7659970d66bf/librt-0.8.1-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: 
https://files.pythonhosted.org/packages/db/bc/83e112abc66cd466c6b83f99118035867cecd41802f8d044638aa78a106e/locket-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b6/9e/8d9f6b9746f8ede78b0a4e4b8908e4d80bd609fca0b3e3195a07dda29534/minio-7.2.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3b/88/eb9a55b3f3cf43a74d6bfa8db0e2e209f966007777a1dc897c52c008314c/mmh3-5.2.0-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/f5/a2/3b68a9e769db68668b25c6108444a35f9bd163bb848c0650d516761a59c0/msgpack-1.1.2-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2f/63/e499890d8e39b1ff2df4c0c6ce5d371b6844ee22b8250687a99fd2f657a8/mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/3e/ed6db5be21ce87955c0cbd3009f2803f59fa08df21b5df06862e2d8e2bdd/numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/de/1a/a373746fa6d0e116dd9e54371a7b54622c44d12296d5d0f3ad5e3ff33490/orjson-3.11.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl + - pypi: https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3d/f7/f425a00df4fcc22b292c6895c6831c0c8ae1d9fac1e024d16f98a9ce8749/pandas-2.3.3-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/71/e7/40fb618334dcdf7c5a316c0e7343c5cd82d3d866edc100d98e29bc945ecd/partd-1.4.2-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/13/c4/6322ab5c8f279c4c358bc14eb8aefc0550b97222a39f04eb3c1af7a830fa/protobuf-7.34.0-cp310-abi3-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/e7/36/5ee6e05c9bd427237b11b3937ad82bb8ad2752d72c6969314590dd0c2f6e/psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/bc/8e/4be5617b4aaae0287f621ad31c6036e5f63118cfca0dc57d42121ff49b51/pyarrow-23.0.1-cp310-cp310-macosx_12_0_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6e/4e/a066527e079fc5002390c8acdd3aca431e6ea0a50ffd7201551175b47323/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c6/90/32c9941e728d564b411d574d8ee0cf09b12ec978cb22b294995bae5549a5/pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl + - pypi: 
https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/51/ff/f6e8b8f39e08547faece4bd80f89d5a8de68a38b2d179cc1c4490ffa3286/pytest-7.4.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ee/82/62e2d63639ecb0fbe8a7ee59ef0bc69a4669ec50f6d3459f74ad4e4189a2/pytest_asyncio-0.23.8-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2c/60/423a63fb190a0483d049786a121bd3dfd7d93bb5ff1bb5b5cd13e5df99a7/pytest_benchmark-3.4.1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/25/b2/bdc663a5647ce2034f7e8420122af340df87c01ba97745fc753b8c917acb/pytest_env-1.1.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2d/a1/2f2c1c2353350d66c4d110d283e422e4943eb5ad10effa9357ba66f7b5b9/pytest_lazy_fixture-0.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/30/43/8deecb4c123bbc16d25666f1a6d241109c97aeb2e50806b952661c8e4b95/pytest_mock-1.10.4-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/98/adc368fe369465f291ab24e18b9900473786ed1afdf861ba90467eb0767e/pytest_ordering-0.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/df/97cc0b5b8b53da0e265acd0aeecfc0c279e950a029acd2d7b4e54b00b25f/pytest_timeout-1.4.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2e/2e/dfbd2c9b3edf6a5a8cd9e66090221046839b488ea27824970426bf06b242/python_keycloak-4.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/61/c5/c2ceba832fe3f47cfd7e11cd7cc7a1bbc2c028424c5bca70435aa4ca1dec/ray-2.49.2-cp310-cp310-macosx_12_0_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/06/0c/0c411a0ec64ccb6d104dcabe0e713e05e153a9a2c3c2bd2b32ce412166fe/rpds_py-0.30.0-cp310-cp310-macosx_10_12_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/2c/9664130905f03db57961b8980b05cab624afd114bf2be2576628a9f22da4/sqlalchemy-2.0.48-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3e/f8/6425ff800894784160290bcb9737878d910b6da6a08633bfe7f2ed8c9ae3/testcontainers-4.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/f7/1f/4e5f8770c2cf4faa2c3ed3c19f9d4485ac9db0a6b029a7866921709bdc6c/uvicorn_worker-0.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ba/ae/6f6f9af7f590b319c94532b9567409ba11f4fa71af1148cab1bf48a07048/uvloop-0.22.1-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/a7/1a/206e8cf2dd86fddf939165a57b4df61607a1e0add2785f170a3f616b7d9f/watchfiles-1.1.1-cp310-cp310-macosx_10_12_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/19/0f/22ef6107ee52ab7f0b710d55d36f5a5d3ef19e8a205541a6d7ffa7994e5a/websockets-16.0-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/ca/21/293b657a27accfbbbb6007ebd78af0efa2083dac83e8f523272ea09b4638/wrapt-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl + - pypi: ./ + osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_9.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/icu-78.2-hef89b57_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.7.4-hf6b4638_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.5.2-hcf2aa1b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblzma-5.8.2-h8088a28_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.51.2-h1ae2325_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h5e97a16_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.6.1-hd24854e_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.10.19-hcd7f573_3_cpython.conda + 
- conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.3-h46df422_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h010d191_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - pypi: https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b6/02/d297943bcacf05e4f2a94ab6f462831dc20158614e5d067c35d4e63b9acb/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/c7/80/9f608d13b4b3afcebd1dd13baf9551c95fc424d6390e4b1cfd7b1810cd06/async_property-0.2.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b3/72/6102c002b1b4c04f8c5ed870f102f576a5fbd6a41cdb3e49ed339aa95dfe/bigtree-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9e/84/ad6a0b408daa859246f57c03efd28e5dd1b33c21737c2db84cae8c237aa5/cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/1f/b8/6d51fc1d52cbd52cd4ccedd5b5b2f0f6a11bbf6765c782298b0f3e808541/charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl + 
- pypi: https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/30/d5/c8b32c047e2e81dd172138f772e81d852c51f0f2ad2ae8a24f1122e9e9a7/cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/e5/23/d39ccc4ed76222db31530b0a7d38876fdb7673e23f838e8d8f0ed4651a4f/dask-2026.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/d1/e73b6ad76f0b1fb7f23c35c6d95dbc506a9c8804f43dda8cb5b0fa6331fd/dill-0.3.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f9/0b/de6f54d4a8bedfe8645c41497f3c18d749f0bd3218170c667bf4b81d0cdd/filelock-3.25.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/c5/63/ee244c4b64f0e71cef5314f9fa1d120c072e33c2e4c545dc75bd1af2a5c5/grpcio-1.62.3-cp310-cp310-macosx_12_0_universal2.whl + - pypi: https://files.pythonhosted.org/packages/40/4c/ee3173906196b741ac6ba55a9788ba9ebf2cd05f91715a49b6c3bfbb9d73/grpcio_health_checking-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/82/54/acc6a6e684827b0f6bb4e2c27f3d7e25b71322c4078ef5b455c07c43260e/grpcio_reflection-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/da/73/4ad5b1f6a2e21cf1e85afdaad2b7b1a933985e2f5d679147a1953aaa192c/gunicorn-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7e/4f/35e3a63f863a659f92ffd92bef131f3e81cf849af26e6435b49bd9f6f751/httptools-0.7.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl + - 
pypi: https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cd/58/4a1880ea64032185e9ae9f63940c9327c6952d5584ea544a8f66972f2fda/jwcrypto-1.5.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ff/e0/0472cf37267b5920eff2f292ccfaede1886288ce35b7f3203d8de00abfe6/librt-0.8.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/db/bc/83e112abc66cd466c6b83f99118035867cecd41802f8d044638aa78a106e/locket-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/98/1b/fbd8eed11021cabd9226c37342fa6ca4e8a98d8188a8d9b66740494960e4/markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/b6/9e/8d9f6b9746f8ede78b0a4e4b8908e4d80bd609fca0b3e3195a07dda29534/minio-7.2.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d1/4c/8e4b3878bf8435c697d7ce99940a3784eb864521768069feaccaff884a17/mmh3-5.2.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/5b/e1/2b720cc341325c00be44e1ed59e7cfeae2678329fbf5aa68f5bda57fe728/msgpack-1.1.2-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/72/4b/095626fc136fba96effc4fd4a82b41d688ab92124f8c4f7564bffe5cf1b0/mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/22/c2/4b9221495b2a132cc9d2eb862e21d42a009f5a60e45fc44b00118c174bff/numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/de/1a/a373746fa6d0e116dd9e54371a7b54622c44d12296d5d0f3ad5e3ff33490/orjson-3.11.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl + - pypi: 
https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/13/4f/66d99628ff8ce7857aca52fed8f0066ce209f96be2fede6cef9f84e8d04f/pandas-2.3.3-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/71/e7/40fb618334dcdf7c5a316c0e7343c5cd82d3d866edc100d98e29bc945ecd/partd-1.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/13/c4/6322ab5c8f279c4c358bc14eb8aefc0550b97222a39f04eb3c1af7a830fa/protobuf-7.34.0-cp310-abi3-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/80/c4/f5af4c1ca8c1eeb2e92ccca14ce8effdeec651d5ab6053c589b074eda6e1/psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/bc/a8/24e5dc6855f50a62936ceb004e6e9645e4219a8065f304145d7fb8a79d5d/pyarrow-23.0.1-cp310-cp310-macosx_12_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/db/6c/a1f71542c969912bb0e106f64f60a56cc1f0fabecf9396f45accbe63fa68/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fb/a8/61c96a77fe28993d9a6fb0f4127e05430a267b235a124545d79fea46dd65/pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/51/ff/f6e8b8f39e08547faece4bd80f89d5a8de68a38b2d179cc1c4490ffa3286/pytest-7.4.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ee/82/62e2d63639ecb0fbe8a7ee59ef0bc69a4669ec50f6d3459f74ad4e4189a2/pytest_asyncio-0.23.8-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2c/60/423a63fb190a0483d049786a121bd3dfd7d93bb5ff1bb5b5cd13e5df99a7/pytest_benchmark-3.4.1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/25/b2/bdc663a5647ce2034f7e8420122af340df87c01ba97745fc753b8c917acb/pytest_env-1.1.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2d/a1/2f2c1c2353350d66c4d110d283e422e4943eb5ad10effa9357ba66f7b5b9/pytest_lazy_fixture-0.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/30/43/8deecb4c123bbc16d25666f1a6d241109c97aeb2e50806b952661c8e4b95/pytest_mock-1.10.4-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/98/adc368fe369465f291ab24e18b9900473786ed1afdf861ba90467eb0767e/pytest_ordering-0.6-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/46/df/97cc0b5b8b53da0e265acd0aeecfc0c279e950a029acd2d7b4e54b00b25f/pytest_timeout-1.4.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2e/2e/dfbd2c9b3edf6a5a8cd9e66090221046839b488ea27824970426bf06b242/python_keycloak-4.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/64/13/b86d791b41f33220335eba18fc4841f1ebddae41e562c6a216846404c88d/ray-2.54.0-cp310-cp310-macosx_12_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/19/6a/4ba3d0fb7297ebae71171822554abe48d7cab29c28b8f9f2c04b79988c05/rpds_py-0.30.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: 
https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/67/1235676e93dd3b742a4a8eddfae49eea46c85e3eed29f0da446a8dd57500/sqlalchemy-2.0.48-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3e/f8/6425ff800894784160290bcb9737878d910b6da6a08633bfe7f2ed8c9ae3/testcontainers-4.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f7/1f/4e5f8770c2cf4faa2c3ed3c19f9d4485ac9db0a6b029a7866921709bdc6c/uvicorn_worker-0.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/eb/14/ecceb239b65adaaf7fde510aa8bd534075695d1e5f8dadfa32b5723d9cfb/uvloop-0.22.1-cp310-cp310-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/b3/0f/abaf5262b9c496b5dad4ed3c0e799cbecb1f8ea512ecb6ddd46646a9fca3/watchfiles-1.1.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/10/40/904a4cb30d9b61c0e278899bf36342e9b0208eb3c470324a9ecbaac2a30f/websockets-16.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/25/e9/96dd77728b54a899d4ce2798d7b1296989ce687ed3c0cb917d6b3154bf5d/wrapt-2.1.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl + - pypi: ./ + registration-tests: + channels: + - url: https://conda.anaconda.org/conda-forge/ + indexes: + - https://pypi.org/simple + options: + pypi-prerelease-mode: if-necessary-or-explicit + packages: + linux-64: + - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/icu-78.2-h33c6efd_0.conda + - 
conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.51.2-hf4e2dac_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.41.3-h5347b49_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.1-h35e630c_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.10.19-h3c07f61_3_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.3-h853b02a_0.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h366c992_103.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb78ec9c_6.conda + - pypi: https://files.pythonhosted.org/packages/92/d9/25a697a959a7149c93efa4d849421aa5f22bcb82350ac89b4284b0b88aa8/aiobotocore-2.23.1-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2c/c0/cfcc3d2e11b477f86e1af2863f3858c8850d751ce8dc39c4058a072c9e54/aiohttp-3.13.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/10/a1/510b0a7fadc6f43a6ce50152e69dbd86415240835868bb0bd9b5b88b1e06/aioitertools-0.13.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/09/52/94108adfdd6e2ddf58be64f959a0b9c7d4ef2fa71086c38356d22dc501ea/argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/c9/7f/09065fd9e27da0eda08b4d6897f1c13535066174cc023af248fc2a8d5e5a/asn1crypto-1.5.1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/80/9f608d13b4b3afcebd1dd13baf9551c95fc424d6390e4b1cfd7b1810cd06/async_property-0.2.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b3/72/6102c002b1b4c04f8c5ed870f102f576a5fbd6a41cdb3e49ed339aa95dfe/bigtree-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/43/8b/b2361188bd1e293eede1bc165e2461d390394f71ec0c8c21211c8dabf62c/boto3-1.38.27-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/a4/00/dd90b7a0255587ba1c9754d32a221adb4a9022f181df3eef401b0b9fadfc/botocore-1.38.46-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/36/3b/60cbd1f8e93aa25d1c669c649b7a655b0b5fb4c571858910ea9332678558/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ac/25/e715fa0bc24ac2114ed69da33adf451a38abb6f3f24ec207908112e9ba53/cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/e5/23/d39ccc4ed76222db31530b0a7d38876fdb7673e23f838e8d8f0ed4651a4f/dask-2026.1.2-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/01/80/171c7c5b78e60ab25d6f11e3d38675fe7ef843ddc79a7fd26916d3a6ca05/db_dtypes-1.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/d1/e73b6ad76f0b1fb7f23c35c6d95dbc506a9c8804f43dda8cb5b0fa6331fd/dill-0.3.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f9/0b/de6f54d4a8bedfe8645c41497f3c18d749f0bd3218170c667bf4b81d0cdd/filelock-3.25.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/5d/ed/c7895fd2fde7f3ee70d248175f9b6cdf792fb741ab92dc59cd9ef3bd241b/frozenlist-1.8.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/1d/a0/6aaea0c2fbea2f89bfd5db25fb1e3481896a423002ebe4e55288907a97a3/fsspec-2024.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/45/27/09c33d67f7e0dcf06d7ac17d196594e66989299374bfb0d4331d1038e76b/google_api_core-2.30.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/83/1d/d6466de3a5249d35e832a52834115ca9d1d0de6abc22065f049707516d47/google_auth-2.48.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/2f/56/909fd5632226d3fba31d7aeffd4754410735d49362f5809956fe3e9af344/google_auth_oauthlib-1.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7c/f5/081cf5b90adfe524ae0d671781b0d497a75a0f2601d075af518828e22d8f/google_cloud_bigquery-3.40.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1f/07/62dbe78ef773569be0a1d2c1b845e9214889b404e506126519b4d33ee999/google_cloud_bigquery_storage-2.36.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/62/69/03eed134d71f6117ffd9efac2d1033bb2fa2522e9e82545a0828061d32f4/google_cloud_bigtable-2.35.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/89/20/bfa472e327c8edee00f04beecc80baeddd2ab33ee0e86fd7654da49d45e9/google_cloud_core-2.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b7/88/348c09570a03886356c02337f06d69532fa17a66ad2a9dff584f7b60eb04/google_cloud_datastore-2.23.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d5/94/6db383d8ee1adf45dc6c73477152b82731fa4c4a46d9c1932cc8757e0fd4/google_cloud_storage-2.19.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3d/63/bec827e70b7a0d4094e7476f863c0dbd6b5f0f1f91d9c9b32b76dcdfeb4e/google_crc32c-1.8.0-cp310-cp310-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/1f/0b/93afde9cfe012260e9fe1522f35c9b72d6ee222f316586b1f23ecf44d518/google_resumable_media-2.8.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ad/55/9f1ebb5a825215fadcc0f7d5073f6e79e3007e3282b14b22d6aba7ca6cb8/greenlet-3.3.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl + - pypi: 
https://files.pythonhosted.org/packages/4a/bd/330a1bbdb1afe0b96311249e699b6dc9cfc17916394fd4503ac5aca2514b/grpc_google_iam_v1-0.14.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/58/2f/f3fc773270cf17e7ca076c1f6435278f58641d475a25cdeea5b2d8d4845b/grpcio-1.62.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/40/4c/ee3173906196b741ac6ba55a9788ba9ebf2cd05f91715a49b6c3bfbb9d73/grpcio_health_checking-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/82/54/acc6a6e684827b0f6bb4e2c27f3d7e25b71322c4078ef5b455c07c43260e/grpcio_reflection-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/90/40/972271de05f9315c0d69f9f7ebbcadd83bc85322f538637d11bb8c67803d/grpcio_status-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b2/23/55d40e1bf54c141f541ab31b4b4b0f58610440c8837b1406f3467c2b4853/grpcio_testing-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/da/73/4ad5b1f6a2e21cf1e85afdaad2b7b1a933985e2f5d679147a1953aaa192c/gunicorn-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/42/6e/8adaefff7e3e216b0f7bd6cafce6d5d06798f31c3e2852dc3db6a7d758c9/hiredis-2.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f5/71/b0a9193641d9e2471ac541d3b1b869538a5fb6419d52fd2669fa9c79e4b8/httptools-0.7.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/14/2f/967ba146e6d58cf6a652da73885f52fc68001525b4197effc174321d70b4/jmespath-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cd/58/4a1880ea64032185e9ae9f63940c9327c6952d5584ea544a8f66972f2fda/jwcrypto-1.5.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/01/99/f85130582f05dcf0c8902f3d629270231d2f4afdfc567f8305a952ac7f14/librt-0.8.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/db/bc/83e112abc66cd466c6b83f99118035867cecd41802f8d044638aa78a106e/locket-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/af/cd/ce6e848bbf2c32314c9b237839119c5a564a59725b53157c856e90937b7a/markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b6/9e/8d9f6b9746f8ede78b0a4e4b8908e4d80bd609fca0b3e3195a07dda29534/minio-7.2.11-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/39/8e/29306d5eca6dfda4b899d22c95b5420db4e0ffb7e0b6389b17379654ece5/mmh3-5.2.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/4b/ac/b605473de2bb404e742f2cc3583d12aedb2352a70e49ae8fce455b50c5aa/multidict-6.7.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2a/0d/93c2e4a287f74ef11a66fb6d49c7a9f05e47b0a4399040e6719b57f500d2/mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b4/63/3de6a34ad7ad6646ac7d2f55ebc6ad439dbbf9c4370017c50cf403fb19b5/numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/8f/ed/f2b5d66aa9b6b5c02ff5f120efc7b38c7c4962b21e6be0f00fd99a5c348e/orjson-3.11.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/40/a8/4dac1f8f8235e5d25b9955d02ff6f29396191d4e665d71122c3722ca83c5/pandas-2.3.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/26/b7/e805de93e3aa78813912b19edc9c8b037d6cd1c302ab339b895f305cf9a5/pandas_gbq-0.33.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/71/e7/40fb618334dcdf7c5a316c0e7343c5cd82d3d866edc100d98e29bc945ecd/partd-1.4.2-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/48/31/05e764397056194206169869b50cf2fee4dbbbc71b344705b9c0d878d4d8/platformdirs-4.9.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/08/57/8c87e93142b2c1fa2408e45695205a7ba05fb5db458c0bf5c06ba0e09ea6/propcache-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/5d/79/ac273cbbf744691821a9cca88957257f41afe271637794975ca090b9588b/proto_plus-1.27.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b5/70/5d8df3b09e25bce090399cf48e452d25c935ab72dad19406c77f4e828045/psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/18/f3/14a1370b1449ca875d5e353ef02cb9db6b70bd46ec361c236176837c0be1/psycopg-3.2.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/a5/90/9f2c41b3b42d8cd8b9866f0bbd27a5796a1ca8042a1a019b39a6645df523/psycopg_binary-3.2.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/e7/c3/26b8a0908a9db249de3b4169692e1c7c19048a9bc41a4d3209cee7dbb758/psycopg_pool-3.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/bd/db/ea0203e495be491c85af87b66e37acfd3bf756fd985f87e46fc5e3bf022c/py4j-0.10.9.9-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f8/82/c40b68001dbec8a3faa4c08cd8c200798ac732d2854537c5449dc859f55a/pyarrow-23.0.1-cp310-cp310-manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/44/b5/a96872e5184f354da9c84ae119971a0a4c221fe9b27a4d94bd43f2596727/pyasn1-0.6.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/5f/e9/a09476d436d0ff1402ac3867d933c61805ec2326c6ea557aeeac3825604e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/a8/76/7727ef2ffa4b62fcab916686a68a0426b9b790139720e1934e8ba797e238/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/ca/cb/cdeaba62aa3c48f0d8834afb82b4a21463cd83df34fe01f9daa89a08ec6c/pydata_google_auth-1.9.1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/7c/4c/ad33b92b9864cbde84f259d5df035a6447f91891f5be77788e2a3892bce3/pymysql-1.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/80/28/2659c02301b9500751f8d42f9a6632e1508aa5120de5e43042b8b30f8d5d/pyopenssl-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/19/bf/58ee13add151469c25825b7125bbf62c3bdcec05eec4d458fcb5c5516066/pyspark-4.1.1.tar.gz + - pypi: https://files.pythonhosted.org/packages/51/ff/f6e8b8f39e08547faece4bd80f89d5a8de68a38b2d179cc1c4490ffa3286/pytest-7.4.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ee/82/62e2d63639ecb0fbe8a7ee59ef0bc69a4669ec50f6d3459f74ad4e4189a2/pytest_asyncio-0.23.8-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2c/60/423a63fb190a0483d049786a121bd3dfd7d93bb5ff1bb5b5cd13e5df99a7/pytest_benchmark-3.4.1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/25/b2/bdc663a5647ce2034f7e8420122af340df87c01ba97745fc753b8c917acb/pytest_env-1.1.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2d/a1/2f2c1c2353350d66c4d110d283e422e4943eb5ad10effa9357ba66f7b5b9/pytest_lazy_fixture-0.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/30/43/8deecb4c123bbc16d25666f1a6d241109c97aeb2e50806b952661c8e4b95/pytest_mock-1.10.4-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/98/adc368fe369465f291ab24e18b9900473786ed1afdf861ba90467eb0767e/pytest_ordering-0.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/df/97cc0b5b8b53da0e265acd0aeecfc0c279e950a029acd2d7b4e54b00b25f/pytest_timeout-1.4.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl + - 
pypi: https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2e/2e/dfbd2c9b3edf6a5a8cd9e66090221046839b488ea27824970426bf06b242/python_keycloak-4.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/20/2e/409703d645363352a20c944f5d119bdae3eb3034051a53724a7c5fee12b8/redis-4.6.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/61/b5/707f6cf0066a6412aacc11d17920ea2e19e5b2f04081c64526eb35b5c6e7/rpds_py-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6d/4f/d073e09df851cfa251ef7840007d04db3293a0482ce607d2b993926089be/s3transfer-0.13.1-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/e1/c6/76dc613121b793286a3f91621d7b75a2b493e0390ddca50f11993eadf192/setuptools-82.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/60/e6/30c4015e2712bf8bf83b54ddadeee0494b68ae6d0f6d49d9373f463305d4/snowflake_connector_python-4.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/5c/ad/6c4395649a212a6c603a72c5b9ab5dce3135a1546cfdffa3c427e71fd535/sqlalchemy-2.0.48-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3e/f8/6425ff800894784160290bcb9737878d910b6da6a08633bfe7f2ed8c9ae3/testcontainers-4.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b5/11/87d6d29fb5d237229d67973a6c9e06e048f01cf4994dee194ab0ea841814/tomlkit-0.14.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/8b/fa/4f4d3bfca9ef6dd17d69ed18b96564c53b32d3ce774132308d0bee849f10/types_pymysql-1.1.0.20251220-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f7/1f/4e5f8770c2cf4faa2c3ed3c19f9d4485ac9db0a6b029a7866921709bdc6c/uvicorn_worker-0.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b3/f6/21657bb3beb5f8c57ce8be3b83f653dd7933c2fd00545ed1b092d464799a/uvloop-0.22.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/d5/dc/1a680b7458ffa3b14bb64878112aefc8f2e4f73c5af763cbf0bd43100658/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: 
https://files.pythonhosted.org/packages/9d/2f/4b3ca7e106bc608744b1cdae041e005e446124bebb037b18799c2d356864/websockets-16.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/de/17/9f8f86755c191d6779d7ddead1a53c7a8aa18bccb7cea8e7e72dfa6a8a09/wrapt-1.17.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/50/12/95a1d33f04a79c402664070d43b8b9f72dc18914e135b345b611b0b1f8cc/yarl-1.23.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl + - pypi: ./ + osx-64: + - conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-h500dc9f_9.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.7.4-h991f03e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libffi-3.5.2-hd1f9c09_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/liblzma-5.8.2-h11316ed_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.51.2-hb99441e_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-h0622a9a_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.6.1-hb6871ef_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.10.19-h988dfef_3_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.3-h68b038d_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h7142dee_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - pypi: 
https://files.pythonhosted.org/packages/92/d9/25a697a959a7149c93efa4d849421aa5f22bcb82350ac89b4284b0b88aa8/aiobotocore-2.23.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/68/03/8fa90a7e6d11ff20a18837a8e2b5dd23db01aabc475aa9271c8ad33299f5/aiohttp-3.13.3-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/10/a1/510b0a7fadc6f43a6ce50152e69dbd86415240835868bb0bd9b5b88b1e06/aioitertools-0.13.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0a/08/a9bebdb2e0e602dde230bdde8021b29f71f7841bd54801bcfd514acb5dcf/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/c9/7f/09065fd9e27da0eda08b4d6897f1c13535066174cc023af248fc2a8d5e5a/asn1crypto-1.5.1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/80/9f608d13b4b3afcebd1dd13baf9551c95fc424d6390e4b1cfd7b1810cd06/async_property-0.2.2-py2.py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b3/72/6102c002b1b4c04f8c5ed870f102f576a5fbd6a41cdb3e49ed339aa95dfe/bigtree-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/43/8b/b2361188bd1e293eede1bc165e2461d390394f71ec0c8c21211c8dabf62c/boto3-1.38.27-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/a4/00/dd90b7a0255587ba1c9754d32a221adb4a9022f181df3eef401b0b9fadfc/botocore-1.38.46-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/93/d7/516d984057745a6cd96575eea814fe1edd6646ee6efd552fb7b0921dec83/cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/1f/b8/6d51fc1d52cbd52cd4ccedd5b5b2f0f6a11bbf6765c782298b0f3e808541/charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/30/d5/c8b32c047e2e81dd172138f772e81d852c51f0f2ad2ae8a24f1122e9e9a7/cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl + - pypi: 
https://files.pythonhosted.org/packages/e5/23/d39ccc4ed76222db31530b0a7d38876fdb7673e23f838e8d8f0ed4651a4f/dask-2026.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/01/80/171c7c5b78e60ab25d6f11e3d38675fe7ef843ddc79a7fd26916d3a6ca05/db_dtypes-1.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/d1/e73b6ad76f0b1fb7f23c35c6d95dbc506a9c8804f43dda8cb5b0fa6331fd/dill-0.3.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f9/0b/de6f54d4a8bedfe8645c41497f3c18d749f0bd3218170c667bf4b81d0cdd/filelock-3.25.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/a2/fb/c85f9fed3ea8fe8740e5b46a59cc141c23b842eca617da8876cfce5f760e/frozenlist-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/1d/a0/6aaea0c2fbea2f89bfd5db25fb1e3481896a423002ebe4e55288907a97a3/fsspec-2024.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/45/27/09c33d67f7e0dcf06d7ac17d196594e66989299374bfb0d4331d1038e76b/google_api_core-2.30.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/83/1d/d6466de3a5249d35e832a52834115ca9d1d0de6abc22065f049707516d47/google_auth-2.48.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/2f/56/909fd5632226d3fba31d7aeffd4754410735d49362f5809956fe3e9af344/google_auth_oauthlib-1.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7c/f5/081cf5b90adfe524ae0d671781b0d497a75a0f2601d075af518828e22d8f/google_cloud_bigquery-3.40.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1f/07/62dbe78ef773569be0a1d2c1b845e9214889b404e506126519b4d33ee999/google_cloud_bigquery_storage-2.36.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/62/69/03eed134d71f6117ffd9efac2d1033bb2fa2522e9e82545a0828061d32f4/google_cloud_bigtable-2.35.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/89/20/bfa472e327c8edee00f04beecc80baeddd2ab33ee0e86fd7654da49d45e9/google_cloud_core-2.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b7/88/348c09570a03886356c02337f06d69532fa17a66ad2a9dff584f7b60eb04/google_cloud_datastore-2.23.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d5/94/6db383d8ee1adf45dc6c73477152b82731fa4c4a46d9c1932cc8757e0fd4/google_cloud_storage-2.19.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f7/97/a5accde175dee985311d949cfcb1249dcbb290f5ec83c994ea733311948f/google_crc32c-1.8.0-cp310-cp310-macosx_12_0_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/1f/0b/93afde9cfe012260e9fe1522f35c9b72d6ee222f316586b1f23ecf44d518/google_resumable_media-2.8.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/38/3f/9859f655d11901e7b2996c6e3d33e0caa9a1d4572c3bc61ed0faa64b2f4c/greenlet-3.3.2-cp310-cp310-macosx_11_0_universal2.whl + - pypi: https://files.pythonhosted.org/packages/4a/bd/330a1bbdb1afe0b96311249e699b6dc9cfc17916394fd4503ac5aca2514b/grpc_google_iam_v1-0.14.3-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/c5/63/ee244c4b64f0e71cef5314f9fa1d120c072e33c2e4c545dc75bd1af2a5c5/grpcio-1.62.3-cp310-cp310-macosx_12_0_universal2.whl + - pypi: https://files.pythonhosted.org/packages/40/4c/ee3173906196b741ac6ba55a9788ba9ebf2cd05f91715a49b6c3bfbb9d73/grpcio_health_checking-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/82/54/acc6a6e684827b0f6bb4e2c27f3d7e25b71322c4078ef5b455c07c43260e/grpcio_reflection-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/90/40/972271de05f9315c0d69f9f7ebbcadd83bc85322f538637d11bb8c67803d/grpcio_status-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b2/23/55d40e1bf54c141f541ab31b4b4b0f58610440c8837b1406f3467c2b4853/grpcio_testing-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/da/73/4ad5b1f6a2e21cf1e85afdaad2b7b1a933985e2f5d679147a1953aaa192c/gunicorn-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d8/70/3f39ebfb3824578c34400df3b037b268abb5af0abaa789b430ffd17dd74e/hiredis-2.4.0-cp310-cp310-macosx_10_15_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/e5/c07e0bcf4ec8db8164e9f6738c048b2e66aabf30e7506f440c4cc6953f60/httptools-0.7.1-cp310-cp310-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/14/2f/967ba146e6d58cf6a652da73885f52fc68001525b4197effc174321d70b4/jmespath-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cd/58/4a1880ea64032185e9ae9f63940c9327c6952d5584ea544a8f66972f2fda/jwcrypto-1.5.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7c/5f/63f5fa395c7a8a93558c0904ba8f1c8d1b997ca6a3de61bc7659970d66bf/librt-0.8.1-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/db/bc/83e112abc66cd466c6b83f99118035867cecd41802f8d044638aa78a106e/locket-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/b6/9e/8d9f6b9746f8ede78b0a4e4b8908e4d80bd609fca0b3e3195a07dda29534/minio-7.2.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3b/88/eb9a55b3f3cf43a74d6bfa8db0e2e209f966007777a1dc897c52c008314c/mmh3-5.2.0-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: 
https://files.pythonhosted.org/packages/ef/04/9de3f8077852e3d438215c81e9b691244532d2e05b4270e89ce67b7d103c/multidict-6.7.1-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/2f/63/e499890d8e39b1ff2df4c0c6ce5d371b6844ee22b8250687a99fd2f657a8/mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/3e/ed6db5be21ce87955c0cbd3009f2803f59fa08df21b5df06862e2d8e2bdd/numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/de/1a/a373746fa6d0e116dd9e54371a7b54622c44d12296d5d0f3ad5e3ff33490/orjson-3.11.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl + - pypi: https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3d/f7/f425a00df4fcc22b292c6895c6831c0c8ae1d9fac1e024d16f98a9ce8749/pandas-2.3.3-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/26/b7/e805de93e3aa78813912b19edc9c8b037d6cd1c302ab339b895f305cf9a5/pandas_gbq-0.33.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/71/e7/40fb618334dcdf7c5a316c0e7343c5cd82d3d866edc100d98e29bc945ecd/partd-1.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/48/31/05e764397056194206169869b50cf2fee4dbbbc71b344705b9c0d878d4d8/platformdirs-4.9.2-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/a1/6b/db0d03d96726d995dc7171286c6ba9d8d14251f37433890f88368951a44e/propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/5d/79/ac273cbbf744691821a9cca88957257f41afe271637794975ca090b9588b/proto_plus-1.27.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/e7/36/5ee6e05c9bd427237b11b3937ad82bb8ad2752d72c6969314590dd0c2f6e/psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/18/f3/14a1370b1449ca875d5e353ef02cb9db6b70bd46ec361c236176837c0be1/psycopg-3.2.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d6/30/af3806081adc75b5a8addde839d4c6b171a8c5d0d07dd92de20ca4dd6717/psycopg_binary-3.2.5-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/e7/c3/26b8a0908a9db249de3b4169692e1c7c19048a9bc41a4d3209cee7dbb758/psycopg_pool-3.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/bd/db/ea0203e495be491c85af87b66e37acfd3bf756fd985f87e46fc5e3bf022c/py4j-0.10.9.9-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/bc/8e/4be5617b4aaae0287f621ad31c6036e5f63118cfca0dc57d42121ff49b51/pyarrow-23.0.1-cp310-cp310-macosx_12_0_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/44/b5/a96872e5184f354da9c84ae119971a0a4c221fe9b27a4d94bd43f2596727/pyasn1-0.6.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6e/4e/a066527e079fc5002390c8acdd3aca431e6ea0a50ffd7201551175b47323/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c6/90/32c9941e728d564b411d574d8ee0cf09b12ec978cb22b294995bae5549a5/pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/ca/cb/cdeaba62aa3c48f0d8834afb82b4a21463cd83df34fe01f9daa89a08ec6c/pydata_google_auth-1.9.1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7c/4c/ad33b92b9864cbde84f259d5df035a6447f91891f5be77788e2a3892bce3/pymysql-1.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/80/28/2659c02301b9500751f8d42f9a6632e1508aa5120de5e43042b8b30f8d5d/pyopenssl-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/19/bf/58ee13add151469c25825b7125bbf62c3bdcec05eec4d458fcb5c5516066/pyspark-4.1.1.tar.gz + 
- pypi: https://files.pythonhosted.org/packages/51/ff/f6e8b8f39e08547faece4bd80f89d5a8de68a38b2d179cc1c4490ffa3286/pytest-7.4.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ee/82/62e2d63639ecb0fbe8a7ee59ef0bc69a4669ec50f6d3459f74ad4e4189a2/pytest_asyncio-0.23.8-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2c/60/423a63fb190a0483d049786a121bd3dfd7d93bb5ff1bb5b5cd13e5df99a7/pytest_benchmark-3.4.1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/25/b2/bdc663a5647ce2034f7e8420122af340df87c01ba97745fc753b8c917acb/pytest_env-1.1.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2d/a1/2f2c1c2353350d66c4d110d283e422e4943eb5ad10effa9357ba66f7b5b9/pytest_lazy_fixture-0.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/30/43/8deecb4c123bbc16d25666f1a6d241109c97aeb2e50806b952661c8e4b95/pytest_mock-1.10.4-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/98/adc368fe369465f291ab24e18b9900473786ed1afdf861ba90467eb0767e/pytest_ordering-0.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/df/97cc0b5b8b53da0e265acd0aeecfc0c279e950a029acd2d7b4e54b00b25f/pytest_timeout-1.4.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2e/2e/dfbd2c9b3edf6a5a8cd9e66090221046839b488ea27824970426bf06b242/python_keycloak-4.2.2-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/20/2e/409703d645363352a20c944f5d119bdae3eb3034051a53724a7c5fee12b8/redis-4.6.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/06/0c/0c411a0ec64ccb6d104dcabe0e713e05e153a9a2c3c2bd2b32ce412166fe/rpds_py-0.30.0-cp310-cp310-macosx_10_12_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6d/4f/d073e09df851cfa251ef7840007d04db3293a0482ce607d2b993926089be/s3transfer-0.13.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e1/c6/76dc613121b793286a3f91621d7b75a2b493e0390ddca50f11993eadf192/setuptools-82.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/fd/80/3a7e36a9e53beeb27c0599d2703f33bb812be931b469b154b08df0eeeaf5/snowflake_connector_python-4.0.0-cp310-cp310-macosx_11_0_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/2c/9664130905f03db57961b8980b05cab624afd114bf2be2576628a9f22da4/sqlalchemy-2.0.48-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3e/f8/6425ff800894784160290bcb9737878d910b6da6a08633bfe7f2ed8c9ae3/testcontainers-4.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b5/11/87d6d29fb5d237229d67973a6c9e06e048f01cf4994dee194ab0ea841814/tomlkit-0.14.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/8b/fa/4f4d3bfca9ef6dd17d69ed18b96564c53b32d3ce774132308d0bee849f10/types_pymysql-1.1.0.20251220-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f7/1f/4e5f8770c2cf4faa2c3ed3c19f9d4485ac9db0a6b029a7866921709bdc6c/uvicorn_worker-0.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ba/ae/6f6f9af7f590b319c94532b9567409ba11f4fa71af1148cab1bf48a07048/uvloop-0.22.1-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/a7/1a/206e8cf2dd86fddf939165a57b4df61607a1e0add2785f170a3f616b7d9f/watchfiles-1.1.1-cp310-cp310-macosx_10_12_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/19/0f/22ef6107ee52ab7f0b710d55d36f5a5d3ef19e8a205541a6d7ffa7994e5a/websockets-16.0-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/45/69/f3c47642b79485a30a59c63f6d739ed779fb4cc8323205d047d741d55220/wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/7a/35/5a553687c5793df5429cd1db45909d4f3af7eee90014888c208d086a44f0/yarl-1.23.0-cp310-cp310-macosx_10_9_x86_64.whl + - pypi: 
https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl + - pypi: ./ + osx-arm64: + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_9.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/icu-78.2-hef89b57_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.7.4-hf6b4638_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.5.2-hcf2aa1b_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblzma-5.8.2-h8088a28_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.51.2-h1ae2325_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h5e97a16_3.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.6.1-hd24854e_1.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.10.19-hcd7f573_3_cpython.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.3-h46df422_0.conda + - conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h010d191_3.conda + - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda + - pypi: https://files.pythonhosted.org/packages/92/d9/25a697a959a7149c93efa4d849421aa5f22bcb82350ac89b4284b0b88aa8/aiobotocore-2.23.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d2/23/b81f744d402510a8366b74eb420fc0cc1170d0c43daca12d10814df85f10/aiohttp-3.13.3-cp310-cp310-macosx_11_0_arm64.whl + - pypi: 
https://files.pythonhosted.org/packages/10/a1/510b0a7fadc6f43a6ce50152e69dbd86415240835868bb0bd9b5b88b1e06/aioitertools-0.13.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b6/02/d297943bcacf05e4f2a94ab6f462831dc20158614e5d067c35d4e63b9acb/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/c9/7f/09065fd9e27da0eda08b4d6897f1c13535066174cc023af248fc2a8d5e5a/asn1crypto-1.5.1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/80/9f608d13b4b3afcebd1dd13baf9551c95fc424d6390e4b1cfd7b1810cd06/async_property-0.2.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b3/72/6102c002b1b4c04f8c5ed870f102f576a5fbd6a41cdb3e49ed339aa95dfe/bigtree-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/43/8b/b2361188bd1e293eede1bc165e2461d390394f71ec0c8c21211c8dabf62c/boto3-1.38.27-py3-none-any.whl + - 
pypi: https://files.pythonhosted.org/packages/a4/00/dd90b7a0255587ba1c9754d32a221adb4a9022f181df3eef401b0b9fadfc/botocore-1.38.46-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9e/84/ad6a0b408daa859246f57c03efd28e5dd1b33c21737c2db84cae8c237aa5/cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/1f/b8/6d51fc1d52cbd52cd4ccedd5b5b2f0f6a11bbf6765c782298b0f3e808541/charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/30/d5/c8b32c047e2e81dd172138f772e81d852c51f0f2ad2ae8a24f1122e9e9a7/cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/e5/23/d39ccc4ed76222db31530b0a7d38876fdb7673e23f838e8d8f0ed4651a4f/dask-2026.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/01/80/171c7c5b78e60ab25d6f11e3d38675fe7ef843ddc79a7fd26916d3a6ca05/db_dtypes-1.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/d1/e73b6ad76f0b1fb7f23c35c6d95dbc506a9c8804f43dda8cb5b0fa6331fd/dill-0.3.9-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f9/0b/de6f54d4a8bedfe8645c41497f3c18d749f0bd3218170c667bf4b81d0cdd/filelock-3.25.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/63/70/26ca3f06aace16f2352796b08704338d74b6d1a24ca38f2771afbb7ed915/frozenlist-1.8.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/1d/a0/6aaea0c2fbea2f89bfd5db25fb1e3481896a423002ebe4e55288907a97a3/fsspec-2024.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/45/27/09c33d67f7e0dcf06d7ac17d196594e66989299374bfb0d4331d1038e76b/google_api_core-2.30.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/83/1d/d6466de3a5249d35e832a52834115ca9d1d0de6abc22065f049707516d47/google_auth-2.48.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2f/56/909fd5632226d3fba31d7aeffd4754410735d49362f5809956fe3e9af344/google_auth_oauthlib-1.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7c/f5/081cf5b90adfe524ae0d671781b0d497a75a0f2601d075af518828e22d8f/google_cloud_bigquery-3.40.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1f/07/62dbe78ef773569be0a1d2c1b845e9214889b404e506126519b4d33ee999/google_cloud_bigquery_storage-2.36.2-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/62/69/03eed134d71f6117ffd9efac2d1033bb2fa2522e9e82545a0828061d32f4/google_cloud_bigtable-2.35.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/89/20/bfa472e327c8edee00f04beecc80baeddd2ab33ee0e86fd7654da49d45e9/google_cloud_core-2.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b7/88/348c09570a03886356c02337f06d69532fa17a66ad2a9dff584f7b60eb04/google_cloud_datastore-2.23.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d5/94/6db383d8ee1adf45dc6c73477152b82731fa4c4a46d9c1932cc8757e0fd4/google_cloud_storage-2.19.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/95/ac/6f7bc93886a823ab545948c2dd48143027b2355ad1944c7cf852b338dc91/google_crc32c-1.8.0-cp310-cp310-macosx_12_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/1f/0b/93afde9cfe012260e9fe1522f35c9b72d6ee222f316586b1f23ecf44d518/google_resumable_media-2.8.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/4a/bd/330a1bbdb1afe0b96311249e699b6dc9cfc17916394fd4503ac5aca2514b/grpc_google_iam_v1-0.14.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c5/63/ee244c4b64f0e71cef5314f9fa1d120c072e33c2e4c545dc75bd1af2a5c5/grpcio-1.62.3-cp310-cp310-macosx_12_0_universal2.whl + - pypi: https://files.pythonhosted.org/packages/40/4c/ee3173906196b741ac6ba55a9788ba9ebf2cd05f91715a49b6c3bfbb9d73/grpcio_health_checking-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/82/54/acc6a6e684827b0f6bb4e2c27f3d7e25b71322c4078ef5b455c07c43260e/grpcio_reflection-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/90/40/972271de05f9315c0d69f9f7ebbcadd83bc85322f538637d11bb8c67803d/grpcio_status-1.62.3-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/b2/23/55d40e1bf54c141f541ab31b4b4b0f58610440c8837b1406f3467c2b4853/grpcio_testing-1.62.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/da/73/4ad5b1f6a2e21cf1e85afdaad2b7b1a933985e2f5d679147a1953aaa192c/gunicorn-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ed/b7/26a56a3b991abe7fcf7bcfa8e0a08de3c3766c6caecb1ba46239342792ff/hiredis-2.4.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7e/4f/35e3a63f863a659f92ffd92bef131f3e81cf849af26e6435b49bd9f6f751/httptools-0.7.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/14/2f/967ba146e6d58cf6a652da73885f52fc68001525b4197effc174321d70b4/jmespath-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/cd/58/4a1880ea64032185e9ae9f63940c9327c6952d5584ea544a8f66972f2fda/jwcrypto-1.5.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ff/e0/0472cf37267b5920eff2f292ccfaede1886288ce35b7f3203d8de00abfe6/librt-0.8.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/db/bc/83e112abc66cd466c6b83f99118035867cecd41802f8d044638aa78a106e/locket-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/98/1b/fbd8eed11021cabd9226c37342fa6ca4e8a98d8188a8d9b66740494960e4/markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/b6/9e/8d9f6b9746f8ede78b0a4e4b8908e4d80bd609fca0b3e3195a07dda29534/minio-7.2.11-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/d1/4c/8e4b3878bf8435c697d7ce99940a3784eb864521768069feaccaff884a17/mmh3-5.2.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/31/5c/08c7f7fe311f32e83f7621cd3f99d805f45519cd06fafb247628b861da7d/multidict-6.7.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/72/4b/095626fc136fba96effc4fd4a82b41d688ab92124f8c4f7564bffe5cf1b0/mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/22/c2/4b9221495b2a132cc9d2eb862e21d42a009f5a60e45fc44b00118c174bff/numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/de/1a/a373746fa6d0e116dd9e54371a7b54622c44d12296d5d0f3ad5e3ff33490/orjson-3.11.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl + - pypi: https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/13/4f/66d99628ff8ce7857aca52fed8f0066ce209f96be2fede6cef9f84e8d04f/pandas-2.3.3-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/26/b7/e805de93e3aa78813912b19edc9c8b037d6cd1c302ab339b895f305cf9a5/pandas_gbq-0.33.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/71/e7/40fb618334dcdf7c5a316c0e7343c5cd82d3d866edc100d98e29bc945ecd/partd-1.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/48/31/05e764397056194206169869b50cf2fee4dbbbc71b344705b9c0d878d4d8/platformdirs-4.9.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e4/c3/82728404aea669e1600f304f2609cde9e665c18df5a11cdd57ed73c1dceb/propcache-0.4.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/5d/79/ac273cbbf744691821a9cca88957257f41afe271637794975ca090b9588b/proto_plus-1.27.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl + - pypi: 
https://files.pythonhosted.org/packages/80/c4/f5af4c1ca8c1eeb2e92ccca14ce8effdeec651d5ab6053c589b074eda6e1/psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/18/f3/14a1370b1449ca875d5e353ef02cb9db6b70bd46ec361c236176837c0be1/psycopg-3.2.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/31/77/31968655db2efe83c519e6296ff3a85a0c9e50432e0c11c8ffae1b404870/psycopg_binary-3.2.5-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/e7/c3/26b8a0908a9db249de3b4169692e1c7c19048a9bc41a4d3209cee7dbb758/psycopg_pool-3.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/bd/db/ea0203e495be491c85af87b66e37acfd3bf756fd985f87e46fc5e3bf022c/py4j-0.10.9.9-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/bc/a8/24e5dc6855f50a62936ceb004e6e9645e4219a8065f304145d7fb8a79d5d/pyarrow-23.0.1-cp310-cp310-macosx_12_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/44/b5/a96872e5184f354da9c84ae119971a0a4c221fe9b27a4d94bd43f2596727/pyasn1-0.6.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/db/6c/a1f71542c969912bb0e106f64f60a56cc1f0fabecf9396f45accbe63fa68/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_universal2.whl + - pypi: 
https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fb/a8/61c96a77fe28993d9a6fb0f4127e05430a267b235a124545d79fea46dd65/pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/ca/cb/cdeaba62aa3c48f0d8834afb82b4a21463cd83df34fe01f9daa89a08ec6c/pydata_google_auth-1.9.1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/7c/4c/ad33b92b9864cbde84f259d5df035a6447f91891f5be77788e2a3892bce3/pymysql-1.1.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/80/28/2659c02301b9500751f8d42f9a6632e1508aa5120de5e43042b8b30f8d5d/pyopenssl-25.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/19/bf/58ee13add151469c25825b7125bbf62c3bdcec05eec4d458fcb5c5516066/pyspark-4.1.1.tar.gz + - pypi: https://files.pythonhosted.org/packages/51/ff/f6e8b8f39e08547faece4bd80f89d5a8de68a38b2d179cc1c4490ffa3286/pytest-7.4.4-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ee/82/62e2d63639ecb0fbe8a7ee59ef0bc69a4669ec50f6d3459f74ad4e4189a2/pytest_asyncio-0.23.8-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2c/60/423a63fb190a0483d049786a121bd3dfd7d93bb5ff1bb5b5cd13e5df99a7/pytest_benchmark-3.4.1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/25/b2/bdc663a5647ce2034f7e8420122af340df87c01ba97745fc753b8c917acb/pytest_env-1.1.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2d/a1/2f2c1c2353350d66c4d110d283e422e4943eb5ad10effa9357ba66f7b5b9/pytest_lazy_fixture-0.6.3-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/30/43/8deecb4c123bbc16d25666f1a6d241109c97aeb2e50806b952661c8e4b95/pytest_mock-1.10.4-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/98/adc368fe369465f291ab24e18b9900473786ed1afdf861ba90467eb0767e/pytest_ordering-0.6-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/46/df/97cc0b5b8b53da0e265acd0aeecfc0c279e950a029acd2d7b4e54b00b25f/pytest_timeout-1.4.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2e/2e/dfbd2c9b3edf6a5a8cd9e66090221046839b488ea27824970426bf06b242/python_keycloak-4.2.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/20/2e/409703d645363352a20c944f5d119bdae3eb3034051a53724a7c5fee12b8/redis-4.6.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/19/6a/4ba3d0fb7297ebae71171822554abe48d7cab29c28b8f9f2c04b79988c05/rpds_py-0.30.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/6d/4f/d073e09df851cfa251ef7840007d04db3293a0482ce607d2b993926089be/s3transfer-0.13.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e1/c6/76dc613121b793286a3f91621d7b75a2b493e0390ddca50f11993eadf192/setuptools-82.0.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/e4/75/f845ca5079a6b911023fa945dbf1bac0ed1c2f5967108b14440c740cb410/snowflake_connector_python-4.0.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/9a/67/1235676e93dd3b742a4a8eddfae49eea46c85e3eed29f0da446a8dd57500/sqlalchemy-2.0.48-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/3e/f8/6425ff800894784160290bcb9737878d910b6da6a08633bfe7f2ed8c9ae3/testcontainers-4.9.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/b5/11/87d6d29fb5d237229d67973a6c9e06e048f01cf4994dee194ab0ea841814/tomlkit-0.14.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/8b/fa/4f4d3bfca9ef6dd17d69ed18b96564c53b32d3ce774132308d0bee849f10/types_pymysql-1.1.0.20251220-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl + - pypi: 
https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/f7/1f/4e5f8770c2cf4faa2c3ed3c19f9d4485ac9db0a6b029a7866921709bdc6c/uvicorn_worker-0.3.0-py3-none-any.whl + - pypi: https://files.pythonhosted.org/packages/eb/14/ecceb239b65adaaf7fde510aa8bd534075695d1e5f8dadfa32b5723d9cfb/uvloop-0.22.1-cp310-cp310-macosx_10_9_universal2.whl + - pypi: https://files.pythonhosted.org/packages/b3/0f/abaf5262b9c496b5dad4ed3c0e799cbecb1f8ea512ecb6ddd46646a9fca3/watchfiles-1.1.1-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/10/40/904a4cb30d9b61c0e278899bf36342e9b0208eb3c470324a9ecbaac2a30f/websockets-16.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/d1/71/e7e7f5670c1eafd9e990438e69d8fb46fa91a50785332e06b560c869454f/wrapt-1.17.3-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/68/2e/c5a2234238f8ce37a8312b52801ee74117f576b1539eec8404a480434acc/yarl-1.23.0-cp310-cp310-macosx_11_0_arm64.whl + - pypi: https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl + - pypi: ./ +packages: +- conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda + build_number: 20 + sha256: 1dd3fffd892081df9726d7eb7e0dea6198962ba775bd88842135a4ddb4deb3c9 + md5: a9f577daf3de00bca7c3c76c0ecbd1de + depends: + - __glibc >=2.17,<3.0.a0 + - libgomp >=7.5.0 + constrains: + - openmp_impl <0.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 28948 + timestamp: 1770939786096 +- pypi: https://files.pythonhosted.org/packages/92/d9/25a697a959a7149c93efa4d849421aa5f22bcb82350ac89b4284b0b88aa8/aiobotocore-2.23.1-py3-none-any.whl + name: aiobotocore + version: 2.23.1 + sha256: d81c54d2eae2406ea9a473fea518fed580cf37bc4fc51ce43ba81546e5305114 + requires_dist: + - 
aiohttp>=3.9.2,<4.0.0 + - aioitertools>=0.5.1,<1.0.0 + - botocore>=1.38.40,<1.38.47 + - python-dateutil>=2.1,<3.0.0 + - jmespath>=0.7.1,<2.0.0 + - multidict>=6.0.0,<7.0.0 + - wrapt>=1.10.10,<2.0.0 + - awscli>=1.40.39,<1.40.46 ; extra == 'awscli' + - boto3>=1.38.40,<1.38.47 ; extra == 'boto3' + - httpx>=0.25.1,<0.29 ; extra == 'httpx' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl + name: aiohappyeyeballs + version: 2.6.1 + sha256: f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/2c/c0/cfcc3d2e11b477f86e1af2863f3858c8850d751ce8dc39c4058a072c9e54/aiohttp-3.13.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: aiohttp + version: 3.13.3 + sha256: de2c184bb1fe2cbd2cefba613e9db29a5ab559323f994b6737e370d3da0ac455 + requires_dist: + - aiohappyeyeballs>=2.5.0 + - aiosignal>=1.4.0 + - async-timeout>=4.0,<6.0 ; python_full_version < '3.11' + - attrs>=17.3.0 + - frozenlist>=1.1.1 + - multidict>=4.5,<7.0 + - propcache>=0.2.0 + - yarl>=1.17.0,<2.0 + - aiodns>=3.3.0 ; extra == 'speedups' + - brotli>=1.2 ; platform_python_implementation == 'CPython' and extra == 'speedups' + - brotlicffi>=1.2 ; platform_python_implementation != 'CPython' and extra == 'speedups' + - backports-zstd ; python_full_version < '3.14' and platform_python_implementation == 'CPython' and extra == 'speedups' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/68/03/8fa90a7e6d11ff20a18837a8e2b5dd23db01aabc475aa9271c8ad33299f5/aiohttp-3.13.3-cp310-cp310-macosx_10_9_x86_64.whl + name: aiohttp + version: 3.13.3 + sha256: 147e422fd1223005c22b4fe080f5d93ced44460f5f9c105406b753612b587821 + requires_dist: + - aiohappyeyeballs>=2.5.0 + - aiosignal>=1.4.0 + - async-timeout>=4.0,<6.0 ; python_full_version < '3.11' + - attrs>=17.3.0 
+ - frozenlist>=1.1.1 + - multidict>=4.5,<7.0 + - propcache>=0.2.0 + - yarl>=1.17.0,<2.0 + - aiodns>=3.3.0 ; extra == 'speedups' + - brotli>=1.2 ; platform_python_implementation == 'CPython' and extra == 'speedups' + - brotlicffi>=1.2 ; platform_python_implementation != 'CPython' and extra == 'speedups' + - backports-zstd ; python_full_version < '3.14' and platform_python_implementation == 'CPython' and extra == 'speedups' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/d2/23/b81f744d402510a8366b74eb420fc0cc1170d0c43daca12d10814df85f10/aiohttp-3.13.3-cp310-cp310-macosx_11_0_arm64.whl + name: aiohttp + version: 3.13.3 + sha256: 859bd3f2156e81dd01432f5849fc73e2243d4a487c4fd26609b1299534ee1845 + requires_dist: + - aiohappyeyeballs>=2.5.0 + - aiosignal>=1.4.0 + - async-timeout>=4.0,<6.0 ; python_full_version < '3.11' + - attrs>=17.3.0 + - frozenlist>=1.1.1 + - multidict>=4.5,<7.0 + - propcache>=0.2.0 + - yarl>=1.17.0,<2.0 + - aiodns>=3.3.0 ; extra == 'speedups' + - brotli>=1.2 ; platform_python_implementation == 'CPython' and extra == 'speedups' + - brotlicffi>=1.2 ; platform_python_implementation != 'CPython' and extra == 'speedups' + - backports-zstd ; python_full_version < '3.14' and platform_python_implementation == 'CPython' and extra == 'speedups' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/10/a1/510b0a7fadc6f43a6ce50152e69dbd86415240835868bb0bd9b5b88b1e06/aioitertools-0.13.0-py3-none-any.whl + name: aioitertools + version: 0.13.0 + sha256: 0be0292b856f08dfac90e31f4739432f4cb6d7520ab9eb73e143f4f2fa5259be + requires_dist: + - typing-extensions>=4.0 ; python_full_version < '3.10' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl + name: aiosignal + version: 1.4.0 + sha256: 053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e + requires_dist: + - frozenlist>=1.1.0 + - 
typing-extensions>=4.2 ; python_full_version < '3.13' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl + name: annotated-doc + version: 0.0.4 + sha256: 571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320 + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl + name: annotated-types + version: 0.7.0 + sha256: 1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 + requires_dist: + - typing-extensions>=4.0.0 ; python_full_version < '3.9' + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl + name: anyio + version: 4.12.1 + sha256: d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c + requires_dist: + - exceptiongroup>=1.0.2 ; python_full_version < '3.11' + - idna>=2.8 + - typing-extensions>=4.5 ; python_full_version < '3.13' + - trio>=0.32.0 ; python_full_version >= '3.10' and extra == 'trio' + - trio>=0.31.0 ; python_full_version < '3.10' and extra == 'trio' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl + name: argon2-cffi + version: 25.1.0 + sha256: fdc8b074db390fccb6eb4a3604ae7231f219aa669a2652e0f20e16ba513d5741 + requires_dist: + - argon2-cffi-bindings + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/09/52/94108adfdd6e2ddf58be64f959a0b9c7d4ef2fa71086c38356d22dc501ea/argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl + name: argon2-cffi-bindings + version: 25.1.0 + sha256: d3e924cfc503018a714f94a49a149fdc0b644eaead5d1f089330399134fa028a + requires_dist: + - 
cffi>=1.0.1 ; python_full_version < '3.14' + - cffi>=2.0.0b1 ; python_full_version >= '3.14' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/0a/08/a9bebdb2e0e602dde230bdde8021b29f71f7841bd54801bcfd514acb5dcf/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_x86_64.whl + name: argon2-cffi-bindings + version: 25.1.0 + sha256: 2630b6240b495dfab90aebe159ff784d08ea999aa4b0d17efa734055a07d2f44 + requires_dist: + - cffi>=1.0.1 ; python_full_version < '3.14' + - cffi>=2.0.0b1 ; python_full_version >= '3.14' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/b6/02/d297943bcacf05e4f2a94ab6f462831dc20158614e5d067c35d4e63b9acb/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_11_0_arm64.whl + name: argon2-cffi-bindings + version: 25.1.0 + sha256: 7aef0c91e2c0fbca6fc68e7555aa60ef7008a739cbe045541e438373bc54d2b0 + requires_dist: + - cffi>=1.0.1 ; python_full_version < '3.14' + - cffi>=2.0.0b1 ; python_full_version >= '3.14' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/c9/7f/09065fd9e27da0eda08b4d6897f1c13535066174cc023af248fc2a8d5e5a/asn1crypto-1.5.1-py2.py3-none-any.whl + name: asn1crypto + version: 1.5.1 + sha256: db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67 +- pypi: https://files.pythonhosted.org/packages/c7/80/9f608d13b4b3afcebd1dd13baf9551c95fc424d6390e4b1cfd7b1810cd06/async_property-0.2.2-py2.py3-none-any.whl + name: async-property + version: 0.2.2 + sha256: 8924d792b5843994537f8ed411165700b27b2bd966cefc4daeefc1253442a9d7 +- pypi: https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl + name: async-timeout + version: 5.0.1 + sha256: 39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/96/c0/271f3e1e3502a8decb8ee5c680dbed2d8dc2cd504f5e20f7ed491d5f37e1/atpublic-7.0.0-py3-none-any.whl + name: atpublic + 
version: 7.0.0 + sha256: 6702bd9e7245eb4e8220a3e222afcef7f87412154732271ee7deee4433b72b4b + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl + name: attrs + version: 25.4.0 + sha256: adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/b3/72/6102c002b1b4c04f8c5ed870f102f576a5fbd6a41cdb3e49ed339aa95dfe/bigtree-1.3.1-py3-none-any.whl + name: bigtree + version: 1.3.1 + sha256: c8b766b00188c532d3499bfd9e9666b357428db507fc701f088031a0d5c614d5 + requires_dist: + - lark ; extra == 'all' + - matplotlib ; extra == 'all' + - pandas ; extra == 'all' + - pillow ; extra == 'all' + - polars ; extra == 'all' + - pydot ; extra == 'all' + - pyvis ; extra == 'all' + - rich ; extra == 'all' + - pillow ; extra == 'image' + - pydot ; extra == 'image' + - matplotlib ; extra == 'matplotlib' + - pandas ; extra == 'pandas' + - polars ; extra == 'polars' + - lark ; extra == 'query' + - rich ; extra == 'rich' + - pyvis ; extra == 'vis' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/43/8b/b2361188bd1e293eede1bc165e2461d390394f71ec0c8c21211c8dabf62c/boto3-1.38.27-py3-none-any.whl + name: boto3 + version: 1.38.27 + sha256: 95f5fe688795303a8a15e8b7e7f255cadab35eae459d00cc281a4fd77252ea80 + requires_dist: + - botocore>=1.38.27,<1.39.0 + - jmespath>=0.7.1,<2.0.0 + - s3transfer>=0.13.0,<0.14.0 + - botocore[crt]>=1.21.0,<2.0a0 ; extra == 'crt' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/a4/00/dd90b7a0255587ba1c9754d32a221adb4a9022f181df3eef401b0b9fadfc/botocore-1.38.46-py3-none-any.whl + name: botocore + version: 1.38.46 + sha256: 89ca782ffbf2e8769ca9c89234cfa5ca577f1987d07d913ee3c68c4776b1eb5b + requires_dist: + - jmespath>=0.7.1,<2.0.0 + - python-dateutil>=2.1,<3.0.0 + - urllib3>=1.25.4,<1.27 ; python_full_version < '3.10' + - 
urllib3>=1.25.4,!=2.2.0,<3 ; python_full_version >= '3.10' + - awscrt==0.23.8 ; extra == 'crt' + requires_python: '>=3.9' +- conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda + sha256: 0b75d45f0bba3e95dc693336fa51f40ea28c980131fec438afb7ce6118ed05f6 + md5: d2ffd7602c02f2b316fd921d39876885 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: bzip2-1.0.6 + license_family: BSD + purls: [] + size: 260182 + timestamp: 1771350215188 +- conda: https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-h500dc9f_9.conda + sha256: 9f242f13537ef1ce195f93f0cc162965d6cc79da578568d6d8e50f70dd025c42 + md5: 4173ac3b19ec0a4f400b4f782910368b + depends: + - __osx >=10.13 + license: bzip2-1.0.6 + license_family: BSD + purls: [] + size: 133427 + timestamp: 1771350680709 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/bzip2-1.0.8-hd037594_9.conda + sha256: 540fe54be35fac0c17feefbdc3e29725cce05d7367ffedfaaa1bdda234b019df + md5: 620b85a3f45526a8bc4d23fd78fc22f0 + depends: + - __osx >=11.0 + license: bzip2-1.0.6 + license_family: BSD + purls: [] + size: 124834 + timestamp: 1771350416561 +- conda: https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda + sha256: 67cc7101b36421c5913a1687ef1b99f85b5d6868da3abbf6ec1a4181e79782fc + md5: 4492fd26db29495f0ba23f146cd5638d + depends: + - __unix + license: ISC + purls: [] + size: 147413 + timestamp: 1772006283803 +- pypi: https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl + name: certifi + version: 2026.2.25 + sha256: 027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/93/d7/516d984057745a6cd96575eea814fe1edd6646ee6efd552fb7b0921dec83/cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl + name: cffi + version: 2.0.0 + sha256: 0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44 + 
requires_dist: + - pycparser ; implementation_name != 'PyPy' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl + name: cffi + version: 2.0.0 + sha256: fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453 + requires_dist: + - pycparser ; implementation_name != 'PyPy' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/9e/84/ad6a0b408daa859246f57c03efd28e5dd1b33c21737c2db84cae8c237aa5/cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl + name: cffi + version: 2.0.0 + sha256: f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49 + requires_dist: + - pycparser ; implementation_name != 'PyPy' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/1f/b8/6d51fc1d52cbd52cd4ccedd5b5b2f0f6a11bbf6765c782298b0f3e808541/charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl + name: charset-normalizer + version: 3.4.4 + sha256: e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl + name: charset-normalizer + version: 3.4.4 + sha256: da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/36/3b/60cbd1f8e93aa25d1c669c649b7a655b0b5fb4c571858910ea9332678558/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: charset-normalizer + version: 3.4.4 + sha256: 9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313 + requires_python: '>=3.7' +- pypi: 
https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: charset-normalizer + version: 3.4.4 + sha256: ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838 + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl + name: click + version: 8.3.1 + sha256: 981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6 + requires_dist: + - colorama ; sys_platform == 'win32' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl + name: cloudpickle + version: 3.1.2 + sha256: 9acb47f6afd73f60dc1df93bb801b472f05ff42fa6c84167d25cb206be1fbf4a + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl + name: colorama + version: 0.4.6 + sha256: 4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*' +- pypi: https://files.pythonhosted.org/packages/30/d5/c8b32c047e2e81dd172138f772e81d852c51f0f2ad2ae8a24f1122e9e9a7/cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl + name: cryptography + version: 43.0.3 + sha256: 8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984 + requires_dist: + - cffi>=1.12 ; platform_python_implementation != 'PyPy' + - bcrypt>=3.1.5 ; extra == 'ssh' + - nox ; extra == 'nox' + - cryptography-vectors==43.0.3 ; extra == 'test' + - pytest>=6.2.0 ; extra == 'test' + - pytest-benchmark ; extra == 'test' + - pytest-cov ; extra == 'test' + - pytest-xdist ; extra == 'test' + - pretend ; extra == 'test' + - certifi ; extra == 
'test' + - pytest-randomly ; extra == 'test-randomorder' + - sphinx>=5.3.0 ; extra == 'docs' + - sphinx-rtd-theme>=1.1.1 ; extra == 'docs' + - pyenchant>=1.6.11 ; extra == 'docstest' + - readme-renderer ; extra == 'docstest' + - sphinxcontrib-spelling>=4.0.1 ; extra == 'docstest' + - build ; extra == 'sdist' + - ruff ; extra == 'pep8test' + - mypy ; extra == 'pep8test' + - check-sdist ; extra == 'pep8test' + - click ; extra == 'pep8test' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/ac/25/e715fa0bc24ac2114ed69da33adf451a38abb6f3f24ec207908112e9ba53/cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl + name: cryptography + version: 43.0.3 + sha256: c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405 + requires_dist: + - cffi>=1.12 ; platform_python_implementation != 'PyPy' + - bcrypt>=3.1.5 ; extra == 'ssh' + - nox ; extra == 'nox' + - cryptography-vectors==43.0.3 ; extra == 'test' + - pytest>=6.2.0 ; extra == 'test' + - pytest-benchmark ; extra == 'test' + - pytest-cov ; extra == 'test' + - pytest-xdist ; extra == 'test' + - pretend ; extra == 'test' + - certifi ; extra == 'test' + - pytest-randomly ; extra == 'test-randomorder' + - sphinx>=5.3.0 ; extra == 'docs' + - sphinx-rtd-theme>=1.1.1 ; extra == 'docs' + - pyenchant>=1.6.11 ; extra == 'docstest' + - readme-renderer ; extra == 'docstest' + - sphinxcontrib-spelling>=4.0.1 ; extra == 'docstest' + - build ; extra == 'sdist' + - ruff ; extra == 'pep8test' + - mypy ; extra == 'pep8test' + - check-sdist ; extra == 'pep8test' + - click ; extra == 'pep8test' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/e5/23/d39ccc4ed76222db31530b0a7d38876fdb7673e23f838e8d8f0ed4651a4f/dask-2026.1.2-py3-none-any.whl + name: dask + version: 2026.1.2 + sha256: 46a0cf3b8d87f78a3d2e6b145aea4418a6d6d606fe6a16c79bd8ca2bb862bc91 + requires_dist: + - click>=8.1 + - cloudpickle>=3.0.0 + - fsspec>=2021.9.0 + - packaging>=20.0 + - partd>=1.4.0 + - pyyaml>=5.3.1 + - 
toolz>=0.12.0 + - importlib-metadata>=4.13.0 ; python_full_version < '3.12' + - numpy>=1.24 ; extra == 'array' + - dask[array] ; extra == 'dataframe' + - pandas>=2.0 ; extra == 'dataframe' + - pyarrow>=16.0 ; extra == 'dataframe' + - distributed>=2026.1.2,<2026.1.3 ; extra == 'distributed' + - bokeh>=3.1.0 ; extra == 'diagnostics' + - jinja2>=2.10.3 ; extra == 'diagnostics' + - dask[array,dataframe,diagnostics,distributed] ; extra == 'complete' + - pyarrow>=16.0 ; extra == 'complete' + - lz4>=4.3.2 ; extra == 'complete' + - pandas[test] ; extra == 'test' + - pytest ; extra == 'test' + - pytest-cov ; extra == 'test' + - pytest-mock ; extra == 'test' + - pytest-rerunfailures ; extra == 'test' + - pytest-timeout ; extra == 'test' + - pytest-xdist ; extra == 'test' + - pre-commit ; extra == 'test' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/01/80/171c7c5b78e60ab25d6f11e3d38675fe7ef843ddc79a7fd26916d3a6ca05/db_dtypes-1.5.0-py3-none-any.whl + name: db-dtypes + version: 1.5.0 + sha256: abdbb2e4eb965800ed6f98af0c5c1cafff9063ace09114be2d26a7f046be2c8a + requires_dist: + - numpy>=1.24.0,<=2.2.6 ; python_full_version == '3.10.*' + - numpy>=1.24.0 ; python_full_version != '3.10.*' + - packaging>=24.2.0 + - pandas>=1.5.3,<3.0.0 + - pyarrow>=13.0.0 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/6e/24/e6b7a8fe8b9e336d684779a88027b261374417f2be7c5a0fcdb40f0c8cc5/deltalake-0.25.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + name: deltalake + version: 0.25.5 + sha256: 0b36afba5936f74c42920c06d140535e6efc8361f659770014944d8e69fbca09 + requires_dist: + - pyarrow>=16,!=19.0.0 + - pandas ; extra == 'pandas' + - azure-storage-blob==12.20.0 ; extra == 'devel' + - packaging>=20 ; extra == 'devel' + - pytest ; extra == 'devel' + - pytest-mock ; extra == 'devel' + - pytest-cov ; extra == 'devel' + - pytest-timeout ; extra == 'devel' + - sphinx<=4.5 ; extra == 'devel' + - sphinx-rtd-theme ; extra == 'devel' + - toml ; 
extra == 'devel' + - wheel ; extra == 'devel' + - pip>=24.0 ; extra == 'devel' + - pytest-benchmark ; extra == 'devel' + - mypy==1.10.1 ; extra == 'devel' + - ruff==0.5.2 ; extra == 'devel' + - polars==1.17.1 ; extra == 'polars' + - lakefs==0.8.0 ; extra == 'lakefs' + - pyspark ; extra == 'pyspark' + - delta-spark ; extra == 'pyspark' + - numpy==1.26.4 ; extra == 'pyspark' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/74/7a/ec22ff9d5c891b4f9ae834ef70524c92bd59d1408e9944e2652c87bc3f02/deltalake-0.25.5-cp39-abi3-macosx_11_0_arm64.whl + name: deltalake + version: 0.25.5 + sha256: e8f0d24bf64455f702da8402307b22e01f91e0f76694f7c5e33c9513011e8d29 + requires_dist: + - pyarrow>=16,!=19.0.0 + - pandas ; extra == 'pandas' + - azure-storage-blob==12.20.0 ; extra == 'devel' + - packaging>=20 ; extra == 'devel' + - pytest ; extra == 'devel' + - pytest-mock ; extra == 'devel' + - pytest-cov ; extra == 'devel' + - pytest-timeout ; extra == 'devel' + - sphinx<=4.5 ; extra == 'devel' + - sphinx-rtd-theme ; extra == 'devel' + - toml ; extra == 'devel' + - wheel ; extra == 'devel' + - pip>=24.0 ; extra == 'devel' + - pytest-benchmark ; extra == 'devel' + - mypy==1.10.1 ; extra == 'devel' + - ruff==0.5.2 ; extra == 'devel' + - polars==1.17.1 ; extra == 'polars' + - lakefs==0.8.0 ; extra == 'lakefs' + - pyspark ; extra == 'pyspark' + - delta-spark ; extra == 'pyspark' + - numpy==1.26.4 ; extra == 'pyspark' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/dc/0e/f372bb290cef68c67331cd649b94d62220183ddc1b5bf3a9351ea6e9c8ec/deltalake-0.25.5-cp39-abi3-macosx_10_12_x86_64.whl + name: deltalake + version: 0.25.5 + sha256: 76be7e1ed8d13f2dc933361057a44a47a89e6112d4f5ea0a73fb510bedd96efc + requires_dist: + - pyarrow>=16,!=19.0.0 + - pandas ; extra == 'pandas' + - azure-storage-blob==12.20.0 ; extra == 'devel' + - packaging>=20 ; extra == 'devel' + - pytest ; extra == 'devel' + - pytest-mock ; extra == 'devel' + - pytest-cov ; extra == 
'devel' + - pytest-timeout ; extra == 'devel' + - sphinx<=4.5 ; extra == 'devel' + - sphinx-rtd-theme ; extra == 'devel' + - toml ; extra == 'devel' + - wheel ; extra == 'devel' + - pip>=24.0 ; extra == 'devel' + - pytest-benchmark ; extra == 'devel' + - mypy==1.10.1 ; extra == 'devel' + - ruff==0.5.2 ; extra == 'devel' + - polars==1.17.1 ; extra == 'polars' + - lakefs==0.8.0 ; extra == 'lakefs' + - pyspark ; extra == 'pyspark' + - delta-spark ; extra == 'pyspark' + - numpy==1.26.4 ; extra == 'pyspark' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl + name: deprecation + version: 2.1.0 + sha256: a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a + requires_dist: + - packaging +- pypi: https://files.pythonhosted.org/packages/46/d1/e73b6ad76f0b1fb7f23c35c6d95dbc506a9c8804f43dda8cb5b0fa6331fd/dill-0.3.9-py3-none-any.whl + name: dill + version: 0.3.9 + sha256: 468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a + requires_dist: + - objgraph>=1.7.2 ; extra == 'graph' + - gprof2dot>=2022.7.29 ; extra == 'profile' + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl + name: docker + version: 7.1.0 + sha256: c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0 + requires_dist: + - pywin32>=304 ; sys_platform == 'win32' + - requests>=2.26.0 + - urllib3>=1.26.0 + - coverage==7.2.7 ; extra == 'dev' + - pytest-cov==4.1.0 ; extra == 'dev' + - pytest-timeout==2.1.0 ; extra == 'dev' + - pytest==7.4.2 ; extra == 'dev' + - ruff==0.1.8 ; extra == 'dev' + - myst-parser==0.18.0 ; extra == 'docs' + - sphinx==5.1.1 ; extra == 'docs' + - paramiko>=2.4.3 ; extra == 'ssh' + - websocket-client>=1.3.0 ; extra == 'websockets' + requires_python: '>=3.8' +- pypi: 
https://files.pythonhosted.org/packages/4a/96/3a7630d2779d2bae6f3cdf540a088ed45166adefd3c429971e5b85ce8f84/duckdb-1.4.4-cp310-cp310-macosx_11_0_arm64.whl + name: duckdb + version: 1.4.4 + sha256: 5e1933fac5293fea5926b0ee75a55b8cfe7f516d867310a5b251831ab61fe62b + requires_dist: + - ipython ; extra == 'all' + - fsspec ; extra == 'all' + - numpy ; extra == 'all' + - pandas ; extra == 'all' + - pyarrow ; extra == 'all' + - adbc-driver-manager ; extra == 'all' + requires_python: '>=3.9.0' +- pypi: https://files.pythonhosted.org/packages/6b/7a/e9277d0567884c21f345ad43cc01aeaa2abe566d5fdf22e35c3861dd44fa/duckdb-1.4.4-cp310-cp310-macosx_10_9_x86_64.whl + name: duckdb + version: 1.4.4 + sha256: 49123b579e4a6323e65139210cd72dddc593a72d840211556b60f9703bda8526 + requires_dist: + - ipython ; extra == 'all' + - fsspec ; extra == 'all' + - numpy ; extra == 'all' + - pandas ; extra == 'all' + - pyarrow ; extra == 'all' + - adbc-driver-manager ; extra == 'all' + requires_python: '>=3.9.0' +- pypi: https://files.pythonhosted.org/packages/a2/5f/23bd586ecb21273b41b5aa4b16fd88b7fecb53ed48d897273651c0c3d66f/duckdb-1.4.4-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl + name: duckdb + version: 1.4.4 + sha256: 453b115f4777467f35103d8081770ac2f223fb5799178db5b06186e3ab51d1f2 + requires_dist: + - ipython ; extra == 'all' + - fsspec ; extra == 'all' + - numpy ; extra == 'all' + - pandas ; extra == 'all' + - pyarrow ; extra == 'all' + - adbc-driver-manager ; extra == 'all' + requires_python: '>=3.9.0' +- pypi: https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl + name: exceptiongroup + version: 1.3.1 + sha256: a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598 + requires_dist: + - typing-extensions>=4.6.0 ; python_full_version < '3.13' + - pytest>=6 ; extra == 'test' + requires_python: '>=3.7' +- pypi: 
https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl + name: execnet + version: 2.1.2 + sha256: 67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec + requires_dist: + - hatch ; extra == 'testing' + - pre-commit ; extra == 'testing' + - pytest ; extra == 'testing' + - tox ; extra == 'testing' + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/e4/72/42e900510195b23a56bde950d26a51f8b723846bfcaa0286e90287f0422b/fastapi-0.135.1-py3-none-any.whl + name: fastapi + version: 0.135.1 + sha256: 46e2fc5745924b7c840f71ddd277382af29ce1cdb7d5eab5bf697e3fb9999c9e + requires_dist: + - starlette>=0.46.0 + - pydantic>=2.7.0 + - typing-extensions>=4.8.0 + - typing-inspection>=0.4.2 + - annotated-doc>=0.0.2 + - fastapi-cli[standard]>=0.0.8 ; extra == 'standard' + - httpx>=0.23.0,<1.0.0 ; extra == 'standard' + - jinja2>=3.1.5 ; extra == 'standard' + - python-multipart>=0.0.18 ; extra == 'standard' + - email-validator>=2.0.0 ; extra == 'standard' + - uvicorn[standard]>=0.12.0 ; extra == 'standard' + - pydantic-settings>=2.0.0 ; extra == 'standard' + - pydantic-extra-types>=2.0.0 ; extra == 'standard' + - fastapi-cli[standard-no-fastapi-cloud-cli]>=0.0.8 ; extra == 'standard-no-fastapi-cloud-cli' + - httpx>=0.23.0,<1.0.0 ; extra == 'standard-no-fastapi-cloud-cli' + - jinja2>=3.1.5 ; extra == 'standard-no-fastapi-cloud-cli' + - python-multipart>=0.0.18 ; extra == 'standard-no-fastapi-cloud-cli' + - email-validator>=2.0.0 ; extra == 'standard-no-fastapi-cloud-cli' + - uvicorn[standard]>=0.12.0 ; extra == 'standard-no-fastapi-cloud-cli' + - pydantic-settings>=2.0.0 ; extra == 'standard-no-fastapi-cloud-cli' + - pydantic-extra-types>=2.0.0 ; extra == 'standard-no-fastapi-cloud-cli' + - fastapi-cli[standard]>=0.0.8 ; extra == 'all' + - httpx>=0.23.0,<1.0.0 ; extra == 'all' + - jinja2>=3.1.5 ; extra == 'all' + - python-multipart>=0.0.18 ; extra == 'all' + - 
itsdangerous>=1.1.0 ; extra == 'all' + - pyyaml>=5.3.1 ; extra == 'all' + - email-validator>=2.0.0 ; extra == 'all' + - uvicorn[standard]>=0.12.0 ; extra == 'all' + - pydantic-settings>=2.0.0 ; extra == 'all' + - pydantic-extra-types>=2.0.0 ; extra == 'all' + requires_python: '>=3.10' +- pypi: ./ + name: feast + version: 0.60.1.dev60+gb37e25a55.d20260304 + sha256: f06b53f6c086c78729760f6e7b3284717bbafa4793a1449d8a81f0270a7eb49f + requires_dist: + - click>=7.0.0,<9.0.0 + - colorama>=0.3.9,<1 + - dill~=0.3.0 + - protobuf>=4.24.0 + - jinja2>=2,<4 + - jsonschema + - mmh3 + - numpy>=2.0.0,<3 + - pandas>=1.4.3,<3 + - pyarrow>=21.0.0 + - pydantic>=2.10.6 + - pygments>=2.12.0,<3 + - pyyaml>=5.4.0,<7 + - requests + - sqlalchemy[mypy]>1 + - tabulate>=0.8.0,<1 + - tenacity>=7,<9 + - toml>=0.10.0,<1 + - tqdm>=4,<5 + - typeguard>=4.0.0 + - fastapi>=0.68.0 + - uvicorn[standard]>=0.30.6,<=0.34.0 + - uvicorn-worker + - gunicorn ; sys_platform != 'win32' + - dask[dataframe]>=2024.2.1 + - prometheus-client + - psutil + - bigtree>=0.19.2 + - pyjwt + - orjson>=3.9.0 + - boto3==1.38.27 ; extra == 'aws' + - fsspec<=2024.9.0 ; extra == 'aws' + - aiobotocore>2,<3 ; extra == 'aws' + - azure-storage-blob>=0.37.0 ; extra == 'azure' + - azure-identity>=1.6.1 ; extra == 'azure' + - sqlalchemy>=1.4.19 ; extra == 'azure' + - pyodbc>=4.0.30 ; extra == 'azure' + - pymssql<2.3.3 ; extra == 'azure' + - cassandra-driver>=3.24.0,<4 ; extra == 'cassandra' + - clickhouse-connect>=0.7.19 ; extra == 'clickhouse' + - couchbase==4.3.2 ; extra == 'couchbase' + - couchbase-columnar==1.0.0 ; extra == 'couchbase' + - deltalake<1.0.0 ; extra == 'delta' + - docling==2.27.0 ; extra == 'docling' + - ibis-framework[duckdb]>=10.0.0 ; extra == 'duckdb' + - elasticsearch>=8.13.0 ; extra == 'elasticsearch' + - faiss-cpu>=1.7.0,<=1.10.0 ; extra == 'faiss' + - google-api-core>=1.23.0,<3 ; extra == 'gcp' + - googleapis-common-protos>=1.52.0,<2 ; extra == 'gcp' + - google-cloud-bigquery[pandas]>=2,<4 ; extra == 'gcp' + - 
google-cloud-bigquery-storage>=2.0.0,<3 ; extra == 'gcp' + - google-cloud-datastore>=2.16.0,<3 ; extra == 'gcp' + - google-cloud-storage>=1.34.0,<3 ; extra == 'gcp' + - google-cloud-bigtable>=2.11.0,<3 ; extra == 'gcp' + - fsspec<=2024.9.0 ; extra == 'gcp' + - great-expectations>=0.15.41,<1 ; extra == 'ge' + - cffi>=1.15.0 ; extra == 'go' + - grpcio>=1.56.2,<=1.62.3 ; extra == 'grpcio' + - grpcio-reflection>=1.56.2,<=1.62.3 ; extra == 'grpcio' + - grpcio-health-checking>=1.56.2,<=1.62.3 ; extra == 'grpcio' + - hazelcast-python-client>=5.1 ; extra == 'hazelcast' + - happybase>=1.2.0,<3 ; extra == 'hbase' + - ibis-framework>=10.0.0 ; extra == 'ibis' + - kubernetes ; extra == 'k8s' + - feast[pytorch] ; extra == 'image' + - timm>=0.6.0 ; extra == 'image' + - pillow>=8.0.0 ; extra == 'image' + - scikit-learn>=1.0.0 ; extra == 'image' + - pymilvus>2.5 ; extra == 'milvus' + - milvus-lite==2.4.12 ; extra == 'milvus' + - feast[setuptools] ; extra == 'milvus' + - pymongo>=4.13.0,<5.0.0 ; extra == 'mongodb' + - dnspython>=2.0.0 ; extra == 'mongodb' + - ibis-framework[mssql]>=10.0.0 ; extra == 'mssql' + - pymysql ; extra == 'mysql' + - types-pymysql ; extra == 'mysql' + - openlineage-python>=1.40.0 ; extra == 'openlineage' + - prometheus-client ; extra == 'opentelemetry' + - psutil ; extra == 'opentelemetry' + - pyspark>=4.0.0 ; extra == 'spark' + - trino>=0.305.0,<0.400.0 ; extra == 'trino' + - regex ; extra == 'trino' + - psycopg[binary,pool]==3.2.5 ; extra == 'postgres' + - psycopg[c,pool]==3.2.5 ; extra == 'postgres-c' + - torch>=2.7.0 ; extra == 'pytorch' + - torchvision>=0.22.1 ; extra == 'pytorch' + - qdrant-client>=1.12.0 ; extra == 'qdrant' + - transformers>=4.36.0 ; extra == 'rag' + - datasets>=3.6.0 ; extra == 'rag' + - ray>=2.47.0 ; python_full_version == '3.10.*' and extra == 'ray' + - codeflare-sdk>=0.31.1 ; python_full_version >= '3.11' and extra == 'ray' + - redis>=4.2.2,<5 ; extra == 'redis' + - hiredis>=2.0.0,<3 ; extra == 'redis' + - singlestoredb<1.8.0 ; 
extra == 'singlestore' + - snowflake-connector-python[pandas]>=3.7,<5 ; extra == 'snowflake' + - sqlite-vec==0.1.6 ; extra == 'sqlite-vec' + - fastapi-mcp ; extra == 'mcp' + - dbt-artifacts-parser ; extra == 'dbt' + - pytest>=6.0.0,<8 ; extra == 'test' + - pytest-xdist>=3.8.0 ; extra == 'test' + - pytest-timeout==1.4.2 ; extra == 'test' + - pytest-lazy-fixture==0.6.3 ; extra == 'test' + - pytest-ordering~=0.6.0 ; extra == 'test' + - pytest-mock==1.10.4 ; extra == 'test' + - pytest-env ; extra == 'test' + - pytest-benchmark>=3.4.1,<4 ; extra == 'test' + - pytest-asyncio<=0.24.0 ; extra == 'test' + - py>=1.11.0 ; extra == 'test' + - testcontainers==4.9.0 ; extra == 'test' + - minio==7.2.11 ; extra == 'test' + - python-keycloak==4.2.2 ; extra == 'test' + - cryptography>=43.0,<44 ; extra == 'test' + - feast[aws,azure,cassandra,clickhouse,couchbase,delta,docling,duckdb,elasticsearch,faiss,gcp,ge,go,grpcio,hazelcast,hbase,ibis,image,k8s,mcp,milvus,mssql,mysql,openlineage,opentelemetry,postgres,pytorch,qdrant,rag,ray,redis,singlestore,snowflake,spark,sqlite-vec,test,trino] ; extra == 'ci' + - build ; extra == 'ci' + - virtualenv==20.23.0 ; extra == 'ci' + - dbt-artifacts-parser ; extra == 'ci' + - ruff>=0.8.0 ; extra == 'ci' + - mypy-protobuf>=3.1 ; extra == 'ci' + - grpcio-tools>=1.56.2,<=1.62.3 ; extra == 'ci' + - grpcio-testing>=1.56.2,<=1.62.3 ; extra == 'ci' + - httpx==0.27.2 ; extra == 'ci' + - mock==2.0.0 ; extra == 'ci' + - moto<5 ; extra == 'ci' + - mypy>=1.4.1,<1.11.3 ; extra == 'ci' + - urllib3>=2.6.3,<3 ; extra == 'ci' + - psutil==5.9.0 ; extra == 'ci' + - pytest-cov ; extra == 'ci' + - sphinx>4.0.0,<7 ; extra == 'ci' + - sqlglot[rs]>=23.4 ; extra == 'ci' + - pre-commit<3.3.2 ; extra == 'ci' + - assertpy==1.1 ; extra == 'ci' + - pip-tools ; extra == 'ci' + - pybindgen==0.22.0 ; extra == 'ci' + - types-protobuf~=3.19.22 ; extra == 'ci' + - python-dateutil==2.9.0 ; extra == 'ci' + - types-python-dateutil ; extra == 'ci' + - types-pytz ; extra == 'ci' + - 
types-pyyaml ; extra == 'ci' + - types-redis ; extra == 'ci' + - types-requests<2.31.0 ; extra == 'ci' + - types-setuptools ; extra == 'ci' + - types-tabulate ; extra == 'ci' + - feast[docling,image,milvus,pytorch,rag] ; extra == 'nlp' + - feast[ci] ; extra == 'dev' + - feast[ci] ; extra == 'docs' + - feast[aws,duckdb,gcp,go,grpcio,k8s,mcp,milvus,mysql,opentelemetry,postgres-c,redis,snowflake] ; extra == 'minimal' + - feast[minimal] ; extra == 'minimal-sdist-build' + - feast[ibis] ; extra == 'minimal-sdist-build' + - meson<1.7.2 ; extra == 'minimal-sdist-build' + - pybindgen==0.22.0 ; extra == 'minimal-sdist-build' + - sphinx!=4.0.0 ; extra == 'minimal-sdist-build' + - types-psutil<7.0.0.20250401 ; extra == 'minimal-sdist-build' + - greenlet!=0.4.17 ; extra == 'minimal-sdist-build' + - meson-python>=0.15.0,<0.16.0 ; extra == 'minimal-sdist-build' + - cython>=0.29.34,<3.1 ; extra == 'minimal-sdist-build' + - flit-core>=3.8,<4 ; extra == 'minimal-sdist-build' + - patchelf>=0.11.0 ; extra == 'minimal-sdist-build' + - scikit-build-core>=0.10 ; extra == 'minimal-sdist-build' + - hatch-fancy-pypi-readme>=23.2.0 ; extra == 'minimal-sdist-build' + - hatch-vcs==0.4.0 ; extra == 'minimal-sdist-build' + - hatchling>=1.6.0,<2 ; extra == 'minimal-sdist-build' + - calver<2025.4.1 ; extra == 'minimal-sdist-build' + - setuptools>=60,<81 ; extra == 'setuptools' + requires_python: '>=3.10.0' +- pypi: https://files.pythonhosted.org/packages/f9/0b/de6f54d4a8bedfe8645c41497f3c18d749f0bd3218170c667bf4b81d0cdd/filelock-3.25.0-py3-none-any.whl + name: filelock + version: 3.25.0 + sha256: 5ccf8069f7948f494968fc0713c10e5c182a9c9d9eef3a636307a20c2490f047 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/5d/ed/c7895fd2fde7f3ee70d248175f9b6cdf792fb741ab92dc59cd9ef3bd241b/frozenlist-1.8.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + name: frozenlist + version: 1.8.0 + sha256: 
f57fb59d9f385710aa7060e89410aeb5058b99e62f4d16b08b91986b9a2140c2 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/63/70/26ca3f06aace16f2352796b08704338d74b6d1a24ca38f2771afbb7ed915/frozenlist-1.8.0-cp310-cp310-macosx_11_0_arm64.whl + name: frozenlist + version: 1.8.0 + sha256: a88f062f072d1589b7b46e951698950e7da00442fc1cacbe17e19e025dc327ad + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/a2/fb/c85f9fed3ea8fe8740e5b46a59cc141c23b842eca617da8876cfce5f760e/frozenlist-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl + name: frozenlist + version: 1.8.0 + sha256: ef2b7b394f208233e471abc541cc6991f907ffd47dc72584acee3147899d6565 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/1d/a0/6aaea0c2fbea2f89bfd5db25fb1e3481896a423002ebe4e55288907a97a3/fsspec-2024.9.0-py3-none-any.whl + name: fsspec + version: 2024.9.0 + sha256: a0947d552d8a6efa72cc2c730b12c41d043509156966cca4fb157b0f2a0c574b + requires_dist: + - adlfs ; extra == 'abfs' + - adlfs ; extra == 'adl' + - pyarrow>=1 ; extra == 'arrow' + - dask ; extra == 'dask' + - distributed ; extra == 'dask' + - pre-commit ; extra == 'dev' + - ruff ; extra == 'dev' + - numpydoc ; extra == 'doc' + - sphinx ; extra == 'doc' + - sphinx-design ; extra == 'doc' + - sphinx-rtd-theme ; extra == 'doc' + - yarl ; extra == 'doc' + - dropbox ; extra == 'dropbox' + - dropboxdrivefs ; extra == 'dropbox' + - requests ; extra == 'dropbox' + - adlfs ; extra == 'full' + - aiohttp!=4.0.0a0,!=4.0.0a1 ; extra == 'full' + - dask ; extra == 'full' + - distributed ; extra == 'full' + - dropbox ; extra == 'full' + - dropboxdrivefs ; extra == 'full' + - fusepy ; extra == 'full' + - gcsfs ; extra == 'full' + - libarchive-c ; extra == 'full' + - ocifs ; extra == 'full' + - panel ; extra == 'full' + - paramiko ; extra == 'full' + - pyarrow>=1 ; extra == 'full' + - pygit2 ; extra == 'full' + - requests ; extra == 'full' + - s3fs ; extra == 'full' + - smbprotocol ; extra == 'full' + - tqdm ; 
extra == 'full' + - fusepy ; extra == 'fuse' + - gcsfs ; extra == 'gcs' + - pygit2 ; extra == 'git' + - requests ; extra == 'github' + - gcsfs ; extra == 'gs' + - panel ; extra == 'gui' + - pyarrow>=1 ; extra == 'hdfs' + - aiohttp!=4.0.0a0,!=4.0.0a1 ; extra == 'http' + - libarchive-c ; extra == 'libarchive' + - ocifs ; extra == 'oci' + - s3fs ; extra == 's3' + - paramiko ; extra == 'sftp' + - smbprotocol ; extra == 'smb' + - paramiko ; extra == 'ssh' + - aiohttp!=4.0.0a0,!=4.0.0a1 ; extra == 'test' + - numpy ; extra == 'test' + - pytest ; extra == 'test' + - pytest-asyncio!=0.22.0 ; extra == 'test' + - pytest-benchmark ; extra == 'test' + - pytest-cov ; extra == 'test' + - pytest-mock ; extra == 'test' + - pytest-recording ; extra == 'test' + - pytest-rerunfailures ; extra == 'test' + - requests ; extra == 'test' + - aiobotocore>=2.5.4,<3.0.0 ; extra == 'test-downstream' + - dask-expr ; extra == 'test-downstream' + - dask[dataframe,test] ; extra == 'test-downstream' + - moto[server]>4,<5 ; extra == 'test-downstream' + - pytest-timeout ; extra == 'test-downstream' + - xarray ; extra == 'test-downstream' + - adlfs ; extra == 'test-full' + - aiohttp!=4.0.0a0,!=4.0.0a1 ; extra == 'test-full' + - cloudpickle ; extra == 'test-full' + - dask ; extra == 'test-full' + - distributed ; extra == 'test-full' + - dropbox ; extra == 'test-full' + - dropboxdrivefs ; extra == 'test-full' + - fastparquet ; extra == 'test-full' + - fusepy ; extra == 'test-full' + - gcsfs ; extra == 'test-full' + - jinja2 ; extra == 'test-full' + - kerchunk ; extra == 'test-full' + - libarchive-c ; extra == 'test-full' + - lz4 ; extra == 'test-full' + - notebook ; extra == 'test-full' + - numpy ; extra == 'test-full' + - ocifs ; extra == 'test-full' + - pandas ; extra == 'test-full' + - panel ; extra == 'test-full' + - paramiko ; extra == 'test-full' + - pyarrow ; extra == 'test-full' + - pyarrow>=1 ; extra == 'test-full' + - pyftpdlib ; extra == 'test-full' + - pygit2 ; extra == 'test-full' + - 
pytest ; extra == 'test-full' + - pytest-asyncio!=0.22.0 ; extra == 'test-full' + - pytest-benchmark ; extra == 'test-full' + - pytest-cov ; extra == 'test-full' + - pytest-mock ; extra == 'test-full' + - pytest-recording ; extra == 'test-full' + - pytest-rerunfailures ; extra == 'test-full' + - python-snappy ; extra == 'test-full' + - requests ; extra == 'test-full' + - smbprotocol ; extra == 'test-full' + - tqdm ; extra == 'test-full' + - urllib3 ; extra == 'test-full' + - zarr ; extra == 'test-full' + - zstandard ; extra == 'test-full' + - tqdm ; extra == 'tqdm' + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl + name: fsspec + version: 2026.2.0 + sha256: 98de475b5cb3bd66bedd5c4679e87b4fdfe1a3bf4d707b151b3c07e58c9a2437 + requires_dist: + - adlfs ; extra == 'abfs' + - adlfs ; extra == 'adl' + - pyarrow>=1 ; extra == 'arrow' + - dask ; extra == 'dask' + - distributed ; extra == 'dask' + - pre-commit ; extra == 'dev' + - ruff>=0.5 ; extra == 'dev' + - numpydoc ; extra == 'doc' + - sphinx ; extra == 'doc' + - sphinx-design ; extra == 'doc' + - sphinx-rtd-theme ; extra == 'doc' + - yarl ; extra == 'doc' + - dropbox ; extra == 'dropbox' + - dropboxdrivefs ; extra == 'dropbox' + - requests ; extra == 'dropbox' + - adlfs ; extra == 'full' + - aiohttp!=4.0.0a0,!=4.0.0a1 ; extra == 'full' + - dask ; extra == 'full' + - distributed ; extra == 'full' + - dropbox ; extra == 'full' + - dropboxdrivefs ; extra == 'full' + - fusepy ; extra == 'full' + - gcsfs>2024.2.0 ; extra == 'full' + - libarchive-c ; extra == 'full' + - ocifs ; extra == 'full' + - panel ; extra == 'full' + - paramiko ; extra == 'full' + - pyarrow>=1 ; extra == 'full' + - pygit2 ; extra == 'full' + - requests ; extra == 'full' + - s3fs>2024.2.0 ; extra == 'full' + - smbprotocol ; extra == 'full' + - tqdm ; extra == 'full' + - fusepy ; extra == 'fuse' + - gcsfs>2024.2.0 ; extra == 'gcs' 
+ - pygit2 ; extra == 'git' + - requests ; extra == 'github' + - gcsfs ; extra == 'gs' + - panel ; extra == 'gui' + - pyarrow>=1 ; extra == 'hdfs' + - aiohttp!=4.0.0a0,!=4.0.0a1 ; extra == 'http' + - libarchive-c ; extra == 'libarchive' + - ocifs ; extra == 'oci' + - s3fs>2024.2.0 ; extra == 's3' + - paramiko ; extra == 'sftp' + - smbprotocol ; extra == 'smb' + - paramiko ; extra == 'ssh' + - aiohttp!=4.0.0a0,!=4.0.0a1 ; extra == 'test' + - numpy ; extra == 'test' + - pytest ; extra == 'test' + - pytest-asyncio!=0.22.0 ; extra == 'test' + - pytest-benchmark ; extra == 'test' + - pytest-cov ; extra == 'test' + - pytest-mock ; extra == 'test' + - pytest-recording ; extra == 'test' + - pytest-rerunfailures ; extra == 'test' + - requests ; extra == 'test' + - aiobotocore>=2.5.4,<3.0.0 ; extra == 'test-downstream' + - dask[dataframe,test] ; extra == 'test-downstream' + - moto[server]>4,<5 ; extra == 'test-downstream' + - pytest-timeout ; extra == 'test-downstream' + - xarray ; extra == 'test-downstream' + - adlfs ; extra == 'test-full' + - aiohttp!=4.0.0a0,!=4.0.0a1 ; extra == 'test-full' + - backports-zstd ; python_full_version < '3.14' and extra == 'test-full' + - cloudpickle ; extra == 'test-full' + - dask ; extra == 'test-full' + - distributed ; extra == 'test-full' + - dropbox ; extra == 'test-full' + - dropboxdrivefs ; extra == 'test-full' + - fastparquet ; extra == 'test-full' + - fusepy ; extra == 'test-full' + - gcsfs ; extra == 'test-full' + - jinja2 ; extra == 'test-full' + - kerchunk ; extra == 'test-full' + - libarchive-c ; extra == 'test-full' + - lz4 ; extra == 'test-full' + - notebook ; extra == 'test-full' + - numpy ; extra == 'test-full' + - ocifs ; extra == 'test-full' + - pandas<3.0.0 ; extra == 'test-full' + - panel ; extra == 'test-full' + - paramiko ; extra == 'test-full' + - pyarrow ; extra == 'test-full' + - pyarrow>=1 ; extra == 'test-full' + - pyftpdlib ; extra == 'test-full' + - pygit2 ; extra == 'test-full' + - pytest ; extra == 'test-full' 
+ - pytest-asyncio!=0.22.0 ; extra == 'test-full' + - pytest-benchmark ; extra == 'test-full' + - pytest-cov ; extra == 'test-full' + - pytest-mock ; extra == 'test-full' + - pytest-recording ; extra == 'test-full' + - pytest-rerunfailures ; extra == 'test-full' + - python-snappy ; extra == 'test-full' + - requests ; extra == 'test-full' + - smbprotocol ; extra == 'test-full' + - tqdm ; extra == 'test-full' + - urllib3 ; extra == 'test-full' + - zarr ; extra == 'test-full' + - zstandard ; python_full_version < '3.14' and extra == 'test-full' + - tqdm ; extra == 'tqdm' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/45/27/09c33d67f7e0dcf06d7ac17d196594e66989299374bfb0d4331d1038e76b/google_api_core-2.30.0-py3-none-any.whl + name: google-api-core + version: 2.30.0 + sha256: 80be49ee937ff9aba0fd79a6eddfde35fe658b9953ab9b79c57dd7061afa8df5 + requires_dist: + - googleapis-common-protos>=1.56.3,<2.0.0 + - protobuf>=4.25.8,<7.0.0 + - proto-plus>=1.22.3,<2.0.0 + - proto-plus>=1.25.0,<2.0.0 ; python_full_version >= '3.13' + - google-auth>=2.14.1,<3.0.0 + - requests>=2.20.0,<3.0.0 + - google-auth[aiohttp]>=2.35.0,<3.0.0 ; extra == 'async-rest' + - grpcio>=1.33.2,<2.0.0 ; extra == 'grpc' + - grpcio>=1.49.1,<2.0.0 ; python_full_version >= '3.11' and extra == 'grpc' + - grpcio>=1.75.1,<2.0.0 ; python_full_version >= '3.14' and extra == 'grpc' + - grpcio-status>=1.33.2,<2.0.0 ; extra == 'grpc' + - grpcio-status>=1.49.1,<2.0.0 ; python_full_version >= '3.11' and extra == 'grpc' + - grpcio-status>=1.75.1,<2.0.0 ; python_full_version >= '3.14' and extra == 'grpc' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/83/1d/d6466de3a5249d35e832a52834115ca9d1d0de6abc22065f049707516d47/google_auth-2.48.0-py3-none-any.whl + name: google-auth + version: 2.48.0 + sha256: 2e2a537873d449434252a9632c28bfc268b0adb1e53f9fb62afc5333a975903f + requires_dist: + - pyasn1-modules>=0.2.1 + - cryptography>=38.0.3 + - rsa>=3.1.4,<5 + - 
cryptography>=38.0.3 ; extra == 'cryptography' + - aiohttp>=3.6.2,<4.0.0 ; extra == 'aiohttp' + - requests>=2.20.0,<3.0.0 ; extra == 'aiohttp' + - pyopenssl ; extra == 'enterprise-cert' + - pyopenssl>=20.0.0 ; extra == 'pyopenssl' + - pyjwt>=2.0 ; extra == 'pyjwt' + - pyu2f>=0.1.5 ; extra == 'reauth' + - requests>=2.20.0,<3.0.0 ; extra == 'requests' + - grpcio ; extra == 'testing' + - flask ; extra == 'testing' + - freezegun ; extra == 'testing' + - oauth2client ; extra == 'testing' + - pyjwt>=2.0 ; extra == 'testing' + - pytest ; extra == 'testing' + - pytest-cov ; extra == 'testing' + - pytest-localserver ; extra == 'testing' + - pyopenssl>=20.0.0 ; extra == 'testing' + - pyu2f>=0.1.5 ; extra == 'testing' + - responses ; extra == 'testing' + - urllib3 ; extra == 'testing' + - packaging ; extra == 'testing' + - aiohttp>=3.6.2,<4.0.0 ; extra == 'testing' + - requests>=2.20.0,<3.0.0 ; extra == 'testing' + - aioresponses ; extra == 'testing' + - pytest-asyncio ; extra == 'testing' + - pyopenssl<24.3.0 ; extra == 'testing' + - aiohttp<3.10.0 ; extra == 'testing' + - urllib3 ; extra == 'urllib3' + - packaging ; extra == 'urllib3' + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/2f/56/909fd5632226d3fba31d7aeffd4754410735d49362f5809956fe3e9af344/google_auth_oauthlib-1.3.0-py3-none-any.whl + name: google-auth-oauthlib + version: 1.3.0 + sha256: 386b3fb85cf4a5b819c6ad23e3128d975216b4cac76324de1d90b128aaf38f29 + requires_dist: + - google-auth>=2.15.0,!=2.43.0,!=2.44.0,!=2.45.0,<3.0.0 + - requests-oauthlib>=0.7.0 + - click>=6.0.0 ; extra == 'tool' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/7c/f5/081cf5b90adfe524ae0d671781b0d497a75a0f2601d075af518828e22d8f/google_cloud_bigquery-3.40.1-py3-none-any.whl + name: google-cloud-bigquery + version: 3.40.1 + sha256: 9082a6b8193aba87bed6a2c79cf1152b524c99bb7e7ac33a785e333c09eac868 + requires_dist: + - google-api-core[grpc]>=2.11.1,<3.0.0 + - google-auth>=2.14.1,<3.0.0 + - 
google-cloud-core>=2.4.1,<3.0.0 + - google-resumable-media>=2.0.0,<3.0.0 + - packaging>=24.2.0 + - python-dateutil>=2.8.2,<3.0.0 + - requests>=2.21.0,<3.0.0 + - google-cloud-bigquery-storage>=2.18.0,<3.0.0 ; extra == 'bqstorage' + - grpcio>=1.47.0,<2.0.0 ; extra == 'bqstorage' + - grpcio>=1.49.1,<2.0.0 ; python_full_version >= '3.11' and extra == 'bqstorage' + - grpcio>=1.75.1,<2.0.0 ; python_full_version >= '3.14' and extra == 'bqstorage' + - pyarrow>=4.0.0 ; extra == 'bqstorage' + - pandas>=1.3.0 ; extra == 'pandas' + - pandas-gbq>=0.26.1 ; extra == 'pandas' + - grpcio>=1.47.0,<2.0.0 ; extra == 'pandas' + - grpcio>=1.49.1,<2.0.0 ; python_full_version >= '3.11' and extra == 'pandas' + - grpcio>=1.75.1,<2.0.0 ; python_full_version >= '3.14' and extra == 'pandas' + - pyarrow>=3.0.0 ; extra == 'pandas' + - db-dtypes>=1.0.4,<2.0.0 ; extra == 'pandas' + - ipywidgets>=7.7.1 ; extra == 'ipywidgets' + - ipykernel>=6.2.0 ; extra == 'ipywidgets' + - geopandas>=0.9.0,<2.0.0 ; extra == 'geopandas' + - shapely>=1.8.4,<3.0.0 ; extra == 'geopandas' + - ipython>=7.23.1 ; extra == 'ipython' + - bigquery-magics>=0.6.0 ; extra == 'ipython' + - matplotlib>=3.7.1,<=3.9.2 ; python_full_version == '3.9.*' and extra == 'matplotlib' + - matplotlib>=3.10.3 ; python_full_version >= '3.10' and extra == 'matplotlib' + - tqdm>=4.23.4,<5.0.0 ; extra == 'tqdm' + - opentelemetry-api>=1.1.0 ; extra == 'opentelemetry' + - opentelemetry-sdk>=1.1.0 ; extra == 'opentelemetry' + - opentelemetry-instrumentation>=0.20b0 ; extra == 'opentelemetry' + - proto-plus>=1.22.3,<2.0.0 ; extra == 'bigquery-v2' + - protobuf>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<7.0.0 ; extra == 'bigquery-v2' + - google-cloud-bigquery[bigquery-v2,bqstorage,geopandas,ipython,ipywidgets,matplotlib,opentelemetry,pandas,tqdm] ; extra == 'all' + requires_python: '>=3.9' +- pypi: 
https://files.pythonhosted.org/packages/1f/07/62dbe78ef773569be0a1d2c1b845e9214889b404e506126519b4d33ee999/google_cloud_bigquery_storage-2.36.2-py3-none-any.whl + name: google-cloud-bigquery-storage + version: 2.36.2 + sha256: 823a73db0c4564e8ad3eedcfd5049f3d5aa41775267863b5627211ec36be2dbf + requires_dist: + - google-api-core[grpc]>=1.34.1,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,<3.0.0 + - google-auth>=2.14.1,!=2.24.0,!=2.25.0,<3.0.0 + - grpcio>=1.33.2,<2.0.0 + - grpcio>=1.75.1,<2.0.0 ; python_full_version >= '3.14' + - proto-plus>=1.22.3,<2.0.0 + - proto-plus>=1.25.0,<2.0.0 ; python_full_version >= '3.13' + - protobuf>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<7.0.0 + - pandas>=0.21.1 ; extra == 'pandas' + - importlib-metadata>=1.0.0 ; python_full_version < '3.8' and extra == 'pandas' + - fastavro>=0.21.2 ; extra == 'fastavro' + - pyarrow>=0.15.0 ; extra == 'pyarrow' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/62/69/03eed134d71f6117ffd9efac2d1033bb2fa2522e9e82545a0828061d32f4/google_cloud_bigtable-2.35.0-py3-none-any.whl + name: google-cloud-bigtable + version: 2.35.0 + sha256: f355bfce1f239453ec2bb3839b0f4f9937cf34ef06ef29e1ca63d58fd38d0c50 + requires_dist: + - google-api-core[grpc]>=2.17.0,<3.0.0 + - google-cloud-core>=1.4.4,<3.0.0 + - google-auth>=2.23.0,!=2.24.0,!=2.25.0,<3.0.0 + - grpc-google-iam-v1>=0.12.4,<1.0.0 + - proto-plus>=1.22.3,<2.0.0 + - proto-plus>=1.25.0,<2.0.0 ; python_full_version >= '3.13' + - protobuf>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<7.0.0 + - google-crc32c>=1.5.0,<2.0.0.dev0 + - libcst>=0.2.5 ; extra == 'libcst' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/89/20/bfa472e327c8edee00f04beecc80baeddd2ab33ee0e86fd7654da49d45e9/google_cloud_core-2.5.0-py3-none-any.whl + name: google-cloud-core + version: 2.5.0 + sha256: 67d977b41ae6c7211ee830c7912e41003ea8194bff15ae7d72fd6f51e57acabc + 
requires_dist: + - google-api-core>=1.31.6,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0,<3.0.0 + - google-auth>=1.25.0,<3.0.0 + - importlib-metadata>1.0.0 ; python_full_version < '3.8' + - grpcio>=1.38.0,<2.0.0 ; python_full_version < '3.14' and extra == 'grpc' + - grpcio>=1.75.1,<2.0.0 ; python_full_version >= '3.14' and extra == 'grpc' + - grpcio-status>=1.38.0,<2.0.0 ; extra == 'grpc' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/b7/88/348c09570a03886356c02337f06d69532fa17a66ad2a9dff584f7b60eb04/google_cloud_datastore-2.23.0-py3-none-any.whl + name: google-cloud-datastore + version: 2.23.0 + sha256: 24a1b1d29b902148fe41b109699f76fd3aa60591e9d547c0f8b87d7bf9ff213f + requires_dist: + - google-api-core[grpc]>=1.34.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,<3.0.0 + - google-auth>=2.14.1,!=2.24.0,!=2.25.0,<3.0.0 + - google-cloud-core>=1.4.0,<3.0.0 + - proto-plus>=1.22.0,<2.0.0 + - proto-plus>=1.22.2,<2.0.0 ; python_full_version >= '3.11' + - proto-plus>=1.25.0,<2.0.0 ; python_full_version >= '3.13' + - protobuf!=3.20.0,!=3.20.1,>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<7.0.0 + - grpcio>=1.38.0,<2.0.0 + - grpcio>=1.75.1,<2.0.0 ; python_full_version >= '3.14' + - libcst>=0.2.5 ; extra == 'libcst' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/d5/94/6db383d8ee1adf45dc6c73477152b82731fa4c4a46d9c1932cc8757e0fd4/google_cloud_storage-2.19.0-py2.py3-none-any.whl + name: google-cloud-storage + version: 2.19.0 + sha256: aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba + requires_dist: + - google-auth>=2.26.1,<3.0.dev0 + - google-api-core>=2.15.0,<3.0.0.dev0 + - google-cloud-core>=2.3.0,<3.0.dev0 + - google-resumable-media>=2.7.2 + - requests>=2.18.0,<3.0.0.dev0 + - google-crc32c>=1.0,<2.0.dev0 + - protobuf<6.0.0.dev0 ; extra == 'protobuf' + - opentelemetry-api>=1.1.0 ; extra == 'tracing' + requires_python: '>=3.7' +- pypi: 
https://files.pythonhosted.org/packages/3d/63/bec827e70b7a0d4094e7476f863c0dbd6b5f0f1f91d9c9b32b76dcdfeb4e/google_crc32c-1.8.0-cp310-cp310-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl + name: google-crc32c + version: 1.8.0 + sha256: 6f35aaffc8ccd81ba3162443fabb920e65b1f20ab1952a31b13173a67811467d + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/95/ac/6f7bc93886a823ab545948c2dd48143027b2355ad1944c7cf852b338dc91/google_crc32c-1.8.0-cp310-cp310-macosx_12_0_arm64.whl + name: google-crc32c + version: 1.8.0 + sha256: 0470b8c3d73b5f4e3300165498e4cf25221c7eb37f1159e221d1825b6df8a7ff + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/f7/97/a5accde175dee985311d949cfcb1249dcbb290f5ec83c994ea733311948f/google_crc32c-1.8.0-cp310-cp310-macosx_12_0_x86_64.whl + name: google-crc32c + version: 1.8.0 + sha256: 119fcd90c57c89f30040b47c211acee231b25a45d225e3225294386f5d258288 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/1f/0b/93afde9cfe012260e9fe1522f35c9b72d6ee222f316586b1f23ecf44d518/google_resumable_media-2.8.0-py3-none-any.whl + name: google-resumable-media + version: 2.8.0 + sha256: dd14a116af303845a8d932ddae161a26e86cc229645bc98b39f026f9b1717582 + requires_dist: + - google-crc32c>=1.0.0,<2.0.0 + - requests>=2.18.0,<3.0.0 ; extra == 'requests' + - aiohttp>=3.6.2,<4.0.0 ; extra == 'aiohttp' + - google-auth>=1.22.0,<2.0.0 ; extra == 'aiohttp' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl + name: googleapis-common-protos + version: 1.72.0 + sha256: 4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038 + requires_dist: + - protobuf>=3.20.2,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<7.0.0 + - grpcio>=1.44.0,<2.0.0 ; extra == 'grpc' + requires_python: '>=3.7' +- pypi: 
https://files.pythonhosted.org/packages/38/3f/9859f655d11901e7b2996c6e3d33e0caa9a1d4572c3bc61ed0faa64b2f4c/greenlet-3.3.2-cp310-cp310-macosx_11_0_universal2.whl + name: greenlet + version: 3.3.2 + sha256: 9bc885b89709d901859cf95179ec9f6bb67a3d2bb1f0e88456461bd4b7f8fd0d + requires_dist: + - sphinx ; extra == 'docs' + - furo ; extra == 'docs' + - objgraph ; extra == 'test' + - psutil ; extra == 'test' + - setuptools ; extra == 'test' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/3f/ae/8bffcbd373b57a5992cd077cbe8858fff39110480a9d50697091faea6f39/greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl + name: greenlet + version: 3.3.2 + sha256: 8d1658d7291f9859beed69a776c10822a0a799bc4bfe1bd4272bb60e62507dab + requires_dist: + - sphinx ; extra == 'docs' + - furo ; extra == 'docs' + - objgraph ; extra == 'test' + - psutil ; extra == 'test' + - setuptools ; extra == 'test' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/ad/55/9f1ebb5a825215fadcc0f7d5073f6e79e3007e3282b14b22d6aba7ca6cb8/greenlet-3.3.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl + name: greenlet + version: 3.3.2 + sha256: ad0c8917dd42a819fe77e6bdfcb84e3379c0de956469301d9fd36427a1ca501f + requires_dist: + - sphinx ; extra == 'docs' + - furo ; extra == 'docs' + - objgraph ; extra == 'test' + - psutil ; extra == 'test' + - setuptools ; extra == 'test' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/d2/d8/09bfa816572a4d83bccd6750df1926f79158b1c36c5f73786e26dbe4ee38/greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl + name: greenlet + version: 3.3.2 + sha256: 63d10328839d1973e5ba35e98cccbca71b232b14051fd957b6f8b6e8e80d0506 + requires_dist: + - sphinx ; extra == 'docs' + - furo ; extra == 'docs' + - objgraph ; extra == 'test' + - psutil ; extra == 'test' + - setuptools ; extra == 'test' + requires_python: '>=3.10' +- pypi: 
https://files.pythonhosted.org/packages/4a/bd/330a1bbdb1afe0b96311249e699b6dc9cfc17916394fd4503ac5aca2514b/grpc_google_iam_v1-0.14.3-py3-none-any.whl + name: grpc-google-iam-v1 + version: 0.14.3 + sha256: 7a7f697e017a067206a3dfef44e4c634a34d3dee135fe7d7a4613fe3e59217e6 + requires_dist: + - grpcio>=1.44.0,<2.0.0 + - googleapis-common-protos[grpc]>=1.56.0,<2.0.0 + - protobuf>=3.20.2,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<7.0.0 + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/58/2f/f3fc773270cf17e7ca076c1f6435278f58641d475a25cdeea5b2d8d4845b/grpcio-1.62.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + name: grpcio + version: 1.62.3 + sha256: 807176971c504c598976f5a9ea62363cffbbbb6c7509d9808c2342b020880fa2 + requires_dist: + - grpcio-tools>=1.62.3 ; extra == 'protobuf' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/c5/63/ee244c4b64f0e71cef5314f9fa1d120c072e33c2e4c545dc75bd1af2a5c5/grpcio-1.62.3-cp310-cp310-macosx_12_0_universal2.whl + name: grpcio + version: 1.62.3 + sha256: f5def814c5a4c90c8fe389c526ab881f4a28b7e239b23ed8e02dd02934dfaa1a + requires_dist: + - grpcio-tools>=1.62.3 ; extra == 'protobuf' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/40/4c/ee3173906196b741ac6ba55a9788ba9ebf2cd05f91715a49b6c3bfbb9d73/grpcio_health_checking-1.62.3-py3-none-any.whl + name: grpcio-health-checking + version: 1.62.3 + sha256: f29da7dd144d73b4465fe48f011a91453e9ff6c8af0d449254cf80021cab3e0d + requires_dist: + - protobuf>=4.21.6 + - grpcio>=1.62.3 + requires_python: '>=3.6' +- pypi: https://files.pythonhosted.org/packages/82/54/acc6a6e684827b0f6bb4e2c27f3d7e25b71322c4078ef5b455c07c43260e/grpcio_reflection-1.62.3-py3-none-any.whl + name: grpcio-reflection + version: 1.62.3 + sha256: a48ef37df81a3bada78261fc92ef382f061112f989d1312398b945cc69838b9c + requires_dist: + - protobuf>=4.21.6 + - grpcio>=1.62.3 + requires_python: '>=3.6' +- pypi: 
https://files.pythonhosted.org/packages/90/40/972271de05f9315c0d69f9f7ebbcadd83bc85322f538637d11bb8c67803d/grpcio_status-1.62.3-py3-none-any.whl + name: grpcio-status + version: 1.62.3 + sha256: f9049b762ba8de6b1086789d8315846e094edac2c50beaf462338b301a8fd4b8 + requires_dist: + - protobuf>=4.21.6 + - grpcio>=1.62.3 + - googleapis-common-protos>=1.5.5 + requires_python: '>=3.6' +- pypi: https://files.pythonhosted.org/packages/b2/23/55d40e1bf54c141f541ab31b4b4b0f58610440c8837b1406f3467c2b4853/grpcio_testing-1.62.3-py3-none-any.whl + name: grpcio-testing + version: 1.62.3 + sha256: 06a4d7eb30d22f91368aa7f48bfc33563da13b9d951314455ca8c9c987fb75bb + requires_dist: + - protobuf>=4.21.6 + - grpcio>=1.62.3 +- pypi: https://files.pythonhosted.org/packages/da/73/4ad5b1f6a2e21cf1e85afdaad2b7b1a933985e2f5d679147a1953aaa192c/gunicorn-25.1.0-py3-none-any.whl + name: gunicorn + version: 25.1.0 + sha256: d0b1236ccf27f72cfe14bce7caadf467186f19e865094ca84221424e839b8b8b + requires_dist: + - packaging + - gevent>=24.10.1 ; extra == 'gevent' + - eventlet>=0.40.3 ; extra == 'eventlet' + - tornado>=6.5.0 ; extra == 'tornado' + - setproctitle ; extra == 'setproctitle' + - h2>=4.1.0 ; extra == 'http2' + - gevent>=24.10.1 ; extra == 'testing' + - eventlet>=0.40.3 ; extra == 'testing' + - h2>=4.1.0 ; extra == 'testing' + - coverage ; extra == 'testing' + - pytest ; extra == 'testing' + - pytest-cov ; extra == 'testing' + - pytest-asyncio ; extra == 'testing' + - uvloop>=0.19.0 ; extra == 'testing' + - httpx[http2] ; extra == 'testing' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl + name: h11 + version: 0.16.0 + sha256: 63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86 + requires_python: '>=3.8' +- pypi: 
https://files.pythonhosted.org/packages/42/6e/8adaefff7e3e216b0f7bd6cafce6d5d06798f31c3e2852dc3db6a7d758c9/hiredis-2.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + name: hiredis + version: 2.4.0 + sha256: 87a8ece3e893f45354395c6b9dc0479744c1c8c6ee4471b60945d96c9b5ce6c2 + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/d8/70/3f39ebfb3824578c34400df3b037b268abb5af0abaa789b430ffd17dd74e/hiredis-2.4.0-cp310-cp310-macosx_10_15_x86_64.whl + name: hiredis + version: 2.4.0 + sha256: 76503a0edaf3d1557518127511e69e5d9fa37b6ff15598b0d9d9c2db18b08a41 + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/ed/b7/26a56a3b991abe7fcf7bcfa8e0a08de3c3766c6caecb1ba46239342792ff/hiredis-2.4.0-cp310-cp310-macosx_11_0_arm64.whl + name: hiredis + version: 2.4.0 + sha256: b027b53adb1df11923753d85587e3ab611fe70bc69596e9eb3269acab809c376 + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl + name: httpcore + version: 1.0.9 + sha256: 2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55 + requires_dist: + - certifi + - h11>=0.16 + - anyio>=4.0,<5.0 ; extra == 'asyncio' + - h2>=3,<5 ; extra == 'http2' + - socksio==1.* ; extra == 'socks' + - trio>=0.22.0,<1.0 ; extra == 'trio' + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/07/f0/89720dc5139ae54b03f861b5e2c55a37dba9a5da7d51e1e824a1f343627f/httptools-0.7.1-cp314-cp314-macosx_11_0_arm64.whl + name: httptools + version: 0.7.1 + sha256: 7aea2e3c3953521c3c51106ee11487a910d45586e351202474d45472db7d72d3 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/34/50/9d095fcbb6de2d523e027a2f304d4551855c2f46e0b82befd718b8b20056/httptools-0.7.1-cp314-cp314-macosx_10_13_universal2.whl + name: httptools + version: 0.7.1 + sha256: c08fe65728b8d70b6923ce31e3956f859d5e1e8548e6f22ec520a962c6757270 + requires_python: 
'>=3.9' +- pypi: https://files.pythonhosted.org/packages/7e/4f/35e3a63f863a659f92ffd92bef131f3e81cf849af26e6435b49bd9f6f751/httptools-0.7.1-cp310-cp310-macosx_11_0_arm64.whl + name: httptools + version: 0.7.1 + sha256: 84d86c1e5afdc479a6fdabf570be0d3eb791df0ae727e8dbc0259ed1249998d4 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/b3/cb/eea88506f191fb552c11787c23f9a405f4c7b0c5799bf73f2249cd4f5228/httptools-0.7.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + name: httptools + version: 0.7.1 + sha256: 0e68b8582f4ea9166be62926077a3334064d422cf08ab87d8b74664f8e9058e1 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/c7/e5/c07e0bcf4ec8db8164e9f6738c048b2e66aabf30e7506f440c4cc6953f60/httptools-0.7.1-cp310-cp310-macosx_10_9_universal2.whl + name: httptools + version: 0.7.1 + sha256: 11d01b0ff1fe02c4c32d60af61a4d613b74fad069e47e06e9067758c01e9ac78 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/f5/71/b0a9193641d9e2471ac541d3b1b869538a5fb6419d52fd2669fa9c79e4b8/httptools-0.7.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + name: httptools + version: 0.7.1 + sha256: c8c751014e13d88d2be5f5f14fc8b89612fcfa92a9cc480f2bc1598357a23a05 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl + name: httpx + version: 0.28.1 + sha256: d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad + requires_dist: + - anyio + - certifi + - httpcore==1.* + - idna + - brotli ; platform_python_implementation == 'CPython' and extra == 'brotli' + - brotlicffi ; platform_python_implementation != 'CPython' and extra == 'brotli' + - click==8.* ; extra == 'cli' + - pygments==2.* ; extra == 'cli' + - rich>=10,<14 ; extra == 'cli' + - h2>=3,<5 ; extra == 'http2' + - socksio==1.* ; extra == 'socks' + - zstandard>=0.18.0 ; extra == 
'zstd' + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/9d/b3/11d406849715b47c9d69bb22f50874f80caee96bd1cbe7b61abbebbf5a05/ibis_framework-12.0.0-py3-none-any.whl + name: ibis-framework + version: 12.0.0 + sha256: 0bbd790f268da9cb87926d5eaad2b827a573927113c4ed3be5095efa89b9e512 + requires_dist: + - atpublic>=2.3 + - parsy>=2 + - python-dateutil>=2.8.2 + - sqlglot>=23.4,!=26.32.0 + - toolz>=0.11 + - typing-extensions>=4.3.0 + - tzdata>=2022.7 + - fsspec[s3] ; extra == 'athena' + - numpy>=1.23.2,<3 ; extra == 'athena' + - pandas>=1.5.3,<4 ; extra == 'athena' + - pyarrow-hotfix>=0.4 ; extra == 'athena' + - pyarrow>=10.0.1 ; extra == 'athena' + - pyathena[arrow,pandas]>=3.11.0 ; extra == 'athena' + - rich>=12.4.4 ; extra == 'athena' + - db-dtypes>=0.3 ; extra == 'bigquery' + - google-cloud-bigquery-storage>=2 ; extra == 'bigquery' + - google-cloud-bigquery>=3 ; extra == 'bigquery' + - numpy>=1.23.2,<3 ; extra == 'bigquery' + - pandas-gbq>=0.26.1 ; extra == 'bigquery' + - pandas>=1.5.3,<4 ; extra == 'bigquery' + - pyarrow-hotfix>=0.4 ; extra == 'bigquery' + - pyarrow>=10.0.1 ; extra == 'bigquery' + - pydata-google-auth>=1.4.0 ; extra == 'bigquery' + - rich>=12.4.4 ; extra == 'bigquery' + - clickhouse-connect[arrow,numpy,pandas]>=0.5.23 ; extra == 'clickhouse' + - numpy>=1.23.2,<3 ; extra == 'clickhouse' + - pandas>=1.5.3,<4 ; extra == 'clickhouse' + - pyarrow-hotfix>=0.4 ; extra == 'clickhouse' + - pyarrow>=10.0.1 ; extra == 'clickhouse' + - rich>=12.4.4 ; extra == 'clickhouse' + - databricks-sql-connector>=4 ; extra == 'databricks' + - numpy>=1.23.2,<3 ; extra == 'databricks' + - pandas>=1.5.3,<4 ; extra == 'databricks' + - pyarrow-hotfix>=0.4 ; extra == 'databricks' + - pyarrow>=10.0.1 ; extra == 'databricks' + - rich>=12.4.4 ; extra == 'databricks' + - datafusion>=0.6 ; extra == 'datafusion' + - numpy>=1.23.2,<3 ; extra == 'datafusion' + - packaging>=21.3 ; extra == 'datafusion' + - pandas>=1.5.3,<4 ; extra == 'datafusion' + - 
pyarrow-hotfix>=0.4 ; extra == 'datafusion' + - pyarrow>=10.0.1 ; extra == 'datafusion' + - rich>=12.4.4 ; extra == 'datafusion' + - black>=22.1.0 ; extra == 'decompiler' + - deltalake>=0.9.0 ; extra == 'deltalake' + - numpy>=1.23.2,<3 ; extra == 'druid' + - pandas>=1.5.3,<4 ; extra == 'druid' + - pyarrow-hotfix>=0.4 ; extra == 'druid' + - pyarrow>=10.0.1 ; extra == 'druid' + - pydruid>=0.6.7 ; extra == 'druid' + - rich>=12.4.4 ; extra == 'druid' + - duckdb>=0.10.3,!=1.3.0 ; extra == 'duckdb' + - numpy>=1.23.2,<3 ; extra == 'duckdb' + - packaging>=21.3 ; extra == 'duckdb' + - pandas>=1.5.3,<4 ; extra == 'duckdb' + - pyarrow-hotfix>=0.4 ; extra == 'duckdb' + - pyarrow>=10.0.1 ; extra == 'duckdb' + - rich>=12.4.4 ; extra == 'duckdb' + - pins[gcs]>=0.8.3 ; extra == 'examples' + - numpy>=1.23.2,<3 ; extra == 'exasol' + - pandas>=1.5.3,<4 ; extra == 'exasol' + - pyarrow-hotfix>=0.4 ; extra == 'exasol' + - pyarrow>=10.0.1 ; extra == 'exasol' + - pyexasol>=0.25.2 ; extra == 'exasol' + - rich>=12.4.4 ; extra == 'exasol' + - numpy>=1.23.2,<3 ; extra == 'flink' + - pandas>=1.5.3,<4 ; extra == 'flink' + - pyarrow-hotfix>=0.4 ; extra == 'flink' + - pyarrow>=10.0.1 ; extra == 'flink' + - rich>=12.4.4 ; extra == 'flink' + - geoarrow-types>=0.2 ; extra == 'geospatial' + - geopandas>=0.6 ; extra == 'geospatial' + - pyproj>=3.3.0 ; extra == 'geospatial' + - shapely>=2 ; extra == 'geospatial' + - impyla>=0.17 ; extra == 'impala' + - numpy>=1.23.2,<3 ; extra == 'impala' + - pandas>=1.5.3,<4 ; extra == 'impala' + - pyarrow-hotfix>=0.4 ; extra == 'impala' + - pyarrow>=10.0.1 ; extra == 'impala' + - rich>=12.4.4 ; extra == 'impala' + - numpy>=1.23.2,<3 ; extra == 'materialize' + - pandas>=1.5.3,<4 ; extra == 'materialize' + - psycopg>=3.2.0 ; extra == 'materialize' + - pyarrow-hotfix>=0.4 ; extra == 'materialize' + - pyarrow>=10.0.1 ; extra == 'materialize' + - rich>=12.4.4 ; extra == 'materialize' + - numpy>=1.23.2,<3 ; extra == 'mssql' + - pandas>=1.5.3,<4 ; extra == 'mssql' + - 
pyarrow-hotfix>=0.4 ; extra == 'mssql' + - pyarrow>=10.0.1 ; extra == 'mssql' + - pyodbc>=4.0.39 ; extra == 'mssql' + - rich>=12.4.4 ; extra == 'mssql' + - mysqlclient>=2.2.4 ; extra == 'mysql' + - numpy>=1.23.2,<3 ; extra == 'mysql' + - pandas>=1.5.3,<4 ; extra == 'mysql' + - pyarrow-hotfix>=0.4 ; extra == 'mysql' + - pyarrow>=10.0.1 ; extra == 'mysql' + - rich>=12.4.4 ; extra == 'mysql' + - numpy>=1.23.2,<3 ; extra == 'oracle' + - oracledb>=1.3.1 ; extra == 'oracle' + - pandas>=1.5.3,<4 ; extra == 'oracle' + - pyarrow-hotfix>=0.4 ; extra == 'oracle' + - pyarrow>=10.0.1 ; extra == 'oracle' + - rich>=12.4.4 ; extra == 'oracle' + - numpy>=1.23.2,<3 ; extra == 'polars' + - pandas>=1.5.3,<4 ; extra == 'polars' + - polars>=1 ; extra == 'polars' + - pyarrow-hotfix>=0.4 ; extra == 'polars' + - pyarrow>=10.0.1 ; extra == 'polars' + - rich>=12.4.4 ; extra == 'polars' + - numpy>=1.23.2,<3 ; extra == 'postgres' + - pandas>=1.5.3,<4 ; extra == 'postgres' + - psycopg>=3.2.0 ; extra == 'postgres' + - pyarrow-hotfix>=0.4 ; extra == 'postgres' + - pyarrow>=10.0.1 ; extra == 'postgres' + - rich>=12.4.4 ; extra == 'postgres' + - numpy>=1.23.2,<3 ; extra == 'pyspark' + - packaging>=21.3 ; extra == 'pyspark' + - pandas>=1.5.3,<4 ; extra == 'pyspark' + - pyarrow-hotfix>=0.4 ; extra == 'pyspark' + - pyarrow>=10.0.1 ; extra == 'pyspark' + - pyspark>=3.5,<4.1 ; extra == 'pyspark' + - rich>=12.4.4 ; extra == 'pyspark' + - numpy>=1.23.2,<3 ; extra == 'risingwave' + - pandas>=1.5.3,<4 ; extra == 'risingwave' + - psycopg2>=2.8.4 ; extra == 'risingwave' + - pyarrow-hotfix>=0.4 ; extra == 'risingwave' + - pyarrow>=10.0.1 ; extra == 'risingwave' + - rich>=12.4.4 ; extra == 'risingwave' + - numpy>=1.23.2,<3 ; extra == 'singlestoredb' + - pandas>=1.5.3,<4 ; extra == 'singlestoredb' + - parsimonious>=0.11.0 ; extra == 'singlestoredb' + - pyarrow-hotfix>=0.4 ; extra == 'singlestoredb' + - pyarrow>=10.0.1 ; extra == 'singlestoredb' + - rich>=12.4.4 ; extra == 'singlestoredb' + - singlestoredb>=1.0 ; 
extra == 'singlestoredb' + - numpy>=1.23.2,<3 ; extra == 'snowflake' + - pandas>=1.5.3,<4 ; extra == 'snowflake' + - pyarrow-hotfix>=0.4 ; extra == 'snowflake' + - pyarrow>=10.0.1 ; extra == 'snowflake' + - rich>=12.4.4 ; extra == 'snowflake' + - snowflake-connector-python>=3.0.2,!=3.3.0b1 ; extra == 'snowflake' + - numpy>=1.23.2,<3 ; extra == 'sqlite' + - packaging>=21.3 ; extra == 'sqlite' + - pandas>=1.5.3,<4 ; extra == 'sqlite' + - pyarrow-hotfix>=0.4 ; extra == 'sqlite' + - pyarrow>=10.0.1 ; extra == 'sqlite' + - regex>=2021.7.6 ; extra == 'sqlite' + - rich>=12.4.4 ; extra == 'sqlite' + - numpy>=1.23.2,<3 ; extra == 'trino' + - pandas>=1.5.3,<4 ; extra == 'trino' + - pyarrow-hotfix>=0.4 ; extra == 'trino' + - pyarrow>=10.0.1 ; extra == 'trino' + - rich>=12.4.4 ; extra == 'trino' + - trino>=0.321 ; extra == 'trino' + - graphviz>=0.16 ; extra == 'visualization' + requires_python: '>=3.10' +- conda: https://conda.anaconda.org/conda-forge/linux-64/icu-78.2-h33c6efd_0.conda + sha256: 142a722072fa96cf16ff98eaaf641f54ab84744af81754c292cb81e0881c0329 + md5: 186a18e3ba246eccfc7cff00cd19a870 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libstdcxx >=14 + license: MIT + license_family: MIT + purls: [] + size: 12728445 + timestamp: 1767969922681 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/icu-78.2-hef89b57_0.conda + sha256: 24bc62335106c30fecbcc1dba62c5eba06d18b90ea1061abd111af7b9c89c2d7 + md5: 114e6bfe7c5ad2525eb3597acdbf2300 + depends: + - __osx >=11.0 + license: MIT + license_family: MIT + purls: [] + size: 12389400 + timestamp: 1772209104304 +- pypi: https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl + name: idna + version: '3.11' + sha256: 771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea + requires_dist: + - ruff>=0.6.2 ; extra == 'all' + - mypy>=1.11.2 ; extra == 'all' + - pytest>=8.3.2 ; extra == 'all' + - flake8>=7.1.1 ; extra == 'all' + 
requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl + name: importlib-metadata + version: 8.7.1 + sha256: 5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151 + requires_dist: + - zipp>=3.20 + - pytest>=6,!=8.1.* ; extra == 'test' + - packaging ; extra == 'test' + - pyfakefs ; extra == 'test' + - flufl-flake8 ; extra == 'test' + - pytest-perf>=0.9.2 ; extra == 'test' + - jaraco-test>=5.4 ; extra == 'test' + - sphinx>=3.5 ; extra == 'doc' + - jaraco-packaging>=9.3 ; extra == 'doc' + - rst-linker>=1.9 ; extra == 'doc' + - furo ; extra == 'doc' + - sphinx-lint ; extra == 'doc' + - jaraco-tidelift>=1.4 ; extra == 'doc' + - ipython ; extra == 'perf' + - pytest-checkdocs>=2.4 ; extra == 'check' + - pytest-ruff>=0.2.1 ; sys_platform != 'cygwin' and extra == 'check' + - pytest-cov ; extra == 'cover' + - pytest-enabler>=3.4 ; extra == 'enabler' + - pytest-mypy>=1.0.1 ; extra == 'type' + - mypy<1.19 ; platform_python_implementation == 'PyPy' and extra == 'type' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl + name: iniconfig + version: 2.3.0 + sha256: f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl + name: jinja2 + version: 3.1.6 + sha256: 85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67 + requires_dist: + - markupsafe>=2.0 + - babel>=2.7 ; extra == 'i18n' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/14/2f/967ba146e6d58cf6a652da73885f52fc68001525b4197effc174321d70b4/jmespath-1.1.0-py3-none-any.whl + name: jmespath + version: 1.1.0 + sha256: 
a5663118de4908c91729bea0acadca56526eb2698e83de10cd116ae0f4e97c64 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl + name: jsonschema + version: 4.26.0 + sha256: d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce + requires_dist: + - attrs>=22.2.0 + - jsonschema-specifications>=2023.3.6 + - referencing>=0.28.4 + - rpds-py>=0.25.0 + - fqdn ; extra == 'format' + - idna ; extra == 'format' + - isoduration ; extra == 'format' + - jsonpointer>1.13 ; extra == 'format' + - rfc3339-validator ; extra == 'format' + - rfc3987 ; extra == 'format' + - uri-template ; extra == 'format' + - webcolors>=1.11 ; extra == 'format' + - fqdn ; extra == 'format-nongpl' + - idna ; extra == 'format-nongpl' + - isoduration ; extra == 'format-nongpl' + - jsonpointer>1.13 ; extra == 'format-nongpl' + - rfc3339-validator ; extra == 'format-nongpl' + - rfc3986-validator>0.1.0 ; extra == 'format-nongpl' + - rfc3987-syntax>=1.1.0 ; extra == 'format-nongpl' + - uri-template ; extra == 'format-nongpl' + - webcolors>=24.6.0 ; extra == 'format-nongpl' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl + name: jsonschema-specifications + version: 2025.9.1 + sha256: 98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe + requires_dist: + - referencing>=0.31.0 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/cd/58/4a1880ea64032185e9ae9f63940c9327c6952d5584ea544a8f66972f2fda/jwcrypto-1.5.6-py3-none-any.whl + name: jwcrypto + version: 1.5.6 + sha256: 150d2b0ebbdb8f40b77f543fb44ffd2baeff48788be71f67f03566692fd55789 + requires_dist: + - cryptography>=3.4 + - typing-extensions>=4.5.0 + requires_python: '>=3.8' +- conda: 
https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda + sha256: 565941ac1f8b0d2f2e8f02827cbca648f4d18cd461afc31f15604cd291b5c5f3 + md5: 12bd9a3f089ee6c9266a37dab82afabd + depends: + - __glibc >=2.17,<3.0.a0 + - zstd >=1.5.7,<1.6.0a0 + constrains: + - binutils_impl_linux-64 2.45.1 + license: GPL-3.0-only + license_family: GPL + purls: [] + size: 725507 + timestamp: 1770267139900 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda + sha256: d78f1d3bea8c031d2f032b760f36676d87929b18146351c4464c66b0869df3f5 + md5: e7f7ce06ec24cfcfb9e36d28cf82ba57 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + constrains: + - expat 2.7.4.* + license: MIT + license_family: MIT + purls: [] + size: 76798 + timestamp: 1771259418166 +- conda: https://conda.anaconda.org/conda-forge/osx-64/libexpat-2.7.4-h991f03e_0.conda + sha256: 8d9d79b2de7d6f335692391f5281607221bf5d040e6724dad4c4d77cd603ce43 + md5: a684eb8a19b2aa68fde0267df172a1e3 + depends: + - __osx >=10.13 + constrains: + - expat 2.7.4.* + license: MIT + license_family: MIT + purls: [] + size: 74578 + timestamp: 1771260142624 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libexpat-2.7.4-hf6b4638_0.conda + sha256: 03887d8080d6a8fe02d75b80929271b39697ecca7628f0657d7afaea87761edf + md5: a92e310ae8dfc206ff449f362fc4217f + depends: + - __osx >=11.0 + constrains: + - expat 2.7.4.* + license: MIT + license_family: MIT + purls: [] + size: 68199 + timestamp: 1771260020767 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda + sha256: 31f19b6a88ce40ebc0d5a992c131f57d919f73c0b92cd1617a5bec83f6e961e6 + md5: a360c33a5abe61c07959e449fa1453eb + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: MIT + license_family: MIT + purls: [] + size: 58592 + timestamp: 1769456073053 +- conda: https://conda.anaconda.org/conda-forge/osx-64/libffi-3.5.2-hd1f9c09_0.conda + sha256: 
951958d1792238006fdc6fce7f71f1b559534743b26cc1333497d46e5903a2d6 + md5: 66a0dc7464927d0853b590b6f53ba3ea + depends: + - __osx >=10.13 + license: MIT + license_family: MIT + purls: [] + size: 53583 + timestamp: 1769456300951 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libffi-3.5.2-hcf2aa1b_0.conda + sha256: 6686a26466a527585e6a75cc2a242bf4a3d97d6d6c86424a441677917f28bec7 + md5: 43c04d9cb46ef176bb2a4c77e324d599 + depends: + - __osx >=11.0 + license: MIT + license_family: MIT + purls: [] + size: 40979 + timestamp: 1769456747661 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda + sha256: faf7d2017b4d718951e3a59d081eb09759152f93038479b768e3d612688f83f5 + md5: 0aa00f03f9e39fb9876085dee11a85d4 + depends: + - __glibc >=2.17,<3.0.a0 + - _openmp_mutex >=4.5 + constrains: + - libgcc-ng ==15.2.0=*_18 + - libgomp 15.2.0 he0feb66_18 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 1041788 + timestamp: 1771378212382 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda + sha256: e318a711400f536c81123e753d4c797a821021fb38970cebfb3f454126016893 + md5: d5e96b1ed75ca01906b3d2469b4ce493 + depends: + - libgcc 15.2.0 he0feb66_18 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 27526 + timestamp: 1771378224552 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda + sha256: 21337ab58e5e0649d869ab168d4e609b033509de22521de1bfed0c031bfc5110 + md5: 239c5e9546c38a1e884d69effcf4c882 + depends: + - __glibc >=2.17,<3.0.a0 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 603262 + timestamp: 1771378117851 +- conda: https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda + sha256: 755c55ebab181d678c12e49cced893598f2bab22d582fbbf4d8b83c18be207eb + md5: c7c83eecbb72d88b940c249af56c8b17 + depends: + - __glibc >=2.17,<3.0.a0 + - 
libgcc >=14 + constrains: + - xz 5.8.2.* + license: 0BSD + purls: [] + size: 113207 + timestamp: 1768752626120 +- conda: https://conda.anaconda.org/conda-forge/osx-64/liblzma-5.8.2-h11316ed_0.conda + sha256: 7ab3c98abd3b5d5ec72faa8d9f5d4b50dcee4970ed05339bc381861199dabb41 + md5: 688a0c3d57fa118b9c97bf7e471ab46c + depends: + - __osx >=10.13 + constrains: + - xz 5.8.2.* + license: 0BSD + purls: [] + size: 105482 + timestamp: 1768753411348 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/liblzma-5.8.2-h8088a28_0.conda + sha256: 7bfc7ffb2d6a9629357a70d4eadeadb6f88fa26ebc28f606b1c1e5e5ed99dc7e + md5: 009f0d956d7bfb00de86901d16e486c7 + depends: + - __osx >=11.0 + constrains: + - xz 5.8.2.* + license: 0BSD + purls: [] + size: 92242 + timestamp: 1768752982486 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libmpdec-4.0.0-hb03c661_1.conda + sha256: fe171ed5cf5959993d43ff72de7596e8ac2853e9021dec0344e583734f1e0843 + md5: 2c21e66f50753a083cbe6b80f38268fa + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 92400 + timestamp: 1769482286018 +- conda: https://conda.anaconda.org/conda-forge/osx-64/libmpdec-4.0.0-hf3981d6_1.conda + sha256: 1096c740109386607938ab9f09a7e9bca06d86770a284777586d6c378b8fb3fd + md5: ec88ba8a245855935b871a7324373105 + depends: + - __osx >=10.13 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 79899 + timestamp: 1769482558610 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libmpdec-4.0.0-h84a0fba_1.conda + sha256: 1089c7f15d5b62c622625ec6700732ece83be8b705da8c6607f4dabb0c4bd6d2 + md5: 57c4be259f5e0b99a5983799a228ae55 + depends: + - __osx >=11.0 + license: BSD-2-Clause + license_family: BSD + purls: [] + size: 73690 + timestamp: 1769482560514 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda + sha256: 927fe72b054277cde6cb82597d0fcf6baf127dcbce2e0a9d8925a68f1265eef5 + md5: d864d34357c3b65a4b731f78c0801dc4 + 
depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + license: LGPL-2.1-only + license_family: GPL + purls: [] + size: 33731 + timestamp: 1750274110928 +- pypi: https://files.pythonhosted.org/packages/01/99/f85130582f05dcf0c8902f3d629270231d2f4afdfc567f8305a952ac7f14/librt-0.8.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: librt + version: 0.8.1 + sha256: 97c2b54ff6717a7a563b72627990bec60d8029df17df423f0ed37d56a17a176b + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/1b/18/25e991cd5640c9fb0f8d91b18797b29066b792f17bf8493da183bf5caabe/librt-0.8.1-cp314-cp314-macosx_11_0_arm64.whl + name: librt + version: 0.8.1 + sha256: 228c2409c079f8c11fb2e5d7b277077f694cb93443eb760e00b3b83cb8b3176c + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/7c/5f/63f5fa395c7a8a93558c0904ba8f1c8d1b997ca6a3de61bc7659970d66bf/librt-0.8.1-cp310-cp310-macosx_10_9_x86_64.whl + name: librt + version: 0.8.1 + sha256: 81fd938344fecb9373ba1b155968c8a329491d2ce38e7ddb76f30ffb938f12dc + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/c9/6a/907ef6800f7bca71b525a05f1839b21f708c09043b1c6aa77b6b827b3996/librt-0.8.1-cp314-cp314-macosx_10_13_x86_64.whl + name: librt + version: 0.8.1 + sha256: 6cfa7fe54fd4d1f47130017351a959fe5804bda7a0bc7e07a2cdbc3fdd28d34f + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/d1/96/ef04902aad1424fd7299b62d1890e803e6ab4018c3044dca5922319c4b97/librt-0.8.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: librt + version: 0.8.1 + sha256: 6b1977c4ea97ce5eb7755a78fae68d87e4102e4aaf54985e8b56806849cc06a3 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/ff/e0/0472cf37267b5920eff2f292ccfaede1886288ce35b7f3203d8de00abfe6/librt-0.8.1-cp310-cp310-macosx_11_0_arm64.whl + name: librt + version: 0.8.1 + sha256: 5db05697c82b3a2ec53f6e72b2ed373132b0c2e05135f0696784e97d7f5d48e7 + 
requires_python: '>=3.9' +- conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.51.2-hf4e2dac_0.conda + sha256: 04596fcee262a870e4b7c9807224680ff48d4d0cc0dac076a602503d3dc6d217 + md5: da5be73701eecd0e8454423fd6ffcf30 + depends: + - __glibc >=2.17,<3.0.a0 + - icu >=78.2,<79.0a0 + - libgcc >=14 + - libzlib >=1.3.1,<2.0a0 + license: blessing + purls: [] + size: 942808 + timestamp: 1768147973361 +- conda: https://conda.anaconda.org/conda-forge/osx-64/libsqlite-3.51.2-hb99441e_0.conda + sha256: 710a7ea27744199023c92e66ad005de7f8db9cf83f10d5a943d786f0dac53b7c + md5: d910105ce2b14dfb2b32e92ec7653420 + depends: + - __osx >=10.13 + - libzlib >=1.3.1,<2.0a0 + license: blessing + purls: [] + size: 987506 + timestamp: 1768148247615 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libsqlite-3.51.2-h1ae2325_0.conda + sha256: 6e9b9f269732cbc4698c7984aa5b9682c168e2a8d1e0406e1ff10091ca046167 + md5: 4b0bf313c53c3e89692f020fb55d5f2c + depends: + - __osx >=11.0 + - icu >=78.2,<79.0a0 + - libzlib >=1.3.1,<2.0a0 + license: blessing + purls: [] + size: 909777 + timestamp: 1768148320535 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda + sha256: 78668020064fdaa27e9ab65cd2997e2c837b564ab26ce3bf0e58a2ce1a525c6e + md5: 1b08cd684f34175e4514474793d44bcb + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc 15.2.0 he0feb66_18 + constrains: + - libstdcxx-ng ==15.2.0=*_18 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 5852330 + timestamp: 1771378262446 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.41.3-h5347b49_0.conda + sha256: 1a7539cfa7df00714e8943e18de0b06cceef6778e420a5ee3a2a145773758aee + md5: db409b7c1720428638e7c0d509d3e1b5 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 40311 + timestamp: 1766271528534 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda 
+ sha256: 6ae68e0b86423ef188196fff6207ed0c8195dd84273cb5623b85aa08033a410c + md5: 5aa797f8787fe7a17d1b0821485b5adc + depends: + - libgcc-ng >=12 + license: LGPL-2.1-or-later + purls: [] + size: 100393 + timestamp: 1702724383534 +- conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda + sha256: d4bfe88d7cb447768e31650f06257995601f89076080e76df55e3112d4e47dc4 + md5: edb0dca6bc32e4f4789199455a1dbeb8 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + constrains: + - zlib 1.3.1 *_2 + license: Zlib + license_family: Other + purls: [] + size: 60963 + timestamp: 1727963148474 +- conda: https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.3.1-hd23fc13_2.conda + sha256: 8412f96504fc5993a63edf1e211d042a1fd5b1d51dedec755d2058948fcced09 + md5: 003a54a4e32b02f7355b50a837e699da + depends: + - __osx >=10.13 + constrains: + - zlib 1.3.1 *_2 + license: Zlib + license_family: Other + purls: [] + size: 57133 + timestamp: 1727963183990 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/libzlib-1.3.1-h8359307_2.conda + sha256: ce34669eadaba351cd54910743e6a2261b67009624dbc7daeeafdef93616711b + md5: 369964e85dc26bfe78f41399b366c435 + depends: + - __osx >=11.0 + constrains: + - zlib 1.3.1 *_2 + license: Zlib + license_family: Other + purls: [] + size: 46438 + timestamp: 1727963202283 +- pypi: https://files.pythonhosted.org/packages/db/bc/83e112abc66cd466c6b83f99118035867cecd41802f8d044638aa78a106e/locket-1.0.0-py2.py3-none-any.whl + name: locket + version: 1.0.0 + sha256: b6c819a722f7b6bd955b80781788e4a66a55628b858d347536b7e81325a3a5e3 + requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*' +- pypi: https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl + name: markdown-it-py + version: 4.0.0 + sha256: 87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147 + requires_dist: + - mdurl~=0.1 + - psutil ; extra == 'benchmarking' + - pytest ; 
extra == 'benchmarking' + - pytest-benchmark ; extra == 'benchmarking' + - commonmark~=0.9 ; extra == 'compare' + - markdown~=3.4 ; extra == 'compare' + - mistletoe~=1.0 ; extra == 'compare' + - mistune~=3.0 ; extra == 'compare' + - panflute~=2.3 ; extra == 'compare' + - markdown-it-pyrs ; extra == 'compare' + - linkify-it-py>=1,<3 ; extra == 'linkify' + - mdit-py-plugins>=0.5.0 ; extra == 'plugins' + - gprof2dot ; extra == 'profiling' + - mdit-py-plugins>=0.5.0 ; extra == 'rtd' + - myst-parser ; extra == 'rtd' + - pyyaml ; extra == 'rtd' + - sphinx ; extra == 'rtd' + - sphinx-copybutton ; extra == 'rtd' + - sphinx-design ; extra == 'rtd' + - sphinx-book-theme~=1.0 ; extra == 'rtd' + - jupyter-sphinx ; extra == 'rtd' + - ipykernel ; extra == 'rtd' + - coverage ; extra == 'testing' + - pytest ; extra == 'testing' + - pytest-cov ; extra == 'testing' + - pytest-regressions ; extra == 'testing' + - requests ; extra == 'testing' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl + name: markupsafe + version: 3.0.3 + sha256: eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: markupsafe + version: 3.0.3 + sha256: 457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/98/1b/fbd8eed11021cabd9226c37342fa6ca4e8a98d8188a8d9b66740494960e4/markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl + name: markupsafe + version: 3.0.3 + sha256: e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419 + requires_python: '>=3.9' +- pypi: 
https://files.pythonhosted.org/packages/af/cd/ce6e848bbf2c32314c9b237839119c5a564a59725b53157c856e90937b7a/markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: markupsafe + version: 3.0.3 + sha256: f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl + name: markupsafe + version: 3.0.3 + sha256: c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl + name: markupsafe + version: 3.0.3 + sha256: 2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl + name: mdurl + version: 0.1.2 + sha256: 84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/b6/9e/8d9f6b9746f8ede78b0a4e4b8908e4d80bd609fca0b3e3195a07dda29534/minio-7.2.11-py3-none-any.whl + name: minio + version: 7.2.11 + sha256: 153582ed52ff3b5005ba558e1f25bfe1e9e834f7f0745e594777f28e3e81e1a0 + requires_dist: + - certifi + - urllib3 + - argon2-cffi + - pycryptodome + - typing-extensions + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/39/8e/29306d5eca6dfda4b899d22c95b5420db4e0ffb7e0b6389b17379654ece5/mmh3-5.2.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + name: mmh3 + version: 5.2.0 + sha256: dfbead5575f6470c17e955b94f92d62a03dfc3d07f2e6f817d9b93dc211a1515 + requires_dist: + - pytest==8.4.1 ; extra == 'test' + - pytest-sugar==1.0.0 ; 
extra == 'test' + - black==25.1.0 ; extra == 'lint' + - clang-format==20.1.8 ; extra == 'lint' + - isort==6.0.1 ; extra == 'lint' + - pylint==3.3.7 ; extra == 'lint' + - mypy==1.17.0 ; extra == 'type' + - myst-parser==4.0.1 ; extra == 'docs' + - shibuya==2025.7.24 ; extra == 'docs' + - sphinx==8.2.3 ; extra == 'docs' + - sphinx-copybutton==0.5.2 ; extra == 'docs' + - pymmh3==0.0.5 ; extra == 'benchmark' + - pyperf==2.9.0 ; extra == 'benchmark' + - xxhash==3.5.0 ; extra == 'benchmark' + - matplotlib==3.10.3 ; extra == 'plot' + - pandas==2.3.1 ; extra == 'plot' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/3b/88/eb9a55b3f3cf43a74d6bfa8db0e2e209f966007777a1dc897c52c008314c/mmh3-5.2.0-cp310-cp310-macosx_10_9_x86_64.whl + name: mmh3 + version: 5.2.0 + sha256: 0b898cecff57442724a0f52bf42c2de42de63083a91008fb452887e372f9c328 + requires_dist: + - pytest==8.4.1 ; extra == 'test' + - pytest-sugar==1.0.0 ; extra == 'test' + - black==25.1.0 ; extra == 'lint' + - clang-format==20.1.8 ; extra == 'lint' + - isort==6.0.1 ; extra == 'lint' + - pylint==3.3.7 ; extra == 'lint' + - mypy==1.17.0 ; extra == 'type' + - myst-parser==4.0.1 ; extra == 'docs' + - shibuya==2025.7.24 ; extra == 'docs' + - sphinx==8.2.3 ; extra == 'docs' + - sphinx-copybutton==0.5.2 ; extra == 'docs' + - pymmh3==0.0.5 ; extra == 'benchmark' + - pyperf==2.9.0 ; extra == 'benchmark' + - xxhash==3.5.0 ; extra == 'benchmark' + - matplotlib==3.10.3 ; extra == 'plot' + - pandas==2.3.1 ; extra == 'plot' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/70/1f/f87e3d34d83032b4f3f0f528c6d95a98290fcacf019da61343a49dccfd51/mmh3-5.2.0-cp314-cp314-macosx_10_13_x86_64.whl + name: mmh3 + version: 5.2.0 + sha256: ff3d50dc3fe8a98059f99b445dfb62792b5d006c5e0b8f03c6de2813b8376110 + requires_dist: + - pytest==8.4.1 ; extra == 'test' + - pytest-sugar==1.0.0 ; extra == 'test' + - black==25.1.0 ; extra == 'lint' + - clang-format==20.1.8 ; extra == 'lint' + - isort==6.0.1 ; extra 
== 'lint' + - pylint==3.3.7 ; extra == 'lint' + - mypy==1.17.0 ; extra == 'type' + - myst-parser==4.0.1 ; extra == 'docs' + - shibuya==2025.7.24 ; extra == 'docs' + - sphinx==8.2.3 ; extra == 'docs' + - sphinx-copybutton==0.5.2 ; extra == 'docs' + - pymmh3==0.0.5 ; extra == 'benchmark' + - pyperf==2.9.0 ; extra == 'benchmark' + - xxhash==3.5.0 ; extra == 'benchmark' + - matplotlib==3.10.3 ; extra == 'plot' + - pandas==2.3.1 ; extra == 'plot' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/a6/e2/db849eaed07117086f3452feca8c839d30d38b830ac59fe1ce65af8be5ad/mmh3-5.2.0-cp314-cp314-macosx_11_0_arm64.whl + name: mmh3 + version: 5.2.0 + sha256: 37a358cc881fe796e099c1db6ce07ff757f088827b4e8467ac52b7a7ffdca647 + requires_dist: + - pytest==8.4.1 ; extra == 'test' + - pytest-sugar==1.0.0 ; extra == 'test' + - black==25.1.0 ; extra == 'lint' + - clang-format==20.1.8 ; extra == 'lint' + - isort==6.0.1 ; extra == 'lint' + - pylint==3.3.7 ; extra == 'lint' + - mypy==1.17.0 ; extra == 'type' + - myst-parser==4.0.1 ; extra == 'docs' + - shibuya==2025.7.24 ; extra == 'docs' + - sphinx==8.2.3 ; extra == 'docs' + - sphinx-copybutton==0.5.2 ; extra == 'docs' + - pymmh3==0.0.5 ; extra == 'benchmark' + - pyperf==2.9.0 ; extra == 'benchmark' + - xxhash==3.5.0 ; extra == 'benchmark' + - matplotlib==3.10.3 ; extra == 'plot' + - pandas==2.3.1 ; extra == 'plot' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/ca/e0/78adf4104c425606a9ce33fb351f790c76a6c2314969c4a517d1ffc92196/mmh3-5.2.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + name: mmh3 + version: 5.2.0 + sha256: 1ba55d6ca32eeef8b2625e1e4bfc3b3db52bc63014bd7e5df8cc11bf2b036b12 + requires_dist: + - pytest==8.4.1 ; extra == 'test' + - pytest-sugar==1.0.0 ; extra == 'test' + - black==25.1.0 ; extra == 'lint' + - clang-format==20.1.8 ; extra == 'lint' + - isort==6.0.1 ; extra == 'lint' + - pylint==3.3.7 ; extra == 'lint' + - mypy==1.17.0 ; extra == 'type' 
+ - myst-parser==4.0.1 ; extra == 'docs' + - shibuya==2025.7.24 ; extra == 'docs' + - sphinx==8.2.3 ; extra == 'docs' + - sphinx-copybutton==0.5.2 ; extra == 'docs' + - pymmh3==0.0.5 ; extra == 'benchmark' + - pyperf==2.9.0 ; extra == 'benchmark' + - xxhash==3.5.0 ; extra == 'benchmark' + - matplotlib==3.10.3 ; extra == 'plot' + - pandas==2.3.1 ; extra == 'plot' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/d1/4c/8e4b3878bf8435c697d7ce99940a3784eb864521768069feaccaff884a17/mmh3-5.2.0-cp310-cp310-macosx_11_0_arm64.whl + name: mmh3 + version: 5.2.0 + sha256: be1374df449465c9f2500e62eee73a39db62152a8bdfbe12ec5b5c1cd451344d + requires_dist: + - pytest==8.4.1 ; extra == 'test' + - pytest-sugar==1.0.0 ; extra == 'test' + - black==25.1.0 ; extra == 'lint' + - clang-format==20.1.8 ; extra == 'lint' + - isort==6.0.1 ; extra == 'lint' + - pylint==3.3.7 ; extra == 'lint' + - mypy==1.17.0 ; extra == 'type' + - myst-parser==4.0.1 ; extra == 'docs' + - shibuya==2025.7.24 ; extra == 'docs' + - sphinx==8.2.3 ; extra == 'docs' + - sphinx-copybutton==0.5.2 ; extra == 'docs' + - pymmh3==0.0.5 ; extra == 'benchmark' + - pyperf==2.9.0 ; extra == 'benchmark' + - xxhash==3.5.0 ; extra == 'benchmark' + - matplotlib==3.10.3 ; extra == 'plot' + - pandas==2.3.1 ; extra == 'plot' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/5b/e1/2b720cc341325c00be44e1ed59e7cfeae2678329fbf5aa68f5bda57fe728/msgpack-1.1.2-cp310-cp310-macosx_11_0_arm64.whl + name: msgpack + version: 1.1.2 + sha256: a605409040f2da88676e9c9e5853b3449ba8011973616189ea5ee55ddbc5bc87 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/b7/09/2a06956383c0fdebaef5aa9246e2356776f12ea6f2a44bd1368abf0e46c4/msgpack-1.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: msgpack + version: 1.1.2 + sha256: 365c0bbe981a27d8932da71af63ef86acc59ed5c01ad929e09a0b88c6294e28a + requires_python: '>=3.9' +- pypi: 
https://files.pythonhosted.org/packages/f5/a2/3b68a9e769db68668b25c6108444a35f9bd163bb848c0650d516761a59c0/msgpack-1.1.2-cp310-cp310-macosx_10_9_x86_64.whl + name: msgpack + version: 1.1.2 + sha256: 0051fffef5a37ca2cd16978ae4f0aef92f164df86823871b5162812bebecd8e2 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/31/5c/08c7f7fe311f32e83f7621cd3f99d805f45519cd06fafb247628b861da7d/multidict-6.7.1-cp310-cp310-macosx_11_0_arm64.whl + name: multidict + version: 6.7.1 + sha256: cdea2e7b2456cfb6694fb113066fd0ec7ea4d67e3a35e1f4cbeea0b448bf5872 + requires_dist: + - typing-extensions>=4.1.0 ; python_full_version < '3.11' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/4b/ac/b605473de2bb404e742f2cc3583d12aedb2352a70e49ae8fce455b50c5aa/multidict-6.7.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: multidict + version: 6.7.1 + sha256: 9b0d9b91d1aa44db9c1f1ecd0d9d2ae610b2f4f856448664e01a3b35899f3f92 + requires_dist: + - typing-extensions>=4.1.0 ; python_full_version < '3.11' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/ef/04/9de3f8077852e3d438215c81e9b691244532d2e05b4270e89ce67b7d103c/multidict-6.7.1-cp310-cp310-macosx_10_9_x86_64.whl + name: multidict + version: 6.7.1 + sha256: 974e72a2474600827abaeda71af0c53d9ebbc3c2eb7da37b37d7829ae31232d8 + requires_dist: + - typing-extensions>=4.1.0 ; python_full_version < '3.11' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/2a/0d/93c2e4a287f74ef11a66fb6d49c7a9f05e47b0a4399040e6719b57f500d2/mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: mypy + version: 1.19.1 + sha256: de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74 + requires_dist: + - typing-extensions>=4.6.0 + - mypy-extensions>=1.0.0 + - pathspec>=0.9.0 + - tomli>=1.1.0 ; python_full_version < '3.11' + - librt>=0.6.2 ; platform_python_implementation != 'PyPy' + - 
psutil>=4.0 ; extra == 'dmypy' + - setuptools>=50 ; extra == 'mypyc' + - lxml ; extra == 'reports' + - pip ; extra == 'install-types' + - orjson ; extra == 'faster-cache' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/2f/63/e499890d8e39b1ff2df4c0c6ce5d371b6844ee22b8250687a99fd2f657a8/mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl + name: mypy + version: 1.19.1 + sha256: 5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec + requires_dist: + - typing-extensions>=4.6.0 + - mypy-extensions>=1.0.0 + - pathspec>=0.9.0 + - tomli>=1.1.0 ; python_full_version < '3.11' + - librt>=0.6.2 ; platform_python_implementation != 'PyPy' + - psutil>=4.0 ; extra == 'dmypy' + - setuptools>=50 ; extra == 'mypyc' + - lxml ; extra == 'reports' + - pip ; extra == 'install-types' + - orjson ; extra == 'faster-cache' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: mypy + version: 1.19.1 + sha256: 28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045 + requires_dist: + - typing-extensions>=4.6.0 + - mypy-extensions>=1.0.0 + - pathspec>=0.9.0 + - tomli>=1.1.0 ; python_full_version < '3.11' + - librt>=0.6.2 ; platform_python_implementation != 'PyPy' + - psutil>=4.0 ; extra == 'dmypy' + - setuptools>=50 ; extra == 'mypyc' + - lxml ; extra == 'reports' + - pip ; extra == 'install-types' + - orjson ; extra == 'faster-cache' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/72/4b/095626fc136fba96effc4fd4a82b41d688ab92124f8c4f7564bffe5cf1b0/mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl + name: mypy + version: 1.19.1 + sha256: 022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b + requires_dist: + - typing-extensions>=4.6.0 + - mypy-extensions>=1.0.0 + - pathspec>=0.9.0 + - tomli>=1.1.0 ; python_full_version < '3.11' + 
- librt>=0.6.2 ; platform_python_implementation != 'PyPy' + - psutil>=4.0 ; extra == 'dmypy' + - setuptools>=50 ; extra == 'mypyc' + - lxml ; extra == 'reports' + - pip ; extra == 'install-types' + - orjson ; extra == 'faster-cache' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl + name: mypy + version: 1.19.1 + sha256: 804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718 + requires_dist: + - typing-extensions>=4.6.0 + - mypy-extensions>=1.0.0 + - pathspec>=0.9.0 + - tomli>=1.1.0 ; python_full_version < '3.11' + - librt>=0.6.2 ; platform_python_implementation != 'PyPy' + - psutil>=4.0 ; extra == 'dmypy' + - setuptools>=50 ; extra == 'mypyc' + - lxml ; extra == 'reports' + - pip ; extra == 'install-types' + - orjson ; extra == 'faster-cache' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl + name: mypy + version: 1.19.1 + sha256: 06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1 + requires_dist: + - typing-extensions>=4.6.0 + - mypy-extensions>=1.0.0 + - pathspec>=0.9.0 + - tomli>=1.1.0 ; python_full_version < '3.11' + - librt>=0.6.2 ; platform_python_implementation != 'PyPy' + - psutil>=4.0 ; extra == 'dmypy' + - setuptools>=50 ; extra == 'mypyc' + - lxml ; extra == 'reports' + - pip ; extra == 'install-types' + - orjson ; extra == 'faster-cache' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl + name: mypy-extensions + version: 1.1.0 + sha256: 1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505 + requires_python: '>=3.8' +- conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda + sha256: 
3fde293232fa3fca98635e1167de6b7c7fda83caf24b9d6c91ec9eefb4f4d586 + md5: 47e340acb35de30501a76c7c799c41d7 + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=13 + license: X11 AND BSD-3-Clause + purls: [] + size: 891641 + timestamp: 1738195959188 +- conda: https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.5-h0622a9a_3.conda + sha256: ea4a5d27ded18443749aefa49dc79f6356da8506d508b5296f60b8d51e0c4bd9 + md5: ced34dd9929f491ca6dab6a2927aff25 + depends: + - __osx >=10.13 + license: X11 AND BSD-3-Clause + purls: [] + size: 822259 + timestamp: 1738196181298 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/ncurses-6.5-h5e97a16_3.conda + sha256: 2827ada40e8d9ca69a153a45f7fd14f32b2ead7045d3bbb5d10964898fe65733 + md5: 068d497125e4bf8a66bf707254fff5ae + depends: + - __osx >=11.0 + license: X11 AND BSD-3-Clause + purls: [] + size: 797030 + timestamp: 1738196177597 +- pypi: https://files.pythonhosted.org/packages/22/c2/4b9221495b2a132cc9d2eb862e21d42a009f5a60e45fc44b00118c174bff/numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl + name: numpy + version: 2.2.6 + sha256: 8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/9a/3e/ed6db5be21ce87955c0cbd3009f2803f59fa08df21b5df06862e2d8e2bdd/numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl + name: numpy + version: 2.2.6 + sha256: b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/b4/63/3de6a34ad7ad6646ac7d2f55ebc6ad439dbbf9c4370017c50cf403fb19b5/numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + name: numpy + version: 2.2.6 + sha256: fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/18/88/b7df6050bf18fdcfb7046286c6535cabbdd2064a3440fca3f069d319c16e/numpy-2.4.2-cp314-cp314-macosx_10_15_x86_64.whl + name: numpy + version: 2.4.2 + sha256: 
444be170853f1f9d528428eceb55f12918e4fda5d8805480f36a002f1415e09b + requires_python: '>=3.11' +- pypi: https://files.pythonhosted.org/packages/25/7a/1fee4329abc705a469a4afe6e69b1ef7e915117747886327104a8493a955/numpy-2.4.2-cp314-cp314-macosx_11_0_arm64.whl + name: numpy + version: 2.4.2 + sha256: d1240d50adff70c2a88217698ca844723068533f3f5c5fa6ee2e3220e3bdb000 + requires_python: '>=3.11' +- pypi: https://files.pythonhosted.org/packages/5d/6c/7f237821c9642fb2a04d2f1e88b4295677144ca93285fd76eff3bcba858d/numpy-2.4.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl + name: numpy + version: 2.4.2 + sha256: bba37bc29d4d85761deed3954a1bc62be7cf462b9510b51d367b769a8c8df325 + requires_python: '>=3.11' +- pypi: https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl + name: oauthlib + version: 3.3.1 + sha256: 88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1 + requires_dist: + - cryptography>=3.0.0 ; extra == 'rsa' + - cryptography>=3.0.0 ; extra == 'signedtoken' + - pyjwt>=2.0.0,<3 ; extra == 'signedtoken' + - blinker>=1.4.0 ; extra == 'signals' + requires_python: '>=3.8' +- conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.1-h35e630c_1.conda + sha256: 44c877f8af015332a5d12f5ff0fb20ca32f896526a7d0cdb30c769df1144fb5c + md5: f61eb8cd60ff9057122a3d338b99c00f + depends: + - __glibc >=2.17,<3.0.a0 + - ca-certificates + - libgcc >=14 + license: Apache-2.0 + license_family: Apache + purls: [] + size: 3164551 + timestamp: 1769555830639 +- conda: https://conda.anaconda.org/conda-forge/osx-64/openssl-3.6.1-hb6871ef_1.conda + sha256: e02e5639b0e4d6d4fcf0f3b082642844fb5a37316f5b0a1126c6271347462e90 + md5: 30bb8d08b99b9a7600d39efb3559fff0 + depends: + - __osx >=10.13 + - ca-certificates + license: Apache-2.0 + license_family: Apache + purls: [] + size: 2777136 + timestamp: 1769557662405 +- conda: 
https://conda.anaconda.org/conda-forge/osx-arm64/openssl-3.6.1-hd24854e_1.conda + sha256: 361f5c5e60052abc12bdd1b50d7a1a43e6a6653aab99a2263bf2288d709dcf67 + md5: f4f6ad63f98f64191c3e77c5f5f29d76 + depends: + - __osx >=11.0 + - ca-certificates + license: Apache-2.0 + license_family: Apache + purls: [] + size: 3104268 + timestamp: 1769556384749 +- pypi: https://files.pythonhosted.org/packages/8f/ed/f2b5d66aa9b6b5c02ff5f120efc7b38c7c4962b21e6be0f00fd99a5c348e/orjson-3.11.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + name: orjson + version: 3.11.7 + sha256: cededd6738e1c153530793998e31c05086582b08315db48ab66649768f326baa + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/c2/8b/ecdad52d0b38d4b8f514be603e69ccd5eacf4e7241f972e37e79792212ec/orjson-3.11.7-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + name: orjson + version: 3.11.7 + sha256: a56df3239294ea5964adf074c54bcc4f0ccd21636049a2cf3ca9cf03b5d03cf1 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/de/1a/a373746fa6d0e116dd9e54371a7b54622c44d12296d5d0f3ad5e3ff33490/orjson-3.11.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl + name: orjson + version: 3.11.7 + sha256: a02c833f38f36546ba65a452127633afce4cf0dd7296b753d3bb54e55e5c0174 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/e9/1e/745565dca749813db9a093c5ebc4bac1a9475c64d54b95654336ac3ed961/orjson-3.11.7-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl + name: orjson + version: 3.11.7 + sha256: de0a37f21d0d364954ad5de1970491d7fbd0fb1ef7417d4d56a36dc01ba0c0a0 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl + name: packaging + version: '26.0' + sha256: b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529 + requires_python: '>=3.8' +- pypi: 
https://files.pythonhosted.org/packages/04/fd/74903979833db8390b73b3a8a7d30d146d710bd32703724dd9083950386f/pandas-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl + name: pandas + version: 2.3.3 + sha256: ee15f284898e7b246df8087fc82b87b01686f98ee67d85a17b7ab44143a3a9a0 + requires_dist: + - numpy>=1.22.4 ; python_full_version < '3.11' + - numpy>=1.23.2 ; python_full_version == '3.11.*' + - numpy>=1.26.0 ; python_full_version >= '3.12' + - python-dateutil>=2.8.2 + - pytz>=2020.1 + - tzdata>=2022.7 + - hypothesis>=6.46.1 ; extra == 'test' + - pytest>=7.3.2 ; extra == 'test' + - pytest-xdist>=2.2.0 ; extra == 'test' + - pyarrow>=10.0.1 ; extra == 'pyarrow' + - bottleneck>=1.3.6 ; extra == 'performance' + - numba>=0.56.4 ; extra == 'performance' + - numexpr>=2.8.4 ; extra == 'performance' + - scipy>=1.10.0 ; extra == 'computation' + - xarray>=2022.12.0 ; extra == 'computation' + - fsspec>=2022.11.0 ; extra == 'fss' + - s3fs>=2022.11.0 ; extra == 'aws' + - gcsfs>=2022.11.0 ; extra == 'gcp' + - pandas-gbq>=0.19.0 ; extra == 'gcp' + - odfpy>=1.4.1 ; extra == 'excel' + - openpyxl>=3.1.0 ; extra == 'excel' + - python-calamine>=0.1.7 ; extra == 'excel' + - pyxlsb>=1.0.10 ; extra == 'excel' + - xlrd>=2.0.1 ; extra == 'excel' + - xlsxwriter>=3.0.5 ; extra == 'excel' + - pyarrow>=10.0.1 ; extra == 'parquet' + - pyarrow>=10.0.1 ; extra == 'feather' + - tables>=3.8.0 ; extra == 'hdf5' + - pyreadstat>=1.2.0 ; extra == 'spss' + - sqlalchemy>=2.0.0 ; extra == 'postgresql' + - psycopg2>=2.9.6 ; extra == 'postgresql' + - adbc-driver-postgresql>=0.8.0 ; extra == 'postgresql' + - sqlalchemy>=2.0.0 ; extra == 'mysql' + - pymysql>=1.0.2 ; extra == 'mysql' + - sqlalchemy>=2.0.0 ; extra == 'sql-other' + - adbc-driver-postgresql>=0.8.0 ; extra == 'sql-other' + - adbc-driver-sqlite>=0.8.0 ; extra == 'sql-other' + - beautifulsoup4>=4.11.2 ; extra == 'html' + - html5lib>=1.1 ; extra == 'html' + - lxml>=4.9.2 ; extra == 'html' + - lxml>=4.9.2 ; extra == 'xml' + - matplotlib>=3.6.3 ; extra == 'plot' + - 
jinja2>=3.1.2 ; extra == 'output-formatting' + - tabulate>=0.9.0 ; extra == 'output-formatting' + - pyqt5>=5.15.9 ; extra == 'clipboard' + - qtpy>=2.3.0 ; extra == 'clipboard' + - zstandard>=0.19.0 ; extra == 'compression' + - dataframe-api-compat>=0.1.7 ; extra == 'consortium-standard' + - adbc-driver-postgresql>=0.8.0 ; extra == 'all' + - adbc-driver-sqlite>=0.8.0 ; extra == 'all' + - beautifulsoup4>=4.11.2 ; extra == 'all' + - bottleneck>=1.3.6 ; extra == 'all' + - dataframe-api-compat>=0.1.7 ; extra == 'all' + - fastparquet>=2022.12.0 ; extra == 'all' + - fsspec>=2022.11.0 ; extra == 'all' + - gcsfs>=2022.11.0 ; extra == 'all' + - html5lib>=1.1 ; extra == 'all' + - hypothesis>=6.46.1 ; extra == 'all' + - jinja2>=3.1.2 ; extra == 'all' + - lxml>=4.9.2 ; extra == 'all' + - matplotlib>=3.6.3 ; extra == 'all' + - numba>=0.56.4 ; extra == 'all' + - numexpr>=2.8.4 ; extra == 'all' + - odfpy>=1.4.1 ; extra == 'all' + - openpyxl>=3.1.0 ; extra == 'all' + - pandas-gbq>=0.19.0 ; extra == 'all' + - psycopg2>=2.9.6 ; extra == 'all' + - pyarrow>=10.0.1 ; extra == 'all' + - pymysql>=1.0.2 ; extra == 'all' + - pyqt5>=5.15.9 ; extra == 'all' + - pyreadstat>=1.2.0 ; extra == 'all' + - pytest>=7.3.2 ; extra == 'all' + - pytest-xdist>=2.2.0 ; extra == 'all' + - python-calamine>=0.1.7 ; extra == 'all' + - pyxlsb>=1.0.10 ; extra == 'all' + - qtpy>=2.3.0 ; extra == 'all' + - scipy>=1.10.0 ; extra == 'all' + - s3fs>=2022.11.0 ; extra == 'all' + - sqlalchemy>=2.0.0 ; extra == 'all' + - tables>=3.8.0 ; extra == 'all' + - tabulate>=0.9.0 ; extra == 'all' + - xarray>=2022.12.0 ; extra == 'all' + - xlrd>=2.0.1 ; extra == 'all' + - xlsxwriter>=3.0.5 ; extra == 'all' + - zstandard>=0.19.0 ; extra == 'all' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/13/4f/66d99628ff8ce7857aca52fed8f0066ce209f96be2fede6cef9f84e8d04f/pandas-2.3.3-cp310-cp310-macosx_11_0_arm64.whl + name: pandas + version: 2.3.3 + sha256: 
e19d192383eab2f4ceb30b412b22ea30690c9e618f78870357ae1d682912015a + requires_dist: + - numpy>=1.22.4 ; python_full_version < '3.11' + - numpy>=1.23.2 ; python_full_version == '3.11.*' + - numpy>=1.26.0 ; python_full_version >= '3.12' + - python-dateutil>=2.8.2 + - pytz>=2020.1 + - tzdata>=2022.7 + - hypothesis>=6.46.1 ; extra == 'test' + - pytest>=7.3.2 ; extra == 'test' + - pytest-xdist>=2.2.0 ; extra == 'test' + - pyarrow>=10.0.1 ; extra == 'pyarrow' + - bottleneck>=1.3.6 ; extra == 'performance' + - numba>=0.56.4 ; extra == 'performance' + - numexpr>=2.8.4 ; extra == 'performance' + - scipy>=1.10.0 ; extra == 'computation' + - xarray>=2022.12.0 ; extra == 'computation' + - fsspec>=2022.11.0 ; extra == 'fss' + - s3fs>=2022.11.0 ; extra == 'aws' + - gcsfs>=2022.11.0 ; extra == 'gcp' + - pandas-gbq>=0.19.0 ; extra == 'gcp' + - odfpy>=1.4.1 ; extra == 'excel' + - openpyxl>=3.1.0 ; extra == 'excel' + - python-calamine>=0.1.7 ; extra == 'excel' + - pyxlsb>=1.0.10 ; extra == 'excel' + - xlrd>=2.0.1 ; extra == 'excel' + - xlsxwriter>=3.0.5 ; extra == 'excel' + - pyarrow>=10.0.1 ; extra == 'parquet' + - pyarrow>=10.0.1 ; extra == 'feather' + - tables>=3.8.0 ; extra == 'hdf5' + - pyreadstat>=1.2.0 ; extra == 'spss' + - sqlalchemy>=2.0.0 ; extra == 'postgresql' + - psycopg2>=2.9.6 ; extra == 'postgresql' + - adbc-driver-postgresql>=0.8.0 ; extra == 'postgresql' + - sqlalchemy>=2.0.0 ; extra == 'mysql' + - pymysql>=1.0.2 ; extra == 'mysql' + - sqlalchemy>=2.0.0 ; extra == 'sql-other' + - adbc-driver-postgresql>=0.8.0 ; extra == 'sql-other' + - adbc-driver-sqlite>=0.8.0 ; extra == 'sql-other' + - beautifulsoup4>=4.11.2 ; extra == 'html' + - html5lib>=1.1 ; extra == 'html' + - lxml>=4.9.2 ; extra == 'html' + - lxml>=4.9.2 ; extra == 'xml' + - matplotlib>=3.6.3 ; extra == 'plot' + - jinja2>=3.1.2 ; extra == 'output-formatting' + - tabulate>=0.9.0 ; extra == 'output-formatting' + - pyqt5>=5.15.9 ; extra == 'clipboard' + - qtpy>=2.3.0 ; extra == 'clipboard' + - zstandard>=0.19.0 
; extra == 'compression' + - dataframe-api-compat>=0.1.7 ; extra == 'consortium-standard' + - adbc-driver-postgresql>=0.8.0 ; extra == 'all' + - adbc-driver-sqlite>=0.8.0 ; extra == 'all' + - beautifulsoup4>=4.11.2 ; extra == 'all' + - bottleneck>=1.3.6 ; extra == 'all' + - dataframe-api-compat>=0.1.7 ; extra == 'all' + - fastparquet>=2022.12.0 ; extra == 'all' + - fsspec>=2022.11.0 ; extra == 'all' + - gcsfs>=2022.11.0 ; extra == 'all' + - html5lib>=1.1 ; extra == 'all' + - hypothesis>=6.46.1 ; extra == 'all' + - jinja2>=3.1.2 ; extra == 'all' + - lxml>=4.9.2 ; extra == 'all' + - matplotlib>=3.6.3 ; extra == 'all' + - numba>=0.56.4 ; extra == 'all' + - numexpr>=2.8.4 ; extra == 'all' + - odfpy>=1.4.1 ; extra == 'all' + - openpyxl>=3.1.0 ; extra == 'all' + - pandas-gbq>=0.19.0 ; extra == 'all' + - psycopg2>=2.9.6 ; extra == 'all' + - pyarrow>=10.0.1 ; extra == 'all' + - pymysql>=1.0.2 ; extra == 'all' + - pyqt5>=5.15.9 ; extra == 'all' + - pyreadstat>=1.2.0 ; extra == 'all' + - pytest>=7.3.2 ; extra == 'all' + - pytest-xdist>=2.2.0 ; extra == 'all' + - python-calamine>=0.1.7 ; extra == 'all' + - pyxlsb>=1.0.10 ; extra == 'all' + - qtpy>=2.3.0 ; extra == 'all' + - scipy>=1.10.0 ; extra == 'all' + - s3fs>=2022.11.0 ; extra == 'all' + - sqlalchemy>=2.0.0 ; extra == 'all' + - tables>=3.8.0 ; extra == 'all' + - tabulate>=0.9.0 ; extra == 'all' + - xarray>=2022.12.0 ; extra == 'all' + - xlrd>=2.0.1 ; extra == 'all' + - xlsxwriter>=3.0.5 ; extra == 'all' + - zstandard>=0.19.0 ; extra == 'all' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/15/b2/0e62f78c0c5ba7e3d2c5945a82456f4fac76c480940f805e0b97fcbc2f65/pandas-2.3.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl + name: pandas + version: 2.3.3 + sha256: ee67acbbf05014ea6c763beb097e03cd629961c8a632075eeb34247120abcb4b + requires_dist: + - numpy>=1.22.4 ; python_full_version < '3.11' + - numpy>=1.23.2 ; python_full_version == '3.11.*' + - numpy>=1.26.0 ; python_full_version >= 
'3.12' + - python-dateutil>=2.8.2 + - pytz>=2020.1 + - tzdata>=2022.7 + - hypothesis>=6.46.1 ; extra == 'test' + - pytest>=7.3.2 ; extra == 'test' + - pytest-xdist>=2.2.0 ; extra == 'test' + - pyarrow>=10.0.1 ; extra == 'pyarrow' + - bottleneck>=1.3.6 ; extra == 'performance' + - numba>=0.56.4 ; extra == 'performance' + - numexpr>=2.8.4 ; extra == 'performance' + - scipy>=1.10.0 ; extra == 'computation' + - xarray>=2022.12.0 ; extra == 'computation' + - fsspec>=2022.11.0 ; extra == 'fss' + - s3fs>=2022.11.0 ; extra == 'aws' + - gcsfs>=2022.11.0 ; extra == 'gcp' + - pandas-gbq>=0.19.0 ; extra == 'gcp' + - odfpy>=1.4.1 ; extra == 'excel' + - openpyxl>=3.1.0 ; extra == 'excel' + - python-calamine>=0.1.7 ; extra == 'excel' + - pyxlsb>=1.0.10 ; extra == 'excel' + - xlrd>=2.0.1 ; extra == 'excel' + - xlsxwriter>=3.0.5 ; extra == 'excel' + - pyarrow>=10.0.1 ; extra == 'parquet' + - pyarrow>=10.0.1 ; extra == 'feather' + - tables>=3.8.0 ; extra == 'hdf5' + - pyreadstat>=1.2.0 ; extra == 'spss' + - sqlalchemy>=2.0.0 ; extra == 'postgresql' + - psycopg2>=2.9.6 ; extra == 'postgresql' + - adbc-driver-postgresql>=0.8.0 ; extra == 'postgresql' + - sqlalchemy>=2.0.0 ; extra == 'mysql' + - pymysql>=1.0.2 ; extra == 'mysql' + - sqlalchemy>=2.0.0 ; extra == 'sql-other' + - adbc-driver-postgresql>=0.8.0 ; extra == 'sql-other' + - adbc-driver-sqlite>=0.8.0 ; extra == 'sql-other' + - beautifulsoup4>=4.11.2 ; extra == 'html' + - html5lib>=1.1 ; extra == 'html' + - lxml>=4.9.2 ; extra == 'html' + - lxml>=4.9.2 ; extra == 'xml' + - matplotlib>=3.6.3 ; extra == 'plot' + - jinja2>=3.1.2 ; extra == 'output-formatting' + - tabulate>=0.9.0 ; extra == 'output-formatting' + - pyqt5>=5.15.9 ; extra == 'clipboard' + - qtpy>=2.3.0 ; extra == 'clipboard' + - zstandard>=0.19.0 ; extra == 'compression' + - dataframe-api-compat>=0.1.7 ; extra == 'consortium-standard' + - adbc-driver-postgresql>=0.8.0 ; extra == 'all' + - adbc-driver-sqlite>=0.8.0 ; extra == 'all' + - beautifulsoup4>=4.11.2 ; extra == 
'all' + - bottleneck>=1.3.6 ; extra == 'all' + - dataframe-api-compat>=0.1.7 ; extra == 'all' + - fastparquet>=2022.12.0 ; extra == 'all' + - fsspec>=2022.11.0 ; extra == 'all' + - gcsfs>=2022.11.0 ; extra == 'all' + - html5lib>=1.1 ; extra == 'all' + - hypothesis>=6.46.1 ; extra == 'all' + - jinja2>=3.1.2 ; extra == 'all' + - lxml>=4.9.2 ; extra == 'all' + - matplotlib>=3.6.3 ; extra == 'all' + - numba>=0.56.4 ; extra == 'all' + - numexpr>=2.8.4 ; extra == 'all' + - odfpy>=1.4.1 ; extra == 'all' + - openpyxl>=3.1.0 ; extra == 'all' + - pandas-gbq>=0.19.0 ; extra == 'all' + - psycopg2>=2.9.6 ; extra == 'all' + - pyarrow>=10.0.1 ; extra == 'all' + - pymysql>=1.0.2 ; extra == 'all' + - pyqt5>=5.15.9 ; extra == 'all' + - pyreadstat>=1.2.0 ; extra == 'all' + - pytest>=7.3.2 ; extra == 'all' + - pytest-xdist>=2.2.0 ; extra == 'all' + - python-calamine>=0.1.7 ; extra == 'all' + - pyxlsb>=1.0.10 ; extra == 'all' + - qtpy>=2.3.0 ; extra == 'all' + - scipy>=1.10.0 ; extra == 'all' + - s3fs>=2022.11.0 ; extra == 'all' + - sqlalchemy>=2.0.0 ; extra == 'all' + - tables>=3.8.0 ; extra == 'all' + - tabulate>=0.9.0 ; extra == 'all' + - xarray>=2022.12.0 ; extra == 'all' + - xlrd>=2.0.1 ; extra == 'all' + - xlsxwriter>=3.0.5 ; extra == 'all' + - zstandard>=0.19.0 ; extra == 'all' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/21/00/266d6b357ad5e6d3ad55093a7e8efc7dd245f5a842b584db9f30b0f0a287/pandas-2.3.3-cp314-cp314-macosx_11_0_arm64.whl + name: pandas + version: 2.3.3 + sha256: 1611aedd912e1ff81ff41c745822980c49ce4a7907537be8692c8dbc31924593 + requires_dist: + - numpy>=1.22.4 ; python_full_version < '3.11' + - numpy>=1.23.2 ; python_full_version == '3.11.*' + - numpy>=1.26.0 ; python_full_version >= '3.12' + - python-dateutil>=2.8.2 + - pytz>=2020.1 + - tzdata>=2022.7 + - hypothesis>=6.46.1 ; extra == 'test' + - pytest>=7.3.2 ; extra == 'test' + - pytest-xdist>=2.2.0 ; extra == 'test' + - pyarrow>=10.0.1 ; extra == 'pyarrow' + - bottleneck>=1.3.6 ; 
extra == 'performance' + - numba>=0.56.4 ; extra == 'performance' + - numexpr>=2.8.4 ; extra == 'performance' + - scipy>=1.10.0 ; extra == 'computation' + - xarray>=2022.12.0 ; extra == 'computation' + - fsspec>=2022.11.0 ; extra == 'fss' + - s3fs>=2022.11.0 ; extra == 'aws' + - gcsfs>=2022.11.0 ; extra == 'gcp' + - pandas-gbq>=0.19.0 ; extra == 'gcp' + - odfpy>=1.4.1 ; extra == 'excel' + - openpyxl>=3.1.0 ; extra == 'excel' + - python-calamine>=0.1.7 ; extra == 'excel' + - pyxlsb>=1.0.10 ; extra == 'excel' + - xlrd>=2.0.1 ; extra == 'excel' + - xlsxwriter>=3.0.5 ; extra == 'excel' + - pyarrow>=10.0.1 ; extra == 'parquet' + - pyarrow>=10.0.1 ; extra == 'feather' + - tables>=3.8.0 ; extra == 'hdf5' + - pyreadstat>=1.2.0 ; extra == 'spss' + - sqlalchemy>=2.0.0 ; extra == 'postgresql' + - psycopg2>=2.9.6 ; extra == 'postgresql' + - adbc-driver-postgresql>=0.8.0 ; extra == 'postgresql' + - sqlalchemy>=2.0.0 ; extra == 'mysql' + - pymysql>=1.0.2 ; extra == 'mysql' + - sqlalchemy>=2.0.0 ; extra == 'sql-other' + - adbc-driver-postgresql>=0.8.0 ; extra == 'sql-other' + - adbc-driver-sqlite>=0.8.0 ; extra == 'sql-other' + - beautifulsoup4>=4.11.2 ; extra == 'html' + - html5lib>=1.1 ; extra == 'html' + - lxml>=4.9.2 ; extra == 'html' + - lxml>=4.9.2 ; extra == 'xml' + - matplotlib>=3.6.3 ; extra == 'plot' + - jinja2>=3.1.2 ; extra == 'output-formatting' + - tabulate>=0.9.0 ; extra == 'output-formatting' + - pyqt5>=5.15.9 ; extra == 'clipboard' + - qtpy>=2.3.0 ; extra == 'clipboard' + - zstandard>=0.19.0 ; extra == 'compression' + - dataframe-api-compat>=0.1.7 ; extra == 'consortium-standard' + - adbc-driver-postgresql>=0.8.0 ; extra == 'all' + - adbc-driver-sqlite>=0.8.0 ; extra == 'all' + - beautifulsoup4>=4.11.2 ; extra == 'all' + - bottleneck>=1.3.6 ; extra == 'all' + - dataframe-api-compat>=0.1.7 ; extra == 'all' + - fastparquet>=2022.12.0 ; extra == 'all' + - fsspec>=2022.11.0 ; extra == 'all' + - gcsfs>=2022.11.0 ; extra == 'all' + - html5lib>=1.1 ; extra == 'all' + - 
hypothesis>=6.46.1 ; extra == 'all' + - jinja2>=3.1.2 ; extra == 'all' + - lxml>=4.9.2 ; extra == 'all' + - matplotlib>=3.6.3 ; extra == 'all' + - numba>=0.56.4 ; extra == 'all' + - numexpr>=2.8.4 ; extra == 'all' + - odfpy>=1.4.1 ; extra == 'all' + - openpyxl>=3.1.0 ; extra == 'all' + - pandas-gbq>=0.19.0 ; extra == 'all' + - psycopg2>=2.9.6 ; extra == 'all' + - pyarrow>=10.0.1 ; extra == 'all' + - pymysql>=1.0.2 ; extra == 'all' + - pyqt5>=5.15.9 ; extra == 'all' + - pyreadstat>=1.2.0 ; extra == 'all' + - pytest>=7.3.2 ; extra == 'all' + - pytest-xdist>=2.2.0 ; extra == 'all' + - python-calamine>=0.1.7 ; extra == 'all' + - pyxlsb>=1.0.10 ; extra == 'all' + - qtpy>=2.3.0 ; extra == 'all' + - scipy>=1.10.0 ; extra == 'all' + - s3fs>=2022.11.0 ; extra == 'all' + - sqlalchemy>=2.0.0 ; extra == 'all' + - tables>=3.8.0 ; extra == 'all' + - tabulate>=0.9.0 ; extra == 'all' + - xarray>=2022.12.0 ; extra == 'all' + - xlrd>=2.0.1 ; extra == 'all' + - xlsxwriter>=3.0.5 ; extra == 'all' + - zstandard>=0.19.0 ; extra == 'all' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/3d/f7/f425a00df4fcc22b292c6895c6831c0c8ae1d9fac1e024d16f98a9ce8749/pandas-2.3.3-cp310-cp310-macosx_10_9_x86_64.whl + name: pandas + version: 2.3.3 + sha256: 376c6446ae31770764215a6c937f72d917f214b43560603cd60da6408f183b6c + requires_dist: + - numpy>=1.22.4 ; python_full_version < '3.11' + - numpy>=1.23.2 ; python_full_version == '3.11.*' + - numpy>=1.26.0 ; python_full_version >= '3.12' + - python-dateutil>=2.8.2 + - pytz>=2020.1 + - tzdata>=2022.7 + - hypothesis>=6.46.1 ; extra == 'test' + - pytest>=7.3.2 ; extra == 'test' + - pytest-xdist>=2.2.0 ; extra == 'test' + - pyarrow>=10.0.1 ; extra == 'pyarrow' + - bottleneck>=1.3.6 ; extra == 'performance' + - numba>=0.56.4 ; extra == 'performance' + - numexpr>=2.8.4 ; extra == 'performance' + - scipy>=1.10.0 ; extra == 'computation' + - xarray>=2022.12.0 ; extra == 'computation' + - fsspec>=2022.11.0 ; extra == 'fss' + - 
s3fs>=2022.11.0 ; extra == 'aws' + - gcsfs>=2022.11.0 ; extra == 'gcp' + - pandas-gbq>=0.19.0 ; extra == 'gcp' + - odfpy>=1.4.1 ; extra == 'excel' + - openpyxl>=3.1.0 ; extra == 'excel' + - python-calamine>=0.1.7 ; extra == 'excel' + - pyxlsb>=1.0.10 ; extra == 'excel' + - xlrd>=2.0.1 ; extra == 'excel' + - xlsxwriter>=3.0.5 ; extra == 'excel' + - pyarrow>=10.0.1 ; extra == 'parquet' + - pyarrow>=10.0.1 ; extra == 'feather' + - tables>=3.8.0 ; extra == 'hdf5' + - pyreadstat>=1.2.0 ; extra == 'spss' + - sqlalchemy>=2.0.0 ; extra == 'postgresql' + - psycopg2>=2.9.6 ; extra == 'postgresql' + - adbc-driver-postgresql>=0.8.0 ; extra == 'postgresql' + - sqlalchemy>=2.0.0 ; extra == 'mysql' + - pymysql>=1.0.2 ; extra == 'mysql' + - sqlalchemy>=2.0.0 ; extra == 'sql-other' + - adbc-driver-postgresql>=0.8.0 ; extra == 'sql-other' + - adbc-driver-sqlite>=0.8.0 ; extra == 'sql-other' + - beautifulsoup4>=4.11.2 ; extra == 'html' + - html5lib>=1.1 ; extra == 'html' + - lxml>=4.9.2 ; extra == 'html' + - lxml>=4.9.2 ; extra == 'xml' + - matplotlib>=3.6.3 ; extra == 'plot' + - jinja2>=3.1.2 ; extra == 'output-formatting' + - tabulate>=0.9.0 ; extra == 'output-formatting' + - pyqt5>=5.15.9 ; extra == 'clipboard' + - qtpy>=2.3.0 ; extra == 'clipboard' + - zstandard>=0.19.0 ; extra == 'compression' + - dataframe-api-compat>=0.1.7 ; extra == 'consortium-standard' + - adbc-driver-postgresql>=0.8.0 ; extra == 'all' + - adbc-driver-sqlite>=0.8.0 ; extra == 'all' + - beautifulsoup4>=4.11.2 ; extra == 'all' + - bottleneck>=1.3.6 ; extra == 'all' + - dataframe-api-compat>=0.1.7 ; extra == 'all' + - fastparquet>=2022.12.0 ; extra == 'all' + - fsspec>=2022.11.0 ; extra == 'all' + - gcsfs>=2022.11.0 ; extra == 'all' + - html5lib>=1.1 ; extra == 'all' + - hypothesis>=6.46.1 ; extra == 'all' + - jinja2>=3.1.2 ; extra == 'all' + - lxml>=4.9.2 ; extra == 'all' + - matplotlib>=3.6.3 ; extra == 'all' + - numba>=0.56.4 ; extra == 'all' + - numexpr>=2.8.4 ; extra == 'all' + - odfpy>=1.4.1 ; extra == 
'all' + - openpyxl>=3.1.0 ; extra == 'all' + - pandas-gbq>=0.19.0 ; extra == 'all' + - psycopg2>=2.9.6 ; extra == 'all' + - pyarrow>=10.0.1 ; extra == 'all' + - pymysql>=1.0.2 ; extra == 'all' + - pyqt5>=5.15.9 ; extra == 'all' + - pyreadstat>=1.2.0 ; extra == 'all' + - pytest>=7.3.2 ; extra == 'all' + - pytest-xdist>=2.2.0 ; extra == 'all' + - python-calamine>=0.1.7 ; extra == 'all' + - pyxlsb>=1.0.10 ; extra == 'all' + - qtpy>=2.3.0 ; extra == 'all' + - scipy>=1.10.0 ; extra == 'all' + - s3fs>=2022.11.0 ; extra == 'all' + - sqlalchemy>=2.0.0 ; extra == 'all' + - tables>=3.8.0 ; extra == 'all' + - tabulate>=0.9.0 ; extra == 'all' + - xarray>=2022.12.0 ; extra == 'all' + - xlrd>=2.0.1 ; extra == 'all' + - xlsxwriter>=3.0.5 ; extra == 'all' + - zstandard>=0.19.0 ; extra == 'all' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/40/a8/4dac1f8f8235e5d25b9955d02ff6f29396191d4e665d71122c3722ca83c5/pandas-2.3.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl + name: pandas + version: 2.3.3 + sha256: dd7478f1463441ae4ca7308a70e90b33470fa593429f9d4c578dd00d1fa78838 + requires_dist: + - numpy>=1.22.4 ; python_full_version < '3.11' + - numpy>=1.23.2 ; python_full_version == '3.11.*' + - numpy>=1.26.0 ; python_full_version >= '3.12' + - python-dateutil>=2.8.2 + - pytz>=2020.1 + - tzdata>=2022.7 + - hypothesis>=6.46.1 ; extra == 'test' + - pytest>=7.3.2 ; extra == 'test' + - pytest-xdist>=2.2.0 ; extra == 'test' + - pyarrow>=10.0.1 ; extra == 'pyarrow' + - bottleneck>=1.3.6 ; extra == 'performance' + - numba>=0.56.4 ; extra == 'performance' + - numexpr>=2.8.4 ; extra == 'performance' + - scipy>=1.10.0 ; extra == 'computation' + - xarray>=2022.12.0 ; extra == 'computation' + - fsspec>=2022.11.0 ; extra == 'fss' + - s3fs>=2022.11.0 ; extra == 'aws' + - gcsfs>=2022.11.0 ; extra == 'gcp' + - pandas-gbq>=0.19.0 ; extra == 'gcp' + - odfpy>=1.4.1 ; extra == 'excel' + - openpyxl>=3.1.0 ; extra == 'excel' + - python-calamine>=0.1.7 ; extra == 
'excel' + - pyxlsb>=1.0.10 ; extra == 'excel' + - xlrd>=2.0.1 ; extra == 'excel' + - xlsxwriter>=3.0.5 ; extra == 'excel' + - pyarrow>=10.0.1 ; extra == 'parquet' + - pyarrow>=10.0.1 ; extra == 'feather' + - tables>=3.8.0 ; extra == 'hdf5' + - pyreadstat>=1.2.0 ; extra == 'spss' + - sqlalchemy>=2.0.0 ; extra == 'postgresql' + - psycopg2>=2.9.6 ; extra == 'postgresql' + - adbc-driver-postgresql>=0.8.0 ; extra == 'postgresql' + - sqlalchemy>=2.0.0 ; extra == 'mysql' + - pymysql>=1.0.2 ; extra == 'mysql' + - sqlalchemy>=2.0.0 ; extra == 'sql-other' + - adbc-driver-postgresql>=0.8.0 ; extra == 'sql-other' + - adbc-driver-sqlite>=0.8.0 ; extra == 'sql-other' + - beautifulsoup4>=4.11.2 ; extra == 'html' + - html5lib>=1.1 ; extra == 'html' + - lxml>=4.9.2 ; extra == 'html' + - lxml>=4.9.2 ; extra == 'xml' + - matplotlib>=3.6.3 ; extra == 'plot' + - jinja2>=3.1.2 ; extra == 'output-formatting' + - tabulate>=0.9.0 ; extra == 'output-formatting' + - pyqt5>=5.15.9 ; extra == 'clipboard' + - qtpy>=2.3.0 ; extra == 'clipboard' + - zstandard>=0.19.0 ; extra == 'compression' + - dataframe-api-compat>=0.1.7 ; extra == 'consortium-standard' + - adbc-driver-postgresql>=0.8.0 ; extra == 'all' + - adbc-driver-sqlite>=0.8.0 ; extra == 'all' + - beautifulsoup4>=4.11.2 ; extra == 'all' + - bottleneck>=1.3.6 ; extra == 'all' + - dataframe-api-compat>=0.1.7 ; extra == 'all' + - fastparquet>=2022.12.0 ; extra == 'all' + - fsspec>=2022.11.0 ; extra == 'all' + - gcsfs>=2022.11.0 ; extra == 'all' + - html5lib>=1.1 ; extra == 'all' + - hypothesis>=6.46.1 ; extra == 'all' + - jinja2>=3.1.2 ; extra == 'all' + - lxml>=4.9.2 ; extra == 'all' + - matplotlib>=3.6.3 ; extra == 'all' + - numba>=0.56.4 ; extra == 'all' + - numexpr>=2.8.4 ; extra == 'all' + - odfpy>=1.4.1 ; extra == 'all' + - openpyxl>=3.1.0 ; extra == 'all' + - pandas-gbq>=0.19.0 ; extra == 'all' + - psycopg2>=2.9.6 ; extra == 'all' + - pyarrow>=10.0.1 ; extra == 'all' + - pymysql>=1.0.2 ; extra == 'all' + - pyqt5>=5.15.9 ; extra == 
'all' + - pyreadstat>=1.2.0 ; extra == 'all' + - pytest>=7.3.2 ; extra == 'all' + - pytest-xdist>=2.2.0 ; extra == 'all' + - python-calamine>=0.1.7 ; extra == 'all' + - pyxlsb>=1.0.10 ; extra == 'all' + - qtpy>=2.3.0 ; extra == 'all' + - scipy>=1.10.0 ; extra == 'all' + - s3fs>=2022.11.0 ; extra == 'all' + - sqlalchemy>=2.0.0 ; extra == 'all' + - tables>=3.8.0 ; extra == 'all' + - tabulate>=0.9.0 ; extra == 'all' + - xarray>=2022.12.0 ; extra == 'all' + - xlrd>=2.0.1 ; extra == 'all' + - xlsxwriter>=3.0.5 ; extra == 'all' + - zstandard>=0.19.0 ; extra == 'all' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/26/b7/e805de93e3aa78813912b19edc9c8b037d6cd1c302ab339b895f305cf9a5/pandas_gbq-0.33.0-py3-none-any.whl + name: pandas-gbq + version: 0.33.0 + sha256: 499ad18a7b1917e2cc04bbd763ec37c11ecc49030348c45cebb9cbbdb7f909af + requires_dist: + - setuptools + - db-dtypes>=1.0.4,<2.0.0 + - numpy>=1.18.1 + - pandas>=1.1.4,<3.0.0 + - pyarrow>=4.0.0 + - pyarrow>=22.0.0 ; python_full_version >= '3.14' + - pydata-google-auth>=1.5.0 + - psutil>=5.9.8 + - google-api-core>=2.15.0,<3.0.0 + - google-auth>=2.14.1 + - google-auth-oauthlib>=0.7.0 + - google-cloud-bigquery>=3.20.0,<4.0.0 + - packaging>=22.0.0 + - google-cloud-bigquery-storage>=2.16.2,<3.0.0 ; extra == 'bqstorage' + - tqdm>=4.23.0 ; extra == 'tqdm' + - geopandas>=0.9.0 ; extra == 'geopandas' + - shapely>=1.8.4 ; extra == 'geopandas' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/77/fc/8cb9073bb1bee54eb49a1ae501a36402d01763812962ac811cdc1c81a9d7/parsy-2.2-py3-none-any.whl + name: parsy + version: '2.2' + sha256: 5e981613d9d2d8b68012d1dd0afe928967bea2e4eefdb76c2f545af0dd02a9e7 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/71/e7/40fb618334dcdf7c5a316c0e7343c5cd82d3d866edc100d98e29bc945ecd/partd-1.4.2-py3-none-any.whl + name: partd + version: 1.4.2 + sha256: 978e4ac767ec4ba5b86c6eaa52e5a2a3bc748a2ca839e8cc798f1cc6ce6efb0f + requires_dist: + 
- locket + - toolz + - numpy>=1.20.0 ; extra == 'complete' + - pandas>=1.3 ; extra == 'complete' + - pyzmq ; extra == 'complete' + - blosc ; extra == 'complete' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl + name: pathspec + version: 1.0.4 + sha256: fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723 + requires_dist: + - hyperscan>=0.7 ; extra == 'hyperscan' + - typing-extensions>=4 ; extra == 'optional' + - google-re2>=1.1 ; extra == 're2' + - pytest>=9 ; extra == 'tests' + - typing-extensions>=4.15 ; extra == 'tests' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/48/31/05e764397056194206169869b50cf2fee4dbbbc71b344705b9c0d878d4d8/platformdirs-4.9.2-py3-none-any.whl + name: platformdirs + version: 4.9.2 + sha256: 9170634f126f8efdae22fb58ae8a0eaa86f38365bc57897a6c4f781d1f5875bd + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl + name: pluggy + version: 1.6.0 + sha256: e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746 + requires_dist: + - pre-commit ; extra == 'dev' + - tox ; extra == 'dev' + - pytest ; extra == 'testing' + - pytest-benchmark ; extra == 'testing' + - coverage ; extra == 'testing' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl + name: prometheus-client + version: 0.24.1 + sha256: 150db128af71a5c2482b36e588fc8a6b95e498750da4b17065947c16070f4055 + requires_dist: + - twisted ; extra == 'twisted' + - aiohttp ; extra == 'aiohttp' + - django ; extra == 'django' + requires_python: '>=3.9' +- pypi: 
https://files.pythonhosted.org/packages/08/57/8c87e93142b2c1fa2408e45695205a7ba05fb5db458c0bf5c06ba0e09ea6/propcache-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: propcache + version: 0.4.1 + sha256: 2b16ec437a8c8a965ecf95739448dd938b5c7f56e67ea009f4300d8df05f32b7 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/a1/6b/db0d03d96726d995dc7171286c6ba9d8d14251f37433890f88368951a44e/propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl + name: propcache + version: 0.4.1 + sha256: 1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/e4/c3/82728404aea669e1600f304f2609cde9e665c18df5a11cdd57ed73c1dceb/propcache-0.4.1-cp310-cp310-macosx_11_0_arm64.whl + name: propcache + version: 0.4.1 + sha256: 66c1f011f45a3b33d7bcb22daed4b29c0c9e2224758b6be00686731e1b46f925 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/5d/79/ac273cbbf744691821a9cca88957257f41afe271637794975ca090b9588b/proto_plus-1.27.1-py3-none-any.whl + name: proto-plus + version: 1.27.1 + sha256: e4643061f3a4d0de092d62aa4ad09fa4756b2cbb89d4627f3985018216f9fefc + requires_dist: + - protobuf>=3.19.0,<7.0.0 + - google-api-core>=1.31.5 ; extra == 'testing' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl + name: protobuf + version: 6.33.5 + sha256: cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl + name: protobuf + version: 6.33.5 + sha256: a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5 + requires_python: '>=3.9' +- pypi: 
https://files.pythonhosted.org/packages/13/c4/6322ab5c8f279c4c358bc14eb8aefc0550b97222a39f04eb3c1af7a830fa/protobuf-7.34.0-cp310-abi3-macosx_10_9_universal2.whl + name: protobuf + version: 7.34.0 + sha256: 8e329966799f2c271d5e05e236459fe1cbfdb8755aaa3b0914fa60947ddea408 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/b5/57/89727baef7578897af5ed166735ceb315819f1c184da8c3441271dbcfde7/protobuf-7.34.0-cp310-abi3-manylinux2014_x86_64.whl + name: protobuf + version: 7.34.0 + sha256: 964cf977e07f479c0697964e83deda72bcbc75c3badab506fb061b352d991b01 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/80/c4/f5af4c1ca8c1eeb2e92ccca14ce8effdeec651d5ab6053c589b074eda6e1/psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl + name: psutil + version: 7.2.2 + sha256: 1a7b04c10f32cc88ab39cbf606e117fd74721c831c98a27dc04578deb0c16979 + requires_dist: + - psleak ; extra == 'dev' + - pytest ; extra == 'dev' + - pytest-instafail ; extra == 'dev' + - pytest-xdist ; extra == 'dev' + - setuptools ; extra == 'dev' + - abi3audit ; extra == 'dev' + - black ; extra == 'dev' + - check-manifest ; extra == 'dev' + - coverage ; extra == 'dev' + - packaging ; extra == 'dev' + - pylint ; extra == 'dev' + - pyperf ; extra == 'dev' + - pypinfo ; extra == 'dev' + - pytest-cov ; extra == 'dev' + - requests ; extra == 'dev' + - rstcheck ; extra == 'dev' + - ruff ; extra == 'dev' + - sphinx ; extra == 'dev' + - sphinx-rtd-theme ; extra == 'dev' + - toml-sort ; extra == 'dev' + - twine ; extra == 'dev' + - validate-pyproject[all] ; extra == 'dev' + - virtualenv ; extra == 'dev' + - vulture ; extra == 'dev' + - wheel ; extra == 'dev' + - colorama ; os_name == 'nt' and extra == 'dev' + - pyreadline3 ; os_name == 'nt' and extra == 'dev' + - pywin32 ; implementation_name != 'pypy' and os_name == 'nt' and extra == 'dev' + - wheel ; implementation_name != 'pypy' and os_name == 'nt' and extra == 'dev' + - wmi ; implementation_name != 'pypy' and os_name == 'nt' and 
extra == 'dev' + - psleak ; extra == 'test' + - pytest ; extra == 'test' + - pytest-instafail ; extra == 'test' + - pytest-xdist ; extra == 'test' + - setuptools ; extra == 'test' + - pywin32 ; implementation_name != 'pypy' and os_name == 'nt' and extra == 'test' + - wheel ; implementation_name != 'pypy' and os_name == 'nt' and extra == 'test' + - wmi ; implementation_name != 'pypy' and os_name == 'nt' and extra == 'test' + requires_python: '>=3.6' +- pypi: https://files.pythonhosted.org/packages/b5/70/5d8df3b09e25bce090399cf48e452d25c935ab72dad19406c77f4e828045/psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl + name: psutil + version: 7.2.2 + sha256: 076a2d2f923fd4821644f5ba89f059523da90dc9014e85f8e45a5774ca5bc6f9 + requires_dist: + - psleak ; extra == 'dev' + - pytest ; extra == 'dev' + - pytest-instafail ; extra == 'dev' + - pytest-xdist ; extra == 'dev' + - setuptools ; extra == 'dev' + - abi3audit ; extra == 'dev' + - black ; extra == 'dev' + - check-manifest ; extra == 'dev' + - coverage ; extra == 'dev' + - packaging ; extra == 'dev' + - pylint ; extra == 'dev' + - pyperf ; extra == 'dev' + - pypinfo ; extra == 'dev' + - pytest-cov ; extra == 'dev' + - requests ; extra == 'dev' + - rstcheck ; extra == 'dev' + - ruff ; extra == 'dev' + - sphinx ; extra == 'dev' + - sphinx-rtd-theme ; extra == 'dev' + - toml-sort ; extra == 'dev' + - twine ; extra == 'dev' + - validate-pyproject[all] ; extra == 'dev' + - virtualenv ; extra == 'dev' + - vulture ; extra == 'dev' + - wheel ; extra == 'dev' + - colorama ; os_name == 'nt' and extra == 'dev' + - pyreadline3 ; os_name == 'nt' and extra == 'dev' + - pywin32 ; implementation_name != 'pypy' and os_name == 'nt' and extra == 'dev' + - wheel ; implementation_name != 'pypy' and os_name == 'nt' and extra == 'dev' + - wmi ; implementation_name != 'pypy' and os_name == 'nt' and extra == 'dev' + - psleak ; extra == 'test' + - pytest ; extra == 'test' + - pytest-instafail ; extra == 
'test' + - pytest-xdist ; extra == 'test' + - setuptools ; extra == 'test' + - pywin32 ; implementation_name != 'pypy' and os_name == 'nt' and extra == 'test' + - wheel ; implementation_name != 'pypy' and os_name == 'nt' and extra == 'test' + - wmi ; implementation_name != 'pypy' and os_name == 'nt' and extra == 'test' + requires_python: '>=3.6' +- pypi: https://files.pythonhosted.org/packages/e7/36/5ee6e05c9bd427237b11b3937ad82bb8ad2752d72c6969314590dd0c2f6e/psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl + name: psutil + version: 7.2.2 + sha256: ed0cace939114f62738d808fdcecd4c869222507e266e574799e9c0faa17d486 + requires_dist: + - psleak ; extra == 'dev' + - pytest ; extra == 'dev' + - pytest-instafail ; extra == 'dev' + - pytest-xdist ; extra == 'dev' + - setuptools ; extra == 'dev' + - abi3audit ; extra == 'dev' + - black ; extra == 'dev' + - check-manifest ; extra == 'dev' + - coverage ; extra == 'dev' + - packaging ; extra == 'dev' + - pylint ; extra == 'dev' + - pyperf ; extra == 'dev' + - pypinfo ; extra == 'dev' + - pytest-cov ; extra == 'dev' + - requests ; extra == 'dev' + - rstcheck ; extra == 'dev' + - ruff ; extra == 'dev' + - sphinx ; extra == 'dev' + - sphinx-rtd-theme ; extra == 'dev' + - toml-sort ; extra == 'dev' + - twine ; extra == 'dev' + - validate-pyproject[all] ; extra == 'dev' + - virtualenv ; extra == 'dev' + - vulture ; extra == 'dev' + - wheel ; extra == 'dev' + - colorama ; os_name == 'nt' and extra == 'dev' + - pyreadline3 ; os_name == 'nt' and extra == 'dev' + - pywin32 ; implementation_name != 'pypy' and os_name == 'nt' and extra == 'dev' + - wheel ; implementation_name != 'pypy' and os_name == 'nt' and extra == 'dev' + - wmi ; implementation_name != 'pypy' and os_name == 'nt' and extra == 'dev' + - psleak ; extra == 'test' + - pytest ; extra == 'test' + - pytest-instafail ; extra == 'test' + - pytest-xdist ; extra == 'test' + - setuptools ; extra == 'test' + - pywin32 ; implementation_name != 'pypy' and os_name == 'nt' and extra == 
'test' + - wheel ; implementation_name != 'pypy' and os_name == 'nt' and extra == 'test' + - wmi ; implementation_name != 'pypy' and os_name == 'nt' and extra == 'test' + requires_python: '>=3.6' +- pypi: https://files.pythonhosted.org/packages/18/f3/14a1370b1449ca875d5e353ef02cb9db6b70bd46ec361c236176837c0be1/psycopg-3.2.5-py3-none-any.whl + name: psycopg + version: 3.2.5 + sha256: b782130983e5b3de30b4c529623d3687033b4dafa05bb661fc6bf45837ca5879 + requires_dist: + - backports-zoneinfo>=0.2.0 ; python_full_version < '3.9' + - typing-extensions>=4.6 ; python_full_version < '3.13' + - tzdata ; sys_platform == 'win32' + - psycopg-c==3.2.5 ; implementation_name != 'pypy' and extra == 'c' + - psycopg-binary==3.2.5 ; implementation_name != 'pypy' and extra == 'binary' + - psycopg-pool ; extra == 'pool' + - anyio>=4.0 ; extra == 'test' + - mypy>=1.14 ; extra == 'test' + - pproxy>=2.7 ; extra == 'test' + - pytest>=6.2.5 ; extra == 'test' + - pytest-cov>=3.0 ; extra == 'test' + - pytest-randomly>=3.5 ; extra == 'test' + - ast-comments>=1.1.2 ; extra == 'dev' + - black>=24.1.0 ; extra == 'dev' + - codespell>=2.2 ; extra == 'dev' + - dnspython>=2.1 ; extra == 'dev' + - flake8>=4.0 ; extra == 'dev' + - isort[colors]>=6.0 ; extra == 'dev' + - isort-psycopg ; extra == 'dev' + - mypy>=1.14 ; extra == 'dev' + - pre-commit>=4.0.1 ; extra == 'dev' + - types-setuptools>=57.4 ; extra == 'dev' + - wheel>=0.37 ; extra == 'dev' + - sphinx>=5.0 ; extra == 'docs' + - furo==2022.6.21 ; extra == 'docs' + - sphinx-autobuild>=2021.3.14 ; extra == 'docs' + - sphinx-autodoc-typehints>=1.12 ; extra == 'docs' + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/31/77/31968655db2efe83c519e6296ff3a85a0c9e50432e0c11c8ffae1b404870/psycopg_binary-3.2.5-cp310-cp310-macosx_11_0_arm64.whl + name: psycopg-binary + version: 3.2.5 + sha256: e7d215a43343d91ba08301865f059d9518818d66a222a85fb425e4156716f5a6 + requires_python: '>=3.8' +- pypi: 
https://files.pythonhosted.org/packages/a5/90/9f2c41b3b42d8cd8b9866f0bbd27a5796a1ca8042a1a019b39a6645df523/psycopg_binary-3.2.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + name: psycopg-binary + version: 3.2.5 + sha256: c37eb3be7a6be93f4925ccf52bbfa60244da6c63201770a709dd81a3d2d08534 + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/d6/30/af3806081adc75b5a8addde839d4c6b171a8c5d0d07dd92de20ca4dd6717/psycopg_binary-3.2.5-cp310-cp310-macosx_10_9_x86_64.whl + name: psycopg-binary + version: 3.2.5 + sha256: a82211a43372cba9b1555a110e84e679deec2dc9463ae4c736977dad99dca5ed + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/e7/c3/26b8a0908a9db249de3b4169692e1c7c19048a9bc41a4d3209cee7dbb758/psycopg_pool-3.3.0-py3-none-any.whl + name: psycopg-pool + version: 3.3.0 + sha256: 2e44329155c410b5e8666372db44276a8b1ebd8c90f1c3026ebba40d4bc81063 + requires_dist: + - typing-extensions>=4.6 + - anyio>=4.0 ; extra == 'test' + - mypy>=1.14 ; extra == 'test' + - pproxy>=2.7 ; extra == 'test' + - pytest>=6.2.5 ; extra == 'test' + - pytest-cov>=3.0 ; extra == 'test' + - pytest-randomly>=3.5 ; extra == 'test' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl + name: py + version: 1.11.0 + sha256: 607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 + requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*' +- pypi: https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl + name: py-cpuinfo + version: 9.0.0 + sha256: 859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5 +- pypi: https://files.pythonhosted.org/packages/bd/db/ea0203e495be491c85af87b66e37acfd3bf756fd985f87e46fc5e3bf022c/py4j-0.10.9.9-py2.py3-none-any.whl + name: py4j + version: 0.10.9.9 + sha256: 
c7c26e4158defb37b0bb124933163641a2ff6e3a3913f7811b0ddbe07ed61533 +- pypi: https://files.pythonhosted.org/packages/36/2e/c0f017c405fcdc252dbccafbe05e36b0d0eb1ea9a958f081e01c6972927f/pyarrow-23.0.1-cp314-cp314-manylinux_2_28_x86_64.whl + name: pyarrow + version: 23.0.1 + sha256: 4982d71350b1a6e5cfe1af742c53dfb759b11ce14141870d05d9e540d13bc5d1 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/8d/1b/6da9a89583ce7b23ac611f183ae4843cd3a6cf54f079549b0e8c14031e73/pyarrow-23.0.1-cp314-cp314-macosx_12_0_arm64.whl + name: pyarrow + version: 23.0.1 + sha256: 5df1161da23636a70838099d4aaa65142777185cc0cdba4037a18cee7d8db9ca + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/ae/b5/d58a241fbe324dbaeb8df07be6af8752c846192d78d2272e551098f74e88/pyarrow-23.0.1-cp314-cp314-macosx_12_0_x86_64.whl + name: pyarrow + version: 23.0.1 + sha256: fa8e51cb04b9f8c9c5ace6bab63af9a1f88d35c0d6cbf53e8c17c098552285e1 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/bc/8e/4be5617b4aaae0287f621ad31c6036e5f63118cfca0dc57d42121ff49b51/pyarrow-23.0.1-cp310-cp310-macosx_12_0_x86_64.whl + name: pyarrow + version: 23.0.1 + sha256: 3f91c038b95f71ddfc865f11d5876c42f343b4495535bd262c7b321b0b94507c + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/bc/a8/24e5dc6855f50a62936ceb004e6e9645e4219a8065f304145d7fb8a79d5d/pyarrow-23.0.1-cp310-cp310-macosx_12_0_arm64.whl + name: pyarrow + version: 23.0.1 + sha256: 3fab8f82571844eb3c460f90a75583801d14ca0cc32b1acc8c361650e006fd56 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/f8/82/c40b68001dbec8a3faa4c08cd8c200798ac732d2854537c5449dc859f55a/pyarrow-23.0.1-cp310-cp310-manylinux_2_28_x86_64.whl + name: pyarrow + version: 23.0.1 + sha256: c33b5bf406284fd0bba436ed6f6c3ebe8e311722b441d89397c54f871c6863a2 + requires_python: '>=3.10' +- pypi: 
https://files.pythonhosted.org/packages/2e/c3/94ade4906a2f88bc935772f59c934013b4205e773bcb4239db114a6da136/pyarrow_hotfix-0.7-py3-none-any.whl + name: pyarrow-hotfix + version: '0.7' + sha256: 3236f3b5f1260f0e2ac070a55c1a7b339c4bb7267839bd2015e283234e758100 + requires_python: '>=3.5' +- pypi: https://files.pythonhosted.org/packages/44/b5/a96872e5184f354da9c84ae119971a0a4c221fe9b27a4d94bd43f2596727/pyasn1-0.6.2-py3-none-any.whl + name: pyasn1 + version: 0.6.2 + sha256: 1eb26d860996a18e9b6ed05e7aae0e9fc21619fcee6af91cca9bad4fbea224bf + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl + name: pyasn1-modules + version: 0.4.2 + sha256: 29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a + requires_dist: + - pyasn1>=0.6.1,<0.7.0 + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl + name: pycparser + version: '3.0' + sha256: b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/5f/e9/a09476d436d0ff1402ac3867d933c61805ec2326c6ea557aeeac3825604e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + name: pycryptodome + version: 3.23.0 + sha256: c8987bd3307a39bc03df5c8e0e3d8be0c4c3518b7f044b0f4c15d1aa78f52575 + requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*' +- pypi: https://files.pythonhosted.org/packages/6e/4e/a066527e079fc5002390c8acdd3aca431e6ea0a50ffd7201551175b47323/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl + name: pycryptodome + version: 3.23.0 + sha256: cfb5cd445280c5b0a4e6187a7ce8de5a07b5f3f897f235caa11f1f435f182843 + requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*' +- pypi: 
https://files.pythonhosted.org/packages/db/6c/a1f71542c969912bb0e106f64f60a56cc1f0fabecf9396f45accbe63fa68/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_universal2.whl + name: pycryptodome + version: 3.23.0 + sha256: 187058ab80b3281b1de11c2e6842a357a1f71b42cb1e15bce373f3d238135c27 + requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*' +- pypi: https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl + name: pydantic + version: 2.12.5 + sha256: e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d + requires_dist: + - annotated-types>=0.6.0 + - pydantic-core==2.41.5 + - typing-extensions>=4.14.1 + - typing-inspection>=0.4.2 + - email-validator>=2.0.0 ; extra == 'email' + - tzdata ; python_full_version >= '3.9' and sys_platform == 'win32' and extra == 'timezone' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + name: pydantic-core + version: 2.41.5 + sha256: 22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375 + requires_dist: + - typing-extensions>=4.14.1 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl + name: pydantic-core + version: 2.41.5 + sha256: 1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14 + requires_dist: + - typing-extensions>=4.14.1 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/a8/76/7727ef2ffa4b62fcab916686a68a0426b9b790139720e1934e8ba797e238/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + name: pydantic-core + version: 2.41.5 + sha256: 100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a + requires_dist: + - 
typing-extensions>=4.14.1 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/c6/90/32c9941e728d564b411d574d8ee0cf09b12ec978cb22b294995bae5549a5/pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl + name: pydantic-core + version: 2.41.5 + sha256: 77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146 + requires_dist: + - typing-extensions>=4.14.1 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl + name: pydantic-core + version: 2.41.5 + sha256: 3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a + requires_dist: + - typing-extensions>=4.14.1 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/fb/a8/61c96a77fe28993d9a6fb0f4127e05430a267b235a124545d79fea46dd65/pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl + name: pydantic-core + version: 2.41.5 + sha256: dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2 + requires_dist: + - typing-extensions>=4.14.1 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/ca/cb/cdeaba62aa3c48f0d8834afb82b4a21463cd83df34fe01f9daa89a08ec6c/pydata_google_auth-1.9.1-py2.py3-none-any.whl + name: pydata-google-auth + version: 1.9.1 + sha256: 75ffce5d106e34b717b31844c1639ea505b7d9550dc23b96fb6c20d086b53fa3 + requires_dist: + - setuptools + - google-auth>=1.25.0,<3.0.dev0 + - google-auth-oauthlib>=0.4.0 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl + name: pygments + version: 2.19.2 + sha256: 86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b + requires_dist: + - colorama>=0.4.6 ; extra == 'windows-terminal' + requires_python: '>=3.8' +- pypi: 
https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl + name: pyjwt + version: 2.11.0 + sha256: 94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469 + requires_dist: + - cryptography>=3.4.0 ; extra == 'crypto' + - coverage[toml]==7.10.7 ; extra == 'dev' + - cryptography>=3.4.0 ; extra == 'dev' + - pre-commit ; extra == 'dev' + - pytest>=8.4.2,<9.0.0 ; extra == 'dev' + - sphinx ; extra == 'dev' + - sphinx-rtd-theme ; extra == 'dev' + - zope-interface ; extra == 'dev' + - sphinx ; extra == 'docs' + - sphinx-rtd-theme ; extra == 'docs' + - zope-interface ; extra == 'docs' + - coverage[toml]==7.10.7 ; extra == 'tests' + - pytest>=8.4.2,<9.0.0 ; extra == 'tests' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/7c/4c/ad33b92b9864cbde84f259d5df035a6447f91891f5be77788e2a3892bce3/pymysql-1.1.2-py3-none-any.whl + name: pymysql + version: 1.1.2 + sha256: e6b1d89711dd51f8f74b1631fe08f039e7d76cf67a42a323d3178f0f25762ed9 + requires_dist: + - cryptography ; extra == 'rsa' + - pynacl>=1.4.0 ; extra == 'ed25519' + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/80/28/2659c02301b9500751f8d42f9a6632e1508aa5120de5e43042b8b30f8d5d/pyopenssl-25.1.0-py3-none-any.whl + name: pyopenssl + version: 25.1.0 + sha256: 2b11f239acc47ac2e5aca04fd7fa829800aeee22a2eb30d744572a157bd8a1ab + requires_dist: + - cryptography>=41.0.5,<46 + - typing-extensions>=4.9 ; python_full_version >= '3.8' and python_full_version < '3.13' + - pytest-rerunfailures ; extra == 'test' + - pretend ; extra == 'test' + - pytest>=3.0.1 ; extra == 'test' + - sphinx!=5.2.0,!=5.2.0.post0,!=7.2.5 ; extra == 'docs' + - sphinx-rtd-theme ; extra == 'docs' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/19/bf/58ee13add151469c25825b7125bbf62c3bdcec05eec4d458fcb5c5516066/pyspark-4.1.1.tar.gz + name: pyspark + version: 4.1.1 + sha256: 
77f78984aa84fbe865c717dd37b49913b4e5c97d76ef6824f932f1aefa6621ec + requires_dist: + - py4j>=0.10.9.7,<0.10.9.10 + - numpy>=1.21 ; extra == 'ml' + - numpy>=1.21 ; extra == 'mllib' + - pandas>=2.2.0 ; extra == 'sql' + - pyarrow>=15.0.0 ; extra == 'sql' + - numpy>=1.21 ; extra == 'sql' + - pandas>=2.2.0 ; extra == 'pandas-on-spark' + - pyarrow>=15.0.0 ; extra == 'pandas-on-spark' + - numpy>=1.21 ; extra == 'pandas-on-spark' + - pandas>=2.2.0 ; extra == 'connect' + - pyarrow>=15.0.0 ; extra == 'connect' + - grpcio>=1.76.0 ; extra == 'connect' + - grpcio-status>=1.76.0 ; extra == 'connect' + - googleapis-common-protos>=1.71.0 ; extra == 'connect' + - zstandard>=0.25.0 ; extra == 'connect' + - numpy>=1.21 ; extra == 'connect' + - pandas>=2.2.0 ; extra == 'pipelines' + - pyarrow>=15.0.0 ; extra == 'pipelines' + - numpy>=1.21 ; extra == 'pipelines' + - grpcio>=1.76.0 ; extra == 'pipelines' + - grpcio-status>=1.76.0 ; extra == 'pipelines' + - googleapis-common-protos>=1.71.0 ; extra == 'pipelines' + - zstandard>=0.25.0 ; extra == 'pipelines' + - pyyaml>=3.11 ; extra == 'pipelines' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/51/ff/f6e8b8f39e08547faece4bd80f89d5a8de68a38b2d179cc1c4490ffa3286/pytest-7.4.4-py3-none-any.whl + name: pytest + version: 7.4.4 + sha256: b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8 + requires_dist: + - iniconfig + - packaging + - pluggy>=0.12,<2.0 + - exceptiongroup>=1.0.0rc8 ; python_full_version < '3.11' + - tomli>=1.0.0 ; python_full_version < '3.11' + - importlib-metadata>=0.12 ; python_full_version < '3.8' + - colorama ; sys_platform == 'win32' + - argcomplete ; extra == 'testing' + - attrs>=19.2.0 ; extra == 'testing' + - hypothesis>=3.56 ; extra == 'testing' + - mock ; extra == 'testing' + - nose ; extra == 'testing' + - pygments>=2.7.2 ; extra == 'testing' + - requests ; extra == 'testing' + - setuptools ; extra == 'testing' + - xmlschema ; extra == 'testing' + requires_python: '>=3.7' +- 
pypi: https://files.pythonhosted.org/packages/ee/82/62e2d63639ecb0fbe8a7ee59ef0bc69a4669ec50f6d3459f74ad4e4189a2/pytest_asyncio-0.23.8-py3-none-any.whl + name: pytest-asyncio + version: 0.23.8 + sha256: 50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2 + requires_dist: + - pytest>=7.0.0,<9 + - sphinx>=5.3 ; extra == 'docs' + - sphinx-rtd-theme>=1.0 ; extra == 'docs' + - coverage>=6.2 ; extra == 'testing' + - hypothesis>=5.7.1 ; extra == 'testing' + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/2c/60/423a63fb190a0483d049786a121bd3dfd7d93bb5ff1bb5b5cd13e5df99a7/pytest_benchmark-3.4.1-py2.py3-none-any.whl + name: pytest-benchmark + version: 3.4.1 + sha256: 36d2b08c4882f6f997fd3126a3d6dfd70f3249cde178ed8bbc0b73db7c20f809 + requires_dist: + - pytest>=3.8 + - py-cpuinfo + - statistics ; python_full_version < '3.4' + - pathlib2 ; python_full_version < '3.4' + - aspectlib ; extra == 'aspect' + - elasticsearch ; extra == 'elasticsearch' + - pygal ; extra == 'histogram' + - pygaljs ; extra == 'histogram' + requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*' +- pypi: https://files.pythonhosted.org/packages/25/b2/bdc663a5647ce2034f7e8420122af340df87c01ba97745fc753b8c917acb/pytest_env-1.1.3-py3-none-any.whl + name: pytest-env + version: 1.1.3 + sha256: aada77e6d09fcfb04540a6e462c58533c37df35fa853da78707b17ec04d17dfc + requires_dist: + - pytest>=7.4.3 + - tomli>=2.0.1 ; python_full_version < '3.11' + - covdefaults>=2.3 ; extra == 'test' + - coverage>=7.3.2 ; extra == 'test' + - pytest-mock>=3.12 ; extra == 'test' + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/2d/a1/2f2c1c2353350d66c4d110d283e422e4943eb5ad10effa9357ba66f7b5b9/pytest_lazy_fixture-0.6.3-py3-none-any.whl + name: pytest-lazy-fixture + version: 0.6.3 + sha256: e0b379f38299ff27a653f03eaa69b08a6fd4484e46fd1c9907d984b9f9daeda6 + requires_dist: + - pytest>=3.2.5 +- pypi: 
https://files.pythonhosted.org/packages/30/43/8deecb4c123bbc16d25666f1a6d241109c97aeb2e50806b952661c8e4b95/pytest_mock-1.10.4-py2.py3-none-any.whl + name: pytest-mock + version: 1.10.4 + sha256: 43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7 + requires_dist: + - pytest>=2.7 + - mock ; python_full_version < '3' + - pre-commit ; extra == 'dev' + - tox ; extra == 'dev' + requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*' +- pypi: https://files.pythonhosted.org/packages/ec/98/adc368fe369465f291ab24e18b9900473786ed1afdf861ba90467eb0767e/pytest_ordering-0.6-py3-none-any.whl + name: pytest-ordering + version: '0.6' + sha256: 3f314a178dbeb6777509548727dc69edf22d6d9a2867bf2d310ab85c403380b6 + requires_dist: + - pytest +- pypi: https://files.pythonhosted.org/packages/46/df/97cc0b5b8b53da0e265acd0aeecfc0c279e950a029acd2d7b4e54b00b25f/pytest_timeout-1.4.2-py2.py3-none-any.whl + name: pytest-timeout + version: 1.4.2 + sha256: 541d7aa19b9a6b4e475c759fd6073ef43d7cdc9a92d95644c260076eb257a063 + requires_dist: + - pytest>=3.6.0 +- pypi: https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl + name: pytest-xdist + version: 3.8.0 + sha256: 202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88 + requires_dist: + - execnet>=2.1 + - pytest>=7.0.0 + - filelock ; extra == 'testing' + - psutil>=3.0 ; extra == 'psutil' + - setproctitle ; extra == 'setproctitle' + requires_python: '>=3.9' +- conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.10.19-h3c07f61_3_cpython.conda + build_number: 3 + sha256: 2d8b5566d82c3872f057661e056d696f2f77a17ee5a36d9ae6ec43052c4d1c51 + md5: be48679ccfbc8710dea1d5970600fa04 + depends: + - __glibc >=2.17,<3.0.a0 + - bzip2 >=1.0.8,<2.0a0 + - ld_impl_linux-64 >=2.36.1 + - libexpat >=2.7.3,<3.0a0 + - libffi >=3.4,<4.0a0 + - libgcc >=14 + - liblzma >=5.8.2,<6.0a0 + - libnsl >=2.0.1,<2.1.0a0 + - libsqlite >=3.51.2,<4.0a0 + - libuuid 
>=2.41.3,<3.0a0 + - libxcrypt >=4.4.36 + - libzlib >=1.3.1,<2.0a0 + - ncurses >=6.5,<7.0a0 + - openssl >=3.5.4,<4.0a0 + - readline >=8.3,<9.0a0 + - tk >=8.6.13,<8.7.0a0 + - tzdata + constrains: + - python_abi 3.10.* *_cp310 + license: Python-2.0 + purls: [] + size: 25358312 + timestamp: 1769471983988 +- conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.14.3-h32b2ec7_101_cp314.conda + build_number: 101 + sha256: cb0628c5f1732f889f53a877484da98f5a0e0f47326622671396fb4f2b0cd6bd + md5: c014ad06e60441661737121d3eae8a60 + depends: + - __glibc >=2.17,<3.0.a0 + - bzip2 >=1.0.8,<2.0a0 + - ld_impl_linux-64 >=2.36.1 + - libexpat >=2.7.3,<3.0a0 + - libffi >=3.5.2,<3.6.0a0 + - libgcc >=14 + - liblzma >=5.8.2,<6.0a0 + - libmpdec >=4.0.0,<5.0a0 + - libsqlite >=3.51.2,<4.0a0 + - libuuid >=2.41.3,<3.0a0 + - libzlib >=1.3.1,<2.0a0 + - ncurses >=6.5,<7.0a0 + - openssl >=3.5.5,<4.0a0 + - python_abi 3.14.* *_cp314 + - readline >=8.3,<9.0a0 + - tk >=8.6.13,<8.7.0a0 + - tzdata + - zstd >=1.5.7,<1.6.0a0 + license: Python-2.0 + purls: [] + size: 36702440 + timestamp: 1770675584356 + python_site_packages_path: lib/python3.14/site-packages +- conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.10.19-h988dfef_3_cpython.conda + build_number: 3 + sha256: e05e6e5d076f76e609e8665969391dabdbbb298ecf1cbd58a206bf39a10dbc67 + md5: 2717612cf85c1138d5a0645b1db537fb + depends: + - __osx >=10.13 + - bzip2 >=1.0.8,<2.0a0 + - libexpat >=2.7.3,<3.0a0 + - libffi >=3.4,<4.0a0 + - liblzma >=5.8.2,<6.0a0 + - libsqlite >=3.51.2,<4.0a0 + - libzlib >=1.3.1,<2.0a0 + - ncurses >=6.5,<7.0a0 + - openssl >=3.5.4,<4.0a0 + - readline >=8.3,<9.0a0 + - tk >=8.6.13,<8.7.0a0 + - tzdata + constrains: + - python_abi 3.10.* *_cp310 + license: Python-2.0 + purls: [] + size: 13150073 + timestamp: 1769472282154 +- conda: https://conda.anaconda.org/conda-forge/osx-64/python-3.14.3-h4f44bb5_101_cp314.conda + build_number: 101 + sha256: f64e357aa0168a201c9b3eedf500d89a8550d6631d26a95590b12de61f8fd660 + md5: 
030ec23658b941438ac42303aff0db2b + depends: + - __osx >=10.13 + - bzip2 >=1.0.8,<2.0a0 + - libexpat >=2.7.3,<3.0a0 + - libffi >=3.5.2,<3.6.0a0 + - liblzma >=5.8.2,<6.0a0 + - libmpdec >=4.0.0,<5.0a0 + - libsqlite >=3.51.2,<4.0a0 + - libzlib >=1.3.1,<2.0a0 + - ncurses >=6.5,<7.0a0 + - openssl >=3.5.5,<4.0a0 + - python_abi 3.14.* *_cp314 + - readline >=8.3,<9.0a0 + - tk >=8.6.13,<8.7.0a0 + - tzdata + - zstd >=1.5.7,<1.6.0a0 + license: Python-2.0 + purls: [] + size: 14387288 + timestamp: 1770676578632 + python_site_packages_path: lib/python3.14/site-packages +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.10.19-hcd7f573_3_cpython.conda + build_number: 3 + sha256: 7ce2adb0cc4d45178dc018b55148fa2d6ccae0c98291cef1b21dafcda2de2687 + md5: ac461265b59028847699c0606e17804b + depends: + - __osx >=11.0 + - bzip2 >=1.0.8,<2.0a0 + - libexpat >=2.7.3,<3.0a0 + - libffi >=3.4,<4.0a0 + - liblzma >=5.8.2,<6.0a0 + - libsqlite >=3.51.2,<4.0a0 + - libzlib >=1.3.1,<2.0a0 + - ncurses >=6.5,<7.0a0 + - openssl >=3.5.4,<4.0a0 + - readline >=8.3,<9.0a0 + - tk >=8.6.13,<8.7.0a0 + - tzdata + constrains: + - python_abi 3.10.* *_cp310 + license: Python-2.0 + purls: [] + size: 12507955 + timestamp: 1769472053757 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.14.3-h4c637c5_101_cp314.conda + build_number: 101 + sha256: fccce2af62d11328d232df9f6bbf63464fd45f81f718c661757f9c628c4378ce + md5: 753c8d0447677acb7ddbcc6e03e82661 + depends: + - __osx >=11.0 + - bzip2 >=1.0.8,<2.0a0 + - libexpat >=2.7.3,<3.0a0 + - libffi >=3.5.2,<3.6.0a0 + - liblzma >=5.8.2,<6.0a0 + - libmpdec >=4.0.0,<5.0a0 + - libsqlite >=3.51.2,<4.0a0 + - libzlib >=1.3.1,<2.0a0 + - ncurses >=6.5,<7.0a0 + - openssl >=3.5.5,<4.0a0 + - python_abi 3.14.* *_cp314 + - readline >=8.3,<9.0a0 + - tk >=8.6.13,<8.7.0a0 + - tzdata + - zstd >=1.5.7,<1.6.0a0 + license: Python-2.0 + purls: [] + size: 13522698 + timestamp: 1770675365241 + python_site_packages_path: lib/python3.14/site-packages +- pypi: 
https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl + name: python-dateutil + version: 2.9.0.post0 + sha256: a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 + requires_dist: + - six>=1.5 + requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*' +- pypi: https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl + name: python-dotenv + version: 1.2.2 + sha256: 1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a + requires_dist: + - click>=5.0 ; extra == 'cli' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/2e/2e/dfbd2c9b3edf6a5a8cd9e66090221046839b488ea27824970426bf06b242/python_keycloak-4.2.2-py3-none-any.whl + name: python-keycloak + version: 4.2.2 + sha256: 5137fd87c69031a372a578df96bae96b9aead2c9dad976613bc978e9e0246a1e + requires_dist: + - async-property>=0.2.2 + - deprecation>=2.1.0 + - httpx>=0.23.2 + - jwcrypto>=1.5.4 + - requests>=2.20.0 + - requests-toolbelt>=0.6.0 + requires_python: '>=3.8,<4.0' +- conda: https://conda.anaconda.org/conda-forge/noarch/python_abi-3.14-8_cp314.conda + build_number: 8 + sha256: ad6d2e9ac39751cc0529dd1566a26751a0bf2542adb0c232533d32e176e21db5 + md5: 0539938c55b6b1a59b560e843ad864a4 + constrains: + - python 3.14.* *_cp314 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 6989 + timestamp: 1752805904792 +- pypi: https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl + name: pytz + version: 2026.1.post1 + sha256: f2fd16142fda348286a75e1a524be810bb05d444e5a081f37f7affc635035f7a +- pypi: https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl + name: pyyaml + version: 6.0.3 + sha256: 
02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956 + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: pyyaml + version: 6.0.3 + sha256: 9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: pyyaml + version: 6.0.3 + sha256: c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5 + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl + name: pyyaml + version: 6.0.3 + sha256: 8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl + name: pyyaml + version: 6.0.3 + sha256: 34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310 + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl + name: pyyaml + version: 6.0.3 + sha256: 214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/61/c5/c2ceba832fe3f47cfd7e11cd7cc7a1bbc2c028424c5bca70435aa4ca1dec/ray-2.49.2-cp310-cp310-macosx_12_0_x86_64.whl + name: ray + version: 2.49.2 + sha256: 3e441bf2acd7f368cf45132752066c5c3b83d88cd5f85762e703774bba4f2b6d + requires_dist: + - click>=7.0 + - filelock + - 
jsonschema + - msgpack>=1.0.0,<2.0.0 + - packaging + - protobuf>=3.20.3 + - pyyaml + - requests + - cupy-cuda12x ; sys_platform != 'darwin' and extra == 'cgraph' + - grpcio!=1.56.0 ; sys_platform == 'darwin' and extra == 'client' + - grpcio ; extra == 'client' + - numpy>=1.20 ; extra == 'data' + - pandas>=1.3 ; extra == 'data' + - pyarrow>=9.0.0 ; extra == 'data' + - fsspec ; extra == 'data' + - aiohttp>=3.7 ; extra == 'default' + - aiohttp-cors ; extra == 'default' + - colorful ; extra == 'default' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'default' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'default' + - requests ; extra == 'default' + - grpcio>=1.32.0 ; python_full_version < '3.10' and extra == 'default' + - grpcio>=1.42.0 ; python_full_version >= '3.10' and extra == 'default' + - opencensus ; extra == 'default' + - opentelemetry-sdk>=1.30.0 ; extra == 'default' + - opentelemetry-exporter-prometheus ; extra == 'default' + - opentelemetry-proto ; extra == 'default' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,<3 ; extra == 'default' + - prometheus-client>=0.7.1 ; extra == 'default' + - smart-open ; extra == 'default' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'default' + - memray ; sys_platform != 'win32' and extra == 'observability' + - smart-open ; extra == 'serve' + - aiohttp>=3.7 ; extra == 'serve' + - grpcio>=1.42.0 ; python_full_version >= '3.10' and extra == 'serve' + - prometheus-client>=0.7.1 ; extra == 'serve' + - fastapi ; extra == 'serve' + - opencensus ; extra == 'serve' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'serve' + - grpcio>=1.32.0 ; python_full_version < '3.10' and extra == 'serve' + - opentelemetry-sdk>=1.30.0 ; extra == 'serve' + - aiohttp-cors ; extra == 'serve' + - requests ; extra == 'serve' + - starlette ; extra == 'serve' + - opentelemetry-exporter-prometheus ; extra == 'serve' + - watchfiles ; extra == 'serve' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and 
extra == 'serve' + - uvicorn[standard] ; extra == 'serve' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'serve' + - colorful ; extra == 'serve' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,<3 ; extra == 'serve' + - opentelemetry-proto ; extra == 'serve' + - pandas ; extra == 'tune' + - tensorboardx>=1.9 ; extra == 'tune' + - requests ; extra == 'tune' + - pyarrow>=9.0.0 ; extra == 'tune' + - fsspec ; extra == 'tune' + - cupy-cuda12x ; sys_platform != 'darwin' and extra == 'adag' + - smart-open ; extra == 'serve-grpc' + - aiohttp>=3.7 ; extra == 'serve-grpc' + - grpcio>=1.42.0 ; python_full_version >= '3.10' and extra == 'serve-grpc' + - prometheus-client>=0.7.1 ; extra == 'serve-grpc' + - fastapi ; extra == 'serve-grpc' + - opencensus ; extra == 'serve-grpc' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'serve-grpc' + - grpcio>=1.32.0 ; python_full_version < '3.10' and extra == 'serve-grpc' + - opentelemetry-sdk>=1.30.0 ; extra == 'serve-grpc' + - aiohttp-cors ; extra == 'serve-grpc' + - requests ; extra == 'serve-grpc' + - starlette ; extra == 'serve-grpc' + - opentelemetry-exporter-prometheus ; extra == 'serve-grpc' + - watchfiles ; extra == 'serve-grpc' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'serve-grpc' + - uvicorn[standard] ; extra == 'serve-grpc' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'serve-grpc' + - colorful ; extra == 'serve-grpc' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,<3 ; extra == 'serve-grpc' + - opentelemetry-proto ; extra == 'serve-grpc' + - pyopenssl ; extra == 'serve-grpc' + - smart-open ; extra == 'serve-async-inference' + - aiohttp>=3.7 ; extra == 'serve-async-inference' + - celery ; extra == 'serve-async-inference' + - grpcio>=1.42.0 ; python_full_version >= '3.10' and extra == 'serve-async-inference' + - prometheus-client>=0.7.1 ; extra == 'serve-async-inference' + - fastapi ; extra == 'serve-async-inference' + - opencensus ; extra == 'serve-async-inference' + - py-spy>=0.2.0 ; 
python_full_version < '3.12' and extra == 'serve-async-inference' + - grpcio>=1.32.0 ; python_full_version < '3.10' and extra == 'serve-async-inference' + - opentelemetry-sdk>=1.30.0 ; extra == 'serve-async-inference' + - aiohttp-cors ; extra == 'serve-async-inference' + - requests ; extra == 'serve-async-inference' + - starlette ; extra == 'serve-async-inference' + - opentelemetry-exporter-prometheus ; extra == 'serve-async-inference' + - watchfiles ; extra == 'serve-async-inference' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'serve-async-inference' + - uvicorn[standard] ; extra == 'serve-async-inference' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'serve-async-inference' + - colorful ; extra == 'serve-async-inference' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,<3 ; extra == 'serve-async-inference' + - opentelemetry-proto ; extra == 'serve-async-inference' + - ray-cpp==2.49.2 ; extra == 'cpp' + - pandas ; extra == 'rllib' + - tensorboardx>=1.9 ; extra == 'rllib' + - requests ; extra == 'rllib' + - pyarrow>=9.0.0 ; extra == 'rllib' + - fsspec ; extra == 'rllib' + - dm-tree ; extra == 'rllib' + - gymnasium==1.1.1 ; extra == 'rllib' + - lz4 ; extra == 'rllib' + - ormsgpack==1.7.0 ; extra == 'rllib' + - pyyaml ; extra == 'rllib' + - scipy ; extra == 'rllib' + - pandas ; extra == 'train' + - tensorboardx>=1.9 ; extra == 'train' + - requests ; extra == 'train' + - pyarrow>=9.0.0 ; extra == 'train' + - fsspec ; extra == 'train' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,<3 ; extra == 'train' + - smart-open ; extra == 'air' + - aiohttp>=3.7 ; extra == 'air' + - grpcio>=1.42.0 ; python_full_version >= '3.10' and extra == 'air' + - fsspec ; extra == 'air' + - prometheus-client>=0.7.1 ; extra == 'air' + - fastapi ; extra == 'air' + - pandas ; extra == 'air' + - opencensus ; extra == 'air' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'air' + - grpcio>=1.32.0 ; python_full_version < '3.10' and extra == 'air' + - 
opentelemetry-sdk>=1.30.0 ; extra == 'air' + - numpy>=1.20 ; extra == 'air' + - aiohttp-cors ; extra == 'air' + - requests ; extra == 'air' + - starlette ; extra == 'air' + - opentelemetry-exporter-prometheus ; extra == 'air' + - watchfiles ; extra == 'air' + - pyarrow>=9.0.0 ; extra == 'air' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'air' + - uvicorn[standard] ; extra == 'air' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'air' + - colorful ; extra == 'air' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,<3 ; extra == 'air' + - tensorboardx>=1.9 ; extra == 'air' + - pandas>=1.3 ; extra == 'air' + - opentelemetry-proto ; extra == 'air' + - smart-open ; extra == 'all' + - aiohttp>=3.7 ; extra == 'all' + - celery ; extra == 'all' + - grpcio>=1.42.0 ; python_full_version >= '3.10' and extra == 'all' + - fsspec ; extra == 'all' + - grpcio!=1.56.0 ; sys_platform == 'darwin' and extra == 'all' + - cupy-cuda12x ; sys_platform != 'darwin' and extra == 'all' + - prometheus-client>=0.7.1 ; extra == 'all' + - gymnasium==1.1.1 ; extra == 'all' + - ormsgpack==1.7.0 ; extra == 'all' + - scipy ; extra == 'all' + - fastapi ; extra == 'all' + - pandas ; extra == 'all' + - opencensus ; extra == 'all' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'all' + - grpcio>=1.32.0 ; python_full_version < '3.10' and extra == 'all' + - opentelemetry-sdk>=1.30.0 ; extra == 'all' + - dm-tree ; extra == 'all' + - numpy>=1.20 ; extra == 'all' + - lz4 ; extra == 'all' + - aiohttp-cors ; extra == 'all' + - requests ; extra == 'all' + - starlette ; extra == 'all' + - memray ; sys_platform != 'win32' and extra == 'all' + - opentelemetry-exporter-prometheus ; extra == 'all' + - watchfiles ; extra == 'all' + - pyyaml ; extra == 'all' + - pyarrow>=9.0.0 ; extra == 'all' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'all' + - grpcio ; extra == 'all' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'all' + - uvicorn[standard] ; extra == 'all' + - colorful 
; extra == 'all' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,<3 ; extra == 'all' + - tensorboardx>=1.9 ; extra == 'all' + - pandas>=1.3 ; extra == 'all' + - opentelemetry-proto ; extra == 'all' + - pyopenssl ; extra == 'all' + - smart-open ; extra == 'all-cpp' + - aiohttp>=3.7 ; extra == 'all-cpp' + - celery ; extra == 'all-cpp' + - grpcio>=1.42.0 ; python_full_version >= '3.10' and extra == 'all-cpp' + - fsspec ; extra == 'all-cpp' + - grpcio!=1.56.0 ; sys_platform == 'darwin' and extra == 'all-cpp' + - cupy-cuda12x ; sys_platform != 'darwin' and extra == 'all-cpp' + - prometheus-client>=0.7.1 ; extra == 'all-cpp' + - gymnasium==1.1.1 ; extra == 'all-cpp' + - ormsgpack==1.7.0 ; extra == 'all-cpp' + - scipy ; extra == 'all-cpp' + - fastapi ; extra == 'all-cpp' + - pandas ; extra == 'all-cpp' + - opencensus ; extra == 'all-cpp' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'all-cpp' + - grpcio>=1.32.0 ; python_full_version < '3.10' and extra == 'all-cpp' + - dm-tree ; extra == 'all-cpp' + - opentelemetry-sdk>=1.30.0 ; extra == 'all-cpp' + - numpy>=1.20 ; extra == 'all-cpp' + - lz4 ; extra == 'all-cpp' + - aiohttp-cors ; extra == 'all-cpp' + - requests ; extra == 'all-cpp' + - starlette ; extra == 'all-cpp' + - memray ; sys_platform != 'win32' and extra == 'all-cpp' + - opentelemetry-exporter-prometheus ; extra == 'all-cpp' + - watchfiles ; extra == 'all-cpp' + - pyyaml ; extra == 'all-cpp' + - pyarrow>=9.0.0 ; extra == 'all-cpp' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'all-cpp' + - grpcio ; extra == 'all-cpp' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'all-cpp' + - uvicorn[standard] ; extra == 'all-cpp' + - ray-cpp==2.49.2 ; extra == 'all-cpp' + - colorful ; extra == 'all-cpp' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,<3 ; extra == 'all-cpp' + - tensorboardx>=1.9 ; extra == 'all-cpp' + - pandas>=1.3 ; extra == 'all-cpp' + - opentelemetry-proto ; extra == 'all-cpp' + - pyopenssl ; extra == 'all-cpp' + - 
smart-open ; extra == 'llm' + - typer ; extra == 'llm' + - aiohttp>=3.7 ; extra == 'llm' + - grpcio>=1.42.0 ; python_full_version >= '3.10' and extra == 'llm' + - fsspec ; extra == 'llm' + - vllm>=0.10.0 ; extra == 'llm' + - async-timeout ; python_full_version < '3.11' and extra == 'llm' + - prometheus-client>=0.7.1 ; extra == 'llm' + - fastapi ; extra == 'llm' + - hf-transfer ; extra == 'llm' + - jsonschema ; extra == 'llm' + - opencensus ; extra == 'llm' + - jsonref>=1.1.0 ; extra == 'llm' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'llm' + - grpcio>=1.32.0 ; python_full_version < '3.10' and extra == 'llm' + - opentelemetry-sdk>=1.30.0 ; extra == 'llm' + - numpy>=1.20 ; extra == 'llm' + - ninja ; extra == 'llm' + - aiohttp-cors ; extra == 'llm' + - requests ; extra == 'llm' + - starlette ; extra == 'llm' + - opentelemetry-exporter-prometheus ; extra == 'llm' + - watchfiles ; extra == 'llm' + - pyarrow>=9.0.0 ; extra == 'llm' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'llm' + - uvicorn[standard] ; extra == 'llm' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'llm' + - colorful ; extra == 'llm' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,<3 ; extra == 'llm' + - pandas>=1.3 ; extra == 'llm' + - opentelemetry-proto ; extra == 'llm' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/64/13/b86d791b41f33220335eba18fc4841f1ebddae41e562c6a216846404c88d/ray-2.54.0-cp310-cp310-macosx_12_0_arm64.whl + name: ray + version: 2.54.0 + sha256: a22937f09ee74a43171df338d84b45ef882c1c05748947ca9d5343a44d4b9379 + requires_dist: + - click>=7.0 + - filelock + - jsonschema + - msgpack>=1.0.0,<2.0.0 + - packaging>=24.2 + - protobuf>=3.20.3 + - pyyaml + - requests + - cupy-cuda12x ; sys_platform != 'darwin' and extra == 'cgraph' + - grpcio!=1.56.0 ; sys_platform == 'darwin' and extra == 'client' + - grpcio ; extra == 'client' + - numpy>=1.20 ; extra == 'data' + - pandas>=1.3 ; extra == 'data' + - pyarrow>=9.0.0 ; extra 
== 'data' + - fsspec ; extra == 'data' + - aiohttp>=3.13.3 ; extra == 'default' + - aiohttp-cors ; extra == 'default' + - colorful ; extra == 'default' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'default' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'default' + - requests ; extra == 'default' + - grpcio>=1.42.0 ; extra == 'default' + - opencensus ; extra == 'default' + - opentelemetry-sdk>=1.30.0 ; extra == 'default' + - opentelemetry-exporter-prometheus ; extra == 'default' + - opentelemetry-proto ; extra == 'default' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'default' + - prometheus-client>=0.7.1 ; extra == 'default' + - smart-open ; extra == 'default' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'default' + - memray ; sys_platform != 'win32' and extra == 'observability' + - fastapi ; extra == 'serve' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'serve' + - opencensus ; extra == 'serve' + - opentelemetry-proto ; extra == 'serve' + - watchfiles ; extra == 'serve' + - starlette ; extra == 'serve' + - opentelemetry-sdk>=1.30.0 ; extra == 'serve' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'serve' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'serve' + - aiohttp-cors ; extra == 'serve' + - aiohttp>=3.13.3 ; extra == 'serve' + - colorful ; extra == 'serve' + - prometheus-client>=0.7.1 ; extra == 'serve' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'serve' + - opentelemetry-exporter-prometheus ; extra == 'serve' + - uvicorn[standard] ; extra == 'serve' + - smart-open ; extra == 'serve' + - requests ; extra == 'serve' + - grpcio>=1.42.0 ; extra == 'serve' + - pandas ; extra == 'tune' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'tune' + - tensorboardx>=1.9 ; extra 
== 'tune' + - requests ; extra == 'tune' + - pyarrow>=9.0.0 ; extra == 'tune' + - fsspec ; extra == 'tune' + - cupy-cuda12x ; sys_platform != 'darwin' and extra == 'adag' + - fastapi ; extra == 'serve-grpc' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'serve-grpc' + - opencensus ; extra == 'serve-grpc' + - opentelemetry-proto ; extra == 'serve-grpc' + - watchfiles ; extra == 'serve-grpc' + - starlette ; extra == 'serve-grpc' + - opentelemetry-sdk>=1.30.0 ; extra == 'serve-grpc' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'serve-grpc' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'serve-grpc' + - aiohttp-cors ; extra == 'serve-grpc' + - aiohttp>=3.13.3 ; extra == 'serve-grpc' + - colorful ; extra == 'serve-grpc' + - prometheus-client>=0.7.1 ; extra == 'serve-grpc' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'serve-grpc' + - opentelemetry-exporter-prometheus ; extra == 'serve-grpc' + - uvicorn[standard] ; extra == 'serve-grpc' + - smart-open ; extra == 'serve-grpc' + - requests ; extra == 'serve-grpc' + - grpcio>=1.42.0 ; extra == 'serve-grpc' + - pyopenssl ; extra == 'serve-grpc' + - fastapi ; extra == 'serve-async-inference' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'serve-async-inference' + - opencensus ; extra == 'serve-async-inference' + - opentelemetry-proto ; extra == 'serve-async-inference' + - watchfiles ; extra == 'serve-async-inference' + - starlette ; extra == 'serve-async-inference' + - opentelemetry-sdk>=1.30.0 ; extra == 'serve-async-inference' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'serve-async-inference' + - celery ; extra == 'serve-async-inference' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'serve-async-inference' + - aiohttp-cors ; extra == 'serve-async-inference' + - aiohttp>=3.13.3 ; extra == 
'serve-async-inference' + - colorful ; extra == 'serve-async-inference' + - prometheus-client>=0.7.1 ; extra == 'serve-async-inference' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'serve-async-inference' + - opentelemetry-exporter-prometheus ; extra == 'serve-async-inference' + - uvicorn[standard] ; extra == 'serve-async-inference' + - smart-open ; extra == 'serve-async-inference' + - requests ; extra == 'serve-async-inference' + - grpcio>=1.42.0 ; extra == 'serve-async-inference' + - ray-cpp==2.54.0 ; extra == 'cpp' + - pandas ; extra == 'rllib' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'rllib' + - tensorboardx>=1.9 ; extra == 'rllib' + - requests ; extra == 'rllib' + - pyarrow>=9.0.0 ; extra == 'rllib' + - fsspec ; extra == 'rllib' + - dm-tree ; extra == 'rllib' + - gymnasium==1.2.2 ; extra == 'rllib' + - lz4 ; extra == 'rllib' + - ormsgpack>=1.7.0 ; extra == 'rllib' + - pyyaml ; extra == 'rllib' + - scipy ; extra == 'rllib' + - pandas ; extra == 'train' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'train' + - tensorboardx>=1.9 ; extra == 'train' + - requests ; extra == 'train' + - pyarrow>=9.0.0 ; extra == 'train' + - fsspec ; extra == 'train' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'train' + - fastapi ; extra == 'air' + - tensorboardx>=1.9 ; extra == 'air' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'air' + - numpy>=1.20 ; extra == 'air' + - pyarrow>=9.0.0 ; extra == 'air' + - opencensus ; extra == 'air' + - opentelemetry-proto ; extra == 'air' + - watchfiles ; extra == 'air' + - starlette ; extra == 'air' + - opentelemetry-sdk>=1.30.0 ; extra == 'air' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'air' + - pandas ; extra == 
'air' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'air' + - fsspec ; extra == 'air' + - aiohttp-cors ; extra == 'air' + - aiohttp>=3.13.3 ; extra == 'air' + - colorful ; extra == 'air' + - prometheus-client>=0.7.1 ; extra == 'air' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'air' + - opentelemetry-exporter-prometheus ; extra == 'air' + - uvicorn[standard] ; extra == 'air' + - smart-open ; extra == 'air' + - requests ; extra == 'air' + - grpcio>=1.42.0 ; extra == 'air' + - pandas>=1.3 ; extra == 'air' + - fastapi ; extra == 'all' + - scipy ; extra == 'all' + - dm-tree ; extra == 'all' + - tensorboardx>=1.9 ; extra == 'all' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'all' + - numpy>=1.20 ; extra == 'all' + - pyarrow>=9.0.0 ; extra == 'all' + - pyyaml ; extra == 'all' + - ormsgpack>=1.7.0 ; extra == 'all' + - opencensus ; extra == 'all' + - opentelemetry-proto ; extra == 'all' + - watchfiles ; extra == 'all' + - starlette ; extra == 'all' + - opentelemetry-sdk>=1.30.0 ; extra == 'all' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'all' + - grpcio!=1.56.0 ; sys_platform == 'darwin' and extra == 'all' + - celery ; extra == 'all' + - grpcio ; extra == 'all' + - pandas ; extra == 'all' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'all' + - fsspec ; extra == 'all' + - aiohttp>=3.13.3 ; extra == 'all' + - aiohttp-cors ; extra == 'all' + - colorful ; extra == 'all' + - prometheus-client>=0.7.1 ; extra == 'all' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'all' + - gymnasium==1.2.2 ; extra == 'all' + - lz4 ; extra == 'all' + - opentelemetry-exporter-prometheus ; extra == 'all' + - uvicorn[standard] ; extra == 'all' + - smart-open ; extra == 'all' + - cupy-cuda12x ; sys_platform != 'darwin' and extra == 'all' + - requests ; extra == 'all' + - memray ; sys_platform != 'win32' and extra == 'all' + - grpcio>=1.42.0 ; extra == 'all' + - pandas>=1.3 
; extra == 'all' + - pyopenssl ; extra == 'all' + - fastapi ; extra == 'all-cpp' + - scipy ; extra == 'all-cpp' + - dm-tree ; extra == 'all-cpp' + - tensorboardx>=1.9 ; extra == 'all-cpp' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'all-cpp' + - numpy>=1.20 ; extra == 'all-cpp' + - pyarrow>=9.0.0 ; extra == 'all-cpp' + - pyyaml ; extra == 'all-cpp' + - ormsgpack>=1.7.0 ; extra == 'all-cpp' + - opencensus ; extra == 'all-cpp' + - opentelemetry-proto ; extra == 'all-cpp' + - watchfiles ; extra == 'all-cpp' + - starlette ; extra == 'all-cpp' + - opentelemetry-sdk>=1.30.0 ; extra == 'all-cpp' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'all-cpp' + - grpcio!=1.56.0 ; sys_platform == 'darwin' and extra == 'all-cpp' + - celery ; extra == 'all-cpp' + - grpcio ; extra == 'all-cpp' + - pandas ; extra == 'all-cpp' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'all-cpp' + - fsspec ; extra == 'all-cpp' + - aiohttp>=3.13.3 ; extra == 'all-cpp' + - aiohttp-cors ; extra == 'all-cpp' + - colorful ; extra == 'all-cpp' + - prometheus-client>=0.7.1 ; extra == 'all-cpp' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'all-cpp' + - gymnasium==1.2.2 ; extra == 'all-cpp' + - lz4 ; extra == 'all-cpp' + - ray-cpp==2.54.0 ; extra == 'all-cpp' + - opentelemetry-exporter-prometheus ; extra == 'all-cpp' + - uvicorn[standard] ; extra == 'all-cpp' + - smart-open ; extra == 'all-cpp' + - cupy-cuda12x ; sys_platform != 'darwin' and extra == 'all-cpp' + - requests ; extra == 'all-cpp' + - memray ; sys_platform != 'win32' and extra == 'all-cpp' + - grpcio>=1.42.0 ; extra == 'all-cpp' + - pandas>=1.3 ; extra == 'all-cpp' + - pyopenssl ; extra == 'all-cpp' + - fastapi ; extra == 'llm' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'llm' + - numpy>=1.20 ; extra == 'llm' + - pyarrow>=9.0.0 ; extra == 'llm' + - typer ; extra == 'llm' + - opencensus ; extra == 'llm' + - 
opentelemetry-proto ; extra == 'llm' + - jsonschema ; extra == 'llm' + - watchfiles ; extra == 'llm' + - meson ; extra == 'llm' + - vllm[audio]>=0.15.0 ; extra == 'llm' + - transformers>=4.57.3 ; extra == 'llm' + - starlette ; extra == 'llm' + - opentelemetry-sdk>=1.30.0 ; extra == 'llm' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'llm' + - hf-transfer ; extra == 'llm' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'llm' + - fsspec ; extra == 'llm' + - aiohttp-cors ; extra == 'llm' + - aiohttp>=3.13.3 ; extra == 'llm' + - colorful ; extra == 'llm' + - jsonref>=1.1.0 ; extra == 'llm' + - prometheus-client>=0.7.1 ; extra == 'llm' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'llm' + - nixl>=0.6.1 ; extra == 'llm' + - async-timeout ; python_full_version < '3.11' and extra == 'llm' + - pybind11 ; extra == 'llm' + - opentelemetry-exporter-prometheus ; extra == 'llm' + - uvicorn[standard] ; extra == 'llm' + - smart-open ; extra == 'llm' + - requests ; extra == 'llm' + - ninja ; extra == 'llm' + - grpcio>=1.42.0 ; extra == 'llm' + - pandas>=1.3 ; extra == 'llm' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/b0/b1/8cc4e45a3ce87aabcb70696b448b20840bcbaa5c98bdb4807a2749541fda/ray-2.54.0-cp310-cp310-manylinux2014_x86_64.whl + name: ray + version: 2.54.0 + sha256: 2d140409e4ca06d8d6a06f71d441b53f6edcd930ebe67a6988f652915db81070 + requires_dist: + - click>=7.0 + - filelock + - jsonschema + - msgpack>=1.0.0,<2.0.0 + - packaging>=24.2 + - protobuf>=3.20.3 + - pyyaml + - requests + - cupy-cuda12x ; sys_platform != 'darwin' and extra == 'cgraph' + - grpcio!=1.56.0 ; sys_platform == 'darwin' and extra == 'client' + - grpcio ; extra == 'client' + - numpy>=1.20 ; extra == 'data' + - pandas>=1.3 ; extra == 'data' + - pyarrow>=9.0.0 ; extra == 'data' + - fsspec ; extra == 'data' + - aiohttp>=3.13.3 ; extra == 'default' + - aiohttp-cors ; extra == 'default' + - 
colorful ; extra == 'default' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'default' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'default' + - requests ; extra == 'default' + - grpcio>=1.42.0 ; extra == 'default' + - opencensus ; extra == 'default' + - opentelemetry-sdk>=1.30.0 ; extra == 'default' + - opentelemetry-exporter-prometheus ; extra == 'default' + - opentelemetry-proto ; extra == 'default' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'default' + - prometheus-client>=0.7.1 ; extra == 'default' + - smart-open ; extra == 'default' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'default' + - memray ; sys_platform != 'win32' and extra == 'observability' + - fastapi ; extra == 'serve' + - uvicorn[standard] ; extra == 'serve' + - opentelemetry-sdk>=1.30.0 ; extra == 'serve' + - opentelemetry-proto ; extra == 'serve' + - aiohttp>=3.13.3 ; extra == 'serve' + - starlette ; extra == 'serve' + - smart-open ; extra == 'serve' + - aiohttp-cors ; extra == 'serve' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'serve' + - opencensus ; extra == 'serve' + - requests ; extra == 'serve' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'serve' + - colorful ; extra == 'serve' + - prometheus-client>=0.7.1 ; extra == 'serve' + - watchfiles ; extra == 'serve' + - opentelemetry-exporter-prometheus ; extra == 'serve' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'serve' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'serve' + - grpcio>=1.42.0 ; extra == 'serve' + - pandas ; extra == 'tune' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'tune' + - tensorboardx>=1.9 ; extra == 'tune' + - requests ; extra == 'tune' + - pyarrow>=9.0.0 ; extra == 'tune' + - fsspec ; extra == 'tune' + - 
cupy-cuda12x ; sys_platform != 'darwin' and extra == 'adag' + - fastapi ; extra == 'serve-grpc' + - uvicorn[standard] ; extra == 'serve-grpc' + - opentelemetry-sdk>=1.30.0 ; extra == 'serve-grpc' + - pyopenssl ; extra == 'serve-grpc' + - opentelemetry-proto ; extra == 'serve-grpc' + - aiohttp>=3.13.3 ; extra == 'serve-grpc' + - starlette ; extra == 'serve-grpc' + - smart-open ; extra == 'serve-grpc' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'serve-grpc' + - requests ; extra == 'serve-grpc' + - opencensus ; extra == 'serve-grpc' + - aiohttp-cors ; extra == 'serve-grpc' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'serve-grpc' + - colorful ; extra == 'serve-grpc' + - prometheus-client>=0.7.1 ; extra == 'serve-grpc' + - watchfiles ; extra == 'serve-grpc' + - opentelemetry-exporter-prometheus ; extra == 'serve-grpc' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'serve-grpc' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'serve-grpc' + - grpcio>=1.42.0 ; extra == 'serve-grpc' + - fastapi ; extra == 'serve-async-inference' + - uvicorn[standard] ; extra == 'serve-async-inference' + - opentelemetry-sdk>=1.30.0 ; extra == 'serve-async-inference' + - opentelemetry-proto ; extra == 'serve-async-inference' + - aiohttp>=3.13.3 ; extra == 'serve-async-inference' + - starlette ; extra == 'serve-async-inference' + - celery ; extra == 'serve-async-inference' + - smart-open ; extra == 'serve-async-inference' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'serve-async-inference' + - requests ; extra == 'serve-async-inference' + - opencensus ; extra == 'serve-async-inference' + - aiohttp-cors ; extra == 'serve-async-inference' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'serve-async-inference' + - colorful ; extra == 'serve-async-inference' + - prometheus-client>=0.7.1 ; extra == 
'serve-async-inference' + - watchfiles ; extra == 'serve-async-inference' + - opentelemetry-exporter-prometheus ; extra == 'serve-async-inference' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'serve-async-inference' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'serve-async-inference' + - grpcio>=1.42.0 ; extra == 'serve-async-inference' + - ray-cpp==2.54.0 ; extra == 'cpp' + - pandas ; extra == 'rllib' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'rllib' + - tensorboardx>=1.9 ; extra == 'rllib' + - requests ; extra == 'rllib' + - pyarrow>=9.0.0 ; extra == 'rllib' + - fsspec ; extra == 'rllib' + - dm-tree ; extra == 'rllib' + - gymnasium==1.2.2 ; extra == 'rllib' + - lz4 ; extra == 'rllib' + - ormsgpack>=1.7.0 ; extra == 'rllib' + - pyyaml ; extra == 'rllib' + - scipy ; extra == 'rllib' + - pandas ; extra == 'train' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'train' + - tensorboardx>=1.9 ; extra == 'train' + - requests ; extra == 'train' + - pyarrow>=9.0.0 ; extra == 'train' + - fsspec ; extra == 'train' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'train' + - fastapi ; extra == 'air' + - uvicorn[standard] ; extra == 'air' + - opentelemetry-sdk>=1.30.0 ; extra == 'air' + - numpy>=1.20 ; extra == 'air' + - opentelemetry-proto ; extra == 'air' + - aiohttp>=3.13.3 ; extra == 'air' + - starlette ; extra == 'air' + - pyarrow>=9.0.0 ; extra == 'air' + - tensorboardx>=1.9 ; extra == 'air' + - pandas>=1.3 ; extra == 'air' + - smart-open ; extra == 'air' + - aiohttp-cors ; extra == 'air' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'air' + - opencensus ; extra == 'air' + - requests ; extra == 'air' + - py-spy>=0.2.0 ; python_full_version < 
'3.12' and extra == 'air' + - pandas ; extra == 'air' + - colorful ; extra == 'air' + - prometheus-client>=0.7.1 ; extra == 'air' + - watchfiles ; extra == 'air' + - opentelemetry-exporter-prometheus ; extra == 'air' + - fsspec ; extra == 'air' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'air' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'air' + - grpcio>=1.42.0 ; extra == 'air' + - fastapi ; extra == 'all' + - grpcio ; extra == 'all' + - uvicorn[standard] ; extra == 'all' + - opentelemetry-sdk>=1.30.0 ; extra == 'all' + - cupy-cuda12x ; sys_platform != 'darwin' and extra == 'all' + - ormsgpack>=1.7.0 ; extra == 'all' + - dm-tree ; extra == 'all' + - pyopenssl ; extra == 'all' + - numpy>=1.20 ; extra == 'all' + - opentelemetry-proto ; extra == 'all' + - aiohttp>=3.13.3 ; extra == 'all' + - pyyaml ; extra == 'all' + - starlette ; extra == 'all' + - pyarrow>=9.0.0 ; extra == 'all' + - tensorboardx>=1.9 ; extra == 'all' + - memray ; sys_platform != 'win32' and extra == 'all' + - scipy ; extra == 'all' + - pandas>=1.3 ; extra == 'all' + - celery ; extra == 'all' + - smart-open ; extra == 'all' + - requests ; extra == 'all' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'all' + - opencensus ; extra == 'all' + - aiohttp-cors ; extra == 'all' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'all' + - lz4 ; extra == 'all' + - pandas ; extra == 'all' + - colorful ; extra == 'all' + - prometheus-client>=0.7.1 ; extra == 'all' + - gymnasium==1.2.2 ; extra == 'all' + - watchfiles ; extra == 'all' + - opentelemetry-exporter-prometheus ; extra == 'all' + - fsspec ; extra == 'all' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'all' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'all' + - grpcio!=1.56.0 ; sys_platform == 'darwin' and extra == 'all' + - grpcio>=1.42.0 ; extra == 'all' + - fastapi ; extra == 'all-cpp' + - grpcio ; extra == 'all-cpp' + - 
uvicorn[standard] ; extra == 'all-cpp' + - opentelemetry-sdk>=1.30.0 ; extra == 'all-cpp' + - cupy-cuda12x ; sys_platform != 'darwin' and extra == 'all-cpp' + - ormsgpack>=1.7.0 ; extra == 'all-cpp' + - dm-tree ; extra == 'all-cpp' + - pyopenssl ; extra == 'all-cpp' + - pyyaml ; extra == 'all-cpp' + - numpy>=1.20 ; extra == 'all-cpp' + - opentelemetry-proto ; extra == 'all-cpp' + - aiohttp>=3.13.3 ; extra == 'all-cpp' + - starlette ; extra == 'all-cpp' + - pyarrow>=9.0.0 ; extra == 'all-cpp' + - tensorboardx>=1.9 ; extra == 'all-cpp' + - memray ; sys_platform != 'win32' and extra == 'all-cpp' + - scipy ; extra == 'all-cpp' + - pandas>=1.3 ; extra == 'all-cpp' + - celery ; extra == 'all-cpp' + - smart-open ; extra == 'all-cpp' + - requests ; extra == 'all-cpp' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'all-cpp' + - opencensus ; extra == 'all-cpp' + - aiohttp-cors ; extra == 'all-cpp' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'all-cpp' + - ray-cpp==2.54.0 ; extra == 'all-cpp' + - lz4 ; extra == 'all-cpp' + - pandas ; extra == 'all-cpp' + - colorful ; extra == 'all-cpp' + - prometheus-client>=0.7.1 ; extra == 'all-cpp' + - gymnasium==1.2.2 ; extra == 'all-cpp' + - watchfiles ; extra == 'all-cpp' + - opentelemetry-exporter-prometheus ; extra == 'all-cpp' + - fsspec ; extra == 'all-cpp' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'all-cpp' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'all-cpp' + - grpcio!=1.56.0 ; sys_platform == 'darwin' and extra == 'all-cpp' + - grpcio>=1.42.0 ; extra == 'all-cpp' + - fastapi ; extra == 'llm' + - meson ; extra == 'llm' + - hf-transfer ; extra == 'llm' + - uvicorn[standard] ; extra == 'llm' + - opentelemetry-sdk>=1.30.0 ; extra == 'llm' + - numpy>=1.20 ; extra == 'llm' + - opentelemetry-proto ; extra == 'llm' + - aiohttp>=3.13.3 ; extra == 'llm' + - starlette ; extra == 'llm' + - pyarrow>=9.0.0 ; extra == 'llm' + - 
typer ; extra == 'llm' + - pandas>=1.3 ; extra == 'llm' + - smart-open ; extra == 'llm' + - aiohttp-cors ; extra == 'llm' + - pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*,!=2.11.*,<3 ; extra == 'llm' + - opencensus ; extra == 'llm' + - requests ; extra == 'llm' + - py-spy>=0.2.0 ; python_full_version < '3.12' and extra == 'llm' + - jsonref>=1.1.0 ; extra == 'llm' + - vllm[audio]>=0.15.0 ; extra == 'llm' + - ninja ; extra == 'llm' + - pybind11 ; extra == 'llm' + - transformers>=4.57.3 ; extra == 'llm' + - nixl>=0.6.1 ; extra == 'llm' + - colorful ; extra == 'llm' + - prometheus-client>=0.7.1 ; extra == 'llm' + - async-timeout ; python_full_version < '3.11' and extra == 'llm' + - watchfiles ; extra == 'llm' + - opentelemetry-exporter-prometheus ; extra == 'llm' + - fsspec ; extra == 'llm' + - jsonschema ; extra == 'llm' + - py-spy>=0.4.0 ; python_full_version >= '3.12' and extra == 'llm' + - virtualenv>=20.0.24,!=20.21.1 ; extra == 'llm' + - grpcio>=1.42.0 ; extra == 'llm' + requires_python: '>=3.10' +- conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.3-h853b02a_0.conda + sha256: 12ffde5a6f958e285aa22c191ca01bbd3d6e710aa852e00618fa6ddc59149002 + md5: d7d95fc8287ea7bf33e0e7116d2b95ec + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - ncurses >=6.5,<7.0a0 + license: GPL-3.0-only + license_family: GPL + purls: [] + size: 345073 + timestamp: 1765813471974 +- conda: https://conda.anaconda.org/conda-forge/osx-64/readline-8.3-h68b038d_0.conda + sha256: 4614af680aa0920e82b953fece85a03007e0719c3399f13d7de64176874b80d5 + md5: eefd65452dfe7cce476a519bece46704 + depends: + - __osx >=10.13 + - ncurses >=6.5,<7.0a0 + license: GPL-3.0-only + license_family: GPL + purls: [] + size: 317819 + timestamp: 1765813692798 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/readline-8.3-h46df422_0.conda + sha256: a77010528efb4b548ac2a4484eaf7e1c3907f2aec86123ed9c5212ae44502477 + md5: 
f8381319127120ce51e081dce4865cf4 + depends: + - __osx >=11.0 + - ncurses >=6.5,<7.0a0 + license: GPL-3.0-only + license_family: GPL + purls: [] + size: 313930 + timestamp: 1765813902568 +- pypi: https://files.pythonhosted.org/packages/20/2e/409703d645363352a20c944f5d119bdae3eb3034051a53724a7c5fee12b8/redis-4.6.0-py3-none-any.whl + name: redis + version: 4.6.0 + sha256: e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c + requires_dist: + - async-timeout>=4.0.2 ; python_full_version <= '3.11.2' + - importlib-metadata>=1.0 ; python_full_version < '3.8' + - typing-extensions ; python_full_version < '3.8' + - hiredis>=1.0.0 ; extra == 'hiredis' + - cryptography>=36.0.1 ; extra == 'ocsp' + - pyopenssl==20.0.1 ; extra == 'ocsp' + - requests>=2.26.0 ; extra == 'ocsp' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl + name: referencing + version: 0.37.0 + sha256: 381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231 + requires_dist: + - attrs>=22.2.0 + - rpds-py>=0.7.0 + - typing-extensions>=4.4.0 ; python_full_version < '3.13' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl + name: requests + version: 2.32.5 + sha256: 2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6 + requires_dist: + - charset-normalizer>=2,<4 + - idna>=2.5,<4 + - urllib3>=1.21.1,<3 + - certifi>=2017.4.17 + - pysocks>=1.5.6,!=1.5.7 ; extra == 'socks' + - chardet>=3.0.2,<6 ; extra == 'use-chardet-on-py3' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl + name: requests-oauthlib + version: 2.0.0 + sha256: 7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36 + 
requires_dist: + - oauthlib>=3.0.0 + - requests>=2.0.0 + - oauthlib[signedtoken]>=3.0.0 ; extra == 'rsa' + requires_python: '>=3.4' +- pypi: https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl + name: requests-toolbelt + version: 1.0.0 + sha256: cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 + requires_dist: + - requests>=2.0.1,<3.0.0 + requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*' +- pypi: https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl + name: rich + version: 14.3.3 + sha256: 793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d + requires_dist: + - ipywidgets>=7.5.1,<9 ; extra == 'jupyter' + - markdown-it-py>=2.2.0 + - pygments>=2.13.0,<3.0.0 + requires_python: '>=3.8.0' +- pypi: https://files.pythonhosted.org/packages/06/0c/0c411a0ec64ccb6d104dcabe0e713e05e153a9a2c3c2bd2b32ce412166fe/rpds_py-0.30.0-cp310-cp310-macosx_10_12_x86_64.whl + name: rpds-py + version: 0.30.0 + sha256: 679ae98e00c0e8d68a7fda324e16b90fd5260945b45d3b824c892cec9eea3288 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/19/6a/4ba3d0fb7297ebae71171822554abe48d7cab29c28b8f9f2c04b79988c05/rpds_py-0.30.0-cp310-cp310-macosx_11_0_arm64.whl + name: rpds-py + version: 0.30.0 + sha256: 4cc2206b76b4f576934f0ed374b10d7ca5f457858b157ca52064bdfc26b9fc00 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/2b/60/19f7884db5d5603edf3c6bce35408f45ad3e97e10007df0e17dd57af18f8/rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl + name: rpds-py + version: 0.30.0 + sha256: ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be + requires_python: '>=3.10' +- pypi: 
https://files.pythonhosted.org/packages/61/b5/707f6cf0066a6412aacc11d17920ea2e19e5b2f04081c64526eb35b5c6e7/rpds_py-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + name: rpds-py + version: 0.30.0 + sha256: 0c0e95f6819a19965ff420f65578bacb0b00f251fefe2c8b23347c37174271f3 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/86/81/dad16382ebbd3d0e0328776d8fd7ca94220e4fa0798d1dc5e7da48cb3201/rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl + name: rpds-py + version: 0.30.0 + sha256: 68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/ce/81/9a91c0111ce1758c92516a3e44776920b579d9a7c09b2b06b642d4de3f0f/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + name: rpds-py + version: 0.30.0 + sha256: 47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl + name: rsa + version: 4.9.1 + sha256: 68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762 + requires_dist: + - pyasn1>=0.1.3 + requires_python: '>=3.6,<4' +- pypi: https://files.pythonhosted.org/packages/6d/4f/d073e09df851cfa251ef7840007d04db3293a0482ce607d2b993926089be/s3transfer-0.13.1-py3-none-any.whl + name: s3transfer + version: 0.13.1 + sha256: a981aa7429be23fe6dfc13e80e4020057cbab622b08c0315288758d67cabc724 + requires_dist: + - botocore>=1.37.4,<2.0a0 + - botocore[crt]>=1.37.4,<2.0a0 ; extra == 'crt' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/e1/c6/76dc613121b793286a3f91621d7b75a2b493e0390ddca50f11993eadf192/setuptools-82.0.0-py3-none-any.whl + name: setuptools + version: 82.0.0 + sha256: 70b18734b607bd1da571d097d236cfcfacaf01de45717d59e6e04b96877532e0 + requires_dist: + - pytest>=6,!=8.1.* ; extra == 'test' + - virtualenv>=13.0.0 ; extra == 
'test' + - wheel>=0.44.0 ; extra == 'test' + - pip>=19.1 ; extra == 'test' + - packaging>=24.2 ; extra == 'test' + - jaraco-envs>=2.2 ; extra == 'test' + - pytest-xdist>=3 ; extra == 'test' + - jaraco-path>=3.7.2 ; extra == 'test' + - build[virtualenv]>=1.0.3 ; extra == 'test' + - filelock>=3.4.0 ; extra == 'test' + - ini2toml[lite]>=0.14 ; extra == 'test' + - tomli-w>=1.0.0 ; extra == 'test' + - pytest-timeout ; extra == 'test' + - pytest-perf ; sys_platform != 'cygwin' and extra == 'test' + - jaraco-develop>=7.21 ; python_full_version >= '3.9' and sys_platform != 'cygwin' and extra == 'test' + - pytest-home>=0.5 ; extra == 'test' + - pytest-subprocess ; extra == 'test' + - pyproject-hooks!=1.1 ; extra == 'test' + - jaraco-test>=5.5 ; extra == 'test' + - sphinx>=3.5 ; extra == 'doc' + - jaraco-packaging>=9.3 ; extra == 'doc' + - rst-linker>=1.9 ; extra == 'doc' + - furo ; extra == 'doc' + - sphinx-lint ; extra == 'doc' + - jaraco-tidelift>=1.4 ; extra == 'doc' + - pygments-github-lexers==0.0.5 ; extra == 'doc' + - sphinx-favicon ; extra == 'doc' + - sphinx-inline-tabs ; extra == 'doc' + - sphinx-reredirects ; extra == 'doc' + - sphinxcontrib-towncrier ; extra == 'doc' + - sphinx-notfound-page>=1,<2 ; extra == 'doc' + - pyproject-hooks!=1.1 ; extra == 'doc' + - towncrier<24.7 ; extra == 'doc' + - packaging>=24.2 ; extra == 'core' + - more-itertools>=8.8 ; extra == 'core' + - jaraco-text>=3.7 ; extra == 'core' + - importlib-metadata>=6 ; python_full_version < '3.10' and extra == 'core' + - tomli>=2.0.1 ; python_full_version < '3.11' and extra == 'core' + - wheel>=0.43.0 ; extra == 'core' + - platformdirs>=4.2.2 ; extra == 'core' + - jaraco-functools>=4 ; extra == 'core' + - more-itertools ; extra == 'core' + - pytest-checkdocs>=2.4 ; extra == 'check' + - pytest-ruff>=0.2.1 ; sys_platform != 'cygwin' and extra == 'check' + - ruff>=0.13.0 ; sys_platform != 'cygwin' and extra == 'check' + - pytest-cov ; extra == 'cover' + - pytest-enabler>=2.2 ; extra == 'enabler' + - 
pytest-mypy ; extra == 'type' + - mypy==1.18.* ; extra == 'type' + - importlib-metadata>=7.0.2 ; python_full_version < '3.10' and extra == 'type' + - jaraco-develop>=7.21 ; sys_platform != 'cygwin' and extra == 'type' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl + name: six + version: 1.17.0 + sha256: 4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 + requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*' +- pypi: https://files.pythonhosted.org/packages/60/e6/30c4015e2712bf8bf83b54ddadeee0494b68ae6d0f6d49d9373f463305d4/snowflake_connector_python-4.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + name: snowflake-connector-python + version: 4.0.0 + sha256: 4106a66e770e564b3037457b7b01b15ca28aee61afb88560b664aa8af439b533 + requires_dist: + - asn1crypto>0.24.0,<2.0.0 + - cryptography>=3.1.0 + - pyopenssl>=22.0.0,<26.0.0 + - pyjwt<3.0.0 + - pytz + - requests<3.0.0 + - packaging + - charset-normalizer>=2,<4 + - idna>=2.5,<4 + - urllib3>=1.21.1,<2.0.0 ; python_full_version < '3.10' + - certifi>=2017.4.17 + - typing-extensions>=4.3,<5 + - filelock>=3.5,<4 + - sortedcontainers>=2.4.0 + - platformdirs>=2.6.0,<5.0.0 + - tomlkit + - boto3>=1.24 + - botocore>=1.24 + - boto3>=1.24 ; extra == 'boto' + - botocore>=1.24 ; extra == 'boto' + - cython ; extra == 'development' + - coverage ; extra == 'development' + - more-itertools ; extra == 'development' + - numpy<=2.2.4 ; extra == 'development' + - pendulum!=2.1.1 ; extra == 'development' + - pexpect ; extra == 'development' + - pytest<7.5.0 ; extra == 'development' + - pytest-cov ; extra == 'development' + - pytest-rerunfailures<16.0 ; extra == 'development' + - pytest-timeout ; extra == 'development' + - pytest-xdist ; extra == 'development' + - pytzdata ; extra == 'development' + - responses ; extra == 'development' + - pandas>=2.1.2,<3.0.0 ; extra == 'pandas' + - pyarrow ; 
extra == 'pandas' + - keyring>=23.1.0,<26.0.0 ; extra == 'secure-local-storage' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/e4/75/f845ca5079a6b911023fa945dbf1bac0ed1c2f5967108b14440c740cb410/snowflake_connector_python-4.0.0-cp310-cp310-macosx_11_0_arm64.whl + name: snowflake-connector-python + version: 4.0.0 + sha256: 2c3e0f6d103fe67c975550ed424f579d3e7ae503d56467e5549f3a1a1e0e8f24 + requires_dist: + - asn1crypto>0.24.0,<2.0.0 + - cryptography>=3.1.0 + - pyopenssl>=22.0.0,<26.0.0 + - pyjwt<3.0.0 + - pytz + - requests<3.0.0 + - packaging + - charset-normalizer>=2,<4 + - idna>=2.5,<4 + - urllib3>=1.21.1,<2.0.0 ; python_full_version < '3.10' + - certifi>=2017.4.17 + - typing-extensions>=4.3,<5 + - filelock>=3.5,<4 + - sortedcontainers>=2.4.0 + - platformdirs>=2.6.0,<5.0.0 + - tomlkit + - boto3>=1.24 + - botocore>=1.24 + - boto3>=1.24 ; extra == 'boto' + - botocore>=1.24 ; extra == 'boto' + - cython ; extra == 'development' + - coverage ; extra == 'development' + - more-itertools ; extra == 'development' + - numpy<=2.2.4 ; extra == 'development' + - pendulum!=2.1.1 ; extra == 'development' + - pexpect ; extra == 'development' + - pytest<7.5.0 ; extra == 'development' + - pytest-cov ; extra == 'development' + - pytest-rerunfailures<16.0 ; extra == 'development' + - pytest-timeout ; extra == 'development' + - pytest-xdist ; extra == 'development' + - pytzdata ; extra == 'development' + - responses ; extra == 'development' + - pandas>=2.1.2,<3.0.0 ; extra == 'pandas' + - pyarrow ; extra == 'pandas' + - keyring>=23.1.0,<26.0.0 ; extra == 'secure-local-storage' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/fd/80/3a7e36a9e53beeb27c0599d2703f33bb812be931b469b154b08df0eeeaf5/snowflake_connector_python-4.0.0-cp310-cp310-macosx_11_0_x86_64.whl + name: snowflake-connector-python + version: 4.0.0 + sha256: e8d5b66f283967c700fff2303ac5e52d1a3cf41990a634f121ac8b1f1cd9af10 + requires_dist: + - asn1crypto>0.24.0,<2.0.0 + - 
cryptography>=3.1.0 + - pyopenssl>=22.0.0,<26.0.0 + - pyjwt<3.0.0 + - pytz + - requests<3.0.0 + - packaging + - charset-normalizer>=2,<4 + - idna>=2.5,<4 + - urllib3>=1.21.1,<2.0.0 ; python_full_version < '3.10' + - certifi>=2017.4.17 + - typing-extensions>=4.3,<5 + - filelock>=3.5,<4 + - sortedcontainers>=2.4.0 + - platformdirs>=2.6.0,<5.0.0 + - tomlkit + - boto3>=1.24 + - botocore>=1.24 + - boto3>=1.24 ; extra == 'boto' + - botocore>=1.24 ; extra == 'boto' + - cython ; extra == 'development' + - coverage ; extra == 'development' + - more-itertools ; extra == 'development' + - numpy<=2.2.4 ; extra == 'development' + - pendulum!=2.1.1 ; extra == 'development' + - pexpect ; extra == 'development' + - pytest<7.5.0 ; extra == 'development' + - pytest-cov ; extra == 'development' + - pytest-rerunfailures<16.0 ; extra == 'development' + - pytest-timeout ; extra == 'development' + - pytest-xdist ; extra == 'development' + - pytzdata ; extra == 'development' + - responses ; extra == 'development' + - pandas>=2.1.2,<3.0.0 ; extra == 'pandas' + - pyarrow ; extra == 'pandas' + - keyring>=23.1.0,<26.0.0 ; extra == 'secure-local-storage' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl + name: sortedcontainers + version: 2.4.0 + sha256: a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0 +- pypi: https://files.pythonhosted.org/packages/46/2c/9664130905f03db57961b8980b05cab624afd114bf2be2576628a9f22da4/sqlalchemy-2.0.48-py3-none-any.whl + name: sqlalchemy + version: 2.0.48 + sha256: a66fe406437dd65cacd96a72689a3aaaecaebbcd62d81c5ac1c0fdbeac835096 + requires_dist: + - importlib-metadata ; python_full_version < '3.8' + - greenlet>=1 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or 
platform_machine == 'x86_64' + - typing-extensions>=4.6.0 + - greenlet>=1 ; extra == 'asyncio' + - mypy>=0.910 ; extra == 'mypy' + - pyodbc ; extra == 'mssql' + - pymssql ; extra == 'mssql-pymssql' + - pyodbc ; extra == 'mssql-pyodbc' + - mysqlclient>=1.4.0 ; extra == 'mysql' + - mysql-connector-python ; extra == 'mysql-connector' + - mariadb>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10 ; extra == 'mariadb-connector' + - cx-oracle>=8 ; extra == 'oracle' + - oracledb>=1.0.1 ; extra == 'oracle-oracledb' + - psycopg2>=2.7 ; extra == 'postgresql' + - pg8000>=1.29.1 ; extra == 'postgresql-pg8000' + - greenlet>=1 ; extra == 'postgresql-asyncpg' + - asyncpg ; extra == 'postgresql-asyncpg' + - psycopg2-binary ; extra == 'postgresql-psycopg2binary' + - psycopg2cffi ; extra == 'postgresql-psycopg2cffi' + - psycopg>=3.0.7 ; extra == 'postgresql-psycopg' + - psycopg[binary]>=3.0.7 ; extra == 'postgresql-psycopgbinary' + - pymysql ; extra == 'pymysql' + - greenlet>=1 ; extra == 'aiomysql' + - aiomysql>=0.2.0 ; extra == 'aiomysql' + - greenlet>=1 ; extra == 'aioodbc' + - aioodbc ; extra == 'aioodbc' + - greenlet>=1 ; extra == 'asyncmy' + - asyncmy>=0.2.3,!=0.2.4,!=0.2.6 ; extra == 'asyncmy' + - greenlet>=1 ; extra == 'aiosqlite' + - aiosqlite ; extra == 'aiosqlite' + - typing-extensions!=3.10.0.1 ; extra == 'aiosqlite' + - sqlcipher3-binary ; extra == 'sqlcipher' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/5c/ad/6c4395649a212a6c603a72c5b9ab5dce3135a1546cfdffa3c427e71fd535/sqlalchemy-2.0.48-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: sqlalchemy + version: 2.0.48 + sha256: 10853a53a4a00417a00913d270dddda75815fcb80675874285f41051c094d7dd + requires_dist: + - importlib-metadata ; python_full_version < '3.8' + - greenlet>=1 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or 
platform_machine == 'x86_64' + - typing-extensions>=4.6.0 + - greenlet>=1 ; extra == 'asyncio' + - mypy>=0.910 ; extra == 'mypy' + - pyodbc ; extra == 'mssql' + - pymssql ; extra == 'mssql-pymssql' + - pyodbc ; extra == 'mssql-pyodbc' + - mysqlclient>=1.4.0 ; extra == 'mysql' + - mysql-connector-python ; extra == 'mysql-connector' + - mariadb>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10 ; extra == 'mariadb-connector' + - cx-oracle>=8 ; extra == 'oracle' + - oracledb>=1.0.1 ; extra == 'oracle-oracledb' + - psycopg2>=2.7 ; extra == 'postgresql' + - pg8000>=1.29.1 ; extra == 'postgresql-pg8000' + - greenlet>=1 ; extra == 'postgresql-asyncpg' + - asyncpg ; extra == 'postgresql-asyncpg' + - psycopg2-binary ; extra == 'postgresql-psycopg2binary' + - psycopg2cffi ; extra == 'postgresql-psycopg2cffi' + - psycopg>=3.0.7 ; extra == 'postgresql-psycopg' + - psycopg[binary]>=3.0.7 ; extra == 'postgresql-psycopgbinary' + - pymysql ; extra == 'pymysql' + - greenlet>=1 ; extra == 'aiomysql' + - aiomysql>=0.2.0 ; extra == 'aiomysql' + - greenlet>=1 ; extra == 'aioodbc' + - aioodbc ; extra == 'aioodbc' + - greenlet>=1 ; extra == 'asyncmy' + - asyncmy>=0.2.3,!=0.2.4,!=0.2.6 ; extra == 'asyncmy' + - greenlet>=1 ; extra == 'aiosqlite' + - aiosqlite ; extra == 'aiosqlite' + - typing-extensions!=3.10.0.1 ; extra == 'aiosqlite' + - sqlcipher3-binary ; extra == 'sqlcipher' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/9a/67/1235676e93dd3b742a4a8eddfae49eea46c85e3eed29f0da446a8dd57500/sqlalchemy-2.0.48-cp310-cp310-macosx_11_0_arm64.whl + name: sqlalchemy + version: 2.0.48 + sha256: 7001dc9d5f6bb4deb756d5928eaefe1930f6f4179da3924cbd95ee0e9f4dce89 + requires_dist: + - importlib-metadata ; python_full_version < '3.8' + - greenlet>=1 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' + - 
typing-extensions>=4.6.0 + - greenlet>=1 ; extra == 'asyncio' + - mypy>=0.910 ; extra == 'mypy' + - pyodbc ; extra == 'mssql' + - pymssql ; extra == 'mssql-pymssql' + - pyodbc ; extra == 'mssql-pyodbc' + - mysqlclient>=1.4.0 ; extra == 'mysql' + - mysql-connector-python ; extra == 'mysql-connector' + - mariadb>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10 ; extra == 'mariadb-connector' + - cx-oracle>=8 ; extra == 'oracle' + - oracledb>=1.0.1 ; extra == 'oracle-oracledb' + - psycopg2>=2.7 ; extra == 'postgresql' + - pg8000>=1.29.1 ; extra == 'postgresql-pg8000' + - greenlet>=1 ; extra == 'postgresql-asyncpg' + - asyncpg ; extra == 'postgresql-asyncpg' + - psycopg2-binary ; extra == 'postgresql-psycopg2binary' + - psycopg2cffi ; extra == 'postgresql-psycopg2cffi' + - psycopg>=3.0.7 ; extra == 'postgresql-psycopg' + - psycopg[binary]>=3.0.7 ; extra == 'postgresql-psycopgbinary' + - pymysql ; extra == 'pymysql' + - greenlet>=1 ; extra == 'aiomysql' + - aiomysql>=0.2.0 ; extra == 'aiomysql' + - greenlet>=1 ; extra == 'aioodbc' + - aioodbc ; extra == 'aioodbc' + - greenlet>=1 ; extra == 'asyncmy' + - asyncmy>=0.2.3,!=0.2.4,!=0.2.6 ; extra == 'asyncmy' + - greenlet>=1 ; extra == 'aiosqlite' + - aiosqlite ; extra == 'aiosqlite' + - typing-extensions!=3.10.0.1 ; extra == 'aiosqlite' + - sqlcipher3-binary ; extra == 'sqlcipher' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/f2/5e/327428a034407651a048f5e624361adf3f9fbac9d0fa98e981e9c6ff2f5e/sqlalchemy-2.0.48-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: sqlalchemy + version: 2.0.48 + sha256: 426c5ca86415d9b8945c7073597e10de9644802e2ff502b8e1f11a7a2642856b + requires_dist: + - importlib-metadata ; python_full_version < '3.8' + - greenlet>=1 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' + - 
typing-extensions>=4.6.0 + - greenlet>=1 ; extra == 'asyncio' + - mypy>=0.910 ; extra == 'mypy' + - pyodbc ; extra == 'mssql' + - pymssql ; extra == 'mssql-pymssql' + - pyodbc ; extra == 'mssql-pyodbc' + - mysqlclient>=1.4.0 ; extra == 'mysql' + - mysql-connector-python ; extra == 'mysql-connector' + - mariadb>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10 ; extra == 'mariadb-connector' + - cx-oracle>=8 ; extra == 'oracle' + - oracledb>=1.0.1 ; extra == 'oracle-oracledb' + - psycopg2>=2.7 ; extra == 'postgresql' + - pg8000>=1.29.1 ; extra == 'postgresql-pg8000' + - greenlet>=1 ; extra == 'postgresql-asyncpg' + - asyncpg ; extra == 'postgresql-asyncpg' + - psycopg2-binary ; extra == 'postgresql-psycopg2binary' + - psycopg2cffi ; extra == 'postgresql-psycopg2cffi' + - psycopg>=3.0.7 ; extra == 'postgresql-psycopg' + - psycopg[binary]>=3.0.7 ; extra == 'postgresql-psycopgbinary' + - pymysql ; extra == 'pymysql' + - greenlet>=1 ; extra == 'aiomysql' + - aiomysql>=0.2.0 ; extra == 'aiomysql' + - greenlet>=1 ; extra == 'aioodbc' + - aioodbc ; extra == 'aioodbc' + - greenlet>=1 ; extra == 'asyncmy' + - asyncmy>=0.2.3,!=0.2.4,!=0.2.6 ; extra == 'asyncmy' + - greenlet>=1 ; extra == 'aiosqlite' + - aiosqlite ; extra == 'aiosqlite' + - typing-extensions!=3.10.0.1 ; extra == 'aiosqlite' + - sqlcipher3-binary ; extra == 'sqlcipher' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/f7/b3/f437eaa1cf028bb3c927172c7272366393e73ccd104dcf5b6963f4ab5318/sqlalchemy-2.0.48-cp314-cp314-macosx_11_0_arm64.whl + name: sqlalchemy + version: 2.0.48 + sha256: e2d0d88686e3d35a76f3e15a34e8c12d73fc94c1dea1cd55782e695cc14086dd + requires_dist: + - importlib-metadata ; python_full_version < '3.8' + - greenlet>=1 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' + - typing-extensions>=4.6.0 + - greenlet>=1 ; extra == 
'asyncio' + - mypy>=0.910 ; extra == 'mypy' + - pyodbc ; extra == 'mssql' + - pymssql ; extra == 'mssql-pymssql' + - pyodbc ; extra == 'mssql-pyodbc' + - mysqlclient>=1.4.0 ; extra == 'mysql' + - mysql-connector-python ; extra == 'mysql-connector' + - mariadb>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10 ; extra == 'mariadb-connector' + - cx-oracle>=8 ; extra == 'oracle' + - oracledb>=1.0.1 ; extra == 'oracle-oracledb' + - psycopg2>=2.7 ; extra == 'postgresql' + - pg8000>=1.29.1 ; extra == 'postgresql-pg8000' + - greenlet>=1 ; extra == 'postgresql-asyncpg' + - asyncpg ; extra == 'postgresql-asyncpg' + - psycopg2-binary ; extra == 'postgresql-psycopg2binary' + - psycopg2cffi ; extra == 'postgresql-psycopg2cffi' + - psycopg>=3.0.7 ; extra == 'postgresql-psycopg' + - psycopg[binary]>=3.0.7 ; extra == 'postgresql-psycopgbinary' + - pymysql ; extra == 'pymysql' + - greenlet>=1 ; extra == 'aiomysql' + - aiomysql>=0.2.0 ; extra == 'aiomysql' + - greenlet>=1 ; extra == 'aioodbc' + - aioodbc ; extra == 'aioodbc' + - greenlet>=1 ; extra == 'asyncmy' + - asyncmy>=0.2.3,!=0.2.4,!=0.2.6 ; extra == 'asyncmy' + - greenlet>=1 ; extra == 'aiosqlite' + - aiosqlite ; extra == 'aiosqlite' + - typing-extensions!=3.10.0.1 ; extra == 'aiosqlite' + - sqlcipher3-binary ; extra == 'sqlcipher' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/ad/c9/f58c3a17beb650700f9d2eccd410726b6d96df8953663700764ca48636c7/sqlglot-29.0.1-py3-none-any.whl + name: sqlglot + version: 29.0.1 + sha256: 06a473ea6c2b3632ac67bd38e687a6860265bf4156e66b54adeda15d07f00c65 + requires_dist: + - duckdb>=0.6 ; extra == 'dev' + - mypy ; extra == 'dev' + - pandas ; extra == 'dev' + - pandas-stubs ; extra == 'dev' + - python-dateutil ; extra == 'dev' + - pytz ; extra == 'dev' + - pdoc ; extra == 'dev' + - pre-commit ; extra == 'dev' + - ruff==0.7.2 ; extra == 'dev' + - types-python-dateutil ; extra == 'dev' + - types-pytz ; extra == 'dev' + - typing-extensions ; extra == 'dev' + - pyperf ; extra == 'dev' + - 
sqlglotc ; extra == 'c' + - sqlglotrs==0.13.0 ; extra == 'rs' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl + name: starlette + version: 0.52.1 + sha256: 0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74 + requires_dist: + - anyio>=3.6.2,<5 + - typing-extensions>=4.10.0 ; python_full_version < '3.13' + - httpx>=0.27.0,<0.29.0 ; extra == 'full' + - itsdangerous ; extra == 'full' + - jinja2 ; extra == 'full' + - python-multipart>=0.0.18 ; extra == 'full' + - pyyaml ; extra == 'full' + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl + name: tabulate + version: 0.9.0 + sha256: 024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f + requires_dist: + - wcwidth ; extra == 'widechars' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl + name: tenacity + version: 8.5.0 + sha256: b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687 + requires_dist: + - reno ; extra == 'doc' + - sphinx ; extra == 'doc' + - pytest ; extra == 'test' + - tornado>=4.5 ; extra == 'test' + - typeguard ; extra == 'test' + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/3e/f8/6425ff800894784160290bcb9737878d910b6da6a08633bfe7f2ed8c9ae3/testcontainers-4.9.0-py3-none-any.whl + name: testcontainers + version: 4.9.0 + sha256: c6fee929990972c40bf6b91b7072c94064ff3649b405a14fde0274c8b2479d32 + requires_dist: + - azure-cosmos ; extra == 'cosmosdb' + - azure-storage-blob>=12.19,<13.0 ; extra == 'azurite' + - bcrypt ; extra == 'registry' + - boto3 ; extra == 'aws' or extra == 'localstack' + - cassandra-driver==3.29.1 ; extra == 'scylla' + - chromadb-client ; extra == 
'chroma' + - clickhouse-driver ; extra == 'clickhouse' + - cryptography ; extra == 'mailpit' or extra == 'sftp' + - docker + - google-cloud-datastore>=2 ; extra == 'google' + - google-cloud-pubsub>=2 ; extra == 'google' + - httpx ; extra == 'aws' or extra == 'generic' or extra == 'test-module-import' + - ibm-db-sa ; extra == 'db2' + - influxdb ; extra == 'influxdb' + - influxdb-client ; extra == 'influxdb' + - kubernetes ; extra == 'k3s' + - minio ; extra == 'minio' + - nats-py ; extra == 'nats' + - neo4j ; extra == 'neo4j' + - opensearch-py ; extra == 'opensearch' + - oracledb ; extra == 'oracle' or extra == 'oracle-free' + - pika ; extra == 'rabbitmq' + - pymongo ; extra == 'mongodb' + - pymssql ; extra == 'mssql' + - pymysql[rsa] ; extra == 'mysql' + - python-arango>=7.8,<8.0 ; extra == 'arangodb' + - python-dotenv + - python-keycloak ; extra == 'keycloak' + - pyyaml ; extra == 'k3s' + - qdrant-client ; extra == 'qdrant' + - redis ; extra == 'generic' or extra == 'redis' + - selenium ; extra == 'selenium' + - sqlalchemy ; extra == 'db2' or extra == 'mssql' or extra == 'mysql' or extra == 'oracle' or extra == 'oracle-free' + - trino ; extra == 'trino' + - typing-extensions + - urllib3 + - weaviate-client>=4.5.4,<5.0.0 ; extra == 'weaviate' + - wrapt + requires_python: '>=3.9,<4.0' +- conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h366c992_103.conda + sha256: cafeec44494f842ffeca27e9c8b0c27ed714f93ac77ddadc6aaf726b5554ebac + md5: cffd3bdd58090148f4cfcd831f4b26ab + depends: + - __glibc >=2.17,<3.0.a0 + - libgcc >=14 + - libzlib >=1.3.1,<2.0a0 + constrains: + - xorg-libx11 >=1.8.12,<2.0a0 + license: TCL + license_family: BSD + purls: [] + size: 3301196 + timestamp: 1769460227866 +- conda: https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.13-h7142dee_3.conda + sha256: 7f0d9c320288532873e2d8486c331ec6d87919c9028208d3f6ac91dc8f99a67b + md5: 6e6efb7463f8cef69dbcb4c2205bf60e + depends: + - __osx >=10.13 + - libzlib >=1.3.1,<2.0a0 + license: TCL 
+ license_family: BSD + purls: [] + size: 3282953 + timestamp: 1769460532442 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/tk-8.6.13-h010d191_3.conda + sha256: 799cab4b6cde62f91f750149995d149bc9db525ec12595e8a1d91b9317f038b3 + md5: a9d86bc62f39b94c4661716624eb21b0 + depends: + - __osx >=11.0 + - libzlib >=1.3.1,<2.0a0 + license: TCL + license_family: BSD + purls: [] + size: 3127137 + timestamp: 1769460817696 +- pypi: https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl + name: toml + version: 0.10.2 + sha256: 806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b + requires_python: '>=2.6,!=3.0.*,!=3.1.*,!=3.2.*' +- pypi: https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl + name: tomli + version: 2.4.0 + sha256: 1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/b5/11/87d6d29fb5d237229d67973a6c9e06e048f01cf4994dee194ab0ea841814/tomlkit-0.14.0-py3-none-any.whl + name: tomlkit + version: 0.14.0 + sha256: 592064ed85b40fa213469f81ac584f67a4f2992509a7c3ea2d632208623a3680 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl + name: toolz + version: 1.1.0 + sha256: 15ccc861ac51c53696de0a5d6d4607f99c210739caf987b5d2054f3efed429d8 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl + name: tqdm + version: 4.67.3 + sha256: ee1e4c0e59148062281c49d80b25b67771a127c85fc9676d3be5f243206826bf + requires_dist: + - colorama ; sys_platform == 'win32' + - importlib-metadata ; python_full_version < '3.8' + - pytest>=6 ; extra == 'dev' + - pytest-cov ; extra == 'dev' + - 
pytest-timeout ; extra == 'dev' + - pytest-asyncio>=0.24 ; extra == 'dev' + - nbval ; extra == 'dev' + - requests ; extra == 'discord' + - slack-sdk ; extra == 'slack' + - requests ; extra == 'telegram' + - ipywidgets>=6 ; extra == 'notebook' + requires_python: '>=3.7' +- pypi: https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl + name: typeguard + version: 4.5.1 + sha256: 44d2bf329d49a244110a090b55f5f91aa82d9a9834ebfd30bcc73651e4a8cc40 + requires_dist: + - importlib-metadata>=3.6 ; python_full_version < '3.10' + - typing-extensions>=4.14.0 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/8b/fa/4f4d3bfca9ef6dd17d69ed18b96564c53b32d3ce774132308d0bee849f10/types_pymysql-1.1.0.20251220-py3-none-any.whl + name: types-pymysql + version: 1.1.0.20251220 + sha256: fa1082af7dea6c53b6caa5784241924b1296ea3a8d3bd060417352c5e10c0618 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl + name: typing-extensions + version: 4.15.0 + sha256: f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl + name: typing-inspection + version: 0.4.2 + sha256: 4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7 + requires_dist: + - typing-extensions>=4.12.0 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl + name: tzdata + version: '2025.3' + sha256: 06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1 + requires_python: '>=2' +- conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda 
+ sha256: 1d30098909076af33a35017eed6f2953af1c769e273a0626a04722ac4acaba3c + md5: ad659d0a2b3e47e38d829aa8cad2d610 + license: LicenseRef-Public-Domain + purls: [] + size: 119135 + timestamp: 1767016325805 +- pypi: https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl + name: urllib3 + version: 2.6.3 + sha256: bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4 + requires_dist: + - brotli>=1.2.0 ; platform_python_implementation == 'CPython' and extra == 'brotli' + - brotlicffi>=1.2.0.0 ; platform_python_implementation != 'CPython' and extra == 'brotli' + - h2>=4,<5 ; extra == 'h2' + - pysocks>=1.5.6,!=1.5.7,<2.0 ; extra == 'socks' + - backports-zstd>=1.0.0 ; python_full_version < '3.14' and extra == 'zstd' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl + name: uvicorn + version: 0.34.0 + sha256: 023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4 + requires_dist: + - click>=7.0 + - h11>=0.8 + - typing-extensions>=4.0 ; python_full_version < '3.11' + - colorama>=0.4 ; sys_platform == 'win32' and extra == 'standard' + - httptools>=0.6.3 ; extra == 'standard' + - python-dotenv>=0.13 ; extra == 'standard' + - pyyaml>=5.1 ; extra == 'standard' + - uvloop>=0.14.0,!=0.15.0,!=0.15.1 ; platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32' and extra == 'standard' + - watchfiles>=0.13 ; extra == 'standard' + - websockets>=10.4 ; extra == 'standard' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/f7/1f/4e5f8770c2cf4faa2c3ed3c19f9d4485ac9db0a6b029a7866921709bdc6c/uvicorn_worker-0.3.0-py3-none-any.whl + name: uvicorn-worker + version: 0.3.0 + sha256: ef0fe8aad27b0290a9e602a256b03f5a5da3a9e5f942414ca587b645ec77dd52 + requires_dist: + - gunicorn>=20.1.0 + - uvicorn>=0.15.0 + 
requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/0d/f8/a132124dfda0777e489ca86732e85e69afcd1ff7686647000050ba670689/uvloop-0.22.1-cp314-cp314-macosx_10_13_x86_64.whl + name: uvloop + version: 0.22.1 + sha256: 4baa86acedf1d62115c1dc6ad1e17134476688f08c6efd8a2ab076e815665c74 + requires_dist: + - aiohttp>=3.10.5 ; extra == 'test' + - flake8~=6.1 ; extra == 'test' + - psutil ; extra == 'test' + - pycodestyle~=2.11.0 ; extra == 'test' + - pyopenssl~=25.3.0 ; extra == 'test' + - mypy>=0.800 ; extra == 'test' + - setuptools>=60 ; extra == 'dev' + - cython~=3.0 ; extra == 'dev' + - sphinx~=4.1.2 ; extra == 'docs' + - sphinxcontrib-asyncio~=0.3.0 ; extra == 'docs' + - sphinx-rtd-theme~=0.5.2 ; extra == 'docs' + requires_python: '>=3.8.1' +- pypi: https://files.pythonhosted.org/packages/90/cd/b62bdeaa429758aee8de8b00ac0dd26593a9de93d302bff3d21439e9791d/uvloop-0.22.1-cp314-cp314-macosx_10_13_universal2.whl + name: uvloop + version: 0.22.1 + sha256: 3879b88423ec7e97cd4eba2a443aa26ed4e59b45e6b76aabf13fe2f27023a142 + requires_dist: + - aiohttp>=3.10.5 ; extra == 'test' + - flake8~=6.1 ; extra == 'test' + - psutil ; extra == 'test' + - pycodestyle~=2.11.0 ; extra == 'test' + - pyopenssl~=25.3.0 ; extra == 'test' + - mypy>=0.800 ; extra == 'test' + - setuptools>=60 ; extra == 'dev' + - cython~=3.0 ; extra == 'dev' + - sphinx~=4.1.2 ; extra == 'docs' + - sphinxcontrib-asyncio~=0.3.0 ; extra == 'docs' + - sphinx-rtd-theme~=0.5.2 ; extra == 'docs' + requires_python: '>=3.8.1' +- pypi: https://files.pythonhosted.org/packages/b3/f6/21657bb3beb5f8c57ce8be3b83f653dd7933c2fd00545ed1b092d464799a/uvloop-0.22.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: uvloop + version: 0.22.1 + sha256: 481c990a7abe2c6f4fc3d98781cc9426ebd7f03a9aaa7eb03d3bfc68ac2a46bd + requires_dist: + - aiohttp>=3.10.5 ; extra == 'test' + - flake8~=6.1 ; extra == 'test' + - psutil ; extra == 'test' + - pycodestyle~=2.11.0 ; extra == 'test' + - 
pyopenssl~=25.3.0 ; extra == 'test' + - mypy>=0.800 ; extra == 'test' + - setuptools>=60 ; extra == 'dev' + - cython~=3.0 ; extra == 'dev' + - sphinx~=4.1.2 ; extra == 'docs' + - sphinxcontrib-asyncio~=0.3.0 ; extra == 'docs' + - sphinx-rtd-theme~=0.5.2 ; extra == 'docs' + requires_python: '>=3.8.1' +- pypi: https://files.pythonhosted.org/packages/b5/35/60249e9fd07b32c665192cec7af29e06c7cd96fa1d08b84f012a56a0b38e/uvloop-0.22.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: uvloop + version: 0.22.1 + sha256: c1955d5a1dd43198244d47664a5858082a3239766a839b2102a269aaff7a4e25 + requires_dist: + - aiohttp>=3.10.5 ; extra == 'test' + - flake8~=6.1 ; extra == 'test' + - psutil ; extra == 'test' + - pycodestyle~=2.11.0 ; extra == 'test' + - pyopenssl~=25.3.0 ; extra == 'test' + - mypy>=0.800 ; extra == 'test' + - setuptools>=60 ; extra == 'dev' + - cython~=3.0 ; extra == 'dev' + - sphinx~=4.1.2 ; extra == 'docs' + - sphinxcontrib-asyncio~=0.3.0 ; extra == 'docs' + - sphinx-rtd-theme~=0.5.2 ; extra == 'docs' + requires_python: '>=3.8.1' +- pypi: https://files.pythonhosted.org/packages/ba/ae/6f6f9af7f590b319c94532b9567409ba11f4fa71af1148cab1bf48a07048/uvloop-0.22.1-cp310-cp310-macosx_10_9_x86_64.whl + name: uvloop + version: 0.22.1 + sha256: 7cd375a12b71d33d46af85a3343b35d98e8116134ba404bd657b3b1d15988792 + requires_dist: + - aiohttp>=3.10.5 ; extra == 'test' + - flake8~=6.1 ; extra == 'test' + - psutil ; extra == 'test' + - pycodestyle~=2.11.0 ; extra == 'test' + - pyopenssl~=25.3.0 ; extra == 'test' + - mypy>=0.800 ; extra == 'test' + - setuptools>=60 ; extra == 'dev' + - cython~=3.0 ; extra == 'dev' + - sphinx~=4.1.2 ; extra == 'docs' + - sphinxcontrib-asyncio~=0.3.0 ; extra == 'docs' + - sphinx-rtd-theme~=0.5.2 ; extra == 'docs' + requires_python: '>=3.8.1' +- pypi: https://files.pythonhosted.org/packages/eb/14/ecceb239b65adaaf7fde510aa8bd534075695d1e5f8dadfa32b5723d9cfb/uvloop-0.22.1-cp310-cp310-macosx_10_9_universal2.whl + name: 
uvloop + version: 0.22.1 + sha256: ef6f0d4cc8a9fa1f6a910230cd53545d9a14479311e87e3cb225495952eb672c + requires_dist: + - aiohttp>=3.10.5 ; extra == 'test' + - flake8~=6.1 ; extra == 'test' + - psutil ; extra == 'test' + - pycodestyle~=2.11.0 ; extra == 'test' + - pyopenssl~=25.3.0 ; extra == 'test' + - mypy>=0.800 ; extra == 'test' + - setuptools>=60 ; extra == 'dev' + - cython~=3.0 ; extra == 'dev' + - sphinx~=4.1.2 ; extra == 'docs' + - sphinxcontrib-asyncio~=0.3.0 ; extra == 'docs' + - sphinx-rtd-theme~=0.5.2 ; extra == 'docs' + requires_python: '>=3.8.1' +- pypi: https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl + name: watchfiles + version: 1.1.1 + sha256: 39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd + requires_dist: + - anyio>=3.0.0 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/a7/1a/206e8cf2dd86fddf939165a57b4df61607a1e0add2785f170a3f616b7d9f/watchfiles-1.1.1-cp310-cp310-macosx_10_12_x86_64.whl + name: watchfiles + version: 1.1.1 + sha256: eef58232d32daf2ac67f42dea51a2c80f0d03379075d44a587051e63cc2e368c + requires_dist: + - anyio>=3.0.0 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/b3/0f/abaf5262b9c496b5dad4ed3c0e799cbecb1f8ea512ecb6ddd46646a9fca3/watchfiles-1.1.1-cp310-cp310-macosx_11_0_arm64.whl + name: watchfiles + version: 1.1.1 + sha256: 03fa0f5237118a0c5e496185cafa92878568b652a2e9a9382a5151b1a0380a43 + requires_dist: + - anyio>=3.0.0 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + name: watchfiles + version: 1.1.1 + sha256: 5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05 + requires_dist: + - anyio>=3.0.0 + requires_python: '>=3.9' +- pypi: 
https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl + name: watchfiles + version: 1.1.1 + sha256: d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5 + requires_dist: + - anyio>=3.0.0 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/d5/dc/1a680b7458ffa3b14bb64878112aefc8f2e4f73c5af763cbf0bd43100658/watchfiles-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + name: watchfiles + version: 1.1.1 + sha256: 544364b2b51a9b0c7000a4b4b02f90e9423d97fbbf7e06689236443ebcad81ab + requires_dist: + - anyio>=3.0.0 + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/10/40/904a4cb30d9b61c0e278899bf36342e9b0208eb3c470324a9ecbaac2a30f/websockets-16.0-cp310-cp310-macosx_11_0_arm64.whl + name: websockets + version: '16.0' + sha256: 583b7c42688636f930688d712885cf1531326ee05effd982028212ccc13e5957 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/19/0f/22ef6107ee52ab7f0b710d55d36f5a5d3ef19e8a205541a6d7ffa7994e5a/websockets-16.0-cp310-cp310-macosx_10_9_x86_64.whl + name: websockets + version: '16.0' + sha256: 8ff32bb86522a9e5e31439a58addbb0166f0204d64066fb955265c4e214160f0 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + name: websockets + version: '16.0' + sha256: 781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/9d/2f/4b3ca7e106bc608744b1cdae041e005e446124bebb037b18799c2d356864/websockets-16.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + name: websockets + version: '16.0' + sha256: 7d837379b647c0c4c2355c2499723f82f1635fd2c26510e1f587d89bc2199e72 + requires_python: 
'>=3.10' +- pypi: https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl + name: websockets + version: '16.0' + sha256: fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl + name: websockets + version: '16.0' + sha256: daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/45/69/f3c47642b79485a30a59c63f6d739ed779fb4cc8323205d047d741d55220/wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl + name: wrapt + version: 1.17.3 + sha256: e6b13af258d6a9ad602d57d889f83b9d5543acd471eee12eb51f5b01f8eb1bc2 + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/d1/71/e7e7f5670c1eafd9e990438e69d8fb46fa91a50785332e06b560c869454f/wrapt-1.17.3-cp310-cp310-macosx_11_0_arm64.whl + name: wrapt + version: 1.17.3 + sha256: fd341868a4b6714a5962c1af0bd44f7c404ef78720c7de4892901e540417111c + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/de/17/9f8f86755c191d6779d7ddead1a53c7a8aa18bccb7cea8e7e72dfa6a8a09/wrapt-1.17.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + name: wrapt + version: 1.17.3 + sha256: f9b2601381be482f70e5d1051a5965c25fb3625455a2bf520b5a077b22afb775 + requires_python: '>=3.8' +- pypi: https://files.pythonhosted.org/packages/25/e9/96dd77728b54a899d4ce2798d7b1296989ce687ed3c0cb917d6b3154bf5d/wrapt-2.1.1-cp310-cp310-macosx_11_0_arm64.whl + name: wrapt + version: 2.1.1 + sha256: e1c99544b6a7d40ca22195563b6d8bc3986ee8bb82f272f31f0670fe9440c869 + requires_dist: + - pytest ; extra == 'dev' + - setuptools ; extra == 'dev' + requires_python: '>=3.9' +- pypi: 
https://files.pythonhosted.org/packages/44/79/4c755b45df6ef30c0dd628ecfaa0c808854be147ca438429da70a162833c/wrapt-2.1.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl + name: wrapt + version: 2.1.1 + sha256: b2be3fa5f4efaf16ee7c77d0556abca35f5a18ad4ac06f0ef3904c3399010ce9 + requires_dist: + - pytest ; extra == 'dev' + - setuptools ; extra == 'dev' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/ca/21/293b657a27accfbbbb6007ebd78af0efa2083dac83e8f523272ea09b4638/wrapt-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl + name: wrapt + version: 2.1.1 + sha256: 7e927375e43fd5a985b27a8992327c22541b6dede1362fc79df337d26e23604f + requires_dist: + - pytest ; extra == 'dev' + - setuptools ; extra == 'dev' + requires_python: '>=3.9' +- pypi: https://files.pythonhosted.org/packages/50/12/95a1d33f04a79c402664070d43b8b9f72dc18914e135b345b611b0b1f8cc/yarl-1.23.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl + name: yarl + version: 1.23.0 + sha256: 31c9921eb8bd12633b41ad27686bbb0b1a2a9b8452bfdf221e34f311e9942ed4 + requires_dist: + - idna>=2.0 + - multidict>=4.0 + - propcache>=0.2.1 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/68/2e/c5a2234238f8ce37a8312b52801ee74117f576b1539eec8404a480434acc/yarl-1.23.0-cp310-cp310-macosx_11_0_arm64.whl + name: yarl + version: 1.23.0 + sha256: 2a6940a074fb3c48356ed0158a3ca5699c955ee4185b4d7d619be3c327143e05 + requires_dist: + - idna>=2.0 + - multidict>=4.0 + - propcache>=0.2.1 + requires_python: '>=3.10' +- pypi: https://files.pythonhosted.org/packages/7a/35/5a553687c5793df5429cd1db45909d4f3af7eee90014888c208d086a44f0/yarl-1.23.0-cp310-cp310-macosx_10_9_x86_64.whl + name: yarl + version: 1.23.0 + sha256: e4c53f8347cd4200f0d70a48ad059cabaf24f5adc6ba08622a23423bc7efa10d + requires_dist: + - idna>=2.0 + - multidict>=4.0 + - propcache>=0.2.1 + requires_python: '>=3.10' +- pypi: 
https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl + name: zipp + version: 3.23.0 + sha256: 071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e + requires_dist: + - pytest>=6,!=8.1.* ; extra == 'test' + - jaraco-itertools ; extra == 'test' + - jaraco-functools ; extra == 'test' + - more-itertools ; extra == 'test' + - big-o ; extra == 'test' + - pytest-ignore-flaky ; extra == 'test' + - jaraco-test ; extra == 'test' + - sphinx>=3.5 ; extra == 'doc' + - jaraco-packaging>=9.3 ; extra == 'doc' + - rst-linker>=1.9 ; extra == 'doc' + - furo ; extra == 'doc' + - sphinx-lint ; extra == 'doc' + - jaraco-tidelift>=1.4 ; extra == 'doc' + - pytest-checkdocs>=2.4 ; extra == 'check' + - pytest-ruff>=0.2.1 ; sys_platform != 'cygwin' and extra == 'check' + - pytest-cov ; extra == 'cover' + - pytest-enabler>=2.2 ; extra == 'enabler' + - pytest-mypy ; extra == 'type' + requires_python: '>=3.9' +- conda: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb78ec9c_6.conda + sha256: 68f0206ca6e98fea941e5717cec780ed2873ffabc0e1ed34428c061e2c6268c7 + md5: 4a13eeac0b5c8e5b8ab496e6c4ddd829 + depends: + - __glibc >=2.17,<3.0.a0 + - libzlib >=1.3.1,<2.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 601375 + timestamp: 1764777111296 +- conda: https://conda.anaconda.org/conda-forge/osx-64/zstd-1.5.7-h3eecb57_6.conda + sha256: 47101a4055a70a4876ffc87b750ab2287b67eca793f21c8224be5e1ee6394d3f + md5: 727109b184d680772e3122f40136d5ca + depends: + - __osx >=10.13 + - libzlib >=1.3.1,<2.0a0 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 528148 + timestamp: 1764777156963 +- conda: https://conda.anaconda.org/conda-forge/osx-arm64/zstd-1.5.7-hbf9d68e_6.conda + sha256: 9485ba49e8f47d2b597dd399e88f4802e100851b27c21d7525625b0b4025a5d9 + md5: ab136e4c34e97f34fb621d2592a393d8 + depends: + - __osx >=11.0 + - libzlib >=1.3.1,<2.0a0 + license: BSD-3-Clause + 
license_family: BSD + purls: [] + size: 433413 + timestamp: 1764777166076 diff --git a/protos/feast/core/Aggregation.proto b/protos/feast/core/Aggregation.proto index d2d6cab7021..c1103b78769 100644 --- a/protos/feast/core/Aggregation.proto +++ b/protos/feast/core/Aggregation.proto @@ -12,4 +12,5 @@ message Aggregation { string function = 2; google.protobuf.Duration time_window = 3; google.protobuf.Duration slide_interval = 4; + string name = 5; } \ No newline at end of file diff --git a/protos/feast/core/FeatureView.proto b/protos/feast/core/FeatureView.proto index 6306d425be3..66dc4c3de6f 100644 --- a/protos/feast/core/FeatureView.proto +++ b/protos/feast/core/FeatureView.proto @@ -36,7 +36,7 @@ message FeatureView { FeatureViewMeta meta = 2; } -// Next available id: 17 +// Next available id: 18 // TODO(adchia): refactor common fields from this and ODFV into separate metadata proto message FeatureViewSpec { // Name of the feature view. Must be unique. Not updated. @@ -61,6 +61,7 @@ message FeatureViewSpec { google.protobuf.Duration ttl = 6; // Batch/Offline DataSource where this view can retrieve offline feature data. + // Optional: if not set, the feature view has no associated batch data source (e.g. purely derived views). DataSource batch_source = 7; // Whether these features should be served online or not @@ -68,6 +69,7 @@ message FeatureViewSpec { bool online = 8; // Streaming DataSource from where this view can consume "online" feature data. + // Optional: only required for streaming feature views. DataSource stream_source = 9; // Description of the feature view. 
@@ -89,6 +91,9 @@ message FeatureViewSpec { // The transformation mode (e.g., "python", "pandas", "spark", "sql", "ray") string mode = 16; + + // Whether schema validation is enabled during materialization + bool enable_validation = 17; } message FeatureViewMeta { diff --git a/protos/feast/core/StreamFeatureView.proto b/protos/feast/core/StreamFeatureView.proto index 6492cbe3069..5f9ee6ce39d 100644 --- a/protos/feast/core/StreamFeatureView.proto +++ b/protos/feast/core/StreamFeatureView.proto @@ -37,7 +37,7 @@ message StreamFeatureView { FeatureViewMeta meta = 2; } -// Next available id: 20 +// Next available id: 21 message StreamFeatureViewSpec { // Name of the feature view. Must be unique. Not updated. string name = 1; @@ -99,5 +99,8 @@ message StreamFeatureViewSpec { // Hop size for tiling (e.g., 5 minutes). Determines the granularity of pre-aggregated tiles. // If not specified, defaults to 5 minutes. Only used when enable_tiling is true. google.protobuf.Duration tiling_hop_size = 19; + + // Whether schema validation is enabled during materialization + bool enable_validation = 20; } diff --git a/protos/feast/types/Value.proto b/protos/feast/types/Value.proto index be93235ab36..ada2ba42791 100644 --- a/protos/feast/types/Value.proto +++ b/protos/feast/types/Value.proto @@ -53,6 +53,10 @@ message ValueType { FLOAT_SET = 27; BOOL_SET = 28; UNIX_TIMESTAMP_SET = 29; + JSON = 32; + JSON_LIST = 33; + STRUCT = 34; + STRUCT_LIST = 35; } } @@ -88,6 +92,10 @@ message Value { FloatSet float_set_val = 27; BoolSet bool_set_val = 28; Int64Set unix_timestamp_set_val = 29; + string json_val = 32; + StringList json_list_val = 33; + Map struct_val = 34; + MapList struct_list_val = 35; } } diff --git a/pyproject.toml b/pyproject.toml index 40125f430f5..19c21870a40 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,6 +43,7 @@ dependencies = [ "psutil", "bigtree>=0.19.2", "pyjwt", + "orjson>=3.9.0", ] [project.optional-dependencies] @@ -84,9 +85,6 @@ hbase = 
["happybase>=1.2.0,<3"] ibis = [ "ibis-framework>=10.0.0", ] -ikv = [ - "ikvpy>=0.0.36", -] k8s = ["kubernetes"] image = [ "feast[pytorch]", @@ -99,10 +97,14 @@ milvus = [ "milvus-lite==2.4.12", "feast[setuptools]" ] +mongodb = [ + "pymongo>=4.13.0,<5.0.0", + "dnspython>=2.0.0", +] mssql = ["ibis-framework[mssql]>=10.0.0"] mysql = ["pymysql", "types-PyMySQL"] -opentelemetry = ["prometheus_client", "psutil"] openlineage = ["openlineage-python>=1.40.0"] +opentelemetry = ["prometheus_client", "psutil"] spark = ["pyspark>=4.0.0"] trino = ["trino>=0.305.0,<0.400.0", "regex"] postgres = ["psycopg[binary,pool]==3.2.5"] @@ -132,10 +134,27 @@ mcp = ["fastapi_mcp"] dbt = ["dbt-artifacts-parser"] +test = [ + "pytest>=6.0.0,<8", + "pytest-xdist>=3.8.0", + "pytest-timeout==1.4.2", + "pytest-lazy-fixture==0.6.3", + "pytest-ordering~=0.6.0", + "pytest-mock==1.10.4", + "pytest-env", + "pytest-benchmark>=3.4.1,<4", + "pytest-asyncio<=0.24.0", + "py>=1.11.0", + "testcontainers==4.9.0", + "minio==7.2.11", + "python-keycloak==4.2.2", + "cryptography>=43.0,<44", +] + ci = [ + "feast[test, aws, azure, cassandra, clickhouse, couchbase, delta, docling, duckdb, elasticsearch, faiss, gcp, ge, go, grpcio, hazelcast, hbase, ibis, image, k8s, mcp, milvus, mssql, mysql, openlineage, opentelemetry, spark, trino, postgres, pytorch, qdrant, rag, ray, redis, singlestore, snowflake, sqlite_vec]", "build", "virtualenv==20.23.0", - "cryptography>=43.0,<44", "dbt-artifacts-parser", "ruff>=0.8.0", "mypy-protobuf>=3.1", @@ -143,27 +162,14 @@ ci = [ "grpcio-testing>=1.56.2,<=1.62.3", # FastAPI does not correctly pull starlette dependency on httpx see thread(https://github.com/tiangolo/fastapi/issues/5656). 
"httpx==0.27.2", - "minio==7.2.11", "mock==2.0.0", "moto<5", "mypy>=1.4.1,<1.11.3", "urllib3>=2.6.3,<3", "psutil==5.9.0", - "py>=1.11.0", # https://github.com/pytest-dev/pytest/issues/10420 - "pytest>=6.0.0,<8", - "pytest-asyncio<=0.24.0", "pytest-cov", - "pytest-xdist", - "pytest-benchmark>=3.4.1,<4", - "pytest-lazy-fixture==0.6.3", - "pytest-timeout==1.4.2", - "pytest-ordering~=0.6.0", - "pytest-mock==1.10.4", - "pytest-env", "Sphinx>4.0.0,<7", "sqlglot[rs]>=23.4", - "testcontainers==4.9.0", - "python-keycloak==4.2.2", "pre-commit<3.3.2", "assertpy==1.1", "pip-tools", @@ -177,8 +183,6 @@ ci = [ "types-requests<2.31.0", "types-setuptools", "types-tabulate", - "virtualenv<20.24.2", - "feast[aws, azure, cassandra, clickhouse, couchbase, delta, docling, duckdb, elasticsearch, faiss, gcp, ge, go, grpcio, hazelcast, hbase, ibis, ikv, image, k8s, mcp, milvus, mssql, mysql, openlineage, opentelemetry, spark, trino, postgres, pytorch, qdrant, rag, ray, redis, singlestore, snowflake, sqlite_vec]" ] nlp = ["feast[docling, image, milvus, pytorch, rag]"] dev = ["feast[ci]"] @@ -249,14 +253,14 @@ include = ["*.py", "*.pyi"] [tool.ruff.format] # exclude a few common directories in the root of the project -exclude = [ - ".eggs", - ".git", - ".hg", +exclude = [ + ".eggs", + ".git", + ".hg", ".mypy_cache", - ".tox", - ".venv", - "_build", + ".tox", + ".venv", + "_build", "buck-out", "build", "dist", @@ -269,3 +273,38 @@ exclude = [ dev = [ "pytest-xdist>=3.8.0", ] + +# Pixi configuration +[tool.pixi.workspace] +channels = ["conda-forge"] +platforms = ["linux-64", "osx-arm64", "osx-64"] + +[tool.pixi.feature.py310.dependencies] +python = "~=3.10.0" + +[tool.pixi.feature.duckdb-tests.pypi-dependencies] +feast = { path = ".", editable = true, extras = ["duckdb", "delta", "grpcio", "test"] } + +[tool.pixi.feature.duckdb-tests.tasks] +test = { cmd = "python -m pytest -n 8 --integration -m 'not ray_offline_stores_only' 
--ignore=sdk/python/tests/integration/offline_store/test_dqm_validation.py sdk/python/tests/integration/offline_store", env = { PYTHONPATH = ".", FULL_REPO_CONFIGS_MODULE = "sdk.python.tests.universal.feature_repos.duckdb_repo_configuration", FEAST_IS_LOCAL_TEST = "True" } } + +[tool.pixi.feature.ray-tests.pypi-dependencies] +feast = { path = ".", editable = true, extras = ["ray", "grpcio", "test"] } + +[tool.pixi.feature.ray-tests.tasks] +test-offline = { cmd = "python -m pytest -v --integration --ignore=sdk/python/tests/integration/offline_store/test_dqm_validation.py sdk/python/tests/integration/offline_store", env = { PYTHONPATH = ".", FULL_REPO_CONFIGS_MODULE = "sdk.python.tests.universal.feature_repos.ray_repo_configuration", FEAST_IS_LOCAL_TEST = "True" } } +test-compute = { cmd = "python -m pytest -v --integration sdk/python/tests/component/ray", env = { PYTHONPATH = ".", FEAST_IS_LOCAL_TEST = "True" } } +test = { depends-on = ["test-offline", "test-compute"] } + +[tool.pixi.feature.registration-tests.pypi-dependencies] +feast = { path = ".", editable = true, extras = ["aws", "gcp", "grpcio", "postgres", "mysql", "redis", "snowflake", "spark", "test"] } +grpcio-testing = ">=1.56.2,<=1.62.3" + +[tool.pixi.feature.registration-tests.tasks] +test = { cmd = "python -m pytest -n auto --dist loadgroup --integration sdk/python/tests/integration/registration", env = { PYTHONPATH = ".", FEAST_IS_LOCAL_TEST = "True" } } +test-ci = { cmd = "python -m pytest -n auto --dist loadgroup --integration sdk/python/tests/integration/registration", env = { PYTHONPATH = "." 
} } + +[tool.pixi.environments] +duckdb-tests = ["py310", "duckdb-tests"] +ray-tests = ["py310", "ray-tests"] +registration-tests = ["py310", "registration-tests"] diff --git a/sdk/python/docs/source/feast.infra.online_stores.contrib.ikv_online_store.rst b/sdk/python/docs/source/feast.infra.online_stores.contrib.ikv_online_store.rst deleted file mode 100644 index 812e30932d5..00000000000 --- a/sdk/python/docs/source/feast.infra.online_stores.contrib.ikv_online_store.rst +++ /dev/null @@ -1,21 +0,0 @@ -feast.infra.online\_stores.contrib.ikv\_online\_store package -============================================================= - -Submodules ----------- - -feast.infra.online\_stores.contrib.ikv\_online\_store.ikv module ----------------------------------------------------------------- - -.. automodule:: feast.infra.online_stores.ikv_online_store.ikv - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. automodule:: feast.infra.online_stores.ikv_online_store - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.online_stores.contrib.rst b/sdk/python/docs/source/feast.infra.online_stores.contrib.rst index d77e7d175d5..92b4b182d32 100644 --- a/sdk/python/docs/source/feast.infra.online_stores.contrib.rst +++ b/sdk/python/docs/source/feast.infra.online_stores.contrib.rst @@ -11,7 +11,6 @@ Subpackages feast.infra.online_stores.couchbase_online_store feast.infra.online_stores.hazelcast_online_store feast.infra.online_stores.hbase_online_store - feast.infra.online_stores.ikv_online_store feast.infra.online_stores.mysql_online_store Submodules diff --git a/sdk/python/docs/source/feast.infra.online_stores.ikv_online_store.rst b/sdk/python/docs/source/feast.infra.online_stores.ikv_online_store.rst deleted file mode 100644 index 391af17024f..00000000000 --- a/sdk/python/docs/source/feast.infra.online_stores.ikv_online_store.rst +++ /dev/null @@ -1,21 +0,0 @@ 
-feast.infra.online\_stores.ikv\_online\_store package -===================================================== - -Submodules ----------- - -feast.infra.online\_stores.ikv\_online\_store.ikv module --------------------------------------------------------- - -.. automodule:: feast.infra.online_stores.ikv_online_store.ikv - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. automodule:: feast.infra.online_stores.ikv_online_store - :members: - :undoc-members: - :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.online_stores.mongodb_online_store.rst b/sdk/python/docs/source/feast.infra.online_stores.mongodb_online_store.rst new file mode 100644 index 00000000000..b221a370d31 --- /dev/null +++ b/sdk/python/docs/source/feast.infra.online_stores.mongodb_online_store.rst @@ -0,0 +1,29 @@ +feast.infra.online\_stores.mongodb\_online\_store package +================================================================= + +Submodules +---------- + +feast.infra.online\_stores.mongodb\_online\_store.mongodb module +------------------------------------------------------------------------ + +.. automodule:: feast.infra.online_stores.mongodb_online_store.mongodb + :members: + :undoc-members: + :show-inheritance: + +feast.infra.online\_stores.mongodb\_online\_store.mongodb\_repo\_configuration module +--------------------------------------------------------------------------------------------- + +.. automodule:: feast.infra.online_stores.mongodb_online_store.mongodb_repo_configuration + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: feast.infra.online_stores.mongodb_online_store + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.online_stores.rst b/sdk/python/docs/source/feast.infra.online_stores.rst index c07c7e0c279..91c3c6c90fa 100644 --- a/sdk/python/docs/source/feast.infra.online_stores.rst +++ b/sdk/python/docs/source/feast.infra.online_stores.rst @@ -12,8 +12,8 @@ Subpackages feast.infra.online_stores.elasticsearch_online_store feast.infra.online_stores.hazelcast_online_store feast.infra.online_stores.hbase_online_store - feast.infra.online_stores.ikv_online_store feast.infra.online_stores.milvus_online_store + feast.infra.online_stores.mongodb_online_store feast.infra.online_stores.mysql_online_store feast.infra.online_stores.postgres_online_store feast.infra.online_stores.qdrant_online_store diff --git a/sdk/python/feast/aggregation/__init__.py b/sdk/python/feast/aggregation/__init__.py index 470f91f09c2..8edb51bb76b 100644 --- a/sdk/python/feast/aggregation/__init__.py +++ b/sdk/python/feast/aggregation/__init__.py @@ -21,12 +21,14 @@ class Aggregation: function: str # Provided built in aggregations sum, max, min, count mean time_window: timedelta # The time window for this aggregation. 
slide_interval: timedelta # The sliding window for these aggregations + name: str # Optional override for the output feature name (defaults to {function}_{column}) """ column: str function: str time_window: Optional[timedelta] slide_interval: Optional[timedelta] + name: str def __init__( self, @@ -34,6 +36,7 @@ def __init__( function: Optional[str] = "", time_window: Optional[timedelta] = None, slide_interval: Optional[timedelta] = None, + name: Optional[str] = None, ): self.column = column or "" self.function = function or "" @@ -42,6 +45,7 @@ def __init__( self.slide_interval = self.time_window else: self.slide_interval = slide_interval + self.name = name or "" def to_proto(self) -> AggregationProto: window_duration = None @@ -59,6 +63,7 @@ def to_proto(self) -> AggregationProto: function=self.function, time_window=window_duration, slide_interval=slide_interval_duration, + name=self.name, ) @classmethod @@ -79,6 +84,7 @@ def from_proto(cls, agg_proto: AggregationProto): function=agg_proto.function, time_window=time_window, slide_interval=slide_interval, + name=agg_proto.name or None, ) return aggregation @@ -91,11 +97,26 @@ def __eq__(self, other): or self.function != other.function or self.time_window != other.time_window or self.slide_interval != other.slide_interval + or self.name != other.name ): return False return True + def resolved_name(self, time_window: Optional[timedelta] = None) -> str: + """Return the output feature name for this aggregation. + + If ``name`` is set it is returned as-is. Otherwise the name is + derived as ``{function}_{column}``, with ``_{seconds}s`` appended + when *time_window* is provided. 
+ """ + if self.name: + return self.name + base = f"{self.function}_{self.column}" + if time_window is not None and time_window.total_seconds() > 0: + return f"{base}_{int(time_window.total_seconds())}s" + return base + def aggregation_specs_to_agg_ops( agg_specs: Iterable[Any], @@ -106,7 +127,7 @@ def aggregation_specs_to_agg_ops( for agg in agg_specs: if getattr(agg, "time_window", None) is not None: raise ValueError(time_window_unsupported_error_message) - alias = f"{agg.function}_{agg.column}" + alias = getattr(agg, "name", None) or f"{agg.function}_{agg.column}" agg_ops[alias] = (agg.function, agg.column) return agg_ops diff --git a/sdk/python/feast/aggregation/tiling/orchestrator.py b/sdk/python/feast/aggregation/tiling/orchestrator.py index 16e047f465c..fbec5568799 100644 --- a/sdk/python/feast/aggregation/tiling/orchestrator.py +++ b/sdk/python/feast/aggregation/tiling/orchestrator.py @@ -61,9 +61,7 @@ def apply_sawtooth_window_tiling( ir_metadata_dict = {} for agg in aggregations: - feature_name = ( - f"{agg.function}_{agg.column}_{int(window_size.total_seconds())}s" - ) + feature_name = agg.resolved_name(window_size) _, metadata = get_ir_metadata_for_aggregation(agg, feature_name) ir_metadata_dict[feature_name] = metadata @@ -161,9 +159,7 @@ def apply_sawtooth_window_tiling( # Step 5: Compute final feature values from IRs (for algebraic aggs, just rename) for agg in aggregations: - feature_name = ( - f"{agg.function}_{agg.column}_{int(window_size.total_seconds())}s" - ) + feature_name = agg.resolved_name(window_size) metadata = ir_metadata_dict[feature_name] if metadata.type == "algebraic": diff --git a/sdk/python/feast/aggregation/tiling/tile_subtraction.py b/sdk/python/feast/aggregation/tiling/tile_subtraction.py index 9dee478effa..778e05ecfb2 100644 --- a/sdk/python/feast/aggregation/tiling/tile_subtraction.py +++ b/sdk/python/feast/aggregation/tiling/tile_subtraction.py @@ -76,9 +76,7 @@ def convert_cumulative_to_windowed( # Subtract previous tile 
values from current tile for each aggregation for agg in aggregations: - feature_name = ( - f"{agg.function}_{agg.column}_{int(window_size.total_seconds())}s" - ) + feature_name = agg.resolved_name(window_size) _, metadata = get_ir_metadata_for_aggregation(agg, feature_name) if metadata.type == "algebraic": diff --git a/sdk/python/feast/base_feature_view.py b/sdk/python/feast/base_feature_view.py index d7dc2237bd3..478058c89b3 100644 --- a/sdk/python/feast/base_feature_view.py +++ b/sdk/python/feast/base_feature_view.py @@ -93,8 +93,7 @@ def __init__( self.created_timestamp = None self.last_updated_timestamp = None - if source: - self.source = source + self.source = source @property @abstractmethod diff --git a/sdk/python/feast/batch_feature_view.py b/sdk/python/feast/batch_feature_view.py index 3f3e1bf20ec..925d70e58ab 100644 --- a/sdk/python/feast/batch_feature_view.py +++ b/sdk/python/feast/batch_feature_view.py @@ -82,7 +82,9 @@ def __init__( *, name: str, mode: Union[TransformationMode, str] = TransformationMode.PYTHON, - source: Union[DataSource, "BatchFeatureView", List["BatchFeatureView"]], + source: Optional[ + Union[DataSource, "BatchFeatureView", List["BatchFeatureView"]] + ] = None, sink_source: Optional[DataSource] = None, entities: Optional[List[Entity]] = None, ttl: Optional[timedelta] = None, @@ -97,6 +99,7 @@ def __init__( feature_transformation: Optional[Transformation] = None, batch_engine: Optional[Dict[str, Any]] = None, aggregations: Optional[List[Aggregation]] = None, + enable_validation: bool = False, ): if not flags_helper.is_test(): warnings.warn( @@ -114,6 +117,21 @@ def __init__( f"or CUSTOM_SOURCE, got {type(source).__name__}: {source.name} instead " ) + if source is None and aggregations: + raise ValueError( + "BatchFeatureView with aggregations requires a source to aggregate from." 
+ ) + + if ( + source is None + and not udf + and not feature_transformation + and not aggregations + ): + raise ValueError( + "BatchFeatureView requires at least one of: source, udf, feature_transformation, or aggregations." + ) + self.mode = mode self.udf = udf self.udf_string = udf_string @@ -136,6 +154,7 @@ def __init__( source=source, # type: ignore[arg-type] sink_source=sink_source, mode=mode, + enable_validation=enable_validation, ) def get_feature_transformation(self) -> Optional[Transformation]: @@ -169,6 +188,7 @@ def batch_feature_view( description: str = "", owner: str = "", schema: Optional[List[Field]] = None, + enable_validation: bool = False, ): """ Creates a BatchFeatureView object with the given user-defined function (UDF) as the transformation. @@ -199,6 +219,7 @@ def decorator(user_function): schema=schema, udf=user_function, udf_string=udf_string, + enable_validation=enable_validation, ) functools.update_wrapper(wrapper=batch_feature_view_obj, wrapped=user_function) return batch_feature_view_obj diff --git a/sdk/python/feast/cli/cli.py b/sdk/python/feast/cli/cli.py index 4eff2707af9..af746c8f3ef 100644 --- a/sdk/python/feast/cli/cli.py +++ b/sdk/python/feast/cli/cli.py @@ -442,7 +442,6 @@ def materialize_incremental_command(ctx: click.Context, end_ts: str, views: List "hbase", "cassandra", "hazelcast", - "ikv", "couchbase", "milvus", "ray", diff --git a/sdk/python/feast/driver_test_data.py b/sdk/python/feast/driver_test_data.py index d96c9c6d387..39b7faf22c2 100644 --- a/sdk/python/feast/driver_test_data.py +++ b/sdk/python/feast/driver_test_data.py @@ -136,10 +136,38 @@ def create_driver_hourly_stats_df(drivers, start_date, end_date) -> pd.DataFrame df_all_drivers["conv_rate"] = np.random.random(size=rows).astype(np.float32) df_all_drivers["acc_rate"] = np.random.random(size=rows).astype(np.float32) df_all_drivers["avg_daily_trips"] = np.random.randint(0, 1000, size=rows).astype( - np.int32 + np.int64 ) df_all_drivers["created"] = 
pd.to_datetime(pd.Timestamp.now(tz=None).round("ms")) + # Complex type columns for Map, Json, and Struct examples + import json as _json + + df_all_drivers["driver_metadata"] = [ + { + "vehicle_type": np.random.choice(["sedan", "suv", "truck"]), + "rating": str(round(np.random.uniform(3.0, 5.0), 1)), + } + for _ in range(len(df_all_drivers)) + ] + df_all_drivers["driver_config"] = [ + _json.dumps( + { + "max_distance_km": int(np.random.randint(10, 200)), + "preferred_zones": list( + np.random.choice( + ["north", "south", "east", "west"], size=2, replace=False + ) + ), + } + ) + for _ in range(len(df_all_drivers)) + ] + df_all_drivers["driver_profile"] = [ + {"name": f"driver_{driver_id}", "age": str(int(np.random.randint(25, 60)))} + for driver_id in df_all_drivers["driver_id"] + ] + # Create duplicate rows that should be filtered by created timestamp # TODO: These duplicate rows area indirectly being filtered out by the point in time join already. We need to # inject a bad row at a timestamp where we know it will get joined to the entity dataframe, and then test that diff --git a/sdk/python/feast/errors.py b/sdk/python/feast/errors.py index 8c72422f44e..53895344d3b 100644 --- a/sdk/python/feast/errors.py +++ b/sdk/python/feast/errors.py @@ -411,10 +411,32 @@ def __init__(self, entity_type: type): class ConflictingFeatureViewNames(FeastError): # TODO: print file location of conflicting feature views - def __init__(self, feature_view_name: str): - super().__init__( - f"The feature view name: {feature_view_name} refers to feature views of different types." - ) + def __init__( + self, + feature_view_name: str, + existing_type: Optional[str] = None, + new_type: Optional[str] = None, + ): + if existing_type and new_type: + if existing_type == new_type: + # Same-type duplicate + super().__init__( + f"Multiple {existing_type}s with name '{feature_view_name}' found. " + f"Feature view names must be case-insensitively unique. 
" + f"It may be necessary to ignore certain files in your feature " + f"repository by using a .feastignore file." + ) + else: + # Cross-type conflict + super().__init__( + f"Feature view name '{feature_view_name}' is already used by a {existing_type}. " + f"Cannot register a {new_type} with the same name. " + f"Feature view names must be unique across FeatureView, StreamFeatureView, and OnDemandFeatureView." + ) + else: + super().__init__( + f"The feature view name: {feature_view_name} refers to feature views of different types." + ) class FeastInvalidInfraObjectType(FeastError): diff --git a/sdk/python/feast/feature_server.py b/sdk/python/feast/feature_server.py index 4a7b0f7318e..c0ba3051df0 100644 --- a/sdk/python/feast/feature_server.py +++ b/sdk/python/feast/feature_server.py @@ -26,7 +26,6 @@ from typing import Any, DefaultDict, Dict, List, NamedTuple, Optional, Set, Union import pandas as pd -import psutil from dateutil import parser from fastapi import ( Depends, @@ -39,13 +38,13 @@ ) from fastapi.concurrency import run_in_threadpool from fastapi.logger import logger -from fastapi.responses import JSONResponse +from fastapi.responses import JSONResponse, ORJSONResponse from fastapi.staticfiles import StaticFiles from google.protobuf.json_format import MessageToDict -from prometheus_client import Gauge, start_http_server from pydantic import BaseModel import feast +from feast import metrics as feast_metrics from feast import proto_json, utils from feast.constants import DEFAULT_FEATURE_SERVER_REGISTRY_TTL from feast.data_source import PushMode @@ -64,14 +63,6 @@ str_to_auth_manager_type, ) -# Define prometheus metrics -cpu_usage_gauge = Gauge( - "feast_feature_server_cpu_usage", "CPU usage of the Feast feature server" -) -memory_usage_gauge = Gauge( - "feast_feature_server_memory_usage", "Memory usage of the Feast feature server" -) - # TODO: deprecate this in favor of push features class WriteToFeatureStoreRequest(BaseModel): @@ -129,6 +120,30 @@ class 
ChatRequest(BaseModel): messages: List[ChatMessage] +def _resolve_feature_counts( + features: Union[List[str], "feast.FeatureService"], +) -> tuple: + """Return (feature_count, feature_view_count) from the resolved features. + + ``features`` is either a list of ``"feature_view:feature"`` strings or + a ``FeatureService`` with ``feature_view_projections``. + """ + from feast.feature_service import FeatureService + + if isinstance(features, FeatureService): + projections = features.feature_view_projections + fv_count = len(projections) + feat_count = sum(len(p.features) for p in projections) + elif isinstance(features, list): + feat_count = len(features) + fv_names = {ref.split(":")[0] for ref in features if ":" in ref} + fv_count = len(fv_names) + else: + feat_count = 0 + fv_count = 0 + return str(feat_count), str(fv_count) + + async def _get_features( request: Union[GetOnlineFeaturesRequest, GetOnlineDocumentsRequest], store: "feast.FeatureStore", @@ -324,31 +339,38 @@ async def lifespan(app: FastAPI): "/get-online-features", dependencies=[Depends(inject_user_details)], ) - async def get_online_features(request: GetOnlineFeaturesRequest) -> Dict[str, Any]: - # Initialize parameters for FeatureStore.get_online_features(...) 
call - features = await _get_features(request, store) - - read_params = dict( - features=features, - entity_rows=request.entities, - full_feature_names=request.full_feature_names, - ) - - if store._get_provider().async_supported.online.read: - response = await store.get_online_features_async(**read_params) # type: ignore - else: - response = await run_in_threadpool( - lambda: store.get_online_features(**read_params) # type: ignore + async def get_online_features(request: GetOnlineFeaturesRequest) -> ORJSONResponse: + with feast_metrics.track_request_latency( + "/get-online-features", + ) as metrics_ctx: + features = await _get_features(request, store) + feat_count, fv_count = _resolve_feature_counts(features) + metrics_ctx.feature_count = feat_count + metrics_ctx.feature_view_count = fv_count + + entity_count = len(next(iter(request.entities.values()), [])) + feast_metrics.track_online_features_entities(entity_count) + + read_params = dict( + features=features, + entity_rows=request.entities, + full_feature_names=request.full_feature_names, ) - # Convert the Protobuf object to JSON and return it - response_dict = await run_in_threadpool( - MessageToDict, - response.proto, - preserving_proto_field_name=True, - float_precision=18, - ) - return response_dict + if store._get_provider().async_supported.online.read: + response = await store.get_online_features_async(**read_params) # type: ignore + else: + response = await run_in_threadpool( + lambda: store.get_online_features(**read_params) # type: ignore + ) + + response_dict = await run_in_threadpool( + MessageToDict, + response.proto, + preserving_proto_field_name=True, + float_precision=18, + ) + return ORJSONResponse(content=response_dict) @app.post( "/retrieve-online-documents", @@ -356,128 +378,133 @@ async def get_online_features(request: GetOnlineFeaturesRequest) -> Dict[str, An ) async def retrieve_online_documents( request: GetOnlineDocumentsRequest, - ) -> Dict[str, Any]: - logger.warning( - "This endpoint is 
in alpha and will be moved to /get-online-features when stable." - ) - # Initialize parameters for FeatureStore.retrieve_online_documents_v2(...) call - features = await _get_features(request, store) - - read_params = dict(features=features, query=request.query, top_k=request.top_k) - if request.api_version == 2 and request.query_string is not None: - read_params["query_string"] = request.query_string - - if request.api_version == 2: - response = await run_in_threadpool( - lambda: store.retrieve_online_documents_v2(**read_params) # type: ignore + ) -> ORJSONResponse: + with feast_metrics.track_request_latency("/retrieve-online-documents"): + logger.warning( + "This endpoint is in alpha and will be moved to /get-online-features when stable." ) - else: - response = await run_in_threadpool( - lambda: store.retrieve_online_documents(**read_params) # type: ignore + features = await _get_features(request, store) + + read_params = dict( + features=features, query=request.query, top_k=request.top_k ) + if request.api_version == 2 and request.query_string is not None: + read_params["query_string"] = request.query_string - # Convert the Protobuf object to JSON and return it - response_dict = await run_in_threadpool( - MessageToDict, - response.proto, - preserving_proto_field_name=True, - float_precision=18, - ) - return response_dict + if request.api_version == 2: + response = await run_in_threadpool( + lambda: store.retrieve_online_documents_v2(**read_params) # type: ignore + ) + else: + response = await run_in_threadpool( + lambda: store.retrieve_online_documents(**read_params) # type: ignore + ) + + response_dict = await run_in_threadpool( + MessageToDict, + response.proto, + preserving_proto_field_name=True, + float_precision=18, + ) + return ORJSONResponse(content=response_dict) @app.post("/push", dependencies=[Depends(inject_user_details)]) async def push(request: PushFeaturesRequest) -> Response: - df = pd.DataFrame(request.df) - actions = [] - if request.to == 
"offline": - to = PushMode.OFFLINE - actions = [AuthzedAction.WRITE_OFFLINE] - elif request.to == "online": - to = PushMode.ONLINE - actions = [AuthzedAction.WRITE_ONLINE] - elif request.to == "online_and_offline": - to = PushMode.ONLINE_AND_OFFLINE - actions = WRITE - else: - raise ValueError( - f"{request.to} is not a supported push format. Please specify one of these ['online', 'offline', 'online_and_offline']." - ) + with feast_metrics.track_request_latency("/push"): + df = pd.DataFrame(request.df) + actions = [] + if request.to == "offline": + to = PushMode.OFFLINE + actions = [AuthzedAction.WRITE_OFFLINE] + elif request.to == "online": + to = PushMode.ONLINE + actions = [AuthzedAction.WRITE_ONLINE] + elif request.to == "online_and_offline": + to = PushMode.ONLINE_AND_OFFLINE + actions = WRITE + else: + raise ValueError( + f"{request.to} is not a supported push format. Please specify one of these ['online', 'offline', 'online_and_offline']." + ) - from feast.data_source import PushSource - - all_fvs = store.list_feature_views( - allow_cache=request.allow_registry_cache - ) + store.list_stream_feature_views(allow_cache=request.allow_registry_cache) - fvs_with_push_sources = { - fv - for fv in all_fvs - if ( - fv.stream_source is not None - and isinstance(fv.stream_source, PushSource) - and fv.stream_source.name == request.push_source_name - ) - } + from feast.data_source import PushSource - for feature_view in fvs_with_push_sources: - assert_permissions(resource=feature_view, actions=actions) - - async def _push_with_to(push_to: PushMode) -> None: - """ - Helper for performing a single push operation. - - NOTE: - - Feast providers **do not currently support async offline writes**. - - Therefore: - * ONLINE and ONLINE_AND_OFFLINE → may be async, depending on provider.async_supported.online.write - * OFFLINE → always synchronous, but executed via run_in_threadpool when called from HTTP handlers. 
- - The OfflineWriteBatcher handles offline writes directly in its own background thread, but the offline store writes are currently synchronous only. - """ - push_source_name = request.push_source_name - allow_registry_cache = request.allow_registry_cache - transform_on_write = request.transform_on_write - - # Async currently only applies to online store writes (ONLINE / ONLINE_AND_OFFLINE paths) as theres no async for offline store - if push_to in (PushMode.ONLINE, PushMode.ONLINE_AND_OFFLINE) and ( - store._get_provider().async_supported.online.write - ): - await store.push_async( - push_source_name=push_source_name, - df=df, - allow_registry_cache=allow_registry_cache, - to=push_to, - transform_on_write=transform_on_write, + all_fvs = store.list_feature_views( + allow_cache=request.allow_registry_cache + ) + store.list_stream_feature_views( + allow_cache=request.allow_registry_cache + ) + fvs_with_push_sources = { + fv + for fv in all_fvs + if ( + fv.stream_source is not None + and isinstance(fv.stream_source, PushSource) + and fv.stream_source.name == request.push_source_name ) - else: - await run_in_threadpool( - lambda: store.push( + } + + for feature_view in fvs_with_push_sources: + assert_permissions(resource=feature_view, actions=actions) + + async def _push_with_to(push_to: PushMode) -> None: + """ + Helper for performing a single push operation. + + NOTE: + - Feast providers **do not currently support async offline writes**. + - Therefore: + * ONLINE and ONLINE_AND_OFFLINE → may be async, depending on provider.async_supported.online.write + * OFFLINE → always synchronous, but executed via run_in_threadpool when called from HTTP handlers. + - The OfflineWriteBatcher handles offline writes directly in its own background thread, but the offline store writes are currently synchronous only. 
+ """ + push_source_name = request.push_source_name + allow_registry_cache = request.allow_registry_cache + transform_on_write = request.transform_on_write + + # Async currently only applies to online store writes (ONLINE / ONLINE_AND_OFFLINE paths) as theres no async for offline store + if push_to in (PushMode.ONLINE, PushMode.ONLINE_AND_OFFLINE) and ( + store._get_provider().async_supported.online.write + ): + await store.push_async( push_source_name=push_source_name, df=df, allow_registry_cache=allow_registry_cache, to=push_to, transform_on_write=transform_on_write, ) - ) + else: + await run_in_threadpool( + lambda: store.push( + push_source_name=push_source_name, + df=df, + allow_registry_cache=allow_registry_cache, + to=push_to, + transform_on_write=transform_on_write, + ) + ) - needs_online = to in (PushMode.ONLINE, PushMode.ONLINE_AND_OFFLINE) - needs_offline = to in (PushMode.OFFLINE, PushMode.ONLINE_AND_OFFLINE) + needs_online = to in (PushMode.ONLINE, PushMode.ONLINE_AND_OFFLINE) + needs_offline = to in (PushMode.OFFLINE, PushMode.ONLINE_AND_OFFLINE) - status_code = status.HTTP_200_OK + status_code = status.HTTP_200_OK - if offline_batcher is None or not needs_offline: - await _push_with_to(to) - else: - if needs_online: - await _push_with_to(PushMode.ONLINE) - - offline_batcher.enqueue( - push_source_name=request.push_source_name, - df=df, - allow_registry_cache=request.allow_registry_cache, - transform_on_write=request.transform_on_write, - ) - status_code = status.HTTP_202_ACCEPTED + if offline_batcher is None or not needs_offline: + await _push_with_to(to) + else: + if needs_online: + await _push_with_to(PushMode.ONLINE) + + offline_batcher.enqueue( + push_source_name=request.push_source_name, + df=df, + allow_registry_cache=request.allow_registry_cache, + transform_on_write=request.transform_on_write, + ) + status_code = status.HTTP_202_ACCEPTED - return Response(status_code=status_code) + feast_metrics.track_push(request.push_source_name, 
request.to) + return Response(status_code=status_code) async def _get_feast_object( feature_view_name: str, allow_registry_cache: bool @@ -529,51 +556,50 @@ async def chat_ui(): @app.post("/materialize", dependencies=[Depends(inject_user_details)]) async def materialize(request: MaterializeRequest) -> None: - for feature_view in request.feature_views or []: - resource = await _get_feast_object(feature_view, True) - assert_permissions( - resource=resource, - actions=[AuthzedAction.WRITE_ONLINE], - ) - - if request.disable_event_timestamp: - # Query all available data and use current datetime as event timestamp - now = datetime.now() - start_date = datetime( - 1970, 1, 1 - ) # Beginning of time to capture all historical data - end_date = now - else: - if not request.start_ts or not request.end_ts: - raise ValueError( - "start_ts and end_ts are required when disable_event_timestamp is False" + with feast_metrics.track_request_latency("/materialize"): + for feature_view in request.feature_views or []: + resource = await _get_feast_object(feature_view, True) + assert_permissions( + resource=resource, + actions=[AuthzedAction.WRITE_ONLINE], ) - start_date = utils.make_tzaware(parser.parse(request.start_ts)) - end_date = utils.make_tzaware(parser.parse(request.end_ts)) - await run_in_threadpool( - store.materialize, - start_date, - end_date, - request.feature_views, - disable_event_timestamp=request.disable_event_timestamp, - full_feature_names=request.full_feature_names, - ) + if request.disable_event_timestamp: + now = datetime.now() + start_date = datetime(1970, 1, 1) + end_date = now + else: + if not request.start_ts or not request.end_ts: + raise ValueError( + "start_ts and end_ts are required when disable_event_timestamp is False" + ) + start_date = utils.make_tzaware(parser.parse(request.start_ts)) + end_date = utils.make_tzaware(parser.parse(request.end_ts)) + + await run_in_threadpool( + store.materialize, + start_date, + end_date, + request.feature_views, + 
disable_event_timestamp=request.disable_event_timestamp, + full_feature_names=request.full_feature_names, + ) @app.post("/materialize-incremental", dependencies=[Depends(inject_user_details)]) async def materialize_incremental(request: MaterializeIncrementalRequest) -> None: - for feature_view in request.feature_views or []: - resource = await _get_feast_object(feature_view, True) - assert_permissions( - resource=resource, - actions=[AuthzedAction.WRITE_ONLINE], + with feast_metrics.track_request_latency("/materialize-incremental"): + for feature_view in request.feature_views or []: + resource = await _get_feast_object(feature_view, True) + assert_permissions( + resource=resource, + actions=[AuthzedAction.WRITE_ONLINE], + ) + await run_in_threadpool( + store.materialize_incremental, + utils.make_tzaware(parser.parse(request.end_ts)), + request.feature_views, + full_feature_names=request.full_feature_names, ) - await run_in_threadpool( - store.materialize_incremental, - utils.make_tzaware(parser.parse(request.end_ts)), - request.feature_views, - full_feature_names=request.full_feature_names, - ) @app.exception_handler(Exception) async def rest_exception_handler(request: Request, exc: Exception): @@ -695,12 +721,15 @@ def _add_mcp_support_if_enabled(app, store: "feast.FeatureStore"): import gunicorn.app.base class FeastServeApplication(gunicorn.app.base.BaseApplication): - def __init__(self, store: "feast.FeatureStore", **options): + def __init__( + self, store: "feast.FeatureStore", metrics_enabled: bool = False, **options + ): self._app = get_app( store=store, registry_ttl_sec=options["registry_ttl_sec"], ) self._options = options + self._metrics_enabled = metrics_enabled super().__init__() def load_config(self): @@ -709,25 +738,20 @@ def load_config(self): self.cfg.set(key.lower(), value) self.cfg.set("worker_class", "uvicorn_worker.UvicornWorker") + if self._metrics_enabled: + self.cfg.set("post_worker_init", _gunicorn_post_worker_init) + 
self.cfg.set("child_exit", _gunicorn_child_exit) def load(self): return self._app + def _gunicorn_post_worker_init(worker): + """Start per-worker resource monitoring after Gunicorn forks.""" + feast_metrics.init_worker_monitoring() -def monitor_resources(self, interval: int = 5): - """Function to monitor and update CPU and memory usage metrics.""" - logger.debug(f"Starting resource monitoring with interval {interval} seconds") - p = psutil.Process() - logger.debug(f"PID is {p.pid}") - while True: - with p.oneshot(): - cpu_usage = p.cpu_percent() - memory_usage = p.memory_percent() - logger.debug(f"CPU usage: {cpu_usage}%, Memory usage: {memory_usage}%") - logger.debug(f"CPU usage: {cpu_usage}%, Memory usage: {memory_usage}%") - cpu_usage_gauge.set(cpu_usage) - memory_usage_gauge.set(memory_usage) - time.sleep(interval) + def _gunicorn_child_exit(server, worker): + """Clean up Prometheus metric files for a dead worker.""" + feast_metrics.mark_process_dead(worker.pid) def start_server( @@ -749,15 +773,19 @@ def start_server( raise ValueError( "Both key and cert file paths are required to start server in TLS mode." 
) - if metrics: - logger.info("Starting Prometheus Server") - start_http_server(8000) - logger.debug("Starting background thread to monitor CPU and memory usage") - monitoring_thread = threading.Thread( - target=monitor_resources, args=(5,), daemon=True + fs_cfg = getattr(store.config, "feature_server", None) + metrics_cfg = getattr(fs_cfg, "metrics", None) + metrics_from_config = getattr(metrics_cfg, "enabled", False) + metrics_active = metrics or metrics_from_config + uses_gunicorn = sys.platform != "win32" + if metrics_active: + flags = feast_metrics.build_metrics_flags(metrics_cfg) + feast_metrics.start_metrics_server( + store, + metrics_config=flags, + start_resource_monitoring=not uses_gunicorn, ) - monitoring_thread.start() logger.debug("start_server called") auth_type = str_to_auth_manager_type(store.config.auth_config.type) @@ -771,7 +799,7 @@ def start_server( ) logger.debug("Auth manager initialized successfully") - if sys.platform != "win32": + if uses_gunicorn: options = { "bind": f"{host}:{port}", "accesslog": None if no_access_log else "-", @@ -787,7 +815,9 @@ def start_server( if tls_key_path and tls_cert_path: options["keyfile"] = tls_key_path options["certfile"] = tls_cert_path - FeastServeApplication(store=store, **options).run() + FeastServeApplication( + store=store, metrics_enabled=metrics_active, **options + ).run() else: import uvicorn diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 36ff2c9d4eb..fe0e7967345 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -15,6 +15,7 @@ import copy import itertools import os +import time import warnings from datetime import datetime, timedelta from pathlib import Path @@ -58,6 +59,7 @@ from feast.dqm.errors import ValidationFailed from feast.entity import Entity from feast.errors import ( + ConflictingFeatureViewNames, DataFrameSerializationError, DataSourceRepeatNamesException, FeatureViewNotFoundException, @@ -99,6 +101,29 @@ 
from feast.transformation.python_transformation import PythonTransformation from feast.utils import _get_feature_view_vector_field_metadata, _utc_now +_track_materialization = None # Lazy-loaded on first materialization call +_track_materialization_loaded = False + + +def _get_track_materialization(): + """Lazy-import feast.metrics only when materialization tracking is needed. + + Avoids importing the metrics module (and its prometheus_client / + psutil dependencies plus temp-dir creation) for every FeatureStore + usage such as ``feast apply`` or simple SDK reads. + """ + global _track_materialization, _track_materialization_loaded + if not _track_materialization_loaded: + _track_materialization_loaded = True + try: + from feast.metrics import track_materialization + + _track_materialization = track_materialization + except Exception: # pragma: no cover + _track_materialization = None + return _track_materialization + + warnings.simplefilter("once", DeprecationWarning) @@ -677,11 +702,21 @@ def _make_inferences( ) update_data_sources_with_inferred_event_timestamp_col( - [view.batch_source for view in views_to_update], self.config + [ + view.batch_source + for view in views_to_update + if view.batch_source is not None + ], + self.config, ) update_data_sources_with_inferred_event_timestamp_col( - [view.batch_source for view in sfvs_to_update], self.config + [ + view.batch_source + for view in sfvs_to_update + if view.batch_source is not None + ], + self.config, ) # New feature views may reference previously applied entities. 
@@ -1704,15 +1739,29 @@ def tqdm_builder(length): start_date = utils.make_tzaware(start_date) end_date = utils.make_tzaware(end_date) or _utc_now() - provider.materialize_single_feature_view( - config=self.config, - feature_view=feature_view, - start_date=start_date, - end_date=end_date, - registry=self.registry, - project=self.project, - tqdm_builder=tqdm_builder, - ) + fv_start = time.monotonic() + fv_success = True + try: + provider.materialize_single_feature_view( + config=self.config, + feature_view=feature_view, + start_date=start_date, + end_date=end_date, + registry=self.registry, + project=self.project, + tqdm_builder=tqdm_builder, + ) + except Exception: + fv_success = False + raise + finally: + _tracker = _get_track_materialization() + if _tracker is not None: + _tracker( + feature_view.name, + fv_success, + time.monotonic() - fv_start, + ) if not isinstance(feature_view, OnDemandFeatureView): self.registry.apply_materialization( feature_view, @@ -1813,16 +1862,30 @@ def tqdm_builder(length): start_date = utils.make_tzaware(start_date) end_date = utils.make_tzaware(end_date) - provider.materialize_single_feature_view( - config=self.config, - feature_view=feature_view, - start_date=start_date, - end_date=end_date, - registry=self.registry, - project=self.project, - tqdm_builder=tqdm_builder, - disable_event_timestamp=disable_event_timestamp, - ) + fv_start = time.monotonic() + fv_success = True + try: + provider.materialize_single_feature_view( + config=self.config, + feature_view=feature_view, + start_date=start_date, + end_date=end_date, + registry=self.registry, + project=self.project, + tqdm_builder=tqdm_builder, + disable_event_timestamp=disable_event_timestamp, + ) + except Exception: + fv_success = False + raise + finally: + _tracker = _get_track_materialization() + if _tracker is not None: + _tracker( + feature_view.name, + fv_success, + time.monotonic() - fv_start, + ) self.registry.apply_materialization( feature_view, @@ -2363,6 +2426,8 @@ def 
write_to_offline_store( provider = self._get_provider() # Get columns of the batch source and the input dataframe. + if feature_view.batch_source is None: + raise ValueError(f"Feature view '{feature_view.name}' has no batch_source.") column_names_and_types = ( provider.get_table_column_names_and_types_from_data_source( self.config, feature_view.batch_source @@ -3255,18 +3320,25 @@ def _print_materialization_log( def _validate_feature_views(feature_views: List[BaseFeatureView]): - """Verify feature views have case-insensitively unique names""" - fv_names = set() + """Verify feature views have case-insensitively unique names across all types. + + This validates that no two feature views (of any type: FeatureView, + StreamFeatureView, OnDemandFeatureView) share the same case-insensitive name. + This is critical because get_online_features uses get_any_feature_view which + resolves names in a fixed order, potentially returning the wrong feature view. + """ + fv_by_name: Dict[str, BaseFeatureView] = {} for fv in feature_views: case_insensitive_fv_name = fv.name.lower() - if case_insensitive_fv_name in fv_names: - raise ValueError( - f"More than one feature view with name {case_insensitive_fv_name} found. " - f"Please ensure that all feature view names are case-insensitively unique. " - f"It may be necessary to ignore certain files in your feature repository by using a .feastignore file." 
+ if case_insensitive_fv_name in fv_by_name: + existing_fv = fv_by_name[case_insensitive_fv_name] + raise ConflictingFeatureViewNames( + fv.name, + existing_type=type(existing_fv).__name__, + new_type=type(fv).__name__, ) else: - fv_names.add(case_insensitive_fv_name) + fv_by_name[case_insensitive_fv_name] = fv def _validate_data_sources(data_sources: List[DataSource]): diff --git a/sdk/python/feast/feature_view.py b/sdk/python/feast/feature_view.py index a9406657a51..94e95da545f 100644 --- a/sdk/python/feast/feature_view.py +++ b/sdk/python/feast/feature_view.py @@ -26,6 +26,11 @@ from feast.entity import Entity from feast.feature_view_projection import FeatureViewProjection from feast.field import Field +from feast.proto_utils import ( + mode_to_string, + serialize_data_source, + transformation_to_proto, +) from feast.protos.feast.core.FeatureView_pb2 import FeatureView as FeatureViewProto from feast.protos.feast.core.FeatureView_pb2 import ( FeatureViewMeta as FeatureViewMetaProto, @@ -36,9 +41,6 @@ from feast.protos.feast.core.FeatureView_pb2 import ( MaterializationInterval as MaterializationIntervalProto, ) -from feast.protos.feast.core.Transformation_pb2 import ( - FeatureTransformationV2 as FeatureTransformationProto, -) from feast.transformation.mode import TransformationMode from feast.types import from_value_type from feast.value_type import ValueType @@ -71,9 +73,8 @@ class FeatureView(BaseFeatureView): ttl: The amount of time this group of features lives. A ttl of 0 indicates that this group of features lives forever. Note that large ttl's or a ttl of 0 can result in extremely computationally intensive queries. - batch_source: The batch source of data where this group of features - is stored. This is optional ONLY if a push source is specified as the - stream_source, since push sources contain their own batch sources. + batch_source: Optional batch source of data where this group of features + is stored. If no source is provided, this will be None. 
stream_source: The stream source of data where this group of features is stored. schema: The schema of the feature view, including feature, timestamp, and entity columns. If not specified, can be inferred from the underlying data source. @@ -95,7 +96,7 @@ class FeatureView(BaseFeatureView): name: str entities: List[str] ttl: Optional[timedelta] - batch_source: DataSource + batch_source: Optional[DataSource] stream_source: Optional[DataSource] source_views: Optional[List["FeatureView"]] entity_columns: List[Field] @@ -107,12 +108,13 @@ class FeatureView(BaseFeatureView): owner: str materialization_intervals: List[Tuple[datetime, datetime]] mode: Optional[Union["TransformationMode", str]] + enable_validation: bool def __init__( self, *, name: str, - source: Union[DataSource, "FeatureView", List["FeatureView"]], + source: Optional[Union[DataSource, "FeatureView", List["FeatureView"]]] = None, sink_source: Optional[DataSource] = None, schema: Optional[List[Field]] = None, entities: Optional[List[Entity]] = None, @@ -123,14 +125,16 @@ def __init__( tags: Optional[Dict[str, str]] = None, owner: str = "", mode: Optional[Union["TransformationMode", str]] = None, + enable_validation: bool = False, ): """ Creates a FeatureView object. Args: name: The unique name of the feature view. - source: The source of data for this group of features. May be a stream source, or a batch source. - If a stream source, the source should contain a batch_source for backfills & batch materialization. + source (optional): The source of data for this group of features. May be a stream source, + a batch source, a FeatureView, or a list of FeatureViews. If None, the feature view + has no associated data source. schema (optional): The schema of the feature view, including feature, timestamp, and entity columns. # TODO: clarify that schema is only useful here... @@ -148,11 +152,14 @@ def __init__( primary maintainer. mode (optional): The transformation mode for feature transformations. 
Only meaningful when transformations are applied. Choose from TransformationMode enum values. + enable_validation (optional): If True, enables schema validation during materialization + to check that data conforms to the declared feature types. Default is False. Raises: ValueError: A field mapping conflicts with an Entity or a Feature. """ self.name = name + self.enable_validation = enable_validation self.entities = [e.name for e in entities] if entities else [DUMMY_ENTITY_NAME] self.ttl = ttl schema = schema or [] @@ -163,7 +170,9 @@ def __init__( self.data_source: Optional[DataSource] = None self.source_views: List[FeatureView] = [] - if isinstance(source, DataSource): + if source is None: + pass # data_source remains None, source_views remains [] + elif isinstance(source, DataSource): self.data_source = source elif isinstance(source, FeatureView): self.source_views = [source] @@ -192,11 +201,14 @@ def __init__( elif self.data_source: # Batch source definition self.batch_source = self.data_source - else: + elif self.source_views: # Derived view source definition if not sink_source: raise ValueError("Derived FeatureView must specify `sink_source`.") self.batch_source = sink_source + else: + # source=None - no batch source + self.batch_source = None # Initialize features and entity columns. features: List[Field] = [] @@ -279,6 +291,7 @@ def __copy__(self): online=self.online, offline=self.offline, sink_source=self.batch_source if self.source_views else None, + enable_validation=self.enable_validation, ) # This is deliberately set outside of the FV initialization as we do not have the Entity objects. 
@@ -307,6 +320,7 @@ def __eq__(self, other): or sorted(self.entity_columns) != sorted(other.entity_columns) or self.source_views != other.source_views or self.materialization_intervals != other.materialization_intervals + or self.enable_validation != other.enable_validation ): return False @@ -414,15 +428,9 @@ def to_proto_spec( ) -> FeatureViewSpecProto: ttl_duration = self.get_ttl_duration() - batch_source_proto = None - if self.batch_source: - batch_source_proto = self.batch_source.to_proto() - batch_source_proto.data_source_class_type = f"{self.batch_source.__class__.__module__}.{self.batch_source.__class__.__name__}" + batch_source_proto = serialize_data_source(self.batch_source) + stream_source_proto = serialize_data_source(self.stream_source) - stream_source_proto = None - if self.stream_source: - stream_source_proto = self.stream_source.to_proto() - stream_source_proto.data_source_class_type = f"{self.stream_source.__class__.__module__}.{self.stream_source.__class__.__name__}" source_view_protos = None if self.source_views: source_view_protos = [ @@ -431,30 +439,8 @@ def to_proto_spec( feature_transformation_proto = None if hasattr(self, "feature_transformation") and self.feature_transformation: - from feast.protos.feast.core.Transformation_pb2 import ( - SubstraitTransformationV2 as SubstraitTransformationProto, - ) - from feast.protos.feast.core.Transformation_pb2 import ( - UserDefinedFunctionV2 as UserDefinedFunctionProto, - ) - - transformation_proto = self.feature_transformation.to_proto() - - if isinstance(transformation_proto, UserDefinedFunctionProto): - feature_transformation_proto = FeatureTransformationProto( - user_defined_function=transformation_proto, - ) - elif isinstance(transformation_proto, SubstraitTransformationProto): - feature_transformation_proto = FeatureTransformationProto( - substrait_transformation=transformation_proto, - ) - - mode_str = "" - if self.mode: - mode_str = ( - self.mode.value - if isinstance(self.mode, 
TransformationMode) - else self.mode + feature_transformation_proto = transformation_to_proto( + self.feature_transformation ) return FeatureViewSpecProto( @@ -472,7 +458,8 @@ def to_proto_spec( stream_source=stream_source_proto, source_views=source_view_protos, feature_transformation=feature_transformation_proto, - mode=mode_str, + mode=mode_to_string(self.mode), + enable_validation=self.enable_validation, ) def to_proto_meta(self): @@ -642,6 +629,9 @@ def _from_proto_internal( f"Entities: {feature_view.entities} vs Entity Columns: {feature_view.entity_columns}" ) + # Restore enable_validation from proto field. + feature_view.enable_validation = feature_view_proto.spec.enable_validation + # FeatureViewProjections are not saved in the FeatureView proto. # Create the default projection. feature_view.projection = FeatureViewProjection.from_feature_view_definition( diff --git a/sdk/python/feast/feature_view_projection.py b/sdk/python/feast/feature_view_projection.py index 70415e9ed3a..530194ec6a8 100644 --- a/sdk/python/feast/feature_view_projection.py +++ b/sdk/python/feast/feature_view_projection.py @@ -98,19 +98,17 @@ def from_proto(proto: FeatureViewProjectionProto) -> "FeatureViewProjection": @staticmethod def from_feature_view_definition(feature_view: "FeatureView"): # TODO need to implement this for StreamFeatureViews - if getattr(feature_view, "batch_source", None): + batch_source = getattr(feature_view, "batch_source", None) + if batch_source: return FeatureViewProjection( name=feature_view.name, name_alias=None, features=feature_view.features, desired_features=[], - timestamp_field=feature_view.batch_source.created_timestamp_column - or None, - created_timestamp_column=feature_view.batch_source.created_timestamp_column - or None, - date_partition_column=feature_view.batch_source.date_partition_column - or None, - batch_source=feature_view.batch_source or None, + timestamp_field=batch_source.created_timestamp_column or None, + 
created_timestamp_column=batch_source.created_timestamp_column or None, + date_partition_column=batch_source.date_partition_column or None, + batch_source=batch_source or None, ) else: return FeatureViewProjection( diff --git a/sdk/python/feast/feature_view_utils.py b/sdk/python/feast/feature_view_utils.py index 704a5a3d7b9..0b599f4777c 100644 --- a/sdk/python/feast/feature_view_utils.py +++ b/sdk/python/feast/feature_view_utils.py @@ -136,6 +136,8 @@ def resolve_feature_view_source( if not is_derived_view: # Regular feature view - use its batch_source directly + if feature_view.batch_source is None: + raise ValueError(f"Feature view '{feature_view.name}' has no batch_source.") return FeatureViewSourceInfo( data_source=feature_view.batch_source, source_type="batch_source", @@ -178,8 +180,13 @@ def resolve_feature_view_source( if hasattr(parent_view, "source_views") and parent_view.source_views: # Parent is also a derived view - recursively find original source original_source_view = find_original_source_view(parent_view) + original_batch_source = original_source_view.batch_source + if original_batch_source is None: + raise ValueError( + f"Original source view '{original_source_view.name}' has no batch_source." + ) return FeatureViewSourceInfo( - data_source=original_source_view.batch_source, + data_source=original_batch_source, source_type="original_source", has_transformation=view_has_transformation, transformation_func=transformation_func, @@ -229,8 +236,13 @@ def resolve_feature_view_source_with_fallback( elif hasattr(feature_view, "source_views") and feature_view.source_views: # Try the original source view as last resort original_view = find_original_source_view(feature_view) + original_view_batch_source = original_view.batch_source + if original_view_batch_source is None: + raise ValueError( + f"Original source view '{original_view.name}' has no batch_source." 
+ ) return FeatureViewSourceInfo( - data_source=original_view.batch_source, + data_source=original_view_batch_source, source_type="fallback_original_source", has_transformation=has_transformation(feature_view), transformation_func=get_transformation_function(feature_view), diff --git a/sdk/python/feast/field.py b/sdk/python/feast/field.py index 27552878afc..c61ed6a5c5e 100644 --- a/sdk/python/feast/field.py +++ b/sdk/python/feast/field.py @@ -12,15 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. +import json from typing import Dict, Optional from typeguard import typechecked from feast.feature import Feature from feast.protos.feast.core.Feature_pb2 import FeatureSpecV2 as FieldProto -from feast.types import FeastType, from_value_type +from feast.types import FeastType, Struct, from_value_type from feast.value_type import ValueType +STRUCT_SCHEMA_TAG = "feast:struct_schema" + @typechecked class Field: @@ -115,13 +118,21 @@ def __str__(self): def to_proto(self) -> FieldProto: """Converts a Field object to its protobuf representation.""" + from feast.types import Array + value_type = self.dtype.to_value_type() vector_search_metric = self.vector_search_metric or "" + tags = dict(self.tags) + # Persist Struct field schema in tags + if isinstance(self.dtype, Struct): + tags[STRUCT_SCHEMA_TAG] = _serialize_struct_schema(self.dtype) + elif isinstance(self.dtype, Array) and isinstance(self.dtype.base_type, Struct): + tags[STRUCT_SCHEMA_TAG] = _serialize_struct_schema(self.dtype.base_type) return FieldProto( name=self.name, value_type=value_type.value, description=self.description, - tags=self.tags, + tags=tags, vector_index=self.vector_index, vector_length=self.vector_length, vector_search_metric=vector_search_metric, @@ -136,13 +147,30 @@ def from_proto(cls, field_proto: FieldProto): field_proto: FieldProto protobuf object """ value_type = ValueType(field_proto.value_type) + tags = dict(field_proto.tags) 
vector_search_metric = getattr(field_proto, "vector_search_metric", "") vector_index = getattr(field_proto, "vector_index", False) vector_length = getattr(field_proto, "vector_length", 0) + + # Reconstruct Struct type from persisted schema in tags + from feast.types import Array + + dtype: FeastType + if value_type == ValueType.STRUCT and STRUCT_SCHEMA_TAG in tags: + dtype = _deserialize_struct_schema(tags[STRUCT_SCHEMA_TAG]) + user_tags = {k: v for k, v in tags.items() if k != STRUCT_SCHEMA_TAG} + elif value_type == ValueType.STRUCT_LIST and STRUCT_SCHEMA_TAG in tags: + inner_struct = _deserialize_struct_schema(tags[STRUCT_SCHEMA_TAG]) + dtype = Array(inner_struct) + user_tags = {k: v for k, v in tags.items() if k != STRUCT_SCHEMA_TAG} + else: + dtype = from_value_type(value_type=value_type) + user_tags = tags + return cls( name=field_proto.name, - dtype=from_value_type(value_type=value_type), - tags=dict(field_proto.tags), + dtype=dtype, + tags=user_tags, description=field_proto.description, vector_index=vector_index, vector_length=vector_length, @@ -163,3 +191,75 @@ def from_feature(cls, feature: Feature): description=feature.description, tags=feature.labels, ) + + +def _feast_type_to_str(feast_type: FeastType) -> str: + """Convert a FeastType to a string representation for serialization.""" + from feast.types import ( + Array, + PrimitiveFeastType, + ) + + if isinstance(feast_type, PrimitiveFeastType): + return feast_type.name + elif isinstance(feast_type, Struct): + nested = { + name: _feast_type_to_str(ft) for name, ft in feast_type.fields.items() + } + return json.dumps({"__struct__": nested}) + elif isinstance(feast_type, Array): + return f"Array({_feast_type_to_str(feast_type.base_type)})" + else: + return str(feast_type) + + +def _str_to_feast_type(type_str: str) -> FeastType: + """Convert a string representation back to a FeastType.""" + from feast.types import ( + Array, + PrimitiveFeastType, + ) + + # Check if it's an Array type + if 
type_str.startswith("Array(") and type_str.endswith(")"): + inner = type_str[6:-1] + base_type = _str_to_feast_type(inner) + return Array(base_type) + + # Check if it's a nested Struct (JSON encoded) + if type_str.startswith("{"): + try: + parsed = json.loads(type_str) + if "__struct__" in parsed: + fields = { + name: _str_to_feast_type(ft_str) + for name, ft_str in parsed["__struct__"].items() + } + return Struct(fields) + except (json.JSONDecodeError, TypeError): + pass + + # Must be a PrimitiveFeastType name + try: + return PrimitiveFeastType[type_str] + except KeyError: + from feast.types import String + + return String + + +def _serialize_struct_schema(struct_type: Struct) -> str: + """Serialize a Struct's field schema to a JSON string for tag storage.""" + schema_dict = {} + for name, feast_type in struct_type.fields.items(): + schema_dict[name] = _feast_type_to_str(feast_type) + return json.dumps(schema_dict) + + +def _deserialize_struct_schema(schema_str: str) -> Struct: + """Deserialize a JSON string from tags back to a Struct type.""" + schema_dict = json.loads(schema_str) + fields = {} + for name, type_str in schema_dict.items(): + fields[name] = _str_to_feast_type(type_str) + return Struct(fields) diff --git a/sdk/python/feast/inference.py b/sdk/python/feast/inference.py index f5f234b7301..16023e3dac6 100644 --- a/sdk/python/feast/inference.py +++ b/sdk/python/feast/inference.py @@ -221,6 +221,9 @@ def _infer_features_and_entities( fv, join_keys, run_inference_for_features, config ) + if fv.batch_source is None: + return + entity_columns: List[Field] = fv.entity_columns if fv.entity_columns else [] columns_to_exclude = { fv.batch_source.timestamp_field, diff --git a/sdk/python/feast/infra/compute_engines/aws_lambda/lambda_engine.py b/sdk/python/feast/infra/compute_engines/aws_lambda/lambda_engine.py index 9e2d217875b..b223328893d 100644 --- a/sdk/python/feast/infra/compute_engines/aws_lambda/lambda_engine.py +++ 
b/sdk/python/feast/infra/compute_engines/aws_lambda/lambda_engine.py @@ -193,7 +193,7 @@ def _materialize_one( offline_job = self.offline_store.pull_latest_from_table_or_query( config=self.repo_config, - data_source=feature_view.batch_source, + data_source=feature_view.batch_source, # type: ignore[arg-type] join_key_columns=join_key_columns, feature_name_columns=feature_name_columns, timestamp_field=timestamp_field, diff --git a/sdk/python/feast/infra/compute_engines/kubernetes/k8s_engine.py b/sdk/python/feast/infra/compute_engines/kubernetes/k8s_engine.py index 0dcff09f027..2f041301be7 100644 --- a/sdk/python/feast/infra/compute_engines/kubernetes/k8s_engine.py +++ b/sdk/python/feast/infra/compute_engines/kubernetes/k8s_engine.py @@ -145,7 +145,7 @@ def _materialize_one( offline_job = self.offline_store.pull_latest_from_table_or_query( config=self.repo_config, - data_source=feature_view.batch_source, + data_source=feature_view.batch_source, # type: ignore[arg-type] join_key_columns=join_key_columns, feature_name_columns=feature_name_columns, timestamp_field=timestamp_field, diff --git a/sdk/python/feast/infra/compute_engines/local/feature_builder.py b/sdk/python/feast/infra/compute_engines/local/feature_builder.py index 3463c0e074b..754a00db76f 100644 --- a/sdk/python/feast/infra/compute_engines/local/feature_builder.py +++ b/sdk/python/feast/infra/compute_engines/local/feature_builder.py @@ -1,3 +1,4 @@ +import logging from typing import Union from feast.aggregation import aggregation_specs_to_agg_ops @@ -16,6 +17,9 @@ LocalValidationNode, ) from feast.infra.registry.base_registry import BaseRegistry +from feast.types import PrimitiveFeastType, from_feast_to_pyarrow_type + +logger = logging.getLogger(__name__) class LocalFeatureBuilder(FeatureBuilder): @@ -88,7 +92,36 @@ def build_transformation_node(self, view, input_nodes): return node def build_validation_node(self, view, input_node): - validation_config = view.validation_config + validation_config = 
getattr(view, "validation_config", None) or {} + + if not validation_config.get("columns") and hasattr(view, "features"): + columns = {} + json_columns = set() + for feature in view.features: + try: + columns[feature.name] = from_feast_to_pyarrow_type(feature.dtype) + except (ValueError, KeyError): + logger.debug( + "Could not resolve PyArrow type for feature '%s' " + "(dtype=%s), skipping type check for this column.", + feature.name, + feature.dtype, + ) + columns[feature.name] = None + # Track which columns are Json type for content validation + if ( + isinstance(feature.dtype, PrimitiveFeastType) + and feature.dtype.name == "JSON" + ): + json_columns.add(feature.name) + if columns: + validation_config = {**validation_config, "columns": columns} + if json_columns: + validation_config = { + **validation_config, + "json_columns": json_columns, + } + node = LocalValidationNode( "validate", validation_config, self.backend, inputs=[input_node] ) diff --git a/sdk/python/feast/infra/compute_engines/local/nodes.py b/sdk/python/feast/infra/compute_engines/local/nodes.py index 985a089daae..db65761a5e2 100644 --- a/sdk/python/feast/infra/compute_engines/local/nodes.py +++ b/sdk/python/feast/infra/compute_engines/local/nodes.py @@ -1,5 +1,7 @@ +import json +import logging from datetime import datetime, timedelta -from typing import List, Optional, Union +from typing import List, Optional, Set, Union import pyarrow as pa @@ -19,6 +21,8 @@ ) from feast.utils import _convert_arrow_to_proto +logger = logging.getLogger(__name__) + ENTITY_TS_ALIAS = "__entity_event_timestamp" @@ -236,15 +240,114 @@ def __init__( def execute(self, context: ExecutionContext) -> ArrowTableValue: input_table = self.get_single_table(context).data - df = self.backend.from_arrow(input_table) - # Placeholder for actual validation logic + if self.validation_config: - print(f"[Validation: {self.name}] Passed.") - result = self.backend.to_arrow(df) - output = ArrowTableValue(result) + 
self._validate_schema(input_table) + + output = ArrowTableValue(input_table) context.node_outputs[self.name] = output return output + def _validate_schema(self, table: pa.Table): + """Validate that the input table conforms to the expected schema. + + Checks that all expected columns are present, that their types + are compatible with the declared Feast types, and that Json columns + contain well-formed JSON. Logs warnings for type mismatches but + raises on missing columns or invalid JSON content. + """ + expected_columns = self.validation_config.get("columns", {}) + if not expected_columns: + logger.debug( + "[Validation: %s] No column schema to validate against.", + self.name, + ) + return + + actual_columns = set(table.column_names) + expected_names = set(expected_columns.keys()) + + missing = expected_names - actual_columns + if missing: + raise ValueError( + f"[Validation: {self.name}] Missing expected columns: {missing}. " + f"Actual columns: {sorted(actual_columns)}" + ) + + for col_name, expected_type in expected_columns.items(): + actual_type = table.schema.field(col_name).type + if expected_type is not None and actual_type != expected_type: + # PyArrow map columns and struct columns are compatible + # with the Feast Map type — skip warning for these cases + if pa.types.is_map(expected_type) and ( + pa.types.is_map(actual_type) + or pa.types.is_struct(actual_type) + or pa.types.is_large_list(actual_type) + or pa.types.is_list(actual_type) + ): + continue + + # JSON type (large_string) is compatible with string types + if pa.types.is_large_string(expected_type) and ( + pa.types.is_string(actual_type) + or pa.types.is_large_string(actual_type) + ): + continue + + # Struct type — expected struct is compatible with actual + # struct or map representations + if pa.types.is_struct(expected_type) and ( + pa.types.is_struct(actual_type) + or pa.types.is_map(actual_type) + or pa.types.is_list(actual_type) + ): + continue + + logger.warning( + "[Validation: %s] 
Column '%s' type mismatch: expected %s, got %s", + self.name, + col_name, + expected_type, + actual_type, + ) + + # Validate JSON well-formedness for declared Json columns + json_columns: Set[str] = self.validation_config.get("json_columns", set()) + for col_name in json_columns: + if col_name not in actual_columns: + continue + + column = table.column(col_name) + invalid_count = 0 + first_error = None + first_error_row = None + + for i in range(len(column)): + value = column[i] + if not value.is_valid: + continue + + str_value = value.as_py() + if not isinstance(str_value, str): + continue + + try: + json.loads(str_value) + except (json.JSONDecodeError, TypeError) as e: + invalid_count += 1 + if first_error is None: + first_error = str(e) + first_error_row = i + + if invalid_count > 0: + raise ValueError( + f"[Validation: {self.name}] Column '{col_name}' declared as Json " + f"contains {invalid_count} invalid JSON value(s). " + f"First error at row {first_error_row}: {first_error}" + ) + + logger.debug("[Validation: %s] Schema validation passed.", self.name) + class LocalOutputNode(LocalNode): def __init__( diff --git a/sdk/python/feast/infra/compute_engines/ray/compute.py b/sdk/python/feast/infra/compute_engines/ray/compute.py index a5c1b3caab5..fa8f9747f3b 100644 --- a/sdk/python/feast/infra/compute_engines/ray/compute.py +++ b/sdk/python/feast/infra/compute_engines/ray/compute.py @@ -163,7 +163,7 @@ def _materialize_from_offline_store( # Pull data from offline store retrieval_job = self.offline_store.pull_latest_from_table_or_query( config=self.repo_config, - data_source=feature_view.batch_source, + data_source=feature_view.batch_source, # type: ignore[arg-type] join_key_columns=join_key_columns, feature_name_columns=feature_name_columns, timestamp_field=timestamp_field, diff --git a/sdk/python/feast/infra/compute_engines/ray/feature_builder.py b/sdk/python/feast/infra/compute_engines/ray/feature_builder.py index 274fe87599c..a9830162c1e 100644 --- 
a/sdk/python/feast/infra/compute_engines/ray/feature_builder.py +++ b/sdk/python/feast/infra/compute_engines/ray/feature_builder.py @@ -17,8 +17,10 @@ RayJoinNode, RayReadNode, RayTransformationNode, + RayValidationNode, RayWriteNode, ) +from feast.types import PrimitiveFeastType, from_feast_to_pyarrow_type if TYPE_CHECKING: from feast.infra.compute_engines.ray.config import RayComputeEngineConfig @@ -174,11 +176,36 @@ def build_output_nodes(self, view, final_node): def build_validation_node(self, view, input_node): """Build the validation node for feature validation.""" - # TODO: Implement validation logic - logger.warning( - "Feature validation is not yet implemented for Ray compute engine." + expected_columns = {} + json_columns: set = set() + if hasattr(view, "features"): + for feature in view.features: + try: + expected_columns[feature.name] = from_feast_to_pyarrow_type( + feature.dtype + ) + except (ValueError, KeyError): + logger.debug( + "Could not resolve PyArrow type for feature '%s' " + "(dtype=%s), skipping type check for this column.", + feature.name, + feature.dtype, + ) + expected_columns[feature.name] = None + if ( + isinstance(feature.dtype, PrimitiveFeastType) + and feature.dtype.name == "JSON" + ): + json_columns.add(feature.name) + + node = RayValidationNode( + f"{view.name}:validate", + expected_columns=expected_columns, + json_columns=json_columns, + inputs=[input_node], ) - return input_node + self.nodes.append(node) + return node def _build(self, view, input_nodes: Optional[List[DAGNode]]) -> DAGNode: has_physical_source = (hasattr(view, "batch_source") and view.batch_source) or ( diff --git a/sdk/python/feast/infra/compute_engines/ray/nodes.py b/sdk/python/feast/infra/compute_engines/ray/nodes.py index 89694a57e2d..c4eaa54d26a 100644 --- a/sdk/python/feast/infra/compute_engines/ray/nodes.py +++ b/sdk/python/feast/infra/compute_engines/ray/nodes.py @@ -1,6 +1,7 @@ +import json import logging from datetime import datetime, timedelta, timezone 
-from typing import Dict, List, Optional, Union +from typing import Dict, List, Optional, Set, Union import dill import pandas as pd @@ -460,9 +461,7 @@ def _execute_standard_aggregation(self, dataset: Dataset) -> DAGValue: # Convert aggregations to Ray's groupby format agg_dict = {} for agg in self.aggregations: - feature_name = f"{agg.function}_{agg.column}" - if agg.time_window: - feature_name += f"_{int(agg.time_window.total_seconds())}s" + feature_name = agg.resolved_name(agg.time_window) if agg.function == "count": agg_dict[feature_name] = (agg.column, "count") @@ -847,3 +846,127 @@ def write_batch_with_serialized_artifacts(batch: pd.DataFrame) -> pd.DataFrame: ), }, ) + + +class RayValidationNode(DAGNode): + """ + Ray node for validating feature data against the declared schema. + + Checks that all expected columns are present and logs warnings for + type mismatches. Validation runs once on the first batch to avoid + per-batch overhead; the full dataset is passed through unchanged. + """ + + def __init__( + self, + name: str, + expected_columns: Dict[str, Optional[pa.DataType]], + json_columns: Optional[Set[str]] = None, + inputs: Optional[List[DAGNode]] = None, + ): + super().__init__(name, inputs=inputs) + self.expected_columns = expected_columns + self.json_columns = json_columns or set() + + def execute(self, context: ExecutionContext) -> DAGValue: + input_value = self.get_single_input_value(context) + dataset = input_value.data + + if not self.expected_columns: + context.node_outputs[self.name] = input_value + return input_value + + expected_names = set(self.expected_columns.keys()) + + schema = dataset.schema() + actual_columns = set(schema.names) + + missing = expected_names - actual_columns + if missing: + raise ValueError( + f"[Validation: {self.name}] Missing expected columns: {missing}. 
" + f"Actual columns: {sorted(actual_columns)}" + ) + + for col_name, expected_type in self.expected_columns.items(): + if expected_type is None: + continue + actual_field = schema.field(col_name) + actual_type = actual_field.type + if actual_type != expected_type: + # Map type compatibility + if pa.types.is_map(expected_type) and ( + pa.types.is_map(actual_type) + or pa.types.is_struct(actual_type) + or pa.types.is_list(actual_type) + ): + continue + + # JSON type compatibility (large_string / string) + if pa.types.is_large_string(expected_type) and ( + pa.types.is_string(actual_type) + or pa.types.is_large_string(actual_type) + ): + continue + + # Struct type compatibility + if pa.types.is_struct(expected_type) and ( + pa.types.is_struct(actual_type) + or pa.types.is_map(actual_type) + or pa.types.is_list(actual_type) + ): + continue + + logger.warning( + "[Validation: %s] Column '%s' type mismatch: expected %s, got %s", + self.name, + col_name, + expected_type, + actual_type, + ) + + # Validate JSON well-formedness for declared Json columns + if self.json_columns: + try: + first_batch = dataset.take_batch(1000) + except Exception: + logger.debug( + "[Validation: %s] Could not sample batch for JSON validation.", + self.name, + ) + first_batch = None + + if first_batch is not None: + for col_name in self.json_columns: + if col_name not in first_batch: + continue + + values = first_batch[col_name] + invalid_count = 0 + first_error = None + first_error_row = None + + for i, value in enumerate(values): + if value is None: + continue + if not isinstance(value, str): + continue + try: + json.loads(value) + except (json.JSONDecodeError, TypeError) as e: + invalid_count += 1 + if first_error is None: + first_error = str(e) + first_error_row = i + + if invalid_count > 0: + raise ValueError( + f"[Validation: {self.name}] Column '{col_name}' declared " + f"as Json contains {invalid_count} invalid JSON value(s) " + f"in sampled batch. 
First error at row {first_error_row}: " + f"{first_error}" + ) + + logger.debug("[Validation: %s] Schema validation passed.", self.name) + context.node_outputs[self.name] = input_value + return input_value diff --git a/sdk/python/feast/infra/compute_engines/snowflake/snowflake_engine.py b/sdk/python/feast/infra/compute_engines/snowflake/snowflake_engine.py index 7441cb0f18c..d0a1152eb55 100644 --- a/sdk/python/feast/infra/compute_engines/snowflake/snowflake_engine.py +++ b/sdk/python/feast/infra/compute_engines/snowflake/snowflake_engine.py @@ -226,13 +226,14 @@ def _materialize_one( timestamp_field, created_timestamp_column, ) = _get_column_names(feature_view, entities) + assert feature_view.batch_source is not None # guaranteed by _get_column_names job_id = f"{feature_view.name}-{start_date}-{end_date}" try: offline_job = self.offline_store.pull_latest_from_table_or_query( config=self.repo_config, - data_source=feature_view.batch_source, + data_source=feature_view.batch_source, # type: ignore[arg-type] join_key_columns=join_key_columns, feature_name_columns=feature_name_columns, timestamp_field=timestamp_field, @@ -341,6 +342,7 @@ def generate_snowflake_materialization_query( feature_batch: list, project: str, ) -> str: + assert feature_view.batch_source is not None if feature_view.batch_source.created_timestamp_column: fv_created_str = f',"{feature_view.batch_source.created_timestamp_column}"' else: @@ -406,6 +408,7 @@ def materialize_to_snowflake_online_store( project: str, ) -> None: assert_snowflake_feature_names(feature_view) + assert feature_view.batch_source is not None feature_names_str = '", "'.join( [feature.name for feature in feature_view.features] @@ -467,6 +470,7 @@ def materialize_to_external_online_store( feature_view: Union[StreamFeatureView, FeatureView], pbar: tqdm, ) -> None: + assert feature_view.batch_source is not None feature_names = [feature.name for feature in feature_view.features] with GetSnowflakeConnection(repo_config.batch_engine) 
as conn: diff --git a/sdk/python/feast/infra/compute_engines/spark/compute.py b/sdk/python/feast/infra/compute_engines/spark/compute.py index e6ec58dd74d..b6c7dc30d55 100644 --- a/sdk/python/feast/infra/compute_engines/spark/compute.py +++ b/sdk/python/feast/infra/compute_engines/spark/compute.py @@ -162,7 +162,7 @@ def _materialize_from_offline_store( SparkRetrievalJob, self.offline_store.pull_latest_from_table_or_query( config=self.repo_config, - data_source=feature_view.batch_source, + data_source=feature_view.batch_source, # type: ignore[arg-type] join_key_columns=join_key_columns, feature_name_columns=feature_name_columns, timestamp_field=timestamp_field, diff --git a/sdk/python/feast/infra/compute_engines/spark/feature_builder.py b/sdk/python/feast/infra/compute_engines/spark/feature_builder.py index 11a3c1587f6..94f29220513 100644 --- a/sdk/python/feast/infra/compute_engines/spark/feature_builder.py +++ b/sdk/python/feast/infra/compute_engines/spark/feature_builder.py @@ -1,3 +1,4 @@ +import logging from typing import Union from pyspark.sql import SparkSession @@ -12,9 +13,14 @@ SparkJoinNode, SparkReadNode, SparkTransformationNode, + SparkValidationNode, SparkWriteNode, + from_feast_to_spark_type, ) from feast.infra.registry.base_registry import BaseRegistry +from feast.types import PrimitiveFeastType + +logger = logging.getLogger(__name__) class SparkFeatureBuilder(FeatureBuilder): @@ -115,4 +121,30 @@ def build_output_nodes(self, view, input_node): return node def build_validation_node(self, view, input_node): - pass + expected_columns = {} + json_columns: set = set() + if hasattr(view, "features"): + for feature in view.features: + spark_type = from_feast_to_spark_type(feature.dtype) + if spark_type is None: + logger.debug( + "Could not resolve Spark type for feature '%s' " + "(dtype=%s), skipping type check for this column.", + feature.name, + feature.dtype, + ) + expected_columns[feature.name] = spark_type + if ( + isinstance(feature.dtype, 
PrimitiveFeastType) + and feature.dtype.name == "JSON" + ): + json_columns.add(feature.name) + + node = SparkValidationNode( + f"{view.name}:validate", + expected_columns=expected_columns, + json_columns=json_columns, + inputs=[input_node], + ) + self.nodes.append(node) + return node diff --git a/sdk/python/feast/infra/compute_engines/spark/nodes.py b/sdk/python/feast/infra/compute_engines/spark/nodes.py index 124ce65ff90..d44764e7b9b 100644 --- a/sdk/python/feast/infra/compute_engines/spark/nodes.py +++ b/sdk/python/feast/infra/compute_engines/spark/nodes.py @@ -1,10 +1,28 @@ +import json +import logging from datetime import datetime, timedelta -from typing import Callable, List, Optional, Union, cast +from typing import Callable, Dict, List, Optional, Set, Union, cast import pandas as pd from pyspark.sql import DataFrame, SparkSession, Window from pyspark.sql import functions as F from pyspark.sql.pandas.types import from_arrow_schema +from pyspark.sql.types import ( + ArrayType, + BinaryType, + BooleanType, + DoubleType, + FloatType, + IntegerType, + LongType, + MapType, + StringType, + StructType, + TimestampType, +) +from pyspark.sql.types import ( + DataType as SparkDataType, +) from feast import BatchFeatureView, StreamFeatureView from feast.aggregation import Aggregation @@ -29,6 +47,103 @@ infer_event_timestamp_from_entity_df, ) +logger = logging.getLogger(__name__) + + +def from_feast_to_spark_type(feast_type) -> Optional[SparkDataType]: + """Convert a Feast type to a PySpark DataType. + + Returns None if the Feast type cannot be mapped. 
+ """ + from feast.types import ( + Array, + PrimitiveFeastType, + Set, + Struct, + ) + + if isinstance(feast_type, Struct): + from pyspark.sql.types import StructField + + spark_fields = [] + for name, ftype in feast_type.fields.items(): + spark_type = from_feast_to_spark_type(ftype) + if spark_type is None: + return None + spark_fields.append(StructField(name, spark_type, nullable=True)) + return StructType(spark_fields) + + if isinstance(feast_type, PrimitiveFeastType): + mapping = { + PrimitiveFeastType.BYTES: BinaryType(), + PrimitiveFeastType.STRING: StringType(), + PrimitiveFeastType.INT32: IntegerType(), + PrimitiveFeastType.INT64: LongType(), + PrimitiveFeastType.FLOAT64: DoubleType(), + PrimitiveFeastType.FLOAT32: FloatType(), + PrimitiveFeastType.BOOL: BooleanType(), + PrimitiveFeastType.UNIX_TIMESTAMP: TimestampType(), + PrimitiveFeastType.MAP: MapType(StringType(), StringType()), + PrimitiveFeastType.JSON: StringType(), + } + return mapping.get(feast_type) + + if isinstance(feast_type, Array): + base_type = feast_type.base_type + if isinstance(base_type, Struct): + inner = from_feast_to_spark_type(base_type) + return ArrayType(inner) if inner else None + if isinstance(base_type, PrimitiveFeastType): + if base_type == PrimitiveFeastType.MAP: + return ArrayType(MapType(StringType(), StringType())) + inner = from_feast_to_spark_type(base_type) + return ArrayType(inner) if inner else None + + if isinstance(feast_type, Set): + inner = from_feast_to_spark_type(feast_type.base_type) + return ArrayType(inner) if inner else None + + return None + + +def _spark_types_compatible(expected: SparkDataType, actual: SparkDataType) -> bool: + """Check if two Spark types are compatible for validation purposes. + + Exact match is always compatible. Beyond that, we allow common + representations that arise from different data source encodings. 
+ """ + if expected == actual: + return True + + # Map ↔ Struct: data sources may encode maps as structs or vice versa + if isinstance(expected, MapType) and isinstance(actual, (MapType, StructType)): + return True + if isinstance(expected, StructType) and isinstance(actual, (StructType, MapType)): + return True + + # Json (StringType) is always compatible with StringType + if isinstance(expected, StringType) and isinstance(actual, StringType): + return True + + # Integer widening: IntegerType ↔ LongType + if isinstance(expected, (IntegerType, LongType)) and isinstance( + actual, (IntegerType, LongType) + ): + return True + + # Float widening: FloatType ↔ DoubleType + if isinstance(expected, (FloatType, DoubleType)) and isinstance( + actual, (FloatType, DoubleType) + ): + return True + + # Array compatibility: compare element types + if isinstance(expected, ArrayType) and isinstance(actual, ArrayType): + return _spark_types_compatible(expected.elementType, actual.elementType) + + return False + + ENTITY_TS_ALIAS = "__entity_event_timestamp" @@ -196,7 +311,7 @@ def _execute_tiled_aggregation(self, input_df: DataFrame) -> DAGValue: expected_columns = entity_keys + [self.timestamp_col] for time_window, window_aggs in aggs_by_window.items(): for agg in window_aggs: - feature_name = f"{agg.function}_{agg.column}_{int(time_window.total_seconds())}s" + feature_name = agg.resolved_name(time_window) if feature_name not in expected_columns: expected_columns.append(feature_name) @@ -257,11 +372,7 @@ def _execute_standard_aggregation(self, input_df: DataFrame) -> DAGValue: agg_exprs = [] for agg in self.aggregations: func = getattr(F, agg.function) - expr = func(agg.column).alias( - f"{agg.function}_{agg.column}_{int(agg.time_window.total_seconds())}s" - if agg.time_window - else f"{agg.function}_{agg.column}" - ) + expr = func(agg.column).alias(agg.resolved_name(agg.time_window)) agg_exprs.append(expr) if any(agg.time_window for agg in self.aggregations): @@ -510,3 +621,107 
@@ def execute(self, context: ExecutionContext) -> DAGValue: return DAGValue( data=transformed_df, format=DAGFormat.SPARK, metadata={"transformed": True} ) + + +class SparkValidationNode(DAGNode): + """ + Spark node for validating feature data against the declared schema. + + Checks that all expected columns are present in the Spark DataFrame, + validates column types using native Spark types, and checks JSON + well-formedness for Json columns. + """ + + def __init__( + self, + name: str, + expected_columns: Dict[str, Optional[SparkDataType]], + json_columns: Optional[Set[str]] = None, + inputs: Optional[List[DAGNode]] = None, + ): + super().__init__(name, inputs=inputs) + self.expected_columns = expected_columns + self.json_columns = json_columns or set() + + def execute(self, context: ExecutionContext) -> DAGValue: + input_value = self.get_single_input_value(context) + input_value.assert_format(DAGFormat.SPARK) + spark_df: DataFrame = input_value.data + + if not self.expected_columns: + context.node_outputs[self.name] = input_value + return input_value + + self._validate_schema(spark_df) + + logger.debug("[Validation: %s] Schema validation passed.", self.name) + context.node_outputs[self.name] = input_value + return input_value + + def _validate_schema(self, spark_df: DataFrame): + """Validate the Spark DataFrame against the expected schema. + + Checks for missing columns, type mismatches using native Spark types, + and JSON well-formedness for declared Json columns. + """ + actual_columns = set(spark_df.columns) + expected_names = set(self.expected_columns.keys()) + + missing = expected_names - actual_columns + if missing: + raise ValueError( + f"[Validation: {self.name}] Missing expected columns: {missing}. 
" + f"Actual columns: {sorted(actual_columns)}" + ) + + # Type validation using native Spark types + schema = spark_df.schema + for col_name, expected_type in self.expected_columns.items(): + if expected_type is None: + continue + try: + actual_field = schema[col_name] + except (KeyError, IndexError): + continue + actual_type = actual_field.dataType + if not _spark_types_compatible(expected_type, actual_type): + logger.warning( + "[Validation: %s] Column '%s' type mismatch: expected %s, got %s", + self.name, + col_name, + expected_type.simpleString(), + actual_type.simpleString(), + ) + + # Validate JSON well-formedness for declared Json columns + if self.json_columns: + sample_rows = spark_df.limit(1000).collect() + for col_name in self.json_columns: + if col_name not in actual_columns: + continue + + invalid_count = 0 + first_error = None + first_error_row = None + + for i, row in enumerate(sample_rows): + value = row[col_name] + if value is None: + continue + if not isinstance(value, str): + continue + try: + json.loads(value) + except (json.JSONDecodeError, TypeError) as e: + invalid_count += 1 + if first_error is None: + first_error = str(e) + first_error_row = i + + if invalid_count > 0: + raise ValueError( + f"[Validation: {self.name}] Column '{col_name}' declared as " + f"Json contains {invalid_count} invalid JSON value(s) in " + f"sampled rows. First error at row {first_error_row}: " + f"{first_error}" + ) diff --git a/sdk/python/feast/infra/feature_servers/base_config.py b/sdk/python/feast/infra/feature_servers/base_config.py index b13e23d035e..d6b650ced15 100644 --- a/sdk/python/feast/infra/feature_servers/base_config.py +++ b/sdk/python/feast/infra/feature_servers/base_config.py @@ -37,12 +37,57 @@ class FeatureLoggingConfig(FeastConfigBaseModel): """Timeout for adding new log item to the queue.""" +class MetricsConfig(FeastConfigBaseModel): + """Prometheus metrics configuration. 
+ + Follows the same pattern as ``FeatureLoggingConfig``: a single + ``enabled`` flag controls global on/off, and per-category booleans + allow fine-grained suppression. Can also be enabled at runtime via + the ``feast serve --metrics`` CLI flag — either option is sufficient. + """ + + enabled: StrictBool = False + """Whether Prometheus metrics collection and the metrics HTTP server + (default port 8000) should be enabled.""" + + resource: StrictBool = True + """Emit CPU and memory usage gauges (feast_feature_server_cpu_usage, + feast_feature_server_memory_usage).""" + + request: StrictBool = True + """Emit per-endpoint request counters and latency histograms + (feast_feature_server_request_total, + feast_feature_server_request_latency_seconds).""" + + online_features: StrictBool = True + """Emit online feature retrieval metrics + (feast_online_features_request_total, + feast_online_features_entity_count).""" + + push: StrictBool = True + """Emit push/write request counters + (feast_push_request_total).""" + + materialization: StrictBool = True + """Emit materialization success/failure counters and duration histograms + (feast_materialization_total, + feast_materialization_duration_seconds).""" + + freshness: StrictBool = True + """Emit per-feature-view freshness gauges + (feast_feature_freshness_seconds).""" + + class BaseFeatureServerConfig(FeastConfigBaseModel): """Base Feature Server config that should be extended""" enabled: StrictBool = False """Whether the feature server should be launched.""" + metrics: Optional[MetricsConfig] = None + """Prometheus metrics configuration. 
Set ``metrics.enabled: true`` or + pass the ``feast serve --metrics`` CLI flag to activate.""" + feature_logging: Optional[FeatureLoggingConfig] = None """ Feature logging configuration """ diff --git a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile index 352f1db9318..311f2ea6413 100644 --- a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile +++ b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile @@ -1,6 +1,7 @@ -FROM registry.access.redhat.com/ubi9/python-311:1 +FROM registry.access.redhat.com/ubi9/python-312:1 COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv +ENV UV_CACHE_DIR=/tmp/uv-cache COPY requirements.txt requirements.txt RUN uv pip install -r requirements.txt diff --git a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev index 2e163ccbaf4..fc416727ee2 100644 --- a/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev +++ b/sdk/python/feast/infra/feature_servers/multicloud/Dockerfile.dev @@ -1,4 +1,4 @@ -FROM registry.access.redhat.com/ubi9/python-311:1 +FROM registry.access.redhat.com/ubi9/python-312:1 USER 0 RUN npm install -g yarn yalc && rm -rf .npm @@ -27,7 +27,8 @@ RUN yalc add @feast-dev/feast-ui && \ WORKDIR ${APP_ROOT}/src COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv -RUN uv pip install --require-hashes --no-deps -r sdk/python/requirements/py3.11-minimal-requirements.txt +ENV UV_CACHE_DIR=/tmp/uv-cache +RUN uv pip install --require-hashes --no-deps -r sdk/python/requirements/py3.12-minimal-requirements.txt RUN uv pip install --no-deps -e .[minimal] # modify permissions to support running with a random uid diff --git a/sdk/python/feast/infra/feature_servers/multicloud/offline/Dockerfile.binary.release b/sdk/python/feast/infra/feature_servers/multicloud/offline/Dockerfile.binary.release index d490a4ae5bc..0fb4f88999b 100644 --- 
a/sdk/python/feast/infra/feature_servers/multicloud/offline/Dockerfile.binary.release +++ b/sdk/python/feast/infra/feature_servers/multicloud/offline/Dockerfile.binary.release @@ -1,4 +1,4 @@ -FROM registry.access.redhat.com/ubi9/python-311:1 +FROM registry.access.redhat.com/ubi9/python-312:1 COPY requirements.txt requirements.txt RUN source /tmp/hermeto.env && \ diff --git a/sdk/python/feast/infra/feature_servers/multicloud/offline/Dockerfile.builder.yarn b/sdk/python/feast/infra/feature_servers/multicloud/offline/Dockerfile.builder.yarn index 1cac1bc4d90..805f4b3e225 100644 --- a/sdk/python/feast/infra/feature_servers/multicloud/offline/Dockerfile.builder.yarn +++ b/sdk/python/feast/infra/feature_servers/multicloud/offline/Dockerfile.builder.yarn @@ -1,4 +1,4 @@ -FROM registry.access.redhat.com/ubi9/python-311:1 +FROM registry.access.redhat.com/ubi9/python-312:1 USER 0 RUN npm install -g yarn yalc && rm -rf .npm diff --git a/sdk/python/feast/infra/feature_servers/multicloud/offline/Dockerfile.builder.yum b/sdk/python/feast/infra/feature_servers/multicloud/offline/Dockerfile.builder.yum index 0a19017f1d4..cfca9ccd5aa 100644 --- a/sdk/python/feast/infra/feature_servers/multicloud/offline/Dockerfile.builder.yum +++ b/sdk/python/feast/infra/feature_servers/multicloud/offline/Dockerfile.builder.yum @@ -1,4 +1,4 @@ -FROM registry.access.redhat.com/ubi9/python-311:1 +FROM registry.access.redhat.com/ubi9/python-312:1 ARG RELEASE ENV IBIS_VERSION="9.5.0" diff --git a/sdk/python/feast/infra/feature_servers/multicloud/offline/Dockerfile.sdist.release b/sdk/python/feast/infra/feature_servers/multicloud/offline/Dockerfile.sdist.release index 946b6407626..8fd6f85c6a1 100644 --- a/sdk/python/feast/infra/feature_servers/multicloud/offline/Dockerfile.sdist.release +++ b/sdk/python/feast/infra/feature_servers/multicloud/offline/Dockerfile.sdist.release @@ -1,4 +1,4 @@ -FROM registry.access.redhat.com/ubi9/python-311:1 +FROM registry.access.redhat.com/ubi9/python-312:1 ENV 
APACHE_ARROW_VERSION="17.0.0" ENV MILVUS_LITE_VERSION="2.4.12" diff --git a/sdk/python/feast/infra/feature_servers/multicloud/offline/offline-binary-build.sh b/sdk/python/feast/infra/feature_servers/multicloud/offline/offline-binary-build.sh index 8d8ca4ba5f1..7f2820057b0 100755 --- a/sdk/python/feast/infra/feature_servers/multicloud/offline/offline-binary-build.sh +++ b/sdk/python/feast/infra/feature_servers/multicloud/offline/offline-binary-build.sh @@ -44,9 +44,9 @@ hermeto fetch-deps \ "sdk/python/feast/infra/feature_servers/multicloud/requirements.txt" ], "requirements_build_files": [ -"sdk/python/requirements/py3.11-minimal-requirements.txt", -"sdk/python/requirements/py3.11-minimal-sdist-requirements.txt", -"sdk/python/requirements/py3.11-minimal-sdist-requirements-build.txt" +"sdk/python/requirements/py3.12-minimal-requirements.txt", +"sdk/python/requirements/py3.12-minimal-sdist-requirements.txt", +"sdk/python/requirements/py3.12-minimal-sdist-requirements-build.txt" ], "allow_binary": "true" }' diff --git a/sdk/python/feast/infra/feature_servers/multicloud/offline/offline-binary-release-build.sh b/sdk/python/feast/infra/feature_servers/multicloud/offline/offline-binary-release-build.sh index 8462cef705b..4571324972a 100755 --- a/sdk/python/feast/infra/feature_servers/multicloud/offline/offline-binary-release-build.sh +++ b/sdk/python/feast/infra/feature_servers/multicloud/offline/offline-binary-release-build.sh @@ -19,9 +19,9 @@ hermeto fetch-deps \ "sdk/python/feast/infra/feature_servers/multicloud/requirements.txt" ], "requirements_build_files": [ -"sdk/python/requirements/py3.11-minimal-requirements.txt", -"sdk/python/requirements/py3.11-minimal-sdist-requirements.txt", -"sdk/python/requirements/py3.11-minimal-sdist-requirements-build.txt" +"sdk/python/requirements/py3.12-minimal-requirements.txt", +"sdk/python/requirements/py3.12-minimal-sdist-requirements.txt", +"sdk/python/requirements/py3.12-minimal-sdist-requirements-build.txt" ], "allow_binary": 
"true" }' diff --git a/sdk/python/feast/infra/feature_servers/multicloud/offline/offline-build.sh b/sdk/python/feast/infra/feature_servers/multicloud/offline/offline-build.sh index c84774f497c..f66035e75c1 100755 --- a/sdk/python/feast/infra/feature_servers/multicloud/offline/offline-build.sh +++ b/sdk/python/feast/infra/feature_servers/multicloud/offline/offline-build.sh @@ -55,8 +55,8 @@ hermeto fetch-deps \ "sdk/python/feast/infra/feature_servers/multicloud/requirements.txt" ], "requirements_build_files": [ -"sdk/python/requirements/py3.11-minimal-sdist-requirements.txt", -"sdk/python/requirements/py3.11-minimal-sdist-requirements-build.txt" +"sdk/python/requirements/py3.12-minimal-sdist-requirements.txt", +"sdk/python/requirements/py3.12-minimal-sdist-requirements-build.txt" ], "allow_binary": "false" }' diff --git a/sdk/python/feast/infra/feature_servers/multicloud/offline/offline-release-build.sh b/sdk/python/feast/infra/feature_servers/multicloud/offline/offline-release-build.sh index 49df0eae600..0ea10d2f24e 100755 --- a/sdk/python/feast/infra/feature_servers/multicloud/offline/offline-release-build.sh +++ b/sdk/python/feast/infra/feature_servers/multicloud/offline/offline-release-build.sh @@ -30,8 +30,8 @@ hermeto fetch-deps \ "sdk/python/feast/infra/feature_servers/multicloud/requirements.txt" ], "requirements_build_files": [ -"sdk/python/requirements/py3.11-minimal-sdist-requirements.txt", -"sdk/python/requirements/py3.11-minimal-sdist-requirements-build.txt" +"sdk/python/requirements/py3.12-minimal-sdist-requirements.txt", +"sdk/python/requirements/py3.12-minimal-sdist-requirements-build.txt" ], "allow_binary": "false" }' diff --git a/sdk/python/feast/infra/feature_servers/multicloud/requirements.txt b/sdk/python/feast/infra/feature_servers/multicloud/requirements.txt index 634f73c78df..50874eddeb5 100644 --- a/sdk/python/feast/infra/feature_servers/multicloud/requirements.txt +++ b/sdk/python/feast/infra/feature_servers/multicloud/requirements.txt @@ 
-1,2 +1,2 @@ # keep VERSION on line #2, this is critical to release CI -feast[minimal] == 0.60.0 +feast[minimal] == 0.61.0 diff --git a/sdk/python/feast/infra/key_encoding_utils.py b/sdk/python/feast/infra/key_encoding_utils.py index 3e9ba70d3ba..10a9934ad6a 100644 --- a/sdk/python/feast/infra/key_encoding_utils.py +++ b/sdk/python/feast/infra/key_encoding_utils.py @@ -57,15 +57,20 @@ def serialize_entity_key_prefix( This encoding is a partial implementation of serialize_entity_key, only operating on the keys of entities, and not the values. """ - sorted_keys = sorted(entity_keys) + # Fast path optimization for single entity + if len(entity_keys) == 1: + sorted_keys = [entity_keys[0]] + else: + sorted_keys = sorted(entity_keys) output: List[bytes] = [] if entity_key_serialization_version > 2: output.append(struct.pack(" 2: - output.append(struct.pack(" 2: output.append(struct.pack(" 2: - output.append(struct.pack(" 2: + output.append(struct.pack(" List[SchemaField] bq_schema = [] for field in arrow_schema: - if pyarrow.types.is_list(field.type): + if pyarrow.types.is_struct(field.type) or pyarrow.types.is_map(field.type): + detected_mode = "NULLABLE" + detected_type = "STRING" + elif pyarrow.types.is_list(field.type): detected_mode = "REPEATED" - detected_type = _ARROW_SCALAR_IDS_TO_BQ[field.type.value_type.id] + detected_type = _ARROW_SCALAR_IDS_TO_BQ.get( + field.type.value_type.id, "STRING" + ) else: detected_mode = "NULLABLE" - detected_type = _ARROW_SCALAR_IDS_TO_BQ[field.type.id] + detected_type = _ARROW_SCALAR_IDS_TO_BQ.get(field.type.id, "STRING") bq_schema.append( SchemaField(name=field.name, field_type=detected_type, mode=detected_mode) diff --git a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py index b43c874ddc3..08dbbbf978d 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py 
+++ b/sdk/python/feast/infra/offline_stores/contrib/athena_offline_store/tests/data_source.py @@ -16,7 +16,7 @@ ) from feast.infra.utils import aws_utils from feast.repo_config import FeastConfigBaseModel -from tests.integration.feature_repos.universal.data_source_creator import ( +from tests.universal.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/athena_repo_configuration.py b/sdk/python/feast/infra/offline_stores/contrib/athena_repo_configuration.py index 09bc6ce961c..9fa7472af62 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/athena_repo_configuration.py +++ b/sdk/python/feast/infra/offline_stores/contrib/athena_repo_configuration.py @@ -1,7 +1,7 @@ from feast.infra.offline_stores.contrib.athena_offline_store.tests.data_source import ( AthenaDataSourceCreator, ) -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/clickhouse_offline_store/clickhouse.py b/sdk/python/feast/infra/offline_stores/contrib/clickhouse_offline_store/clickhouse.py index 5e8cf3d9053..723869c6bd1 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/clickhouse_offline_store/clickhouse.py +++ b/sdk/python/feast/infra/offline_stores/contrib/clickhouse_offline_store/clickhouse.py @@ -31,6 +31,7 @@ from feast.infra.utils.clickhouse.clickhouse_config import ClickhouseConfig from feast.infra.utils.clickhouse.connection_utils import get_client from feast.saved_dataset import SavedDatasetStorage +from feast.utils import _utc_now, make_tzaware class ClickhouseOfflineStoreConfig(ClickhouseConfig): @@ -43,15 +44,26 @@ def get_historical_features( config: RepoConfig, feature_views: List[FeatureView], feature_refs: List[str], - entity_df: Union[pd.DataFrame, str], + entity_df: Optional[Union[pd.DataFrame, str]], 
registry: BaseRegistry, project: str, full_feature_names: bool = False, + **kwargs, ) -> RetrievalJob: assert isinstance(config.offline_store, ClickhouseOfflineStoreConfig) for fv in feature_views: assert isinstance(fv.batch_source, ClickhouseSource) + # Handle non-entity retrieval mode + if entity_df is None: + end_date = kwargs.get("end_date", None) + if end_date is None: + end_date = _utc_now() + else: + end_date = make_tzaware(end_date) + + entity_df = pd.DataFrame({"event_timestamp": [end_date]}) + entity_schema = _get_entity_schema(entity_df, config) entity_df_event_timestamp_col = ( diff --git a/sdk/python/feast/infra/offline_stores/contrib/clickhouse_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/clickhouse_offline_store/tests/data_source.py index 4234c46eb3f..4c6068fd6bd 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/clickhouse_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/clickhouse_offline_store/tests/data_source.py @@ -17,7 +17,7 @@ ) from feast.infra.utils.clickhouse.clickhouse_config import ClickhouseConfig from feast.infra.utils.clickhouse.connection_utils import get_client -from tests.integration.feature_repos.universal.data_source_creator import ( +from tests.universal.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) @@ -118,6 +118,18 @@ def teardown(self): pass +def _make_offline_store_config(clickhouse_container): + """Build a ClickhouseOfflineStoreConfig pointing at the test container.""" + return ClickhouseOfflineStoreConfig( + type="clickhouse", + host=clickhouse_container.get_container_host_ip(), + port=clickhouse_container.get_exposed_port(8123), + database=CLICKHOUSE_OFFLINE_DB, + user=CLICKHOUSE_USER, + password=CLICKHOUSE_PASSWORD, + ) + + def test_get_client_with_additional_params(clickhouse_container): """ Test that get_client works with a real ClickHouse container and properly passes @@ -142,3 +154,71 @@ def 
test_get_client_with_additional_params(clickhouse_container): # Verify the send_receive_timeout was applied assert client.timeout._read == 60 + + +def test_non_entity_retrieval(clickhouse_container): + """Integration test: get_historical_features with entity_df=None returns real data.""" + from datetime import datetime, timedelta, timezone + from unittest.mock import MagicMock + + from feast.feature_view import FeatureView, Field + from feast.infra.offline_stores.contrib.clickhouse_offline_store.clickhouse import ( + ClickhouseOfflineStore, + df_to_clickhouse_table, + ) + from feast.repo_config import RepoConfig + from feast.types import Float32 + + offline_config = _make_offline_store_config(clickhouse_container) + repo_config = RepoConfig( + project="test_project", + registry="test_registry", + provider="local", + offline_store=offline_config, + ) + + # Seed a feature table with real data + now = datetime.now(tz=timezone.utc) + feature_df = pd.DataFrame( + { + "event_timestamp": [now - timedelta(hours=2), now - timedelta(hours=1)], + "feature_value": [1.0, 2.0], + } + ) + table_name = "test_non_entity_features" + client = get_client(offline_config) + client.command(f"DROP TABLE IF EXISTS {table_name}") + df_to_clickhouse_table(offline_config, feature_df, table_name, "event_timestamp") + + source = ClickhouseSource( + name=table_name, + table=table_name, + timestamp_field="event_timestamp", + ) + fv = FeatureView( + name="test_fv", + entities=[], + ttl=timedelta(days=1), + source=source, + schema=[Field(name="feature_value", dtype=Float32)], + ) + + registry = MagicMock() + registry.list_on_demand_feature_views.return_value = [] + + job = ClickhouseOfflineStore.get_historical_features( + config=repo_config, + feature_views=[fv], + feature_refs=["test_fv:feature_value"], + entity_df=None, + registry=registry, + project="test_project", + end_date=now, + ) + + result_df = job.to_df() + assert len(result_df) > 0 + assert "feature_value" in result_df.columns + + # 
Cleanup + client.command(f"DROP TABLE IF EXISTS {table_name}") diff --git a/sdk/python/feast/infra/offline_stores/contrib/clickhouse_repo_configuration.py b/sdk/python/feast/infra/offline_stores/contrib/clickhouse_repo_configuration.py index 5c9d4461b16..6874bc9a3fc 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/clickhouse_repo_configuration.py +++ b/sdk/python/feast/infra/offline_stores/contrib/clickhouse_repo_configuration.py @@ -1,8 +1,8 @@ from feast.infra.offline_stores.contrib.clickhouse_offline_store.tests.data_source import ( ClickhouseDataSourceCreator, ) -from tests.integration.feature_repos.repo_configuration import REDIS_CONFIG -from tests.integration.feature_repos.universal.online_store.redis import ( +from tests.universal.feature_repos.repo_configuration import REDIS_CONFIG +from tests.universal.feature_repos.universal.online_store.redis import ( RedisOnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/couchbase_columnar_repo_configuration.py b/sdk/python/feast/infra/offline_stores/contrib/couchbase_columnar_repo_configuration.py index 745a074a757..3ce308a00cf 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/couchbase_columnar_repo_configuration.py +++ b/sdk/python/feast/infra/offline_stores/contrib/couchbase_columnar_repo_configuration.py @@ -1,11 +1,11 @@ from feast.infra.offline_stores.contrib.couchbase_offline_store.tests.data_source import ( CouchbaseColumnarDataSourceCreator, ) -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.repo_configuration import REDIS_CONFIG -from tests.integration.feature_repos.universal.online_store.redis import ( +from tests.universal.feature_repos.repo_configuration import REDIS_CONFIG +from tests.universal.feature_repos.universal.online_store.redis import ( RedisOnlineStoreCreator, ) diff --git 
a/sdk/python/feast/infra/offline_stores/contrib/couchbase_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/couchbase_offline_store/tests/data_source.py index c23a8301a76..c9491accd59 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/couchbase_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/couchbase_offline_store/tests/data_source.py @@ -23,7 +23,7 @@ ) from feast.infra.utils.couchbase.couchbase_utils import normalize_timestamp from feast.repo_config import FeastConfigBaseModel -from tests.integration.feature_repos.universal.data_source_creator import ( +from tests.universal.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py index 9c87b8d7520..a2c89c11056 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/mssql_offline_store/tests/data_source.py @@ -15,7 +15,7 @@ MsSqlServerSource, ) from feast.saved_dataset import SavedDatasetStorage -from tests.integration.feature_repos.universal.data_source_creator import ( +from tests.universal.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/mssql_repo_configuration.py b/sdk/python/feast/infra/offline_stores/contrib/mssql_repo_configuration.py index 50d636ba909..e64693b7494 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/mssql_repo_configuration.py +++ b/sdk/python/feast/infra/offline_stores/contrib/mssql_repo_configuration.py @@ -1,8 +1,8 @@ from feast.infra.offline_stores.contrib.mssql_offline_store.tests.data_source import ( MsSqlDataSourceCreator, ) -from tests.integration.feature_repos.repo_configuration import REDIS_CONFIG -from 
tests.integration.feature_repos.universal.online_store.redis import ( +from tests.universal.feature_repos.repo_configuration import REDIS_CONFIG +from tests.universal.feature_repos.universal.online_store.redis import ( RedisOnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py index c94b04329e0..a545d0434a9 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py @@ -14,10 +14,10 @@ ) from feast.infra.utils.postgres.connection_utils import df_to_postgres_table from feast.infra.utils.postgres.postgres_config import PostgreSQLConfig -from tests.integration.feature_repos.universal.data_source_creator import ( +from tests.universal.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) -from tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_repo_configuration.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_repo_configuration.py index 2fa08bf47ad..4595fb89836 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/postgres_repo_configuration.py +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_repo_configuration.py @@ -1,8 +1,8 @@ from feast.infra.offline_stores.contrib.postgres_offline_store.tests.data_source import ( PostgreSQLDataSourceCreator, ) -from tests.integration.feature_repos.repo_configuration import REDIS_CONFIG -from tests.integration.feature_repos.universal.online_store.redis import ( +from tests.universal.feature_repos.repo_configuration import REDIS_CONFIG +from tests.universal.feature_repos.universal.online_store.redis 
import ( RedisOnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/ray_offline_store/ray.py b/sdk/python/feast/infra/offline_stores/contrib/ray_offline_store/ray.py index bc7c60733b4..e4dbd67666d 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/ray_offline_store/ray.py +++ b/sdk/python/feast/infra/offline_stores/contrib/ray_offline_store/ray.py @@ -2168,7 +2168,7 @@ def get_historical_features( # Build reverse field mapping to get actual source column names reverse_field_mapping = {} - if fv.batch_source.field_mapping: + if fv.batch_source is not None and fv.batch_source.field_mapping: reverse_field_mapping = { v: k for k, v in fv.batch_source.field_mapping.items() } diff --git a/sdk/python/feast/infra/offline_stores/contrib/ray_offline_store/tests/test_ray_integration.py b/sdk/python/feast/infra/offline_stores/contrib/ray_offline_store/tests/test_ray_integration.py deleted file mode 100644 index 0420054b8fb..00000000000 --- a/sdk/python/feast/infra/offline_stores/contrib/ray_offline_store/tests/test_ray_integration.py +++ /dev/null @@ -1,271 +0,0 @@ -from datetime import timedelta - -import pandas as pd -import pytest - -from feast.utils import _utc_now -from tests.integration.feature_repos.repo_configuration import ( - construct_universal_feature_views, -) -from tests.integration.feature_repos.universal.entities import driver - - -@pytest.mark.integration -@pytest.mark.universal_offline_stores -def test_ray_offline_store_basic_write_and_read(environment, universal_data_sources): - """Test basic write and read functionality with Ray offline store.""" - store = environment.feature_store - _, _, data_sources = universal_data_sources - feature_views = construct_universal_feature_views(data_sources) - driver_fv = feature_views.driver - store.apply([driver(), driver_fv]) - - now = _utc_now() - ts = pd.Timestamp(now).round("ms") - - # Write data to offline store - df_to_write = pd.DataFrame.from_dict( - { - "event_timestamp": [ts, 
ts], - "driver_id": [1001, 1002], - "conv_rate": [0.1, 0.2], - "acc_rate": [0.9, 0.8], - "avg_daily_trips": [10, 20], - "created": [ts, ts], - }, - ) - - store.write_to_offline_store( - driver_fv.name, df_to_write, allow_registry_cache=False - ) - - # Read data back - entity_df = pd.DataFrame({"driver_id": [1001, 1002], "event_timestamp": [ts, ts]}) - - result_df = store.get_historical_features( - entity_df=entity_df, - features=[ - "driver_stats:conv_rate", - "driver_stats:acc_rate", - "driver_stats:avg_daily_trips", - ], - full_feature_names=False, - ).to_df() - - assert len(result_df) == 2 - assert "conv_rate" in result_df.columns - assert "acc_rate" in result_df.columns - assert "avg_daily_trips" in result_df.columns - - -@pytest.mark.integration -@pytest.mark.universal_offline_stores -@pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: f"full:{v}") -def test_ray_offline_store_historical_features( - environment, universal_data_sources, full_feature_names -): - """Test historical features retrieval with Ray offline store.""" - store = environment.feature_store - - (entities, datasets, data_sources) = universal_data_sources - feature_views = construct_universal_feature_views(data_sources) - - entity_df_with_request_data = datasets.entity_df.copy(deep=True) - entity_df_with_request_data["val_to_add"] = [ - i for i in range(len(entity_df_with_request_data)) - ] - - store.apply( - [ - driver(), - *feature_views.values(), - ] - ) - - job = store.get_historical_features( - entity_df=entity_df_with_request_data, - features=[ - "driver_stats:conv_rate", - "driver_stats:avg_daily_trips", - "customer_profile:current_balance", - "conv_rate_plus_100:conv_rate_plus_100", - ], - full_feature_names=full_feature_names, - ) - - # Test DataFrame conversion - result_df = job.to_df() - assert len(result_df) > 0 - assert "event_timestamp" in result_df.columns - - # Test Arrow conversion - result_table = job.to_arrow().to_pandas() - assert len(result_table) > 
0 - assert "event_timestamp" in result_table.columns - - -@pytest.mark.integration -@pytest.mark.universal_offline_stores -def test_ray_offline_store_persist(environment, universal_data_sources): - """Test dataset persistence with Ray offline store.""" - store = environment.feature_store - - (entities, datasets, data_sources) = universal_data_sources - feature_views = construct_universal_feature_views(data_sources) - - entity_df_with_request_data = datasets.entity_df.copy(deep=True) - entity_df_with_request_data["val_to_add"] = [ - i for i in range(len(entity_df_with_request_data)) - ] - - store.apply( - [ - driver(), - *feature_views.values(), - ] - ) - - job = store.get_historical_features( - entity_df=entity_df_with_request_data, - features=[ - "driver_stats:conv_rate", - "customer_profile:current_balance", - ], - full_feature_names=False, - ) - - # Test persisting the dataset - from feast.saved_dataset import SavedDatasetFileStorage - - storage = SavedDatasetFileStorage(path="data/test_saved_dataset.parquet") - saved_path = job.persist(storage, allow_overwrite=True) - - assert saved_path == "data/test_saved_dataset.parquet" - - # Verify the saved dataset exists - import os - - assert os.path.exists(saved_path) - - -@pytest.mark.integration -@pytest.mark.universal_offline_stores -def test_ray_offline_store_non_entity_mode_basic(environment, universal_data_sources): - """Test historical features retrieval without entity_df (non-entity mode). - - This tests the basic functionality where entity_df=None and start_date/end_date - are provided to retrieve all features within the time range. 
- """ - store = environment.feature_store - - (entities, datasets, data_sources) = universal_data_sources - feature_views = construct_universal_feature_views(data_sources) - - store.apply( - [ - driver(), - feature_views.driver, - ] - ) - - # Use the environment's start and end dates for the query - start_date = environment.start_date - end_date = environment.end_date - - # Non-entity mode: entity_df=None with start_date and end_date - result_df = store.get_historical_features( - entity_df=None, - features=[ - "driver_stats:conv_rate", - "driver_stats:acc_rate", - "driver_stats:avg_daily_trips", - ], - full_feature_names=False, - start_date=start_date, - end_date=end_date, - ).to_df() - - # Verify data was retrieved - assert len(result_df) > 0, "Non-entity mode should return data" - assert "conv_rate" in result_df.columns - assert "acc_rate" in result_df.columns - assert "avg_daily_trips" in result_df.columns - assert "event_timestamp" in result_df.columns - assert "driver_id" in result_df.columns - - # Verify timestamps are within the requested range - result_df["event_timestamp"] = pd.to_datetime( - result_df["event_timestamp"], utc=True - ) - assert (result_df["event_timestamp"] >= start_date).all() - assert (result_df["event_timestamp"] <= end_date).all() - - -@pytest.mark.integration -@pytest.mark.universal_offline_stores -def test_ray_offline_store_non_entity_mode_preserves_multiple_timestamps( - environment, universal_data_sources -): - """Test that non-entity mode preserves multiple transactions per entity ID. - - This is a regression test for the fix that ensures distinct (entity_key, event_timestamp) - combinations are preserved, not just distinct entity keys. This is critical for - proper point-in-time joins when an entity has multiple transactions. 
- """ - store = environment.feature_store - - (entities, datasets, data_sources) = universal_data_sources - feature_views = construct_universal_feature_views(data_sources) - - store.apply( - [ - driver(), - feature_views.driver, - ] - ) - - now = _utc_now() - ts1 = pd.Timestamp(now - timedelta(hours=2)).round("ms") - ts2 = pd.Timestamp(now - timedelta(hours=1)).round("ms") - ts3 = pd.Timestamp(now).round("ms") - - # Write data with multiple timestamps for the same entity (driver_id=9001) - df_to_write = pd.DataFrame.from_dict( - { - "event_timestamp": [ts1, ts2, ts3], - "driver_id": [9001, 9001, 9001], # Same entity, different timestamps - "conv_rate": [0.1, 0.2, 0.3], - "acc_rate": [0.9, 0.8, 0.7], - "avg_daily_trips": [10, 20, 30], - "created": [ts1, ts2, ts3], - }, - ) - - store.write_to_offline_store( - feature_views.driver.name, df_to_write, allow_registry_cache=False - ) - - # Query without entity_df - should get all 3 rows for driver_id=9001 - result_df = store.get_historical_features( - entity_df=None, - features=[ - "driver_stats:conv_rate", - "driver_stats:acc_rate", - ], - full_feature_names=False, - start_date=ts1 - timedelta(minutes=1), - end_date=ts3 + timedelta(minutes=1), - ).to_df() - - # Filter to just our test entity - result_df = result_df[result_df["driver_id"] == 9001] - - # Verify we got all 3 rows with different timestamps (not just 1 row) - assert len(result_df) == 3, ( - f"Expected 3 rows for driver_id=9001 (one per timestamp), got {len(result_df)}" - ) - - # Verify the feature values are correct for each timestamp - result_df = result_df.sort_values("event_timestamp").reset_index(drop=True) - assert list(result_df["conv_rate"]) == [0.1, 0.2, 0.3] - assert list(result_df["acc_rate"]) == [0.9, 0.8, 0.7] diff --git a/sdk/python/feast/infra/offline_stores/contrib/ray_repo_configuration.py b/sdk/python/feast/infra/offline_stores/contrib/ray_repo_configuration.py index 6e1fa66b102..fcab38a0eb9 100644 --- 
a/sdk/python/feast/infra/offline_stores/contrib/ray_repo_configuration.py +++ b/sdk/python/feast/infra/offline_stores/contrib/ray_repo_configuration.py @@ -15,10 +15,10 @@ ) from feast.repo_config import FeastConfigBaseModel from feast.saved_dataset import SavedDatasetStorage -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.universal.data_source_creator import ( +from tests.universal.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py index b723037f1f3..e25ebd4e1df 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py @@ -17,7 +17,7 @@ SavedDatasetSparkStorage, SparkSource, ) -from tests.integration.feature_repos.universal.data_source_creator import ( +from tests.universal.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_repo_configuration.py b/sdk/python/feast/infra/offline_stores/contrib/spark_repo_configuration.py index ec414f202ae..aaf56d51981 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_repo_configuration.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_repo_configuration.py @@ -1,8 +1,8 @@ from feast.infra.offline_stores.contrib.spark_offline_store.tests.data_source import ( SparkDataSourceCreator, ) -from tests.integration.feature_repos.repo_configuration import REDIS_CONFIG -from tests.integration.feature_repos.universal.online_store.redis import ( +from tests.universal.feature_repos.repo_configuration import REDIS_CONFIG +from 
tests.universal.feature_repos.universal.online_store.redis import ( RedisOnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py index a31d368ea11..d658d0d0eb9 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py @@ -1,7 +1,7 @@ from feast.infra.offline_stores.contrib.trino_offline_store.tests.data_source import ( TrinoSourceCreator, ) -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py index c8fc15a6350..9a297a32e1e 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py @@ -20,7 +20,7 @@ TrinoSource, ) from feast.repo_config import FeastConfigBaseModel -from tests.integration.feature_repos.universal.data_source_creator import ( +from tests.universal.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py index 62f94f54073..33190bd4635 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py @@ -116,7 +116,7 @@ def to_trino_auth(self): model_cls = CLASSES_BY_AUTH_TYPE[auth_type]["auth_model"] model = 
model_cls(**self.config) - return trino_auth_cls(**model.dict()) + return trino_auth_cls(**model.model_dump()) class TrinoOfflineStoreConfig(FeastConfigBaseModel): diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_type_map.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_type_map.py index e5afa3f3ab3..a11298e9b81 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_type_map.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_type_map.py @@ -69,6 +69,15 @@ def pa_to_trino_value_type(pa_type_as_str: str) -> str: if pa_type_as_str.startswith("decimal"): return trino_type.format(pa_type_as_str) + if pa_type_as_str.startswith("map<"): + return trino_type.format("varchar") + + if pa_type_as_str == "large_string": + return trino_type.format("varchar") + + if pa_type_as_str.startswith("struct<"): + return trino_type.format("varchar") + type_map = { "null": "null", "bool": "boolean", diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_repo_configuration.py b/sdk/python/feast/infra/offline_stores/contrib/trino_repo_configuration.py index 198227095d5..64a5507876c 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_repo_configuration.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_repo_configuration.py @@ -1,8 +1,8 @@ from feast.infra.offline_stores.contrib.trino_offline_store.tests.data_source import ( TrinoSourceCreator, ) -from tests.integration.feature_repos.repo_configuration import REDIS_CONFIG -from tests.integration.feature_repos.universal.online_store.redis import ( +from tests.universal.feature_repos.repo_configuration import REDIS_CONFIG +from tests.universal.feature_repos.universal.online_store.redis import ( RedisOnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/offline_stores/dask.py b/sdk/python/feast/infra/offline_stores/dask.py index 809fbf4091d..ddb1efa9262 100644 --- 
a/sdk/python/feast/infra/offline_stores/dask.py +++ b/sdk/python/feast/infra/offline_stores/dask.py @@ -656,7 +656,10 @@ def _field_mapping( full_feature_names: bool, ) -> Tuple[dd.DataFrame, str]: # Rename columns by the field mapping dictionary if it exists - if feature_view.batch_source.field_mapping: + if ( + feature_view.batch_source is not None + and feature_view.batch_source.field_mapping + ): df_to_join = _run_dask_field_mapping( df_to_join, feature_view.batch_source.field_mapping ) diff --git a/sdk/python/feast/infra/offline_stores/file_source.py b/sdk/python/feast/infra/offline_stores/file_source.py index 02d40ad770b..76460a73e5c 100644 --- a/sdk/python/feast/infra/offline_stores/file_source.py +++ b/sdk/python/feast/infra/offline_stores/file_source.py @@ -1,3 +1,4 @@ +import logging from pathlib import Path from typing import Callable, Dict, Iterable, List, Optional, Tuple, Union from urllib.parse import urlparse @@ -24,6 +25,8 @@ from feast.saved_dataset import SavedDatasetStorage from feast.value_type import ValueType +logger = logging.getLogger(__name__) + @typechecked class FileSource(DataSource): @@ -151,8 +154,43 @@ def _to_proto_impl(self) -> DataSourceProto: return data_source_proto def validate(self, config: RepoConfig): - # TODO: validate a FileSource - pass + """Validate that the file source exists and is readable. + + Checks that the path resolves to an existing Parquet or Delta file + and that the declared timestamp column is present in the schema. 
+ """ + from feast.infra.offline_stores.file_source import FileSource + + uri = self.path + repo_path = config.repo_path if hasattr(config, "repo_path") else None + resolved = FileSource.get_uri_for_file_path(repo_path, uri) + + try: + filesystem, path = FileSystem.from_uri(resolved) + file_info = filesystem.get_file_info(path) + if file_info.type == pyarrow.fs.FileType.NotFound: + raise FileNotFoundError(f"FileSource path does not exist: {resolved}") + except Exception as e: + logger.warning("Could not validate FileSource path '%s': %s", resolved, e) + return + + try: + if isinstance(self.file_options.file_format, DeltaFormat): + return + pq_dataset = ParquetDataset(path, filesystem=filesystem) + schema = pq_dataset.schema + if self.timestamp_field and self.timestamp_field not in schema.names: + logger.warning( + "Timestamp field '%s' not found in FileSource schema at '%s'. " + "Available columns: %s", + self.timestamp_field, + resolved, + schema.names, + ) + except Exception as e: + logger.warning( + "Could not read schema from FileSource '%s': %s", resolved, e + ) @staticmethod def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: diff --git a/sdk/python/feast/infra/offline_stores/hybrid_offline_store.py b/sdk/python/feast/infra/offline_stores/hybrid_offline_store.py index b37877276c9..a52f560952a 100644 --- a/sdk/python/feast/infra/offline_stores/hybrid_offline_store.py +++ b/sdk/python/feast/infra/offline_stores/hybrid_offline_store.py @@ -72,6 +72,8 @@ def _get_offline_store_for_feature_view( self, feature_view: FeatureView, config: RepoConfig ) -> OfflineStore: self._initialize_offline_stores(config) + if feature_view.batch_source is None: + raise ValueError(f"Feature view '{feature_view.name}' has no batch_source.") source_type = feature_view.batch_source.source_type() store_key = self.get_source_key_from_type(source_type) if store_key is None: diff --git a/sdk/python/feast/infra/offline_stores/ibis.py 
b/sdk/python/feast/infra/offline_stores/ibis.py index 9d8891036fe..e25463d0081 100644 --- a/sdk/python/feast/infra/offline_stores/ibis.py +++ b/sdk/python/feast/infra/offline_stores/ibis.py @@ -174,6 +174,10 @@ def get_historical_features_ibis( def read_fv( feature_view: FeatureView, feature_refs: List[str], full_feature_names: bool ) -> Tuple: + if feature_view.batch_source is None: + raise ValueError( + f"Feature view '{feature_view.name}' has no batch_source and cannot be queried." + ) fv_table: Table = data_source_reader( feature_view.batch_source, str(config.repo_path) ) @@ -335,6 +339,8 @@ def offline_write_batch_ibis( progress: Optional[Callable[[int], Any]], data_source_writer: Callable[[pyarrow.Table, DataSource, str], None], ): + if feature_view.batch_source is None: + raise ValueError(f"Feature view '{feature_view.name}' has no batch_source.") pa_schema, column_names = get_pyarrow_schema_from_batch_source( config, feature_view.batch_source ) diff --git a/sdk/python/feast/infra/offline_stores/offline_utils.py b/sdk/python/feast/infra/offline_stores/offline_utils.py index abd7ad4fe35..0c478adb2c4 100644 --- a/sdk/python/feast/infra/offline_stores/offline_utils.py +++ b/sdk/python/feast/infra/offline_stores/offline_utils.py @@ -118,6 +118,10 @@ def get_feature_view_query_context( query_context = [] for feature_view, features in feature_views_to_feature_map.items(): + if feature_view.batch_source is None: + raise ValueError( + f"Feature view '{feature_view.name}' has no batch_source and cannot be queried." + ) reverse_field_mapping = { v: k for k, v in feature_view.batch_source.field_mapping.items() } @@ -260,6 +264,37 @@ def get_pyarrow_schema_from_batch_source( return pa.schema(pa_schema), column_names +def cast_arrow_table_to_schema(table: pa.Table, pa_schema: pa.Schema) -> pa.Table: + """Cast a PyArrow table to match the target schema, handling struct/map → string. + + PyArrow cannot natively cast struct or map columns to string. 
When a + SQL-based offline store (BigQuery, Snowflake, Redshift) stores complex + Feast types (Map, Struct) as VARCHAR/STRING, the target schema will have + string fields while the input table may have struct/map fields (e.g. when + the caller provides Python dicts). This function serialises those columns + to JSON strings so the subsequent cast succeeds. + """ + import json as _json + + for i, field in enumerate(table.schema): + target_type = pa_schema.field(field.name).type + is_complex_source = pa.types.is_struct(field.type) or pa.types.is_map( + field.type + ) + is_string_target = pa.types.is_string(target_type) or pa.types.is_large_string( + target_type + ) + if is_complex_source and is_string_target: + col = table.column(i) + json_arr = pa.array( + [_json.dumps(v.as_py()) if v.is_valid else None for v in col], + type=target_type, + ) + table = table.set_column(i, field.name, json_arr) + + return table.cast(pa_schema) + + def enclose_in_backticks(value): # Check if the input is a list if isinstance(value, list): diff --git a/sdk/python/feast/infra/offline_stores/redshift.py b/sdk/python/feast/infra/offline_stores/redshift.py index 4ed8e6309c4..900dfcfab80 100644 --- a/sdk/python/feast/infra/offline_stores/redshift.py +++ b/sdk/python/feast/infra/offline_stores/redshift.py @@ -353,7 +353,7 @@ def offline_write_batch( ) if table.schema != pa_schema: - table = table.cast(pa_schema) + table = offline_utils.cast_arrow_table_to_schema(table, pa_schema) redshift_options = feature_view.batch_source.redshift_options redshift_client = aws_utils.get_redshift_data_client( diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index 1140d8562b2..7226c908d13 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -409,7 +409,7 @@ def offline_write_batch( ) if table.schema != pa_schema: - table = table.cast(pa_schema) + table = 
offline_utils.cast_arrow_table_to_schema(table, pa_schema) with GetSnowflakeConnection(config.offline_store) as conn: snowflake_conn = conn diff --git a/sdk/python/feast/infra/online_stores/cassandra_online_store/cassandra_repo_configuration.py b/sdk/python/feast/infra/online_stores/cassandra_online_store/cassandra_repo_configuration.py index a1d619646f7..d206395fcf5 100644 --- a/sdk/python/feast/infra/online_stores/cassandra_online_store/cassandra_repo_configuration.py +++ b/sdk/python/feast/infra/online_stores/cassandra_online_store/cassandra_repo_configuration.py @@ -14,10 +14,10 @@ # limitations under the License. # -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.universal.online_store.cassandra import ( +from tests.universal.feature_repos.universal.online_store.cassandra import ( CassandraOnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/online_stores/couchbase_online_store/couchbase_repo_configuration.py b/sdk/python/feast/infra/online_stores/couchbase_online_store/couchbase_repo_configuration.py index e099e6ae1b5..6637c4555ad 100644 --- a/sdk/python/feast/infra/online_stores/couchbase_online_store/couchbase_repo_configuration.py +++ b/sdk/python/feast/infra/online_stores/couchbase_online_store/couchbase_repo_configuration.py @@ -1,7 +1,7 @@ -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.universal.online_store.couchbase import ( +from tests.universal.feature_repos.universal.online_store.couchbase import ( CouchbaseOnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/online_stores/dynamodb.py b/sdk/python/feast/infra/online_stores/dynamodb.py index 0353e2c2d72..814058c77e5 100644 --- 
a/sdk/python/feast/infra/online_stores/dynamodb.py +++ b/sdk/python/feast/infra/online_stores/dynamodb.py @@ -16,6 +16,7 @@ import itertools import logging from collections import OrderedDict, defaultdict +from concurrent.futures import ThreadPoolExecutor from datetime import datetime from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple, Union @@ -76,6 +77,10 @@ class DynamoDBOnlineStoreConfig(FeastConfigBaseModel): session_based_auth: bool = False """AWS session based client authentication""" + max_read_workers: int = 10 + """Maximum number of parallel threads for batch read operations. + Higher values improve throughput for large batch reads but increase resource usage.""" + max_pool_connections: int = 50 """Max number of connections for async Dynamodb operations. Increase for high-throughput workloads.""" @@ -479,33 +484,71 @@ def online_read( online_config.endpoint_url, online_config.session_based_auth, ) - table_instance = dynamodb_resource.Table( - _get_table_name(online_config, config, table) - ) + table_name = _get_table_name(online_config, config, table) batch_size = online_config.batch_size entity_ids = self._to_entity_ids(config, entity_keys) - entity_ids_iter = iter(entity_ids) - result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = [] + # Split entity_ids into batches upfront + batches: List[List[str]] = [] + entity_ids_iter = iter(entity_ids) while True: batch = list(itertools.islice(entity_ids_iter, batch_size)) - - # No more items to insert - if len(batch) == 0: + if not batch: break + batches.append(batch) + + if not batches: + return [] + + # For single batch, no parallelization overhead needed + if len(batches) == 1: batch_entity_ids = self._to_resource_batch_get_payload( - online_config, table_instance.name, batch + online_config, table_name, batches[0] ) - response = dynamodb_resource.batch_get_item( - RequestItems=batch_entity_ids, + response = 
dynamodb_resource.batch_get_item(RequestItems=batch_entity_ids) + return self._process_batch_get_response(table_name, response, batches[0]) + + # Execute batch requests in parallel for multiple batches + # Note: boto3 clients ARE thread-safe, so we can share a single client + # https://docs.aws.amazon.com/boto3/latest/guide/clients.html#multithreading-or-multiprocessing-with-clients + dynamodb_client = self._get_dynamodb_client( + online_config.region, + online_config.endpoint_url, + online_config.session_based_auth, + ) + + def fetch_batch(batch: List[str]) -> Dict[str, Any]: + batch_entity_ids = self._to_client_batch_get_payload( + online_config, table_name, batch ) + return dynamodb_client.batch_get_item(RequestItems=batch_entity_ids) + + # Use ThreadPoolExecutor for parallel I/O + max_workers = min(len(batches), online_config.max_read_workers) + with ThreadPoolExecutor(max_workers=max_workers) as executor: + responses = list(executor.map(fetch_batch, batches)) + + # Process responses and merge results in order + # Client responses need deserialization (unlike resource responses) + if self._type_deserializer is None: + self._type_deserializer = TypeDeserializer() + deserialize = self._type_deserializer.deserialize + + def to_tbl_resp(raw_client_response): + return { + "entity_id": deserialize(raw_client_response["entity_id"]), + "event_ts": deserialize(raw_client_response["event_ts"]), + "values": deserialize(raw_client_response["values"]), + } + + result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = [] + for batch, response in zip(batches, responses): batch_result = self._process_batch_get_response( - table_instance.name, - response, - batch, + table_name, response, batch, to_tbl_response=to_tbl_resp ) result.extend(batch_result) + return result async def online_read_async( @@ -945,6 +988,12 @@ def _extract_list_values(self, value_proto: ValueProto) -> list: return list(value_proto.bool_list_val.val) elif 
value_proto.HasField("bytes_list_val"): return list(value_proto.bytes_list_val.val) + elif value_proto.HasField("map_list_val"): + return list(value_proto.map_list_val.val) + elif value_proto.HasField("json_list_val"): + return list(value_proto.json_list_val.val) + elif value_proto.HasField("struct_list_val"): + return list(value_proto.struct_list_val.val) return [] def _set_list_values( @@ -965,6 +1014,12 @@ def _set_list_values( result.bool_list_val.val.extend(values) elif template.HasField("bytes_list_val"): result.bytes_list_val.val.extend(values) + elif template.HasField("map_list_val"): + result.map_list_val.val.extend(values) + elif template.HasField("json_list_val"): + result.json_list_val.val.extend(values) + elif template.HasField("struct_list_val"): + result.struct_list_val.val.extend(values) async def _update_item_with_expression_async( self, diff --git a/sdk/python/feast/infra/online_stores/elasticsearch_online_store/elasticsearch_repo_configuration.py b/sdk/python/feast/infra/online_stores/elasticsearch_online_store/elasticsearch_repo_configuration.py index 4d1f2c3ca18..46e8ba7742e 100644 --- a/sdk/python/feast/infra/online_stores/elasticsearch_online_store/elasticsearch_repo_configuration.py +++ b/sdk/python/feast/infra/online_stores/elasticsearch_online_store/elasticsearch_repo_configuration.py @@ -1,7 +1,7 @@ -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.universal.online_store.elasticsearch import ( +from tests.universal.feature_repos.universal.online_store.elasticsearch import ( ElasticSearchOnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/online_stores/hazelcast_online_store/hazelcast_repo_configuration.py b/sdk/python/feast/infra/online_stores/hazelcast_online_store/hazelcast_repo_configuration.py index 5b3ea6e307b..6157f362120 100644 --- 
a/sdk/python/feast/infra/online_stores/hazelcast_online_store/hazelcast_repo_configuration.py +++ b/sdk/python/feast/infra/online_stores/hazelcast_online_store/hazelcast_repo_configuration.py @@ -14,10 +14,10 @@ # limitations under the License. # -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.universal.online_store.hazelcast import ( +from tests.universal.feature_repos.universal.online_store.hazelcast import ( HazelcastOnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/online_stores/hbase_online_store/hbase_repo_configuration.py b/sdk/python/feast/infra/online_stores/hbase_online_store/hbase_repo_configuration.py index 4e32a654b55..d6089c0e3b4 100644 --- a/sdk/python/feast/infra/online_stores/hbase_online_store/hbase_repo_configuration.py +++ b/sdk/python/feast/infra/online_stores/hbase_online_store/hbase_repo_configuration.py @@ -1,7 +1,7 @@ -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.universal.online_store.hbase import ( +from tests.universal.feature_repos.universal.online_store.hbase import ( HbaseOnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/online_stores/hybrid_online_store/hybrid_online_store_repo_configuration.py b/sdk/python/feast/infra/online_stores/hybrid_online_store/hybrid_online_store_repo_configuration.py index 90a65a092d0..3d016ce7d5e 100644 --- a/sdk/python/feast/infra/online_stores/hybrid_online_store/hybrid_online_store_repo_configuration.py +++ b/sdk/python/feast/infra/online_stores/hybrid_online_store/hybrid_online_store_repo_configuration.py @@ -16,10 +16,10 @@ # It enables running integration tests with multiple online store backends. 
# Update this file if you add more backends or change test setup. -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.universal.online_store.hybrid_online_store import ( +from tests.universal.feature_repos.universal.online_store.hybrid_online_store import ( HybridOnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/online_stores/ikv_online_store/ikv.py b/sdk/python/feast/infra/online_stores/ikv_online_store/ikv.py deleted file mode 100644 index c8f0ad65c99..00000000000 --- a/sdk/python/feast/infra/online_stores/ikv_online_store/ikv.py +++ /dev/null @@ -1,311 +0,0 @@ -from datetime import datetime, timezone -from typing import ( - Any, - Callable, - Dict, - Iterator, - List, - Literal, - Optional, - Sequence, - Tuple, -) - -from google.protobuf.timestamp_pb2 import Timestamp -from ikvpy.client import IKVReader, IKVWriter -from ikvpy.clientoptions import ClientOptions, ClientOptionsBuilder -from ikvpy.document import IKVDocument, IKVDocumentBuilder -from ikvpy.factory import create_new_reader, create_new_writer -from pydantic import StrictStr - -from feast import Entity, FeatureView, utils -from feast.infra.online_stores.helpers import compute_entity_id -from feast.infra.online_stores.online_store import OnlineStore -from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto -from feast.protos.feast.types.Value_pb2 import Value as ValueProto -from feast.repo_config import FeastConfigBaseModel, RepoConfig - -PRIMARY_KEY_FIELD_NAME: str = "_entity_key" -EVENT_CREATION_TIMESTAMP_FIELD_NAME: str = "_event_timestamp" -CREATION_TIMESTAMP_FIELD_NAME: str = "_created_timestamp" - - -class IKVOnlineStoreConfig(FeastConfigBaseModel): - """Online store config for IKV store""" - - type: Literal["ikv"] = "ikv" - """Online store type selector""" - - account_id: StrictStr - """(Required) IKV 
account id""" - - account_passkey: StrictStr - """(Required) IKV account passkey""" - - store_name: StrictStr - """(Required) IKV store name""" - - mount_directory: Optional[StrictStr] = None - """(Required only for reader) IKV mount point i.e. directory for storing IKV data locally.""" - - -class IKVOnlineStore(OnlineStore): - """ - IKV (inlined.io key value) store implementation of the online store interface. - """ - - # lazy initialization - _reader: Optional[IKVReader] = None - _writer: Optional[IKVWriter] = None - - def online_write_batch( - self, - config: RepoConfig, - table: FeatureView, - data: List[ - Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] - ], - progress: Optional[Callable[[int], Any]], - ) -> None: - """ - Writes a batch of feature rows to the online store. - - If a tz-naive timestamp is passed to this method, it is assumed to be UTC. - - Args: - config: The config for the current feature store. - table: Feature view to which these feature rows correspond. - data: A list of quadruplets containing feature data. Each quadruplet contains an entity - key, a dict containing feature values, an event timestamp for the row, and the created - timestamp for the row if it exists. - progress: Function to be called once a batch of rows is written to the online store, used - to show progress. 
- """ - self._init_writer(config=config) - assert self._writer is not None - - for entity_key, features, event_timestamp, _ in data: - entity_id: str = compute_entity_id( - entity_key, - entity_key_serialization_version=config.entity_key_serialization_version, - ) - document: IKVDocument = IKVOnlineStore._create_document( - entity_id, table, features, event_timestamp - ) - self._writer.upsert_fields(document) - if progress: - progress(1) - - def online_read( - self, - config: RepoConfig, - table: FeatureView, - entity_keys: List[EntityKeyProto], - requested_features: Optional[List[str]] = None, - ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: - """ - Reads features values for the given entity keys. - - Args: - config: The config for the current feature store. - table: The feature view whose feature values should be read. - entity_keys: The list of entity keys for which feature values should be read. - requested_features: The list of features that should be read. - - Returns: - A list of the same length as entity_keys. Each item in the list is a tuple where the first - item is the event timestamp for the row, and the second item is a dict mapping feature names - to values, which are returned in proto format. 
- """ - self._init_reader(config=config) - - if not len(entity_keys): - return [] - - # create IKV primary keys - primary_keys = [ - compute_entity_id(ek, config.entity_key_serialization_version) - for ek in entity_keys - ] - - # create IKV field names - if requested_features is None: - requested_features = [] - - field_names: List[Optional[str]] = [None] * (1 + len(requested_features)) - field_names[0] = EVENT_CREATION_TIMESTAMP_FIELD_NAME - for i, fn in enumerate(requested_features): - field_names[i + 1] = IKVOnlineStore._create_ikv_field_name(table, fn) - - assert self._reader is not None - value_iter = self._reader.multiget_bytes_values( - bytes_primary_keys=[], - str_primary_keys=primary_keys, - field_names=field_names, - ) - - # decode results - return [ - IKVOnlineStore._decode_fields_for_primary_key( - requested_features, value_iter - ) - for _ in range(0, len(primary_keys)) - ] - - @staticmethod - def _decode_fields_for_primary_key( - requested_features: List[str], value_iter: Iterator[Optional[bytes]] - ) -> Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]: - # decode timestamp - dt: Optional[datetime] = None - dt_bytes = next(value_iter) - if dt_bytes: - proto_timestamp = Timestamp() - proto_timestamp.ParseFromString(dt_bytes) - dt = datetime.fromtimestamp(proto_timestamp.seconds, tz=timezone.utc) - - # decode other features - features = {} - for requested_feature in requested_features: - value_proto_bytes: Optional[bytes] = next(value_iter) - if value_proto_bytes: - value_proto = ValueProto() - value_proto.ParseFromString(value_proto_bytes) - features[requested_feature] = value_proto - - return dt, features - - def update( - self, - config: RepoConfig, - tables_to_delete: Sequence[FeatureView], - tables_to_keep: Sequence[FeatureView], - entities_to_delete: Sequence[Entity], - entities_to_keep: Sequence[Entity], - partial: bool, - ): - """ - Reconciles cloud resources with the specified set of Feast objects. 
- - Args: - config: The config for the current feature store. - tables_to_delete: Feature views whose corresponding infrastructure should be deleted. - tables_to_keep: Feature views whose corresponding infrastructure should not be deleted, and - may need to be updated. - entities_to_delete: Entities whose corresponding infrastructure should be deleted. - entities_to_keep: Entities whose corresponding infrastructure should not be deleted, and - may need to be updated. - partial: If true, tables_to_delete and tables_to_keep are not exhaustive lists, so - infrastructure corresponding to other feature views should be not be touched. - """ - self._init_writer(config=config) - assert self._writer is not None - - # note: we assume tables_to_keep does not overlap with tables_to_delete - - for feature_view in tables_to_delete: - # each field in an IKV document is prefixed by the feature-view's name - self._writer.drop_fields_by_name_prefix([feature_view.name]) - - def teardown( - self, - config: RepoConfig, - tables: Sequence[FeatureView], - entities: Sequence[Entity], - ): - """ - Tears down all cloud resources for the specified set of Feast objects. - - Args: - config: The config for the current feature store. - tables: Feature views whose corresponding infrastructure should be deleted. - entities: Entities whose corresponding infrastructure should be deleted. 
- """ - self._init_writer(config=config) - assert self._writer is not None - - # drop fields corresponding to this feature-view - for feature_view in tables: - self._writer.drop_fields_by_name_prefix([feature_view.name]) - - # shutdown clients - self._writer.shutdown() - self._writer = None - - if self._reader is not None: - self._reader.shutdown() - self._reader = None - - @staticmethod - def _create_ikv_field_name(feature_view: FeatureView, feature_name: str) -> str: - return "{}_{}".format(feature_view.name, feature_name) - - @staticmethod - def _create_document( - entity_id: str, - feature_view: FeatureView, - values: Dict[str, ValueProto], - event_timestamp: datetime, - ) -> IKVDocument: - """Converts feast key-value pairs into an IKV document.""" - - # initialie builder by inserting primary key and row creation timestamp - event_timestamp_seconds = int(utils.make_tzaware(event_timestamp).timestamp()) - event_timestamp_seconds_proto = Timestamp() - event_timestamp_seconds_proto.seconds = event_timestamp_seconds - - # event_timestamp_str: str = utils.make_tzaware(event_timestamp).isoformat() - builder = ( - IKVDocumentBuilder() - .put_string_field(PRIMARY_KEY_FIELD_NAME, entity_id) - .put_bytes_field( - EVENT_CREATION_TIMESTAMP_FIELD_NAME, - event_timestamp_seconds_proto.SerializeToString(), - ) - ) - - for feature_name, feature_value in values.items(): - field_name = IKVOnlineStore._create_ikv_field_name( - feature_view, feature_name - ) - builder.put_bytes_field(field_name, feature_value.SerializeToString()) - - return builder.build() - - def _init_writer(self, config: RepoConfig): - """Initializes ikv writer client.""" - # initialize writer - if self._writer is None: - online_config = config.online_store - assert isinstance(online_config, IKVOnlineStoreConfig) - client_options = IKVOnlineStore._config_to_client_options(online_config) - - self._writer = create_new_writer(client_options) - self._writer.startup() # blocking operation - - def _init_reader(self, 
config: RepoConfig): - """Initializes ikv reader client.""" - # initialize reader - if self._reader is None: - online_config = config.online_store - assert isinstance(online_config, IKVOnlineStoreConfig) - client_options = IKVOnlineStore._config_to_client_options(online_config) - - if online_config.mount_directory and len(online_config.mount_directory) > 0: - self._reader = create_new_reader(client_options) - self._reader.startup() # blocking operation - - @staticmethod - def _config_to_client_options(config: IKVOnlineStoreConfig) -> ClientOptions: - """Utility for IKVOnlineStoreConfig to IKV ClientOptions conversion.""" - builder = ( - ClientOptionsBuilder() - .with_account_id(config.account_id) - .with_account_passkey(config.account_passkey) - .with_store_name(config.store_name) - ) - - if config.mount_directory and len(config.mount_directory) > 0: - builder = builder.with_mount_directory(config.mount_directory) - - return builder.build() diff --git a/sdk/python/feast/infra/online_stores/milvus_online_store/milvus.py b/sdk/python/feast/infra/online_stores/milvus_online_store/milvus.py index 42a8f359107..fb812f82b7b 100644 --- a/sdk/python/feast/infra/online_stores/milvus_online_store/milvus.py +++ b/sdk/python/feast/infra/online_stores/milvus_online_store/milvus.py @@ -56,6 +56,12 @@ PROTO_VALUE_TO_VALUE_TYPE_MAP["int64_list_val"]: DataType.FLOAT_VECTOR, PROTO_VALUE_TO_VALUE_TYPE_MAP["double_list_val"]: DataType.FLOAT_VECTOR, PROTO_VALUE_TO_VALUE_TYPE_MAP["bool_list_val"]: DataType.BINARY_VECTOR, + PROTO_VALUE_TO_VALUE_TYPE_MAP["map_val"]: DataType.VARCHAR, + PROTO_VALUE_TO_VALUE_TYPE_MAP["map_list_val"]: DataType.VARCHAR, + PROTO_VALUE_TO_VALUE_TYPE_MAP["json_val"]: DataType.VARCHAR, + PROTO_VALUE_TO_VALUE_TYPE_MAP["json_list_val"]: DataType.VARCHAR, + PROTO_VALUE_TO_VALUE_TYPE_MAP["struct_val"]: DataType.VARCHAR, + PROTO_VALUE_TO_VALUE_TYPE_MAP["struct_list_val"]: DataType.VARCHAR, } FEAST_PRIMITIVE_TO_MILVUS_TYPE_MAPPING: Dict[ @@ -81,6 +87,10 @@ 
FEAST_PRIMITIVE_TO_MILVUS_TYPE_MAPPING[feast_type] = DataType.VARCHAR elif base_value_type == ValueType.BOOL: FEAST_PRIMITIVE_TO_MILVUS_TYPE_MAPPING[feast_type] = DataType.BINARY_VECTOR + elif isinstance(feast_type, ComplexFeastType): + milvus_type = PROTO_TO_MILVUS_TYPE_MAPPING.get(value_type) + if milvus_type: + FEAST_PRIMITIVE_TO_MILVUS_TYPE_MAPPING[feast_type] = milvus_type class MilvusOnlineStoreConfig(FeastConfigBaseModel, VectorStoreConfig): @@ -173,6 +183,8 @@ def _get_or_create_collection( fields_to_add = [f for f in table.schema if f.name not in fields_to_exclude] for field in fields_to_add: dtype = FEAST_PRIMITIVE_TO_MILVUS_TYPE_MAPPING.get(field.dtype) + if dtype is None and isinstance(field.dtype, ComplexFeastType): + dtype = DataType.VARCHAR if dtype: if dtype == DataType.FLOAT_VECTOR: fields.append( @@ -433,6 +445,19 @@ def online_read( "double_list_val", ]: getattr(val, proto_attr).val.extend(field_value) + elif proto_attr in [ + "map_val", + "map_list_val", + "struct_val", + "struct_list_val", + "json_list_val", + ]: + if isinstance(field_value, str) and field_value: + try: + proto_bytes = base64.b64decode(field_value) + val.ParseFromString(proto_bytes) + except Exception: + setattr(val, "string_val", field_value) else: setattr(val, proto_attr, field_value) else: diff --git a/sdk/python/feast/infra/online_stores/milvus_online_store/milvus_repo_configuration.py b/sdk/python/feast/infra/online_stores/milvus_online_store/milvus_repo_configuration.py index 174c0b53737..5fbabe17898 100644 --- a/sdk/python/feast/infra/online_stores/milvus_online_store/milvus_repo_configuration.py +++ b/sdk/python/feast/infra/online_stores/milvus_online_store/milvus_repo_configuration.py @@ -1,8 +1,8 @@ -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.repo_configuration import MILVUS_CONFIG -from 
tests.integration.feature_repos.universal.online_store.milvus import ( +from tests.universal.feature_repos.repo_configuration import MILVUS_CONFIG +from tests.universal.feature_repos.universal.online_store.milvus import ( MilvusOnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/online_stores/mongodb_online_store/__init__.py b/sdk/python/feast/infra/online_stores/mongodb_online_store/__init__.py new file mode 100644 index 00000000000..84b2dfd763e --- /dev/null +++ b/sdk/python/feast/infra/online_stores/mongodb_online_store/__init__.py @@ -0,0 +1,3 @@ +from .mongodb import MongoDBOnlineStore, MongoDBOnlineStoreConfig + +__all__ = ["MongoDBOnlineStore", "MongoDBOnlineStoreConfig"] diff --git a/sdk/python/feast/infra/online_stores/mongodb_online_store/mongodb.py b/sdk/python/feast/infra/online_stores/mongodb_online_store/mongodb.py new file mode 100644 index 00000000000..3e7a3db84c8 --- /dev/null +++ b/sdk/python/feast/infra/online_stores/mongodb_online_store/mongodb.py @@ -0,0 +1,498 @@ +from __future__ import annotations + +from datetime import datetime +from logging import getLogger +from typing import Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple + +try: + from pymongo import AsyncMongoClient, MongoClient, UpdateOne + from pymongo.asynchronous.collection import AsyncCollection + from pymongo.collection import Collection + from pymongo.driver_info import DriverInfo +except ImportError as e: + from feast.errors import FeastExtrasDependencyImportError + + raise FeastExtrasDependencyImportError("mongodb", str(e)) + +import feast.version +from feast.entity import Entity +from feast.feature_view import FeatureView +from feast.infra.key_encoding_utils import serialize_entity_key +from feast.infra.online_stores.online_store import OnlineStore +from feast.infra.supported_async_methods import SupportedAsyncMethods +from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto +from feast.protos.feast.types.Value_pb2 import Value as 
ValueProto +from feast.repo_config import FeastConfigBaseModel, RepoConfig +from feast.type_map import ( + feast_value_type_to_python_type, + python_values_to_proto_values, +) + +logger = getLogger(__name__) + +DRIVER_METADATA = DriverInfo(name="Feast", version=feast.version.get_version()) + + +class MongoDBOnlineStoreConfig(FeastConfigBaseModel): + """MongoDB configuration. + + For a description of kwargs that may be passed to MongoClient, + see https://pymongo.readthedocs.io/en/stable/api/pymongo/mongo_client.html + """ + + type: Literal["mongodb"] = "mongodb" + """Online store type selector""" + connection_string: str = "mongodb://localhost:27017" + database_name: str = ( + "features" # todo - consider removing, and using repo_config.project + ) + collection_suffix: str = "latest" + client_kwargs: Dict[str, Any] = {} + + +class MongoDBOnlineStore(OnlineStore): + """ + MongoDB implementation of Feast OnlineStore. + + Schema: + _id: serialized_entity_key (bytes) + features: { .: } + event_timestamps: { "": datetime } + created_timestamp: datetime + + For example: + { + "_id": b"", + "features": { + "driver_stats": { + "rating": 4.91, + "trips_last_7d": 132, + }, + "pricing": { + "surge_multiplier": 1.2 + }, + }, + "event_timestamps": { + "driver_stats": "2026-01-01 12:00:00+00:00", + "pricing": "2026-01-21 12:00:00+00:00" + }, + "created_timestamp": "2026-01-21 12:00:00+00:00" + } + """ + + _client: Optional[MongoClient] = None + _collection: Optional[Collection] = None + _client_async: Optional[AsyncMongoClient] = None + _collection_async: Optional[AsyncCollection] = None + + @staticmethod + def _build_write_ops( + config: RepoConfig, + table: FeatureView, + data: List[ + Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] + ], + ) -> List[UpdateOne]: + """Build the list of UpdateOne upsert operations shared by the sync and async write paths. + + For each row in *data* this method: + + 1. 
Serializes the entity key to bytes using ``serialize_entity_key``. + 2. Converts every ``ValueProto`` feature value to its native Python type + via ``feast_value_type_to_python_type``. + 3. Constructs a ``$set`` update document that writes feature values under + ``features..``, the per-view event + timestamp under ``event_timestamps.``, and the + row-level ``created_timestamp``. + 4. Wraps that in a ``UpdateOne`` with ``upsert=True`` so that existing + entity documents are updated in-place and new ones are created on first + write. + + The caller is responsible for executing the returned operations via + ``collection.bulk_write(ops, ordered=False)`` (sync) or + ``await collection.bulk_write(ops, ordered=False)`` (async). + """ + ops = [] + for entity_key, proto_values, event_timestamp, created_timestamp in data: + entity_id = serialize_entity_key( + entity_key, + entity_key_serialization_version=config.entity_key_serialization_version, + ) + feature_updates = { + f"features.{table.name}.{field}": feast_value_type_to_python_type(val) + for field, val in proto_values.items() + } + update = { + "$set": { + **feature_updates, + f"event_timestamps.{table.name}": event_timestamp, + "created_timestamp": created_timestamp, + }, + } + ops.append( + UpdateOne( + filter={"_id": entity_id}, + update=update, + upsert=True, + ) + ) + return ops + + def online_write_batch( + self, + config: RepoConfig, + table: FeatureView, + data: List[ + Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] + ], + progress: Optional[Callable[[int], Any]] = None, + ) -> None: + """ + Writes a batch of feature values to the online store. 
+ + data: + [ + ( + entity_key_bytes, + { feature_ref: ValueProto }, + event_timestamp, + created_timestamp, + ) + ] + """ + clxn = self._get_collection(config) + ops = self._build_write_ops(config, table, data) + if ops: + clxn.bulk_write(ops, ordered=False) + if progress: + progress(len(data)) + + def online_read( + self, + config: RepoConfig, + table: FeatureView, + entity_keys: List[EntityKeyProto], + requested_features: Optional[List[str]] = None, + ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: + """ + Read features for a batch of entities. + + Args: + config: Feast repo configuration + table: FeatureView to read from + entity_keys: List of entity keys to read + requested_features: Optional list of specific features to read + + Returns: + List of tuples (event_timestamp, feature_dict) for each entity key + """ + clxn = self._get_collection(config) + + ids = [ + serialize_entity_key( + key, + entity_key_serialization_version=config.entity_key_serialization_version, + ) + for key in entity_keys + ] + + query_filter = {"_id": {"$in": ids}} + projection = { + "_id": 1, + f"event_timestamps.{table.name}": 1, + } + if requested_features: + projection.update( + {f"features.{table.name}.{x}": 1 for x in requested_features} + ) + else: + projection[f"features.{table.name}"] = 1 + + cursor = clxn.find(query_filter, projection=projection) + docs = {doc["_id"]: doc for doc in cursor} + + return self._convert_raw_docs_to_proto(ids, docs, table) + + def update( + self, + config: RepoConfig, + tables_to_delete: Sequence[FeatureView], + tables_to_keep: Sequence[FeatureView], + entities_to_delete: Sequence[Entity], + entities_to_keep: Sequence[Entity], + partial: bool, + ): + """Prepare or update online store. + + With MongoDB, we have a loose schema and lazy creation so there is little to do here. + Nothing needs to be pre-created for the entities and tables to keep. + + The OnlineStore is a single Collection with the following Document shape. 
+ { + "_id": "", + "features": { + "": { + "": value + } + } + } + We remove any feature views named in tables_to_delete. + The Entities are serialized in the _id. No schema needs be adjusted. + """ + if not isinstance(config.online_store, MongoDBOnlineStoreConfig): + raise RuntimeError(f"{config.online_store.type = }. It must be mongodb.") + + clxn = self._get_collection(repo_config=config) + + if tables_to_delete: + unset_fields = {} + for fv in tables_to_delete: + unset_fields[f"features.{fv.name}"] = "" + unset_fields[f"event_timestamps.{fv.name}"] = "" + + clxn.update_many({}, {"$unset": unset_fields}) + + # Note: entities_to_delete contains Entity definitions (metadata), not entity instances. + # Like other online stores, we don't need to do anything with entities_to_delete here. + + def teardown( + self, + config: RepoConfig, + tables: Sequence[FeatureView], + entities: Sequence[Entity], + ): + """ + Drop the backing collection and close the client. + + As in update, MongoDB requires very little here. + """ + if not isinstance(config.online_store, MongoDBOnlineStoreConfig): + raise RuntimeError(f"{config.online_store.type = }. 
It must be mongodb.") + clxn = self._get_collection(repo_config=config) + clxn.drop() + if self._client: + self._client.close() + self._client = None + self._collection = None + + async def close(self) -> None: + """Close the async MongoDB client and release its resources.""" + if self._client_async is not None: + await self._client_async.close() + self._client_async = None + self._collection_async = None + + # ------------------------------------------------------------------ + # Helpers + # ------------------------------------------------------------------ + + def _get_client(self, config: RepoConfig): + """Returns a connection to the server.""" + online_store_config = config.online_store + if not isinstance(online_store_config, MongoDBOnlineStoreConfig): + raise ValueError( + f"config.online_store should be MongoDBOnlineStoreConfig, got {online_store_config}" + ) + if self._client is None: + online_config = config.online_store + self._client = MongoClient( + online_config.connection_string, + driver=DRIVER_METADATA, + **online_config.client_kwargs, + ) + return self._client + + def _get_collection(self, repo_config: RepoConfig) -> Collection: + """Returns a connection to the online store collection.""" + if self._collection is None: + self._client = self._get_client(repo_config) + assert self._client is not None + online_config = repo_config.online_store + db = self._client[online_config.database_name] + clxn_name = f"{repo_config.project}_{online_config.collection_suffix}" + self._collection = db[clxn_name] + return self._collection + + async def _get_client_async(self, config: RepoConfig) -> AsyncMongoClient: + """Returns an async MongoDB client.""" + if self._client_async is None: + online_config = config.online_store + if not isinstance(online_config, MongoDBOnlineStoreConfig): + raise ValueError( + f"config.online_store should be MongoDBOnlineStoreConfig, got {online_config}" + ) + self._client_async = AsyncMongoClient( + online_config.connection_string, + 
driver=DRIVER_METADATA, + **online_config.client_kwargs, + ) + return self._client_async + + async def _get_collection_async(self, repo_config: RepoConfig) -> AsyncCollection: + """Returns an async connection to the online store collection.""" + if self._collection_async is None: + self._client_async = await self._get_client_async(repo_config) + assert self._client_async is not None + online_config = repo_config.online_store + db = self._client_async[online_config.database_name] + clxn_name = f"{repo_config.project}_{online_config.collection_suffix}" + self._collection_async = db[clxn_name] + return self._collection_async + + @property + def async_supported(self) -> SupportedAsyncMethods: + """Indicates that this online store supports async operations.""" + return SupportedAsyncMethods(read=True, write=True) + + @staticmethod + def _convert_raw_docs_to_proto( + ids: list[bytes], docs: dict[bytes, Any], table: FeatureView + ) -> List[Tuple[Optional[datetime], Optional[dict[str, ValueProto]]]]: + """Optimized converting values in documents retrieved from MongoDB (BSON) into ValueProto types. + + The conversion itself is done in feast.type_map.python_values_to_proto_values. + The issue we have is that it is column-oriented, expecting a list of proto values with a single type. + MongoDB lookups are row-oriented. Plus, we need to ensure ordering of ids. + So we transform twice to minimize calls to the python/proto converter. + + Luckily, the table, a FeatureView, provides a map from feature name to proto type + so we don't have to infer types for each feature value. + + Args: + ids: sorted list of the serialized entity ids requested. + docs: results of collection find. + table: The FeatureView of the read, providing the types. 
+ Returns: + List of tuples (event_timestamp, feature_dict) for each entity key + """ + feature_type_map = { + feature.name: feature.dtype.to_value_type() for feature in table.features + } + + # Step 1: Extract raw values column-wise (aligned by ordered ids) + # We need to maintain alignment, so we append None for missing features + raw_feature_columns: Dict[str, List[Any]] = { + feature_name: [] for feature_name in feature_type_map + } + + for entity_id in ids: + doc = docs.get(entity_id) + feature_dict = doc.get("features", {}).get(table.name, {}) if doc else {} + + # For each expected feature, append its value or None + for feature_name in feature_type_map: + raw_feature_columns[feature_name].append( + feature_dict.get(feature_name, None) + ) + + # Step 2: Convert per feature + proto_feature_columns = {} + for feature_name, raw_values in raw_feature_columns.items(): + proto_feature_columns[feature_name] = python_values_to_proto_values( + raw_values, + feature_type=feature_type_map[feature_name], + ) + + # Step 3: Reassemble row-wise + results: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = [] + + for i, entity_id in enumerate(ids): + doc = docs.get(entity_id) + + if doc is None: + results.append((None, None)) + continue + + # Entity document exists (written by some other feature view), but + # this specific feature view was never written → treat as not found. 
+ fv_features = doc.get("features", {}).get(table.name) + if fv_features is None: + results.append((None, None)) + continue + + ts = doc.get("event_timestamps", {}).get(table.name) + + row_features = { + feature_name: proto_feature_columns[feature_name][i] + for feature_name in proto_feature_columns + } + + results.append((ts, row_features)) + return results + + async def online_read_async( + self, + config: RepoConfig, + table: FeatureView, + entity_keys: List[EntityKeyProto], + requested_features: Optional[List[str]] = None, + ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: + """ + Asynchronously reads feature values from the online store. + + Args: + config: Feast repo configuration + table: FeatureView to read from + entity_keys: List of entity keys to read + requested_features: Optional list of specific features to read + + Returns: + List of tuples (event_timestamp, feature_dict) for each entity key + """ + clxn = await self._get_collection_async(config) + + # Serialize entity keys + ids = [ + serialize_entity_key( + entity_key, + entity_key_serialization_version=config.entity_key_serialization_version, + ) + for entity_key in entity_keys + ] + + query_filter = {"_id": {"$in": ids}} + projection = { + "_id": 1, + f"event_timestamps.{table.name}": 1, + } + if requested_features: + projection.update( + {f"features.{table.name}.{x}": 1 for x in requested_features} + ) + else: + projection[f"features.{table.name}"] = 1 + + cursor = clxn.find(query_filter, projection=projection) + docs = {doc["_id"]: doc async for doc in cursor} + + # Convert to proto format + return self._convert_raw_docs_to_proto(ids, docs, table) + + async def online_write_batch_async( + self, + config: RepoConfig, + table: FeatureView, + data: List[ + Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] + ], + progress: Optional[Callable[[int], Any]] = None, + ) -> None: + """ + Asynchronously writes a batch of feature values to the online store. 
+ + Args: + config: Feast repo configuration + table: FeatureView to write to + data: List of tuples (entity_key, features, event_ts, created_ts) + progress: Optional progress callback + """ + clxn = await self._get_collection_async(config) + ops = self._build_write_ops(config, table, data) + if ops: + await clxn.bulk_write(ops, ordered=False) + if progress: + progress(len(data)) + + +# TODO +# - Vector Search (requires atlas image in testcontainers or similar) diff --git a/sdk/python/feast/infra/online_stores/mongodb_online_store/mongodb_repo_configuration.py b/sdk/python/feast/infra/online_stores/mongodb_online_store/mongodb_repo_configuration.py new file mode 100644 index 00000000000..c621902ba54 --- /dev/null +++ b/sdk/python/feast/infra/online_stores/mongodb_online_store/mongodb_repo_configuration.py @@ -0,0 +1,13 @@ +from tests.universal.feature_repos.integration_test_repo_config import ( + IntegrationTestRepoConfig, +) +from tests.universal.feature_repos.universal.online_store.mongodb import ( + MongoDBOnlineStoreCreator, +) + +FULL_REPO_CONFIGS = [ + IntegrationTestRepoConfig( + online_store="mongodb", + online_store_creator=MongoDBOnlineStoreCreator, + ), +] diff --git a/sdk/python/feast/infra/online_stores/mysql_online_store/mysql_repo_configuration.py b/sdk/python/feast/infra/online_stores/mysql_online_store/mysql_repo_configuration.py index e5a1c0114c3..2944768097e 100644 --- a/sdk/python/feast/infra/online_stores/mysql_online_store/mysql_repo_configuration.py +++ b/sdk/python/feast/infra/online_stores/mysql_online_store/mysql_repo_configuration.py @@ -1,7 +1,7 @@ -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.universal.online_store.mysql import ( +from tests.universal.feature_repos.universal.online_store.mysql import ( BatchWriteMySQLOnlineStoreCreator, MySQLOnlineStoreCreator, ) diff 
--git a/sdk/python/feast/infra/online_stores/postgres_online_store/pgvector_repo_configuration.py b/sdk/python/feast/infra/online_stores/postgres_online_store/pgvector_repo_configuration.py index 26b05613158..927c424f207 100644 --- a/sdk/python/feast/infra/online_stores/postgres_online_store/pgvector_repo_configuration.py +++ b/sdk/python/feast/infra/online_stores/postgres_online_store/pgvector_repo_configuration.py @@ -1,7 +1,7 @@ -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.universal.online_store.postgres import ( +from tests.universal.feature_repos.universal.online_store.postgres import ( PGVectorOnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/online_stores/postgres_online_store/postgres_repo_configuration.py b/sdk/python/feast/infra/online_stores/postgres_online_store/postgres_repo_configuration.py index ea975ec808f..b28abf955ef 100644 --- a/sdk/python/feast/infra/online_stores/postgres_online_store/postgres_repo_configuration.py +++ b/sdk/python/feast/infra/online_stores/postgres_online_store/postgres_repo_configuration.py @@ -1,7 +1,7 @@ -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.universal.online_store.postgres import ( +from tests.universal.feature_repos.universal.online_store.postgres import ( PostgresOnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/online_stores/qdrant_online_store/qdrant_repo_configuration.py b/sdk/python/feast/infra/online_stores/qdrant_online_store/qdrant_repo_configuration.py index eee77bb8775..0cae0fae62c 100644 --- a/sdk/python/feast/infra/online_stores/qdrant_online_store/qdrant_repo_configuration.py +++ 
b/sdk/python/feast/infra/online_stores/qdrant_online_store/qdrant_repo_configuration.py @@ -1,7 +1,7 @@ -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.universal.online_store.qdrant import ( +from tests.universal.feature_repos.universal.online_store.qdrant import ( QdrantOnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/online_stores/redis.py b/sdk/python/feast/infra/online_stores/redis.py index 9a4e908810d..aeeb540b910 100644 --- a/sdk/python/feast/infra/online_stores/redis.py +++ b/sdk/python/feast/infra/online_stores/redis.py @@ -372,14 +372,11 @@ def _convert_redis_values_to_protobuf( redis_values: List[List[ByteString]], feature_view: str, requested_features: List[str], - ): - result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = [] - for values in redis_values: - features = self._get_features_for_entity( - values, feature_view, requested_features - ) - result.append(features) - return result + ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: + return [ + self._get_features_for_entity(values, feature_view, requested_features) + for values in redis_values + ] def online_read( self, @@ -445,21 +442,21 @@ def _get_features_for_entity( res_val = dict(zip(requested_features, values)) res_ts = Timestamp() - ts_val = res_val.pop(f"_ts:{feature_view}") + ts_key = f"_ts:{feature_view}" + ts_val = res_val.pop(ts_key) if ts_val: - res_ts.ParseFromString(bytes(ts_val)) + res_ts.ParseFromString(ts_val) - res = {} + res: Dict[str, ValueProto] = {} for feature_name, val_bin in res_val.items(): val = ValueProto() if val_bin: - val.ParseFromString(bytes(val_bin)) + val.ParseFromString(val_bin) res[feature_name] = val if not res: return None, None - else: - # reconstruct full timestamp including nanos - total_seconds = res_ts.seconds + res_ts.nanos / 
    @staticmethod
    def _proto_value_to_transport_value(proto_value: ValueProto) -> Any:
        """
        Convert a proto Value to a JSON-serializable Python value suitable for
        HTTP transport. Unlike ``feast_value_type_to_python_type``, this keeps
        ``json_val`` as a raw string so the receiving server can reconstruct a
        DataFrame whose column types match the original (string for JSON, dict
        for Map/Struct). Parsing JSON strings into dicts would cause PyArrow to
        infer a struct column on the server, which can crash with complex nested
        types (lists inside dicts).
        """
        # Which member of the proto Value oneof is populated; None means the
        # Value carries no data at all.
        val_attr = proto_value.WhichOneof("val")
        if val_attr is None:
            return None

        # Keep JSON values as raw strings for correct DataFrame reconstruction.
        # Parsing them into dicts causes PyArrow to infer struct columns on the
        # server whose nested lists round-trip as numpy arrays, breaking
        # json.dumps during proto conversion.
        if val_attr == "json_val":
            return getattr(proto_value, val_attr)
        if val_attr == "json_list_val":
            # The list wrapper message stores items under .val.
            return list(getattr(proto_value, val_attr).val)

        # Map/Struct types are converted to Python dicts by
        # feast_value_type_to_python_type. Serialise them to JSON strings
        # so the server-side DataFrame gets VARCHAR columns instead of
        # PyArrow struct columns that can crash with complex nested types.
        if val_attr in ("map_val", "struct_val"):
            return json.dumps(feast_value_type_to_python_type(proto_value))
        if val_attr in ("map_list_val", "struct_list_val"):
            return [json.dumps(v) for v in feast_value_type_to_python_type(proto_value)]

        # Scalar and plain-list types: the generic converter is already
        # transport-safe.
        return feast_value_type_to_python_type(proto_value)
"features": api_requested_features, - "query": embedding, - "top_k": top_k, - "distance_metric": distance_metric, - } - ) - return req_body + return { + "features": api_requested_features, + "query": embedding, + "top_k": top_k, + "distance_metric": distance_metric, + } def _construct_online_documents_v2_api_json_request( self, @@ -472,23 +502,20 @@ def _construct_online_documents_v2_api_json_request( distance_metric: Optional[str] = None, query_string: Optional[str] = None, api_version: Optional[int] = 2, - ) -> str: + ) -> dict: api_requested_features = [] if requested_features is not None: for requested_feature in requested_features: api_requested_features.append(f"{table.name}:{requested_feature}") - req_body = json.dumps( - { - "features": api_requested_features, - "query": embedding, - "top_k": top_k, - "distance_metric": distance_metric, - "query_string": query_string, - "api_version": api_version, - } - ) - return req_body + return { + "features": api_requested_features, + "query": embedding, + "top_k": top_k, + "distance_metric": distance_metric, + "query_string": query_string, + "api_version": api_version, + } def _get_event_ts(self, response_json) -> datetime: event_ts = "" @@ -574,33 +601,33 @@ async def close(self) -> None: @rest_error_handling_decorator def get_remote_online_features( - session: requests.Session, config: RepoConfig, req_body: str + session: requests.Session, config: RepoConfig, req_body: dict ) -> requests.Response: if config.online_store.cert: return session.post( f"{config.online_store.path}/get-online-features", - data=req_body, + json=req_body, verify=config.online_store.cert, ) else: return session.post( - f"{config.online_store.path}/get-online-features", data=req_body + f"{config.online_store.path}/get-online-features", json=req_body ) @rest_error_handling_decorator def get_remote_online_documents( - session: requests.Session, config: RepoConfig, req_body: str + session: requests.Session, config: RepoConfig, req_body: dict ) -> 
requests.Response: if config.online_store.cert: return session.post( f"{config.online_store.path}/retrieve-online-documents", - data=req_body, + json=req_body, verify=config.online_store.cert, ) else: return session.post( - f"{config.online_store.path}/retrieve-online-documents", data=req_body + f"{config.online_store.path}/retrieve-online-documents", json=req_body ) diff --git a/sdk/python/feast/infra/online_stores/singlestore_online_store/singlestore_repo_configuration.py b/sdk/python/feast/infra/online_stores/singlestore_online_store/singlestore_repo_configuration.py index 2debe0f0ee1..1e5379974c2 100644 --- a/sdk/python/feast/infra/online_stores/singlestore_online_store/singlestore_repo_configuration.py +++ b/sdk/python/feast/infra/online_stores/singlestore_online_store/singlestore_repo_configuration.py @@ -1,7 +1,7 @@ -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.universal.online_store.singlestore import ( +from tests.universal.feature_repos.universal.online_store.singlestore import ( SingleStoreOnlineStoreCreator, ) diff --git a/sdk/python/feast/infra/online_stores/snowflake.py b/sdk/python/feast/infra/online_stores/snowflake.py index f294dcad96b..d2df674ed94 100644 --- a/sdk/python/feast/infra/online_stores/snowflake.py +++ b/sdk/python/feast/infra/online_stores/snowflake.py @@ -1,4 +1,3 @@ -import itertools import os from binascii import hexlify from datetime import datetime @@ -168,20 +167,26 @@ def online_read( requested_features = requested_features if requested_features else [] + # Pre-compute serialized entity keys to avoid redundant serialization + serialized_entity_keys = [ + serialize_entity_key( + entity_key, + entity_key_serialization_version=config.entity_key_serialization_version, + ) + for entity_key in entity_keys + ] + entity_fetch_str = ",".join( [ ( "TO_BINARY(" + hexlify( 
- serialize_entity_key( - combo[0], - entity_key_serialization_version=config.entity_key_serialization_version, - ) - + bytes(combo[1], encoding="utf-8") + serialized_entity_key + bytes(feature, encoding="utf-8") ).__str__()[1:] + ")" ) - for combo in itertools.product(entity_keys, requested_features) + for serialized_entity_key in serialized_entity_keys + for feature in requested_features ] ) @@ -197,11 +202,7 @@ def online_read( """ df = execute_snowflake_statement(conn, query).fetch_pandas_all() - for entity_key in entity_keys: - entity_key_bin = serialize_entity_key( - entity_key, - entity_key_serialization_version=config.entity_key_serialization_version, - ) + for entity_key_bin in serialized_entity_keys: res = {} res_ts = None for index, row in df[df["entity_key"] == entity_key_bin].iterrows(): diff --git a/sdk/python/feast/infra/online_stores/sqlite.py b/sdk/python/feast/infra/online_stores/sqlite.py index 461277631f5..1be4141c650 100644 --- a/sdk/python/feast/infra/online_stores/sqlite.py +++ b/sdk/python/feast/infra/online_stores/sqlite.py @@ -263,11 +263,7 @@ def online_read( rows = { k: list(group) for k, group in itertools.groupby(rows, key=lambda r: r[0]) } - for entity_key in entity_keys: - entity_key_bin = serialize_entity_key( - entity_key, - entity_key_serialization_version=config.entity_key_serialization_version, - ) + for entity_key_bin in serialized_entity_keys: res = {} res_ts = None for _, feature_name, val_bin, ts in rows.get(entity_key_bin, []): diff --git a/sdk/python/feast/infra/passthrough_provider.py b/sdk/python/feast/infra/passthrough_provider.py index 2f960a02822..6830929e776 100644 --- a/sdk/python/feast/infra/passthrough_provider.py +++ b/sdk/python/feast/infra/passthrough_provider.py @@ -364,7 +364,10 @@ def _prep_rows_to_write_for_ingestion( # Note: A dictionary mapping of column names in this data # source to feature names in a feature table or view. Only used for feature # columns, not entity or timestamp columns. 
- if hasattr(feature_view, "batch_source"): + if ( + hasattr(feature_view, "batch_source") + and feature_view.batch_source is not None + ): if feature_view.batch_source.field_mapping is not None: table = _run_pyarrow_field_mapping( table, feature_view.batch_source.field_mapping @@ -410,7 +413,10 @@ async def ingest_df_async( ) def ingest_df_to_offline_store(self, feature_view: FeatureView, table: pa.Table): - if feature_view.batch_source.field_mapping is not None: + if ( + feature_view.batch_source is not None + and feature_view.batch_source.field_mapping is not None + ): table = _run_pyarrow_field_mapping( table, feature_view.batch_source.field_mapping ) diff --git a/sdk/python/feast/infra/registry/base_registry.py b/sdk/python/feast/infra/registry/base_registry.py index 24e9f36fbd2..c4bf1f5979c 100644 --- a/sdk/python/feast/infra/registry/base_registry.py +++ b/sdk/python/feast/infra/registry/base_registry.py @@ -24,6 +24,10 @@ from feast.base_feature_view import BaseFeatureView from feast.data_source import DataSource from feast.entity import Entity +from feast.errors import ( + ConflictingFeatureViewNames, + FeatureViewNotFoundException, +) from feast.feature_service import FeatureService from feast.feature_view import FeatureView from feast.infra.infra_object import Infra @@ -263,6 +267,61 @@ def apply_feature_view( """ raise NotImplementedError + def _ensure_feature_view_name_is_unique( + self, + feature_view: BaseFeatureView, + project: str, + allow_cache: bool = False, + ): + """ + Validates that no feature view name conflict exists across feature view types. + Raises ConflictingFeatureViewNames if a different type already uses the name. + + This is a defense-in-depth check for direct apply_feature_view() calls. + The primary validation happens in _validate_all_feature_views() during feast plan/apply. 
    def _ensure_feature_view_name_is_unique(
        self,
        feature_view: BaseFeatureView,
        project: str,
        allow_cache: bool = False,
    ):
        """
        Validates that no feature view name conflict exists across feature view types.
        Raises ConflictingFeatureViewNames if a different type already uses the name.

        This is a defense-in-depth check for direct apply_feature_view() calls.
        The primary validation happens in _validate_all_feature_views() during feast plan/apply.
        """
        name = feature_view.name
        # Human-readable type of the incoming view, used in the error message.
        new_type = type(feature_view).__name__

        def _check_conflict(getter, not_found_exc, existing_type: str):
            # A successful lookup means a view of *existing_type* already owns
            # this name → conflict. The "not found" exception is the good path.
            # NOTE(review): this relies on ConflictingFeatureViewNames NOT being
            # a subclass of not_found_exc, otherwise the conflict would be
            # silently swallowed here — confirm the exception hierarchy.
            try:
                getter(name, project, allow_cache=allow_cache)
                raise ConflictingFeatureViewNames(name, existing_type, new_type)
            except not_found_exc:
                pass

        # Check StreamFeatureView before FeatureView since StreamFeatureView is a subclass
        # Note: All getters raise FeatureViewNotFoundException (not type-specific exceptions)
        if isinstance(feature_view, StreamFeatureView):
            # NOTE(review): presumably get_feature_view does not also return
            # StreamFeatureViews here (which would flag a re-apply of the same
            # view as a conflict) — verify against the registry implementation.
            _check_conflict(
                self.get_feature_view, FeatureViewNotFoundException, "FeatureView"
            )
            _check_conflict(
                self.get_on_demand_feature_view,
                FeatureViewNotFoundException,
                "OnDemandFeatureView",
            )
        elif isinstance(feature_view, FeatureView):
            _check_conflict(
                self.get_stream_feature_view,
                FeatureViewNotFoundException,
                "StreamFeatureView",
            )
            _check_conflict(
                self.get_on_demand_feature_view,
                FeatureViewNotFoundException,
                "OnDemandFeatureView",
            )
        elif isinstance(feature_view, OnDemandFeatureView):
            _check_conflict(
                self.get_feature_view, FeatureViewNotFoundException, "FeatureView"
            )
            _check_conflict(
                self.get_stream_feature_view,
                FeatureViewNotFoundException,
                "StreamFeatureView",
            )
self._infer_fv_table(feature_view) return self._apply_object( @@ -1028,8 +1030,15 @@ def _maybe_init_project_metadata(self, project): "last_updated_timestamp": update_time, "project_id": project, } - insert_stmt = insert(feast_metadata).values(values) - conn.execute(insert_stmt) + try: + with conn.begin_nested(): + conn.execute(insert(feast_metadata).values(values)) + except IntegrityError: + logger.info( + "Project metadata for %s already initialized by " + "another process.", + project, + ) def _delete_object( self, diff --git a/sdk/python/feast/infra/transformation_servers/Dockerfile b/sdk/python/feast/infra/transformation_servers/Dockerfile index 6e66aaebeae..b0880e960e1 100644 --- a/sdk/python/feast/infra/transformation_servers/Dockerfile +++ b/sdk/python/feast/infra/transformation_servers/Dockerfile @@ -15,7 +15,7 @@ COPY pyproject.toml pyproject.toml COPY README.md README.md # Install dependencies -RUN --mount=source=.git,target=.git,type=bind uv pip install --system --no-cache-dir '.[gcp,aws]' +RUN --mount=source=.git,target=.git,type=bind uv pip install --system --no-cache-dir '.[gcp,aws,grpcio]' # Start feature transformation server CMD [ "python", "app.py" ] diff --git a/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_udfs.py b/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_udfs.py index 277d8e18946..23026d79109 100644 --- a/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_udfs.py +++ b/sdk/python/feast/infra/utils/snowflake/snowpark/snowflake_udfs.py @@ -23,6 +23,8 @@ HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_binary_to_bytes_proto' IMPORTS = ('@feast_stage/feast.zip'); """ + + # ValueType.BYTES = 1 @vectorized(input=pandas.DataFrame) def feast_snowflake_binary_to_bytes_proto(df): @@ -46,6 +48,8 @@ def feast_snowflake_binary_to_bytes_proto(df): HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_varchar_to_string_proto' IMPORTS = ('@feast_stage/feast.zip'); """ 
+ + # ValueType.STRING = 2 @vectorized(input=pandas.DataFrame) def feast_snowflake_varchar_to_string_proto(df): @@ -69,6 +73,8 @@ def feast_snowflake_varchar_to_string_proto(df): HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_array_bytes_to_list_bytes_proto' IMPORTS = ('@feast_stage/feast.zip'); """ + + # ValueType.STRING_LIST = 12 @vectorized(input=pandas.DataFrame) def feast_snowflake_array_bytes_to_list_bytes_proto(df): @@ -243,6 +249,8 @@ def feast_snowflake_array_timestamp_to_list_unix_timestamp_proto(df): HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_number_to_int32_proto' IMPORTS = ('@feast_stage/feast.zip'); """ + + # ValueType.INT32 = 3 @vectorized(input=pandas.DataFrame) def feast_snowflake_number_to_int32_proto(df): @@ -266,6 +274,8 @@ def feast_snowflake_number_to_int32_proto(df): HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_number_to_int64_proto' IMPORTS = ('@feast_stage/feast.zip'); """ + + # ValueType.INT64 = 4 @vectorized(input=pandas.DataFrame) def feast_snowflake_number_to_int64_proto(df): @@ -291,6 +301,8 @@ def feast_snowflake_number_to_int64_proto(df): HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_float_to_double_proto' IMPORTS = ('@feast_stage/feast.zip'); """ + + # ValueType.FLOAT = 5 & ValueType.DOUBLE = 6 @vectorized(input=pandas.DataFrame) def feast_snowflake_float_to_double_proto(df): @@ -314,6 +326,8 @@ def feast_snowflake_float_to_double_proto(df): HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_boolean_to_bool_boolean_proto' IMPORTS = ('@feast_stage/feast.zip'); """ + + # ValueType.BOOL = 7 @vectorized(input=pandas.DataFrame) def feast_snowflake_boolean_to_bool_boolean_proto(df): @@ -337,6 +351,8 @@ def feast_snowflake_boolean_to_bool_boolean_proto(df): HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_snowflake_timestamp_to_unix_timestamp_proto' IMPORTS = 
('@feast_stage/feast.zip'); """ + + # ValueType.UNIX_TIMESTAMP = 8 @vectorized(input=pandas.DataFrame) def feast_snowflake_timestamp_to_unix_timestamp_proto(df): @@ -363,6 +379,8 @@ def feast_snowflake_timestamp_to_unix_timestamp_proto(df): HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_serialize_entity_keys' IMPORTS = ('@feast_stage/feast.zip') """ + + # converts 1 to n many entity keys to a single binary for lookups @vectorized(input=pandas.DataFrame) def feast_serialize_entity_keys(df): @@ -410,6 +428,8 @@ def feast_serialize_entity_keys(df): HANDLER = 'feast.infra.utils.snowflake.snowpark.snowflake_udfs.feast_entity_key_proto_to_string' IMPORTS = ('@feast_stage/feast.zip') """ + + # converts 1 to n many entity keys to a single binary for lookups @vectorized(input=pandas.DataFrame) def feast_entity_key_proto_to_string(df): diff --git a/sdk/python/feast/metrics.py b/sdk/python/feast/metrics.py new file mode 100644 index 00000000000..be2b068d32c --- /dev/null +++ b/sdk/python/feast/metrics.py @@ -0,0 +1,466 @@ +# Copyright 2025 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Centralized Prometheus metrics for the Feast feature server. + +All metrics are defined here to provide a single source of truth. +Instrumentation is **opt-in**: metric recording is gated behind a +``_config`` object whose flags are only set when +``start_metrics_server()`` is called (i.e. 
when the feature server is +started with ``--metrics`` or ``metrics.enabled: true`` in the YAML). + +Each metric category can be individually toggled via the ``metrics`` +sub-block in ``feature_store.yaml``. When disabled, helpers +short-circuit with a fast attribute check and do zero work. + +Multiprocess support +-------------------- +Gunicorn pre-forks worker processes, so every worker gets its own copy +of the in-process metric state. To aggregate across workers we use +``prometheus_client``'s multiprocess mode: + +1. ``PROMETHEUS_MULTIPROCESS_DIR`` is set (to a temp dir if the user + has not already set it) **before** any metric objects are created. +2. Gauges specify ``multiprocess_mode`` so they aggregate correctly. +3. The metrics HTTP server uses ``MultiProcessCollector`` to read all + workers' metric files. +4. Gunicorn hooks (``post_worker_init``, ``child_exit``) are wired up + in ``feature_server.py`` to start per-worker monitoring and to + clean up dead-worker files. +""" + +import atexit +import logging +import os +import shutil +import tempfile +import threading +import time +from contextlib import contextmanager +from dataclasses import dataclass +from datetime import datetime, timezone +from typing import TYPE_CHECKING, Optional + +import psutil + +if TYPE_CHECKING: + from feast.feature_store import FeatureStore + +logger = logging.getLogger(__name__) + +# --------------------------------------------------------------------------- +# Multiprocess directory setup — MUST happen before prometheus_client import +# so that metric values are stored in shared files rather than in-process +# memory (required for Gunicorn pre-fork workers). 
# ---------------------------------------------------------------------------
# Multiprocess directory bookkeeping. These module-level names are shared
# with _cleanup_multiprocess_dir below, so their identifiers must not change.
# ---------------------------------------------------------------------------
_prometheus_mp_dir: Optional[str] = None
_owns_mp_dir: bool = False
_owner_pid: Optional[int] = None

_existing_dir = os.environ.get("PROMETHEUS_MULTIPROCESS_DIR")
if _existing_dir is None:
    # No user-provided directory: create a private temp dir and claim
    # ownership so we can clean it up at exit.
    _prometheus_mp_dir = tempfile.mkdtemp(prefix="feast_metrics_")
    os.environ["PROMETHEUS_MULTIPROCESS_DIR"] = _prometheus_mp_dir
    _owns_mp_dir = True
    _owner_pid = os.getpid()
else:
    _prometheus_mp_dir = _existing_dir

# prometheus_client reads two different env var names:
# - PROMETHEUS_MULTIPROCESS_DIR (value storage in prometheus_client.values)
# - PROMETHEUS_MULTIPROC_DIR (MultiProcessCollector)
# Both must point at the same directory.
os.environ.setdefault("PROMETHEUS_MULTIPROC_DIR", _prometheus_mp_dir)


def _cleanup_multiprocess_dir():
    """Remove the temp metrics directory at interpreter exit.

    Only the process that originally created the directory may delete it:
    forked Gunicorn workers inherit ``_owns_mp_dir=True`` but run under a
    different PID, and removing the shared directory from a worker would
    break metrics for sibling workers and the metrics HTTP server.
    """
    if not _owns_mp_dir or _owner_pid != os.getpid():
        return
    if _prometheus_mp_dir and os.path.isdir(_prometheus_mp_dir):
        shutil.rmtree(_prometheus_mp_dir, ignore_errors=True)


atexit.register(_cleanup_multiprocess_dir)

# Safe to import prometheus_client only now that the env vars are in place.
from prometheus_client import Counter, Gauge, Histogram  # noqa: E402


# ---------------------------------------------------------------------------
# Per-category runtime flags
# ---------------------------------------------------------------------------
@dataclass
class _MetricsFlags:
    """Runtime toggle for each metric category.

    Every flag starts out ``False`` (disabled); ``start_metrics_server``
    enables them according to the user's ``MetricsConfig``.
    """

    enabled: bool = False
    resource: bool = False
    request: bool = False
    online_features: bool = False
    push: bool = False
    materialization: bool = False
    freshness: bool = False


_config = _MetricsFlags()


def build_metrics_flags(metrics_config: Optional[object] = None) -> _MetricsFlags:
    """Build ``_MetricsFlags`` from a ``MetricsConfig`` object.

    When *metrics_config* is ``None`` (metrics turned on purely via the
    ``--metrics`` CLI flag, with no YAML block) every category is enabled.
    Otherwise the per-category booleans are honoured, defaulting to ``True``
    for any attribute the config object does not define.
    """
    categories = (
        "resource",
        "request",
        "online_features",
        "push",
        "materialization",
        "freshness",
    )
    if metrics_config is None:
        toggles = {name: True for name in categories}
    else:
        toggles = {name: getattr(metrics_config, name, True) for name in categories}
    return _MetricsFlags(enabled=True, **toggles)


# ---------------------------------------------------------------------------
# Resource metrics — multiprocess_mode="liveall" so each live worker
# reports its own CPU/memory with a ``pid`` label.
# ---------------------------------------------------------------------------
cpu_usage_gauge = Gauge(
    "feast_feature_server_cpu_usage",
    "CPU usage percentage of the Feast feature server process",
    multiprocess_mode="liveall",
)
memory_usage_gauge = Gauge(
    "feast_feature_server_memory_usage",
    "Memory usage percentage of the Feast feature server process",
    multiprocess_mode="liveall",
)

# ---------------------------------------------------------------------------
# HTTP request metrics (Counters & Histograms aggregate automatically)
# ---------------------------------------------------------------------------
request_count = Counter(
    "feast_feature_server_request_total",
    "Total number of requests to the Feast feature server",
    ["endpoint", "status"],
)
request_latency = Histogram(
    "feast_feature_server_request_latency_seconds",
    "Latency of requests to the Feast feature server in seconds",
    ["endpoint", "feature_count", "feature_view_count"],
    buckets=(0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1.0, 2.5, 5.0, 10.0),
)

# ---------------------------------------------------------------------------
# Online feature retrieval metrics
# ---------------------------------------------------------------------------
online_features_request_count = Counter(
    "feast_online_features_request_total",
    "Total online feature retrieval requests",
)
online_features_entity_count = Histogram(
    "feast_online_features_entity_count",
    "Number of entity rows per online feature request",
    buckets=(1, 5, 10, 25, 50, 100, 250, 500, 1000),
)

# ---------------------------------------------------------------------------
# Push / write metrics
# ---------------------------------------------------------------------------
push_request_count = Counter(
    "feast_push_request_total",
    "Total push requests to the feature store",
    ["push_source", "mode"],
)

# ---------------------------------------------------------------------------
# Materialization metrics
# ---------------------------------------------------------------------------
materialization_total = Counter(
    "feast_materialization_total",
    "Total materialization runs per feature view",
    ["feature_view", "status"],
)
materialization_duration_seconds = Histogram(
    "feast_materialization_duration_seconds",
    "Duration of materialization per feature view in seconds",
    ["feature_view"],
    buckets=(1.0, 5.0, 10.0, 30.0, 60.0, 120.0, 300.0, 600.0, 1800.0, 3600.0),
)

# ---------------------------------------------------------------------------
# Feature freshness metrics — "max" shows the worst-case staleness across
# processes (freshness is identical regardless of which process computes it).
# ---------------------------------------------------------------------------
feature_freshness_seconds = Gauge(
    "feast_feature_freshness_seconds",
    "Seconds since the most recent materialization end time per feature view",
    ["feature_view", "project"],
    multiprocess_mode="max",
)

# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------


class RequestMetricsContext:
    """Mutable label holder yielded by :func:`track_request_latency`.

    Callers that need to resolve labels *inside* the ``with`` block
    (e.g. ``/get-online-features`` where the feature count is only
    known after ``_get_features`` succeeds) can set the attributes
    on the yielded object and they will be picked up in ``finally``.
    """

    __slots__ = ("feature_count", "feature_view_count")

    def __init__(self, feature_count: str = "", feature_view_count: str = ""):
        self.feature_count = feature_count
        self.feature_view_count = feature_view_count


@contextmanager
def track_request_latency(
    endpoint: str, feature_count: str = "", feature_view_count: str = ""
):
    """Context manager that records endpoint latency and increments request count.

    Yields a :class:`RequestMetricsContext` whose ``feature_count`` and
    ``feature_view_count`` attributes can be updated inside the block.
    The final values are used when recording the histogram and counter
    in ``finally``, so labels are accurate even when they depend on
    work done inside the block.

    Gated by the ``request`` category flag; when disabled it yields the
    context object and does no other work.

    Args:
        endpoint: Label identifying the HTTP endpoint being timed.
        feature_count: Initial value for the ``feature_count`` label.
        feature_view_count: Initial value for the ``feature_view_count`` label.
    """
    ctx = RequestMetricsContext(feature_count, feature_view_count)
    if not _config.request:
        yield ctx
        return

    start = time.monotonic()
    status_label = "success"
    try:
        yield ctx
    except Exception:
        status_label = "error"
        raise
    finally:
        elapsed = time.monotonic() - start
        request_latency.labels(
            endpoint=endpoint,
            feature_count=ctx.feature_count,
            feature_view_count=ctx.feature_view_count,
        ).observe(elapsed)
        request_count.labels(endpoint=endpoint, status=status_label).inc()


def track_online_features_entities(entity_count: int):
    """Record the number of entity rows in an online feature request.

    No-op unless the ``online_features`` category is enabled.
    """
    if not _config.online_features:
        return
    online_features_request_count.inc()
    online_features_entity_count.observe(entity_count)


def track_push(push_source: str, mode: str):
    """Increment the push request counter (gated by the ``push`` flag)."""
    if not _config.push:
        return
    push_request_count.labels(push_source=push_source, mode=mode).inc()


def track_materialization(
    feature_view_name: str, success: bool, duration_seconds: float
):
    """Record materialization outcome and duration for a single feature view.

    Args:
        feature_view_name: Name used for the ``feature_view`` label.
        success: Whether the run succeeded; maps to the ``status`` label.
        duration_seconds: Wall-clock duration of the run.
    """
    if not _config.materialization:
        return
    status = "success" if success else "failure"
    materialization_total.labels(feature_view=feature_view_name, status=status).inc()
    materialization_duration_seconds.labels(feature_view=feature_view_name).observe(
        duration_seconds
    )


def update_feature_freshness(
    store: "FeatureStore",
) -> None:
    """
    Compute and set the freshness gauge for every feature view in the registry.

    Freshness = now - most_recent_end_time (from materialization_intervals).
    A higher value means the feature data is more stale.

    Best-effort: any failure is logged at debug level and swallowed so a
    registry hiccup never takes down the monitoring thread.
    """
    try:
        feature_views = store.list_feature_views(allow_cache=True)
        now = datetime.now(tz=timezone.utc)
        for fv in feature_views:
            end_time = fv.most_recent_end_time
            if end_time is not None:
                # Registry timestamps may be naive; treat them as UTC so the
                # subtraction against the aware "now" is valid.
                if end_time.tzinfo is None:
                    end_time = end_time.replace(tzinfo=timezone.utc)
                staleness = (now - end_time).total_seconds()
                feature_freshness_seconds.labels(
                    feature_view=fv.name, project=store.project
                ).set(staleness)
    except Exception:
        logger.debug("Failed to update feature freshness metrics", exc_info=True)


def monitor_resources(interval: int = 5):
    """Background thread target that updates CPU and memory usage gauges.

    Args:
        interval: Seconds to sleep between samples.
    """
    logger.debug("Starting resource monitoring with interval %d seconds", interval)
    p = psutil.Process()
    logger.debug("PID is %d", p.pid)
    # psutil returns a meaningless 0.0 from the first cpu_percent() call
    # (no prior sample to diff against); prime it once so every reported
    # sample reflects real usage over the preceding interval.
    p.cpu_percent()
    while True:
        with p.oneshot():
            cpu_usage = p.cpu_percent()
            memory_usage = p.memory_percent()
        logger.debug("CPU usage: %s%%, Memory usage: %s%%", cpu_usage, memory_usage)
        cpu_usage_gauge.set(cpu_usage)
        memory_usage_gauge.set(memory_usage)
        time.sleep(interval)


def monitor_freshness(store: "FeatureStore", interval: int = 30):
    """Background thread target that periodically updates feature freshness gauges.

    Args:
        store: FeatureStore whose registry is polled.
        interval: Seconds to sleep between refreshes.
    """
    logger.debug(
        "Starting feature freshness monitoring with interval %d seconds", interval
    )
    while True:
        update_feature_freshness(store)
        time.sleep(interval)


# ---------------------------------------------------------------------------
# Gunicorn multiprocess helpers
# ---------------------------------------------------------------------------


def mark_process_dead(pid: int):
    """Clean up metric files for a dead Gunicorn worker.

    Called from the Gunicorn ``child_exit`` hook so that stale worker
    data no longer appears in scraped output. Best-effort: failures are
    logged at debug level only.
    """
    if not _config.enabled:
        return
    try:
        from prometheus_client import multiprocess

        multiprocess.mark_process_dead(pid)
    except Exception:
        logger.debug("Failed to mark process %d as dead", pid, exc_info=True)


def init_worker_monitoring():
    """Start resource monitoring inside a Gunicorn worker process.

    Called from the ``post_worker_init`` hook so that each worker
    tracks its own CPU/memory independently of the master.
    """
    if _config.resource:
        t = threading.Thread(target=monitor_resources, args=(5,), daemon=True)
        t.start()


def start_metrics_server(
    store: "FeatureStore",
    port: int = 8000,
    metrics_config: Optional["_MetricsFlags"] = None,
    start_resource_monitoring: bool = True,
):
    """
    Start the Prometheus metrics HTTP server and background monitoring threads.

    Uses ``MultiProcessCollector`` so that metrics from all Gunicorn
    workers are correctly aggregated when Prometheus scrapes port *port*.

    Args:
        store: The FeatureStore instance (used for freshness checks).
        port: TCP port for the Prometheus HTTP endpoint.
        metrics_config: Optional pre-built ``_MetricsFlags``. When
            ``None`` every category defaults to **enabled**.
        start_resource_monitoring: Whether to start the CPU/memory
            monitoring thread. Set to ``False`` when Gunicorn will
            fork workers — the ``post_worker_init`` hook starts
            per-worker monitoring instead.
    """
    global _config

    # Reuse build_metrics_flags for the all-enabled default instead of
    # duplicating the flag list here (keeps the two code paths in sync).
    if metrics_config is not None:
        _config = metrics_config
    else:
        _config = build_metrics_flags(None)

    from prometheus_client import CollectorRegistry, make_wsgi_app
    from prometheus_client.multiprocess import MultiProcessCollector

    registry = CollectorRegistry()
    MultiProcessCollector(registry)

    from wsgiref.simple_server import make_server

    httpd = make_server("", port, make_wsgi_app(registry))
    metrics_thread = threading.Thread(target=httpd.serve_forever, daemon=True)
    metrics_thread.start()
    logger.info(
        "Prometheus metrics server started on port %d (multiprocess-safe)", port
    )

    if _config.resource and start_resource_monitoring:
        resource_thread = threading.Thread(
            target=monitor_resources, args=(5,), daemon=True
        )
        resource_thread.start()

    if _config.freshness:
        freshness_thread = threading.Thread(
            target=monitor_freshness, args=(store, 30), daemon=True
        )
        freshness_thread.start()
+ """ + global _config + + if metrics_config is not None: + _config = metrics_config + else: + _config = _MetricsFlags( + enabled=True, + resource=True, + request=True, + online_features=True, + push=True, + materialization=True, + freshness=True, + ) + + from prometheus_client import CollectorRegistry, make_wsgi_app + from prometheus_client.multiprocess import MultiProcessCollector + + registry = CollectorRegistry() + MultiProcessCollector(registry) + + from wsgiref.simple_server import make_server + + httpd = make_server("", port, make_wsgi_app(registry)) + metrics_thread = threading.Thread(target=httpd.serve_forever, daemon=True) + metrics_thread.start() + logger.info( + "Prometheus metrics server started on port %d (multiprocess-safe)", port + ) + + if _config.resource and start_resource_monitoring: + resource_thread = threading.Thread( + target=monitor_resources, args=(5,), daemon=True + ) + resource_thread.start() + + if _config.freshness: + freshness_thread = threading.Thread( + target=monitor_freshness, args=(store, 30), daemon=True + ) + freshness_thread.start() diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index 7ead26cb984..6430675f4e7 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -16,6 +16,7 @@ from feast.feature_view import DUMMY_ENTITY_NAME, FeatureView from feast.feature_view_projection import FeatureViewProjection from feast.field import Field, from_value_type +from feast.proto_utils import transformation_to_proto from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( OnDemandFeatureView as OnDemandFeatureViewProto, ) @@ -24,9 +25,6 @@ OnDemandFeatureViewSpec, OnDemandSource, ) -from feast.protos.feast.core.Transformation_pb2 import ( - FeatureTransformationV2 as FeatureTransformationProto, -) from feast.protos.feast.core.Transformation_pb2 import ( UserDefinedFunctionV2 as UserDefinedFunctionProto, ) @@ -471,29 +469,11 @@ def 
to_proto(self) -> OnDemandFeatureViewProto: request_data_source=request_sources.to_proto() ) - user_defined_function_proto = cast( - UserDefinedFunctionProto, - self.feature_transformation.to_proto() - if isinstance( - self.feature_transformation, - (PandasTransformation, PythonTransformation), - ) - else None, - ) + feature_transformation = transformation_to_proto(self.feature_transformation) - substrait_transformation_proto = ( - self.feature_transformation.to_proto() - if isinstance(self.feature_transformation, SubstraitTransformation) - else None - ) - - feature_transformation = FeatureTransformationProto( - user_defined_function=user_defined_function_proto, - substrait_transformation=substrait_transformation_proto, - ) spec = OnDemandFeatureViewSpec( name=self.name, - entities=self.entities if self.entities else None, + entities=self.entities or None, entity_columns=[ field.to_proto() for field in self.entity_columns if self.entity_columns ], @@ -505,7 +485,7 @@ def to_proto(self) -> OnDemandFeatureViewProto: tags=self.tags, owner=self.owner, write_to_online_store=self.write_to_online_store, - singleton=self.singleton if self.singleton else False, + singleton=self.singleton or False, aggregations=self.aggregations, ) return OnDemandFeatureViewProto(spec=spec, meta=meta) diff --git a/sdk/python/feast/proto_utils.py b/sdk/python/feast/proto_utils.py new file mode 100644 index 00000000000..a81a9fd6c23 --- /dev/null +++ b/sdk/python/feast/proto_utils.py @@ -0,0 +1,101 @@ +# Copyright 2019 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Utility functions for protobuf serialization of Feast objects.
"""

from typing import TYPE_CHECKING, Any, Optional, Union

from google.protobuf.message import Message

from feast.protos.feast.core.Transformation_pb2 import (
    FeatureTransformationV2 as FeatureTransformationProto,
)
from feast.protos.feast.core.Transformation_pb2 import (
    SubstraitTransformationV2 as SubstraitTransformationProto,
)
from feast.protos.feast.core.Transformation_pb2 import (
    UserDefinedFunctionV2 as UserDefinedFunctionProto,
)

if TYPE_CHECKING:
    from feast.data_source import DataSource
    from feast.transformation.mode import TransformationMode


def serialize_data_source(source: Optional["DataSource"]) -> Optional[Message]:
    """Serialize a data source to proto with class type annotation.

    Args:
        source: The data source to serialize, or None.

    Returns:
        The serialized proto with data_source_class_type set, or None if source is None.
    """
    if source is None:
        return None
    source_cls = source.__class__
    proto = source.to_proto()
    proto.data_source_class_type = f"{source_cls.__module__}.{source_cls.__name__}"
    return proto


def transformation_to_proto(
    transformation: Optional[Any],
) -> Optional[FeatureTransformationProto]:
    """Convert a transformation to FeatureTransformationProto.

    Args:
        transformation: The transformation object with a to_proto() method.

    Returns:
        A FeatureTransformationProto wrapping the transformation, or None
        when the input is None, has no to_proto(), or serializes to an
        unrecognized proto type.
    """
    if transformation is None or not hasattr(transformation, "to_proto"):
        return None

    inner = transformation.to_proto()

    if isinstance(inner, UserDefinedFunctionProto):
        return FeatureTransformationProto(user_defined_function=inner)
    if isinstance(inner, SubstraitTransformationProto):
        return FeatureTransformationProto(substrait_transformation=inner)
    return None


def mode_to_string(mode: Optional[Union["TransformationMode", str]]) -> str:
    """Convert mode to string value.

    Args:
        mode: A TransformationMode enum or string, or None.

    Returns:
        The string representation of the mode, or empty string if None.
    """
    # Imported lazily to avoid a circular import at module load time.
    from feast.transformation.mode import TransformationMode

    if mode is None:
        return ""
    return mode.value if isinstance(mode, TransformationMode) else mode
_descriptor_pool.Default().AddSerializedFile(b'\n\x1cfeast/core/Aggregation.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/duration.proto\"\xd3\x01\n\x0bAggregation\x12\x16\n\x06column\x18\x01 \x01(\tR\x06column\x12\x1a\n\x08function\x18\x02 \x01(\tR\x08function\x12:\n\x0btime_window\x18\x03 \x01(\x0b2\x19.google.protobuf.DurationR\ntimeWindow\x12@\n\x0eslide_interval\x18\x04 \x01(\x0b2\x19.google.protobuf.DurationR\rslideInterval\x12\x12\n\x04name\x18\x05 \x01(\tR\x04nameBU\n\x10feast.proto.coreB\x10AggregationProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -24,5 +24,5 @@ _globals['DESCRIPTOR']._options = None _globals['DESCRIPTOR']._serialized_options = b'\n\020feast.proto.coreB\020AggregationProtoZ/github.com/feast-dev/feast/go/protos/feast/core' _globals['_AGGREGATION']._serialized_start=77 - _globals['_AGGREGATION']._serialized_end=223 + _globals['_AGGREGATION']._serialized_end=288 # @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/Aggregation_pb2.pyi b/sdk/python/feast/protos/feast/core/Aggregation_pb2.pyi index ceb8b1f8131..af9ec2b191f 100644 --- a/sdk/python/feast/protos/feast/core/Aggregation_pb2.pyi +++ b/sdk/python/feast/protos/feast/core/Aggregation_pb2.pyi @@ -22,8 +22,10 @@ class Aggregation(google.protobuf.message.Message): FUNCTION_FIELD_NUMBER: builtins.int TIME_WINDOW_FIELD_NUMBER: builtins.int SLIDE_INTERVAL_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int column: builtins.str function: builtins.str + name: builtins.str @property def time_window(self) -> google.protobuf.duration_pb2.Duration: ... @property @@ -35,8 +37,9 @@ class Aggregation(google.protobuf.message.Message): function: builtins.str = ..., time_window: google.protobuf.duration_pb2.Duration | None = ..., slide_interval: google.protobuf.duration_pb2.Duration | None = ..., + name: builtins.str = ..., ) -> None: ... 
def HasField(self, field_name: typing_extensions.Literal["slide_interval", b"slide_interval", "time_window", b"time_window"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["column", b"column", "function", b"function", "slide_interval", b"slide_interval", "time_window", b"time_window"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["column", b"column", "function", b"function", "name", b"name", "slide_interval", b"slide_interval", "time_window", b"time_window"]) -> None: ... global___Aggregation = Aggregation diff --git a/sdk/python/feast/protos/feast/core/DatastoreTable_pb2.pyi b/sdk/python/feast/protos/feast/core/DatastoreTable_pb2.pyi index 7b5a629eb7a..6339a97536e 100644 --- a/sdk/python/feast/protos/feast/core/DatastoreTable_pb2.pyi +++ b/sdk/python/feast/protos/feast/core/DatastoreTable_pb2.pyi @@ -1,19 +1,19 @@ """ @generated by mypy-protobuf. Do not edit manually! isort:skip_file - -* Copyright 2021 The Feast Authors -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* https://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and + +* Copyright 2021 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. 
+* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and * limitations under the License. """ import builtins diff --git a/sdk/python/feast/protos/feast/core/Entity_pb2.pyi b/sdk/python/feast/protos/feast/core/Entity_pb2.pyi index a5924a13451..025817edfee 100644 --- a/sdk/python/feast/protos/feast/core/Entity_pb2.pyi +++ b/sdk/python/feast/protos/feast/core/Entity_pb2.pyi @@ -1,19 +1,19 @@ """ @generated by mypy-protobuf. Do not edit manually! isort:skip_file - -* Copyright 2020 The Feast Authors -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* https://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and + +* Copyright 2020 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and * limitations under the License. 
""" import builtins diff --git a/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.pyi b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.pyi index 72426f55c9f..6b44ad4a931 100644 --- a/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.pyi +++ b/sdk/python/feast/protos/feast/core/FeatureViewProjection_pb2.pyi @@ -19,7 +19,7 @@ else: DESCRIPTOR: google.protobuf.descriptor.FileDescriptor class FeatureViewProjection(google.protobuf.message.Message): - """A projection to be applied on top of a FeatureView. + """A projection to be applied on top of a FeatureView. Contains the modifications to a FeatureView such as the features subset to use. """ diff --git a/sdk/python/feast/protos/feast/core/FeatureView_pb2.py b/sdk/python/feast/protos/feast/core/FeatureView_pb2.py index 9a59255375f..0221a96031b 100644 --- a/sdk/python/feast/protos/feast/core/FeatureView_pb2.py +++ b/sdk/python/feast/protos/feast/core/FeatureView_pb2.py @@ -19,7 +19,7 @@ from feast.protos.feast.core import Transformation_pb2 as feast_dot_core_dot_Transformation__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66\x65\x61st/core/FeatureView.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x18\x66\x65\x61st/core/Feature.proto\x1a\x1f\x66\x65\x61st/core/Transformation.proto\"c\n\x0b\x46\x65\x61tureView\x12)\n\x04spec\x18\x01 \x01(\x0b\x32\x1b.feast.core.FeatureViewSpec\x12)\n\x04meta\x18\x02 \x01(\x0b\x32\x1b.feast.core.FeatureViewMeta\"\xd4\x04\n\x0f\x46\x65\x61tureViewSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x10\n\x08\x65ntities\x18\x03 \x03(\t\x12+\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x33\n\x04tags\x18\x05 \x03(\x0b\x32%.feast.core.FeatureViewSpec.TagsEntry\x12&\n\x03ttl\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12,\n\x0c\x62\x61tch_source\x18\x07 
\x01(\x0b\x32\x16.feast.core.DataSource\x12\x0e\n\x06online\x18\x08 \x01(\x08\x12-\n\rstream_source\x18\t \x01(\x0b\x32\x16.feast.core.DataSource\x12\x13\n\x0b\x64\x65scription\x18\n \x01(\t\x12\r\n\x05owner\x18\x0b \x01(\t\x12\x31\n\x0e\x65ntity_columns\x18\x0c \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x0f\n\x07offline\x18\r \x01(\x08\x12\x31\n\x0csource_views\x18\x0e \x03(\x0b\x32\x1b.feast.core.FeatureViewSpec\x12\x43\n\x16\x66\x65\x61ture_transformation\x18\x0f \x01(\x0b\x32#.feast.core.FeatureTransformationV2\x12\x0c\n\x04mode\x18\x10 \x01(\t\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xcc\x01\n\x0f\x46\x65\x61tureViewMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x46\n\x19materialization_intervals\x18\x03 \x03(\x0b\x32#.feast.core.MaterializationInterval\"w\n\x17MaterializationInterval\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"@\n\x0f\x46\x65\x61tureViewList\x12-\n\x0c\x66\x65\x61tureviews\x18\x01 \x03(\x0b\x32\x17.feast.core.FeatureViewBU\n\x10\x66\x65\x61st.proto.coreB\x10\x46\x65\x61tureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66\x65\x61st/core/FeatureView.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x18\x66\x65\x61st/core/Feature.proto\x1a\x1f\x66\x65\x61st/core/Transformation.proto\"c\n\x0b\x46\x65\x61tureView\x12)\n\x04spec\x18\x01 \x01(\x0b\x32\x1b.feast.core.FeatureViewSpec\x12)\n\x04meta\x18\x02 \x01(\x0b\x32\x1b.feast.core.FeatureViewMeta\"\xef\x04\n\x0f\x46\x65\x61tureViewSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 
\x01(\t\x12\x10\n\x08\x65ntities\x18\x03 \x03(\t\x12+\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x33\n\x04tags\x18\x05 \x03(\x0b\x32%.feast.core.FeatureViewSpec.TagsEntry\x12&\n\x03ttl\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12,\n\x0c\x62\x61tch_source\x18\x07 \x01(\x0b\x32\x16.feast.core.DataSource\x12\x0e\n\x06online\x18\x08 \x01(\x08\x12-\n\rstream_source\x18\t \x01(\x0b\x32\x16.feast.core.DataSource\x12\x13\n\x0b\x64\x65scription\x18\n \x01(\t\x12\r\n\x05owner\x18\x0b \x01(\t\x12\x31\n\x0e\x65ntity_columns\x18\x0c \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x0f\n\x07offline\x18\r \x01(\x08\x12\x31\n\x0csource_views\x18\x0e \x03(\x0b\x32\x1b.feast.core.FeatureViewSpec\x12\x43\n\x16\x66\x65\x61ture_transformation\x18\x0f \x01(\x0b\x32#.feast.core.FeatureTransformationV2\x12\x0c\n\x04mode\x18\x10 \x01(\t\x12\x19\n\x11\x65nable_validation\x18\x11 \x01(\x08\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xcc\x01\n\x0f\x46\x65\x61tureViewMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16last_updated_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x46\n\x19materialization_intervals\x18\x03 \x03(\x0b\x32#.feast.core.MaterializationInterval\"w\n\x17MaterializationInterval\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"@\n\x0f\x46\x65\x61tureViewList\x12-\n\x0c\x66\x65\x61tureviews\x18\x01 \x03(\x0b\x32\x17.feast.core.FeatureViewBU\n\x10\x66\x65\x61st.proto.coreB\x10\x46\x65\x61tureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -32,13 +32,13 @@ _globals['_FEATUREVIEW']._serialized_start=197 _globals['_FEATUREVIEW']._serialized_end=296 _globals['_FEATUREVIEWSPEC']._serialized_start=299 - 
_globals['_FEATUREVIEWSPEC']._serialized_end=895 - _globals['_FEATUREVIEWSPEC_TAGSENTRY']._serialized_start=852 - _globals['_FEATUREVIEWSPEC_TAGSENTRY']._serialized_end=895 - _globals['_FEATUREVIEWMETA']._serialized_start=898 - _globals['_FEATUREVIEWMETA']._serialized_end=1102 - _globals['_MATERIALIZATIONINTERVAL']._serialized_start=1104 - _globals['_MATERIALIZATIONINTERVAL']._serialized_end=1223 - _globals['_FEATUREVIEWLIST']._serialized_start=1225 - _globals['_FEATUREVIEWLIST']._serialized_end=1289 + _globals['_FEATUREVIEWSPEC']._serialized_end=922 + _globals['_FEATUREVIEWSPEC_TAGSENTRY']._serialized_start=879 + _globals['_FEATUREVIEWSPEC_TAGSENTRY']._serialized_end=922 + _globals['_FEATUREVIEWMETA']._serialized_start=925 + _globals['_FEATUREVIEWMETA']._serialized_end=1129 + _globals['_MATERIALIZATIONINTERVAL']._serialized_start=1131 + _globals['_MATERIALIZATIONINTERVAL']._serialized_end=1250 + _globals['_FEATUREVIEWLIST']._serialized_start=1252 + _globals['_FEATUREVIEWLIST']._serialized_end=1316 # @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/FeatureView_pb2.pyi b/sdk/python/feast/protos/feast/core/FeatureView_pb2.pyi index a7115be8459..c5a54394320 100644 --- a/sdk/python/feast/protos/feast/core/FeatureView_pb2.pyi +++ b/sdk/python/feast/protos/feast/core/FeatureView_pb2.pyi @@ -58,7 +58,7 @@ class FeatureView(google.protobuf.message.Message): global___FeatureView = FeatureView class FeatureViewSpec(google.protobuf.message.Message): - """Next available id: 17 + """Next available id: 18 TODO(adchia): refactor common fields from this and ODFV into separate metadata proto """ @@ -95,6 +95,7 @@ class FeatureViewSpec(google.protobuf.message.Message): SOURCE_VIEWS_FIELD_NUMBER: builtins.int FEATURE_TRANSFORMATION_FIELD_NUMBER: builtins.int MODE_FIELD_NUMBER: builtins.int + ENABLE_VALIDATION_FIELD_NUMBER: builtins.int name: builtins.str """Name of the feature view. Must be unique. 
Not updated.""" project: builtins.str @@ -141,6 +142,8 @@ class FeatureViewSpec(google.protobuf.message.Message): """Feature transformation for batch feature views""" mode: builtins.str """The transformation mode (e.g., "python", "pandas", "spark", "sql", "ray")""" + enable_validation: builtins.bool + """Whether schema validation is enabled during materialization""" def __init__( self, *, @@ -160,9 +163,10 @@ class FeatureViewSpec(google.protobuf.message.Message): source_views: collections.abc.Iterable[global___FeatureViewSpec] | None = ..., feature_transformation: feast.core.Transformation_pb2.FeatureTransformationV2 | None = ..., mode: builtins.str = ..., + enable_validation: builtins.bool = ..., ) -> None: ... def HasField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "feature_transformation", b"feature_transformation", "stream_source", b"stream_source", "ttl", b"ttl"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "description", b"description", "entities", b"entities", "entity_columns", b"entity_columns", "feature_transformation", b"feature_transformation", "features", b"features", "mode", b"mode", "name", b"name", "offline", b"offline", "online", b"online", "owner", b"owner", "project", b"project", "source_views", b"source_views", "stream_source", b"stream_source", "tags", b"tags", "ttl", b"ttl"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "description", b"description", "enable_validation", b"enable_validation", "entities", b"entities", "entity_columns", b"entity_columns", "feature_transformation", b"feature_transformation", "features", b"features", "mode", b"mode", "name", b"name", "offline", b"offline", "online", b"online", "owner", b"owner", "project", b"project", "source_views", b"source_views", "stream_source", b"stream_source", "tags", b"tags", "ttl", b"ttl"]) -> None: ... 
global___FeatureViewSpec = FeatureViewSpec diff --git a/sdk/python/feast/protos/feast/core/Project_pb2.pyi b/sdk/python/feast/protos/feast/core/Project_pb2.pyi index 3196304a19b..e3cce2ec425 100644 --- a/sdk/python/feast/protos/feast/core/Project_pb2.pyi +++ b/sdk/python/feast/protos/feast/core/Project_pb2.pyi @@ -1,19 +1,19 @@ """ @generated by mypy-protobuf. Do not edit manually! isort:skip_file - -* Copyright 2020 The Feast Authors -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* https://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and + +* Copyright 2020 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and * limitations under the License. """ import builtins diff --git a/sdk/python/feast/protos/feast/core/Registry_pb2.pyi b/sdk/python/feast/protos/feast/core/Registry_pb2.pyi index ad09878b77f..fca49c75481 100644 --- a/sdk/python/feast/protos/feast/core/Registry_pb2.pyi +++ b/sdk/python/feast/protos/feast/core/Registry_pb2.pyi @@ -1,19 +1,19 @@ """ @generated by mypy-protobuf. Do not edit manually! 
isort:skip_file - -* Copyright 2020 The Feast Authors -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* https://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and + +* Copyright 2020 The Feast Authors +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* https://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and * limitations under the License. 
""" import builtins diff --git a/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.py b/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.py index f64c2852aa9..cd3ec690574 100644 --- a/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.py +++ b/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.py @@ -21,7 +21,7 @@ from feast.protos.feast.core import Transformation_pb2 as feast_dot_core_dot_Transformation__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"feast/core/StreamFeatureView.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/duration.proto\x1a$feast/core/OnDemandFeatureView.proto\x1a\x1c\x66\x65\x61st/core/FeatureView.proto\x1a\x18\x66\x65\x61st/core/Feature.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x1c\x66\x65\x61st/core/Aggregation.proto\x1a\x1f\x66\x65\x61st/core/Transformation.proto\"o\n\x11StreamFeatureView\x12/\n\x04spec\x18\x01 \x01(\x0b\x32!.feast.core.StreamFeatureViewSpec\x12)\n\x04meta\x18\x02 \x01(\x0b\x32\x1b.feast.core.FeatureViewMeta\"\xf3\x05\n\x15StreamFeatureViewSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x10\n\x08\x65ntities\x18\x03 \x03(\t\x12+\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x31\n\x0e\x65ntity_columns\x18\x05 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x39\n\x04tags\x18\x07 \x03(\x0b\x32+.feast.core.StreamFeatureViewSpec.TagsEntry\x12\r\n\x05owner\x18\x08 \x01(\t\x12&\n\x03ttl\x18\t \x01(\x0b\x32\x19.google.protobuf.Duration\x12,\n\x0c\x62\x61tch_source\x18\n \x01(\x0b\x32\x16.feast.core.DataSource\x12-\n\rstream_source\x18\x0b \x01(\x0b\x32\x16.feast.core.DataSource\x12\x0e\n\x06online\x18\x0c \x01(\x08\x12\x42\n\x15user_defined_function\x18\r \x01(\x0b\x32\x1f.feast.core.UserDefinedFunctionB\x02\x18\x01\x12\x0c\n\x04mode\x18\x0e \x01(\t\x12-\n\x0c\x61ggregations\x18\x0f \x03(\x0b\x32\x17.feast.core.Aggregation\x12\x17\n\x0ftimestamp_field\x18\x10 
\x01(\t\x12\x43\n\x16\x66\x65\x61ture_transformation\x18\x11 \x01(\x0b\x32#.feast.core.FeatureTransformationV2\x12\x15\n\renable_tiling\x18\x12 \x01(\x08\x12\x32\n\x0ftiling_hop_size\x18\x13 \x01(\x0b\x32\x19.google.protobuf.Duration\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42[\n\x10\x66\x65\x61st.proto.coreB\x16StreamFeatureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"feast/core/StreamFeatureView.proto\x12\nfeast.core\x1a\x1egoogle/protobuf/duration.proto\x1a$feast/core/OnDemandFeatureView.proto\x1a\x1c\x66\x65\x61st/core/FeatureView.proto\x1a\x18\x66\x65\x61st/core/Feature.proto\x1a\x1b\x66\x65\x61st/core/DataSource.proto\x1a\x1c\x66\x65\x61st/core/Aggregation.proto\x1a\x1f\x66\x65\x61st/core/Transformation.proto\"o\n\x11StreamFeatureView\x12/\n\x04spec\x18\x01 \x01(\x0b\x32!.feast.core.StreamFeatureViewSpec\x12)\n\x04meta\x18\x02 \x01(\x0b\x32\x1b.feast.core.FeatureViewMeta\"\x8e\x06\n\x15StreamFeatureViewSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07project\x18\x02 \x01(\t\x12\x10\n\x08\x65ntities\x18\x03 \x03(\t\x12+\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x31\n\x0e\x65ntity_columns\x18\x05 \x03(\x0b\x32\x19.feast.core.FeatureSpecV2\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x39\n\x04tags\x18\x07 \x03(\x0b\x32+.feast.core.StreamFeatureViewSpec.TagsEntry\x12\r\n\x05owner\x18\x08 \x01(\t\x12&\n\x03ttl\x18\t \x01(\x0b\x32\x19.google.protobuf.Duration\x12,\n\x0c\x62\x61tch_source\x18\n \x01(\x0b\x32\x16.feast.core.DataSource\x12-\n\rstream_source\x18\x0b \x01(\x0b\x32\x16.feast.core.DataSource\x12\x0e\n\x06online\x18\x0c \x01(\x08\x12\x42\n\x15user_defined_function\x18\r \x01(\x0b\x32\x1f.feast.core.UserDefinedFunctionB\x02\x18\x01\x12\x0c\n\x04mode\x18\x0e \x01(\t\x12-\n\x0c\x61ggregations\x18\x0f 
\x03(\x0b\x32\x17.feast.core.Aggregation\x12\x17\n\x0ftimestamp_field\x18\x10 \x01(\t\x12\x43\n\x16\x66\x65\x61ture_transformation\x18\x11 \x01(\x0b\x32#.feast.core.FeatureTransformationV2\x12\x15\n\renable_tiling\x18\x12 \x01(\x08\x12\x32\n\x0ftiling_hop_size\x18\x13 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x19\n\x11\x65nable_validation\x18\x14 \x01(\x08\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42[\n\x10\x66\x65\x61st.proto.coreB\x16StreamFeatureViewProtoZ/github.com/feast-dev/feast/go/protos/feast/coreb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -36,7 +36,7 @@ _globals['_STREAMFEATUREVIEW']._serialized_start=268 _globals['_STREAMFEATUREVIEW']._serialized_end=379 _globals['_STREAMFEATUREVIEWSPEC']._serialized_start=382 - _globals['_STREAMFEATUREVIEWSPEC']._serialized_end=1137 - _globals['_STREAMFEATUREVIEWSPEC_TAGSENTRY']._serialized_start=1094 - _globals['_STREAMFEATUREVIEWSPEC_TAGSENTRY']._serialized_end=1137 + _globals['_STREAMFEATUREVIEWSPEC']._serialized_end=1164 + _globals['_STREAMFEATUREVIEWSPEC_TAGSENTRY']._serialized_start=1121 + _globals['_STREAMFEATUREVIEWSPEC_TAGSENTRY']._serialized_end=1164 # @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.pyi b/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.pyi index 160a59b35df..853ada60a27 100644 --- a/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.pyi +++ b/sdk/python/feast/protos/feast/core/StreamFeatureView_pb2.pyi @@ -59,7 +59,7 @@ class StreamFeatureView(google.protobuf.message.Message): global___StreamFeatureView = StreamFeatureView class StreamFeatureViewSpec(google.protobuf.message.Message): - """Next available id: 20""" + """Next available id: 21""" DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -97,6 +97,7 @@ class StreamFeatureViewSpec(google.protobuf.message.Message): 
FEATURE_TRANSFORMATION_FIELD_NUMBER: builtins.int ENABLE_TILING_FIELD_NUMBER: builtins.int TILING_HOP_SIZE_FIELD_NUMBER: builtins.int + ENABLE_VALIDATION_FIELD_NUMBER: builtins.int name: builtins.str """Name of the feature view. Must be unique. Not updated.""" project: builtins.str @@ -152,6 +153,8 @@ class StreamFeatureViewSpec(google.protobuf.message.Message): """Hop size for tiling (e.g., 5 minutes). Determines the granularity of pre-aggregated tiles. If not specified, defaults to 5 minutes. Only used when enable_tiling is true. """ + enable_validation: builtins.bool + """Whether schema validation is enabled during materialization""" def __init__( self, *, @@ -174,8 +177,9 @@ class StreamFeatureViewSpec(google.protobuf.message.Message): feature_transformation: feast.core.Transformation_pb2.FeatureTransformationV2 | None = ..., enable_tiling: builtins.bool = ..., tiling_hop_size: google.protobuf.duration_pb2.Duration | None = ..., + enable_validation: builtins.bool = ..., ) -> None: ... def HasField(self, field_name: typing_extensions.Literal["batch_source", b"batch_source", "feature_transformation", b"feature_transformation", "stream_source", b"stream_source", "tiling_hop_size", b"tiling_hop_size", "ttl", b"ttl", "user_defined_function", b"user_defined_function"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["aggregations", b"aggregations", "batch_source", b"batch_source", "description", b"description", "enable_tiling", b"enable_tiling", "entities", b"entities", "entity_columns", b"entity_columns", "feature_transformation", b"feature_transformation", "features", b"features", "mode", b"mode", "name", b"name", "online", b"online", "owner", b"owner", "project", b"project", "stream_source", b"stream_source", "tags", b"tags", "tiling_hop_size", b"tiling_hop_size", "timestamp_field", b"timestamp_field", "ttl", b"ttl", "user_defined_function", b"user_defined_function"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["aggregations", b"aggregations", "batch_source", b"batch_source", "description", b"description", "enable_tiling", b"enable_tiling", "enable_validation", b"enable_validation", "entities", b"entities", "entity_columns", b"entity_columns", "feature_transformation", b"feature_transformation", "features", b"features", "mode", b"mode", "name", b"name", "online", b"online", "owner", b"owner", "project", b"project", "stream_source", b"stream_source", "tags", b"tags", "tiling_hop_size", b"tiling_hop_size", "timestamp_field", b"timestamp_field", "ttl", b"ttl", "user_defined_function", b"user_defined_function"]) -> None: ... global___StreamFeatureViewSpec = StreamFeatureViewSpec diff --git a/sdk/python/feast/protos/feast/types/Value_pb2.py b/sdk/python/feast/protos/feast/types/Value_pb2.py index 2ab1d2cc8fb..5edd8c5bde9 100644 --- a/sdk/python/feast/protos/feast/types/Value_pb2.py +++ b/sdk/python/feast/protos/feast/types/Value_pb2.py @@ -14,7 +14,7 @@ -DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x17\x66\x65\x61st/types/Value.proto\x12\x0b\x66\x65\x61st.types\"\xb0\x03\n\tValueType\"\xa2\x03\n\x04\x45num\x12\x0b\n\x07INVALID\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\n\n\x06STRING\x10\x02\x12\t\n\x05INT32\x10\x03\x12\t\n\x05INT64\x10\x04\x12\n\n\x06\x44OUBLE\x10\x05\x12\t\n\x05\x46LOAT\x10\x06\x12\x08\n\x04\x42OOL\x10\x07\x12\x12\n\x0eUNIX_TIMESTAMP\x10\x08\x12\x0e\n\nBYTES_LIST\x10\x0b\x12\x0f\n\x0bSTRING_LIST\x10\x0c\x12\x0e\n\nINT32_LIST\x10\r\x12\x0e\n\nINT64_LIST\x10\x0e\x12\x0f\n\x0b\x44OUBLE_LIST\x10\x0f\x12\x0e\n\nFLOAT_LIST\x10\x10\x12\r\n\tBOOL_LIST\x10\x11\x12\x17\n\x13UNIX_TIMESTAMP_LIST\x10\x12\x12\x08\n\x04NULL\x10\x13\x12\x07\n\x03MAP\x10\x14\x12\x0c\n\x08MAP_LIST\x10\x15\x12\r\n\tBYTES_SET\x10\x16\x12\x0e\n\nSTRING_SET\x10\x17\x12\r\n\tINT32_SET\x10\x18\x12\r\n\tINT64_SET\x10\x19\x12\x0e\n\nDOUBLE_SET\x10\x1a\x12\r\n\tFLOAT_SET\x10\x1b\x12\x0c\n\x08\x42OOL_SET\x10\x1c\x12\x16\n\x12UNIX_TIMESTAMP_SET\x10\x1d\"\xe0\x08\n\x05Value\x12\x13\n\tbytes_val\x18\x01 \x01(\x0cH\x00\x12\x14\n\nstring_val\x18\x02 \x01(\tH\x00\x12\x13\n\tint32_val\x18\x03 \x01(\x05H\x00\x12\x13\n\tint64_val\x18\x04 \x01(\x03H\x00\x12\x14\n\ndouble_val\x18\x05 \x01(\x01H\x00\x12\x13\n\tfloat_val\x18\x06 \x01(\x02H\x00\x12\x12\n\x08\x62ool_val\x18\x07 \x01(\x08H\x00\x12\x1c\n\x12unix_timestamp_val\x18\x08 \x01(\x03H\x00\x12\x30\n\x0e\x62ytes_list_val\x18\x0b \x01(\x0b\x32\x16.feast.types.BytesListH\x00\x12\x32\n\x0fstring_list_val\x18\x0c \x01(\x0b\x32\x17.feast.types.StringListH\x00\x12\x30\n\x0eint32_list_val\x18\r \x01(\x0b\x32\x16.feast.types.Int32ListH\x00\x12\x30\n\x0eint64_list_val\x18\x0e \x01(\x0b\x32\x16.feast.types.Int64ListH\x00\x12\x32\n\x0f\x64ouble_list_val\x18\x0f \x01(\x0b\x32\x17.feast.types.DoubleListH\x00\x12\x30\n\x0e\x66loat_list_val\x18\x10 \x01(\x0b\x32\x16.feast.types.FloatListH\x00\x12.\n\rbool_list_val\x18\x11 \x01(\x0b\x32\x15.feast.types.BoolListH\x00\x12\x39\n\x17unix_timestamp_list_val\x18\x12 
\x01(\x0b\x32\x16.feast.types.Int64ListH\x00\x12%\n\x08null_val\x18\x13 \x01(\x0e\x32\x11.feast.types.NullH\x00\x12#\n\x07map_val\x18\x14 \x01(\x0b\x32\x10.feast.types.MapH\x00\x12,\n\x0cmap_list_val\x18\x15 \x01(\x0b\x32\x14.feast.types.MapListH\x00\x12.\n\rbytes_set_val\x18\x16 \x01(\x0b\x32\x15.feast.types.BytesSetH\x00\x12\x30\n\x0estring_set_val\x18\x17 \x01(\x0b\x32\x16.feast.types.StringSetH\x00\x12.\n\rint32_set_val\x18\x18 \x01(\x0b\x32\x15.feast.types.Int32SetH\x00\x12.\n\rint64_set_val\x18\x19 \x01(\x0b\x32\x15.feast.types.Int64SetH\x00\x12\x30\n\x0e\x64ouble_set_val\x18\x1a \x01(\x0b\x32\x16.feast.types.DoubleSetH\x00\x12.\n\rfloat_set_val\x18\x1b \x01(\x0b\x32\x15.feast.types.FloatSetH\x00\x12,\n\x0c\x62ool_set_val\x18\x1c \x01(\x0b\x32\x14.feast.types.BoolSetH\x00\x12\x37\n\x16unix_timestamp_set_val\x18\x1d \x01(\x0b\x32\x15.feast.types.Int64SetH\x00\x42\x05\n\x03val\"\x18\n\tBytesList\x12\x0b\n\x03val\x18\x01 \x03(\x0c\"\x19\n\nStringList\x12\x0b\n\x03val\x18\x01 \x03(\t\"\x18\n\tInt32List\x12\x0b\n\x03val\x18\x01 \x03(\x05\"\x18\n\tInt64List\x12\x0b\n\x03val\x18\x01 \x03(\x03\"\x19\n\nDoubleList\x12\x0b\n\x03val\x18\x01 \x03(\x01\"\x18\n\tFloatList\x12\x0b\n\x03val\x18\x01 \x03(\x02\"\x17\n\x08\x42oolList\x12\x0b\n\x03val\x18\x01 \x03(\x08\"\x17\n\x08\x42ytesSet\x12\x0b\n\x03val\x18\x01 \x03(\x0c\"\x18\n\tStringSet\x12\x0b\n\x03val\x18\x01 \x03(\t\"\x17\n\x08Int32Set\x12\x0b\n\x03val\x18\x01 \x03(\x05\"\x17\n\x08Int64Set\x12\x0b\n\x03val\x18\x01 \x03(\x03\"\x18\n\tDoubleSet\x12\x0b\n\x03val\x18\x01 \x03(\x01\"\x17\n\x08\x46loatSet\x12\x0b\n\x03val\x18\x01 \x03(\x02\"\x16\n\x07\x42oolSet\x12\x0b\n\x03val\x18\x01 \x03(\x08\"m\n\x03Map\x12&\n\x03val\x18\x01 \x03(\x0b\x32\x19.feast.types.Map.ValEntry\x1a>\n\x08ValEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.feast.types.Value:\x02\x38\x01\"(\n\x07MapList\x12\x1d\n\x03val\x18\x01 \x03(\x0b\x32\x10.feast.types.Map\"0\n\rRepeatedValue\x12\x1f\n\x03val\x18\x01 
\x03(\x0b\x32\x12.feast.types.Value*\x10\n\x04Null\x12\x08\n\x04NULL\x10\x00\x42Q\n\x11\x66\x65\x61st.proto.typesB\nValueProtoZ0github.com/feast-dev/feast/go/protos/feast/typesb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x66\x65\x61st/types/Value.proto\x12\x0b\x66\x65\x61st.types\"\xe6\x03\n\tValueType\"\xd8\x03\n\x04\x45num\x12\x0b\n\x07INVALID\x10\x00\x12\t\n\x05\x42YTES\x10\x01\x12\n\n\x06STRING\x10\x02\x12\t\n\x05INT32\x10\x03\x12\t\n\x05INT64\x10\x04\x12\n\n\x06\x44OUBLE\x10\x05\x12\t\n\x05\x46LOAT\x10\x06\x12\x08\n\x04\x42OOL\x10\x07\x12\x12\n\x0eUNIX_TIMESTAMP\x10\x08\x12\x0e\n\nBYTES_LIST\x10\x0b\x12\x0f\n\x0bSTRING_LIST\x10\x0c\x12\x0e\n\nINT32_LIST\x10\r\x12\x0e\n\nINT64_LIST\x10\x0e\x12\x0f\n\x0b\x44OUBLE_LIST\x10\x0f\x12\x0e\n\nFLOAT_LIST\x10\x10\x12\r\n\tBOOL_LIST\x10\x11\x12\x17\n\x13UNIX_TIMESTAMP_LIST\x10\x12\x12\x08\n\x04NULL\x10\x13\x12\x07\n\x03MAP\x10\x14\x12\x0c\n\x08MAP_LIST\x10\x15\x12\r\n\tBYTES_SET\x10\x16\x12\x0e\n\nSTRING_SET\x10\x17\x12\r\n\tINT32_SET\x10\x18\x12\r\n\tINT64_SET\x10\x19\x12\x0e\n\nDOUBLE_SET\x10\x1a\x12\r\n\tFLOAT_SET\x10\x1b\x12\x0c\n\x08\x42OOL_SET\x10\x1c\x12\x16\n\x12UNIX_TIMESTAMP_SET\x10\x1d\x12\x08\n\x04JSON\x10 \x12\r\n\tJSON_LIST\x10!\x12\n\n\x06STRUCT\x10\"\x12\x0f\n\x0bSTRUCT_LIST\x10#\"\xff\t\n\x05Value\x12\x13\n\tbytes_val\x18\x01 \x01(\x0cH\x00\x12\x14\n\nstring_val\x18\x02 \x01(\tH\x00\x12\x13\n\tint32_val\x18\x03 \x01(\x05H\x00\x12\x13\n\tint64_val\x18\x04 \x01(\x03H\x00\x12\x14\n\ndouble_val\x18\x05 \x01(\x01H\x00\x12\x13\n\tfloat_val\x18\x06 \x01(\x02H\x00\x12\x12\n\x08\x62ool_val\x18\x07 \x01(\x08H\x00\x12\x1c\n\x12unix_timestamp_val\x18\x08 \x01(\x03H\x00\x12\x30\n\x0e\x62ytes_list_val\x18\x0b \x01(\x0b\x32\x16.feast.types.BytesListH\x00\x12\x32\n\x0fstring_list_val\x18\x0c \x01(\x0b\x32\x17.feast.types.StringListH\x00\x12\x30\n\x0eint32_list_val\x18\r \x01(\x0b\x32\x16.feast.types.Int32ListH\x00\x12\x30\n\x0eint64_list_val\x18\x0e 
\x01(\x0b\x32\x16.feast.types.Int64ListH\x00\x12\x32\n\x0f\x64ouble_list_val\x18\x0f \x01(\x0b\x32\x17.feast.types.DoubleListH\x00\x12\x30\n\x0e\x66loat_list_val\x18\x10 \x01(\x0b\x32\x16.feast.types.FloatListH\x00\x12.\n\rbool_list_val\x18\x11 \x01(\x0b\x32\x15.feast.types.BoolListH\x00\x12\x39\n\x17unix_timestamp_list_val\x18\x12 \x01(\x0b\x32\x16.feast.types.Int64ListH\x00\x12%\n\x08null_val\x18\x13 \x01(\x0e\x32\x11.feast.types.NullH\x00\x12#\n\x07map_val\x18\x14 \x01(\x0b\x32\x10.feast.types.MapH\x00\x12,\n\x0cmap_list_val\x18\x15 \x01(\x0b\x32\x14.feast.types.MapListH\x00\x12.\n\rbytes_set_val\x18\x16 \x01(\x0b\x32\x15.feast.types.BytesSetH\x00\x12\x30\n\x0estring_set_val\x18\x17 \x01(\x0b\x32\x16.feast.types.StringSetH\x00\x12.\n\rint32_set_val\x18\x18 \x01(\x0b\x32\x15.feast.types.Int32SetH\x00\x12.\n\rint64_set_val\x18\x19 \x01(\x0b\x32\x15.feast.types.Int64SetH\x00\x12\x30\n\x0e\x64ouble_set_val\x18\x1a \x01(\x0b\x32\x16.feast.types.DoubleSetH\x00\x12.\n\rfloat_set_val\x18\x1b \x01(\x0b\x32\x15.feast.types.FloatSetH\x00\x12,\n\x0c\x62ool_set_val\x18\x1c \x01(\x0b\x32\x14.feast.types.BoolSetH\x00\x12\x37\n\x16unix_timestamp_set_val\x18\x1d \x01(\x0b\x32\x15.feast.types.Int64SetH\x00\x12\x12\n\x08json_val\x18 \x01(\tH\x00\x12\x30\n\rjson_list_val\x18! 
\x01(\x0b\x32\x17.feast.types.StringListH\x00\x12&\n\nstruct_val\x18\" \x01(\x0b\x32\x10.feast.types.MapH\x00\x12/\n\x0fstruct_list_val\x18# \x01(\x0b\x32\x14.feast.types.MapListH\x00\x42\x05\n\x03val\"\x18\n\tBytesList\x12\x0b\n\x03val\x18\x01 \x03(\x0c\"\x19\n\nStringList\x12\x0b\n\x03val\x18\x01 \x03(\t\"\x18\n\tInt32List\x12\x0b\n\x03val\x18\x01 \x03(\x05\"\x18\n\tInt64List\x12\x0b\n\x03val\x18\x01 \x03(\x03\"\x19\n\nDoubleList\x12\x0b\n\x03val\x18\x01 \x03(\x01\"\x18\n\tFloatList\x12\x0b\n\x03val\x18\x01 \x03(\x02\"\x17\n\x08\x42oolList\x12\x0b\n\x03val\x18\x01 \x03(\x08\"\x17\n\x08\x42ytesSet\x12\x0b\n\x03val\x18\x01 \x03(\x0c\"\x18\n\tStringSet\x12\x0b\n\x03val\x18\x01 \x03(\t\"\x17\n\x08Int32Set\x12\x0b\n\x03val\x18\x01 \x03(\x05\"\x17\n\x08Int64Set\x12\x0b\n\x03val\x18\x01 \x03(\x03\"\x18\n\tDoubleSet\x12\x0b\n\x03val\x18\x01 \x03(\x01\"\x17\n\x08\x46loatSet\x12\x0b\n\x03val\x18\x01 \x03(\x02\"\x16\n\x07\x42oolSet\x12\x0b\n\x03val\x18\x01 \x03(\x08\"m\n\x03Map\x12&\n\x03val\x18\x01 \x03(\x0b\x32\x19.feast.types.Map.ValEntry\x1a>\n\x08ValEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.feast.types.Value:\x02\x38\x01\"(\n\x07MapList\x12\x1d\n\x03val\x18\x01 \x03(\x0b\x32\x10.feast.types.Map\"0\n\rRepeatedValue\x12\x1f\n\x03val\x18\x01 \x03(\x0b\x32\x12.feast.types.Value*\x10\n\x04Null\x12\x08\n\x04NULL\x10\x00\x42Q\n\x11\x66\x65\x61st.proto.typesB\nValueProtoZ0github.com/feast-dev/feast/go/protos/feast/typesb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -24,48 +24,48 @@ _globals['DESCRIPTOR']._serialized_options = b'\n\021feast.proto.typesB\nValueProtoZ0github.com/feast-dev/feast/go/protos/feast/types' _globals['_MAP_VALENTRY']._options = None _globals['_MAP_VALENTRY']._serialized_options = b'8\001' - _globals['_NULL']._serialized_start=2160 - _globals['_NULL']._serialized_end=2176 + _globals['_NULL']._serialized_start=2373 + _globals['_NULL']._serialized_end=2389 
_globals['_VALUETYPE']._serialized_start=41 - _globals['_VALUETYPE']._serialized_end=473 + _globals['_VALUETYPE']._serialized_end=527 _globals['_VALUETYPE_ENUM']._serialized_start=55 - _globals['_VALUETYPE_ENUM']._serialized_end=473 - _globals['_VALUE']._serialized_start=476 - _globals['_VALUE']._serialized_end=1596 - _globals['_BYTESLIST']._serialized_start=1598 - _globals['_BYTESLIST']._serialized_end=1622 - _globals['_STRINGLIST']._serialized_start=1624 - _globals['_STRINGLIST']._serialized_end=1649 - _globals['_INT32LIST']._serialized_start=1651 - _globals['_INT32LIST']._serialized_end=1675 - _globals['_INT64LIST']._serialized_start=1677 - _globals['_INT64LIST']._serialized_end=1701 - _globals['_DOUBLELIST']._serialized_start=1703 - _globals['_DOUBLELIST']._serialized_end=1728 - _globals['_FLOATLIST']._serialized_start=1730 - _globals['_FLOATLIST']._serialized_end=1754 - _globals['_BOOLLIST']._serialized_start=1756 - _globals['_BOOLLIST']._serialized_end=1779 - _globals['_BYTESSET']._serialized_start=1781 - _globals['_BYTESSET']._serialized_end=1804 - _globals['_STRINGSET']._serialized_start=1806 - _globals['_STRINGSET']._serialized_end=1830 - _globals['_INT32SET']._serialized_start=1832 - _globals['_INT32SET']._serialized_end=1855 - _globals['_INT64SET']._serialized_start=1857 - _globals['_INT64SET']._serialized_end=1880 - _globals['_DOUBLESET']._serialized_start=1882 - _globals['_DOUBLESET']._serialized_end=1906 - _globals['_FLOATSET']._serialized_start=1908 - _globals['_FLOATSET']._serialized_end=1931 - _globals['_BOOLSET']._serialized_start=1933 - _globals['_BOOLSET']._serialized_end=1955 - _globals['_MAP']._serialized_start=1957 - _globals['_MAP']._serialized_end=2066 - _globals['_MAP_VALENTRY']._serialized_start=2004 - _globals['_MAP_VALENTRY']._serialized_end=2066 - _globals['_MAPLIST']._serialized_start=2068 - _globals['_MAPLIST']._serialized_end=2108 - _globals['_REPEATEDVALUE']._serialized_start=2110 - _globals['_REPEATEDVALUE']._serialized_end=2158 + 
_globals['_VALUETYPE_ENUM']._serialized_end=527 + _globals['_VALUE']._serialized_start=530 + _globals['_VALUE']._serialized_end=1809 + _globals['_BYTESLIST']._serialized_start=1811 + _globals['_BYTESLIST']._serialized_end=1835 + _globals['_STRINGLIST']._serialized_start=1837 + _globals['_STRINGLIST']._serialized_end=1862 + _globals['_INT32LIST']._serialized_start=1864 + _globals['_INT32LIST']._serialized_end=1888 + _globals['_INT64LIST']._serialized_start=1890 + _globals['_INT64LIST']._serialized_end=1914 + _globals['_DOUBLELIST']._serialized_start=1916 + _globals['_DOUBLELIST']._serialized_end=1941 + _globals['_FLOATLIST']._serialized_start=1943 + _globals['_FLOATLIST']._serialized_end=1967 + _globals['_BOOLLIST']._serialized_start=1969 + _globals['_BOOLLIST']._serialized_end=1992 + _globals['_BYTESSET']._serialized_start=1994 + _globals['_BYTESSET']._serialized_end=2017 + _globals['_STRINGSET']._serialized_start=2019 + _globals['_STRINGSET']._serialized_end=2043 + _globals['_INT32SET']._serialized_start=2045 + _globals['_INT32SET']._serialized_end=2068 + _globals['_INT64SET']._serialized_start=2070 + _globals['_INT64SET']._serialized_end=2093 + _globals['_DOUBLESET']._serialized_start=2095 + _globals['_DOUBLESET']._serialized_end=2119 + _globals['_FLOATSET']._serialized_start=2121 + _globals['_FLOATSET']._serialized_end=2144 + _globals['_BOOLSET']._serialized_start=2146 + _globals['_BOOLSET']._serialized_end=2168 + _globals['_MAP']._serialized_start=2170 + _globals['_MAP']._serialized_end=2279 + _globals['_MAP_VALENTRY']._serialized_start=2217 + _globals['_MAP_VALENTRY']._serialized_end=2279 + _globals['_MAPLIST']._serialized_start=2281 + _globals['_MAPLIST']._serialized_end=2321 + _globals['_REPEATEDVALUE']._serialized_start=2323 + _globals['_REPEATEDVALUE']._serialized_end=2371 # @@protoc_insertion_point(module_scope) diff --git a/sdk/python/feast/protos/feast/types/Value_pb2.pyi b/sdk/python/feast/protos/feast/types/Value_pb2.pyi index 0e10849ebad..64079291f4d 
100644 --- a/sdk/python/feast/protos/feast/types/Value_pb2.pyi +++ b/sdk/python/feast/protos/feast/types/Value_pb2.pyi @@ -82,6 +82,10 @@ class ValueType(google.protobuf.message.Message): FLOAT_SET: ValueType._Enum.ValueType # 27 BOOL_SET: ValueType._Enum.ValueType # 28 UNIX_TIMESTAMP_SET: ValueType._Enum.ValueType # 29 + JSON: ValueType._Enum.ValueType # 32 + JSON_LIST: ValueType._Enum.ValueType # 33 + STRUCT: ValueType._Enum.ValueType # 34 + STRUCT_LIST: ValueType._Enum.ValueType # 35 class Enum(_Enum, metaclass=_EnumEnumTypeWrapper): ... INVALID: ValueType.Enum.ValueType # 0 @@ -112,6 +116,10 @@ class ValueType(google.protobuf.message.Message): FLOAT_SET: ValueType.Enum.ValueType # 27 BOOL_SET: ValueType.Enum.ValueType # 28 UNIX_TIMESTAMP_SET: ValueType.Enum.ValueType # 29 + JSON: ValueType.Enum.ValueType # 32 + JSON_LIST: ValueType.Enum.ValueType # 33 + STRUCT: ValueType.Enum.ValueType # 34 + STRUCT_LIST: ValueType.Enum.ValueType # 35 def __init__( self, @@ -149,6 +157,10 @@ class Value(google.protobuf.message.Message): FLOAT_SET_VAL_FIELD_NUMBER: builtins.int BOOL_SET_VAL_FIELD_NUMBER: builtins.int UNIX_TIMESTAMP_SET_VAL_FIELD_NUMBER: builtins.int + JSON_VAL_FIELD_NUMBER: builtins.int + JSON_LIST_VAL_FIELD_NUMBER: builtins.int + STRUCT_VAL_FIELD_NUMBER: builtins.int + STRUCT_LIST_VAL_FIELD_NUMBER: builtins.int bytes_val: builtins.bytes string_val: builtins.str int32_val: builtins.int @@ -194,6 +206,13 @@ class Value(google.protobuf.message.Message): def bool_set_val(self) -> global___BoolSet: ... @property def unix_timestamp_set_val(self) -> global___Int64Set: ... + json_val: builtins.str + @property + def json_list_val(self) -> global___StringList: ... + @property + def struct_val(self) -> global___Map: ... + @property + def struct_list_val(self) -> global___MapList: ... 
def __init__( self, *, @@ -224,10 +243,14 @@ class Value(google.protobuf.message.Message): float_set_val: global___FloatSet | None = ..., bool_set_val: global___BoolSet | None = ..., unix_timestamp_set_val: global___Int64Set | None = ..., + json_val: builtins.str = ..., + json_list_val: global___StringList | None = ..., + struct_val: global___Map | None = ..., + struct_list_val: global___MapList | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["bool_list_val", b"bool_list_val", "bool_set_val", b"bool_set_val", "bool_val", b"bool_val", "bytes_list_val", b"bytes_list_val", "bytes_set_val", b"bytes_set_val", "bytes_val", b"bytes_val", "double_list_val", b"double_list_val", "double_set_val", b"double_set_val", "double_val", b"double_val", "float_list_val", b"float_list_val", "float_set_val", b"float_set_val", "float_val", b"float_val", "int32_list_val", b"int32_list_val", "int32_set_val", b"int32_set_val", "int32_val", b"int32_val", "int64_list_val", b"int64_list_val", "int64_set_val", b"int64_set_val", "int64_val", b"int64_val", "map_list_val", b"map_list_val", "map_val", b"map_val", "null_val", b"null_val", "string_list_val", b"string_list_val", "string_set_val", b"string_set_val", "string_val", b"string_val", "unix_timestamp_list_val", b"unix_timestamp_list_val", "unix_timestamp_set_val", b"unix_timestamp_set_val", "unix_timestamp_val", b"unix_timestamp_val", "val", b"val"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing_extensions.Literal["bool_list_val", b"bool_list_val", "bool_set_val", b"bool_set_val", "bool_val", b"bool_val", "bytes_list_val", b"bytes_list_val", "bytes_set_val", b"bytes_set_val", "bytes_val", b"bytes_val", "double_list_val", b"double_list_val", "double_set_val", b"double_set_val", "double_val", b"double_val", "float_list_val", b"float_list_val", "float_set_val", b"float_set_val", "float_val", b"float_val", "int32_list_val", b"int32_list_val", "int32_set_val", b"int32_set_val", "int32_val", b"int32_val", "int64_list_val", b"int64_list_val", "int64_set_val", b"int64_set_val", "int64_val", b"int64_val", "map_list_val", b"map_list_val", "map_val", b"map_val", "null_val", b"null_val", "string_list_val", b"string_list_val", "string_set_val", b"string_set_val", "string_val", b"string_val", "unix_timestamp_list_val", b"unix_timestamp_list_val", "unix_timestamp_set_val", b"unix_timestamp_set_val", "unix_timestamp_val", b"unix_timestamp_val", "val", b"val"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["val", b"val"]) -> typing_extensions.Literal["bytes_val", "string_val", "int32_val", "int64_val", "double_val", "float_val", "bool_val", "unix_timestamp_val", "bytes_list_val", "string_list_val", "int32_list_val", "int64_list_val", "double_list_val", "float_list_val", "bool_list_val", "unix_timestamp_list_val", "null_val", "map_val", "map_list_val", "bytes_set_val", "string_set_val", "int32_set_val", "int64_set_val", "double_set_val", "float_set_val", "bool_set_val", "unix_timestamp_set_val"] | None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["bool_list_val", b"bool_list_val", "bool_set_val", b"bool_set_val", "bool_val", b"bool_val", "bytes_list_val", b"bytes_list_val", "bytes_set_val", b"bytes_set_val", "bytes_val", b"bytes_val", "double_list_val", b"double_list_val", "double_set_val", b"double_set_val", "double_val", b"double_val", "float_list_val", b"float_list_val", "float_set_val", b"float_set_val", "float_val", b"float_val", "int32_list_val", b"int32_list_val", "int32_set_val", b"int32_set_val", "int32_val", b"int32_val", "int64_list_val", b"int64_list_val", "int64_set_val", b"int64_set_val", "int64_val", b"int64_val", "json_list_val", b"json_list_val", "json_val", b"json_val", "map_list_val", b"map_list_val", "map_val", b"map_val", "null_val", b"null_val", "string_list_val", b"string_list_val", "string_set_val", b"string_set_val", "string_val", b"string_val", "struct_list_val", b"struct_list_val", "struct_val", b"struct_val", "unix_timestamp_list_val", b"unix_timestamp_list_val", "unix_timestamp_set_val", b"unix_timestamp_set_val", "unix_timestamp_val", b"unix_timestamp_val", "val", b"val"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["bool_list_val", b"bool_list_val", "bool_set_val", b"bool_set_val", "bool_val", b"bool_val", "bytes_list_val", b"bytes_list_val", "bytes_set_val", b"bytes_set_val", "bytes_val", b"bytes_val", "double_list_val", b"double_list_val", "double_set_val", b"double_set_val", "double_val", b"double_val", "float_list_val", b"float_list_val", "float_set_val", b"float_set_val", "float_val", b"float_val", "int32_list_val", b"int32_list_val", "int32_set_val", b"int32_set_val", "int32_val", b"int32_val", "int64_list_val", b"int64_list_val", "int64_set_val", b"int64_set_val", "int64_val", b"int64_val", "json_list_val", b"json_list_val", "json_val", b"json_val", "map_list_val", b"map_list_val", "map_val", b"map_val", "null_val", b"null_val", "string_list_val", b"string_list_val", "string_set_val", b"string_set_val", "string_val", b"string_val", "struct_list_val", b"struct_list_val", "struct_val", b"struct_val", "unix_timestamp_list_val", b"unix_timestamp_list_val", "unix_timestamp_set_val", b"unix_timestamp_set_val", "unix_timestamp_val", b"unix_timestamp_val", "val", b"val"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["val", b"val"]) -> typing_extensions.Literal["bytes_val", "string_val", "int32_val", "int64_val", "double_val", "float_val", "bool_val", "unix_timestamp_val", "bytes_list_val", "string_list_val", "int32_list_val", "int64_list_val", "double_list_val", "float_list_val", "bool_list_val", "unix_timestamp_list_val", "null_val", "map_val", "map_list_val", "bytes_set_val", "string_set_val", "int32_set_val", "int64_set_val", "double_set_val", "float_set_val", "bool_set_val", "unix_timestamp_set_val", "json_val", "json_list_val", "struct_val", "struct_list_val"] | None: ... 
global___Value = Value diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index 72cd46ba0ab..86d2851331f 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -58,7 +58,6 @@ "feast.infra.online_stores.contrib.cassandra_online_store.cassandra_online_store.CassandraOnlineStore": "feast.infra.online_stores.cassandra_online_store.cassandra_online_store.CassandraOnlineStore", "feast.infra.online_stores.contrib.mysql_online_store.mysql.MySQLOnlineStore": "feast.infra.online_stores.mysql_online_store.mysql.MySQLOnlineStore", "feast.infra.online_stores.contrib.hazelcast_online_store.hazelcast_online_store.HazelcastOnlineStore": "feast.infra.online_stores.hazelcast_online_store.hazelcast_online_store.HazelcastOnlineStore", - "feast.infra.online_stores.contrib.ikv_online_store.ikv.IKVOnlineStore": "feast.infra.online_stores.ikv_online_store.ikv.IKVOnlineStore", "feast.infra.online_stores.contrib.elasticsearch.ElasticSearchOnlineStore": "feast.infra.online_stores.elasticsearch_online_store.elasticsearch.ElasticSearchOnlineStore", "feast.infra.online_stores.contrib.singlestore_online_store.singlestore.SingleStoreOnlineStore": "feast.infra.online_stores.singlestore_online_store.singlestore.SingleStoreOnlineStore", "feast.infra.online_stores.contrib.qdrant.QdrantOnlineStore": "feast.infra.online_stores.qdrant_online_store.qdrant.QdrantOnlineStore", @@ -77,13 +76,13 @@ "cassandra": "feast.infra.online_stores.cassandra_online_store.cassandra_online_store.CassandraOnlineStore", "mysql": "feast.infra.online_stores.mysql_online_store.mysql.MySQLOnlineStore", "hazelcast": "feast.infra.online_stores.hazelcast_online_store.hazelcast_online_store.HazelcastOnlineStore", - "ikv": "feast.infra.online_stores.ikv_online_store.ikv.IKVOnlineStore", "elasticsearch": "feast.infra.online_stores.elasticsearch_online_store.elasticsearch.ElasticSearchOnlineStore", "remote": "feast.infra.online_stores.remote.RemoteOnlineStore", "singlestore": 
"feast.infra.online_stores.singlestore_online_store.singlestore.SingleStoreOnlineStore", "qdrant": "feast.infra.online_stores.qdrant_online_store.qdrant.QdrantOnlineStore", "couchbase.online": "feast.infra.online_stores.couchbase_online_store.couchbase.CouchbaseOnlineStore", "milvus": "feast.infra.online_stores.milvus_online_store.milvus.MilvusOnlineStore", + "mongodb": "feast.infra.online_stores.mongodb_online_store.MongoDBOnlineStore", "hybrid": "feast.infra.online_stores.hybrid_online_store.hybrid_online_store.HybridOnlineStore", **LEGACY_ONLINE_STORE_CLASS_FOR_TYPE, } diff --git a/sdk/python/feast/repo_operations.py b/sdk/python/feast/repo_operations.py index fa5d297752a..1a6de75b2e4 100644 --- a/sdk/python/feast/repo_operations.py +++ b/sdk/python/feast/repo_operations.py @@ -164,8 +164,9 @@ def parse_repo(repo_root: Path) -> RepoContents: # Handle batch sources defined with feature views. batch_source = obj.batch_source - assert batch_source - if not any((batch_source is ds) for ds in res.data_sources): + if batch_source is not None and not any( + (batch_source is ds) for ds in res.data_sources + ): res.data_sources.append(batch_source) # Handle stream sources defined with feature views. @@ -180,10 +181,10 @@ def parse_repo(repo_root: Path) -> RepoContents: # Handle batch sources defined with feature views. batch_source = obj.batch_source - if not any((batch_source is ds) for ds in res.data_sources): + if batch_source is not None and not any( + (batch_source is ds) for ds in res.data_sources + ): res.data_sources.append(batch_source) - - # Handle stream sources defined with feature views. assert obj.stream_source stream_source = obj.stream_source if not any((stream_source is ds) for ds in res.data_sources): @@ -195,7 +196,9 @@ def parse_repo(repo_root: Path) -> RepoContents: # Handle batch sources defined with feature views. 
batch_source = obj.batch_source - if not any((batch_source is ds) for ds in res.data_sources): + if batch_source is not None and not any( + (batch_source is ds) for ds in res.data_sources + ): res.data_sources.append(batch_source) elif isinstance(obj, Entity) and not any( (obj is entity) for entity in res.entities @@ -234,7 +237,9 @@ def plan( # TODO: When we support multiple projects in a single repo, we should filter repo contents by project if not skip_source_validation: provider = store._get_provider() - data_sources = [t.batch_source for t in repo.feature_views] + data_sources = [ + t.batch_source for t in repo.feature_views if t.batch_source is not None + ] # Make sure the data source used by this feature view is supported by Feast for data_source in data_sources: provider.validate_data_source(store.config, data_source) @@ -345,7 +350,9 @@ def apply_total_with_repo_instance( ): if not skip_source_validation: provider = store._get_provider() - data_sources = [t.batch_source for t in repo.feature_views] + data_sources = [ + t.batch_source for t in repo.feature_views if t.batch_source is not None + ] # Make sure the data source used by this feature view is supported by Feast for data_source in data_sources: provider.validate_data_source(store.config, data_source) diff --git a/sdk/python/feast/stream_feature_view.py b/sdk/python/feast/stream_feature_view.py index d30db29b299..b8f410f9a48 100644 --- a/sdk/python/feast/stream_feature_view.py +++ b/sdk/python/feast/stream_feature_view.py @@ -16,6 +16,7 @@ from feast.entity import Entity from feast.feature_view import FeatureView from feast.field import Field +from feast.proto_utils import mode_to_string, serialize_data_source from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( UserDefinedFunction as UserDefinedFunctionProto, @@ -120,6 +121,7 @@ def __init__( stream_engine: Optional[Dict[str, Any]] = None, enable_tiling: bool = 
False, tiling_hop_size: Optional[timedelta] = None, + enable_validation: bool = False, ): if not flags_helper.is_test(): warnings.warn( @@ -183,6 +185,7 @@ def __init__( source=source, # type: ignore[arg-type] mode=mode, sink_source=sink_source, + enable_validation=enable_validation, ) def get_feature_transformation(self) -> Optional[Transformation]: @@ -233,15 +236,8 @@ def to_proto(self): meta = self.to_proto_meta() ttl_duration = self.get_ttl_duration() - batch_source_proto = None - if self.batch_source: - batch_source_proto = self.batch_source.to_proto() - batch_source_proto.data_source_class_type = f"{self.batch_source.__class__.__module__}.{self.batch_source.__class__.__name__}" - - stream_source_proto = None - if self.stream_source: - stream_source_proto = self.stream_source.to_proto() - stream_source_proto.data_source_class_type = f"{self.stream_source.__class__.__module__}.{self.stream_source.__class__.__name__}" + batch_source_proto = serialize_data_source(self.batch_source) + stream_source_proto = serialize_data_source(self.stream_source) udf_proto, feature_transformation = None, None if self.udf: @@ -260,10 +256,6 @@ def to_proto(self): user_defined_function=udf_proto_v2, ) - mode = ( - self.mode.value if isinstance(self.mode, TransformationMode) else self.mode - ) - # Serialize tiling configuration tiling_hop_size_duration = None if self.tiling_hop_size is not None: @@ -282,13 +274,14 @@ def to_proto(self): owner=self.owner, ttl=ttl_duration, online=self.online, - batch_source=batch_source_proto or None, - stream_source=stream_source_proto or None, + batch_source=batch_source_proto, + stream_source=stream_source_proto, timestamp_field=self.timestamp_field, aggregations=[agg.to_proto() for agg in self.aggregations], - mode=mode, + mode=mode_to_string(self.mode), enable_tiling=self.enable_tiling, tiling_hop_size=tiling_hop_size_duration, + enable_validation=self.enable_validation, ) return StreamFeatureViewProto(spec=spec, meta=meta) @@ -350,6 +343,7 @@ 
def from_proto(cls, sfv_proto): and sfv_proto.spec.tiling_hop_size.ToNanoseconds() != 0 else None ), + enable_validation=sfv_proto.spec.enable_validation, ) if batch_source: @@ -403,6 +397,7 @@ def __copy__(self): udf=self.udf, udf_string=self.udf_string, feature_transformation=self.feature_transformation, + enable_validation=self.enable_validation, ) fv.entities = self.entities fv.features = copy.copy(self.features) @@ -428,6 +423,7 @@ def stream_feature_view( aggregations: Optional[List[Aggregation]] = None, mode: Optional[str] = "spark", timestamp_field: Optional[str] = "", + enable_validation: bool = False, ): """ Creates an StreamFeatureView object with the given user function as udf. @@ -459,6 +455,7 @@ def decorator(user_function): aggregations=aggregations, mode=mode, timestamp_field=timestamp_field, + enable_validation=enable_validation, ) functools.update_wrapper(wrapper=stream_feature_view_obj, wrapped=user_function) return stream_feature_view_obj diff --git a/sdk/python/feast/templates/local/feature_repo/feature_definitions.py b/sdk/python/feast/templates/local/feature_repo/feature_definitions.py index e2fd0a891cf..6fe94a5fa59 100644 --- a/sdk/python/feast/templates/local/feature_repo/feature_definitions.py +++ b/sdk/python/feast/templates/local/feature_repo/feature_definitions.py @@ -17,7 +17,7 @@ from feast.feature_logging import LoggingConfig from feast.infra.offline_stores.file_source import FileLoggingDestination from feast.on_demand_feature_view import on_demand_feature_view -from feast.types import Float32, Float64, Int64 +from feast.types import Float32, Float64, Int64, Json, Map, String, Struct # Define a project for the feature repo project = Project(name="%PROJECT_NAME%", description="A project for driver statistics") @@ -52,12 +52,26 @@ Field(name="conv_rate", dtype=Float32), Field(name="acc_rate", dtype=Float32), Field(name="avg_daily_trips", dtype=Int64, description="Average daily trips"), + Field( + name="driver_metadata", + dtype=Map, + 
description="Driver metadata as key-value pairs", + ), + Field( + name="driver_config", dtype=Json, description="Driver configuration as JSON" + ), + Field( + name="driver_profile", + dtype=Struct({"name": String, "age": String}), + description="Driver profile as a typed struct", + ), ], online=True, source=driver_stats_source, # Tags are user defined key/value pairs that are attached to each # feature view tags={"team": "driver_performance"}, + enable_validation=True, ) # Define a request data source which encodes features / information only @@ -119,6 +133,9 @@ def transformed_conv_rate(inputs: pd.DataFrame) -> pd.DataFrame: Field(name="conv_rate", dtype=Float32), Field(name="acc_rate", dtype=Float32), Field(name="avg_daily_trips", dtype=Int64), + Field(name="driver_metadata", dtype=Map), + Field(name="driver_config", dtype=Json), + Field(name="driver_profile", dtype=Struct({"name": String, "age": String})), ], online=True, source=driver_stats_push_source, # Changed from above diff --git a/sdk/python/feast/templates/local/feature_repo/test_workflow.py b/sdk/python/feast/templates/local/feature_repo/test_workflow.py index eebeb113115..82175972321 100644 --- a/sdk/python/feast/templates/local/feature_repo/test_workflow.py +++ b/sdk/python/feast/templates/local/feature_repo/test_workflow.py @@ -1,3 +1,4 @@ +import json import subprocess from datetime import datetime @@ -45,6 +46,11 @@ def run_demo(): "conv_rate": [1.0], "acc_rate": [1.0], "avg_daily_trips": [1000], + "driver_metadata": [{"vehicle_type": "truck", "rating": "5.0"}], + "driver_config": [ + json.dumps({"max_distance_km": 500, "preferred_zones": ["north"]}) + ], + "driver_profile": [{"name": "driver_1001_updated", "age": "30"}], } ) print(event_df) @@ -115,6 +121,9 @@ def fetch_online_features(store, source: str = ""): else: features_to_fetch = [ "driver_hourly_stats:acc_rate", + "driver_hourly_stats:driver_metadata", + "driver_hourly_stats:driver_config", + "driver_hourly_stats:driver_profile", 
"transformed_conv_rate:conv_rate_plus_val1", "transformed_conv_rate:conv_rate_plus_val2", ] diff --git a/sdk/python/feast/type_map.py b/sdk/python/feast/type_map.py index b7b97b0a29a..da3093f2a19 100644 --- a/sdk/python/feast/type_map.py +++ b/sdk/python/feast/type_map.py @@ -83,6 +83,30 @@ def feast_value_type_to_python_type(field_value_proto: ProtoValue) -> Any: return None val = getattr(field_value_proto, val_attr) + # Handle JSON types — stored as strings but returned as parsed Python objects + if val_attr == "json_val": + try: + return json.loads(val) + except (json.JSONDecodeError, TypeError): + return val + elif val_attr == "json_list_val": + result = [] + for v in val.val: + if isinstance(v, str): + try: + result.append(json.loads(v)) + except (json.JSONDecodeError, TypeError): + result.append(v) + else: + result.append(v) + return result + + # Handle Struct types — stored using Map proto, returned as dicts + if val_attr == "struct_val": + return _handle_map_value(val) + elif val_attr == "struct_list_val": + return _handle_map_list_value(val) + # Handle Map and MapList types FIRST (before generic list processing) if val_attr == "map_val": return _handle_map_value(val) @@ -162,7 +186,7 @@ def feast_value_type_to_pandas_type(value_type: ValueType) -> Any: ValueType.UNIX_TIMESTAMP: "datetime64[ns]", } if ( - value_type.name == "MAP" + value_type.name in ("MAP", "JSON", "STRUCT") or value_type.name.endswith("_LIST") or value_type.name.endswith("_SET") ): @@ -375,6 +399,12 @@ def _convert_value_type_str_to_value_type(type_str: str) -> ValueType: "FLOAT_LIST": ValueType.FLOAT_LIST, "BOOL_LIST": ValueType.BOOL_LIST, "UNIX_TIMESTAMP_LIST": ValueType.UNIX_TIMESTAMP_LIST, + "MAP": ValueType.MAP, + "MAP_LIST": ValueType.MAP_LIST, + "JSON": ValueType.JSON, + "JSON_LIST": ValueType.JSON_LIST, + "STRUCT": ValueType.STRUCT, + "STRUCT_LIST": ValueType.STRUCT_LIST, } return type_map.get(type_str, ValueType.STRING) @@ -483,6 +513,105 @@ def _python_datetime_to_int_timestamp( 
return int_timestamps +def _convert_timestamp_collection_to_proto( + values: List[Any], + proto_field: str, + proto_type: type, +) -> List[ProtoValue]: + """Convert timestamp collection values (list or set) to proto. + + Args: + values: List of timestamp collections to convert. + proto_field: The proto field name (e.g., 'unix_timestamp_list_val'). + proto_type: The proto type class (e.g., Int64List). + + Returns: + List of ProtoValue with converted timestamps. + """ + result = [] + for value in values: + if value is not None: + result.append( + ProtoValue( + **{ + proto_field: proto_type( + val=_python_datetime_to_int_timestamp(value) + ) + } # type: ignore + ) + ) + else: + result.append(ProtoValue()) + return result + + +def _convert_bool_collection_to_proto( + values: List[Any], + proto_field: str, + proto_type: type, +) -> List[ProtoValue]: + """Convert boolean collection values (list or set) to proto. + + ProtoValue does not support direct conversion of np.bool_, so we need to + explicitly convert each element to Python bool. + + Args: + values: List of boolean collections to convert. + proto_field: The proto field name (e.g., 'bool_list_val'). + proto_type: The proto type class (e.g., BoolList). + + Returns: + List of ProtoValue with converted booleans. + """ + result = [] + for value in values: + if value is not None: + result.append( + ProtoValue(**{proto_field: proto_type(val=[bool(e) for e in value])}) # type: ignore + ) + else: + result.append(ProtoValue()) + return result + + +def _validate_collection_item_types( + sample: Any, + valid_types: List[Type], + feast_value_type: ValueType, +) -> None: + """Validate that collection items match expected types. + + Args: + sample: A sample collection value to check. + valid_types: List of valid Python types for items. + feast_value_type: The Feast value type for error messages. + + Raises: + TypeError: If any item in sample is not a valid type. 
+ """ + if sample is None: + return + if all(type(item) in valid_types for item in sample): + return + + # to_numpy() upcasts INT32/INT64 with NULL to Float64 automatically + int_collection_types = [ + ValueType.INT32_LIST, + ValueType.INT64_LIST, + ValueType.INT32_SET, + ValueType.INT64_SET, + ] + for item in sample: + if type(item) not in valid_types: + if feast_value_type in int_collection_types: + # Check if the float values are due to NULL upcast + if not any(np.isnan(i) for i in sample if isinstance(i, float)): + logger.error( + f"{feast_value_type.name} has NULL values. to_numpy() upcasts to Float64 automatically." + ) + raise _type_err(item, valid_types[0]) + + def _python_set_to_proto_values( feast_value_type: ValueType, values: List[Any] ) -> List[ProtoValue]: @@ -505,25 +634,20 @@ def _python_set_to_proto_values( ) # Convert set to list for proto (proto doesn't have native set type) - # We store unique values in a repeated field - def convert_set_to_list(value): + def convert_set_to_list(value: Any) -> Any: if value is None: return None - # If it's already a set, convert to list if isinstance(value, set): return list(value) - # If it's a list/tuple/ndarray, remove duplicates - elif isinstance(value, (list, tuple, np.ndarray)): + if isinstance(value, (list, tuple, np.ndarray)): return list(set(value)) - else: - return value + return value converted_values = [convert_set_to_list(v) for v in values] sample = next(filter(_non_empty_value, converted_values), None) # Bytes to array type conversion if isinstance(sample, (bytes, bytearray)): - # Bytes of an array containing elements of bytes not supported if feast_value_type == ValueType.BYTES_SET: raise _type_err(sample, ValueType.BYTES_SET) @@ -541,72 +665,164 @@ def convert_set_to_list(value): for list_item in json_values ] return [ - ( - ProtoValue(**{set_field_name: set_proto_type(val=v)}) # type: ignore - if v is not None - else ProtoValue() - ) + ProtoValue(**{set_field_name: set_proto_type(val=v)}) # 
type: ignore[arg-type] + if v is not None + else ProtoValue() for v in json_values ] raise _type_err(sample, set_valid_types[0]) - if sample is not None and not all(type(item) in set_valid_types for item in sample): - for item in sample: - if type(item) not in set_valid_types: - if feast_value_type in [ - ValueType.INT32_SET, - ValueType.INT64_SET, - ]: - if not any(np.isnan(item) for item in sample): - logger.error("Set of Int32 or Int64 type has NULL values.") - raise _type_err(item, set_valid_types[0]) + # Validate item types using shared helper + _validate_collection_item_types(sample, set_valid_types, feast_value_type) + # Handle special types using shared helpers if feast_value_type == ValueType.UNIX_TIMESTAMP_SET: - result = [] - for value in converted_values: - if value is not None: - result.append( - ProtoValue( - unix_timestamp_set_val=Int64Set( - val=_python_datetime_to_int_timestamp(value) # type: ignore - ) - ) - ) - else: - result.append(ProtoValue()) - return result + return _convert_timestamp_collection_to_proto( + converted_values, "unix_timestamp_set_val", Int64Set + ) if feast_value_type == ValueType.BOOL_SET: - result = [] - for value in converted_values: - if value is not None: - result.append( - ProtoValue( - **{ - set_field_name: set_proto_type( - val=[bool(e) for e in value] # type: ignore - ) - } - ) - ) - else: - result.append(ProtoValue()) - return result - return [ - ( - ProtoValue(**{set_field_name: set_proto_type(val=value)}) # type: ignore - if value is not None - else ProtoValue() + return _convert_bool_collection_to_proto( + converted_values, set_field_name, set_proto_type ) + + # Generic set conversion + return [ + ProtoValue(**{set_field_name: set_proto_type(val=value)}) # type: ignore[arg-type] + if value is not None + else ProtoValue() for value in converted_values ] +def _convert_list_values_to_proto( + feast_value_type: ValueType, + values: List[Any], + sample: Any, +) -> List[ProtoValue]: + """Convert list-type values to 
proto. + + Args: + feast_value_type: The target list value type. + values: List of list values to convert. + sample: First non-empty value for type checking. + + Returns: + List of ProtoValue. + """ + if feast_value_type not in PYTHON_LIST_VALUE_TYPE_TO_PROTO_VALUE: + raise Exception(f"Unsupported list type: {feast_value_type}") + + proto_type, field_name, valid_types = PYTHON_LIST_VALUE_TYPE_TO_PROTO_VALUE[ + feast_value_type + ] + + # Bytes to array type conversion + if isinstance(sample, (bytes, bytearray)): + if feast_value_type == ValueType.BYTES_LIST: + raise _type_err(sample, ValueType.BYTES_LIST) + + json_sample = json.loads(sample) + if isinstance(json_sample, list): + json_values = [json.loads(value) for value in values] + if feast_value_type == ValueType.BOOL_LIST: + json_values = [ + [bool(item) for item in list_item] for list_item in json_values + ] + return [ + ProtoValue(**{field_name: proto_type(val=v)}) # type: ignore[arg-type] + for v in json_values + ] + raise _type_err(sample, valid_types[0]) + + # Validate item types using shared helper + _validate_collection_item_types(sample, valid_types, feast_value_type) + + # Handle special types using shared helpers + if feast_value_type == ValueType.UNIX_TIMESTAMP_LIST: + return _convert_timestamp_collection_to_proto( + values, "unix_timestamp_list_val", Int64List + ) + if feast_value_type == ValueType.BOOL_LIST: + return _convert_bool_collection_to_proto(values, field_name, proto_type) + + # Generic list conversion + return [ + ProtoValue(**{field_name: proto_type(val=value)}) # type: ignore[arg-type] + if value is not None + else ProtoValue() + for value in values + ] + + +def _convert_scalar_values_to_proto( + feast_value_type: ValueType, + values: List[Any], + sample: Any, +) -> List[ProtoValue]: + """Convert scalar-type values to proto. + + Args: + feast_value_type: The target scalar value type. + values: List of scalar values to convert. + sample: First non-empty value for type checking. 
+ + Returns: + List of ProtoValue. + """ + if sample is None: + # All input values are None + return [ProtoValue()] * len(values) + + if feast_value_type == ValueType.UNIX_TIMESTAMP: + int_timestamps = _python_datetime_to_int_timestamp(values) + return [ProtoValue(unix_timestamp_val=ts) for ts in int_timestamps] # type: ignore + + field_name, func, valid_scalar_types = PYTHON_SCALAR_VALUE_TYPE_TO_PROTO_VALUE[ + feast_value_type + ] + + # Validate scalar types + if valid_scalar_types: + if (sample == 0 or sample == 0.0) and feast_value_type != ValueType.BOOL: + # Numpy converts 0 to int, but column type may be float + allowed_types = {np.int64, int, np.float64, float, decimal.Decimal} + assert type(sample) in allowed_types, ( + f"Type `{type(sample)}` not in {allowed_types}" + ) + else: + assert type(sample) in valid_scalar_types, ( + f"Type `{type(sample)}` not in {valid_scalar_types}" + ) + + # Handle BOOL specially due to np.bool_ conversion requirement + if feast_value_type == ValueType.BOOL: + return [ + ProtoValue( + **{field_name: func(bool(value) if type(value) is np.bool_ else value)} + ) # type: ignore + if not pd.isnull(value) + else ProtoValue() + for value in values + ] + + # Generic scalar conversion + out = [] + for value in values: + if isinstance(value, ProtoValue): + out.append(value) + elif not pd.isnull(value): + out.append(ProtoValue(**{field_name: func(value)})) + else: + out.append(ProtoValue()) + return out + + def _python_value_to_proto_value( feast_value_type: ValueType, values: List[Any] ) -> List[ProtoValue]: """ Converts a Python (native, pandas) value to a Feast Proto Value based - on a provided value type + on a provided value type. 
Args: feast_value_type: The target value type @@ -615,167 +831,131 @@ def _python_value_to_proto_value( Returns: List of Feast Value Proto """ - # Handle Map and MapList types first + # Handle Map types if feast_value_type == ValueType.MAP: - return [ - ProtoValue(map_val=_python_dict_to_map_proto(value)) - if value is not None - else ProtoValue() - for value in values - ] + result = [] + for value in values: + if value is None: + result.append(ProtoValue()) + else: + if isinstance(value, str): + value = json.loads(value) + if not isinstance(value, dict): + raise TypeError( + f"Expected dict for MAP type, got {type(value).__name__}: {value!r}" + ) + result.append(ProtoValue(map_val=_python_dict_to_map_proto(value))) + return result if feast_value_type == ValueType.MAP_LIST: - return [ - ProtoValue(map_list_val=_python_list_to_map_list_proto(value)) - if value is not None - else ProtoValue() - for value in values - ] - - # ToDo: make a better sample for type checks (more than one element) - sample = next(filter(_non_empty_value, values), None) # first not empty value + result = [] + for value in values: + if value is None: + result.append(ProtoValue()) + else: + if isinstance(value, str): + value = json.loads(value) + if not isinstance(value, list): + raise TypeError( + f"Expected list for MAP_LIST type, got {type(value).__name__}: {value!r}" + ) + result.append( + ProtoValue(map_list_val=_python_list_to_map_list_proto(value)) + ) + return result - # Detect list type and handle separately - if "list" in feast_value_type.name.lower(): - # Feature can be list but None is still valid - if feast_value_type in PYTHON_LIST_VALUE_TYPE_TO_PROTO_VALUE: - proto_type, field_name, valid_types = PYTHON_LIST_VALUE_TYPE_TO_PROTO_VALUE[ - feast_value_type - ] + # Handle JSON type — serialize Python objects as JSON strings + if feast_value_type == ValueType.JSON: + result = [] + for value in values: + if value is None: + result.append(ProtoValue()) + else: + if isinstance(value, 
str): + try: + json.loads(value) + except (json.JSONDecodeError, TypeError) as e: + raise ValueError( + f"Invalid JSON string for JSON type: {e}" + ) from e + json_str = value + else: + json_str = json.dumps(value) + result.append(ProtoValue(json_val=json_str)) + return result - # Bytes to array type conversion - if isinstance(sample, (bytes, bytearray)): - # Bytes of an array containing elements of bytes not supported - if feast_value_type == ValueType.BYTES_LIST: - raise _type_err(sample, ValueType.BYTES_LIST) - - json_sample = json.loads(sample) - if isinstance(json_sample, list): - json_values = [json.loads(value) for value in values] - if feast_value_type == ValueType.BOOL_LIST: - json_values = [ - [bool(item) for item in list_item] - for list_item in json_values - ] - return [ - ProtoValue(**{field_name: proto_type(val=v)}) # type: ignore - for v in json_values - ] - raise _type_err(sample, valid_types[0]) - - if sample is not None and not all( - type(item) in valid_types for item in sample - ): - # to_numpy() in utils._convert_arrow_to_proto() upcasts values of type Array of INT32 or INT64 with NULL values to Float64 automatically. - for item in sample: - if type(item) not in valid_types: - if feast_value_type in [ - ValueType.INT32_LIST, - ValueType.INT64_LIST, - ]: - if not any(np.isnan(item) for item in sample): - logger.error( - "Array of Int32 or Int64 type has NULL values. to_numpy() upcasts to Float64 automatically." - ) - raise _type_err(item, valid_types[0]) - - if feast_value_type == ValueType.UNIX_TIMESTAMP_LIST: - result = [] - for value in values: - if value is not None: - # ProtoValue does actually accept `np.int_` but the typing complains. 
- result.append( - ProtoValue( - unix_timestamp_list_val=Int64List( - val=_python_datetime_to_int_timestamp(value) # type: ignore - ) - ) - ) - else: - result.append(ProtoValue()) - return result - if feast_value_type == ValueType.BOOL_LIST: - # ProtoValue does not support conversion of np.bool_ so we need to convert it to support np.bool_. - result = [] - for value in values: - if value is not None: - result.append( - ProtoValue( - **{field_name: proto_type(val=[bool(e) for e in value])} # type: ignore - ) - ) + if feast_value_type == ValueType.JSON_LIST: + result = [] + for value in values: + if value is None: + result.append(ProtoValue()) + else: + json_strings = [] + for v in value: + if isinstance(v, str): + try: + json.loads(v) + except (json.JSONDecodeError, TypeError) as e: + raise ValueError( + f"Invalid JSON string in JSON_LIST: {e}" + ) from e + json_strings.append(v) else: - result.append(ProtoValue()) - return result - return [ - ( - ProtoValue(**{field_name: proto_type(val=value)}) # type: ignore - if value is not None - else ProtoValue() - ) - for value in values - ] - - # Detect set type and handle separately - if "set" in feast_value_type.name.lower(): - return _python_set_to_proto_values(feast_value_type, values) + json_strings.append(json.dumps(v)) + result.append(ProtoValue(json_list_val=StringList(val=json_strings))) + return result - # Handle scalar types below - else: - if sample is None: - # all input values are None - return [ProtoValue()] * len(values) - - if feast_value_type == ValueType.UNIX_TIMESTAMP: - int_timestamps = _python_datetime_to_int_timestamp(values) - # ProtoValue does actually accept `np.int_` but the typing complains. 
- return [ProtoValue(unix_timestamp_val=ts) for ts in int_timestamps] # type: ignore - - ( - field_name, - func, - valid_scalar_types, - ) = PYTHON_SCALAR_VALUE_TYPE_TO_PROTO_VALUE[feast_value_type] - if valid_scalar_types: - if (sample == 0 or sample == 0.0) and feast_value_type != ValueType.BOOL: - # Numpy convert 0 to int. However, in the feature view definition, the type of column may be a float. - # So, if value is 0, type validation must pass if scalar_types are either int or float. - allowed_types = {np.int64, int, np.float64, float, decimal.Decimal} - assert type(sample) in allowed_types, ( - f"Type `{type(sample)}` not in {allowed_types}" - ) + # Handle Struct type — reuses Map proto for storage + if feast_value_type == ValueType.STRUCT: + result = [] + for value in values: + if value is None: + result.append(ProtoValue()) else: - assert type(sample) in valid_scalar_types, ( - f"Type `{type(sample)}` not in {valid_scalar_types}" - ) - if feast_value_type == ValueType.BOOL: - # ProtoValue does not support conversion of np.bool_ so we need to convert it to support np.bool_. 
- return [ - ( - ProtoValue( - **{ - field_name: func( - bool(value) if type(value) is np.bool_ else value # type: ignore - ) - } + if isinstance(value, str): + value = json.loads(value) + if not isinstance(value, dict): + value = ( + dict(value) + if hasattr(value, "items") + else {"_value": str(value)} ) - if not pd.isnull(value) - else ProtoValue() + result.append(ProtoValue(struct_val=_python_dict_to_map_proto(value))) + return result + + if feast_value_type == ValueType.STRUCT_LIST: + result = [] + for value in values: + if value is None: + result.append(ProtoValue()) + else: + if isinstance(value, str): + value = json.loads(value) + result.append( + ProtoValue(struct_list_val=_python_list_to_map_list_proto(value)) ) - for value in values - ] - if feast_value_type in PYTHON_SCALAR_VALUE_TYPE_TO_PROTO_VALUE: - out = [] - for value in values: - if isinstance(value, ProtoValue): - out.append(value) - elif not pd.isnull(value): - out.append(ProtoValue(**{field_name: func(value)})) - else: - out.append(ProtoValue()) - return out + return result - raise Exception(f"Unsupported data type: ${str(type(values[0]))}") + # Get sample for type checking + sample = next(filter(_non_empty_value, values), None) + + # Dispatch to appropriate converter based on type category + type_name_lower = feast_value_type.name.lower() + + if "list" in type_name_lower: + return _convert_list_values_to_proto(feast_value_type, values, sample) + + if "set" in type_name_lower: + return _python_set_to_proto_values(feast_value_type, values) + + # Scalar types + if ( + feast_value_type in PYTHON_SCALAR_VALUE_TYPE_TO_PROTO_VALUE + or feast_value_type == ValueType.UNIX_TIMESTAMP + ): + return _convert_scalar_values_to_proto(feast_value_type, values, sample) + + raise Exception(f"Unsupported data type: {feast_value_type}") def _python_dict_to_map_proto(python_dict: Dict[str, Any]) -> Map: @@ -866,6 +1046,10 @@ def python_values_to_proto_values( "unix_timestamp_list_val": 
ValueType.UNIX_TIMESTAMP_LIST, "map_val": ValueType.MAP, "map_list_val": ValueType.MAP_LIST, + "json_val": ValueType.JSON, + "json_list_val": ValueType.JSON_LIST, + "struct_val": ValueType.STRUCT, + "struct_list_val": ValueType.STRUCT_LIST, "int32_set_val": ValueType.INT32_SET, "int64_set_val": ValueType.INT64_SET, "double_set_val": ValueType.DOUBLE_SET, @@ -905,6 +1089,12 @@ def pa_to_feast_value_type(pa_type_as_str: str) -> ValueType: if pa_type_as_str.startswith("timestamp"): value_type = ValueType.UNIX_TIMESTAMP + elif pa_type_as_str.startswith("map<"): + value_type = ValueType.MAP + elif pa_type_as_str == "large_string": + value_type = ValueType.JSON + elif pa_type_as_str.startswith("struct<") or pa_type_as_str.startswith("struct{"): + value_type = ValueType.STRUCT else: type_map = { "int32": ValueType.INT32, @@ -950,6 +1140,9 @@ def bq_to_feast_value_type(bq_type_as_str: str) -> ValueType: "BOOL": ValueType.BOOL, "BOOLEAN": ValueType.BOOL, # legacy sql data type "NULL": ValueType.NULL, + "JSON": ValueType.JSON, + "STRUCT": ValueType.STRUCT, + "RECORD": ValueType.STRUCT, } value_type = type_map.get(bq_type_as_str, ValueType.STRING) @@ -974,6 +1167,7 @@ def mssql_to_feast_value_type(mssql_type_as_str: str) -> ValueType: "nchar": ValueType.STRING, "nvarchar": ValueType.STRING, "nvarchar(max)": ValueType.STRING, + "json": ValueType.JSON, "real": ValueType.FLOAT, "smallint": ValueType.INT32, "tinyint": ValueType.INT32, @@ -1003,6 +1197,13 @@ def pa_to_mssql_type(pa_type: "pyarrow.DataType") -> str: if pa_type_as_str.startswith("decimal"): return pa_type_as_str + if pa_type_as_str.startswith("map<"): + return "nvarchar(max)" + if pa_type_as_str == "large_string": + return "nvarchar(max)" + if pa_type_as_str.startswith("struct<") or pa_type_as_str.startswith("struct{"): + return "nvarchar(max)" + # We have to take into account how arrow types map to parquet types as well. # For example, null type maps to int32 in parquet, so we have to use int4 in Redshift. 
# Other mappings have also been adjusted accordingly. @@ -1043,7 +1244,8 @@ def redshift_to_feast_value_type(redshift_type_as_str: str) -> ValueType: "varchar": ValueType.STRING, "timestamp": ValueType.UNIX_TIMESTAMP, "timestamptz": ValueType.UNIX_TIMESTAMP, - "super": ValueType.BYTES, + "super": ValueType.MAP, + "json": ValueType.JSON, # skip date, geometry, hllsketch, time, timetz } @@ -1064,6 +1266,10 @@ def snowflake_type_to_feast_value_type(snowflake_type: str) -> ValueType: "TIMESTAMP_TZ": ValueType.UNIX_TIMESTAMP, "TIMESTAMP_LTZ": ValueType.UNIX_TIMESTAMP, "TIMESTAMP_NTZ": ValueType.UNIX_TIMESTAMP, + "VARIANT": ValueType.MAP, + "OBJECT": ValueType.MAP, + "ARRAY": ValueType.STRING_LIST, + "JSON": ValueType.JSON, } return type_map[snowflake_type] @@ -1110,6 +1316,15 @@ def pa_to_redshift_value_type(pa_type: "pyarrow.DataType") -> str: if pa_type_as_str.startswith("list"): return "super" + if pa_type_as_str.startswith("map<"): + return "super" + + if pa_type_as_str == "large_string": + return "super" + + if pa_type_as_str.startswith("struct<"): + return "super" + # We have to take into account how arrow types map to parquet types as well. # For example, null type maps to int32 in parquet, so we have to use int4 in Redshift. # Other mappings have also been adjusted accordingly. 
@@ -1146,8 +1361,7 @@ def _non_empty_value(value: Any) -> bool: def spark_to_feast_value_type(spark_type_as_str: str) -> ValueType: - # TODO not all spark types are convertible - # Current non-convertible types: interval, map, struct, structfield, binary + # Current non-convertible types: interval, struct, structfield, binary type_map: Dict[str, ValueType] = { "null": ValueType.UNKNOWN, "byte": ValueType.BYTES, @@ -1173,14 +1387,24 @@ def spark_to_feast_value_type(spark_type_as_str: str) -> ValueType: "array": ValueType.UNIX_TIMESTAMP_LIST, "array": ValueType.UNIX_TIMESTAMP_LIST, } - if spark_type_as_str.startswith("decimal"): - spark_type_as_str = "decimal" - if spark_type_as_str.startswith("array Iterator[np.dtype]: @@ -1207,6 +1431,12 @@ def arrow_to_pg_type(t_str: str) -> str: try: if t_str.startswith("timestamp") or t_str.startswith("datetime"): return "timestamptz" if "tz=" in t_str else "timestamp" + if t_str.startswith("map<"): + return "jsonb" + if t_str == "large_string": + return "jsonb" + if t_str.startswith("struct<") or t_str.startswith("struct{"): + return "jsonb" return { "null": "null", "bool": "boolean", @@ -1267,6 +1497,10 @@ def pg_type_to_feast_value_type(type_str: str) -> ValueType: "numeric": ValueType.DOUBLE, "uuid": ValueType.STRING, "uuid[]": ValueType.STRING_LIST, + "json": ValueType.MAP, + "jsonb": ValueType.MAP, + "json[]": ValueType.MAP_LIST, + "jsonb[]": ValueType.MAP_LIST, } value = ( type_map[type_str.lower()] @@ -1300,6 +1534,14 @@ def feast_value_type_to_pa( ValueType.BYTES_LIST: pyarrow.list_(pyarrow.binary()), ValueType.BOOL_LIST: pyarrow.list_(pyarrow.bool_()), ValueType.UNIX_TIMESTAMP_LIST: pyarrow.list_(pyarrow.timestamp(timestamp_unit)), + ValueType.MAP: pyarrow.map_(pyarrow.string(), pyarrow.string()), + ValueType.MAP_LIST: pyarrow.list_( + pyarrow.map_(pyarrow.string(), pyarrow.string()) + ), + ValueType.JSON: pyarrow.large_string(), + ValueType.JSON_LIST: pyarrow.list_(pyarrow.large_string()), + ValueType.STRUCT: 
pyarrow.struct([]), + ValueType.STRUCT_LIST: pyarrow.list_(pyarrow.struct([])), ValueType.NULL: pyarrow.null(), } return type_map[feast_type] @@ -1380,7 +1622,9 @@ def athena_to_feast_value_type(athena_type_as_str: str) -> ValueType: "varchar": ValueType.STRING, "string": ValueType.STRING, "timestamp": ValueType.UNIX_TIMESTAMP, - # skip date,decimal,array,map,struct + "json": ValueType.JSON, + "struct": ValueType.STRUCT, + "map": ValueType.MAP, } return type_map[athena_type_as_str.lower()] @@ -1398,6 +1642,18 @@ def pa_to_athena_value_type(pa_type: "pyarrow.DataType") -> str: if pa_type_as_str.startswith("python_values_to_proto_values"): return pa_type_as_str + if pa_type_as_str.startswith("list"): + return "array" + + if pa_type_as_str.startswith("map<"): + return "string" + + if pa_type_as_str == "large_string": + return "string" + + if pa_type_as_str.startswith("struct<"): + return "string" + # We have to take into account how arrow types map to parquet types as well. # For example, null type maps to int32 in parquet, so we have to use int4 in Redshift. # Other mappings have also been adjusted accordingly. 
@@ -1467,6 +1723,8 @@ def convert_scalar_column( return series.astype("string") elif value_type == ValueType.UNIX_TIMESTAMP: return pd.to_datetime(series, unit="s", errors="coerce") + elif value_type in (ValueType.JSON, ValueType.STRUCT, ValueType.MAP): + return series else: return series.astype(target_pandas_type) diff --git a/sdk/python/feast/types.py b/sdk/python/feast/types.py index 922b3cce0ac..d94c356cd1a 100644 --- a/sdk/python/feast/types.py +++ b/sdk/python/feast/types.py @@ -33,6 +33,7 @@ "BOOL": "BOOL", "UNIX_TIMESTAMP": "UNIX_TIMESTAMP", "MAP": "MAP", + "JSON": "JSON", } @@ -85,6 +86,7 @@ class PrimitiveFeastType(Enum): PDF_BYTES = 9 IMAGE_BYTES = 10 MAP = 11 + JSON = 12 def to_value_type(self) -> ValueType: """ @@ -118,6 +120,7 @@ def __hash__(self): Float64 = PrimitiveFeastType.FLOAT64 UnixTimestamp = PrimitiveFeastType.UNIX_TIMESTAMP Map = PrimitiveFeastType.MAP +Json = PrimitiveFeastType.JSON SUPPORTED_BASE_TYPES = [ Invalid, @@ -132,6 +135,7 @@ def __hash__(self): Float64, UnixTimestamp, Map, + Json, ] PRIMITIVE_FEAST_TYPES_TO_STRING = { @@ -147,6 +151,7 @@ def __hash__(self): "FLOAT64": "Float64", "UNIX_TIMESTAMP": "UnixTimestamp", "MAP": "Map", + "JSON": "Json", } @@ -160,8 +165,9 @@ class Array(ComplexFeastType): base_type: Union[PrimitiveFeastType, ComplexFeastType] - def __init__(self, base_type: Union[PrimitiveFeastType, ComplexFeastType]): - if base_type not in SUPPORTED_BASE_TYPES: + def __init__(self, base_type: Union[PrimitiveFeastType, "ComplexFeastType"]): + # Allow Struct as a base type for Array(Struct(...)) + if not isinstance(base_type, Struct) and base_type not in SUPPORTED_BASE_TYPES: raise ValueError( f"Type {type(base_type)} is currently not supported as a base type for Array." 
) @@ -169,6 +175,8 @@ def __init__(self, base_type: Union[PrimitiveFeastType, ComplexFeastType]): self.base_type = base_type def to_value_type(self) -> ValueType: + if isinstance(self.base_type, Struct): + return ValueType.STRUCT_LIST assert isinstance(self.base_type, PrimitiveFeastType) value_type_name = PRIMITIVE_FEAST_TYPES_TO_VALUE_TYPES[self.base_type.name] value_type_list_name = value_type_name + "_LIST" @@ -208,6 +216,53 @@ def __str__(self): return f"Set({self.base_type})" +class Struct(ComplexFeastType): + """ + A Struct represents a structured type with named, typed fields. + + Attributes: + fields: A dictionary mapping field names to their FeastTypes. + """ + + fields: Dict[str, Union[PrimitiveFeastType, "ComplexFeastType"]] + + def __init__( + self, fields: Dict[str, Union[PrimitiveFeastType, "ComplexFeastType"]] + ): + if not fields: + raise ValueError("Struct must have at least one field.") + self.fields = fields + + def to_value_type(self) -> ValueType: + return ValueType.STRUCT + + def to_pyarrow_type(self) -> pyarrow.DataType: + pa_fields = [] + for name, feast_type in self.fields.items(): + pa_type = from_feast_to_pyarrow_type(feast_type) + pa_fields.append(pyarrow.field(name, pa_type)) + return pyarrow.struct(pa_fields) + + def __str__(self): + field_strs = ", ".join( + f"{name}: {ftype}" for name, ftype in self.fields.items() + ) + return f"Struct({{{field_strs}}})" + + def __eq__(self, other): + if isinstance(other, Struct): + return self.fields == other.fields + return False + + def __hash__(self): + return hash( + ( + "Struct", + tuple((k, hash(v)) for k, v in sorted(self.fields.items())), + ) + ) + + FeastType = Union[ComplexFeastType, PrimitiveFeastType] VALUE_TYPES_TO_FEAST_TYPES: Dict["ValueType", FeastType] = { @@ -232,6 +287,8 @@ def __str__(self): ValueType.UNIX_TIMESTAMP_LIST: Array(UnixTimestamp), ValueType.MAP: Map, ValueType.MAP_LIST: Array(Map), + ValueType.JSON: Json, + ValueType.JSON_LIST: Array(Json), ValueType.BYTES_SET: 
Set(Bytes), ValueType.STRING_SET: Set(String), ValueType.INT32_SET: Set(Int32), @@ -251,6 +308,8 @@ def __str__(self): Float64: pyarrow.float64(), # Note: datetime only supports microseconds https://github.com/python/cpython/blob/3.8/Lib/datetime.py#L1559 UnixTimestamp: pyarrow.timestamp("us", tz=_utc_now().tzname()), + Map: pyarrow.map_(pyarrow.string(), pyarrow.string()), + Json: pyarrow.large_string(), } FEAST_VECTOR_TYPES: List[Union[ValueType, PrimitiveFeastType, ComplexFeastType]] = [ @@ -279,12 +338,25 @@ def from_feast_to_pyarrow_type(feast_type: FeastType) -> pyarrow.DataType: assert isinstance(feast_type, (ComplexFeastType, PrimitiveFeastType)), ( f"Expected FeastType, got {type(feast_type)}" ) + if isinstance(feast_type, Struct): + return feast_type.to_pyarrow_type() if isinstance(feast_type, PrimitiveFeastType): if feast_type in FEAST_TYPES_TO_PYARROW_TYPES: return FEAST_TYPES_TO_PYARROW_TYPES[feast_type] - elif isinstance(feast_type, ComplexFeastType): - # Handle the case when feast_type is an instance of ComplexFeastType - pass + elif isinstance(feast_type, Array): + base_type = feast_type.base_type + if isinstance(base_type, Struct): + return pyarrow.list_(base_type.to_pyarrow_type()) + if isinstance(base_type, PrimitiveFeastType): + if base_type == Map: + return pyarrow.list_(pyarrow.map_(pyarrow.string(), pyarrow.string())) + if base_type in FEAST_TYPES_TO_PYARROW_TYPES: + return pyarrow.list_(FEAST_TYPES_TO_PYARROW_TYPES[base_type]) + elif isinstance(feast_type, Set): + base_type = feast_type.base_type + if isinstance(base_type, PrimitiveFeastType): + if base_type in FEAST_TYPES_TO_PYARROW_TYPES: + return pyarrow.list_(FEAST_TYPES_TO_PYARROW_TYPES[base_type]) raise ValueError(f"Could not convert Feast type {feast_type} to PyArrow type.") @@ -304,6 +376,14 @@ def from_value_type( if value_type in VALUE_TYPES_TO_FEAST_TYPES: return VALUE_TYPES_TO_FEAST_TYPES[value_type] + # Struct types cannot be looked up from the dict because they require + # 
field definitions. Return a default placeholder Struct that can be + # enriched later from Field tags / schema metadata. + if value_type == ValueType.STRUCT: + return Struct({"_value": String}) + if value_type == ValueType.STRUCT_LIST: + return Array(Struct({"_value": String})) + raise ValueError(f"Could not convert value type {value_type} to FeastType.") @@ -322,6 +402,12 @@ def from_feast_type( Raises: ValueError: The conversion could not be performed. """ + # Handle Struct types directly since they are not in the dict + if isinstance(feast_type, Struct): + return ValueType.STRUCT + if isinstance(feast_type, Array) and isinstance(feast_type.base_type, Struct): + return ValueType.STRUCT_LIST + if feast_type in VALUE_TYPES_TO_FEAST_TYPES.values(): return list(VALUE_TYPES_TO_FEAST_TYPES.keys())[ list(VALUE_TYPES_TO_FEAST_TYPES.values()).index(feast_type) diff --git a/sdk/python/feast/utils.py b/sdk/python/feast/utils.py index ebdd56929bb..511186066c6 100644 --- a/sdk/python/feast/utils.py +++ b/sdk/python/feast/utils.py @@ -45,7 +45,7 @@ from feast.protos.feast.types.Value_pb2 import RepeatedValue as RepeatedValueProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.type_map import python_values_to_proto_values -from feast.types import ComplexFeastType, PrimitiveFeastType, from_feast_to_pyarrow_type +from feast.types import ComplexFeastType, PrimitiveFeastType from feast.value_type import ValueType from feast.version import get_version @@ -152,6 +152,11 @@ def _get_column_names( and reverse-mapped created timestamp column that will be passed into the query to the offline store. """ + if feature_view.batch_source is None: + raise ValueError( + f"Feature view '{feature_view.name}' has no batch_source and cannot be used for offline retrieval." 
+ ) + # if we have mapped fields, use the original field names in the call to the offline store timestamp_field = feature_view.batch_source.timestamp_field @@ -265,6 +270,58 @@ def _coerce_datetime(ts): return ts +def _columns_to_proto_values( + table: pyarrow.RecordBatch, + columns: List[Tuple[str, ValueType]], + allow_missing: bool = False, +) -> Dict[str, List[ValueProto]]: + """Convert table columns to proto values dict. + + Args: + table: PyArrow RecordBatch containing the data. + columns: List of (column_name, value_type) tuples to convert. + allow_missing: If True, skip columns not found in table. If False, raise ValueError. + + Returns: + Dict mapping column names to lists of ValueProto. + """ + result: Dict[str, List[ValueProto]] = {} + for column, value_type in columns: + if column in table.column_names: + result[column] = python_values_to_proto_values( + table.column(column).to_numpy(zero_copy_only=False), value_type + ) + elif not allow_missing: + raise ValueError(f"Column {column} not found in table") + return result + + +def _build_entity_keys( + num_rows: int, + join_keys: Dict[str, ValueType], + proto_values: Dict[str, List[ValueProto]], +) -> List[EntityKeyProto]: + """Build entity key protos for each row. + + Args: + num_rows: Number of rows to generate entity keys for. + join_keys: Dict mapping join key names to their value types. + proto_values: Dict mapping column names to lists of ValueProto values. + + Returns: + List of EntityKeyProto, one per row. 
+ """ + return [ + EntityKeyProto( + join_keys=list(join_keys.keys()), + entity_values=[ + proto_values[k][idx] for k in join_keys if k in proto_values + ], + ) + for idx in range(num_rows) + ] + + def _convert_arrow_to_proto( table: Union[pyarrow.Table, pyarrow.RecordBatch], feature_view: Union["FeatureView", "BaseFeatureView", "OnDemandFeatureView"], @@ -290,25 +347,21 @@ def _convert_arrow_fv_to_proto( if isinstance(table, pyarrow.Table): table = table.to_batches()[0] + if feature_view.batch_source is None: + raise ValueError( + f"Feature view '{feature_view.name}' has no batch_source and cannot be converted to proto." + ) + # TODO: This will break if the feature view has aggregations or transformations columns = [ (field.name, field.dtype.to_value_type()) for field in feature_view.features ] + list(join_keys.items()) - proto_values_by_column = { - column: python_values_to_proto_values( - table.column(column).to_numpy(zero_copy_only=False), value_type - ) - for column, value_type in columns - } + proto_values_by_column = _columns_to_proto_values( + table, columns, allow_missing=False + ) - entity_keys = [ - EntityKeyProto( - join_keys=join_keys, - entity_values=[proto_values_by_column[k][idx] for k in join_keys], - ) - for idx in range(table.num_rows) - ] + entity_keys = _build_entity_keys(table.num_rows, join_keys, proto_values_by_column) # Serialize the features per row feature_dict = { @@ -356,62 +409,36 @@ def _convert_arrow_odfv_to_proto( (field.name, field.dtype.to_value_type()) for field in feature_view.features ] + list(join_keys.items()) - proto_values_by_column = { - column: python_values_to_proto_values( - table.column(column).to_numpy(zero_copy_only=False), value_type - ) - for column, value_type in columns - if column in table.column_names - } + # Convert columns that exist in the table + proto_values_by_column = _columns_to_proto_values( + table, columns, allow_missing=True + ) - # Ensure join keys are included in proto_values_by_column, but check 
if they exist first + # Ensure join keys are included, creating null values if missing from table for join_key, value_type in join_keys.items(): if join_key not in proto_values_by_column: - # Check if the join key exists in the table before trying to access it if join_key in table.column_names: proto_values_by_column[join_key] = python_values_to_proto_values( table.column(join_key).to_numpy(zero_copy_only=False), value_type ) else: - # Create null/default values if the join key isn't in the table - null_column = [None] * table.num_rows + # Create null proto values directly (no need to build a PyArrow array) proto_values_by_column[join_key] = python_values_to_proto_values( - null_column, value_type + [None] * table.num_rows, value_type ) - # Adding On Demand Features + # Cache column names set to avoid recreating list in loop + column_names = {c[0] for c in columns} + + # Adding On Demand Features that are missing from proto_values for feature in feature_view.features: - if ( - feature.name in [c[0] for c in columns] - and feature.name not in proto_values_by_column - ): - # initializing the column as null - null_column = pyarrow.array( - [None] * table.num_rows, - type=from_feast_to_pyarrow_type(feature.dtype), - ) - updated_table = pyarrow.RecordBatch.from_arrays( - table.columns + [null_column], - schema=table.schema.append( - pyarrow.field(feature.name, null_column.type) # type: ignore[attr-defined] - ), - ) + if feature.name in column_names and feature.name not in proto_values_by_column: + # Create null proto values directly (more efficient than building PyArrow array) proto_values_by_column[feature.name] = python_values_to_proto_values( - updated_table.column(feature.name).to_numpy(zero_copy_only=False), - feature.dtype.to_value_type(), + [None] * table.num_rows, feature.dtype.to_value_type() ) - entity_keys = [ - EntityKeyProto( - join_keys=join_keys, - entity_values=[ - proto_values_by_column[k][idx] - for k in join_keys - if k in proto_values_by_column - ], 
- ) - for idx in range(table.num_rows) - ] + entity_keys = _build_entity_keys(table.num_rows, join_keys, proto_values_by_column) # Serialize the features per row feature_dict = { @@ -420,7 +447,7 @@ def _convert_arrow_odfv_to_proto( if feature.name in proto_values_by_column } if feature_view.write_to_online_store: - table_columns = [col.name for col in table.schema] + table_columns = {col.name for col in table.schema} for feature in feature_view.schema: if feature.name not in feature_dict and feature.name in table_columns: feature_dict[feature.name] = proto_values_by_column[feature.name] @@ -428,11 +455,10 @@ def _convert_arrow_odfv_to_proto( features = [dict(zip(feature_dict, vars)) for vars in zip(*feature_dict.values())] # We need to artificially add event_timestamps and created_timestamps - event_timestamps = [] - timestamp_values = pd.to_datetime([_utc_now() for i in range(table.num_rows)]) - - for val in timestamp_values: - event_timestamps.append(_coerce_datetime(val)) + now = _utc_now() + event_timestamps = [ + _coerce_datetime(pd.Timestamp(now)) for _ in range(table.num_rows) + ] # setting them equivalent created_timestamps = event_timestamps @@ -775,13 +801,20 @@ def _get_entity_maps( ) -> Tuple[Dict[str, str], Dict[str, ValueType], Set[str]]: # TODO(felixwang9817): Support entities that have different types for different feature views. 
entities = registry.list_entities(project, allow_cache=True) + + entity_by_name: Dict[str, "Entity"] = {entity.name: entity for entity in entities} + entity_name_to_join_key_map: Dict[str, str] = {} entity_type_map: Dict[str, ValueType] = {} for entity in entities: entity_name_to_join_key_map[entity.name] = entity.join_key for feature_view in feature_views: for entity_name in feature_view.entities: - entity = registry.get_entity(entity_name, project, allow_cache=True) + entity = entity_by_name.get(entity_name) + if entity is None: + from feast.errors import EntityNotFoundException + + raise EntityNotFoundException(entity_name, project=project) # User directly uses join_key as the entity reference in the entity_rows for the # entity mapping case. entity_name = feature_view.projection.join_key_map.get( @@ -1413,27 +1446,27 @@ def _convert_rows_to_protobuf( requested_features: List[str], read_rows: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]], ) -> List[Tuple[List[Timestamp], List["FieldStatus.ValueType"], List[ValueProto]]]: - # Pre-calculate the length to avoid repeated calculations n_rows = len(read_rows) - # Create single instances of commonly used values null_value = ValueProto() null_status = FieldStatus.NOT_FOUND - null_timestamp = Timestamp() present_status = FieldStatus.PRESENT + # Pre-compute timestamps once per entity (not per feature) + # This reduces O(features * entities) to O(entities) for timestamp conversion + row_timestamps = [] + for row_ts, _ in read_rows: + ts_proto = Timestamp() + if row_ts is not None: + ts_proto.FromDatetime(row_ts) + row_timestamps.append(ts_proto) + requested_features_vectors = [] for feature_name in requested_features: - ts_vector = [null_timestamp] * n_rows + ts_vector = list(row_timestamps) # Shallow copy of pre-computed timestamps status_vector = [null_status] * n_rows value_vector = [null_value] * n_rows - for idx, read_row in enumerate(read_rows): - row_ts_proto = Timestamp() - row_ts, feature_data = 
read_row - # TODO (Ly): reuse whatever timestamp if row_ts is None? - if row_ts is not None: - row_ts_proto.FromDatetime(row_ts) - ts_vector[idx] = row_ts_proto + for idx, (_, feature_data) in enumerate(read_rows): if (feature_data is not None) and (feature_name in feature_data): status_vector[idx] = present_status value_vector[idx] = feature_data[feature_name] diff --git a/sdk/python/feast/value_type.py b/sdk/python/feast/value_type.py index bdd47952dc6..d05691199b4 100644 --- a/sdk/python/feast/value_type.py +++ b/sdk/python/feast/value_type.py @@ -67,6 +67,10 @@ class ValueType(enum.Enum): UNIX_TIMESTAMP_SET = 29 PDF_BYTES = 30 IMAGE_BYTES = 31 + JSON = 32 + JSON_LIST = 33 + STRUCT = 34 + STRUCT_LIST = 35 ListType = Union[ diff --git a/sdk/python/pytest.ini b/sdk/python/pytest.ini index 31640e509d7..1ad76b978e4 100644 --- a/sdk/python/pytest.ini +++ b/sdk/python/pytest.ini @@ -20,6 +20,7 @@ markers = slow: Tests taking >30 seconds cloud: Tests requiring cloud credentials local_only: Tests that run entirely locally + xdist_group: Group tests to run in the same xdist worker timeout = 300 timeout_method = thread diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index 03ecb497a30..7c7961ad52e 100644 --- a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -3002,7 +3002,9 @@ orjson==3.11.7 \ --hash=sha256:f4f7c956b5215d949a1f65334cf9d7612dde38f20a95f2315deef167def91a6f \ --hash=sha256:f50979824bde13d32b4320eedd513431c921102796d86be3eee0b58e58a3ecd1 \ --hash=sha256:f904c24bdeabd4298f7a977ef14ca2a022ca921ed670b92ecd16ab6f3d01f867 - # via trino + # via + # feast (pyproject.toml) + # trino overrides==7.7.0 \ --hash=sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a \ --hash=sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49 diff --git a/sdk/python/requirements/py3.10-minimal-requirements.txt 
b/sdk/python/requirements/py3.10-minimal-requirements.txt index 0a281d5e653..d5db5e39505 100644 --- a/sdk/python/requirements/py3.10-minimal-requirements.txt +++ b/sdk/python/requirements/py3.10-minimal-requirements.txt @@ -1698,6 +1698,82 @@ oauthlib==3.3.1 \ --hash=sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9 \ --hash=sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1 # via requests-oauthlib +orjson==3.11.7 \ + --hash=sha256:043d3006b7d32c7e233b8cfb1f01c651013ea079e08dcef7189a29abd8befe11 \ + --hash=sha256:0527a4510c300e3b406591b0ba69b5dc50031895b0a93743526a3fc45f59d26e \ + --hash=sha256:0724e265bc548af1dedebd9cb3d24b4e1c1e685a343be43e87ba922a5c5fff2f \ + --hash=sha256:136dcd6a2e796dfd9ffca9fc027d778567b0b7c9968d092842d3c323cef88aa8 \ + --hash=sha256:14f440c7268c8f8633d1b3d443a434bd70cb15686117ea6beff8fdc8f5917a1e \ + --hash=sha256:1d98b30cc1313d52d4af17d9c3d307b08389752ec5f2e5febdfada70b0f8c733 \ + --hash=sha256:1eb80451a9c351a71dfaf5b7ccc13ad065405217726b59fdbeadbcc544f9d223 \ + --hash=sha256:1ee5cc7160a821dfe14f130bc8e63e7611051f964b463d9e2a3a573204446a4d \ + --hash=sha256:23d6c20517a97a9daf1d48b580fcdc6f0516c6f4b5038823426033690b4d2650 \ + --hash=sha256:26c3b9132f783b7d7903bf1efb095fed8d4a3a85ec0d334ee8beff3d7a4749d5 \ + --hash=sha256:31c80ce534ac4ea3739c5ee751270646cbc46e45aea7576a38ffec040b4029a1 \ + --hash=sha256:3726be79e36e526e3d9c1aceaadbfb4a04ee80a72ab47b3f3c17fefb9812e7b8 \ + --hash=sha256:390a1dce0c055ddf8adb6aa94a73b45a4a7d7177b5c584b8d1c1947f2ba60fb3 \ + --hash=sha256:3a2479753bbb95b0ebcf7969f562cdb9668e6d12416a35b0dda79febf89cdea2 \ + --hash=sha256:3c4bc6c6ac52cdaa267552544c73e486fecbd710b7ac09bc024d5a78555a22f6 \ + --hash=sha256:411ebaf34d735e25e358a6d9e7978954a9c9d58cfb47bc6683cdc3964cd2f910 \ + --hash=sha256:4682d1db3bcebd2b64757e0ddf9e87ae5f00d29d16c5cdf3a62f561d08cc3dd2 \ + --hash=sha256:4a2e9c5be347b937a2e0203866f12bba36082e89b402ddb9e927d5822e43088d \ + 
--hash=sha256:57036b27ac8a25d81112eb0cc9835cd4833c5b16e1467816adc0015f59e870dc \ + --hash=sha256:5ede977b5fe5ac91b1dffc0a517ca4542d2ec8a6a4ff7b2652d94f640796342a \ + --hash=sha256:5fdfad2093bdd08245f2e204d977facd5f871c88c4a71230d5bcbd0e43bf6222 \ + --hash=sha256:623ad1b9548ef63886319c16fa317848e465a21513b31a6ad7b57443c3e0dcf5 \ + --hash=sha256:652c6c3af76716f4a9c290371ba2e390ede06f6603edb277b481daf37f6f464e \ + --hash=sha256:6543001328aa857187f905308a028935864aefe9968af3848401b6fe80dbb471 \ + --hash=sha256:6e776b998ac37c0396093d10290e60283f59cfe0fc3fccbd0ccc4bd04dd19892 \ + --hash=sha256:71924496986275a737f38e3f22b4e0878882b3f7a310d2ff4dc96e812789120c \ + --hash=sha256:733ae23ada68b804b222c44affed76b39e30806d38660bf1eb200520d259cc16 \ + --hash=sha256:7477aa6a6ec6139c5cb1cc7b214643592169a5494d200397c7fc95d740d5fcf3 \ + --hash=sha256:79cacb0b52f6004caf92405a7e1f11e6e2de8bdf9019e4f76b44ba045125cd6b \ + --hash=sha256:7ba61079379b0ae29e117db13bda5f28d939766e410d321ec1624afc6a0b0504 \ + --hash=sha256:800988273a014a0541483dc81021247d7eacb0c845a9d1a34a422bc718f41539 \ + --hash=sha256:814be4b49b228cfc0b3c565acf642dd7d13538f966e3ccde61f4f55be3e20785 \ + --hash=sha256:845c3e0d8ded9c9271cd79596b9b552448b885b97110f628fb687aee2eed11c1 \ + --hash=sha256:849e38203e5be40b776ed2718e587faf204d184fc9a008ae441f9442320c0cab \ + --hash=sha256:89e13dd3f89f1c38a9c9eba5fbf7cdc2d1feca82f5f290864b4b7a6aac704576 \ + --hash=sha256:89e440ebc74ce8ab5c7bc4ce6757b4a6b1041becb127df818f6997b5c71aa60b \ + --hash=sha256:8ff206156006da5b847c9304b6308a01e8cdbc8cce824e2779a5ba71c3def141 \ + --hash=sha256:91c81ef070c8f3220054115e1ef468b1c9ce8497b4e526cb9f68ab4dc0a7ac62 \ + --hash=sha256:9487abc2c2086e7c8eb9a211d2ce8855bae0e92586279d0d27b341d5ad76c85c \ + --hash=sha256:962d046ee1765f74a1da723f4b33e3b228fe3a48bd307acce5021dfefe0e29b2 \ + --hash=sha256:996b65230271f1a97026fd0e6a753f51fbc0c335d2ad0c6201f711b0da32693b \ + --hash=sha256:9b1a67243945819ce55d24a30b59d6a168e86220452d2c96f4d1f093e71c0c49 \ + 
--hash=sha256:9c0b51672e466fd7e56230ffbae7f1639e18d0ce023351fb75da21b71bc2c960 \ + --hash=sha256:9e54f3808e2b6b945078c41aa8d9b5834b28c50843846e97807e5adb75fa9705 \ + --hash=sha256:a02c833f38f36546ba65a452127633afce4cf0dd7296b753d3bb54e55e5c0174 \ + --hash=sha256:a12b80df61aab7b98b490fe9e4879925ba666fccdfcd175252ce4d9035865ace \ + --hash=sha256:a16bcd08ab0bcdfc7e8801d9c4a9cc17e58418e4d48ddc6ded4e9e4b1a94062b \ + --hash=sha256:a56df3239294ea5964adf074c54bcc4f0ccd21636049a2cf3ca9cf03b5d03cf1 \ + --hash=sha256:a709e881723c9b18acddcfb8ba357322491ad553e277cf467e1e7e20e2d90561 \ + --hash=sha256:ab49d4b2a6a1d415ddb9f37a21e02e0d5dbfe10b7870b21bf779fc21e9156157 \ + --hash=sha256:ae9e0b37a834cef7ce8f99de6498f8fad4a2c0bf6bfc3d02abd8ed56aa15b2de \ + --hash=sha256:b4a9eefdc70bf8bf9857f0290f973dec534ac84c35cd6a7f4083be43e7170a8f \ + --hash=sha256:b63c6e6738d7c3470ad01601e23376aa511e50e1f3931395b9f9c722406d1a67 \ + --hash=sha256:b7b1dae39230a393df353827c855a5f176271c23434cfd2db74e0e424e693e10 \ + --hash=sha256:b8d14b71c0b12963fe8a62aac87119f1afdf4cb88a400f61ca5ae581449efcb5 \ + --hash=sha256:b9f95dcdea9d4f805daa9ddf02617a89e484c6985fa03055459f90e87d7a0757 \ + --hash=sha256:b9fc4d0f81f394689e0814617aadc4f2ea0e8025f38c226cbf22d3b5ddbf025d \ + --hash=sha256:bd03ea7606833655048dab1a00734a2875e3e86c276e1d772b2a02556f0d895f \ + --hash=sha256:bd0d68edd7dfca1b2eca9361a44ac9f24b078de3481003159929a0573f21a6bf \ + --hash=sha256:bda117c4148e81f746655d5a3239ae9bd00cb7bc3ca178b5fc5a5997e9744183 \ + --hash=sha256:bf742e149121dc5648ba0a08ea0871e87b660467ef168a3a5e53bc1fbd64bb74 \ + --hash=sha256:c2428d358d85e8da9d37cba18b8c4047c55222007a84f97156a5b22028dfbfc0 \ + --hash=sha256:c2e85fe4698b6a56d5e2ebf7ae87544d668eb6bde1ad1226c13f44663f20ec9e \ + --hash=sha256:c43b8b5bab288b6b90dac410cca7e986a4fa747a2e8f94615aea407da706980d \ + --hash=sha256:cededd6738e1c153530793998e31c05086582b08315db48ab66649768f326baa \ + --hash=sha256:d06e5c5fed5caedd2e540d62e5b1c25e8c82431b9e577c33537e5fa4aa909539 \ + 
--hash=sha256:d772afdb22555f0c58cfc741bdae44180122b3616faa1ecadb595cd526e4c993 \ + --hash=sha256:d897e81f8d0cbd2abb82226d1860ad2e1ab3ff16d7b08c96ca00df9d45409ef4 \ + --hash=sha256:de0a37f21d0d364954ad5de1970491d7fbd0fb1ef7417d4d56a36dc01ba0c0a0 \ + --hash=sha256:e7745312efa9e11c17fbd3cb3097262d079da26930ae9ae7ba28fb738367cbad \ + --hash=sha256:ed46f17096e28fb28d2975834836a639af7278aa87c84f68ab08fbe5b8bd75fa \ + --hash=sha256:f4f7c956b5215d949a1f65334cf9d7612dde38f20a95f2315deef167def91a6f \ + --hash=sha256:f50979824bde13d32b4320eedd513431c921102796d86be3eee0b58e58a3ecd1 \ + --hash=sha256:f904c24bdeabd4298f7a977ef14ca2a022ca921ed670b92ecd16ab6f3d01f867 + # via feast (pyproject.toml) packaging==26.0 \ --hash=sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4 \ --hash=sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529 @@ -2714,9 +2790,9 @@ typeguard==4.5.0 \ --hash=sha256:749bea21cdb2553e12831bc29f1eae980b22c7de8331ab67ae7db9e85470b5a7 \ --hash=sha256:cfda388fc88a9ce42a41890900d6f31ee124bea9b73bb84701a32438e92165c3 # via feast (pyproject.toml) -typer==0.23.1 \ - --hash=sha256:2070374e4d31c83e7b61362fd859aa683576432fd5b026b060ad6b4cd3b86134 \ - --hash=sha256:3291ad0d3c701cbf522012faccfbb29352ff16ad262db2139e6b01f15781f14e +typer==0.24.0 \ + --hash=sha256:5fc435a9c8356f6160ed6e85a6301fdd6e3d8b2851da502050d1f92c5e9eddc8 \ + --hash=sha256:f9373dc4eff901350694f519f783c29b6d7a110fc0dcc11b1d7e353b85ca6504 # via fastapi-mcp types-pymysql==1.1.0.20251220 \ --hash=sha256:ae1c3df32a777489431e2e9963880a0df48f6591e0aa2fd3a6fabd9dee6eca54 \ diff --git a/sdk/python/requirements/py3.10-minimal-sdist-requirements-build.txt b/sdk/python/requirements/py3.10-minimal-sdist-requirements-build.txt index 70db9118276..d272d9a8eeb 100644 --- a/sdk/python/requirements/py3.10-minimal-sdist-requirements-build.txt +++ b/sdk/python/requirements/py3.10-minimal-sdist-requirements-build.txt @@ -534,6 +534,7 @@ maturin==1.12.2 \ 
--hash=sha256:f1c2e4ee43bf286b052091a3b2356a157978985837c7aed42354deb2947a4006 # via # cryptography + # orjson # pydantic-core # rpds-py # watchfiles @@ -654,13 +655,13 @@ poetry-core==2.3.1 \ # rich # rsa # tomlkit -pybind11-global==3.0.1 \ - --hash=sha256:0e8d5a68d084c50bf145ce5efdbdd00704dbe6315035d0b7a255708ddeb9faca \ - --hash=sha256:eb6ef4e8f5a60f4c0b6a9396cb7806f78d42f8d081e42a93c0bb62288f6cf403 +pybind11-global==3.0.2 \ + --hash=sha256:00a26be4cd65974133eaae7e7532e7141ccb7a88cd131995bc8d1f652852aaf9 \ + --hash=sha256:e183b4456459c35fbbbc8296eb29e241f6cf0774c0bbc3fc8349789611c6df4b # via pybind11 -pybind11==3.0.1 \ - --hash=sha256:9c0f40056a016da59bab516efb523089139fcc6f2ba7e4930854c61efb932051 \ - --hash=sha256:aa8f0aa6e0a94d3b64adfc38f560f33f15e589be2175e103c0a33c6bce55ee89 +pybind11==3.0.2 \ + --hash=sha256:432f01aeb68e361a3a7fc7575c2c7f497595bf640f747acd909ff238dd766e06 \ + --hash=sha256:f8a6500548919cc33bcd220d5f984688326f574fa97f1107f2f4fdb4c6fb019f # via duckdb pycparser==3.0 \ --hash=sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29 \ diff --git a/sdk/python/requirements/py3.10-minimal-sdist-requirements.txt b/sdk/python/requirements/py3.10-minimal-sdist-requirements.txt index da8f2ae618e..567e1d283a0 100644 --- a/sdk/python/requirements/py3.10-minimal-sdist-requirements.txt +++ b/sdk/python/requirements/py3.10-minimal-sdist-requirements.txt @@ -1864,6 +1864,82 @@ oauthlib==3.3.1 \ --hash=sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9 \ --hash=sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1 # via requests-oauthlib +orjson==3.11.7 \ + --hash=sha256:043d3006b7d32c7e233b8cfb1f01c651013ea079e08dcef7189a29abd8befe11 \ + --hash=sha256:0527a4510c300e3b406591b0ba69b5dc50031895b0a93743526a3fc45f59d26e \ + --hash=sha256:0724e265bc548af1dedebd9cb3d24b4e1c1e685a343be43e87ba922a5c5fff2f \ + --hash=sha256:136dcd6a2e796dfd9ffca9fc027d778567b0b7c9968d092842d3c323cef88aa8 \ + 
--hash=sha256:14f440c7268c8f8633d1b3d443a434bd70cb15686117ea6beff8fdc8f5917a1e \ + --hash=sha256:1d98b30cc1313d52d4af17d9c3d307b08389752ec5f2e5febdfada70b0f8c733 \ + --hash=sha256:1eb80451a9c351a71dfaf5b7ccc13ad065405217726b59fdbeadbcc544f9d223 \ + --hash=sha256:1ee5cc7160a821dfe14f130bc8e63e7611051f964b463d9e2a3a573204446a4d \ + --hash=sha256:23d6c20517a97a9daf1d48b580fcdc6f0516c6f4b5038823426033690b4d2650 \ + --hash=sha256:26c3b9132f783b7d7903bf1efb095fed8d4a3a85ec0d334ee8beff3d7a4749d5 \ + --hash=sha256:31c80ce534ac4ea3739c5ee751270646cbc46e45aea7576a38ffec040b4029a1 \ + --hash=sha256:3726be79e36e526e3d9c1aceaadbfb4a04ee80a72ab47b3f3c17fefb9812e7b8 \ + --hash=sha256:390a1dce0c055ddf8adb6aa94a73b45a4a7d7177b5c584b8d1c1947f2ba60fb3 \ + --hash=sha256:3a2479753bbb95b0ebcf7969f562cdb9668e6d12416a35b0dda79febf89cdea2 \ + --hash=sha256:3c4bc6c6ac52cdaa267552544c73e486fecbd710b7ac09bc024d5a78555a22f6 \ + --hash=sha256:411ebaf34d735e25e358a6d9e7978954a9c9d58cfb47bc6683cdc3964cd2f910 \ + --hash=sha256:4682d1db3bcebd2b64757e0ddf9e87ae5f00d29d16c5cdf3a62f561d08cc3dd2 \ + --hash=sha256:4a2e9c5be347b937a2e0203866f12bba36082e89b402ddb9e927d5822e43088d \ + --hash=sha256:57036b27ac8a25d81112eb0cc9835cd4833c5b16e1467816adc0015f59e870dc \ + --hash=sha256:5ede977b5fe5ac91b1dffc0a517ca4542d2ec8a6a4ff7b2652d94f640796342a \ + --hash=sha256:5fdfad2093bdd08245f2e204d977facd5f871c88c4a71230d5bcbd0e43bf6222 \ + --hash=sha256:623ad1b9548ef63886319c16fa317848e465a21513b31a6ad7b57443c3e0dcf5 \ + --hash=sha256:652c6c3af76716f4a9c290371ba2e390ede06f6603edb277b481daf37f6f464e \ + --hash=sha256:6543001328aa857187f905308a028935864aefe9968af3848401b6fe80dbb471 \ + --hash=sha256:6e776b998ac37c0396093d10290e60283f59cfe0fc3fccbd0ccc4bd04dd19892 \ + --hash=sha256:71924496986275a737f38e3f22b4e0878882b3f7a310d2ff4dc96e812789120c \ + --hash=sha256:733ae23ada68b804b222c44affed76b39e30806d38660bf1eb200520d259cc16 \ + --hash=sha256:7477aa6a6ec6139c5cb1cc7b214643592169a5494d200397c7fc95d740d5fcf3 \ + 
--hash=sha256:79cacb0b52f6004caf92405a7e1f11e6e2de8bdf9019e4f76b44ba045125cd6b \ + --hash=sha256:7ba61079379b0ae29e117db13bda5f28d939766e410d321ec1624afc6a0b0504 \ + --hash=sha256:800988273a014a0541483dc81021247d7eacb0c845a9d1a34a422bc718f41539 \ + --hash=sha256:814be4b49b228cfc0b3c565acf642dd7d13538f966e3ccde61f4f55be3e20785 \ + --hash=sha256:845c3e0d8ded9c9271cd79596b9b552448b885b97110f628fb687aee2eed11c1 \ + --hash=sha256:849e38203e5be40b776ed2718e587faf204d184fc9a008ae441f9442320c0cab \ + --hash=sha256:89e13dd3f89f1c38a9c9eba5fbf7cdc2d1feca82f5f290864b4b7a6aac704576 \ + --hash=sha256:89e440ebc74ce8ab5c7bc4ce6757b4a6b1041becb127df818f6997b5c71aa60b \ + --hash=sha256:8ff206156006da5b847c9304b6308a01e8cdbc8cce824e2779a5ba71c3def141 \ + --hash=sha256:91c81ef070c8f3220054115e1ef468b1c9ce8497b4e526cb9f68ab4dc0a7ac62 \ + --hash=sha256:9487abc2c2086e7c8eb9a211d2ce8855bae0e92586279d0d27b341d5ad76c85c \ + --hash=sha256:962d046ee1765f74a1da723f4b33e3b228fe3a48bd307acce5021dfefe0e29b2 \ + --hash=sha256:996b65230271f1a97026fd0e6a753f51fbc0c335d2ad0c6201f711b0da32693b \ + --hash=sha256:9b1a67243945819ce55d24a30b59d6a168e86220452d2c96f4d1f093e71c0c49 \ + --hash=sha256:9c0b51672e466fd7e56230ffbae7f1639e18d0ce023351fb75da21b71bc2c960 \ + --hash=sha256:9e54f3808e2b6b945078c41aa8d9b5834b28c50843846e97807e5adb75fa9705 \ + --hash=sha256:a02c833f38f36546ba65a452127633afce4cf0dd7296b753d3bb54e55e5c0174 \ + --hash=sha256:a12b80df61aab7b98b490fe9e4879925ba666fccdfcd175252ce4d9035865ace \ + --hash=sha256:a16bcd08ab0bcdfc7e8801d9c4a9cc17e58418e4d48ddc6ded4e9e4b1a94062b \ + --hash=sha256:a56df3239294ea5964adf074c54bcc4f0ccd21636049a2cf3ca9cf03b5d03cf1 \ + --hash=sha256:a709e881723c9b18acddcfb8ba357322491ad553e277cf467e1e7e20e2d90561 \ + --hash=sha256:ab49d4b2a6a1d415ddb9f37a21e02e0d5dbfe10b7870b21bf779fc21e9156157 \ + --hash=sha256:ae9e0b37a834cef7ce8f99de6498f8fad4a2c0bf6bfc3d02abd8ed56aa15b2de \ + --hash=sha256:b4a9eefdc70bf8bf9857f0290f973dec534ac84c35cd6a7f4083be43e7170a8f \ + 
--hash=sha256:b63c6e6738d7c3470ad01601e23376aa511e50e1f3931395b9f9c722406d1a67 \ + --hash=sha256:b7b1dae39230a393df353827c855a5f176271c23434cfd2db74e0e424e693e10 \ + --hash=sha256:b8d14b71c0b12963fe8a62aac87119f1afdf4cb88a400f61ca5ae581449efcb5 \ + --hash=sha256:b9f95dcdea9d4f805daa9ddf02617a89e484c6985fa03055459f90e87d7a0757 \ + --hash=sha256:b9fc4d0f81f394689e0814617aadc4f2ea0e8025f38c226cbf22d3b5ddbf025d \ + --hash=sha256:bd03ea7606833655048dab1a00734a2875e3e86c276e1d772b2a02556f0d895f \ + --hash=sha256:bd0d68edd7dfca1b2eca9361a44ac9f24b078de3481003159929a0573f21a6bf \ + --hash=sha256:bda117c4148e81f746655d5a3239ae9bd00cb7bc3ca178b5fc5a5997e9744183 \ + --hash=sha256:bf742e149121dc5648ba0a08ea0871e87b660467ef168a3a5e53bc1fbd64bb74 \ + --hash=sha256:c2428d358d85e8da9d37cba18b8c4047c55222007a84f97156a5b22028dfbfc0 \ + --hash=sha256:c2e85fe4698b6a56d5e2ebf7ae87544d668eb6bde1ad1226c13f44663f20ec9e \ + --hash=sha256:c43b8b5bab288b6b90dac410cca7e986a4fa747a2e8f94615aea407da706980d \ + --hash=sha256:cededd6738e1c153530793998e31c05086582b08315db48ab66649768f326baa \ + --hash=sha256:d06e5c5fed5caedd2e540d62e5b1c25e8c82431b9e577c33537e5fa4aa909539 \ + --hash=sha256:d772afdb22555f0c58cfc741bdae44180122b3616faa1ecadb595cd526e4c993 \ + --hash=sha256:d897e81f8d0cbd2abb82226d1860ad2e1ab3ff16d7b08c96ca00df9d45409ef4 \ + --hash=sha256:de0a37f21d0d364954ad5de1970491d7fbd0fb1ef7417d4d56a36dc01ba0c0a0 \ + --hash=sha256:e7745312efa9e11c17fbd3cb3097262d079da26930ae9ae7ba28fb738367cbad \ + --hash=sha256:ed46f17096e28fb28d2975834836a639af7278aa87c84f68ab08fbe5b8bd75fa \ + --hash=sha256:f4f7c956b5215d949a1f65334cf9d7612dde38f20a95f2315deef167def91a6f \ + --hash=sha256:f50979824bde13d32b4320eedd513431c921102796d86be3eee0b58e58a3ecd1 \ + --hash=sha256:f904c24bdeabd4298f7a977ef14ca2a022ca921ed670b92ecd16ab6f3d01f867 + # via feast (pyproject.toml) packaging==26.0 \ --hash=sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4 \ 
--hash=sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529 @@ -2964,9 +3040,9 @@ typeguard==4.5.0 \ --hash=sha256:749bea21cdb2553e12831bc29f1eae980b22c7de8331ab67ae7db9e85470b5a7 \ --hash=sha256:cfda388fc88a9ce42a41890900d6f31ee124bea9b73bb84701a32438e92165c3 # via feast (pyproject.toml) -typer==0.23.1 \ - --hash=sha256:2070374e4d31c83e7b61362fd859aa683576432fd5b026b060ad6b4cd3b86134 \ - --hash=sha256:3291ad0d3c701cbf522012faccfbb29352ff16ad262db2139e6b01f15781f14e +typer==0.24.0 \ + --hash=sha256:5fc435a9c8356f6160ed6e85a6301fdd6e3d8b2851da502050d1f92c5e9eddc8 \ + --hash=sha256:f9373dc4eff901350694f519f783c29b6d7a110fc0dcc11b1d7e353b85ca6504 # via fastapi-mcp types-psutil==7.0.0.20250218 \ --hash=sha256:1447a30c282aafefcf8941ece854e1100eee7b0296a9d9be9977292f0269b121 \ diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index 2eb2776034e..e8245f6cf27 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -669,6 +669,82 @@ numpy==2.2.6 \ # feast (pyproject.toml) # dask # pandas +orjson==3.11.7 \ + --hash=sha256:043d3006b7d32c7e233b8cfb1f01c651013ea079e08dcef7189a29abd8befe11 \ + --hash=sha256:0527a4510c300e3b406591b0ba69b5dc50031895b0a93743526a3fc45f59d26e \ + --hash=sha256:0724e265bc548af1dedebd9cb3d24b4e1c1e685a343be43e87ba922a5c5fff2f \ + --hash=sha256:136dcd6a2e796dfd9ffca9fc027d778567b0b7c9968d092842d3c323cef88aa8 \ + --hash=sha256:14f440c7268c8f8633d1b3d443a434bd70cb15686117ea6beff8fdc8f5917a1e \ + --hash=sha256:1d98b30cc1313d52d4af17d9c3d307b08389752ec5f2e5febdfada70b0f8c733 \ + --hash=sha256:1eb80451a9c351a71dfaf5b7ccc13ad065405217726b59fdbeadbcc544f9d223 \ + --hash=sha256:1ee5cc7160a821dfe14f130bc8e63e7611051f964b463d9e2a3a573204446a4d \ + --hash=sha256:23d6c20517a97a9daf1d48b580fcdc6f0516c6f4b5038823426033690b4d2650 \ + --hash=sha256:26c3b9132f783b7d7903bf1efb095fed8d4a3a85ec0d334ee8beff3d7a4749d5 \ + 
--hash=sha256:31c80ce534ac4ea3739c5ee751270646cbc46e45aea7576a38ffec040b4029a1 \ + --hash=sha256:3726be79e36e526e3d9c1aceaadbfb4a04ee80a72ab47b3f3c17fefb9812e7b8 \ + --hash=sha256:390a1dce0c055ddf8adb6aa94a73b45a4a7d7177b5c584b8d1c1947f2ba60fb3 \ + --hash=sha256:3a2479753bbb95b0ebcf7969f562cdb9668e6d12416a35b0dda79febf89cdea2 \ + --hash=sha256:3c4bc6c6ac52cdaa267552544c73e486fecbd710b7ac09bc024d5a78555a22f6 \ + --hash=sha256:411ebaf34d735e25e358a6d9e7978954a9c9d58cfb47bc6683cdc3964cd2f910 \ + --hash=sha256:4682d1db3bcebd2b64757e0ddf9e87ae5f00d29d16c5cdf3a62f561d08cc3dd2 \ + --hash=sha256:4a2e9c5be347b937a2e0203866f12bba36082e89b402ddb9e927d5822e43088d \ + --hash=sha256:57036b27ac8a25d81112eb0cc9835cd4833c5b16e1467816adc0015f59e870dc \ + --hash=sha256:5ede977b5fe5ac91b1dffc0a517ca4542d2ec8a6a4ff7b2652d94f640796342a \ + --hash=sha256:5fdfad2093bdd08245f2e204d977facd5f871c88c4a71230d5bcbd0e43bf6222 \ + --hash=sha256:623ad1b9548ef63886319c16fa317848e465a21513b31a6ad7b57443c3e0dcf5 \ + --hash=sha256:652c6c3af76716f4a9c290371ba2e390ede06f6603edb277b481daf37f6f464e \ + --hash=sha256:6543001328aa857187f905308a028935864aefe9968af3848401b6fe80dbb471 \ + --hash=sha256:6e776b998ac37c0396093d10290e60283f59cfe0fc3fccbd0ccc4bd04dd19892 \ + --hash=sha256:71924496986275a737f38e3f22b4e0878882b3f7a310d2ff4dc96e812789120c \ + --hash=sha256:733ae23ada68b804b222c44affed76b39e30806d38660bf1eb200520d259cc16 \ + --hash=sha256:7477aa6a6ec6139c5cb1cc7b214643592169a5494d200397c7fc95d740d5fcf3 \ + --hash=sha256:79cacb0b52f6004caf92405a7e1f11e6e2de8bdf9019e4f76b44ba045125cd6b \ + --hash=sha256:7ba61079379b0ae29e117db13bda5f28d939766e410d321ec1624afc6a0b0504 \ + --hash=sha256:800988273a014a0541483dc81021247d7eacb0c845a9d1a34a422bc718f41539 \ + --hash=sha256:814be4b49b228cfc0b3c565acf642dd7d13538f966e3ccde61f4f55be3e20785 \ + --hash=sha256:845c3e0d8ded9c9271cd79596b9b552448b885b97110f628fb687aee2eed11c1 \ + --hash=sha256:849e38203e5be40b776ed2718e587faf204d184fc9a008ae441f9442320c0cab \ + 
--hash=sha256:89e13dd3f89f1c38a9c9eba5fbf7cdc2d1feca82f5f290864b4b7a6aac704576 \ + --hash=sha256:89e440ebc74ce8ab5c7bc4ce6757b4a6b1041becb127df818f6997b5c71aa60b \ + --hash=sha256:8ff206156006da5b847c9304b6308a01e8cdbc8cce824e2779a5ba71c3def141 \ + --hash=sha256:91c81ef070c8f3220054115e1ef468b1c9ce8497b4e526cb9f68ab4dc0a7ac62 \ + --hash=sha256:9487abc2c2086e7c8eb9a211d2ce8855bae0e92586279d0d27b341d5ad76c85c \ + --hash=sha256:962d046ee1765f74a1da723f4b33e3b228fe3a48bd307acce5021dfefe0e29b2 \ + --hash=sha256:996b65230271f1a97026fd0e6a753f51fbc0c335d2ad0c6201f711b0da32693b \ + --hash=sha256:9b1a67243945819ce55d24a30b59d6a168e86220452d2c96f4d1f093e71c0c49 \ + --hash=sha256:9c0b51672e466fd7e56230ffbae7f1639e18d0ce023351fb75da21b71bc2c960 \ + --hash=sha256:9e54f3808e2b6b945078c41aa8d9b5834b28c50843846e97807e5adb75fa9705 \ + --hash=sha256:a02c833f38f36546ba65a452127633afce4cf0dd7296b753d3bb54e55e5c0174 \ + --hash=sha256:a12b80df61aab7b98b490fe9e4879925ba666fccdfcd175252ce4d9035865ace \ + --hash=sha256:a16bcd08ab0bcdfc7e8801d9c4a9cc17e58418e4d48ddc6ded4e9e4b1a94062b \ + --hash=sha256:a56df3239294ea5964adf074c54bcc4f0ccd21636049a2cf3ca9cf03b5d03cf1 \ + --hash=sha256:a709e881723c9b18acddcfb8ba357322491ad553e277cf467e1e7e20e2d90561 \ + --hash=sha256:ab49d4b2a6a1d415ddb9f37a21e02e0d5dbfe10b7870b21bf779fc21e9156157 \ + --hash=sha256:ae9e0b37a834cef7ce8f99de6498f8fad4a2c0bf6bfc3d02abd8ed56aa15b2de \ + --hash=sha256:b4a9eefdc70bf8bf9857f0290f973dec534ac84c35cd6a7f4083be43e7170a8f \ + --hash=sha256:b63c6e6738d7c3470ad01601e23376aa511e50e1f3931395b9f9c722406d1a67 \ + --hash=sha256:b7b1dae39230a393df353827c855a5f176271c23434cfd2db74e0e424e693e10 \ + --hash=sha256:b8d14b71c0b12963fe8a62aac87119f1afdf4cb88a400f61ca5ae581449efcb5 \ + --hash=sha256:b9f95dcdea9d4f805daa9ddf02617a89e484c6985fa03055459f90e87d7a0757 \ + --hash=sha256:b9fc4d0f81f394689e0814617aadc4f2ea0e8025f38c226cbf22d3b5ddbf025d \ + --hash=sha256:bd03ea7606833655048dab1a00734a2875e3e86c276e1d772b2a02556f0d895f \ + 
--hash=sha256:bd0d68edd7dfca1b2eca9361a44ac9f24b078de3481003159929a0573f21a6bf \ + --hash=sha256:bda117c4148e81f746655d5a3239ae9bd00cb7bc3ca178b5fc5a5997e9744183 \ + --hash=sha256:bf742e149121dc5648ba0a08ea0871e87b660467ef168a3a5e53bc1fbd64bb74 \ + --hash=sha256:c2428d358d85e8da9d37cba18b8c4047c55222007a84f97156a5b22028dfbfc0 \ + --hash=sha256:c2e85fe4698b6a56d5e2ebf7ae87544d668eb6bde1ad1226c13f44663f20ec9e \ + --hash=sha256:c43b8b5bab288b6b90dac410cca7e986a4fa747a2e8f94615aea407da706980d \ + --hash=sha256:cededd6738e1c153530793998e31c05086582b08315db48ab66649768f326baa \ + --hash=sha256:d06e5c5fed5caedd2e540d62e5b1c25e8c82431b9e577c33537e5fa4aa909539 \ + --hash=sha256:d772afdb22555f0c58cfc741bdae44180122b3616faa1ecadb595cd526e4c993 \ + --hash=sha256:d897e81f8d0cbd2abb82226d1860ad2e1ab3ff16d7b08c96ca00df9d45409ef4 \ + --hash=sha256:de0a37f21d0d364954ad5de1970491d7fbd0fb1ef7417d4d56a36dc01ba0c0a0 \ + --hash=sha256:e7745312efa9e11c17fbd3cb3097262d079da26930ae9ae7ba28fb738367cbad \ + --hash=sha256:ed46f17096e28fb28d2975834836a639af7278aa87c84f68ab08fbe5b8bd75fa \ + --hash=sha256:f4f7c956b5215d949a1f65334cf9d7612dde38f20a95f2315deef167def91a6f \ + --hash=sha256:f50979824bde13d32b4320eedd513431c921102796d86be3eee0b58e58a3ecd1 \ + --hash=sha256:f904c24bdeabd4298f7a977ef14ca2a022ca921ed670b92ecd16ab6f3d01f867 + # via feast (pyproject.toml) packaging==26.0 \ --hash=sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4 \ --hash=sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529 diff --git a/sdk/python/requirements/py3.11-ci-requirements.txt b/sdk/python/requirements/py3.11-ci-requirements.txt index 0cb441f9906..537b73876a4 100644 --- a/sdk/python/requirements/py3.11-ci-requirements.txt +++ b/sdk/python/requirements/py3.11-ci-requirements.txt @@ -3147,7 +3147,9 @@ orjson==3.11.7 \ --hash=sha256:f4f7c956b5215d949a1f65334cf9d7612dde38f20a95f2315deef167def91a6f \ 
--hash=sha256:f50979824bde13d32b4320eedd513431c921102796d86be3eee0b58e58a3ecd1 \ --hash=sha256:f904c24bdeabd4298f7a977ef14ca2a022ca921ed670b92ecd16ab6f3d01f867 - # via trino + # via + # feast (pyproject.toml) + # trino overrides==7.7.0 \ --hash=sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a \ --hash=sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49 @@ -5719,9 +5721,9 @@ thriftpy2==0.6.0 \ --hash=sha256:f6b86112cca7bd04151ce248d781763ea5f74cc18d148476c6d16cee32db81ac \ --hash=sha256:f837ab85ae93b118766b8b28a1cec47a1daddee303e1f986a595c56379062a5c # via happybase -tifffile==2026.2.15 \ - --hash=sha256:28fe145c615fe3d33d40c2d4c9cc848f7631fd30af852583c4186069458895b2 \ - --hash=sha256:d9b427d269a708c58400e8ce5a702b26b2502087537beb88b8e29ba7ba825a90 +tifffile==2026.2.16 \ + --hash=sha256:9d509a9121431c7228c1f6f71736a73af155bdeb60c324ab09c9eb2e83cfc4b6 \ + --hash=sha256:ea76cb4d8aa290f7f164840dfe4e244d104bd90c84d5ee1e6de6d84fd4745a48 # via scikit-image timm==1.0.24 \ --hash=sha256:8301ac783410c6ad72c73c49326af6d71a9e4d1558238552796e825c2464913f \ diff --git a/sdk/python/requirements/py3.11-minimal-requirements.txt b/sdk/python/requirements/py3.11-minimal-requirements.txt index e6ae289dd43..7a2b49432ec 100644 --- a/sdk/python/requirements/py3.11-minimal-requirements.txt +++ b/sdk/python/requirements/py3.11-minimal-requirements.txt @@ -1709,6 +1709,82 @@ oauthlib==3.3.1 \ --hash=sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9 \ --hash=sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1 # via requests-oauthlib +orjson==3.11.7 \ + --hash=sha256:043d3006b7d32c7e233b8cfb1f01c651013ea079e08dcef7189a29abd8befe11 \ + --hash=sha256:0527a4510c300e3b406591b0ba69b5dc50031895b0a93743526a3fc45f59d26e \ + --hash=sha256:0724e265bc548af1dedebd9cb3d24b4e1c1e685a343be43e87ba922a5c5fff2f \ + --hash=sha256:136dcd6a2e796dfd9ffca9fc027d778567b0b7c9968d092842d3c323cef88aa8 \ + 
--hash=sha256:14f440c7268c8f8633d1b3d443a434bd70cb15686117ea6beff8fdc8f5917a1e \ + --hash=sha256:1d98b30cc1313d52d4af17d9c3d307b08389752ec5f2e5febdfada70b0f8c733 \ + --hash=sha256:1eb80451a9c351a71dfaf5b7ccc13ad065405217726b59fdbeadbcc544f9d223 \ + --hash=sha256:1ee5cc7160a821dfe14f130bc8e63e7611051f964b463d9e2a3a573204446a4d \ + --hash=sha256:23d6c20517a97a9daf1d48b580fcdc6f0516c6f4b5038823426033690b4d2650 \ + --hash=sha256:26c3b9132f783b7d7903bf1efb095fed8d4a3a85ec0d334ee8beff3d7a4749d5 \ + --hash=sha256:31c80ce534ac4ea3739c5ee751270646cbc46e45aea7576a38ffec040b4029a1 \ + --hash=sha256:3726be79e36e526e3d9c1aceaadbfb4a04ee80a72ab47b3f3c17fefb9812e7b8 \ + --hash=sha256:390a1dce0c055ddf8adb6aa94a73b45a4a7d7177b5c584b8d1c1947f2ba60fb3 \ + --hash=sha256:3a2479753bbb95b0ebcf7969f562cdb9668e6d12416a35b0dda79febf89cdea2 \ + --hash=sha256:3c4bc6c6ac52cdaa267552544c73e486fecbd710b7ac09bc024d5a78555a22f6 \ + --hash=sha256:411ebaf34d735e25e358a6d9e7978954a9c9d58cfb47bc6683cdc3964cd2f910 \ + --hash=sha256:4682d1db3bcebd2b64757e0ddf9e87ae5f00d29d16c5cdf3a62f561d08cc3dd2 \ + --hash=sha256:4a2e9c5be347b937a2e0203866f12bba36082e89b402ddb9e927d5822e43088d \ + --hash=sha256:57036b27ac8a25d81112eb0cc9835cd4833c5b16e1467816adc0015f59e870dc \ + --hash=sha256:5ede977b5fe5ac91b1dffc0a517ca4542d2ec8a6a4ff7b2652d94f640796342a \ + --hash=sha256:5fdfad2093bdd08245f2e204d977facd5f871c88c4a71230d5bcbd0e43bf6222 \ + --hash=sha256:623ad1b9548ef63886319c16fa317848e465a21513b31a6ad7b57443c3e0dcf5 \ + --hash=sha256:652c6c3af76716f4a9c290371ba2e390ede06f6603edb277b481daf37f6f464e \ + --hash=sha256:6543001328aa857187f905308a028935864aefe9968af3848401b6fe80dbb471 \ + --hash=sha256:6e776b998ac37c0396093d10290e60283f59cfe0fc3fccbd0ccc4bd04dd19892 \ + --hash=sha256:71924496986275a737f38e3f22b4e0878882b3f7a310d2ff4dc96e812789120c \ + --hash=sha256:733ae23ada68b804b222c44affed76b39e30806d38660bf1eb200520d259cc16 \ + --hash=sha256:7477aa6a6ec6139c5cb1cc7b214643592169a5494d200397c7fc95d740d5fcf3 \ + 
--hash=sha256:79cacb0b52f6004caf92405a7e1f11e6e2de8bdf9019e4f76b44ba045125cd6b \ + --hash=sha256:7ba61079379b0ae29e117db13bda5f28d939766e410d321ec1624afc6a0b0504 \ + --hash=sha256:800988273a014a0541483dc81021247d7eacb0c845a9d1a34a422bc718f41539 \ + --hash=sha256:814be4b49b228cfc0b3c565acf642dd7d13538f966e3ccde61f4f55be3e20785 \ + --hash=sha256:845c3e0d8ded9c9271cd79596b9b552448b885b97110f628fb687aee2eed11c1 \ + --hash=sha256:849e38203e5be40b776ed2718e587faf204d184fc9a008ae441f9442320c0cab \ + --hash=sha256:89e13dd3f89f1c38a9c9eba5fbf7cdc2d1feca82f5f290864b4b7a6aac704576 \ + --hash=sha256:89e440ebc74ce8ab5c7bc4ce6757b4a6b1041becb127df818f6997b5c71aa60b \ + --hash=sha256:8ff206156006da5b847c9304b6308a01e8cdbc8cce824e2779a5ba71c3def141 \ + --hash=sha256:91c81ef070c8f3220054115e1ef468b1c9ce8497b4e526cb9f68ab4dc0a7ac62 \ + --hash=sha256:9487abc2c2086e7c8eb9a211d2ce8855bae0e92586279d0d27b341d5ad76c85c \ + --hash=sha256:962d046ee1765f74a1da723f4b33e3b228fe3a48bd307acce5021dfefe0e29b2 \ + --hash=sha256:996b65230271f1a97026fd0e6a753f51fbc0c335d2ad0c6201f711b0da32693b \ + --hash=sha256:9b1a67243945819ce55d24a30b59d6a168e86220452d2c96f4d1f093e71c0c49 \ + --hash=sha256:9c0b51672e466fd7e56230ffbae7f1639e18d0ce023351fb75da21b71bc2c960 \ + --hash=sha256:9e54f3808e2b6b945078c41aa8d9b5834b28c50843846e97807e5adb75fa9705 \ + --hash=sha256:a02c833f38f36546ba65a452127633afce4cf0dd7296b753d3bb54e55e5c0174 \ + --hash=sha256:a12b80df61aab7b98b490fe9e4879925ba666fccdfcd175252ce4d9035865ace \ + --hash=sha256:a16bcd08ab0bcdfc7e8801d9c4a9cc17e58418e4d48ddc6ded4e9e4b1a94062b \ + --hash=sha256:a56df3239294ea5964adf074c54bcc4f0ccd21636049a2cf3ca9cf03b5d03cf1 \ + --hash=sha256:a709e881723c9b18acddcfb8ba357322491ad553e277cf467e1e7e20e2d90561 \ + --hash=sha256:ab49d4b2a6a1d415ddb9f37a21e02e0d5dbfe10b7870b21bf779fc21e9156157 \ + --hash=sha256:ae9e0b37a834cef7ce8f99de6498f8fad4a2c0bf6bfc3d02abd8ed56aa15b2de \ + --hash=sha256:b4a9eefdc70bf8bf9857f0290f973dec534ac84c35cd6a7f4083be43e7170a8f \ + 
--hash=sha256:b63c6e6738d7c3470ad01601e23376aa511e50e1f3931395b9f9c722406d1a67 \ + --hash=sha256:b7b1dae39230a393df353827c855a5f176271c23434cfd2db74e0e424e693e10 \ + --hash=sha256:b8d14b71c0b12963fe8a62aac87119f1afdf4cb88a400f61ca5ae581449efcb5 \ + --hash=sha256:b9f95dcdea9d4f805daa9ddf02617a89e484c6985fa03055459f90e87d7a0757 \ + --hash=sha256:b9fc4d0f81f394689e0814617aadc4f2ea0e8025f38c226cbf22d3b5ddbf025d \ + --hash=sha256:bd03ea7606833655048dab1a00734a2875e3e86c276e1d772b2a02556f0d895f \ + --hash=sha256:bd0d68edd7dfca1b2eca9361a44ac9f24b078de3481003159929a0573f21a6bf \ + --hash=sha256:bda117c4148e81f746655d5a3239ae9bd00cb7bc3ca178b5fc5a5997e9744183 \ + --hash=sha256:bf742e149121dc5648ba0a08ea0871e87b660467ef168a3a5e53bc1fbd64bb74 \ + --hash=sha256:c2428d358d85e8da9d37cba18b8c4047c55222007a84f97156a5b22028dfbfc0 \ + --hash=sha256:c2e85fe4698b6a56d5e2ebf7ae87544d668eb6bde1ad1226c13f44663f20ec9e \ + --hash=sha256:c43b8b5bab288b6b90dac410cca7e986a4fa747a2e8f94615aea407da706980d \ + --hash=sha256:cededd6738e1c153530793998e31c05086582b08315db48ab66649768f326baa \ + --hash=sha256:d06e5c5fed5caedd2e540d62e5b1c25e8c82431b9e577c33537e5fa4aa909539 \ + --hash=sha256:d772afdb22555f0c58cfc741bdae44180122b3616faa1ecadb595cd526e4c993 \ + --hash=sha256:d897e81f8d0cbd2abb82226d1860ad2e1ab3ff16d7b08c96ca00df9d45409ef4 \ + --hash=sha256:de0a37f21d0d364954ad5de1970491d7fbd0fb1ef7417d4d56a36dc01ba0c0a0 \ + --hash=sha256:e7745312efa9e11c17fbd3cb3097262d079da26930ae9ae7ba28fb738367cbad \ + --hash=sha256:ed46f17096e28fb28d2975834836a639af7278aa87c84f68ab08fbe5b8bd75fa \ + --hash=sha256:f4f7c956b5215d949a1f65334cf9d7612dde38f20a95f2315deef167def91a6f \ + --hash=sha256:f50979824bde13d32b4320eedd513431c921102796d86be3eee0b58e58a3ecd1 \ + --hash=sha256:f904c24bdeabd4298f7a977ef14ca2a022ca921ed670b92ecd16ab6f3d01f867 + # via feast (pyproject.toml) packaging==26.0 \ --hash=sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4 \ 
--hash=sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529 @@ -2723,9 +2799,9 @@ typeguard==4.5.0 \ --hash=sha256:749bea21cdb2553e12831bc29f1eae980b22c7de8331ab67ae7db9e85470b5a7 \ --hash=sha256:cfda388fc88a9ce42a41890900d6f31ee124bea9b73bb84701a32438e92165c3 # via feast (pyproject.toml) -typer==0.23.1 \ - --hash=sha256:2070374e4d31c83e7b61362fd859aa683576432fd5b026b060ad6b4cd3b86134 \ - --hash=sha256:3291ad0d3c701cbf522012faccfbb29352ff16ad262db2139e6b01f15781f14e +typer==0.24.0 \ + --hash=sha256:5fc435a9c8356f6160ed6e85a6301fdd6e3d8b2851da502050d1f92c5e9eddc8 \ + --hash=sha256:f9373dc4eff901350694f519f783c29b6d7a110fc0dcc11b1d7e353b85ca6504 # via fastapi-mcp types-pymysql==1.1.0.20251220 \ --hash=sha256:ae1c3df32a777489431e2e9963880a0df48f6591e0aa2fd3a6fabd9dee6eca54 \ diff --git a/sdk/python/requirements/py3.11-minimal-sdist-requirements-build.txt b/sdk/python/requirements/py3.11-minimal-sdist-requirements-build.txt index 26a3f9f0b15..d7307a44af0 100644 --- a/sdk/python/requirements/py3.11-minimal-sdist-requirements-build.txt +++ b/sdk/python/requirements/py3.11-minimal-sdist-requirements-build.txt @@ -526,6 +526,7 @@ maturin==1.12.2 \ --hash=sha256:f1c2e4ee43bf286b052091a3b2356a157978985837c7aed42354deb2947a4006 # via # cryptography + # orjson # pydantic-core # rpds-py # watchfiles @@ -666,13 +667,13 @@ poetry-core==2.3.1 \ # rich # rsa # tomlkit -pybind11-global==3.0.1 \ - --hash=sha256:0e8d5a68d084c50bf145ce5efdbdd00704dbe6315035d0b7a255708ddeb9faca \ - --hash=sha256:eb6ef4e8f5a60f4c0b6a9396cb7806f78d42f8d081e42a93c0bb62288f6cf403 +pybind11-global==3.0.2 \ + --hash=sha256:00a26be4cd65974133eaae7e7532e7141ccb7a88cd131995bc8d1f652852aaf9 \ + --hash=sha256:e183b4456459c35fbbbc8296eb29e241f6cf0774c0bbc3fc8349789611c6df4b # via pybind11 -pybind11==3.0.1 \ - --hash=sha256:9c0f40056a016da59bab516efb523089139fcc6f2ba7e4930854c61efb932051 \ - --hash=sha256:aa8f0aa6e0a94d3b64adfc38f560f33f15e589be2175e103c0a33c6bce55ee89 +pybind11==3.0.2 \ + 
--hash=sha256:432f01aeb68e361a3a7fc7575c2c7f497595bf640f747acd909ff238dd766e06 \ + --hash=sha256:f8a6500548919cc33bcd220d5f984688326f574fa97f1107f2f4fdb4c6fb019f # via duckdb pycparser==3.0 \ --hash=sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29 \ diff --git a/sdk/python/requirements/py3.11-minimal-sdist-requirements.txt b/sdk/python/requirements/py3.11-minimal-sdist-requirements.txt index c77e5c1b558..63b875a0c94 100644 --- a/sdk/python/requirements/py3.11-minimal-sdist-requirements.txt +++ b/sdk/python/requirements/py3.11-minimal-sdist-requirements.txt @@ -1873,6 +1873,82 @@ oauthlib==3.3.1 \ --hash=sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9 \ --hash=sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1 # via requests-oauthlib +orjson==3.11.7 \ + --hash=sha256:043d3006b7d32c7e233b8cfb1f01c651013ea079e08dcef7189a29abd8befe11 \ + --hash=sha256:0527a4510c300e3b406591b0ba69b5dc50031895b0a93743526a3fc45f59d26e \ + --hash=sha256:0724e265bc548af1dedebd9cb3d24b4e1c1e685a343be43e87ba922a5c5fff2f \ + --hash=sha256:136dcd6a2e796dfd9ffca9fc027d778567b0b7c9968d092842d3c323cef88aa8 \ + --hash=sha256:14f440c7268c8f8633d1b3d443a434bd70cb15686117ea6beff8fdc8f5917a1e \ + --hash=sha256:1d98b30cc1313d52d4af17d9c3d307b08389752ec5f2e5febdfada70b0f8c733 \ + --hash=sha256:1eb80451a9c351a71dfaf5b7ccc13ad065405217726b59fdbeadbcc544f9d223 \ + --hash=sha256:1ee5cc7160a821dfe14f130bc8e63e7611051f964b463d9e2a3a573204446a4d \ + --hash=sha256:23d6c20517a97a9daf1d48b580fcdc6f0516c6f4b5038823426033690b4d2650 \ + --hash=sha256:26c3b9132f783b7d7903bf1efb095fed8d4a3a85ec0d334ee8beff3d7a4749d5 \ + --hash=sha256:31c80ce534ac4ea3739c5ee751270646cbc46e45aea7576a38ffec040b4029a1 \ + --hash=sha256:3726be79e36e526e3d9c1aceaadbfb4a04ee80a72ab47b3f3c17fefb9812e7b8 \ + --hash=sha256:390a1dce0c055ddf8adb6aa94a73b45a4a7d7177b5c584b8d1c1947f2ba60fb3 \ + --hash=sha256:3a2479753bbb95b0ebcf7969f562cdb9668e6d12416a35b0dda79febf89cdea2 \ + 
--hash=sha256:3c4bc6c6ac52cdaa267552544c73e486fecbd710b7ac09bc024d5a78555a22f6 \ + --hash=sha256:411ebaf34d735e25e358a6d9e7978954a9c9d58cfb47bc6683cdc3964cd2f910 \ + --hash=sha256:4682d1db3bcebd2b64757e0ddf9e87ae5f00d29d16c5cdf3a62f561d08cc3dd2 \ + --hash=sha256:4a2e9c5be347b937a2e0203866f12bba36082e89b402ddb9e927d5822e43088d \ + --hash=sha256:57036b27ac8a25d81112eb0cc9835cd4833c5b16e1467816adc0015f59e870dc \ + --hash=sha256:5ede977b5fe5ac91b1dffc0a517ca4542d2ec8a6a4ff7b2652d94f640796342a \ + --hash=sha256:5fdfad2093bdd08245f2e204d977facd5f871c88c4a71230d5bcbd0e43bf6222 \ + --hash=sha256:623ad1b9548ef63886319c16fa317848e465a21513b31a6ad7b57443c3e0dcf5 \ + --hash=sha256:652c6c3af76716f4a9c290371ba2e390ede06f6603edb277b481daf37f6f464e \ + --hash=sha256:6543001328aa857187f905308a028935864aefe9968af3848401b6fe80dbb471 \ + --hash=sha256:6e776b998ac37c0396093d10290e60283f59cfe0fc3fccbd0ccc4bd04dd19892 \ + --hash=sha256:71924496986275a737f38e3f22b4e0878882b3f7a310d2ff4dc96e812789120c \ + --hash=sha256:733ae23ada68b804b222c44affed76b39e30806d38660bf1eb200520d259cc16 \ + --hash=sha256:7477aa6a6ec6139c5cb1cc7b214643592169a5494d200397c7fc95d740d5fcf3 \ + --hash=sha256:79cacb0b52f6004caf92405a7e1f11e6e2de8bdf9019e4f76b44ba045125cd6b \ + --hash=sha256:7ba61079379b0ae29e117db13bda5f28d939766e410d321ec1624afc6a0b0504 \ + --hash=sha256:800988273a014a0541483dc81021247d7eacb0c845a9d1a34a422bc718f41539 \ + --hash=sha256:814be4b49b228cfc0b3c565acf642dd7d13538f966e3ccde61f4f55be3e20785 \ + --hash=sha256:845c3e0d8ded9c9271cd79596b9b552448b885b97110f628fb687aee2eed11c1 \ + --hash=sha256:849e38203e5be40b776ed2718e587faf204d184fc9a008ae441f9442320c0cab \ + --hash=sha256:89e13dd3f89f1c38a9c9eba5fbf7cdc2d1feca82f5f290864b4b7a6aac704576 \ + --hash=sha256:89e440ebc74ce8ab5c7bc4ce6757b4a6b1041becb127df818f6997b5c71aa60b \ + --hash=sha256:8ff206156006da5b847c9304b6308a01e8cdbc8cce824e2779a5ba71c3def141 \ + --hash=sha256:91c81ef070c8f3220054115e1ef468b1c9ce8497b4e526cb9f68ab4dc0a7ac62 \ + 
--hash=sha256:9487abc2c2086e7c8eb9a211d2ce8855bae0e92586279d0d27b341d5ad76c85c \ + --hash=sha256:962d046ee1765f74a1da723f4b33e3b228fe3a48bd307acce5021dfefe0e29b2 \ + --hash=sha256:996b65230271f1a97026fd0e6a753f51fbc0c335d2ad0c6201f711b0da32693b \ + --hash=sha256:9b1a67243945819ce55d24a30b59d6a168e86220452d2c96f4d1f093e71c0c49 \ + --hash=sha256:9c0b51672e466fd7e56230ffbae7f1639e18d0ce023351fb75da21b71bc2c960 \ + --hash=sha256:9e54f3808e2b6b945078c41aa8d9b5834b28c50843846e97807e5adb75fa9705 \ + --hash=sha256:a02c833f38f36546ba65a452127633afce4cf0dd7296b753d3bb54e55e5c0174 \ + --hash=sha256:a12b80df61aab7b98b490fe9e4879925ba666fccdfcd175252ce4d9035865ace \ + --hash=sha256:a16bcd08ab0bcdfc7e8801d9c4a9cc17e58418e4d48ddc6ded4e9e4b1a94062b \ + --hash=sha256:a56df3239294ea5964adf074c54bcc4f0ccd21636049a2cf3ca9cf03b5d03cf1 \ + --hash=sha256:a709e881723c9b18acddcfb8ba357322491ad553e277cf467e1e7e20e2d90561 \ + --hash=sha256:ab49d4b2a6a1d415ddb9f37a21e02e0d5dbfe10b7870b21bf779fc21e9156157 \ + --hash=sha256:ae9e0b37a834cef7ce8f99de6498f8fad4a2c0bf6bfc3d02abd8ed56aa15b2de \ + --hash=sha256:b4a9eefdc70bf8bf9857f0290f973dec534ac84c35cd6a7f4083be43e7170a8f \ + --hash=sha256:b63c6e6738d7c3470ad01601e23376aa511e50e1f3931395b9f9c722406d1a67 \ + --hash=sha256:b7b1dae39230a393df353827c855a5f176271c23434cfd2db74e0e424e693e10 \ + --hash=sha256:b8d14b71c0b12963fe8a62aac87119f1afdf4cb88a400f61ca5ae581449efcb5 \ + --hash=sha256:b9f95dcdea9d4f805daa9ddf02617a89e484c6985fa03055459f90e87d7a0757 \ + --hash=sha256:b9fc4d0f81f394689e0814617aadc4f2ea0e8025f38c226cbf22d3b5ddbf025d \ + --hash=sha256:bd03ea7606833655048dab1a00734a2875e3e86c276e1d772b2a02556f0d895f \ + --hash=sha256:bd0d68edd7dfca1b2eca9361a44ac9f24b078de3481003159929a0573f21a6bf \ + --hash=sha256:bda117c4148e81f746655d5a3239ae9bd00cb7bc3ca178b5fc5a5997e9744183 \ + --hash=sha256:bf742e149121dc5648ba0a08ea0871e87b660467ef168a3a5e53bc1fbd64bb74 \ + --hash=sha256:c2428d358d85e8da9d37cba18b8c4047c55222007a84f97156a5b22028dfbfc0 \ + 
--hash=sha256:c2e85fe4698b6a56d5e2ebf7ae87544d668eb6bde1ad1226c13f44663f20ec9e \ + --hash=sha256:c43b8b5bab288b6b90dac410cca7e986a4fa747a2e8f94615aea407da706980d \ + --hash=sha256:cededd6738e1c153530793998e31c05086582b08315db48ab66649768f326baa \ + --hash=sha256:d06e5c5fed5caedd2e540d62e5b1c25e8c82431b9e577c33537e5fa4aa909539 \ + --hash=sha256:d772afdb22555f0c58cfc741bdae44180122b3616faa1ecadb595cd526e4c993 \ + --hash=sha256:d897e81f8d0cbd2abb82226d1860ad2e1ab3ff16d7b08c96ca00df9d45409ef4 \ + --hash=sha256:de0a37f21d0d364954ad5de1970491d7fbd0fb1ef7417d4d56a36dc01ba0c0a0 \ + --hash=sha256:e7745312efa9e11c17fbd3cb3097262d079da26930ae9ae7ba28fb738367cbad \ + --hash=sha256:ed46f17096e28fb28d2975834836a639af7278aa87c84f68ab08fbe5b8bd75fa \ + --hash=sha256:f4f7c956b5215d949a1f65334cf9d7612dde38f20a95f2315deef167def91a6f \ + --hash=sha256:f50979824bde13d32b4320eedd513431c921102796d86be3eee0b58e58a3ecd1 \ + --hash=sha256:f904c24bdeabd4298f7a977ef14ca2a022ca921ed670b92ecd16ab6f3d01f867 + # via feast (pyproject.toml) packaging==26.0 \ --hash=sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4 \ --hash=sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529 @@ -2969,9 +3045,9 @@ typeguard==4.5.0 \ --hash=sha256:749bea21cdb2553e12831bc29f1eae980b22c7de8331ab67ae7db9e85470b5a7 \ --hash=sha256:cfda388fc88a9ce42a41890900d6f31ee124bea9b73bb84701a32438e92165c3 # via feast (pyproject.toml) -typer==0.23.1 \ - --hash=sha256:2070374e4d31c83e7b61362fd859aa683576432fd5b026b060ad6b4cd3b86134 \ - --hash=sha256:3291ad0d3c701cbf522012faccfbb29352ff16ad262db2139e6b01f15781f14e +typer==0.24.0 \ + --hash=sha256:5fc435a9c8356f6160ed6e85a6301fdd6e3d8b2851da502050d1f92c5e9eddc8 \ + --hash=sha256:f9373dc4eff901350694f519f783c29b6d7a110fc0dcc11b1d7e353b85ca6504 # via fastapi-mcp types-psutil==7.0.0.20250218 \ --hash=sha256:1447a30c282aafefcf8941ece854e1100eee7b0296a9d9be9977292f0269b121 \ diff --git a/sdk/python/requirements/py3.11-mongodb-dev-requirements.txt 
b/sdk/python/requirements/py3.11-mongodb-dev-requirements.txt new file mode 100644 index 00000000000..cc38cfb316c --- /dev/null +++ b/sdk/python/requirements/py3.11-mongodb-dev-requirements.txt @@ -0,0 +1,7078 @@ +# This file was autogenerated by uv via the following command: +# uv pip compile -p 3.11 --no-strip-extras setup.py --extra mongodb --extra ci --generate-hashes --output-file sdk/python/requirements/py3.11-mongodb-dev-requirements.txt +accelerate==1.12.0 \ + --hash=sha256:3e2091cd341423207e2f084a6654b1efcd250dc326f2a37d6dde446e07cabb11 \ + --hash=sha256:70988c352feb481887077d2ab845125024b2a137a5090d6d7a32b57d03a45df6 + # via docling-ibm-models +aiobotocore==2.23.1 \ + --hash=sha256:a59f2a78629b97d52f10936b79c73de64e481a8c44a62c1871f088df6c1afc4f \ + --hash=sha256:d81c54d2eae2406ea9a473fea518fed580cf37bc4fc51ce43ba81546e5305114 + # via feast (setup.py) +aiohappyeyeballs==2.6.1 \ + --hash=sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558 \ + --hash=sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8 + # via aiohttp +aiohttp==3.13.3 \ + --hash=sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf \ + --hash=sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c \ + --hash=sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c \ + --hash=sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423 \ + --hash=sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f \ + --hash=sha256:0db318f7a6f065d84cb1e02662c526294450b314a02bd9e2a8e67f0d8564ce40 \ + --hash=sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2 \ + --hash=sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf \ + --hash=sha256:147e422fd1223005c22b4fe080f5d93ced44460f5f9c105406b753612b587821 \ + --hash=sha256:1cb93e166e6c28716c8c6aeb5f99dfb6d5ccf482d29fe9bf9a794110e6d0ab64 \ + 
--hash=sha256:215a685b6fbbfcf71dfe96e3eba7a6f58f10da1dfdf4889c7dd856abe430dca7 \ + --hash=sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998 \ + --hash=sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d \ + --hash=sha256:28e027cf2f6b641693a09f631759b4d9ce9165099d2b5d92af9bd4e197690eea \ + --hash=sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463 \ + --hash=sha256:2ba0eea45eb5cc3172dbfc497c066f19c41bac70963ea1a67d51fc92e4cf9a80 \ + --hash=sha256:2be0e9ccf23e8a94f6f0650ce06042cefc6ac703d0d7ab6c7a917289f2539ad4 \ + --hash=sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767 \ + --hash=sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43 \ + --hash=sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592 \ + --hash=sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a \ + --hash=sha256:31a83ea4aead760dfcb6962efb1d861db48c34379f2ff72db9ddddd4cda9ea2e \ + --hash=sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687 \ + --hash=sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8 \ + --hash=sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261 \ + --hash=sha256:37da61e244d1749798c151421602884db5270faf479cf0ef03af0ff68954c9dd \ + --hash=sha256:3b61b7169ababd7802f9568ed96142616a9118dd2be0d1866e920e77ec8fa92a \ + --hash=sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4 \ + --hash=sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587 \ + --hash=sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91 \ + --hash=sha256:40c5e40ecc29ba010656c18052b877a1c28f84344825efa106705e835c28530f \ + --hash=sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3 \ + --hash=sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344 \ + --hash=sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6 \ + 
--hash=sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3 \ + --hash=sha256:4ae5b5a0e1926e504c81c5b84353e7a5516d8778fbbff00429fe7b05bb25cbce \ + --hash=sha256:4e239d501f73d6db1522599e14b9b321a7e3b1de66ce33d53a765d975e9f4808 \ + --hash=sha256:56339a36b9f1fc708260c76c87e593e2afb30d26de9ae1eb445b5e051b98a7a1 \ + --hash=sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29 \ + --hash=sha256:5b179331a481cb5529fca8b432d8d3c7001cb217513c94cd72d668d1248688a3 \ + --hash=sha256:5b6073099fb654e0a068ae678b10feff95c5cae95bbfcbfa7af669d361a8aa6b \ + --hash=sha256:5d2d94f1f5fcbe40838ac51a6ab5704a6f9ea42e72ceda48de5e6b898521da51 \ + --hash=sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c \ + --hash=sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926 \ + --hash=sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64 \ + --hash=sha256:642f752c3eb117b105acbd87e2c143de710987e09860d674e068c4c2c441034f \ + --hash=sha256:65d2ccb7eabee90ce0503c17716fc77226be026dcc3e65cce859a30db715025b \ + --hash=sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e \ + --hash=sha256:694976222c711d1d00ba131904beb60534f93966562f64440d0c9d41b8cdb440 \ + --hash=sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6 \ + --hash=sha256:69c56fbc1993fa17043e24a546959c0178fe2b5782405ad4559e6c13975c15e3 \ + --hash=sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d \ + --hash=sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415 \ + --hash=sha256:75ca857eba4e20ce9f546cd59c7007b33906a4cd48f2ff6ccf1ccfc3b646f279 \ + --hash=sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce \ + --hash=sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603 \ + --hash=sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0 \ + --hash=sha256:7e63f210bc1b57ef699035f2b4b6d9ce096b5914414a49b0997c839b2bd2223c \ + 
--hash=sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf \ + --hash=sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591 \ + --hash=sha256:80dd4c21b0f6237676449c6baaa1039abae86b91636b6c91a7f8e61c87f89540 \ + --hash=sha256:81e97251d9298386c2b7dbeb490d3d1badbdc69107fb8c9299dd04eb39bddc0e \ + --hash=sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26 \ + --hash=sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a \ + --hash=sha256:859bd3f2156e81dd01432f5849fc73e2243d4a487c4fd26609b1299534ee1845 \ + --hash=sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a \ + --hash=sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9 \ + --hash=sha256:8a60e60746623925eab7d25823329941aee7242d559baa119ca2b253c88a7bd6 \ + --hash=sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba \ + --hash=sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df \ + --hash=sha256:947c26539750deeaee933b000fb6517cc770bbd064bad6033f1cff4803881e43 \ + --hash=sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679 \ + --hash=sha256:988a8c5e317544fdf0d39871559e67b6341065b87fceac641108c2096d5506b7 \ + --hash=sha256:9a9dc347e5a3dc7dfdbc1f82da0ef29e388ddb2ed281bfce9dd8248a313e62b7 \ + --hash=sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc \ + --hash=sha256:9af5e68ee47d6534d36791bbe9b646d2a7c7deb6fc24d7943628edfbb3581f29 \ + --hash=sha256:9b174f267b5cfb9a7dba9ee6859cecd234e9a681841eb85068059bc867fb8f02 \ + --hash=sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984 \ + --hash=sha256:9d4c940f02f49483b18b079d1c27ab948721852b281f8b015c058100e9421dd1 \ + --hash=sha256:9ebf57d09e131f5323464bd347135a88622d1c0976e88ce15b670e7ad57e4bd6 \ + --hash=sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632 \ + --hash=sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56 \ + 
--hash=sha256:a2212ad43c0833a873d0fb3c63fa1bacedd4cf6af2fee62bf4b739ceec3ab239 \ + --hash=sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168 \ + --hash=sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88 \ + --hash=sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc \ + --hash=sha256:af71fff7bac6bb7508956696dce8f6eec2bbb045eceb40343944b1ae62b5ef11 \ + --hash=sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046 \ + --hash=sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0 \ + --hash=sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3 \ + --hash=sha256:b46020d11d23fe16551466c77823df9cc2f2c1e63cc965daf67fa5eec6ca1877 \ + --hash=sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1 \ + --hash=sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c \ + --hash=sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25 \ + --hash=sha256:b99281b0704c103d4e11e72a76f1b543d4946fea7dd10767e7e1b5f00d4e5704 \ + --hash=sha256:bae5c2ed2eae26cc382020edad80d01f36cb8e746da40b292e68fec40421dc6a \ + --hash=sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033 \ + --hash=sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1 \ + --hash=sha256:bfc1cc2fe31a6026a8a88e4ecfb98d7f6b1fec150cfd708adbfd1d2f42257c29 \ + --hash=sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d \ + --hash=sha256:c048058117fd649334d81b4b526e94bde3ccaddb20463a815ced6ecbb7d11160 \ + --hash=sha256:c0e2d366af265797506f0283487223146af57815b388623f0357ef7eac9b209d \ + --hash=sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f \ + --hash=sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f \ + --hash=sha256:c6b8568a3bb5819a0ad087f16d40e5a3fb6099f39ea1d5625a3edc1e923fc538 \ + --hash=sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29 \ + 
--hash=sha256:d5a372fd5afd301b3a89582817fdcdb6c34124787c70dbcc616f259013e7eef7 \ + --hash=sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72 \ + --hash=sha256:dca68018bf48c251ba17c72ed479f4dafe9dbd5a73707ad8d28a38d11f3d42af \ + --hash=sha256:de2c184bb1fe2cbd2cefba613e9db29a5ab559323f994b6737e370d3da0ac455 \ + --hash=sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57 \ + --hash=sha256:e50a2e1404f063427c9d027378472316201a2290959a295169bcf25992d04558 \ + --hash=sha256:e636b3c5f61da31a92bf0d91da83e58fdfa96f178ba682f11d24f31944cdd28c \ + --hash=sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808 \ + --hash=sha256:f33ed1a2bf1997a36661874b017f5c4b760f41266341af36febaf271d179f6d7 \ + --hash=sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0 \ + --hash=sha256:f9444f105664c4ce47a2a7171a2418bce5b7bae45fb610f4e2c36045d85911d3 \ + --hash=sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730 \ + --hash=sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa \ + --hash=sha256:fee0c6bc7db1de362252affec009707a17478a00ec69f797d23ca256e36d5940 + # via + # aiobotocore + # aiohttp-cors + # fsspec + # ray +aiohttp-cors==0.8.1 \ + --hash=sha256:3180cf304c5c712d626b9162b195b1db7ddf976a2a25172b35bb2448b890a80d \ + --hash=sha256:ccacf9cb84b64939ea15f859a146af1f662a6b1d68175754a07315e305fb1403 + # via ray +aioitertools==0.13.0 \ + --hash=sha256:0be0292b856f08dfac90e31f4739432f4cb6d7520ab9eb73e143f4f2fa5259be \ + --hash=sha256:620bd241acc0bbb9ec819f1ab215866871b4bbd1f73836a55f799200ee86950c + # via aiobotocore +aiosignal==1.4.0 \ + --hash=sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e \ + --hash=sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7 + # via aiohttp +alabaster==0.7.16 \ + --hash=sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65 \ + 
--hash=sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92 + # via sphinx +altair==4.2.2 \ + --hash=sha256:39399a267c49b30d102c10411e67ab26374156a84b1aeb9fcd15140429ba49c5 \ + --hash=sha256:8b45ebeaf8557f2d760c5c77b79f02ae12aee7c46c27c06014febab6f849bc87 + # via great-expectations +annotated-doc==0.0.4 \ + --hash=sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320 \ + --hash=sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4 + # via fastapi +annotated-types==0.7.0 \ + --hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \ + --hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89 + # via pydantic +anyio==4.12.1 \ + --hash=sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703 \ + --hash=sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c + # via + # elasticsearch + # httpx + # jupyter-server + # mcp + # sse-starlette + # starlette + # watchfiles +appnope==0.1.4 \ + --hash=sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee \ + --hash=sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c + # via ipykernel +argon2-cffi==25.1.0 \ + --hash=sha256:694ae5cc8a42f4c4e2bf2ca0e64e51e23a040c6a517a85074683d3959e1346c1 \ + --hash=sha256:fdc8b074db390fccb6eb4a3604ae7231f219aa669a2652e0f20e16ba513d5741 + # via + # jupyter-server + # minio +argon2-cffi-bindings==25.1.0 \ + --hash=sha256:1db89609c06afa1a214a69a462ea741cf735b29a57530478c06eb81dd403de99 \ + --hash=sha256:1e021e87faa76ae0d413b619fe2b65ab9a037f24c60a1e6cc43457ae20de6dc6 \ + --hash=sha256:21378b40e1b8d1655dd5310c84a40fc19a9aa5e6366e835ceb8576bf0fea716d \ + --hash=sha256:2630b6240b495dfab90aebe159ff784d08ea999aa4b0d17efa734055a07d2f44 \ + --hash=sha256:3c6702abc36bf3ccba3f802b799505def420a1b7039862014a65db3205967f5a \ + --hash=sha256:3d3f05610594151994ca9ccb3c771115bdb4daef161976a266f0dd8aa9996b8f \ + 
--hash=sha256:473bcb5f82924b1becbb637b63303ec8d10e84c8d241119419897a26116515d2 \ + --hash=sha256:5acb4e41090d53f17ca1110c3427f0a130f944b896fc8c83973219c97f57b690 \ + --hash=sha256:5d588dec224e2a83edbdc785a5e6f3c6cd736f46bfd4b441bbb5aa1f5085e584 \ + --hash=sha256:6dca33a9859abf613e22733131fc9194091c1fa7cb3e131c143056b4856aa47e \ + --hash=sha256:7aef0c91e2c0fbca6fc68e7555aa60ef7008a739cbe045541e438373bc54d2b0 \ + --hash=sha256:84a461d4d84ae1295871329b346a97f68eade8c53b6ed9a7ca2d7467f3c8ff6f \ + --hash=sha256:87c33a52407e4c41f3b70a9c2d3f6056d88b10dad7695be708c5021673f55623 \ + --hash=sha256:8b8efee945193e667a396cbc7b4fb7d357297d6234d30a489905d96caabde56b \ + --hash=sha256:a1c70058c6ab1e352304ac7e3b52554daadacd8d453c1752e547c76e9c99ac44 \ + --hash=sha256:a98cd7d17e9f7ce244c0803cad3c23a7d379c301ba618a5fa76a67d116618b98 \ + --hash=sha256:aecba1723ae35330a008418a91ea6cfcedf6d31e5fbaa056a166462ff066d500 \ + --hash=sha256:b0fdbcf513833809c882823f98dc2f931cf659d9a1429616ac3adebb49f5db94 \ + --hash=sha256:b55aec3565b65f56455eebc9b9f34130440404f27fe21c3b375bf1ea4d8fbae6 \ + --hash=sha256:b957f3e6ea4d55d820e40ff76f450952807013d361a65d7f28acc0acbf29229d \ + --hash=sha256:ba92837e4a9aa6a508c8d2d7883ed5a8f6c308c89a4790e1e447a220deb79a85 \ + --hash=sha256:c4f9665de60b1b0e99bcd6be4f17d90339698ce954cfd8d9cf4f91c995165a92 \ + --hash=sha256:c87b72589133f0346a1cb8d5ecca4b933e3c9b64656c9d175270a000e73b288d \ + --hash=sha256:d3e924cfc503018a714f94a49a149fdc0b644eaead5d1f089330399134fa028a \ + --hash=sha256:da0c79c23a63723aa5d782250fbf51b768abca630285262fb5144ba5ae01e520 \ + --hash=sha256:e2fd3bfbff3c5d74fef31a722f729bf93500910db650c925c2d6ef879a7e51cb + # via argon2-cffi +arrow==1.4.0 \ + --hash=sha256:749f0769958ebdc79c173ff0b0670d59051a535fa26e8eba02953dc19eb43205 \ + --hash=sha256:ed0cc050e98001b8779e84d461b0098c4ac597e88704a655582b21d116e526d7 + # via isoduration +asn1crypto==1.5.1 \ + --hash=sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c \ + 
--hash=sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67 + # via snowflake-connector-python +assertpy==1.1 \ + --hash=sha256:acc64329934ad71a3221de185517a43af33e373bb44dc05b5a9b174394ef4833 + # via feast (setup.py) +asttokens==3.0.1 \ + --hash=sha256:15a3ebc0f43c2d0a50eeafea25e19046c68398e487b9f1f5b517f7c0f40f976a \ + --hash=sha256:71a4ee5de0bde6a31d64f6b13f2293ac190344478f081c3d1bccfcf5eacb0cb7 + # via stack-data +async-lru==2.1.0 \ + --hash=sha256:9eeb2fecd3fe42cc8a787fc32ead53a3a7158cc43d039c3c55ab3e4e5b2a80ed \ + --hash=sha256:fa12dcf99a42ac1280bc16c634bbaf06883809790f6304d85cdab3f666f33a7e + # via jupyterlab +async-property==0.2.2 \ + --hash=sha256:17d9bd6ca67e27915a75d92549df64b5c7174e9dc806b30a3934dc4ff0506380 \ + --hash=sha256:8924d792b5843994537f8ed411165700b27b2bd966cefc4daeefc1253442a9d7 + # via python-keycloak +async-timeout==5.0.1 \ + --hash=sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c \ + --hash=sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3 + # via redis +atpublic==5.1 \ + --hash=sha256:135783dbd887fbddb6ef032d104da70c124f2b44b9e2d79df07b9da5334825e3 \ + --hash=sha256:abc1f4b3dbdd841cc3539e4b5e4f3ad41d658359de704e30cb36da4d4e9d3022 + # via ibis-framework +attrs==25.4.0 \ + --hash=sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11 \ + --hash=sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373 + # via + # aiohttp + # jsonlines + # jsonschema + # referencing +azure-core==1.38.0 \ + --hash=sha256:8194d2682245a3e4e3151a667c686464c3786fed7918b394d035bdcd61bb5993 \ + --hash=sha256:ab0c9b2cd71fecb1842d52c965c95285d3cfb38902f6766e4a471f1cd8905335 + # via + # azure-identity + # azure-storage-blob +azure-identity==1.25.1 \ + --hash=sha256:87ca8328883de6036443e1c37b40e8dc8fb74898240f61071e09d2e369361456 \ + --hash=sha256:e9edd720af03dff020223cd269fa3a61e8f345ea75443858273bcb44844ab651 + # via feast (setup.py) +azure-storage-blob==12.28.0 \ + 
--hash=sha256:00fb1db28bf6a7b7ecaa48e3b1d5c83bfadacc5a678b77826081304bd87d6461 \ + --hash=sha256:e7d98ea108258d29aa0efbfd591b2e2075fa1722a2fae8699f0b3c9de11eff41 + # via feast (setup.py) +babel==2.17.0 \ + --hash=sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d \ + --hash=sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2 + # via + # jupyterlab-server + # sphinx +bcrypt==5.0.0 \ + --hash=sha256:046ad6db88edb3c5ece4369af997938fb1c19d6a699b9c1b27b0db432faae4c4 \ + --hash=sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a \ + --hash=sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464 \ + --hash=sha256:0cae4cb350934dfd74c020525eeae0a5f79257e8a201c0c176f4b84fdbf2a4b4 \ + --hash=sha256:137c5156524328a24b9fac1cb5db0ba618bc97d11970b39184c1d87dc4bf1746 \ + --hash=sha256:200af71bc25f22006f4069060c88ed36f8aa4ff7f53e67ff04d2ab3f1e79a5b2 \ + --hash=sha256:212139484ab3207b1f0c00633d3be92fef3c5f0af17cad155679d03ff2ee1e41 \ + --hash=sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd \ + --hash=sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9 \ + --hash=sha256:38cac74101777a6a7d3b3e3cfefa57089b5ada650dce2baf0cbdd9d65db22a9e \ + --hash=sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538 \ + --hash=sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10 \ + --hash=sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb \ + --hash=sha256:4870a52610537037adb382444fefd3706d96d663ac44cbb2f37e3919dca3d7ef \ + --hash=sha256:48f753100931605686f74e27a7b49238122aa761a9aefe9373265b8b7aa43ea4 \ + --hash=sha256:4bfd2a34de661f34d0bda43c3e4e79df586e4716ef401fe31ea39d69d581ef23 \ + --hash=sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef \ + --hash=sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75 \ + 
--hash=sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42 \ + --hash=sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a \ + --hash=sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172 \ + --hash=sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683 \ + --hash=sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2 \ + --hash=sha256:6b8f520b61e8781efee73cba14e3e8c9556ccfb375623f4f97429544734545b4 \ + --hash=sha256:741449132f64b3524e95cd30e5cd3343006ce146088f074f31ab26b94e6c75ba \ + --hash=sha256:744d3c6b164caa658adcb72cb8cc9ad9b4b75c7db507ab4bc2480474a51989da \ + --hash=sha256:79cfa161eda8d2ddf29acad370356b47f02387153b11d46042e93a0a95127493 \ + --hash=sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254 \ + --hash=sha256:7edda91d5ab52b15636d9c30da87d2cc84f426c72b9dba7a9b4fe142ba11f534 \ + --hash=sha256:7f277a4b3390ab4bebe597800a90da0edae882c6196d3038a73adf446c4f969f \ + --hash=sha256:7f4c94dec1b5ab5d522750cb059bb9409ea8872d4494fd152b53cca99f1ddd8c \ + --hash=sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c \ + --hash=sha256:83e787d7a84dbbfba6f250dd7a5efd689e935f03dd83b0f919d39349e1f23f83 \ + --hash=sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff \ + --hash=sha256:92864f54fb48b4c718fc92a32825d0e42265a627f956bc0361fe869f1adc3e7d \ + --hash=sha256:9d52ed507c2488eddd6a95bccee4e808d3234fa78dd370e24bac65a21212b861 \ + --hash=sha256:9fffdb387abe6aa775af36ef16f55e318dcda4194ddbf82007a6f21da29de8f5 \ + --hash=sha256:a28bc05039bdf3289d757f49d616ab3efe8cf40d8e8001ccdd621cd4f98f4fc9 \ + --hash=sha256:a5393eae5722bcef046a990b84dff02b954904c36a194f6cfc817d7dca6c6f0b \ + --hash=sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac \ + --hash=sha256:b17366316c654e1ad0306a6858e189fc835eca39f7eb2cafd6aaca8ce0c40a2e \ + --hash=sha256:baade0a5657654c2984468efb7d6c110db87ea63ef5a4b54732e7e337253e44f \ + 
--hash=sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb \ + --hash=sha256:c58b56cdfb03202b3bcc9fd8daee8e8e9b6d7e3163aa97c631dfcfcc24d36c86 \ + --hash=sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980 \ + --hash=sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd \ + --hash=sha256:d8d65b564ec849643d9f7ea05c6d9f0cd7ca23bdd4ac0c2dbef1104ab504543d \ + --hash=sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1 \ + --hash=sha256:dcd58e2b3a908b5ecc9b9df2f0085592506ac2d5110786018ee5e160f28e0911 \ + --hash=sha256:dd19cf5184a90c873009244586396a6a884d591a5323f0e8a5922560718d4993 \ + --hash=sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191 \ + --hash=sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4 \ + --hash=sha256:ed2e1365e31fc73f1825fa830f1c8f8917ca1b3ca6185773b349c20fd606cec2 \ + --hash=sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8 \ + --hash=sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db \ + --hash=sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927 \ + --hash=sha256:f3c08197f3039bec79cee59a606d62b96b16669cff3949f21e74796b6e3cd2be \ + --hash=sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb \ + --hash=sha256:f6984a24db30548fd39a44360532898c33528b74aedf81c26cf29c51ee47057e \ + --hash=sha256:f70aadb7a809305226daedf75d90379c397b094755a710d7014b8b117df1ebbf \ + --hash=sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd \ + --hash=sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822 \ + --hash=sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b + # via paramiko +beautifulsoup4==4.14.3 \ + --hash=sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb \ + --hash=sha256:6292b1c5186d356bba669ef9f7f051757099565ad9ada5dd630bd9de5fa7fb86 + # via + # docling + # nbconvert +bigtree==1.2.0 \ + 
--hash=sha256:157bab9492a644243563e63a5c9a730d51267c6653046986ad42d5268bedeef7 \ + --hash=sha256:86c09a4d5cc5597db057813205f34972b4db6aac4f99fe3b97d3f322ebc13030 + # via feast (setup.py) +bleach[css]==6.3.0 \ + --hash=sha256:6f3b91b1c0a02bb9a78b5a454c92506aa0fdf197e1d5e114d2e00c6f64306d22 \ + --hash=sha256:fe10ec77c93ddf3d13a73b035abaac7a9f5e436513864ccdad516693213c65d6 + # via nbconvert +boto3==1.38.27 \ + --hash=sha256:94bd7fdd92d5701b362d4df100d21e28f8307a67ff56b6a8b0398119cf22f859 \ + --hash=sha256:95f5fe688795303a8a15e8b7e7f255cadab35eae459d00cc281a4fd77252ea80 + # via + # feast (setup.py) + # ikvpy + # moto + # snowflake-connector-python +botocore==1.38.46 \ + --hash=sha256:8798e5a418c27cf93195b077153644aea44cb171fcd56edc1ecebaa1e49e226e \ + --hash=sha256:89ca782ffbf2e8769ca9c89234cfa5ca577f1987d07d913ee3c68c4776b1eb5b + # via + # aiobotocore + # boto3 + # moto + # s3transfer + # snowflake-connector-python +build==1.4.0 \ + --hash=sha256:6a07c1b8eb6f2b311b96fcbdbce5dab5fe637ffda0fd83c9cac622e927501596 \ + --hash=sha256:f1b91b925aa322be454f8330c6fb48b465da993d1e7e7e6fa35027ec49f3c936 + # via + # feast (setup.py) + # pip-tools + # singlestoredb +cassandra-driver==3.29.3 \ + --hash=sha256:064bf45d3ca87239e11168c0110676fc64f7fdbddb4bcba9be787b8ad5f6d734 \ + --hash=sha256:0785f6e0986089e922378ae3b64b5f696440aeb595fb84c2cf3ccef220c6ae91 \ + --hash=sha256:158f7e5cb894a76a592aa0ca659a8e7c2a57ef603e04c07bbbc289a70e9ac893 \ + --hash=sha256:1c241ba08473baf31a333feb59793190d01625541c2368d3bbb0f43a586f1d6a \ + --hash=sha256:26013d768b2ea4728c09144b08c0eb86ad692e85cb15f4e52e3107abca83683c \ + --hash=sha256:27adf8869937461ad08c5fefb47857532e467b408db496db4dbf8b132a4bd623 \ + --hash=sha256:281f67af1b8df88741eef551afbb49f78e4f366a7ab23e7060a1f0d6ba655752 \ + --hash=sha256:29fc241475801872dc27c3dd1a3976373536223dd4fd1c01868ff86bdbbfd48b \ + --hash=sha256:2b72312a8b62a905da6133effbba9b0731c8e30af96e10ca77fc5c34532c6827 \ + 
--hash=sha256:2cb72808dfc46c40a6ee352ace181ce3170adde1cfd1447da91709a8cf482e20 \ + --hash=sha256:38216e13d6f2e0d4513a5b8806e70ce4a8f28a82962793a67371582fc2c7141b \ + --hash=sha256:3f654b01d8d49f68deedfaff1edcff314e3103d29130b2a034df6c490c522351 \ + --hash=sha256:51d6a5390e2454b599500049f0a5c72aa701db155c1e542f9a1157c1c45814b1 \ + --hash=sha256:54afde4aaa5b55fbc2c075e1c55fb14a5739459428f3bb81f849ad020f7d5bcf \ + --hash=sha256:572bd5a01089ab92da12f4f52b32b878547bbc544a798d8cfd042e7fc2601c75 \ + --hash=sha256:5a0113020d86e8f61c7a2ae3d508720cd036df7462a55926b85dd97ada27e143 \ + --hash=sha256:5f9858b5ccdf75dd89c20d74474b59dd3a2e2f86c7251b310011c46acdef3874 \ + --hash=sha256:638047c1f70fb14c9d8f743931d4f4f42aff6793b47afded3097c002ef8c1165 \ + --hash=sha256:63adca0f9219be3fe8789f4aa7b77c5f6a7bf65d6442959db52c653140ca4185 \ + --hash=sha256:7552fb7189acd06161f8feac7045a387dc9e03b3b9f7dcb5675178906cee792e \ + --hash=sha256:7a2f371af54cd1d153ef373a733889ebfbcc9c30e00429fc12a2569bad9239e1 \ + --hash=sha256:84b24f69a7bbe76302330d47422a7fcc1998a6a96ffd414a795d7d95992b49cb \ + --hash=sha256:891a1b6a111a591ad9f1c9e088846848dc9e6be030a6086c8c3aa5d2d837f266 \ + --hash=sha256:96ad742f5cbfb771df512959ab5de36e248ce9aa2c487fd81c37d5c0a627c094 \ + --hash=sha256:9abedc832e9a6636741299aae46c032d8c1248b507d8cebbaa2f48ec202904bc \ + --hash=sha256:9b7032b44769c454e96aa11483bfd167a87ea341268f1075b0ff84f780c910a9 \ + --hash=sha256:c935431682557ffcd3efc1c7bcb01b0f6769a1c90751a7154d5e3c905a6a2042 \ + --hash=sha256:e1d09691d757f5b1900a98cc3b6cc7d8506683a2188c01eca86545f91edbbaf5 \ + --hash=sha256:facd488c2b9be8bffcad5903566581e96d2863d2ec4bcad7f114d1b2b2f39ad0 \ + --hash=sha256:fcf45725ae1751cb934b9b827a7d9cd899bbd09eb1ad28e2160b4584de35ba77 \ + --hash=sha256:ff6b82ee4533f6fd4474d833e693b44b984f58337173ee98ed76bce08721a636 + # via feast (setup.py) +certifi==2026.1.4 \ + --hash=sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c \ + 
--hash=sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120 + # via + # clickhouse-connect + # docling + # elastic-transport + # httpcore + # httpx + # kubernetes + # minio + # requests + # snowflake-connector-python +cffi==1.17.1 \ + --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \ + --hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \ + --hash=sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1 \ + --hash=sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15 \ + --hash=sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36 \ + --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 \ + --hash=sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8 \ + --hash=sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36 \ + --hash=sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17 \ + --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \ + --hash=sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc \ + --hash=sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3 \ + --hash=sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed \ + --hash=sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702 \ + --hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \ + --hash=sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8 \ + --hash=sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 \ + --hash=sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6 \ + --hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \ + --hash=sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b \ + --hash=sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e \ + 
--hash=sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be \ + --hash=sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c \ + --hash=sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683 \ + --hash=sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9 \ + --hash=sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c \ + --hash=sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8 \ + --hash=sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1 \ + --hash=sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 \ + --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \ + --hash=sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67 \ + --hash=sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595 \ + --hash=sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0 \ + --hash=sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65 \ + --hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \ + --hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \ + --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \ + --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \ + --hash=sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3 \ + --hash=sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16 \ + --hash=sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 \ + --hash=sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e \ + --hash=sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4 \ + --hash=sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964 \ + --hash=sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c \ + 
--hash=sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576 \ + --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \ + --hash=sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3 \ + --hash=sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662 \ + --hash=sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3 \ + --hash=sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff \ + --hash=sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 \ + --hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd \ + --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \ + --hash=sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5 \ + --hash=sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14 \ + --hash=sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d \ + --hash=sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9 \ + --hash=sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7 \ + --hash=sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382 \ + --hash=sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a \ + --hash=sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e \ + --hash=sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a \ + --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \ + --hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \ + --hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ + --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b + # via + # feast (setup.py) + # argon2-cffi-bindings + # cryptography + # ikvpy + # pynacl + # snowflake-connector-python +cfgv==3.5.0 \ + 
--hash=sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0 \ + --hash=sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132 + # via pre-commit +charset-normalizer==3.4.4 \ + --hash=sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad \ + --hash=sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93 \ + --hash=sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394 \ + --hash=sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89 \ + --hash=sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc \ + --hash=sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86 \ + --hash=sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63 \ + --hash=sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d \ + --hash=sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f \ + --hash=sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8 \ + --hash=sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0 \ + --hash=sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505 \ + --hash=sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161 \ + --hash=sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af \ + --hash=sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152 \ + --hash=sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318 \ + --hash=sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72 \ + --hash=sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4 \ + --hash=sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e \ + --hash=sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3 \ + --hash=sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576 \ + 
--hash=sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c \ + --hash=sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1 \ + --hash=sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8 \ + --hash=sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1 \ + --hash=sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2 \ + --hash=sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44 \ + --hash=sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26 \ + --hash=sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88 \ + --hash=sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016 \ + --hash=sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede \ + --hash=sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf \ + --hash=sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a \ + --hash=sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc \ + --hash=sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0 \ + --hash=sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84 \ + --hash=sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db \ + --hash=sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1 \ + --hash=sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7 \ + --hash=sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed \ + --hash=sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8 \ + --hash=sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133 \ + --hash=sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e \ + --hash=sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef \ + --hash=sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14 \ + 
--hash=sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2 \ + --hash=sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0 \ + --hash=sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d \ + --hash=sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828 \ + --hash=sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f \ + --hash=sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf \ + --hash=sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6 \ + --hash=sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328 \ + --hash=sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090 \ + --hash=sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa \ + --hash=sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381 \ + --hash=sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c \ + --hash=sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb \ + --hash=sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc \ + --hash=sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a \ + --hash=sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec \ + --hash=sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc \ + --hash=sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac \ + --hash=sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e \ + --hash=sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313 \ + --hash=sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569 \ + --hash=sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3 \ + --hash=sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d \ + --hash=sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525 \ + 
--hash=sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894 \ + --hash=sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3 \ + --hash=sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9 \ + --hash=sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a \ + --hash=sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9 \ + --hash=sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14 \ + --hash=sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25 \ + --hash=sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50 \ + --hash=sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf \ + --hash=sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1 \ + --hash=sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3 \ + --hash=sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac \ + --hash=sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e \ + --hash=sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815 \ + --hash=sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c \ + --hash=sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6 \ + --hash=sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6 \ + --hash=sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e \ + --hash=sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4 \ + --hash=sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84 \ + --hash=sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69 \ + --hash=sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15 \ + --hash=sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191 \ + --hash=sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0 \ + 
--hash=sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897 \ + --hash=sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd \ + --hash=sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2 \ + --hash=sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794 \ + --hash=sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d \ + --hash=sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074 \ + --hash=sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3 \ + --hash=sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224 \ + --hash=sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838 \ + --hash=sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a \ + --hash=sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d \ + --hash=sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d \ + --hash=sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f \ + --hash=sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8 \ + --hash=sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490 \ + --hash=sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966 \ + --hash=sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9 \ + --hash=sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3 \ + --hash=sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e \ + --hash=sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608 + # via + # requests + # snowflake-connector-python +click==8.2.1 \ + --hash=sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202 \ + --hash=sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b + # via + # feast (setup.py) + # dask + # geomet + # great-expectations + # pip-tools + # ray + # typer + # 
uvicorn +clickhouse-connect==0.10.0 \ + --hash=sha256:01e5ba7696789b445392816180910a6bc9b0995cb86f3d503179e2be13991919 \ + --hash=sha256:0b3bbb1efdb3d71b6a2a2dcd607b0899f3b1ffe1e8125662709ee2ebbc1503cc \ + --hash=sha256:1159ee2c33e7eca40b53dda917a8b6a2ed889cb4c54f3d83b303b31ddb4f351d \ + --hash=sha256:1246137a53fb270d4bb8b51e56816d5b3f5cc595a5b2d281393308a34d8a5f43 \ + --hash=sha256:1405057ae1b6225e2de7879f582afcf7049d2cde858d0bda32b615d5f82ed330 \ + --hash=sha256:185975081de4dbec4096210f0c5adf1cf89e4c03e92f5eab1afbb70cf0636c14 \ + --hash=sha256:195f1824405501b747b572e1365c6265bb1629eeb712ce91eda91da3c5794879 \ + --hash=sha256:19cb3af95721013a0f8e88276277e23e960b08f7c14613a325a14c418207f54f \ + --hash=sha256:21e9fe9fbca37724898ff15e29c5332682786e0b95ba0c15b5f3a9c628c83873 \ + --hash=sha256:225d052bd5b885e43dd13b3a3bb251f76fcdd429b160558d2abb50ebe958f921 \ + --hash=sha256:28f2666e59bf478461693e10e84acaa9a7e32b427d2d3d72843fd7e0a7415a77 \ + --hash=sha256:2c755df1791c779b3a0a54e0789f6f55cbedfc6d6aa49046223e62986886b90d \ + --hash=sha256:3646fc9184a5469b95cf4a0846e6954e6e9e85666f030a5d2acae58fa8afb37e \ + --hash=sha256:40b7cf86d016ae6c6c3af6a7b5786f41c18632bfbc9e58d0c4a21a4c5d50c674 \ + --hash=sha256:42a5101decf2d9b49cf95619486e9f4d192e08d05886c513001f6238a21f4c70 \ + --hash=sha256:48554e836c6b56fe0854d9a9f565569010583d4960094d60b68a53f9f83042f0 \ + --hash=sha256:51193dc39f4169b0dd6da13003bbea60527dea92eb2408aecae7f1fb4ad2c5a4 \ + --hash=sha256:57239e8f49fc31d5993cb6b3bc14c00f2704d6a4a73c96ad97496c6c00144da5 \ + --hash=sha256:5b20b3f8f93743f4dcc61dc2bd9e5c374de1e57d4a601f48e46dd06d2d4f7b97 \ + --hash=sha256:5fa4f3763d46b90dc28b1f38eba8de83fbf6c9928f071dd66074e7d6de80e21b \ + --hash=sha256:60772faa54d56f0fa34650460910752a583f5948f44dddeabfafaecbca21fc54 \ + --hash=sha256:6286832cc79affc6fddfbf5563075effa65f80e7cd1481cf2b771ce317c67d08 \ + --hash=sha256:63bbb5721bfece698e155c01b8fa95ce4377c584f4d04b43f383824e8a8fa129 \ + 
--hash=sha256:6db414cd78333c5430e95d21c75968ad5416a37662fb7ef5536ddae1e46283ee \ + --hash=sha256:71cafb1918ec41dd46d6ec943a1d8caa3bf1f9a59c5b3d73d2dfda065d4834b7 \ + --hash=sha256:75a91c5c29d1afad1f925037747200c2a57106665dc40234bfd5e92436588874 \ + --hash=sha256:75e9de32b9a9f3c39caf5c8837eb07512fa4e8de7a182bcdbb82f2ae551d7651 \ + --hash=sha256:7907624635fe7f28e1b85c7c8b125a72679a63ecdb0b9f4250b704106ef438f8 \ + --hash=sha256:7c72d7a0564fe8e3c393ad89f19cfdc31cd7bd8b2abd9ff1a4ea66a034180a70 \ + --hash=sha256:7e0d9ad118a398c269b45591077d496ee5472cf78f4e334a709e9e2aa064eedf \ + --hash=sha256:7fbdba6b414d52e21cccb23545e3562873318a898247e9b7108aec019911f1b4 \ + --hash=sha256:7fe2a6cd98517330c66afe703fb242c0d3aa2c91f2f7dc9fb97c122c5c60c34b \ + --hash=sha256:88b4890f13163e163bf6fa61f3a013bb974c95676853b7a4e63061faf33911ac \ + --hash=sha256:8a4f20ea756e0c019e06a51d23f41edf1f0c260615e0572cb7ab0f696dfec91c \ + --hash=sha256:8d70432f1dfb88f49d7d95f62c51d762cf1fb5867e7e52aeab1f97f1bebf678e \ + --hash=sha256:92b8b6691a92d2613ee35f5759317bd4be7ba66d39bf81c4deed620feb388ca6 \ + --hash=sha256:93bf4869d27d9e86469f8fa4f0f27a618e4e63a970c3084f531c0d4706efba49 \ + --hash=sha256:9c30c902da7eb01d60b61b566603ab2069e0813b8db60b7c75a4be34b62f63e8 \ + --hash=sha256:9d9b815ec685e143ba22fb6b6803a397da2daacccaa700ced998633ff0ef5e24 \ + --hash=sha256:9eb8df083e5fda78ac7249938691c2c369e8578b5df34c709467147e8289f1d9 \ + --hash=sha256:a0256328802c6e5580513e197cef7f9ba49a99fc98e9ba410922873427569564 \ + --hash=sha256:a22457d56570eea77618e30e2a250484a7d70594dc10d636b4d5a454bb405e9a \ + --hash=sha256:a2427d312bc3526520a0be8c648479af3f6353da7a33a62db2368d6203b08efd \ + --hash=sha256:a545a9a1ebbd8489bf81dfad43ae877ce54d51ed88b635a35df9f4ea42eba6a4 \ + --hash=sha256:aacaff01523192fd319f60440908b67ca5e26c762a74a00a7c32f9913fe59e12 \ + --hash=sha256:b090c7d8e602dd084b2795265cd30610461752284763d9ad93a5d619a0e0ff21 \ + --hash=sha256:b3e393dd95bcce02307f558f6aee53bf2a1bfc83f13030c9b4e47b2045de293f \ + 
--hash=sha256:b8a708d38b81dcc8c13bb85549c904817e304d2b7f461246fed2945524b7a31b \ + --hash=sha256:bd6e1870df82dd57a47bc2a2a6f39c57da8aee43cc291a44d04babfdec5986dc \ + --hash=sha256:c4cf7a2e62874f173b34c593941da1d7472c9db6ffdd6de0123ecc3cfecf6b8d \ + --hash=sha256:d0afc1b2fef342f4b077c66fb8bf87bbe7ec74547940357239d35c249d45f983 \ + --hash=sha256:d69b3f55a3a2f5414db7bed45afcca940e78ce1867cf5cc0c202f7be21cf48e9 \ + --hash=sha256:db8452ef4efe1948c180a7becb572fb4926dfc69f9f5cdd29e70841b7e97e8dd \ + --hash=sha256:e32ef05046558928728d577ff6e053495cb5bf870e1f61fd2ea0c980587fefb7 \ + --hash=sha256:ef58f431e2ef3c2a91a6d5535484186f2f57f50eff791410548b17017563784b \ + --hash=sha256:f50fe43ddd9161986cc881ce2276d665d99c3d77f5d595c9e9497f9f10e0270b \ + --hash=sha256:f798b9941490e9d6aa1b86c6f06a602d0568cc12c0589c8cfc406fb871f42062 \ + --hash=sha256:f927722c5e054cf833a4112cf82d633e37d3b329f01e232754cc2678be268020 \ + --hash=sha256:fe7e6be0f40a8a77a90482944f5cc2aa39084c1570899e8d2d1191f62460365b + # via feast (setup.py) +cloudpickle==3.1.2 \ + --hash=sha256:7fda9eb655c9c230dab534f1983763de5835249750e85fbcef43aaa30a9a2414 \ + --hash=sha256:9acb47f6afd73f60dc1df93bb801b472f05ff42fa6c84167d25cb206be1fbf4a + # via dask +codeflare-sdk==0.33.1 \ + --hash=sha256:6622a73edde5042455ae7d76279a5279c55db4950533ea7f12aac2fc51d49bb8 \ + --hash=sha256:ad41ec5260217fd030904fb4b9fe62e26c3f51ac7999a5d607eb4b1359edd5e5 + # via feast (setup.py) +colorama==0.4.6 \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + # via + # feast (setup.py) + # great-expectations +colorful==0.5.8 \ + --hash=sha256:a9381fdda3337fbaba5771991020abc69676afa102646650b759927892875992 \ + --hash=sha256:bb16502b198be2f1c42ba3c52c703d5f651d826076817185f0294c1a549a7445 + # via ray +comm==0.2.3 \ + --hash=sha256:2dc8048c10962d55d7ad693be1e7045d891b7ce8d999c97963a5e3e99c055971 \ + 
--hash=sha256:c615d91d75f7f04f095b30d1c1711babd43bdc6419c1be9886a85f2f4e489417 + # via + # ipykernel + # ipywidgets +couchbase==4.3.2 \ + --hash=sha256:032a180afd6621358b2c73543b9c5db9939b442fc3ad6d54417c36c8a8f65838 \ + --hash=sha256:11ce688ed46edf8387bf51866618c7b4e06399e7fb34a6df002764996c109d1f \ + --hash=sha256:1f0bf68a2a67958db87a81da4d30d63915f39598482d62cd7fe9752b625dcb19 \ + --hash=sha256:220fe6b75ebbda4651a8e0370642c0f5db5da7f3b0acd9fc8e2b5b31427f9fb1 \ + --hash=sha256:2841b86eb80454279548d8102ce6e71f5bc791eb4a83cb3575b7cf4104c374b2 \ + --hash=sha256:30115cde63fc18abe587d167bbb0d37e8c253f7430e08e69f64eaca2eb7e4ca0 \ + --hash=sha256:3fb88b9f52c2099a4e25fbc30f27734c3da5c86af930de1f4102d03a3df8e77a \ + --hash=sha256:401a7d5a442196bf805746b8354636753ec12c788fba918245345c32211bdf0c \ + --hash=sha256:419a79b2a98bf3d168b264e0dfc1a0949727a2c3ef455e2ffe8e734a2fdc6e7a \ + --hash=sha256:46e492814163162a43d5cbf867eac0d685ea73c803f3238f65c0068ea4a2ce61 \ + --hash=sha256:52cdcd65fb6565f51ea4540fe823143f0fb9d650136b96b904b1650b05a06746 \ + --hash=sha256:540545fd867de02985eb16733c1b16228b0e09461e44a9c2bfad0200fdf7d09a \ + --hash=sha256:5b4eccd4a0ac30d58ea0570a1882a8c0e367a9d54d80c0f6a288e348d1b2b41e \ + --hash=sha256:5b8ee78c7a4b7451db6fcd534dc24462f2cd0e863f16495e687a23b7aecf8296 \ + --hash=sha256:687cb157c89822f463fd5373356e38ee7ebecf6306c9da9cae5b54cedad2b66d \ + --hash=sha256:6a43ce67ffe90bbb5460da5c95d778e804bcd81c037cc8a347f2966afc6c4b5a \ + --hash=sha256:6dcff2c9907ee506e63814c417212e9b51760b914609771dff92508888031dfa \ + --hash=sha256:6f3c2fc874a8b6ca7e6c8b3aaebcbfa4e14937afdb470aaa9b16724c4746b8d5 \ + --hash=sha256:6f820249b9d71593b29fb0ab3eff66ba36914076e00486ca0b99ce6ac6fd1ce9 \ + --hash=sha256:841c59deff25a3185469726d96cce0e120e6b062e724cb6981d1a2c1e2a629e5 \ + --hash=sha256:85df43305f2646203192d67a9433abb5e1098390cba6adec8c4f675ba9ba7fdc \ + --hash=sha256:86e2713759b26ee279574d740151ba4b0ad859a45fc7ac1d13fcdd39d8ee2951 \ + 
--hash=sha256:a4b964dd2b804036c57b670734b7aa02985dc5cf6d1a8f9f7d4af8feaf012fef \ + --hash=sha256:aac415f89c38482ac076b4b766537701e99f728cc1c5b2de8c16fa28ce7caa53 \ + --hash=sha256:b12288e15326b6fae027a419467403d6c9f3f9641cd9cfee0ab0930a9d2315cd \ + --hash=sha256:bbefa6111be033587b04b2586f5fc8d6db9a76ec138fea5288c8698c8f294bc2 \ + --hash=sha256:bd93d352ccb35c86eec9e5b4d1de015c26c15b52c80204f75189bde627b8b529 \ + --hash=sha256:bf5814a8e9efe405c9c81145c8afca7e55b964543984c9d8dc340163597b09b2 \ + --hash=sha256:c139c594118e1df6932491c26b825ac777fd201f49cda303892065e607f9c3ef \ + --hash=sha256:c18b7c937e63e6d869371d9c4b0a0f9cc1a87dba48950a4276e294c091d76ce5 \ + --hash=sha256:c50ae993c81d81ed0f02a37cbe034825408195b9fe29980b3379e2e05b2e1bec + # via feast (setup.py) +couchbase-columnar==1.0.0 \ + --hash=sha256:08b0947ee67f8fb15949e9323d60e5dbb44fb2d86d09f6e997a0bdcde6cd2b15 \ + --hash=sha256:0f5ea6a24a73008a2f5a6e3aae51a49f4bb360b198a1f3d2ca4bb22044fe9671 \ + --hash=sha256:16d97e8d87a5cedc12771167860ddf02e0d9c66486ef26f83622284a2aff5de2 \ + --hash=sha256:1e516734b59f6994c8485ebb7d0070c0927f8bffa8bc7e58b7b8e0f81657c1d2 \ + --hash=sha256:2ba2be7e7e13032696f690f11841a895afcde70f1a9b2aa959c95663eacfddc3 \ + --hash=sha256:2be53deb619b9770a23433ca83146076c12e9f6fd59b74009f3afb5664339c7b \ + --hash=sha256:2fa7cfc9fb06f9c8911453a7e7c80ce115dd9f24e753eedbab9f2b9415aae45d \ + --hash=sha256:3835259a260faf3818db0612b11acf0e5db525ecea44ee8cc9164ce3d564f1a8 \ + --hash=sha256:4879c98941fbba04c50c24c8635827bbcf44259b35488456623b6e3448351533 \ + --hash=sha256:49624c174b2bb7d1350be4c6a8554de7461d5748d9ee4d7aea6f880bed35ca2f \ + --hash=sha256:51d6c64aa89bebc3a157b812387bfca9592ef5efb8fbe84ed85a6da8618e911e \ + --hash=sha256:5743193fe5314b34e4c7dc6b4fbe60b7678af6b401ba3b30f81c03bf3c2ff8ab \ + --hash=sha256:732d25b08acd8e30fd620920e788213c8b0edf3936c74ad21d89db911510b64b \ + --hash=sha256:7469365678072900e4d5498b0c744011135037a840ca8d0d2c33dd627508f2e0 \ + 
--hash=sha256:776c7a52a2253250d5a75ec1f395d2d919df4b7b417005700f241dfd2b074260 \ + --hash=sha256:7c2906720ac80d5b846a077f71ffaf955af41fddd319b3c50b4496e8eec875d9 \ + --hash=sha256:82b2591691d9a188c0319ae1a5b2d67928f0c541fefbf92421a7fc604372cfcf \ + --hash=sha256:85864666cbe8fd726df310a635a522a7c27134ff66cdce455bb314ab990a0839 \ + --hash=sha256:9612170835fd2668d9968eb87a8d85ebfa38ea997d697b4265a0632ffd107b90 \ + --hash=sha256:a698825b6eb7a611fcd76a314ff470d92101505731b9252bd211a1d24ad24a32 \ + --hash=sha256:ab95caa0e5008bb2fc6b90022c6e780ceb2cf21ace2f6306e09e386f19089c18 \ + --hash=sha256:aca0a4d3453857454919dfcd2c360f91a5894c7e522c1e115335c2d3ad0673ed \ + --hash=sha256:b7a949818efdea84bf050e821e81c6293c20b7ee2c55fd68a8e772c08cdb93ba \ + --hash=sha256:cbe66361da2dda11945fcfae584bbeda153300b2dec45ebb708ba9ff53ac8373 \ + --hash=sha256:dc934b033524a4353177a792a7b525e4d2e2f67ba654dacfa80921f48e7edc1c \ + --hash=sha256:e6301c5cba0803c819ab94330c0382805f546c06dbb16108ba28af5f65cc31ab \ + --hash=sha256:ebe16a763af54ebf6aaa21bddddc28089739e37c383c206dc51353399209278d \ + --hash=sha256:efaff88520c34babf243ab0429df5c141e0dbe0c952a24e091a6e5b1374352ab \ + --hash=sha256:fa8fbddf971a2391543bc7dafaf3b581ad1a69c1fa0a474295b38a6fd8aed54f \ + --hash=sha256:fc0fad2d386c5b5df7aaaccd8751e01caa886cc640cc8c92523dd07c4e7be519 \ + --hash=sha256:fc4efa3e15190c3731478006de494b046bc57785e9c8ae99ac8b375a91683e38 + # via feast (setup.py) +coverage[toml]==7.13.1 \ + --hash=sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784 \ + --hash=sha256:0642eae483cc8c2902e4af7298bf886d605e80f26382124cddc3967c2a3df09e \ + --hash=sha256:0b609fc9cdbd1f02e51f67f51e5aee60a841ef58a68d00d5ee2c0faf357481a3 \ + --hash=sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0 \ + --hash=sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79 \ + --hash=sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75 \ + 
--hash=sha256:16cc1da46c04fb0fb128b4dc430b78fa2aba8a6c0c9f8eb391fd5103409a6ac6 \ + --hash=sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f \ + --hash=sha256:1a55d509a1dc5a5b708b5dad3b5334e07a16ad4c2185e27b40e4dba796ab7f88 \ + --hash=sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b \ + --hash=sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573 \ + --hash=sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0 \ + --hash=sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2 \ + --hash=sha256:339dc63b3eba969067b00f41f15ad161bf2946613156fb131266d8debc8e44d0 \ + --hash=sha256:3820778ea1387c2b6a818caec01c63adc5b3750211af6447e8dcfb9b6f08dbba \ + --hash=sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd \ + --hash=sha256:3e7b8bd70c48ffb28461ebe092c2345536fb18bbbf19d287c8913699735f505c \ + --hash=sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673 \ + --hash=sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e \ + --hash=sha256:45980ea19277dc0a579e432aef6a504fe098ef3a9032ead15e446eb0f1191aee \ + --hash=sha256:4d010d080c4888371033baab27e47c9df7d6fb28d0b7b7adf85a4a49be9298b3 \ + --hash=sha256:4de84e71173d4dada2897e5a0e1b7877e5eefbfe0d6a44edee6ce31d9b8ec09e \ + --hash=sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461 \ + --hash=sha256:562ec27dfa3f311e0db1ba243ec6e5f6ab96b1edfcfc6cf86f28038bc4961ce6 \ + --hash=sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6 \ + --hash=sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500 \ + --hash=sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a \ + --hash=sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9 \ + --hash=sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc \ + --hash=sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5 \ + 
--hash=sha256:6f34591000f06e62085b1865c9bc5f7858df748834662a51edadfd2c3bfe0dd3 \ + --hash=sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d \ + --hash=sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842 \ + --hash=sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1 \ + --hash=sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486 \ + --hash=sha256:77cc258aeb29a3417062758975521eae60af6f79e930d6993555eeac6a8eac29 \ + --hash=sha256:794f7c05af0763b1bbd1b9e6eff0e52ad068be3b12cd96c87de037b01390c968 \ + --hash=sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9 \ + --hash=sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4 \ + --hash=sha256:8d9bc218650022a768f3775dd7fdac1886437325d8d295d923ebcfef4892ad5c \ + --hash=sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a \ + --hash=sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465 \ + --hash=sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8 \ + --hash=sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09 \ + --hash=sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416 \ + --hash=sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894 \ + --hash=sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a \ + --hash=sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9 \ + --hash=sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4 \ + --hash=sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5 \ + --hash=sha256:9f5e772ed5fef25b3de9f2008fe67b92d46831bd2bc5bdc5dd6bfd06b83b316f \ + --hash=sha256:a03a4f3a19a189919c7055098790285cc5c5b0b3976f8d227aea39dbf9f8bfdb \ + --hash=sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd \ + --hash=sha256:a5a68357f686f8c4d527a2dc04f52e669c2fc1cbde38f6f7eb6a0e58cbd17cae \ + 
--hash=sha256:a998cc0aeeea4c6d5622a3754da5a493055d2d95186bad877b0a34ea6e6dbe0a \ + --hash=sha256:b67e47c5595b9224599016e333f5ec25392597a89d5744658f837d204e16c63e \ + --hash=sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564 \ + --hash=sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd \ + --hash=sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6 \ + --hash=sha256:bb4f8c3c9a9f34423dba193f241f617b08ffc63e27f67159f60ae6baf2dcfe0f \ + --hash=sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7 \ + --hash=sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a \ + --hash=sha256:bf100a3288f9bb7f919b87eb84f87101e197535b9bd0e2c2b5b3179633324fee \ + --hash=sha256:c223d078112e90dc0e5c4e35b98b9584164bea9fbbd221c0b21c5241f6d51b62 \ + --hash=sha256:c3d8c679607220979434f494b139dfb00131ebf70bb406553d69c1ff01a5c33d \ + --hash=sha256:c43257717611ff5e9a1d79dce8e47566235ebda63328718d9b65dd640bc832ef \ + --hash=sha256:c832ec92c4499ac463186af72f9ed4d8daec15499b16f0a879b0d1c8e5cf4a3b \ + --hash=sha256:c8e2706ceb622bc63bac98ebb10ef5da80ed70fbd8a7999a5076de3afaef0fb1 \ + --hash=sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78 \ + --hash=sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398 \ + --hash=sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53 \ + --hash=sha256:d1443ba9acbb593fa7c1c29e011d7c9761545fe35e7652e85ce7f51a16f7e08d \ + --hash=sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c \ + --hash=sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909 \ + --hash=sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f \ + --hash=sha256:d938b4a840fb1523b9dfbbb454f652967f18e197569c32266d4d13f37244c3d9 \ + --hash=sha256:db622b999ffe49cb891f2fff3b340cdc2f9797d01a0a202a0973ba2562501d90 \ + --hash=sha256:e09fbecc007f7b6afdfb3b07ce5bd9f8494b6856dd4f577d26c66c391b829851 \ + 
--hash=sha256:e1fa280b3ad78eea5be86f94f461c04943d942697e0dac889fa18fff8f5f9147 \ + --hash=sha256:e4f18eca6028ffa62adbd185a8f1e1dd242f2e68164dba5c2b74a5204850b4cf \ + --hash=sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864 \ + --hash=sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4 \ + --hash=sha256:ef6688db9bf91ba111ae734ba6ef1a063304a881749726e0d3575f5c10a9facf \ + --hash=sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4 \ + --hash=sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a \ + --hash=sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4 \ + --hash=sha256:f8dca5590fec7a89ed6826fce625595279e586ead52e9e958d3237821fbc750c \ + --hash=sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992 \ + --hash=sha256:fea07c1a39a22614acb762e3fbbb4011f65eedafcb2948feeef641ac78b4ee5c \ + --hash=sha256:ff10896fa55167371960c5908150b434b71c876dfab97b69478f22c8b445ea19 \ + --hash=sha256:ff86d4e85188bba72cfb876df3e11fa243439882c55957184af44a35bd5880b7 \ + --hash=sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766 + # via pytest-cov +cryptography==43.0.3 \ + --hash=sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362 \ + --hash=sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4 \ + --hash=sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa \ + --hash=sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83 \ + --hash=sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff \ + --hash=sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805 \ + --hash=sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6 \ + --hash=sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664 \ + --hash=sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08 \ + 
--hash=sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e \ + --hash=sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18 \ + --hash=sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f \ + --hash=sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73 \ + --hash=sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5 \ + --hash=sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984 \ + --hash=sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd \ + --hash=sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3 \ + --hash=sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e \ + --hash=sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405 \ + --hash=sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2 \ + --hash=sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c \ + --hash=sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995 \ + --hash=sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73 \ + --hash=sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16 \ + --hash=sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7 \ + --hash=sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd \ + --hash=sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7 + # via + # feast (setup.py) + # azure-identity + # azure-storage-blob + # codeflare-sdk + # great-expectations + # jwcrypto + # moto + # msal + # paramiko + # pyjwt + # pyopenssl + # snowflake-connector-python + # types-pyopenssl + # types-redis +dask[dataframe]==2026.1.1 \ + --hash=sha256:12b1dbb0d6e92f287feb4076871600b2fba3a843d35ff214776ada5e9e7a1529 \ + --hash=sha256:146b0ef2918eb581e06139183a88801b4a8c52d7c37758a91f8c3b75c54b0e15 + # via feast (setup.py) +datasets==4.0.0 \ + 
--hash=sha256:7ef95e62025fd122882dbce6cb904c8cd3fbc829de6669a5eb939c77d50e203d \ + --hash=sha256:9657e7140a9050db13443ba21cb5de185af8af944479b00e7ff1e00a61c8dbf1 + # via feast (setup.py) +db-dtypes==1.5.0 \ + --hash=sha256:abdbb2e4eb965800ed6f98af0c5c1cafff9063ace09114be2d26a7f046be2c8a \ + --hash=sha256:ad9e94243f53e104bc77dbf9ae44b580d83a770d3694483aba59c9767966daa5 + # via + # google-cloud-bigquery + # pandas-gbq +debugpy==1.8.19 \ + --hash=sha256:0601708223fe1cd0e27c6cce67a899d92c7d68e73690211e6788a4b0e1903f5b \ + --hash=sha256:14035cbdbb1fe4b642babcdcb5935c2da3b1067ac211c5c5a8fdc0bb31adbcaa \ + --hash=sha256:1e8c4d1bd230067bf1bbcdbd6032e5a57068638eb28b9153d008ecde288152af \ + --hash=sha256:327cb28c3ad9e17bc925efc7f7018195fd4787c2fe4b7af1eec11f1d19bdec62 \ + --hash=sha256:360ffd231a780abbc414ba0f005dad409e71c78637efe8f2bd75837132a41d38 \ + --hash=sha256:4468de0c30012d367944f0eab4ecb8371736e8ef9522a465f61214f344c11183 \ + --hash=sha256:6599cab8a783d1496ae9984c52cb13b7c4a3bd06a8e6c33446832a5d97ce0bee \ + --hash=sha256:66e3d2fd8f2035a8f111eb127fa508469dfa40928a89b460b41fd988684dc83d \ + --hash=sha256:76f566baaf7f3e06adbe67ffedccd2ee911d1e486f55931939ce3f0fe1090774 \ + --hash=sha256:783a519e6dfb1f3cd773a9bda592f4887a65040cb0c7bd38dde410f4e53c40d4 \ + --hash=sha256:7b62c0f015120ede25e5124a5f9d8a424e1208e3d96a36c89958f046ee21fff6 \ + --hash=sha256:806d6800246244004625d5222d7765874ab2d22f3ba5f615416cf1342d61c488 \ + --hash=sha256:85016a73ab84dea1c1f1dcd88ec692993bcbe4532d1b49ecb5f3c688ae50c606 \ + --hash=sha256:8e19a725f5d486f20e53a1dde2ab8bb2c9607c40c00a42ab646def962b41125f \ + --hash=sha256:91e35db2672a0abaf325f4868fcac9c1674a0d9ad9bb8a8c849c03a5ebba3e6d \ + --hash=sha256:a21bfdea088f713df05fa246ba0520f6ba44dd7eaec224742f51987a6979a648 \ + --hash=sha256:b1cb98e5325da3059ca24445fca48314bfddfdf65ce1b59ff07055e723f06bd2 \ + --hash=sha256:b605f17e89ba0ecee994391194285fada89cee111cfcd29d6f2ee11cbdc40976 \ + 
--hash=sha256:b7dd275cf2c99e53adb9654f5ae015f70415bbe2bacbe24cfee30d54b6aa03c5 \ + --hash=sha256:bccb1540a49cde77edc7ce7d9d075c1dbeb2414751bc0048c7a11e1b597a4c2e \ + --hash=sha256:c047177ab2d286451f242b855b650d313198c4a987140d4b35218b2855a64a4a \ + --hash=sha256:c30639998a9f9cd9699b4b621942c0179a6527f083c72351f95c6ab1728d5b73 \ + --hash=sha256:c5dcfa21de1f735a4f7ced4556339a109aa0f618d366ede9da0a3600f2516d8b \ + --hash=sha256:c9b9bf440141a36836bdbe4320a2b126bb38aafa85e1aed05d7bfbb0e2a278bf \ + --hash=sha256:d40c016c1f538dbf1762936e3aeb43a89b965069d9f60f9e39d35d9d25e6b809 \ + --hash=sha256:d9b6f633fd2865af2afba2beb0c1819b6ecd4aed1c8f90f5d1bbca3272306b10 \ + --hash=sha256:e24b1652a1df1ab04d81e7ead446a91c226de704ff5dde6bd0a0dbaab07aa3f2 \ + --hash=sha256:e9c68d9a382ec754dc05ed1d1b4ed5bd824b9f7c1a8cd1083adb84b3c93501de \ + --hash=sha256:eea7e5987445ab0b5ed258093722d5ecb8bb72217c5c9b1e21f64efe23ddebdb \ + --hash=sha256:fce6da15d73be5935b4438435c53adb512326a3e11e4f90793ea87cd9f018254 + # via ipykernel +decorator==5.2.1 \ + --hash=sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360 \ + --hash=sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a + # via ipython +defusedxml==0.7.1 \ + --hash=sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69 \ + --hash=sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61 + # via nbconvert +deltalake==0.25.5 \ + --hash=sha256:0b36afba5936f74c42920c06d140535e6efc8361f659770014944d8e69fbca09 \ + --hash=sha256:0ca70e824fd7bcd16aeaaf9a43800eb9dc6c5d05b7854328c4cb4a240643ef78 \ + --hash=sha256:173e4b83fcff10f26474ae117161c3f2bdd5f44c30c20463c24b6b8b520e7656 \ + --hash=sha256:4ea62150f9d7d37dce0d973e833b91b07139031cc416ba72ebddbdd1a748f270 \ + --hash=sha256:76be7e1ed8d13f2dc933361057a44a47a89e6112d4f5ea0a73fb510bedd96efc \ + --hash=sha256:cb1c7e826fd7c3bdd3676c7471d3b551e1a3674e44cd8e3747a0017a2c0292b7 \ + 
--hash=sha256:e8f0d24bf64455f702da8402307b22e01f91e0f76694f7c5e33c9513011e8d29 + # via feast (setup.py) +deprecation==2.1.0 \ + --hash=sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff \ + --hash=sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a + # via python-keycloak +dill==0.3.8 \ + --hash=sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca \ + --hash=sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7 + # via + # feast (setup.py) + # datasets + # multiprocess +distlib==0.4.0 \ + --hash=sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16 \ + --hash=sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d + # via virtualenv +dnspython==2.8.0 \ + --hash=sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af \ + --hash=sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f + # via + # feast (setup.py) + # pymongo +docker==7.1.0 \ + --hash=sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c \ + --hash=sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0 + # via testcontainers +docling==2.27.0 \ + --hash=sha256:1288ed75b27e33bf94daff34faffc6d11b7d7ccc13e3df84fb24adad3991f72d \ + --hash=sha256:faba35662612a2c687a3a463e501d95f645316436084af92a0442ce162429a3d + # via feast (setup.py) +docling-core[chunking]==2.60.0 \ + --hash=sha256:25499f43c9e894e6ff0be5c57a3c4fb78f87caa7808d0829fa11c056600cc2b6 \ + --hash=sha256:7703dfbad7e64fdf38bd3a9518ec80dd0162b5e7850a83fb572bfd88b62b8a39 + # via + # docling + # docling-ibm-models + # docling-parse +docling-ibm-models==3.10.3 \ + --hash=sha256:6be756e45df155a367087b93e0e5f2d65905e7e81a5f57c1d3ae57096631655a \ + --hash=sha256:e034d1398c99059998da18e38ef80af8a5d975f04de17f6e93efa075fb29cac4 + # via docling +docling-parse==4.7.3 \ + --hash=sha256:1790e7e4ae202d67875c1c48fd6f8ef5c51d10b0c23157e4989b8673f2f31308 \ + 
--hash=sha256:281347b3e937c1a5ffa6f8774ee603b64a0899fe8a6885573dec7eb48a3421d8 \ + --hash=sha256:29c91f78c877ae4637011efdb478f20a571e6794be924795b3469958a6401cd6 \ + --hash=sha256:32a2a8aedc56e82e2e3337b7afb83070db1fcfde86cbd93bba80ef2e331b6c13 \ + --hash=sha256:3b04459cc97a8a4929622e341b9981e23987a63af07db599afc5e1c4d389060b \ + --hash=sha256:45ec74bda63738c72e9f3989d19ef6ea7e3b1d61328ffc68d55b1b18eb6c4002 \ + --hash=sha256:53bd45241dca228715800afa0f96fdc826f7c234e9effcd5cefc86026ff19301 \ + --hash=sha256:5936e6bcb7969c2a13f38ecc75cada3b0919422dc845e96da4b0b7b3bbc394ce \ + --hash=sha256:5fc8f4770f9f6f90ba25f52451864a64394ddb158aea3a8fdda46a208c029cf6 \ + --hash=sha256:659234b800c094525476c6a97e771cd61491201e0c9f4af8ee6d39df9758bcae \ + --hash=sha256:65e0653d9617d38e73bab069dc3e7960668ff4a6b0ff45a7635c3790eeed8a08 \ + --hash=sha256:66896bbe925073e4d48f18ec29dcd611a390d6b2378fae72125e77b020cd5664 \ + --hash=sha256:6cb4fe8c62de06b70e6b38c4bd608f41ea3e9d7154a4e05f9a3c4d8944fe3a25 \ + --hash=sha256:75522790df921b6be5d86cf26d184a4af97c1c65e2d22698a9516bc049c398cf \ + --hash=sha256:91b9fbe8209922f46bbd8c6fd1a44193a4c364ff3fa398af7bcc8aaa404567d9 \ + --hash=sha256:978e7e7032760385264896871ae87cb3a04081766cc966c57e9750ce803162ac \ + --hash=sha256:9d18a5b1f7eecabed631c497a19f19d281a0d86f24bfe5d239e3df89bdc4df32 \ + --hash=sha256:a6e0f9e18d808c87ce0fe1900c74a3496a42743f4bba7ed4dd83a0e6e168644a \ + --hash=sha256:bd23eeb479355316fe807703220439fd1de1df4ca0145a49c35f71b184f87254 \ + --hash=sha256:c5a416ae2e1761914ee8d7dbfbe3858e106c876b5a7fccaa3917c038e2f126ec \ + --hash=sha256:ca64977a19ecd580a48f22137a30470d7ccf0995b2c25a74136c6facec7c617d \ + --hash=sha256:d3d86c51f9ce35a1b40b2f410f7271d9bd5fc58e7240f4cae7fdd2cef757e671 \ + --hash=sha256:d89231aa4fba3e38b80c11beb8edc07569e934c1f3935b51f57904fefe958ba5 \ + --hash=sha256:dc32b6f25a673e41b9a8112b6b841284f60dbac9427b7848a03b435460f74aee \ + --hash=sha256:dffd19ed373b0da5cea124606b183489a8686c3d18643e94485be1bdda5713ea \ + 
--hash=sha256:ef691045623863624f2cb7347572d0262a53cb84940ef7dd851d9f13a2eb8833 \ + --hash=sha256:f4a93f91f97055e19cade33bb957d83f8615f1d2a0103b89827aca16b31a3e22 + # via docling +docutils==0.19 \ + --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ + --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc + # via sphinx +duckdb==1.1.3 \ + --hash=sha256:00cca22df96aa3473fe4584f84888e2cf1c516e8c2dd837210daec44eadba586 \ + --hash=sha256:08935700e49c187fe0e9b2b86b5aad8a2ccd661069053e38bfaed3b9ff795efd \ + --hash=sha256:0897f83c09356206ce462f62157ce064961a5348e31ccb2a557a7531d814e70e \ + --hash=sha256:09c68522c30fc38fc972b8a75e9201616b96ae6da3444585f14cf0d116008c95 \ + --hash=sha256:0a55169d2d2e2e88077d91d4875104b58de45eff6a17a59c7dc41562c73df4be \ + --hash=sha256:0ba6baa0af33ded836b388b09433a69b8bec00263247f6bf0a05c65c897108d3 \ + --hash=sha256:183ac743f21c6a4d6adfd02b69013d5fd78e5e2cd2b4db023bc8a95457d4bc5d \ + --hash=sha256:1aa3abec8e8995a03ff1a904b0e66282d19919f562dd0a1de02f23169eeec461 \ + --hash=sha256:1c0226dc43e2ee4cc3a5a4672fddb2d76fd2cf2694443f395c02dd1bea0b7fce \ + --hash=sha256:1d9ab6143e73bcf17d62566e368c23f28aa544feddfd2d8eb50ef21034286f24 \ + --hash=sha256:2141c6b28162199999075d6031b5d63efeb97c1e68fb3d797279d31c65676269 \ + --hash=sha256:252d9b17d354beb9057098d4e5d5698e091a4f4a0d38157daeea5fc0ec161670 \ + --hash=sha256:25fb02629418c0d4d94a2bc1776edaa33f6f6ccaa00bd84eb96ecb97ae4b50e9 \ + --hash=sha256:2f073d15d11a328f2e6d5964a704517e818e930800b7f3fa83adea47f23720d3 \ + --hash=sha256:35c420f58abc79a68a286a20fd6265636175fadeca1ce964fc8ef159f3acc289 \ + --hash=sha256:4ebf5f60ddbd65c13e77cddb85fe4af671d31b851f125a4d002a313696af43f1 \ + --hash=sha256:4f0e2e5a6f5a53b79aee20856c027046fba1d73ada6178ed8467f53c3877d5e0 \ + --hash=sha256:51c6d79e05b4a0933672b1cacd6338f882158f45ef9903aef350c4427d9fc898 \ + --hash=sha256:51e7dbd968b393343b226ab3f3a7b5a68dee6d3fe59be9d802383bf916775cb8 \ + 
--hash=sha256:5ace6e4b1873afdd38bd6cc8fcf90310fb2d454f29c39a61d0c0cf1a24ad6c8d \ + --hash=sha256:5d57776539211e79b11e94f2f6d63de77885f23f14982e0fac066f2885fcf3ff \ + --hash=sha256:6411e21a2128d478efbd023f2bdff12464d146f92bc3e9c49247240448ace5a6 \ + --hash=sha256:647f17bd126170d96a38a9a6f25fca47ebb0261e5e44881e3782989033c94686 \ + --hash=sha256:68c3a46ab08836fe041d15dcbf838f74a990d551db47cb24ab1c4576fc19351c \ + --hash=sha256:77f26884c7b807c7edd07f95cf0b00e6d47f0de4a534ac1706a58f8bc70d0d31 \ + --hash=sha256:7c71169fa804c0b65e49afe423ddc2dc83e198640e3b041028da8110f7cd16f7 \ + --hash=sha256:80158f4c7c7ada46245837d5b6869a336bbaa28436fbb0537663fa324a2750cd \ + --hash=sha256:872d38b65b66e3219d2400c732585c5b4d11b13d7a36cd97908d7981526e9898 \ + --hash=sha256:8ee97ec337794c162c0638dda3b4a30a483d0587deda22d45e1909036ff0b739 \ + --hash=sha256:911d58c22645bfca4a5a049ff53a0afd1537bc18fedb13bc440b2e5af3c46148 \ + --hash=sha256:9c619e4849837c8c83666f2cd5c6c031300cd2601e9564b47aa5de458ff6e69d \ + --hash=sha256:9d0767ada9f06faa5afcf63eb7ba1befaccfbcfdac5ff86f0168c673dd1f47aa \ + --hash=sha256:9e3f5cd604e7c39527e6060f430769b72234345baaa0987f9500988b2814f5e4 \ + --hash=sha256:a1f83c7217c188b7ab42e6a0963f42070d9aed114f6200e3c923c8899c090f16 \ + --hash=sha256:a1fa0c502f257fa9caca60b8b1478ec0f3295f34bb2efdc10776fc731b8a6c5f \ + --hash=sha256:a30dd599b8090ea6eafdfb5a9f1b872d78bac318b6914ada2d35c7974d643640 \ + --hash=sha256:a433ae9e72c5f397c44abdaa3c781d94f94f4065bcbf99ecd39433058c64cb38 \ + --hash=sha256:a4748635875fc3c19a7320a6ae7410f9295557450c0ebab6d6712de12640929a \ + --hash=sha256:b74e121ab65dbec5290f33ca92301e3a4e81797966c8d9feef6efdf05fc6dafd \ + --hash=sha256:c443d3d502335e69fc1e35295fcfd1108f72cb984af54c536adfd7875e79cee5 \ + --hash=sha256:c5336939d83837af52731e02b6a78a446794078590aa71fd400eb17f083dda3e \ + --hash=sha256:cddc6c1a3b91dcc5f32493231b3ba98f51e6d3a44fe02839556db2b928087378 \ + --hash=sha256:d08308e0a46c748d9c30f1d67ee1143e9c5ea3fbcccc27a47e115b19e7e78aa9 \ + 
--hash=sha256:d5724fd8a49e24d730be34846b814b98ba7c304ca904fbdc98b47fa95c0b0cee \ + --hash=sha256:e4ef7ba97a65bd39d66f2a7080e6fb60e7c3e41d4c1e19245f90f53b98e3ac32 \ + --hash=sha256:e59087dbbb63705f2483544e01cccf07d5b35afa58be8931b224f3221361d537 \ + --hash=sha256:e86006958e84c5c02f08f9b96f4bc26990514eab329b1b4f71049b3727ce5989 \ + --hash=sha256:ecb1dc9062c1cc4d2d88a5e5cd8cc72af7818ab5a3c0f796ef0ffd60cfd3efb4 \ + --hash=sha256:eeacb598120040e9591f5a4edecad7080853aa8ac27e62d280f151f8c862afa3 \ + --hash=sha256:f549af9f7416573ee48db1cf8c9d27aeed245cb015f4b4f975289418c6cf7320 \ + --hash=sha256:f58db1b65593ff796c8ea6e63e2e144c944dd3d51c8d8e40dffa7f41693d35d3 \ + --hash=sha256:f9b47036945e1db32d70e414a10b1593aec641bd4c5e2056873d971cc21e978b + # via ibis-framework +dunamai==1.25.0 \ + --hash=sha256:7f9dc687dd3256e613b6cc978d9daabfd2bb5deb8adc541fc135ee423ffa98ab \ + --hash=sha256:a7f8360ea286d3dbaf0b6a1473f9253280ac93d619836ad4514facb70c0719d1 + # via poetry-dynamic-versioning +durationpy==0.10 \ + --hash=sha256:1fa6893409a6e739c9c72334fc65cca1f355dbdd93405d30f726deb5bde42fba \ + --hash=sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286 + # via kubernetes +easyocr==1.7.2 \ + --hash=sha256:5be12f9b0e595d443c9c3d10b0542074b50f0ec2d98b141a109cd961fd1c177c + # via docling +elastic-transport==9.2.1 \ + --hash=sha256:39e1a25e486af34ce7aa1bc9005d1c736f1b6fb04c9b64ea0604ded5a61fc1d4 \ + --hash=sha256:97d9abd638ba8aa90faa4ca1bf1a18bde0fe2088fbc8757f2eb7b299f205773d + # via elasticsearch +elasticsearch==9.2.1 \ + --hash=sha256:8665f5a0b4d29a7c2772851c05ea8a09279abb7928b7d727524613bd61d75958 \ + --hash=sha256:97f473418e8976611349757287ac982acf12f4e305182863d985d5a031c36830 + # via feast (setup.py) +entrypoints==0.4 \ + --hash=sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4 \ + --hash=sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f + # via altair +et-xmlfile==2.0.0 \ + 
--hash=sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa \ + --hash=sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54 + # via openpyxl +execnet==2.1.2 \ + --hash=sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd \ + --hash=sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec + # via pytest-xdist +executing==1.2.0 \ + --hash=sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc \ + --hash=sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107 + # via + # codeflare-sdk + # stack-data +faiss-cpu==1.10.0 \ + --hash=sha256:035e4d797e2db7fc0d0c90531d4a655d089ad5d1382b7a49358c1f2307b3a309 \ + --hash=sha256:2aca486fe2d680ea64a18d356206c91ff85db99fd34c19a757298c67c23262b1 \ + --hash=sha256:2f15b7957d474391fc63f02bfb8011b95317a580e4d9bd70c276f4bc179a17b3 \ + --hash=sha256:3118b5d7680b0e0a3cd64b3d29389d8384de4298739504fc661b658109540b4b \ + --hash=sha256:345a52dbfa980d24b93c94410eadf82d1eef359c6a42e5e0768cca96539f1c3c \ + --hash=sha256:449f3eb778d6d937e01a16a3170de4bb8aabfe87c7cb479b458fb790276310c5 \ + --hash=sha256:473d158fbd638d6ad5fb64469ba79a9f09d3494b5f4e8dfb4f40ce2fc335dca4 \ + --hash=sha256:49b6647aa9e159a2c4603cbff2e1b313becd98ad6e851737ab325c74fe8e0278 \ + --hash=sha256:6693474be296a7142ade1051ea18e7d85cedbfdee4b7eac9c52f83fed0467855 \ + --hash=sha256:6f8c0ef8b615c12c7bf612bd1fc51cffa49c1ddaa6207c6981f01ab6782e6b3b \ + --hash=sha256:70ebe60a560414dc8dd6cfe8fed105c8f002c0d11f765f5adfe8d63d42c0467f \ + --hash=sha256:74c5712d4890f15c661ab7b1b75867812e9596e1469759956fad900999bedbb5 \ + --hash=sha256:7a9fef4039ed877d40e41d5563417b154c7f8cd57621487dad13c4eb4f32515f \ + --hash=sha256:82ca5098de694e7b8495c1a8770e2c08df6e834922546dad0ae1284ff519ced6 \ + --hash=sha256:8ff6924b0f00df278afe70940ae86302066466580724c2f3238860039e9946f1 \ + --hash=sha256:9899c340f92bd94071d6faf4bef0ccb5362843daea42144d4ba857a2a1f67511 \ + 
--hash=sha256:c1108a4059c66c37c403183e566ca1ed0974a6af7557c92d49207639aab661bc \ + --hash=sha256:cb77a6a5f304890c23ffb4c566bc819c0e0cf34370b20ddff02477f2bbbaf7a3 \ + --hash=sha256:cb80b530a9ded44a7d4031a7355a237aaa0ff1f150c1176df050e0254ea5f6f6 \ + --hash=sha256:cb8473d69c3964c1bf3f8eb3e04287bb3275f536e6d9635ef32242b5f506b45d \ + --hash=sha256:dadbbb834ddc34ca7e21411811833cebaae4c5a86198dd7c2a349dbe4e7e0398 \ + --hash=sha256:dcd0cb2ec84698cbe3df9ed247d2392f09bda041ad34b92d38fa916cd019ad4b \ + --hash=sha256:e02af3696a6b9e1f9072e502f48095a305de2163c42ceb1f6f6b1db9e7ffe574 \ + --hash=sha256:e71f7e24d5b02d3a51df47b77bd10f394a1b48a8331d5c817e71e9e27a8a75ac \ + --hash=sha256:f71c5860c860df2320299f9e4f2ca1725beb559c04acb1cf961ed24e6218277a + # via feast (setup.py) +fastapi==0.128.0 \ + --hash=sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a \ + --hash=sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d + # via + # feast (setup.py) + # fastapi-mcp +fastapi-mcp==0.4.0 \ + --hash=sha256:d4a3fe7966af24d44e4b412720561c95eb12bed999a4443a88221834b3b15aec \ + --hash=sha256:d4ca9410996f4c7b8ea0d7b20fdf79878dc359ebf89cbf3b222e0b675a55097d + # via feast (setup.py) +fastjsonschema==2.21.2 \ + --hash=sha256:1c797122d0a86c5cace2e54bf4e819c36223b552017172f32c5c024a6b77e463 \ + --hash=sha256:b1eb43748041c880796cd077f1a07c3d94e93ae84bba5ed36800a33554ae05de + # via nbformat +filelock==3.20.3 \ + --hash=sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1 \ + --hash=sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1 + # via + # datasets + # huggingface-hub + # ray + # snowflake-connector-python + # torch + # transformers + # virtualenv +filetype==1.2.0 \ + --hash=sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb \ + --hash=sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25 + # via docling +fqdn==1.5.1 \ + 
--hash=sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f \ + --hash=sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014 + # via jsonschema +frozenlist==1.8.0 \ + --hash=sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686 \ + --hash=sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0 \ + --hash=sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121 \ + --hash=sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd \ + --hash=sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7 \ + --hash=sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c \ + --hash=sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84 \ + --hash=sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d \ + --hash=sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b \ + --hash=sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79 \ + --hash=sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967 \ + --hash=sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f \ + --hash=sha256:13d23a45c4cebade99340c4165bd90eeb4a56c6d8a9d8aa49568cac19a6d0dc4 \ + --hash=sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7 \ + --hash=sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef \ + --hash=sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9 \ + --hash=sha256:1a7607e17ad33361677adcd1443edf6f5da0ce5e5377b798fba20fae194825f3 \ + --hash=sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd \ + --hash=sha256:1aa77cb5697069af47472e39612976ed05343ff2e84a3dcf15437b232cbfd087 \ + --hash=sha256:1b9290cf81e95e93fdf90548ce9d3c1211cf574b8e3f4b3b7cb0537cf2227068 \ + --hash=sha256:20e63c9493d33ee48536600d1a5c95eefc870cd71e7ab037763d1fbb89cc51e7 \ + 
--hash=sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed \ + --hash=sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b \ + --hash=sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f \ + --hash=sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25 \ + --hash=sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe \ + --hash=sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143 \ + --hash=sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e \ + --hash=sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930 \ + --hash=sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37 \ + --hash=sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128 \ + --hash=sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2 \ + --hash=sha256:332db6b2563333c5671fecacd085141b5800cb866be16d5e3eb15a2086476675 \ + --hash=sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f \ + --hash=sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746 \ + --hash=sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df \ + --hash=sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8 \ + --hash=sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c \ + --hash=sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0 \ + --hash=sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad \ + --hash=sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82 \ + --hash=sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29 \ + --hash=sha256:42145cd2748ca39f32801dad54aeea10039da6f86e303659db90db1c4b614c8c \ + --hash=sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30 \ + --hash=sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf \ + 
--hash=sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62 \ + --hash=sha256:48e6d3f4ec5c7273dfe83ff27c91083c6c9065af655dc2684d2c200c94308bb5 \ + --hash=sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383 \ + --hash=sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c \ + --hash=sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52 \ + --hash=sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d \ + --hash=sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1 \ + --hash=sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a \ + --hash=sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714 \ + --hash=sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65 \ + --hash=sha256:59a6a5876ca59d1b63af8cd5e7ffffb024c3dc1e9cf9301b21a2e76286505c95 \ + --hash=sha256:5a3a935c3a4e89c733303a2d5a7c257ea44af3a56c8202df486b7f5de40f37e1 \ + --hash=sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506 \ + --hash=sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888 \ + --hash=sha256:667c3777ca571e5dbeb76f331562ff98b957431df140b54c85fd4d52eea8d8f6 \ + --hash=sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41 \ + --hash=sha256:6dc4126390929823e2d2d9dc79ab4046ed74680360fc5f38b585c12c66cdf459 \ + --hash=sha256:7398c222d1d405e796970320036b1b563892b65809d9e5261487bb2c7f7b5c6a \ + --hash=sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608 \ + --hash=sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa \ + --hash=sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8 \ + --hash=sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1 \ + --hash=sha256:799345ab092bee59f01a915620b5d014698547afd011e691a208637312db9186 \ + --hash=sha256:7bf6cdf8e07c8151fba6fe85735441240ec7f619f935a5205953d58009aef8c6 \ + 
--hash=sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed \ + --hash=sha256:80f85f0a7cc86e7a54c46d99c9e1318ff01f4687c172ede30fd52d19d1da1c8e \ + --hash=sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52 \ + --hash=sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231 \ + --hash=sha256:8a76ea0f0b9dfa06f254ee06053d93a600865b3274358ca48a352ce4f0798450 \ + --hash=sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496 \ + --hash=sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a \ + --hash=sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3 \ + --hash=sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24 \ + --hash=sha256:940d4a017dbfed9daf46a3b086e1d2167e7012ee297fef9e1c545c4d022f5178 \ + --hash=sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695 \ + --hash=sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7 \ + --hash=sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4 \ + --hash=sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e \ + --hash=sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e \ + --hash=sha256:9ff15928d62a0b80bb875655c39bf517938c7d589554cbd2669be42d97c2cb61 \ + --hash=sha256:a6483e309ca809f1efd154b4d37dc6d9f61037d6c6a81c2dc7a15cb22c8c5dca \ + --hash=sha256:a88f062f072d1589b7b46e951698950e7da00442fc1cacbe17e19e025dc327ad \ + --hash=sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b \ + --hash=sha256:adbeebaebae3526afc3c96fad434367cafbfd1b25d72369a9e5858453b1bb71a \ + --hash=sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8 \ + --hash=sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51 \ + --hash=sha256:b37f6d31b3dcea7deb5e9696e529a6aa4a898adc33db82da12e4c60a7c4d2011 \ + --hash=sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8 \ + 
--hash=sha256:b4f3b365f31c6cd4af24545ca0a244a53688cad8834e32f56831c4923b50a103 \ + --hash=sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b \ + --hash=sha256:b9be22a69a014bc47e78072d0ecae716f5eb56c15238acca0f43d6eb8e4a5bda \ + --hash=sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806 \ + --hash=sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042 \ + --hash=sha256:c23c3ff005322a6e16f71bf8692fcf4d5a304aaafe1e262c98c6d4adc7be863e \ + --hash=sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b \ + --hash=sha256:c7366fe1418a6133d5aa824ee53d406550110984de7637d65a178010f759c6ef \ + --hash=sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d \ + --hash=sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567 \ + --hash=sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a \ + --hash=sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2 \ + --hash=sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0 \ + --hash=sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e \ + --hash=sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b \ + --hash=sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d \ + --hash=sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a \ + --hash=sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52 \ + --hash=sha256:d8b7138e5cd0647e4523d6685b0eac5d4be9a184ae9634492f25c6eb38c12a47 \ + --hash=sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1 \ + --hash=sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94 \ + --hash=sha256:e2de870d16a7a53901e41b64ffdf26f2fbb8917b3e6ebf398098d72c5b20bd7f \ + --hash=sha256:e4a3408834f65da56c83528fb52ce7911484f0d1eaf7b761fc66001db1646eff \ + --hash=sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822 \ + 
--hash=sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a \ + --hash=sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11 \ + --hash=sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581 \ + --hash=sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51 \ + --hash=sha256:ef2b7b394f208233e471abc541cc6991f907ffd47dc72584acee3147899d6565 \ + --hash=sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40 \ + --hash=sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92 \ + --hash=sha256:f57fb59d9f385710aa7060e89410aeb5058b99e62f4d16b08b91986b9a2140c2 \ + --hash=sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5 \ + --hash=sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4 \ + --hash=sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93 \ + --hash=sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027 \ + --hash=sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd + # via + # aiohttp + # aiosignal +fsspec[http]==2024.9.0 \ + --hash=sha256:4b0afb90c2f21832df142f292649035d80b421f60a9e1c027802e5a0da2b04e8 \ + --hash=sha256:a0947d552d8a6efa72cc2c730b12c41d043509156966cca4fb157b0f2a0c574b + # via + # feast (setup.py) + # dask + # datasets + # huggingface-hub + # ray + # torch +geomet==1.1.0 \ + --hash=sha256:4372fe4e286a34acc6f2e9308284850bd8c4aa5bc12065e2abbd4995900db12f \ + --hash=sha256:51e92231a0ef6aaa63ac20c443377ba78a303fd2ecd179dc3567de79f3c11605 + # via cassandra-driver +google-api-core[grpc]==2.29.0 \ + --hash=sha256:84181be0f8e6b04006df75ddfe728f24489f0af57c96a529ff7cf45bc28797f7 \ + --hash=sha256:d30bc60980daa36e314b5d5a3e5958b0200cb44ca8fa1be2b614e932b75a3ea9 + # via + # feast (setup.py) + # google-cloud-bigquery + # google-cloud-bigquery-storage + # google-cloud-bigtable + # google-cloud-core + # google-cloud-datastore + # google-cloud-storage + # opencensus + # 
pandas-gbq +google-auth==2.47.0 \ + --hash=sha256:833229070a9dfee1a353ae9877dcd2dec069a8281a4e72e72f77d4a70ff945da \ + --hash=sha256:c516d68336bfde7cf0da26aab674a36fedcf04b37ac4edd59c597178760c3498 + # via + # google-api-core + # google-auth-oauthlib + # google-cloud-bigquery + # google-cloud-bigquery-storage + # google-cloud-bigtable + # google-cloud-core + # google-cloud-datastore + # google-cloud-storage + # pandas-gbq + # pydata-google-auth +google-auth-oauthlib==1.2.4 \ + --hash=sha256:0e922eea5f2baacaf8867febb782e46e7b153236c21592ed76ab3ddb77ffd772 \ + --hash=sha256:3ca93859c6cc9003c8e12b2a0868915209d7953f05a70f4880ab57d57e56ee3e + # via + # pandas-gbq + # pydata-google-auth +google-cloud-bigquery[pandas]==3.40.0 \ + --hash=sha256:0469bcf9e3dad3cab65b67cce98180c8c0aacf3253d47f0f8e976f299b49b5ab \ + --hash=sha256:b3ccb11caf0029f15b29569518f667553fe08f6f1459b959020c83fbbd8f2e68 + # via + # feast (setup.py) + # pandas-gbq +google-cloud-bigquery-storage==2.36.0 \ + --hash=sha256:1769e568070db672302771d2aec18341de10712aa9c4a8c549f417503e0149f0 \ + --hash=sha256:d3c1ce9d2d3a4d7116259889dcbe3c7c70506f71f6ce6bbe54aa0a68bbba8f8f + # via feast (setup.py) +google-cloud-bigtable==2.35.0 \ + --hash=sha256:f355bfce1f239453ec2bb3839b0f4f9937cf34ef06ef29e1ca63d58fd38d0c50 \ + --hash=sha256:f5699012c5fea4bd4bdf7e80e5e3a812a847eb8f41bf8dc2f43095d6d876b83b + # via feast (setup.py) +google-cloud-core==2.5.0 \ + --hash=sha256:67d977b41ae6c7211ee830c7912e41003ea8194bff15ae7d72fd6f51e57acabc \ + --hash=sha256:7c1b7ef5c92311717bd05301aa1a91ffbc565673d3b0b4163a52d8413a186963 + # via + # google-cloud-bigquery + # google-cloud-bigtable + # google-cloud-datastore + # google-cloud-storage +google-cloud-datastore==2.23.0 \ + --hash=sha256:24a1b1d29b902148fe41b109699f76fd3aa60591e9d547c0f8b87d7bf9ff213f \ + --hash=sha256:80049883a4ae928fdcc661ba6803ec267665dc0e6f3ce2da91441079a6bb6387 + # via feast (setup.py) +google-cloud-storage==2.19.0 \ + 
--hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ + --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 + # via feast (setup.py) +google-crc32c==1.8.0 \ + --hash=sha256:014a7e68d623e9a4222d663931febc3033c5c7c9730785727de2a81f87d5bab8 \ + --hash=sha256:01f126a5cfddc378290de52095e2c7052be2ba7656a9f0caf4bcd1bfb1833f8a \ + --hash=sha256:0470b8c3d73b5f4e3300165498e4cf25221c7eb37f1159e221d1825b6df8a7ff \ + --hash=sha256:119fcd90c57c89f30040b47c211acee231b25a45d225e3225294386f5d258288 \ + --hash=sha256:14f87e04d613dfa218d6135e81b78272c3b904e2a7053b841481b38a7d901411 \ + --hash=sha256:17446feb05abddc187e5441a45971b8394ea4c1b6efd88ab0af393fd9e0a156a \ + --hash=sha256:19b40d637a54cb71e0829179f6cb41835f0fbd9e8eb60552152a8b52c36cbe15 \ + --hash=sha256:2a3dc3318507de089c5384cc74d54318401410f82aa65b2d9cdde9d297aca7cb \ + --hash=sha256:3b9776774b24ba76831609ffbabce8cdf6fa2bd5e9df37b594221c7e333a81fa \ + --hash=sha256:3cc0c8912038065eafa603b238abf252e204accab2a704c63b9e14837a854962 \ + --hash=sha256:3d488e98b18809f5e322978d4506373599c0c13e6c5ad13e53bb44758e18d215 \ + --hash=sha256:3ebb04528e83b2634857f43f9bb8ef5b2bbe7f10f140daeb01b58f972d04736b \ + --hash=sha256:450dc98429d3e33ed2926fc99ee81001928d63460f8538f21a5d6060912a8e27 \ + --hash=sha256:4b8286b659c1335172e39563ab0a768b8015e88e08329fa5321f774275fc3113 \ + --hash=sha256:57a50a9035b75643996fbf224d6661e386c7162d1dfdab9bc4ca790947d1007f \ + --hash=sha256:61f58b28e0b21fcb249a8247ad0db2e64114e201e2e9b4200af020f3b6242c9f \ + --hash=sha256:6f35aaffc8ccd81ba3162443fabb920e65b1f20ab1952a31b13173a67811467d \ + --hash=sha256:71734788a88f551fbd6a97be9668a0020698e07b2bf5b3aa26a36c10cdfb27b2 \ + --hash=sha256:864abafe7d6e2c4c66395c1eb0fe12dc891879769b52a3d56499612ca93b6092 \ + --hash=sha256:86cfc00fe45a0ac7359e5214a1704e51a99e757d0272554874f419f79838c5f7 \ + --hash=sha256:87b0072c4ecc9505cfa16ee734b00cd7721d20a0f595be4d40d3d21b41f65ae2 \ + 
--hash=sha256:87fa445064e7db928226b2e6f0d5304ab4cd0339e664a4e9a25029f384d9bb93 \ + --hash=sha256:89c17d53d75562edfff86679244830599ee0a48efc216200691de8b02ab6b2b8 \ + --hash=sha256:8b3f68782f3cbd1bce027e48768293072813469af6a61a86f6bb4977a4380f21 \ + --hash=sha256:a428e25fb7691024de47fecfbff7ff957214da51eddded0da0ae0e0f03a2cf79 \ + --hash=sha256:b0d1a7afc6e8e4635564ba8aa5c0548e3173e41b6384d7711a9123165f582de2 \ + --hash=sha256:ba6aba18daf4d36ad4412feede6221414692f44d17e5428bdd81ad3fc1eee5dc \ + --hash=sha256:cb5c869c2923d56cb0c8e6bcdd73c009c36ae39b652dbe46a05eb4ef0ad01454 \ + --hash=sha256:d511b3153e7011a27ab6ee6bb3a5404a55b994dc1a7322c0b87b29606d9790e2 \ + --hash=sha256:db3fe8eaf0612fc8b20fa21a5f25bd785bc3cd5be69f8f3412b0ac2ffd49e733 \ + --hash=sha256:e6584b12cb06796d285d09e33f63309a09368b9d806a551d8036a4207ea43697 \ + --hash=sha256:f4b51844ef67d6cf2e9425983274da75f18b1597bb2c998e1c0a0e8d46f8f651 \ + --hash=sha256:f639065ea2042d5c034bf258a9f085eaa7af0cd250667c0635a3118e8f92c69c + # via + # google-cloud-bigtable + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.8.0 \ + --hash=sha256:dd14a116af303845a8d932ddae161a26e86cc229645bc98b39f026f9b1717582 \ + --hash=sha256:f1157ed8b46994d60a1bc432544db62352043113684d4e030ee02e77ebe9a1ae + # via + # google-cloud-bigquery + # google-cloud-storage +googleapis-common-protos[grpc]==1.72.0 \ + --hash=sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038 \ + --hash=sha256:e55a601c1b32b52d7a3e65f43563e2aa61bcd737998ee672ac9b951cd49319f5 + # via + # feast (setup.py) + # google-api-core + # grpc-google-iam-v1 + # grpcio-status +great-expectations==0.18.8 \ + --hash=sha256:ab41cfa3de829a4f77bdcd4a23244684cbb67fdacc734d38910164cd02ec95b6 \ + --hash=sha256:c1205bede593f679e22e0b3826d6ae1623c439cafd553f9f0bc2b0fd441f6ed9 + # via feast (setup.py) +grpc-google-iam-v1==0.14.3 \ + --hash=sha256:7a7f697e017a067206a3dfef44e4c634a34d3dee135fe7d7a4613fe3e59217e6 \ + 
--hash=sha256:879ac4ef33136c5491a6300e27575a9ec760f6cdf9a2518798c1b8977a5dc389 + # via google-cloud-bigtable +grpcio==1.62.3 \ + --hash=sha256:059444f0ed5dba73ab7dd0ee7e8e6b606df4130d2b0a9f010f84da4ab9f6c2d8 \ + --hash=sha256:114f2a865886ff33f85d70670e971fe0e3d252a1209656fefa5470286e3fcc76 \ + --hash=sha256:13571a5b868dcc308a55d36669a2d17d9dcd6ec8335213f6c49cc68da7305abe \ + --hash=sha256:1ac0944e9e3ee3e20825226d1e17985e9f88487055c475986cf0922a7d806d8a \ + --hash=sha256:1de3d04d9a4ec31ebe848ae1fe61e4cbc367fb9495cbf6c54368e60609a998d9 \ + --hash=sha256:216740723fc5971429550c374a0c039723b9d4dcaf7ba05227b7e0a500b06417 \ + --hash=sha256:25cd75dc73c5269932413e517be778640402f18cf9a81147e68645bd8af18ab0 \ + --hash=sha256:325c56ce94d738c31059cf91376f625d3effdff8f85c96660a5fd6395d5a707f \ + --hash=sha256:3737e5ef0aa0fcdfeaf3b4ecc1a6be78b494549b28aec4b7f61b5dc357f7d8be \ + --hash=sha256:377babc817e8b4186aed7ed56e832867c513e4e9b6c3503565c344ffdef440d4 \ + --hash=sha256:3fb7d966a976d762a31346353a19fce4afcffbeda3027dd563bc8cb521fcf799 \ + --hash=sha256:43670a25b752b7ed960fcec3db50ae5886dc0df897269b3f5119cde9b731745f \ + --hash=sha256:4439bbd759636e37b66841117a66444b454937e27f0125205d2d117d7827c643 \ + --hash=sha256:454a6aed4ebd56198d37e1f3be6f1c70838e33dd62d1e2cea12f2bcb08efecc5 \ + --hash=sha256:4c9c1502c76cadbf2e145061b63af077b08d5677afcef91970d6db87b30e2f8b \ + --hash=sha256:4dab8b64c438e19c763a6332b55e5efdbecfb7c55ae59a42c38c81ed27955fa5 \ + --hash=sha256:56757d3e4cf5d4b98a30f2c5456151607261c891fa2298a4554848dcbf83083d \ + --hash=sha256:57823dc7299c4f258ae9c32fd327d29f729d359c34d7612b36e48ed45b3ab8d0 \ + --hash=sha256:582bd03e9c3d1bd1162eb51fa0f1a35633d66e73f4f36702d3b8484a8b45eda7 \ + --hash=sha256:620165df24aae3d5b3e84cb8dd6b98f6ed49aed04126186bbf43061e301d6a21 \ + --hash=sha256:646c14e9f3356d3f34a65b58b0f8d08daa741ba1d4fcd4966b79407543332154 \ + --hash=sha256:668211f3699bbee4deaf1d6e6b8df59328bf63f077bf2dc9b8bfa4a17df4a279 \ + 
--hash=sha256:6be243f3954b0ca709f56f9cae926c84ac96e1cce19844711e647a1f1db88b99 \ + --hash=sha256:6da20a1ae010a988bc4ed47850f1122de0a88e18cd2f901fcf56007be1fc6c30 \ + --hash=sha256:7349cd7445ac65fbe1b744dcab9cc1ec02dae2256941a2e67895926cbf7422b4 \ + --hash=sha256:74f3fc9b93290e58264844f5bc46df4c58a94c4287a277dbcf75344fc6c37ca4 \ + --hash=sha256:75a4e9ac7ff185cad529f35934c5d711b88aca48b90c70e195f5657da50ce321 \ + --hash=sha256:7b33c1807d4ac564a3027d06f21a2220c116ceacaaef614deb96b3341ee58896 \ + --hash=sha256:807176971c504c598976f5a9ea62363cffbbbb6c7509d9808c2342b020880fa2 \ + --hash=sha256:80a82fdee14dc27e9299248b7aabd5a8739a1cf6b76c78aa2b848158b44a99d5 \ + --hash=sha256:81b7c121c4e52a0749bf0759185b8d5cfa48a786cd7d411cdab08269813e0aab \ + --hash=sha256:8257cc9e55fb0e2149a652d9dc14c023720f9e73c9145776e07c97e0a553922e \ + --hash=sha256:8a5f00b2508937952d23a1767739e95bbbe1120f8a66d10187d5e971d56bb55c \ + --hash=sha256:8ae2e7a390b2cdd2a95d3bf3b3385245eeb48a5e853943cb46139666462c2d1a \ + --hash=sha256:940459d81685549afdfe13a6de102c52ea4cdda093477baa53056884aadf7c48 \ + --hash=sha256:9c4aae4e683776c319169d87e7891b67b75e3f1c0beeb877902ea148b0585164 \ + --hash=sha256:9d5f8e0050a179b3bce9189b522dc91008d44f08c757a7c310e0fd06b4d3d147 \ + --hash=sha256:a1b85d35a7d9638c03321dfe466645b87e23c30df1266f9e04bbb5f44e7579a9 \ + --hash=sha256:a82410d7620c07cb32624e38f2a106980564dfef9dbe78f5b295cda9ef217c03 \ + --hash=sha256:abfe64811177e681edc81d9d9d1bd23edc5f599bd9846650864769264ace30cd \ + --hash=sha256:ac9783d5679c8da612465168c820fd0b916e70ec5496c840bddba0be7f2d124c \ + --hash=sha256:b033d50bd41e506e3b579775f54a30c16c222e0d88847ac8098d2eca2a7454cc \ + --hash=sha256:b097347441b86a8c3ad9579abaf5e5f7f82b1d74a898f47360433b2bca0e4536 \ + --hash=sha256:b708401ede2c4cb8943e8a713988fcfe6cbea105b07cd7fa7c8a9f137c22bddb \ + --hash=sha256:bd900e666bb68fff49703084be14407cd73b8a5752a7590cea98ec22de24fb5d \ + --hash=sha256:c118cfc80e2402a5595be36e9245ffd9b0e146f426cc40bdf60015bf183f8373 \ + 
--hash=sha256:c175b252d063af388523a397dbe8edbc4319761f5ee892a8a0f5890acc067362 \ + --hash=sha256:c8bb1a7aa82af6c7713cdf9dcb8f4ea1024ac7ce82bb0a0a82a49aea5237da34 \ + --hash=sha256:c95a0b76a44c548e6bd8c5f7dbecf89c77e2e16d3965be817b57769c4a30bea2 \ + --hash=sha256:e202e3f963480ca067a261179b1ac610c0f0272cb4a7942d11b7e2b3fc99c3aa \ + --hash=sha256:e9ffdb7bc9ccd56ec201aec3eab3432e1e820335b5a16ad2b37e094218dcd7a6 \ + --hash=sha256:ea7ca66a58421411c6486fa5015fe7704e2816ff0b4ec4fb779ad5e1cbbdabf3 \ + --hash=sha256:f2ff8ac447765e173842b554b31307b98b3bb1852710903ebb936e7efb7df6e5 \ + --hash=sha256:f5def814c5a4c90c8fe389c526ab881f4a28b7e239b23ed8e02dd02934dfaa1a + # via + # feast (setup.py) + # google-api-core + # google-cloud-bigquery + # google-cloud-bigquery-storage + # google-cloud-datastore + # googleapis-common-protos + # grpc-google-iam-v1 + # grpcio-health-checking + # grpcio-reflection + # grpcio-status + # grpcio-testing + # grpcio-tools + # ikvpy + # pymilvus + # qdrant-client + # ray +grpcio-health-checking==1.62.3 \ + --hash=sha256:5074ba0ce8f0dcfe328408ec5c7551b2a835720ffd9b69dade7fa3e0dc1c7a93 \ + --hash=sha256:f29da7dd144d73b4465fe48f011a91453e9ff6c8af0d449254cf80021cab3e0d + # via feast (setup.py) +grpcio-reflection==1.62.3 \ + --hash=sha256:a48ef37df81a3bada78261fc92ef382f061112f989d1312398b945cc69838b9c \ + --hash=sha256:cb84682933c400bddf94dd94f928d1c6570f500b6dd255973d4bfb495b82585f + # via feast (setup.py) +grpcio-status==1.62.3 \ + --hash=sha256:289bdd7b2459794a12cf95dc0cb727bd4a1742c37bd823f760236c937e53a485 \ + --hash=sha256:f9049b762ba8de6b1086789d8315846e094edac2c50beaf462338b301a8fd4b8 + # via + # google-api-core + # ikvpy +grpcio-testing==1.62.3 \ + --hash=sha256:06a4d7eb30d22f91368aa7f48bfc33563da13b9d951314455ca8c9c987fb75bb \ + --hash=sha256:f63577f28aaa95ea525124a0fd63c3429d71f769f4179b13f5e6cbc54979bfab + # via feast (setup.py) +grpcio-tools==1.62.3 \ + --hash=sha256:0a52cc9444df978438b8d2332c0ca99000521895229934a59f94f37ed896b133 \ + 
--hash=sha256:0a8c0c4724ae9c2181b7dbc9b186df46e4f62cb18dc184e46d06c0ebeccf569e \ + --hash=sha256:0cb3a3436ac119cbd37a7d3331d9bdf85dad21a6ac233a3411dff716dcbf401e \ + --hash=sha256:11c625eebefd1fd40a228fc8bae385e448c7e32a6ae134e43cf13bbc23f902b7 \ + --hash=sha256:11f363570dea661dde99e04a51bd108a5807b5df32a6f8bdf4860e34e94a4dbf \ + --hash=sha256:141d028bf5762d4a97f981c501da873589df3f7e02f4c1260e1921e565b376fa \ + --hash=sha256:1c989246c2aebc13253f08be32538a4039a64e12d9c18f6d662d7aee641dc8b5 \ + --hash=sha256:1da38070738da53556a4b35ab67c1b9884a5dd48fa2f243db35dc14079ea3d0c \ + --hash=sha256:27cd9ef5c5d68d5ed104b6dcb96fe9c66b82050e546c9e255716903c3d8f0373 \ + --hash=sha256:2e02d3b96f2d0e4bab9ceaa30f37d4f75571e40c6272e95364bff3125a64d184 \ + --hash=sha256:2f968b049c2849540751ec2100ab05e8086c24bead769ca734fdab58698408c1 \ + --hash=sha256:350a80485e302daaa95d335a931f97b693e170e02d43767ab06552c708808950 \ + --hash=sha256:3eae6ea76d62fcac091e1f15c2dcedf1dc3f114f8df1a972a8a0745e89f4cf61 \ + --hash=sha256:47a5c093ab256dec5714a7a345f8cc89315cb57c298b276fa244f37a0ba507f0 \ + --hash=sha256:5782883a27d3fae8c425b29a9d3dcf5f47d992848a1b76970da3b5a28d424b26 \ + --hash=sha256:6a56d344b0bab30bf342a67e33d386b0b3c4e65868ffe93c341c51e1a8853ca5 \ + --hash=sha256:6c3064610826f50bd69410c63101954676edc703e03f9e8f978a135f1aaf97c1 \ + --hash=sha256:703f46e0012af83a36082b5f30341113474ed0d91e36640da713355cd0ea5d23 \ + --hash=sha256:710fecf6a171dcbfa263a0a3e7070e0df65ba73158d4c539cec50978f11dad5d \ + --hash=sha256:7c7136015c3d62c3eef493efabaf9e3380e3e66d24ee8e94c01cb71377f57833 \ + --hash=sha256:7cc83023acd8bc72cf74c2edbe85b52098501d5b74d8377bfa06f3e929803492 \ + --hash=sha256:7f2483ea232bd72d98a6dc6d7aefd97e5bc80b15cd909b9e356d6f3e326b6e43 \ + --hash=sha256:7ff7d58a45b75df67d25f8f144936a3e44aabd91afec833ee06826bd02b7fbe7 \ + --hash=sha256:8ad0473af5544f89fc5a1ece8676dd03bdf160fb3230f967e05d0f4bf89620e3 \ + --hash=sha256:8c5d22b252dcef11dd1e0fbbe5bbfb9b4ae048e8880d33338215e8ccbdb03edc \ + 
--hash=sha256:8e62cc7164b0b7c5128e637e394eb2ef3db0e61fc798e80c301de3b2379203ed \ + --hash=sha256:962c84b4da0f3b14b3cdb10bc3837ebc5f136b67d919aea8d7bb3fd3df39528a \ + --hash=sha256:ace43b26d88a58dcff16c20d23ff72b04d0a415f64d2820f4ff06b1166f50557 \ + --hash=sha256:b47d0dda1bdb0a0ba7a9a6de88e5a1ed61f07fad613964879954961e36d49193 \ + --hash=sha256:b77f9f9cee87cd798f0fe26b7024344d1b03a7cd2d2cba7035f8433b13986325 \ + --hash=sha256:b881fd9505a84457e9f7e99362eeedd86497b659030cf57c6f0070df6d9c2b9b \ + --hash=sha256:bfda6ee8990997a9df95c5606f3096dae65f09af7ca03a1e9ca28f088caca5cf \ + --hash=sha256:c3a1ac9d394f8e229eb28eec2e04b9a6f5433fa19c9d32f1cb6066e3c5114a1d \ + --hash=sha256:c8ad5cce554e2fcaf8842dee5d9462583b601a3a78f8b76a153c38c963f58c10 \ + --hash=sha256:ca246dffeca0498be9b4e1ee169b62e64694b0f92e6d0be2573e65522f39eea9 \ + --hash=sha256:ca4f5eeadbb57cf03317d6a2857823239a63a59cc935f5bd6cf6e8b7af7a7ecc \ + --hash=sha256:d102b9b21c4e1e40af9a2ab3c6d41afba6bd29c0aa50ca013bf85c99cdc44ac5 \ + --hash=sha256:db3bc9fa39afc5e4e2767da4459df82b095ef0cab2f257707be06c44a1c2c3e5 \ + --hash=sha256:dc9ad9950119d8ae27634e68b7663cc8d340ae535a0f80d85a55e56a6973ab1f \ + --hash=sha256:e02d7c1a02e3814c94ba0cfe43d93e872c758bd8fd5c2797f894d0c49b4a1dfc \ + --hash=sha256:e0898d412a434e768a0c7e365acabe13ff1558b767e400936e26b5b6ed1ee51f \ + --hash=sha256:e18e15287c31baf574fcdf8251fb7f997d64e96c6ecf467906e576da0a079af6 \ + --hash=sha256:ec279dcf3518201fc592c65002754f58a6b542798cd7f3ecd4af086422f33f29 \ + --hash=sha256:ec6fbded0c61afe6f84e3c2a43e6d656791d95747d6d28b73eff1af64108c434 \ + --hash=sha256:eec73a005443061f4759b71a056f745e3b000dc0dc125c9f20560232dfbcbd14 \ + --hash=sha256:f3d812daffd0c2d2794756bd45a353f89e55dc8f91eb2fc840c51b9f6be62667 \ + --hash=sha256:f4b1615adf67bd8bb71f3464146a6f9949972d06d21a4f5e87e73f6464d97f57 \ + --hash=sha256:f6831fdec2b853c9daa3358535c55eed3694325889aa714070528cf8f92d7d6d + # via feast (setup.py) +gunicorn==23.0.0 \ + 
--hash=sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d \ + --hash=sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec + # via + # feast (setup.py) + # uvicorn-worker +h11==0.16.0 \ + --hash=sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1 \ + --hash=sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86 + # via + # httpcore + # uvicorn +h2==4.3.0 \ + --hash=sha256:6c59efe4323fa18b47a632221a1888bd7fde6249819beda254aeca909f221bf1 \ + --hash=sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd + # via httpx +happybase==1.3.0 \ + --hash=sha256:43b6275d2865fc1364680a03f085491cd85d8b84db3c5aa94d25186685dfd87e \ + --hash=sha256:f2644cf1ef9d662fbe6f709fcfd66bf13e949f3efd4745a3230cf5f904fb7839 + # via feast (setup.py) +hazelcast-python-client==5.6.0 \ + --hash=sha256:834b87076a47c781ef80bdcb522b86abc75ff28992dfe384e47f669f06cabb18 \ + --hash=sha256:e2cec409068990ca9b4381fe97160cc2375412334782bef45ab4c8fe4d10536c + # via feast (setup.py) +hf-xet==1.2.0 \ + --hash=sha256:10bfab528b968c70e062607f663e21e34e2bba349e8038db546646875495179e \ + --hash=sha256:210d577732b519ac6ede149d2f2f34049d44e8622bf14eb3d63bbcd2d4b332dc \ + --hash=sha256:27df617a076420d8845bea087f59303da8be17ed7ec0cd7ee3b9b9f579dff0e4 \ + --hash=sha256:293a7a3787e5c95d7be1857358a9130694a9c6021de3f27fa233f37267174382 \ + --hash=sha256:29c8fc913a529ec0a91867ce3d119ac1aac966e098cf49501800c870328cc090 \ + --hash=sha256:2a212e842647b02eb6a911187dc878e79c4aa0aa397e88dd3b26761676e8c1f8 \ + --hash=sha256:30e06daccb3a7d4c065f34fc26c14c74f4653069bb2b194e7f18f17cbe9939c0 \ + --hash=sha256:3651fd5bfe0281951b988c0facbe726aa5e347b103a675f49a3fa8144c7968fd \ + --hash=sha256:46740d4ac024a7ca9b22bebf77460ff43332868b661186a8e46c227fdae01848 \ + --hash=sha256:4c1428c9ae73ec0939410ec73023c4f842927f39db09b063b9482dac5a3bb737 \ + --hash=sha256:66e159cbfcfbb29f920db2c09ed8b660eb894640d284f102ada929b6e3dc410a \ + 
--hash=sha256:6de1fc44f58f6dd937956c8d304d8c2dea264c80680bcfa61ca4a15e7b76780f \ + --hash=sha256:7d40b18769bb9a8bc82a9ede575ce1a44c75eb80e7375a01d76259089529b5dc \ + --hash=sha256:9c91d5ae931510107f148874e9e2de8a16052b6f1b3ca3c1b12f15ccb491390f \ + --hash=sha256:a55558084c16b09b5ed32ab9ed38421e2d87cf3f1f89815764d1177081b99865 \ + --hash=sha256:a8c27070ca547293b6890c4bf389f713f80e8c478631432962bb7f4bc0bd7d7f \ + --hash=sha256:b70218dd548e9840224df5638fdc94bd033552963cfa97f9170829381179c813 \ + --hash=sha256:cd3a6027d59cfb60177c12d6424e31f4b5ff13d8e3a1247b3a584bf8977e6df5 \ + --hash=sha256:ceeefcd1b7aed4956ae8499e2199607765fbd1c60510752003b6cc0b8413b649 \ + --hash=sha256:d06fa97c8562fb3ee7a378dd9b51e343bc5bc8190254202c9771029152f5e08c \ + --hash=sha256:e6584a52253f72c9f52f9e549d5895ca7a471608495c4ecaa6cc73dba2b24d69 \ + --hash=sha256:f182f264ed2acd566c514e45da9f2119110e48a87a327ca271027904c70c5832 + # via huggingface-hub +hiredis==2.4.0 \ + --hash=sha256:06815c3b9bf7225c4dcc9dd9dfb5a9fa91b4f680104443ef3fcd78410d7eb027 \ + --hash=sha256:070a0198401bc567709b9edff7f01e94c136dcca69d0ded4747b116bb0b8b577 \ + --hash=sha256:082ba6a3189d59f44bf75ca2c0467cdbc67c860eacd4bf564b9a927471888603 \ + --hash=sha256:0a87a249124666db2b795a0eb77cea5b8af8b148566616a681304826b4405869 \ + --hash=sha256:1537d13eefe4f48cb979362264851ee90d2bb7a221c8c350e9ceeda9f0392228 \ + --hash=sha256:168de1672bd73f7f3cdf0097084b4a71651ac35f7d99d0229ea8f223358d3a79 \ + --hash=sha256:1bfa50491d3222e3c2297b52c14e835ac52702ac8a91ec3fc1ff5201912623bb \ + --hash=sha256:1c0e706e0c3d1ec54d8243410e0fd5974b1c7b69db5c54cd9ae6a3a4b64fae33 \ + --hash=sha256:1d16f5023c1d9971f284231eb7036a25d4d123138a5adc4512c92a73d83b9a77 \ + --hash=sha256:2a21e2740c33347740dceb106b64b8a384e91da49aac7e8b3f2a25a9b33714b9 \ + --hash=sha256:2b76a5600047387c73c1b3d950e4ae3feffaefd442b20ba2f5fea773881d9bcd \ + --hash=sha256:2b90d9861673b0ba04651ade62e0fe568df71bbff8468657406848e9abf3650a \ + 
--hash=sha256:2d7715598c9034369cf739475ccc2db53a8ca895ff398fef6b9c597c30960ea8 \ + --hash=sha256:339f29542be968153afd6c6495c1222681c4b66b9a5a5573c11512378b7167c9 \ + --hash=sha256:38dd931f1124bd9781d3027a0cd6fb6f5a75b5c4ba4fe5540584105239b1f901 \ + --hash=sha256:39e1c7212dea1bbed0b075574808bc7c3192b324f54ea5d9ee522f6c35014ce7 \ + --hash=sha256:3abc0936c1efc59b510c7eab3799119a6ce8da94cea1f891854a6c3678d711f0 \ + --hash=sha256:3ced14fbec28fbabda7cb9f9094f2578c154c14f1a820a91c30fc8ee0bea1a0d \ + --hash=sha256:400a42b8d16206e45c8223cdaf5acc35839e10c35383b3fba3f43e7eb315c213 \ + --hash=sha256:468efdcbad7349a44aace693aed8324a01de180fcd4ef5513199eedb9b4341c8 \ + --hash=sha256:469c1a85017abf11d854fb16eca9a4093ebe1f2dacf777fed869d726f02b1389 \ + --hash=sha256:48baae8fbebf3b11660db6e51a55ff51516ed32edcd44a57f51ea9b373aca330 \ + --hash=sha256:4bf4b8513cea6e04ddee1b578ab306fb8bfa84b2f7e92ee3dbaf65652abb07d1 \ + --hash=sha256:4da6d881033a1bcb31bba152ea0925344127f0a98f86a6cf2ceb01cf6ecd29e2 \ + --hash=sha256:52d92df0eb5bba7f31f302a08174d628956d7216453da9d96498da9341179288 \ + --hash=sha256:54409fbefebe26274170c1c54e1852d310d84b85e405258aea6a78bec03b3eba \ + --hash=sha256:5598afad9e2f8e4fc9a456d281a9cc80315b0e18f5064437223dbfe67f49bded \ + --hash=sha256:5b0b2463906cc4119187dfaad493c48a7b2e17120946feb3eb7c2328c8cb4bca \ + --hash=sha256:5bdb223e7c3b9470f126bb77879ee2593fd79b28e1e8b11ad9edd3f866556109 \ + --hash=sha256:5cc3c59dd0cd67d0aa0481a43392848a60f1a81d12b38ce8d56d6a5d6c190de8 \ + --hash=sha256:5e45171fd046bbed2ce6ac485071cd0575d18ae98b5bbcf6533356e443ec47ea \ + --hash=sha256:6033cc6caaf056969af9ce372282a6ef2838559f2eadffe7ddb73bf65dcb27d6 \ + --hash=sha256:605fe35ebb482b7c8d5daadcf3d264dc5edd205a352d89ee3a983861ef73cda8 \ + --hash=sha256:6494120d0a0f46a1d7dfc7def55782782856bdd5acb2f6039fb1eafecea2c2c0 \ + --hash=sha256:668b02556d12046e7ce94ded5bfe0ad9989d26e6977ecc55941b9a1a4a49d7d5 \ + --hash=sha256:68e39d2c0beed53e5361caacd0de98f864b3532344edb79e27e62efba2262de5 \ + 
--hash=sha256:6c3f8e0c3a0744d843e3044ea76db8aa996a6cc7541693111acc2c9c30a05182 \ + --hash=sha256:6ceaf7c6b593bf62e0567fd16547727f502ed704352392708a57c65bfd2feb73 \ + --hash=sha256:6dac8a5be01d92707409feec61b98721b7b5c3e77fe7e9e5c7cfb9fdd28385af \ + --hash=sha256:6e38f66dd7fd07a9306ed37d6d02bc584b67e5945f2ddc98e5c78420cc66dbac \ + --hash=sha256:7236b26828e005435fb3013894eed6a40c6f9b1b11a48391a904eee693ded204 \ + --hash=sha256:737585b122fca03273bbf1f4e98909254dba6f8cd85f1cb566d6c890d0389277 \ + --hash=sha256:764032f2222d70a130445fd332cf45d46d8226f4b3a7bf8abc314aa93d5a8212 \ + --hash=sha256:76503a0edaf3d1557518127511e69e5d9fa37b6ff15598b0d9d9c2db18b08a41 \ + --hash=sha256:83538638a788b7b4a0b02de0eedcf0e71ae27474b031276e4c8ca88285281a2e \ + --hash=sha256:8767cae1474f8102ec3d362976f80c8dd4eafd4109c6072adee0a15e37ba919c \ + --hash=sha256:87a8ece3e893f45354395c6b9dc0479744c1c8c6ee4471b60945d96c9b5ce6c2 \ + --hash=sha256:8b88390a5e31572e05e8eab476ed3176cc3d2f9622ccc059398ffdb02aaefec4 \ + --hash=sha256:90d7af678056c7889d86821344d79fec3932a6a1480ebba3d644cb29a3135348 \ + --hash=sha256:98148ecaa7836f76ed33429e84a23253ac00acbad90c62b8b4ad0f61de31da2b \ + --hash=sha256:9aabc6098ef00e158598489db5a8b9e12d57a55ea5a4ec35ba3b527dfb88d16e \ + --hash=sha256:9ae4b19cab270fae77d7f944d56bbb308c9886d9577891b347a8deea75563995 \ + --hash=sha256:9b4039cd40335f66e55a8bee314b6a795f169fb02d70215d482023ec74613371 \ + --hash=sha256:9fc1a6c78197eff8b4d125bb98410b661e732f3ec563c03264d2d7378cf9e613 \ + --hash=sha256:a40f1d985047fe4654a1afb4702cbe0daeacde3868d52be9e4652615d387e05b \ + --hash=sha256:a459b7ff3d802792254d6fc6a622e53ca9cf9f002ed79db7e4dee536b2e20e5d \ + --hash=sha256:a4f733882b67407d4b667eafd61fce86e8e204b158258cc1d0cb0843f6bb4708 \ + --hash=sha256:a56a35e2e0b7eda39957ccd33059b79bb2fc57f54c501a917d1092c895f56d08 \ + --hash=sha256:a5c3a32af789b0ec413a606c99b55579abbcb6c86220610a5c5041da8688e7ca \ + --hash=sha256:a5d2776c7cd6a338cd9338fb50f2a38a7ca3e16250b40ab2d0c41eb1697ebc12 \ + 
--hash=sha256:a816f732f695261798a8a0fc1e0232a3638933b8ddfc574c00f9ef70d9f34cb8 \ + --hash=sha256:a9d559775a95aee0ff06c0aaac638691619d6342b7cde85c62ad228804f82829 \ + --hash=sha256:ac9d91b4d9c306e66a1abd224524fada07684a57f7da72a675e4b8bee9302b38 \ + --hash=sha256:ae340c41024b9be566f600f364c8d286217f2975fd765fb3fb4dd6dfbdbec825 \ + --hash=sha256:aeb60452d5b6150075974bc36e1cc74a46bd4b125cd5e72a86a04f4d6abf4e67 \ + --hash=sha256:aee6c4e8f670ea685345ce4ca01c574a52e0a4318af2b8cdd563de9567731056 \ + --hash=sha256:b027b53adb1df11923753d85587e3ab611fe70bc69596e9eb3269acab809c376 \ + --hash=sha256:b0adbe8f33f57f2b6bfa8a2ea18f3e4ed91676503673f70f796bfbd06a1a2214 \ + --hash=sha256:b30dcfbc5ab2fc932a723a39c2cb52d4f5c8b1705aa05a0bae23f28f70e06982 \ + --hash=sha256:b385fc7fc7b0811c3fcac4b0a35e5606eca693efba0d1446623ef0158a078034 \ + --hash=sha256:b4e5e9d1f84bbc01bf6a32a4704920c72e37d9090b3e0e29bd1574d06b3249f1 \ + --hash=sha256:b50ad622d8a71c8b72582dc84a990f3f079775edc1bcf0f43ed59bb2277fca2f \ + --hash=sha256:b544a1a78e0812134572cc13f5ee330bfb6bfe6dda58d2e26c20557bb0e0cec9 \ + --hash=sha256:b8472151e6f7ae90d7fd231a1ac16d2e628b93ce20d0f8063da25bd8bfdeb9e5 \ + --hash=sha256:b868b7fc24dd8ab4762b59a533bdbd096ebba7eabc853c7f78af8edce46d1390 \ + --hash=sha256:b8eee5d25efee64e172ed0d60ebcf6bca92b0b26a7fd048bb946b32fb90dbdc0 \ + --hash=sha256:bae7f07731c6c285b87111c7d5c5efa65f8b48016a98bcc57eebc24a3c7d854d \ + --hash=sha256:beb0f7f8371d933072e9bdc00c6df7eb5fdf76b93f08bfe73094f60c3f011f57 \ + --hash=sha256:c2676e2a934e046200faf0dc26ffa48c4989c3561c9bb97832e79969a41b2afe \ + --hash=sha256:c77113fbdbd7ca5de72dd3b7d113856609a1b878f6164de09dd95d12e6a51de2 \ + --hash=sha256:c85110f536e59fe19ea4b002d04228f57f55462add1630a0785cd6ec62e70415 \ + --hash=sha256:c9f8827cd7a84f5344779754ebb633bca71c470e028f92ecc959e666ef5c5e3c \ + --hash=sha256:cb62c82a2518b8446be1cc5eb4319e282776bf96fdb2964e81ff2c15d632248b \ + --hash=sha256:d5c711c8ca8d5767ed8ecd5fb5602c12eaf8fb256a5f4308ae36f2dc79e6f853 \ + 
--hash=sha256:d851b7ff732ebc9d823de3c7cc95a5ed4261a0226acd46861a18369ac9568f36 \ + --hash=sha256:e2a917ab420cd88b040ec85b5abc1244ab82b34d56461e2ffff58e0c7d018bae \ + --hash=sha256:e3215b43632a23b5b99165097949ce51dd093ab33d410bcf8aa901cdbc64d9cd \ + --hash=sha256:e71386f89dc2db805b4c9518dee6d81abddb8e79e4d9313cecdb702c924b8187 \ + --hash=sha256:f34b39057956305935c71f51a0860709b6124c92281dc03841587dd45a86322c \ + --hash=sha256:f44715d6a3313d614ff7550e52ecff67a283776909d960f338701b57e6013542 \ + --hash=sha256:f74bfa9f1b91718d6664d4708d092f7d44e2f0f825a5fab82819d43d41e0302d \ + --hash=sha256:f76fcf2867d19259b53680c08314435b46f632d20a4d7b9f0ccbb5dd3e925e79 \ + --hash=sha256:fa4842977924209ae653e856238a30b1c68e579ecde5cf1c16c4de471b35cec7 \ + --hash=sha256:fc8d3edbc9f32da930da6ea33d43ce0c3239e6b2018a77907fbf4e9836bd6def + # via feast (setup.py) +hpack==4.1.0 \ + --hash=sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496 \ + --hash=sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca + # via h2 +httpcore==1.0.9 \ + --hash=sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55 \ + --hash=sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8 + # via httpx +httptools==0.7.1 \ + --hash=sha256:04c6c0e6c5fb0739c5b8a9eb046d298650a0ff38cf42537fc372b28dc7e4472c \ + --hash=sha256:0d92b10dbf0b3da4823cde6a96d18e6ae358a9daa741c71448975f6a2c339cad \ + --hash=sha256:0e68b8582f4ea9166be62926077a3334064d422cf08ab87d8b74664f8e9058e1 \ + --hash=sha256:11d01b0ff1fe02c4c32d60af61a4d613b74fad069e47e06e9067758c01e9ac78 \ + --hash=sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb \ + --hash=sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03 \ + --hash=sha256:322d00c2068d125bd570f7bf78b2d367dad02b919d8581d7476d8b75b294e3e6 \ + --hash=sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df \ + 
--hash=sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5 \ + --hash=sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321 \ + --hash=sha256:44c8f4347d4b31269c8a9205d8a5ee2df5322b09bbbd30f8f862185bb6b05346 \ + --hash=sha256:465275d76db4d554918aba40bf1cbebe324670f3dfc979eaffaa5d108e2ed650 \ + --hash=sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657 \ + --hash=sha256:49794f9250188a57fa73c706b46cb21a313edb00d337ca4ce1a011fe3c760b28 \ + --hash=sha256:5ddbd045cfcb073db2449563dd479057f2c2b681ebc232380e63ef15edc9c023 \ + --hash=sha256:601b7628de7504077dd3dcb3791c6b8694bbd967148a6d1f01806509254fb1ca \ + --hash=sha256:654968cb6b6c77e37b832a9be3d3ecabb243bbe7a0b8f65fbc5b6b04c8fcabed \ + --hash=sha256:69d4f9705c405ae3ee83d6a12283dc9feba8cc6aaec671b412917e644ab4fa66 \ + --hash=sha256:6babce6cfa2a99545c60bfef8bee0cc0545413cb0018f617c8059a30ad985de3 \ + --hash=sha256:7347714368fb2b335e9063bc2b96f2f87a9ceffcd9758ac295f8bbcd3ffbc0ca \ + --hash=sha256:7aea2e3c3953521c3c51106ee11487a910d45586e351202474d45472db7d72d3 \ + --hash=sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2 \ + --hash=sha256:84d86c1e5afdc479a6fdabf570be0d3eb791df0ae727e8dbc0259ed1249998d4 \ + --hash=sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70 \ + --hash=sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9 \ + --hash=sha256:ac50afa68945df63ec7a2707c506bd02239272288add34539a2ef527254626a4 \ + --hash=sha256:aeefa0648362bb97a7d6b5ff770bfb774930a327d7f65f8208394856862de517 \ + --hash=sha256:b580968316348b474b020edf3988eecd5d6eec4634ee6561e72ae3a2a0e00a8a \ + --hash=sha256:c08fe65728b8d70b6923ce31e3956f859d5e1e8548e6f22ec520a962c6757270 \ + --hash=sha256:c8c751014e13d88d2be5f5f14fc8b89612fcfa92a9cc480f2bc1598357a23a05 \ + --hash=sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e \ + --hash=sha256:cbf8317bfccf0fed3b5680c559d3459cccf1abe9039bfa159e62e391c7270568 \ + 
--hash=sha256:cfabda2a5bb85aa2a904ce06d974a3f30fb36cc63d7feaddec05d2050acede96 \ + --hash=sha256:d169162803a24425eb5e4d51d79cbf429fd7a491b9e570a55f495ea55b26f0bf \ + --hash=sha256:d496e2f5245319da9d764296e86c5bb6fcf0cf7a8806d3d000717a889c8c0b7b \ + --hash=sha256:de987bb4e7ac95b99b805b99e0aae0ad51ae61df4263459d36e07cf4052d8b3a \ + --hash=sha256:df091cf961a3be783d6aebae963cc9b71e00d57fa6f149025075217bc6a55a7b \ + --hash=sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c \ + --hash=sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274 \ + --hash=sha256:f084813239e1eb403ddacd06a30de3d3e09a9b76e7894dcda2b22f8a726e9c60 \ + --hash=sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5 \ + --hash=sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec \ + --hash=sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362 + # via uvicorn +httpx[http2]==0.27.2 \ + --hash=sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0 \ + --hash=sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2 + # via + # feast (setup.py) + # fastapi-mcp + # jupyterlab + # mcp + # python-keycloak + # qdrant-client +httpx-sse==0.4.3 \ + --hash=sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc \ + --hash=sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d + # via mcp +huggingface-hub==0.36.0 \ + --hash=sha256:47b3f0e2539c39bf5cde015d63b72ec49baff67b6931c3d97f3f84532e2b8d25 \ + --hash=sha256:7bcc9ad17d5b3f07b57c78e79d527102d08313caa278a641993acddcb894548d + # via + # accelerate + # datasets + # docling + # docling-ibm-models + # timm + # tokenizers + # transformers +hyperframe==6.1.0 \ + --hash=sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5 \ + --hash=sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08 + # via h2 +ibis-framework[duckdb, mssql]==9.5.0 \ + 
--hash=sha256:145fe30d94f111cff332580c275ce77725c5ff7086eede93af0b371649d009c0 \ + --hash=sha256:1c8a29277e63ee0dfc289bc8f550164b5e3bdaec1b76b62436c37d331bb4ef84 + # via + # feast (setup.py) + # ibis-substrait +ibis-substrait==4.0.1 \ + --hash=sha256:107ca49383a3cca2fdc88f67ea2f0172620c16fa8f39c9c52305af85dd6180b4 \ + --hash=sha256:614810a173d096fbc49d87a9b419e2162a3c25d8efda1a4d57a389ce56b9041f + # via feast (setup.py) +identify==2.6.16 \ + --hash=sha256:391ee4d77741d994189522896270b787aed8670389bfd60f326d677d64a6dfb0 \ + --hash=sha256:846857203b5511bbe94d5a352a48ef2359532bc8f6727b5544077a0dcfb24980 + # via pre-commit +idna==3.11 \ + --hash=sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea \ + --hash=sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902 + # via + # anyio + # httpx + # jsonschema + # requests + # snowflake-connector-python + # yarl +ikvpy==0.0.36 \ + --hash=sha256:b0edf6fb6482877940f6c2b5d59a7fabe30cb554b13b88ca52805f043cfda5b3 \ + --hash=sha256:c0ce7dfb61456c283c9ba2cdeb68b3647f245c3905bca652ca2a1068804939d1 + # via feast (setup.py) +imageio==2.37.2 \ + --hash=sha256:0212ef2727ac9caa5ca4b2c75ae89454312f440a756fcfc8ef1993e718f50f8a \ + --hash=sha256:ad9adfb20335d718c03de457358ed69f141021a333c40a53e57273d8a5bd0b9b + # via scikit-image +imagesize==1.4.1 \ + --hash=sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b \ + --hash=sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a + # via sphinx +importlib-metadata==8.7.1 \ + --hash=sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb \ + --hash=sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151 + # via + # dask + # opentelemetry-api +importlib-resources==6.5.2 \ + --hash=sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c \ + --hash=sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec + # via happybase +iniconfig==2.3.0 \ + 
--hash=sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730 \ + --hash=sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12 + # via pytest +invoke==2.2.1 \ + --hash=sha256:2413bc441b376e5cd3f55bb5d364f973ad8bdd7bf87e53c79de3c11bf3feecc8 \ + --hash=sha256:515bf49b4a48932b79b024590348da22f39c4942dff991ad1fb8b8baea1be707 + # via paramiko +ipykernel==7.1.0 \ + --hash=sha256:58a3fc88533d5930c3546dc7eac66c6d288acde4f801e2001e65edc5dc9cf0db \ + --hash=sha256:763b5ec6c5b7776f6a8d7ce09b267693b4e5ce75cb50ae696aaefb3c85e1ea4c + # via jupyterlab +ipython==9.9.0 \ + --hash=sha256:48fbed1b2de5e2c7177eefa144aba7fcb82dac514f09b57e2ac9da34ddb54220 \ + --hash=sha256:b457fe9165df2b84e8ec909a97abcf2ed88f565970efba16b1f7229c283d252b + # via + # great-expectations + # ipykernel + # ipywidgets +ipython-pygments-lexers==1.1.1 \ + --hash=sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81 \ + --hash=sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c + # via ipython +ipywidgets==8.1.2 \ + --hash=sha256:bbe43850d79fb5e906b14801d6c01402857996864d1e5b6fa62dd2ee35559f60 \ + --hash=sha256:d0b9b41e49bae926a866e613a39b0f0097745d2b9f1f3dd406641b4a57ec42c9 + # via + # codeflare-sdk + # great-expectations +isodate==0.7.2 \ + --hash=sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15 \ + --hash=sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6 + # via azure-storage-blob +isoduration==20.11.0 \ + --hash=sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9 \ + --hash=sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042 + # via jsonschema +jedi==0.19.2 \ + --hash=sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0 \ + --hash=sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9 + # via ipython +jinja2==3.1.6 \ + --hash=sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d \ + 
--hash=sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67 + # via + # feast (setup.py) + # altair + # great-expectations + # jupyter-server + # jupyterlab + # jupyterlab-server + # moto + # nbconvert + # poetry-dynamic-versioning + # sphinx + # torch +jmespath==1.0.1 \ + --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \ + --hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe + # via + # aiobotocore + # boto3 + # botocore +joblib==1.5.3 \ + --hash=sha256:5fc3c5039fc5ca8c0276333a188bbd59d6b7ab37fe6632daa76bc7f9ec18e713 \ + --hash=sha256:8561a3269e6801106863fd0d6d84bb737be9e7631e33aaed3fb9ce5953688da3 + # via scikit-learn +json5==0.13.0 \ + --hash=sha256:9a08e1dd65f6a4d4c6fa82d216cf2477349ec2346a38fd70cc11d2557499fbcc \ + --hash=sha256:b1edf8d487721c0bf64d83c28e91280781f6e21f4a797d3261c7c828d4c165bf + # via jupyterlab-server +jsonlines==4.0.0 \ + --hash=sha256:0c6d2c09117550c089995247f605ae4cf77dd1533041d366351f6f298822ea74 \ + --hash=sha256:185b334ff2ca5a91362993f42e83588a360cf95ce4b71a73548502bda52a7c55 + # via docling-ibm-models +jsonpatch==1.33 \ + --hash=sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade \ + --hash=sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c + # via great-expectations +jsonpointer==3.0.0 \ + --hash=sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942 \ + --hash=sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef + # via + # jsonpatch + # jsonschema +jsonref==1.1.0 \ + --hash=sha256:32fe8e1d85af0fdefbebce950af85590b22b60f9e95443176adbde4e1ecea552 \ + --hash=sha256:590dc7773df6c21cbf948b5dac07a72a251db28b0238ceecce0a2abfa8ec30a9 + # via docling-core +jsonschema[format-nongpl]==4.26.0 \ + --hash=sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326 \ + --hash=sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce + # via + # feast (setup.py) + # altair 
+ # docling-core + # great-expectations + # jupyter-events + # jupyterlab-server + # mcp + # nbformat + # ray +jsonschema-specifications==2025.9.1 \ + --hash=sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe \ + --hash=sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d + # via jsonschema +jupyter-client==8.8.0 \ + --hash=sha256:d556811419a4f2d96c869af34e854e3f059b7cc2d6d01a9cd9c85c267691be3e \ + --hash=sha256:f93a5b99c5e23a507b773d3a1136bd6e16c67883ccdbd9a829b0bbdb98cd7d7a + # via + # ipykernel + # jupyter-server + # nbclient +jupyter-core==5.9.1 \ + --hash=sha256:4d09aaff303b9566c3ce657f580bd089ff5c91f5f89cf7d8846c3cdf465b5508 \ + --hash=sha256:ebf87fdc6073d142e114c72c9e29a9d7ca03fad818c5d300ce2adc1fb0743407 + # via + # ipykernel + # jupyter-client + # jupyter-server + # jupyterlab + # nbclient + # nbconvert + # nbformat +jupyter-events==0.12.0 \ + --hash=sha256:6464b2fa5ad10451c3d35fabc75eab39556ae1e2853ad0c0cc31b656731a97fb \ + --hash=sha256:fc3fce98865f6784c9cd0a56a20644fc6098f21c8c33834a8d9fe383c17e554b + # via jupyter-server +jupyter-lsp==2.3.0 \ + --hash=sha256:458aa59339dc868fb784d73364f17dbce8836e906cd75fd471a325cba02e0245 \ + --hash=sha256:e914a3cb2addf48b1c7710914771aaf1819d46b2e5a79b0f917b5478ec93f34f + # via jupyterlab +jupyter-server==2.17.0 \ + --hash=sha256:c38ea898566964c888b4772ae1ed58eca84592e88251d2cfc4d171f81f7e99d5 \ + --hash=sha256:e8cb9c7db4251f51ed307e329b81b72ccf2056ff82d50524debde1ee1870e13f + # via + # jupyter-lsp + # jupyterlab + # jupyterlab-server + # notebook + # notebook-shim +jupyter-server-terminals==0.5.4 \ + --hash=sha256:55be353fc74a80bc7f3b20e6be50a55a61cd525626f578dcb66a5708e2007d14 \ + --hash=sha256:bbda128ed41d0be9020349f9f1f2a4ab9952a73ed5f5ac9f1419794761fb87f5 + # via jupyter-server +jupyterlab==4.5.2 \ + --hash=sha256:76466ebcfdb7a9bb7e2fbd6459c0e2c032ccf75be673634a84bee4b3e6b13ab6 \ + --hash=sha256:c80a6b9f6dace96a566d590c65ee2785f61e7cd4aac5b4d453dcc7d0d5e069b7 + # via 
notebook +jupyterlab-pygments==0.3.0 \ + --hash=sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d \ + --hash=sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780 + # via nbconvert +jupyterlab-server==2.28.0 \ + --hash=sha256:35baa81898b15f93573e2deca50d11ac0ae407ebb688299d3a5213265033712c \ + --hash=sha256:e4355b148fdcf34d312bbbc80f22467d6d20460e8b8736bf235577dd18506968 + # via + # jupyterlab + # notebook +jupyterlab-widgets==3.0.16 \ + --hash=sha256:423da05071d55cf27a9e602216d35a3a65a3e41cdf9c5d3b643b814ce38c19e0 \ + --hash=sha256:45fa36d9c6422cf2559198e4db481aa243c7a32d9926b500781c830c80f7ecf8 + # via ipywidgets +jwcrypto==1.5.6 \ + --hash=sha256:150d2b0ebbdb8f40b77f543fb44ffd2baeff48788be71f67f03566692fd55789 \ + --hash=sha256:771a87762a0c081ae6166958a954f80848820b2ab066937dc8b8379d65b1b039 + # via python-keycloak +kubernetes==35.0.0 \ + --hash=sha256:39e2b33b46e5834ef6c3985ebfe2047ab39135d41de51ce7641a7ca5b372a13d \ + --hash=sha256:3d00d344944239821458b9efd484d6df9f011da367ecb155dadf9513f05f09ee + # via + # feast (setup.py) + # codeflare-sdk +lark==1.3.1 \ + --hash=sha256:b426a7a6d6d53189d318f2b6236ab5d6429eaf09259f1ca33eb716eed10d2905 \ + --hash=sha256:c629b661023a014c37da873b4ff58a817398d12635d3bbb2c5a03be7fe5d1e12 + # via rfc3987-syntax +latex2mathml==3.78.1 \ + --hash=sha256:f089b6d75e85b937f99693c93e8c16c0804008672c3dd2a3d25affd36f238100 \ + --hash=sha256:f941db80bf41db33f31df87b304e8b588f8166b813b0257c11c98f7a9d0aac71 + # via docling-core +lazy-loader==0.4 \ + --hash=sha256:342aa8e14d543a154047afb4ba8ef17f5563baad3fc610d7b15b213b0f119efc \ + --hash=sha256:47c75182589b91a4e1a85a136c074285a5ad4d9f39c63e0d7fb76391c4574cd1 + # via scikit-image +locket==1.0.0 \ + --hash=sha256:5c0d4c052a8bbbf750e056a8e65ccd309086f4f0f18a2eac306a8dfa4112a632 \ + --hash=sha256:b6c819a722f7b6bd955b80781788e4a66a55628b858d347536b7e81325a3a5e3 + # via partd +lxml==5.4.0 \ + 
--hash=sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5 \ + --hash=sha256:073eb6dcdf1f587d9b88c8c93528b57eccda40209cf9be549d469b942b41d70b \ + --hash=sha256:09846782b1ef650b321484ad429217f5154da4d6e786636c38e434fa32e94e49 \ + --hash=sha256:0a01ce7d8479dce84fc03324e3b0c9c90b1ece9a9bb6a1b6c9025e7e4520e78c \ + --hash=sha256:0be91891bdb06ebe65122aa6bf3fc94489960cf7e03033c6f83a90863b23c58b \ + --hash=sha256:0cef4feae82709eed352cd7e97ae062ef6ae9c7b5dbe3663f104cd2c0e8d94ba \ + --hash=sha256:0e108352e203c7afd0eb91d782582f00a0b16a948d204d4dec8565024fafeea5 \ + --hash=sha256:0ea0252b51d296a75f6118ed0d8696888e7403408ad42345d7dfd0d1e93309a7 \ + --hash=sha256:0fce1294a0497edb034cb416ad3e77ecc89b313cff7adbee5334e4dc0d11f422 \ + --hash=sha256:1320091caa89805df7dcb9e908add28166113dcd062590668514dbd510798c88 \ + --hash=sha256:142accb3e4d1edae4b392bd165a9abdee8a3c432a2cca193df995bc3886249c8 \ + --hash=sha256:14479c2ad1cb08b62bb941ba8e0e05938524ee3c3114644df905d2331c76cd57 \ + --hash=sha256:151d6c40bc9db11e960619d2bf2ec5829f0aaffb10b41dcf6ad2ce0f3c0b2325 \ + --hash=sha256:15a665ad90054a3d4f397bc40f73948d48e36e4c09f9bcffc7d90c87410e478a \ + --hash=sha256:1a42b3a19346e5601d1b8296ff6ef3d76038058f311902edd574461e9c036982 \ + --hash=sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8 \ + --hash=sha256:1b717b00a71b901b4667226bba282dd462c42ccf618ade12f9ba3674e1fabc55 \ + --hash=sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2 \ + --hash=sha256:20e16c08254b9b6466526bc1828d9370ee6c0d60a4b64836bc3ac2917d1e16df \ + --hash=sha256:226046e386556a45ebc787871d6d2467b32c37ce76c2680f5c608e25823ffc84 \ + --hash=sha256:24974f774f3a78ac12b95e3a20ef0931795ff04dbb16db81a90c37f589819551 \ + --hash=sha256:24f6df5f24fc3385f622c0c9d63fe34604893bc1a5bdbb2dbf5870f85f9a404a \ + --hash=sha256:27a9ded0f0b52098ff89dd4c418325b987feed2ea5cc86e8860b0f844285d740 \ + --hash=sha256:29f451a4b614a7b5b6c2e043d7b64a15bd8304d7e767055e8ab68387a8cacf4e \ + 
--hash=sha256:2b31a3a77501d86d8ade128abb01082724c0dfd9524f542f2f07d693c9f1175f \ + --hash=sha256:2c62891b1ea3094bb12097822b3d44b93fc6c325f2043c4d2736a8ff09e65f60 \ + --hash=sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e \ + --hash=sha256:31e63621e073e04697c1b2d23fcb89991790eef370ec37ce4d5d469f40924ed6 \ + --hash=sha256:32697d2ea994e0db19c1df9e40275ffe84973e4232b5c274f47e7c1ec9763cdd \ + --hash=sha256:3a3178b4873df8ef9457a4875703488eb1622632a9cee6d76464b60e90adbfcd \ + --hash=sha256:3b9c2754cef6963f3408ab381ea55f47dabc6f78f4b8ebb0f0b25cf1ac1f7609 \ + --hash=sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20 \ + --hash=sha256:3e6d5557989cdc3ebb5302bbdc42b439733a841891762ded9514e74f60319ad6 \ + --hash=sha256:4025bf2884ac4370a3243c5aa8d66d3cb9e15d3ddd0af2d796eccc5f0244390e \ + --hash=sha256:4291d3c409a17febf817259cb37bc62cb7eb398bcc95c1356947e2871911ae61 \ + --hash=sha256:4329422de653cdb2b72afa39b0aa04252fca9071550044904b2e7036d9d97fe4 \ + --hash=sha256:43d549b876ce64aa18b2328faff70f5877f8c6dede415f80a2f799d31644d776 \ + --hash=sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779 \ + --hash=sha256:47fb24cc0f052f0576ea382872b3fc7e1f7e3028e53299ea751839418ade92a6 \ + --hash=sha256:48b4afaf38bf79109bb060d9016fad014a9a48fb244e11b94f74ae366a64d252 \ + --hash=sha256:497cab4d8254c2a90bf988f162ace2ddbfdd806fce3bda3f581b9d24c852e03c \ + --hash=sha256:4aa412a82e460571fad592d0f93ce9935a20090029ba08eca05c614f99b0cc92 \ + --hash=sha256:4b7ce10634113651d6f383aa712a194179dcd496bd8c41e191cec2099fa09de5 \ + --hash=sha256:4cd915c0fb1bed47b5e6d6edd424ac25856252f09120e3e8ba5154b6b921860e \ + --hash=sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f \ + --hash=sha256:4f5322cf38fe0e21c2d73901abf68e6329dc02a4994e483adbcf92b568a09a54 \ + --hash=sha256:50441c9de951a153c698b9b99992e806b71c1f36d14b154592580ff4a9d0d877 \ + --hash=sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e \ + 
--hash=sha256:53370c26500d22b45182f98847243efb518d268374a9570409d2e2276232fd37 \ + --hash=sha256:53d9469ab5460402c19553b56c3648746774ecd0681b1b27ea74d5d8a3ef5590 \ + --hash=sha256:56dbdbab0551532bb26c19c914848d7251d73edb507c3079d6805fa8bba5b706 \ + --hash=sha256:5a99d86351f9c15e4a901fc56404b485b1462039db59288b203f8c629260a142 \ + --hash=sha256:5cca36a194a4eb4e2ed6be36923d3cffd03dcdf477515dea687185506583d4c9 \ + --hash=sha256:5f11a1526ebd0dee85e7b1e39e39a0cc0d9d03fb527f56d8457f6df48a10dc0c \ + --hash=sha256:61c7bbf432f09ee44b1ccaa24896d21075e533cd01477966a5ff5a71d88b2f56 \ + --hash=sha256:639978bccb04c42677db43c79bdaa23785dc7f9b83bfd87570da8207872f1ce5 \ + --hash=sha256:63e7968ff83da2eb6fdda967483a7a023aa497d85ad8f05c3ad9b1f2e8c84987 \ + --hash=sha256:664cdc733bc87449fe781dbb1f309090966c11cc0c0cd7b84af956a02a8a4729 \ + --hash=sha256:67ed8a40665b84d161bae3181aa2763beea3747f748bca5874b4af4d75998f87 \ + --hash=sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7 \ + --hash=sha256:6854f8bd8a1536f8a1d9a3655e6354faa6406621cf857dc27b681b69860645c7 \ + --hash=sha256:696ea9e87442467819ac22394ca36cb3d01848dad1be6fac3fb612d3bd5a12cf \ + --hash=sha256:6ef80aeac414f33c24b3815ecd560cee272786c3adfa5f31316d8b349bfade28 \ + --hash=sha256:72ac9762a9f8ce74c9eed4a4e74306f2f18613a6b71fa065495a67ac227b3056 \ + --hash=sha256:75133890e40d229d6c5837b0312abbe5bac1c342452cf0e12523477cd3aa21e7 \ + --hash=sha256:7605c1c32c3d6e8c990dd28a0970a3cbbf1429d5b92279e37fda05fb0c92190e \ + --hash=sha256:773e27b62920199c6197130632c18fb7ead3257fce1ffb7d286912e56ddb79e0 \ + --hash=sha256:795f61bcaf8770e1b37eec24edf9771b307df3af74d1d6f27d812e15a9ff3872 \ + --hash=sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079 \ + --hash=sha256:7a62cc23d754bb449d63ff35334acc9f5c02e6dae830d78dab4dd12b78a524f4 \ + --hash=sha256:7be701c24e7f843e6788353c055d806e8bd8466b52907bafe5d13ec6a6dbaecd \ + --hash=sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9 \ + 
--hash=sha256:7ce1a171ec325192c6a636b64c94418e71a1964f56d002cc28122fceff0b6121 \ + --hash=sha256:891f7f991a68d20c75cb13c5c9142b2a3f9eb161f1f12a9489c82172d1f133c0 \ + --hash=sha256:8f82125bc7203c5ae8633a7d5d20bcfdff0ba33e436e4ab0abc026a53a8960b7 \ + --hash=sha256:91505d3ddebf268bb1588eb0f63821f738d20e1e7f05d3c647a5ca900288760b \ + --hash=sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d \ + --hash=sha256:9454b8d8200ec99a224df8854786262b1bd6461f4280064c807303c642c05e76 \ + --hash=sha256:9459e6892f59ecea2e2584ee1058f5d8f629446eab52ba2305ae13a32a059530 \ + --hash=sha256:9776af1aad5a4b4a1317242ee2bea51da54b2a7b7b48674be736d463c999f37d \ + --hash=sha256:97dac543661e84a284502e0cf8a67b5c711b0ad5fb661d1bd505c02f8cf716d7 \ + --hash=sha256:98a3912194c079ef37e716ed228ae0dcb960992100461b704aea4e93af6b0bb9 \ + --hash=sha256:9b4a3bd174cc9cdaa1afbc4620c049038b441d6ba07629d89a83b408e54c35cd \ + --hash=sha256:9c886b481aefdf818ad44846145f6eaf373a20d200b5ce1a5c8e1bc2d8745410 \ + --hash=sha256:9ceaf423b50ecfc23ca00b7f50b64baba85fb3fb91c53e2c9d00bc86150c7e40 \ + --hash=sha256:a11a96c3b3f7551c8a8109aa65e8594e551d5a84c76bf950da33d0fb6dfafab7 \ + --hash=sha256:a3bcdde35d82ff385f4ede021df801b5c4a5bcdfb61ea87caabcebfc4945dc1b \ + --hash=sha256:a7fb111eef4d05909b82152721a59c1b14d0f365e2be4c742a473c5d7372f4f5 \ + --hash=sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5 \ + --hash=sha256:a8c9b7f16b63e65bbba889acb436a1034a82d34fa09752d754f88d708eca80e1 \ + --hash=sha256:a8ef956fce64c8551221f395ba21d0724fed6b9b6242ca4f2f7beb4ce2f41997 \ + --hash=sha256:ab339536aa798b1e17750733663d272038bf28069761d5be57cb4a9b0137b4f8 \ + --hash=sha256:ac7ba71f9561cd7d7b55e1ea5511543c0282e2b6450f122672a2694621d63b7e \ + --hash=sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc \ + --hash=sha256:aea7c06667b987787c7d1f5e1dfcd70419b711cdb47d6b4bb4ad4b76777a0563 \ + --hash=sha256:aefe1a7cb852fa61150fcb21a8c8fcea7b58c4cb11fbe59c97a0a4b31cae3c8c \ + 
--hash=sha256:b0989737a3ba6cf2a16efb857fb0dfa20bc5c542737fddb6d893fde48be45433 \ + --hash=sha256:b108134b9667bcd71236c5a02aad5ddd073e372fb5d48ea74853e009fe38acb6 \ + --hash=sha256:b12cb6527599808ada9eb2cd6e0e7d3d8f13fe7bbb01c6311255a15ded4c7ab4 \ + --hash=sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4 \ + --hash=sha256:b67319b4aef1a6c56576ff544b67a2a6fbd7eaee485b241cabf53115e8908b8f \ + --hash=sha256:b7c86884ad23d61b025989d99bfdd92a7351de956e01c61307cb87035960bcb1 \ + --hash=sha256:b92b69441d1bd39f4940f9eadfa417a25862242ca2c396b406f9272ef09cdcaa \ + --hash=sha256:bcb7a1096b4b6b24ce1ac24d4942ad98f983cd3810f9711bcd0293f43a9d8b9f \ + --hash=sha256:bda3ea44c39eb74e2488297bb39d47186ed01342f0022c8ff407c250ac3f498e \ + --hash=sha256:be2ba4c3c5b7900246a8f866580700ef0d538f2ca32535e991027bdaba944063 \ + --hash=sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4 \ + --hash=sha256:c5d32f5284012deaccd37da1e2cd42f081feaa76981f0eaa474351b68df813c5 \ + --hash=sha256:c6364038c519dffdbe07e3cf42e6a7f8b90c275d4d1617a69bb59734c1a2d571 \ + --hash=sha256:c70e93fba207106cb16bf852e421c37bbded92acd5964390aad07cb50d60f5cf \ + --hash=sha256:ca755eebf0d9e62d6cb013f1261e510317a41bf4650f22963474a663fdfe02aa \ + --hash=sha256:cccd007d5c95279e529c146d095f1d39ac05139de26c098166c4beb9374b0f4d \ + --hash=sha256:ce31158630a6ac85bddd6b830cffd46085ff90498b397bd0a259f59d27a12188 \ + --hash=sha256:ce9c671845de9699904b1e9df95acfe8dfc183f2310f163cdaa91a3535af95de \ + --hash=sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd \ + --hash=sha256:d2ed1b3cb9ff1c10e6e8b00941bb2e5bb568b307bfc6b17dffbbe8be5eecba86 \ + --hash=sha256:d5663bc1b471c79f5c833cffbc9b87d7bf13f87e055a5c86c363ccd2348d7e82 \ + --hash=sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f \ + --hash=sha256:dc0af80267edc68adf85f2a5d9be1cdf062f973db6790c1d065e45025fa26140 \ + --hash=sha256:de5b4e1088523e2b6f730d0509a9a813355b7f5659d70eb4f319c76beea2e250 \ + 
--hash=sha256:de6f6bb8a7840c7bf216fb83eec4e2f79f7325eca8858167b68708b929ab2172 \ + --hash=sha256:df53330a3bff250f10472ce96a9af28628ff1f4efc51ccba351a8820bca2a8ba \ + --hash=sha256:e094ec83694b59d263802ed03a8384594fcce477ce484b0cbcd0008a211ca751 \ + --hash=sha256:e794f698ae4c5084414efea0f5cc9f4ac562ec02d66e1484ff822ef97c2cadff \ + --hash=sha256:e7bc6df34d42322c5289e37e9971d6ed114e3776b45fa879f734bded9d1fea9c \ + --hash=sha256:eaf24066ad0b30917186420d51e2e3edf4b0e2ea68d8cd885b14dc8afdcf6556 \ + --hash=sha256:ecf4c4b83f1ab3d5a7ace10bafcb6f11df6156857a3c418244cef41ca9fa3e44 \ + --hash=sha256:ef5a7178fcc73b7d8c07229e89f8eb45b2908a9238eb90dcfc46571ccf0383b8 \ + --hash=sha256:f5cb182f6396706dc6cc1896dd02b1c889d644c081b0cdec38747573db88a7d7 \ + --hash=sha256:fa0e294046de09acd6146be0ed6727d1f42ded4ce3ea1e9a19c11b6774eea27c \ + --hash=sha256:fb54f7c6bafaa808f27166569b1511fc42701a7713858dddc08afdde9746849e \ + --hash=sha256:fd3be6481ef54b8cfd0e1e953323b7aa9d9789b94842d0e5b142ef4bb7999539 + # via + # docling + # python-docx + # python-pptx +lz4==4.4.5 \ + --hash=sha256:0846e6e78f374156ccf21c631de80967e03cc3c01c373c665789dc0c5431e7fc \ + --hash=sha256:0bba042ec5a61fa77c7e380351a61cb768277801240249841defd2ff0a10742f \ + --hash=sha256:12233624f1bc2cebc414f9efb3113a03e89acce3ab6f72035577bc61b270d24d \ + --hash=sha256:13254bd78fef50105872989a2dc3418ff09aefc7d0765528adc21646a7288294 \ + --hash=sha256:15551280f5656d2206b9b43262799c89b25a25460416ec554075a8dc568e4397 \ + --hash=sha256:1dd4d91d25937c2441b9fc0f4af01704a2d09f30a38c5798bc1d1b5a15ec9581 \ + --hash=sha256:214e37cfe270948ea7eb777229e211c601a3e0875541c1035ab408fbceaddf50 \ + --hash=sha256:216ca0c6c90719731c64f41cfbd6f27a736d7e50a10b70fad2a9c9b262ec923d \ + --hash=sha256:24092635f47538b392c4eaeff14c7270d2c8e806bf4be2a6446a378591c5e69e \ + --hash=sha256:28ccaeb7c5222454cd5f60fcd152564205bcb801bd80e125949d2dfbadc76bbd \ + --hash=sha256:2a2b7504d2dffed3fd19d4085fe1cc30cf221263fd01030819bdd8d2bb101cf1 \ + 
--hash=sha256:2c3ea562c3af274264444819ae9b14dbbf1ab070aff214a05e97db6896c7597e \ + --hash=sha256:33dd86cea8375d8e5dd001e41f321d0a4b1eb7985f39be1b6a4f466cd480b8a7 \ + --hash=sha256:3b84a42da86e8ad8537aabef062e7f661f4a877d1c74d65606c49d835d36d668 \ + --hash=sha256:451039b609b9a88a934800b5fc6ee401c89ad9c175abf2f4d9f8b2e4ef1afc64 \ + --hash=sha256:533298d208b58b651662dd972f52d807d48915176e5b032fb4f8c3b6f5fe535c \ + --hash=sha256:5f0b9e53c1e82e88c10d7c180069363980136b9d7a8306c4dca4f760d60c39f0 \ + --hash=sha256:609a69c68e7cfcfa9d894dc06be13f2e00761485b62df4e2472f1b66f7b405fb \ + --hash=sha256:61d0ee03e6c616f4a8b69987d03d514e8896c8b1b7cc7598ad029e5c6aedfd43 \ + --hash=sha256:66c5de72bf4988e1b284ebdd6524c4bead2c507a2d7f172201572bac6f593901 \ + --hash=sha256:67531da3b62f49c939e09d56492baf397175ff39926d0bd5bd2d191ac2bff95f \ + --hash=sha256:6bb05416444fafea170b07181bc70640975ecc2a8c92b3b658c554119519716c \ + --hash=sha256:6d0bf51e7745484d2092b3a51ae6eb58c3bd3ce0300cf2b2c14f76c536d5697a \ + --hash=sha256:713a777de88a73425cf08eb11f742cd2c98628e79a8673d6a52e3c5f0c116f33 \ + --hash=sha256:75419bb1a559af00250b8f1360d508444e80ed4b26d9d40ec5b09fe7875cb989 \ + --hash=sha256:7b62f94b523c251cf32aa4ab555f14d39bd1a9df385b72443fd76d7c7fb051f5 \ + --hash=sha256:7c4e7c44b6a31de77d4dc9772b7d2561937c9588a734681f70ec547cfbc51ecd \ + --hash=sha256:7dc1e1e2dbd872f8fae529acd5e4839efd0b141eaa8ae7ce835a9fe80fbad89f \ + --hash=sha256:83bc23ef65b6ae44f3287c38cbf82c269e2e96a26e560aa551735883388dcc4b \ + --hash=sha256:8a842ead8ca7c0ee2f396ca5d878c4c40439a527ebad2b996b0444f0074ed004 \ + --hash=sha256:92159782a4502858a21e0079d77cdcaade23e8a5d252ddf46b0652604300d7be \ + --hash=sha256:9b5e6abca8df9f9bdc5c3085f33ff32cdc86ed04c65e0355506d46a5ac19b6e9 \ + --hash=sha256:a1acbbba9edbcbb982bc2cac5e7108f0f553aebac1040fbec67a011a45afa1ba \ + --hash=sha256:a2af2897333b421360fdcce895c6f6281dc3fab018d19d341cf64d043fc8d90d \ + --hash=sha256:a482eecc0b7829c89b498fda883dbd50e98153a116de612ee7c111c8bcf82d1d \ + 
--hash=sha256:a5f197ffa6fc0e93207b0af71b302e0a2f6f29982e5de0fbda61606dd3a55832 \ + --hash=sha256:a88cbb729cc333334ccfb52f070463c21560fca63afcf636a9f160a55fac3301 \ + --hash=sha256:b424df1076e40d4e884cfcc4c77d815368b7fb9ebcd7e634f937725cd9a8a72a \ + --hash=sha256:bd85d118316b53ed73956435bee1997bd06cc66dd2fa74073e3b1322bd520a67 \ + --hash=sha256:c1cfa663468a189dab510ab231aad030970593f997746d7a324d40104db0d0a9 \ + --hash=sha256:c216b6d5275fc060c6280936bb3bb0e0be6126afb08abccde27eed23dead135f \ + --hash=sha256:c8e71b14938082ebaf78144f3b3917ac715f72d14c076f384a4c062df96f9df6 \ + --hash=sha256:cdd4bdcbaf35056086d910d219106f6a04e1ab0daa40ec0eeef1626c27d0fddb \ + --hash=sha256:d221fa421b389ab2345640a508db57da36947a437dfe31aeddb8d5c7b646c22d \ + --hash=sha256:d64141085864918392c3159cdad15b102a620a67975c786777874e1e90ef15ce \ + --hash=sha256:d6da84a26b3aa5da13a62e4b89ab36a396e9327de8cd48b436a3467077f8ccd4 \ + --hash=sha256:d994b87abaa7a88ceb7a37c90f547b8284ff9da694e6afcfaa8568d739faf3f7 \ + --hash=sha256:da68497f78953017deb20edff0dba95641cc86e7423dfadf7c0264e1ac60dc22 \ + --hash=sha256:daffa4807ef54b927451208f5f85750c545a4abbff03d740835fc444cd97f758 \ + --hash=sha256:df5aa4cead2044bab83e0ebae56e0944cc7fcc1505c7787e9e1057d6d549897e \ + --hash=sha256:e099ddfaa88f59dd8d36c8a3c66bd982b4984edf127eb18e30bb49bdba68ce67 \ + --hash=sha256:e64e61f29cf95afb43549063d8433b46352baf0c8a70aa45e2585618fcf59d86 \ + --hash=sha256:e928ec2d84dc8d13285b4a9288fd6246c5cde4f5f935b479f50d986911f085e3 \ + --hash=sha256:f32b9e65d70f3684532358255dc053f143835c5f5991e28a5ac4c93ce94b9ea7 \ + --hash=sha256:f6538aaaedd091d6e5abdaa19b99e6e82697d67518f114721b5248709b639fad \ + --hash=sha256:f9b8bde9909a010c75b3aea58ec3910393b758f3c219beed67063693df854db0 \ + --hash=sha256:ff1b50aeeec64df5603f17984e4b5be6166058dcf8f1e26a3da40d7a0f6ab547 + # via + # clickhouse-connect + # trino +makefun==1.16.0 \ + --hash=sha256:43baa4c3e7ae2b17de9ceac20b669e9a67ceeadff31581007cca20a07bbe42c4 \ + 
--hash=sha256:e14601831570bff1f6d7e68828bcd30d2f5856f24bad5de0ccb22921ceebc947 + # via great-expectations +markdown-it-py==4.0.0 \ + --hash=sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147 \ + --hash=sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3 + # via rich +marko==2.2.2 \ + --hash=sha256:6940308e655f63733ca518c47a68ec9510279dbb916c83616e4c4b5829f052e8 \ + --hash=sha256:f064ae8c10416285ad1d96048dc11e98ef04e662d3342ae416f662b70aa7959e + # via docling +markupsafe==3.0.3 \ + --hash=sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f \ + --hash=sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a \ + --hash=sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf \ + --hash=sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19 \ + --hash=sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf \ + --hash=sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c \ + --hash=sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175 \ + --hash=sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219 \ + --hash=sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb \ + --hash=sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6 \ + --hash=sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab \ + --hash=sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26 \ + --hash=sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1 \ + --hash=sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce \ + --hash=sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218 \ + --hash=sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634 \ + --hash=sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695 \ + 
--hash=sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad \ + --hash=sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73 \ + --hash=sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c \ + --hash=sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe \ + --hash=sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa \ + --hash=sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559 \ + --hash=sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa \ + --hash=sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37 \ + --hash=sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758 \ + --hash=sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f \ + --hash=sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8 \ + --hash=sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d \ + --hash=sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c \ + --hash=sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97 \ + --hash=sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a \ + --hash=sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19 \ + --hash=sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9 \ + --hash=sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9 \ + --hash=sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc \ + --hash=sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2 \ + --hash=sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4 \ + --hash=sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354 \ + --hash=sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50 \ + --hash=sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698 \ + 
--hash=sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9 \ + --hash=sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b \ + --hash=sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc \ + --hash=sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115 \ + --hash=sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e \ + --hash=sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485 \ + --hash=sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f \ + --hash=sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12 \ + --hash=sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025 \ + --hash=sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009 \ + --hash=sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d \ + --hash=sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b \ + --hash=sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a \ + --hash=sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5 \ + --hash=sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f \ + --hash=sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d \ + --hash=sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1 \ + --hash=sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287 \ + --hash=sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6 \ + --hash=sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f \ + --hash=sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581 \ + --hash=sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed \ + --hash=sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b \ + --hash=sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c \ + 
--hash=sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026 \ + --hash=sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8 \ + --hash=sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676 \ + --hash=sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6 \ + --hash=sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e \ + --hash=sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d \ + --hash=sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d \ + --hash=sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01 \ + --hash=sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7 \ + --hash=sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419 \ + --hash=sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795 \ + --hash=sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1 \ + --hash=sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5 \ + --hash=sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d \ + --hash=sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42 \ + --hash=sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe \ + --hash=sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda \ + --hash=sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e \ + --hash=sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737 \ + --hash=sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523 \ + --hash=sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591 \ + --hash=sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc \ + --hash=sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a \ + --hash=sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50 + # via + # 
jinja2 + # nbconvert + # werkzeug +marshmallow==3.26.2 \ + --hash=sha256:013fa8a3c4c276c24d26d84ce934dc964e2aa794345a0f8c7e5a7191482c8a73 \ + --hash=sha256:bbe2adb5a03e6e3571b573f42527c6fe926e17467833660bebd11593ab8dfd57 + # via great-expectations +matplotlib-inline==0.2.1 \ + --hash=sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76 \ + --hash=sha256:e1ee949c340d771fc39e241ea75683deb94762c8fa5f2927ec57c83c4dffa9fe + # via + # ipykernel + # ipython +mcp==1.25.0 \ + --hash=sha256:56310361ebf0364e2d438e5b45f7668cbb124e158bb358333cd06e49e83a6802 \ + --hash=sha256:b37c38144a666add0862614cc79ec276e97d72aa8ca26d622818d4e278b9721a + # via fastapi-mcp +mdurl==0.1.2 \ + --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ + --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba + # via markdown-it-py +milvus-lite==2.4.12 \ + --hash=sha256:20087663e7b4385050b7ad08f1f03404426d4c87b1ff91d5a8723eee7fd49e88 \ + --hash=sha256:334037ebbab60243b5d8b43d54ca2f835d81d48c3cda0c6a462605e588deb05d \ + --hash=sha256:a0f3a5ddbfd19f4a6b842b2fd3445693c796cde272b701a1646a94c1ac45d3d7 \ + --hash=sha256:e8d4f7cdd5f731efd6faeee3715d280fd91a5f9b4d89312664d56401f65b1473 + # via + # feast (setup.py) + # pymilvus +minio==7.2.11 \ + --hash=sha256:153582ed52ff3b5005ba558e1f25bfe1e9e834f7f0745e594777f28e3e81e1a0 \ + --hash=sha256:4db95a21fe1e2022ec975292d8a1f83bd5b18f830d23d42a4518ac7a5281d7c5 + # via feast (setup.py) +mistune==3.2.0 \ + --hash=sha256:708487c8a8cdd99c9d90eb3ed4c3ed961246ff78ac82f03418f5183ab70e398a \ + --hash=sha256:febdc629a3c78616b94393c6580551e0e34cc289987ec6c35ed3f4be42d0eee1 + # via + # great-expectations + # nbconvert +mmh3==5.2.0 \ + --hash=sha256:03e08c6ebaf666ec1e3d6ea657a2d363bb01effd1a9acfe41f9197decaef0051 \ + --hash=sha256:097e13c8b8a66c5753c6968b7640faefe85d8e38992703c1f666eda6ef4c3762 \ + --hash=sha256:0b898cecff57442724a0f52bf42c2de42de63083a91008fb452887e372f9c328 \ + 
--hash=sha256:10983c10f5c77683bd845751905ba535ec47409874acc759d5ce3ff7ef34398a \ + --hash=sha256:11730eeb16dfcf9674fdea9bb6b8e6dd9b40813b7eb839bc35113649eef38aeb \ + --hash=sha256:127c95336f2a98c51e7682341ab7cb0be3adb9df0819ab8505a726ed1801876d \ + --hash=sha256:12da42c0a55c9d86ab566395324213c319c73ecb0c239fad4726324212b9441c \ + --hash=sha256:132dd943451a7c7546978863d2f5a64977928410782e1a87d583cb60eb89e667 \ + --hash=sha256:1556e31e4bd0ac0c17eaf220be17a09c171d7396919c3794274cb3415a9d3646 \ + --hash=sha256:1a5f4d2e59d6bba8ef01b013c472741835ad961e7c28f50c82b27c57748744a4 \ + --hash=sha256:1ba55d6ca32eeef8b2625e1e4bfc3b3db52bc63014bd7e5df8cc11bf2b036b12 \ + --hash=sha256:1efc8fec8478e9243a78bb993422cf79f8ff85cb4cf6b79647480a31e0d950a8 \ + --hash=sha256:1f8d8b627799f4e2fcc7c034fed8f5f24dc7724ff52f69838a3d6d15f1ad4765 \ + --hash=sha256:1fae471339ae1b9c641f19cf46dfe6ffd7f64b1fba7c4333b99fa3dd7f21ae0a \ + --hash=sha256:1fdb36b940e9261aff0b5177c5b74a36936b902f473180f6c15bde26143681a9 \ + --hash=sha256:2421b9d665a0b1ad724ec7332fb5a98d075f50bc51a6ff854f3a1882bd650d49 \ + --hash=sha256:29c2b9ce61886809d0492a274a5a53047742dea0f703f9c4d5d223c3ea6377d3 \ + --hash=sha256:2c9da0d568569cc87315cb063486d761e38458b8ad513fedd3dc9263e1b81bcd \ + --hash=sha256:2ebfc46b39168ab1cd44670a32ea5489bcbc74a25795c61b6d888c5c2cf654ed \ + --hash=sha256:3193752fc05ea72366c2b63ff24b9a190f422e32d75fdeae71087c08fff26115 \ + --hash=sha256:33576136c06b46a7046b6d83a3d75fbca7d25f84cec743f1ae156362608dc6d2 \ + --hash=sha256:37a358cc881fe796e099c1db6ce07ff757f088827b4e8467ac52b7a7ffdca647 \ + --hash=sha256:382a6bb3f8c6532ea084e7acc5be6ae0c6effa529240836d59352398f002e3fc \ + --hash=sha256:384eda9361a7bf83a85e09447e1feafe081034af9dd428893701b959230d84be \ + --hash=sha256:38d899a156549da8ef6a9f1d6f7ef231228d29f8f69bce2ee12f5fba6d6fd7c5 \ + --hash=sha256:3bc244802ccab5220008cb712ca1508cb6a12f0eb64ad62997156410579a1770 \ + --hash=sha256:3c6041fd9d5fb5fcac57d5c80f521a36b74aea06b8566431c63e4ffc49aced51 \ + 
--hash=sha256:3ca975c51c5028947bbcfc24966517aac06a01d6c921e30f7c5383c195f87991 \ + --hash=sha256:3d6bfd9662a20c054bc216f861fa330c2dac7c81e7fb8307b5e32ab5b9b4d2e0 \ + --hash=sha256:419005f84ba1cab47a77465a2a843562dadadd6671b8758bf179d82a15ca63eb \ + --hash=sha256:45b590e31bc552c6f8e2150ff1ad0c28dd151e9f87589e7eaf508fbdd8e8e908 \ + --hash=sha256:49037d417419863b222ae47ee562b2de9c3416add0a45c8d7f4e864be8dc4f89 \ + --hash=sha256:4a5f5536b1cbfa72318ab3bfc8a8188b949260baed186b75f0abc75b95d8c051 \ + --hash=sha256:582f9dbeefe15c32a5fa528b79b088b599a1dfe290a4436351c6090f90ddebb8 \ + --hash=sha256:58477cf9ef16664d1ce2b038f87d2dc96d70fe50733a34a7f07da6c9a5e3538c \ + --hash=sha256:58981d6ea9646dbbf9e59a30890cbf9f610df0e4a57dbfe09215116fd90b0093 \ + --hash=sha256:5a5dba98e514fb26241868f6eb90a7f7ca0e039aed779342965ce24ea32ba513 \ + --hash=sha256:5b0b58215befe0f0e120b828f7645e97719bbba9f23b69e268ed0ac7adde8645 \ + --hash=sha256:61ac226af521a572700f863d6ecddc6ece97220ce7174e311948ff8c8919a363 \ + --hash=sha256:63830f846797187c5d3e2dae50f0848fdc86032f5bfdc58ae352f02f857e9025 \ + --hash=sha256:69fc339d7202bea69ef9bd7c39bfdf9fdabc8e6822a01eba62fb43233c1b3932 \ + --hash=sha256:6d541038b3fc360ec538fc116de87462627944765a6750308118f8b509a8eec7 \ + --hash=sha256:6ecb4e750d712abde046858ee6992b65c93f1f71b397fce7975c3860c07365d2 \ + --hash=sha256:72d80005b7634a3a2220f81fbeb94775ebd12794623bb2e1451701ea732b4aa3 \ + --hash=sha256:7303aab41e97adcf010a09efd8f1403e719e59b7705d5e3cfed3dd7571589290 \ + --hash=sha256:7434a27754049144539d2099a6d2da5d88b8bdeedf935180bf42ad59b3607aa3 \ + --hash=sha256:746a5ee71c6d1103d9b560fa147881b5e68fd35da56e54e03d5acefad0e7c055 \ + --hash=sha256:7733ec52296fc1ba22e9b90a245c821adbb943e98c91d8a330a2254612726106 \ + --hash=sha256:7901c893e704ee3c65f92d39b951f8f34ccf8e8566768c58103fb10e55afb8c1 \ + --hash=sha256:7aa18cdb58983ee660c9c400b46272e14fa253c675ed963d3812487f8ca42037 \ + --hash=sha256:7b986d506a8e8ea345791897ba5d8ba0d9d8820cd4fc3e52dbe6de19388de2e7 \ + 
--hash=sha256:7bbb0df897944b5ec830f3ad883e32c5a7375370a521565f5fe24443bfb2c4f7 \ + --hash=sha256:7c7f0b342fd06044bedd0b6e72177ddc0076f54fd89ee239447f8b271d919d9b \ + --hash=sha256:7e5634565367b6d98dc4aa2983703526ef556b3688ba3065edb4b9b90ede1c54 \ + --hash=sha256:7fddccd4113e7b736706e17a239a696332360cbaddf25ae75b57ba1acce65081 \ + --hash=sha256:81c504ad11c588c8629536b032940f2a359dda3b6cbfd4ad8f74cb24dcd1b0bc \ + --hash=sha256:81df0dae22cd0da87f1c978602750f33d17fb3d21fb0f326c89dc89834fea79b \ + --hash=sha256:86d1be5d63232e6eb93c50881aea55ff06eb86d8e08f9b5417c8c9b10db9db96 \ + --hash=sha256:8b0c53fe0994beade1ad7c0f13bd6fec980a0664bfbe5a6a7d64500b9ab76772 \ + --hash=sha256:8ebf241072cf2777a492d0e09252f8cc2b3edd07dfdb9404b9757bffeb4f2cee \ + --hash=sha256:931d47e08c9c8a67bf75d82f0ada8399eac18b03388818b62bfa42882d571d72 \ + --hash=sha256:932a6eec1d2e2c3c9e630d10f7128d80e70e2d47fe6b8c7ea5e1afbd98733e65 \ + --hash=sha256:941603bfd75a46023807511c1ac2f1b0f39cccc393c15039969806063b27e6db \ + --hash=sha256:956127e663d05edbeec54df38885d943dfa27406594c411139690485128525de \ + --hash=sha256:96f1e1ac44cbb42bcc406e509f70c9af42c594e72ccc7b1257f97554204445f0 \ + --hash=sha256:99bb6a4d809aa4e528ddfe2c85dd5239b78b9dd14be62cca0329db78505e7b50 \ + --hash=sha256:9f64bf06f4bf623325fda3a6d02d36cd69199b9ace99b04bb2d7fd9f89688504 \ + --hash=sha256:a094319ec0db52a04af9fdc391b4d39a1bc72bc8424b47c4411afb05413a44b5 \ + --hash=sha256:a367d4741ac0103f8198c82f429bccb9359f543ca542b06a51f4f0332e8de279 \ + --hash=sha256:a7c0c7845566b9686480e6a7e9044db4afb60038d5fabd19227443f0104eeee4 \ + --hash=sha256:aa6e5d31fdc5ed9e3e95f9873508615a778fe9b523d52c17fc770a3eb39ab6e4 \ + --hash=sha256:ae9d032488fcec32d22be6542d1a836f00247f40f320844dbb361393b5b22773 \ + --hash=sha256:b0271ac12415afd3171ab9a3c7cbfc71dee2c68760a7dc9d05bf8ed6ddfa3a7a \ + --hash=sha256:b0d753ad566c721faa33db7e2e0eddd74b224cdd3eaf8481d76c926603c7a00e \ + --hash=sha256:b29044e1ffdb84fe164d0a7ea05c7316afea93c00f8ed9449cf357c36fc4f814 \ + 
--hash=sha256:b5995088dd7023d2d9f310a0c67de5a2b2e06a570ecfd00f9ff4ab94a67cde43 \ + --hash=sha256:b5f317a727bba0e633a12e71228bc6a4acb4f471a98b1c003163b917311ea9a9 \ + --hash=sha256:b9a87025121d1c448f24f27ff53a5fe7b6ef980574b4a4f11acaabe702420d63 \ + --hash=sha256:bb0fdc451fb6d86d81ab8f23d881b8d6e37fc373a2deae1c02d27002d2ad7a05 \ + --hash=sha256:bb4fe46bdc6104fbc28db7a6bacb115ee6368ff993366bbd8a2a7f0076e6f0c0 \ + --hash=sha256:bc44fc2b886243d7c0d8daeb37864e16f232e5b56aaec27cc781d848264cfd28 \ + --hash=sha256:bdde97310d59604f2a9119322f61b31546748499a21b44f6715e8ced9308a6c5 \ + --hash=sha256:be1374df449465c9f2500e62eee73a39db62152a8bdfbe12ec5b5c1cd451344d \ + --hash=sha256:be7d3dca9358e01dab1bad881fb2b4e8730cec58d36dd44482bc068bfcd3bc65 \ + --hash=sha256:bf7bee43e17e81671c447e9c83499f53d99bf440bc6d9dc26a841e21acfbe094 \ + --hash=sha256:c3dca4cb5b946ee91b3d6bb700d137b1cd85c20827f89fdf9c16258253489044 \ + --hash=sha256:c3f563e8901960e2eaa64c8e8821895818acabeb41c96f2efbb936f65dbe486c \ + --hash=sha256:c463d7c1c4cfc9d751efeaadd936bbba07b5b0ed81a012b3a9f5a12f0872bd6e \ + --hash=sha256:c4a2f3d83879e3de2eb8cbf562e71563a8ed15ee9b9c2e77ca5d9f73072ac15c \ + --hash=sha256:c5584061fd3da584659b13587f26c6cad25a096246a481636d64375d0c1f6c07 \ + --hash=sha256:c677d78887244bf3095020b73c42b505b700f801c690f8eaa90ad12d3179612f \ + --hash=sha256:c903e71fd8debb35ad2a4184c1316b3cb22f64ce517b4e6747f25b0a34e41266 \ + --hash=sha256:c9ff37ba9f15637e424c2ab57a1a590c52897c845b768e4e0a4958084ec87f22 \ + --hash=sha256:cadc16e8ea64b5d9a47363013e2bea469e121e6e7cb416a7593aeb24f2ad122e \ + --hash=sha256:cedac4f4054b8f7859e5aed41aaa31ad03fce6851901a7fdc2af0275ac533c10 \ + --hash=sha256:d22c9dcafed659fadc605538946c041722b6d1104fe619dbf5cc73b3c8a0ded8 \ + --hash=sha256:d765058da196f68dc721116cab335e696e87e76720e6ef8ee5a24801af65e63d \ + --hash=sha256:d86651fa45799530885ba4dab3d21144486ed15285e8784181a0ab37a4552384 \ + --hash=sha256:dd966df3489ec13848d6c6303429bbace94a153f43d1ae2a55115fd36fd5ca5d \ + 
--hash=sha256:ddc63328889bcaee77b743309e5c7d2d52cee0d7d577837c91b6e7cc9e755e0b \ + --hash=sha256:dfbead5575f6470c17e955b94f92d62a03dfc3d07f2e6f817d9b93dc211a1515 \ + --hash=sha256:e0f3ed828d709f5b82d8bfe14f8856120718ec4bd44a5b26102c3030a1e12501 \ + --hash=sha256:e1861fb6b1d0453ed7293200139c0a9011eeb1376632e048e3766945b13313c5 \ + --hash=sha256:e5015f0bb6eb50008bed2d4b1ce0f2a294698a926111e4bb202c0987b4f89078 \ + --hash=sha256:e651e17bfde5840e9e4174b01e9e080ce49277b70d424308b36a7969d0d1af73 \ + --hash=sha256:e7884931fe5e788163e7b3c511614130c2c59feffdc21112290a194487efb2e9 \ + --hash=sha256:e79c00eba78f7258e5b354eccd4d7907d60317ced924ea4a5f2e9d83f5453065 \ + --hash=sha256:e912b19cf2378f2967d0c08e86ff4c6c360129887f678e27e4dde970d21b3f4d \ + --hash=sha256:e9a011469b47b752e7d20de296bb34591cdfcbe76c99c2e863ceaa2aa61113d2 \ + --hash=sha256:eb756caf8975882630ce4e9fbbeb9d3401242a72528230422c9ab3a0d278e60c \ + --hash=sha256:eba01ec3bd4a49b9ac5ca2bc6a73ff5f3af53374b8556fcc2966dd2af9eb7779 \ + --hash=sha256:ecbfc0437ddfdced5e7822d1ce4855c9c64f46819d0fdc4482c53f56c707b935 \ + --hash=sha256:eed4bba7ff8a0d37106ba931ab03bdd3915fbb025bcf4e1f0aa02bc8114960c5 \ + --hash=sha256:f35727c5118aba95f0397e18a1a5b8405425581bfe53e821f0fb444cbdc2bc9b \ + --hash=sha256:f698733a8a494466432d611a8f0d1e026f5286dee051beea4b3c3146817e35d5 \ + --hash=sha256:f7f9034c7cf05ddfaac8d7a2e63a3c97a840d4615d0a0e65ba8bdf6f8576e3be \ + --hash=sha256:fa0c966ee727aad5406d516375593c5f058c766b21236ab8985693934bb5085b \ + --hash=sha256:fc9c5f280438cf1c1a8f9abb87dc8ce9630a964120cfb5dd50d1e7ce79690c7a \ + --hash=sha256:fd6e6c3d90660d085f7e73710eab6f5545d4854b81b0135a3526e797009dbda3 \ + --hash=sha256:fdfd3fb739f4e22746e13ad7ba0c6eedf5f454b18d11249724a388868e308ee4 \ + --hash=sha256:ff3d50dc3fe8a98059f99b445dfb62792b5d006c5e0b8f03c6de2813b8376110 + # via feast (setup.py) +mock==2.0.0 \ + --hash=sha256:5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1 \ + 
--hash=sha256:b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba + # via feast (setup.py) +moto==4.2.14 \ + --hash=sha256:6d242dbbabe925bb385ddb6958449e5c827670b13b8e153ed63f91dbdb50372c \ + --hash=sha256:8f9263ca70b646f091edcc93e97cda864a542e6d16ed04066b1370ed217bd190 + # via feast (setup.py) +mpire[dill]==2.10.2 \ + --hash=sha256:d627707f7a8d02aa4c7f7d59de399dec5290945ddf7fbd36cbb1d6ebb37a51fb \ + --hash=sha256:f66a321e93fadff34585a4bfa05e95bd946cf714b442f51c529038eb45773d97 + # via semchunk +mpmath==1.3.0 \ + --hash=sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f \ + --hash=sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c + # via sympy +msal==1.34.0 \ + --hash=sha256:76ba83b716ea5a6d75b0279c0ac353a0e05b820ca1f6682c0eb7f45190c43c2f \ + --hash=sha256:f669b1644e4950115da7a176441b0e13ec2975c29528d8b9e81316023676d6e1 + # via + # azure-identity + # msal-extensions +msal-extensions==1.3.1 \ + --hash=sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca \ + --hash=sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4 + # via azure-identity +msgpack==1.1.2 \ + --hash=sha256:0051fffef5a37ca2cd16978ae4f0aef92f164df86823871b5162812bebecd8e2 \ + --hash=sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014 \ + --hash=sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931 \ + --hash=sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b \ + --hash=sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b \ + --hash=sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999 \ + --hash=sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029 \ + --hash=sha256:283ae72fc89da59aa004ba147e8fc2f766647b1251500182fac0350d8af299c0 \ + --hash=sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9 \ + --hash=sha256:2e86a607e558d22985d856948c12a3fa7b42efad264dca8a3ebbcfa2735d786c \ + 
--hash=sha256:350ad5353a467d9e3b126d8d1b90fe05ad081e2e1cef5753f8c345217c37e7b8 \ + --hash=sha256:354e81bcdebaab427c3df4281187edc765d5d76bfb3a7c125af9da7a27e8458f \ + --hash=sha256:365c0bbe981a27d8932da71af63ef86acc59ed5c01ad929e09a0b88c6294e28a \ + --hash=sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42 \ + --hash=sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e \ + --hash=sha256:41d1a5d875680166d3ac5c38573896453bbbea7092936d2e107214daf43b1d4f \ + --hash=sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7 \ + --hash=sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb \ + --hash=sha256:454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef \ + --hash=sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf \ + --hash=sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245 \ + --hash=sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794 \ + --hash=sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af \ + --hash=sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff \ + --hash=sha256:602b6740e95ffc55bfb078172d279de3773d7b7db1f703b2f1323566b878b90e \ + --hash=sha256:61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296 \ + --hash=sha256:67016ae8c8965124fdede9d3769528ad8284f14d635337ffa6a713a580f6c030 \ + --hash=sha256:6bde749afe671dc44893f8d08e83bf475a1a14570d67c4bb5cec5573463c8833 \ + --hash=sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939 \ + --hash=sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa \ + --hash=sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90 \ + --hash=sha256:7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c \ + --hash=sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717 \ + --hash=sha256:86f8136dfa5c116365a8a651a7d7484b65b13339731dd6faebb9a0242151c406 \ + 
--hash=sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a \ + --hash=sha256:8b696e83c9f1532b4af884045ba7f3aa741a63b2bc22617293a2c6a7c645f251 \ + --hash=sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2 \ + --hash=sha256:94fd7dc7d8cb0a54432f296f2246bc39474e017204ca6f4ff345941d4ed285a7 \ + --hash=sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e \ + --hash=sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b \ + --hash=sha256:9fba231af7a933400238cb357ecccf8ab5d51535ea95d94fc35b7806218ff844 \ + --hash=sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9 \ + --hash=sha256:a605409040f2da88676e9c9e5853b3449ba8011973616189ea5ee55ddbc5bc87 \ + --hash=sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b \ + --hash=sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c \ + --hash=sha256:a8f6e7d30253714751aa0b0c84ae28948e852ee7fb0524082e6716769124bc23 \ + --hash=sha256:ad09b984828d6b7bb52d1d1d0c9be68ad781fa004ca39216c8a1e63c0f34ba3c \ + --hash=sha256:bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e \ + --hash=sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620 \ + --hash=sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69 \ + --hash=sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f \ + --hash=sha256:d198d275222dc54244bf3327eb8cbe00307d220241d9cec4d306d49a44e85f68 \ + --hash=sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27 \ + --hash=sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46 \ + --hash=sha256:db6192777d943bdaaafb6ba66d44bf65aa0e9c5616fa1d2da9bb08828c6b39aa \ + --hash=sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00 \ + --hash=sha256:e64c8d2f5e5d5fda7b842f55dec6133260ea8f53c4257d64494c534f306bf7a9 \ + --hash=sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84 \ + 
--hash=sha256:ea5405c46e690122a76531ab97a079e184c0daf491e588592d6a23d3e32af99e \ + --hash=sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20 \ + --hash=sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e \ + --hash=sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162 + # via ray +multidict==6.7.0 \ + --hash=sha256:03ca744319864e92721195fa28c7a3b2bc7b686246b35e4078c1e4d0eb5466d3 \ + --hash=sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec \ + --hash=sha256:05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd \ + --hash=sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b \ + --hash=sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb \ + --hash=sha256:08325c9e5367aa379a3496aa9a022fe8837ff22e00b94db256d3a1378c76ab32 \ + --hash=sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f \ + --hash=sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7 \ + --hash=sha256:096f52730c3fb8ed419db2d44391932b63891b2c5ed14850a7e215c0ba9ade36 \ + --hash=sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd \ + --hash=sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff \ + --hash=sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8 \ + --hash=sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d \ + --hash=sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721 \ + --hash=sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0 \ + --hash=sha256:18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3 \ + --hash=sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d \ + --hash=sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa \ + --hash=sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10 \ + 
--hash=sha256:2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202 \ + --hash=sha256:23bfeee5316266e5ee2d625df2d2c602b829435fc3a235c2ba2131495706e4a0 \ + --hash=sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718 \ + --hash=sha256:28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e \ + --hash=sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6 \ + --hash=sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1 \ + --hash=sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2 \ + --hash=sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754 \ + --hash=sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c \ + --hash=sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390 \ + --hash=sha256:31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128 \ + --hash=sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912 \ + --hash=sha256:363eb68a0a59bd2303216d2346e6c441ba10d36d1f9969fcb6f1ba700de7bb5c \ + --hash=sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3 \ + --hash=sha256:3996b50c3237c4aec17459217c1e7bbdead9a22a0fcd3c365564fbd16439dde6 \ + --hash=sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2 \ + --hash=sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f \ + --hash=sha256:3ba3ef510467abb0667421a286dc906e30eb08569365f5cdb131d7aff7c2dd84 \ + --hash=sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842 \ + --hash=sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9 \ + --hash=sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6 \ + --hash=sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd \ + --hash=sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8 \ + --hash=sha256:44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599 \ + 
--hash=sha256:478cc36476687bac1514d651cbbaa94b86b0732fb6855c60c673794c7dd2da62 \ + --hash=sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec \ + --hash=sha256:4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34 \ + --hash=sha256:4b73189894398d59131a66ff157837b1fafea9974be486d036bb3d32331fdbf0 \ + --hash=sha256:4b7a9db5a870f780220e931d0002bbfd88fb53aceb6293251e2c839415c1b20e \ + --hash=sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6 \ + --hash=sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc \ + --hash=sha256:4d72a9a2d885f5c208b0cb91ff2ed43636bb7e345ec839ff64708e04f69a13cc \ + --hash=sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c \ + --hash=sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7 \ + --hash=sha256:521f33e377ff64b96c4c556b81c55d0cfffb96a11c194fd0c3f1e56f3d8dd5a4 \ + --hash=sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4 \ + --hash=sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38 \ + --hash=sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5 \ + --hash=sha256:661709cdcd919a2ece2234f9bae7174e5220c80b034585d7d8a755632d3e2111 \ + --hash=sha256:680878b9f3d45c31e1f730eef731f9b0bc1da456155688c6745ee84eb818e90e \ + --hash=sha256:6843b28b0364dc605f21481c90fadb5f60d9123b442eb8a726bb74feef588a84 \ + --hash=sha256:68af405971779d8b37198726f2b6fe3955db846fee42db7a4286fc542203934c \ + --hash=sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1 \ + --hash=sha256:6bdce131e14b04fd34a809b6380dbfd826065c3e2fe8a50dbae659fa0c390546 \ + --hash=sha256:716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a \ + --hash=sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c \ + --hash=sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036 \ + --hash=sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38 \ + 
--hash=sha256:7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99 \ + --hash=sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64 \ + --hash=sha256:7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e \ + --hash=sha256:7e73299c99939f089dd9b2120a04a516b95cdf8c1cd2b18c53ebf0de80b1f18f \ + --hash=sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159 \ + --hash=sha256:7f5170993a0dd3ab871c74f45c0a21a4e2c37a2f2b01b5f722a2ad9c6650469e \ + --hash=sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12 \ + --hash=sha256:8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1 \ + --hash=sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0 \ + --hash=sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184 \ + --hash=sha256:8b55d5497b51afdfde55925e04a022f1de14d4f4f25cdfd4f5d9b0aa96166851 \ + --hash=sha256:8cfc12a8630a29d601f48d47787bd7eb730e475e83edb5d6c5084317463373eb \ + --hash=sha256:9281bf5b34f59afbc6b1e477a372e9526b66ca446f4bf62592839c195a718b32 \ + --hash=sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b \ + --hash=sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288 \ + --hash=sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81 \ + --hash=sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd \ + --hash=sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45 \ + --hash=sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a \ + --hash=sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca \ + --hash=sha256:9cf41880c991716f3c7cec48e2f19ae4045fc9db5fc9cff27347ada24d710bb5 \ + --hash=sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb \ + --hash=sha256:9f474ad5acda359c8758c8accc22032c6abe6dc87a8be2440d097785e27a9349 \ + --hash=sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b \ + 
--hash=sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f \ + --hash=sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32 \ + --hash=sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5 \ + --hash=sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34 \ + --hash=sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c \ + --hash=sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4 \ + --hash=sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17 \ + --hash=sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60 \ + --hash=sha256:a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394 \ + --hash=sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff \ + --hash=sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00 \ + --hash=sha256:afa8a2978ec65d2336305550535c9c4ff50ee527914328c8677b3973ade52b85 \ + --hash=sha256:b15b3afff74f707b9275d5ba6a91ae8f6429c3ffb29bbfd216b0b375a56f13d7 \ + --hash=sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304 \ + --hash=sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13 \ + --hash=sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e \ + --hash=sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e \ + --hash=sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792 \ + --hash=sha256:b61189b29081a20c7e4e0b49b44d5d44bb0dc92be3c6d06a11cc043f81bf9329 \ + --hash=sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb \ + --hash=sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b \ + --hash=sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000 \ + --hash=sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6 \ + --hash=sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62 \ + 
--hash=sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63 \ + --hash=sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5 \ + --hash=sha256:ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e \ + --hash=sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c \ + --hash=sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827 \ + --hash=sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8 \ + --hash=sha256:ce8fdc2dca699f8dbf055a61d73eaa10482569ad20ee3c36ef9641f69afa8c91 \ + --hash=sha256:d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96 \ + --hash=sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad \ + --hash=sha256:d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6 \ + --hash=sha256:d874eb056410ca05fed180b6642e680373688efafc7f077b2a2f61811e873a40 \ + --hash=sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7 \ + --hash=sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4 \ + --hash=sha256:df0e3bf7993bdbeca5ac25aa859cf40d39019e015c9c91809ba7093967f7a648 \ + --hash=sha256:e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064 \ + --hash=sha256:e2862408c99f84aa571ab462d25236ef9cb12a602ea959ba9c9009a54902fc73 \ + --hash=sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b \ + --hash=sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762 \ + --hash=sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e \ + --hash=sha256:eb866162ef2f45063acc7a53a88ef6fe8bf121d45c30ea3c9cd87ce7e191a8d4 \ + --hash=sha256:ec81878ddf0e98817def1e77d4f50dae5ef5b0e4fe796fae3bd674304172416e \ + --hash=sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546 \ + --hash=sha256:f0e77e3c0008bc9316e662624535b88d360c3a5d3f81e15cf12c139a75250046 \ + --hash=sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6 \ + 
--hash=sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9 \ + --hash=sha256:f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d \ + --hash=sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf \ + --hash=sha256:f8e5c0031b90ca9ce555e2e8fd5c3b02a25f14989cbc310701823832c99eb687 \ + --hash=sha256:fb287618b9c7aa3bf8d825f02d9201b2f13078a5ed3b293c8f4d953917d84d5e \ + --hash=sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885 \ + --hash=sha256:fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7 + # via + # aiobotocore + # aiohttp + # yarl +multiprocess==0.70.16 \ + --hash=sha256:0dfd078c306e08d46d7a8d06fb120313d87aa43af60d66da43ffff40b44d2f41 \ + --hash=sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1 \ + --hash=sha256:37b55f71c07e2d741374998c043b9520b626a8dddc8b3129222ca4f1a06ef67a \ + --hash=sha256:476887be10e2f59ff183c006af746cb6f1fd0eadcfd4ef49e605cbe2659920ee \ + --hash=sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3 \ + --hash=sha256:a71d82033454891091a226dfc319d0cfa8019a4e888ef9ca910372a446de4435 \ + --hash=sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a \ + --hash=sha256:ba8c31889abf4511c7308a8c52bb4a30b9d590e7f58523302ba00237702ca054 \ + --hash=sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02 \ + --hash=sha256:d951bed82c8f73929ac82c61f01a7b5ce8f3e5ef40f5b52553b4f547ce2b08ec \ + --hash=sha256:e7b9d0f307cd9bd50851afaac0dba2cb6c44449efff697df7c7645f7d3f2be3a \ + --hash=sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e + # via + # datasets + # mpire +mypy==1.11.2 \ + --hash=sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36 \ + --hash=sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce \ + --hash=sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6 \ + 
--hash=sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b \ + --hash=sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca \ + --hash=sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24 \ + --hash=sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383 \ + --hash=sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7 \ + --hash=sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86 \ + --hash=sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d \ + --hash=sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4 \ + --hash=sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8 \ + --hash=sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987 \ + --hash=sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385 \ + --hash=sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79 \ + --hash=sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef \ + --hash=sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6 \ + --hash=sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70 \ + --hash=sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca \ + --hash=sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70 \ + --hash=sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12 \ + --hash=sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104 \ + --hash=sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a \ + --hash=sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318 \ + --hash=sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1 \ + --hash=sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b \ + --hash=sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d + # via + # 
feast (setup.py) + # sqlalchemy +mypy-extensions==1.1.0 \ + --hash=sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505 \ + --hash=sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558 + # via mypy +mypy-protobuf==3.3.0 \ + --hash=sha256:15604f6943b16c05db646903261e3b3e775cf7f7990b7c37b03d043a907b650d \ + --hash=sha256:24f3b0aecb06656e983f58e07c732a90577b9d7af3e1066fc2b663bbf0370248 + # via feast (setup.py) +nbclient==0.10.4 \ + --hash=sha256:1e54091b16e6da39e297b0ece3e10f6f29f4ac4e8ee515d29f8a7099bd6553c9 \ + --hash=sha256:9162df5a7373d70d606527300a95a975a47c137776cd942e52d9c7e29ff83440 + # via nbconvert +nbconvert==7.16.6 \ + --hash=sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b \ + --hash=sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582 + # via jupyter-server +nbformat==5.10.4 \ + --hash=sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a \ + --hash=sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b + # via + # great-expectations + # jupyter-server + # nbclient + # nbconvert +nest-asyncio==1.6.0 \ + --hash=sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe \ + --hash=sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c + # via ipykernel +networkx==3.6.1 \ + --hash=sha256:26b7c357accc0c8cde558ad486283728b65b6a95d85ee1cd66bafab4c8168509 \ + --hash=sha256:d47fbf302e7d9cbbb9e2555a0d267983d2aa476bac30e90dfbe5669bd57f3762 + # via + # scikit-image + # torch +ninja==1.13.0 \ + --hash=sha256:11be2d22027bde06f14c343f01d31446747dbb51e72d00decca2eb99be911e2f \ + --hash=sha256:1c97223cdda0417f414bf864cfb73b72d8777e57ebb279c5f6de368de0062988 \ + --hash=sha256:3c0b40b1f0bba764644385319028650087b4c1b18cdfa6f45cb39a3669b81aa9 \ + --hash=sha256:3d00c692fb717fd511abeb44b8c5d00340c36938c12d6538ba989fe764e79630 \ + --hash=sha256:3d7d7779d12cb20c6d054c61b702139fd23a7a964ec8f2c823f1ab1b084150db \ + 
--hash=sha256:4a40ce995ded54d9dc24f8ea37ff3bf62ad192b547f6c7126e7e25045e76f978 \ + --hash=sha256:4be9c1b082d244b1ad7ef41eb8ab088aae8c109a9f3f0b3e56a252d3e00f42c1 \ + --hash=sha256:5f8e1e8a1a30835eeb51db05cf5a67151ad37542f5a4af2a438e9490915e5b72 \ + --hash=sha256:60056592cf495e9a6a4bea3cd178903056ecb0943e4de45a2ea825edb6dc8d3e \ + --hash=sha256:6739d3352073341ad284246f81339a384eec091d9851a886dfa5b00a6d48b3e2 \ + --hash=sha256:8cfbb80b4a53456ae8a39f90ae3d7a2129f45ea164f43fadfa15dc38c4aef1c9 \ + --hash=sha256:aa45b4037b313c2f698bc13306239b8b93b4680eb47e287773156ac9e9304714 \ + --hash=sha256:b4f2a072db3c0f944c32793e91532d8948d20d9ab83da9c0c7c15b5768072200 \ + --hash=sha256:be7f478ff9f96a128b599a964fc60a6a87b9fa332ee1bd44fa243ac88d50291c \ + --hash=sha256:d741a5e6754e0bda767e3274a0f0deeef4807f1fec6c0d7921a0244018926ae5 \ + --hash=sha256:e8bad11f8a00b64137e9b315b137d8bb6cbf3086fbdc43bf1f90fd33324d2e96 \ + --hash=sha256:fa2a8bfc62e31b08f83127d1613d10821775a0eb334197154c4d6067b7068ff1 \ + --hash=sha256:fb46acf6b93b8dd0322adc3a4945452a4e774b75b91293bafcc7b7f8e6517dfa \ + --hash=sha256:fb8ee8719f8af47fed145cced4a85f0755dd55d45b2bddaf7431fa89803c5f3e + # via easyocr +nodeenv==1.10.0 \ + --hash=sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827 \ + --hash=sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb + # via pre-commit +notebook==7.5.2 \ + --hash=sha256:17d078a98603d70d62b6b4b3fcb67e87d7a68c398a7ae9b447eb2d7d9aec9979 \ + --hash=sha256:83e82f93c199ca730313bea1bb24bc279ea96f74816d038a92d26b6b9d5f3e4a + # via great-expectations +notebook-shim==0.2.4 \ + --hash=sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef \ + --hash=sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb + # via + # jupyterlab + # notebook +numpy==2.4.1 \ + --hash=sha256:0093e85df2960d7e4049664b26afc58b03236e967fb942354deef3208857a04c \ + --hash=sha256:09aa8a87e45b55a1c2c205d42e2808849ece5c484b2aab11fecabec3841cafba \ + 
--hash=sha256:0cce2a669e3c8ba02ee563c7835f92c153cf02edff1ae05e1823f1dde21b16a5 \ + --hash=sha256:0e6e8f9d9ecf95399982019c01223dc130542960a12edfa8edd1122dfa66a8a8 \ + --hash=sha256:0f118ce6b972080ba0758c6087c3617b5ba243d806268623dc34216d69099ba0 \ + --hash=sha256:178de8f87948163d98a4c9ab5bee4ce6519ca918926ec8df195af582de28544d \ + --hash=sha256:18e14c4d09d55eef39a6ab5b08406e84bc6869c1e34eef45564804f90b7e0574 \ + --hash=sha256:2023ef86243690c2791fd6353e5b4848eedaa88ca8a2d129f462049f6d484696 \ + --hash=sha256:20d4649c773f66cc2fc36f663e091f57c3b7655f936a4c681b4250855d1da8f5 \ + --hash=sha256:2302dc0224c1cbc49bb94f7064f3f923a971bfae45c33870dcbff63a2a550505 \ + --hash=sha256:26f0bcd9c79a00e339565b303badc74d3ea2bd6d52191eeca5f95936cad107d0 \ + --hash=sha256:297c72b1b98100c2e8f873d5d35fb551fce7040ade83d67dd51d38c8d42a2162 \ + --hash=sha256:2f44de05659b67d20499cbc96d49f2650769afcb398b79b324bb6e297bfe3844 \ + --hash=sha256:2ffd257026eb1b34352e749d7cc1678b5eeec3e329ad8c9965a797e08ccba205 \ + --hash=sha256:382ad67d99ef49024f11d1ce5dcb5ad8432446e4246a4b014418ba3a1175a1f4 \ + --hash=sha256:3869ea1ee1a1edc16c29bbe3a2f2a4e515cc3a44d43903ad41e0cacdbaf733dc \ + --hash=sha256:3d1a100e48cb266090a031397863ff8a30050ceefd798f686ff92c67a486753d \ + --hash=sha256:423797bdab2eeefbe608d7c1ec7b2b4fd3c58d51460f1ee26c7500a1d9c9ee93 \ + --hash=sha256:42d7dd5fa36d16d52a84f821eb96031836fd405ee6955dd732f2023724d0aa01 \ + --hash=sha256:49e792ec351315e16da54b543db06ca8a86985ab682602d90c60ef4ff4db2a9c \ + --hash=sha256:4e53170557d37ae404bf8d542ca5b7c629d6efa1117dac6a83e394142ea0a43f \ + --hash=sha256:4f1b68ff47680c2925f8063402a693ede215f0257f02596b1318ecdfb1d79e33 \ + --hash=sha256:4f9c360ecef085e5841c539a9a12b883dff005fbd7ce46722f5e9cef52634d82 \ + --hash=sha256:529050522e983e00a6c1c6b67411083630de8b57f65e853d7b03d9281b8694d2 \ + --hash=sha256:52b5f61bdb323b566b528899cc7db2ba5d1015bda7ea811a8bcf3c89c331fa42 \ + --hash=sha256:538bf4ec353709c765ff75ae616c34d3c3dca1a68312727e8f2676ea644f8509 \ + 
--hash=sha256:5adf01965456a664fc727ed69cc71848f28d063217c63e1a0e200a118d5eec9a \ + --hash=sha256:5b55aa56165b17aaf15520beb9cbd33c9039810e0d9643dd4379e44294c7303e \ + --hash=sha256:5d558123217a83b2d1ba316b986e9248a1ed1971ad495963d555ccd75dcb1556 \ + --hash=sha256:5de60946f14ebe15e713a6f22850c2372fa72f4ff9a432ab44aa90edcadaa65a \ + --hash=sha256:62fea415f83ad8fdb6c20840578e5fbaf5ddd65e0ec6c3c47eda0f69da172510 \ + --hash=sha256:6436cffb4f2bf26c974344439439c95e152c9a527013f26b3577be6c2ca64295 \ + --hash=sha256:6461de5113088b399d655d45c3897fa188766415d0f568f175ab071c8873bd73 \ + --hash=sha256:69e7419c9012c4aaf695109564e3387f1259f001b4326dfa55907b098af082d3 \ + --hash=sha256:71abbea030f2cfc3092a0ff9f8c8fdefdc5e0bf7d9d9c99663538bb0ecdac0b9 \ + --hash=sha256:7211b95ca365519d3596a1d8688a95874cc94219d417504d9ecb2df99fa7bfa8 \ + --hash=sha256:727c6c3275ddefa0dc078524a85e064c057b4f4e71ca5ca29a19163c607be745 \ + --hash=sha256:79e9e06c4c2379db47f3f6fc7a8652e7498251789bf8ff5bd43bf478ef314ca2 \ + --hash=sha256:7ad270f438cbdd402c364980317fb6b117d9ec5e226fff5b4148dd9aa9fc6e02 \ + --hash=sha256:7d5d7999df434a038d75a748275cd6c0094b0ecdb0837342b332a82defc4dc4d \ + --hash=sha256:8097529164c0f3e32bb89412a0905d9100bf434d9692d9fc275e18dcf53c9344 \ + --hash=sha256:82c55962006156aeef1629b953fd359064aa47e4d82cfc8e67f0918f7da3344f \ + --hash=sha256:8361ea4220d763e54cff2fbe7d8c93526b744f7cd9ddab47afeff7e14e8503be \ + --hash=sha256:899d2c18024984814ac7e83f8f49d8e8180e2fbe1b2e252f2e7f1d06bea92425 \ + --hash=sha256:8ad35f20be147a204e28b6a0575fbf3540c5e5f802634d4258d55b1ff5facce1 \ + --hash=sha256:8f085da926c0d491ffff3096f91078cc97ea67e7e6b65e490bc8dcda65663be2 \ + --hash=sha256:9171a42fcad32dcf3fa86f0a4faa5e9f8facefdb276f54b8b390d90447cff4e2 \ + --hash=sha256:92a0e65272fd60bfa0d9278e0484c2f52fe03b97aedc02b357f33fe752c52ffb \ + --hash=sha256:941c2a93313d030f219f3a71fd3d91a728b82979a5e8034eb2e60d394a2b83f9 \ + --hash=sha256:98b35775e03ab7f868908b524fc0a84d38932d8daf7b7e1c3c3a1b6c7a2c9f15 \ + 
--hash=sha256:a1ceafc5042451a858231588a104093474c6a5c57dcc724841f5c888d237d690 \ + --hash=sha256:a73044b752f5d34d4232f25f18160a1cc418ea4507f5f11e299d8ac36875f8a0 \ + --hash=sha256:a7870e8c5fc11aef57d6fea4b4085e537a3a60ad2cdd14322ed531fdca68d261 \ + --hash=sha256:a92f227dbcdc9e4c3e193add1a189a9909947d4f8504c576f4a732fd0b54240a \ + --hash=sha256:ac08c63cb7779b85e9d5318e6c3518b424bc1f364ac4cb2c6136f12e5ff2dccc \ + --hash=sha256:b6bcf39112e956594b3331316d90c90c90fb961e39696bda97b89462f5f3943f \ + --hash=sha256:c0faba4a331195bfa96f93dd9dfaa10b2c7aa8cda3a02b7fd635e588fe821bf5 \ + --hash=sha256:ce9ce141a505053b3c7bce3216071f3bf5c182b8b28930f14cd24d43932cd2df \ + --hash=sha256:cf6470d91d34bf669f61d515499859fa7a4c2f7c36434afb70e82df7217933f9 \ + --hash=sha256:d3703409aac693fa82c0aee023a1ae06a6e9d065dba10f5e8e80f642f1e9d0a2 \ + --hash=sha256:d3e3087f53e2b4428766b54932644d148613c5a595150533ae7f00dab2f319a8 \ + --hash=sha256:d3f8f0df9f4b8be57b3bf74a1d087fec68f927a2fab68231fdb442bf2c12e426 \ + --hash=sha256:d797454e37570cfd61143b73b8debd623c3c0952959adb817dd310a483d58a1b \ + --hash=sha256:e1a27bb1b2dee45a2a53f5ca6ff2d1a7f135287883a1689e930d44d1ff296c87 \ + --hash=sha256:e3bd2cb07841166420d2fa7146c96ce00cb3410664cbc1a6be028e456c4ee220 \ + --hash=sha256:e7b6b5e28bbd47b7532698e5db2fe1db693d84b58c254e4389d99a27bb9b8f6b \ + --hash=sha256:e867df947d427cdd7a60e3e271729090b0f0df80f5f10ab7dd436f40811699c3 \ + --hash=sha256:ea66d2b41ca4a1630aae5507ee0a71647d3124d1741980138aa8f28f44dac36e \ + --hash=sha256:edee228f76ee2dab4579fad6f51f6a305de09d444280109e0f75df247ff21501 \ + --hash=sha256:f0a90aba7d521e6954670550e561a4cb925713bd944445dbe9e729b71f6cabee \ + --hash=sha256:f93bc6892fe7b0663e5ffa83b61aab510aacffd58c16e012bb9352d489d90cb7 \ + --hash=sha256:fb1461c99de4d040666ca0444057b06541e5642f800b71c56e6ea92d6a853a0c + # via + # feast (setup.py) + # accelerate + # altair + # dask + # datasets + # db-dtypes + # docling-ibm-models + # easyocr + # faiss-cpu + # great-expectations + # 
ibis-framework + # imageio + # opencv-python-headless + # pandas + # pandas-gbq + # pyarrow + # qdrant-client + # ray + # safetensors + # scikit-image + # scikit-learn + # scipy + # shapely + # tifffile + # torchvision + # transformers +oauthlib==3.3.1 \ + --hash=sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9 \ + --hash=sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1 + # via requests-oauthlib +opencensus==0.11.4 \ + --hash=sha256:a18487ce68bc19900336e0ff4655c5a116daf10c1b3685ece8d971bddad6a864 \ + --hash=sha256:cbef87d8b8773064ab60e5c2a1ced58bbaa38a6d052c41aec224958ce544eff2 + # via ray +opencensus-context==0.1.3 \ + --hash=sha256:073bb0590007af276853009fac7e4bab1d523c3f03baf4cb4511ca38967c6039 \ + --hash=sha256:a03108c3c10d8c80bb5ddf5c8a1f033161fa61972a9917f9b9b3a18517f0088c + # via opencensus +opencv-python-headless==4.13.0.90 \ + --hash=sha256:0e0c8c9f620802fddc4fa7f471a1d263c7b0dca16cd9e7e2f996bb8bd2128c0c \ + --hash=sha256:12a28674f215542c9bf93338de1b5bffd76996d32da9acb9e739fdb9c8bbd738 \ + --hash=sha256:32255203040dc98803be96362e13f9e4bce20146898222d2e5c242f80de50da5 \ + --hash=sha256:96060fc57a1abb1144b0b8129e2ff3bfcdd0ccd8e8bd05bd85256ff4ed587d3b \ + --hash=sha256:dbc1f4625e5af3a80ebdbd84380227c0f445228588f2521b11af47710caca1ba \ + --hash=sha256:e13790342591557050157713af17a7435ac1b50c65282715093c9297fa045d8f \ + --hash=sha256:eba38bc255d0b7d1969c5bcc90a060ca2b61a3403b613872c750bfa5dfe9e03b \ + --hash=sha256:f46b17ea0aa7e4124ca6ad71143f89233ae9557f61d2326bcdb34329a1ddf9bd + # via easyocr +openpyxl==3.1.5 \ + --hash=sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2 \ + --hash=sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050 + # via docling +openshift-client==1.0.18 \ + --hash=sha256:be3979440cfd96788146a3a1650dabe939d4d516eea0b39f87e66d2ab39495b1 \ + --hash=sha256:d8a84080307ccd9556f6c62a3707a3e6507baedee36fa425754f67db9ded528b + # via codeflare-sdk 
+opentelemetry-api==1.39.1 \ + --hash=sha256:2edd8463432a7f8443edce90972169b195e7d6a05500cd29e6d13898187c9950 \ + --hash=sha256:fbde8c80e1b937a2c61f20347e91c0c18a1940cecf012d62e65a7caf08967c9c + # via + # opentelemetry-exporter-prometheus + # opentelemetry-sdk + # opentelemetry-semantic-conventions +opentelemetry-exporter-prometheus==0.60b1 \ + --hash=sha256:49f59178de4f4590e3cef0b8b95cf6e071aae70e1f060566df5546fad773b8fd \ + --hash=sha256:a4011b46906323f71724649d301b4dc188aaa068852e814f4df38cc76eac616b + # via ray +opentelemetry-proto==1.27.0 \ + --hash=sha256:33c9345d91dafd8a74fc3d7576c5a38f18b7fdf8d02983ac67485386132aedd6 \ + --hash=sha256:b133873de5581a50063e1e4b29cdcf0c5e253a8c2d8dc1229add20a4c3830ace + # via ray +opentelemetry-sdk==1.39.1 \ + --hash=sha256:4d5482c478513ecb0a5d938dcc61394e647066e0cc2676bee9f3af3f3f45f01c \ + --hash=sha256:cf4d4563caf7bff906c9f7967e2be22d0d6b349b908be0d90fb21c8e9c995cc6 + # via + # opentelemetry-exporter-prometheus + # ray +opentelemetry-semantic-conventions==0.60b1 \ + --hash=sha256:87c228b5a0669b748c76d76df6c364c369c28f1c465e50f661e39737e84bc953 \ + --hash=sha256:9fa8c8b0c110da289809292b0591220d3a7b53c1526a23021e977d68597893fb + # via opentelemetry-sdk +orjson==3.11.5 \ + --hash=sha256:0522003e9f7fba91982e83a97fec0708f5a714c96c4209db7104e6b9d132f111 \ + --hash=sha256:073aab025294c2f6fc0807201c76fdaed86f8fc4be52c440fb78fbb759a1ac09 \ + --hash=sha256:09b94b947ac08586af635ef922d69dc9bc63321527a3a04647f4986a73f4bd30 \ + --hash=sha256:1b280e2d2d284a6713b0cfec7b08918ebe57df23e3f76b27586197afca3cb1e9 \ + --hash=sha256:1b6bd351202b2cd987f35a13b5e16471cf4d952b42a73c391cc537974c43ef6d \ + --hash=sha256:1cbf2735722623fcdee8e712cbaaab9e372bbcb0c7924ad711b261c2eccf4a5c \ + --hash=sha256:1db2088b490761976c1b2e956d5d4e6409f3732e9d79cfa69f876c5248d1baf9 \ + --hash=sha256:23d04c4543e78f724c4dfe656b3791b5f98e4c9253e13b2636f1af5d90e4a880 \ + --hash=sha256:298d2451f375e5f17b897794bcc3e7b821c0f32b4788b9bcae47ada24d7f3cf7 \ + 
--hash=sha256:2b91126e7b470ff2e75746f6f6ee32b9ab67b7a93c8ba1d15d3a0caaf16ec875 \ + --hash=sha256:2cc79aaad1dfabe1bd2d50ee09814a1253164b3da4c00a78c458d82d04b3bdef \ + --hash=sha256:334e5b4bff9ad101237c2d799d9fd45737752929753bf4faf4b207335a416b7d \ + --hash=sha256:38b22f476c351f9a1c43e5b07d8b5a02eb24a6ab8e75f700f7d479d4568346a5 \ + --hash=sha256:3b01799262081a4c47c035dd77c1301d40f568f77cc7ec1bb7db5d63b0a01629 \ + --hash=sha256:3c8d8a112b274fae8c5f0f01954cb0480137072c271f3f4958127b010dfefaec \ + --hash=sha256:3fd15f9fc8c203aeceff4fda211157fad114dde66e92e24097b3647a08f4ee9e \ + --hash=sha256:42e8961196af655bb5e63ce6c60d25e8798cd4dfbc04f4203457fa3869322c2e \ + --hash=sha256:4bdd8d164a871c4ec773f9de0f6fe8769c2d6727879c37a9666ba4183b7f8228 \ + --hash=sha256:4dad582bc93cef8f26513e12771e76385a7e6187fd713157e971c784112aad56 \ + --hash=sha256:53deb5addae9c22bbe3739298f5f2196afa881ea75944e7720681c7080909a81 \ + --hash=sha256:54aae9b654554c3b4edd61896b978568c6daa16af96fa4681c9b5babd469f863 \ + --hash=sha256:59ac72ea775c88b163ba8d21b0177628bd015c5dd060647bbab6e22da3aad287 \ + --hash=sha256:5f0a2ae6f09ac7bd47d2d5a5305c1d9ed08ac057cda55bb0a49fa506f0d2da00 \ + --hash=sha256:5f691263425d3177977c8d1dd896cde7b98d93cbf390b2544a090675e83a6a0a \ + --hash=sha256:61026196a1c4b968e1b1e540563e277843082e9e97d78afa03eb89315af531f1 \ + --hash=sha256:61de247948108484779f57a9f406e4c84d636fa5a59e411e6352484985e8a7c3 \ + --hash=sha256:667c132f1f3651c14522a119e4dd631fad98761fa960c55e8e7430bb2a1ba4ac \ + --hash=sha256:67394d3becd50b954c4ecd24ac90b5051ee7c903d167459f93e77fc6f5b4c968 \ + --hash=sha256:69a0f6ac618c98c74b7fbc8c0172ba86f9e01dbf9f62aa0b1776c2231a7bffe5 \ + --hash=sha256:6af8680328c69e15324b5af3ae38abbfcf9cbec37b5346ebfd52339c3d7e8a18 \ + --hash=sha256:7339f41c244d0eea251637727f016b3d20050636695bc78345cce9029b189401 \ + --hash=sha256:7403851e430a478440ecc1258bcbacbfbd8175f9ac1e39031a7121dd0de05ff8 \ + --hash=sha256:75412ca06e20904c19170f8a24486c4e6c7887dea591ba18a1ab572f1300ee9f \ + 
--hash=sha256:75bc2e59e6a2ac1dd28901d07115abdebc4563b5b07dd612bf64260a201b1c7f \ + --hash=sha256:7bb2ce0b82bc9fd1168a513ddae7a857994b780b2945a8c51db4ab1c4b751ebc \ + --hash=sha256:7cce16ae2f5fb2c53c3eafdd1706cb7b6530a67cc1c17abe8ec747f5cd7c0c51 \ + --hash=sha256:801a821e8e6099b8c459ac7540b3c32dba6013437c57fdcaec205b169754f38c \ + --hash=sha256:82393ab47b4fe44ffd0a7659fa9cfaacc717eb617c93cde83795f14af5c2e9d5 \ + --hash=sha256:82cd00d49d6063d2b8791da5d4f9d20539c5951f965e45ccf4e96d33505ce68f \ + --hash=sha256:835f26fa24ba0bb8c53ae2a9328d1706135b74ec653ed933869b74b6909e63fd \ + --hash=sha256:86cfc555bfd5794d24c6a1903e558b50644e5e68e6471d66502ce5cb5fdef3f9 \ + --hash=sha256:894aea2e63d4f24a7f04a1908307c738d0dce992e9249e744b8f4e8dd9197f39 \ + --hash=sha256:8be318da8413cdbbce77b8c5fac8d13f6eb0f0db41b30bb598631412619572e8 \ + --hash=sha256:8d5f16195bb671a5dd3d1dbea758918bada8f6cc27de72bd64adfbd748770814 \ + --hash=sha256:9172578c4eb09dbfcf1657d43198de59b6cef4054de385365060ed50c458ac98 \ + --hash=sha256:92a8d676748fca47ade5bc3da7430ed7767afe51b2f8100e3cd65e151c0eaceb \ + --hash=sha256:9645ef655735a74da4990c24ffbd6894828fbfa117bc97c1edd98c282ecb52e1 \ + --hash=sha256:9c8494625ad60a923af6b2b0bd74107146efe9b55099e20d7740d995f338fcd8 \ + --hash=sha256:9cc1e55c884921434a84a0c3dd2699eb9f92e7b441d7f53f3941079ec6ce7499 \ + --hash=sha256:9df95000fbe6777bf9820ae82ab7578e8662051bb5f83d71a28992f539d2cda7 \ + --hash=sha256:a230065027bc2a025e944f9d4714976a81e7ecfa940923283bca7bbc1f10f626 \ + --hash=sha256:a261fef929bcf98a60713bf5e95ad067cea16ae345d9a35034e73c3990e927d2 \ + --hash=sha256:a4f3cb2d874e03bc7767c8f88adaa1a9a05cecea3712649c3b58589ec7317310 \ + --hash=sha256:a66d7769e98a08a12a139049aac2f0ca3adae989817f8c43337455fbc7669b85 \ + --hash=sha256:a86fe4ff4ea523eac8f4b57fdac319faf037d3c1be12405e6a7e86b3fbc4756a \ + --hash=sha256:aa0f513be38b40234c77975e68805506cad5d57b3dfd8fe3baa7f4f4051e15b4 \ + --hash=sha256:aa5e4244063db8e1d87e0f54c3f7522f14b2dc937e65d5241ef0076a096409fd \ + 
--hash=sha256:acbc5fac7e06777555b0722b8ad5f574739e99ffe99467ed63da98f97f9ca0fe \ + --hash=sha256:b29d36b60e606df01959c4b982729c8845c69d1963f88686608be9ced96dbfaa \ + --hash=sha256:b42ffbed9128e547a1647a3e50bc88ab28ae9daa61713962e0d3dd35e820c125 \ + --hash=sha256:b923c1c13fa02084eb38c9c065afd860a5cff58026813319a06949c3af5732ac \ + --hash=sha256:b9f86d69ae822cabc2a0f6c099b43e8733dda788405cba2665595b7e8dd8d167 \ + --hash=sha256:bb150d529637d541e6af06bbe3d02f5498d628b7f98267ff87647584293ab439 \ + --hash=sha256:c028a394c766693c5c9909dec76b24f37e6a1b91999e8d0c0d5feecbe93c3e05 \ + --hash=sha256:c0d87bd1896faac0d10b4f849016db81a63e4ec5df38757ffae84d45ab38aa71 \ + --hash=sha256:c0e5d9f7a0227df2927d343a6e3859bebf9208b427c79bd31949abcc2fa32fa5 \ + --hash=sha256:c2021afda46c1ed64d74b555065dbd4c2558d510d8cec5ea6a53001b3e5e82a9 \ + --hash=sha256:c2ed66358f32c24e10ceea518e16eb3549e34f33a9d51f99ce23b0251776a1ef \ + --hash=sha256:c404603df4865f8e0afe981aa3c4b62b406e6d06049564d58934860b62b7f91d \ + --hash=sha256:c74099c6b230d4261fdc3169d50efc09abf38ace1a42ea2f9994b1d79153d477 \ + --hash=sha256:ccc70da619744467d8f1f49a8cadae5ec7bbe054e5232d95f92ed8737f8c5870 \ + --hash=sha256:d4be86b58e9ea262617b8ca6251a2f0d63cc132a6da4b5fcc8e0a4128782c829 \ + --hash=sha256:d7345c759276b798ccd6d77a87136029e71e66a8bbf2d2755cbdde1d82e78706 \ + --hash=sha256:ddbfdb5099b3e6ba6d6ea818f61997bb66de14b411357d24c4612cf1ebad08ca \ + --hash=sha256:ddc21521598dbe369d83d4d40338e23d4101dad21dae0e79fa20465dbace019f \ + --hash=sha256:df9eadb2a6386d5ea2bfd81309c505e125cfc9ba2b1b99a97e60985b0b3665d1 \ + --hash=sha256:e08ca8a6c851e95aaecc32bc44a5aa75d0ad26af8cdac7c77e4ed93acf3d5b69 \ + --hash=sha256:e446a8ea0a4c366ceafc7d97067bfd55292969143b57e3c846d87fc701e797a0 \ + --hash=sha256:e46c762d9f0e1cfb4ccc8515de7f349abbc95b59cb5a2bd68df5973fdef913f8 \ + --hash=sha256:e607b49b1a106ee2086633167033afbd63f76f2999e9236f638b06b112b24ea7 \ + --hash=sha256:e697d06ad57dd0c7a737771d470eedc18e68dfdefcdd3b7de7f33dfda5b6212e \ + 
--hash=sha256:e8b5f96c05fce7d0218df3fdfeb962d6b8cfff7e3e20264306b46dd8b217c0f3 \ + --hash=sha256:ed24250e55efbcb0b35bed7caaec8cedf858ab2f9f2201f17b8938c618c8ca6f \ + --hash=sha256:fa1863e75b92891f553b7922ce4ee10ed06db061e104f2b7815de80cdcb135ad \ + --hash=sha256:fea7339bdd22e6f1060c55ac31b6a755d86a5b2ad3657f2669ec243f8e3b2bdb \ + --hash=sha256:ff770589960a86eae279f5d8aa536196ebda8273a2a07db2a54e82b93bc86626 \ + --hash=sha256:ff7877d376add4e16b274e35a3f58b7f37b362abf4aa31863dadacdd20e3a583 + # via trino +overrides==7.7.0 \ + --hash=sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a \ + --hash=sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49 + # via jupyter-server +packaging==24.2 \ + --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ + --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f + # via + # accelerate + # build + # dask + # datasets + # db-dtypes + # deprecation + # dunamai + # faiss-cpu + # google-cloud-bigquery + # great-expectations + # gunicorn + # huggingface-hub + # ibis-framework + # ibis-substrait + # ipykernel + # jupyter-events + # jupyter-server + # jupyterlab + # jupyterlab-server + # lazy-loader + # marshmallow + # nbconvert + # pandas-gbq + # pytest + # ray + # safetensors + # scikit-image + # snowflake-connector-python + # sphinx + # transformers +pandas==2.3.3 \ + --hash=sha256:0242fe9a49aa8b4d78a4fa03acb397a58833ef6199e9aa40a95f027bb3a1b6e7 \ + --hash=sha256:1611aedd912e1ff81ff41c745822980c49ce4a7907537be8692c8dbc31924593 \ + --hash=sha256:1b07204a219b3b7350abaae088f451860223a52cfb8a6c53358e7948735158e5 \ + --hash=sha256:1d37b5848ba49824e5c30bedb9c830ab9b7751fd049bc7914533e01c65f79791 \ + --hash=sha256:23ebd657a4d38268c7dfbdf089fbc31ea709d82e4923c5ffd4fbd5747133ce73 \ + --hash=sha256:2462b1a365b6109d275250baaae7b760fd25c726aaca0054649286bcfbb3e8ec \ + --hash=sha256:28083c648d9a99a5dd035ec125d42439c6c1c525098c58af0fc38dd1a7a1b3d4 \ + 
--hash=sha256:2e3ebdb170b5ef78f19bfb71b0dc5dc58775032361fa188e814959b74d726dd5 \ + --hash=sha256:318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac \ + --hash=sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084 \ + --hash=sha256:376c6446ae31770764215a6c937f72d917f214b43560603cd60da6408f183b6c \ + --hash=sha256:3869faf4bd07b3b66a9f462417d0ca3a9df29a9f6abd5d0d0dbab15dac7abe87 \ + --hash=sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35 \ + --hash=sha256:4793891684806ae50d1288c9bae9330293ab4e083ccd1c5e383c34549c6e4250 \ + --hash=sha256:4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c \ + --hash=sha256:503cf027cf9940d2ceaa1a93cfb5f8c8c7e6e90720a2850378f0b3f3b1e06826 \ + --hash=sha256:5554c929ccc317d41a5e3d1234f3be588248e61f08a74dd17c9eabb535777dc9 \ + --hash=sha256:56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713 \ + --hash=sha256:5caf26f64126b6c7aec964f74266f435afef1c1b13da3b0636c7518a1fa3e2b1 \ + --hash=sha256:602b8615ebcc4a0c1751e71840428ddebeb142ec02c786e8ad6b1ce3c8dec523 \ + --hash=sha256:6253c72c6a1d990a410bc7de641d34053364ef8bcd3126f7e7450125887dffe3 \ + --hash=sha256:6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78 \ + --hash=sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53 \ + --hash=sha256:6d2cefc361461662ac48810cb14365a365ce864afe85ef1f447ff5a1e99ea81c \ + --hash=sha256:74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21 \ + --hash=sha256:75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5 \ + --hash=sha256:854d00d556406bffe66a4c0802f334c9ad5a96b4f1f868adf036a21b11ef13ff \ + --hash=sha256:8fe25fc7b623b0ef6b5009149627e34d2a4657e880948ec3c840e9402e5c1b45 \ + --hash=sha256:900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110 \ + --hash=sha256:93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493 \ + --hash=sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b \ + 
--hash=sha256:a21d830e78df0a515db2b3d2f5570610f5e6bd2e27749770e8bb7b524b89b450 \ + --hash=sha256:a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86 \ + --hash=sha256:a637c5cdfa04b6d6e2ecedcb81fc52ffb0fd78ce2ebccc9ea964df9f658de8c8 \ + --hash=sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98 \ + --hash=sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89 \ + --hash=sha256:b468d3dad6ff947df92dcb32ede5b7bd41a9b3cceef0a30ed925f6d01fb8fa66 \ + --hash=sha256:b98560e98cb334799c0b07ca7967ac361a47326e9b4e5a7dfb5ab2b1c9d35a1b \ + --hash=sha256:bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8 \ + --hash=sha256:bf1f8a81d04ca90e32a0aceb819d34dbd378a98bf923b6398b9a3ec0bf44de29 \ + --hash=sha256:c46467899aaa4da076d5abc11084634e2d197e9460643dd455ac3db5856b24d6 \ + --hash=sha256:c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc \ + --hash=sha256:c503ba5216814e295f40711470446bc3fd00f0faea8a086cbc688808e26f92a2 \ + --hash=sha256:d051c0e065b94b7a3cea50eb1ec32e912cd96dba41647eb24104b6c6c14c5788 \ + --hash=sha256:d3e28b3e83862ccf4d85ff19cf8c20b2ae7e503881711ff2d534dc8f761131aa \ + --hash=sha256:db4301b2d1f926ae677a751eb2bd0e8c5f5319c9cb3f88b0becbbb0b07b34151 \ + --hash=sha256:dd7478f1463441ae4ca7308a70e90b33470fa593429f9d4c578dd00d1fa78838 \ + --hash=sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b \ + --hash=sha256:e19d192383eab2f4ceb30b412b22ea30690c9e618f78870357ae1d682912015a \ + --hash=sha256:e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d \ + --hash=sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908 \ + --hash=sha256:ee15f284898e7b246df8087fc82b87b01686f98ee67d85a17b7ab44143a3a9a0 \ + --hash=sha256:ee67acbbf05014ea6c763beb097e03cd629961c8a632075eeb34247120abcb4b \ + --hash=sha256:f086f6fe114e19d92014a1966f43a3e62285109afe874f067f5abbdcbb10e59c \ + --hash=sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee + # via + # 
feast (setup.py) + # altair + # dask + # datasets + # db-dtypes + # docling + # docling-core + # google-cloud-bigquery + # great-expectations + # ibis-framework + # pandas-gbq + # pymilvus + # ray + # snowflake-connector-python +pandas-gbq==0.30.0 \ + --hash=sha256:8fe811786e4ad2e0d4608e897534207d9fbe768ab3168f766a99f0cb4cd5ed20 \ + --hash=sha256:d9b4454b17aee3c23ef1dfcfd91df6e2b77f1e69e1e4b28467701cd75850664a + # via google-cloud-bigquery +pandocfilters==1.5.1 \ + --hash=sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e \ + --hash=sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc + # via nbconvert +paramiko==4.0.0 \ + --hash=sha256:0e20e00ac666503bf0b4eda3b6d833465a2b7aff2e2b3d79a8bba5ef144ee3b9 \ + --hash=sha256:6a25f07b380cc9c9a88d2b920ad37167ac4667f8d9886ccebd8f90f654b5d69f + # via openshift-client +parsimonious==0.11.0 \ + --hash=sha256:32e3818abf9f05b3b9f3b6d87d128645e30177e91f614d2277d88a0aea98fae2 \ + --hash=sha256:e080377d98957beec053580d38ae54fcdf7c470fb78670ba4bf8b5f9d5cad2a9 + # via singlestoredb +parso==0.8.5 \ + --hash=sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a \ + --hash=sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887 + # via jedi +parsy==2.2 \ + --hash=sha256:5e981613d9d2d8b68012d1dd0afe928967bea2e4eefdb76c2f545af0dd02a9e7 \ + --hash=sha256:e943147644a8cf0d82d1bcb5c5867dd517495254cea3e3eb058b1e421cb7561f + # via ibis-framework +partd==1.4.2 \ + --hash=sha256:978e4ac767ec4ba5b86c6eaa52e5a2a3bc748a2ca839e8cc798f1cc6ce6efb0f \ + --hash=sha256:d022c33afbdc8405c226621b015e8067888173d85f7f5ecebb3cafed9a20f02c + # via dask +pbr==7.0.3 \ + --hash=sha256:b46004ec30a5324672683ec848aed9e8fc500b0d261d40a3229c2d2bbfcedc29 \ + --hash=sha256:ff223894eb1cd271a98076b13d3badff3bb36c424074d26334cd25aebeecea6b + # via mock +pexpect==4.9.0 \ + --hash=sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523 \ + 
--hash=sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f + # via ipython +pillow==11.3.0 \ + --hash=sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2 \ + --hash=sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214 \ + --hash=sha256:040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e \ + --hash=sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59 \ + --hash=sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50 \ + --hash=sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632 \ + --hash=sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06 \ + --hash=sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a \ + --hash=sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51 \ + --hash=sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced \ + --hash=sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f \ + --hash=sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12 \ + --hash=sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8 \ + --hash=sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6 \ + --hash=sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580 \ + --hash=sha256:19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f \ + --hash=sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac \ + --hash=sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860 \ + --hash=sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd \ + --hash=sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722 \ + --hash=sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8 \ + --hash=sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4 \ + 
--hash=sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673 \ + --hash=sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788 \ + --hash=sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542 \ + --hash=sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e \ + --hash=sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd \ + --hash=sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8 \ + --hash=sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523 \ + --hash=sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967 \ + --hash=sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809 \ + --hash=sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477 \ + --hash=sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027 \ + --hash=sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae \ + --hash=sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b \ + --hash=sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c \ + --hash=sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f \ + --hash=sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e \ + --hash=sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b \ + --hash=sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7 \ + --hash=sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27 \ + --hash=sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361 \ + --hash=sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae \ + --hash=sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d \ + --hash=sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc \ + --hash=sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58 \ + 
--hash=sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad \ + --hash=sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6 \ + --hash=sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024 \ + --hash=sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978 \ + --hash=sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb \ + --hash=sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d \ + --hash=sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0 \ + --hash=sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9 \ + --hash=sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f \ + --hash=sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874 \ + --hash=sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa \ + --hash=sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081 \ + --hash=sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149 \ + --hash=sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6 \ + --hash=sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d \ + --hash=sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd \ + --hash=sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f \ + --hash=sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c \ + --hash=sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31 \ + --hash=sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e \ + --hash=sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db \ + --hash=sha256:89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6 \ + --hash=sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f \ + --hash=sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494 \ + 
--hash=sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69 \ + --hash=sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94 \ + --hash=sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77 \ + --hash=sha256:97afb3a00b65cc0804d1c7abddbf090a81eaac02768af58cbdcaaa0a931e0b6d \ + --hash=sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7 \ + --hash=sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a \ + --hash=sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438 \ + --hash=sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288 \ + --hash=sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b \ + --hash=sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635 \ + --hash=sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3 \ + --hash=sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d \ + --hash=sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe \ + --hash=sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0 \ + --hash=sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe \ + --hash=sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a \ + --hash=sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805 \ + --hash=sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8 \ + --hash=sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36 \ + --hash=sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a \ + --hash=sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b \ + --hash=sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e \ + --hash=sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25 \ + --hash=sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12 \ + 
--hash=sha256:e5c5858ad8ec655450a7c7df532e9842cf8df7cc349df7225c60d5d348c8aada \ + --hash=sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c \ + --hash=sha256:ea944117a7974ae78059fcc1800e5d3295172bb97035c0c1d9345fca1419da71 \ + --hash=sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d \ + --hash=sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c \ + --hash=sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6 \ + --hash=sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1 \ + --hash=sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50 \ + --hash=sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653 \ + --hash=sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c \ + --hash=sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4 \ + --hash=sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3 + # via + # feast (setup.py) + # docling + # docling-core + # docling-ibm-models + # docling-parse + # easyocr + # imageio + # python-pptx + # scikit-image + # torchvision +pip==25.3 \ + --hash=sha256:8d0538dbbd7babbd207f261ed969c65de439f6bc9e5dbd3b3b9a77f25d95f343 \ + --hash=sha256:9655943313a94722b7774661c21049070f6bbb0a1516bf02f7c8d5d9201514cd + # via pip-tools +pip-tools==7.5.2 \ + --hash=sha256:2d64d72da6a044da1110257d333960563d7a4743637e8617dd2610ae7b82d60f \ + --hash=sha256:2fe16db727bbe5bf28765aeb581e792e61be51fc275545ef6725374ad720a1ce + # via feast (setup.py) +platformdirs==3.11.0 \ + --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ + --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e + # via + # jupyter-core + # snowflake-connector-python + # virtualenv +pluggy==1.6.0 \ + --hash=sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3 \ + 
--hash=sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746 + # via + # docling + # pytest + # pytest-cov +ply==3.11 \ + --hash=sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3 \ + --hash=sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce + # via thriftpy2 +poetry-core==1.9.1 \ + --hash=sha256:6f45dd3598e0de8d9b0367360253d4c5d4d0110c8f5c71120a14f0e0f116c1a0 \ + --hash=sha256:7a2d49214bf58b4f17f99d6891d947a9836c9899a67a5069f52d7b67217f61b8 + # via feast (setup.py) +poetry-dynamic-versioning==1.9.1 \ + --hash=sha256:65a0c814e6d30d4807734a3c34edf261fd7cc3b340dbd23b6a33ee41f7d0b547 \ + --hash=sha256:d6e7b9df817aa2ca4946cd695c6c89e1379d2e6c640f008a9b6170d081a9da48 + # via feast (setup.py) +portalocker==3.2.0 \ + --hash=sha256:1f3002956a54a8c3730586c5c77bf18fae4149e07eaf1c29fc3faf4d5a3f89ac \ + --hash=sha256:3cdc5f565312224bc570c49337bd21428bba0ef363bbcf58b9ef4a9f11779968 + # via qdrant-client +pre-commit==3.3.1 \ + --hash=sha256:218e9e3f7f7f3271ebc355a15598a4d3893ad9fc7b57fe446db75644543323b9 \ + --hash=sha256:733f78c9a056cdd169baa6cd4272d51ecfda95346ef8a89bf93712706021b907 + # via feast (setup.py) +prometheus-client==0.24.1 \ + --hash=sha256:150db128af71a5c2482b36e588fc8a6b95e498750da4b17065947c16070f4055 \ + --hash=sha256:7e0ced7fbbd40f7b84962d5d2ab6f17ef88a72504dcf7c0b40737b43b2a461f9 + # via + # feast (setup.py) + # jupyter-server + # opentelemetry-exporter-prometheus + # ray +prompt-toolkit==3.0.52 \ + --hash=sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855 \ + --hash=sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955 + # via ipython +propcache==0.4.1 \ + --hash=sha256:0002004213ee1f36cfb3f9a42b5066100c44276b9b72b4e1504cddd3d692e86e \ + --hash=sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4 \ + --hash=sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be \ + 
--hash=sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3 \ + --hash=sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85 \ + --hash=sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b \ + --hash=sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367 \ + --hash=sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf \ + --hash=sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393 \ + --hash=sha256:182b51b421f0501952d938dc0b0eb45246a5b5153c50d42b495ad5fb7517c888 \ + --hash=sha256:1cdb7988c4e5ac7f6d175a28a9aa0c94cb6f2ebe52756a3c0cda98d2809a9e37 \ + --hash=sha256:1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8 \ + --hash=sha256:1f0978529a418ebd1f49dad413a2b68af33f85d5c5ca5c6ca2a3bed375a7ac60 \ + --hash=sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1 \ + --hash=sha256:296f4c8ed03ca7476813fe666c9ea97869a8d7aec972618671b33a38a5182ef4 \ + --hash=sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717 \ + --hash=sha256:2b16ec437a8c8a965ecf95739448dd938b5c7f56e67ea009f4300d8df05f32b7 \ + --hash=sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc \ + --hash=sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe \ + --hash=sha256:357f5bb5c377a82e105e44bd3d52ba22b616f7b9773714bff93573988ef0a5fb \ + --hash=sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75 \ + --hash=sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6 \ + --hash=sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e \ + --hash=sha256:3d233076ccf9e450c8b3bc6720af226b898ef5d051a2d145f7d765e6e9f9bcff \ + --hash=sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566 \ + --hash=sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12 \ + --hash=sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367 \ + 
--hash=sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874 \ + --hash=sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf \ + --hash=sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566 \ + --hash=sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a \ + --hash=sha256:4b536b39c5199b96fc6245eb5fb796c497381d3942f169e44e8e392b29c9ebcc \ + --hash=sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a \ + --hash=sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1 \ + --hash=sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6 \ + --hash=sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61 \ + --hash=sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726 \ + --hash=sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49 \ + --hash=sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44 \ + --hash=sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af \ + --hash=sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa \ + --hash=sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153 \ + --hash=sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc \ + --hash=sha256:5d4e2366a9c7b837555cf02fb9be2e3167d333aff716332ef1b7c3a142ec40c5 \ + --hash=sha256:5fd37c406dd6dc85aa743e214cef35dc54bbdd1419baac4f6ae5e5b1a2976938 \ + --hash=sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf \ + --hash=sha256:66c1f011f45a3b33d7bcb22daed4b29c0c9e2224758b6be00686731e1b46f925 \ + --hash=sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8 \ + --hash=sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c \ + --hash=sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85 \ + --hash=sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e \ + 
--hash=sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0 \ + --hash=sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1 \ + --hash=sha256:71b749281b816793678ae7f3d0d84bd36e694953822eaad408d682efc5ca18e0 \ + --hash=sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992 \ + --hash=sha256:7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db \ + --hash=sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f \ + --hash=sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d \ + --hash=sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1 \ + --hash=sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e \ + --hash=sha256:8c9b3cbe4584636d72ff556d9036e0c9317fa27b3ac1f0f558e7e84d1c9c5900 \ + --hash=sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89 \ + --hash=sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a \ + --hash=sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b \ + --hash=sha256:948dab269721ae9a87fd16c514a0a2c2a1bdb23a9a61b969b0f9d9ee2968546f \ + --hash=sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f \ + --hash=sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1 \ + --hash=sha256:99d43339c83aaf4d32bda60928231848eee470c6bda8d02599cc4cebe872d183 \ + --hash=sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66 \ + --hash=sha256:9a52009f2adffe195d0b605c25ec929d26b36ef986ba85244891dee3b294df21 \ + --hash=sha256:9d2b6caef873b4f09e26ea7e33d65f42b944837563a47a94719cc3544319a0db \ + --hash=sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded \ + --hash=sha256:a0ee98db9c5f80785b266eb805016e36058ac72c51a064040f2bc43b61101cdb \ + --hash=sha256:a129e76735bc792794d5177069691c3217898b9f5cee2b2661471e52ffe13f19 \ + --hash=sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0 \ + 
--hash=sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165 \ + --hash=sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778 \ + --hash=sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455 \ + --hash=sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f \ + --hash=sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b \ + --hash=sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237 \ + --hash=sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81 \ + --hash=sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859 \ + --hash=sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c \ + --hash=sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835 \ + --hash=sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393 \ + --hash=sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5 \ + --hash=sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641 \ + --hash=sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144 \ + --hash=sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74 \ + --hash=sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db \ + --hash=sha256:cbc3b6dfc728105b2a57c06791eb07a94229202ea75c59db644d7d496b698cac \ + --hash=sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403 \ + --hash=sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9 \ + --hash=sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f \ + --hash=sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311 \ + --hash=sha256:d82ad62b19645419fe79dd63b3f9253e15b30e955c0170e5cebc350c1844e581 \ + --hash=sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36 \ + --hash=sha256:daede9cd44e0f8bdd9e6cc9a607fc81feb80fae7a5fc6cecaff0e0bb32e42d00 \ + 
--hash=sha256:db65d2af507bbfbdcedb254a11149f894169d90488dd3e7190f7cdcb2d6cd57a \ + --hash=sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f \ + --hash=sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2 \ + --hash=sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7 \ + --hash=sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239 \ + --hash=sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757 \ + --hash=sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72 \ + --hash=sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9 \ + --hash=sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4 \ + --hash=sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24 \ + --hash=sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207 \ + --hash=sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e \ + --hash=sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1 \ + --hash=sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d \ + --hash=sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37 \ + --hash=sha256:f93243fdc5657247533273ac4f86ae106cc6445a0efacb9a1bfe982fcfefd90c \ + --hash=sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e \ + --hash=sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570 \ + --hash=sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af \ + --hash=sha256:fd138803047fb4c062b1c1dd95462f5209456bfab55c734458f15d11da288f8f \ + --hash=sha256:fd2dbc472da1f772a4dae4fa24be938a6c544671a912e30529984dd80400cd88 \ + --hash=sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48 \ + --hash=sha256:fe49d0a85038f36ba9e3ffafa1103e61170b28e95b16622e11be0a0ea07c6781 + # via + # aiohttp + # yarl +proto-plus==1.27.0 \ + 
--hash=sha256:1baa7f81cf0f8acb8bc1f6d085008ba4171eaf669629d1b6d1673b21ed1c0a82 \ + --hash=sha256:873af56dd0d7e91836aee871e5799e1c6f1bda86ac9a983e0bb9f0c266a568c4 + # via + # google-api-core + # google-cloud-bigquery-storage + # google-cloud-bigtable + # google-cloud-datastore +protobuf==4.25.8 \ + --hash=sha256:077ff8badf2acf8bc474406706ad890466274191a48d0abd3bd6987107c9cde5 \ + --hash=sha256:15a0af558aa3b13efef102ae6e4f3efac06f1eea11afb3a57db2901447d9fb59 \ + --hash=sha256:27d498ffd1f21fb81d987a041c32d07857d1d107909f5134ba3350e1ce80a4af \ + --hash=sha256:504435d831565f7cfac9f0714440028907f1975e4bed228e58e72ecfff58a1e0 \ + --hash=sha256:6135cf8affe1fc6f76cced2641e4ea8d3e59518d1f24ae41ba97bcad82d397cd \ + --hash=sha256:83e6e54e93d2b696a92cad6e6efc924f3850f82b52e1563778dfab8b355101b0 \ + --hash=sha256:9ad7ef62d92baf5a8654fbb88dac7fa5594cfa70fd3440488a5ca3bfc6d795a7 \ + --hash=sha256:bd551eb1fe1d7e92c1af1d75bdfa572eff1ab0e5bf1736716814cdccdb2360f9 \ + --hash=sha256:ca809b42f4444f144f2115c4c1a747b9a404d590f18f37e9402422033e464e0f \ + --hash=sha256:d552c53d0415449c8d17ced5c341caba0d89dbf433698e1436c8fa0aae7808a3 \ + --hash=sha256:f4510b93a3bec6eba8fd8f1093e9d7fb0d4a24d1a81377c10c0e5bbfe9e4ed24 + # via + # feast (setup.py) + # google-api-core + # google-cloud-bigquery-storage + # google-cloud-bigtable + # google-cloud-datastore + # googleapis-common-protos + # grpc-google-iam-v1 + # grpcio-health-checking + # grpcio-reflection + # grpcio-status + # grpcio-testing + # grpcio-tools + # ikvpy + # mypy-protobuf + # opentelemetry-proto + # proto-plus + # pymilvus + # qdrant-client + # ray + # substrait +psutil==5.9.0 \ + --hash=sha256:072664401ae6e7c1bfb878c65d7282d4b4391f1bc9a56d5e03b5a490403271b5 \ + --hash=sha256:1070a9b287846a21a5d572d6dddd369517510b68710fca56b0e9e02fd24bed9a \ + --hash=sha256:1d7b433519b9a38192dfda962dd8f44446668c009833e1429a52424624f408b4 \ + --hash=sha256:3151a58f0fbd8942ba94f7c31c7e6b310d2989f4da74fcbf28b934374e9bf841 \ + 
--hash=sha256:32acf55cb9a8cbfb29167cd005951df81b567099295291bcfd1027365b36591d \ + --hash=sha256:3611e87eea393f779a35b192b46a164b1d01167c9d323dda9b1e527ea69d697d \ + --hash=sha256:3d00a664e31921009a84367266b35ba0aac04a2a6cad09c550a89041034d19a0 \ + --hash=sha256:4e2fb92e3aeae3ec3b7b66c528981fd327fb93fd906a77215200404444ec1845 \ + --hash=sha256:539e429da49c5d27d5a58e3563886057f8fc3868a5547b4f1876d9c0f007bccf \ + --hash=sha256:55ce319452e3d139e25d6c3f85a1acf12d1607ddedea5e35fb47a552c051161b \ + --hash=sha256:58c7d923dc209225600aec73aa2c4ae8ea33b1ab31bc11ef8a5933b027476f07 \ + --hash=sha256:7336292a13a80eb93c21f36bde4328aa748a04b68c13d01dfddd67fc13fd0618 \ + --hash=sha256:742c34fff804f34f62659279ed5c5b723bb0195e9d7bd9907591de9f8f6558e2 \ + --hash=sha256:7641300de73e4909e5d148e90cc3142fb890079e1525a840cf0dfd39195239fd \ + --hash=sha256:76cebf84aac1d6da5b63df11fe0d377b46b7b500d892284068bacccf12f20666 \ + --hash=sha256:7779be4025c540d1d65a2de3f30caeacc49ae7a2152108adeaf42c7534a115ce \ + --hash=sha256:7d190ee2eaef7831163f254dc58f6d2e2a22e27382b936aab51c835fc080c3d3 \ + --hash=sha256:8293942e4ce0c5689821f65ce6522ce4786d02af57f13c0195b40e1edb1db61d \ + --hash=sha256:869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25 \ + --hash=sha256:90a58b9fcae2dbfe4ba852b57bd4a1dded6b990a33d6428c7614b7d48eccb492 \ + --hash=sha256:9b51917c1af3fa35a3f2dabd7ba96a2a4f19df3dec911da73875e1edaf22a40b \ + --hash=sha256:b2237f35c4bbae932ee98902a08050a27821f8f6dfa880a47195e5993af4702d \ + --hash=sha256:c3400cae15bdb449d518545cbd5b649117de54e3596ded84aacabfbb3297ead2 \ + --hash=sha256:c51f1af02334e4b516ec221ee26b8fdf105032418ca5a5ab9737e8c87dafe203 \ + --hash=sha256:cb8d10461c1ceee0c25a64f2dd54872b70b89c26419e147a05a10b753ad36ec2 \ + --hash=sha256:d62a2796e08dd024b8179bd441cb714e0f81226c352c802fca0fd3f89eeacd94 \ + --hash=sha256:df2c8bd48fb83a8408c8390b143c6a6fa10cb1a674ca664954de193fdcab36a9 \ + --hash=sha256:e5c783d0b1ad6ca8a5d3e7b680468c9c926b804be83a3a8e95141b05c39c9f64 \ + 
--hash=sha256:e9805fed4f2a81de98ae5fe38b75a74c6e6ad2df8a5c479594c7629a1fe35f56 \ + --hash=sha256:ea42d747c5f71b5ccaa6897b216a7dadb9f52c72a0fe2b872ef7d3e1eacf3ba3 \ + --hash=sha256:ef216cc9feb60634bda2f341a9559ac594e2eeaadd0ba187a4c2eb5b5d40b91c \ + --hash=sha256:ff0d41f8b3e9ebb6b6110057e40019a432e96aae2008951121ba4e56040b84f3 + # via + # feast (setup.py) + # accelerate + # ipykernel +psycopg[binary, pool]==3.2.5 \ + --hash=sha256:b782130983e5b3de30b4c529623d3687033b4dafa05bb661fc6bf45837ca5879 \ + --hash=sha256:f5f750611c67cb200e85b408882f29265c66d1de7f813add4f8125978bfd70e8 + # via feast (setup.py) +psycopg-binary==3.2.5 \ + --hash=sha256:02fb96091e2fb3ea1470b113fef08953baaedbca1d39a3f72d82cb615177846c \ + --hash=sha256:11e3ed8b94c750d54fc3e4502dd930fb0fd041629845b6a7ce089873ac9756b0 \ + --hash=sha256:1494827c43265820d5dcdc6f8086521bc7dd04b9da8831310978a788cdcd2e62 \ + --hash=sha256:21b839f9bfd77ed074f7f71464a43f453400c57d038a0ba0716329a28e335897 \ + --hash=sha256:23a1dc61abb8f7cc702472ab29554167a9421842f976c201ceb3b722c0299769 \ + --hash=sha256:274e852f9e61252bc8e80a0a43d300ba352d40219e856733054023a3bb960eb4 \ + --hash=sha256:28bd5cb2324567e5e70f07fe1d646398d6b0e210e28b49be0e69593590a59980 \ + --hash=sha256:2b053eae21dd3a6828b516a1171e1274d1af5f7c07d2d9a8f597f2e19c732168 \ + --hash=sha256:2cbb8649cfdacbd14e17f5ab78edc52d33350013888518c73e90c5d17d7bea55 \ + --hash=sha256:2cc86657c05e09c701e97f87132cd58e0d55381dd568520081ac1fe7580a9bbb \ + --hash=sha256:2d10ce4c39eb9631381a0c3792727946a4391e843625a7ee9579ac6bb11495a5 \ + --hash=sha256:2d22a15e45f43d36ed35aed4d5261f8ef6ab7d9b84ee075576ca56ae03b9e0aa \ + --hash=sha256:2dbaf32c18c0d11c4480016b89c9c5cadb7b64c55de7f181d222b189bd13a558 \ + --hash=sha256:32b5673736f04c36ccbf8012800fe5bc01b46dac22c5d59e41b043bebaad9d3d \ + --hash=sha256:375149006e21d58ed8aba640e0295d8e636043064c433af94eb58057f9b96877 \ + --hash=sha256:393ab353196d364858b47317d27804ecc58ab56dbde32217bd67f0f2f2980662 \ + 
--hash=sha256:39e2cd10bf15442d95c3f48376b25dc33360418ea6c3c05884d8bf42407768c0 \ + --hash=sha256:3d2e57a1d06f3968e49e948ba374f21a7d8dcf44f37d582a4aeddeb7c85ce239 \ + --hash=sha256:3eb71cfc35116e4a8e336b7e785f1fe06ca23b4516a48ea91facd577d1a1fdf6 \ + --hash=sha256:3f893c0ed3d5c7b83b76b1f8f7d3ca5a03e38bcd3cab5d65b5c25a0d1064aca4 \ + --hash=sha256:473f6827cf1faf3924eb77146d1e85126a1b5e48a88053b8d8b78dd29e971d78 \ + --hash=sha256:48f97936145cb7de18b95d85670b2d3e2c257277263272be05815b74fb0ef195 \ + --hash=sha256:48fcb12a0a72fdfe4102bdb1252a7366e8d73a2c89fe6ce5923be890de367c2f \ + --hash=sha256:4914dc60f2fddf0884464985e31d775aa865b665471fa156ec2f56fa72a1a097 \ + --hash=sha256:51a96d9fe51f718912b4a0089784f1f32d800217499fd0f0095b888506aba4c5 \ + --hash=sha256:5244bebaa9734a236b7157fb57c065b6c0f2344281916187bd73f951df1899e0 \ + --hash=sha256:5b81342e139ddccfa417832089cd213bd4beacd7a1462ca4019cafe71682d177 \ + --hash=sha256:5d2253189aa4cca0a425e2ca896d1a29760cd3a2b10ab12194e4e827a566505c \ + --hash=sha256:5fd017d7ed71c58f19b0f614e7bfb8f01ec862bacb67ae584f494d090956102e \ + --hash=sha256:605f70e267222d567fc40de7813ee3fb29f8145a1a20aa6fd3dc62baba9312f1 \ + --hash=sha256:60d0f36a42a822e43c4c7472df8a0c980c0f32e5d74ed871333c423a4e942f11 \ + --hash=sha256:62965045cc0fe3dc5dd55d39779620b225ef75962825c7b1b533033cb91810bd \ + --hash=sha256:65162a9cc3f86d70b1d895dbda506e3c079f80d082eb41c54d3f6d33a00b3965 \ + --hash=sha256:659f2c675d478b1bc01b95a8d3ded74fa939b370e71ffbecd496f617b215eb05 \ + --hash=sha256:6b581da13126b8715c0c0585cd37ce934c9864d44b2a4019f5487c0b943275e6 \ + --hash=sha256:71d82dbc7c6c7f5746468e7992e5483aa45b12250d78d220a2431ab88795825c \ + --hash=sha256:7376b13504396da9678b646f5338462347da01286b2a688a0d8493ec764683a2 \ + --hash=sha256:7623659d44a6aa032be4a066c658ba45009d768c2481526fbef7c609702af116 \ + --hash=sha256:7a94020821723a6a210206ddb458001f3ed27e1e6a0555b9422bebf7ead8ff37 \ + --hash=sha256:7d5f1bfc848a94e0d63fe693adee4f88bd9e5c415ecb4c9c17d2d44eba6795a6 \ + 
--hash=sha256:7efe6c732fd2d7e22d72dc4f7cf9b644020adacfff61b0a8a151343da8e661c0 \ + --hash=sha256:8a602d9fdb567cca090ca19ac3ebf10219065be2a4f8cf9eb8356cffb5a7ab1d \ + --hash=sha256:8cd9ebf335262e864d740f9dad3f672f61162cc0d4825a5eb5cf50df334a688f \ + --hash=sha256:8e6f2bef5aed021fbdf46323d3cd8847bf960efb56394698644a8ee2306f8892 \ + --hash=sha256:93221d5a759bd39b1face1d7d887d2b9ede3e55aefaff8eacf1b663ccdcd204b \ + --hash=sha256:9639289b72f9339721982e156527c296693236d6192ccc31412ab36fccd1683c \ + --hash=sha256:98efaedf2bf79f4d563ca039a57a025b72847bd80568f54709cc39fc1404772c \ + --hash=sha256:9abe093a303e25ac58774a11241150e2fe2947358d1ca12521ad03c90b131060 \ + --hash=sha256:a4321ee8180982d70458d3e8378e31448901bf0ee40fe0d410a87413578f4098 \ + --hash=sha256:a82211a43372cba9b1555a110e84e679deec2dc9463ae4c736977dad99dca5ed \ + --hash=sha256:a91b0e096fdfeb52d86bb8f5ee25dc22483d6960af9b968e6b381a8ec5bfbf82 \ + --hash=sha256:b5e0acbc991472188c9df40eb56d8a97ad3ad00d4de560b8b74bdc2d94041a8f \ + --hash=sha256:b6b5a4542aca4095ab35e184517cb0d18895ba4b6661c92865b431fa7b7974d8 \ + --hash=sha256:ba4a610882171bdaae0779f14e0ff45f3ee271fd2dbf16cdadfc81bd67323232 \ + --hash=sha256:bc5bd9bf5f5894923b78a41c5becd52d6bced1e1e43744855bd85cb341376ca6 \ + --hash=sha256:c37eb3be7a6be93f4925ccf52bbfa60244da6c63201770a709dd81a3d2d08534 \ + --hash=sha256:c3c5fa3d4fa0a651cefab391b783f89bc5e331afa0a4e93c9b16141993fa05c8 \ + --hash=sha256:ca5e36a3e7480a5c09aed99ecdb8e6554b21485c3b064297fe77f7b1b5806106 \ + --hash=sha256:d4e0c1b1aa5283f6d9a384ffc7a8400d25386bb98fdb9bddae446e4ef4da7366 \ + --hash=sha256:dc8bc40d82d1ee8dec136e10707c7f3147a6322fd8014e174a0f3446fb793649 \ + --hash=sha256:de576c49d7deab2b78088feb24e1f6ae3e16a0020e8496cdd3b8543f5e350e87 \ + --hash=sha256:e7d215a43343d91ba08301865f059d9518818d66a222a85fb425e4156716f5a6 \ + --hash=sha256:eb8293d66c6a4ddc72fceb7ad0e111cb196cc394954ae0f9b63c251d97f1b00e \ + --hash=sha256:ee6d8f489a9b116ea8dc797664a50671585a4ca20573359f067858e1231cc217 \ + 
--hash=sha256:efb878d08dd49d7d9d18512e791b418a1171d08f935475eec98305f0886b7c14 + # via psycopg +psycopg-pool==3.3.0 \ + --hash=sha256:2e44329155c410b5e8666372db44276a8b1ebd8c90f1c3026ebba40d4bc81063 \ + --hash=sha256:fa115eb2860bd88fce1717d75611f41490dec6135efb619611142b24da3f6db5 + # via psycopg +ptyprocess==0.7.0 \ + --hash=sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35 \ + --hash=sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220 + # via + # pexpect + # terminado +pure-eval==0.2.3 \ + --hash=sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0 \ + --hash=sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42 + # via stack-data +py==1.11.0 \ + --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ + --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 + # via feast (setup.py) +py-cpuinfo==9.0.0 \ + --hash=sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690 \ + --hash=sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5 + # via pytest-benchmark +py-spy==0.4.1 \ + --hash=sha256:1fb8bf71ab8df95a95cc387deed6552934c50feef2cf6456bc06692a5508fd0c \ + --hash=sha256:4972c21890b6814017e39ac233c22572c4a61fd874524ebc5ccab0f2237aee0a \ + --hash=sha256:532d3525538254d1859b49de1fbe9744df6b8865657c9f0e444bf36ce3f19226 \ + --hash=sha256:6a80ec05eb8a6883863a367c6a4d4f2d57de68466f7956b6367d4edd5c61bb29 \ + --hash=sha256:809094208c6256c8f4ccadd31e9a513fe2429253f48e20066879239ba12cd8cc \ + --hash=sha256:d92e522bd40e9bf7d87c204033ce5bb5c828fca45fa28d970f58d71128069fdc \ + --hash=sha256:e53aa53daa2e47c2eef97dd2455b47bb3a7e7f962796a86cc3e7dbde8e6f4db4 \ + --hash=sha256:ee776b9d512a011d1ad3907ed53ae32ce2f3d9ff3e1782236554e22103b5c084 + # via ray +py4j==0.10.9.9 \ + --hash=sha256:c7c26e4158defb37b0bb124933163641a2ff6e3a3913f7811b0ddbe07ed61533 \ + 
--hash=sha256:f694cad19efa5bd1dee4f3e5270eb406613c974394035e5bfc4ec1aba870b879 + # via pyspark +pyarrow==17.0.0 \ + --hash=sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a \ + --hash=sha256:02dae06ce212d8b3244dd3e7d12d9c4d3046945a5933d28026598e9dbbda1fca \ + --hash=sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597 \ + --hash=sha256:0cdb0e627c86c373205a2f94a510ac4376fdc523f8bb36beab2e7f204416163c \ + --hash=sha256:13d7a460b412f31e4c0efa1148e1d29bdf18ad1411eb6757d38f8fbdcc8645fb \ + --hash=sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977 \ + --hash=sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3 \ + --hash=sha256:32503827abbc5aadedfa235f5ece8c4f8f8b0a3cf01066bc8d29de7539532687 \ + --hash=sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7 \ + --hash=sha256:42bf93249a083aca230ba7e2786c5f673507fa97bbd9725a1e2754715151a204 \ + --hash=sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28 \ + --hash=sha256:5984f416552eea15fd9cee03da53542bf4cddaef5afecefb9aa8d1010c335087 \ + --hash=sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15 \ + --hash=sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc \ + --hash=sha256:75c06d4624c0ad6674364bb46ef38c3132768139ddec1c56582dbac54f2663e2 \ + --hash=sha256:7c7916bff914ac5d4a8fe25b7a25e432ff921e72f6f2b7547d1e325c1ad9d155 \ + --hash=sha256:9b564a51fbccfab5a04a80453e5ac6c9954a9c5ef2890d1bcf63741909c3f8df \ + --hash=sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22 \ + --hash=sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a \ + --hash=sha256:a155acc7f154b9ffcc85497509bcd0d43efb80d6f733b0dc3bb14e281f131c8b \ + --hash=sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03 \ + --hash=sha256:a48ddf5c3c6a6c505904545c25a4ae13646ae1f8ba703c4df4a1bfe4f4006bda \ + 
--hash=sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07 \ + --hash=sha256:af5ff82a04b2171415f1410cff7ebb79861afc5dae50be73ce06d6e870615204 \ + --hash=sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b \ + --hash=sha256:d7d192305d9d8bc9082d10f361fc70a73590a4c65cf31c3e6926cd72b76bc35c \ + --hash=sha256:da1e060b3876faa11cee287839f9cc7cdc00649f475714b8680a05fd9071d545 \ + --hash=sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655 \ + --hash=sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420 \ + --hash=sha256:dec8d129254d0188a49f8a1fc99e0560dc1b85f60af729f47de4046015f9b0a5 \ + --hash=sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4 \ + --hash=sha256:edca18eaca89cd6382dfbcff3dd2d87633433043650c07375d095cd3517561d8 \ + --hash=sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053 \ + --hash=sha256:f553ca691b9e94b202ff741bdd40f6ccb70cdd5fbf65c187af132f1317de6145 \ + --hash=sha256:f7ae2de664e0b158d1607699a16a488de3d008ba99b3a7aa5de1cbc13574d047 \ + --hash=sha256:fa3c246cc58cb5a4a5cb407a18f193354ea47dd0648194e6265bd24177982fe8 + # via + # feast (setup.py) + # dask + # datasets + # db-dtypes + # deltalake + # google-cloud-bigquery + # ibis-framework + # pandas-gbq + # ray + # snowflake-connector-python +pyarrow-hotfix==0.7 \ + --hash=sha256:3236f3b5f1260f0e2ac070a55c1a7b339c4bb7267839bd2015e283234e758100 \ + --hash=sha256:59399cd58bdd978b2e42816a4183a55c6472d4e33d183351b6069f11ed42661d + # via ibis-framework +pyasn1==0.6.2 \ + --hash=sha256:1eb26d860996a18e9b6ed05e7aae0e9fc21619fcee6af91cca9bad4fbea224bf \ + --hash=sha256:9b59a2b25ba7e4f8197db7686c09fb33e658b98339fadb826e9512629017833b + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.4.2 \ + --hash=sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a \ + --hash=sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6 + # via google-auth +pybindgen==0.22.0 \ + 
--hash=sha256:21612f4806a2af25a175716d7e694563af7e10c704538a350cb595d187952f6f \ + --hash=sha256:5b4837d3138ac56863d93fe462f1dac39fb87bd50898e0da4c57fefd645437ac + # via feast (setup.py) +pyclipper==1.4.0 \ + --hash=sha256:0a4d2736fb3c42e8eb1d38bf27a720d1015526c11e476bded55138a977c17d9d \ + --hash=sha256:0b74a9dd44b22a7fd35d65fb1ceeba57f3817f34a97a28c3255556362e491447 \ + --hash=sha256:0b8c2105b3b3c44dbe1a266f64309407fe30bf372cf39a94dc8aaa97df00da5b \ + --hash=sha256:14c8bdb5a72004b721c4e6f448d2c2262d74a7f0c9e3076aeff41e564a92389f \ + --hash=sha256:1b6c8d75ba20c6433c9ea8f1a0feb7e4d3ac06a09ad1fd6d571afc1ddf89b869 \ + --hash=sha256:222ac96c8b8281b53d695b9c4fedc674f56d6d4320ad23f1bdbd168f4e316140 \ + --hash=sha256:29dae3e0296dff8502eeb7639fcfee794b0eec8590ba3563aee28db269da6b04 \ + --hash=sha256:37bfec361e174110cdddffd5ecd070a8064015c99383d95eb692c253951eee8a \ + --hash=sha256:3ef44b64666ebf1cb521a08a60c3e639d21b8c50bfbe846ba7c52a0415e936f4 \ + --hash=sha256:58e29d7443d7cc0e83ee9daf43927730386629786d00c63b04fe3b53ac01462c \ + --hash=sha256:6a97b961f182b92d899ca88c1bb3632faea2e00ce18d07c5f789666ebb021ca4 \ + --hash=sha256:6c317e182590c88ec0194149995e3d71a979cfef3b246383f4e035f9d4a11826 \ + --hash=sha256:773c0e06b683214dcfc6711be230c83b03cddebe8a57eae053d4603dd63582f9 \ + --hash=sha256:7c87480fc91a5af4c1ba310bdb7de2f089a3eeef5fe351a3cedc37da1fcced1c \ + --hash=sha256:81d8bb2d1fb9d66dc7ea4373b176bb4b02443a7e328b3b603a73faec088b952e \ + --hash=sha256:8d42b07a2f6cfe2d9b87daf345443583f00a14e856927782fde52f3a255e305a \ + --hash=sha256:9882bd889f27da78add4dd6f881d25697efc740bf840274e749988d25496c8e1 \ + --hash=sha256:98b2a40f98e1fc1b29e8a6094072e7e0c7dfe901e573bf6cfc6eb7ce84a7ae87 \ + --hash=sha256:9bc45f2463d997848450dbed91c950ca37c6cf27f84a49a5cad4affc0b469e39 \ + --hash=sha256:a8d2b5fb75ebe57e21ce61e79a9131edec2622ff23cc665e4d1d1f201bc1a801 \ + --hash=sha256:a9f11ad133257c52c40d50de7a0ca3370a0cdd8e3d11eec0604ad3c34ba549e9 \ + 
--hash=sha256:adcb7ca33c5bdc33cd775e8b3eadad54873c802a6d909067a57348bcb96e7a2d \ + --hash=sha256:b3b3630051b53ad2564cb079e088b112dd576e3d91038338ad1cc7915e0f14dc \ + --hash=sha256:bafad70d2679c187120e8c44e1f9a8b06150bad8c0aecf612ad7dfbfa9510f73 \ + --hash=sha256:bbc827b77442c99deaeee26e0e7f172355ddb097a5e126aea206d447d3b26286 \ + --hash=sha256:c9a3faa416ff536cee93417a72bfb690d9dea136dc39a39dbbe1e5dadf108c9c \ + --hash=sha256:ce1f83c9a4e10ea3de1959f0ae79e9a5bd41346dff648fee6228ba9eaf8b3872 \ + --hash=sha256:d1e5498d883b706a4ce636247f0d830c6eb34a25b843a1b78e2c969754ca9037 \ + --hash=sha256:d1f807e2b4760a8e5c6d6b4e8c1d71ef52b7fe1946ff088f4fa41e16a881a5ca \ + --hash=sha256:d49df13cbb2627ccb13a1046f3ea6ebf7177b5504ec61bdef87d6a704046fd6e \ + --hash=sha256:d4b2d7c41086f1927d14947c563dfc7beed2f6c0d9af13c42fe3dcdc20d35832 \ + --hash=sha256:e9b973467d9c5fa9bc30bb6ac95f9f4d7c3d9fc25f6cf2d1cc972088e5955c01 \ + --hash=sha256:f160a2c6ba036f7eaf09f1f10f4fbfa734234af9112fb5187877efed78df9303 \ + --hash=sha256:f2a50c22c3a78cb4e48347ecf06930f61ce98cf9252f2e292aa025471e9d75b1 \ + --hash=sha256:f3672dbafbb458f1b96e1ee3e610d174acb5ace5bd2ed5d1252603bb797f2fc6 \ + --hash=sha256:fd24849d2b94ec749ceac7c34c9f01010d23b6e9d9216cf2238b8481160e703d + # via easyocr +pycparser==2.23 \ + --hash=sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2 \ + --hash=sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934 + # via cffi +pycryptodome==3.23.0 \ + --hash=sha256:0011f7f00cdb74879142011f95133274741778abba114ceca229adbf8e62c3e4 \ + --hash=sha256:11eeeb6917903876f134b56ba11abe95c0b0fd5e3330def218083c7d98bbcb3c \ + --hash=sha256:14e15c081e912c4b0d75632acd8382dfce45b258667aa3c67caf7a4d4c13f630 \ + --hash=sha256:156df9667ad9f2ad26255926524e1c136d6664b741547deb0a86a9acf5ea631f \ + --hash=sha256:187058ab80b3281b1de11c2e6842a357a1f71b42cb1e15bce373f3d238135c27 \ + --hash=sha256:257bb3572c63ad8ba40b89f6fc9d63a2a628e9f9708d31ee26560925ebe0210a \ + 
--hash=sha256:350ebc1eba1da729b35ab7627a833a1a355ee4e852d8ba0447fafe7b14504d56 \ + --hash=sha256:447700a657182d60338bab09fdb27518f8856aecd80ae4c6bdddb67ff5da44ef \ + --hash=sha256:45c69ad715ca1a94f778215a11e66b7ff989d792a4d63b68dc586a1da1392ff5 \ + --hash=sha256:4764e64b269fc83b00f682c47443c2e6e85b18273712b98aa43bcb77f8570477 \ + --hash=sha256:507dbead45474b62b2bbe318eb1c4c8ee641077532067fec9c1aa82c31f84886 \ + --hash=sha256:53ecbafc2b55353edcebd64bf5da94a2a2cdf5090a6915bcca6eca6cc452585a \ + --hash=sha256:573a0b3017e06f2cffd27d92ef22e46aa3be87a2d317a5abf7cc0e84e321bd75 \ + --hash=sha256:63dad881b99ca653302b2c7191998dd677226222a3f2ea79999aa51ce695f720 \ + --hash=sha256:64093fc334c1eccfd3933c134c4457c34eaca235eeae49d69449dc4728079339 \ + --hash=sha256:6501790c5b62a29fcb227bd6b62012181d886a767ce9ed03b303d1f22eb5c625 \ + --hash=sha256:67bd81fcbe34f43ad9422ee8fd4843c8e7198dd88dd3d40e6de42ee65fbe1490 \ + --hash=sha256:6fe8258e2039eceb74dfec66b3672552b6b7d2c235b2dfecc05d16b8921649a8 \ + --hash=sha256:763d1d74f56f031788e5d307029caef067febf890cd1f8bf61183ae142f1a77b \ + --hash=sha256:7ac1080a8da569bde76c0a104589c4f414b8ba296c0b3738cf39a466a9fb1818 \ + --hash=sha256:865d83c906b0fc6a59b510deceee656b6bc1c4fa0d82176e2b77e97a420a996a \ + --hash=sha256:89d4d56153efc4d81defe8b65fd0821ef8b2d5ddf8ed19df31ba2f00872b8002 \ + --hash=sha256:90460fc9e088ce095f9ee8356722d4f10f86e5be06e2354230a9880b9c549aae \ + --hash=sha256:93837e379a3e5fd2bb00302a47aee9fdf7940d83595be3915752c74033d17ca7 \ + --hash=sha256:954af0e2bd7cea83ce72243b14e4fb518b18f0c1649b576d114973e2073b273d \ + --hash=sha256:9a53a4fe5cb075075d515797d6ce2f56772ea7e6a1e5e4b96cf78a14bac3d265 \ + --hash=sha256:9a77627a330ab23ca43b48b130e202582e91cc69619947840ea4d2d1be21eb39 \ + --hash=sha256:a176b79c49af27d7f6c12e4b178b0824626f40a7b9fed08f712291b6d54bf566 \ + --hash=sha256:a7fc76bf273353dc7e5207d172b83f569540fc9a28d63171061c42e361d22353 \ + --hash=sha256:aa0698f65e5b570426fc31b8162ed4603b0c2841cbb9088e2b01641e3065915b \ + 
--hash=sha256:b34e8e11d97889df57166eda1e1ddd7676da5fcd4d71a0062a760e75060514b4 \ + --hash=sha256:c75b52aacc6c0c260f204cbdd834f76edc9fb0d8e0da9fbf8352ef58202564e2 \ + --hash=sha256:c8987bd3307a39bc03df5c8e0e3d8be0c4c3518b7f044b0f4c15d1aa78f52575 \ + --hash=sha256:ce64e84a962b63a47a592690bdc16a7eaf709d2c2697ababf24a0def566899a6 \ + --hash=sha256:cfb5cd445280c5b0a4e6187a7ce8de5a07b5f3f897f235caa11f1f435f182843 \ + --hash=sha256:d8e95564beb8782abfd9e431c974e14563a794a4944c29d6d3b7b5ea042110b4 \ + --hash=sha256:d97618c9c6684a97ef7637ba43bdf6663a2e2e77efe0f863cce97a76af396446 \ + --hash=sha256:ddb95b49df036ddd264a0ad246d1be5b672000f12d6961ea2c267083a5e19379 \ + --hash=sha256:dea827b4d55ee390dc89b2afe5927d4308a8b538ae91d9c6f7a5090f397af1aa \ + --hash=sha256:e3f2d0aaf8080bda0587d58fc9fe4766e012441e2eed4269a77de6aea981c8be \ + --hash=sha256:eb8f24adb74984aa0e5d07a2368ad95276cf38051fe2dc6605cbcf482e04f2a7 + # via minio +pydantic==2.12.5 \ + --hash=sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49 \ + --hash=sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d + # via + # feast (setup.py) + # codeflare-sdk + # docling + # docling-core + # docling-ibm-models + # docling-parse + # fastapi + # fastapi-mcp + # great-expectations + # mcp + # pydantic-settings + # qdrant-client + # ray +pydantic-core==2.41.5 \ + --hash=sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90 \ + --hash=sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740 \ + --hash=sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504 \ + --hash=sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84 \ + --hash=sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33 \ + --hash=sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c \ + --hash=sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0 \ + 
--hash=sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e \ + --hash=sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0 \ + --hash=sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a \ + --hash=sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34 \ + --hash=sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2 \ + --hash=sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3 \ + --hash=sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815 \ + --hash=sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14 \ + --hash=sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba \ + --hash=sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375 \ + --hash=sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf \ + --hash=sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963 \ + --hash=sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1 \ + --hash=sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808 \ + --hash=sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553 \ + --hash=sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1 \ + --hash=sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2 \ + --hash=sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5 \ + --hash=sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470 \ + --hash=sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2 \ + --hash=sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b \ + --hash=sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660 \ + --hash=sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c \ + --hash=sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093 \ + 
--hash=sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5 \ + --hash=sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594 \ + --hash=sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008 \ + --hash=sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a \ + --hash=sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a \ + --hash=sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd \ + --hash=sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284 \ + --hash=sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586 \ + --hash=sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869 \ + --hash=sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294 \ + --hash=sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f \ + --hash=sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66 \ + --hash=sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51 \ + --hash=sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc \ + --hash=sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97 \ + --hash=sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a \ + --hash=sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d \ + --hash=sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9 \ + --hash=sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c \ + --hash=sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07 \ + --hash=sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36 \ + --hash=sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e \ + --hash=sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05 \ + --hash=sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e \ + 
--hash=sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941 \ + --hash=sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3 \ + --hash=sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612 \ + --hash=sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3 \ + --hash=sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b \ + --hash=sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe \ + --hash=sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146 \ + --hash=sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11 \ + --hash=sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60 \ + --hash=sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd \ + --hash=sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b \ + --hash=sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c \ + --hash=sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a \ + --hash=sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460 \ + --hash=sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1 \ + --hash=sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf \ + --hash=sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf \ + --hash=sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858 \ + --hash=sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2 \ + --hash=sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9 \ + --hash=sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2 \ + --hash=sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3 \ + --hash=sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6 \ + --hash=sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770 \ + 
--hash=sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d \ + --hash=sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc \ + --hash=sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23 \ + --hash=sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26 \ + --hash=sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa \ + --hash=sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8 \ + --hash=sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d \ + --hash=sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3 \ + --hash=sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d \ + --hash=sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034 \ + --hash=sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9 \ + --hash=sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1 \ + --hash=sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56 \ + --hash=sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b \ + --hash=sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c \ + --hash=sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a \ + --hash=sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e \ + --hash=sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9 \ + --hash=sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5 \ + --hash=sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a \ + --hash=sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556 \ + --hash=sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e \ + --hash=sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49 \ + --hash=sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2 \ + 
--hash=sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9 \ + --hash=sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b \ + --hash=sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc \ + --hash=sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb \ + --hash=sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0 \ + --hash=sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8 \ + --hash=sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82 \ + --hash=sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69 \ + --hash=sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b \ + --hash=sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c \ + --hash=sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75 \ + --hash=sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5 \ + --hash=sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f \ + --hash=sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad \ + --hash=sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b \ + --hash=sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7 \ + --hash=sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425 \ + --hash=sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52 + # via pydantic +pydantic-settings==2.12.0 \ + --hash=sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0 \ + --hash=sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809 + # via + # docling + # fastapi-mcp + # mcp +pydata-google-auth==1.9.1 \ + --hash=sha256:0a51ce41c601ca0bc69b8795bf58bedff74b4a6a007c9106c7cbcdec00eaced2 \ + --hash=sha256:75ffce5d106e34b717b31844c1639ea505b7d9550dc23b96fb6c20d086b53fa3 + # via pandas-gbq +pygments==2.19.2 \ + 
--hash=sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887 \ + --hash=sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b + # via + # feast (setup.py) + # ipython + # ipython-pygments-lexers + # mpire + # nbconvert + # rich + # sphinx +pyjwt[crypto]==2.10.1 \ + --hash=sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953 \ + --hash=sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb + # via + # feast (setup.py) + # mcp + # msal + # singlestoredb + # snowflake-connector-python +pylatexenc==2.10 \ + --hash=sha256:3dd8fd84eb46dc30bee1e23eaab8d8fb5a7f507347b23e5f38ad9675c84f40d3 + # via docling +pymilvus==2.4.15 \ + --hash=sha256:0601591ce0498315e19e9ac3f4fdd3051102ca87b6ddff5b33849f522288cff7 \ + --hash=sha256:b21878e5df74dca91b3f3cf0b0597fa6b6aed7bf5cde9a1b10641994faa353bf + # via feast (setup.py) +pymongo==4.16.0 \ + --hash=sha256:03f42396c1b2c6f46f5401c5b185adc25f6113716e16d9503977ee5386fca0fb \ + --hash=sha256:12762e7cc0f8374a8cae3b9f9ed8dabb5d438c7b33329232dd9b7de783454033 \ + --hash=sha256:15bb062c0d6d4b0be650410032152de656a2a9a2aa4e1a7443a22695afacb103 \ + --hash=sha256:19a1c96e7f39c7a59a9cfd4d17920cf9382f6f684faeff4649bf587dc59f8edc \ + --hash=sha256:1c01e8a7cd0ea66baf64a118005535ab5bf9f9eb63a1b50ac3935dccf9a54abe \ + --hash=sha256:1d638b0b1b294d95d0fdc73688a3b61e05cc4188872818cd240d51460ccabcb5 \ + --hash=sha256:21d02cc10a158daa20cb040985e280e7e439832fc6b7857bff3d53ef6914ad50 \ + --hash=sha256:2290909275c9b8f637b0a92eb9b89281e18a72922749ebb903403ab6cc7da914 \ + --hash=sha256:25a6b03a68f9907ea6ec8bc7cf4c58a1b51a18e23394f962a6402f8e46d41211 \ + --hash=sha256:2a3ba6be3d8acf64b77cdcd4e36f0e4a8e87965f14a8b09b90ca86f10a1dd2f2 \ + --hash=sha256:2b0714d7764efb29bf9d3c51c964aed7c4c7237b341f9346f15ceaf8321fdb35 \ + --hash=sha256:2cd60cd1e05de7f01927f8e25ca26b3ea2c09de8723241e5d3bcfdc70eaff76b \ + --hash=sha256:2d0082631a7510318befc2b4fdab140481eb4b9dd62d9245e042157085da2a70 \ + 
--hash=sha256:311d4549d6bf1f8c61d025965aebb5ba29d1481dc6471693ab91610aaffbc0eb \ + --hash=sha256:36ef2fee50eee669587d742fb456e349634b4fcf8926208766078b089054b24b \ + --hash=sha256:3ead8a0050c53eaa55935895d6919d393d0328ec24b2b9115bdbe881aa222673 \ + --hash=sha256:46ffb728d92dd5b09fc034ed91acf5595657c7ca17d4cf3751322cd554153c17 \ + --hash=sha256:4a19ea46a0fe71248965305a020bc076a163311aefbaa1d83e47d06fa30ac747 \ + --hash=sha256:4a9390dce61d705a88218f0d7b54d7e1fa1b421da8129fc7c009e029a9a6b81e \ + --hash=sha256:4c4872299ebe315a79f7f922051061634a64fda95b6b17677ba57ef00b2ba2a4 \ + --hash=sha256:4cd047ba6cc83cc24193b9208c93e134a985ead556183077678c59af7aacc725 \ + --hash=sha256:4d4f7ba040f72a9f43a44059872af5a8c8c660aa5d7f90d5344f2ed1c3c02721 \ + --hash=sha256:4d79aa147ce86aef03079096d83239580006ffb684eead593917186aee407767 \ + --hash=sha256:4fbb8d3552c2ad99d9e236003c0b5f96d5f05e29386ba7abae73949bfebc13dd \ + --hash=sha256:55f8d5a6fe2fa0b823674db2293f92d74cd5f970bc0360f409a1fc21003862d3 \ + --hash=sha256:5b9c6d689bbe5beb156374508133218610e14f8c81e35bc17d7a14e30ab593e6 \ + --hash=sha256:5d9fdb386cf958e6ef6ff537d6149be7edb76c3268cd6833e6c36aa447e4443f \ + --hash=sha256:60307bb91e0ab44e560fe3a211087748b2b5f3e31f403baf41f5b7b0a70bd104 \ + --hash=sha256:61567f712bda04c7545a037e3284b4367cad8d29b3dec84b4bf3b2147020a75b \ + --hash=sha256:66af44ed23686dd5422307619a6db4b56733c5e36fe8c4adf91326dcf993a043 \ + --hash=sha256:6af1aaa26f0835175d2200e62205b78e7ec3ffa430682e322cc91aaa1a0dbf28 \ + --hash=sha256:6b2a20edb5452ac8daa395890eeb076c570790dfce6b7a44d788af74c2f8cf96 \ + --hash=sha256:6f2077ec24e2f1248f9cac7b9a2dfb894e50cc7939fcebfb1759f99304caabef \ + --hash=sha256:77cfd37a43a53b02b7bd930457c7994c924ad8bbe8dff91817904bcbf291b371 \ + --hash=sha256:78037d02389745e247fe5ab0bcad5d1ab30726eaac3ad79219c7d6bbb07eec53 \ + --hash=sha256:7902882ed0efb7f0e991458ab3b8cf0eb052957264949ece2f09b63c58b04f78 \ + --hash=sha256:85dc2f3444c346ea019a371e321ac868a4fab513b7a55fe368f0cc78de8177cc \ + 
--hash=sha256:8a0f73af1ea56c422b2dcfc0437459148a799ef4231c6aee189d2d4c59d6728f \ + --hash=sha256:8a254d49a9ffe9d7f888e3c677eed3729b14ce85abb08cd74732cead6ccc3c66 \ + --hash=sha256:8ba8405065f6e258a6f872fe62d797a28f383a12178c7153c01ed04e845c600c \ + --hash=sha256:91899dd7fb9a8c50f09c3c1cf0cb73bfbe2737f511f641f19b9650deb61c00ca \ + --hash=sha256:91ac0cb0fe2bf17616c2039dac88d7c9a5088f5cb5829b27c9d250e053664d31 \ + --hash=sha256:92a232af9927710de08a6c16a9710cc1b175fb9179c0d946cd4e213b92b2a69a \ + --hash=sha256:948152b30eddeae8355495f9943a3bf66b708295c0b9b6f467de1c620f215487 \ + --hash=sha256:96aa7ab896889bf330209d26459e493d00f8855772a9453bfb4520bb1f495baf \ + --hash=sha256:9caacac0dd105e2555521002e2d17afc08665187017b466b5753e84c016628e6 \ + --hash=sha256:9d9885aad05f82fd7ea0c9ca505d60939746b39263fa273d0125170da8f59098 \ + --hash=sha256:9dc2c00bed568732b89e211b6adca389053d5e6d2d5a8979e80b813c3ec4d1f9 \ + --hash=sha256:a1bf44e13cf2d44d2ea2e928a8140d5d667304abe1a61c4d55b4906f389fbe64 \ + --hash=sha256:aa30cd16ddd2f216d07ba01d9635c873e97ddb041c61cf0847254edc37d1c60e \ + --hash=sha256:acda193f440dd88c2023cb00aa8bd7b93a9df59978306d14d87a8b12fe426b05 \ + --hash=sha256:bd4911c40a43a821dfd93038ac824b756b6e703e26e951718522d29f6eb166a8 \ + --hash=sha256:be1099a8295b1a722d03fb7b48be895d30f4301419a583dcf50e9045968a041c \ + --hash=sha256:c126fb72be2518395cc0465d4bae03125119136462e1945aea19840e45d89cfc \ + --hash=sha256:c53338613043038005bf2e41a2fafa08d29cdbc0ce80891b5366c819456c1ae9 \ + --hash=sha256:c789236366525c3ee3cd6e4e450a9ff629a7d1f4d88b8e18a0aea0615fd7ecf8 \ + --hash=sha256:cf0ec79e8ca7077f455d14d915d629385153b6a11abc0b93283ed73a8013e376 \ + --hash=sha256:d15f060bc6d0964a8bb70aba8f0cb6d11ae99715438f640cff11bbcf172eb0e8 \ + --hash=sha256:d284bf68daffc57516535f752e290609b3b643f4bd54b28fc13cb16a89a8bda6 \ + --hash=sha256:dabbf3c14de75a20cc3c30bf0c6527157224a93dfb605838eabb1a2ee3be008d \ + --hash=sha256:dbbc5b254c36c37d10abb50e899bc3939bbb7ab1e7c659614409af99bd3e7675 \ + 
--hash=sha256:dfc320f08ea9a7ec5b2403dc4e8150636f0d6150f4b9792faaae539c88e7db3b \ + --hash=sha256:e2d509786344aa844ae243f68f833ca1ac92ac3e35a92ae038e2ceb44aa355ef \ + --hash=sha256:e37469602473f41221cea93fd3736708f561f0fa08ab6b2873dd962014390d52 \ + --hash=sha256:ed162b2227f98d5b270ecbe1d53be56c8c81db08a1a8f5f02d89c7bb4d19591d \ + --hash=sha256:efe020c46ce3c3a89af6baec6569635812129df6fb6cf76d4943af3ba6ee2069 \ + --hash=sha256:f1c5f1f818b669875d191323a48912d3fcd2e4906410e8297bb09ac50c4d5ccc \ + --hash=sha256:f25001a955073b80510c0c3db0e043dbbc36904fd69e511c74e3d8640b8a5111 \ + --hash=sha256:f3867dc225d9423c245a51eaac2cfcd53dde8e0a8d8090bb6aed6e31bd6c2d4f \ + --hash=sha256:f513b2c6c0d5c491f478422f6b5b5c27ac1af06a54c93ef8631806f7231bd92e \ + --hash=sha256:f6e42c1bc985d9beee884780ae6048790eb4cd565c46251932906bdb1630034a + # via feast (setup.py) +pymssql==2.3.2 \ + --hash=sha256:06883bc9bdb297ae9132d9371b5b1a3a223c8f93dd6a87d1c112c6a688f26d53 \ + --hash=sha256:0768d90f96ae3267d7561d3bcfe94dd671d107489e870388b12570c3debbc552 \ + --hash=sha256:0831c5c95aab0b9aba5142dc97e28f59c4130e1c34ffc13ecbfdd4d2fe45b8a0 \ + --hash=sha256:08facd25a50a7279385d1ffcee9d6d83c4e361db1af38e14519a87d7b1cadb10 \ + --hash=sha256:0a20a17db870fb0e446a6d6bf7664aaf84af7be58ab1fad025cafa4e092507a1 \ + --hash=sha256:1037053e6c74d6fe14c428cc942968b4e4bf06854706a83fe8e822e475e3f107 \ + --hash=sha256:148b7714fff5a5b7ce038e92b02dd9bf68fe442c181a3aae32148e7b13f6db95 \ + --hash=sha256:18089641b687be1ebd0f64f0d1ff977478a397ffa1af372bdf10dbec29cf6d2e \ + --hash=sha256:1afda7b7022eff9451bd83e3f64c450a1a8cdff4ba8b8e399866dcd2cb861a1e \ + --hash=sha256:1bc33ed9af6d8ebea2d49144cd2317b7ae1105dd51dddfd46982c90c8f0cf6ab \ + --hash=sha256:1c99dba4bf5b1ce10657e9e2885f18ba9179190251b63d1498e7d6d72e64f1ce \ + --hash=sha256:1cdc2619e7b4192b8d6619fd52ba8a2eae18b38b376f8649fb8f0727c4e88ff9 \ + --hash=sha256:22b1ce3a48f28ee7d06ebc9ed94276d0bf1c99051ee1df3d2377b74721bd62ef \ + 
--hash=sha256:22fb0fdd3b889bc10abbe3aa2abe7a008b30a6367b9ba159412d185d7d8fda9d \ + --hash=sha256:235c230e56d8c8c5f289e665c538f31d967fec302d05ad269dcd64fa9d6eb3b7 \ + --hash=sha256:23f5e2e2bdba1cf7cecbac66dd07de7631a8efca5692efee18ff46ebc087b757 \ + --hash=sha256:2568944db3888996e161b40ad06c1b9e0fbb6cfcb38279a3abb98ece7a8e1c4a \ + --hash=sha256:26bdb7abd5f107b6be422635f03e2cecaa52a5f4c394a205014586abbff9e72a \ + --hash=sha256:2a44a0898dacba4e25cac8778d0ed112e297883fe862204e447081888da78dc4 \ + --hash=sha256:2f4093b95f1f3a1232687fc92f652aaf675eb423db8549c16d146b91ac2f0eba \ + --hash=sha256:33ad813092f8fb8f74578c5b5e37c818c4ae130fd4047cb28f0b256f2f107367 \ + --hash=sha256:3870085a49e5332bc67ecb24f217c586977d5352eb51598244fc7bc278eee3e1 \ + --hash=sha256:41d09e1b2534229b288c37b88c1de3d964317af2c7eec58bfb97e01d679eba27 \ + --hash=sha256:5904d78e61668ec89761d3ae01efd4b42b31d820f612929f449e93cd23ba3c54 \ + --hash=sha256:6019d2939963112662288704f608f31634038bffcfd5cad1bc79cb167edb3cc1 \ + --hash=sha256:72b6599963b6e066998c4b27b7bf207684c243b12b1e5dcc180b2af22802ae6c \ + --hash=sha256:73fac766b448613d7ae26e6b304b2cb8a7ffebccaa373633bad3b3cbcc829935 \ + --hash=sha256:793a93da1521fa66bf02b3b873065e22bf14bda5570e005ce3d5fae0776d7b92 \ + --hash=sha256:79cdc3ed1da3129ba56232127db86279728c4328595e2532ed4d0da6379a5c72 \ + --hash=sha256:82ed3dd560d3fb222d26ce3a7373f46dc3ad1d50b6e6417ef7399e87fa9aefe1 \ + --hash=sha256:8cd806380d362d4cef2d925a6baee6a4b2b151a92cac2cab5c4bfabed4be4849 \ + --hash=sha256:9361593a89c9162fc631baf648a87e2666373382d9d54aacfb19edab9ceb2007 \ + --hash=sha256:97fbd8491ad3ece0adcb321acec6db48b8fe37bc74af4c91bb657d4d9347d634 \ + --hash=sha256:9e3d6fada7fbe7a5f5fafc420673f777bab3f399c78fa44e29de6a8cbc36e515 \ + --hash=sha256:a3f9e7eb813dfeab6d01bf6474049bb76b0521235159d3e969ec82df384eac49 \ + --hash=sha256:aa08b6203b2b5ed5ce47f80d5c529459181300d7e0d0c1e84390a4d01d45e509 \ + --hash=sha256:ab48de09864fa6f49c575ef569f6773981d0cd905ff7288b5b185f8079a5a21f \ + 
--hash=sha256:ab912d1178d5977e421cf9c4d4071958b223cbe4a2b6dd64611d521aa6bb7187 \ + --hash=sha256:ae02cc1594f0addd748bf5ac1ccc7a73c03846ada9c553663c381b242b586606 \ + --hash=sha256:b0c2b11aca16617cacaf385fb94134e73ba0216a924f9b85778cc7e3d3713361 \ + --hash=sha256:b14cc65369d1425f2fb517609113465a0f55f19a49648160f2d10be4cb43ff4d \ + --hash=sha256:b156b15165f7a0bbb392a124d8e2d678145c93e5bfcfef3b637e4d87eadcc85b \ + --hash=sha256:b16d5880f7028442d6c49c94801ce9bff3af8af0fbda7c6039febb936714aed5 \ + --hash=sha256:b3eb201c402bcf4f5b9399df0bb20d522636d2e87d1c6957a0b6d772ee636c61 \ + --hash=sha256:bac6f355c454f94b0e15a04b7841236e5c5c4ef44d2d1beed00a3ad7b50ccc53 \ + --hash=sha256:c24ba6aedb9b5540b56f3e74bff92b687c6e90c00650823385729c7e55923cf5 \ + --hash=sha256:cbe9058b6520be74463476ff2cdb17bbab5ff60b60b3ed7bd8bd2d086bdfd9bd \ + --hash=sha256:cc13c2e0f1b8efc3a46941de9db768fa59937b5a54081ec0cb0ff0da17d1fff3 \ + --hash=sha256:dd5fe7552edc81628e4242b4671f7bad5ff1ec790bae5c7615d989375620edac \ + --hash=sha256:eb629b5fb0376fbf39d575cf1365e504b84877b19f9e8d53caa5228fed56894a \ + --hash=sha256:ee8ee2c7c227c413ad9b88ddba1cb6a25e28c217ae73ecac1c7a6b8c29003604 \ + --hash=sha256:ef0d29c705db552f9e75230f946b0ca9db0db903c5c9ee79ce8b88ad25ea9670 \ + --hash=sha256:f1791f4627c42fe2d2833c884d036b0c5c8cf628f2cdfa3536191c217acf729e \ + --hash=sha256:f282e701dca155b3e7f1644d7e3b60c201ca5f3be8045bce34042d3c737d63ee \ + --hash=sha256:f2b1da4e68d618c7972e583ae19f386ae620258acb61564e8067c203f27cd769 \ + --hash=sha256:f9737c06b13ca2012b9900185fa3af72a37941c532da2e6373dd7c9ab16abddf \ + --hash=sha256:fb8a7b197aaf466a7577ca6690aa9d747081b653ab212d052d71f3cc10587c3b \ + --hash=sha256:fdd774b26407babd0205ef85a098f90553e6b3da77a22322a1e7d2cb51f742c0 + # via feast (setup.py) +pymysql==1.1.2 \ + --hash=sha256:4961d3e165614ae65014e361811a724e2044ad3ea3739de9903ae7c21f539f03 \ + --hash=sha256:e6b1d89711dd51f8f74b1631fe08f039e7d76cf67a42a323d3178f0f25762ed9 + # via feast (setup.py) +pynacl==1.6.0 \ + 
--hash=sha256:04f20784083014e265ad58c1b2dd562c3e35864b5394a14ab54f5d150ee9e53e \ + --hash=sha256:10d755cf2a455d8c0f8c767a43d68f24d163b8fe93ccfaabfa7bafd26be58d73 \ + --hash=sha256:140373378e34a1f6977e573033d1dd1de88d2a5d90ec6958c9485b2fd9f3eb90 \ + --hash=sha256:16c60daceee88d04f8d41d0a4004a7ed8d9a5126b997efd2933e08e93a3bd850 \ + --hash=sha256:16dd347cdc8ae0b0f6187a2608c0af1c8b7ecbbe6b4a06bff8253c192f696990 \ + --hash=sha256:25720bad35dfac34a2bcdd61d9e08d6bfc6041bebc7751d9c9f2446cf1e77d64 \ + --hash=sha256:2d6cd56ce4998cb66a6c112fda7b1fdce5266c9f05044fa72972613bef376d15 \ + --hash=sha256:347dcddce0b4d83ed3f32fd00379c83c425abee5a9d2cd0a2c84871334eaff64 \ + --hash=sha256:4853c154dc16ea12f8f3ee4b7e763331876316cc3a9f06aeedf39bcdca8f9995 \ + --hash=sha256:49c336dd80ea54780bcff6a03ee1a476be1612423010472e60af83452aa0f442 \ + --hash=sha256:4a25cfede801f01e54179b8ff9514bd7b5944da560b7040939732d1804d25419 \ + --hash=sha256:51fed9fe1bec9e7ff9af31cd0abba179d0e984a2960c77e8e5292c7e9b7f7b5d \ + --hash=sha256:536703b8f90e911294831a7fbcd0c062b837f3ccaa923d92a6254e11178aaf42 \ + --hash=sha256:5789f016e08e5606803161ba24de01b5a345d24590a80323379fc4408832d290 \ + --hash=sha256:6b08eab48c9669d515a344fb0ef27e2cbde847721e34bba94a343baa0f33f1f4 \ + --hash=sha256:6b393bc5e5a0eb86bb85b533deb2d2c815666665f840a09e0aa3362bb6088736 \ + --hash=sha256:84709cea8f888e618c21ed9a0efdb1a59cc63141c403db8bf56c469b71ad56f2 \ + --hash=sha256:8bfaa0a28a1ab718bad6239979a5a57a8d1506d0caf2fba17e524dbb409441cf \ + --hash=sha256:bbcc4452a1eb10cd5217318c822fde4be279c9de8567f78bad24c773c21254f8 \ + --hash=sha256:cb36deafe6e2bce3b286e5d1f3e1c246e0ccdb8808ddb4550bb2792f2df298f2 \ + --hash=sha256:cf831615cc16ba324240de79d925eacae8265b7691412ac6b24221db157f6bd1 \ + --hash=sha256:dcdeb41c22ff3c66eef5e63049abf7639e0db4edee57ba70531fc1b6b133185d \ + --hash=sha256:dea103a1afcbc333bc0e992e64233d360d393d1e63d0bc88554f572365664348 \ + --hash=sha256:ef214b90556bb46a485b7da8258e59204c244b1b5b576fb71848819b468c44a7 \ + 
--hash=sha256:f3482abf0f9815e7246d461fab597aa179b7524628a4bc36f86a7dc418d2608d \ + --hash=sha256:f46386c24a65383a9081d68e9c2de909b1834ec74ff3013271f1bca9c2d233eb \ + --hash=sha256:f4b3824920e206b4f52abd7de621ea7a44fd3cb5c8daceb7c3612345dfc54f2e + # via paramiko +pyodbc==5.3.0 \ + --hash=sha256:01166162149adf2b8a6dc21a212718f205cabbbdff4047dc0c415af3fd85867e \ + --hash=sha256:0263323fc47082c2bf02562f44149446bbbfe91450d271e44bffec0c3143bfb1 \ + --hash=sha256:08b2439500e212625471d32f8fde418075a5ddec556e095e5a4ba56d61df2dc6 \ + --hash=sha256:0df7ff47fab91ea05548095b00e5eb87ed88ddf4648c58c67b4db95ea4913e23 \ + --hash=sha256:101313a21d2654df856a60e4a13763e4d9f6c5d3fd974bcf3fc6b4e86d1bbe8e \ + --hash=sha256:13656184faa3f2d5c6f19b701b8f247342ed581484f58bf39af7315c054e69db \ + --hash=sha256:1629af4706e9228d79dabb4863c11cceb22a6dab90700db0ef449074f0150c0d \ + --hash=sha256:197bb6ddafe356a916b8ee1b8752009057fce58e216e887e2174b24c7ab99269 \ + --hash=sha256:2035c7dfb71677cd5be64d3a3eb0779560279f0a8dc6e33673499498caa88937 \ + --hash=sha256:25b6766e56748eb1fc1d567d863e06cbb7b7c749a41dfed85db0031e696fa39a \ + --hash=sha256:25c4cfb2c08e77bc6e82f666d7acd52f0e52a0401b1876e60f03c73c3b8aedc0 \ + --hash=sha256:2eb7151ed0a1959cae65b6ac0454f5c8bbcd2d8bafeae66483c09d58b0c7a7fc \ + --hash=sha256:2fe0e063d8fb66efd0ac6dc39236c4de1a45f17c33eaded0d553d21c199f4d05 \ + --hash=sha256:349a9abae62a968b98f6bbd23d2825151f8d9de50b3a8f5f3271b48958fdb672 \ + --hash=sha256:363311bd40320b4a61454bebf7c38b243cd67c762ed0f8a5219de3ec90c96353 \ + --hash=sha256:3cc472c8ae2feea5b4512e23b56e2b093d64f7cbc4b970af51da488429ff7818 \ + --hash=sha256:3f1bdb3ce6480a17afaaef4b5242b356d4997a872f39e96f015cabef00613797 \ + --hash=sha256:452e7911a35ee12a56b111ac5b596d6ed865b83fcde8427127913df53132759e \ + --hash=sha256:46185a1a7f409761716c71de7b95e7bbb004390c650d00b0b170193e3d6224bb \ + --hash=sha256:46869b9a6555ff003ed1d8ebad6708423adf2a5c88e1a578b9f029fb1435186e \ + 
--hash=sha256:58635a1cc859d5af3f878c85910e5d7228fe5c406d4571bffcdd281375a54b39 \ + --hash=sha256:5cbe4d753723c8a8f65020b7a259183ef5f14307587165ce37e8c7e251951852 \ + --hash=sha256:5ceaed87ba2ea848c11223f66f629ef121f6ebe621f605cde9cfdee4fd9f4b68 \ + --hash=sha256:5dd3d5e469f89a3112cf8b0658c43108a4712fad65e576071e4dd44d2bd763c7 \ + --hash=sha256:5ebf6b5d989395efe722b02b010cb9815698a4d681921bf5db1c0e1195ac1bde \ + --hash=sha256:6132554ffbd7910524d643f13ce17f4a72f3a6824b0adef4e9a7f66efac96350 \ + --hash=sha256:6682cdec78f1302d0c559422c8e00991668e039ed63dece8bf99ef62173376a5 \ + --hash=sha256:676031723aac7dcbbd2813bddda0e8abf171b20ec218ab8dfb21d64a193430ea \ + --hash=sha256:705903acf6f43c44fc64e764578d9a88649eb21bf7418d78677a9d2e337f56f2 \ + --hash=sha256:729c535341bb09c476f219d6f7ab194bcb683c4a0a368010f1cb821a35136f05 \ + --hash=sha256:74528fe148980d0c735c0ebb4a4dc74643ac4574337c43c1006ac4d09593f92d \ + --hash=sha256:754d052030d00c3ac38da09ceb9f3e240e8dd1c11da8906f482d5419c65b9ef5 \ + --hash=sha256:7713c740a10f33df3cb08f49a023b7e1e25de0c7c99650876bbe717bc95ee780 \ + --hash=sha256:7e9ab0b91de28a5ab838ac4db0253d7cc8ce2452efe4ad92ee6a57b922bf0c24 \ + --hash=sha256:8339d3094858893c1a68ee1af93efc4dff18b8b65de54d99104b99af6306320d \ + --hash=sha256:8aa396c6d6af52ccd51b8c8a5bffbb46fd44e52ce07ea4272c1d28e5e5b12722 \ + --hash=sha256:9b987a25a384f31e373903005554230f5a6d59af78bce62954386736a902a4b3 \ + --hash=sha256:9cd3f0a9796b3e1170a9fa168c7e7ca81879142f30e20f46663b882db139b7d2 \ + --hash=sha256:a48d731432abaee5256ed6a19a3e1528b8881f9cb25cb9cf72d8318146ea991b \ + --hash=sha256:ac23feb7ddaa729f6b840639e92f83ff0ccaa7072801d944f1332cd5f5b05f47 \ + --hash=sha256:af4d8c9842fc4a6360c31c35508d6594d5a3b39922f61b282c2b4c9d9da99514 \ + --hash=sha256:afe7c4ac555a8d10a36234788fc6cfc22a86ce37fc5ba88a1f75b3e6696665dc \ + --hash=sha256:b180bc5e49b74fd40a24ef5b0fe143d0c234ac1506febe810d7434bf47cb925b \ + --hash=sha256:b35b9983ad300e5aea82b8d1661fc9d3afe5868de527ee6bd252dd550e61ecd6 \ + 
--hash=sha256:bc834567c2990584b9726cba365834d039380c9dbbcef3030ddeb00c6541b943 \ + --hash=sha256:bfeb3e34795d53b7d37e66dd54891d4f9c13a3889a8f5fe9640e56a82d770955 \ + --hash=sha256:c25dc9c41f61573bdcf61a3408c34b65e4c0f821b8f861ca7531b1353b389804 \ + --hash=sha256:c2eb0b08e24fe5c40c7ebe9240c5d3bd2f18cd5617229acee4b0a0484dc226f2 \ + --hash=sha256:c5c30c5cd40b751f77bbc73edd32c4498630939bcd4e72ee7e6c9a4b982cc5ca \ + --hash=sha256:c67e7f2ce649155ea89beb54d3b42d83770488f025cf3b6f39ca82e9c598a02e \ + --hash=sha256:c68d9c225a97aedafb7fff1c0e1bfe293093f77da19eaf200d0e988fa2718d16 \ + --hash=sha256:c6ccb5315ec9e081f5cbd66f36acbc820ad172b8fa3736cf7f993cdf69bd8a96 \ + --hash=sha256:c79df54bbc25bce9f2d87094e7b39089c28428df5443d1902b0cc5f43fd2da6f \ + --hash=sha256:cf18797a12e70474e1b7f5027deeeccea816372497e3ff2d46b15bec2d18a0cc \ + --hash=sha256:d255f6b117d05cfc046a5201fdf39535264045352ea536c35777cf66d321fbb8 \ + --hash=sha256:d32c3259762bef440707098010035bbc83d1c73d81a434018ab8c688158bd3bb \ + --hash=sha256:d89a7f2e24227150c13be8164774b7e1f9678321a4248f1356a465b9cc17d31e \ + --hash=sha256:e3c39de3005fff3ae79246f952720d44affc6756b4b85398da4c5ea76bf8f506 \ + --hash=sha256:e981db84fee4cebec67f41bd266e1e7926665f1b99c3f8f4ea73cd7f7666e381 \ + --hash=sha256:ebc3be93f61ea0553db88589e683ace12bf975baa954af4834ab89f5ee7bf8ae \ + --hash=sha256:f1ad0e93612a6201621853fc661209d82ff2a35892b7d590106fe8f97d9f1f2a \ + --hash=sha256:f927b440c38ade1668f0da64047ffd20ec34e32d817f9a60d07553301324b364 \ + --hash=sha256:fc5ac4f2165f7088e74ecec5413b5c304247949f9702c8853b0e43023b4187e8 \ + --hash=sha256:fe77eb9dcca5fc1300c9121f81040cc9011d28cff383e2c35416e9ec06d4bc95 + # via + # feast (setup.py) + # ibis-framework +pyopenssl==25.1.0 \ + --hash=sha256:2b11f239acc47ac2e5aca04fd7fa829800aeee22a2eb30d744572a157bd8a1ab \ + --hash=sha256:8d031884482e0c67ee92bf9a4d8cceb08d92aba7136432ffb0703c5280fc205b + # via snowflake-connector-python +pyparsing==3.3.1 \ + 
--hash=sha256:023b5e7e5520ad96642e2c6db4cb683d3970bd640cdf7115049a6e9c3682df82 \ + --hash=sha256:47fad0f17ac1e2cad3de3b458570fbc9b03560aa029ed5e16ee5554da9a2251c + # via great-expectations +pypdfium2==4.30.0 \ + --hash=sha256:0dfa61421b5eb68e1188b0b2231e7ba35735aef2d867d86e48ee6cab6975195e \ + --hash=sha256:119b2969a6d6b1e8d55e99caaf05290294f2d0fe49c12a3f17102d01c441bd29 \ + --hash=sha256:3d0dd3ecaffd0b6dbda3da663220e705cb563918249bda26058c6036752ba3a2 \ + --hash=sha256:48b5b7e5566665bc1015b9d69c1ebabe21f6aee468b509531c3c8318eeee2e16 \ + --hash=sha256:4e55689f4b06e2d2406203e771f78789bd4f190731b5d57383d05cf611d829de \ + --hash=sha256:4e6e50f5ce7f65a40a33d7c9edc39f23140c57e37144c2d6d9e9262a2a854854 \ + --hash=sha256:5eda3641a2da7a7a0b2f4dbd71d706401a656fea521b6b6faa0675b15d31a163 \ + --hash=sha256:90dbb2ac07be53219f56be09961eb95cf2473f834d01a42d901d13ccfad64b4c \ + --hash=sha256:b33ceded0b6ff5b2b93bc1fe0ad4b71aa6b7e7bd5875f1ca0cdfb6ba6ac01aab \ + --hash=sha256:cc3bf29b0db8c76cdfaac1ec1cde8edf211a7de7390fbf8934ad2aa9b4d6dfad \ + --hash=sha256:ee2410f15d576d976c2ab2558c93d392a25fb9f6635e8dd0a8a3a5241b275e0e \ + --hash=sha256:f1f78d2189e0ddf9ac2b7a9b9bd4f0c66f54d1389ff6c17e9fd9dc034d06eb3f \ + --hash=sha256:f33bd79e7a09d5f7acca3b0b69ff6c8a488869a7fab48fdf400fec6e20b9c8be + # via docling +pyproject-hooks==1.2.0 \ + --hash=sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8 \ + --hash=sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913 + # via + # build + # pip-tools +pyspark==4.1.1 \ + --hash=sha256:77f78984aa84fbe865c717dd37b49913b4e5c97d76ef6824f932f1aefa6621ec + # via feast (setup.py) +pytest==7.4.4 \ + --hash=sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280 \ + --hash=sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8 + # via + # feast (setup.py) + # pytest-asyncio + # pytest-benchmark + # pytest-cov + # pytest-env + # pytest-lazy-fixture + # pytest-mock + # pytest-ordering + # 
pytest-timeout + # pytest-xdist +pytest-asyncio==0.23.8 \ + --hash=sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2 \ + --hash=sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3 + # via feast (setup.py) +pytest-benchmark==3.4.1 \ + --hash=sha256:36d2b08c4882f6f997fd3126a3d6dfd70f3249cde178ed8bbc0b73db7c20f809 \ + --hash=sha256:40e263f912de5a81d891619032983557d62a3d85843f9a9f30b98baea0cd7b47 + # via feast (setup.py) +pytest-cov==7.0.0 \ + --hash=sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1 \ + --hash=sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861 + # via feast (setup.py) +pytest-env==1.1.3 \ + --hash=sha256:aada77e6d09fcfb04540a6e462c58533c37df35fa853da78707b17ec04d17dfc \ + --hash=sha256:fcd7dc23bb71efd3d35632bde1bbe5ee8c8dc4489d6617fb010674880d96216b + # via feast (setup.py) +pytest-lazy-fixture==0.6.3 \ + --hash=sha256:0e7d0c7f74ba33e6e80905e9bfd81f9d15ef9a790de97993e34213deb5ad10ac \ + --hash=sha256:e0b379f38299ff27a653f03eaa69b08a6fd4484e46fd1c9907d984b9f9daeda6 + # via feast (setup.py) +pytest-mock==1.10.4 \ + --hash=sha256:43ce4e9dd5074993e7c021bb1c22cbb5363e612a2b5a76bc6d956775b10758b7 \ + --hash=sha256:5bf5771b1db93beac965a7347dc81c675ec4090cb841e49d9d34637a25c30568 + # via feast (setup.py) +pytest-ordering==0.6 \ + --hash=sha256:27fba3fc265f5d0f8597e7557885662c1bdc1969497cd58aff6ed21c3b617de2 \ + --hash=sha256:3f314a178dbeb6777509548727dc69edf22d6d9a2867bf2d310ab85c403380b6 \ + --hash=sha256:561ad653626bb171da78e682f6d39ac33bb13b3e272d406cd555adb6b006bda6 + # via feast (setup.py) +pytest-timeout==1.4.2 \ + --hash=sha256:20b3113cf6e4e80ce2d403b6fb56e9e1b871b510259206d40ff8d609f48bda76 \ + --hash=sha256:541d7aa19b9a6b4e475c759fd6073ef43d7cdc9a92d95644c260076eb257a063 + # via feast (setup.py) +pytest-xdist==3.8.0 \ + --hash=sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88 \ + 
--hash=sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1 + # via feast (setup.py) +python-bidi==0.6.7 \ + --hash=sha256:01ff2fd676ef8351f32e820b2d3b61eac875a21702d2118263a2641b458e1996 \ + --hash=sha256:05fe5971110013610f0db40505d0b204edc756e92eafac1372a464f8b9162b11 \ + --hash=sha256:06650a164e63e94dc8a291cc9d415b4027cb1cce125bc9b02dac0f34d535ed47 \ + --hash=sha256:0cb75e8a410166fd677d55095e505bf6a4773c066f51efbda72d302ebc56e79b \ + --hash=sha256:0dbb4bbae212cca5bcf6e522fe8f572aff7d62544557734c2f810ded844d9eea \ + --hash=sha256:0f86e447e94ae78db7d56e7da2124c435eaee4425c87d3d92aea271317811112 \ + --hash=sha256:11c51579e01f768446a7e13a0059fea1530936a707abcbeaad9467a55cb16073 \ + --hash=sha256:1395e236c71f11267860b53293a33b19b991b06e0f4ac61045b892e6a99d96f2 \ + --hash=sha256:17572944e6d8fb616d111fc702c759da2bf7cedab85a3e4fa2af0c9eb95ed438 \ + --hash=sha256:19737d217088ef27014f98eac1827c5913e6fb1dea96332ed84ede61791070d9 \ + --hash=sha256:1ba28642928d1c8fdb18b0632fe931f156e888c646326a3ad8eb3e55ee904951 \ + --hash=sha256:1c061207212cd1db27bf6140b96dcd0536246f1e13e99bb5d03f4632f8e2ad7f \ + --hash=sha256:1c5fb99f774748de283fadf915106f130b74be1bade934b7f73a7a8488b95da1 \ + --hash=sha256:1dd0a5ec0d8710905cebb4c9e5018aa8464395a33cb32a3a6c2a951bf1984fe5 \ + --hash=sha256:24388c77cb00b8aa0f9c84beb7e3e523a3dac4f786ece64a1d8175a07b24da72 \ + --hash=sha256:24a4a268289bbe80ad7da3064d7325f1571173859e8ad75d2f99075d5278b02b \ + --hash=sha256:24afff65c581a5d6f658a9ec027d6719d19a1d8a4401000fdb22d2eeb677b8e3 \ + --hash=sha256:257d6dd0e07221f1dc8720fa61158471f5aae30d5f89837c38a026386151c250 \ + --hash=sha256:26a8fe0d532b966708fc5f8aea0602107fde4745a8a5ae961edd3cf02e807d07 \ + --hash=sha256:2a93b0394cc684d64356b0475858c116f1e335ffbaba388db93bf47307deadfa \ + --hash=sha256:2d28e2bdcadf5b6161bb4ee9313ce41eac746ba57e744168bf723a415a11af05 \ + --hash=sha256:349b89c3110bd25aa56d79418239ca4785d4bcc7a596e63bb996a9696fc6a907 \ + 
--hash=sha256:3a85275dfc24a96629da058c4c2fc93af6390aefe2f7cdde1500b6ac3fd40ca0 \ + --hash=sha256:3b63d19f3f56ff7f99bce5ca9ef8c811dbf0f509d8e84c1bc06105ed26a49528 \ + --hash=sha256:3b96744e4709f4445788a3645cea7ef8d7520ccd4fa8bbbfb3b650702e12c1e6 \ + --hash=sha256:414004fe9cba33d288ff4a04e1c9afe6a737f440595d01b5bbed00d750296bbd \ + --hash=sha256:4283f8b517411cc81b3c92d11998981fe54ac0d2300f4c58d803e0c071aba1ba \ + --hash=sha256:4636d572b357ab9f313c5340915c1cf51e3e54dd069351e02b6b76577fd1a854 \ + --hash=sha256:47deaada8949af3a790f2cd73b613f9bfa153b4c9450f91c44a60c3109a81f73 \ + --hash=sha256:49639743f1230648fd4fb47547f8a48ada9c5ca1426b17ac08e3be607c65394c \ + --hash=sha256:4c73cd980d45bb967799c7f0fc98ea93ae3d65b21ef2ba6abef6a057720bf483 \ + --hash=sha256:4d84e70923392f8c9611f0fb6b341577346ef6224f3809b05f0ae1fbf8f17578 \ + --hash=sha256:4ea928c31c7364098f853f122868f6f2155d6840661f7ea8b2ccfdf6084eb9f4 \ + --hash=sha256:5013ba963e9da606c4c03958cc737ebd5f8b9b8404bd71ab0d580048c746f875 \ + --hash=sha256:5debaab33562fdfc79ffdbd8d9c51cf07b8529de0e889d8cd145d78137aab21e \ + --hash=sha256:5ebc19f24e65a1f5c472e26d88e78b9d316e293bc6f205f32de4c4e99276336e \ + --hash=sha256:630cee960ba9e3016f95a8e6f725a621ddeff6fd287839f5693ccfab3f3a9b5c \ + --hash=sha256:6323e943c7672b271ad9575a2232508f17e87e81a78d7d10d6e93040e210eddf \ + --hash=sha256:6c051f2d28ca542092d01da8b5fe110fb6191ff58d298a54a93dc183bece63bf \ + --hash=sha256:6c19ab378fefb1f09623f583fcfa12ed42369a998ddfbd39c40908397243c56b \ + --hash=sha256:6df7be07af867ec1d121c92ea827efad4d77b25457c06eeab477b601e82b2340 \ + --hash=sha256:6f9fa1257e075eeeed67d21f95e411036b7ca2b5c78f757d4ac66485c191720a \ + --hash=sha256:7336a3c4ba4fc9e6741fbe60c6483266fe39e1f24830724dfce453471d11fa40 \ + --hash=sha256:73a88dc333efc42281bd800d5182c8625c6e11d109fc183fe3d7a11d48ab1150 \ + --hash=sha256:766d5f5a686eb99b53168a7bdfb338035931a609bdbbcb537cef9e050a86f359 \ + --hash=sha256:77bb4cbadf4121db395189065c58c9dd5d1950257cc1983004e6df4a3e2f97ad \ + 
--hash=sha256:77fea54c2379b93def4ed16db6390e1232e7b235679587295a23dd8b1925475f \ + --hash=sha256:8047c33b85f7790474a1f488bef95689f049976a4e1c6f213a8d075d180a93e4 \ + --hash=sha256:80e6fd06f6e4074d183cea73962c89cf76cb4f70c0ee403689f57a429ebde488 \ + --hash=sha256:849a57d39feaf897955d0b19bbf4796bea53d1bcdf83b82e0a7b059167eb2049 \ + --hash=sha256:8678c2272e7bd60a75f781409e900c9ddb9f01f55c625d83ae0d49dfc6a2674f \ + --hash=sha256:8814db38fa317bebec8eb74b826bae7d0cb978a7eca30dfe4ecf60e61f06ee0b \ + --hash=sha256:8860d67dc04dc530b8b4f588f38b7341a76f2ec44a45685a2d54e9dcffa5d15a \ + --hash=sha256:898db0ea3e4aaa95b7fecba02a7560dfbf368f9d85053f2875f6d610c4d4ec2c \ + --hash=sha256:8a17631e3e691eec4ae6a370f7b035cf0a5767f4457bd615d11728c23df72e43 \ + --hash=sha256:8a18c61817f3210ba74ad5792c8a5048d9550ba233233a0a8fe35800350988f4 \ + --hash=sha256:8d4e621caadfdbc73d36eabdb2f392da850d28c58b020738411d09dda6208509 \ + --hash=sha256:94dbfd6a6ec0ae64b5262290bf014d6063f9ac8688bda9ec668dc175378d2c80 \ + --hash=sha256:95867a07c5dee0ea2340fe1d0e4f6d9f5c5687d473193b6ee6f86fa44aac45d1 \ + --hash=sha256:95c9de7ebc55ffb777548f2ecaf4b96b0fa0c92f42bf4d897b9f4cd164ec7394 \ + --hash=sha256:9adeec7cab0f2c2c291bd7faf9fa3fa233365fd0bf1c1c27a6ddd6cc563d4b32 \ + --hash=sha256:9c463ae15e94b1c6a8a50bd671d6166b0b0d779fd1e56cbf46d8a4a84c9aa2d0 \ + --hash=sha256:9d9de35eb5987da27dd81e371c52142dd8e924bd61c1006003071ea05a735587 \ + --hash=sha256:a2eb8fca918c7381531035c3aae31c29a1c1300ab8a63cad1ec3a71331096c78 \ + --hash=sha256:a4319f478ab1b90bbbe9921606ecb7baa0ebf0b332e821d41c3abdf1a30f0c35 \ + --hash=sha256:a507fe6928a27a308e04ebf2065719b7850d1bf9ff1924f4e601ef77758812bd \ + --hash=sha256:a8892a7da0f617135fe9c92dc7070d13a0f96ab3081f9db7ff5b172a3905bd78 \ + --hash=sha256:a99d898ad1a399d9c8cab5561b3667fd24f4385820ac90c3340aa637aa5adfc9 \ + --hash=sha256:aa4136f8ccb9a8cd32befd1b3882c2597e6791e64e8b3cf3129c55549b5de62f \ + --hash=sha256:ab2a5177522b62426db897b655a02f574e27d9735bbeb6da41bc981b771df636 \ + 
--hash=sha256:ab806fd026bfd48bade5e21e06d0d799cbfad32f236989ff6f37db03a5fbe34f \ + --hash=sha256:ad5f0847da00687f52d2b81828e8d887bdea9eb8686a9841024ea7a0e153028e \ + --hash=sha256:b0bee27fb596a0f518369c275a965d0448c39a0730e53a030b311bb10562d4d5 \ + --hash=sha256:b31d66b62736b8514982a24a7dedcf8c062b27a8e9b51e52d7a5899045a45fe1 \ + --hash=sha256:b38ddfab41d10e780edb431edc30aec89bee4ce43d718e3896e99f33dae5c1d3 \ + --hash=sha256:be1bdbd52145dfe46880d8bb56eacc25aa75c3bb075fa103de7974295eb2811f \ + --hash=sha256:c10065081c0e137975de5d9ba2ff2306286dbf5e0c586d4d5aec87c856239b41 \ + --hash=sha256:c11c62a3cdb9d1426b1536de9e3446cb09c7d025bd4df125275cae221f214899 \ + --hash=sha256:c3777ae3e088e94df854fbcbd8d59f9239b74aac036cb6bbd19f8035c8e42478 \ + --hash=sha256:c3d93171dd65b36eca5367acf19eef82c79b4df557cb4bd0daf323b7a27f2d3b \ + --hash=sha256:c9a679b24f5c6f366a0dec75745e1abeae2f597f033d0d54c74cbe62e7e6ae28 \ + --hash=sha256:caa71c723f512f8d859fa239573086e16f38ffc426b5b2f7dab5d40fdb356c80 \ + --hash=sha256:ce86d9dfc6b409ad16556384244572bb3cbefa2ca0f0eab7fba0ff2112b2f068 \ + --hash=sha256:d4cd82e65b5aeb31bd73534e61ece1cab625f4bcbdc13bc4ddc5f8cbfb37c24a \ + --hash=sha256:d524a4ba765bae9b950706472a77a887a525ed21144fe4b41f6190f6e57caa2c \ + --hash=sha256:d7310312a68fdb1a8249cf114acb5435aa6b6a958b15810f053c1df5f98476e4 \ + --hash=sha256:d8274ff02d447cca026ba00f56070ba15f95e184b2d028ee0e4b6c9813d2aaf9 \ + --hash=sha256:d879be7fb5296409e18731c7ba666d56ecd45b816b2c9eb35138aa1d7777aeb5 \ + --hash=sha256:d87ed09e5c9b6d2648e8856a4e556147b9d3cd4d63905fa664dd6706bc414256 \ + --hash=sha256:dde1c3f3edb1f0095dcbf79cf8a0bb768f9539e809d0ad010d78200eea97d42a \ + --hash=sha256:df5e9db9539d70426f5d20c7ebb6f7b33da5fbd40620e11261fe3fba7e177145 \ + --hash=sha256:e7cad66317f12f0fd755fe41ee7c6b06531d2189a9048a8f37addb5109f7e3e3 \ + --hash=sha256:ec1694134961b71ac05241ac989b49ccf08e232b5834d5fc46f8a7c3bb1c13a9 \ + --hash=sha256:ec985386bc3cd54155f2ef0434fccbfd743617ed6fc1a84dae2ab1de6062e0c6 \ + 
--hash=sha256:ef9d103706560c15fecaf7d3cff939e0f68ce5763cf0e64d0e4e5d37f9bdd2d1 \ + --hash=sha256:f1350033431d75be749273236dcfc808e54404cd6ece6204cdb1bc4ccc163455 \ + --hash=sha256:f1fe71c203f66bc169a393964d5702f9251cfd4d70279cb6453fdd42bd2e675f \ + --hash=sha256:f24189dc3aea3a0a94391a047076e1014306b39ba17d7a38ebab510553cd1a97 \ + --hash=sha256:f57726b5a90d818625e6996f5116971b7a4ceb888832337d0e2cf43d1c362a90 \ + --hash=sha256:f7c055a50d068b3a924bd33a327646346839f55bcb762a26ec3fde8ea5d40564 \ + --hash=sha256:f7e5072269c34a1b719910ee4decf13b288159fb320f18aba3885f6b6aab7753 \ + --hash=sha256:f7e507e1e798ebca77ddc9774fd405107833315ad802cfdaa1ab07b6d9154fc8 \ + --hash=sha256:fbbffb948a32f9783d1a28bc0c53616f0a76736ed1e7c1d62e3e99a8dfaab869 \ + --hash=sha256:fd87d112eda1f0528074e1f7c0312881816cb75854133021124269a27c6c48dc \ + --hash=sha256:ff06e4aa781aa4f68fbfaf1e727fe221fa1c552fef8ae70b6d2a0178e1f229ad + # via easyocr +python-dateutil==2.9.0 \ + --hash=sha256:78e73e19c63f5b20ffa567001531680d939dc042bf7850431877645523c66709 \ + --hash=sha256:cbf2f1da5e6083ac2fbfd4da39a25f34312230110440f424a14c7558bb85d82e + # via + # feast (setup.py) + # aiobotocore + # arrow + # botocore + # elasticsearch + # google-cloud-bigquery + # great-expectations + # ibis-framework + # jupyter-client + # kubernetes + # moto + # pandas + # trino +python-docx==1.2.0 \ + --hash=sha256:3fd478f3250fbbbfd3b94fe1e985955737c145627498896a8a6bf81f4baf66c7 \ + --hash=sha256:7bc9d7b7d8a69c9c02ca09216118c86552704edc23bac179283f2e38f86220ce + # via docling +python-dotenv==1.2.1 \ + --hash=sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6 \ + --hash=sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61 + # via + # pydantic-settings + # pymilvus + # testcontainers + # uvicorn +python-json-logger==4.0.0 \ + --hash=sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2 \ + --hash=sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f + # via 
jupyter-events +python-keycloak==4.2.2 \ + --hash=sha256:1d43a1accd4a038ed39317fcb3eb78211df6c75bbcbc4c482c99ee76327136f2 \ + --hash=sha256:5137fd87c69031a372a578df96bae96b9aead2c9dad976613bc978e9e0246a1e + # via feast (setup.py) +python-multipart==0.0.21 \ + --hash=sha256:7137ebd4d3bbf70ea1622998f902b97a29434a9e8dc40eb203bbcf7c2a2cba92 \ + --hash=sha256:cf7a6713e01c87aa35387f4774e812c4361150938d20d232800f75ffcf266090 + # via mcp +python-pptx==1.0.2 \ + --hash=sha256:160838e0b8565a8b1f67947675886e9fea18aa5e795db7ae531606d68e785cba \ + --hash=sha256:479a8af0eaf0f0d76b6f00b0887732874ad2e3188230315290cd1f9dd9cc7095 + # via docling +pytz==2025.2 \ + --hash=sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3 \ + --hash=sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00 + # via + # clickhouse-connect + # great-expectations + # ibis-framework + # pandas + # snowflake-connector-python + # trino +pyyaml==6.0.3 \ + --hash=sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c \ + --hash=sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a \ + --hash=sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3 \ + --hash=sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956 \ + --hash=sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6 \ + --hash=sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c \ + --hash=sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65 \ + --hash=sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a \ + --hash=sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0 \ + --hash=sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b \ + --hash=sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1 \ + --hash=sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6 \ + 
--hash=sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7 \ + --hash=sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e \ + --hash=sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007 \ + --hash=sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310 \ + --hash=sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4 \ + --hash=sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9 \ + --hash=sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295 \ + --hash=sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea \ + --hash=sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0 \ + --hash=sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e \ + --hash=sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac \ + --hash=sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9 \ + --hash=sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7 \ + --hash=sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35 \ + --hash=sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb \ + --hash=sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b \ + --hash=sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69 \ + --hash=sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5 \ + --hash=sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b \ + --hash=sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c \ + --hash=sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369 \ + --hash=sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd \ + --hash=sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824 \ + --hash=sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198 \ + 
--hash=sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065 \ + --hash=sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c \ + --hash=sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c \ + --hash=sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764 \ + --hash=sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196 \ + --hash=sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b \ + --hash=sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00 \ + --hash=sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac \ + --hash=sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8 \ + --hash=sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e \ + --hash=sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28 \ + --hash=sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3 \ + --hash=sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5 \ + --hash=sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4 \ + --hash=sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b \ + --hash=sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf \ + --hash=sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5 \ + --hash=sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702 \ + --hash=sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8 \ + --hash=sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788 \ + --hash=sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da \ + --hash=sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d \ + --hash=sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc \ + --hash=sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c \ + 
--hash=sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba \ + --hash=sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f \ + --hash=sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917 \ + --hash=sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5 \ + --hash=sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26 \ + --hash=sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f \ + --hash=sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b \ + --hash=sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be \ + --hash=sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c \ + --hash=sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3 \ + --hash=sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6 \ + --hash=sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926 \ + --hash=sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0 + # via + # feast (setup.py) + # accelerate + # dask + # datasets + # docling-core + # easyocr + # huggingface-hub + # ibis-substrait + # jupyter-events + # kubernetes + # openshift-client + # pre-commit + # ray + # responses + # timm + # transformers + # uvicorn +pyzmq==27.1.0 \ + --hash=sha256:01c0e07d558b06a60773744ea6251f769cd79a41a97d11b8bf4ab8f034b0424d \ + --hash=sha256:01f9437501886d3a1dd4b02ef59fb8cc384fa718ce066d52f175ee49dd5b7ed8 \ + --hash=sha256:03ff0b279b40d687691a6217c12242ee71f0fba28bf8626ff50e3ef0f4410e1e \ + --hash=sha256:05b12f2d32112bf8c95ef2e74ec4f1d4beb01f8b5e703b38537f8849f92cb9ba \ + --hash=sha256:0790a0161c281ca9723f804871b4027f2e8b5a528d357c8952d08cd1a9c15581 \ + --hash=sha256:08363b2011dec81c354d694bdecaef4770e0ae96b9afea70b3f47b973655cc05 \ + --hash=sha256:08e90bb4b57603b84eab1d0ca05b3bbb10f60c1839dc471fc1c9e1507bef3386 \ + 
--hash=sha256:0c996ded912812a2fcd7ab6574f4ad3edc27cb6510349431e4930d4196ade7db \ + --hash=sha256:0de3028d69d4cdc475bfe47a6128eb38d8bc0e8f4d69646adfbcd840facbac28 \ + --hash=sha256:15c8bd0fe0dabf808e2d7a681398c4e5ded70a551ab47482067a572c054c8e2e \ + --hash=sha256:1779be8c549e54a1c38f805e56d2a2e5c009d26de10921d7d51cfd1c8d4632ea \ + --hash=sha256:18339186c0ed0ce5835f2656cdfb32203125917711af64da64dbaa3d949e5a1b \ + --hash=sha256:18770c8d3563715387139060d37859c02ce40718d1faf299abddcdcc6a649066 \ + --hash=sha256:190cbf120fbc0fc4957b56866830def56628934a9d112aec0e2507aa6a032b97 \ + --hash=sha256:19c9468ae0437f8074af379e986c5d3d7d7bfe033506af442e8c879732bedbe0 \ + --hash=sha256:1c179799b118e554b66da67d88ed66cd37a169f1f23b5d9f0a231b4e8d44a113 \ + --hash=sha256:1f0b2a577fd770aa6f053211a55d1c47901f4d537389a034c690291485e5fe92 \ + --hash=sha256:1f8426a01b1c4098a750973c37131cf585f61c7911d735f729935a0c701b68d3 \ + --hash=sha256:226b091818d461a3bef763805e75685e478ac17e9008f49fce2d3e52b3d58b86 \ + --hash=sha256:250e5436a4ba13885494412b3da5d518cd0d3a278a1ae640e113c073a5f88edd \ + --hash=sha256:346e9ba4198177a07e7706050f35d733e08c1c1f8ceacd5eb6389d653579ffbc \ + --hash=sha256:3837439b7f99e60312f0c926a6ad437b067356dc2bc2ec96eb395fd0fe804233 \ + --hash=sha256:3970778e74cb7f85934d2b926b9900e92bfe597e62267d7499acc39c9c28e345 \ + --hash=sha256:43ad9a73e3da1fab5b0e7e13402f0b2fb934ae1c876c51d0afff0e7c052eca31 \ + --hash=sha256:448f9cb54eb0cee4732b46584f2710c8bc178b0e5371d9e4fc8125201e413a74 \ + --hash=sha256:452631b640340c928fa343801b0d07eb0c3789a5ffa843f6e1a9cee0ba4eb4fc \ + --hash=sha256:49d3980544447f6bd2968b6ac913ab963a49dcaa2d4a2990041f16057b04c429 \ + --hash=sha256:4a19387a3dddcc762bfd2f570d14e2395b2c9701329b266f83dd87a2b3cbd381 \ + --hash=sha256:4c618fbcd069e3a29dcd221739cacde52edcc681f041907867e0f5cc7e85f172 \ + --hash=sha256:50081a4e98472ba9f5a02850014b4c9b629da6710f8f14f3b15897c666a28f1b \ + --hash=sha256:507b6f430bdcf0ee48c0d30e734ea89ce5567fd7b8a0f0044a369c176aa44556 \ + 
--hash=sha256:508e23ec9bc44c0005c4946ea013d9317ae00ac67778bd47519fdf5a0e930ff4 \ + --hash=sha256:510869f9df36ab97f89f4cff9d002a89ac554c7ac9cadd87d444aa4cf66abd27 \ + --hash=sha256:53b40f8ae006f2734ee7608d59ed661419f087521edbfc2149c3932e9c14808c \ + --hash=sha256:544b4e3b7198dde4a62b8ff6685e9802a9a1ebf47e77478a5eb88eca2a82f2fd \ + --hash=sha256:5bbf8d3630bf96550b3be8e1fc0fea5cbdc8d5466c1192887bd94869da17a63e \ + --hash=sha256:677e744fee605753eac48198b15a2124016c009a11056f93807000ab11ce6526 \ + --hash=sha256:6bb54ca21bcfe361e445256c15eedf083f153811c37be87e0514934d6913061e \ + --hash=sha256:6df079c47d5902af6db298ec92151db82ecb557af663098b92f2508c398bb54f \ + --hash=sha256:6f3afa12c392f0a44a2414056d730eebc33ec0926aae92b5ad5cf26ebb6cc128 \ + --hash=sha256:7200bb0f03345515df50d99d3db206a0a6bee1955fbb8c453c76f5bf0e08fb96 \ + --hash=sha256:722ea791aa233ac0a819fc2c475e1292c76930b31f1d828cb61073e2fe5e208f \ + --hash=sha256:726b6a502f2e34c6d2ada5e702929586d3ac948a4dbbb7fed9854ec8c0466027 \ + --hash=sha256:753d56fba8f70962cd8295fb3edb40b9b16deaa882dd2b5a3a2039f9ff7625aa \ + --hash=sha256:75a2f36223f0d535a0c919e23615fc85a1e23b71f40c7eb43d7b1dedb4d8f15f \ + --hash=sha256:7be883ff3d722e6085ee3f4afc057a50f7f2e0c72d289fd54df5706b4e3d3a50 \ + --hash=sha256:7ccc0700cfdf7bd487bea8d850ec38f204478681ea02a582a8da8171b7f90a1c \ + --hash=sha256:8085a9fba668216b9b4323be338ee5437a235fe275b9d1610e422ccc279733e2 \ + --hash=sha256:80d834abee71f65253c91540445d37c4c561e293ba6e741b992f20a105d69146 \ + --hash=sha256:849ca054d81aa1c175c49484afaaa5db0622092b5eccb2055f9f3bb8f703782d \ + --hash=sha256:90e6e9441c946a8b0a667356f7078d96411391a3b8f80980315455574177ec97 \ + --hash=sha256:93ad4b0855a664229559e45c8d23797ceac03183c7b6f5b4428152a6b06684a5 \ + --hash=sha256:9541c444cfe1b1c0156c5c86ece2bb926c7079a18e7b47b0b1b3b1b875e5d098 \ + --hash=sha256:96c71c32fff75957db6ae33cd961439f386505c6e6b377370af9b24a1ef9eafb \ + --hash=sha256:9a916f76c2ab8d045b19f2286851a38e9ac94ea91faf65bd64735924522a8b32 \ + 
--hash=sha256:9c1790386614232e1b3a40a958454bdd42c6d1811837b15ddbb052a032a43f62 \ + --hash=sha256:9ce490cf1d2ca2ad84733aa1d69ce6855372cb5ce9223802450c9b2a7cba0ccf \ + --hash=sha256:a1aa0ee920fb3825d6c825ae3f6c508403b905b698b6460408ebd5bb04bbb312 \ + --hash=sha256:a5b42d7a0658b515319148875fcb782bbf118dd41c671b62dae33666c2213bda \ + --hash=sha256:ac0765e3d44455adb6ddbf4417dcce460fc40a05978c08efdf2948072f6db540 \ + --hash=sha256:ac25465d42f92e990f8d8b0546b01c391ad431c3bf447683fdc40565941d0604 \ + --hash=sha256:ad68808a61cbfbbae7ba26d6233f2a4aa3b221de379ce9ee468aa7a83b9c36b0 \ + --hash=sha256:add071b2d25f84e8189aaf0882d39a285b42fa3853016ebab234a5e78c7a43db \ + --hash=sha256:b1267823d72d1e40701dcba7edc45fd17f71be1285557b7fe668887150a14b78 \ + --hash=sha256:b2e592db3a93128daf567de9650a2f3859017b3f7a66bc4ed6e4779d6034976f \ + --hash=sha256:b721c05d932e5ad9ff9344f708c96b9e1a485418c6618d765fca95d4daacfbef \ + --hash=sha256:bafcb3dd171b4ae9f19ee6380dfc71ce0390fefaf26b504c0e5f628d7c8c54f2 \ + --hash=sha256:bd67e7c8f4654bef471c0b1ca6614af0b5202a790723a58b79d9584dc8022a78 \ + --hash=sha256:bf7b38f9fd7b81cb6d9391b2946382c8237fd814075c6aa9c3b746d53076023b \ + --hash=sha256:c0bb87227430ee3aefcc0ade2088100e528d5d3298a0a715a64f3d04c60ba02f \ + --hash=sha256:c17e03cbc9312bee223864f1a2b13a99522e0dc9f7c5df0177cd45210ac286e6 \ + --hash=sha256:c65047adafe573ff023b3187bb93faa583151627bc9c51fc4fb2c561ed689d39 \ + --hash=sha256:c895a6f35476b0c3a54e3eb6ccf41bf3018de937016e6e18748317f25d4e925f \ + --hash=sha256:c9f7f6e13dff2e44a6afeaf2cf54cee5929ad64afaf4d40b50f93c58fc687355 \ + --hash=sha256:ce980af330231615756acd5154f29813d553ea555485ae712c491cd483df6b7a \ + --hash=sha256:cedc4c68178e59a4046f97eca31b148ddcf51e88677de1ef4e78cf06c5376c9a \ + --hash=sha256:cf44a7763aea9298c0aa7dbf859f87ed7012de8bda0f3977b6fb1d96745df856 \ + --hash=sha256:d54530c8c8b5b8ddb3318f481297441af102517602b569146185fa10b63f4fa9 \ + --hash=sha256:da96ecdcf7d3919c3be2de91a8c513c186f6762aa6cf7c01087ed74fad7f0968 \ + 
--hash=sha256:dc5dbf68a7857b59473f7df42650c621d7e8923fb03fa74a526890f4d33cc4d7 \ + --hash=sha256:dd2fec2b13137416a1c5648b7009499bcc8fea78154cd888855fa32514f3dad1 \ + --hash=sha256:df7cd397ece96cf20a76fae705d40efbab217d217897a5053267cd88a700c266 \ + --hash=sha256:e2687c2d230e8d8584fbea433c24382edfeda0c60627aca3446aa5e58d5d1831 \ + --hash=sha256:e30a74a39b93e2e1591b58eb1acef4902be27c957a8720b0e368f579b82dc22f \ + --hash=sha256:e343d067f7b151cfe4eb3bb796a7752c9d369eed007b91231e817071d2c2fec7 \ + --hash=sha256:e829529fcaa09937189178115c49c504e69289abd39967cd8a4c215761373394 \ + --hash=sha256:eca6b47df11a132d1745eb3b5b5e557a7dae2c303277aa0e69c6ba91b8736e07 \ + --hash=sha256:f30f395a9e6fbca195400ce833c731e7b64c3919aa481af4d88c3759e0cb7496 \ + --hash=sha256:f328d01128373cb6763823b2b4e7f73bdf767834268c565151eacb3b7a392f90 \ + --hash=sha256:f605d884e7c8be8fe1aa94e0a783bf3f591b84c24e4bc4f3e7564c82ac25e271 \ + --hash=sha256:fbb4f2400bfda24f12f009cba62ad5734148569ff4949b1b6ec3b519444342e6 \ + --hash=sha256:ff8d114d14ac671d88c89b9224c63d6c4e5a613fe8acd5594ce53d752a3aafe9 + # via + # ipykernel + # jupyter-client + # jupyter-server +qdrant-client==1.16.2 \ + --hash=sha256:442c7ef32ae0f005e88b5d3c0783c63d4912b97ae756eb5e052523be682f17d3 \ + --hash=sha256:ca4ef5f9be7b5eadeec89a085d96d5c723585a391eb8b2be8192919ab63185f0 + # via feast (setup.py) +ray[data, default]==2.52.1 \ + --hash=sha256:08eb8f5fd55292ba6bee363a32491136a5e54af54e007f81e0603986fbea41a4 \ + --hash=sha256:24694e60cdc7770b90f123cc578cabb9d1a231c1fe673b5da0027b118de45846 \ + --hash=sha256:2b57ef272a2a0a0dbae6d18d70aa541eab620b4fe3b44d50466d3a533c16f9d9 \ + --hash=sha256:4e8478544fef69a17d865431c0bebdcfeff7c0f76a306f29b73c3bc3cbb0bdb9 \ + --hash=sha256:65bf461fdfe4ffa667c46f9455f8740b2ad6c1fa471b461d5f5cf6b7baf177b5 \ + --hash=sha256:6831592fedf0a122016f5dab4b67d85fa3d4db3b21f588d18834b5c031396d1c \ + --hash=sha256:8045172ad3fcff62b9dab9a4cd2e0991ad0e27fc814fe625a8d3a120306651d6 \ + 
--hash=sha256:843c0108ad72bb7fc6c23a22e29e6099546a5eaad3ad675c78a146d9080f6ec6 \ + --hash=sha256:993194a8be70540e0f819862031bbf19a64401fbe6c31b42065fd313ba466d34 \ + --hash=sha256:a5a3c268d45060c50cd029979ecc5f1eaaec040b19fa88dd4fe9e927d19ff13e \ + --hash=sha256:b3f9e61b799fb3cc8fd7077a3d2eb676ddfef7db644f6b6a2b657c5c3214cf19 \ + --hash=sha256:b5bc29548abb0a0a7ae9e6ff3b0ccca2824edaf011a4336e15a32793d574fbfd \ + --hash=sha256:bbe492c780a39a64bd3d0766cad10d54cf12222df88d287ec2d8f2d52de37c79 \ + --hash=sha256:e3826aeb4e4399de0c6885bd8be7ce2f629fa0010f0013f1183e0726b3d25e40 \ + --hash=sha256:f59e3b2d1a1466ac0778f2c6fac9ccb5f30107d77e3dddd1d60167248d268474 + # via codeflare-sdk +redis==4.6.0 \ + --hash=sha256:585dc516b9eb042a619ef0a39c3d7d55fe81bdb4df09a52c9cdde0d07bf1aa7d \ + --hash=sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c + # via feast (setup.py) +referencing==0.37.0 \ + --hash=sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231 \ + --hash=sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8 + # via + # jsonschema + # jsonschema-specifications + # jupyter-events +regex==2026.1.15 \ + --hash=sha256:0057de9eaef45783ff69fa94ae9f0fd906d629d0bd4c3217048f46d1daa32e9b \ + --hash=sha256:008b185f235acd1e53787333e5690082e4f156c44c87d894f880056089e9bc7c \ + --hash=sha256:05d75a668e9ea16f832390d22131fe1e8acc8389a694c8febc3e340b0f810b93 \ + --hash=sha256:069f56a7bf71d286a6ff932a9e6fb878f151c998ebb2519a9f6d1cee4bffdba3 \ + --hash=sha256:0751a26ad39d4f2ade8fe16c59b2bf5cb19eb3d2cd543e709e583d559bd9efde \ + --hash=sha256:08df9722d9b87834a3d701f3fca570b2be115654dbfd30179f30ab2f39d606d3 \ + --hash=sha256:0bda75ebcac38d884240914c6c43d8ab5fb82e74cde6da94b43b17c411aa4c2b \ + --hash=sha256:0bf065240704cb8951cc04972cf107063917022511273e0969bdb34fc173456c \ + --hash=sha256:0bf650f26087363434c4e560011f8e4e738f6f3e029b85d4904c50135b86cfa5 \ + --hash=sha256:0dcd31594264029b57bf16f37fd7248a70b3b764ed9e0839a8f271b2d22c0785 \ + 
--hash=sha256:0f0c7684c7f9ca241344ff95a1de964f257a5251968484270e91c25a755532c5 \ + --hash=sha256:124dc36c85d34ef2d9164da41a53c1c8c122cfb1f6e1ec377a1f27ee81deb794 \ + --hash=sha256:164759aa25575cbc0651bef59a0b18353e54300d79ace8084c818ad8ac72b7d5 \ + --hash=sha256:166551807ec20d47ceaeec380081f843e88c8949780cd42c40f18d16168bed10 \ + --hash=sha256:1704d204bd42b6bb80167df0e4554f35c255b579ba99616def38f69e14a5ccb9 \ + --hash=sha256:18388a62989c72ac24de75f1449d0fb0b04dfccd0a1a7c1c43af5eb503d890f6 \ + --hash=sha256:194312a14819d3e44628a44ed6fea6898fdbecb0550089d84c403475138d0a09 \ + --hash=sha256:1ae6020fb311f68d753b7efa9d4b9a5d47a5d6466ea0d5e3b5a471a960ea6e4a \ + --hash=sha256:1cb740d044aff31898804e7bf1181cc72c03d11dfd19932b9911ffc19a79070a \ + --hash=sha256:1e1808471fbe44c1a63e5f577a1d5f02fe5d66031dcbdf12f093ffc1305a858e \ + --hash=sha256:1e8cd52557603f5c66a548f69421310886b28b7066853089e1a71ee710e1cdc1 \ + --hash=sha256:21ca32c28c30d5d65fc9886ff576fc9b59bbca08933e844fa2363e530f4c8218 \ + --hash=sha256:2748c1ec0663580b4510bd89941a31560b4b439a0b428b49472a3d9944d11cd8 \ + --hash=sha256:27618391db7bdaf87ac6c92b31e8f0dfb83a9de0075855152b720140bda177a2 \ + --hash=sha256:2a8d7b50c34578d0d3bf7ad58cde9652b7d683691876f83aedc002862a35dc5e \ + --hash=sha256:2b091aefc05c78d286657cd4db95f2e6313375ff65dcf085e42e4c04d9c8d410 \ + --hash=sha256:2c2b80399a422348ce5de4fe40c418d6299a0fa2803dd61dc0b1a2f28e280fcf \ + --hash=sha256:2f2775843ca49360508d080eaa87f94fa248e2c946bbcd963bb3aae14f333413 \ + --hash=sha256:3038a62fc7d6e5547b8915a3d927a0fbeef84cdbe0b1deb8c99bbd4a8961b52a \ + --hash=sha256:32655d17905e7ff8ba5c764c43cb124e34a9245e45b83c22e81041e1071aee10 \ + --hash=sha256:343db82cb3712c31ddf720f097ef17c11dab2f67f7a3e7be976c4f82eba4e6df \ + --hash=sha256:3601ffb5375de85a16f407854d11cca8fe3f5febbe3ac78fb2866bb220c74d10 \ + --hash=sha256:3d6ce5ae80066b319ae3bc62fd55a557c9491baa5efd0d355f0de08c4ba54e79 \ + --hash=sha256:3d7d92495f47567a9b1669c51fc8d6d809821849063d168121ef801bbc213846 \ + 
--hash=sha256:40c86d8046915bb9aeb15d3f3f15b6fd500b8ea4485b30e1bbc799dab3fe29f8 \ + --hash=sha256:4161d87f85fa831e31469bfd82c186923070fc970b9de75339b68f0c75b51903 \ + --hash=sha256:41aef6f953283291c4e4e6850607bd71502be67779586a61472beacb315c97ec \ + --hash=sha256:453078802f1b9e2b7303fb79222c054cb18e76f7bdc220f7530fdc85d319f99e \ + --hash=sha256:492534a0ab925d1db998defc3c302dae3616a2fc3fe2e08db1472348f096ddf2 \ + --hash=sha256:4c5ef43b5c2d4114eb8ea424bb8c9cec01d5d17f242af88b2448f5ee81caadbc \ + --hash=sha256:4c8fcc5793dde01641a35905d6731ee1548f02b956815f8f1cab89e515a5bdf1 \ + --hash=sha256:4def140aa6156bc64ee9912383d4038f3fdd18fee03a6f222abd4de6357ce42a \ + --hash=sha256:4e3dd93c8f9abe8aa4b6c652016da9a3afa190df5ad822907efe6b206c09896e \ + --hash=sha256:505831646c945e3e63552cc1b1b9b514f0e93232972a2d5bedbcc32f15bc82e3 \ + --hash=sha256:5170907244b14303edc5978f522f16c974f32d3aa92109fabc2af52411c9433b \ + --hash=sha256:55b4ea996a8e4458dd7b584a2f89863b1655dd3d17b88b46cbb9becc495a0ec5 \ + --hash=sha256:55e9d0118d97794367309635df398bdfd7c33b93e2fdfa0b239661cd74b4c14e \ + --hash=sha256:56a5595d0f892f214609c9f76b41b7428bed439d98dc961efafdd1354d42baae \ + --hash=sha256:57e7d17f59f9ebfa9667e6e5a1c0127b96b87cb9cede8335482451ed00788ba4 \ + --hash=sha256:5ef19071f4ac9f0834793af85bd04a920b4407715624e40cb7a0631a11137cdf \ + --hash=sha256:5ff818702440a5878a81886f127b80127f5d50563753a28211482867f8318106 \ + --hash=sha256:619843841e220adca114118533a574a9cd183ed8a28b85627d2844c500a2b0db \ + --hash=sha256:621f73a07595d83f28952d7bd1e91e9d1ed7625fb7af0064d3516674ec93a2a2 \ + --hash=sha256:693b465171707bbe882a7a05de5e866f33c76aa449750bee94a8d90463533cc9 \ + --hash=sha256:6bfc31a37fd1592f0c4fc4bfc674b5c42e52efe45b4b7a6a14f334cca4bcebe4 \ + --hash=sha256:6d220a2517f5893f55daac983bfa9fe998a7dbcaee4f5d27a88500f8b7873788 \ + --hash=sha256:6e42844ad64194fa08d5ccb75fe6a459b9b08e6d7296bd704460168d58a388f3 \ + --hash=sha256:726ea4e727aba21643205edad8f2187ec682d3305d790f73b7a51c7587b64bdd \ + 
--hash=sha256:74f45d170a21df41508cb67165456538425185baaf686281fa210d7e729abc34 \ + --hash=sha256:7dcc02368585334f5bc81fc73a2a6a0bbade60e7d83da21cead622faf408f32c \ + --hash=sha256:7e1e28be779884189cdd57735e997f282b64fd7ccf6e2eef3e16e57d7a34a815 \ + --hash=sha256:7ef7d5d4bd49ec7364315167a4134a015f61e8266c6d446fc116a9ac4456e10d \ + --hash=sha256:8050ba2e3ea1d8731a549e83c18d2f0999fbc99a5f6bd06b4c91449f55291804 \ + --hash=sha256:82345326b1d8d56afbe41d881fdf62f1926d7264b2fc1537f99ae5da9aad7913 \ + --hash=sha256:8355ad842a7c7e9e5e55653eade3b7d1885ba86f124dd8ab1f722f9be6627434 \ + --hash=sha256:86c1077a3cc60d453d4084d5b9649065f3bf1184e22992bd322e1f081d3117fb \ + --hash=sha256:87adf5bd6d72e3e17c9cb59ac4096b1faaf84b7eb3037a5ffa61c4b4370f0f13 \ + --hash=sha256:8db052bbd981e1666f09e957f3790ed74080c2229007c1dd67afdbf0b469c48b \ + --hash=sha256:8dd16fba2758db7a3780a051f245539c4451ca20910f5a5e6ea1c08d06d4a76b \ + --hash=sha256:8e32f7896f83774f91499d239e24cebfadbc07639c1494bb7213983842348337 \ + --hash=sha256:91c5036ebb62663a6b3999bdd2e559fd8456d17e2b485bf509784cd31a8b1705 \ + --hash=sha256:9250d087bc92b7d4899ccd5539a1b2334e44eee85d848c4c1aef8e221d3f8c8f \ + --hash=sha256:9479cae874c81bf610d72b85bb681a94c95722c127b55445285fb0e2c82db8e1 \ + --hash=sha256:968c14d4f03e10b2fd960f1d5168c1f0ac969381d3c1fcc973bc45fb06346599 \ + --hash=sha256:97499ff7862e868b1977107873dd1a06e151467129159a6ffd07b66706ba3a9f \ + --hash=sha256:99ad739c3686085e614bf77a508e26954ff1b8f14da0e3765ff7abbf7799f952 \ + --hash=sha256:9d787e3310c6a6425eb346be4ff2ccf6eece63017916fd77fe8328c57be83521 \ + --hash=sha256:a1774cd1981cd212506a23a14dba7fdeaee259f5deba2df6229966d9911e767a \ + --hash=sha256:a30a68e89e5a218b8b23a52292924c1f4b245cb0c68d1cce9aec9bbda6e2c160 \ + --hash=sha256:adc97a9077c2696501443d8ad3fa1b4fc6d131fc8fd7dfefd1a723f89071cf0a \ + --hash=sha256:b0d190e6f013ea938623a58706d1469a62103fb2a241ce2873a9906e0386582c \ + --hash=sha256:b10e42a6de0e32559a92f2f8dc908478cc0fa02838d7dbe764c44dca3fa13569 \ + 
--hash=sha256:b2a13dd6a95e95a489ca242319d18fc02e07ceb28fa9ad146385194d95b3c829 \ + --hash=sha256:b30bcbd1e1221783c721483953d9e4f3ab9c5d165aa709693d3f3946747b1aea \ + --hash=sha256:b325d4714c3c48277bfea1accd94e193ad6ed42b4bad79ad64f3b8f8a31260a5 \ + --hash=sha256:b5a28980a926fa810dbbed059547b02783952e2efd9c636412345232ddb87ff6 \ + --hash=sha256:b5f7d8d2867152cdb625e72a530d2ccb48a3d199159144cbdd63870882fb6f80 \ + --hash=sha256:bfb0d6be01fbae8d6655c8ca21b3b72458606c4aec9bbc932db758d47aba6db1 \ + --hash=sha256:bfd876041a956e6a90ad7cdb3f6a630c07d491280bfeed4544053cd434901681 \ + --hash=sha256:c08c1f3e34338256732bd6938747daa3c0d5b251e04b6e43b5813e94d503076e \ + --hash=sha256:c243da3436354f4af6c3058a3f81a97d47ea52c9bd874b52fd30274853a1d5df \ + --hash=sha256:c32bef3e7aeee75746748643667668ef941d28b003bfc89994ecf09a10f7a1b5 \ + --hash=sha256:c661fc820cfb33e166bf2450d3dadbda47c8d8981898adb9b6fe24e5e582ba60 \ + --hash=sha256:c6c4dcdfff2c08509faa15d36ba7e5ef5fcfab25f1e8f85a0c8f45bc3a30725d \ + --hash=sha256:c6c565d9a6e1a8d783c1948937ffc377dd5771e83bd56de8317c450a954d2056 \ + --hash=sha256:c8a154cf6537ebbc110e24dabe53095e714245c272da9c1be05734bdad4a61aa \ + --hash=sha256:c9c08c2fbc6120e70abff5d7f28ffb4d969e14294fb2143b4b5c7d20e46d1714 \ + --hash=sha256:ca89c5e596fc05b015f27561b3793dc2fa0917ea0d7507eebb448efd35274a70 \ + --hash=sha256:cc7cd0b2be0f0269283a45c0d8b2c35e149d1319dcb4a43c9c3689fa935c1ee6 \ + --hash=sha256:cda1ed70d2b264952e88adaa52eea653a33a1b98ac907ae2f86508eb44f65cdc \ + --hash=sha256:cf8ff04c642716a7f2048713ddc6278c5fd41faa3b9cab12607c7abecd012c22 \ + --hash=sha256:cfecdaa4b19f9ca534746eb3b55a5195d5c95b88cac32a205e981ec0a22b7d31 \ + --hash=sha256:d426616dae0967ca225ab12c22274eb816558f2f99ccb4a1d52ca92e8baf180f \ + --hash=sha256:d5eaa4a4c5b1906bd0d2508d68927f15b81821f85092e06f1a34a4254b0e1af3 \ + --hash=sha256:d639a750223132afbfb8f429c60d9d318aeba03281a5f1ab49f877456448dcf1 \ + --hash=sha256:d920392a6b1f353f4aa54328c867fec3320fa50657e25f64abf17af054fc97ac \ + 
--hash=sha256:d991483606f3dbec93287b9f35596f41aa2e92b7c2ebbb935b63f409e243c9af \ + --hash=sha256:d9ea2604370efc9a174c1b5dcc81784fb040044232150f7f33756049edfc9026 \ + --hash=sha256:dbaf3c3c37ef190439981648ccbf0c02ed99ae066087dd117fcb616d80b010a4 \ + --hash=sha256:dca3582bca82596609959ac39e12b7dad98385b4fefccb1151b937383cec547d \ + --hash=sha256:e3174a5ed4171570dc8318afada56373aa9289eb6dc0d96cceb48e7358b0e220 \ + --hash=sha256:e43a55f378df1e7a4fa3547c88d9a5a9b7113f653a66821bcea4718fe6c58763 \ + --hash=sha256:e69d0deeb977ffe7ed3d2e4439360089f9c3f217ada608f0f88ebd67afb6385e \ + --hash=sha256:e85dc94595f4d766bd7d872a9de5ede1ca8d3063f3bdf1e2c725f5eb411159e3 \ + --hash=sha256:e90b8db97f6f2c97eb045b51a6b2c5ed69cedd8392459e0642d4199b94fabd7e \ + --hash=sha256:e9bf3f0bbdb56633c07d7116ae60a576f846efdd86a8848f8d62b749e1209ca7 \ + --hash=sha256:ea4e6b3566127fda5e007e90a8fd5a4169f0cf0619506ed426db647f19c8454a \ + --hash=sha256:ec94c04149b6a7b8120f9f44565722c7ae31b7a6d2275569d2eefa76b83da3be \ + --hash=sha256:eddf73f41225942c1f994914742afa53dc0d01a6e20fe14b878a1b1edc74151f \ + --hash=sha256:ee6854c9000a10938c79238de2379bea30c82e4925a371711af45387df35cab8 \ + --hash=sha256:ef71d476caa6692eea743ae5ea23cde3260677f70122c4d258ca952e5c2d4e84 \ + --hash=sha256:f052d1be37ef35a54e394de66136e30fa1191fab64f71fc06ac7bc98c9a84618 \ + --hash=sha256:f1862739a1ffb50615c0fde6bae6569b5efbe08d98e59ce009f68a336f64da75 \ + --hash=sha256:f192a831d9575271a22d804ff1a5355355723f94f31d9eef25f0d45a152fdc1a \ + --hash=sha256:f42e68301ff4afee63e365a5fc302b81bb8ba31af625a671d7acb19d10168a8c \ + --hash=sha256:f7792f27d3ee6e0244ea4697d92b825f9a329ab5230a78c1a68bd274e64b5077 \ + --hash=sha256:f82110ab962a541737bd0ce87978d4c658f06e7591ba899192e2712a517badbb \ + --hash=sha256:f9ca1cbdc0fbfe5e6e6f8221ef2309988db5bcede52443aeaee9a4ad555e0dac \ + --hash=sha256:fd65af65e2aaf9474e468f9e571bd7b189e1df3a61caa59dcbabd0000e4ea839 \ + --hash=sha256:fe2fda4110a3d0bc163c2e0664be44657431440722c5c5315c65155cab92f9e5 \ + 
--hash=sha256:febd38857b09867d3ed3f4f1af7d241c5c50362e25ef43034995b77a50df494e + # via + # feast (setup.py) + # parsimonious + # transformers +requests==2.32.5 \ + --hash=sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6 \ + --hash=sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf + # via + # feast (setup.py) + # azure-core + # datasets + # docker + # docling + # fastapi-mcp + # google-api-core + # google-cloud-bigquery + # google-cloud-storage + # great-expectations + # huggingface-hub + # jupyterlab-server + # kubernetes + # moto + # msal + # python-keycloak + # ray + # requests-oauthlib + # requests-toolbelt + # responses + # singlestoredb + # snowflake-connector-python + # sphinx + # transformers + # trino +requests-oauthlib==2.0.0 \ + --hash=sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36 \ + --hash=sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9 + # via + # google-auth-oauthlib + # kubernetes +requests-toolbelt==1.0.0 \ + --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ + --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 + # via python-keycloak +responses==0.25.8 \ + --hash=sha256:0c710af92def29c8352ceadff0c3fe340ace27cf5af1bbe46fb71275bcd2831c \ + --hash=sha256:9374d047a575c8f781b94454db5cab590b6029505f488d12899ddb10a4af1cf4 + # via moto +rfc3339-validator==0.1.4 \ + --hash=sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b \ + --hash=sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa + # via + # jsonschema + # jupyter-events +rfc3986-validator==0.1.1 \ + --hash=sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9 \ + --hash=sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055 + # via + # jsonschema + # jupyter-events +rfc3987-syntax==1.1.0 \ + --hash=sha256:6c3d97604e4c5ce9f714898e05401a0445a641cfa276432b0a648c80856f6a3f \ + 
--hash=sha256:717a62cbf33cffdd16dfa3a497d81ce48a660ea691b1ddd7be710c22f00b4a0d + # via jsonschema +rich==13.9.4 \ + --hash=sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098 \ + --hash=sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90 + # via + # codeflare-sdk + # fastapi-mcp + # ibis-framework + # typer +rpds-py==0.30.0 \ + --hash=sha256:07ae8a593e1c3c6b82ca3292efbe73c30b61332fd612e05abee07c79359f292f \ + --hash=sha256:0a59119fc6e3f460315fe9d08149f8102aa322299deaa5cab5b40092345c2136 \ + --hash=sha256:0c0e95f6819a19965ff420f65578bacb0b00f251fefe2c8b23347c37174271f3 \ + --hash=sha256:0d08f00679177226c4cb8c5265012eea897c8ca3b93f429e546600c971bcbae7 \ + --hash=sha256:0ed177ed9bded28f8deb6ab40c183cd1192aa0de40c12f38be4d59cd33cb5c65 \ + --hash=sha256:12f90dd7557b6bd57f40abe7747e81e0c0b119bef015ea7726e69fe550e394a4 \ + --hash=sha256:1726859cd0de969f88dc8673bdd954185b9104e05806be64bcd87badbe313169 \ + --hash=sha256:1ab5b83dbcf55acc8b08fc62b796ef672c457b17dbd7820a11d6c52c06839bdf \ + --hash=sha256:1b151685b23929ab7beec71080a8889d4d6d9fa9a983d213f07121205d48e2c4 \ + --hash=sha256:1f3587eb9b17f3789ad50824084fa6f81921bbf9a795826570bda82cb3ed91f2 \ + --hash=sha256:250fa00e9543ac9b97ac258bd37367ff5256666122c2d0f2bc97577c60a1818c \ + --hash=sha256:2771c6c15973347f50fece41fc447c054b7ac2ae0502388ce3b6738cd366e3d4 \ + --hash=sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3 \ + --hash=sha256:2e6ecb5a5bcacf59c3f912155044479af1d0b6681280048b338b28e364aca1f6 \ + --hash=sha256:32c8528634e1bf7121f3de08fa85b138f4e0dc47657866630611b03967f041d7 \ + --hash=sha256:33f559f3104504506a44bb666b93a33f5d33133765b0c216a5bf2f1e1503af89 \ + --hash=sha256:3896fa1be39912cf0757753826bc8bdc8ca331a28a7c4ae46b7a21280b06bb85 \ + --hash=sha256:389a2d49eded1896c3d48b0136ead37c48e221b391c052fba3f4055c367f60a6 \ + --hash=sha256:39c02563fc592411c2c61d26b6c5fe1e51eaa44a75aa2c8735ca88b0d9599daa \ + 
--hash=sha256:3adbb8179ce342d235c31ab8ec511e66c73faa27a47e076ccc92421add53e2bb \ + --hash=sha256:3d4a69de7a3e50ffc214ae16d79d8fbb0922972da0356dcf4d0fdca2878559c6 \ + --hash=sha256:3e62880792319dbeb7eb866547f2e35973289e7d5696c6e295476448f5b63c87 \ + --hash=sha256:3e8eeb0544f2eb0d2581774be4c3410356eba189529a6b3e36bbbf9696175856 \ + --hash=sha256:422c3cb9856d80b09d30d2eb255d0754b23e090034e1deb4083f8004bd0761e4 \ + --hash=sha256:4559c972db3a360808309e06a74628b95eaccbf961c335c8fe0d590cf587456f \ + --hash=sha256:46e83c697b1f1c72b50e5ee5adb4353eef7406fb3f2043d64c33f20ad1c2fc53 \ + --hash=sha256:47b0ef6231c58f506ef0b74d44e330405caa8428e770fec25329ed2cb971a229 \ + --hash=sha256:47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad \ + --hash=sha256:47f236970bccb2233267d89173d3ad2703cd36a0e2a6e92d0560d333871a3d23 \ + --hash=sha256:47f9a91efc418b54fb8190a6b4aa7813a23fb79c51f4bb84e418f5476c38b8db \ + --hash=sha256:495aeca4b93d465efde585977365187149e75383ad2684f81519f504f5c13038 \ + --hash=sha256:4c5f36a861bc4b7da6516dbdf302c55313afa09b81931e8280361a4f6c9a2d27 \ + --hash=sha256:4cc2206b76b4f576934f0ed374b10d7ca5f457858b157ca52064bdfc26b9fc00 \ + --hash=sha256:4e7fc54e0900ab35d041b0601431b0a0eb495f0851a0639b6ef90f7741b39a18 \ + --hash=sha256:51a1234d8febafdfd33a42d97da7a43f5dcb120c1060e352a3fbc0c6d36e2083 \ + --hash=sha256:55f66022632205940f1827effeff17c4fa7ae1953d2b74a8581baaefb7d16f8c \ + --hash=sha256:58edca431fb9b29950807e301826586e5bbf24163677732429770a697ffe6738 \ + --hash=sha256:5965af57d5848192c13534f90f9dd16464f3c37aaf166cc1da1cae1fd5a34898 \ + --hash=sha256:5ba103fb455be00f3b1c2076c9d4264bfcb037c976167a6047ed82f23153f02e \ + --hash=sha256:5d4c2aa7c50ad4728a094ebd5eb46c452e9cb7edbfdb18f9e1221f597a73e1e7 \ + --hash=sha256:61046904275472a76c8c90c9ccee9013d70a6d0f73eecefd38c1ae7c39045a08 \ + --hash=sha256:613aa4771c99f03346e54c3f038e4cc574ac09a3ddfb0e8878487335e96dead6 \ + --hash=sha256:626a7433c34566535b6e56a1b39a7b17ba961e97ce3b80ec62e6f1312c025551 \ + 
--hash=sha256:669b1805bd639dd2989b281be2cfd951c6121b65e729d9b843e9639ef1fd555e \ + --hash=sha256:679ae98e00c0e8d68a7fda324e16b90fd5260945b45d3b824c892cec9eea3288 \ + --hash=sha256:67b02ec25ba7a9e8fa74c63b6ca44cf5707f2fbfadae3ee8e7494297d56aa9df \ + --hash=sha256:68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0 \ + --hash=sha256:692bef75a5525db97318e8cd061542b5a79812d711ea03dbc1f6f8dbb0c5f0d2 \ + --hash=sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05 \ + --hash=sha256:6bdfdb946967d816e6adf9a3d8201bfad269c67efe6cefd7093ef959683c8de0 \ + --hash=sha256:6de2a32a1665b93233cde140ff8b3467bdb9e2af2b91079f0333a0974d12d464 \ + --hash=sha256:73c67f2db7bc334e518d097c6d1e6fed021bbc9b7d678d6cc433478365d1d5f5 \ + --hash=sha256:74a3243a411126362712ee1524dfc90c650a503502f135d54d1b352bd01f2404 \ + --hash=sha256:76fec018282b4ead0364022e3c54b60bf368b9d926877957a8624b58419169b7 \ + --hash=sha256:7c64d38fb49b6cdeda16ab49e35fe0da2e1e9b34bc38bd78386530f218b37139 \ + --hash=sha256:7cee9c752c0364588353e627da8a7e808a66873672bcb5f52890c33fd965b394 \ + --hash=sha256:7e6ecfcb62edfd632e56983964e6884851786443739dbfe3582947e87274f7cb \ + --hash=sha256:806f36b1b605e2d6a72716f321f20036b9489d29c51c91f4dd29a3e3afb73b15 \ + --hash=sha256:858738e9c32147f78b3ac24dc0edb6610000e56dc0f700fd5f651d0a0f0eb9ff \ + --hash=sha256:8d6d1cc13664ec13c1b84241204ff3b12f9bb82464b8ad6e7a5d3486975c2eed \ + --hash=sha256:9027da1ce107104c50c81383cae773ef5c24d296dd11c99e2629dbd7967a20c6 \ + --hash=sha256:922e10f31f303c7c920da8981051ff6d8c1a56207dbdf330d9047f6d30b70e5e \ + --hash=sha256:945dccface01af02675628334f7cf49c2af4c1c904748efc5cf7bbdf0b579f95 \ + --hash=sha256:946fe926af6e44f3697abbc305ea168c2c31d3e3ef1058cf68f379bf0335a78d \ + --hash=sha256:95f0802447ac2d10bcc69f6dc28fe95fdf17940367b21d34e34c737870758950 \ + --hash=sha256:9854cf4f488b3d57b9aaeb105f06d78e5529d3145b1e4a41750167e8c213c6d3 \ + --hash=sha256:993914b8e560023bc0a8bf742c5f303551992dcb85e247b1e5c7f4a7d145bda5 \ + 
--hash=sha256:99b47d6ad9a6da00bec6aabe5a6279ecd3c06a329d4aa4771034a21e335c3a97 \ + --hash=sha256:9a4e86e34e9ab6b667c27f3211ca48f73dba7cd3d90f8d5b11be56e5dbc3fb4e \ + --hash=sha256:9cf69cdda1f5968a30a359aba2f7f9aa648a9ce4b580d6826437f2b291cfc86e \ + --hash=sha256:a090322ca841abd453d43456ac34db46e8b05fd9b3b4ac0c78bcde8b089f959b \ + --hash=sha256:a1010ed9524c73b94d15919ca4d41d8780980e1765babf85f9a2f90d247153dd \ + --hash=sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad \ + --hash=sha256:a1d0bc22a7cdc173fedebb73ef81e07faef93692b8c1ad3733b67e31e1b6e1b8 \ + --hash=sha256:a2bffea6a4ca9f01b3f8e548302470306689684e61602aa3d141e34da06cf425 \ + --hash=sha256:a452763cc5198f2f98898eb98f7569649fe5da666c2dc6b5ddb10fde5a574221 \ + --hash=sha256:a4796a717bf12b9da9d3ad002519a86063dcac8988b030e405704ef7d74d2d9d \ + --hash=sha256:a51033ff701fca756439d641c0ad09a41d9242fa69121c7d8769604a0a629825 \ + --hash=sha256:a8fa71a2e078c527c3e9dc9fc5a98c9db40bcc8a92b4e8858e36d329f8684b51 \ + --hash=sha256:ac37f9f516c51e5753f27dfdef11a88330f04de2d564be3991384b2f3535d02e \ + --hash=sha256:ac98b175585ecf4c0348fd7b29c3864bda53b805c773cbf7bfdaffc8070c976f \ + --hash=sha256:acd7eb3f4471577b9b5a41baf02a978e8bdeb08b4b355273994f8b87032000a8 \ + --hash=sha256:ad1fa8db769b76ea911cb4e10f049d80bf518c104f15b3edb2371cc65375c46f \ + --hash=sha256:b40fb160a2db369a194cb27943582b38f79fc4887291417685f3ad693c5a1d5d \ + --hash=sha256:b4dc1a6ff022ff85ecafef7979a2c6eb423430e05f1165d6688234e62ba99a07 \ + --hash=sha256:ba3af48635eb83d03f6c9735dfb21785303e73d22ad03d489e88adae6eab8877 \ + --hash=sha256:ba81a9203d07805435eb06f536d95a266c21e5b2dfbf6517748ca40c98d19e31 \ + --hash=sha256:c2262bdba0ad4fc6fb5545660673925c2d2a5d9e2e0fb603aad545427be0fc58 \ + --hash=sha256:c77afbd5f5250bf27bf516c7c4a016813eb2d3e116139aed0096940c5982da94 \ + --hash=sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28 \ + --hash=sha256:cdc62c8286ba9bf7f47befdcea13ea0e26bf294bda99758fd90535cbaf408000 \ + 
--hash=sha256:d948b135c4693daff7bc2dcfc4ec57237a29bd37e60c2fabf5aff2bbacf3e2f1 \ + --hash=sha256:d96c2086587c7c30d44f31f42eae4eac89b60dabbac18c7669be3700f13c3ce1 \ + --hash=sha256:d9a0ca5da0386dee0655b4ccdf46119df60e0f10da268d04fe7cc87886872ba7 \ + --hash=sha256:da279aa314f00acbb803da1e76fa18666778e8a8f83484fba94526da5de2cba7 \ + --hash=sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40 \ + --hash=sha256:dc4f992dfe1e2bc3ebc7444f6c7051b4bc13cd8e33e43511e8ffd13bf407010d \ + --hash=sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0 \ + --hash=sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84 \ + --hash=sha256:dea5b552272a944763b34394d04577cf0f9bd013207bc32323b5a89a53cf9c2f \ + --hash=sha256:dff13836529b921e22f15cb099751209a60009731a68519630a24d61f0b1b30a \ + --hash=sha256:e0b65193a413ccc930671c55153a03ee57cecb49e6227204b04fae512eb657a7 \ + --hash=sha256:e5d3e6b26f2c785d65cc25ef1e5267ccbe1b069c5c21b8cc724efee290554419 \ + --hash=sha256:e7536cd91353c5273434b4e003cbda89034d67e7710eab8761fd918ec6c69cf8 \ + --hash=sha256:eb0b93f2e5c2189ee831ee43f156ed34e2a89a78a66b98cadad955972548be5a \ + --hash=sha256:eb2c4071ab598733724c08221091e8d80e89064cd472819285a9ab0f24bcedb9 \ + --hash=sha256:ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be \ + --hash=sha256:ee454b2a007d57363c2dfd5b6ca4a5d7e2c518938f8ed3b706e37e5d470801ed \ + --hash=sha256:ee6af14263f25eedc3bb918a3c04245106a42dfd4f5c2285ea6f997b1fc3f89a \ + --hash=sha256:f14fc5df50a716f7ece6a80b6c78bb35ea2ca47c499e422aa4463455dd96d56d \ + --hash=sha256:f207f69853edd6f6700b86efb84999651baf3789e78a466431df1331608e5324 \ + --hash=sha256:f251c812357a3fed308d684a5079ddfb9d933860fc6de89f2b7ab00da481e65f \ + --hash=sha256:f83424d738204d9770830d35290ff3273fbb02b41f919870479fab14b9d303b2 \ + --hash=sha256:f8d1736cfb49381ba528cd5baa46f82fdc65c06e843dab24dd70b63d09121b3f \ + --hash=sha256:fe5fa731a1fa8a0a56b0977413f8cacac1768dad38d16b3a296712709476fbd5 + # via + # 
jsonschema + # referencing +rsa==4.9.1 \ + --hash=sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762 \ + --hash=sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75 + # via google-auth +rtree==1.4.1 \ + --hash=sha256:12de4578f1b3381a93a655846900be4e3d5f4cd5e306b8b00aa77c1121dc7e8c \ + --hash=sha256:3d46f55729b28138e897ffef32f7ce93ac335cb67f9120125ad3742a220800f0 \ + --hash=sha256:a7e48d805e12011c2cf739a29d6a60ae852fb1de9fc84220bbcef67e6e595d7d \ + --hash=sha256:b558edda52eca3e6d1ee629042192c65e6b7f2c150d6d6cd207ce82f85be3967 \ + --hash=sha256:c6b1b3550881e57ebe530cc6cffefc87cd9bf49c30b37b894065a9f810875e46 \ + --hash=sha256:d672184298527522d4914d8ae53bf76982b86ca420b0acde9298a7a87d81d4a4 \ + --hash=sha256:efa8c4496e31e9ad58ff6c7df89abceac7022d906cb64a3e18e4fceae6b77f65 \ + --hash=sha256:efe125f416fd27150197ab8521158662943a40f87acab8028a1aac4ad667a489 \ + --hash=sha256:f155bc8d6bac9dcd383481dee8c130947a4866db1d16cb6dff442329a038a0dc + # via + # docling + # docling-ibm-models +ruamel-yaml==0.17.17 \ + --hash=sha256:9751de4cbb57d4bfbf8fc394e125ed4a2f170fbff3dc3d78abf50be85924f8be \ + --hash=sha256:9af3ec5d7f8065582f3aa841305465025d0afd26c5fb54e15b964e11838fc74f + # via great-expectations +ruff==0.14.13 \ + --hash=sha256:4acdf009f32b46f6e8864af19cbf6841eaaed8638e65c8dac845aea0d703c841 \ + --hash=sha256:591a7f68860ea4e003917d19b5c4f5ac39ff558f162dc753a2c5de897fd5502c \ + --hash=sha256:6070bd026e409734b9257e03e3ef18c6e1a216f0435c6751d7a8ec69cb59abef \ + --hash=sha256:61f4e40077a1248436772bb6512db5fc4457fe4c49e7a94ea7c5088655dd21ae \ + --hash=sha256:642442b42957093811cd8d2140dfadd19c7417030a7a68cf8d51fcdd5f217427 \ + --hash=sha256:6d02f1428357fae9e98ac7aa94b7e966fd24151088510d32cf6f902d6c09235e \ + --hash=sha256:76f62c62cd37c276cb03a275b198c7c15bd1d60c989f944db08a8c1c2dbec18b \ + --hash=sha256:774c77e841cc6e046fc3e91623ce0903d1cd07e3a36b1a9fe79b81dab3de506b \ + 
--hash=sha256:78d2b1097750d90ba82ce4ba676e85230a0ed694178ca5e61aa9b459970b3eb9 \ + --hash=sha256:7ab819e14f1ad9fe39f246cfcc435880ef7a9390d81a2b6ac7e01039083dd247 \ + --hash=sha256:7d0bf87705acbbcb8d4c24b2d77fbb73d40210a95c3903b443cd9e30824a5032 \ + --hash=sha256:83cd6c0763190784b99650a20fec7633c59f6ebe41c5cc9d45ee42749563ad47 \ + --hash=sha256:914a8023ece0528d5cc33f5a684f5f38199bbb566a04815c2c211d8f40b5d0ed \ + --hash=sha256:9aaf3870f14d925bbaf18b8a2347ee0ae7d95a2e490e4d4aea6813ed15ebc80e \ + --hash=sha256:a3eb5da8e2c9e9f13431032fdcbe7681de9ceda5835efee3269417c13f1fed5c \ + --hash=sha256:ac5b7f63dd3b27cc811850f5ffd8fff845b00ad70e60b043aabf8d6ecc304e09 \ + --hash=sha256:d24899478c35ebfa730597a4a775d430ad0d5631b8647a3ab368c29b7e7bd063 \ + --hash=sha256:e399341472ce15237be0c0ae5fbceca4b04cd9bebab1a2b2c979e015455d8f0c \ + --hash=sha256:ef720f529aec113968b45dfdb838ac8934e519711da53a0456038a0efecbd680 + # via feast (setup.py) +s3transfer==0.13.1 \ + --hash=sha256:a981aa7429be23fe6dfc13e80e4020057cbab622b08c0315288758d67cabc724 \ + --hash=sha256:c3fdba22ba1bd367922f27ec8032d6a1cf5f10c934fb5d68cf60fd5a23d936cf + # via boto3 +safetensors[torch]==0.7.0 \ + --hash=sha256:0071bffba4150c2f46cae1432d31995d77acfd9f8db598b5d1a2ce67e8440ad2 \ + --hash=sha256:07663963b67e8bd9f0b8ad15bb9163606cd27cc5a1b96235a50d8369803b96b0 \ + --hash=sha256:12f49080303fa6bb424b362149a12949dfbbf1e06811a88f2307276b0c131afd \ + --hash=sha256:1d060c70284127fa805085d8f10fbd0962792aed71879d00864acda69dbab981 \ + --hash=sha256:42cb091236206bb2016d245c377ed383aa7f78691748f3bb6ee1bfa51ae2ce6a \ + --hash=sha256:473b32699f4200e69801bf5abf93f1a4ecd432a70984df164fc22ccf39c4a6f3 \ + --hash=sha256:54bef08bf00a2bff599982f6b08e8770e09cc012d7bba00783fc7ea38f1fb37d \ + --hash=sha256:5d72abdb8a4d56d4020713724ba81dac065fedb7f3667151c4a637f1d3fb26c0 \ + --hash=sha256:672132907fcad9f2aedcb705b2d7b3b93354a2aec1b2f706c4db852abe338f85 \ + --hash=sha256:6999421eb8ba9df4450a16d9184fcb7bef26240b9f98e95401f17af6c2210b71 \ + 
--hash=sha256:7b95a3fa7b3abb9b5b0e07668e808364d0d40f6bbbf9ae0faa8b5b210c97b140 \ + --hash=sha256:8469155f4cb518bafb4acf4865e8bb9d6804110d2d9bdcaa78564b9fd841e104 \ + --hash=sha256:94fd4858284736bb67a897a41608b5b0c2496c9bdb3bf2af1fa3409127f20d57 \ + --hash=sha256:b0f6d66c1c538d5a94a73aa9ddca8ccc4227e6c9ff555322ea40bdd142391dd4 \ + --hash=sha256:c74af94bf3ac15ac4d0f2a7c7b4663a15f8c2ab15ed0fc7531ca61d0835eccba \ + --hash=sha256:c82f4d474cf725255d9e6acf17252991c3c8aac038d6ef363a4bf8be2f6db517 \ + --hash=sha256:cdab83a366799fa730f90a4ebb563e494f28e9e92c4819e556152ad55e43591b \ + --hash=sha256:cfdead2f57330d76aa7234051dadfa7d4eedc0e5a27fd08e6f96714a92b00f09 \ + --hash=sha256:d1239932053f56f3456f32eb9625590cc7582e905021f94636202a864d470755 \ + --hash=sha256:dac7252938f0696ddea46f5e855dd3138444e82236e3be475f54929f0c510d48 \ + --hash=sha256:dc92bc2db7b45bda4510e4f51c59b00fe80b2d6be88928346e4294ce1c2abe7c \ + --hash=sha256:e07d91d0c92a31200f25351f4acb2bc6aff7f48094e13ebb1d0fb995b54b6542 \ + --hash=sha256:f4729811a6640d019a4b7ba8638ee2fd21fa5ca8c7e7bdf0fed62068fcaac737 + # via + # accelerate + # docling-ibm-models + # timm + # transformers +scikit-image==0.26.0 \ + --hash=sha256:0608aa4a9ec39e0843de10d60edb2785a30c1c47819b67866dd223ebd149acaf \ + --hash=sha256:0660b83968c15293fd9135e8d860053ee19500d52bf55ca4fb09de595a1af650 \ + --hash=sha256:09bad6a5d5949c7896c8347424c4cca899f1d11668030e5548813ab9c2865dcb \ + --hash=sha256:0baa0108d2d027f34d748e84e592b78acc23e965a5de0e4bb03cf371de5c0581 \ + --hash=sha256:163e9afb5b879562b9aeda0dd45208a35316f26cc7a3aed54fd601604e5cf46f \ + --hash=sha256:20ef4a155e2e78b8ab973998e04d8a361d49d719e65412405f4dadd9155a61d9 \ + --hash=sha256:21a818ee6ca2f2131b9e04d8eb7637b5c18773ebe7b399ad23dcc5afaa226d2d \ + --hash=sha256:27d58bc8b2acd351f972c6508c1b557cfed80299826080a4d803dd29c51b707e \ + --hash=sha256:2c1e7bd342f43e7a97e571b3f03ba4c1293ea1a35c3f13f41efdc8a81c1dc8f2 \ + --hash=sha256:3268f13310e6857508bd87202620df996199a016a1d281b309441d227c822394 
\ + --hash=sha256:3409e89d66eff5734cd2b672d1c48d2759360057e714e1d92a11df82c87cba37 \ + --hash=sha256:3f5bf622d7c0435884e1e141ebbe4b2804e16b2dd23ae4c6183e2ea99233be70 \ + --hash=sha256:4c717490cec9e276afb0438dd165b7c3072d6c416709cc0f9f5a4c1070d23a44 \ + --hash=sha256:4d57e39ef67a95d26860c8caf9b14b8fb130f83b34c6656a77f191fa6d1d04d8 \ + --hash=sha256:52c496f75a7e45844d951557f13c08c81487c6a1da2e3c9c8a39fcde958e02cc \ + --hash=sha256:6381edf972b32e4f54085449afde64365a57316637496c1325a736987083e2ab \ + --hash=sha256:63af3d3a26125f796f01052052f86806da5b5e54c6abef152edb752683075a9c \ + --hash=sha256:6caec76e16c970c528d15d1c757363334d5cb3069f9cea93d2bead31820511f3 \ + --hash=sha256:724f79fd9b6cb6f4a37864fe09f81f9f5d5b9646b6868109e1b100d1a7019e59 \ + --hash=sha256:74aa5518ccea28121f57a95374581d3b979839adc25bb03f289b1bc9b99c58af \ + --hash=sha256:7af7aa331c6846bd03fa28b164c18d0c3fd419dbb888fb05e958ac4257a78fdd \ + --hash=sha256:7df650e79031634ac90b11e64a9eedaf5a5e06fcd09bcd03a34be01745744466 \ + --hash=sha256:915bb3ba66455cf8adac00dc8fdf18a4cd29656aec7ddd38cb4dda90289a6f21 \ + --hash=sha256:92242351bccf391fc5df2d1529d15470019496d2498d615beb68da85fe7fdf37 \ + --hash=sha256:9490360c8d3f9a7e85c8de87daf7c0c66507960cf4947bb9610d1751928721c7 \ + --hash=sha256:98329aab3bc87db352b9887f64ce8cdb8e75f7c2daa19927f2e121b797b678d5 \ + --hash=sha256:9ea6207d9e9d21c3f464efe733121c0504e494dbdc7728649ff3e23c3c5a4953 \ + --hash=sha256:9eefb4adad066da408a7601c4c24b07af3b472d90e08c3e7483d4e9e829d8c49 \ + --hash=sha256:a07200fe09b9d99fcdab959859fe0f7db8df6333d6204344425d476850ce3604 \ + --hash=sha256:a2d211bc355f59725efdcae699b93b30348a19416cc9e017f7b2fb599faf7219 \ + --hash=sha256:a2e852eccf41d2d322b8e60144e124802873a92b8d43a6f96331aa42888491c7 \ + --hash=sha256:abed017474593cd3056ae0fe948d07d0747b27a085e92df5474f4955dd65aec0 \ + --hash=sha256:ac529eb9dbd5954f9aaa2e3fe9a3fd9661bfe24e134c688587d811a0233127f1 \ + --hash=sha256:aeb14db1ed09ad4bee4ceb9e635547a8d5f3549be67fc6c768c7f923e027e6cd \ + 
--hash=sha256:b1ede33a0fb3731457eaf53af6361e73dd510f449dac437ab54573b26788baf0 \ + --hash=sha256:b36ab5e778bf50af5ff386c3ac508027dc3aaeccf2161bdf96bde6848f44d21b \ + --hash=sha256:b702c3bb115e1dcf4abf5297429b5c90f2189655888cbed14921f3d26f81d3a4 \ + --hash=sha256:b8d14d3181c21c11170477a42542c1addc7072a90b986675a71266ad17abc37f \ + --hash=sha256:c6624a76c6085218248154cc7e1500e6b488edcd9499004dd0d35040607d7505 \ + --hash=sha256:c9087cf7d0e7f33ab5c46d2068d86d785e70b05400a891f73a13400f1e1faf6a \ + --hash=sha256:cde0bbd57e6795eba83cb10f71a677f7239271121dc950bc060482834a668ad1 \ + --hash=sha256:ce00600cd70d4562ed59f80523e18cdcc1fae0e10676498a01f73c255774aefd \ + --hash=sha256:cefd85033e66d4ea35b525bb0937d7f42d4cdcfed2d1888e1570d5ce450d3932 \ + --hash=sha256:d454b93a6fa770ac5ae2d33570f8e7a321bb80d29511ce4b6b78058ebe176e8c \ + --hash=sha256:d5c244656de905e195a904e36dbc18585e06ecf67d90f0482cbde63d7f9ad59d \ + --hash=sha256:ede4d6d255cc5da9faeb2f9ba7fedbc990abbc652db429f40a16b22e770bb578 \ + --hash=sha256:f5f970ab04efad85c24714321fcc91613fcb64ef2a892a13167df2f3e59199fa \ + --hash=sha256:f775f0e420faac9c2aa6757135f4eb468fb7b70e0b67fa77a5e79be3c30ee331 \ + --hash=sha256:fac96a1f9b06cd771cbbb3cd96c5332f36d4efd839b1d8b053f79e5887acde62 + # via easyocr +scikit-learn==1.8.0 \ + --hash=sha256:00d6f1d66fbcf4eba6e356e1420d33cc06c70a45bb1363cd6f6a8e4ebbbdece2 \ + --hash=sha256:0d6ae97234d5d7079dc0040990a6f7aeb97cb7fa7e8945f1999a429b23569e0a \ + --hash=sha256:146b4d36f800c013d267b29168813f7a03a43ecd2895d04861f1240b564421da \ + --hash=sha256:15fc3b5d19cc2be65404786857f2e13c70c83dd4782676dd6814e3b89dc8f5b9 \ + --hash=sha256:2838551e011a64e3053ad7618dda9310175f7515f1742fa2d756f7c874c05961 \ + --hash=sha256:29ffc74089f3d5e87dfca4c2c8450f88bdc61b0fc6ed5d267f3988f19a1309f6 \ + --hash=sha256:2de443b9373b3b615aec1bb57f9baa6bb3a9bd093f1269ba95c17d870422b271 \ + --hash=sha256:35c007dedb2ffe38fe3ee7d201ebac4a2deccd2408e8621d53067733e3c74809 \ + 
--hash=sha256:3bad7565bc9cf37ce19a7c0d107742b320c1285df7aab1a6e2d28780df167242 \ + --hash=sha256:4496bb2cf7a43ce1a2d7524a79e40bc5da45cf598dbf9545b7e8316ccba47bb4 \ + --hash=sha256:4511be56637e46c25721e83d1a9cea9614e7badc7040c4d573d75fbe257d6fd7 \ + --hash=sha256:5025ce924beccb28298246e589c691fe1b8c1c96507e6d27d12c5fadd85bfd76 \ + --hash=sha256:56079a99c20d230e873ea40753102102734c5953366972a71d5cb39a32bc40c6 \ + --hash=sha256:5e30adb87f0cc81c7690a84f7932dd66be5bac57cfe16b91cb9151683a4a2d3b \ + --hash=sha256:5fb63362b5a7ddab88e52b6dbb47dac3fd7dafeee740dc6c8d8a446ddedade8e \ + --hash=sha256:6b595b07a03069a2b1740dc08c2299993850ea81cce4fe19b2421e0c970de6b7 \ + --hash=sha256:72358cce49465d140cc4e7792015bb1f0296a9742d5622c67e31399b75468b9e \ + --hash=sha256:74b66d8689d52ed04c271e1329f0c61635bcaf5b926db9b12d58914cdc01fe57 \ + --hash=sha256:7cc267b6108f0a1499a734167282c00c4ebf61328566b55ef262d48e9849c735 \ + --hash=sha256:80832434a6cc114f5219211eec13dcbc16c2bac0e31ef64c6d346cde3cf054cb \ + --hash=sha256:8c497fff237d7b4e07e9ef1a640887fa4fb765647f86fbe00f969ff6280ce2bb \ + --hash=sha256:8fdf95767f989b0cfedb85f7ed8ca215d4be728031f56ff5a519ee1e3276dc2e \ + --hash=sha256:9bccbb3b40e3de10351f8f5068e105d0f4083b1a65fa07b6634fbc401a6287fd \ + --hash=sha256:a0bcfe4d0d14aec44921545fd2af2338c7471de9cb701f1da4c9d85906ab847a \ + --hash=sha256:a69525355a641bf8ef136a7fa447672fb54fe8d60cab5538d9eb7c6438543fb9 \ + --hash=sha256:ada8121bcb4dac28d930febc791a69f7cb1673c8495e5eee274190b73a4559c1 \ + --hash=sha256:bf97c10a3f5a7543f9b88cbf488d33d175e9146115a451ae34568597ba33dcde \ + --hash=sha256:c22a2da7a198c28dd1a6e1136f19c830beab7fdca5b3e5c8bba8394f8a5c45b3 \ + --hash=sha256:c2656924ec73e5939c76ac4c8b026fc203b83d8900362eb2599d8aee80e4880f \ + --hash=sha256:c57b1b610bd1f40ba43970e11ce62821c2e6569e4d74023db19c6b26f246cb3b \ + --hash=sha256:eddde82a035681427cbedded4e6eff5e57fa59216c2e3e90b10b19ab1d0a65c3 \ + --hash=sha256:edec98c5e7c128328124a029bceb09eda2d526997780fef8d65e9a69eead963e \ + 
--hash=sha256:ee787491dbfe082d9c3013f01f5991658b0f38aa8177e4cd4bf434c58f551702 \ + --hash=sha256:f28dd15c6bb0b66ba09728cf09fd8736c304be29409bd8445a080c1280619e8c \ + --hash=sha256:f984ca4b14914e6b4094c5d52a32ea16b49832c03bd17a110f004db3c223e8e1 \ + --hash=sha256:fb65db5d7531bccf3a4f6bec3462223bea71384e2cda41da0f10b7c292b9e7c4 \ + --hash=sha256:fe1c011a640a9f0791146011dfd3c7d9669785f9fed2b2a5f9e207536cf5c2fd + # via feast (setup.py) +scipy==1.17.0 \ + --hash=sha256:00fb5f8ec8398ad90215008d8b6009c9db9fa924fd4c7d6be307c6f945f9cd73 \ + --hash=sha256:031121914e295d9791319a1875444d55079885bbae5bdc9c5e0f2ee5f09d34ff \ + --hash=sha256:0937a0b0d8d593a198cededd4c439a0ea216a3f36653901ea1f3e4be949056f8 \ + --hash=sha256:0cf46c8013fec9d3694dc572f0b54100c28405d55d3e2cb15e2895b25057996e \ + --hash=sha256:0d5018a57c24cb1dd828bcf51d7b10e65986d549f52ef5adb6b4d1ded3e32a57 \ + --hash=sha256:130d12926ae34399d157de777472bf82e9061c60cc081372b3118edacafe1d00 \ + --hash=sha256:13c4096ac6bc31d706018f06a49abe0485f96499deb82066b94d19b02f664209 \ + --hash=sha256:13e861634a2c480bd237deb69333ac79ea1941b94568d4b0efa5db5e263d4fd1 \ + --hash=sha256:1f9586a58039d7229ce77b52f8472c972448cded5736eaf102d5658bbac4c269 \ + --hash=sha256:1ff269abf702f6c7e67a4b7aad981d42871a11b9dd83c58d2d2ea624efbd1088 \ + --hash=sha256:255c0da161bd7b32a6c898e7891509e8a9289f0b1c6c7d96142ee0d2b114c2ea \ + --hash=sha256:2591060c8e648d8b96439e111ac41fd8342fdeff1876be2e19dea3fe8930454e \ + --hash=sha256:272a9f16d6bb4667e8b50d25d71eddcc2158a214df1b566319298de0939d2ab7 \ + --hash=sha256:2abd71643797bd8a106dff97894ff7869eeeb0af0f7a5ce02e4227c6a2e9d6fd \ + --hash=sha256:2b531f57e09c946f56ad0b4a3b2abee778789097871fc541e267d2eca081cff1 \ + --hash=sha256:30509da9dbec1c2ed8f168b8d8aa853bc6723fede1dbc23c7d43a56f5ab72a67 \ + --hash=sha256:33af70d040e8af9d5e7a38b5ed3b772adddd281e3062ff23fec49e49681c38cf \ + --hash=sha256:357ca001c6e37601066092e7c89cca2f1ce74e2a520ca78d063a6d2201101df2 \ + 
--hash=sha256:3625c631a7acd7cfd929e4e31d2582cf00f42fcf06011f59281271746d77e061 \ + --hash=sha256:363ad4ae2853d88ebcde3ae6ec46ccca903ea9835ee8ba543f12f575e7b07e4e \ + --hash=sha256:40052543f7bbe921df4408f46003d6f01c6af109b9e2c8a66dd1cf6cf57f7d5d \ + --hash=sha256:423ca1f6584fc03936972b5f7c06961670dbba9f234e71676a7c7ccf938a0d61 \ + --hash=sha256:474da16199f6af66601a01546144922ce402cb17362e07d82f5a6cf8f963e449 \ + --hash=sha256:4e00562e519c09da34c31685f6acc3aa384d4d50604db0f245c14e1b4488bfa2 \ + --hash=sha256:5194c445d0a1c7a6c1a4a4681b6b7c71baad98ff66d96b949097e7513c9d6742 \ + --hash=sha256:5fb10d17e649e1446410895639f3385fd2bf4c3c7dfc9bea937bddcbc3d7b9ba \ + --hash=sha256:65ec32f3d32dfc48c72df4291345dae4f048749bc8d5203ee0a3f347f96c5ce6 \ + --hash=sha256:6680f2dfd4f6182e7d6db161344537da644d1cf85cf293f015c60a17ecf08752 \ + --hash=sha256:6e886000eb4919eae3a44f035e63f0fd8b651234117e8f6f29bad1cd26e7bc45 \ + --hash=sha256:7204fddcbec2fe6598f1c5fdf027e9f259106d05202a959a9f1aecf036adc9f6 \ + --hash=sha256:819fc26862b4b3c73a60d486dbb919202f3d6d98c87cf20c223511429f2d1a97 \ + --hash=sha256:8547e7c57f932e7354a2319fab613981cde910631979f74c9b542bb167a8b9db \ + --hash=sha256:85b0ac3ad17fa3be50abd7e69d583d98792d7edc08367e01445a1e2076005379 \ + --hash=sha256:87b411e42b425b84777718cc41516b8a7e0795abfa8e8e1d573bf0ef014f0812 \ + --hash=sha256:88c22af9e5d5a4f9e027e26772cc7b5922fab8bcc839edb3ae33de404feebd9e \ + --hash=sha256:9244608d27eafe02b20558523ba57f15c689357c85bdcfe920b1828750aa26eb \ + --hash=sha256:979c3a0ff8e5ba254d45d59ebd38cde48fce4f10b5125c680c7a4bfe177aab07 \ + --hash=sha256:9eeb9b5f5997f75507814ed9d298ab23f62cf79f5a3ef90031b1ee2506abdb5b \ + --hash=sha256:9fad7d3578c877d606b1150135c2639e9de9cecd3705caa37b66862977cc3e72 \ + --hash=sha256:a38c3337e00be6fd8a95b4ed66b5d988bac4ec888fd922c2ea9fe5fb1603dd67 \ + --hash=sha256:aabf057c632798832f071a8dde013c2e26284043934f53b00489f1773b33527e \ + --hash=sha256:c17514d11b78be8f7e6331b983a65a7f5ca1fd037b95e27b280921fe5606286a \ + 
--hash=sha256:c5e8647f60679790c2f5c76be17e2e9247dc6b98ad0d3b065861e082c56e078d \ + --hash=sha256:cacbaddd91fcffde703934897c5cd2c7cb0371fac195d383f4e1f1c5d3f3bd04 \ + --hash=sha256:d7425fcafbc09a03731e1bc05581f5fad988e48c6a861f441b7ab729a49a55ea \ + --hash=sha256:dac97a27520d66c12a34fd90a4fe65f43766c18c0d6e1c0a80f114d2260080e4 \ + --hash=sha256:dbf133ced83889583156566d2bdf7a07ff89228fe0c0cb727f777de92092ec6b \ + --hash=sha256:e8c0b331c2c1f531eb51f1b4fc9ba709521a712cce58f1aa627bc007421a5306 \ + --hash=sha256:eb2651271135154aa24f6481cbae5cc8af1f0dd46e6533fb7b56aa9727b6a232 \ + --hash=sha256:ebb7446a39b3ae0fe8f416a9a3fdc6fba3f11c634f680f16a239c5187bc487c0 \ + --hash=sha256:ec0827aa4d36cb79ff1b81de898e948a51ac0b9b1c43e4a372c0508c38c0f9a3 \ + --hash=sha256:edce1a1cf66298cccdc48a1bdf8fb10a3bf58e8b58d6c3883dd1530e103f87c0 \ + --hash=sha256:eec3842ec9ac9de5917899b277428886042a93db0b227ebbe3a333b64ec7643d \ + --hash=sha256:ef28d815f4d2686503e5f4f00edc387ae58dfd7a2f42e348bb53359538f01558 \ + --hash=sha256:f2a4942b0f5f7c23c7cd641a0ca1955e2ae83dedcff537e3a0259096635e186b \ + --hash=sha256:f3cd947f20fe17013d401b64e857c6b2da83cae567adbb75b9dcba865abc66d8 \ + --hash=sha256:f603d8a5518c7426414d1d8f82e253e454471de682ce5e39c29adb0df1efb86b \ + --hash=sha256:f7df7941d71314e60a481e02d5ebcb3f0185b8d799c70d03d8258f6c80f3d467 \ + --hash=sha256:f9eb55bb97d00f8b7ab95cb64f873eb0bf54d9446264d9f3609130381233483f \ + --hash=sha256:fc02c37a5639ee67d8fb646ffded6d793c06c5622d36b35cfa8fe5ececb8f042 \ + --hash=sha256:fe508b5690e9eaaa9467fc047f833af58f1152ae51a0d0aed67aa5801f4dd7d6 + # via + # docling + # easyocr + # great-expectations + # scikit-image + # scikit-learn +semchunk==2.2.2 \ + --hash=sha256:940e89896e64eeb01de97ba60f51c8c7b96c6a3951dfcf574f25ce2146752f52 \ + --hash=sha256:94ca19020c013c073abdfd06d79a7c13637b91738335f3b8cdb5655ee7cc94d2 + # via docling-core +send2trash==2.1.0 \ + --hash=sha256:0da2f112e6d6bb22de6aa6daa7e144831a4febf2a87261451c4ad849fe9a873c \ + 
--hash=sha256:1c72b39f09457db3c05ce1d19158c2cbef4c32b8bedd02c155e49282b7ea7459 + # via jupyter-server +setuptools==80.9.0 \ + --hash=sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922 \ + --hash=sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c + # via + # feast (setup.py) + # grpcio-tools + # jupyterlab + # pandas-gbq + # pbr + # pip-tools + # pydata-google-auth + # pymilvus + # singlestoredb +shapely==2.1.2 \ + --hash=sha256:0036ac886e0923417932c2e6369b6c52e38e0ff5d9120b90eef5cd9a5fc5cae9 \ + --hash=sha256:01d0d304b25634d60bd7cf291828119ab55a3bab87dc4af1e44b07fb225f188b \ + --hash=sha256:0bd308103340030feef6c111d3eb98d50dc13feea33affc8a6f9fa549e9458a3 \ + --hash=sha256:136ab87b17e733e22f0961504d05e77e7be8c9b5a8184f685b4a91a84efe3c26 \ + --hash=sha256:16a9c722ba774cf50b5d4541242b4cce05aafd44a015290c82ba8a16931ff63d \ + --hash=sha256:16c5d0fc45d3aa0a69074979f4f1928ca2734fb2e0dde8af9611e134e46774e7 \ + --hash=sha256:19efa3611eef966e776183e338b2d7ea43569ae99ab34f8d17c2c054d3205cc0 \ + --hash=sha256:1d0bfb4b8f661b3b4ec3565fa36c340bfb1cda82087199711f86a88647d26b2f \ + --hash=sha256:1e7d4d7ad262a48bb44277ca12c7c78cb1b0f56b32c10734ec9a1d30c0b0c54b \ + --hash=sha256:1f2f33f486777456586948e333a56ae21f35ae273be99255a191f5c1fa302eb4 \ + --hash=sha256:1ff629e00818033b8d71139565527ced7d776c269a49bd78c9df84e8f852190c \ + --hash=sha256:21952dc00df38a2c28375659b07a3979d22641aeb104751e769c3ee825aadecf \ + --hash=sha256:2d93d23bdd2ed9dc157b46bc2f19b7da143ca8714464249bef6771c679d5ff40 \ + --hash=sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9 \ + --hash=sha256:2fa78b49485391224755a856ed3b3bd91c8455f6121fee0db0e71cefb07d0ef6 \ + --hash=sha256:346ec0c1a0fcd32f57f00e4134d1200e14bf3f5ae12af87ba83ca275c502498c \ + --hash=sha256:361b6d45030b4ac64ddd0a26046906c8202eb60d0f9f53085f5179f1d23021a0 \ + --hash=sha256:40d784101f5d06a1fd30b55fc11ea58a61be23f930d934d86f19a180909908a4 \ + 
--hash=sha256:4a44bc62a10d84c11a7a3d7c1c4fe857f7477c3506e24c9062da0db0ae0c449c \ + --hash=sha256:5860eb9f00a1d49ebb14e881f5caf6c2cf472c7fd38bd7f253bbd34f934eb076 \ + --hash=sha256:5ebe3f84c6112ad3d4632b1fd2290665aa75d4cef5f6c5d77c4c95b324527c6a \ + --hash=sha256:61edcd8d0d17dd99075d320a1dd39c0cb9616f7572f10ef91b4b5b00c4aeb566 \ + --hash=sha256:6305993a35989391bd3476ee538a5c9a845861462327efe00dd11a5c8c709a99 \ + --hash=sha256:6ddc759f72b5b2b0f54a7e7cde44acef680a55019eb52ac63a7af2cf17cb9cd2 \ + --hash=sha256:743044b4cfb34f9a67205cee9279feaf60ba7d02e69febc2afc609047cb49179 \ + --hash=sha256:7ae48c236c0324b4e139bea88a306a04ca630f49be66741b340729d380d8f52f \ + --hash=sha256:7ed1a5bbfb386ee8332713bf7508bc24e32d24b74fc9a7b9f8529a55db9f4ee6 \ + --hash=sha256:8cff473e81017594d20ec55d86b54bc635544897e13a7cfc12e36909c5309a2a \ + --hash=sha256:8d8382dd120d64b03698b7298b89611a6ea6f55ada9d39942838b79c9bc89801 \ + --hash=sha256:9111274b88e4d7b54a95218e243282709b330ef52b7b86bc6aaf4f805306f454 \ + --hash=sha256:91121757b0a36c9aac3427a651a7e6567110a4a67c97edf04f8d55d4765f6618 \ + --hash=sha256:980c777c612514c0cf99bc8a9de6d286f5e186dcaf9091252fcd444e5638193d \ + --hash=sha256:9a522f460d28e2bf4e12396240a5fc1518788b2fcd73535166d748399ef0c223 \ + --hash=sha256:9c3a3c648aedc9f99c09263b39f2d8252f199cb3ac154fadc173283d7d111350 \ + --hash=sha256:a1fd0ea855b2cf7c9cddaf25543e914dd75af9de08785f20ca3085f2c9ca60b0 \ + --hash=sha256:a444e7afccdb0999e203b976adb37ea633725333e5b119ad40b1ca291ecf311c \ + --hash=sha256:a84e0582858d841d54355246ddfcbd1fce3179f185da7470f41ce39d001ee1af \ + --hash=sha256:b510dda1a3672d6879beb319bc7c5fd302c6c354584690973c838f46ec3e0fa8 \ + --hash=sha256:b54df60f1fbdecc8ebc2c5b11870461a6417b3d617f555e5033f1505d36e5735 \ + --hash=sha256:b705c99c76695702656327b819c9660768ec33f5ce01fa32b2af62b56ba400a1 \ + --hash=sha256:ba4d1333cc0bc94381d6d4308d2e4e008e0bd128bdcff5573199742ee3634359 \ + --hash=sha256:c64d5c97b2f47e3cd9b712eaced3b061f2b71234b3fc263e0fcf7d889c6559dc \ + 
--hash=sha256:c8876673449f3401f278c86eb33224c5764582f72b653a415d0e6672fde887bf \ + --hash=sha256:ca2591bff6645c216695bdf1614fca9c82ea1144d4a7591a466fef64f28f0715 \ + --hash=sha256:cc4f7397459b12c0b196c9efe1f9d7e92463cbba142632b4cc6d8bbbbd3e2b09 \ + --hash=sha256:cf831a13e0d5a7eb519e96f58ec26e049b1fad411fc6fc23b162a7ce04d9cffc \ + --hash=sha256:dc3487447a43d42adcdf52d7ac73804f2312cbfa5d433a7d2c506dcab0033dfd \ + --hash=sha256:df90e2db118c3671a0754f38e36802db75fe0920d211a27481daf50a711fdf26 \ + --hash=sha256:e38a190442aacc67ff9f75ce60aec04893041f16f97d242209106d502486a142 \ + --hash=sha256:e9eddfe513096a71896441a7c37db72da0687b34752c4e193577a145c71736fc \ + --hash=sha256:eba6710407f1daa8e7602c347dfc94adc02205ec27ed956346190d66579eb9ea \ + --hash=sha256:ef4a456cc8b7b3d50ccec29642aa4aeda959e9da2fe9540a92754770d5f0cf1f \ + --hash=sha256:f67b34271dedc3c653eba4e3d7111aa421d5be9b4c4c7d38d30907f796cb30df \ + --hash=sha256:f6f6cd5819c50d9bcf921882784586aab34a4bd53e7553e175dece6db513a6f0 \ + --hash=sha256:fe2533caae6a91a543dec62e8360fe86ffcdc42a7c55f9dfd0128a977a896b94 \ + --hash=sha256:fe7b77dc63d707c09726b7908f575fc04ff1d1ad0f3fb92aec212396bc6cfe5e \ + --hash=sha256:fe9627c39c59e553c90f5bc3128252cb85dc3b3be8189710666d2f8bc3a5503e + # via easyocr +shellingham==1.5.4 \ + --hash=sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686 \ + --hash=sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de + # via typer +singlestoredb==1.7.2 \ + --hash=sha256:2c5379ae7730b54a5b18f93a3c998bd22394d727837edc76a9cbb2b0eba3b567 \ + --hash=sha256:346ed52cb4280d44472d9f80197c90d220cd5d92ae687423beea89e4b291a701 \ + --hash=sha256:5bebdd48dc40dd670d0ae21f68bab4a14eb69aeabd357f177f5fc88821a4f0a1 \ + --hash=sha256:69c07490e0cd22fcfd823e493b630cfe845ad4c484fee80b340bfcce3427f48a \ + --hash=sha256:92bc932df8b124a3c88b552210f9e0bb11cba4bdfbc9e7568c1582c00f0e8bcb \ + --hash=sha256:c2a23b2b22f1e76cb0d53c99250de9a600bec9621766e25ae379c50914d6436a \ + 
--hash=sha256:fba7f30f7fddb88e656e4309157d9e0016b6b1127d5adf348ba831bf77872d07 + # via feast (setup.py) +six==1.17.0 \ + --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ + --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 + # via + # happybase + # kubernetes + # mock + # opencensus + # openshift-client + # python-dateutil + # rfc3339-validator + # thriftpy2 +smart-open==7.5.0 \ + --hash=sha256:87e695c5148bbb988f15cec00971602765874163be85acb1c9fb8abc012e6599 \ + --hash=sha256:f394b143851d8091011832ac8113ea4aba6b92e6c35f6e677ddaaccb169d7cb9 + # via ray +sniffio==1.3.1 \ + --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \ + --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc + # via + # elastic-transport + # elasticsearch + # httpx +snowballstemmer==3.0.1 \ + --hash=sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064 \ + --hash=sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895 + # via sphinx +snowflake-connector-python[pandas]==3.18.0 \ + --hash=sha256:0af10b207af3d2de2b130e89018d49a60f2e5cfe841f3bf459e58f2e1c4c4506 \ + --hash=sha256:1841b60dc376639493dfc520cf39ad4f4da1f30286bba57e878d57414263d628 \ + --hash=sha256:1afbd9e21180d2b4a76500ac2978b11865fdb3230609f2a9d80ba459fc27f2e4 \ + --hash=sha256:1fb9fc9d8c2c7d209ba89282d367a32e75b0688afd4a3f02409e24f153c1a32e \ + --hash=sha256:283366b35df88cd0c71caf0215ba80370ddef4dd37d2adf43b24208c747231ee \ + --hash=sha256:2e4c285cc6a7f6431cff98c8f235a0fe9da2262462dd3dfc2b97120574a95cf9 \ + --hash=sha256:32b1abfea32561d817b0a2f80b06d936cb32712af06bf7b848a428bfd857a10a \ + --hash=sha256:3fee7035f865088f948510b094101c8a0e5b22501891f2115f7fb1cb555de76a \ + --hash=sha256:41a46eb9824574c5f8068e3ed5c02a2dc0a733ed08ee81fa1fb3dd0ebe921728 \ + --hash=sha256:4c068c8d3cd0c9736cb0679a9f544d34327e64415303bbfe07ec8ce3c5dae800 \ + 
--hash=sha256:4ed2d593f1983939d5d8d88b212d86fd4f14f0ceefc1df9882b4a18534adbde9 \ + --hash=sha256:51eb789a09dc6c62119cfabd044fba1a6b8378206f05a1e83ddb2e9cb49acc0b \ + --hash=sha256:5d89f608fde2fb0597ca5e020c4ac602027dc67f11b61b4d1e5448163bae4edc \ + --hash=sha256:65d37263dd288abb649820b7e34af96dc6b2d2115bf5521a2526245f81ddb0cb \ + --hash=sha256:7116cfa410d517328fd25fabffb54845b88667586718578c4333ce034fead1ba \ + --hash=sha256:783a9ab206563d7b52fdcdd7a72af62de811d3381ca64132fd3445537b4d041b \ + --hash=sha256:7a5fcb9a25a9b77b6cd86dfc6a6324b9910e15a493a916983229011ce3509b5f \ + --hash=sha256:8d3e96e1d09b07edca6c1f6ca675b6fdd05a4a7e428e4cdf6fb697d87b9f60fc \ + --hash=sha256:94e041e347b5151b66d19d6cfc3b3172dac1f51e44bbf7cf58f3989427dd464a \ + --hash=sha256:a8c570edff5a4888840dbe1e9e65c5e4d77d55c5c800cd359fe0903a769201e0 \ + --hash=sha256:aeeb181a156333480f60b5f8ddbb3d087e288b4509adbef7993236defe4d7570 \ + --hash=sha256:b211b4240596a225b895261a4ced2633e0262e82e2e32f6fb8dfc7d4bfedf8ca \ + --hash=sha256:b99f261c82be92224ac20c8c12bdf26ce3ed5dfd8a3df8a97f15a1e11c46ad27 \ + --hash=sha256:bd1de3038b6d7059ca59f93e105aba2a673151c693cc4292f72f38bfaf147df2 \ + --hash=sha256:cfa6b234f53ec624149e21156d0a98e43408d194f2e65bcfaf30acefd35a581e \ + --hash=sha256:e17a9e806823d3a0e578cf9349f6a93810a582b3132903ea9e1683854d08da00 + # via feast (setup.py) +sortedcontainers==2.4.0 \ + --hash=sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88 \ + --hash=sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0 + # via snowflake-connector-python +soupsieve==2.8.3 \ + --hash=sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349 \ + --hash=sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95 + # via beautifulsoup4 +sphinx==6.2.1 \ + --hash=sha256:6d56a34697bb749ffa0152feafc4b19836c755d90a7c59b72bc7dfd371b9cc6b \ + --hash=sha256:97787ff1fa3256a3eef9eda523a63dbf299f7b47e053cfcf684a1c2a8380c912 + # via feast (setup.py) 
+sphinxcontrib-applehelp==2.0.0 \ + --hash=sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1 \ + --hash=sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5 + # via sphinx +sphinxcontrib-devhelp==2.0.0 \ + --hash=sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad \ + --hash=sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2 + # via sphinx +sphinxcontrib-htmlhelp==2.1.0 \ + --hash=sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8 \ + --hash=sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9 + # via sphinx +sphinxcontrib-jsmath==1.0.1 \ + --hash=sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178 \ + --hash=sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8 + # via sphinx +sphinxcontrib-qthelp==2.0.0 \ + --hash=sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab \ + --hash=sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb + # via sphinx +sphinxcontrib-serializinghtml==2.0.0 \ + --hash=sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331 \ + --hash=sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d + # via sphinx +sqlalchemy[mypy]==2.0.45 \ + --hash=sha256:0209d9753671b0da74da2cfbb9ecf9c02f72a759e4b018b3ab35f244c91842c7 \ + --hash=sha256:040f6f0545b3b7da6b9317fc3e922c9a98fc7243b2a1b39f78390fc0942f7826 \ + --hash=sha256:0c9f6ada57b58420a2c0277ff853abe40b9e9449f8d7d231763c6bc30f5c4953 \ + --hash=sha256:0f02325709d1b1a1489f23a39b318e175a171497374149eae74d612634b234c0 \ + --hash=sha256:107029bf4f43d076d4011f1afb74f7c3e2ea029ec82eb23d8527d5e909e97aa6 \ + --hash=sha256:12c694ed6468333a090d2f60950e4250b928f457e4962389553d6ba5fe9951ac \ + --hash=sha256:13e27397a7810163440c6bfed6b3fe46f1bfb2486eb540315a819abd2c004128 \ + --hash=sha256:1632a4bda8d2d25703fdad6363058d882541bdaaee0e5e3ddfa0cd3229efce88 \ + 
--hash=sha256:1d8b4a7a8c9b537509d56d5cd10ecdcfbb95912d72480c8861524efecc6a3fff \ + --hash=sha256:215f0528b914e5c75ef2559f69dca86878a3beeb0c1be7279d77f18e8d180ed4 \ + --hash=sha256:2c0b74aa79e2deade948fe8593654c8ef4228c44ba862bb7c9585c8e0db90f33 \ + --hash=sha256:2e90a344c644a4fa871eb01809c32096487928bd2038bf10f3e4515cb688cc56 \ + --hash=sha256:3c5f76216e7b85770d5bb5130ddd11ee89f4d52b11783674a662c7dd57018177 \ + --hash=sha256:470daea2c1ce73910f08caf10575676a37159a6d16c4da33d0033546bddebc9b \ + --hash=sha256:4748601c8ea959e37e03d13dcda4a44837afcd1b21338e637f7c935b8da06177 \ + --hash=sha256:4b6bec67ca45bc166c8729910bd2a87f1c0407ee955df110d78948f5b5827e8a \ + --hash=sha256:5225a288e4c8cc2308dbdd874edad6e7d0fd38eac1e9e5f23503425c8eee20d0 \ + --hash=sha256:56ead1f8dfb91a54a28cd1d072c74b3d635bcffbd25e50786533b822d4f2cde2 \ + --hash=sha256:5964f832431b7cdfaaa22a660b4c7eb1dfcd6ed41375f67fd3e3440fd95cb3cc \ + --hash=sha256:59a8b8bd9c6bedf81ad07c8bd5543eedca55fe9b8780b2b628d495ba55f8db1e \ + --hash=sha256:672c45cae53ba88e0dad74b9027dddd09ef6f441e927786b05bec75d949fbb2e \ + --hash=sha256:6d0beadc2535157070c9c17ecf25ecec31e13c229a8f69196d7590bde8082bf1 \ + --hash=sha256:7ae64ebf7657395824a19bca98ab10eb9a3ecb026bf09524014f1bb81cb598d4 \ + --hash=sha256:7f46ec744e7f51275582e6a24326e10c49fbdd3fc99103e01376841213028774 \ + --hash=sha256:830d434d609fe7bfa47c425c445a8b37929f140a7a44cdaf77f6d34df3a7296a \ + --hash=sha256:83d7009f40ce619d483d26ac1b757dfe3167b39921379a8bd1b596cf02dab4a6 \ + --hash=sha256:883c600c345123c033c2f6caca18def08f1f7f4c3ebeb591a63b6fceffc95cce \ + --hash=sha256:8a420169cef179d4c9064365f42d779f1e5895ad26ca0c8b4c0233920973db74 \ + --hash=sha256:8defe5737c6d2179c7997242d6473587c3beb52e557f5ef0187277009f73e5e1 \ + --hash=sha256:9a62b446b7d86a3909abbcd1cd3cc550a832f99c2bc37c5b22e1925438b9367b \ + --hash=sha256:9c6378449e0940476577047150fd09e242529b761dc887c9808a9a937fe990c8 \ + --hash=sha256:a15b98adb7f277316f2c276c090259129ee4afca783495e212048daf846654b2 \ + 
--hash=sha256:afbf47dc4de31fa38fd491f3705cac5307d21d4bb828a4f020ee59af412744ee \ + --hash=sha256:b3ee2aac15169fb0d45822983631466d60b762085bc4535cd39e66bea362df5f \ + --hash=sha256:b8c8b41b97fba5f62349aa285654230296829672fc9939cd7f35aab246d1c08b \ + --hash=sha256:ba547ac0b361ab4f1608afbc8432db669bd0819b3e12e29fb5fa9529a8bba81d \ + --hash=sha256:c1c2091b1489435ff85728fafeb990f073e64f6f5e81d5cd53059773e8521eb6 \ + --hash=sha256:c64772786d9eee72d4d3784c28f0a636af5b0a29f3fe26ff11f55efe90c0bd85 \ + --hash=sha256:cd337d3526ec5298f67d6a30bbbe4ed7e5e68862f0bf6dd21d289f8d37b7d60b \ + --hash=sha256:d29b2b99d527dbc66dd87c3c3248a5dd789d974a507f4653c969999fc7c1191b \ + --hash=sha256:d2c3684fca8a05f0ac1d9a21c1f4a266983a7ea9180efb80ffeb03861ecd01a0 \ + --hash=sha256:d62e47f5d8a50099b17e2bfc1b0c7d7ecd8ba6b46b1507b58cc4f05eefc3bb1c \ + --hash=sha256:d8a2ca754e5415cde2b656c27900b19d50ba076aa05ce66e2207623d3fe41f5a \ + --hash=sha256:db6834900338fb13a9123307f0c2cbb1f890a8656fcd5e5448ae3ad5bbe8d312 \ + --hash=sha256:e057f928ffe9c9b246a55b469c133b98a426297e1772ad24ce9f0c47d123bd5b \ + --hash=sha256:e50dcb81a5dfe4b7b4a4aa8f338116d127cb209559124f3694c70d6cd072b68f \ + --hash=sha256:ebd300afd2b62679203435f596b2601adafe546cb7282d5a0cd3ed99e423720f \ + --hash=sha256:ed3635353e55d28e7f4a95c8eda98a5cdc0a0b40b528433fbd41a9ae88f55b3d \ + --hash=sha256:ee580ab50e748208754ae8980cec79ec205983d8cf8b3f7c39067f3d9f2c8e22 \ + --hash=sha256:f7d27a1d977a1cfef38a0e2e1ca86f09c4212666ce34e6ae542f3ed0a33bc606 \ + --hash=sha256:fd93c6f5d65f254ceabe97548c709e073d6da9883343adaa51bf1a913ce93f8e \ + --hash=sha256:fe187fc31a54d7fd90352f34e8c008cf3ad5d064d08fedd3de2e8df83eb4a1cf + # via feast (setup.py) +sqlglot[rs]==25.20.2 \ + --hash=sha256:169fe8308dd70d7bd40117b2221b62bdc7c4e2ea8eb07394b2a6146cdedf05ab \ + --hash=sha256:cdbfd7ce3f2f39f32bd7b4c23fd9e0fd261636a6b14285b914e8def25fd0a567 + # via + # feast (setup.py) + # ibis-framework +sqlglotrs==0.2.12 \ + 
--hash=sha256:0338c7770a5cb5bb0ec1dcbe5206359fe9b83da0aba8dde53b9e7bd1afc89a22 \ + --hash=sha256:057a8db59a6c4bcdc42831e7ad01f41cf9e7f388ed5b139816adafbcacf2f591 \ + --hash=sha256:065835e7f2be50ba83895b64d044a39dab9d95098fff995427365e4bd8bc7bc6 \ + --hash=sha256:08e8be22da77c964be76ab4438da2c77096f5871088466ca950ee1b4712a97d4 \ + --hash=sha256:147cda8412f45af290ad190d9a98b5829a5f46a575ce768279ccebf9b7b53785 \ + --hash=sha256:155b0d59e34851b119c7ff0b2c7968c7b51667c1a1c2abefe1ac7244b3c1d78e \ + --hash=sha256:17b289ef0f25a7c034d183c588345e2b56622f7f64a85d1020633a75f8e3ac96 \ + --hash=sha256:1fc98b7649445e726a492841b8b8b39a4e5724ec2787cd1436404ebccf42519a \ + --hash=sha256:2554ead3126c83864a4b7e48e8e7e1bc23faf7160a6f28d3db967661cf529c9e \ + --hash=sha256:2824fc87fd7e41a785150ff042c7934e1fff97c6ccd59e4d96bebf6697a90762 \ + --hash=sha256:2db7e6cd41ef88c2ac647ad0258f87906de822955dec8f14e91829083047d784 \ + --hash=sha256:315f7f7bbfedf0c87d98068e62363454e986bdd05baa165b7fb448b5c6fe9f1a \ + --hash=sha256:327bfc2d71449f4dffba93d63f0565c4a1fa818143b1cfbc3f936fa8c9bcce10 \ + --hash=sha256:39a6ef72cf271db93ec6019847b7832defa9f4013c1e66851ca9c0a11c010c0c \ + --hash=sha256:4364116b7b0c72b841de6acd149a002bfc8fe360125989d4f39debd387c874d8 \ + --hash=sha256:4c07d3dba9c3ae8b56a0e45a9e47aa2a2c6ed95870c5bcc67dacaadb873843ff \ + --hash=sha256:4ceb28cf2ee3850cd745167cebe59a5fc3d506b32e9c81307938d8d272c1d670 \ + --hash=sha256:4ec38035523d54ba33de1e2b5562de4938254b61e1df48eb1db0e26ea189de28 \ + --hash=sha256:5026eada48f258ce9ad26fa41994b2ea5404bef2c3df9cb5cb2a159112a6269f \ + --hash=sha256:59499adc27a70a72170db9241404a18d4829cd3a83a076b9e112ad365c4b1452 \ + --hash=sha256:5be231acf95920bed473524dd1cac93e4cb320ed7e6ae937531b232c54cfc232 \ + --hash=sha256:67e288759d2be822db2175d0025c1f61283b019f2cc3e2577f31ad0ef3b5854d \ + --hash=sha256:6aacab6e20d92be3ca76f7358fa12346f29985e2d408660c764b7f1c75cc40ee \ + --hash=sha256:6ef3a827f2980aad17af4f8548297c93c4989d4cd3f64b9bcb7443952c542423 \ + 
--hash=sha256:732516bffffc70f172306ad8bc747dd9f16512cdbc09475abe6ad6f744479dee \ + --hash=sha256:76e4e1765c6be438329e234e56a6772537f6de16c4bb5ba7170e344664cccdf7 \ + --hash=sha256:7b553cdb9e8afcfea5466815e865f874f6f51aaace4fb4101670e150f7bbfe5a \ + --hash=sha256:7c79c43c5cde1f4017641032f11770ed8111c963dccc096cd15df906d4fb46a4 \ + --hash=sha256:8174aa227193d0a755f4515e6c3883be4681c9b669a65c2316f09be27b84be4d \ + --hash=sha256:8a18b3a09c32788d1ee2d0610ab35af862413c56b65f8ad8bc0131701f03103b \ + --hash=sha256:8f268aea3d2ebc05cb9148bb487f21e532f8af1b0a4aed6b7374703aadfb6a7c \ + --hash=sha256:91971032603d05428fd42a978084110afb2a4c0975e4343b075f69a23889e3da \ + --hash=sha256:9334f6c394a671a630c61339d52fb7da1a72eca057570f039b2a4035d2e39380 \ + --hash=sha256:954ccd912391ab5922adb23159ebcc0c5dccb468381e2a1ce92117cb4b0f0ed3 \ + --hash=sha256:9597865efc40e5c41af7719106c7620e1338aaa64646726652c63bae14225391 \ + --hash=sha256:97b2c74fcdd89f0d4458c0e2b5783989be99a1e0b2d627797688ab716ad9391b \ + --hash=sha256:989ccc5dc6b38da937481b6eb2dc1fc0b13676fe129697b874828e577984d7ef \ + --hash=sha256:9c4c6f6fe1c54fff614f9d0b2dd7a6bf948bda87ce51a245dcd3f447f20c8b74 \ + --hash=sha256:9d5b9a9d6259b72258f6764f88a89faa3c648438bd1b2c3a9598b725d42bf6f2 \ + --hash=sha256:a266c9047726d83c51a8ec3d5278ceb9caf131307c9c93c4ceefd99c0116e538 \ + --hash=sha256:a4a2cacb31f75e242c7b9ff4afae1d95f548df8441444114376d8007cc91b55b \ + --hash=sha256:aaf86275a3388da1ed2161645aa346bfca3ee6e1dc0e2115867db9e78f1caddd \ + --hash=sha256:ab676d2d7da28907a139ad5fc20dee0890054967bac0b18e653ac048837c9ea1 \ + --hash=sha256:acc25d651eb663332157c2e5d2736516cddf4cd0effe67a887723934de5051d1 \ + --hash=sha256:b10bf6b71961b31951bf4dff937d8d5d399ea1b3bd47fb5c5810386710fe7dfb \ + --hash=sha256:b40601e67f5abae5d09d23f92394dbd735539de469ce663b596eb42bf77d2c54 \ + --hash=sha256:b6020825e58af6e2795e6dcb69639f5500e45e1da78f1b1abd74c4d11083a249 \ + --hash=sha256:bc1807c6222e32fc9bf6f5c7e12b85c4b72f12227800d40c1693244c198b33bb \ + 
--hash=sha256:bd6c4e6a7670f761c8e69b45d6d302a4d37a3cddb1fdca2ad90e54b77858fe80 \ + --hash=sha256:bf3e2eab11f06f1df13c0f85b3e26fbab0b7e8a5d189e5edfed951bc85f6bd48 \ + --hash=sha256:c3d62905ce74a48714b7662ad95efe299fad62f193be4b482a327af060f98710 \ + --hash=sha256:c3e0edde0fdf598561e7404ac56fb4b12276394ee5155b5365e42434c6f287a3 \ + --hash=sha256:c64066d13bd2e5e788b845c933c765af9991faa93982e273b623019a1161fadc \ + --hash=sha256:c8bf7ae29c0fc66e9c998d7f8e6f6fc26309c6eb5a4728e1443cb628218bc307 \ + --hash=sha256:d2827c7bf7e57496f9b95658bcd2395cfb0c51adc3023cd3386988337dfaf6a5 \ + --hash=sha256:e7b2da43b2a6a85807df6c56b2627abe244aff28fdf9a4940d38d749cb4b8e3e \ + --hash=sha256:ebc162a599fac86e59f899631716752fbc7f89598e94729eadb707e54db371b2 \ + --hash=sha256:f0a2ddeab27a94447270b7a240770a31a3afed0a972d60085205baec990ad76a \ + --hash=sha256:f104a98182761d4613f920eda7ec5fc921afb3608f7db648206ce06dd10a6be5 \ + --hash=sha256:f83ad3fb4ea57218c0e65d3499e31c9bb3051bbb5dccbb11593eaf1640964b51 \ + --hash=sha256:fa1ae834fb78bd52bb76e3c8d02cb79f45717ab1f02f4ad8154bf33a5408a502 + # via sqlglot +sqlite-vec==0.1.6 \ + --hash=sha256:77491bcaa6d496f2acb5cc0d0ff0b8964434f141523c121e313f9a7d8088dee3 \ + --hash=sha256:7b0519d9cd96164cd2e08e8eed225197f9cd2f0be82cb04567692a0a4be02da3 \ + --hash=sha256:823b0493add80d7fe82ab0fe25df7c0703f4752941aee1c7b2b02cec9656cb24 \ + --hash=sha256:c65bcfd90fa2f41f9000052bcb8bb75d38240b2dae49225389eca6c3136d3f0c \ + --hash=sha256:fdca35f7ee3243668a055255d4dee4dea7eed5a06da8cad409f89facf4595361 + # via feast (setup.py) +sqlparams==6.2.0 \ + --hash=sha256:3744a2ad16f71293db6505b21fd5229b4757489a9b09f3553656a1ae97ba7ca5 \ + --hash=sha256:63b32ed9051bdc52e7e8b38bc4f78aed51796cdd9135e730f4c6a7db1048dedf + # via singlestoredb +sse-starlette==3.2.0 \ + --hash=sha256:5876954bd51920fc2cd51baee47a080eb88a37b5b784e615abb0b283f801cdbf \ + --hash=sha256:8127594edfb51abe44eac9c49e59b0b01f1039d0c7461c6fd91d4e03b70da422 + # via mcp +stack-data==0.6.3 \ + 
--hash=sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9 \ + --hash=sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695 + # via ipython +starlette==0.50.0 \ + --hash=sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca \ + --hash=sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca + # via + # fastapi + # mcp + # sse-starlette +substrait==0.24.2 \ + --hash=sha256:743cc352e96b0927b2cd37cd5a8fdac0a96a68df9600bd104fc36aebd222a836 \ + --hash=sha256:d1d475833566fa9d67eed3273456883c0568486ccced92b524b31709d2817e19 + # via + # feast (setup.py) + # ibis-substrait +sympy==1.14.0 \ + --hash=sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517 \ + --hash=sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5 + # via torch +tabulate==0.9.0 \ + --hash=sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c \ + --hash=sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f + # via + # feast (setup.py) + # docling-core + # docling-parse +tenacity==8.5.0 \ + --hash=sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78 \ + --hash=sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687 + # via feast (setup.py) +terminado==0.18.1 \ + --hash=sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0 \ + --hash=sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e + # via + # jupyter-server + # jupyter-server-terminals +testcontainers==4.9.0 \ + --hash=sha256:2cd6af070109ff68c1ab5389dc89c86c2dc3ab30a21ca734b2cb8f0f80ad479e \ + --hash=sha256:c6fee929990972c40bf6b91b7072c94064ff3649b405a14fde0274c8b2479d32 + # via feast (setup.py) +threadpoolctl==3.6.0 \ + --hash=sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb \ + --hash=sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e + # via scikit-learn +thriftpy2==0.5.3 \ + 
--hash=sha256:08d8699d318b6a8fe9e9fd4c2234ec7912462d90cc636c371b4f4f6500a44328 \ + --hash=sha256:0f36f80a038dbfc2b3b048151ca4732f310ebd0385cdf20e7864d781d5d6f582 \ + --hash=sha256:13c0316a1a9b6f7840d9c084a5a1fa2e419ae86645e45530593558704e792d7f \ + --hash=sha256:195beb93caa104879d808e87d92962fff8d59d40486590fd653b5aeb7774420e \ + --hash=sha256:1bb6c0482663887f2a9ab98453ab0ca20a3e1f2336a500b7da12af33614c0d75 \ + --hash=sha256:2023abcc504e4fc8825419964ecfab904244b0bc189d0082380d481ecba951d7 \ + --hash=sha256:236a7d4627b1aa692a901ca45d7dfa4e516bcd3f309efc18ac69671b31789e39 \ + --hash=sha256:29b09fa1fb77f1927ac4ce21d8f8f6663d8917f75780aa6bad57ca9473d0a3b3 \ + --hash=sha256:2d3c0403673a3b7fc38304cf89e07c792f44ac6aa3b15c12e6cc411a85d10af3 \ + --hash=sha256:2eb14e24febbeca84d603e88a24db9ccb4a1437f90e9a862dcee02dc0a2194c2 \ + --hash=sha256:2fb20edf082965487bddfba03b2c05bb50db38ceda3111540cb2353949fdb29e \ + --hash=sha256:387c04d02f23ae83415cb2de35a88ba79321619af25cf34a481cabc367ddf1aa \ + --hash=sha256:3c00c340114c3041961906a628e70e6b6b5805ee691e682c290cec3513e77efc \ + --hash=sha256:3da0e3c1a5c17f67a203d9814853dd1d8fe8b0ec69a26d30d6b634e4c0e2c87c \ + --hash=sha256:3e2fa8c3d2b1505d2a463c090d9e771b8fed0eda8b01b0365e7547ba106bf2db \ + --hash=sha256:455440132b01b3a895001dc59ecf6056e8fd041ad6e745ff22391cf3a1f8361e \ + --hash=sha256:523c480a4b3aad480e4738c32f97b3f356ed998e6549f5f55eb6f7852474cfff \ + --hash=sha256:524d69102843fed087e30c6edc5b99f7b42b768d88bf910787add60e37e2a2a5 \ + --hash=sha256:53e761ea5eab24bb3520f8adecdbb633e69dda7cf9678ca2eb6ad1952cc56540 \ + --hash=sha256:5e799f6c4caf79a5566bc14941e768b132c533bed66e0a5ef0a127a74f98acab \ + --hash=sha256:6196d7d5adb6214ed21633ec57a222c90a6a66498cdd9f8da7c85c7514c7c439 \ + --hash=sha256:6384a142514982b380431b7d8811f137c5ec9cf5cf3affa33884b7ce4a51c8c2 \ + --hash=sha256:67ec304d83510d0ec83338ef029ea3bae91fdbb3bf0504f7990dd446b483773e \ + --hash=sha256:6efccde429f93740943bc4c0e2664a49f0799ed21663cc26b16a823e8719040f \ + 
--hash=sha256:710b7f3d9fdbb5788b37b4b694d30fa37a3c08c0d52aeb1def57a5140fa2f4fd \ + --hash=sha256:716c36885e29a9480bdabe117235967b5fe36dc179aca80cd8ef9e12866e4cef \ + --hash=sha256:72a1cdfd3bcf16b667379f8277e83295a34ae74227e54711c657305bf4c9e63b \ + --hash=sha256:73832e79732bab15920bed700a5efbe2b6e9dbd35710e815e870feb873b15059 \ + --hash=sha256:74dc6ed8c9098b66a17b916dd898abc32801a8ef0b439a407bce4f11c1b3da34 \ + --hash=sha256:772c9c1015d05177e37b9a547bcd27b560fc888ecd1e179ebf7f114ec467914d \ + --hash=sha256:86489105abb39c6ff93c3d270cf1474f7fddd380847f2b6bc8d09e5d0f0a23ab \ + --hash=sha256:8a6652e823e0ae6fa9f73b62c1a2ed04d7c0e1ac402c7ca7c509f9f14fbcc80f \ + --hash=sha256:9460a8284881854e210907eeb1761f44afacb4d164d1b6ecfddc184ed1b03277 \ + --hash=sha256:94806a0c3436189a75efac4ab067cdab7298876fee40cc0006300cc1d7982055 \ + --hash=sha256:9cba3454b4e5e05102d2dbfadd3a9a66c19488c6aa18c147bbeff2097ae67f04 \ + --hash=sha256:9eda43701a94def9d063550b0d8261630c40ade312c35b1f6e4804859c783ed8 \ + --hash=sha256:a57c67a880c9da2d252a6244e9ccf7b08850388c7afa4f0e98cb60fdca904a09 \ + --hash=sha256:a7a00b772783847c0c48a43e098b64f5741ca5a2e52e6c66d9b753765cd93ed3 \ + --hash=sha256:a7f2913ea3beac18767784059f02a67751d99094d4a368b350911784f0e09709 \ + --hash=sha256:ab47f689b0003ec63a881e5ad4f46046f62632da5168b0665fd369a3561eaa2a \ + --hash=sha256:addcc3ba9c106758e9073ab205e4bcf9a9540acb335fafa0184a1fa8e76a78cb \ + --hash=sha256:ade0165ba060b97333bc7a927229e992441bfa17bb8e13ea05590c2ec3551b17 \ + --hash=sha256:b208f3c23f916ca0517285c11748ca1fcf43a2ac2224ea5eef8bcba464a20652 \ + --hash=sha256:b5670936016aeaeb7111c96661ced36541211c0e82eb357a9bee5d4176ebbcff \ + --hash=sha256:be23631c152323dd3d7d51368dadcec75e60e90e4662be4f2b8ada208c61fa34 \ + --hash=sha256:bf69d246c39d0ce4ed922b6e00e643ca514cdf40010b00b46f82b0f758a840b3 \ + --hash=sha256:c01e0da29120709d46cb4310944fc717f28ce097d8845c4c29e111ff98c9deff \ + --hash=sha256:c0cf4418810ecf984f6d7f538988175c459f6bd5c85d94b878ebb11dbdbfa62b \ + 
--hash=sha256:ca4d554f8fc79c8152119bbd576e5d6a5c11e907e0baf467fb4676b1d274558a \ + --hash=sha256:d5080c1c4bd13c4431613a2c0cd607c5e3a07a496a865a0d01f534401d3b09c7 \ + --hash=sha256:e6a77d3d190f1c2726cfb11d1115678fcfa4b0ff509bd8bb38e451c629d9383c \ + --hash=sha256:eb440b7d8e7460f6969016d77e25ebfdee2aa6d5fed95aecf2bd59310c2c5530 \ + --hash=sha256:ec49907ee15513ca2344540c4ad2bf1945c41a6c0236d589eebd32be8298faa9 \ + --hash=sha256:f2ccb893ae687ff946902d96a5615a93847a7868bc5d66f51caf5ccf46466314 \ + --hash=sha256:f4210c10b686fe4a32b121f618b407aaccc7a72021c6d64fa181a09df72c4d89 \ + --hash=sha256:f4d122a82cf7cd4743a2640199b066a994f6527802c2dd16f2e4fffc15efa2a3 \ + --hash=sha256:f768756c0c105e98a3760fc7b4f4df12e25f5334b204060bb6bdab3ce1599e11 \ + --hash=sha256:f965fff2f2f323ddb5d9cb7fabe33c4c9f008955dbb59728ffc3111557b87793 \ + --hash=sha256:fb86f4c0cfcb39949a53dcc689e3758594a18724753861ba9f59646b72417383 \ + --hash=sha256:fd4c6131ca6e919f03263cc83b713f1797bc20126a858da8518ed49b3e32c334 \ + --hash=sha256:fdc5676b52fa6a3009d205360eb9ba257b8b4813883ed52797a20838bcc45dde + # via happybase +tifffile==2026.1.14 \ + --hash=sha256:29cf4adb43562a4624fc959018ab1b44e0342015d3db4581b983fe40e05f5924 \ + --hash=sha256:a423c583e1eecd9ca255642d47f463efa8d7f2365a0e110eb0167570493e0c8c + # via scikit-image +timm==1.0.24 \ + --hash=sha256:8301ac783410c6ad72c73c49326af6d71a9e4d1558238552796e825c2464913f \ + --hash=sha256:c7b909f43fe2ef8fe62c505e270cd4f1af230dfbc37f2ee93e3608492b9d9a40 + # via feast (setup.py) +tinycss2==1.4.0 \ + --hash=sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7 \ + --hash=sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289 + # via bleach +tokenizers==0.22.2 \ + --hash=sha256:143b999bdc46d10febb15cbffb4207ddd1f410e2c755857b5a0797961bbdc113 \ + --hash=sha256:1a62ba2c5faa2dd175aaeed7b15abf18d20266189fb3406c5d0550dd34dd5f37 \ + --hash=sha256:1c774b1276f71e1ef716e5486f21e76333464f47bece56bbd554485982a9e03e \ + 
--hash=sha256:1e418a55456beedca4621dbab65a318981467a2b188e982a23e117f115ce5001 \ + --hash=sha256:1e50f8554d504f617d9e9d6e4c2c2884a12b388a97c5c77f0bc6cf4cd032feee \ + --hash=sha256:2249487018adec45d6e3554c71d46eb39fa8ea67156c640f7513eb26f318cec7 \ + --hash=sha256:25b85325d0815e86e0bac263506dd114578953b7b53d7de09a6485e4a160a7dd \ + --hash=sha256:29c30b83d8dcd061078b05ae0cb94d3c710555fbb44861139f9f83dcca3dc3e4 \ + --hash=sha256:319f659ee992222f04e58f84cbf407cfa66a65fe3a8de44e8ad2bc53e7d99012 \ + --hash=sha256:369cc9fc8cc10cb24143873a0d95438bb8ee257bb80c71989e3ee290e8d72c67 \ + --hash=sha256:37ae80a28c1d3265bb1f22464c856bd23c02a05bb211e56d0c5301a435be6c1a \ + --hash=sha256:38337540fbbddff8e999d59970f3c6f35a82de10053206a7562f1ea02d046fa5 \ + --hash=sha256:473b83b915e547aa366d1eee11806deaf419e17be16310ac0a14077f1e28f917 \ + --hash=sha256:544dd704ae7238755d790de45ba8da072e9af3eea688f698b137915ae959281c \ + --hash=sha256:64d94e84f6660764e64e7e0b22baa72f6cd942279fdbb21d46abd70d179f0195 \ + --hash=sha256:753d47ebd4542742ef9261d9da92cd545b2cacbb48349a1225466745bb866ec4 \ + --hash=sha256:791135ee325f2336f498590eb2f11dc5c295232f288e75c99a36c5dbce63088a \ + --hash=sha256:9ce725d22864a1e965217204946f830c37876eee3b2ba6fc6255e8e903d5fcbc \ + --hash=sha256:a6bf3f88c554a2b653af81f3204491c818ae2ac6fbc09e76ef4773351292bc92 \ + --hash=sha256:bfb88f22a209ff7b40a576d5324bf8286b519d7358663db21d6246fb17eea2d5 \ + --hash=sha256:c9ea31edff2968b44a88f97d784c2f16dc0729b8b143ed004699ebca91f05c48 \ + --hash=sha256:df6c4265b289083bf710dff49bc51ef252f9d5be33a45ee2bed151114a56207b \ + --hash=sha256:e10bf9113d209be7cd046d40fbabbaf3278ff6d18eb4da4c500443185dc1896c \ + --hash=sha256:f01a9c019878532f98927d2bacb79bbb404b43d3437455522a00a30718cdedb5 + # via transformers +toml==0.10.2 \ + --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ + --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f + # via feast (setup.py) +tomli==2.4.0 \ + 
--hash=sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729 \ + --hash=sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b \ + --hash=sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d \ + --hash=sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df \ + --hash=sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576 \ + --hash=sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d \ + --hash=sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1 \ + --hash=sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a \ + --hash=sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e \ + --hash=sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc \ + --hash=sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702 \ + --hash=sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6 \ + --hash=sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd \ + --hash=sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4 \ + --hash=sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776 \ + --hash=sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a \ + --hash=sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66 \ + --hash=sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87 \ + --hash=sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2 \ + --hash=sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f \ + --hash=sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475 \ + --hash=sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f \ + --hash=sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95 \ + --hash=sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9 \ + 
--hash=sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3 \ + --hash=sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9 \ + --hash=sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76 \ + --hash=sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da \ + --hash=sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8 \ + --hash=sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51 \ + --hash=sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86 \ + --hash=sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8 \ + --hash=sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0 \ + --hash=sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b \ + --hash=sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1 \ + --hash=sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e \ + --hash=sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d \ + --hash=sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c \ + --hash=sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867 \ + --hash=sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a \ + --hash=sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c \ + --hash=sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0 \ + --hash=sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4 \ + --hash=sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614 \ + --hash=sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132 \ + --hash=sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa \ + --hash=sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087 + # via + # coverage + # fastapi-mcp +tomlkit==0.14.0 \ + 
--hash=sha256:592064ed85b40fa213469f81ac584f67a4f2992509a7c3ea2d632208623a3680 \ + --hash=sha256:cf00efca415dbd57575befb1f6634c4f42d2d87dbba376128adb42c121b87064 + # via + # poetry-dynamic-versioning + # snowflake-connector-python +toolz==0.12.1 \ + --hash=sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85 \ + --hash=sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d + # via + # altair + # dask + # ibis-framework + # partd +torch==2.9.1 \ + --hash=sha256:07c8a9660bc9414c39cac530ac83b1fb1b679d7155824144a40a54f4a47bfa73 \ + --hash=sha256:0a2bd769944991c74acf0c4ef23603b9c777fdf7637f115605a4b2d8023110c7 \ + --hash=sha256:0d06b30a9207b7c3516a9e0102114024755a07045f0c1d2f2a56b1819ac06bcb \ + --hash=sha256:19d144d6b3e29921f1fc70503e9f2fc572cde6a5115c0c0de2f7ca8b1483e8b6 \ + --hash=sha256:1cc208435f6c379f9b8fdfd5ceb5be1e3b72a6bdf1cb46c0d2812aa73472db9e \ + --hash=sha256:1edee27a7c9897f4e0b7c14cfc2f3008c571921134522d5b9b5ec4ebbc69041a \ + --hash=sha256:27331cd902fb4322252657f3902adf1c4f6acad9dcad81d8df3ae14c7c4f07c4 \ + --hash=sha256:2af70e3be4a13becba4655d6cc07dcfec7ae844db6ac38d6c1dafeb245d17d65 \ + --hash=sha256:2c14b3da5df416cf9cb5efab83aa3056f5b8cd8620b8fde81b4987ecab730587 \ + --hash=sha256:2e1c42c0ae92bf803a4b2409fdfed85e30f9027a66887f5e7dcdbc014c7531db \ + --hash=sha256:30a3e170a84894f3652434b56d59a64a2c11366b0ed5776fab33c2439396bf9a \ + --hash=sha256:52347912d868653e1528b47cafaf79b285b98be3f4f35d5955389b1b95224475 \ + --hash=sha256:524de44cd13931208ba2c4bde9ec7741fd4ae6bfd06409a604fc32f6520c2bc9 \ + --hash=sha256:545844cc16b3f91e08ce3b40e9c2d77012dd33a48d505aed34b7740ed627a1b2 \ + --hash=sha256:5be4bf7496f1e3ffb1dd44b672adb1ac3f081f204c5ca81eba6442f5f634df8e \ + --hash=sha256:62b3fd888277946918cba4478cf849303da5359f0fb4e3bfb86b0533ba2eaf8d \ + --hash=sha256:81a285002d7b8cfd3fdf1b98aa8df138d41f1a8334fd9ea37511517cedf43083 \ + --hash=sha256:8301a7b431e51764629208d0edaa4f9e4c33e6df0f2f90b90e261d623df6a4e2 \ + 
--hash=sha256:9fd35c68b3679378c11f5eb73220fdcb4e6f4592295277fbb657d31fd053237c \ + --hash=sha256:a83b0e84cc375e3318a808d032510dde99d696a85fe9473fc8575612b63ae951 \ + --hash=sha256:c0d25d1d8e531b8343bea0ed811d5d528958f1dcbd37e7245bc686273177ad7e \ + --hash=sha256:c29455d2b910b98738131990394da3e50eea8291dfeb4b12de71ecf1fdeb21cb \ + --hash=sha256:c432d04376f6d9767a9852ea0def7b47a7bbc8e7af3b16ac9cf9ce02b12851c9 \ + --hash=sha256:c88d3299ddeb2b35dcc31753305612db485ab6f1823e37fb29451c8b2732b87e \ + --hash=sha256:cb10896a1f7fedaddbccc2017ce6ca9ecaaf990f0973bdfcf405439750118d2c \ + --hash=sha256:d033ff0ac3f5400df862a51bdde9bad83561f3739ea0046e68f5401ebfa67c1b \ + --hash=sha256:d187566a2cdc726fc80138c3cdb260970fab1c27e99f85452721f7759bbd554d \ + --hash=sha256:da5f6f4d7f4940a173e5572791af238cb0b9e21b1aab592bd8b26da4c99f1cd6 + # via + # feast (setup.py) + # accelerate + # docling-ibm-models + # easyocr + # safetensors + # timm + # torchvision +torchvision==0.24.1 \ + --hash=sha256:056c525dc875f18fe8e9c27079ada166a7b2755cea5a2199b0bc7f1f8364e600 \ + --hash=sha256:1540a9e7f8cf55fe17554482f5a125a7e426347b71de07327d5de6bfd8d17caa \ + --hash=sha256:16274823b93048e0a29d83415166a2e9e0bf4e1b432668357b657612a4802864 \ + --hash=sha256:18f9cb60e64b37b551cd605a3d62c15730c086362b40682d23e24b616a697d41 \ + --hash=sha256:1b495edd3a8f9911292424117544f0b4ab780452e998649425d1f4b2bed6695f \ + --hash=sha256:1e39619de698e2821d71976c92c8a9e50cdfd1e993507dfb340f2688bfdd8283 \ + --hash=sha256:480b271d6edff83ac2e8d69bbb4cf2073f93366516a50d48f140ccfceedb002e \ + --hash=sha256:4aa6cb806eb8541e92c9b313e96192c6b826e9eb0042720e2fa250d021079952 \ + --hash=sha256:54ed17c3d30e718e08d8da3fd5b30ea44b0311317e55647cb97077a29ecbc25b \ + --hash=sha256:66a98471fc18cad9064123106d810a75f57f0838eee20edc56233fd8484b0cc7 \ + --hash=sha256:7fb7590c737ebe3e1c077ad60c0e5e2e56bb26e7bccc3b9d04dbfc34fd09f050 \ + --hash=sha256:8a6696db7fb71eadb2c6a48602106e136c785642e598eb1533e0b27744f2cce6 \ + 
--hash=sha256:9ef95d819fd6df81bc7cc97b8f21a15d2c0d3ac5dbfaab5cbc2d2ce57114b19e \ + --hash=sha256:a0f106663e60332aa4fcb1ca2159ef8c3f2ed266b0e6df88de261048a840e0df \ + --hash=sha256:a9308cdd37d8a42e14a3e7fd9d271830c7fecb150dd929b642f3c1460514599a \ + --hash=sha256:ab211e1807dc3e53acf8f6638df9a7444c80c0ad050466e8d652b3e83776987b \ + --hash=sha256:af9201184c2712d808bd4eb656899011afdfce1e83721c7cb08000034df353fe \ + --hash=sha256:cccf4b4fec7fdfcd3431b9ea75d1588c0a8596d0333245dafebee0462abe3388 \ + --hash=sha256:d83e16d70ea85d2f196d678bfb702c36be7a655b003abed84e465988b6128938 \ + --hash=sha256:db2125c46f9cb25dc740be831ce3ce99303cfe60439249a41b04fd9f373be671 \ + --hash=sha256:ded5e625788572e4e1c4d155d1bbc48805c113794100d70e19c76e39e4d53465 \ + --hash=sha256:e3f96208b4bef54cd60e415545f5200346a65024e04f29a26cd0006dbf9e8e66 \ + --hash=sha256:e48bf6a8ec95872eb45763f06499f87bd2fb246b9b96cb00aae260fda2f96193 \ + --hash=sha256:ec9d7379c519428395e4ffda4dbb99ec56be64b0a75b95989e00f9ec7ae0b2d7 \ + --hash=sha256:f035f0cacd1f44a8ff6cb7ca3627d84c54d685055961d73a1a9fb9827a5414c8 \ + --hash=sha256:f231f6a4f2aa6522713326d0d2563538fa72d613741ae364f9913027fa52ea35 \ + --hash=sha256:f476da4e085b7307aaab6f540219617d46d5926aeda24be33e1359771c83778f \ + --hash=sha256:fbdbdae5e540b868a681240b7dbd6473986c862445ee8a138680a6a97d6c34ff + # via + # feast (setup.py) + # docling-ibm-models + # easyocr + # timm +tornado==6.5.4 \ + --hash=sha256:053e6e16701eb6cbe641f308f4c1a9541f91b6261991160391bfc342e8a551a1 \ + --hash=sha256:1768110f2411d5cd281bac0a090f707223ce77fd110424361092859e089b38d1 \ + --hash=sha256:2d50f63dda1d2cac3ae1fa23d254e16b5e38153758470e9956cbc3d813d40843 \ + --hash=sha256:50ff0a58b0dc97939d29da29cd624da010e7f804746621c78d14b80238669335 \ + --hash=sha256:6076d5dda368c9328ff41ab5d9dd3608e695e8225d1cd0fd1e006f05da3635a8 \ + --hash=sha256:6eb82872335a53dd063a4f10917b3efd28270b56a33db69009606a0312660a6f \ + --hash=sha256:9c86b1643b33a4cd415f8d0fe53045f913bf07b4a3ef646b735a6a86047dda84 \ + 
--hash=sha256:a22fa9047405d03260b483980635f0b041989d8bcc9a313f8fe18b411d84b1d7 \ + --hash=sha256:d1cf66105dc6acb5af613c054955b8137e34a03698aa53272dbda4afe252be17 \ + --hash=sha256:d6241c1a16b1c9e4cc28148b1cda97dd1c6cb4fb7068ac1bedc610768dff0ba9 \ + --hash=sha256:e5fb5e04efa54cf0baabdd10061eb4148e0be137166146fff835745f59ab9f7f \ + --hash=sha256:fa07d31e0cd85c60713f2b995da613588aa03e1303d75705dca6af8babc18ddc + # via + # ipykernel + # jupyter-client + # jupyter-server + # jupyterlab + # notebook + # terminado +tqdm==4.67.1 \ + --hash=sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2 \ + --hash=sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2 + # via + # feast (setup.py) + # datasets + # docling + # docling-ibm-models + # great-expectations + # huggingface-hub + # milvus-lite + # mpire + # semchunk + # transformers +traitlets==5.14.3 \ + --hash=sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7 \ + --hash=sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f + # via + # ipykernel + # ipython + # ipywidgets + # jupyter-client + # jupyter-core + # jupyter-events + # jupyter-server + # jupyterlab + # matplotlib-inline + # nbclient + # nbconvert + # nbformat +transformers==4.57.6 \ + --hash=sha256:4c9e9de11333ddfe5114bc872c9f370509198acf0b87a832a0ab9458e2bd0550 \ + --hash=sha256:55e44126ece9dc0a291521b7e5492b572e6ef2766338a610b9ab5afbb70689d3 + # via + # feast (setup.py) + # docling-core + # docling-ibm-models +tree-sitter==0.24.0 \ + --hash=sha256:01ea01a7003b88b92f7f875da6ba9d5d741e0c84bb1bd92c503c0eecd0ee6409 \ + --hash=sha256:033506c1bc2ba7bd559b23a6bdbeaf1127cee3c68a094b82396718596dfe98bc \ + --hash=sha256:098a81df9f89cf254d92c1cd0660a838593f85d7505b28249216661d87adde4a \ + --hash=sha256:0b26bf9e958da6eb7e74a081aab9d9c7d05f9baeaa830dbb67481898fd16f1f5 \ + --hash=sha256:0d4a6416ed421c4210f0ca405a4834d5ccfbb8ad6692d4d74f7773ef68f92071 \ + 
--hash=sha256:14beeff5f11e223c37be7d5d119819880601a80d0399abe8c738ae2288804afc \ + --hash=sha256:23641bd25dcd4bb0b6fa91b8fb3f46cc9f1c9f475efe4d536d3f1f688d1b84c8 \ + --hash=sha256:24a8dd03b0d6b8812425f3b84d2f4763322684e38baf74e5bb766128b5633dc7 \ + --hash=sha256:26a5b130f70d5925d67b47db314da209063664585a2fd36fa69e0717738efaf4 \ + --hash=sha256:2a84ff87a2f2a008867a1064aba510ab3bd608e3e0cd6e8fef0379efee266c73 \ + --hash=sha256:3b1f3cbd9700e1fba0be2e7d801527e37c49fc02dc140714669144ef6ab58dce \ + --hash=sha256:464fa5b2cac63608915a9de8a6efd67a4da1929e603ea86abaeae2cb1fe89921 \ + --hash=sha256:4ddb113e6b8b3e3b199695b1492a47d87d06c538e63050823d90ef13cac585fd \ + --hash=sha256:57277a12fbcefb1c8b206186068d456c600dbfbc3fd6c76968ee22614c5cd5ad \ + --hash=sha256:5fc5c3c26d83c9d0ecb4fc4304fba35f034b7761d35286b936c1db1217558b4e \ + --hash=sha256:772e1bd8c0931c866b848d0369b32218ac97c24b04790ec4b0e409901945dd8e \ + --hash=sha256:7d5d9537507e1c8c5fa9935b34f320bfec4114d675e028f3ad94f11cf9db37b9 \ + --hash=sha256:a7c9c89666dea2ce2b2bf98e75f429d2876c569fab966afefdcd71974c6d8538 \ + --hash=sha256:abd95af65ca2f4f7eca356343391ed669e764f37748b5352946f00f7fc78e734 \ + --hash=sha256:c012e4c345c57a95d92ab5a890c637aaa51ab3b7ff25ed7069834b1087361c95 \ + --hash=sha256:d25fa22766d63f73716c6fec1a31ee5cf904aa429484256bd5fdf5259051ed74 \ + --hash=sha256:de0fb7c18c6068cacff46250c0a0473e8fc74d673e3e86555f131c2c1346fb13 \ + --hash=sha256:e0992d483677e71d5c5d37f30dfb2e3afec2f932a9c53eec4fca13869b788c6c \ + --hash=sha256:f3f00feff1fc47a8e4863561b8da8f5e023d382dd31ed3e43cd11d4cae445445 \ + --hash=sha256:f3f08a2ca9f600b3758792ba2406971665ffbad810847398d180c48cee174ee2 \ + --hash=sha256:f58bb4956917715ec4d5a28681829a8dad5c342cafd4aea269f9132a83ca9b34 \ + --hash=sha256:f733a83d8355fc95561582b66bbea92ffd365c5d7a665bc9ebd25e049c2b2abb \ + --hash=sha256:f9691be48d98c49ef8f498460278884c666b44129222ed6217477dffad5d4831 \ + --hash=sha256:f9e8b1605ab60ed43803100f067eed71b0b0e6c1fb9860a262727dbfbbb74751 + # via 
docling-core +tree-sitter-c==0.23.4 \ + --hash=sha256:013403e74765d74e523f380f9df8f3d99e9fe94132a3fc0c8b29cba538a7b2bf \ + --hash=sha256:2c92c0571b36b6da06f8882f34151dc11e67a493e9101cc0026a16da27709c05 \ + --hash=sha256:5e42a3519825ca59c91b2b7aec08dd3c89e02690c7b315d54a1e1743f9be3f15 \ + --hash=sha256:9215c7888dd019038f162ea5646178f6e129cd2b49fc506d14becf5e426121d7 \ + --hash=sha256:98c285a23bf4fb6fb34140d6ea0f0d25d0a93e0d93692f9dffe3db6d1fe08534 \ + --hash=sha256:a4d7bdeaca8f1da72352a945853f56aa5d34e7bc22569ec5bda5d7c1a04e5b0f \ + --hash=sha256:c15c7588c3d95872328019073a8d5eaf7c2691b4d4ef0393a0168399b2ad2356 \ + --hash=sha256:edd36e12cc79b8b5bbc81fc336ff7d2577d0fe16afd18163c9aff7ae3ff69e15 + # via docling-core +tree-sitter-javascript==0.23.1 \ + --hash=sha256:041fa22b34250ea6eb313d33104d5303f79504cb259d374d691e38bbdc49145b \ + --hash=sha256:056dc04fb6b24293f8c5fec43c14e7e16ba2075b3009c643abf8c85edc4c7c3c \ + --hash=sha256:5a6bc1055b061c5055ec58f39ee9b2e9efb8e6e0ae970838af74da0afb811f0a \ + --hash=sha256:6ca583dad4bd79d3053c310b9f7208cd597fd85f9947e4ab2294658bb5c11e35 \ + --hash=sha256:94100e491a6a247aa4d14caf61230c171b6376c863039b6d9cd71255c2d815ec \ + --hash=sha256:a11ca1c0f736da42967586b568dff8a465ee148a986c15ebdc9382806e0ce871 \ + --hash=sha256:b2059ce8b150162cda05a457ca3920450adbf915119c04b8c67b5241cd7fcfed \ + --hash=sha256:eb28130cd2fb30d702d614cbf61ef44d1c7f6869e7d864a9cc17111e370be8f7 + # via docling-core +tree-sitter-python==0.23.6 \ + --hash=sha256:28fbec8f74eeb2b30292d97715e60fac9ccf8a8091ce19b9d93e9b580ed280fb \ + --hash=sha256:29dacdc0cd2f64e55e61d96c6906533ebb2791972bec988450c46cce60092f5d \ + --hash=sha256:354bfa0a2f9217431764a631516f85173e9711af2c13dbd796a8815acfe505d9 \ + --hash=sha256:680b710051b144fedf61c95197db0094f2245e82551bf7f0c501356333571f7a \ + --hash=sha256:71334371bd73d5fe080aed39fbff49ed8efb9506edebe16795b0c7567ed6a272 \ + --hash=sha256:7e048733c36f564b379831689006801feb267d8194f9e793fbb395ef1723335d \ + 
--hash=sha256:8a9dcef55507b6567207e8ee0a6b053d0688019b47ff7f26edc1764b7f4dc0a4 \ + --hash=sha256:a24027248399fb41594b696f929f9956828ae7cc85596d9f775e6c239cd0c2be + # via docling-core +tree-sitter-typescript==0.23.2 \ + --hash=sha256:05db58f70b95ef0ea126db5560f3775692f609589ed6f8dd0af84b7f19f1cbb7 \ + --hash=sha256:3cd752d70d8e5371fdac6a9a4df9d8924b63b6998d268586f7d374c9fba2a478 \ + --hash=sha256:3f730b66396bc3e11811e4465c41ee45d9e9edd6de355a58bbbc49fa770da8f9 \ + --hash=sha256:4b1eed5b0b3a8134e86126b00b743d667ec27c63fc9de1b7bb23168803879e31 \ + --hash=sha256:7b167b5827c882261cb7a50dfa0fb567975f9b315e87ed87ad0a0a3aedb3834d \ + --hash=sha256:8d4f0f9bcb61ad7b7509d49a1565ff2cc363863644a234e1e0fe10960e55aea0 \ + --hash=sha256:c7cc1b0ff5d91bac863b0e38b1578d5505e718156c9db577c8baea2557f66de8 \ + --hash=sha256:e96d36b85bcacdeb8ff5c2618d75593ef12ebaf1b4eace3477e2bdb2abb1752c + # via docling-core +trino==0.336.0 \ + --hash=sha256:389150841446949119c3c2c13c1a51bb4be1a27818e40ae40dd3701f36c02550 \ + --hash=sha256:e82339e9fffe5c6c51de3bfdf28f083e3ae5945a4502739ab2094a0d08d68070 + # via feast (setup.py) +typeguard==4.4.4 \ + --hash=sha256:3a7fd2dffb705d4d0efaed4306a704c89b9dee850b688f060a8b1615a79e5f74 \ + --hash=sha256:b5f562281b6bfa1f5492470464730ef001646128b180769880468bd84b68b09e + # via feast (setup.py) +typer==0.12.5 \ + --hash=sha256:62fe4e471711b147e3365034133904df3e235698399bc4de2b36c8579298d52b \ + --hash=sha256:f592f089bedcc8ec1b974125d64851029c3b1af145f04aca64d69410f0c9b722 + # via + # docling + # docling-core + # fastapi-mcp +types-cffi==1.17.0.20250915 \ + --hash=sha256:4362e20368f78dabd5c56bca8004752cc890e07a71605d9e0d9e069dbaac8c06 \ + --hash=sha256:cef4af1116c83359c11bb4269283c50f0688e9fc1d7f0eeb390f3661546da52c + # via types-pyopenssl +types-protobuf==3.19.22 \ + --hash=sha256:d291388678af91bb045fafa864f142dc4ac22f5d4cdca097c7d8d8a32fa9b3ab \ + --hash=sha256:d2b26861b0cb46a3c8669b0df507b7ef72e487da66d61f9f3576aa76ce028a83 + # via + # feast (setup.py) + # 
mypy-protobuf +types-pymysql==1.1.0.20251220 \ + --hash=sha256:ae1c3df32a777489431e2e9963880a0df48f6591e0aa2fd3a6fabd9dee6eca54 \ + --hash=sha256:fa1082af7dea6c53b6caa5784241924b1296ea3a8d3bd060417352c5e10c0618 + # via feast (setup.py) +types-pyopenssl==24.1.0.20240722 \ + --hash=sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39 \ + --hash=sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54 + # via types-redis +types-python-dateutil==2.9.0.20251115 \ + --hash=sha256:8a47f2c3920f52a994056b8786309b43143faa5a64d4cbb2722d6addabdf1a58 \ + --hash=sha256:9cf9c1c582019753b8639a081deefd7e044b9fa36bd8217f565c6c4e36ee0624 + # via feast (setup.py) +types-pytz==2025.2.0.20251108 \ + --hash=sha256:0f1c9792cab4eb0e46c52f8845c8f77cf1e313cb3d68bf826aa867fe4717d91c \ + --hash=sha256:fca87917836ae843f07129567b74c1929f1870610681b4c92cb86a3df5817bdb + # via feast (setup.py) +types-pyyaml==6.0.12.20250915 \ + --hash=sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3 \ + --hash=sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6 + # via feast (setup.py) +types-redis==4.6.0.20241004 \ + --hash=sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e \ + --hash=sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed + # via feast (setup.py) +types-requests==2.30.0.0 \ + --hash=sha256:c6cf08e120ca9f0dc4fa4e32c3f953c3fba222bcc1db6b97695bce8da1ba9864 \ + --hash=sha256:dec781054324a70ba64430ae9e62e7e9c8e4618c185a5cb3f87a6738251b5a31 + # via feast (setup.py) +types-setuptools==80.9.0.20251223 \ + --hash=sha256:1b36db79d724c2287d83dc052cf887b47c0da6a2fff044378be0b019545f56e6 \ + --hash=sha256:d3411059ae2f5f03985217d86ac6084efea2c9e9cacd5f0869ef950f308169b2 + # via + # feast (setup.py) + # types-cffi +types-tabulate==0.9.0.20241207 \ + --hash=sha256:ac1ac174750c0a385dfd248edc6279fa328aaf4ea317915ab879a2ec47833230 \ + 
--hash=sha256:b8dad1343c2a8ba5861c5441370c3e35908edd234ff036d4298708a1d4cf8a85 + # via feast (setup.py) +types-urllib3==1.26.25.14 \ + --hash=sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f \ + --hash=sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e + # via types-requests +typing-extensions==4.15.0 \ + --hash=sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466 \ + --hash=sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548 + # via + # aiosignal + # anyio + # azure-core + # azure-identity + # azure-storage-blob + # beautifulsoup4 + # docling-core + # elasticsearch + # fastapi + # great-expectations + # huggingface-hub + # ibis-framework + # ipython + # jwcrypto + # mcp + # minio + # mypy + # opentelemetry-api + # opentelemetry-sdk + # opentelemetry-semantic-conventions + # psycopg + # psycopg-pool + # pydantic + # pydantic-core + # pyopenssl + # python-docx + # python-pptx + # referencing + # snowflake-connector-python + # sqlalchemy + # starlette + # testcontainers + # torch + # typeguard + # typer + # typing-inspection +typing-inspection==0.4.2 \ + --hash=sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7 \ + --hash=sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464 + # via + # mcp + # pydantic + # pydantic-settings +tzdata==2025.3 \ + --hash=sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1 \ + --hash=sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7 + # via + # arrow + # pandas +tzlocal==5.3.1 \ + --hash=sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd \ + --hash=sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d + # via + # great-expectations + # trino +ujson==5.11.0 \ + --hash=sha256:0180a480a7d099082501cad1fe85252e4d4bf926b40960fb3d9e87a3a6fbbc80 \ + --hash=sha256:04c41afc195fd477a59db3a84d5b83a871bd648ef371cf8c6f43072d89144eef \ + 
--hash=sha256:0654a2691fc252c3c525e3d034bb27b8a7546c9d3eb33cd29ce6c9feda361a6a \ + --hash=sha256:090b4d11b380ae25453100b722d0609d5051ffe98f80ec52853ccf8249dfd840 \ + --hash=sha256:109f59885041b14ee9569bf0bb3f98579c3fa0652317b355669939e5fc5ede53 \ + --hash=sha256:10f29e71ecf4ecd93a6610bd8efa8e7b6467454a363c3d6416db65de883eb076 \ + --hash=sha256:1194b943e951092db611011cb8dbdb6cf94a3b816ed07906e14d3bc6ce0e90ab \ + --hash=sha256:12b5e7e22a1fe01058000d1b317d3b65cc3daf61bd2ea7a2b76721fe160fa74d \ + --hash=sha256:16ccb973b7ada0455201808ff11d48fe9c3f034a6ab5bd93b944443c88299f89 \ + --hash=sha256:181fb5b15703a8b9370b25345d2a1fd1359f0f18776b3643d24e13ed9c036d4c \ + --hash=sha256:185f93ebccffebc8baf8302c869fac70dd5dd78694f3b875d03a31b03b062cdb \ + --hash=sha256:1a0a9b76a89827a592656fe12e000cf4f12da9692f51a841a4a07aa4c7ecc41c \ + --hash=sha256:1a325fd2c3a056cf6c8e023f74a0c478dd282a93141356ae7f16d5309f5ff823 \ + --hash=sha256:1aa8a2ab482f09f6c10fba37112af5f957689a79ea598399c85009f2f29898b5 \ + --hash=sha256:1d663b96eb34c93392e9caae19c099ec4133ba21654b081956613327f0e973ac \ + --hash=sha256:29113c003ca33ab71b1b480bde952fbab2a0b6b03a4ee4c3d71687cdcbd1a29d \ + --hash=sha256:30f607c70091483550fbd669a0b37471e5165b317d6c16e75dba2aa967608723 \ + --hash=sha256:3134b783ab314d2298d58cda7e47e7a0f7f71fc6ade6ac86d5dbeaf4b9770fa6 \ + --hash=sha256:34032aeca4510a7c7102bd5933f59a37f63891f30a0706fb46487ab6f0edf8f0 \ + --hash=sha256:3772e4fe6b0c1e025ba3c50841a0ca4786825a4894c8411bf8d3afe3a8061328 \ + --hash=sha256:3d2720e9785f84312b8e2cb0c2b87f1a0b1c53aaab3b2af3ab817d54409012e0 \ + --hash=sha256:416389ec19ef5f2013592f791486bef712ebce0cd59299bf9df1ba40bb2f6e04 \ + --hash=sha256:446e8c11c06048611c9d29ef1237065de0af07cabdd97e6b5b527b957692ec25 \ + --hash=sha256:4598bf3965fc1a936bd84034312bcbe00ba87880ef1ee33e33c1e88f2c398b49 \ + --hash=sha256:48055e1061c1bb1f79e75b4ac39e821f3f35a9b82de17fce92c3140149009bec \ + --hash=sha256:4843f3ab4fe1cc596bb7e02228ef4c25d35b4bb0809d6a260852a4bfcab37ba3 \ + 
--hash=sha256:49e56ef8066f11b80d620985ae36869a3ff7e4b74c3b6129182ec5d1df0255f3 \ + --hash=sha256:4b42c115c7c6012506e8168315150d1e3f76e7ba0f4f95616f4ee599a1372bbc \ + --hash=sha256:4c9f5d6a27d035dd90a146f7761c2272cf7103de5127c9ab9c4cd39ea61e878a \ + --hash=sha256:5600202a731af24a25e2d7b6eb3f648e4ecd4bb67c4d5cf12f8fab31677469c9 \ + --hash=sha256:65724738c73645db88f70ba1f2e6fb678f913281804d5da2fd02c8c5839af302 \ + --hash=sha256:65f3c279f4ed4bf9131b11972040200c66ae040368abdbb21596bf1564899694 \ + --hash=sha256:674f306e3e6089f92b126eb2fe41bcb65e42a15432c143365c729fdb50518547 \ + --hash=sha256:683f57f0dd3acdd7d9aff1de0528d603aafcb0e6d126e3dc7ce8b020a28f5d01 \ + --hash=sha256:6b6ec7e7321d7fc19abdda3ad809baef935f49673951a8bab486aea975007e02 \ + --hash=sha256:6cd2df62f24c506a0ba322d5e4fe4466d47a9467b57e881ee15a31f7ecf68ff6 \ + --hash=sha256:6dd703c3e86dc6f7044c5ac0b3ae079ed96bf297974598116aa5fb7f655c3a60 \ + --hash=sha256:6eff24e1abd79e0ec6d7eae651dd675ddbc41f9e43e29ef81e16b421da896915 \ + --hash=sha256:7855ccea3f8dad5e66d8445d754fc1cf80265a4272b5f8059ebc7ec29b8d0835 \ + --hash=sha256:787aff4a84da301b7f3bac09bc696e2e5670df829c6f8ecf39916b4e7e24e701 \ + --hash=sha256:7895f0d2d53bd6aea11743bd56e3cb82d729980636cd0ed9b89418bf66591702 \ + --hash=sha256:78c684fb21255b9b90320ba7e199780f653e03f6c2528663768965f4126a5b50 \ + --hash=sha256:7e0ec1646db172beb8d3df4c32a9d78015e671d2000af548252769e33079d9a6 \ + --hash=sha256:7e3cff632c1d78023b15f7e3a81c3745cd3f94c044d1e8fa8efbd6b161997bbc \ + --hash=sha256:7f1a27ab91083b4770e160d17f61b407f587548f2c2b5fbf19f94794c495594a \ + --hash=sha256:80017e870d882d5517d28995b62e4e518a894f932f1e242cbc802a2fd64d365c \ + --hash=sha256:8254e858437c00f17cb72e7a644fc42dad0ebb21ea981b71df6e84b1072aaa7c \ + --hash=sha256:837da4d27fed5fdc1b630bd18f519744b23a0b5ada1bbde1a36ba463f2900c03 \ + --hash=sha256:849e65b696f0d242833f1df4182096cedc50d414215d1371fca85c541fbff629 \ + --hash=sha256:85e6796631165f719084a9af00c79195d3ebf108151452fefdcb1c8bb50f0105 \ + 
--hash=sha256:86baf341d90b566d61a394869ce77188cc8668f76d7bb2c311d77a00f4bdf844 \ + --hash=sha256:8fa2af7c1459204b7a42e98263b069bd535ea0cd978b4d6982f35af5a04a4241 \ + --hash=sha256:94fcae844f1e302f6f8095c5d1c45a2f0bfb928cccf9f1b99e3ace634b980a2a \ + --hash=sha256:952c0be400229940248c0f5356514123d428cba1946af6fa2bbd7503395fef26 \ + --hash=sha256:99c49400572cd77050894e16864a335225191fd72a818ea6423ae1a06467beac \ + --hash=sha256:9aacbeb23fdbc4b256a7d12e0beb9063a1ba5d9e0dbb2cfe16357c98b4334596 \ + --hash=sha256:a0af6574fc1d9d53f4ff371f58c96673e6d988ed2b5bf666a6143c782fa007e9 \ + --hash=sha256:a31c6b8004438e8c20fc55ac1c0e07dad42941db24176fe9acf2815971f8e752 \ + --hash=sha256:a4df61a6df0a4a8eb5b9b1ffd673429811f50b235539dac586bb7e9e91994138 \ + --hash=sha256:a638425d3c6eed0318df663df44480f4a40dc87cc7c6da44d221418312f6413b \ + --hash=sha256:aa6b3d4f1c0d3f82930f4cbd7fe46d905a4a9205a7c13279789c1263faf06dba \ + --hash=sha256:aa6d7a5e09217ff93234e050e3e380da62b084e26b9f2e277d2606406a2fc2e5 \ + --hash=sha256:ab2cb8351d976e788669c8281465d44d4e94413718af497b4e7342d7b2f78018 \ + --hash=sha256:abae0fb58cc820092a0e9e8ba0051ac4583958495bfa5262a12f628249e3b362 \ + --hash=sha256:b16930f6a0753cdc7d637b33b4e8f10d5e351e1fb83872ba6375f1e87be39746 \ + --hash=sha256:b7b136cc6abc7619124fd897ef75f8e63105298b5ca9bdf43ebd0e1fa0ee105f \ + --hash=sha256:be6b0eaf92cae8cdee4d4c9e074bde43ef1c590ed5ba037ea26c9632fb479c88 \ + --hash=sha256:c44c703842024d796b4c78542a6fcd5c3cb948b9fc2a73ee65b9c86a22ee3638 \ + --hash=sha256:c6618f480f7c9ded05e78a1938873fde68baf96cdd74e6d23c7e0a8441175c4b \ + --hash=sha256:ce076f2df2e1aa62b685086fbad67f2b1d3048369664b4cdccc50707325401f9 \ + --hash=sha256:d06e87eded62ff0e5f5178c916337d2262fdbc03b31688142a3433eabb6511db \ + --hash=sha256:d7c46cb0fe5e7056b9acb748a4c35aa1b428025853032540bb7e41f46767321f \ + --hash=sha256:d8951bb7a505ab2a700e26f691bdfacf395bc7e3111e3416d325b513eea03a58 \ + --hash=sha256:da473b23e3a54448b008d33f742bcd6d5fb2a897e42d1fc6e7bf306ea5d18b1b \ + 
--hash=sha256:de6e88f62796372fba1de973c11138f197d3e0e1d80bcb2b8aae1e826096d433 \ + --hash=sha256:e204ae6f909f099ba6b6b942131cee359ddda2b6e4ea39c12eb8b991fe2010e0 \ + --hash=sha256:e73df8648c9470af2b6a6bf5250d4744ad2cf3d774dcf8c6e31f018bdd04d764 \ + --hash=sha256:e750c436fb90edf85585f5c62a35b35082502383840962c6983403d1bd96a02c \ + --hash=sha256:e979fbc469a7f77f04ec2f4e853ba00c441bf2b06720aa259f0f720561335e34 \ + --hash=sha256:ecd6ff8a3b5a90c292c2396c2d63c687fd0ecdf17de390d852524393cd9ed052 \ + --hash=sha256:f278b31a7c52eb0947b2db55a5133fbc46b6f0ef49972cd1a80843b72e135aba \ + --hash=sha256:f62b9976fabbcde3ab6e413f4ec2ff017749819a0786d84d7510171109f2d53c \ + --hash=sha256:fa79fdb47701942c2132a9dd2297a1a85941d966d8c87bfd9e29b0cf423f26cc \ + --hash=sha256:fac6c0649d6b7c3682a0a6e18d3de6857977378dce8d419f57a0b20e3d775b39 + # via pymilvus +uri-template==1.3.0 \ + --hash=sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7 \ + --hash=sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363 + # via jsonschema +urllib3==2.6.3 \ + --hash=sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed \ + --hash=sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4 + # via + # feast (setup.py) + # botocore + # clickhouse-connect + # docker + # elastic-transport + # great-expectations + # kubernetes + # minio + # qdrant-client + # requests + # responses + # testcontainers +uvicorn[standard]==0.34.0 \ + --hash=sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4 \ + --hash=sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9 + # via + # feast (setup.py) + # fastapi-mcp + # mcp + # uvicorn-worker +uvicorn-worker==0.3.0 \ + --hash=sha256:6baeab7b2162ea6b9612cbe149aa670a76090ad65a267ce8e27316ed13c7de7b \ + --hash=sha256:ef0fe8aad27b0290a9e602a256b03f5a5da3a9e5f942414ca587b645ec77dd52 + # via feast (setup.py) +uvloop==0.22.1 \ + 
--hash=sha256:017bd46f9e7b78e81606329d07141d3da446f8798c6baeec124260e22c262772 \ + --hash=sha256:0530a5fbad9c9e4ee3f2b33b148c6a64d47bbad8000ea63704fa8260f4cf728e \ + --hash=sha256:05e4b5f86e621cf3927631789999e697e58f0d2d32675b67d9ca9eb0bca55743 \ + --hash=sha256:0ae676de143db2b2f60a9696d7eca5bb9d0dd6cc3ac3dad59a8ae7e95f9e1b54 \ + --hash=sha256:1489cf791aa7b6e8c8be1c5a080bae3a672791fcb4e9e12249b05862a2ca9cec \ + --hash=sha256:17d4e97258b0172dfa107b89aa1eeba3016f4b1974ce85ca3ef6a66b35cbf659 \ + --hash=sha256:1cdf5192ab3e674ca26da2eada35b288d2fa49fdd0f357a19f0e7c4e7d5077c8 \ + --hash=sha256:1f38ec5e3f18c8a10ded09742f7fb8de0108796eb673f30ce7762ce1b8550cad \ + --hash=sha256:286322a90bea1f9422a470d5d2ad82d38080be0a29c4dd9b3e6384320a4d11e7 \ + --hash=sha256:297c27d8003520596236bdb2335e6b3f649480bd09e00d1e3a99144b691d2a35 \ + --hash=sha256:37554f70528f60cad66945b885eb01f1bb514f132d92b6eeed1c90fd54ed6289 \ + --hash=sha256:3879b88423ec7e97cd4eba2a443aa26ed4e59b45e6b76aabf13fe2f27023a142 \ + --hash=sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77 \ + --hash=sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733 \ + --hash=sha256:481c990a7abe2c6f4fc3d98781cc9426ebd7f03a9aaa7eb03d3bfc68ac2a46bd \ + --hash=sha256:4a968a72422a097b09042d5fa2c5c590251ad484acf910a651b4b620acd7f193 \ + --hash=sha256:4baa86acedf1d62115c1dc6ad1e17134476688f08c6efd8a2ab076e815665c74 \ + --hash=sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0 \ + --hash=sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6 \ + --hash=sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473 \ + --hash=sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21 \ + --hash=sha256:55502bc2c653ed2e9692e8c55cb95b397d33f9f2911e929dc97c4d6b26d04242 \ + --hash=sha256:561577354eb94200d75aca23fbde86ee11be36b00e52a4eaf8f50fb0c86b7705 \ + --hash=sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702 \ + 
--hash=sha256:57df59d8b48feb0e613d9b1f5e57b7532e97cbaf0d61f7aa9aa32221e84bc4b6 \ + --hash=sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f \ + --hash=sha256:6cde23eeda1a25c75b2e07d39970f3374105d5eafbaab2a4482be82f272d5a5e \ + --hash=sha256:6e2ea3d6190a2968f4a14a23019d3b16870dd2190cd69c8180f7c632d21de68d \ + --hash=sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370 \ + --hash=sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4 \ + --hash=sha256:7cd375a12b71d33d46af85a3343b35d98e8116134ba404bd657b3b1d15988792 \ + --hash=sha256:80eee091fe128e425177fbd82f8635769e2f32ec9daf6468286ec57ec0313efa \ + --hash=sha256:93f617675b2d03af4e72a5333ef89450dfaa5321303ede6e67ba9c9d26878079 \ + --hash=sha256:a592b043a47ad17911add5fbd087c76716d7c9ccc1d64ec9249ceafd735f03c2 \ + --hash=sha256:ac33ed96229b7790eb729702751c0e93ac5bc3bcf52ae9eccbff30da09194b86 \ + --hash=sha256:b31dc2fccbd42adc73bc4e7cdbae4fc5086cf378979e53ca5d0301838c5682c6 \ + --hash=sha256:b45649628d816c030dba3c80f8e2689bab1c89518ed10d426036cdc47874dfc4 \ + --hash=sha256:b76324e2dc033a0b2f435f33eb88ff9913c156ef78e153fb210e03c13da746b3 \ + --hash=sha256:b91328c72635f6f9e0282e4a57da7470c7350ab1c9f48546c0f2866205349d21 \ + --hash=sha256:badb4d8e58ee08dad957002027830d5c3b06aea446a6a3744483c2b3b745345c \ + --hash=sha256:bc5ef13bbc10b5335792360623cc378d52d7e62c2de64660616478c32cd0598e \ + --hash=sha256:c1955d5a1dd43198244d47664a5858082a3239766a839b2102a269aaff7a4e25 \ + --hash=sha256:c3e5c6727a57cb6558592a95019e504f605d1c54eb86463ee9f7a2dbd411c820 \ + --hash=sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9 \ + --hash=sha256:daf620c2995d193449393d6c62131b3fbd40a63bf7b307a1527856ace637fe88 \ + --hash=sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2 \ + --hash=sha256:ea721dd3203b809039fcc2983f14608dae82b212288b346e0bfe46ec2fab0b7c \ + --hash=sha256:ef6f0d4cc8a9fa1f6a910230cd53545d9a14479311e87e3cb225495952eb672c \ + 
--hash=sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42 + # via uvicorn +virtualenv==20.23.0 \ + --hash=sha256:6abec7670e5802a528357fdc75b26b9f57d5d92f29c5462ba0fbe45feacc685e \ + --hash=sha256:a85caa554ced0c0afbd0d638e7e2d7b5f92d23478d05d17a76daeac8f279f924 + # via + # feast (setup.py) + # pre-commit + # ray +watchfiles==1.1.1 \ + --hash=sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c \ + --hash=sha256:03fa0f5237118a0c5e496185cafa92878568b652a2e9a9382a5151b1a0380a43 \ + --hash=sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510 \ + --hash=sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0 \ + --hash=sha256:08af70fd77eee58549cd69c25055dc344f918d992ff626068242259f98d598a2 \ + --hash=sha256:0b495de0bb386df6a12b18335a0285dda90260f51bdb505503c02bcd1ce27a8b \ + --hash=sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18 \ + --hash=sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219 \ + --hash=sha256:17ef139237dfced9da49fb7f2232c86ca9421f666d78c264c7ffca6601d154c3 \ + --hash=sha256:1a0bb430adb19ef49389e1ad368450193a90038b5b752f4ac089ec6942c4dff4 \ + --hash=sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803 \ + --hash=sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94 \ + --hash=sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6 \ + --hash=sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce \ + --hash=sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099 \ + --hash=sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae \ + --hash=sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4 \ + --hash=sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43 \ + --hash=sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd \ + 
--hash=sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10 \ + --hash=sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374 \ + --hash=sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051 \ + --hash=sha256:3bc570d6c01c206c46deb6e935a260be44f186a2f05179f52f7fcd2be086a94d \ + --hash=sha256:3dbd8cbadd46984f802f6d479b7e3afa86c42d13e8f0f322d669d79722c8ec34 \ + --hash=sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49 \ + --hash=sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7 \ + --hash=sha256:3f6d37644155fb5beca5378feb8c1708d5783145f2a0f1c4d5a061a210254844 \ + --hash=sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77 \ + --hash=sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b \ + --hash=sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741 \ + --hash=sha256:4b943d3668d61cfa528eb949577479d3b077fd25fb83c641235437bc0b5bc60e \ + --hash=sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33 \ + --hash=sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42 \ + --hash=sha256:544364b2b51a9b0c7000a4b4b02f90e9423d97fbbf7e06689236443ebcad81ab \ + --hash=sha256:5524298e3827105b61951a29c3512deb9578586abf3a7c5da4a8069df247cccc \ + --hash=sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5 \ + --hash=sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da \ + --hash=sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e \ + --hash=sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05 \ + --hash=sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a \ + --hash=sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d \ + --hash=sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701 \ + --hash=sha256:620bae625f4cb18427b1bb1a2d9426dc0dd5a5ba74c7c2cdb9de405f7b129863 \ + 
--hash=sha256:672b8adf25b1a0d35c96b5888b7b18699d27d4194bac8beeae75be4b7a3fc9b2 \ + --hash=sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101 \ + --hash=sha256:6c3631058c37e4a0ec440bf583bc53cdbd13e5661bb6f465bc1d88ee9a0a4d02 \ + --hash=sha256:6c9c9262f454d1c4d8aaa7050121eb4f3aea197360553699520767daebf2180b \ + --hash=sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6 \ + --hash=sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb \ + --hash=sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620 \ + --hash=sha256:74472234c8370669850e1c312490f6026d132ca2d396abfad8830b4f1c096957 \ + --hash=sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6 \ + --hash=sha256:77a13aea58bc2b90173bc69f2a90de8e282648939a00a602e1dc4ee23e26b66d \ + --hash=sha256:79ff6c6eadf2e3fc0d7786331362e6ef1e51125892c75f1004bd6b52155fb956 \ + --hash=sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef \ + --hash=sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261 \ + --hash=sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02 \ + --hash=sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af \ + --hash=sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9 \ + --hash=sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21 \ + --hash=sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336 \ + --hash=sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d \ + --hash=sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c \ + --hash=sha256:8ca65483439f9c791897f7db49202301deb6e15fe9f8fe2fed555bf986d10c31 \ + --hash=sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81 \ + --hash=sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9 \ + --hash=sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff \ + 
--hash=sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2 \ + --hash=sha256:a36d8efe0f290835fd0f33da35042a1bb5dc0e83cbc092dcf69bce442579e88e \ + --hash=sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc \ + --hash=sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404 \ + --hash=sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01 \ + --hash=sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18 \ + --hash=sha256:acb08650863767cbc58bca4813b92df4d6c648459dcaa3d4155681962b2aa2d3 \ + --hash=sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606 \ + --hash=sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04 \ + --hash=sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3 \ + --hash=sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14 \ + --hash=sha256:b9c4702f29ca48e023ffd9b7ff6b822acdf47cb1ff44cb490a3f1d5ec8987e9c \ + --hash=sha256:bbe1ef33d45bc71cf21364df962af171f96ecaeca06bd9e3d0b583efb12aec82 \ + --hash=sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610 \ + --hash=sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0 \ + --hash=sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150 \ + --hash=sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5 \ + --hash=sha256:c1f5210f1b8fc91ead1283c6fd89f70e76fb07283ec738056cf34d51e9c1d62c \ + --hash=sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a \ + --hash=sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b \ + --hash=sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d \ + --hash=sha256:c882d69f6903ef6092bedfb7be973d9319940d56b8427ab9187d1ecd73438a70 \ + --hash=sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70 \ + --hash=sha256:cdab464fee731e0884c35ae3588514a9bcf718d0e2c82169c1c4a85cc19c3c7f \ + 
--hash=sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24 \ + --hash=sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e \ + --hash=sha256:cf57a27fb986c6243d2ee78392c503826056ffe0287e8794503b10fb51b881be \ + --hash=sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5 \ + --hash=sha256:d6ff426a7cb54f310d51bfe83fe9f2bbe40d540c741dc974ebc30e6aa238f52e \ + --hash=sha256:d7e7067c98040d646982daa1f37a33d3544138ea155536c2e0e63e07ff8a7e0f \ + --hash=sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88 \ + --hash=sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb \ + --hash=sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849 \ + --hash=sha256:e84087b432b6ac94778de547e08611266f1f8ffad28c0ee4c82e028b0fc5966d \ + --hash=sha256:eef58232d32daf2ac67f42dea51a2c80f0d03379075d44a587051e63cc2e368c \ + --hash=sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44 \ + --hash=sha256:f0ab1c1af0cb38e3f598244c17919fb1a84d1629cc08355b0074b6d7f53138ac \ + --hash=sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428 \ + --hash=sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b \ + --hash=sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5 \ + --hash=sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa \ + --hash=sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf + # via uvicorn +wcwidth==0.2.14 \ + --hash=sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605 \ + --hash=sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1 + # via prompt-toolkit +webcolors==25.10.0 \ + --hash=sha256:032c727334856fc0b968f63daa252a1ac93d33db2f5267756623c210e57a4f1d \ + --hash=sha256:62abae86504f66d0f6364c2a8520de4a0c47b80c03fc3a5f1815fedbef7c19bf + # via jsonschema +webencodings==0.5.1 \ + 
--hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ + --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 + # via + # bleach + # tinycss2 +websocket-client==1.9.0 \ + --hash=sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98 \ + --hash=sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef + # via + # jupyter-server + # kubernetes +websockets==16.0 \ + --hash=sha256:0298d07ee155e2e9fda5be8a9042200dd2e3bb0b8a38482156576f863a9d457c \ + --hash=sha256:04cdd5d2d1dacbad0a7bf36ccbcd3ccd5a30ee188f2560b7a62a30d14107b31a \ + --hash=sha256:08d7af67b64d29823fed316505a89b86705f2b7981c07848fb5e3ea3020c1abe \ + --hash=sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e \ + --hash=sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec \ + --hash=sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1 \ + --hash=sha256:1c1b30e4f497b0b354057f3467f56244c603a79c0d1dafce1d16c283c25f6e64 \ + --hash=sha256:2b9f1e0d69bc60a4a87349d50c09a037a2607918746f07de04df9e43252c77a3 \ + --hash=sha256:31a52addea25187bde0797a97d6fc3d2f92b6f72a9370792d65a6e84615ac8a8 \ + --hash=sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206 \ + --hash=sha256:335c23addf3d5e6a8633f9f8eda77efad001671e80b95c491dd0924587ece0b3 \ + --hash=sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156 \ + --hash=sha256:349f83cd6c9a415428ee1005cadb5c2c56f4389bc06a9af16103c3bc3dcc8b7d \ + --hash=sha256:37b31c1623c6605e4c00d466c9d633f9b812ea430c11c8a278774a1fde1acfa9 \ + --hash=sha256:417b28978cdccab24f46400586d128366313e8a96312e4b9362a4af504f3bbad \ + --hash=sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2 \ + --hash=sha256:4a1aba3340a8dca8db6eb5a7986157f52eb9e436b74813764241981ca4888f03 \ + --hash=sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8 \ + 
--hash=sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230 \ + --hash=sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8 \ + --hash=sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea \ + --hash=sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641 \ + --hash=sha256:583b7c42688636f930688d712885cf1531326ee05effd982028212ccc13e5957 \ + --hash=sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6 \ + --hash=sha256:5f451484aeb5cafee1ccf789b1b66f535409d038c56966d6101740c1614b86c6 \ + --hash=sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5 \ + --hash=sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f \ + --hash=sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00 \ + --hash=sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e \ + --hash=sha256:7be95cfb0a4dae143eaed2bcba8ac23f4892d8971311f1b06f3c6b78952ee70b \ + --hash=sha256:7d837379b647c0c4c2355c2499723f82f1635fd2c26510e1f587d89bc2199e72 \ + --hash=sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39 \ + --hash=sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9 \ + --hash=sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79 \ + --hash=sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0 \ + --hash=sha256:8d7f0659570eefb578dacde98e24fb60af35350193e4f56e11190787bee77dac \ + --hash=sha256:8e1dab317b6e77424356e11e99a432b7cb2f3ec8c5ab4dabbcee6add48f72b35 \ + --hash=sha256:8ff32bb86522a9e5e31439a58addbb0166f0204d64066fb955265c4e214160f0 \ + --hash=sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5 \ + --hash=sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c \ + --hash=sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8 \ + --hash=sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1 \ + 
--hash=sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244 \ + --hash=sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3 \ + --hash=sha256:a653aea902e0324b52f1613332ddf50b00c06fdaf7e92624fbf8c77c78fa5767 \ + --hash=sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a \ + --hash=sha256:af80d74d4edfa3cb9ed973a0a5ba2b2a549371f8a741e0800cb07becdd20f23d \ + --hash=sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd \ + --hash=sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e \ + --hash=sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944 \ + --hash=sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82 \ + --hash=sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d \ + --hash=sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4 \ + --hash=sha256:d6297ce39ce5c2e6feb13c1a996a2ded3b6832155fcfc920265c76f24c7cceb5 \ + --hash=sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904 \ + --hash=sha256:df57afc692e517a85e65b72e165356ed1df12386ecb879ad5693be08fac65dde \ + --hash=sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f \ + --hash=sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c \ + --hash=sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89 \ + --hash=sha256:f4a32d1bd841d4bcbffdcb3d2ce50c09c3909fbead375ab28d0181af89fd04da \ + --hash=sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4 + # via uvicorn +werkzeug==3.1.5 \ + --hash=sha256:5111e36e91086ece91f93268bb39b4a35c1e6f1feac762c9c822ded0a4e322dc \ + --hash=sha256:6a548b0e88955dd07ccb25539d7d0cc97417ee9e179677d22c7041c8f078ce67 + # via moto +wheel==0.45.1 \ + --hash=sha256:661e1abd9198507b1409a20c02106d9670b2576e916d58f520316666abca6729 \ + --hash=sha256:708e7481cc80179af0e556bbf0cc00b8444c7321e2700b8d8580231d13017248 + # via + # pip-tools + # 
singlestoredb +widgetsnbextension==4.0.15 \ + --hash=sha256:8156704e4346a571d9ce73b84bee86a29906c9abfd7223b7228a28899ccf3366 \ + --hash=sha256:de8610639996f1567952d763a5a41af8af37f2575a41f9852a38f947eb82a3b9 + # via ipywidgets +wrapt==1.17.3 \ + --hash=sha256:02b551d101f31694fc785e58e0720ef7d9a10c4e62c1c9358ce6f63f23e30a56 \ + --hash=sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828 \ + --hash=sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f \ + --hash=sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396 \ + --hash=sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77 \ + --hash=sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d \ + --hash=sha256:0f5f51a6466667a5a356e6381d362d259125b57f059103dd9fdc8c0cf1d14139 \ + --hash=sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7 \ + --hash=sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb \ + --hash=sha256:1f23fa283f51c890eda8e34e4937079114c74b4c81d2b2f1f1d94948f5cc3d7f \ + --hash=sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f \ + --hash=sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067 \ + --hash=sha256:24c2ed34dc222ed754247a2702b1e1e89fdbaa4016f324b4b8f1a802d4ffe87f \ + --hash=sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7 \ + --hash=sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b \ + --hash=sha256:30ce38e66630599e1193798285706903110d4f057aab3168a34b7fdc85569afc \ + --hash=sha256:33486899acd2d7d3066156b03465b949da3fd41a5da6e394ec49d271baefcf05 \ + --hash=sha256:343e44b2a8e60e06a7e0d29c1671a0d9951f59174f3709962b5143f60a2a98bd \ + --hash=sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7 \ + --hash=sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9 \ + --hash=sha256:3e62d15d3cfa26e3d0788094de7b64efa75f3a53875cdbccdf78547aed547a81 \ + 
--hash=sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977 \ + --hash=sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa \ + --hash=sha256:46acc57b331e0b3bcb3e1ca3b421d65637915cfcd65eb783cb2f78a511193f9b \ + --hash=sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe \ + --hash=sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58 \ + --hash=sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8 \ + --hash=sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77 \ + --hash=sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85 \ + --hash=sha256:55cbbc356c2842f39bcc553cf695932e8b30e30e797f961860afb308e6b1bb7c \ + --hash=sha256:59923aa12d0157f6b82d686c3fd8e1166fa8cdfb3e17b42ce3b6147ff81528df \ + --hash=sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454 \ + --hash=sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a \ + --hash=sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e \ + --hash=sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c \ + --hash=sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6 \ + --hash=sha256:656873859b3b50eeebe6db8b1455e99d90c26ab058db8e427046dbc35c3140a5 \ + --hash=sha256:65d1d00fbfb3ea5f20add88bbc0f815150dbbde3b026e6c24759466c8b5a9ef9 \ + --hash=sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd \ + --hash=sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277 \ + --hash=sha256:70d86fa5197b8947a2fa70260b48e400bf2ccacdcab97bb7de47e3d1e6312225 \ + --hash=sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22 \ + --hash=sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116 \ + --hash=sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16 \ + --hash=sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc \ + 
--hash=sha256:758895b01d546812d1f42204bd443b8c433c44d090248bf22689df673ccafe00 \ + --hash=sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2 \ + --hash=sha256:7e18f01b0c3e4a07fe6dfdb00e29049ba17eadbc5e7609a2a3a4af83ab7d710a \ + --hash=sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804 \ + --hash=sha256:88bbae4d40d5a46142e70d58bf664a89b6b4befaea7b2ecc14e03cedb8e06c04 \ + --hash=sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1 \ + --hash=sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba \ + --hash=sha256:a36692b8491d30a8c75f1dfee65bef119d6f39ea84ee04d9f9311f83c5ad9390 \ + --hash=sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0 \ + --hash=sha256:a7c06742645f914f26c7f1fa47b8bc4c91d222f76ee20116c43d5ef0912bba2d \ + --hash=sha256:a9a2203361a6e6404f80b99234fe7fb37d1fc73487b5a78dc1aa5b97201e0f22 \ + --hash=sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0 \ + --hash=sha256:ad85e269fe54d506b240d2d7b9f5f2057c2aa9a2ea5b32c66f8902f768117ed2 \ + --hash=sha256:af338aa93554be859173c39c85243970dc6a289fa907402289eeae7543e1ae18 \ + --hash=sha256:afd964fd43b10c12213574db492cb8f73b2f0826c8df07a68288f8f19af2ebe6 \ + --hash=sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311 \ + --hash=sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89 \ + --hash=sha256:caea3e9c79d5f0d2c6d9ab96111601797ea5da8e6d0723f77eabb0d4068d2b2f \ + --hash=sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39 \ + --hash=sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4 \ + --hash=sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5 \ + --hash=sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa \ + --hash=sha256:df7d30371a2accfe4013e90445f6388c570f103d61019b6b7c57e0265250072a \ + --hash=sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050 \ + 
--hash=sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6 \ + --hash=sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235 \ + --hash=sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056 \ + --hash=sha256:e6b13af258d6a9ad602d57d889f83b9d5543acd471eee12eb51f5b01f8eb1bc2 \ + --hash=sha256:e6f40a8aa5a92f150bdb3e1c44b7e98fb7113955b2e5394122fa5532fec4b418 \ + --hash=sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c \ + --hash=sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a \ + --hash=sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6 \ + --hash=sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0 \ + --hash=sha256:f9b2601381be482f70e5d1051a5965c25fb3625455a2bf520b5a077b22afb775 \ + --hash=sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10 \ + --hash=sha256:fd341868a4b6714a5962c1af0bd44f7c404ef78720c7de4892901e540417111c + # via + # aiobotocore + # smart-open + # testcontainers +xlsxwriter==3.2.9 \ + --hash=sha256:254b1c37a368c444eac6e2f867405cc9e461b0ed97a3233b2ac1e574efb4140c \ + --hash=sha256:9a5db42bc5dff014806c58a20b9eae7322a134abb6fce3c92c181bfb275ec5b3 + # via python-pptx +xmltodict==1.0.2 \ + --hash=sha256:54306780b7c2175a3967cad1db92f218207e5bc1aba697d887807c0fb68b7649 \ + --hash=sha256:62d0fddb0dcbc9f642745d8bbf4d81fd17d6dfaec5a15b5c1876300aad92af0d + # via moto +xxhash==3.6.0 \ + --hash=sha256:01262da8798422d0685f7cef03b2bd3f4f46511b02830861df548d7def4402ad \ + --hash=sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c \ + --hash=sha256:016e9190af8f0a4e3741343777710e3d5717427f175adfdc3e72508f59e2a7f3 \ + --hash=sha256:01be0c5b500c5362871fc9cfdf58c69b3e5c4f531a82229ddb9eb1eb14138004 \ + --hash=sha256:0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b \ + --hash=sha256:02ea4cb627c76f48cd9fb37cf7ab22bd51e57e1b519807234b473faebe526796 \ + 
--hash=sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f \ + --hash=sha256:08d45aef063a4531b785cd72de4887766d01dc8f362a515693df349fdb825e0c \ + --hash=sha256:0d50101e57aad86f4344ca9b32d091a2135a9d0a4396f19133426c88025b09f1 \ + --hash=sha256:0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1 \ + --hash=sha256:0f7b7e2ec26c1666ad5fc9dbfa426a6a3367ceaf79db5dd76264659d509d73b0 \ + --hash=sha256:1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec \ + --hash=sha256:15e0dac10eb9309508bfc41f7f9deaa7755c69e35af835db9cb10751adebc35d \ + --hash=sha256:18b242455eccdfcd1fa4134c431a30737d2b4f045770f8fe84356b3469d4b919 \ + --hash=sha256:1cf9dcc4ab9cff01dfbba78544297a3a01dafd60f3bde4e2bfd016cf7e4ddc67 \ + --hash=sha256:1fc1ed882d1e8df932a66e2999429ba6cc4d5172914c904ab193381fba825360 \ + --hash=sha256:2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799 \ + --hash=sha256:25915e6000338999236f1eb68a02a32c3275ac338628a7eaa5a269c401995679 \ + --hash=sha256:26734cdc2d4ffe449b41d186bbeac416f704a482ed835d375a5c0cb02bc63fef \ + --hash=sha256:2762bfff264c4e73c0e507274b40634ff465e025f0eaf050897e88ec8367575d \ + --hash=sha256:277175a73900ad43a8caeb8b99b9604f21fe8d7c842f2f9061a364a7e220ddb7 \ + --hash=sha256:297b7fbf86c82c550e12e8fb71968b3f033d27b874276ba3624ea868c11165a8 \ + --hash=sha256:2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa \ + --hash=sha256:2ab89a6b80f22214b43d98693c30da66af910c04f9858dd39c8e570749593d7e \ + --hash=sha256:2b6821e94346f96db75abaa6e255706fb06ebd530899ed76d32cd99f20dc52fa \ + --hash=sha256:2f171a900d59d51511209f7476933c34a0c2c711078d3c80e74e0fe4f38680ec \ + --hash=sha256:339f518c3c7a850dd033ab416ea25a692759dc7478a71131fe8869010d2b75e4 \ + --hash=sha256:39be8e4e142550ef69629c9cd71b88c90e9a5db703fecbcf265546d9536ca4ad \ + --hash=sha256:3cd01fa2aa00d8b017c97eb46b9a794fbdca53fc14f845f5a328c71254b0abb7 \ + --hash=sha256:3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5 \ + 
--hash=sha256:40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11 \ + --hash=sha256:418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae \ + --hash=sha256:42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d \ + --hash=sha256:44e342e8cc11b4e79dae5c57f2fb6360c3c20cc57d32049af8f567f5b4bcb5f4 \ + --hash=sha256:457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6 \ + --hash=sha256:45aae0c9df92e7fa46fbb738737324a563c727990755ec1965a6a339ea10a1df \ + --hash=sha256:48e6f2ffb07a50b52465a1032c3cf1f4a5683f944acaca8a134a2f23674c2058 \ + --hash=sha256:4903530e866b7a9c1eadfd3fa2fbe1b97d3aed4739a80abf506eb9318561c850 \ + --hash=sha256:49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2 \ + --hash=sha256:4a082ffff8c6ac07707fb6b671caf7c6e020c75226c561830b73d862060f281d \ + --hash=sha256:4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89 \ + --hash=sha256:4ccbff013972390b51a18ef1255ef5ac125c92dc9143b2d1909f59abc765540e \ + --hash=sha256:4da8168ae52c01ac64c511d6f4a709479da8b7a4a1d7621ed51652f93747dffa \ + --hash=sha256:4f6f72232f849eb9d0141e2ebe2677ece15adfd0fa599bc058aad83c714bb2c6 \ + --hash=sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb \ + --hash=sha256:51312c768403d8540487dbbfb557454cfc55589bbde6424456951f7fcd4facb3 \ + --hash=sha256:51a73fb7cb3a3ead9f7a8b583ffd9b8038e277cdb8cb87cf890e88b3456afa0b \ + --hash=sha256:5576b002a56207f640636056b4160a378fe36a58db73ae5c27a7ec8db35f71d4 \ + --hash=sha256:568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db \ + --hash=sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119 \ + --hash=sha256:599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec \ + --hash=sha256:5c1343d49ac102799905e115aee590183c3921d475356cb24b4de29a4bc56518 \ + --hash=sha256:5dc1e14d14fa0f5789ec29a7062004b5933964bb9b02aae6622b8f530dc40296 \ + --hash=sha256:5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033 \ + 
--hash=sha256:6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729 \ + --hash=sha256:627f0af069b0ea56f312fd5189001c24578868643203bca1abbc2c52d3a6f3ca \ + --hash=sha256:63275a8aba7865e44b1813d2177e0f5ea7eadad3dd063a21f7cf9afdc7054063 \ + --hash=sha256:653a91d7c2ab54a92c19ccf43508b6a555440b9be1bc8be553376778be7f20b5 \ + --hash=sha256:6551880383f0e6971dc23e512c9ccc986147ce7bfa1cd2e4b520b876c53e9f3d \ + --hash=sha256:6812c25fe0d6c36a46ccb002f40f27ac903bf18af9f6dd8f9669cb4d176ab18f \ + --hash=sha256:6965e0e90f1f0e6cb78da568c13d4a348eeb7f40acfd6d43690a666a459458b8 \ + --hash=sha256:6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42 \ + --hash=sha256:6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e \ + --hash=sha256:757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392 \ + --hash=sha256:780b90c313348f030b811efc37b0fa1431163cb8db8064cf88a7936b6ce5f222 \ + --hash=sha256:78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f \ + --hash=sha256:794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd \ + --hash=sha256:7a0b169aafb98f4284f73635a8e93f0735f9cbde17bd5ec332480484241aaa77 \ + --hash=sha256:7c35c4cdc65f2a29f34425c446f2f5cdcd0e3c34158931e1cc927ece925ab802 \ + --hash=sha256:7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d \ + --hash=sha256:7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1 \ + --hash=sha256:7dac94fad14a3d1c92affb661021e1d5cbcf3876be5f5b4d90730775ccb7ac41 \ + --hash=sha256:843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374 \ + --hash=sha256:858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263 \ + --hash=sha256:87ff03d7e35c61435976554477a7f4cd1704c3596a89a8300d5ce7fc83874a71 \ + --hash=sha256:881b47fc47e051b37d94d13e7455131054b56749b91b508b0907eb07900d1c13 \ + --hash=sha256:89952ea539566b9fed2bbd94e589672794b4286f342254fad28b149f9615fef8 \ + --hash=sha256:8a8f1972e75ebdd161d7896743122834fe87378160c20e97f8b09166213bf8cc \ + 
--hash=sha256:8b29ee68625ab37b04c0b40c3fafdf24d2f75ccd778333cfb698f65f6c463f62 \ + --hash=sha256:8cb2f4f679b01513b7adbb9b1b2f0f9cdc31b70007eaf9d59d0878809f385b11 \ + --hash=sha256:9085e798c163ce310d91f8aa6b325dda3c2944c93c6ce1edb314030d4167cc65 \ + --hash=sha256:9176dcaddf4ca963d4deb93866d739a343c01c969231dbe21680e13a5d1a5bf0 \ + --hash=sha256:929142361a48ee07f09121fe9e96a84950e8d4df3bb298ca5d88061969f34d7b \ + --hash=sha256:93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2 \ + --hash=sha256:97460eec202017f719e839a0d3551fbc0b2fcc9c6c6ffaa5af85bbd5de432788 \ + --hash=sha256:9b3222c686a919a0f3253cfc12bb118b8b103506612253b5baeaac10d8027cf6 \ + --hash=sha256:9e040d3e762f84500961791fa3709ffa4784d4dcd7690afc655c095e02fff05f \ + --hash=sha256:a034590a727b44dd8ac5914236a7b8504144447a9682586c3327e935f33ec8cc \ + --hash=sha256:a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e \ + --hash=sha256:a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702 \ + --hash=sha256:a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405 \ + --hash=sha256:a756fe893389483ee8c394d06b5ab765d96e68fbbfe6fde7aa17e11f5720559f \ + --hash=sha256:a75ffc1bd5def584129774c158e108e5d768e10b75813f2b32650bb041066ed6 \ + --hash=sha256:a87f271a33fad0e5bf3be282be55d78df3a45ae457950deb5241998790326f87 \ + --hash=sha256:a881851cf38b0a70e7c4d3ce81fc7afd86fbc2a024f4cfb2a97cf49ce04b75d3 \ + --hash=sha256:aa912c62f842dfd013c5f21a642c9c10cd9f4c4e943e0af83618b4a404d9091a \ + --hash=sha256:aed058764db109dc9052720da65fafe84873b05eb8b07e5e653597951af57c3b \ + --hash=sha256:af1f3278bd02814d6dedc5dec397993b549d6f16c19379721e5a1d31e132c49b \ + --hash=sha256:b0359391c3dad6de872fefb0cf5b69d55b0655c55ee78b1bb7a568979b2ce96b \ + --hash=sha256:b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8 \ + --hash=sha256:b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db \ + --hash=sha256:b465afd7909db30168ab62afe40b2fcf79eedc0b89a6c0ab3123515dc0df8b99 \ + 
--hash=sha256:b47bbd8cf2d72797f3c2772eaaac0ded3d3af26481a26d7d7d41dc2d3c46b04a \ + --hash=sha256:b5b848ad6c16d308c3ac7ad4ba6bede80ed5df2ba8ed382f8932df63158dd4b2 \ + --hash=sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204 \ + --hash=sha256:b9c6df83594f7df8f7f708ce5ebeacfc69f72c9fbaaababf6cf4758eaada0c9b \ + --hash=sha256:ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546 \ + --hash=sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95 \ + --hash=sha256:bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9 \ + --hash=sha256:bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54 \ + --hash=sha256:bf48889c9630542d4709192578aebbd836177c9f7a4a2778a7d6340107c65f06 \ + --hash=sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c \ + --hash=sha256:c1ce4009c97a752e682b897aa99aef84191077a9433eb237774689f14f8ec152 \ + --hash=sha256:c2f9ccd5c4be370939a2e17602fbc49995299203da72a3429db013d44d590e86 \ + --hash=sha256:c5294f596a9017ca5a3e3f8884c00b91ab2ad2933cf288f4923c3fd4346cf3d4 \ + --hash=sha256:c5aa639bc113e9286137cec8fadc20e9cd732b2cc385c0b7fa673b84fc1f2a93 \ + --hash=sha256:c6dc31591899f5e5666f04cc2e529e69b4072827085c1ef15294d91a004bc1bd \ + --hash=sha256:c6e193e9f56e4ca4923c61238cdaced324f0feac782544eb4c6d55ad5cc99ddd \ + --hash=sha256:cc604dc06027dbeb8281aeac5899c35fcfe7c77b25212833709f0bff4ce74d2a \ + --hash=sha256:cfbc5b91397c8c2972fdac13fb3e4ed2f7f8ccac85cd2c644887557780a9b6e2 \ + --hash=sha256:d0a9751f71a1a65ce3584e9cae4467651c7e70c9d31017fa57574583a4540248 \ + --hash=sha256:d1927a69feddc24c987b337ce81ac15c4720955b667fe9b588e02254b80446fd \ + --hash=sha256:d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6 \ + --hash=sha256:d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf \ + --hash=sha256:d72f67ef8bf36e05f5b6c65e8524f265bd61071471cd4cf1d36743ebeeeb06b7 \ + --hash=sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490 \ + 
--hash=sha256:dea26ae1eb293db089798d3973a5fc928a18fdd97cc8801226fae705b02b14b0 \ + --hash=sha256:e4ff728a2894e7f436b9e94c667b0f426b9c74b71f900cf37d5468c6b5da0536 \ + --hash=sha256:e82da5670f2d0d98950317f82a0e4a0197150ff19a6df2ba40399c2a3b9ae5fb \ + --hash=sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829 \ + --hash=sha256:ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746 \ + --hash=sha256:ee34327b187f002a596d7b167ebc59a1b729e963ce645964bbc050d2f1b73d07 \ + --hash=sha256:f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292 \ + --hash=sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6 \ + --hash=sha256:f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd \ + --hash=sha256:f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7 \ + --hash=sha256:f572dfd3d0e2eb1a57511831cf6341242f5a9f8298a45862d085f5b93394a27d \ + --hash=sha256:f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0 \ + --hash=sha256:fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee \ + --hash=sha256:ffc578717a347baf25be8397cb10d2528802d24f94cfc005c0e44fef44b5cdd6 + # via datasets +yarl==1.22.0 \ + --hash=sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a \ + --hash=sha256:029866bde8d7b0878b9c160e72305bbf0a7342bcd20b9999381704ae03308dc8 \ + --hash=sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b \ + --hash=sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da \ + --hash=sha256:07a524d84df0c10f41e3ee918846e1974aba4ec017f990dc735aad487a0bdfdf \ + --hash=sha256:088e4e08f033db4be2ccd1f34cf29fe994772fb54cfe004bbf54db320af56890 \ + --hash=sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093 \ + --hash=sha256:0cf71bf877efeac18b38d3930594c0948c82b64547c1cf420ba48722fe5509f6 \ + --hash=sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79 \ + 
--hash=sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683 \ + --hash=sha256:10619d9fdee46d20edc49d3479e2f8269d0779f1b031e6f7c2aa1c76be04b7ed \ + --hash=sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2 \ + --hash=sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff \ + --hash=sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02 \ + --hash=sha256:14291620375b1060613f4aab9ebf21850058b6b1b438f386cc814813d901c60b \ + --hash=sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03 \ + --hash=sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511 \ + --hash=sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c \ + --hash=sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124 \ + --hash=sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c \ + --hash=sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da \ + --hash=sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2 \ + --hash=sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0 \ + --hash=sha256:2e4e1f6f0b4da23e61188676e3ed027ef0baa833a2e633c29ff8530800edccba \ + --hash=sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d \ + --hash=sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53 \ + --hash=sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138 \ + --hash=sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4 \ + --hash=sha256:3aa27acb6de7a23785d81557577491f6c38a5209a254d1191519d07d8fe51748 \ + --hash=sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7 \ + --hash=sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d \ + --hash=sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503 \ + --hash=sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d \ + 
--hash=sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2 \ + --hash=sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa \ + --hash=sha256:437840083abe022c978470b942ff832c3940b2ad3734d424b7eaffcd07f76737 \ + --hash=sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f \ + --hash=sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1 \ + --hash=sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d \ + --hash=sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694 \ + --hash=sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3 \ + --hash=sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a \ + --hash=sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d \ + --hash=sha256:4dcc74149ccc8bba31ce1944acee24813e93cfdee2acda3c172df844948ddf7b \ + --hash=sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a \ + --hash=sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6 \ + --hash=sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b \ + --hash=sha256:595697f68bd1f0c1c159fcb97b661fc9c3f5db46498043555d04805430e79bea \ + --hash=sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5 \ + --hash=sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f \ + --hash=sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df \ + --hash=sha256:5cdac20da754f3a723cceea5b3448e1a2074866406adeb4ef35b469d089adb8f \ + --hash=sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b \ + --hash=sha256:5dbeefd6ca588b33576a01b0ad58aa934bc1b41ef89dee505bf2932b22ddffba \ + --hash=sha256:62441e55958977b8167b2709c164c91a6363e25da322d87ae6dd9c6019ceecf9 \ + --hash=sha256:663e1cadaddae26be034a6ab6072449a8426ddb03d500f43daf952b74553bba0 \ + --hash=sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6 \ + 
--hash=sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b \ + --hash=sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967 \ + --hash=sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2 \ + --hash=sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708 \ + --hash=sha256:6dcbb0829c671f305be48a7227918cfcd11276c2d637a8033a99a02b67bf9eda \ + --hash=sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8 \ + --hash=sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10 \ + --hash=sha256:75976c6945d85dbb9ee6308cd7ff7b1fb9409380c82d6119bd778d8fcfe2931c \ + --hash=sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b \ + --hash=sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028 \ + --hash=sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e \ + --hash=sha256:80ddf7a5f8c86cb3eb4bc9028b07bbbf1f08a96c5c0bc1244be5e8fefcb94147 \ + --hash=sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33 \ + --hash=sha256:84fc3ec96fce86ce5aa305eb4aa9358279d1aa644b71fab7b8ed33fe3ba1a7ca \ + --hash=sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590 \ + --hash=sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c \ + --hash=sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53 \ + --hash=sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74 \ + --hash=sha256:99b6fc1d55782461b78221e95fc357b47ad98b041e8e20f47c1411d0aacddc60 \ + --hash=sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f \ + --hash=sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1 \ + --hash=sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27 \ + --hash=sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520 \ + --hash=sha256:a4fcfc8eb2c34148c118dfa02e6427ca278bfd0f3df7c5f99e33d2c0e81eae3e \ + 
--hash=sha256:a899cbd98dce6f5d8de1aad31cb712ec0a530abc0a86bd6edaa47c1090138467 \ + --hash=sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca \ + --hash=sha256:af74f05666a5e531289cb1cc9c883d1de2088b8e5b4de48004e5ca8a830ac859 \ + --hash=sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273 \ + --hash=sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e \ + --hash=sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601 \ + --hash=sha256:b580e71cac3f8113d3135888770903eaf2f507e9421e5697d6ee6d8cd1c7f054 \ + --hash=sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376 \ + --hash=sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7 \ + --hash=sha256:b85b982afde6df99ecc996990d4ad7ccbdbb70e2a4ba4de0aecde5922ba98a0b \ + --hash=sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb \ + --hash=sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65 \ + --hash=sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784 \ + --hash=sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71 \ + --hash=sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b \ + --hash=sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a \ + --hash=sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c \ + --hash=sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face \ + --hash=sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d \ + --hash=sha256:c7bd6683587567e5a49ee6e336e0612bec8329be1b7d4c8af5687dcdeb67ee1e \ + --hash=sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e \ + --hash=sha256:cb95a9b1adaa48e41815a55ae740cfda005758104049a640a398120bf02515ca \ + --hash=sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9 \ + --hash=sha256:d332fc2e3c94dad927f2112395772a4e4fedbcf8f80efc21ed7cdfae4d574fdb \ + 
--hash=sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95 \ + --hash=sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed \ + --hash=sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf \ + --hash=sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca \ + --hash=sha256:dd7afd3f8b0bfb4e0d9fc3c31bfe8a4ec7debe124cfd90619305def3c8ca8cd2 \ + --hash=sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62 \ + --hash=sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df \ + --hash=sha256:e1b329cb8146d7b736677a2440e422eadd775d1806a81db2d4cded80a48efc1a \ + --hash=sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67 \ + --hash=sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f \ + --hash=sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529 \ + --hash=sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486 \ + --hash=sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a \ + --hash=sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e \ + --hash=sha256:e81fda2fb4a07eda1a2252b216aa0df23ebcd4d584894e9612e80999a78fd95b \ + --hash=sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74 \ + --hash=sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d \ + --hash=sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b \ + --hash=sha256:f0d97c18dfd9a9af4490631905a3f131a8e4c9e80a39353919e2cfed8f00aedc \ + --hash=sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2 \ + --hash=sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e \ + --hash=sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8 \ + --hash=sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82 \ + --hash=sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd \ + 
--hash=sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249 + # via aiohttp +zipp==3.23.0 \ + --hash=sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e \ + --hash=sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166 + # via importlib-metadata +zstandard==0.25.0 \ + --hash=sha256:011d388c76b11a0c165374ce660ce2c8efa8e5d87f34996aa80f9c0816698b64 \ + --hash=sha256:01582723b3ccd6939ab7b3a78622c573799d5d8737b534b86d0e06ac18dbde4a \ + --hash=sha256:05353cef599a7b0b98baca9b068dd36810c3ef0f42bf282583f438caf6ddcee3 \ + --hash=sha256:05df5136bc5a011f33cd25bc9f506e7426c0c9b3f9954f056831ce68f3b6689f \ + --hash=sha256:06acb75eebeedb77b69048031282737717a63e71e4ae3f77cc0c3b9508320df6 \ + --hash=sha256:07b527a69c1e1c8b5ab1ab14e2afe0675614a09182213f21a0717b62027b5936 \ + --hash=sha256:0bbc9a0c65ce0eea3c34a691e3c4b6889f5f3909ba4822ab385fab9057099431 \ + --hash=sha256:0be7622c37c183406f3dbf0cba104118eb16a4ea7359eeb5752f0794882fc250 \ + --hash=sha256:106281ae350e494f4ac8a80470e66d1fe27e497052c8d9c3b95dc4cf1ade81aa \ + --hash=sha256:10ef2a79ab8e2974e2075fb984e5b9806c64134810fac21576f0668e7ea19f8f \ + --hash=sha256:1673b7199bbe763365b81a4f3252b8e80f44c9e323fc42940dc8843bfeaf9851 \ + --hash=sha256:172de1f06947577d3a3005416977cce6168f2261284c02080e7ad0185faeced3 \ + --hash=sha256:181eb40e0b6a29b3cd2849f825e0fa34397f649170673d385f3598ae17cca2e9 \ + --hash=sha256:1869da9571d5e94a85a5e8d57e4e8807b175c9e4a6294e3b66fa4efb074d90f6 \ + --hash=sha256:19796b39075201d51d5f5f790bf849221e58b48a39a5fc74837675d8bafc7362 \ + --hash=sha256:1cd5da4d8e8ee0e88be976c294db744773459d51bb32f707a0f166e5ad5c8649 \ + --hash=sha256:1f3689581a72eaba9131b1d9bdbfe520ccd169999219b41000ede2fca5c1bfdb \ + --hash=sha256:1f830a0dac88719af0ae43b8b2d6aef487d437036468ef3c2ea59c51f9d55fd5 \ + --hash=sha256:223415140608d0f0da010499eaa8ccdb9af210a543fac54bce15babbcfc78439 \ + --hash=sha256:22a06c5df3751bb7dc67406f5374734ccee8ed37fc5981bf1ad7041831fa1137 \ + 
--hash=sha256:22a086cff1b6ceca18a8dd6096ec631e430e93a8e70a9ca5efa7561a00f826fa \ + --hash=sha256:23ebc8f17a03133b4426bcc04aabd68f8236eb78c3760f12783385171b0fd8bd \ + --hash=sha256:25f8f3cd45087d089aef5ba3848cd9efe3ad41163d3400862fb42f81a3a46701 \ + --hash=sha256:2b6bd67528ee8b5c5f10255735abc21aa106931f0dbaf297c7be0c886353c3d0 \ + --hash=sha256:2e54296a283f3ab5a26fc9b8b5d4978ea0532f37b231644f367aa588930aa043 \ + --hash=sha256:3756b3e9da9b83da1796f8809dd57cb024f838b9eeafde28f3cb472012797ac1 \ + --hash=sha256:37daddd452c0ffb65da00620afb8e17abd4adaae6ce6310702841760c2c26860 \ + --hash=sha256:3a39c94ad7866160a4a46d772e43311a743c316942037671beb264e395bdd611 \ + --hash=sha256:3b870ce5a02d4b22286cf4944c628e0f0881b11b3f14667c1d62185a99e04f53 \ + --hash=sha256:3c83b0188c852a47cd13ef3bf9209fb0a77fa5374958b8c53aaa699398c6bd7b \ + --hash=sha256:4203ce3b31aec23012d3a4cf4a2ed64d12fea5269c49aed5e4c3611b938e4088 \ + --hash=sha256:457ed498fc58cdc12fc48f7950e02740d4f7ae9493dd4ab2168a47c93c31298e \ + --hash=sha256:474d2596a2dbc241a556e965fb76002c1ce655445e4e3bf38e5477d413165ffa \ + --hash=sha256:4b14abacf83dfb5c25eb4e4a79520de9e7e205f72c9ee7702f91233ae57d33a2 \ + --hash=sha256:4b6d83057e713ff235a12e73916b6d356e3084fd3d14ced499d84240f3eecee0 \ + --hash=sha256:4d441506e9b372386a5271c64125f72d5df6d2a8e8a2a45a0ae09b03cb781ef7 \ + --hash=sha256:4f187a0bb61b35119d1926aee039524d1f93aaf38a9916b8c4b78ac8514a0aaf \ + --hash=sha256:51526324f1b23229001eb3735bc8c94f9c578b1bd9e867a0a646a3b17109f388 \ + --hash=sha256:53e08b2445a6bc241261fea89d065536f00a581f02535f8122eba42db9375530 \ + --hash=sha256:53f94448fe5b10ee75d246497168e5825135d54325458c4bfffbaafabcc0a577 \ + --hash=sha256:5a56ba0db2d244117ed744dfa8f6f5b366e14148e00de44723413b2f3938a902 \ + --hash=sha256:5f1ad7bf88535edcf30038f6919abe087f606f62c00a87d7e33e7fc57cb69fcc \ + --hash=sha256:5f5e4c2a23ca271c218ac025bd7d635597048b366d6f31f420aaeb715239fc98 \ + --hash=sha256:6a573a35693e03cf1d67799fd01b50ff578515a8aeadd4595d2a7fa9f3ec002a \ + 
--hash=sha256:6c0e5a65158a7946e7a7affa6418878ef97ab66636f13353b8502d7ea03c8097 \ + --hash=sha256:6dffecc361d079bb48d7caef5d673c88c8988d3d33fb74ab95b7ee6da42652ea \ + --hash=sha256:7030defa83eef3e51ff26f0b7bfb229f0204b66fe18e04359ce3474ac33cbc09 \ + --hash=sha256:7149623bba7fdf7e7f24312953bcf73cae103db8cae49f8154dd1eadc8a29ecb \ + --hash=sha256:72d35d7aa0bba323965da807a462b0966c91608ef3a48ba761678cb20ce5d8b7 \ + --hash=sha256:75ffc32a569fb049499e63ce68c743155477610532da1eb38e7f24bf7cd29e74 \ + --hash=sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b \ + --hash=sha256:78228d8a6a1c177a96b94f7e2e8d012c55f9c760761980da16ae7546a15a8e9b \ + --hash=sha256:7b3c3a3ab9daa3eed242d6ecceead93aebbb8f5f84318d82cee643e019c4b73b \ + --hash=sha256:809c5bcb2c67cd0ed81e9229d227d4ca28f82d0f778fc5fea624a9def3963f91 \ + --hash=sha256:81dad8d145d8fd981b2962b686b2241d3a1ea07733e76a2f15435dfb7fb60150 \ + --hash=sha256:85304a43f4d513f5464ceb938aa02c1e78c2943b29f44a750b48b25ac999a049 \ + --hash=sha256:89c4b48479a43f820b749df49cd7ba2dbc2b1b78560ecb5ab52985574fd40b27 \ + --hash=sha256:8e735494da3db08694d26480f1493ad2cf86e99bdd53e8e9771b2752a5c0246a \ + --hash=sha256:913cbd31a400febff93b564a23e17c3ed2d56c064006f54efec210d586171c00 \ + --hash=sha256:9174f4ed06f790a6869b41cba05b43eeb9a35f8993c4422ab853b705e8112bbd \ + --hash=sha256:9300d02ea7c6506f00e627e287e0492a5eb0371ec1670ae852fefffa6164b072 \ + --hash=sha256:933b65d7680ea337180733cf9e87293cc5500cc0eb3fc8769f4d3c88d724ec5c \ + --hash=sha256:9654dbc012d8b06fc3d19cc825af3f7bf8ae242226df5f83936cb39f5fdc846c \ + --hash=sha256:98750a309eb2f020da61e727de7d7ba3c57c97cf6213f6f6277bb7fb42a8e065 \ + --hash=sha256:99c0c846e6e61718715a3c9437ccc625de26593fea60189567f0118dc9db7512 \ + --hash=sha256:a1a4ae2dec3993a32247995bdfe367fc3266da832d82f8438c8570f989753de1 \ + --hash=sha256:a3f79487c687b1fc69f19e487cd949bf3aae653d181dfb5fde3bf6d18894706f \ + --hash=sha256:a4089a10e598eae6393756b036e0f419e8c1d60f44a831520f9af41c14216cf2 \ + 
--hash=sha256:a51ff14f8017338e2f2e5dab738ce1ec3b5a851f23b18c1ae1359b1eecbee6df \ + --hash=sha256:a5a419712cf88862a45a23def0ae063686db3d324cec7edbe40509d1a79a0aab \ + --hash=sha256:a9ec8c642d1ec73287ae3e726792dd86c96f5681eb8df274a757bf62b750eae7 \ + --hash=sha256:aaf21ba8fb76d102b696781bddaa0954b782536446083ae3fdaa6f16b25a1c4b \ + --hash=sha256:ab85470ab54c2cb96e176f40342d9ed41e58ca5733be6a893b730e7af9c40550 \ + --hash=sha256:b9af1fe743828123e12b41dd8091eca1074d0c1569cc42e6e1eee98027f2bbd0 \ + --hash=sha256:bfc4e20784722098822e3eee42b8e576b379ed72cca4a7cb856ae733e62192ea \ + --hash=sha256:bfd06b1c5584b657a2892a6014c2f4c20e0db0208c159148fa78c65f7e0b0277 \ + --hash=sha256:c19bcdd826e95671065f8692b5a4aa95c52dc7a02a4c5a0cac46deb879a017a2 \ + --hash=sha256:c2ba942c94e0691467ab901fc51b6f2085ff48f2eea77b1a48240f011e8247c7 \ + --hash=sha256:c8e167d5adf59476fa3e37bee730890e389410c354771a62e3c076c86f9f7778 \ + --hash=sha256:ca54090275939dc8ec5dea2d2afb400e0f83444b2fc24e07df7fdef677110859 \ + --hash=sha256:d7541afd73985c630bafcd6338d2518ae96060075f9463d7dc14cfb33514383d \ + --hash=sha256:d8c56bb4e6c795fc77d74d8e8b80846e1fb8292fc0b5060cd8131d522974b751 \ + --hash=sha256:da469dc041701583e34de852d8634703550348d5822e66a0c827d39b05365b12 \ + --hash=sha256:daab68faadb847063d0c56f361a289c4f268706b598afbf9ad113cbe5c38b6b2 \ + --hash=sha256:e05ab82ea7753354bb054b92e2f288afb750e6b439ff6ca78af52939ebbc476d \ + --hash=sha256:e09bb6252b6476d8d56100e8147b803befa9a12cea144bbe629dd508800d1ad0 \ + --hash=sha256:e29f0cf06974c899b2c188ef7f783607dbef36da4c242eb6c82dcd8b512855e3 \ + --hash=sha256:e59fdc271772f6686e01e1b3b74537259800f57e24280be3f29c8a0deb1904dd \ + --hash=sha256:e7360eae90809efd19b886e59a09dad07da4ca9ba096752e61a2e03c8aca188e \ + --hash=sha256:e96594a5537722fdfb79951672a2a63aec5ebfb823e7560586f7484819f2a08f \ + --hash=sha256:ea9d54cc3d8064260114a0bbf3479fc4a98b21dffc89b3459edd506b69262f6e \ + --hash=sha256:ec996f12524f88e151c339688c3897194821d7f03081ab35d31d1e12ec975e94 \ + 
--hash=sha256:f27662e4f7dbf9f9c12391cb37b4c4c3cb90ffbd3b1fb9284dadbbb8935fa708 \ + --hash=sha256:f373da2c1757bb7f1acaf09369cdc1d51d84131e50d5fa9863982fd626466313 \ + --hash=sha256:f5aeea11ded7320a84dcdd62a3d95b5186834224a9e55b92ccae35d21a8b63d4 \ + --hash=sha256:f604efd28f239cc21b3adb53eb061e2a205dc164be408e553b41ba2ffe0ca15c \ + --hash=sha256:f67e8f1a324a900e75b5e28ffb152bcac9fbed1cc7b43f99cd90f395c4375344 \ + --hash=sha256:fd7a5004eb1980d3cefe26b2685bcb0b17989901a70a1040d1ac86f1d898c551 \ + --hash=sha256:ffef5a74088f1e09947aecf91011136665152e0b4b359c42be3373897fb39b01 + # via + # clickhouse-connect + # trino diff --git a/sdk/python/requirements/py3.11-requirements.txt b/sdk/python/requirements/py3.11-requirements.txt index 9fc7d9ee07b..263d93d0fb6 100644 --- a/sdk/python/requirements/py3.11-requirements.txt +++ b/sdk/python/requirements/py3.11-requirements.txt @@ -682,6 +682,82 @@ numpy==2.4.2 \ # feast (pyproject.toml) # dask # pandas +orjson==3.11.7 \ + --hash=sha256:043d3006b7d32c7e233b8cfb1f01c651013ea079e08dcef7189a29abd8befe11 \ + --hash=sha256:0527a4510c300e3b406591b0ba69b5dc50031895b0a93743526a3fc45f59d26e \ + --hash=sha256:0724e265bc548af1dedebd9cb3d24b4e1c1e685a343be43e87ba922a5c5fff2f \ + --hash=sha256:136dcd6a2e796dfd9ffca9fc027d778567b0b7c9968d092842d3c323cef88aa8 \ + --hash=sha256:14f440c7268c8f8633d1b3d443a434bd70cb15686117ea6beff8fdc8f5917a1e \ + --hash=sha256:1d98b30cc1313d52d4af17d9c3d307b08389752ec5f2e5febdfada70b0f8c733 \ + --hash=sha256:1eb80451a9c351a71dfaf5b7ccc13ad065405217726b59fdbeadbcc544f9d223 \ + --hash=sha256:1ee5cc7160a821dfe14f130bc8e63e7611051f964b463d9e2a3a573204446a4d \ + --hash=sha256:23d6c20517a97a9daf1d48b580fcdc6f0516c6f4b5038823426033690b4d2650 \ + --hash=sha256:26c3b9132f783b7d7903bf1efb095fed8d4a3a85ec0d334ee8beff3d7a4749d5 \ + --hash=sha256:31c80ce534ac4ea3739c5ee751270646cbc46e45aea7576a38ffec040b4029a1 \ + --hash=sha256:3726be79e36e526e3d9c1aceaadbfb4a04ee80a72ab47b3f3c17fefb9812e7b8 \ + 
--hash=sha256:390a1dce0c055ddf8adb6aa94a73b45a4a7d7177b5c584b8d1c1947f2ba60fb3 \ + --hash=sha256:3a2479753bbb95b0ebcf7969f562cdb9668e6d12416a35b0dda79febf89cdea2 \ + --hash=sha256:3c4bc6c6ac52cdaa267552544c73e486fecbd710b7ac09bc024d5a78555a22f6 \ + --hash=sha256:411ebaf34d735e25e358a6d9e7978954a9c9d58cfb47bc6683cdc3964cd2f910 \ + --hash=sha256:4682d1db3bcebd2b64757e0ddf9e87ae5f00d29d16c5cdf3a62f561d08cc3dd2 \ + --hash=sha256:4a2e9c5be347b937a2e0203866f12bba36082e89b402ddb9e927d5822e43088d \ + --hash=sha256:57036b27ac8a25d81112eb0cc9835cd4833c5b16e1467816adc0015f59e870dc \ + --hash=sha256:5ede977b5fe5ac91b1dffc0a517ca4542d2ec8a6a4ff7b2652d94f640796342a \ + --hash=sha256:5fdfad2093bdd08245f2e204d977facd5f871c88c4a71230d5bcbd0e43bf6222 \ + --hash=sha256:623ad1b9548ef63886319c16fa317848e465a21513b31a6ad7b57443c3e0dcf5 \ + --hash=sha256:652c6c3af76716f4a9c290371ba2e390ede06f6603edb277b481daf37f6f464e \ + --hash=sha256:6543001328aa857187f905308a028935864aefe9968af3848401b6fe80dbb471 \ + --hash=sha256:6e776b998ac37c0396093d10290e60283f59cfe0fc3fccbd0ccc4bd04dd19892 \ + --hash=sha256:71924496986275a737f38e3f22b4e0878882b3f7a310d2ff4dc96e812789120c \ + --hash=sha256:733ae23ada68b804b222c44affed76b39e30806d38660bf1eb200520d259cc16 \ + --hash=sha256:7477aa6a6ec6139c5cb1cc7b214643592169a5494d200397c7fc95d740d5fcf3 \ + --hash=sha256:79cacb0b52f6004caf92405a7e1f11e6e2de8bdf9019e4f76b44ba045125cd6b \ + --hash=sha256:7ba61079379b0ae29e117db13bda5f28d939766e410d321ec1624afc6a0b0504 \ + --hash=sha256:800988273a014a0541483dc81021247d7eacb0c845a9d1a34a422bc718f41539 \ + --hash=sha256:814be4b49b228cfc0b3c565acf642dd7d13538f966e3ccde61f4f55be3e20785 \ + --hash=sha256:845c3e0d8ded9c9271cd79596b9b552448b885b97110f628fb687aee2eed11c1 \ + --hash=sha256:849e38203e5be40b776ed2718e587faf204d184fc9a008ae441f9442320c0cab \ + --hash=sha256:89e13dd3f89f1c38a9c9eba5fbf7cdc2d1feca82f5f290864b4b7a6aac704576 \ + --hash=sha256:89e440ebc74ce8ab5c7bc4ce6757b4a6b1041becb127df818f6997b5c71aa60b \ + 
--hash=sha256:8ff206156006da5b847c9304b6308a01e8cdbc8cce824e2779a5ba71c3def141 \ + --hash=sha256:91c81ef070c8f3220054115e1ef468b1c9ce8497b4e526cb9f68ab4dc0a7ac62 \ + --hash=sha256:9487abc2c2086e7c8eb9a211d2ce8855bae0e92586279d0d27b341d5ad76c85c \ + --hash=sha256:962d046ee1765f74a1da723f4b33e3b228fe3a48bd307acce5021dfefe0e29b2 \ + --hash=sha256:996b65230271f1a97026fd0e6a753f51fbc0c335d2ad0c6201f711b0da32693b \ + --hash=sha256:9b1a67243945819ce55d24a30b59d6a168e86220452d2c96f4d1f093e71c0c49 \ + --hash=sha256:9c0b51672e466fd7e56230ffbae7f1639e18d0ce023351fb75da21b71bc2c960 \ + --hash=sha256:9e54f3808e2b6b945078c41aa8d9b5834b28c50843846e97807e5adb75fa9705 \ + --hash=sha256:a02c833f38f36546ba65a452127633afce4cf0dd7296b753d3bb54e55e5c0174 \ + --hash=sha256:a12b80df61aab7b98b490fe9e4879925ba666fccdfcd175252ce4d9035865ace \ + --hash=sha256:a16bcd08ab0bcdfc7e8801d9c4a9cc17e58418e4d48ddc6ded4e9e4b1a94062b \ + --hash=sha256:a56df3239294ea5964adf074c54bcc4f0ccd21636049a2cf3ca9cf03b5d03cf1 \ + --hash=sha256:a709e881723c9b18acddcfb8ba357322491ad553e277cf467e1e7e20e2d90561 \ + --hash=sha256:ab49d4b2a6a1d415ddb9f37a21e02e0d5dbfe10b7870b21bf779fc21e9156157 \ + --hash=sha256:ae9e0b37a834cef7ce8f99de6498f8fad4a2c0bf6bfc3d02abd8ed56aa15b2de \ + --hash=sha256:b4a9eefdc70bf8bf9857f0290f973dec534ac84c35cd6a7f4083be43e7170a8f \ + --hash=sha256:b63c6e6738d7c3470ad01601e23376aa511e50e1f3931395b9f9c722406d1a67 \ + --hash=sha256:b7b1dae39230a393df353827c855a5f176271c23434cfd2db74e0e424e693e10 \ + --hash=sha256:b8d14b71c0b12963fe8a62aac87119f1afdf4cb88a400f61ca5ae581449efcb5 \ + --hash=sha256:b9f95dcdea9d4f805daa9ddf02617a89e484c6985fa03055459f90e87d7a0757 \ + --hash=sha256:b9fc4d0f81f394689e0814617aadc4f2ea0e8025f38c226cbf22d3b5ddbf025d \ + --hash=sha256:bd03ea7606833655048dab1a00734a2875e3e86c276e1d772b2a02556f0d895f \ + --hash=sha256:bd0d68edd7dfca1b2eca9361a44ac9f24b078de3481003159929a0573f21a6bf \ + --hash=sha256:bda117c4148e81f746655d5a3239ae9bd00cb7bc3ca178b5fc5a5997e9744183 \ + 
--hash=sha256:bf742e149121dc5648ba0a08ea0871e87b660467ef168a3a5e53bc1fbd64bb74 \ + --hash=sha256:c2428d358d85e8da9d37cba18b8c4047c55222007a84f97156a5b22028dfbfc0 \ + --hash=sha256:c2e85fe4698b6a56d5e2ebf7ae87544d668eb6bde1ad1226c13f44663f20ec9e \ + --hash=sha256:c43b8b5bab288b6b90dac410cca7e986a4fa747a2e8f94615aea407da706980d \ + --hash=sha256:cededd6738e1c153530793998e31c05086582b08315db48ab66649768f326baa \ + --hash=sha256:d06e5c5fed5caedd2e540d62e5b1c25e8c82431b9e577c33537e5fa4aa909539 \ + --hash=sha256:d772afdb22555f0c58cfc741bdae44180122b3616faa1ecadb595cd526e4c993 \ + --hash=sha256:d897e81f8d0cbd2abb82226d1860ad2e1ab3ff16d7b08c96ca00df9d45409ef4 \ + --hash=sha256:de0a37f21d0d364954ad5de1970491d7fbd0fb1ef7417d4d56a36dc01ba0c0a0 \ + --hash=sha256:e7745312efa9e11c17fbd3cb3097262d079da26930ae9ae7ba28fb738367cbad \ + --hash=sha256:ed46f17096e28fb28d2975834836a639af7278aa87c84f68ab08fbe5b8bd75fa \ + --hash=sha256:f4f7c956b5215d949a1f65334cf9d7612dde38f20a95f2315deef167def91a6f \ + --hash=sha256:f50979824bde13d32b4320eedd513431c921102796d86be3eee0b58e58a3ecd1 \ + --hash=sha256:f904c24bdeabd4298f7a977ef14ca2a022ca921ed670b92ecd16ab6f3d01f867 + # via feast (pyproject.toml) packaging==26.0 \ --hash=sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4 \ --hash=sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529 diff --git a/sdk/python/requirements/py3.12-ci-requirements.txt b/sdk/python/requirements/py3.12-ci-requirements.txt index e93a6c3784f..04e3c8e86ff 100644 --- a/sdk/python/requirements/py3.12-ci-requirements.txt +++ b/sdk/python/requirements/py3.12-ci-requirements.txt @@ -3141,7 +3141,9 @@ orjson==3.11.7 \ --hash=sha256:f4f7c956b5215d949a1f65334cf9d7612dde38f20a95f2315deef167def91a6f \ --hash=sha256:f50979824bde13d32b4320eedd513431c921102796d86be3eee0b58e58a3ecd1 \ --hash=sha256:f904c24bdeabd4298f7a977ef14ca2a022ca921ed670b92ecd16ab6f3d01f867 - # via trino + # via + # feast (pyproject.toml) + # trino packaging==26.0 \ 
--hash=sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4 \ --hash=sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529 @@ -5710,9 +5712,9 @@ thriftpy2==0.6.0 \ --hash=sha256:f6b86112cca7bd04151ce248d781763ea5f74cc18d148476c6d16cee32db81ac \ --hash=sha256:f837ab85ae93b118766b8b28a1cec47a1daddee303e1f986a595c56379062a5c # via happybase -tifffile==2026.2.15 \ - --hash=sha256:28fe145c615fe3d33d40c2d4c9cc848f7631fd30af852583c4186069458895b2 \ - --hash=sha256:d9b427d269a708c58400e8ce5a702b26b2502087537beb88b8e29ba7ba825a90 +tifffile==2026.2.16 \ + --hash=sha256:9d509a9121431c7228c1f6f71736a73af155bdeb60c324ab09c9eb2e83cfc4b6 \ + --hash=sha256:ea76cb4d8aa290f7f164840dfe4e244d104bd90c84d5ee1e6de6d84fd4745a48 # via scikit-image timm==1.0.24 \ --hash=sha256:8301ac783410c6ad72c73c49326af6d71a9e4d1558238552796e825c2464913f \ diff --git a/sdk/python/requirements/py3.12-minimal-requirements.txt b/sdk/python/requirements/py3.12-minimal-requirements.txt index ca3934a0e20..83d4b266f75 100644 --- a/sdk/python/requirements/py3.12-minimal-requirements.txt +++ b/sdk/python/requirements/py3.12-minimal-requirements.txt @@ -1701,6 +1701,82 @@ oauthlib==3.3.1 \ --hash=sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9 \ --hash=sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1 # via requests-oauthlib +orjson==3.11.7 \ + --hash=sha256:043d3006b7d32c7e233b8cfb1f01c651013ea079e08dcef7189a29abd8befe11 \ + --hash=sha256:0527a4510c300e3b406591b0ba69b5dc50031895b0a93743526a3fc45f59d26e \ + --hash=sha256:0724e265bc548af1dedebd9cb3d24b4e1c1e685a343be43e87ba922a5c5fff2f \ + --hash=sha256:136dcd6a2e796dfd9ffca9fc027d778567b0b7c9968d092842d3c323cef88aa8 \ + --hash=sha256:14f440c7268c8f8633d1b3d443a434bd70cb15686117ea6beff8fdc8f5917a1e \ + --hash=sha256:1d98b30cc1313d52d4af17d9c3d307b08389752ec5f2e5febdfada70b0f8c733 \ + --hash=sha256:1eb80451a9c351a71dfaf5b7ccc13ad065405217726b59fdbeadbcc544f9d223 \ + 
--hash=sha256:1ee5cc7160a821dfe14f130bc8e63e7611051f964b463d9e2a3a573204446a4d \ + --hash=sha256:23d6c20517a97a9daf1d48b580fcdc6f0516c6f4b5038823426033690b4d2650 \ + --hash=sha256:26c3b9132f783b7d7903bf1efb095fed8d4a3a85ec0d334ee8beff3d7a4749d5 \ + --hash=sha256:31c80ce534ac4ea3739c5ee751270646cbc46e45aea7576a38ffec040b4029a1 \ + --hash=sha256:3726be79e36e526e3d9c1aceaadbfb4a04ee80a72ab47b3f3c17fefb9812e7b8 \ + --hash=sha256:390a1dce0c055ddf8adb6aa94a73b45a4a7d7177b5c584b8d1c1947f2ba60fb3 \ + --hash=sha256:3a2479753bbb95b0ebcf7969f562cdb9668e6d12416a35b0dda79febf89cdea2 \ + --hash=sha256:3c4bc6c6ac52cdaa267552544c73e486fecbd710b7ac09bc024d5a78555a22f6 \ + --hash=sha256:411ebaf34d735e25e358a6d9e7978954a9c9d58cfb47bc6683cdc3964cd2f910 \ + --hash=sha256:4682d1db3bcebd2b64757e0ddf9e87ae5f00d29d16c5cdf3a62f561d08cc3dd2 \ + --hash=sha256:4a2e9c5be347b937a2e0203866f12bba36082e89b402ddb9e927d5822e43088d \ + --hash=sha256:57036b27ac8a25d81112eb0cc9835cd4833c5b16e1467816adc0015f59e870dc \ + --hash=sha256:5ede977b5fe5ac91b1dffc0a517ca4542d2ec8a6a4ff7b2652d94f640796342a \ + --hash=sha256:5fdfad2093bdd08245f2e204d977facd5f871c88c4a71230d5bcbd0e43bf6222 \ + --hash=sha256:623ad1b9548ef63886319c16fa317848e465a21513b31a6ad7b57443c3e0dcf5 \ + --hash=sha256:652c6c3af76716f4a9c290371ba2e390ede06f6603edb277b481daf37f6f464e \ + --hash=sha256:6543001328aa857187f905308a028935864aefe9968af3848401b6fe80dbb471 \ + --hash=sha256:6e776b998ac37c0396093d10290e60283f59cfe0fc3fccbd0ccc4bd04dd19892 \ + --hash=sha256:71924496986275a737f38e3f22b4e0878882b3f7a310d2ff4dc96e812789120c \ + --hash=sha256:733ae23ada68b804b222c44affed76b39e30806d38660bf1eb200520d259cc16 \ + --hash=sha256:7477aa6a6ec6139c5cb1cc7b214643592169a5494d200397c7fc95d740d5fcf3 \ + --hash=sha256:79cacb0b52f6004caf92405a7e1f11e6e2de8bdf9019e4f76b44ba045125cd6b \ + --hash=sha256:7ba61079379b0ae29e117db13bda5f28d939766e410d321ec1624afc6a0b0504 \ + --hash=sha256:800988273a014a0541483dc81021247d7eacb0c845a9d1a34a422bc718f41539 \ + 
--hash=sha256:814be4b49b228cfc0b3c565acf642dd7d13538f966e3ccde61f4f55be3e20785 \ + --hash=sha256:845c3e0d8ded9c9271cd79596b9b552448b885b97110f628fb687aee2eed11c1 \ + --hash=sha256:849e38203e5be40b776ed2718e587faf204d184fc9a008ae441f9442320c0cab \ + --hash=sha256:89e13dd3f89f1c38a9c9eba5fbf7cdc2d1feca82f5f290864b4b7a6aac704576 \ + --hash=sha256:89e440ebc74ce8ab5c7bc4ce6757b4a6b1041becb127df818f6997b5c71aa60b \ + --hash=sha256:8ff206156006da5b847c9304b6308a01e8cdbc8cce824e2779a5ba71c3def141 \ + --hash=sha256:91c81ef070c8f3220054115e1ef468b1c9ce8497b4e526cb9f68ab4dc0a7ac62 \ + --hash=sha256:9487abc2c2086e7c8eb9a211d2ce8855bae0e92586279d0d27b341d5ad76c85c \ + --hash=sha256:962d046ee1765f74a1da723f4b33e3b228fe3a48bd307acce5021dfefe0e29b2 \ + --hash=sha256:996b65230271f1a97026fd0e6a753f51fbc0c335d2ad0c6201f711b0da32693b \ + --hash=sha256:9b1a67243945819ce55d24a30b59d6a168e86220452d2c96f4d1f093e71c0c49 \ + --hash=sha256:9c0b51672e466fd7e56230ffbae7f1639e18d0ce023351fb75da21b71bc2c960 \ + --hash=sha256:9e54f3808e2b6b945078c41aa8d9b5834b28c50843846e97807e5adb75fa9705 \ + --hash=sha256:a02c833f38f36546ba65a452127633afce4cf0dd7296b753d3bb54e55e5c0174 \ + --hash=sha256:a12b80df61aab7b98b490fe9e4879925ba666fccdfcd175252ce4d9035865ace \ + --hash=sha256:a16bcd08ab0bcdfc7e8801d9c4a9cc17e58418e4d48ddc6ded4e9e4b1a94062b \ + --hash=sha256:a56df3239294ea5964adf074c54bcc4f0ccd21636049a2cf3ca9cf03b5d03cf1 \ + --hash=sha256:a709e881723c9b18acddcfb8ba357322491ad553e277cf467e1e7e20e2d90561 \ + --hash=sha256:ab49d4b2a6a1d415ddb9f37a21e02e0d5dbfe10b7870b21bf779fc21e9156157 \ + --hash=sha256:ae9e0b37a834cef7ce8f99de6498f8fad4a2c0bf6bfc3d02abd8ed56aa15b2de \ + --hash=sha256:b4a9eefdc70bf8bf9857f0290f973dec534ac84c35cd6a7f4083be43e7170a8f \ + --hash=sha256:b63c6e6738d7c3470ad01601e23376aa511e50e1f3931395b9f9c722406d1a67 \ + --hash=sha256:b7b1dae39230a393df353827c855a5f176271c23434cfd2db74e0e424e693e10 \ + --hash=sha256:b8d14b71c0b12963fe8a62aac87119f1afdf4cb88a400f61ca5ae581449efcb5 \ + 
--hash=sha256:b9f95dcdea9d4f805daa9ddf02617a89e484c6985fa03055459f90e87d7a0757 \ + --hash=sha256:b9fc4d0f81f394689e0814617aadc4f2ea0e8025f38c226cbf22d3b5ddbf025d \ + --hash=sha256:bd03ea7606833655048dab1a00734a2875e3e86c276e1d772b2a02556f0d895f \ + --hash=sha256:bd0d68edd7dfca1b2eca9361a44ac9f24b078de3481003159929a0573f21a6bf \ + --hash=sha256:bda117c4148e81f746655d5a3239ae9bd00cb7bc3ca178b5fc5a5997e9744183 \ + --hash=sha256:bf742e149121dc5648ba0a08ea0871e87b660467ef168a3a5e53bc1fbd64bb74 \ + --hash=sha256:c2428d358d85e8da9d37cba18b8c4047c55222007a84f97156a5b22028dfbfc0 \ + --hash=sha256:c2e85fe4698b6a56d5e2ebf7ae87544d668eb6bde1ad1226c13f44663f20ec9e \ + --hash=sha256:c43b8b5bab288b6b90dac410cca7e986a4fa747a2e8f94615aea407da706980d \ + --hash=sha256:cededd6738e1c153530793998e31c05086582b08315db48ab66649768f326baa \ + --hash=sha256:d06e5c5fed5caedd2e540d62e5b1c25e8c82431b9e577c33537e5fa4aa909539 \ + --hash=sha256:d772afdb22555f0c58cfc741bdae44180122b3616faa1ecadb595cd526e4c993 \ + --hash=sha256:d897e81f8d0cbd2abb82226d1860ad2e1ab3ff16d7b08c96ca00df9d45409ef4 \ + --hash=sha256:de0a37f21d0d364954ad5de1970491d7fbd0fb1ef7417d4d56a36dc01ba0c0a0 \ + --hash=sha256:e7745312efa9e11c17fbd3cb3097262d079da26930ae9ae7ba28fb738367cbad \ + --hash=sha256:ed46f17096e28fb28d2975834836a639af7278aa87c84f68ab08fbe5b8bd75fa \ + --hash=sha256:f4f7c956b5215d949a1f65334cf9d7612dde38f20a95f2315deef167def91a6f \ + --hash=sha256:f50979824bde13d32b4320eedd513431c921102796d86be3eee0b58e58a3ecd1 \ + --hash=sha256:f904c24bdeabd4298f7a977ef14ca2a022ca921ed670b92ecd16ab6f3d01f867 + # via feast (pyproject.toml) packaging==26.0 \ --hash=sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4 \ --hash=sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529 @@ -2715,9 +2791,9 @@ typeguard==4.5.0 \ --hash=sha256:749bea21cdb2553e12831bc29f1eae980b22c7de8331ab67ae7db9e85470b5a7 \ --hash=sha256:cfda388fc88a9ce42a41890900d6f31ee124bea9b73bb84701a32438e92165c3 # via feast 
(pyproject.toml) -typer==0.23.1 \ - --hash=sha256:2070374e4d31c83e7b61362fd859aa683576432fd5b026b060ad6b4cd3b86134 \ - --hash=sha256:3291ad0d3c701cbf522012faccfbb29352ff16ad262db2139e6b01f15781f14e +typer==0.24.0 \ + --hash=sha256:5fc435a9c8356f6160ed6e85a6301fdd6e3d8b2851da502050d1f92c5e9eddc8 \ + --hash=sha256:f9373dc4eff901350694f519f783c29b6d7a110fc0dcc11b1d7e353b85ca6504 # via fastapi-mcp types-pymysql==1.1.0.20251220 \ --hash=sha256:ae1c3df32a777489431e2e9963880a0df48f6591e0aa2fd3a6fabd9dee6eca54 \ diff --git a/sdk/python/requirements/py3.12-minimal-sdist-requirements-build.txt b/sdk/python/requirements/py3.12-minimal-sdist-requirements-build.txt index f90866aad88..b365e051de5 100644 --- a/sdk/python/requirements/py3.12-minimal-sdist-requirements-build.txt +++ b/sdk/python/requirements/py3.12-minimal-sdist-requirements-build.txt @@ -519,6 +519,7 @@ maturin==1.12.2 \ --hash=sha256:f1c2e4ee43bf286b052091a3b2356a157978985837c7aed42354deb2947a4006 # via # cryptography + # orjson # pydantic-core # rpds-py # watchfiles @@ -659,13 +660,13 @@ poetry-core==2.3.1 \ # rich # rsa # tomlkit -pybind11-global==3.0.1 \ - --hash=sha256:0e8d5a68d084c50bf145ce5efdbdd00704dbe6315035d0b7a255708ddeb9faca \ - --hash=sha256:eb6ef4e8f5a60f4c0b6a9396cb7806f78d42f8d081e42a93c0bb62288f6cf403 +pybind11-global==3.0.2 \ + --hash=sha256:00a26be4cd65974133eaae7e7532e7141ccb7a88cd131995bc8d1f652852aaf9 \ + --hash=sha256:e183b4456459c35fbbbc8296eb29e241f6cf0774c0bbc3fc8349789611c6df4b # via pybind11 -pybind11==3.0.1 \ - --hash=sha256:9c0f40056a016da59bab516efb523089139fcc6f2ba7e4930854c61efb932051 \ - --hash=sha256:aa8f0aa6e0a94d3b64adfc38f560f33f15e589be2175e103c0a33c6bce55ee89 +pybind11==3.0.2 \ + --hash=sha256:432f01aeb68e361a3a7fc7575c2c7f497595bf640f747acd909ff238dd766e06 \ + --hash=sha256:f8a6500548919cc33bcd220d5f984688326f574fa97f1107f2f4fdb4c6fb019f # via duckdb pycparser==3.0 \ --hash=sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29 \ diff --git 
a/sdk/python/requirements/py3.12-minimal-sdist-requirements.txt b/sdk/python/requirements/py3.12-minimal-sdist-requirements.txt index 33f812556d2..5a850d1c2d4 100644 --- a/sdk/python/requirements/py3.12-minimal-sdist-requirements.txt +++ b/sdk/python/requirements/py3.12-minimal-sdist-requirements.txt @@ -1865,6 +1865,82 @@ oauthlib==3.3.1 \ --hash=sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9 \ --hash=sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1 # via requests-oauthlib +orjson==3.11.7 \ + --hash=sha256:043d3006b7d32c7e233b8cfb1f01c651013ea079e08dcef7189a29abd8befe11 \ + --hash=sha256:0527a4510c300e3b406591b0ba69b5dc50031895b0a93743526a3fc45f59d26e \ + --hash=sha256:0724e265bc548af1dedebd9cb3d24b4e1c1e685a343be43e87ba922a5c5fff2f \ + --hash=sha256:136dcd6a2e796dfd9ffca9fc027d778567b0b7c9968d092842d3c323cef88aa8 \ + --hash=sha256:14f440c7268c8f8633d1b3d443a434bd70cb15686117ea6beff8fdc8f5917a1e \ + --hash=sha256:1d98b30cc1313d52d4af17d9c3d307b08389752ec5f2e5febdfada70b0f8c733 \ + --hash=sha256:1eb80451a9c351a71dfaf5b7ccc13ad065405217726b59fdbeadbcc544f9d223 \ + --hash=sha256:1ee5cc7160a821dfe14f130bc8e63e7611051f964b463d9e2a3a573204446a4d \ + --hash=sha256:23d6c20517a97a9daf1d48b580fcdc6f0516c6f4b5038823426033690b4d2650 \ + --hash=sha256:26c3b9132f783b7d7903bf1efb095fed8d4a3a85ec0d334ee8beff3d7a4749d5 \ + --hash=sha256:31c80ce534ac4ea3739c5ee751270646cbc46e45aea7576a38ffec040b4029a1 \ + --hash=sha256:3726be79e36e526e3d9c1aceaadbfb4a04ee80a72ab47b3f3c17fefb9812e7b8 \ + --hash=sha256:390a1dce0c055ddf8adb6aa94a73b45a4a7d7177b5c584b8d1c1947f2ba60fb3 \ + --hash=sha256:3a2479753bbb95b0ebcf7969f562cdb9668e6d12416a35b0dda79febf89cdea2 \ + --hash=sha256:3c4bc6c6ac52cdaa267552544c73e486fecbd710b7ac09bc024d5a78555a22f6 \ + --hash=sha256:411ebaf34d735e25e358a6d9e7978954a9c9d58cfb47bc6683cdc3964cd2f910 \ + --hash=sha256:4682d1db3bcebd2b64757e0ddf9e87ae5f00d29d16c5cdf3a62f561d08cc3dd2 \ + 
--hash=sha256:4a2e9c5be347b937a2e0203866f12bba36082e89b402ddb9e927d5822e43088d \ + --hash=sha256:57036b27ac8a25d81112eb0cc9835cd4833c5b16e1467816adc0015f59e870dc \ + --hash=sha256:5ede977b5fe5ac91b1dffc0a517ca4542d2ec8a6a4ff7b2652d94f640796342a \ + --hash=sha256:5fdfad2093bdd08245f2e204d977facd5f871c88c4a71230d5bcbd0e43bf6222 \ + --hash=sha256:623ad1b9548ef63886319c16fa317848e465a21513b31a6ad7b57443c3e0dcf5 \ + --hash=sha256:652c6c3af76716f4a9c290371ba2e390ede06f6603edb277b481daf37f6f464e \ + --hash=sha256:6543001328aa857187f905308a028935864aefe9968af3848401b6fe80dbb471 \ + --hash=sha256:6e776b998ac37c0396093d10290e60283f59cfe0fc3fccbd0ccc4bd04dd19892 \ + --hash=sha256:71924496986275a737f38e3f22b4e0878882b3f7a310d2ff4dc96e812789120c \ + --hash=sha256:733ae23ada68b804b222c44affed76b39e30806d38660bf1eb200520d259cc16 \ + --hash=sha256:7477aa6a6ec6139c5cb1cc7b214643592169a5494d200397c7fc95d740d5fcf3 \ + --hash=sha256:79cacb0b52f6004caf92405a7e1f11e6e2de8bdf9019e4f76b44ba045125cd6b \ + --hash=sha256:7ba61079379b0ae29e117db13bda5f28d939766e410d321ec1624afc6a0b0504 \ + --hash=sha256:800988273a014a0541483dc81021247d7eacb0c845a9d1a34a422bc718f41539 \ + --hash=sha256:814be4b49b228cfc0b3c565acf642dd7d13538f966e3ccde61f4f55be3e20785 \ + --hash=sha256:845c3e0d8ded9c9271cd79596b9b552448b885b97110f628fb687aee2eed11c1 \ + --hash=sha256:849e38203e5be40b776ed2718e587faf204d184fc9a008ae441f9442320c0cab \ + --hash=sha256:89e13dd3f89f1c38a9c9eba5fbf7cdc2d1feca82f5f290864b4b7a6aac704576 \ + --hash=sha256:89e440ebc74ce8ab5c7bc4ce6757b4a6b1041becb127df818f6997b5c71aa60b \ + --hash=sha256:8ff206156006da5b847c9304b6308a01e8cdbc8cce824e2779a5ba71c3def141 \ + --hash=sha256:91c81ef070c8f3220054115e1ef468b1c9ce8497b4e526cb9f68ab4dc0a7ac62 \ + --hash=sha256:9487abc2c2086e7c8eb9a211d2ce8855bae0e92586279d0d27b341d5ad76c85c \ + --hash=sha256:962d046ee1765f74a1da723f4b33e3b228fe3a48bd307acce5021dfefe0e29b2 \ + --hash=sha256:996b65230271f1a97026fd0e6a753f51fbc0c335d2ad0c6201f711b0da32693b \ + 
--hash=sha256:9b1a67243945819ce55d24a30b59d6a168e86220452d2c96f4d1f093e71c0c49 \ + --hash=sha256:9c0b51672e466fd7e56230ffbae7f1639e18d0ce023351fb75da21b71bc2c960 \ + --hash=sha256:9e54f3808e2b6b945078c41aa8d9b5834b28c50843846e97807e5adb75fa9705 \ + --hash=sha256:a02c833f38f36546ba65a452127633afce4cf0dd7296b753d3bb54e55e5c0174 \ + --hash=sha256:a12b80df61aab7b98b490fe9e4879925ba666fccdfcd175252ce4d9035865ace \ + --hash=sha256:a16bcd08ab0bcdfc7e8801d9c4a9cc17e58418e4d48ddc6ded4e9e4b1a94062b \ + --hash=sha256:a56df3239294ea5964adf074c54bcc4f0ccd21636049a2cf3ca9cf03b5d03cf1 \ + --hash=sha256:a709e881723c9b18acddcfb8ba357322491ad553e277cf467e1e7e20e2d90561 \ + --hash=sha256:ab49d4b2a6a1d415ddb9f37a21e02e0d5dbfe10b7870b21bf779fc21e9156157 \ + --hash=sha256:ae9e0b37a834cef7ce8f99de6498f8fad4a2c0bf6bfc3d02abd8ed56aa15b2de \ + --hash=sha256:b4a9eefdc70bf8bf9857f0290f973dec534ac84c35cd6a7f4083be43e7170a8f \ + --hash=sha256:b63c6e6738d7c3470ad01601e23376aa511e50e1f3931395b9f9c722406d1a67 \ + --hash=sha256:b7b1dae39230a393df353827c855a5f176271c23434cfd2db74e0e424e693e10 \ + --hash=sha256:b8d14b71c0b12963fe8a62aac87119f1afdf4cb88a400f61ca5ae581449efcb5 \ + --hash=sha256:b9f95dcdea9d4f805daa9ddf02617a89e484c6985fa03055459f90e87d7a0757 \ + --hash=sha256:b9fc4d0f81f394689e0814617aadc4f2ea0e8025f38c226cbf22d3b5ddbf025d \ + --hash=sha256:bd03ea7606833655048dab1a00734a2875e3e86c276e1d772b2a02556f0d895f \ + --hash=sha256:bd0d68edd7dfca1b2eca9361a44ac9f24b078de3481003159929a0573f21a6bf \ + --hash=sha256:bda117c4148e81f746655d5a3239ae9bd00cb7bc3ca178b5fc5a5997e9744183 \ + --hash=sha256:bf742e149121dc5648ba0a08ea0871e87b660467ef168a3a5e53bc1fbd64bb74 \ + --hash=sha256:c2428d358d85e8da9d37cba18b8c4047c55222007a84f97156a5b22028dfbfc0 \ + --hash=sha256:c2e85fe4698b6a56d5e2ebf7ae87544d668eb6bde1ad1226c13f44663f20ec9e \ + --hash=sha256:c43b8b5bab288b6b90dac410cca7e986a4fa747a2e8f94615aea407da706980d \ + --hash=sha256:cededd6738e1c153530793998e31c05086582b08315db48ab66649768f326baa \ + 
--hash=sha256:d06e5c5fed5caedd2e540d62e5b1c25e8c82431b9e577c33537e5fa4aa909539 \ + --hash=sha256:d772afdb22555f0c58cfc741bdae44180122b3616faa1ecadb595cd526e4c993 \ + --hash=sha256:d897e81f8d0cbd2abb82226d1860ad2e1ab3ff16d7b08c96ca00df9d45409ef4 \ + --hash=sha256:de0a37f21d0d364954ad5de1970491d7fbd0fb1ef7417d4d56a36dc01ba0c0a0 \ + --hash=sha256:e7745312efa9e11c17fbd3cb3097262d079da26930ae9ae7ba28fb738367cbad \ + --hash=sha256:ed46f17096e28fb28d2975834836a639af7278aa87c84f68ab08fbe5b8bd75fa \ + --hash=sha256:f4f7c956b5215d949a1f65334cf9d7612dde38f20a95f2315deef167def91a6f \ + --hash=sha256:f50979824bde13d32b4320eedd513431c921102796d86be3eee0b58e58a3ecd1 \ + --hash=sha256:f904c24bdeabd4298f7a977ef14ca2a022ca921ed670b92ecd16ab6f3d01f867 + # via feast (pyproject.toml) packaging==26.0 \ --hash=sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4 \ --hash=sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529 @@ -2961,9 +3037,9 @@ typeguard==4.5.0 \ --hash=sha256:749bea21cdb2553e12831bc29f1eae980b22c7de8331ab67ae7db9e85470b5a7 \ --hash=sha256:cfda388fc88a9ce42a41890900d6f31ee124bea9b73bb84701a32438e92165c3 # via feast (pyproject.toml) -typer==0.23.1 \ - --hash=sha256:2070374e4d31c83e7b61362fd859aa683576432fd5b026b060ad6b4cd3b86134 \ - --hash=sha256:3291ad0d3c701cbf522012faccfbb29352ff16ad262db2139e6b01f15781f14e +typer==0.24.0 \ + --hash=sha256:5fc435a9c8356f6160ed6e85a6301fdd6e3d8b2851da502050d1f92c5e9eddc8 \ + --hash=sha256:f9373dc4eff901350694f519f783c29b6d7a110fc0dcc11b1d7e353b85ca6504 # via fastapi-mcp types-psutil==7.0.0.20250218 \ --hash=sha256:1447a30c282aafefcf8941ece854e1100eee7b0296a9d9be9977292f0269b121 \ diff --git a/sdk/python/requirements/py3.12-requirements.txt b/sdk/python/requirements/py3.12-requirements.txt index fd1728e2ab2..a17dba267bc 100644 --- a/sdk/python/requirements/py3.12-requirements.txt +++ b/sdk/python/requirements/py3.12-requirements.txt @@ -678,6 +678,82 @@ numpy==2.4.2 \ # feast (pyproject.toml) # dask 
# pandas +orjson==3.11.7 \ + --hash=sha256:043d3006b7d32c7e233b8cfb1f01c651013ea079e08dcef7189a29abd8befe11 \ + --hash=sha256:0527a4510c300e3b406591b0ba69b5dc50031895b0a93743526a3fc45f59d26e \ + --hash=sha256:0724e265bc548af1dedebd9cb3d24b4e1c1e685a343be43e87ba922a5c5fff2f \ + --hash=sha256:136dcd6a2e796dfd9ffca9fc027d778567b0b7c9968d092842d3c323cef88aa8 \ + --hash=sha256:14f440c7268c8f8633d1b3d443a434bd70cb15686117ea6beff8fdc8f5917a1e \ + --hash=sha256:1d98b30cc1313d52d4af17d9c3d307b08389752ec5f2e5febdfada70b0f8c733 \ + --hash=sha256:1eb80451a9c351a71dfaf5b7ccc13ad065405217726b59fdbeadbcc544f9d223 \ + --hash=sha256:1ee5cc7160a821dfe14f130bc8e63e7611051f964b463d9e2a3a573204446a4d \ + --hash=sha256:23d6c20517a97a9daf1d48b580fcdc6f0516c6f4b5038823426033690b4d2650 \ + --hash=sha256:26c3b9132f783b7d7903bf1efb095fed8d4a3a85ec0d334ee8beff3d7a4749d5 \ + --hash=sha256:31c80ce534ac4ea3739c5ee751270646cbc46e45aea7576a38ffec040b4029a1 \ + --hash=sha256:3726be79e36e526e3d9c1aceaadbfb4a04ee80a72ab47b3f3c17fefb9812e7b8 \ + --hash=sha256:390a1dce0c055ddf8adb6aa94a73b45a4a7d7177b5c584b8d1c1947f2ba60fb3 \ + --hash=sha256:3a2479753bbb95b0ebcf7969f562cdb9668e6d12416a35b0dda79febf89cdea2 \ + --hash=sha256:3c4bc6c6ac52cdaa267552544c73e486fecbd710b7ac09bc024d5a78555a22f6 \ + --hash=sha256:411ebaf34d735e25e358a6d9e7978954a9c9d58cfb47bc6683cdc3964cd2f910 \ + --hash=sha256:4682d1db3bcebd2b64757e0ddf9e87ae5f00d29d16c5cdf3a62f561d08cc3dd2 \ + --hash=sha256:4a2e9c5be347b937a2e0203866f12bba36082e89b402ddb9e927d5822e43088d \ + --hash=sha256:57036b27ac8a25d81112eb0cc9835cd4833c5b16e1467816adc0015f59e870dc \ + --hash=sha256:5ede977b5fe5ac91b1dffc0a517ca4542d2ec8a6a4ff7b2652d94f640796342a \ + --hash=sha256:5fdfad2093bdd08245f2e204d977facd5f871c88c4a71230d5bcbd0e43bf6222 \ + --hash=sha256:623ad1b9548ef63886319c16fa317848e465a21513b31a6ad7b57443c3e0dcf5 \ + --hash=sha256:652c6c3af76716f4a9c290371ba2e390ede06f6603edb277b481daf37f6f464e \ + 
--hash=sha256:6543001328aa857187f905308a028935864aefe9968af3848401b6fe80dbb471 \ + --hash=sha256:6e776b998ac37c0396093d10290e60283f59cfe0fc3fccbd0ccc4bd04dd19892 \ + --hash=sha256:71924496986275a737f38e3f22b4e0878882b3f7a310d2ff4dc96e812789120c \ + --hash=sha256:733ae23ada68b804b222c44affed76b39e30806d38660bf1eb200520d259cc16 \ + --hash=sha256:7477aa6a6ec6139c5cb1cc7b214643592169a5494d200397c7fc95d740d5fcf3 \ + --hash=sha256:79cacb0b52f6004caf92405a7e1f11e6e2de8bdf9019e4f76b44ba045125cd6b \ + --hash=sha256:7ba61079379b0ae29e117db13bda5f28d939766e410d321ec1624afc6a0b0504 \ + --hash=sha256:800988273a014a0541483dc81021247d7eacb0c845a9d1a34a422bc718f41539 \ + --hash=sha256:814be4b49b228cfc0b3c565acf642dd7d13538f966e3ccde61f4f55be3e20785 \ + --hash=sha256:845c3e0d8ded9c9271cd79596b9b552448b885b97110f628fb687aee2eed11c1 \ + --hash=sha256:849e38203e5be40b776ed2718e587faf204d184fc9a008ae441f9442320c0cab \ + --hash=sha256:89e13dd3f89f1c38a9c9eba5fbf7cdc2d1feca82f5f290864b4b7a6aac704576 \ + --hash=sha256:89e440ebc74ce8ab5c7bc4ce6757b4a6b1041becb127df818f6997b5c71aa60b \ + --hash=sha256:8ff206156006da5b847c9304b6308a01e8cdbc8cce824e2779a5ba71c3def141 \ + --hash=sha256:91c81ef070c8f3220054115e1ef468b1c9ce8497b4e526cb9f68ab4dc0a7ac62 \ + --hash=sha256:9487abc2c2086e7c8eb9a211d2ce8855bae0e92586279d0d27b341d5ad76c85c \ + --hash=sha256:962d046ee1765f74a1da723f4b33e3b228fe3a48bd307acce5021dfefe0e29b2 \ + --hash=sha256:996b65230271f1a97026fd0e6a753f51fbc0c335d2ad0c6201f711b0da32693b \ + --hash=sha256:9b1a67243945819ce55d24a30b59d6a168e86220452d2c96f4d1f093e71c0c49 \ + --hash=sha256:9c0b51672e466fd7e56230ffbae7f1639e18d0ce023351fb75da21b71bc2c960 \ + --hash=sha256:9e54f3808e2b6b945078c41aa8d9b5834b28c50843846e97807e5adb75fa9705 \ + --hash=sha256:a02c833f38f36546ba65a452127633afce4cf0dd7296b753d3bb54e55e5c0174 \ + --hash=sha256:a12b80df61aab7b98b490fe9e4879925ba666fccdfcd175252ce4d9035865ace \ + --hash=sha256:a16bcd08ab0bcdfc7e8801d9c4a9cc17e58418e4d48ddc6ded4e9e4b1a94062b \ + 
--hash=sha256:a56df3239294ea5964adf074c54bcc4f0ccd21636049a2cf3ca9cf03b5d03cf1 \ + --hash=sha256:a709e881723c9b18acddcfb8ba357322491ad553e277cf467e1e7e20e2d90561 \ + --hash=sha256:ab49d4b2a6a1d415ddb9f37a21e02e0d5dbfe10b7870b21bf779fc21e9156157 \ + --hash=sha256:ae9e0b37a834cef7ce8f99de6498f8fad4a2c0bf6bfc3d02abd8ed56aa15b2de \ + --hash=sha256:b4a9eefdc70bf8bf9857f0290f973dec534ac84c35cd6a7f4083be43e7170a8f \ + --hash=sha256:b63c6e6738d7c3470ad01601e23376aa511e50e1f3931395b9f9c722406d1a67 \ + --hash=sha256:b7b1dae39230a393df353827c855a5f176271c23434cfd2db74e0e424e693e10 \ + --hash=sha256:b8d14b71c0b12963fe8a62aac87119f1afdf4cb88a400f61ca5ae581449efcb5 \ + --hash=sha256:b9f95dcdea9d4f805daa9ddf02617a89e484c6985fa03055459f90e87d7a0757 \ + --hash=sha256:b9fc4d0f81f394689e0814617aadc4f2ea0e8025f38c226cbf22d3b5ddbf025d \ + --hash=sha256:bd03ea7606833655048dab1a00734a2875e3e86c276e1d772b2a02556f0d895f \ + --hash=sha256:bd0d68edd7dfca1b2eca9361a44ac9f24b078de3481003159929a0573f21a6bf \ + --hash=sha256:bda117c4148e81f746655d5a3239ae9bd00cb7bc3ca178b5fc5a5997e9744183 \ + --hash=sha256:bf742e149121dc5648ba0a08ea0871e87b660467ef168a3a5e53bc1fbd64bb74 \ + --hash=sha256:c2428d358d85e8da9d37cba18b8c4047c55222007a84f97156a5b22028dfbfc0 \ + --hash=sha256:c2e85fe4698b6a56d5e2ebf7ae87544d668eb6bde1ad1226c13f44663f20ec9e \ + --hash=sha256:c43b8b5bab288b6b90dac410cca7e986a4fa747a2e8f94615aea407da706980d \ + --hash=sha256:cededd6738e1c153530793998e31c05086582b08315db48ab66649768f326baa \ + --hash=sha256:d06e5c5fed5caedd2e540d62e5b1c25e8c82431b9e577c33537e5fa4aa909539 \ + --hash=sha256:d772afdb22555f0c58cfc741bdae44180122b3616faa1ecadb595cd526e4c993 \ + --hash=sha256:d897e81f8d0cbd2abb82226d1860ad2e1ab3ff16d7b08c96ca00df9d45409ef4 \ + --hash=sha256:de0a37f21d0d364954ad5de1970491d7fbd0fb1ef7417d4d56a36dc01ba0c0a0 \ + --hash=sha256:e7745312efa9e11c17fbd3cb3097262d079da26930ae9ae7ba28fb738367cbad \ + --hash=sha256:ed46f17096e28fb28d2975834836a639af7278aa87c84f68ab08fbe5b8bd75fa \ + 
--hash=sha256:f4f7c956b5215d949a1f65334cf9d7612dde38f20a95f2315deef167def91a6f \ + --hash=sha256:f50979824bde13d32b4320eedd513431c921102796d86be3eee0b58e58a3ecd1 \ + --hash=sha256:f904c24bdeabd4298f7a977ef14ca2a022ca921ed670b92ecd16ab6f3d01f867 + # via feast (pyproject.toml) packaging==26.0 \ --hash=sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4 \ --hash=sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529 diff --git a/sdk/python/tests/README.md b/sdk/python/tests/README.md index 5b930129026..418e2131928 100644 --- a/sdk/python/tests/README.md +++ b/sdk/python/tests/README.md @@ -48,19 +48,21 @@ $ tree │ ├── test_offline_write.py │ ├── test_push_features_to_offline_store.py │ ├── test_s3_custom_endpoint.py -│ └── test_universal_historical_retrieval.py +│ ├── test_universal_historical_retrieval.py +│ └── test_universal_types.py +├── cli +│ └── test_universal_cli.py ├── online_store │ ├── test_online_retrieval.py │ ├── test_push_features_to_online_store.py -│ └── test_universal_online.py +│ ├── test_universal_online.py +│ └── test_universal_online_types.py └── registration ├── test_feature_store.py ├── test_inference.py ├── test_registry.py ├── test_sql_registry.py - ├── test_universal_cli.py - ├── test_universal_odfv_feature_inference.py - └── test_universal_types.py + └── test_universal_odfv_feature_inference.py ``` diff --git a/sdk/python/tests/benchmarks/test_key_encoding_benchmarks.py b/sdk/python/tests/benchmarks/test_key_encoding_benchmarks.py new file mode 100644 index 00000000000..b34cabbdb37 --- /dev/null +++ b/sdk/python/tests/benchmarks/test_key_encoding_benchmarks.py @@ -0,0 +1,465 @@ +""" +Benchmarks for entity key serialization/deserialization performance. + +This module provides comprehensive performance tests for the key encoding utilities +to validate and track the performance improvements from optimization efforts. 
+""" + +import time + +import pytest + +from feast.infra.key_encoding_utils import ( + deserialize_entity_key, + serialize_entity_key, + serialize_entity_key_prefix, +) +from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto +from feast.protos.feast.types.Value_pb2 import Value as ValueProto + + +@pytest.fixture +def single_entity_key(): + """Single entity key (most common case - 90% of usage)""" + return EntityKeyProto( + join_keys=["user_id"], entity_values=[ValueProto(string_val="user123")] + ) + + +@pytest.fixture +def single_entity_key_int(): + """Single entity key with int64 value""" + return EntityKeyProto( + join_keys=["user_id"], entity_values=[ValueProto(int64_val=123456789)] + ) + + +@pytest.fixture +def multi_entity_key_small(): + """Small multi-entity key (2-3 entities)""" + return EntityKeyProto( + join_keys=["user_id", "session_id"], + entity_values=[ + ValueProto(string_val="user123"), + ValueProto(string_val="sess456"), + ], + ) + + +@pytest.fixture +def multi_entity_key_large(): + """Large multi-entity key (5+ entities)""" + return EntityKeyProto( + join_keys=["user_id", "session_id", "device_id", "app_version", "region"], + entity_values=[ + ValueProto(string_val="user123"), + ValueProto(string_val="sess456"), + ValueProto(string_val="dev789"), + ValueProto(string_val="v1.2.3"), + ValueProto(string_val="us-west-2"), + ], + ) + + +@pytest.fixture +def mixed_value_types_key(): + """Entity key with mixed value types""" + return EntityKeyProto( + join_keys=["user_id", "timestamp", "count", "score"], + entity_values=[ + ValueProto(string_val="user123"), + ValueProto(unix_timestamp_val=1758823656), + ValueProto(int64_val=42), + ValueProto(int32_val=95), + ], + ) + + +@pytest.fixture +def batch_entity_keys(single_entity_key, multi_entity_key_small): + """Batch of entity keys for bulk operation testing""" + keys = [] + # Generate 100 single entity keys (typical batch size) + for i in range(80): # 80% single entities + keys.append( 
+ EntityKeyProto( + join_keys=["user_id"], entity_values=[ValueProto(string_val=f"user{i}")] + ) + ) + + # Add 20 multi-entity keys + for i in range(20): + keys.append( + EntityKeyProto( + join_keys=["user_id", "session_id"], + entity_values=[ + ValueProto(string_val=f"user{i}"), + ValueProto(string_val=f"sess{i}"), + ], + ) + ) + + return keys + + +# Serialization Benchmarks + + +@pytest.mark.benchmark(group="serialize_single") +@pytest.mark.parametrize("entity_key_serialization_version", [3]) +def test_serialize_single_entity_string( + benchmark, single_entity_key, entity_key_serialization_version +): + """Benchmark single entity key serialization (string value) - most common case.""" + result = benchmark( + serialize_entity_key, single_entity_key, entity_key_serialization_version + ) + assert len(result) > 0 + + +@pytest.mark.benchmark(group="serialize_single") +@pytest.mark.parametrize("entity_key_serialization_version", [3]) +def test_serialize_single_entity_int( + benchmark, single_entity_key_int, entity_key_serialization_version +): + """Benchmark single entity key serialization (int64 value).""" + result = benchmark( + serialize_entity_key, single_entity_key_int, entity_key_serialization_version + ) + assert len(result) > 0 + + +@pytest.mark.benchmark(group="serialize_multi") +@pytest.mark.parametrize("entity_key_serialization_version", [3]) +def test_serialize_multi_entity_small( + benchmark, multi_entity_key_small, entity_key_serialization_version +): + """Benchmark small multi-entity key serialization.""" + result = benchmark( + serialize_entity_key, multi_entity_key_small, entity_key_serialization_version + ) + assert len(result) > 0 + + +@pytest.mark.benchmark(group="serialize_multi") +@pytest.mark.parametrize("entity_key_serialization_version", [3]) +def test_serialize_multi_entity_large( + benchmark, multi_entity_key_large, entity_key_serialization_version +): + """Benchmark large multi-entity key serialization.""" + result = benchmark( + 
serialize_entity_key, multi_entity_key_large, entity_key_serialization_version + ) + assert len(result) > 0 + + +@pytest.mark.benchmark(group="serialize_mixed") +@pytest.mark.parametrize("entity_key_serialization_version", [3]) +def test_serialize_mixed_value_types( + benchmark, mixed_value_types_key, entity_key_serialization_version +): + """Benchmark serialization with mixed value types.""" + result = benchmark( + serialize_entity_key, mixed_value_types_key, entity_key_serialization_version + ) + assert len(result) > 0 + + +# Deserialization Benchmarks + + +@pytest.mark.benchmark(group="deserialize_single") +@pytest.mark.parametrize("entity_key_serialization_version", [3]) +def test_deserialize_single_entity_string( + benchmark, single_entity_key, entity_key_serialization_version +): + """Benchmark single entity key deserialization (string value).""" + serialized = serialize_entity_key( + single_entity_key, entity_key_serialization_version + ) + result = benchmark( + deserialize_entity_key, serialized, entity_key_serialization_version + ) + assert result == single_entity_key + + +@pytest.mark.benchmark(group="deserialize_single") +@pytest.mark.parametrize("entity_key_serialization_version", [3]) +def test_deserialize_single_entity_int( + benchmark, single_entity_key_int, entity_key_serialization_version +): + """Benchmark single entity key deserialization (int64 value).""" + serialized = serialize_entity_key( + single_entity_key_int, entity_key_serialization_version + ) + result = benchmark( + deserialize_entity_key, serialized, entity_key_serialization_version + ) + assert result == single_entity_key_int + + +@pytest.mark.benchmark(group="deserialize_multi") +@pytest.mark.parametrize("entity_key_serialization_version", [3]) +def test_deserialize_multi_entity_small( + benchmark, multi_entity_key_small, entity_key_serialization_version +): + """Benchmark small multi-entity key deserialization.""" + serialized = serialize_entity_key( + multi_entity_key_small, 
entity_key_serialization_version + ) + result = benchmark( + deserialize_entity_key, serialized, entity_key_serialization_version + ) + assert result == multi_entity_key_small + + +@pytest.mark.benchmark(group="deserialize_multi") +@pytest.mark.parametrize("entity_key_serialization_version", [3]) +def test_deserialize_multi_entity_large( + benchmark, multi_entity_key_large, entity_key_serialization_version +): + """Benchmark large multi-entity key deserialization.""" + serialized = serialize_entity_key( + multi_entity_key_large, entity_key_serialization_version + ) + result = benchmark( + deserialize_entity_key, serialized, entity_key_serialization_version + ) + assert result == multi_entity_key_large + + +@pytest.mark.benchmark(group="deserialize_mixed") +@pytest.mark.parametrize("entity_key_serialization_version", [3]) +def test_deserialize_mixed_value_types( + benchmark, mixed_value_types_key, entity_key_serialization_version +): + """Benchmark deserialization with mixed value types.""" + serialized = serialize_entity_key( + mixed_value_types_key, entity_key_serialization_version + ) + result = benchmark( + deserialize_entity_key, serialized, entity_key_serialization_version + ) + assert result == mixed_value_types_key + + +# Round-trip Benchmarks + + +@pytest.mark.benchmark(group="roundtrip_single") +def test_roundtrip_single_entity(benchmark, single_entity_key): + """Benchmark complete serialize + deserialize round-trip for single entity.""" + + def roundtrip(): + serialized = serialize_entity_key(single_entity_key, 3) + return deserialize_entity_key(serialized, 3) + + result = benchmark(roundtrip) + assert result == single_entity_key + + +@pytest.mark.benchmark(group="roundtrip_multi") +def test_roundtrip_multi_entity(benchmark, multi_entity_key_small): + """Benchmark complete serialize + deserialize round-trip for multi-entity.""" + + def roundtrip(): + serialized = serialize_entity_key(multi_entity_key_small, 3) + return deserialize_entity_key(serialized, 
3) + + result = benchmark(roundtrip) + assert result == multi_entity_key_small + + +# Prefix Serialization Benchmarks + + +@pytest.mark.benchmark(group="prefix") +def test_serialize_entity_key_prefix_single(benchmark): + """Benchmark entity key prefix serialization for single key.""" + result = benchmark(serialize_entity_key_prefix, ["user_id"], 3) + assert len(result) > 0 + + +@pytest.mark.benchmark(group="prefix") +def test_serialize_entity_key_prefix_multi(benchmark): + """Benchmark entity key prefix serialization for multiple keys.""" + keys = ["user_id", "session_id", "device_id"] + result = benchmark(serialize_entity_key_prefix, keys, 3) + assert len(result) > 0 + + +# Bulk Operations Benchmarks + + +@pytest.mark.benchmark(group="bulk_serialize") +def test_bulk_serialize_batch(benchmark, batch_entity_keys): + """Benchmark batch serialization of 100 mixed entity keys.""" + + def bulk_serialize(): + results = [] + for entity_key in batch_entity_keys: + serialized = serialize_entity_key(entity_key, 3) + results.append(serialized) + return results + + results = benchmark(bulk_serialize) + assert len(results) == 100 + + +@pytest.mark.benchmark(group="bulk_deserialize") +def test_bulk_deserialize_batch(benchmark, batch_entity_keys): + """Benchmark batch deserialization of 100 mixed entity keys.""" + # Pre-serialize all keys + serialized_keys = [serialize_entity_key(key, 3) for key in batch_entity_keys] + + def bulk_deserialize(): + results = [] + for serialized in serialized_keys: + deserialized = deserialize_entity_key(serialized, 3) + results.append(deserialized) + return results + + results = benchmark(bulk_deserialize) + assert len(results) == 100 + + +@pytest.mark.benchmark(group="bulk_roundtrip") +def test_bulk_roundtrip_batch(benchmark, batch_entity_keys): + """Benchmark bulk serialize + deserialize for realistic workload.""" + + def bulk_roundtrip(): + results = [] + for entity_key in batch_entity_keys: + serialized = serialize_entity_key(entity_key, 3) + 
deserialized = deserialize_entity_key(serialized, 3) + results.append(deserialized) + return results + + results = benchmark(bulk_roundtrip) + assert len(results) == 100 + + +# Memory Efficiency Tests + + +def test_memory_efficiency_serialization(single_entity_key): + """Test memory usage during serialization (not a benchmark, just validation).""" + import os + + import psutil + + process = psutil.Process(os.getpid()) + initial_memory = process.memory_info().rss + + # Perform many serializations + for i in range(10000): + entity_key = EntityKeyProto( + join_keys=["user_id"], entity_values=[ValueProto(string_val=f"user{i}")] + ) + serialize_entity_key(entity_key, 3) + + final_memory = process.memory_info().rss + memory_increase = final_memory - initial_memory + + # Memory increase should be minimal (< 10MB for 10k operations) + # This validates that we're not leaking memory in the optimized version + assert memory_increase < 10 * 1024 * 1024, ( + f"Memory usage increased by {memory_increase / 1024 / 1024:.2f} MB" + ) + + +# Performance Regression Tests + + +def test_performance_regression_single_entity(): + """Regression test: single entity serialization should be faster than baseline.""" + entity_key = EntityKeyProto( + join_keys=["user_id"], entity_values=[ValueProto(string_val="user123")] + ) + + # Warm up + for _ in range(100): + serialize_entity_key(entity_key, 3) + + # Time 1000 operations + start_time = time.perf_counter() + for _ in range(1000): + serialize_entity_key(entity_key, 3) + elapsed = time.perf_counter() - start_time + + # Should be able to do 1000 single entity serializations in < 100ms + # Using a generous threshold to avoid flaky failures on CI runners + assert elapsed < 0.1, ( + f"Single entity serialization too slow: {elapsed:.4f}s for 1000 operations" + ) + + +def test_performance_regression_deserialization(): + """Regression test: deserialization should be fast with memoryview optimization.""" + entity_key = EntityKeyProto( + 
join_keys=["user_id", "session_id"], + entity_values=[ + ValueProto(string_val="user123"), + ValueProto(string_val="sess456"), + ], + ) + + serialized = serialize_entity_key(entity_key, 3) + + # Warm up + for _ in range(100): + deserialize_entity_key(serialized, 3) + + # Time 1000 operations + start_time = time.perf_counter() + for _ in range(1000): + deserialize_entity_key(serialized, 3) + elapsed = time.perf_counter() - start_time + + # Should be able to do 1000 deserializations in < 200ms + # Using a generous threshold to avoid flaky failures on CI runners + assert elapsed < 0.2, ( + f"Deserialization too slow: {elapsed:.4f}s for 1000 operations" + ) + + +# Binary Compatibility Tests + + +def test_binary_format_consistency_single(): + """Ensure optimizations don't change binary format for single entities.""" + entity_key = EntityKeyProto( + join_keys=["user_id"], entity_values=[ValueProto(string_val="test")] + ) + + # Serialize multiple times - results should be identical + results = [] + for _ in range(10): + serialized = serialize_entity_key(entity_key, 3) + results.append(serialized) + + # All results should be identical + for result in results[1:]: + assert result == results[0], "Binary format inconsistency detected" + + +def test_binary_format_consistency_multi(): + """Ensure optimizations don't change binary format for multi-entity keys.""" + entity_key = EntityKeyProto( + join_keys=["user", "session", "device"], + entity_values=[ + ValueProto(string_val="u1"), + ValueProto(string_val="s1"), + ValueProto(string_val="d1"), + ], + ) + + # Serialize multiple times - results should be identical + results = [] + for _ in range(10): + serialized = serialize_entity_key(entity_key, 3) + results.append(serialized) + + # All results should be identical + for result in results[1:]: + assert result == results[0], "Binary format inconsistency detected" diff --git a/sdk/python/feast/infra/offline_stores/contrib/ray_offline_store/tests/__init__.py 
b/sdk/python/tests/component/.gitkeep similarity index 100% rename from sdk/python/feast/infra/offline_stores/contrib/ray_offline_store/tests/__init__.py rename to sdk/python/tests/component/.gitkeep diff --git a/sdk/python/feast/infra/online_stores/ikv_online_store/__init__.py b/sdk/python/tests/component/__init__.py similarity index 100% rename from sdk/python/feast/infra/online_stores/ikv_online_store/__init__.py rename to sdk/python/tests/component/__init__.py diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/__init__.py b/sdk/python/tests/component/ray/.gitkeep similarity index 100% rename from sdk/python/tests/integration/feature_repos/universal/data_sources/__init__.py rename to sdk/python/tests/component/ray/.gitkeep diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/__init__.py b/sdk/python/tests/component/ray/__init__.py similarity index 100% rename from sdk/python/tests/integration/feature_repos/universal/online_store/__init__.py rename to sdk/python/tests/component/ray/__init__.py diff --git a/sdk/python/tests/integration/compute_engines/ray_compute/conftest.py b/sdk/python/tests/component/ray/conftest.py similarity index 87% rename from sdk/python/tests/integration/compute_engines/ray_compute/conftest.py rename to sdk/python/tests/component/ray/conftest.py index 885b1555ec7..d2d46b7c9a7 100644 --- a/sdk/python/tests/integration/compute_engines/ray_compute/conftest.py +++ b/sdk/python/tests/component/ray/conftest.py @@ -4,7 +4,7 @@ auto-discovered by pytest. 
""" -from tests.integration.compute_engines.ray_compute.ray_shared_utils import ( +from tests.component.ray.ray_shared_utils import ( entity_df, feature_dataset, ray_environment, diff --git a/sdk/python/tests/integration/compute_engines/ray_compute/ray_shared_utils.py b/sdk/python/tests/component/ray/ray_shared_utils.py similarity index 98% rename from sdk/python/tests/integration/compute_engines/ray_compute/ray_shared_utils.py rename to sdk/python/tests/component/ray/ray_shared_utils.py index 6b28949f401..ba79685d6cb 100644 --- a/sdk/python/tests/integration/compute_engines/ray_compute/ray_shared_utils.py +++ b/sdk/python/tests/component/ray/ray_shared_utils.py @@ -14,7 +14,7 @@ from feast.data_source import DataSource from feast.infra.ray_initializer import shutdown_ray from feast.utils import _utc_now -from tests.integration.feature_repos.repo_configuration import ( +from tests.universal.feature_repos.repo_configuration import ( construct_test_environment, ) diff --git a/sdk/python/tests/integration/compute_engines/ray_compute/repo_configuration.py b/sdk/python/tests/component/ray/repo_configuration.py similarity index 71% rename from sdk/python/tests/integration/compute_engines/ray_compute/repo_configuration.py rename to sdk/python/tests/component/ray/repo_configuration.py index 37d0d020ccd..f18185c35c9 100644 --- a/sdk/python/tests/integration/compute_engines/ray_compute/repo_configuration.py +++ b/sdk/python/tests/component/ray/repo_configuration.py @@ -3,19 +3,16 @@ from feast.infra.offline_stores.contrib.ray_repo_configuration import ( RayDataSourceCreator, ) -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.universal.online_store.redis import ( - RedisOnlineStoreCreator, -) def get_ray_compute_engine_test_config() -> IntegrationTestRepoConfig: """Get test configuration for Ray compute 
engine.""" return IntegrationTestRepoConfig( provider="local", - online_store_creator=RedisOnlineStoreCreator, + online_store={"type": "sqlite"}, offline_store_creator=RayDataSourceCreator, batch_engine={ "type": "ray.engine", diff --git a/sdk/python/tests/integration/compute_engines/ray_compute/test_compute.py b/sdk/python/tests/component/ray/test_compute.py similarity index 99% rename from sdk/python/tests/integration/compute_engines/ray_compute/test_compute.py rename to sdk/python/tests/component/ray/test_compute.py index ef4bfa131da..87a86b983e0 100644 --- a/sdk/python/tests/integration/compute_engines/ray_compute/test_compute.py +++ b/sdk/python/tests/component/ray/test_compute.py @@ -21,7 +21,7 @@ ) from feast.transformation.ray_transformation import RayTransformation from feast.types import Float32, Int32, Int64 -from tests.integration.compute_engines.ray_compute.ray_shared_utils import ( +from tests.component.ray.ray_shared_utils import ( driver, now, ) diff --git a/sdk/python/tests/unit/infra/compute_engines/ray_compute/test_nodes.py b/sdk/python/tests/component/ray/test_nodes.py similarity index 100% rename from sdk/python/tests/unit/infra/compute_engines/ray_compute/test_nodes.py rename to sdk/python/tests/component/ray/test_nodes.py diff --git a/sdk/python/tests/integration/compute_engines/ray_compute/test_source_feature_views.py b/sdk/python/tests/component/ray/test_source_feature_views.py similarity index 99% rename from sdk/python/tests/integration/compute_engines/ray_compute/test_source_feature_views.py rename to sdk/python/tests/component/ray/test_source_feature_views.py index 7d8f23e1bf6..73f64c67dbe 100644 --- a/sdk/python/tests/integration/compute_engines/ray_compute/test_source_feature_views.py +++ b/sdk/python/tests/component/ray/test_source_feature_views.py @@ -10,7 +10,7 @@ MaterializationJobStatus, ) from feast.types import Float32, Int32, Int64 -from tests.integration.compute_engines.ray_compute.ray_shared_utils import ( +from 
tests.component.ray.ray_shared_utils import ( create_entity_df, create_feature_dataset, create_unique_sink_source, diff --git a/sdk/python/tests/integration/offline_store/test_hybrid_offline_store.py b/sdk/python/tests/component/spark/.gitkeep similarity index 100% rename from sdk/python/tests/integration/offline_store/test_hybrid_offline_store.py rename to sdk/python/tests/component/spark/.gitkeep diff --git a/sdk/python/tests/component/spark/__init__.py b/sdk/python/tests/component/spark/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/tests/component/spark/conftest.py b/sdk/python/tests/component/spark/conftest.py new file mode 100644 index 00000000000..8bcd1772296 --- /dev/null +++ b/sdk/python/tests/component/spark/conftest.py @@ -0,0 +1,23 @@ +import pytest +from pyspark.sql import SparkSession + + +@pytest.fixture(scope="session") +def spark_session(): + spark = ( + SparkSession.builder.appName("FeastSparkTests") + .master("local[*]") + .config("spark.sql.shuffle.partitions", "1") + .config("spark.driver.host", "127.0.0.1") + .config("spark.driver.bindAddress", "127.0.0.1") + .getOrCreate() + ) + + yield spark + + spark.stop() + + +@pytest.fixture +def spark_fixture(spark_session): + yield spark_session diff --git a/sdk/python/tests/integration/compute_engines/spark/test_compute.py b/sdk/python/tests/component/spark/test_compute.py similarity index 99% rename from sdk/python/tests/integration/compute_engines/spark/test_compute.py rename to sdk/python/tests/component/spark/test_compute.py index e0855ae31f3..803cd505513 100644 --- a/sdk/python/tests/integration/compute_engines/spark/test_compute.py +++ b/sdk/python/tests/component/spark/test_compute.py @@ -19,7 +19,7 @@ SparkOfflineStore, ) from feast.types import Float32, Int32, Int64 -from tests.integration.compute_engines.spark.utils import ( +from tests.component.spark.utils import ( _check_offline_features, _check_online_features, create_entity_df, diff --git 
a/sdk/python/tests/integration/compute_engines/spark/test_compute_dag.py b/sdk/python/tests/component/spark/test_compute_dag.py similarity index 99% rename from sdk/python/tests/integration/compute_engines/spark/test_compute_dag.py rename to sdk/python/tests/component/spark/test_compute_dag.py index 24277d9a323..04b8a0b2edf 100644 --- a/sdk/python/tests/integration/compute_engines/spark/test_compute_dag.py +++ b/sdk/python/tests/component/spark/test_compute_dag.py @@ -19,7 +19,7 @@ SparkSource, ) from feast.types import Float32, Int32, Int64 -from tests.integration.compute_engines.spark.utils import ( +from tests.component.spark.utils import ( _check_offline_features, _check_online_features, create_entity_df, diff --git a/sdk/python/tests/unit/infra/compute_engines/spark/test_nodes.py b/sdk/python/tests/component/spark/test_nodes.py similarity index 93% rename from sdk/python/tests/unit/infra/compute_engines/spark/test_nodes.py rename to sdk/python/tests/component/spark/test_nodes.py index 7fcc332aefb..c8ed1157d86 100644 --- a/sdk/python/tests/unit/infra/compute_engines/spark/test_nodes.py +++ b/sdk/python/tests/component/spark/test_nodes.py @@ -1,9 +1,6 @@ from datetime import datetime, timedelta from unittest.mock import MagicMock -import pytest -from pyspark.sql import SparkSession - from feast.aggregation import Aggregation from feast.infra.compute_engines.dag.context import ColumnInfo, ExecutionContext from feast.infra.compute_engines.dag.model import DAGFormat @@ -19,22 +16,6 @@ ) -@pytest.fixture(scope="session") -def spark_session(): - spark = ( - SparkSession.builder.appName("FeastSparkTests") - .master("local[*]") - .config("spark.sql.shuffle.partitions", "1") - .config("spark.driver.host", "127.0.0.1") - .config("spark.driver.bindAddress", "127.0.0.1") - .getOrCreate() - ) - - yield spark - - spark.stop() - - def test_spark_transformation_node_executes_udf(spark_session): # Sample Spark input df = spark_session.createDataFrame( diff --git 
a/sdk/python/tests/unit/infra/offline_stores/contrib/spark_offline_store/test_spark.py b/sdk/python/tests/component/spark/test_spark.py similarity index 100% rename from sdk/python/tests/unit/infra/offline_stores/contrib/spark_offline_store/test_spark.py rename to sdk/python/tests/component/spark/test_spark.py diff --git a/sdk/python/tests/integration/materialization/contrib/spark/test_spark_materialization_engine.py b/sdk/python/tests/component/spark/test_spark_materialization_engine.py similarity index 90% rename from sdk/python/tests/integration/materialization/contrib/spark/test_spark_materialization_engine.py rename to sdk/python/tests/component/spark/test_spark_materialization_engine.py index 03f942c2f96..dbd4d842914 100644 --- a/sdk/python/tests/integration/materialization/contrib/spark/test_spark_materialization_engine.py +++ b/sdk/python/tests/component/spark/test_spark_materialization_engine.py @@ -10,13 +10,13 @@ ) from feast.types import Float32 from tests.data.data_creator import create_basic_driver_dataset -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.repo_configuration import ( +from tests.universal.feature_repos.repo_configuration import ( construct_test_environment, ) -from tests.integration.feature_repos.universal.online_store.redis import ( +from tests.universal.feature_repos.universal.online_store.redis import ( RedisOnlineStoreCreator, ) from tests.utils.e2e_test_validation import validate_offline_online_store_consistency diff --git a/sdk/python/tests/unit/infra/offline_stores/contrib/spark_offline_store/test_spark_table_format_integration.py b/sdk/python/tests/component/spark/test_spark_table_format_integration.py similarity index 100% rename from sdk/python/tests/unit/infra/offline_stores/contrib/spark_offline_store/test_spark_table_format_integration.py rename to 
sdk/python/tests/component/spark/test_spark_table_format_integration.py diff --git a/sdk/python/tests/component/spark/test_spark_transformation.py b/sdk/python/tests/component/spark/test_spark_transformation.py new file mode 100644 index 00000000000..1345c77f0f4 --- /dev/null +++ b/sdk/python/tests/component/spark/test_spark_transformation.py @@ -0,0 +1,105 @@ +from unittest.mock import patch + +from pyspark.sql.functions import col, regexp_replace +from pyspark.testing.utils import assertDataFrameEqual + +from feast.transformation.base import Transformation +from feast.transformation.mode import TransformationMode +from feast.transformation.spark_transformation import SparkTransformation + + +def get_sample_df(spark): + sample_data = [ + {"name": "John D.", "age": 30}, + {"name": "Alice G.", "age": 25}, + {"name": "Bob T.", "age": 35}, + {"name": "Eve A.", "age": 28}, + ] + df = spark.createDataFrame(sample_data) + return df + + +def get_expected_df(spark): + expected_data = [ + {"name": "John D.", "age": 30}, + {"name": "Alice G.", "age": 25}, + {"name": "Bob T.", "age": 35}, + {"name": "Eve A.", "age": 28}, + ] + + expected_df = spark.createDataFrame(expected_data) + return expected_df + + +def remove_extra_spaces(df, column_name): + df_transformed = df.withColumn( + column_name, regexp_replace(col(column_name), "\\s+", " ") + ) + return df_transformed + + +def remove_extra_spaces_sql(df, column_name): + sql = f""" + SELECT + age, + regexp_replace({column_name}, '\\\\s+', ' ') as {column_name} + FROM {df} + """ + return sql + + +def test_spark_transformation(spark_fixture): + with patch( + "feast.infra.compute_engines.spark.utils.get_or_create_new_spark_session" + ) as m: + m.return_value = spark_fixture + spark = spark_fixture + df = get_sample_df(spark) + + spark_transformation = Transformation( + mode=TransformationMode.SPARK, + udf=remove_extra_spaces, + udf_string="remove extra spaces", + ) + + transformed_df = spark_transformation.transform(df, "name") + 
expected_df = get_expected_df(spark) + assertDataFrameEqual(transformed_df, expected_df) + + +def test_spark_transformation_init_transformation(spark_fixture): + with patch( + "feast.infra.compute_engines.spark.utils.get_or_create_new_spark_session" + ) as m: + m.return_value = spark_fixture + spark = spark_fixture + df = get_sample_df(spark) + + spark_transformation = SparkTransformation( + mode=TransformationMode.SPARK, + udf=remove_extra_spaces, + udf_string="remove extra spaces", + ) + + transformed_df = spark_transformation.transform(df, "name") + expected_df = get_expected_df(spark) + assertDataFrameEqual(transformed_df, expected_df) + + +def test_spark_transformation_sql(spark_fixture): + with patch( + "feast.infra.compute_engines.spark.utils.get_or_create_new_spark_session" + ) as m: + m.return_value = spark_fixture + spark = spark_fixture + df = get_sample_df(spark) + + spark_transformation = SparkTransformation( + mode=TransformationMode.SPARK_SQL, + udf=remove_extra_spaces_sql, + udf_string="remove extra spaces sql", + ) + + transformed_df = spark_transformation.transform(df, "name") + expected_df = get_expected_df(spark) + assertDataFrameEqual(transformed_df, expected_df) diff --git a/sdk/python/tests/integration/compute_engines/spark/utils.py b/sdk/python/tests/component/spark/utils.py similarity index 94% rename from sdk/python/tests/integration/compute_engines/spark/utils.py rename to sdk/python/tests/component/spark/utils.py index 20ffba4eff1..f12eef58948 100644 --- a/sdk/python/tests/integration/compute_engines/spark/utils.py +++ b/sdk/python/tests/component/spark/utils.py @@ -7,13 +7,13 @@ from feast.infra.offline_stores.contrib.spark_offline_store.tests.data_source import ( SparkDataSourceCreator, ) -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.repo_configuration import ( +from 
tests.universal.feature_repos.repo_configuration import ( construct_test_environment, ) -from tests.integration.feature_repos.universal.online_store.redis import ( +from tests.universal.feature_repos.universal.online_store.redis import ( RedisOnlineStoreCreator, ) diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index a57846c7e2e..8302e313a2d 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -36,10 +36,10 @@ create_document_dataset, create_image_dataset, ) -from tests.integration.feature_repos.integration_test_repo_config import ( # noqa: E402 +from tests.universal.feature_repos.integration_test_repo_config import ( # noqa: E402 IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.repo_configuration import ( # noqa: E402 +from tests.universal.feature_repos.repo_configuration import ( # noqa: E402 AVAILABLE_OFFLINE_STORES, AVAILABLE_ONLINE_STORES, OFFLINE_STORE_TO_PROVIDER_CONFIG, @@ -49,10 +49,10 @@ construct_universal_feature_views, construct_universal_test_data, ) -from tests.integration.feature_repos.universal.data_sources.file import ( # noqa: E402 +from tests.universal.feature_repos.universal.data_sources.file import ( # noqa: E402 FileDataSourceCreator, ) -from tests.integration.feature_repos.universal.entities import ( # noqa: E402 +from tests.universal.feature_repos.universal.entities import ( # noqa: E402 customer, driver, location, @@ -101,6 +101,10 @@ def pytest_configure(config): "markers", "universal_offline_stores: mark tests that can be run against different offline stores", ) + config.addinivalue_line( + "markers", + "ray_offline_stores_only: mark tests that currently only work with Ray offline store", + ) def pytest_addoption(parser): @@ -431,6 +435,9 @@ def fake_ingest_data(): "conv_rate": [0.5], "acc_rate": [0.6], "avg_daily_trips": [4], + "driver_metadata": [None], + "driver_config": [None], + "driver_profile": [None], "event_timestamp": [pd.Timestamp(_utc_now()).round("ms")], 
"created": [pd.Timestamp(_utc_now()).round("ms")], } diff --git a/sdk/python/tests/doctest/test_all.py b/sdk/python/tests/doctest/test_all.py index 802ae513e16..4f8a553bfac 100644 --- a/sdk/python/tests/doctest/test_all.py +++ b/sdk/python/tests/doctest/test_all.py @@ -72,27 +72,40 @@ def test_docstrings(): for package in current_packages: try: - for _, name, is_pkg in pkgutil.walk_packages(package.__path__): + for _, name, is_pkg in pkgutil.walk_packages( + package.__path__, onerror=lambda _: None + ): if name in FILES_TO_IGNORE: continue full_name = package.__name__ + "." + name + temp_module = None try: # https://github.com/feast-dev/feast/issues/5088 # Skip ray_transformation doctests - they hang on macOS due to # Ray worker spawning issues with uv-managed environments if ( - "ikv" not in full_name - and "milvus" not in full_name + "milvus" not in full_name and "openlineage" not in full_name and "ray_transformation" not in full_name ): temp_module = importlib.import_module(full_name) if is_pkg: next_packages.append(temp_module) - except ModuleNotFoundError: + except Exception: # noqa: BLE001 + # Gracefully skip modules that fail to import due to: + # - ModuleNotFoundError: optional system dependency missing + # - FeastExtrasDependencyImportError: optional Python extra + # missing (e.g. pymongo, couchbase) + # - TypeError or other errors: third-party libraries with + # internal incompatibilities at import time (e.g. + # qdrant_client raises TypeError when a grpc EnumTypeWrapper + # is used with the | union operator on Python < 3.12) pass + if temp_module is None: + continue + # Retrieve the setup and teardown functions defined in this file. 
relative_path_from_feast = full_name.split(".", 1)[1] function_suffix = relative_path_from_feast.replace(".", "_") diff --git a/sdk/python/tests/example_repos/example_feature_repo_1.py b/sdk/python/tests/example_repos/example_feature_repo_1.py index 492b8c5555d..4cdcb9622a4 100644 --- a/sdk/python/tests/example_repos/example_feature_repo_1.py +++ b/sdk/python/tests/example_repos/example_feature_repo_1.py @@ -5,7 +5,7 @@ from feast import Entity, FeatureService, FeatureView, Field, FileSource, PushSource from feast.on_demand_feature_view import on_demand_feature_view from feast.types import Array, Float32, Int64, String -from tests.integration.feature_repos.universal.feature_views import TAGS +from tests.universal.feature_repos.universal.feature_views import TAGS # Note that file source paths are not validated, so there doesn't actually need to be any data # at the paths for these file sources. Since these paths are effectively fake, this example diff --git a/sdk/python/tests/example_repos/example_feature_repo_with_project_1.py b/sdk/python/tests/example_repos/example_feature_repo_with_project_1.py index ad04d7ae664..a6db2d63a33 100644 --- a/sdk/python/tests/example_repos/example_feature_repo_with_project_1.py +++ b/sdk/python/tests/example_repos/example_feature_repo_with_project_1.py @@ -6,7 +6,7 @@ from feast.on_demand_feature_view import on_demand_feature_view from feast.project import Project from feast.types import Array, Float32, Int64, String -from tests.integration.feature_repos.universal.feature_views import TAGS +from tests.universal.feature_repos.universal.feature_views import TAGS # Note that file source paths are not validated, so there doesn't actually need to be any data # at the paths for these file sources. 
Since these paths are effectively fake, this example diff --git a/sdk/python/tests/integration/cli/__init__.py b/sdk/python/tests/integration/cli/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/tests/integration/registration/test_universal_cli.py b/sdk/python/tests/integration/cli/test_universal_cli.py similarity index 99% rename from sdk/python/tests/integration/registration/test_universal_cli.py rename to sdk/python/tests/integration/cli/test_universal_cli.py index 735f71407f6..ed9aea7ff91 100644 --- a/sdk/python/tests/integration/registration/test_universal_cli.py +++ b/sdk/python/tests/integration/cli/test_universal_cli.py @@ -7,7 +7,7 @@ from assertpy import assertpy from feast.feature_store import FeatureStore -from tests.integration.feature_repos.universal.data_sources.file import ( +from tests.universal.feature_repos.universal.data_sources.file import ( FileDataSourceCreator, ) from tests.utils.basic_read_write_test import basic_rw_test diff --git a/sdk/python/tests/integration/compute_engines/ray_compute/__init__.py b/sdk/python/tests/integration/compute_engines/ray_compute/__init__.py deleted file mode 100644 index 7938db59420..00000000000 --- a/sdk/python/tests/integration/compute_engines/ray_compute/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Ray compute engine integration tests.""" diff --git a/sdk/python/tests/integration/dbt/conftest.py b/sdk/python/tests/integration/dbt/conftest.py index d1df145f3ec..00ee01e7027 100644 --- a/sdk/python/tests/integration/dbt/conftest.py +++ b/sdk/python/tests/integration/dbt/conftest.py @@ -29,4 +29,7 @@ def pytest_collection_modifyitems(config, items): # noqa: ARG001 reason="dbt manifest.json not found - run 'dbt build' first or use dbt-integration-test workflow" ) for item in items: - item.add_marker(skip_marker) + if str(TEST_DBT_PROJECT_DIR) in str(item.fspath) or "/dbt/" in str( + item.fspath + ): + item.add_marker(skip_marker) diff --git 
a/sdk/python/tests/integration/materialization/kubernetes/test_k8s.py b/sdk/python/tests/integration/materialization/kubernetes/test_k8s.py index a944ae3e943..bb8d17da5bd 100644 --- a/sdk/python/tests/integration/materialization/kubernetes/test_k8s.py +++ b/sdk/python/tests/integration/materialization/kubernetes/test_k8s.py @@ -4,14 +4,14 @@ from feast import Entity, Feature, FeatureView, ValueType from tests.data.data_creator import create_basic_driver_dataset -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, RegistryLocation, ) -from tests.integration.feature_repos.repo_configuration import ( +from tests.universal.feature_repos.repo_configuration import ( construct_test_environment, ) -from tests.integration.feature_repos.universal.data_sources.redshift import ( +from tests.universal.feature_repos.universal.data_sources.redshift import ( RedshiftDataSourceCreator, ) from tests.utils.e2e_test_validation import validate_offline_online_store_consistency diff --git a/sdk/python/tests/integration/materialization/test_lambda.py b/sdk/python/tests/integration/materialization/test_lambda.py index f0c1e108694..8e317d7059f 100644 --- a/sdk/python/tests/integration/materialization/test_lambda.py +++ b/sdk/python/tests/integration/materialization/test_lambda.py @@ -7,14 +7,14 @@ from feast.field import Field from feast.types import Float32 from tests.data.data_creator import create_basic_driver_dataset -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, RegistryLocation, ) -from tests.integration.feature_repos.repo_configuration import ( +from tests.universal.feature_repos.repo_configuration import ( construct_test_environment, ) -from tests.integration.feature_repos.universal.data_sources.redshift import ( +from 
tests.universal.feature_repos.universal.data_sources.redshift import ( RedshiftDataSourceCreator, ) from tests.utils.e2e_test_validation import validate_offline_online_store_consistency diff --git a/sdk/python/tests/integration/materialization/test_snowflake.py b/sdk/python/tests/integration/materialization/test_snowflake.py index a783eac0380..e6f600746d2 100644 --- a/sdk/python/tests/integration/materialization/test_snowflake.py +++ b/sdk/python/tests/integration/materialization/test_snowflake.py @@ -9,13 +9,13 @@ from feast.types import Array, Bool, Bytes, Float64, Int32, Int64, String, UnixTimestamp from feast.utils import _utc_now from tests.data.data_creator import create_basic_driver_dataset -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.repo_configuration import ( +from tests.universal.feature_repos.repo_configuration import ( construct_test_environment, ) -from tests.integration.feature_repos.universal.data_sources.snowflake import ( +from tests.universal.feature_repos.universal.data_sources.snowflake import ( SnowflakeDataSourceCreator, ) from tests.utils.e2e_test_validation import validate_offline_online_store_consistency diff --git a/sdk/python/tests/integration/materialization/test_universal_materialization.py b/sdk/python/tests/integration/materialization/test_universal_materialization.py index cf15746bf9e..3bf1eac51c2 100644 --- a/sdk/python/tests/integration/materialization/test_universal_materialization.py +++ b/sdk/python/tests/integration/materialization/test_universal_materialization.py @@ -10,8 +10,6 @@ ) from feast.on_demand_feature_view import on_demand_feature_view from feast.types import Float32, Float64 -from tests.data.data_creator import create_basic_driver_dataset -from tests.utils.e2e_test_validation import validate_offline_online_store_consistency def 
_create_test_entities(): @@ -217,37 +215,6 @@ def odfv_multi(df: pd.DataFrame) -> pd.DataFrame: assert resp["price_plus_revenue"][0] == 105.0 -@pytest.mark.integration -@pytest.mark.universal_offline_stores -@pytest.mark.parametrize("materialization_pull_latest", [True, False]) -def test_universal_materialization_consistency( - environment, materialization_pull_latest -): - environment.materialization.pull_latest_features = materialization_pull_latest - - fs = environment.feature_store - df = create_basic_driver_dataset() - ds = environment.data_source_creator.create_data_source( - df, - fs.project, - field_mapping={"ts_1": "ts"}, - ) - driver = Entity( - name="driver_id", - join_keys=["driver_id"], - ) - driver_stats_fv = FeatureView( - name="driver_hourly_stats", - entities=[driver], - ttl=timedelta(weeks=52), - schema=[Field(name="value", dtype=Float32)], - source=ds, - ) - fs.apply([driver, driver_stats_fv]) - split_dt = df["ts_1"][4].to_pydatetime() - timedelta(seconds=1) - validate_offline_online_store_consistency(fs, driver_stats_fv, split_dt) - - @pytest.mark.integration def test_odfv_write_methods(environment): """ diff --git a/sdk/python/tests/integration/offline_store/test_dqm_validation.py b/sdk/python/tests/integration/offline_store/test_dqm_validation.py index 710dd6ca2e6..3fce9f07597 100644 --- a/sdk/python/tests/integration/offline_store/test_dqm_validation.py +++ b/sdk/python/tests/integration/offline_store/test_dqm_validation.py @@ -18,10 +18,10 @@ from feast.protos.feast.serving.ServingService_pb2 import FieldStatus from feast.utils import _utc_now, make_tzaware from feast.wait import wait_retry_backoff -from tests.integration.feature_repos.repo_configuration import ( +from tests.universal.feature_repos.repo_configuration import ( construct_universal_feature_views, ) -from tests.integration.feature_repos.universal.entities import ( +from tests.universal.feature_repos.universal.entities import ( customer, driver, location, diff --git 
a/sdk/python/tests/integration/offline_store/test_feature_logging.py b/sdk/python/tests/integration/offline_store/test_feature_logging.py index 53147d242ef..dc98ba69d2c 100644 --- a/sdk/python/tests/integration/offline_store/test_feature_logging.py +++ b/sdk/python/tests/integration/offline_store/test_feature_logging.py @@ -13,15 +13,15 @@ ) from feast.feature_service import FeatureService from feast.wait import wait_retry_backoff -from tests.integration.feature_repos.repo_configuration import ( +from tests.universal.feature_repos.repo_configuration import ( construct_universal_feature_views, ) -from tests.integration.feature_repos.universal.entities import ( +from tests.universal.feature_repos.universal.entities import ( customer, driver, location, ) -from tests.integration.feature_repos.universal.feature_views import conv_rate_plus_100 +from tests.universal.feature_repos.universal.feature_views import conv_rate_plus_100 from tests.utils.test_log_creator import prepare_logs, to_logs_dataset diff --git a/sdk/python/tests/integration/offline_store/test_non_entity_mode.py b/sdk/python/tests/integration/offline_store/test_non_entity_mode.py new file mode 100644 index 00000000000..f17352fb356 --- /dev/null +++ b/sdk/python/tests/integration/offline_store/test_non_entity_mode.py @@ -0,0 +1,138 @@ +from datetime import timedelta + +import pandas as pd +import pytest + +from feast.utils import _utc_now +from tests.universal.feature_repos.repo_configuration import ( + construct_universal_feature_views, +) +from tests.universal.feature_repos.universal.entities import driver + + +@pytest.mark.integration +@pytest.mark.universal_offline_stores +@pytest.mark.ray_offline_stores_only +def test_non_entity_mode_basic(environment, universal_data_sources): + """Test historical features retrieval without entity_df (non-entity mode). + + This tests the basic functionality where entity_df=None and start_date/end_date + are provided to retrieve all features within the time range. 
+ """ + store = environment.feature_store + + (entities, datasets, data_sources) = universal_data_sources + feature_views = construct_universal_feature_views(data_sources) + + store.apply( + [ + driver(), + feature_views.driver, + ] + ) + + # Use the environment's start and end dates for the query + start_date = environment.start_date + end_date = environment.end_date + + # Non-entity mode: entity_df=None with start_date and end_date + result_df = store.get_historical_features( + entity_df=None, + features=[ + "driver_stats:conv_rate", + "driver_stats:acc_rate", + "driver_stats:avg_daily_trips", + ], + full_feature_names=False, + start_date=start_date, + end_date=end_date, + ).to_df() + + # Verify data was retrieved + assert len(result_df) > 0, "Non-entity mode should return data" + assert "conv_rate" in result_df.columns + assert "acc_rate" in result_df.columns + assert "avg_daily_trips" in result_df.columns + assert "event_timestamp" in result_df.columns + assert "driver_id" in result_df.columns + + # Verify timestamps are within the requested range + result_df["event_timestamp"] = pd.to_datetime( + result_df["event_timestamp"], utc=True + ) + assert (result_df["event_timestamp"] >= start_date).all() + assert (result_df["event_timestamp"] <= end_date).all() + + +@pytest.mark.integration +@pytest.mark.universal_offline_stores +@pytest.mark.ray_offline_stores_only +def test_non_entity_mode_preserves_multiple_timestamps( + environment, universal_data_sources +): + """Test that non-entity mode preserves multiple transactions per entity ID. + + This is a regression test for the fix that ensures distinct (entity_key, event_timestamp) + combinations are preserved, not just distinct entity keys. This is critical for + proper point-in-time joins when an entity has multiple transactions. 
+ """ + store = environment.feature_store + + (entities, datasets, data_sources) = universal_data_sources + feature_views = construct_universal_feature_views(data_sources) + + store.apply( + [ + driver(), + feature_views.driver, + ] + ) + + now = _utc_now() + ts1 = pd.Timestamp(now - timedelta(hours=2)).round("ms") + ts2 = pd.Timestamp(now - timedelta(hours=1)).round("ms") + ts3 = pd.Timestamp(now).round("ms") + + # Write data with multiple timestamps for the same entity (driver_id=9001) + df_to_write = pd.DataFrame.from_dict( + { + "event_timestamp": [ts1, ts2, ts3], + "driver_id": [9001, 9001, 9001], # Same entity, different timestamps + "conv_rate": [0.1, 0.2, 0.3], + "acc_rate": [0.9, 0.8, 0.7], + "avg_daily_trips": [10, 20, 30], + "driver_metadata": [None, None, None], + "driver_config": [None, None, None], + "driver_profile": [None, None, None], + "created": [ts1, ts2, ts3], + }, + ) + + store.write_to_offline_store( + feature_views.driver.name, df_to_write, allow_registry_cache=False + ) + + # Query without entity_df - should get all 3 rows for driver_id=9001 + result_df = store.get_historical_features( + entity_df=None, + features=[ + "driver_stats:conv_rate", + "driver_stats:acc_rate", + ], + full_feature_names=False, + start_date=ts1 - timedelta(minutes=1), + end_date=ts3 + timedelta(minutes=1), + ).to_df() + + # Filter to just our test entity + result_df = result_df[result_df["driver_id"] == 9001] + + # Verify we got all 3 rows with different timestamps (not just 1 row) + assert len(result_df) == 3, ( + f"Expected 3 rows for driver_id=9001 (one per timestamp), got {len(result_df)}" + ) + + # Verify the feature values are correct for each timestamp + result_df = result_df.sort_values("event_timestamp").reset_index(drop=True) + assert list(result_df["conv_rate"]) == [0.1, 0.2, 0.3] + assert list(result_df["acc_rate"]) == [0.9, 0.8, 0.7] diff --git a/sdk/python/tests/integration/offline_store/test_offline_write.py 
b/sdk/python/tests/integration/offline_store/test_offline_write.py index 21672991b94..df60e40ed56 100644 --- a/sdk/python/tests/integration/offline_store/test_offline_write.py +++ b/sdk/python/tests/integration/offline_store/test_offline_write.py @@ -1,3 +1,4 @@ +import json import random from datetime import timedelta @@ -6,12 +7,12 @@ import pytest from feast import FeatureView, Field -from feast.types import Float32, Int32 +from feast.types import Float32, Int32, Json, Map, String, Struct from feast.utils import _utc_now -from tests.integration.feature_repos.repo_configuration import ( +from tests.universal.feature_repos.repo_configuration import ( construct_universal_feature_views, ) -from tests.integration.feature_repos.universal.entities import driver +from tests.universal.feature_repos.universal.entities import driver @pytest.mark.integration @@ -36,6 +37,18 @@ def test_reorder_columns(environment, universal_data_sources): "event_timestamp": [ts, ts], "acc_rate": [random.random(), random.random()], "driver_id": [1001, 1001], + "driver_metadata": [ + {"vehicle_type": "sedan", "rating": "4.5"}, + {"vehicle_type": "suv", "rating": "3.8"}, + ], + "driver_config": [ + json.dumps({"max_distance_km": 100, "preferred_zones": ["north"]}), + json.dumps({"max_distance_km": 50, "preferred_zones": ["south"]}), + ], + "driver_profile": [ + {"name": "driver_1001", "age": "30"}, + {"name": "driver_1001", "age": "30"}, + ], }, ) @@ -66,7 +79,13 @@ def test_writing_incorrect_schema_fails(environment, universal_data_sources): "created": [ts, ts], }, ) - expected_missing = ["acc_rate", "avg_daily_trips"] + expected_missing = [ + "acc_rate", + "avg_daily_trips", + "driver_config", + "driver_metadata", + "driver_profile", + ] expected_extra = ["incorrect_schema"] with pytest.raises(ValueError, match="missing_expected_columns") as excinfo: @@ -92,6 +111,12 @@ def test_writing_consecutively_to_offline_store(environment, universal_data_sour Field(name="avg_daily_trips", 
dtype=Int32), Field(name="conv_rate", dtype=Float32), Field(name="acc_rate", dtype=Float32), + Field(name="driver_metadata", dtype=Map), + Field(name="driver_config", dtype=Json), + Field( + name="driver_profile", + dtype=Struct({"name": String, "age": String}), + ), ], source=data_sources.driver, ttl=timedelta( @@ -132,6 +157,18 @@ def test_writing_consecutively_to_offline_store(environment, universal_data_sour "acc_rate": [random.random(), random.random()], "avg_daily_trips": [random.randint(0, 10), random.randint(0, 10)], "created": [ts, ts], + "driver_metadata": [ + {"vehicle_type": "sedan", "rating": "4.5"}, + {"vehicle_type": "suv", "rating": "3.8"}, + ], + "driver_config": [ + json.dumps({"max_distance_km": 100, "preferred_zones": ["north"]}), + json.dumps({"max_distance_km": 50, "preferred_zones": ["south"]}), + ], + "driver_profile": [ + {"name": "driver_1001", "age": "30"}, + {"name": "driver_1001", "age": "35"}, + ], }, ) first_df = first_df.astype({"conv_rate": "float32", "acc_rate": "float32"}) @@ -176,6 +213,18 @@ def test_writing_consecutively_to_offline_store(environment, universal_data_sour "acc_rate": [random.random(), random.random()], "avg_daily_trips": [random.randint(0, 10), random.randint(0, 10)], "created": [ts, ts], + "driver_metadata": [ + {"vehicle_type": "truck", "rating": "4.0"}, + {"vehicle_type": "sedan", "rating": "4.2"}, + ], + "driver_config": [ + json.dumps({"max_distance_km": 150, "preferred_zones": ["east"]}), + json.dumps({"max_distance_km": 200, "preferred_zones": ["west"]}), + ], + "driver_profile": [ + {"name": "driver_1001", "age": "31"}, + {"name": "driver_1001", "age": "36"}, + ], }, ) second_df = second_df.astype({"conv_rate": "float32", "acc_rate": "float32"}) diff --git a/sdk/python/tests/integration/offline_store/test_persist.py b/sdk/python/tests/integration/offline_store/test_persist.py index 8e6f1829174..80b024310b1 100644 --- a/sdk/python/tests/integration/offline_store/test_persist.py +++ 
b/sdk/python/tests/integration/offline_store/test_persist.py @@ -2,10 +2,10 @@ from feast.errors import SavedDatasetLocationAlreadyExists from feast.saved_dataset import SavedDatasetStorage -from tests.integration.feature_repos.repo_configuration import ( +from tests.universal.feature_repos.repo_configuration import ( construct_universal_feature_views, ) -from tests.integration.feature_repos.universal.entities import ( +from tests.universal.feature_repos.universal.entities import ( customer, driver, location, diff --git a/sdk/python/tests/integration/offline_store/test_push_features_to_offline_store.py b/sdk/python/tests/integration/offline_store/test_push_features_to_offline_store.py index 5e3d72e671b..7d038063ee5 100644 --- a/sdk/python/tests/integration/offline_store/test_push_features_to_offline_store.py +++ b/sdk/python/tests/integration/offline_store/test_push_features_to_offline_store.py @@ -4,10 +4,10 @@ from feast.data_source import PushMode from feast.utils import _utc_now -from tests.integration.feature_repos.repo_configuration import ( +from tests.universal.feature_repos.repo_configuration import ( construct_universal_feature_views, ) -from tests.integration.feature_repos.universal.entities import location +from tests.universal.feature_repos.universal.entities import location @pytest.mark.integration diff --git a/sdk/python/tests/integration/offline_store/test_s3_custom_endpoint.py b/sdk/python/tests/integration/offline_store/test_s3_custom_endpoint.py index 645e0f7331f..45426c63b8d 100644 --- a/sdk/python/tests/integration/offline_store/test_s3_custom_endpoint.py +++ b/sdk/python/tests/integration/offline_store/test_s3_custom_endpoint.py @@ -3,12 +3,12 @@ import pytest from feast.feature_store import FeastObject -from tests.integration.feature_repos.repo_configuration import ( +from tests.universal.feature_repos.repo_configuration import ( IntegrationTestRepoConfig, construct_test_environment, construct_universal_feature_views, ) -from 
tests.integration.feature_repos.universal.entities import customer, driver +from tests.universal.feature_repos.universal.entities import customer, driver # TODO: Allow integration tests to run using different credentials. @@ -21,7 +21,7 @@ def test_registration_and_retrieval_from_custom_s3_endpoint( universal_data_sources, ): config = IntegrationTestRepoConfig( - offline_store_creator="tests.integration.feature_repos.universal.data_sources.file.S3FileDataSourceCreator" + offline_store_creator="tests.universal.feature_repos.universal.data_sources.file.S3FileDataSourceCreator" ) import os diff --git a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py index a56609e53ce..757e0d72a6d 100644 --- a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py +++ b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py @@ -16,19 +16,16 @@ ) from feast.types import Float32, Int32, String from feast.utils import _utc_now -from tests.integration.feature_repos.repo_configuration import ( +from tests.universal.feature_repos.repo_configuration import ( construct_universal_feature_views, table_name_from_data_source, ) -from tests.integration.feature_repos.universal.data_sources.file import ( +from tests.universal.feature_repos.universal.data_sources.file import ( RemoteOfflineOidcAuthStoreDataSourceCreator, RemoteOfflineStoreDataSourceCreator, RemoteOfflineTlsStoreDataSourceCreator, ) -from tests.integration.feature_repos.universal.data_sources.snowflake import ( - SnowflakeDataSourceCreator, -) -from tests.integration.feature_repos.universal.entities import ( +from tests.universal.feature_repos.universal.entities import ( customer, driver, location, @@ -279,7 +276,7 @@ def test_historical_features_with_entities_from_query( raise pytest.skip("Offline source is not sql-based") data_source_creator = 
environment.data_source_creator - if isinstance(data_source_creator, SnowflakeDataSourceCreator): + if type(data_source_creator).__name__ == "SnowflakeDataSourceCreator": entity_df_query = f""" SELECT "customer_id", "driver_id", "order_id", "origin_id", "destination_id", "event_timestamp" FROM "{orders_table}" diff --git a/sdk/python/tests/integration/offline_store/test_universal_materialization.py b/sdk/python/tests/integration/offline_store/test_universal_materialization.py new file mode 100644 index 00000000000..bbb41bf5d36 --- /dev/null +++ b/sdk/python/tests/integration/offline_store/test_universal_materialization.py @@ -0,0 +1,39 @@ +from datetime import timedelta + +import pytest + +from feast import Entity, FeatureView, Field +from feast.types import Float32 +from tests.data.data_creator import create_basic_driver_dataset +from tests.utils.e2e_test_validation import validate_offline_online_store_consistency + + +@pytest.mark.integration +@pytest.mark.universal_offline_stores +@pytest.mark.parametrize("materialization_pull_latest", [True, False]) +def test_universal_materialization_consistency( + environment, materialization_pull_latest +): + environment.materialization.pull_latest_features = materialization_pull_latest + + fs = environment.feature_store + df = create_basic_driver_dataset() + ds = environment.data_source_creator.create_data_source( + df, + fs.project, + field_mapping={"ts_1": "ts"}, + ) + driver = Entity( + name="driver_id", + join_keys=["driver_id"], + ) + driver_stats_fv = FeatureView( + name="driver_hourly_stats", + entities=[driver], + ttl=timedelta(weeks=52), + schema=[Field(name="value", dtype=Float32)], + source=ds, + ) + fs.apply([driver, driver_stats_fv]) + split_dt = df["ts_1"][4].to_pydatetime() - timedelta(seconds=1) + validate_offline_online_store_consistency(fs, driver_stats_fv, split_dt) diff --git a/sdk/python/tests/integration/registration/test_universal_types.py 
b/sdk/python/tests/integration/offline_store/test_universal_types.py similarity index 58% rename from sdk/python/tests/integration/registration/test_universal_types.py rename to sdk/python/tests/integration/offline_store/test_universal_types.py index b464cf2f766..011508dd634 100644 --- a/sdk/python/tests/integration/registration/test_universal_types.py +++ b/sdk/python/tests/integration/offline_store/test_universal_types.py @@ -1,7 +1,6 @@ import logging -from dataclasses import dataclass from datetime import datetime, timedelta -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Dict, List, Tuple, Union import numpy as np import pandas as pd @@ -10,24 +9,48 @@ from feast.infra.offline_stores.offline_store import RetrievalJob from feast.types import ( - Array, - Bool, - FeastType, Float32, Float64, Int32, Int64, String, - UnixTimestamp, ) from feast.utils import _utc_now from tests.data.data_creator import create_basic_driver_dataset -from tests.integration.feature_repos.universal.entities import driver -from tests.integration.feature_repos.universal.feature_views import driver_feature_view +from tests.universal.feature_repos.universal.entities import driver +from tests.universal.feature_repos.universal.feature_views import driver_feature_view +from tests.utils.type_test_utils import ( + TypeTestConfig, + get_feast_type, + get_type_test_fixtures, + populate_test_configs, +) logger = logging.getLogger(__name__) +OFFLINE_TYPE_TEST_CONFIGS: List[TypeTestConfig] = populate_test_configs() + + +@pytest.fixture( + params=OFFLINE_TYPE_TEST_CONFIGS, + ids=[str(c) for c in OFFLINE_TYPE_TEST_CONFIGS], +) +def offline_types_test_fixtures(request, environment): + config: TypeTestConfig = request.param + if environment.provider == "aws" and config.feature_is_list is True: + pytest.skip("Redshift doesn't support list features") + if ( + environment.data_source_creator.__class__.__name__ + == "ClickhouseDataSourceCreator" + and 
config.feature_is_list + and not config.has_empty_list + ): + pytest.skip("Clickhouse doesn't support Nullable(Array) type features") + + return get_type_test_fixtures(request, environment) + + @pytest.mark.integration @pytest.mark.universal_offline_stores @pytest.mark.parametrize("entity_type", [Int32, Int64, String]) @@ -48,7 +71,7 @@ def test_entity_inference_types_match(environment, entity_type): data_source=data_source, name=f"fv_entity_type_{entity_type.name.lower()}", infer_entities=True, # Forces entity inference by not including a field for the entity. - dtype=_get_feast_type("int32", False), + dtype=get_feast_type("int32", False), entity_type=entity_type, ) @@ -88,7 +111,7 @@ def test_feature_get_historical_features_types_match( fv = driver_feature_view( data_source=data_source, name="get_historical_features_types_match", - dtype=_get_feast_type(config.feature_dtype, config.feature_is_list), + dtype=get_feast_type(config.feature_dtype, config.feature_is_list), ) fs.apply([fv, entity]) @@ -127,88 +150,6 @@ def test_feature_get_historical_features_types_match( ) -@pytest.mark.integration -@pytest.mark.universal_online_stores(only=["sqlite"]) -def test_feature_get_online_features_types_match( - online_types_test_fixtures, environment -): - config, data_source, fv = online_types_test_fixtures - entity = driver() - fv = driver_feature_view( - data_source=data_source, - name="get_online_features_types_match", - dtype=_get_feast_type(config.feature_dtype, config.feature_is_list), - ) - fs = environment.feature_store - features = [fv.name + ":value"] - fs.apply([fv, entity]) - fs.materialize( - environment.start_date, - environment.end_date - - timedelta(hours=1), # throwing out last record to make sure - # we can successfully infer type even from all empty values - ) - - online_features = fs.get_online_features( - features=features, - entity_rows=[{"driver_id": 1}], - ).to_dict() - - feature_list_dtype_to_expected_online_response_value_type = { - "int32": int, - 
"int64": int, - "float": float, - "string": str, - "bool": bool, - "datetime": datetime, - } - expected_dtype = feature_list_dtype_to_expected_online_response_value_type[ - config.feature_dtype - ] - - assert len(online_features["value"]) == 1 - - if config.feature_is_list: - for feature in online_features["value"]: - assert isinstance(feature, list), "Feature value should be a list" - assert config.has_empty_list or len(feature) > 0, ( - "List of values should not be empty" - ) - for element in feature: - assert isinstance(element, expected_dtype) - else: - for feature in online_features["value"]: - assert isinstance(feature, expected_dtype) - - -def _get_feast_type(feature_dtype: str, feature_is_list: bool) -> FeastType: - dtype: Optional[FeastType] = None - if feature_is_list is True: - if feature_dtype == "int32": - dtype = Array(Int32) - elif feature_dtype == "int64": - dtype = Array(Int64) - elif feature_dtype == "float": - dtype = Array(Float32) - elif feature_dtype == "bool": - dtype = Array(Bool) - elif feature_dtype == "datetime": - dtype = Array(UnixTimestamp) - else: - if feature_dtype == "int32": - dtype = Int32 - elif feature_dtype == "int64": - dtype = Int64 - elif feature_dtype == "float": - dtype = Float32 - elif feature_dtype == "bool": - dtype = Bool - elif feature_dtype == "datetime": - dtype = UnixTimestamp - assert dtype - return dtype - - def assert_expected_historical_feature_types( feature_dtype: str, historical_features_df: pd.DataFrame ): @@ -293,96 +234,3 @@ def assert_expected_arrow_types( assert arrow_type_checker(pa_type.value_type) else: assert arrow_type_checker(pa_type) - - -def populate_test_configs(offline: bool): - feature_dtypes = [ - "int32", - "int64", - "float", - "bool", - "datetime", - ] - configs: List[TypeTestConfig] = [] - for feature_dtype in feature_dtypes: - for feature_is_list in [True, False]: - for has_empty_list in [True, False]: - # For non list features `has_empty_list` does nothing - if feature_is_list is 
False and has_empty_list is True: - continue - - configs.append( - TypeTestConfig( - feature_dtype=feature_dtype, - feature_is_list=feature_is_list, - has_empty_list=has_empty_list, - ) - ) - return configs - - -@dataclass(frozen=True, repr=True) -class TypeTestConfig: - feature_dtype: str - feature_is_list: bool - has_empty_list: bool - - -OFFLINE_TYPE_TEST_CONFIGS: List[TypeTestConfig] = populate_test_configs(offline=True) -ONLINE_TYPE_TEST_CONFIGS: List[TypeTestConfig] = populate_test_configs(offline=False) - - -@pytest.fixture( - params=OFFLINE_TYPE_TEST_CONFIGS, - ids=[str(c) for c in OFFLINE_TYPE_TEST_CONFIGS], -) -def offline_types_test_fixtures(request, environment): - config: TypeTestConfig = request.param - if environment.provider == "aws" and config.feature_is_list is True: - pytest.skip("Redshift doesn't support list features") - if ( - environment.data_source_creator.__class__.__name__ - == "ClickhouseDataSourceCreator" - and config.feature_is_list - and not config.has_empty_list - ): - pytest.skip("Clickhouse doesn't support Nullable(Array) type features") - - return get_fixtures(request, environment) - - -@pytest.fixture( - params=ONLINE_TYPE_TEST_CONFIGS, - ids=[str(c) for c in ONLINE_TYPE_TEST_CONFIGS], -) -def online_types_test_fixtures(request, environment): - return get_fixtures(request, environment) - - -def get_fixtures(request, environment): - config: TypeTestConfig = request.param - # Lower case needed because Redshift lower-cases all table names - destination_name = ( - f"feature_type_{config.feature_dtype}{config.feature_is_list}".replace( - ".", "" - ).lower() - ) - config = request.param - df = create_basic_driver_dataset( - Int64, - config.feature_dtype, - config.feature_is_list, - config.has_empty_list, - ) - data_source = environment.data_source_creator.create_data_source( - df, - destination_name=destination_name, - field_mapping={"ts_1": "ts"}, - ) - fv = driver_feature_view( - data_source=data_source, - name=destination_name, - 
dtype=_get_feast_type(config.feature_dtype, config.feature_is_list), - ) - - return config, data_source, fv diff --git a/sdk/python/tests/integration/online_store/__init__.py b/sdk/python/tests/integration/online_store/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/tests/integration/online_store/test_push_features_to_online_store.py b/sdk/python/tests/integration/online_store/test_push_features_to_online_store.py index 8986e21c57d..536864ed97e 100644 --- a/sdk/python/tests/integration/online_store/test_push_features_to_online_store.py +++ b/sdk/python/tests/integration/online_store/test_push_features_to_online_store.py @@ -2,10 +2,10 @@ import pytest from feast.utils import _utc_now -from tests.integration.feature_repos.repo_configuration import ( +from tests.universal.feature_repos.repo_configuration import ( construct_universal_feature_views, ) -from tests.integration.feature_repos.universal.entities import location +from tests.universal.feature_repos.universal.entities import location @pytest.fixture @@ -47,7 +47,7 @@ def test_push_features_and_read(store): @pytest.mark.integration -@pytest.mark.universal_online_stores(only=["dynamodb"]) +@pytest.mark.universal_online_stores(only=["dynamodb", "mongodb"]) async def test_push_features_and_read_async(store): await store.push_async("location_stats_push_source", _ingest_df()) diff --git a/sdk/python/tests/integration/online_store/test_remote_online_store.py b/sdk/python/tests/integration/online_store/test_remote_online_store.py index 80166abf431..3ee3d161d14 100644 --- a/sdk/python/tests/integration/online_store/test_remote_online_store.py +++ b/sdk/python/tests/integration/online_store/test_remote_online_store.py @@ -1,3 +1,4 @@ +import json import logging import os import tempfile @@ -383,6 +384,18 @@ def test_remote_online_store_read_write(auth_config, tls_mode): "avg_daily_trips": [50, 45], "event_timestamp": [pd.Timestamp(_utc_now()).round("ms")] * 2, "created": 
[pd.Timestamp(_utc_now()).round("ms")] * 2, + "driver_metadata": [ + {"vehicle_type": "sedan", "rating": "4.5"}, + {"vehicle_type": "suv", "rating": "3.8"}, + ], + "driver_config": [ + json.dumps({"max_distance_km": 100, "preferred_zones": ["north"]}), + json.dumps({"max_distance_km": 50, "preferred_zones": ["south"]}), + ], + "driver_profile": [ + {"name": "driver_1000", "age": "30"}, + {"name": "driver_1001", "age": "35"}, + ], } ) diff --git a/sdk/python/tests/integration/materialization/test_universal_e2e.py b/sdk/python/tests/integration/online_store/test_universal_e2e.py similarity index 84% rename from sdk/python/tests/integration/materialization/test_universal_e2e.py rename to sdk/python/tests/integration/online_store/test_universal_e2e.py index 202ae859aed..cfacbca59af 100644 --- a/sdk/python/tests/integration/materialization/test_universal_e2e.py +++ b/sdk/python/tests/integration/online_store/test_universal_e2e.py @@ -2,8 +2,8 @@ import pytest -from tests.integration.feature_repos.universal.entities import driver -from tests.integration.feature_repos.universal.feature_views import driver_feature_view +from tests.universal.feature_repos.universal.entities import driver +from tests.universal.feature_repos.universal.feature_views import driver_feature_view from tests.utils.e2e_test_validation import validate_offline_online_store_consistency diff --git a/sdk/python/tests/integration/online_store/test_universal_online.py b/sdk/python/tests/integration/online_store/test_universal_online.py index 51296f31607..c4b141700b5 100644 --- a/sdk/python/tests/integration/online_store/test_universal_online.py +++ b/sdk/python/tests/integration/online_store/test_universal_online.py @@ -32,12 +32,12 @@ ) from feast.utils import _utc_now from feast.wait import wait_retry_backoff -from tests.integration.feature_repos.repo_configuration import ( +from tests.universal.feature_repos.repo_configuration import ( Environment, construct_universal_feature_views, ) -from 
tests.integration.feature_repos.universal.entities import driver, item -from tests.integration.feature_repos.universal.feature_views import ( +from tests.universal.feature_repos.universal.entities import driver, item +from tests.universal.feature_repos.universal.feature_views import ( TAGS, create_driver_hourly_stats_feature_view, create_item_embeddings_feature_view, @@ -258,6 +258,9 @@ def test_write_to_online_store(environment, universal_data_sources): "conv_rate": [0.85], "acc_rate": [0.91], "avg_daily_trips": [14], + "driver_metadata": [None], + "driver_config": [None], + "driver_profile": [None], "event_timestamp": [pd.Timestamp(_utc_now()).round("ms")], "created": [pd.Timestamp(_utc_now()).round("ms")], } @@ -435,6 +438,9 @@ def setup_feature_store_universal_feature_views( "conv_rate": [0.5, 0.3], "acc_rate": [0.6, 0.4], "avg_daily_trips": [4, 5], + "driver_metadata": [None, None], + "driver_config": [None, None], + "driver_profile": [None, None], "event_timestamp": [ pd.to_datetime(1646263500, utc=True, unit="s"), pd.to_datetime(1646263600, utc=True, unit="s"), @@ -517,7 +523,7 @@ async def _do_async_retrieval_test(environment, universal_data_sources): @pytest.mark.asyncio @pytest.mark.integration -@pytest.mark.universal_online_stores(only=["redis", "postgres"]) +@pytest.mark.universal_online_stores(only=["redis", "postgres", "mongodb"]) async def test_async_online_retrieval_with_event_timestamps( environment, universal_data_sources ): diff --git a/sdk/python/tests/integration/online_store/test_universal_online_types.py b/sdk/python/tests/integration/online_store/test_universal_online_types.py new file mode 100644 index 00000000000..7340d00f274 --- /dev/null +++ b/sdk/python/tests/integration/online_store/test_universal_online_types.py @@ -0,0 +1,77 @@ +from datetime import datetime, timedelta +from typing import List + +import pytest + +from tests.universal.feature_repos.universal.entities import driver +from 
tests.universal.feature_repos.universal.feature_views import driver_feature_view +from tests.utils.type_test_utils import ( + TypeTestConfig, + get_feast_type, + get_type_test_fixtures, + populate_test_configs, +) + +ONLINE_TYPE_TEST_CONFIGS: List[TypeTestConfig] = populate_test_configs() + + +@pytest.fixture( + params=ONLINE_TYPE_TEST_CONFIGS, + ids=[str(c) for c in ONLINE_TYPE_TEST_CONFIGS], +) +def online_types_test_fixtures(request, environment): + return get_type_test_fixtures(request, environment) + + +@pytest.mark.integration +@pytest.mark.universal_online_stores(only=["sqlite"]) +def test_feature_get_online_features_types_match( + online_types_test_fixtures, environment +): + config, data_source, fv = online_types_test_fixtures + entity = driver() + fv = driver_feature_view( + data_source=data_source, + name="get_online_features_types_match", + dtype=get_feast_type(config.feature_dtype, config.feature_is_list), + ) + fs = environment.feature_store + features = [fv.name + ":value"] + fs.apply([fv, entity]) + fs.materialize( + environment.start_date, + environment.end_date + - timedelta(hours=1), # throwing out last record to make sure + # we can successfully infer type even from all empty values + ) + + online_features = fs.get_online_features( + features=features, + entity_rows=[{"driver_id": 1}], + ).to_dict() + + feature_list_dtype_to_expected_online_response_value_type = { + "int32": int, + "int64": int, + "float": float, + "string": str, + "bool": bool, + "datetime": datetime, + } + expected_dtype = feature_list_dtype_to_expected_online_response_value_type[ + config.feature_dtype + ] + + assert len(online_features["value"]) == 1 + + if config.feature_is_list: + for feature in online_features["value"]: + assert isinstance(feature, list), "Feature value should be a list" + assert config.has_empty_list or len(feature) > 0, ( + "List of values should not be empty" + ) + for element in feature: + assert isinstance(element, expected_dtype) + else: + for 
feature in online_features["value"]: + assert isinstance(feature, expected_dtype) diff --git a/sdk/python/tests/integration/registration/rest_api/conftest.py b/sdk/python/tests/integration/registration/rest_api/conftest.py deleted file mode 100644 index 36c358a9aa6..00000000000 --- a/sdk/python/tests/integration/registration/rest_api/conftest.py +++ /dev/null @@ -1,150 +0,0 @@ -import os -from pathlib import Path - -import pytest -import requests -from kubernetes import client, config - -from tests.integration.registration.rest_api.support import ( - applyFeastProject, - create_feast_project, - create_namespace, - create_route, - delete_namespace, - deploy_and_validate_pod, - execPodCommand, - get_pod_name_by_prefix, - run_kubectl_apply_with_sed, - run_kubectl_command, - validate_feature_store_cr_status, -) - - -class FeastRestClient: - def __init__(self, base_url): - self.base_url = base_url.rstrip("/") - self.api_prefix = "/api/v1" - - def _build_url(self, endpoint): - if not endpoint.startswith("/"): - endpoint = "/" + endpoint - return f"{self.base_url}{self.api_prefix}{endpoint}" - - def get(self, endpoint, params=None): - params = params or {} - params.setdefault("allow_cache", "false") - url = self._build_url(endpoint) - return requests.get(url, params=params, verify=False) - - -@pytest.fixture(scope="session") -def feast_rest_client(): - # Load kubeconfig and initialize Kubernetes client - config.load_kube_config() - api_instance = client.CoreV1Api() - - # Get the directory containing this conftest.py file - test_dir = Path(__file__).parent - resource_dir = test_dir / "resource" - - # Constants and environment values - namespace = "test-ns-feast-rest" - credit_scoring = "credit-scoring" - driver_ranking = "driver-ranking" - service_name = "feast-test-s3-registry-rest" - run_on_openshift = os.getenv("RUN_ON_OPENSHIFT_CI", "false").lower() == "true" - - # Create test namespace - create_namespace(api_instance, namespace) - - try: - if not run_on_openshift: - # 
Deploy dependencies - deploy_and_validate_pod( - namespace, str(resource_dir / "redis.yaml"), "app=redis" - ) - deploy_and_validate_pod( - namespace, str(resource_dir / "postgres.yaml"), "app=postgres" - ) - - # Create and validate FeatureStore CRs - create_feast_project( - str(resource_dir / "feast_config_credit_scoring.yaml"), - namespace, - credit_scoring, - ) - validate_feature_store_cr_status(namespace, credit_scoring) - - create_feast_project( - str(resource_dir / "feast_config_driver_ranking.yaml"), - namespace, - driver_ranking, - ) - validate_feature_store_cr_status(namespace, driver_ranking) - - # Deploy ingress and get route URL - run_kubectl_command( - [ - "apply", - "-f", - str(resource_dir / "feast-registry-nginx.yaml"), - "-n", - namespace, - ] - ) - ingress_host = run_kubectl_command( - [ - "get", - "ingress", - "feast-registry-ingress", - "-n", - namespace, - "-o", - "jsonpath={.spec.rules[0].host}", - ] - ) - route_url = f"http://{ingress_host}" - - # Apply feast projects - - applyFeastProject(namespace, credit_scoring) - - applyFeastProject(namespace, driver_ranking) - - # Create Saved Datasets and Permissions - pod_name = get_pod_name_by_prefix(namespace, credit_scoring) - - # Apply datasets - execPodCommand( - namespace, pod_name, ["python", "create_ui_visible_datasets.py"] - ) - - # Apply permissions - execPodCommand(namespace, pod_name, ["python", "permissions_apply.py"]) - - else: - # OpenShift cluster setup using S3-based registry - aws_access_key = os.getenv("AWS_ACCESS_KEY") - aws_secret_key = os.getenv("AWS_SECRET_KEY") - aws_bucket = os.getenv("AWS_BUCKET_NAME") - registry_path = os.getenv("AWS_REGISTRY_FILE_PATH") - - run_kubectl_apply_with_sed( - aws_access_key, - aws_secret_key, - aws_bucket, - registry_path, - str(resource_dir / "feast_config_rhoai.yaml"), - namespace, - ) - validate_feature_store_cr_status(namespace, "test-s3") - route_url = create_route(namespace, credit_scoring, service_name) - if not route_url: - raise 
RuntimeError("Route URL could not be fetched.") - - print(f"\n Connected to Feast REST at: {route_url}") - yield FeastRestClient(route_url) - - finally: - print(f"\n Deleting namespace: {namespace}") - delete_namespace(api_instance, namespace) diff --git a/sdk/python/tests/integration/registration/test_feature_store.py b/sdk/python/tests/integration/registration/test_feature_store.py index b59af900190..93334fbda19 100644 --- a/sdk/python/tests/integration/registration/test_feature_store.py +++ b/sdk/python/tests/integration/registration/test_feature_store.py @@ -14,15 +14,23 @@ from datetime import timedelta from tempfile import mkstemp +import pandas as pd import pytest from pytest_lazyfixture import lazy_fixture +from feast import FileSource +from feast.data_format import AvroFormat +from feast.data_source import KafkaSource from feast.entity import Entity -from feast.feature_store import FeatureStore +from feast.errors import ConflictingFeatureViewNames +from feast.feature_store import FeatureStore, _validate_feature_views from feast.feature_view import FeatureView +from feast.field import Field from feast.infra.online_stores.sqlite import SqliteOnlineStoreConfig +from feast.on_demand_feature_view import on_demand_feature_view from feast.repo_config import RepoConfig -from feast.types import Float64, Int64, String +from feast.stream_feature_view import StreamFeatureView +from feast.types import Float32, Float64, Int64, String from tests.utils.data_source_test_creator import prep_file_source @@ -75,3 +83,146 @@ def feature_store_with_local_registry(): entity_key_serialization_version=3, ) ) + + +@pytest.mark.integration +def test_validate_feature_views_cross_type_conflict(): + """ + Test that _validate_feature_views() catches cross-type name conflicts. + + This is a unit test for the validation that happens during feast plan/apply. 
+ The validation must catch conflicts across FeatureView, StreamFeatureView, + and OnDemandFeatureView to prevent silent data correctness bugs in + get_online_features (which uses fixed-order lookup). + + See: https://github.com/feast-dev/feast/issues/5995 + """ + # Create a simple entity + entity = Entity(name="driver_entity", join_keys=["test_key"]) + + # Create a regular FeatureView + file_source = FileSource(name="my_file_source", path="test.parquet") + feature_view = FeatureView( + name="my_feature_view", + entities=[entity], + schema=[Field(name="feature1", dtype=Float32)], + source=file_source, + ) + + # Create a StreamFeatureView with the SAME name + stream_source = KafkaSource( + name="kafka", + timestamp_field="event_timestamp", + kafka_bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=file_source, + watermark_delay_threshold=timedelta(days=1), + ) + stream_feature_view = StreamFeatureView( + name="my_feature_view", # Same name as FeatureView! + entities=[entity], + ttl=timedelta(days=30), + schema=[Field(name="feature1", dtype=Float32)], + source=stream_source, + ) + + # Validate should raise ConflictingFeatureViewNames + with pytest.raises(ConflictingFeatureViewNames) as exc_info: + _validate_feature_views([feature_view, stream_feature_view]) + + # Verify error message contains type information + error_message = str(exc_info.value) + assert "my_feature_view" in error_message + assert "FeatureView" in error_message + assert "StreamFeatureView" in error_message + + +def test_validate_feature_views_same_type_conflict(): + """ + Test that _validate_feature_views() also catches same-type name conflicts + with a proper error message indicating duplicate FeatureViews. 
+ """ + # Create a simple entity + entity = Entity(name="driver_entity", join_keys=["test_key"]) + + # Create two FeatureViews with the same name + file_source = FileSource(name="my_file_source", path="test.parquet") + fv1 = FeatureView( + name="duplicate_fv", + entities=[entity], + schema=[Field(name="feature1", dtype=Float32)], + source=file_source, + ) + fv2 = FeatureView( + name="duplicate_fv", # Same name! + entities=[entity], + schema=[Field(name="feature2", dtype=Float32)], + source=file_source, + ) + + # Validate should raise ConflictingFeatureViewNames + with pytest.raises(ConflictingFeatureViewNames) as exc_info: + _validate_feature_views([fv1, fv2]) + + # Verify error message indicates same-type duplicate + error_message = str(exc_info.value) + assert "duplicate_fv" in error_message + assert "Multiple FeatureViews" in error_message + assert "case-insensitively unique" in error_message + + +def test_validate_feature_views_case_insensitive(): + """ + Test that _validate_feature_views() catches case-insensitive conflicts. + """ + entity = Entity(name="driver_entity", join_keys=["test_key"]) + file_source = FileSource(name="my_file_source", path="test.parquet") + + fv1 = FeatureView( + name="MyFeatureView", + entities=[entity], + schema=[Field(name="feature1", dtype=Float32)], + source=file_source, + ) + fv2 = FeatureView( + name="myfeatureview", # Same name, different case! + entities=[entity], + schema=[Field(name="feature2", dtype=Float32)], + source=file_source, + ) + + # Validate should raise ConflictingFeatureViewNames (case-insensitive) + with pytest.raises(ConflictingFeatureViewNames): + _validate_feature_views([fv1, fv2]) + + +def test_validate_feature_views_odfv_conflict(): + """ + Test that _validate_feature_views() catches OnDemandFeatureView name conflicts. 
+ """ + entity = Entity(name="driver_entity", join_keys=["test_key"]) + file_source = FileSource(name="my_file_source", path="test.parquet") + + fv = FeatureView( + name="shared_name", + entities=[entity], + schema=[Field(name="feature1", dtype=Float32)], + source=file_source, + ) + + @on_demand_feature_view( + sources=[fv], + schema=[Field(name="output", dtype=Float32)], + ) + def shared_name(inputs: pd.DataFrame) -> pd.DataFrame: + return pd.DataFrame({"output": inputs["feature1"] * 2}) + + # Validate should raise ConflictingFeatureViewNames + with pytest.raises(ConflictingFeatureViewNames) as exc_info: + _validate_feature_views([fv, shared_name]) + + error_message = str(exc_info.value) + assert "shared_name" in error_message + assert "FeatureView" in error_message + assert "OnDemandFeatureView" in error_message diff --git a/sdk/python/tests/integration/registration/test_universal_odfv_feature_inference.py b/sdk/python/tests/integration/registration/test_universal_odfv_feature_inference.py index 151f629289f..e1b936a3042 100644 --- a/sdk/python/tests/integration/registration/test_universal_odfv_feature_inference.py +++ b/sdk/python/tests/integration/registration/test_universal_odfv_feature_inference.py @@ -6,8 +6,8 @@ from feast.infra.offline_stores.file_source import FileSource from feast.types import Float64 from feast.utils import _utc_now -from tests.integration.feature_repos.universal.entities import customer, driver, item -from tests.integration.feature_repos.universal.feature_views import ( +from tests.universal.feature_repos.universal.entities import customer, driver, item +from tests.universal.feature_repos.universal.feature_views import ( conv_rate_plus_100_feature_view, create_conv_rate_request_source, create_driver_hourly_stats_batch_feature_view, @@ -18,7 +18,6 @@ @pytest.mark.integration -@pytest.mark.universal_offline_stores @pytest.mark.parametrize("infer_features", [True, False], ids=lambda v: str(v)) def test_infer_odfv_features(environment, 
universal_data_sources, infer_features): store = environment.feature_store diff --git a/sdk/python/tests/integration/registration/test_universal_registry.py b/sdk/python/tests/integration/registration/test_universal_registry.py index 29b31ef1b75..fb09395d789 100644 --- a/sdk/python/tests/integration/registration/test_universal_registry.py +++ b/sdk/python/tests/integration/registration/test_universal_registry.py @@ -35,7 +35,7 @@ from feast.data_format import AvroFormat, ParquetFormat from feast.data_source import KafkaSource from feast.entity import Entity -from feast.errors import FeatureViewNotFoundException +from feast.errors import ConflictingFeatureViewNames, FeatureViewNotFoundException from feast.feature_view import FeatureView from feast.field import Field from feast.infra.infra_object import Infra @@ -56,7 +56,7 @@ from feast.types import Array, Bytes, Float32, Int32, Int64, String from feast.utils import _utc_now from feast.value_type import ValueType -from tests.integration.feature_repos.universal.entities import driver +from tests.universal.feature_repos.universal.entities import driver @pytest.fixture @@ -386,33 +386,41 @@ def mock_remote_registry(): yield registry -if os.getenv("FEAST_IS_LOCAL_TEST", "False") == "False": - all_fixtures = [lazy_fixture("s3_registry"), lazy_fixture("gcs_registry")] +all_fixtures = [ + lazy_fixture("local_registry"), + pytest.param( + lazy_fixture("pg_registry"), + marks=pytest.mark.xdist_group(name="pg_registry"), + ), + pytest.param( + lazy_fixture("mysql_registry"), + marks=pytest.mark.xdist_group(name="mysql_registry"), + ), + lazy_fixture("sqlite_registry"), + pytest.param( + lazy_fixture("mock_remote_registry"), + marks=pytest.mark.rbac_remote_integration_test, + ), +] + +if os.getenv("FEAST_IS_LOCAL_TEST", "False") != "True": + all_fixtures.extend( + [ + lazy_fixture("s3_registry"), + lazy_fixture("gcs_registry"), + pytest.param( + lazy_fixture("hdfs_registry"), + 
marks=pytest.mark.xdist_group(name="hdfs_registry"), + ), + ] + ) else: - all_fixtures = [ - lazy_fixture("local_registry"), + all_fixtures.append( pytest.param( lazy_fixture("minio_registry"), marks=pytest.mark.xdist_group(name="minio_registry"), - ), - pytest.param( - lazy_fixture("pg_registry"), - marks=pytest.mark.xdist_group(name="pg_registry"), - ), - pytest.param( - lazy_fixture("mysql_registry"), - marks=pytest.mark.xdist_group(name="mysql_registry"), - ), - lazy_fixture("sqlite_registry"), - pytest.param( - lazy_fixture("mock_remote_registry"), - marks=pytest.mark.rbac_remote_integration_test, - ), - pytest.param( - lazy_fixture("hdfs_registry"), - marks=pytest.mark.xdist_group(name="hdfs_registry"), - ), - ] + ) + ) sql_fixtures = [ pytest.param( @@ -611,6 +619,81 @@ def test_apply_feature_view_success(test_registry: BaseRegistry): test_registry.teardown() +@pytest.mark.integration +@pytest.mark.parametrize( + "test_registry", + all_fixtures, +) +def test_apply_feature_view_without_source_success(test_registry: BaseRegistry): + """Test that a FeatureView with no source can be applied, retrieved, updated, and deleted.""" + entity = Entity(name="fs1_my_entity_1", join_keys=["test"]) + + fv1 = FeatureView( + name="my_feature_view_no_source", + schema=[ + Field(name="test", dtype=Int64), + Field(name="fs1_my_feature_1", dtype=Int64), + Field(name="fs1_my_feature_2", dtype=String), + Field(name="fs1_my_feature_3", dtype=Array(String)), + ], + entities=[entity], + tags={"team": "matchmaking"}, + source=None, + ttl=timedelta(minutes=5), + ) + + project = "project" + + # Register Feature View + test_registry.apply_feature_view(fv1, project) + + feature_views = test_registry.list_feature_views(project, tags=fv1.tags) + + assert len(feature_views) == 1 + assert feature_views[0].name == "my_feature_view_no_source" + assert feature_views[0].batch_source is None + assert feature_views[0].stream_source is None + assert feature_views[0].features[0].name == 
"fs1_my_feature_1" + assert feature_views[0].features[0].dtype == Int64 + assert feature_views[0].features[1].name == "fs1_my_feature_2" + assert feature_views[0].features[1].dtype == String + assert feature_views[0].features[2].name == "fs1_my_feature_3" + assert feature_views[0].features[2].dtype == Array(String) + + feature_view = test_registry.get_feature_view("my_feature_view_no_source", project) + any_feature_view = test_registry.get_any_feature_view( + "my_feature_view_no_source", project + ) + + assert feature_view.name == "my_feature_view_no_source" + assert feature_view.batch_source is None + assert feature_view.stream_source is None + assert feature_view.ttl == timedelta(minutes=5) + assert feature_view == any_feature_view + + # After the first apply, created_timestamp should equal last_updated_timestamp. + assert feature_view.created_timestamp == feature_view.last_updated_timestamp + + # Update the feature view and verify created_timestamp is preserved. + fv1.ttl = timedelta(minutes=10) + test_registry.apply_feature_view(fv1, project) + feature_views = test_registry.list_feature_views(project) + assert len(feature_views) == 1 + updated_feature_view = test_registry.get_feature_view( + "my_feature_view_no_source", project + ) + assert updated_feature_view.ttl == timedelta(minutes=10) + assert updated_feature_view.batch_source is None + assert updated_feature_view.created_timestamp == feature_view.created_timestamp + + # Delete the feature view. 
+ test_registry.delete_feature_view("my_feature_view_no_source", project) + feature_views = test_registry.list_feature_views(project) + assert len(feature_views) == 0 + + test_registry.teardown() + + @pytest.mark.integration @pytest.mark.parametrize( "test_registry", @@ -1993,3 +2076,119 @@ def test_commit_for_read_only_user(): assert len(entities) == 1 write_registry.teardown() + + +@pytest.mark.integration +@pytest.mark.parametrize( + "test_registry", + # mock_remote_registry excluded: the mock gRPC channel does not propagate + # server-side errors, so ConflictingFeatureViewNames is not raised client-side. + [f for f in all_fixtures if "mock_remote" not in str(f)], +) +def test_cross_type_feature_view_name_conflict(test_registry: BaseRegistry): + """ + Test that feature view names must be unique across all feature view types. + + This validates the fix for feast-dev/feast#5995: If a FeatureView and + StreamFeatureView share the same name, get_online_features would silently + return the wrong one (fixed order lookup). This test ensures such conflicts + are caught during registration. + """ + project = "project" + + # Create a simple entity + entity = Entity(name="driver_entity", join_keys=["test_key"]) + + # Create a regular FeatureView + file_source = FileSource(name="my_file_source", path="test.parquet") + feature_view = FeatureView( + name="shared_feature_view_name", + entities=[entity], + schema=[Field(name="feature1", dtype=Float32)], + source=file_source, + ) + + # Create a StreamFeatureView with the SAME name + stream_source = KafkaSource( + name="kafka", + timestamp_field="event_timestamp", + kafka_bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=FileSource(path="some path"), + watermark_delay_threshold=timedelta(days=1), + ) + + def simple_udf(x: int): + return x + 3 + + stream_feature_view = StreamFeatureView( + name="shared_feature_view_name", # Same name as FeatureView! 
+ entities=[entity], + ttl=timedelta(days=30), + schema=[Field(name="feature1", dtype=Float32)], + source=stream_source, + udf=simple_udf, + ) + + # Register the regular FeatureView first - should succeed + test_registry.apply_feature_view(feature_view, project) + + # Attempt to register StreamFeatureView with same name - should fail + with pytest.raises(ConflictingFeatureViewNames) as exc_info: + test_registry.apply_feature_view(stream_feature_view, project) + + # Verify error message contains the conflicting types + error_message = str(exc_info.value) + assert "shared_feature_view_name" in error_message + + # Cleanup + test_registry.delete_feature_view("shared_feature_view_name", project) + test_registry.teardown() + + +@pytest.mark.integration +@pytest.mark.parametrize( + "test_registry", + [f for f in all_fixtures if "mock_remote" not in str(f)], +) +def test_cross_type_feature_view_odfv_conflict(test_registry: BaseRegistry): + """ + Test that OnDemandFeatureView names must be unique across all feature view types. 
+ """ + project = "project" + + # Create a simple entity + entity = Entity(name="driver_entity", join_keys=["test_key"]) + + # Create a regular FeatureView + file_source = FileSource(name="my_file_source", path="test.parquet") + feature_view = FeatureView( + name="shared_odfv_name", + entities=[entity], + schema=[Field(name="feature1", dtype=Float32)], + source=file_source, + ) + + # Create an OnDemandFeatureView with the SAME name + @on_demand_feature_view( + sources=[feature_view], + schema=[Field(name="output", dtype=Float32)], + ) + def shared_odfv_name(inputs: pd.DataFrame) -> pd.DataFrame: + return pd.DataFrame({"output": inputs["feature1"] * 2}) + + # Register the regular FeatureView first - should succeed + test_registry.apply_feature_view(feature_view, project) + + # Attempt to register OnDemandFeatureView with same name - should fail + with pytest.raises(ConflictingFeatureViewNames) as exc_info: + test_registry.apply_feature_view(shared_odfv_name, project) + + # Verify error message contains the conflicting types + error_message = str(exc_info.value) + assert "shared_odfv_name" in error_message + + # Cleanup + test_registry.delete_feature_view("shared_odfv_name", project) + test_registry.teardown() diff --git a/sdk/python/tests/integration/rest_api/conftest.py b/sdk/python/tests/integration/rest_api/conftest.py new file mode 100644 index 00000000000..6e55d5825f2 --- /dev/null +++ b/sdk/python/tests/integration/rest_api/conftest.py @@ -0,0 +1,171 @@ +import os +import time +from pathlib import Path + +import pytest +import requests +from kubernetes import client, config + +from tests.integration.rest_api.support import ( + applyFeastProject, + create_feast_project, + create_namespace, + create_route, + delete_namespace, + deploy_and_validate_pod, + execPodCommand, + get_pod_name_by_prefix, + run_kubectl_command, + validate_feature_store_cr_status, +) + + +class FeastRestClient: + def __init__(self, base_url): + self.base_url = base_url.rstrip("/") + 
self.api_prefix = "/api/v1" + + def _build_url(self, endpoint): + if not endpoint.startswith("/"): + endpoint = "/" + endpoint + return f"{self.base_url}{self.api_prefix}{endpoint}" + + def get(self, endpoint, params=None): + params = params or {} + params.setdefault("allow_cache", "false") + url = self._build_url(endpoint) + return requests.get(url, params=params, verify=False) + + +def _wait_for_http_ready(route_url: str, timeout: int = 180, interval: int = 5) -> None: + """ + Poll the HTTP endpoint until it returns a non-502 response. + + After Pod/CR readiness is confirmed, the backend behind the ingress may + still be initializing. This helper avoids the race condition where tests + start before the Feast server is ready, causing all requests to return 502. + """ + health_url = f"{route_url}/api/v1/projects" + deadline = time.time() + timeout + last_status = None + + print( + f"\n Waiting for HTTP endpoint to become ready (timeout={timeout}s): {health_url}" + ) + + while time.time() < deadline: + try: + resp = requests.get(health_url, timeout=10, verify=False) + last_status = resp.status_code + if resp.status_code != 502: + print(f" HTTP endpoint is ready (status={resp.status_code})") + return + print( + f" HTTP endpoint returned {resp.status_code}, retrying in {interval}s..." 
+ ) + except requests.exceptions.RequestException as exc: + last_status = str(exc) + print(f" HTTP request failed ({exc}), retrying in {interval}s...") + + time.sleep(interval) + + raise RuntimeError( + f"HTTP endpoint {health_url} did not become ready within {timeout}s " + f"(last status: {last_status})" + ) + + +@pytest.fixture(scope="session") +def feast_rest_client(): + # Load kubeconfig and initialize Kubernetes client + config.load_kube_config() + api_instance = client.CoreV1Api() + + # Get the directory containing this conftest.py file + test_dir = Path(__file__).parent + resource_dir = test_dir / "resource" + + # Constants and environment values + namespace = "test-ns-feast-rest" + credit_scoring = "credit-scoring" + driver_ranking = "driver-ranking" + # Registry REST service name created by the operator for credit-scoring (kind and OpenShift) + registry_rest_service = "feast-credit-scoring-registry-rest" + run_on_openshift = os.getenv("RUN_ON_OPENSHIFT_CI", "false").lower() == "true" + + # Create test namespace + create_namespace(api_instance, namespace) + + try: + # Deploy dependencies (same for kind and OpenShift) + deploy_and_validate_pod( + namespace, str(resource_dir / "redis.yaml"), "app=redis" + ) + deploy_and_validate_pod( + namespace, str(resource_dir / "postgres.yaml"), "app=postgres" + ) + + # Create and validate FeatureStore CRs (SQL registry, same as kind) + create_feast_project( + str(resource_dir / "feast_config_credit_scoring.yaml"), + namespace, + credit_scoring, + ) + validate_feature_store_cr_status(namespace, credit_scoring) + + create_feast_project( + str(resource_dir / "feast_config_driver_ranking.yaml"), + namespace, + driver_ranking, + ) + validate_feature_store_cr_status(namespace, driver_ranking) + + if run_on_openshift: + # OpenShift: expose registry REST via route (no nginx ingress) + route_url = create_route(namespace, credit_scoring, registry_rest_service) + else: + # Kind: deploy nginx ingress and get route URL + 
run_kubectl_command( + [ + "apply", + "-f", + str(resource_dir / "feast-registry-nginx.yaml"), + "-n", + namespace, + ] + ) + ingress_host = run_kubectl_command( + [ + "get", + "ingress", + "feast-registry-ingress", + "-n", + namespace, + "-o", + "jsonpath={.spec.rules[0].host}", + ] + ) + route_url = f"http://{ingress_host}" + + # Apply feast projects + applyFeastProject(namespace, credit_scoring) + applyFeastProject(namespace, driver_ranking) + + # Create Saved Datasets and Permissions + pod_name = get_pod_name_by_prefix(namespace, credit_scoring) + execPodCommand(namespace, pod_name, ["python", "create_ui_visible_datasets.py"]) + execPodCommand(namespace, pod_name, ["python", "permissions_apply.py"]) + if not route_url: + raise RuntimeError("Route URL could not be fetched.") + + # Wait for the HTTP endpoint to become ready before running tests. + # Pod/CR readiness does not guarantee the backend is serving traffic; + # the ingress may return 502 while the Feast server is still starting. 
+ _wait_for_http_ready(route_url) + + print(f"\n Connected to Feast REST at: {route_url}") + yield FeastRestClient(route_url) + + finally: + print(f"\n Deleting namespace: {namespace}") + delete_namespace(api_instance, namespace) diff --git a/sdk/python/tests/integration/registration/rest_api/resource/feast-registry-nginx.yaml b/sdk/python/tests/integration/rest_api/resource/feast-registry-nginx.yaml similarity index 100% rename from sdk/python/tests/integration/registration/rest_api/resource/feast-registry-nginx.yaml rename to sdk/python/tests/integration/rest_api/resource/feast-registry-nginx.yaml diff --git a/sdk/python/tests/integration/registration/rest_api/resource/feast_config_credit_scoring.yaml b/sdk/python/tests/integration/rest_api/resource/feast_config_credit_scoring.yaml similarity index 100% rename from sdk/python/tests/integration/registration/rest_api/resource/feast_config_credit_scoring.yaml rename to sdk/python/tests/integration/rest_api/resource/feast_config_credit_scoring.yaml diff --git a/sdk/python/tests/integration/registration/rest_api/resource/feast_config_driver_ranking.yaml b/sdk/python/tests/integration/rest_api/resource/feast_config_driver_ranking.yaml similarity index 100% rename from sdk/python/tests/integration/registration/rest_api/resource/feast_config_driver_ranking.yaml rename to sdk/python/tests/integration/rest_api/resource/feast_config_driver_ranking.yaml diff --git a/sdk/python/tests/integration/registration/rest_api/resource/feast_config_rhoai.yaml b/sdk/python/tests/integration/rest_api/resource/feast_config_rhoai.yaml similarity index 100% rename from sdk/python/tests/integration/registration/rest_api/resource/feast_config_rhoai.yaml rename to sdk/python/tests/integration/rest_api/resource/feast_config_rhoai.yaml diff --git a/sdk/python/tests/integration/registration/rest_api/resource/postgres.yaml b/sdk/python/tests/integration/rest_api/resource/postgres.yaml similarity index 100% rename from 
sdk/python/tests/integration/registration/rest_api/resource/postgres.yaml rename to sdk/python/tests/integration/rest_api/resource/postgres.yaml diff --git a/sdk/python/tests/integration/registration/rest_api/resource/redis.yaml b/sdk/python/tests/integration/rest_api/resource/redis.yaml similarity index 100% rename from sdk/python/tests/integration/registration/rest_api/resource/redis.yaml rename to sdk/python/tests/integration/rest_api/resource/redis.yaml diff --git a/sdk/python/tests/integration/registration/rest_api/support.py b/sdk/python/tests/integration/rest_api/support.py similarity index 100% rename from sdk/python/tests/integration/registration/rest_api/support.py rename to sdk/python/tests/integration/rest_api/support.py diff --git a/sdk/python/tests/integration/registration/rest_api/test_registry_rest_api.py b/sdk/python/tests/integration/rest_api/test_registry_rest_api.py similarity index 100% rename from sdk/python/tests/integration/registration/rest_api/test_registry_rest_api.py rename to sdk/python/tests/integration/rest_api/test_registry_rest_api.py diff --git a/sdk/python/tests/unit/cli/test_cli_apply_duplicates.py b/sdk/python/tests/unit/cli/test_cli_apply_duplicates.py index b3e350fe73c..cf5b64dbd20 100644 --- a/sdk/python/tests/unit/cli/test_cli_apply_duplicates.py +++ b/sdk/python/tests/unit/cli/test_cli_apply_duplicates.py @@ -8,7 +8,7 @@ def test_cli_apply_duplicated_featureview_names() -> None: run_simple_apply_test( example_repo_file_name="example_feature_repo_with_duplicated_featureview_names.py", - expected_error=b"Please ensure that all feature view names are case-insensitively unique", + expected_error=b"Feature view names must be case-insensitively unique", ) @@ -152,9 +152,7 @@ def test_cli_apply_imported_featureview_with_duplication() -> None: rc, output = runner.run_with_output(["apply"], cwd=repo_path) assert rc != 0 - assert ( - b"More than one feature view with name driver_hourly_stats found." 
in output - ) + assert b"Multiple FeatureViews with name 'driver_hourly_stats' found." in output def test_cli_apply_duplicated_featureview_names_multiple_py_files() -> None: @@ -195,6 +193,5 @@ def test_cli_apply_duplicated_featureview_names_multiple_py_files() -> None: assert ( rc != 0 - and b"Please ensure that all feature view names are case-insensitively unique" - in output + and b"Feature view names must be case-insensitively unique" in output ) diff --git a/sdk/python/tests/unit/infra/compute_engines/ray_compute/__init__.py b/sdk/python/tests/unit/infra/compute_engines/ray_compute/__init__.py deleted file mode 100644 index 2734c36c704..00000000000 --- a/sdk/python/tests/unit/infra/compute_engines/ray_compute/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Ray compute engine unit tests.""" diff --git a/sdk/python/tests/unit/infra/offline_stores/test_clickhouse.py b/sdk/python/tests/unit/infra/offline_stores/test_clickhouse.py index f5440ed367d..7789cde72b3 100644 --- a/sdk/python/tests/unit/infra/offline_stores/test_clickhouse.py +++ b/sdk/python/tests/unit/infra/offline_stores/test_clickhouse.py @@ -1,5 +1,6 @@ import logging import threading +from datetime import datetime, timedelta, timezone from unittest.mock import MagicMock, patch import pytest @@ -133,3 +134,109 @@ def test_clickhouse_config_handles_none_additional_client_args(): config = ClickhouseConfig(**raw_config) assert config.additional_client_args is None + + +class TestNonEntityRetrieval: + """Test the non-entity retrieval logic (entity_df=None) for ClickHouse.""" + + _MODULE = "feast.infra.offline_stores.contrib.clickhouse_offline_store.clickhouse" + + def _call_get_historical_features(self, feature_views, **kwargs): + """Call get_historical_features with entity_df=None, mocking the pipeline.""" + from feast.infra.offline_stores.contrib.clickhouse_offline_store.clickhouse import ( + ClickhouseOfflineStore, + ClickhouseOfflineStoreConfig, + ) + from feast.repo_config import RepoConfig + + config = 
RepoConfig( + project="test_project", + registry="test_registry", + provider="local", + offline_store=ClickhouseOfflineStoreConfig( + type="clickhouse", + host="localhost", + port=9000, + database="test_db", + user="default", + password="password", + ), + ) + + end = kwargs.get("end_date", datetime(2023, 1, 7, tzinfo=timezone.utc)) + + with ( + patch.multiple( + self._MODULE, + _upload_entity_df=MagicMock(), + _get_entity_schema=MagicMock( + return_value={"event_timestamp": "timestamp"} + ), + _get_entity_df_event_timestamp_range=MagicMock( + return_value=(end - timedelta(days=1), end) + ), + ), + patch( + f"{self._MODULE}.offline_utils.get_expected_join_keys", + return_value=[], + ), + patch( + f"{self._MODULE}.offline_utils.assert_expected_columns_in_entity_df", + ), + patch( + f"{self._MODULE}.offline_utils.get_feature_view_query_context", + return_value=[], + ), + ): + refs = [f"{fv.name}:feature1" for fv in feature_views] + return ClickhouseOfflineStore.get_historical_features( + config=config, + feature_views=feature_views, + feature_refs=refs, + entity_df=None, + registry=MagicMock(), + project="test_project", + **kwargs, + ) + + @staticmethod + def _make_feature_view(name, ttl=None): + from feast.entity import Entity + from feast.feature_view import FeatureView, Field + from feast.infra.offline_stores.contrib.clickhouse_offline_store.clickhouse_source import ( + ClickhouseSource, + ) + from feast.types import Float32 + + return FeatureView( + name=name, + entities=[Entity(name="driver_id", join_keys=["driver_id"])], + ttl=ttl, + source=ClickhouseSource( + name=f"{name}_source", + table=f"{name}_table", + timestamp_field="event_timestamp", + ), + schema=[ + Field(name="feature1", dtype=Float32), + ], + ) + + def test_non_entity_mode_with_end_date(self): + """entity_df=None with explicit end_date produces a valid RetrievalJob.""" + from feast.infra.offline_stores.offline_store import RetrievalJob + + fv = self._make_feature_view("test_fv") + job = 
self._call_get_historical_features( + [fv], + end_date=datetime(2023, 1, 7, tzinfo=timezone.utc), + ) + assert isinstance(job, RetrievalJob) + + def test_non_entity_mode_defaults_end_date(self): + """entity_df=None without end_date defaults to now.""" + from feast.infra.offline_stores.offline_store import RetrievalJob + + fv = self._make_feature_view("test_fv") + job = self._call_get_historical_features([fv]) + assert isinstance(job, RetrievalJob) diff --git a/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py b/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py index 6dd8a99f884..7e5558e19d7 100644 --- a/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py +++ b/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py @@ -780,3 +780,273 @@ def test_dynamodb_update_online_store_int_list(repo_config, dynamodb_online_stor assert len(result) == 1 scores = result[0][1]["scores"] assert _extract_int32_list(scores) == [10, 20, 30] + + +@mock_dynamodb +def test_dynamodb_online_store_online_read_empty_entities( + repo_config, dynamodb_online_store +): + """Test DynamoDBOnlineStore online_read with empty entity list.""" + db_table_name = f"{TABLE_NAME}_empty_entities" + create_test_table(PROJECT, db_table_name, REGION) + + returned_items = dynamodb_online_store.online_read( + config=repo_config, + table=MockFeatureView(name=db_table_name), + entity_keys=[], + ) + assert returned_items == [] + + +@mock_dynamodb +def test_dynamodb_online_store_online_read_parallel_batches( + repo_config, dynamodb_online_store +): + """Test DynamoDBOnlineStore online_read with multiple batches (parallel execution). + + With batch_size=100 (default), 250 entities should create 3 batches + that are executed in parallel via ThreadPoolExecutor. 
+ """ + n_samples = 250 + db_table_name = f"{TABLE_NAME}_parallel_batches" + create_test_table(PROJECT, db_table_name, REGION) + data = create_n_customer_test_samples(n=n_samples) + insert_data_test_table(data, PROJECT, db_table_name, REGION) + + entity_keys, features, *rest = zip(*data) + returned_items = dynamodb_online_store.online_read( + config=repo_config, + table=MockFeatureView(name=db_table_name), + entity_keys=entity_keys, + ) + + # Verify all items returned + assert len(returned_items) == n_samples + # Verify order is preserved + assert [item[1] for item in returned_items] == list(features) + + +@mock_dynamodb +def test_dynamodb_online_store_online_read_single_batch_no_parallel( + repo_config, dynamodb_online_store +): + """Test DynamoDBOnlineStore online_read with single batch (no parallelization). + + With batch_size=100, 50 entities should use single batch path + without ThreadPoolExecutor overhead. + """ + n_samples = 50 + db_table_name = f"{TABLE_NAME}_single_batch" + create_test_table(PROJECT, db_table_name, REGION) + data = create_n_customer_test_samples(n=n_samples) + insert_data_test_table(data, PROJECT, db_table_name, REGION) + + entity_keys, features, *rest = zip(*data) + returned_items = dynamodb_online_store.online_read( + config=repo_config, + table=MockFeatureView(name=db_table_name), + entity_keys=entity_keys, + ) + + assert len(returned_items) == n_samples + assert [item[1] for item in returned_items] == list(features) + + +@mock_dynamodb +def test_dynamodb_online_store_online_read_order_preservation_across_batches( + repo_config, dynamodb_online_store +): + """Test that entity order is preserved across parallel batch reads. + + This is critical: parallel execution must not change the order of results. 
+ """ + n_samples = 150 # 2 batches with batch_size=100 + db_table_name = f"{TABLE_NAME}_order_preservation" + create_test_table(PROJECT, db_table_name, REGION) + data = create_n_customer_test_samples(n=n_samples) + insert_data_test_table(data, PROJECT, db_table_name, REGION) + + entity_keys, features, *rest = zip(*data) + + # Read multiple times to verify consistent ordering + for _ in range(3): + returned_items = dynamodb_online_store.online_read( + config=repo_config, + table=MockFeatureView(name=db_table_name), + entity_keys=entity_keys, + ) + assert len(returned_items) == n_samples + # Verify exact order matches + for i, (returned, expected) in enumerate(zip(returned_items, features)): + assert returned[1] == expected, f"Mismatch at index {i}" + + +@mock_dynamodb +def test_dynamodb_online_store_online_read_small_batch_size(dynamodb_online_store): + """Test parallel reads with small batch_size. + + Verifies correctness with small batch sizes that create multiple batches. + """ + small_batch_config = RepoConfig( + registry=REGISTRY, + project=PROJECT, + provider=PROVIDER, + online_store=DynamoDBOnlineStoreConfig(region=REGION, batch_size=5), + offline_store=DaskOfflineStoreConfig(), + entity_key_serialization_version=3, + ) + + n_samples = 25 # 5 batches with batch_size=5 + db_table_name = f"{TABLE_NAME}_small_batch" + create_test_table(PROJECT, db_table_name, REGION) + data = create_n_customer_test_samples(n=n_samples) + insert_data_test_table(data, PROJECT, db_table_name, REGION) + + entity_keys, features, *rest = zip(*data) + returned_items = dynamodb_online_store.online_read( + config=small_batch_config, + table=MockFeatureView(name=db_table_name), + entity_keys=entity_keys, + ) + + assert len(returned_items) == n_samples + assert [item[1] for item in returned_items] == list(features) + + +@mock_dynamodb +def test_dynamodb_online_store_online_read_many_batches(dynamodb_online_store): + """Test parallel reads with many batches (>10). 
+ + Verifies correctness when number of batches exceeds max_workers cap. + """ + many_batch_config = RepoConfig( + registry=REGISTRY, + project=PROJECT, + provider=PROVIDER, + online_store=DynamoDBOnlineStoreConfig(region=REGION, batch_size=10), + offline_store=DaskOfflineStoreConfig(), + entity_key_serialization_version=3, + ) + + n_samples = 150 # 15 batches with batch_size=10 + db_table_name = f"{TABLE_NAME}_many_batches" + create_test_table(PROJECT, db_table_name, REGION) + data = create_n_customer_test_samples(n=n_samples) + insert_data_test_table(data, PROJECT, db_table_name, REGION) + + entity_keys, features, *rest = zip(*data) + returned_items = dynamodb_online_store.online_read( + config=many_batch_config, + table=MockFeatureView(name=db_table_name), + entity_keys=entity_keys, + ) + + assert len(returned_items) == n_samples + assert [item[1] for item in returned_items] == list(features) + + +@mock_dynamodb +def test_dynamodb_online_store_max_workers_capped_at_config(dynamodb_online_store): + """Verify ThreadPoolExecutor max_workers uses max_read_workers config. + + Bug: Old code used min(len(batches), batch_size) which fails with small batch_size. + Fix: New code uses min(len(batches), max_read_workers) for proper parallelization. 
+ + This test uses batch_size=5 with 15 batches to expose the bug: + - OLD (buggy): max_workers = min(15, 5) = 5 (insufficient parallelism) + - NEW (fixed): max_workers = min(15, 10) = 10 (uses max_read_workers default) + """ + # Use small batch_size to expose the bug + small_batch_config = RepoConfig( + registry=REGISTRY, + project=PROJECT, + provider=PROVIDER, + online_store=DynamoDBOnlineStoreConfig(region=REGION, batch_size=5), + offline_store=DaskOfflineStoreConfig(), + entity_key_serialization_version=3, + ) + + n_samples = 75 # 15 batches with batch_size=5 + db_table_name = f"{TABLE_NAME}_max_workers_cap" + create_test_table(PROJECT, db_table_name, REGION) + data = create_n_customer_test_samples(n=n_samples) + insert_data_test_table(data, PROJECT, db_table_name, REGION) + + entity_keys, features, *rest = zip(*data) + + with patch( + "feast.infra.online_stores.dynamodb.ThreadPoolExecutor" + ) as mock_executor: + # Configure mock to work like real ThreadPoolExecutor + mock_executor.return_value.__enter__.return_value.map.return_value = iter( + [{"Responses": {}} for _ in range(15)] + ) + + dynamodb_online_store.online_read( + config=small_batch_config, + table=MockFeatureView(name=db_table_name), + entity_keys=entity_keys, + ) + + # Verify ThreadPoolExecutor was called with max_workers=10 (capped at 10, NOT batch_size=5) + mock_executor.assert_called_once() + call_kwargs = mock_executor.call_args + assert call_kwargs[1]["max_workers"] == 10, ( + f"Expected max_workers=10 (capped), got {call_kwargs[1]['max_workers']}. " + f"If got 5, the bug is using batch_size instead of 10 as cap." + ) + + +@mock_dynamodb +def test_dynamodb_online_store_thread_safety_uses_shared_client( + dynamodb_online_store, +): + """Verify multi-batch reads use a shared thread-safe boto3 client. + + boto3 clients ARE thread-safe, so we share a single client across threads + for better performance (avoids creating new sessions per thread). 
+ https://docs.aws.amazon.com/boto3/latest/guide/clients.html#multithreading-or-multiprocessing-with-clients + """ + config = RepoConfig( + registry=REGISTRY, + project=PROJECT, + provider=PROVIDER, + online_store=DynamoDBOnlineStoreConfig(region=REGION, batch_size=50), + offline_store=DaskOfflineStoreConfig(), + entity_key_serialization_version=3, + ) + + n_samples = 150 # 3 batches + db_table_name = f"{TABLE_NAME}_thread_safety" + create_test_table(PROJECT, db_table_name, REGION) + data = create_n_customer_test_samples(n=n_samples) + insert_data_test_table(data, PROJECT, db_table_name, REGION) + + entity_keys, features, *rest = zip(*data) + + # Track clients created to verify thread-safety via shared client + clients_created = [] + original_client = boto3.client + + def tracking_client(*args, **kwargs): + client = original_client(*args, **kwargs) + clients_created.append(id(client)) + return client + + with patch.object(boto3, "client", side_effect=tracking_client): + returned_items = dynamodb_online_store.online_read( + config=config, + table=MockFeatureView(name=db_table_name), + entity_keys=entity_keys, + ) + + # Verify results are correct (functional correctness) + assert len(returned_items) == n_samples + + # Verify only one client was created (shared across threads) + # The client is cached and reused for all batch requests + dynamodb_clients = [c for c in clients_created] + assert len(set(dynamodb_clients)) == 1, ( + f"Expected 1 shared client for thread-safety, " + f"got {len(set(dynamodb_clients))} unique clients" + ) diff --git a/sdk/python/tests/unit/infra/online_store/test_redis.py b/sdk/python/tests/unit/infra/online_store/test_redis.py index 83c8d3d61e4..0d9f2cd8739 100644 --- a/sdk/python/tests/unit/infra/online_store/test_redis.py +++ b/sdk/python/tests/unit/infra/online_store/test_redis.py @@ -128,3 +128,127 @@ def test_get_features_for_entity(redis_online_store: RedisOnlineStore, feature_v assert "feature_view_1:feature_11" in features assert 
features["feature_view_1:feature_10"].int32_val == 1 assert features["feature_view_1:feature_11"].int32_val == 2 + + +def test_get_features_for_entity_with_memoryview( + redis_online_store: RedisOnlineStore, feature_view +): + """Test that _get_features_for_entity handles memoryview inputs correctly. + + Redis may return memoryview objects instead of bytes in some cases. + The optimized code should handle both without unnecessary conversions. + """ + requested_features = [ + "feature_view_1:feature_10", + "feature_view_1:feature_11", + "_ts:feature_view_1", + ] + # Create memoryview objects to simulate redis returning memoryview + val1_bytes = ValueProto(int32_val=100).SerializeToString() + val2_bytes = ValueProto(int32_val=200).SerializeToString() + ts_bytes = Timestamp(seconds=1234567890, nanos=123456789).SerializeToString() + + values = [ + memoryview(val1_bytes), + memoryview(val2_bytes), + memoryview(ts_bytes), + ] + + timestamp, features = redis_online_store._get_features_for_entity( + values=values, + feature_view=feature_view.name, + requested_features=requested_features, + ) + assert features["feature_view_1:feature_10"].int32_val == 100 + assert features["feature_view_1:feature_11"].int32_val == 200 + assert timestamp is not None + + +def test_get_features_for_entity_with_none_values( + redis_online_store: RedisOnlineStore, feature_view +): + """Test that _get_features_for_entity handles None values correctly.""" + requested_features = [ + "feature_view_1:feature_10", + "feature_view_1:feature_11", + "_ts:feature_view_1", + ] + values = [ + ValueProto(int32_val=1).SerializeToString(), + None, # Missing feature value + Timestamp().SerializeToString(), + ] + + timestamp, features = redis_online_store._get_features_for_entity( + values=values, + feature_view=feature_view.name, + requested_features=requested_features, + ) + assert features["feature_view_1:feature_10"].int32_val == 1 + # None value should result in empty ValueProto + assert 
features["feature_view_1:feature_11"].WhichOneof("val") is None + + +def test_convert_redis_values_to_protobuf_multiple_entities( + redis_online_store: RedisOnlineStore, feature_view +): + """Test batch conversion with multiple entities.""" + requested_features = [ + "feature_view_1:feature_10", + "feature_view_1:feature_11", + "_ts:feature_view_1", + ] + # Multiple entity values + values = [ + [ + ValueProto(int32_val=1).SerializeToString(), + ValueProto(int32_val=2).SerializeToString(), + Timestamp(seconds=1000).SerializeToString(), + ], + [ + ValueProto(int32_val=10).SerializeToString(), + ValueProto(int32_val=20).SerializeToString(), + Timestamp(seconds=2000).SerializeToString(), + ], + [ + ValueProto(int32_val=100).SerializeToString(), + ValueProto(int32_val=200).SerializeToString(), + Timestamp(seconds=3000).SerializeToString(), + ], + ] + + results = redis_online_store._convert_redis_values_to_protobuf( + redis_values=values, + feature_view=feature_view.name, + requested_features=requested_features, + ) + + assert len(results) == 3 + assert results[0][1]["feature_view_1:feature_10"].int32_val == 1 + assert results[1][1]["feature_view_1:feature_10"].int32_val == 10 + assert results[2][1]["feature_view_1:feature_10"].int32_val == 100 + + +def test_get_features_for_entity_with_all_none_values( + redis_online_store: RedisOnlineStore, feature_view +): + """Test that None feature values result in empty ValueProto objects.""" + requested_features = [ + "feature_view_1:feature_10", + "_ts:feature_view_1", + ] + # All None values except timestamp + values = [ + None, + Timestamp().SerializeToString(), + ] + + timestamp, features = redis_online_store._get_features_for_entity( + values=values, + feature_view=feature_view.name, + requested_features=requested_features, + ) + # Even with None value, an empty ValueProto is created + assert features is not None + assert "feature_view_1:feature_10" in features + assert features["feature_view_1:feature_10"].WhichOneof("val") 
is None diff --git a/sdk/python/tests/unit/infra/online_store/test_remote_online_store.py b/sdk/python/tests/unit/infra/online_store/test_remote_online_store.py index 1c074a40d40..2b41a630d17 100644 --- a/sdk/python/tests/unit/infra/online_store/test_remote_online_store.py +++ b/sdk/python/tests/unit/infra/online_store/test_remote_online_store.py @@ -138,8 +138,8 @@ def test_retrieve_online_documents_success( call_args = mock_get_remote_online_documents.call_args assert call_args[1]["config"] == config - # Parse the request body to verify it's correct - req_body = json.loads(call_args[1]["req_body"]) + # Verify the request body dict is correct + req_body = call_args[1]["req_body"] assert req_body["features"] == ["test_feature_view:feature1"] assert req_body["query"] == [0.1, 0.2, 0.3] assert req_body["top_k"] == 2 @@ -189,8 +189,8 @@ def test_retrieve_online_documents_v2_success( call_args = mock_get_remote_online_documents.call_args assert call_args[1]["config"] == config - # Parse the request body to verify it's correct - req_body = json.loads(call_args[1]["req_body"]) + # Verify the request body dict is correct + req_body = call_args[1]["req_body"] assert req_body["features"] == ["test_feature_view:feature1"] assert req_body["query"] == [0.1, 0.2, 0.3] assert req_body["top_k"] == 2 @@ -302,14 +302,13 @@ def test_construct_online_documents_api_json_request( distance_metric="cosine", ) - parsed_result = json.loads(result) - assert parsed_result["features"] == [ + assert result["features"] == [ "test_feature_view:feature1", "test_feature_view:feature2", ] - assert parsed_result["query"] == [0.1, 0.2, 0.3] - assert parsed_result["top_k"] == 5 - assert parsed_result["distance_metric"] == "cosine" + assert result["query"] == [0.1, 0.2, 0.3] + assert result["top_k"] == 5 + assert result["distance_metric"] == "cosine" def test_construct_online_documents_v2_api_json_request( self, remote_store, feature_view @@ -325,13 +324,12 @@ def 
test_construct_online_documents_v2_api_json_request( api_version=2, ) - parsed_result = json.loads(result) - assert parsed_result["features"] == ["test_feature_view:feature1"] - assert parsed_result["query"] == [0.1, 0.2] - assert parsed_result["top_k"] == 3 - assert parsed_result["distance_metric"] == "L2" - assert parsed_result["query_string"] == "test query" - assert parsed_result["api_version"] == 2 + assert result["features"] == ["test_feature_view:feature1"] + assert result["query"] == [0.1, 0.2] + assert result["top_k"] == 3 + assert result["distance_metric"] == "L2" + assert result["query_string"] == "test query" + assert result["api_version"] == 2 def test_extract_requested_feature_value(self, remote_store): """Test _extract_requested_feature_value helper method.""" diff --git a/sdk/python/tests/unit/infra/registry/test_sql_registry.py b/sdk/python/tests/unit/infra/registry/test_sql_registry.py index 8e5154da47b..5f144adbaf4 100644 --- a/sdk/python/tests/unit/infra/registry/test_sql_registry.py +++ b/sdk/python/tests/unit/infra/registry/test_sql_registry.py @@ -13,11 +13,20 @@ # limitations under the License. 
import tempfile +from datetime import timedelta import pytest +from feast import Field +from feast.data_source import PushSource from feast.entity import Entity +from feast.errors import ConflictingFeatureViewNames +from feast.feature_view import FeatureView +from feast.infra.offline_stores.file_source import FileSource from feast.infra.registry.sql import SqlRegistry, SqlRegistryConfig +from feast.stream_feature_view import StreamFeatureView +from feast.types import Float32 +from feast.value_type import ValueType @pytest.fixture @@ -56,3 +65,43 @@ def test_sql_registry(sqlite_registry): sqlite_registry.delete_entity("test_entity", "test_project") with pytest.raises(Exception): sqlite_registry.get_entity("test_entity", "test_project") + + +def _build_feature_view(name: str, entity: Entity, source: FileSource) -> FeatureView: + return FeatureView( + name=name, + entities=[entity], + ttl=timedelta(days=1), + schema=[Field(name="conv_rate", dtype=Float32)], + source=source, + ) + + +def test_feature_view_name_conflict_between_stream_and_batch(sqlite_registry): + entity = Entity( + name="driver", + value_type=ValueType.STRING, + join_keys=["driver_id"], + ) + sqlite_registry.apply_entity(entity, "test_project") + + file_source = FileSource( + path="driver_stats.parquet", + timestamp_field="event_timestamp", + created_timestamp_column="created", + ) + + batch_view = _build_feature_view("driver_activity", entity, file_source) + sqlite_registry.apply_feature_view(batch_view, "test_project") + + push_source = PushSource(name="driver_push", batch_source=file_source) + stream_view = StreamFeatureView( + name="driver_activity", + source=push_source, + entities=[entity], + schema=[Field(name="conv_rate", dtype=Float32)], + timestamp_field="event_timestamp", + ) + + with pytest.raises(ConflictingFeatureViewNames): + sqlite_registry.apply_feature_view(stream_view, "test_project") diff --git a/sdk/python/tests/unit/infra/test_key_encoding_utils.py 
b/sdk/python/tests/unit/infra/test_key_encoding_utils.py index 14433a41e65..ba481adf234 100644 --- a/sdk/python/tests/unit/infra/test_key_encoding_utils.py +++ b/sdk/python/tests/unit/infra/test_key_encoding_utils.py @@ -4,6 +4,7 @@ deserialize_entity_key, reserialize_entity_v2_key_to_v3, serialize_entity_key, + serialize_entity_key_prefix, ) from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto from feast.protos.feast.types.Value_pb2 import Value as ValueProto @@ -151,3 +152,186 @@ def test_reserialize_entity_v2_key_to_v3(): join_keys=["user"], entity_values=[ValueProto(int64_val=int(2**15))], ) + + +def test_single_entity_fast_path(): + """Test that single entity optimization works correctly.""" + entity_key_proto = EntityKeyProto( + join_keys=["user_id"], + entity_values=[ValueProto(string_val="test_user")], + ) + + serialized_key = serialize_entity_key( + entity_key_proto, entity_key_serialization_version=3 + ) + deserialized_key = deserialize_entity_key( + serialized_key, entity_key_serialization_version=3 + ) + + assert deserialized_key == entity_key_proto + + +def test_empty_entity_key(): + """Test handling of empty entity keys.""" + entity_key_proto = EntityKeyProto(join_keys=[], entity_values=[]) + + serialized_key = serialize_entity_key( + entity_key_proto, entity_key_serialization_version=3 + ) + deserialized_key = deserialize_entity_key( + serialized_key, entity_key_serialization_version=3 + ) + + assert deserialized_key == entity_key_proto + + +def test_binary_format_deterministic(): + """Test that serialization is deterministic (same input produces same output).""" + entity_key_proto = EntityKeyProto( + join_keys=["customer", "user", "session"], + entity_values=[ + ValueProto(string_val="cust1"), + ValueProto(string_val="user1"), + ValueProto(string_val="sess1"), + ], + ) + + # Serialize the same entity multiple times + serializations = [] + for _ in range(5): + serialized = serialize_entity_key( + entity_key_proto, 
entity_key_serialization_version=3 + ) + serializations.append(serialized) + + # All serializations should be identical + for s in serializations[1:]: + assert s == serializations[0], "Serialization is not deterministic" + + +def test_optimization_preserves_sorting(): + """Test that optimizations preserve the sorting behavior for multi-entity keys.""" + # Create entity key with unsorted keys + entity_key_proto = EntityKeyProto( + join_keys=["zebra", "alpha", "beta"], + entity_values=[ + ValueProto(string_val="z_val"), + ValueProto(string_val="a_val"), + ValueProto(string_val="b_val"), + ], + ) + + serialized = serialize_entity_key( + entity_key_proto, entity_key_serialization_version=3 + ) + deserialized = deserialize_entity_key( + serialized, entity_key_serialization_version=3 + ) + + # Keys should be sorted in the result + expected_sorted_keys = ["alpha", "beta", "zebra"] + expected_sorted_values = ["a_val", "b_val", "z_val"] + + assert deserialized.join_keys == expected_sorted_keys + assert [v.string_val for v in deserialized.entity_values] == expected_sorted_values + + +def test_performance_bounds_single_entity(): + """Regression test to ensure single entity performance meets minimum bounds.""" + import time + + entity_key = EntityKeyProto( + join_keys=["user_id"], entity_values=[ValueProto(string_val="user123")] + ) + + # Measure serialization time for 1000 operations + start = time.perf_counter() + for _ in range(1000): + serialize_entity_key(entity_key, entity_key_serialization_version=3) + serialize_time = time.perf_counter() - start + + # Measure deserialization time + serialized = serialize_entity_key(entity_key, entity_key_serialization_version=3) + start = time.perf_counter() + for _ in range(1000): + deserialize_entity_key(serialized, entity_key_serialization_version=3) + deserialize_time = time.perf_counter() - start + + # Performance bounds with generous thresholds to avoid flaky failures on CI runners + assert serialize_time < 0.2, f"Serialization 
too slow: {serialize_time:.4f}s" + assert deserialize_time < 0.2, f"Deserialization too slow: {deserialize_time:.4f}s" + + +def test_non_ascii_prefix_compatibility(): + """Critical test: ensure prefix serialization matches full entity key serialization for non-ASCII keys.""" + # Test with non-ASCII characters that have different byte vs character lengths + non_ascii_keys = ["用户ID", "사용자ID", "идентификатор", "مُعرِّف"] + + for key in non_ascii_keys: + # Test single key prefix + prefix_result = serialize_entity_key_prefix( + [key], entity_key_serialization_version=3 + ) + + # Create full entity key and serialize it + entity_key = EntityKeyProto( + join_keys=[key], entity_values=[ValueProto(string_val="test_value")] + ) + full_result = serialize_entity_key( + entity_key, entity_key_serialization_version=3 + ) + + # The prefix should match the beginning of the full serialization + # Extract just the key portion (skip entity count, but include key metadata) + prefix_len = len(prefix_result) + assert full_result[:prefix_len] == prefix_result, ( + f"Prefix mismatch for non-ASCII key '{key}': " + f"Character length: {len(key)}, " + f"UTF-8 byte length: {len(key.encode('utf8'))}" + ) + + +def test_ascii_prefix_compatibility(): + """Verify prefix compatibility still works for ASCII keys.""" + ascii_keys = ["user_id", "session_id", "device_id"] + + for key in ascii_keys: + prefix_result = serialize_entity_key_prefix( + [key], entity_key_serialization_version=3 + ) + + entity_key = EntityKeyProto( + join_keys=[key], entity_values=[ValueProto(string_val="test_value")] + ) + full_result = serialize_entity_key( + entity_key, entity_key_serialization_version=3 + ) + + prefix_len = len(prefix_result) + assert full_result[:prefix_len] == prefix_result, ( + f"Prefix mismatch for ASCII key '{key}'" + ) + + +def test_multi_key_non_ascii_prefix_compatibility(): + """Test multi-key prefix compatibility with non-ASCII characters.""" + mixed_keys = ["user_id", "用户会话", "session_id"] # Mix 
ASCII and non-ASCII + + prefix_result = serialize_entity_key_prefix( + mixed_keys, entity_key_serialization_version=3 + ) + + entity_key = EntityKeyProto( + join_keys=mixed_keys, + entity_values=[ + ValueProto(string_val="test1"), + ValueProto(string_val="test2"), + ValueProto(string_val="test3"), + ], + ) + full_result = serialize_entity_key(entity_key, entity_key_serialization_version=3) + + prefix_len = len(prefix_result) + assert full_result[:prefix_len] == prefix_result, ( + "Multi-key prefix mismatch with non-ASCII" + ) diff --git a/sdk/python/tests/unit/local_feast_tests/test_feature_service.py b/sdk/python/tests/unit/local_feast_tests/test_feature_service.py index 75ceb463085..c331f2913de 100644 --- a/sdk/python/tests/unit/local_feast_tests/test_feature_service.py +++ b/sdk/python/tests/unit/local_feast_tests/test_feature_service.py @@ -6,7 +6,7 @@ create_driver_hourly_stats_df, create_global_daily_stats_df, ) -from tests.integration.feature_repos.universal.feature_views import TAGS +from tests.universal.feature_repos.universal.feature_views import TAGS from tests.utils.basic_read_write_test import basic_rw_test from tests.utils.cli_repo_creator import CliRunner, get_example_repo @@ -71,7 +71,7 @@ def test_apply_with_fv_inference() -> None: fs = store.get_feature_service("all_stats") assert len(fs.feature_view_projections) == 2 - assert len(fs.feature_view_projections[0].features) == 3 + assert len(fs.feature_view_projections[0].features) == 6 assert len(fs.feature_view_projections[0].desired_features) == 0 assert len(fs.feature_view_projections[1].features) == 2 assert len(fs.feature_view_projections[1].desired_features) == 0 diff --git a/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py b/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py index debe14beee2..9b7660bf692 100644 --- a/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py +++ b/sdk/python/tests/unit/local_feast_tests/test_local_feature_store.py 
@@ -11,6 +11,7 @@ from feast.data_format import AvroFormat, ParquetFormat from feast.data_source import KafkaSource from feast.entity import Entity +from feast.errors import ConflictingFeatureViewNames from feast.feast_object import ALL_RESOURCE_TYPES from feast.feature_store import FeatureStore from feast.feature_view import DUMMY_ENTITY_ID, DUMMY_ENTITY_NAME, FeatureView @@ -24,7 +25,7 @@ from feast.repo_config import RegistryConfig, RepoConfig from feast.stream_feature_view import stream_feature_view from feast.types import Array, Bytes, Float32, Int64, String, ValueType, from_value_type -from tests.integration.feature_repos.universal.feature_views import TAGS +from tests.universal.feature_repos.universal.feature_views import TAGS from tests.utils.cli_repo_creator import CliRunner, get_example_repo from tests.utils.data_source_test_creator import prep_file_source @@ -534,16 +535,10 @@ def test_apply_conflicting_feature_view_names(feature_store_with_local_registry) source=FileSource(path="customer_stats.parquet"), tags={}, ) - try: + with pytest.raises(ConflictingFeatureViewNames) as exc_info: feature_store_with_local_registry.apply([driver_stats, customer_stats]) - error = None - except ValueError as e: - error = e - assert ( - isinstance(error, ValueError) - and "Please ensure that all feature view names are case-insensitively unique" - in error.args[0] - ) + + assert "Feature view names must be case-insensitively unique" in str(exc_info.value) feature_store_with_local_registry.teardown() diff --git a/sdk/python/tests/unit/online_store/test_mongodb_online_retrieval.py b/sdk/python/tests/unit/online_store/test_mongodb_online_retrieval.py new file mode 100644 index 00000000000..cfadc3151fd --- /dev/null +++ b/sdk/python/tests/unit/online_store/test_mongodb_online_retrieval.py @@ -0,0 +1,305 @@ +""" +Unit tests for MongoDB online store. + +Docker-dependent tests are marked with ``@_requires_docker`` and are skipped when +Docker is unavailable. 
Pure Python tests (no container needed) run in all environments. +""" + +from datetime import datetime, timedelta, timezone + +import pytest + +pytest.importorskip("pymongo") + +from feast import FeatureView, Field, FileSource # noqa: E402 +from feast.infra.online_stores.mongodb_online_store.mongodb import ( # noqa: E402 + MongoDBOnlineStore, +) +from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto +from feast.protos.feast.types.Value_pb2 import Value as ValueProto +from feast.types import Int64 +from feast.utils import _utc_now +from tests.universal.feature_repos.universal.feature_views import TAGS +from tests.utils.cli_repo_creator import CliRunner, get_example_repo + +# Check if Docker is available +docker_available = False +try: + import docker + from testcontainers.mongodb import MongoDbContainer + + # Try to connect to Docker daemon + try: + client = docker.from_env() + client.ping() + docker_available = True + except Exception: + pass +except ImportError: + pass + +# Applied per-test so that pure Python tests still run without Docker. +_requires_docker = pytest.mark.skipif( + not docker_available, + reason="Docker is not available or not running. Start Docker daemon to run these tests.", +) + + +@pytest.fixture(scope="module") +def mongodb_container(): + """Start a MongoDB container for testing.""" + container = MongoDbContainer( + "mongo:latest", + username="test", + password="test", # pragma: allowlist secret + ).with_exposed_ports(27017) + container.start() + yield container + container.stop() + + +@pytest.fixture +def mongodb_connection_string(mongodb_container): + """Get MongoDB connection string from the container.""" + exposed_port = mongodb_container.get_exposed_port(27017) + return f"mongodb://test:test@localhost:{exposed_port}" # pragma: allowlist secret + + +@_requires_docker +def test_mongodb_online_features(mongodb_connection_string): + """ + Test reading from MongoDB online store using testcontainers. 
+ """ + runner = CliRunner() + with ( + runner.local_repo( + get_example_repo("example_feature_repo_1.py"), + offline_store="file", + online_store="mongodb", + teardown=False, # Disable CLI teardown since container will be stopped by fixture + ) as store + ): + # Update the connection string to use the test container + store.config.online_store.connection_string = mongodb_connection_string + + # Write some data to two tables + driver_locations_fv = store.get_feature_view(name="driver_locations") + customer_profile_fv = store.get_feature_view(name="customer_profile") + customer_driver_combined_fv = store.get_feature_view( + name="customer_driver_combined" + ) + + provider = store._get_provider() + + driver_key = EntityKeyProto( + join_keys=["driver_id"], entity_values=[ValueProto(int64_val=1)] + ) + provider.online_write_batch( + config=store.config, + table=driver_locations_fv, + data=[ + ( + driver_key, + { + "lat": ValueProto(double_val=0.1), + "lon": ValueProto(string_val="1.0"), + }, + _utc_now(), + _utc_now(), + ) + ], + progress=None, + ) + + customer_key = EntityKeyProto( + join_keys=["customer_id"], entity_values=[ValueProto(string_val="5")] + ) + provider.online_write_batch( + config=store.config, + table=customer_profile_fv, + data=[ + ( + customer_key, + { + "avg_orders_day": ValueProto(float_val=1.0), + "name": ValueProto(string_val="John"), + "age": ValueProto(int64_val=3), + }, + _utc_now(), + _utc_now(), + ) + ], + progress=None, + ) + + customer_key = EntityKeyProto( + join_keys=["customer_id", "driver_id"], + entity_values=[ValueProto(string_val="5"), ValueProto(int64_val=1)], + ) + provider.online_write_batch( + config=store.config, + table=customer_driver_combined_fv, + data=[ + ( + customer_key, + {"trips": ValueProto(int64_val=7)}, + _utc_now(), + _utc_now(), + ) + ], + progress=None, + ) + + assert len(store.list_entities()) == 3 + assert len(store.list_entities(tags=TAGS)) == 2 + + # Retrieve features using two keys + result = 
store.get_online_features( + features=[ + "driver_locations:lon", + "customer_profile:avg_orders_day", + "customer_profile:name", + "customer_driver_combined:trips", + ], + entity_rows=[ + {"driver_id": 1, "customer_id": "5"}, + {"driver_id": 1, "customer_id": 5}, + ], + full_feature_names=False, + ).to_dict() + + assert "lon" in result + assert "avg_orders_day" in result + assert "name" in result + assert result["driver_id"] == [1, 1] + assert result["customer_id"] == ["5", "5"] + assert result["lon"] == ["1.0", "1.0"] + assert result["avg_orders_day"] == [1.0, 1.0] + assert result["name"] == ["John", "John"] + assert result["trips"] == [7, 7] + + # Ensure features are still in result when keys not found + result = store.get_online_features( + features=["customer_driver_combined:trips"], + entity_rows=[{"driver_id": 0, "customer_id": 0}], + full_feature_names=False, + ).to_dict() + + assert result["trips"] == [None] + + +# --------------------------------------------------------------------------- +# Pure Python tests — no Docker required +# --------------------------------------------------------------------------- + + +def _make_fv(*field_names: str) -> FeatureView: + """Build a minimal FeatureView with Int64 features for use in unit tests.""" + return FeatureView( + name="test_fv", + entities=[], + schema=[Field(name=n, dtype=Int64) for n in field_names], + source=FileSource(path="fake.parquet", timestamp_field="event_timestamp"), + ttl=timedelta(days=1), + ) + + +def test_convert_raw_docs_missing_entity(): + """Entity key absent from docs → result tuple is (None, None) for that position.""" + fv = _make_fv("score") + ts = datetime(2024, 1, 1, tzinfo=timezone.utc) + ids = [b"present", b"missing"] + docs = { + b"present": { + "features": {"test_fv": {"score": 42}}, + "event_timestamps": {"test_fv": ts}, + } + } + + results = MongoDBOnlineStore._convert_raw_docs_to_proto(ids, docs, fv) + + assert len(results) == 2 + ts_out, feats_out = results[0] + assert ts_out 
== ts + assert feats_out["score"].int64_val == 42 + assert results[1] == (None, None) + + +def test_convert_raw_docs_partial_doc(): + """Entity exists but one feature key is absent → empty ValueProto for that feature.""" + fv = _make_fv("present_feat", "missing_feat") + ts = datetime(2024, 1, 1, tzinfo=timezone.utc) + ids = [b"entity1"] + docs = { + b"entity1": { + # missing_feat intentionally omitted (e.g. schema migration scenario) + "features": {"test_fv": {"present_feat": 99}}, + "event_timestamps": {"test_fv": ts}, + } + } + + results = MongoDBOnlineStore._convert_raw_docs_to_proto(ids, docs, fv) + + assert len(results) == 1 + ts_out, feats_out = results[0] + assert ts_out == ts + assert feats_out["present_feat"].int64_val == 99 + assert feats_out["missing_feat"] == ValueProto() # null / not-set + + +def test_convert_raw_docs_entity_exists_but_fv_not_written(): + """Entity doc exists (written by another FV) but this FV was never written → (None, None). + + MongoDB stores all feature views for the same entity in one document. + If FV "driver_stats" was written, an entity doc exists for driver_1. + A subsequent read for FV "pricing" (never written) must return (None, None), + not a truthy dict of empty ValueProtos. 
+ """ + pricing_fv = _make_fv("price") + ts = datetime(2024, 1, 1, tzinfo=timezone.utc) + ids = [b"driver_1"] + # doc was created by driver_stats, pricing key is absent entirely + docs = { + b"driver_1": { + "features": {"driver_stats": {"acc_rate": 0.9}}, + "event_timestamps": {"driver_stats": ts}, + } + } + + results = MongoDBOnlineStore._convert_raw_docs_to_proto(ids, docs, pricing_fv) + + assert len(results) == 1 + assert results[0] == (None, None) + + +def test_convert_raw_docs_ordering(): + """Result order matches the ids list regardless of dict insertion order in docs.""" + fv = _make_fv("score") + ts = datetime(2024, 1, 1, tzinfo=timezone.utc) + + # Request entity keys in z → a → m order + ids = [b"entity_z", b"entity_a", b"entity_m"] + + # docs is in a different order (simulating arbitrary MongoDB cursor return order) + docs = { + b"entity_a": { + "features": {"test_fv": {"score": 2}}, + "event_timestamps": {"test_fv": ts}, + }, + b"entity_m": { + "features": {"test_fv": {"score": 3}}, + "event_timestamps": {"test_fv": ts}, + }, + b"entity_z": { + "features": {"test_fv": {"score": 1}}, + "event_timestamps": {"test_fv": ts}, + }, + } + + results = MongoDBOnlineStore._convert_raw_docs_to_proto(ids, docs, fv) + + assert len(results) == 3 + # Results must follow the ids order: z=1, a=2, m=3 + assert results[0][1]["score"].int64_val == 1 # entity_z + assert results[1][1]["score"].int64_val == 2 # entity_a + assert results[2][1]["score"].int64_val == 3 # entity_m diff --git a/sdk/python/tests/unit/online_store/test_online_retrieval.py b/sdk/python/tests/unit/online_store/test_online_retrieval.py index 501586f7828..60f583ad669 100644 --- a/sdk/python/tests/unit/online_store/test_online_retrieval.py +++ b/sdk/python/tests/unit/online_store/test_online_retrieval.py @@ -21,7 +21,7 @@ from feast.torch_wrapper import get_torch from feast.types import ValueType from feast.utils import _utc_now -from tests.integration.feature_repos.universal.feature_views import TAGS 
+from tests.universal.feature_repos.universal.feature_views import TAGS from tests.utils.cli_repo_creator import CliRunner, get_example_repo diff --git a/sdk/python/tests/unit/test_aggregation_ops.py b/sdk/python/tests/unit/test_aggregation_ops.py index 0a5f0bd6ed5..9a22da0ecec 100644 --- a/sdk/python/tests/unit/test_aggregation_ops.py +++ b/sdk/python/tests/unit/test_aggregation_ops.py @@ -1,13 +1,16 @@ +from datetime import timedelta + import pytest -from feast.aggregation import aggregation_specs_to_agg_ops +from feast.aggregation import Aggregation, aggregation_specs_to_agg_ops class DummyAggregation: - def __init__(self, *, function: str, column: str, time_window=None): + def __init__(self, *, function: str, column: str, time_window=None, name: str = ""): self.function = function self.column = column self.time_window = time_window + self.name = name def test_aggregation_specs_to_agg_ops_success(): @@ -42,3 +45,54 @@ def test_aggregation_specs_to_agg_ops_time_window_unsupported(error_message: str agg_specs, time_window_unsupported_error_message=error_message, ) + + +def test_aggregation_specs_to_agg_ops_custom_name(): + agg_specs = [ + DummyAggregation( + function="sum", + column="seconds_watched", + name="sum_seconds_watched_per_ad_1d", + ), + ] + + agg_ops = aggregation_specs_to_agg_ops( + agg_specs, + time_window_unsupported_error_message="Time window aggregation is not supported.", + ) + + assert agg_ops == { + "sum_seconds_watched_per_ad_1d": ("sum", "seconds_watched"), + } + + +def test_aggregation_specs_to_agg_ops_mixed_names(): + agg_specs = [ + DummyAggregation(function="sum", column="trips", name="total_trips"), + DummyAggregation(function="mean", column="fare"), + ] + + agg_ops = aggregation_specs_to_agg_ops( + agg_specs, + time_window_unsupported_error_message="Time window aggregation is not supported.", + ) + + assert agg_ops == { + "total_trips": ("sum", "trips"), + "mean_fare": ("mean", "fare"), + } + + +def 
test_aggregation_round_trip_with_name(): + agg = Aggregation( + column="seconds_watched", + function="sum", + time_window=timedelta(days=1), + name="sum_seconds_watched_per_ad_1d", + ) + proto = agg.to_proto() + assert proto.name == "sum_seconds_watched_per_ad_1d" + + restored = Aggregation.from_proto(proto) + assert restored.name == "sum_seconds_watched_per_ad_1d" + assert restored == agg diff --git a/sdk/python/tests/unit/test_feature_views.py b/sdk/python/tests/unit/test_feature_views.py index 9030e6e0c69..3427cfdfc4b 100644 --- a/sdk/python/tests/unit/test_feature_views.py +++ b/sdk/python/tests/unit/test_feature_views.py @@ -23,6 +23,18 @@ from feast.utils import _utc_now, make_tzaware +def test_create_feature_view_without_source(): + fv = FeatureView(name="test_no_source", ttl=timedelta(days=1)) + assert fv.batch_source is None + assert fv.stream_source is None + + proto = fv.to_proto() + assert not proto.spec.HasField("batch_source") + + fv_roundtrip = FeatureView.from_proto(proto) + assert fv_roundtrip.batch_source is None + + def test_create_feature_view_with_conflicting_entities(): user1 = Entity(name="user1", join_keys=["user_id"]) user2 = Entity(name="user2", join_keys=["user_id"]) @@ -48,7 +60,7 @@ def test_create_batch_feature_view(): udf=lambda x: x, ) - with pytest.raises(TypeError): + with pytest.raises(ValueError): BatchFeatureView( name="test batch feature view", entities=[], ttl=timedelta(days=30) ) diff --git a/sdk/python/tests/unit/test_metrics.py b/sdk/python/tests/unit/test_metrics.py new file mode 100644 index 00000000000..ba014064669 --- /dev/null +++ b/sdk/python/tests/unit/test_metrics.py @@ -0,0 +1,826 @@ +# Copyright 2025 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from datetime import datetime, timedelta, timezone +from unittest.mock import MagicMock, patch + +import pytest + +from feast.metrics import ( + feature_freshness_seconds, + materialization_duration_seconds, + materialization_total, + online_features_entity_count, + online_features_request_count, + push_request_count, + request_count, + request_latency, + track_materialization, + track_online_features_entities, + track_push, + track_request_latency, + update_feature_freshness, +) + + +@pytest.fixture(autouse=True) +def _enable_metrics(): + """Enable all metric categories for each test, then restore.""" + import feast.metrics as m + + original = m._config + m._config = m._MetricsFlags( + enabled=True, + resource=True, + request=True, + online_features=True, + push=True, + materialization=True, + freshness=True, + ) + yield + m._config = original + + +class TestTrackRequestLatency: + def test_success_increments_counter_and_records_latency(self): + before_count = request_count.labels( + endpoint="/test", status="success" + )._value.get() + + with track_request_latency("/test"): + pass + + after_count = request_count.labels( + endpoint="/test", status="success" + )._value.get() + assert after_count == before_count + 1 + + def test_error_increments_error_counter(self): + before_count = request_count.labels( + endpoint="/test-err", status="error" + )._value.get() + + with pytest.raises(ValueError): + with track_request_latency("/test-err"): + raise ValueError("boom") + + after_count = request_count.labels( + endpoint="/test-err", status="error" + 
)._value.get() + assert after_count == before_count + 1 + + def test_latency_is_recorded(self): + before_sum = request_latency.labels( + endpoint="/test-latency", feature_count="", feature_view_count="" + )._sum.get() + + with track_request_latency("/test-latency"): + import time + + time.sleep(0.01) + + after_sum = request_latency.labels( + endpoint="/test-latency", feature_count="", feature_view_count="" + )._sum.get() + assert after_sum > before_sum + + def test_feature_count_and_feature_view_count_labels(self): + """Latency histogram carries feature_count and feature_view_count labels.""" + label_set = dict( + endpoint="/get-online-features", + feature_count="5", + feature_view_count="2", + ) + before_sum = request_latency.labels(**label_set)._sum.get() + + with track_request_latency( + "/get-online-features", feature_count="5", feature_view_count="2" + ): + pass + + after_sum = request_latency.labels(**label_set)._sum.get() + assert after_sum > before_sum + + def test_default_labels_are_empty_string(self): + """Non-online-features endpoints get empty-string labels by default.""" + label_set = dict( + endpoint="/materialize", feature_count="", feature_view_count="" + ) + before_sum = request_latency.labels(**label_set)._sum.get() + + with track_request_latency("/materialize"): + pass + + after_sum = request_latency.labels(**label_set)._sum.get() + assert after_sum > before_sum + + def test_labels_updated_via_yielded_context(self): + """Labels set on the yielded context are used in the final metrics.""" + label_set = dict( + endpoint="/ctx-update", feature_count="3", feature_view_count="1" + ) + before_sum = request_latency.labels(**label_set)._sum.get() + + with track_request_latency("/ctx-update") as ctx: + ctx.feature_count = "3" + ctx.feature_view_count = "1" + + after_sum = request_latency.labels(**label_set)._sum.get() + assert after_sum > before_sum + + def test_error_before_labels_set_still_records(self): + """Errors before labels are updated still 
record with default labels.""" + before_count = request_count.labels( + endpoint="/early-fail", status="error" + )._value.get() + + with pytest.raises(RuntimeError): + with track_request_latency("/early-fail") as _ctx: + raise RuntimeError("auth failed") + + after_count = request_count.labels( + endpoint="/early-fail", status="error" + )._value.get() + assert after_count == before_count + 1 + + recorded_sum = request_latency.labels( + endpoint="/early-fail", feature_count="", feature_view_count="" + )._sum.get() + assert recorded_sum > 0 + + +class TestMetricsOptIn: + """Verify that when a category is disabled, its helpers are true no-ops.""" + + @staticmethod + def _all_off(): + import feast.metrics as m + + m._config = m._MetricsFlags() # everything False + + def test_track_request_latency_noop_when_disabled(self): + self._all_off() + label_set = dict( + endpoint="/disabled-test", feature_count="", feature_view_count="" + ) + before_sum = request_latency.labels(**label_set)._sum.get() + + with track_request_latency("/disabled-test"): + pass + + assert request_latency.labels(**label_set)._sum.get() == before_sum + + def test_track_online_features_entities_noop_when_disabled(self): + self._all_off() + before = online_features_request_count._value.get() + track_online_features_entities(100) + assert online_features_request_count._value.get() == before + + def test_track_push_noop_when_disabled(self): + self._all_off() + before = push_request_count.labels( + push_source="src", mode="online" + )._value.get() + track_push("src", "online") + assert ( + push_request_count.labels(push_source="src", mode="online")._value.get() + == before + ) + + def test_track_materialization_noop_when_disabled(self): + self._all_off() + before = materialization_total.labels( + feature_view="fv_disabled", status="success" + )._value.get() + track_materialization("fv_disabled", success=True, duration_seconds=1.0) + assert ( + materialization_total.labels( + feature_view="fv_disabled", 
status="success" + )._value.get() + == before + ) + + +class TestGranularCategoryControl: + """Verify individual category toggles work independently.""" + + def test_request_disabled_but_push_enabled(self): + import feast.metrics as m + + m._config = m._MetricsFlags( + enabled=True, + request=False, + push=True, + resource=True, + online_features=True, + materialization=True, + freshness=True, + ) + + # request should be no-op + label_set = dict( + endpoint="/granular-req", feature_count="", feature_view_count="" + ) + before_req = request_latency.labels(**label_set)._sum.get() + with track_request_latency("/granular-req"): + pass + assert request_latency.labels(**label_set)._sum.get() == before_req + + # push should still record + before_push = push_request_count.labels( + push_source="s", mode="online" + )._value.get() + track_push("s", "online") + assert ( + push_request_count.labels(push_source="s", mode="online")._value.get() + == before_push + 1 + ) + + def test_online_features_disabled_but_materialization_enabled(self): + import feast.metrics as m + + m._config = m._MetricsFlags( + enabled=True, + online_features=False, + materialization=True, + resource=True, + request=True, + push=True, + freshness=True, + ) + + # online_features should be no-op + before_of = online_features_request_count._value.get() + track_online_features_entities(50) + assert online_features_request_count._value.get() == before_of + + # materialization should still record + before_mat = materialization_total.labels( + feature_view="fv_gran", status="success" + )._value.get() + track_materialization("fv_gran", success=True, duration_seconds=1.0) + assert ( + materialization_total.labels( + feature_view="fv_gran", status="success" + )._value.get() + == before_mat + 1 + ) + + def test_only_resource_enabled(self): + """When only resource is on, all request-path helpers are no-ops.""" + import feast.metrics as m + + m._config = m._MetricsFlags( + enabled=True, + resource=True, + 
request=False, + online_features=False, + push=False, + materialization=False, + freshness=False, + ) + + label_set = dict(endpoint="/res-only", feature_count="", feature_view_count="") + before_req = request_latency.labels(**label_set)._sum.get() + before_of = online_features_request_count._value.get() + before_push = push_request_count.labels( + push_source="x", mode="offline" + )._value.get() + before_mat = materialization_total.labels( + feature_view="fv_res", status="success" + )._value.get() + + with track_request_latency("/res-only"): + pass + track_online_features_entities(10) + track_push("x", "offline") + track_materialization("fv_res", success=True, duration_seconds=1.0) + + assert request_latency.labels(**label_set)._sum.get() == before_req + assert online_features_request_count._value.get() == before_of + assert ( + push_request_count.labels(push_source="x", mode="offline")._value.get() + == before_push + ) + assert ( + materialization_total.labels( + feature_view="fv_res", status="success" + )._value.get() + == before_mat + ) + + +class TestMetricsYamlConfig: + """Verify metrics config in feature_store.yaml is respected. + + We mock out everything past the metrics-gate check in ``start_server`` + so these tests never actually launch a real HTTP server. 
+ """ + + @staticmethod + def _call_start_server(mock_store, cli_metrics: bool): + """Call start_server with enough mocking to avoid side-effects.""" + from feast.feature_server import start_server + + with ( + patch("feast.feature_server.feast_metrics") as mock_fm, + patch("feast.feature_server.str_to_auth_manager_type"), + patch("feast.feature_server.init_security_manager"), + patch("feast.feature_server.init_auth_manager"), + patch( + "feast.feature_server.FeastServeApplication", + side_effect=RuntimeError("stop"), + ) + if hasattr(__import__("sys"), "platform") + and __import__("sys").platform != "win32" + else patch("uvicorn.run", side_effect=RuntimeError("stop")), + ): + try: + start_server( + store=mock_store, + host="127.0.0.1", + port=6566, + no_access_log=True, + workers=1, + worker_connections=1000, + max_requests=1000, + max_requests_jitter=50, + keep_alive_timeout=30, + registry_ttl_sec=60, + tls_key_path="", + tls_cert_path="", + metrics=cli_metrics, + ) + except (RuntimeError, Exception): + pass + return mock_fm + + def test_metrics_enabled_from_yaml_config(self): + """start_server enables metrics when config has metrics.enabled=True, + even though the CLI flag is False.""" + from types import SimpleNamespace + + metrics_cfg = SimpleNamespace(enabled=True) + fs_cfg = SimpleNamespace(metrics=metrics_cfg) + mock_store = MagicMock() + mock_store.config = SimpleNamespace(feature_server=fs_cfg) + + mock_fm = self._call_start_server(mock_store, cli_metrics=False) + mock_fm.build_metrics_flags.assert_called_once_with(metrics_cfg) + mock_fm.start_metrics_server.assert_called_once() + + def test_cli_flag_enables_metrics_without_yaml_config(self): + """start_server enables metrics when --metrics is passed even without + any feature_server config section.""" + from types import SimpleNamespace + + mock_store = MagicMock() + mock_store.config = SimpleNamespace(feature_server=None) + + mock_fm = self._call_start_server(mock_store, cli_metrics=True) + 
mock_fm.build_metrics_flags.assert_called_once_with(None) + mock_fm.start_metrics_server.assert_called_once() + + def test_metrics_not_started_when_both_disabled(self): + """start_server does NOT start metrics when neither CLI nor config enables it.""" + from types import SimpleNamespace + + mock_store = MagicMock() + mock_store.config = SimpleNamespace( + feature_server=SimpleNamespace(metrics=SimpleNamespace(enabled=False)), + ) + + mock_fm = self._call_start_server(mock_store, cli_metrics=False) + mock_fm.start_metrics_server.assert_not_called() + + def test_metrics_not_started_when_config_is_none(self): + """start_server does NOT start metrics when feature_server config is None + and CLI flag is also False.""" + from types import SimpleNamespace + + mock_store = MagicMock() + mock_store.config = SimpleNamespace(feature_server=None) + + mock_fm = self._call_start_server(mock_store, cli_metrics=False) + mock_fm.start_metrics_server.assert_not_called() + + +class TestTrackOnlineFeaturesEntities: + def test_increments_request_count(self): + before = online_features_request_count._value.get() + track_online_features_entities(10) + assert online_features_request_count._value.get() == before + 1 + + def test_records_entity_count(self): + before_count = online_features_entity_count._sum.get() + track_online_features_entities(42) + assert online_features_entity_count._sum.get() >= before_count + 42 + + +class TestTrackPush: + def test_increments_push_counter(self): + before = push_request_count.labels( + push_source="my_source", mode="online" + )._value.get() + track_push("my_source", "online") + assert ( + push_request_count.labels( + push_source="my_source", mode="online" + )._value.get() + == before + 1 + ) + + +class TestTrackMaterialization: + def test_success_counter(self): + before = materialization_total.labels( + feature_view="fv1", status="success" + )._value.get() + track_materialization("fv1", success=True, duration_seconds=1.5) + assert ( + 
materialization_total.labels( + feature_view="fv1", status="success" + )._value.get() + == before + 1 + ) + + def test_failure_counter(self): + before = materialization_total.labels( + feature_view="fv2", status="failure" + )._value.get() + track_materialization("fv2", success=False, duration_seconds=0.5) + assert ( + materialization_total.labels( + feature_view="fv2", status="failure" + )._value.get() + == before + 1 + ) + + def test_duration_histogram(self): + before_sum = materialization_duration_seconds.labels( + feature_view="fv3" + )._sum.get() + track_materialization("fv3", success=True, duration_seconds=3.7) + after_sum = materialization_duration_seconds.labels( + feature_view="fv3" + )._sum.get() + assert pytest.approx(after_sum - before_sum, abs=0.01) == 3.7 + + +class TestUpdateFeatureFreshness: + def test_sets_freshness_for_materialized_views(self): + mock_fv = MagicMock() + mock_fv.name = "test_fv" + mock_fv.most_recent_end_time = datetime.now(tz=timezone.utc) - timedelta( + minutes=5 + ) + + mock_store = MagicMock() + mock_store.project = "test_project" + mock_store.list_feature_views.return_value = [mock_fv] + + update_feature_freshness(mock_store) + + staleness = feature_freshness_seconds.labels( + feature_view="test_fv", project="test_project" + )._value.get() + assert 280 < staleness < 320 + + def test_skips_unmaterialized_views(self): + mock_fv = MagicMock() + mock_fv.name = "unmaterialized_fv" + mock_fv.most_recent_end_time = None + + mock_store = MagicMock() + mock_store.project = "test_project" + mock_store.list_feature_views.return_value = [mock_fv] + + update_feature_freshness(mock_store) + + def test_handles_naive_datetime(self): + mock_fv = MagicMock() + mock_fv.name = "naive_fv" + # Simulate a naive UTC datetime (no tzinfo), as Feast typically stores + naive_utc_now = datetime.now(tz=timezone.utc).replace(tzinfo=None) + mock_fv.most_recent_end_time = naive_utc_now - timedelta(hours=1) + + mock_store = MagicMock() + mock_store.project = 
"test_project" + mock_store.list_feature_views.return_value = [mock_fv] + + update_feature_freshness(mock_store) + + staleness = feature_freshness_seconds.labels( + feature_view="naive_fv", project="test_project" + )._value.get() + assert 3500 < staleness < 3700 + + def test_handles_registry_errors_gracefully(self): + mock_store = MagicMock() + mock_store.list_feature_views.side_effect = Exception("registry down") + + update_feature_freshness(mock_store) + + +class TestResolveFeatureCounts: + """Verify _resolve_feature_counts for both feature-ref lists and FeatureService.""" + + def test_feature_ref_list(self): + from feast.feature_server import _resolve_feature_counts + + refs = ["driver_fv:conv_rate", "driver_fv:acc_rate", "vehicle_fv:mileage"] + feat_count, fv_count = _resolve_feature_counts(refs) + assert feat_count == "3" + assert fv_count == "2" + + def test_single_feature_view(self): + from feast.feature_server import _resolve_feature_counts + + refs = ["fv1:a", "fv1:b", "fv1:c"] + feat_count, fv_count = _resolve_feature_counts(refs) + assert feat_count == "3" + assert fv_count == "1" + + def test_empty_list(self): + from feast.feature_server import _resolve_feature_counts + + feat_count, fv_count = _resolve_feature_counts([]) + assert feat_count == "0" + assert fv_count == "0" + + def test_feature_service(self): + from feast.feature_server import _resolve_feature_counts + + proj1 = MagicMock() + proj1.features = [MagicMock(), MagicMock()] + proj2 = MagicMock() + proj2.features = [MagicMock()] + + fs_svc = MagicMock() + fs_svc.feature_view_projections = [proj1, proj2] + + from feast.feature_service import FeatureService + + fs_svc.__class__ = FeatureService + + feat_count, fv_count = _resolve_feature_counts(fs_svc) + assert feat_count == "3" + assert fv_count == "2" + + +class TestFeatureServerMetricsIntegration: + """Test that feature server endpoints record metrics.""" + + @pytest.fixture + def mock_fs_factory(self): + from tests.foo_provider import 
FooProvider + + def builder(**async_support): + provider = FooProvider.with_async_support(**async_support) + fs = MagicMock() + fs._get_provider.return_value = provider + from feast.online_response import OnlineResponse + from feast.protos.feast.serving.ServingService_pb2 import ( + GetOnlineFeaturesResponse, + ) + + empty_response = OnlineResponse(GetOnlineFeaturesResponse(results=[])) + fs.get_online_features = MagicMock(return_value=empty_response) + fs.push = MagicMock() + fs.get_online_features_async = MagicMock(return_value=empty_response) + fs.push_async = MagicMock() + return fs + + return builder + + def test_get_online_features_records_metrics(self, mock_fs_factory): + from fastapi.testclient import TestClient + + from feast.feature_server import get_app + + fs = mock_fs_factory(online_read=False) + client = TestClient(get_app(fs)) + + before_req = request_count.labels( + endpoint="/get-online-features", status="success" + )._value.get() + before_entity = online_features_request_count._value.get() + + client.post( + "/get-online-features", + json={ + "features": ["fv:feat1"], + "entities": {"id": [1, 2, 3]}, + }, + ) + + assert ( + request_count.labels( + endpoint="/get-online-features", status="success" + )._value.get() + == before_req + 1 + ) + assert online_features_request_count._value.get() == before_entity + 1 + + @pytest.mark.parametrize( + "features,expected_feat_count,expected_fv_count", + [ + (["fv1:a"], "1", "1"), + (["fv1:a", "fv1:b", "fv2:c"], "3", "2"), + ( + ["fv1:a", "fv1:b", "fv2:c", "fv2:d", "fv3:e"], + "5", + "3", + ), + ], + ids=["1_feat_1_fv", "3_feats_2_fvs", "5_feats_3_fvs"], + ) + def test_latency_labels_with_varying_request_sizes( + self, mock_fs_factory, features, expected_feat_count, expected_fv_count + ): + """Verify feature_count and feature_view_count labels change with request size.""" + from fastapi.testclient import TestClient + + from feast.feature_server import get_app + + fs = mock_fs_factory(online_read=False) + client 
= TestClient(get_app(fs)) + + label_set = dict( + endpoint="/get-online-features", + feature_count=expected_feat_count, + feature_view_count=expected_fv_count, + ) + before_sum = request_latency.labels(**label_set)._sum.get() + + client.post( + "/get-online-features", + json={ + "features": features, + "entities": {"id": [1]}, + }, + ) + + after_sum = request_latency.labels(**label_set)._sum.get() + assert after_sum > before_sum + + def test_push_records_metrics(self, mock_fs_factory): + from fastapi.testclient import TestClient + + from feast.feature_server import get_app + from feast.utils import _utc_now + + fs = mock_fs_factory(online_write=False) + client = TestClient(get_app(fs)) + + before = push_request_count.labels( + push_source="driver_locations_push", mode="online" + )._value.get() + + client.post( + "/push", + json={ + "push_source_name": "driver_locations_push", + "df": { + "driver_lat": [42.0], + "driver_long": ["42.0"], + "driver_id": [123], + "event_timestamp": [str(_utc_now())], + "created_timestamp": [str(_utc_now())], + }, + "to": "online", + }, + ) + + assert ( + push_request_count.labels( + push_source="driver_locations_push", mode="online" + )._value.get() + == before + 1 + ) + + +class TestBuildMetricsFlags: + """Verify build_metrics_flags correctly maps MetricsConfig to _MetricsFlags.""" + + def test_no_config_enables_all(self): + from feast.metrics import build_metrics_flags + + flags = build_metrics_flags(None) + assert flags.enabled is True + assert flags.resource is True + assert flags.request is True + assert flags.online_features is True + assert flags.push is True + assert flags.materialization is True + assert flags.freshness is True + + def test_selective_disable(self): + from types import SimpleNamespace + + from feast.metrics import build_metrics_flags + + mc = SimpleNamespace( + enabled=True, + resource=True, + request=False, + online_features=True, + push=False, + materialization=True, + freshness=False, + ) + flags = 
build_metrics_flags(mc) + assert flags.enabled is True + assert flags.resource is True + assert flags.request is False + assert flags.online_features is True + assert flags.push is False + assert flags.materialization is True + assert flags.freshness is False + + def test_all_categories_disabled(self): + from types import SimpleNamespace + + from feast.metrics import build_metrics_flags + + mc = SimpleNamespace( + enabled=True, + resource=False, + request=False, + online_features=False, + push=False, + materialization=False, + freshness=False, + ) + flags = build_metrics_flags(mc) + assert flags.enabled is True + assert flags.resource is False + assert flags.request is False + + +class TestCleanupMultiprocessDir: + """Verify the atexit handler only deletes the temp dir in the owner process.""" + + def test_cleanup_skipped_in_forked_child(self, tmp_path): + """Simulate a forked worker: _owns_mp_dir=True but _owner_pid != current PID.""" + import feast.metrics as m + + original_dir = m._prometheus_mp_dir + original_owns = m._owns_mp_dir + original_pid = m._owner_pid + + fake_dir = tmp_path / "feast_metrics_test" + fake_dir.mkdir() + + m._prometheus_mp_dir = str(fake_dir) + m._owns_mp_dir = True + m._owner_pid = -1 # Different from os.getpid() + + try: + m._cleanup_multiprocess_dir() + assert fake_dir.exists(), ( + "Directory should NOT be deleted when _owner_pid != os.getpid()" + ) + finally: + m._prometheus_mp_dir = original_dir + m._owns_mp_dir = original_owns + m._owner_pid = original_pid + + def test_cleanup_runs_in_owner_process(self, tmp_path): + """The owner process (matching PID) should delete the directory.""" + import os + + import feast.metrics as m + + original_dir = m._prometheus_mp_dir + original_owns = m._owns_mp_dir + original_pid = m._owner_pid + + fake_dir = tmp_path / "feast_metrics_test" + fake_dir.mkdir() + + m._prometheus_mp_dir = str(fake_dir) + m._owns_mp_dir = True + m._owner_pid = os.getpid() + + try: + m._cleanup_multiprocess_dir() + assert 
not fake_dir.exists(), ( + "Directory SHOULD be deleted when _owner_pid == os.getpid()" + ) + finally: + m._prometheus_mp_dir = original_dir + m._owns_mp_dir = original_owns + m._owner_pid = original_pid diff --git a/sdk/python/tests/unit/test_on_demand_python_transformation.py b/sdk/python/tests/unit/test_on_demand_python_transformation.py index 6a0f777b283..3ce02255c7c 100644 --- a/sdk/python/tests/unit/test_on_demand_python_transformation.py +++ b/sdk/python/tests/unit/test_on_demand_python_transformation.py @@ -400,291 +400,292 @@ def test_stored_writes(self): class TestOnDemandPythonTransformationAllDataTypes(unittest.TestCase): def setUp(self): - with tempfile.TemporaryDirectory() as data_dir: - self.store = FeatureStore( - config=RepoConfig( - project="test_on_demand_python_transformation", - registry=os.path.join(data_dir, "registry.db"), - provider="local", - entity_key_serialization_version=3, - online_store=SqliteOnlineStoreConfig( - path=os.path.join(data_dir, "online.db") - ), - ) + self.data_dir = tempfile.mkdtemp() + data_dir = self.data_dir + self.store = FeatureStore( + config=RepoConfig( + project="test_on_demand_python_transformation", + registry=os.path.join(data_dir, "registry.db"), + provider="local", + entity_key_serialization_version=3, + online_store=SqliteOnlineStoreConfig( + path=os.path.join(data_dir, "online.db") + ), ) + ) - # Generate test data. - end_date = datetime.now().replace(microsecond=0, second=0, minute=0) - start_date = end_date - timedelta(days=15) + # Generate test data. 
+ end_date = datetime.now().replace(microsecond=0, second=0, minute=0) + start_date = end_date - timedelta(days=15) - driver_entities = [1001, 1002, 1003, 1004, 1005] - driver_df = create_driver_hourly_stats_df( - driver_entities, start_date, end_date - ) - driver_stats_path = os.path.join(data_dir, "driver_stats.parquet") - driver_df.to_parquet( - path=driver_stats_path, allow_truncated_timestamps=True - ) + driver_entities = [1001, 1002, 1003, 1004, 1005] + driver_df = create_driver_hourly_stats_df(driver_entities, start_date, end_date) + driver_stats_path = os.path.join(data_dir, "driver_stats.parquet") + driver_df.to_parquet(path=driver_stats_path, allow_truncated_timestamps=True) - driver = Entity(name="driver", join_keys=["driver_id"]) + driver = Entity(name="driver", join_keys=["driver_id"]) - driver_stats_source = FileSource( - name="driver_hourly_stats_source", - path=driver_stats_path, - timestamp_field="event_timestamp", - created_timestamp_column="created", - ) + driver_stats_source = FileSource( + name="driver_hourly_stats_source", + path=driver_stats_path, + timestamp_field="event_timestamp", + created_timestamp_column="created", + ) - driver_stats_fv = FeatureView( - name="driver_hourly_stats", - entities=[driver], - ttl=timedelta(days=0), - schema=[ - Field(name="conv_rate", dtype=Float32), - Field(name="acc_rate", dtype=Float32), - Field(name="avg_daily_trips", dtype=Int64), - ], - online=True, - source=driver_stats_source, - ) - assert driver_stats_fv.entities == [driver.name] - assert driver_stats_fv.entity_columns == [] + driver_stats_fv = FeatureView( + name="driver_hourly_stats", + entities=[driver], + ttl=timedelta(days=0), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + online=True, + source=driver_stats_source, + ) + assert driver_stats_fv.entities == [driver.name] + assert driver_stats_fv.entity_columns == [] - request_source = RequestSource( 
- name="request_source", - schema=[ - Field(name="avg_daily_trip_rank_thresholds", dtype=Array(Int64)), - Field(name="avg_daily_trip_rank_names", dtype=Array(String)), - ], - ) - input_request = RequestSource( - name="vals_to_add", - schema=[ - Field(name="val_to_add", dtype=Int64), - Field(name="val_to_add_2", dtype=Int64), - ], - ) + request_source = RequestSource( + name="request_source", + schema=[ + Field(name="avg_daily_trip_rank_thresholds", dtype=Array(Int64)), + Field(name="avg_daily_trip_rank_names", dtype=Array(String)), + ], + ) + input_request = RequestSource( + name="vals_to_add", + schema=[ + Field(name="val_to_add", dtype=Int64), + Field(name="val_to_add_2", dtype=Int64), + ], + ) - @on_demand_feature_view( - sources=[request_source, driver_stats_fv], - schema=[ - Field(name="highest_achieved_rank", dtype=String), - Field(name="avg_daily_trips_plus_one", dtype=Int64), - Field(name="conv_rate_plus_acc", dtype=Float64), - Field(name="is_highest_rank", dtype=Bool), - Field(name="achieved_ranks", dtype=Array(String)), - Field(name="trips_until_next_rank_int", dtype=Array(Int64)), - Field(name="trips_until_next_rank_float", dtype=Array(Float64)), - Field(name="achieved_ranks_mask", dtype=Array(Bool)), - ], - mode="python", - ) - def python_view(inputs: dict[str, Any]) -> dict[str, Any]: - output = {} - trips_until_next_rank = [ - [max(threshold - row[1], 0) for threshold in row[0]] - for row in zip( - inputs["avg_daily_trip_rank_thresholds"], - inputs["avg_daily_trips"], - ) - ] - mask = [[value <= 0 for value in row] for row in trips_until_next_rank] - ranks = [ - [rank if mask else "Locked" for mask, rank in zip(*row)] - for row in zip(mask, inputs["avg_daily_trip_rank_names"]) - ] - highest_rank = [ - ([rank for rank in row if rank != "Locked"][-1:] or ["None"])[0] - for row in ranks - ] + @on_demand_feature_view( + sources=[request_source, driver_stats_fv], + schema=[ + Field(name="highest_achieved_rank", dtype=String), + 
Field(name="avg_daily_trips_plus_one", dtype=Int64), + Field(name="conv_rate_plus_acc", dtype=Float64), + Field(name="is_highest_rank", dtype=Bool), + Field(name="achieved_ranks", dtype=Array(String)), + Field(name="trips_until_next_rank_int", dtype=Array(Int64)), + Field(name="trips_until_next_rank_float", dtype=Array(Float64)), + Field(name="achieved_ranks_mask", dtype=Array(Bool)), + ], + mode="python", + ) + def python_view(inputs: dict[str, Any]) -> dict[str, Any]: + output = {} + trips_until_next_rank = [ + [max(threshold - row[1], 0) for threshold in row[0]] + for row in zip( + inputs["avg_daily_trip_rank_thresholds"], + inputs["avg_daily_trips"], + ) + ] + mask = [[value <= 0 for value in row] for row in trips_until_next_rank] + ranks = [ + [rank if mask else "Locked" for mask, rank in zip(*row)] + for row in zip(mask, inputs["avg_daily_trip_rank_names"]) + ] + highest_rank = [ + ([rank for rank in row if rank != "Locked"][-1:] or ["None"])[0] + for row in ranks + ] - output["conv_rate_plus_acc"] = [ - sum(row) for row in zip(inputs["conv_rate"], inputs["acc_rate"]) - ] - output["avg_daily_trips_plus_one"] = [ - row + 1 for row in inputs["avg_daily_trips"] - ] - output["highest_achieved_rank"] = highest_rank - output["is_highest_rank"] = [row[-1] != "Locked" for row in ranks] + output["conv_rate_plus_acc"] = [ + sum(row) for row in zip(inputs["conv_rate"], inputs["acc_rate"]) + ] + output["avg_daily_trips_plus_one"] = [ + row + 1 for row in inputs["avg_daily_trips"] + ] + output["highest_achieved_rank"] = highest_rank + output["is_highest_rank"] = [row[-1] != "Locked" for row in ranks] - output["trips_until_next_rank_int"] = trips_until_next_rank - output["trips_until_next_rank_float"] = [ - [float(value) for value in row] for row in trips_until_next_rank - ] - output["achieved_ranks_mask"] = mask - output["achieved_ranks"] = ranks - return output + output["trips_until_next_rank_int"] = trips_until_next_rank + output["trips_until_next_rank_float"] = [ + 
[float(value) for value in row] for row in trips_until_next_rank + ] + output["achieved_ranks_mask"] = mask + output["achieved_ranks"] = ranks + return output - @on_demand_feature_view( - sources=[ - driver_stats_fv, - input_request, - ], - schema=[ - Field(name="conv_rate_plus_val1", dtype=Float64), - Field(name="conv_rate_plus_val2", dtype=Float64), - ], - mode="pandas", + @on_demand_feature_view( + sources=[ + driver_stats_fv, + input_request, + ], + schema=[ + Field(name="conv_rate_plus_val1", dtype=Float64), + Field(name="conv_rate_plus_val2", dtype=Float64), + ], + mode="pandas", + ) + def pandas_view(features_df: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["conv_rate_plus_val1"] = ( + features_df["conv_rate"] + features_df["val_to_add"] ) - def pandas_view(features_df: pd.DataFrame) -> pd.DataFrame: - df = pd.DataFrame() - df["conv_rate_plus_val1"] = ( - features_df["conv_rate"] + features_df["val_to_add"] - ) - df["conv_rate_plus_val2"] = ( - features_df["conv_rate"] + features_df["val_to_add_2"] - ) - return df - - self.store.apply( - [ - driver, - driver_stats_source, - driver_stats_fv, - python_view, - pandas_view, - input_request, - request_source, - ] + df["conv_rate_plus_val2"] = ( + features_df["conv_rate"] + features_df["val_to_add_2"] ) - fv_applied = self.store.get_feature_view("driver_hourly_stats") - assert fv_applied.entities == [driver.name] - # Note here that after apply() is called, the entity_columns are populated with the join_key - assert fv_applied.entity_columns[0].name == driver.join_key + return df - self.store.write_to_online_store( - feature_view_name="driver_hourly_stats", df=driver_df - ) + self.store.apply( + [ + driver, + driver_stats_source, + driver_stats_fv, + python_view, + pandas_view, + input_request, + request_source, + ] + ) + fv_applied = self.store.get_feature_view("driver_hourly_stats") + assert fv_applied.entities == [driver.name] + # Note here that after apply() is called, the entity_columns are 
populated with the join_key + assert fv_applied.entity_columns[0].name == driver.join_key - batch_sample = pd.DataFrame(driver_entities, columns=["driver_id"]) - batch_sample["val_to_add"] = 0 - batch_sample["val_to_add_2"] = 1 - batch_sample["event_timestamp"] = start_date - batch_sample["created"] = start_date - fv_only_cols = ["driver_id", "event_timestamp", "created"] + self.store.write_to_online_store( + feature_view_name="driver_hourly_stats", df=driver_df + ) - resp_base_fv = self.store.get_historical_features( - entity_df=batch_sample[fv_only_cols], - features=[ - "driver_hourly_stats:conv_rate", - "driver_hourly_stats:acc_rate", - "driver_hourly_stats:avg_daily_trips", - ], - ).to_df() - assert resp_base_fv is not None - assert sorted(resp_base_fv.columns) == [ - "acc_rate", - "avg_daily_trips", - "conv_rate", - "created__", - "driver_id", - "event_timestamp", - ] - resp = self.store.get_historical_features( - entity_df=batch_sample, - features=[ - "driver_hourly_stats:conv_rate", - "driver_hourly_stats:acc_rate", - "driver_hourly_stats:avg_daily_trips", - "pandas_view:conv_rate_plus_val1", - "pandas_view:conv_rate_plus_val2", - ], - ).to_df() - assert resp is not None - assert resp["conv_rate_plus_val1"].isnull().sum() == 0 - - batch_sample["avg_daily_trip_rank_thresholds"] = [ - [100, 250, 500, 1000] - ] * batch_sample.shape[0] - batch_sample["avg_daily_trip_rank_names"] = [ - ["Bronze", "Silver", "Gold", "Platinum"] - ] * batch_sample.shape[0] - resp_python = self.store.get_historical_features( - entity_df=batch_sample, - features=[ - "driver_hourly_stats:conv_rate", - "driver_hourly_stats:acc_rate", - "driver_hourly_stats:avg_daily_trips", - "python_view:conv_rate_plus_acc", - ], - ).to_df() - assert resp_python is not None - assert resp_python["conv_rate_plus_acc"].isnull().sum() == 0 - - # Now testing feature retrieval for driver ids not in the dataset - missing_batch_sample = pd.DataFrame([1234567890], columns=["driver_id"]) - 
missing_batch_sample["val_to_add"] = 0 - missing_batch_sample["val_to_add_2"] = 1 - missing_batch_sample["event_timestamp"] = start_date - missing_batch_sample["created"] = start_date - resp_offline = self.store.get_historical_features( - entity_df=missing_batch_sample, - features=[ - "driver_hourly_stats:conv_rate", - "driver_hourly_stats:acc_rate", - "driver_hourly_stats:avg_daily_trips", - "pandas_view:conv_rate_plus_val1", - "pandas_view:conv_rate_plus_val2", - ], - ).to_df() - assert resp_offline is not None - assert resp_offline["conv_rate_plus_val1"].isnull().sum() == 1 - assert sorted(resp_offline.columns) == [ - "acc_rate", - "avg_daily_trips", - "conv_rate", - "conv_rate_plus_val1", - "conv_rate_plus_val2", - "created__", - "driver_id", - "event_timestamp", - "val_to_add", - "val_to_add_2", - ] - resp_online_missing_entity = self.store.get_online_features( - entity_rows=[ - {"driver_id": 1234567890, "val_to_add": 0, "val_to_add_2": 1} - ], - features=[ - "driver_hourly_stats:conv_rate", - "driver_hourly_stats:acc_rate", - "driver_hourly_stats:avg_daily_trips", - "pandas_view:conv_rate_plus_val1", - "pandas_view:conv_rate_plus_val2", - ], - ) - assert resp_online_missing_entity is not None - resp_online = self.store.get_online_features( - entity_rows=[{"driver_id": 1001, "val_to_add": 0, "val_to_add_2": 1}], - features=[ - "driver_hourly_stats:conv_rate", - "driver_hourly_stats:acc_rate", - "driver_hourly_stats:avg_daily_trips", - "pandas_view:conv_rate_plus_val1", - "pandas_view:conv_rate_plus_val2", - ], - ).to_df() - assert resp_online is not None - assert sorted(resp_online.columns) == [ - "acc_rate", - "avg_daily_trips", - "conv_rate", - "conv_rate_plus_val1", - "conv_rate_plus_val2", - "driver_id", - # It does not have the items below - # "created__", - # "event_timestamp", - # "val_to_add", - # "val_to_add_2", - ] - # Note online and offline columns will not match because: - # you want to be space efficient online when considering the impact of 
network latency so you want to send - # and receive the minimally required set of data, which means after transformation you only need to send the - # output in the response. - # Offline, you will probably prioritize reproducibility and being able to iterate, which means you will want - # the underlying inputs into your transformation, so the extra data is tolerable. - assert sorted(resp_online.columns) != sorted(resp_offline.columns) + batch_sample = pd.DataFrame(driver_entities, columns=["driver_id"]) + batch_sample["val_to_add"] = 0 + batch_sample["val_to_add_2"] = 1 + batch_sample["event_timestamp"] = start_date + batch_sample["created"] = start_date + fv_only_cols = ["driver_id", "event_timestamp", "created"] + + resp_base_fv = self.store.get_historical_features( + entity_df=batch_sample[fv_only_cols], + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + ], + ).to_df() + assert resp_base_fv is not None + assert sorted(resp_base_fv.columns) == [ + "acc_rate", + "avg_daily_trips", + "conv_rate", + "created__", + "driver_id", + "event_timestamp", + ] + resp = self.store.get_historical_features( + entity_df=batch_sample, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "pandas_view:conv_rate_plus_val1", + "pandas_view:conv_rate_plus_val2", + ], + ).to_df() + assert resp is not None + assert resp["conv_rate_plus_val1"].isnull().sum() == 0 + + batch_sample["avg_daily_trip_rank_thresholds"] = [ + [100, 250, 500, 1000] + ] * batch_sample.shape[0] + batch_sample["avg_daily_trip_rank_names"] = [ + ["Bronze", "Silver", "Gold", "Platinum"] + ] * batch_sample.shape[0] + resp_python = self.store.get_historical_features( + entity_df=batch_sample, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "python_view:conv_rate_plus_acc", + ], + ).to_df() + assert 
resp_python is not None + assert resp_python["conv_rate_plus_acc"].isnull().sum() == 0 + + # Now testing feature retrieval for driver ids not in the dataset + missing_batch_sample = pd.DataFrame([1234567890], columns=["driver_id"]) + missing_batch_sample["val_to_add"] = 0 + missing_batch_sample["val_to_add_2"] = 1 + missing_batch_sample["event_timestamp"] = start_date + missing_batch_sample["created"] = start_date + resp_offline = self.store.get_historical_features( + entity_df=missing_batch_sample, + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "pandas_view:conv_rate_plus_val1", + "pandas_view:conv_rate_plus_val2", + ], + ).to_df() + assert resp_offline is not None + assert resp_offline["conv_rate_plus_val1"].isnull().sum() == 1 + assert sorted(resp_offline.columns) == [ + "acc_rate", + "avg_daily_trips", + "conv_rate", + "conv_rate_plus_val1", + "conv_rate_plus_val2", + "created__", + "driver_id", + "event_timestamp", + "val_to_add", + "val_to_add_2", + ] + resp_online_missing_entity = self.store.get_online_features( + entity_rows=[{"driver_id": 1234567890, "val_to_add": 0, "val_to_add_2": 1}], + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "pandas_view:conv_rate_plus_val1", + "pandas_view:conv_rate_plus_val2", + ], + ) + assert resp_online_missing_entity is not None + resp_online = self.store.get_online_features( + entity_rows=[{"driver_id": 1001, "val_to_add": 0, "val_to_add_2": 1}], + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + "pandas_view:conv_rate_plus_val1", + "pandas_view:conv_rate_plus_val2", + ], + ).to_df() + assert resp_online is not None + assert sorted(resp_online.columns) == [ + "acc_rate", + "avg_daily_trips", + "conv_rate", + "conv_rate_plus_val1", + "conv_rate_plus_val2", + "driver_id", + # It does not have the 
items below + # "created__", + # "event_timestamp", + # "val_to_add", + # "val_to_add_2", + ] + # Note online and offline columns will not match because: + # you want to be space efficient online when considering the impact of network latency so you want to send + # and receive the minimally required set of data, which means after transformation you only need to send the + # output in the response. + # Offline, you will probably prioritize reproducibility and being able to iterate, which means you will want + # the underlying inputs into your transformation, so the extra data is tolerable. + assert sorted(resp_online.columns) != sorted(resp_offline.columns) + + def tearDown(self): + import shutil + + if hasattr(self, "data_dir"): + shutil.rmtree(self.data_dir, ignore_errors=True) def test_setup(self): pass diff --git a/sdk/python/tests/unit/test_type_map.py b/sdk/python/tests/unit/test_type_map.py index 8508b490d78..8125ab61b90 100644 --- a/sdk/python/tests/unit/test_type_map.py +++ b/sdk/python/tests/unit/test_type_map.py @@ -1,15 +1,27 @@ import numpy as np import pandas as pd +import pyarrow import pytest from feast.protos.feast.types.Value_pb2 import Map, MapList from feast.type_map import ( + _convert_value_type_str_to_value_type, _python_dict_to_map_proto, _python_list_to_map_list_proto, + arrow_to_pg_type, + feast_value_type_to_pa, feast_value_type_to_python_type, + pa_to_feast_value_type, + pa_to_redshift_value_type, + pg_type_to_feast_value_type, python_type_to_feast_value_type, python_values_to_proto_values, + redshift_to_feast_value_type, + snowflake_type_to_feast_value_type, + spark_to_feast_value_type, ) +from feast.types import Array, from_feast_to_pyarrow_type +from feast.types import Map as FeastMap from feast.value_type import ValueType @@ -461,3 +473,945 @@ def test_multiple_set_values(self): assert feast_value_type_to_python_type(protos[0]) == {1, 2, 3} assert feast_value_type_to_python_type(protos[1]) == {4, 5} assert 
feast_value_type_to_python_type(protos[2]) == {6}
+
+
+class TestMapArrowTypeSupport:
+    """Test cases for MAP and MAP_LIST Arrow type conversions."""
+
+    def test_feast_value_type_to_pa_map(self):
+        """Test that ValueType.MAP converts to a PyArrow map type."""
+        pa_type = feast_value_type_to_pa(ValueType.MAP)
+        assert isinstance(pa_type, pyarrow.MapType)
+        assert pa_type.key_type == pyarrow.string()
+
+    def test_feast_value_type_to_pa_map_list(self):
+        """Test that ValueType.MAP_LIST converts to a PyArrow list of maps."""
+        pa_type = feast_value_type_to_pa(ValueType.MAP_LIST)
+        assert isinstance(pa_type, pyarrow.ListType)
+        assert isinstance(pa_type.value_type, pyarrow.MapType)
+
+    def test_pa_to_feast_value_type_map(self):
+        """Test that PyArrow map type string converts to ValueType.MAP."""
+        result = pa_to_feast_value_type("map<string, int32>")
+        assert result == ValueType.MAP
+
+    def test_pa_to_feast_value_type_map_various_value_types(self):
+        """Test that various PyArrow map type strings all convert to MAP."""
+        assert pa_to_feast_value_type("map<string, int32>") == ValueType.MAP
+        assert pa_to_feast_value_type("map<string, double>") == ValueType.MAP
+        assert pa_to_feast_value_type("map<string, string>") == ValueType.MAP
+
+    def test_from_feast_to_pyarrow_type_map(self):
+        """Test that Feast Map type converts to PyArrow map type."""
+        pa_type = from_feast_to_pyarrow_type(FeastMap)
+        assert isinstance(pa_type, pyarrow.MapType)
+
+    def test_from_feast_to_pyarrow_type_array_map(self):
+        """Test that Feast Array(Map) converts to PyArrow list of maps."""
+        pa_type = from_feast_to_pyarrow_type(Array(FeastMap))
+        assert isinstance(pa_type, pyarrow.ListType)
+        assert isinstance(pa_type.value_type, pyarrow.MapType)
+
+    def test_convert_value_type_str_map(self):
+        """Test that 'MAP' string converts to ValueType.MAP."""
+        assert _convert_value_type_str_to_value_type("MAP") == ValueType.MAP
+
+    def test_convert_value_type_str_map_list(self):
+        """Test that 'MAP_LIST' string converts to ValueType.MAP_LIST."""
+        assert
_convert_value_type_str_to_value_type("MAP_LIST") == ValueType.MAP_LIST
+
+    def test_arrow_to_pg_type_map(self):
+        """Test that Arrow map type converts to Postgres jsonb."""
+        assert arrow_to_pg_type("map<string, int32>") == "jsonb"
+        assert arrow_to_pg_type("map<string, string>") == "jsonb"
+
+    def test_pg_type_to_feast_value_type_json(self):
+        """Test that Postgres json/jsonb types convert to ValueType.MAP."""
+        assert pg_type_to_feast_value_type("json") == ValueType.MAP
+        assert pg_type_to_feast_value_type("jsonb") == ValueType.MAP
+
+    def test_pg_type_to_feast_value_type_json_array(self):
+        """Test that Postgres json[]/jsonb[] types convert to ValueType.MAP_LIST."""
+        assert pg_type_to_feast_value_type("json[]") == ValueType.MAP_LIST
+        assert pg_type_to_feast_value_type("jsonb[]") == ValueType.MAP_LIST
+
+    def test_snowflake_variant_to_map(self):
+        """Test that Snowflake VARIANT/OBJECT types convert to ValueType.MAP."""
+        assert snowflake_type_to_feast_value_type("VARIANT") == ValueType.MAP
+        assert snowflake_type_to_feast_value_type("OBJECT") == ValueType.MAP
+
+    def test_redshift_super_to_map(self):
+        """Test that Redshift super type converts to ValueType.MAP."""
+        assert redshift_to_feast_value_type("super") == ValueType.MAP
+
+    def test_map_roundtrip_proto_to_arrow_type(self):
+        """Test that MAP type survives a full conversion roundtrip."""
+        pa_type = feast_value_type_to_pa(ValueType.MAP)
+        pa_type_str = str(pa_type)
+        roundtrip = pa_to_feast_value_type(pa_type_str)
+        assert roundtrip == ValueType.MAP
+
+    def test_spark_map_to_feast(self):
+        """Test that Spark map types convert to ValueType.MAP."""
+        assert spark_to_feast_value_type("map<string,string>") == ValueType.MAP
+        assert spark_to_feast_value_type("map<string,int>") == ValueType.MAP
+        assert spark_to_feast_value_type("MAP<STRING,STRING>") == ValueType.MAP
+
+    def test_spark_array_map_to_feast(self):
+        """Test that Spark array<map<...>> types convert to ValueType.MAP_LIST."""
+        assert (
+            spark_to_feast_value_type("array<map<string,string>>") == ValueType.MAP_LIST
+        )
+
+    def
test_spark_unknown_still_returns_null(self):
+        """Test that unrecognized Spark types still return NULL."""
+        assert spark_to_feast_value_type("interval") == ValueType.NULL
+
+    def test_spark_struct_to_feast_struct(self):
+        """Test that Spark struct types now convert to ValueType.STRUCT."""
+        assert spark_to_feast_value_type("struct<a:int,b:string>") == ValueType.STRUCT
+
+
+class TestEnableValidationOnFeatureView:
+    """Test that enable_validation is a real parameter on FeatureView."""
+
+    def test_feature_view_has_enable_validation_default_false(self):
+        """Test that FeatureView has enable_validation defaulting to False."""
+        import inspect
+
+        from feast.feature_view import FeatureView
+
+        sig = inspect.signature(FeatureView.__init__)
+        assert "enable_validation" in sig.parameters
+        assert sig.parameters["enable_validation"].default is False
+
+    def test_batch_feature_view_has_enable_validation(self):
+        """Test that BatchFeatureView has enable_validation parameter."""
+        import inspect
+
+        from feast.batch_feature_view import BatchFeatureView
+
+        sig = inspect.signature(BatchFeatureView.__init__)
+        assert "enable_validation" in sig.parameters
+        assert sig.parameters["enable_validation"].default is False
+
+    def test_stream_feature_view_has_enable_validation(self):
+        """Test that StreamFeatureView has enable_validation parameter."""
+        import inspect
+
+        from feast.stream_feature_view import StreamFeatureView
+
+        sig = inspect.signature(StreamFeatureView.__init__)
+        assert "enable_validation" in sig.parameters
+        assert sig.parameters["enable_validation"].default is False
+
+
+class TestRedshiftDynamoDBMapSupport:
+    """Test cases for DynamoDB + Redshift map type round-trips."""
+
+    def test_pa_to_redshift_value_type_map(self):
+        """Test that Arrow map type maps to Redshift 'super' type."""
+        pa_type = feast_value_type_to_pa(ValueType.MAP)
+        assert pa_to_redshift_value_type(pa_type) == "super"
+
+    def test_pa_to_redshift_value_type_map_list(self):
+        """Test that Arrow list-of-map type
maps to Redshift 'super' type.""" + pa_type = feast_value_type_to_pa(ValueType.MAP_LIST) + assert pa_to_redshift_value_type(pa_type) == "super" + + def test_json_string_to_map_proto(self): + """Test that JSON strings are parsed to MAP protos during materialization.""" + json_str = '{"key1": "value1", "key2": "value2"}' + protos = python_values_to_proto_values([json_str], ValueType.MAP) + converted = feast_value_type_to_python_type(protos[0]) + assert isinstance(converted, dict) + assert converted["key1"] == "value1" + assert converted["key2"] == "value2" + + def test_json_string_to_map_list_proto(self): + """Test that JSON strings are parsed to MAP_LIST protos during materialization.""" + json_str = '[{"a": "1"}, {"b": "2"}]' + protos = python_values_to_proto_values([json_str], ValueType.MAP_LIST) + converted = feast_value_type_to_python_type(protos[0]) + assert isinstance(converted, list) + assert len(converted) == 2 + assert converted[0]["a"] == "1" + + def test_dict_still_works_for_map(self): + """Test that regular Python dicts still work for MAP (no regression).""" + test_dict = {"x": "y", "a": 1} + protos = python_values_to_proto_values([test_dict], ValueType.MAP) + converted = feast_value_type_to_python_type(protos[0]) + assert isinstance(converted, dict) + assert converted["x"] == "y" + + def test_none_map_still_works(self): + """Test that None MAP values still produce empty proto (no regression).""" + protos = python_values_to_proto_values([None], ValueType.MAP) + converted = feast_value_type_to_python_type(protos[0]) + assert converted is None + + def test_redshift_super_roundtrip(self): + """Test full type conversion roundtrip: Redshift super → Feast MAP → Arrow → Redshift super.""" + feast_type = redshift_to_feast_value_type("super") + assert feast_type == ValueType.MAP + pa_type = feast_value_type_to_pa(feast_type) + redshift_type = pa_to_redshift_value_type(pa_type) + assert redshift_type == "super" + + +class TestJsonTypeSupport: + """Test cases for 
JSON value type.""" + + def test_simple_json_conversion(self): + """Test basic JSON type conversion: Python dict -> proto (json_val) -> Python.""" + test_data = {"name": "Alice", "age": 30, "active": True} + protos = python_values_to_proto_values([test_data], ValueType.JSON) + converted = feast_value_type_to_python_type(protos[0]) + + assert isinstance(converted, dict) + assert converted["name"] == "Alice" + assert converted["age"] == 30 + assert converted["active"] is True + + def test_json_string_passthrough(self): + """Test that a raw JSON string is stored and returned correctly.""" + json_str = '{"key": "value", "count": 42}' + protos = python_values_to_proto_values([json_str], ValueType.JSON) + converted = feast_value_type_to_python_type(protos[0]) + + assert isinstance(converted, dict) + assert converted["key"] == "value" + assert converted["count"] == 42 + + def test_json_array_value(self): + """Test JSON type with an array as the top-level value.""" + test_data = [1, 2, 3, "four"] + protos = python_values_to_proto_values([test_data], ValueType.JSON) + converted = feast_value_type_to_python_type(protos[0]) + + assert isinstance(converted, list) + assert converted == [1, 2, 3, "four"] + + def test_json_nested(self): + """Test deeply nested JSON structures.""" + test_data = { + "level1": {"level2": {"level3": {"value": "deep"}}}, + "array": [{"a": 1}, {"b": 2}], + } + protos = python_values_to_proto_values([test_data], ValueType.JSON) + converted = feast_value_type_to_python_type(protos[0]) + + assert converted["level1"]["level2"]["level3"]["value"] == "deep" + assert converted["array"][0]["a"] == 1 + + def test_null_json(self): + """Test None JSON conversion.""" + protos = python_values_to_proto_values([None], ValueType.JSON) + converted = feast_value_type_to_python_type(protos[0]) + assert converted is None + + def test_json_list_conversion(self): + """Test JSON_LIST type conversion.""" + test_data = [ + {"name": "Alice"}, + '{"name": "Bob"}', + {"count": 
42}, + ] + protos = python_values_to_proto_values([test_data], ValueType.JSON_LIST) + converted = feast_value_type_to_python_type(protos[0]) + + assert isinstance(converted, list) + assert len(converted) == 3 + assert converted[0] == {"name": "Alice"} + assert converted[1] == {"name": "Bob"} + assert converted[2] == {"count": 42} + + def test_null_json_list(self): + """Test None JSON_LIST conversion.""" + protos = python_values_to_proto_values([None], ValueType.JSON_LIST) + converted = feast_value_type_to_python_type(protos[0]) + assert converted is None + + def test_multiple_json_values(self): + """Test conversion of multiple JSON values.""" + test_values = [ + {"x": 1}, + {"y": 2}, + None, + {"z": 3}, + ] + protos = python_values_to_proto_values(test_values, ValueType.JSON) + converted = [feast_value_type_to_python_type(p) for p in protos] + + assert converted[0] == {"x": 1} + assert converted[1] == {"y": 2} + assert converted[2] is None + assert converted[3] == {"z": 3} + + def test_feast_value_type_to_pa_json(self): + """Test that ValueType.JSON converts to PyArrow large_string.""" + pa_type = feast_value_type_to_pa(ValueType.JSON) + assert pa_type == pyarrow.large_string() + + def test_feast_value_type_to_pa_json_list(self): + """Test that ValueType.JSON_LIST converts to PyArrow list of large_string.""" + pa_type = feast_value_type_to_pa(ValueType.JSON_LIST) + assert isinstance(pa_type, pyarrow.ListType) + assert pa_type.value_type == pyarrow.large_string() + + def test_convert_value_type_str_json(self): + """Test that 'JSON' string converts to ValueType.JSON.""" + assert _convert_value_type_str_to_value_type("JSON") == ValueType.JSON + assert _convert_value_type_str_to_value_type("JSON_LIST") == ValueType.JSON_LIST + + def test_arrow_to_pg_type_json(self): + """Test that Arrow large_string converts to Postgres jsonb.""" + assert arrow_to_pg_type("large_string") == "jsonb" + + def test_bq_json_to_feast(self): + """Test that BigQuery JSON type converts to 
ValueType.JSON.""" + from feast.type_map import bq_to_feast_value_type + + assert bq_to_feast_value_type("JSON") == ValueType.JSON + + def test_spark_struct_not_json(self): + """Test that Spark struct types map to STRUCT not JSON.""" + assert spark_to_feast_value_type("struct") == ValueType.STRUCT + + def test_snowflake_json_to_feast(self): + """Test that Snowflake JSON type converts to ValueType.JSON.""" + assert snowflake_type_to_feast_value_type("JSON") == ValueType.JSON + + def test_json_feast_type_aliases(self): + """Test Json FeastType alias and conversions.""" + from feast.types import Json, from_feast_to_pyarrow_type + + pa_type = from_feast_to_pyarrow_type(Json) + assert pa_type == pyarrow.large_string() + + def test_json_value_types_mapping(self): + """Test JSON types in VALUE_TYPES_TO_FEAST_TYPES.""" + from feast.types import VALUE_TYPES_TO_FEAST_TYPES, Json + + assert VALUE_TYPES_TO_FEAST_TYPES[ValueType.JSON] == Json + + def test_pa_to_feast_value_type_large_string(self): + """Test that large_string arrow type converts to ValueType.JSON.""" + result = pa_to_feast_value_type("large_string") + assert result == ValueType.JSON + + +class TestStructTypeSupport: + """Test cases for STRUCT value type.""" + + def test_simple_struct_conversion(self): + """Test basic STRUCT type conversion: Python dict -> proto (struct_val) -> Python dict.""" + test_data = {"name": "Alice", "age": 30} + protos = python_values_to_proto_values([test_data], ValueType.STRUCT) + converted = feast_value_type_to_python_type(protos[0]) + + assert isinstance(converted, dict) + assert converted["name"] == "Alice" + assert converted["age"] == 30 + + def test_nested_struct_conversion(self): + """Test nested STRUCT type conversion.""" + test_data = { + "address": {"street": "123 Main St", "city": "NYC"}, + "name": "Alice", + } + protos = python_values_to_proto_values([test_data], ValueType.STRUCT) + converted = feast_value_type_to_python_type(protos[0]) + + assert 
converted["address"]["street"] == "123 Main St" + assert converted["address"]["city"] == "NYC" + assert converted["name"] == "Alice" + + def test_null_struct(self): + """Test None STRUCT conversion.""" + protos = python_values_to_proto_values([None], ValueType.STRUCT) + converted = feast_value_type_to_python_type(protos[0]) + assert converted is None + + def test_struct_list_conversion(self): + """Test STRUCT_LIST type conversion.""" + test_data = [ + {"name": "Alice", "age": 30}, + {"name": "Bob", "age": 25}, + ] + protos = python_values_to_proto_values([test_data], ValueType.STRUCT_LIST) + converted = feast_value_type_to_python_type(protos[0]) + + assert isinstance(converted, list) + assert len(converted) == 2 + assert converted[0]["name"] == "Alice" + assert converted[1]["age"] == 25 + + def test_null_struct_list(self): + """Test None STRUCT_LIST conversion.""" + protos = python_values_to_proto_values([None], ValueType.STRUCT_LIST) + converted = feast_value_type_to_python_type(protos[0]) + assert converted is None + + def test_multiple_struct_values(self): + """Test conversion of multiple STRUCT values.""" + test_values = [ + {"x": 1}, + None, + {"y": 2, "z": 3}, + ] + protos = python_values_to_proto_values(test_values, ValueType.STRUCT) + converted = [feast_value_type_to_python_type(p) for p in protos] + + assert converted[0] == {"x": 1} + assert converted[1] is None + assert converted[2] == {"y": 2, "z": 3} + + def test_struct_class_creation(self): + """Test Struct FeastType creation and validation.""" + from feast.types import Int32, String, Struct + + struct_type = Struct({"name": String, "age": Int32}) + assert struct_type.to_value_type() == ValueType.STRUCT + assert "name" in struct_type.fields + assert struct_type.fields["name"] == String + assert struct_type.fields["age"] == Int32 + + def test_struct_empty_raises(self): + """Test that empty Struct raises ValueError.""" + from feast.types import Struct + + with pytest.raises(ValueError, match="at least 
one field"): + Struct({}) + + def test_struct_to_pyarrow(self): + """Test Struct type converts to PyArrow struct.""" + from feast.types import Int32, String, Struct + + struct_type = Struct({"name": String, "age": Int32}) + pa_type = struct_type.to_pyarrow_type() + + assert pyarrow.types.is_struct(pa_type) + assert pa_type.get_field_index("name") >= 0 + assert pa_type.get_field_index("age") >= 0 + + def test_struct_from_feast_to_pyarrow(self): + """Test from_feast_to_pyarrow_type handles Struct.""" + from feast.types import Int32, String, Struct + + struct_type = Struct({"name": String, "age": Int32}) + pa_type = from_feast_to_pyarrow_type(struct_type) + + assert pyarrow.types.is_struct(pa_type) + + def test_array_of_struct(self): + """Test Array(Struct(...)) works.""" + from feast.types import Array, Int32, String, Struct + + struct_type = Struct({"name": String, "value": Int32}) + array_type = Array(struct_type) + + assert array_type.to_value_type() == ValueType.STRUCT_LIST + pa_type = from_feast_to_pyarrow_type(array_type) + assert isinstance(pa_type, pyarrow.ListType) + assert pyarrow.types.is_struct(pa_type.value_type) + + def test_feast_value_type_to_pa_struct(self): + """Test that ValueType.STRUCT converts to PyArrow struct (empty default).""" + pa_type = feast_value_type_to_pa(ValueType.STRUCT) + assert pyarrow.types.is_struct(pa_type) + + def test_feast_value_type_to_pa_struct_list(self): + """Test that ValueType.STRUCT_LIST converts to PyArrow list of struct.""" + pa_type = feast_value_type_to_pa(ValueType.STRUCT_LIST) + assert isinstance(pa_type, pyarrow.ListType) + assert pyarrow.types.is_struct(pa_type.value_type) + + def test_convert_value_type_str_struct(self): + """Test that 'STRUCT' string converts to ValueType.STRUCT.""" + assert _convert_value_type_str_to_value_type("STRUCT") == ValueType.STRUCT + assert ( + _convert_value_type_str_to_value_type("STRUCT_LIST") + == ValueType.STRUCT_LIST + ) + + def test_spark_struct_to_feast(self): + """Test that 
Spark struct types convert to ValueType.STRUCT.""" + assert spark_to_feast_value_type("struct") == ValueType.STRUCT + assert spark_to_feast_value_type("STRUCT") == ValueType.STRUCT + + def test_spark_array_struct_to_feast(self): + """Test that Spark array> types convert to STRUCT_LIST.""" + assert ( + spark_to_feast_value_type("array>") == ValueType.STRUCT_LIST + ) + + def test_bq_struct_to_feast(self): + """Test that BigQuery STRUCT/RECORD types convert to ValueType.STRUCT.""" + from feast.type_map import bq_to_feast_value_type + + assert bq_to_feast_value_type("STRUCT") == ValueType.STRUCT + assert bq_to_feast_value_type("RECORD") == ValueType.STRUCT + + def test_pa_to_feast_value_type_struct(self): + """Test that struct arrow type string converts to ValueType.STRUCT.""" + result = pa_to_feast_value_type("struct") + assert result == ValueType.STRUCT + + def test_struct_schema_persistence(self): + """Test that Struct schema is preserved through Field serialization/deserialization.""" + from feast.field import Field + from feast.types import Int32, String, Struct + + struct_type = Struct({"street": String, "zip": Int32}) + field = Field(name="address", dtype=struct_type) + + proto = field.to_proto() + restored = Field.from_proto(proto) + + assert isinstance(restored.dtype, Struct) + assert "street" in restored.dtype.fields + assert "zip" in restored.dtype.fields + assert restored.dtype.fields["street"] == String + assert restored.dtype.fields["zip"] == Int32 + + def test_struct_json_string_parsing(self): + """Test that JSON string input is parsed for STRUCT type.""" + json_str = '{"name": "Alice", "score": 95}' + protos = python_values_to_proto_values([json_str], ValueType.STRUCT) + converted = feast_value_type_to_python_type(protos[0]) + + assert isinstance(converted, dict) + assert converted["name"] == "Alice" + assert converted["score"] == 95 + + def test_struct_equality(self): + """Test Struct type equality.""" + from feast.types import Int32, String, Struct + 
+ s1 = Struct({"name": String, "age": Int32}) + s2 = Struct({"name": String, "age": Int32}) + s3 = Struct({"name": String}) + + assert s1 == s2 + assert s1 != s3 + + def test_from_feast_type_struct(self): + """Test from_feast_type works for Struct.""" + from feast.types import Int32, String, Struct, from_feast_type + + struct_type = Struct({"name": String, "age": Int32}) + value_type = from_feast_type(struct_type) + assert value_type == ValueType.STRUCT + + def test_from_value_type_struct(self): + """Test from_value_type works for STRUCT (returns placeholder).""" + from feast.types import Struct, from_value_type + + feast_type = from_value_type(ValueType.STRUCT) + assert isinstance(feast_type, Struct) + + def test_from_value_type_struct_list(self): + """Test from_value_type works for STRUCT_LIST (returns placeholder Array(Struct)).""" + from feast.types import Array, Struct, from_value_type + + feast_type = from_value_type(ValueType.STRUCT_LIST) + assert isinstance(feast_type, Array) + assert isinstance(feast_type.base_type, Struct) + + +class TestJsonValidation: + """Test JSON well-formedness validation.""" + + def test_proto_conversion_valid_json_string(self): + """Valid JSON strings should convert without error.""" + valid_json = '{"key": "value", "num": 42}' + protos = python_values_to_proto_values([valid_json], ValueType.JSON) + assert protos[0].json_val == valid_json + + def test_proto_conversion_invalid_json_string_raises(self): + """Invalid JSON strings should raise ValueError during proto conversion.""" + import pytest + + invalid_json = "this is not json {{" + with pytest.raises(ValueError, match="Invalid JSON string for JSON type"): + python_values_to_proto_values([invalid_json], ValueType.JSON) + + def test_proto_conversion_dict_no_validation_needed(self): + """Python dicts are valid by definition and should not raise.""" + data = {"name": "Alice", "items": [1, 2, 3]} + protos = python_values_to_proto_values([data], ValueType.JSON) + converted = 
feast_value_type_to_python_type(protos[0]) + assert converted == data + + def test_proto_conversion_list_no_validation_needed(self): + """Python lists are valid by definition and should not raise.""" + data = [1, "two", {"three": 3}] + protos = python_values_to_proto_values([data], ValueType.JSON) + converted = feast_value_type_to_python_type(protos[0]) + assert converted == data + + def test_proto_conversion_none_passes(self): + """None values should pass through without validation.""" + protos = python_values_to_proto_values([None], ValueType.JSON) + converted = feast_value_type_to_python_type(protos[0]) + assert converted is None + + def test_proto_conversion_json_list_invalid_string_raises(self): + """Invalid JSON strings in JSON_LIST should raise ValueError.""" + import pytest + + data = ['{"valid": true}', "not-json"] + with pytest.raises(ValueError, match="Invalid JSON string in JSON_LIST"): + python_values_to_proto_values([data], ValueType.JSON_LIST) + + def test_proto_conversion_json_list_valid_mixed(self): + """JSON_LIST with valid strings and dicts should succeed.""" + data = ['{"a": 1}', {"b": 2}] + protos = python_values_to_proto_values([data], ValueType.JSON_LIST) + converted = feast_value_type_to_python_type(protos[0]) + assert len(converted) == 2 + assert converted[0] == {"a": 1} + assert converted[1] == {"b": 2} + + def test_proto_conversion_json_scalar_string(self): + """JSON scalar values like numbers-as-strings should validate.""" + protos = python_values_to_proto_values(["42"], ValueType.JSON) + converted = feast_value_type_to_python_type(protos[0]) + assert converted == 42 + + def test_proto_conversion_json_null_string(self): + """The JSON string 'null' is valid JSON.""" + protos = python_values_to_proto_values(["null"], ValueType.JSON) + converted = feast_value_type_to_python_type(protos[0]) + assert converted is None + + def test_proto_conversion_json_empty_string_raises(self): + """An empty string is not valid JSON.""" + import pytest + + 
with pytest.raises(ValueError, match="Invalid JSON string for JSON type"): + python_values_to_proto_values([""], ValueType.JSON) + + def test_local_validation_node_valid_json(self): + """LocalValidationNode should accept valid JSON strings.""" + from feast.infra.compute_engines.local.nodes import LocalValidationNode + + table = pyarrow.table( + {"config": ['{"a": 1}', '{"b": 2}', "null"]}, + schema=pyarrow.schema([pyarrow.field("config", pyarrow.string())]), + ) + + node = LocalValidationNode( + name="test_validate", + validation_config={ + "columns": {"config": pyarrow.large_string()}, + "json_columns": {"config"}, + }, + backend=None, + ) + # Should not raise + node._validate_schema(table) + + def test_local_validation_node_invalid_json(self): + """LocalValidationNode should reject invalid JSON strings.""" + import pytest + + from feast.infra.compute_engines.local.nodes import LocalValidationNode + + table = pyarrow.table( + {"config": ['{"valid": true}', "not-json-at-all", '{"ok": 1}']}, + schema=pyarrow.schema([pyarrow.field("config", pyarrow.string())]), + ) + + node = LocalValidationNode( + name="test_validate", + validation_config={ + "columns": {"config": pyarrow.large_string()}, + "json_columns": {"config"}, + }, + backend=None, + ) + with pytest.raises(ValueError, match="invalid JSON value"): + node._validate_schema(table) + + def test_local_validation_node_skips_nulls(self): + """LocalValidationNode should skip null values in JSON columns.""" + from feast.infra.compute_engines.local.nodes import LocalValidationNode + + table = pyarrow.table( + {"config": ['{"a": 1}', None, '{"b": 2}']}, + schema=pyarrow.schema([pyarrow.field("config", pyarrow.string())]), + ) + + node = LocalValidationNode( + name="test_validate", + validation_config={ + "columns": {"config": pyarrow.large_string()}, + "json_columns": {"config"}, + }, + backend=None, + ) + # Should not raise + node._validate_schema(table) + + def test_local_validation_node_no_json_columns(self): + 
"""LocalValidationNode should skip JSON validation if no json_columns.""" + from feast.infra.compute_engines.local.nodes import LocalValidationNode + + table = pyarrow.table( + {"data": ["not-json"]}, + schema=pyarrow.schema([pyarrow.field("data", pyarrow.string())]), + ) + + node = LocalValidationNode( + name="test_validate", + validation_config={ + "columns": {"data": pyarrow.string()}, + }, + backend=None, + ) + # Should not raise — no json_columns configured + node._validate_schema(table) + + def test_local_validation_node_error_message_shows_row_and_detail(self): + """Error message should include the row number and parse error.""" + import pytest + + from feast.infra.compute_engines.local.nodes import LocalValidationNode + + table = pyarrow.table( + {"config": ['{"ok": 1}', '{"ok": 2}', "{bad}"]}, + schema=pyarrow.schema([pyarrow.field("config", pyarrow.string())]), + ) + + node = LocalValidationNode( + name="test_validate", + validation_config={ + "columns": {"config": pyarrow.large_string()}, + "json_columns": {"config"}, + }, + backend=None, + ) + with pytest.raises(ValueError, match="row 2"): + node._validate_schema(table) + + +class TestSparkNativeTypeValidation: + """Test Spark-native type mapping and compatibility checking.""" + + def test_feast_string_to_spark_string(self): + from pyspark.sql.types import StringType + + from feast.infra.compute_engines.spark.nodes import from_feast_to_spark_type + from feast.types import String + + assert from_feast_to_spark_type(String) == StringType() + + def test_feast_int32_to_spark_integer(self): + from pyspark.sql.types import IntegerType + + from feast.infra.compute_engines.spark.nodes import from_feast_to_spark_type + from feast.types import Int32 + + assert from_feast_to_spark_type(Int32) == IntegerType() + + def test_feast_int64_to_spark_long(self): + from pyspark.sql.types import LongType + + from feast.infra.compute_engines.spark.nodes import from_feast_to_spark_type + from feast.types import Int64 + + 
assert from_feast_to_spark_type(Int64) == LongType() + + def test_feast_float32_to_spark_float(self): + from pyspark.sql.types import FloatType + + from feast.infra.compute_engines.spark.nodes import from_feast_to_spark_type + from feast.types import Float32 + + assert from_feast_to_spark_type(Float32) == FloatType() + + def test_feast_float64_to_spark_double(self): + from pyspark.sql.types import DoubleType + + from feast.infra.compute_engines.spark.nodes import from_feast_to_spark_type + from feast.types import Float64 + + assert from_feast_to_spark_type(Float64) == DoubleType() + + def test_feast_bool_to_spark_boolean(self): + from pyspark.sql.types import BooleanType + + from feast.infra.compute_engines.spark.nodes import from_feast_to_spark_type + from feast.types import Bool + + assert from_feast_to_spark_type(Bool) == BooleanType() + + def test_feast_bytes_to_spark_binary(self): + from pyspark.sql.types import BinaryType + + from feast.infra.compute_engines.spark.nodes import from_feast_to_spark_type + from feast.types import Bytes + + assert from_feast_to_spark_type(Bytes) == BinaryType() + + def test_feast_timestamp_to_spark_timestamp(self): + from pyspark.sql.types import TimestampType + + from feast.infra.compute_engines.spark.nodes import from_feast_to_spark_type + from feast.types import UnixTimestamp + + assert from_feast_to_spark_type(UnixTimestamp) == TimestampType() + + def test_feast_map_to_spark_map(self): + from pyspark.sql.types import MapType, StringType + + from feast.infra.compute_engines.spark.nodes import from_feast_to_spark_type + from feast.types import Map + + assert from_feast_to_spark_type(Map) == MapType(StringType(), StringType()) + + def test_feast_json_to_spark_string(self): + from pyspark.sql.types import StringType + + from feast.infra.compute_engines.spark.nodes import from_feast_to_spark_type + from feast.types import Json + + assert from_feast_to_spark_type(Json) == StringType() + + def 
test_feast_array_int_to_spark_array(self): + from pyspark.sql.types import ArrayType, IntegerType + + from feast.infra.compute_engines.spark.nodes import from_feast_to_spark_type + from feast.types import Array, Int32 + + assert from_feast_to_spark_type(Array(Int32)) == ArrayType(IntegerType()) + + def test_feast_array_map_to_spark_array(self): + from pyspark.sql.types import ArrayType, MapType, StringType + + from feast.infra.compute_engines.spark.nodes import from_feast_to_spark_type + from feast.types import Array, Map + + assert from_feast_to_spark_type(Array(Map)) == ArrayType( + MapType(StringType(), StringType()) + ) + + def test_feast_struct_to_spark_struct(self): + from pyspark.sql.types import IntegerType, StringType, StructField, StructType + + from feast.infra.compute_engines.spark.nodes import from_feast_to_spark_type + from feast.types import Int32, String, Struct + + struct = Struct({"name": String, "age": Int32}) + expected = StructType( + [ + StructField("name", StringType(), True), + StructField("age", IntegerType(), True), + ] + ) + assert from_feast_to_spark_type(struct) == expected + + def test_feast_array_struct_to_spark_array_struct(self): + from pyspark.sql.types import ( + ArrayType, + IntegerType, + StringType, + StructField, + StructType, + ) + + from feast.infra.compute_engines.spark.nodes import from_feast_to_spark_type + from feast.types import Array, Int32, String, Struct + + struct = Struct({"name": String, "age": Int32}) + expected = ArrayType( + StructType( + [ + StructField("name", StringType(), True), + StructField("age", IntegerType(), True), + ] + ) + ) + assert from_feast_to_spark_type(Array(struct)) == expected + + def test_unsupported_type_returns_none(self): + from feast.infra.compute_engines.spark.nodes import from_feast_to_spark_type + from feast.types import Invalid + + assert from_feast_to_spark_type(Invalid) is None + + # Compatibility tests + + def test_exact_match_compatible(self): + from pyspark.sql.types import 
StringType + + from feast.infra.compute_engines.spark.nodes import _spark_types_compatible + + assert _spark_types_compatible(StringType(), StringType()) + + def test_map_struct_compatible(self): + from pyspark.sql.types import MapType, StringType, StructType + + from feast.infra.compute_engines.spark.nodes import _spark_types_compatible + + assert _spark_types_compatible( + MapType(StringType(), StringType()), StructType([]) + ) + + def test_struct_map_compatible(self): + from pyspark.sql.types import MapType, StringType, StructType + + from feast.infra.compute_engines.spark.nodes import _spark_types_compatible + + assert _spark_types_compatible( + StructType([]), MapType(StringType(), StringType()) + ) + + def test_integer_long_widening_compatible(self): + from pyspark.sql.types import IntegerType, LongType + + from feast.infra.compute_engines.spark.nodes import _spark_types_compatible + + assert _spark_types_compatible(IntegerType(), LongType()) + assert _spark_types_compatible(LongType(), IntegerType()) + + def test_float_double_widening_compatible(self): + from pyspark.sql.types import DoubleType, FloatType + + from feast.infra.compute_engines.spark.nodes import _spark_types_compatible + + assert _spark_types_compatible(FloatType(), DoubleType()) + assert _spark_types_compatible(DoubleType(), FloatType()) + + def test_string_vs_integer_incompatible(self): + from pyspark.sql.types import IntegerType, StringType + + from feast.infra.compute_engines.spark.nodes import _spark_types_compatible + + assert not _spark_types_compatible(StringType(), IntegerType()) + + def test_bool_vs_double_incompatible(self): + from pyspark.sql.types import BooleanType, DoubleType + + from feast.infra.compute_engines.spark.nodes import _spark_types_compatible + + assert not _spark_types_compatible(BooleanType(), DoubleType()) + + def test_array_element_compatibility(self): + from pyspark.sql.types import ArrayType, IntegerType, LongType + + from 
feast.infra.compute_engines.spark.nodes import _spark_types_compatible + + assert _spark_types_compatible(ArrayType(IntegerType()), ArrayType(LongType())) + + def test_array_element_incompatibility(self): + from pyspark.sql.types import ArrayType, IntegerType, StringType + + from feast.infra.compute_engines.spark.nodes import _spark_types_compatible + + assert not _spark_types_compatible( + ArrayType(StringType()), ArrayType(IntegerType()) + ) diff --git a/sdk/python/tests/unit/test_utils.py b/sdk/python/tests/unit/test_utils.py new file mode 100644 index 00000000000..4ffec9750b2 --- /dev/null +++ b/sdk/python/tests/unit/test_utils.py @@ -0,0 +1,170 @@ +""" +Tests for feast.utils module. + +These unit tests cover the _convert_rows_to_protobuf function which is critical +for online feature retrieval performance. The function converts raw database +rows to protobuf format for the serving response. +""" + +from datetime import datetime, timezone + +from feast.protos.feast.serving.ServingService_pb2 import FieldStatus +from feast.protos.feast.types.Value_pb2 import Value as ValueProto +from feast.utils import _convert_rows_to_protobuf + + +class TestConvertRowsToProtobuf: + """Tests for _convert_rows_to_protobuf function.""" + + def test_basic_conversion(self): + """Test basic conversion with single feature and entity.""" + timestamp = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) + value = ValueProto(float_val=1.5) + + read_rows = [(timestamp, {"feature_1": value})] + requested_features = ["feature_1"] + + result = _convert_rows_to_protobuf(requested_features, read_rows) + + assert len(result) == 1 + ts_vector, status_vector, value_vector = result[0] + assert len(ts_vector) == 1 + assert ts_vector[0].seconds == int(timestamp.timestamp()) + assert value_vector[0] == value + + def test_multiple_features_same_timestamp(self): + """Test that multiple features share the same pre-computed timestamp. 
+ + This verifies the optimization: timestamps are computed once per entity, + not once per feature per entity. + """ + timestamp = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) + value1 = ValueProto(float_val=1.0) + value2 = ValueProto(float_val=2.0) + + read_rows = [(timestamp, {"feature_1": value1, "feature_2": value2})] + requested_features = ["feature_1", "feature_2"] + + result = _convert_rows_to_protobuf(requested_features, read_rows) + + assert len(result) == 2 + ts1 = result[0][0][0] + ts2 = result[1][0][0] + assert ts1.seconds == ts2.seconds + assert ts1.seconds == int(timestamp.timestamp()) + + def test_multiple_entities(self): + """Test conversion with multiple entities having different timestamps.""" + ts1 = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) + ts2 = datetime(2024, 1, 2, 12, 0, 0, tzinfo=timezone.utc) + + read_rows = [ + (ts1, {"feature_1": ValueProto(float_val=1.0)}), + (ts2, {"feature_1": ValueProto(float_val=2.0)}), + ] + requested_features = ["feature_1"] + + result = _convert_rows_to_protobuf(requested_features, read_rows) + + assert len(result) == 1 + ts_vector, status_vector, value_vector = result[0] + assert len(ts_vector) == 2 + assert ts_vector[0].seconds == int(ts1.timestamp()) + assert ts_vector[1].seconds == int(ts2.timestamp()) + + def test_null_timestamp_handling(self): + """Test that null timestamps produce empty Timestamp proto.""" + read_rows = [ + (None, {"feature_1": ValueProto(float_val=1.0)}), + ( + datetime(2024, 1, 1, tzinfo=timezone.utc), + {"feature_1": ValueProto(float_val=2.0)}, + ), + ] + requested_features = ["feature_1"] + + result = _convert_rows_to_protobuf(requested_features, read_rows) + + ts_vector = result[0][0] + assert ts_vector[0].seconds == 0 # Null timestamp -> empty proto + assert ts_vector[1].seconds != 0 # Valid timestamp + + def test_missing_feature_data(self): + """Test handling of missing feature data (None row).""" + timestamp = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) + 
+ read_rows = [ + (timestamp, {"feature_1": ValueProto(float_val=1.0)}), + (timestamp, None), # No feature data for this entity + ] + requested_features = ["feature_1"] + + result = _convert_rows_to_protobuf(requested_features, read_rows) + + ts_vector, status_vector, value_vector = result[0] + assert len(ts_vector) == 2 + assert status_vector[0] == FieldStatus.PRESENT + assert status_vector[1] == FieldStatus.NOT_FOUND + + def test_feature_not_in_row(self): + """Test handling when requested feature is not in the row's data.""" + timestamp = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) + + read_rows = [ + (timestamp, {"feature_1": ValueProto(float_val=1.0)}), + ] + requested_features = ["feature_1", "feature_2"] # feature_2 not in data + + result = _convert_rows_to_protobuf(requested_features, read_rows) + + assert len(result) == 2 + # feature_1 is present + assert result[0][1][0] == FieldStatus.PRESENT + # feature_2 is not found + assert result[1][1][0] == FieldStatus.NOT_FOUND + + def test_empty_inputs(self): + """Test handling of empty inputs.""" + # Empty rows + result = _convert_rows_to_protobuf(["feature_1"], []) + assert len(result) == 1 + assert len(result[0][0]) == 0 # Empty ts_vector + + # Empty features + timestamp = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) + result = _convert_rows_to_protobuf([], [(timestamp, {"f": ValueProto()})]) + assert len(result) == 0 + + def test_large_scale_correctness(self): + """Test correctness with large number of features and entities. + + This test verifies that the optimized implementation produces correct + results at scale (50 features x 500 entities = 25,000 data points). 
+ """ + timestamp = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) + num_entities = 500 + num_features = 50 + + feature_data = { + f"feature_{i}": ValueProto(float_val=float(i)) for i in range(num_features) + } + read_rows = [(timestamp, feature_data.copy()) for _ in range(num_entities)] + requested_features = [f"feature_{i}" for i in range(num_features)] + + result = _convert_rows_to_protobuf(requested_features, read_rows) + + # Verify structure + assert len(result) == num_features + for feature_idx, (ts_vector, status_vector, value_vector) in enumerate(result): + assert len(ts_vector) == num_entities + assert len(status_vector) == num_entities + assert len(value_vector) == num_entities + + # Verify all timestamps are the same (pre-computed once) + expected_ts = int(timestamp.timestamp()) + for ts in ts_vector: + assert ts.seconds == expected_ts + + # Verify all statuses are PRESENT + for status in status_vector: + assert status == FieldStatus.PRESENT diff --git a/sdk/python/tests/unit/test_utils_entity_maps.py b/sdk/python/tests/unit/test_utils_entity_maps.py new file mode 100644 index 00000000000..cf331b4ad30 --- /dev/null +++ b/sdk/python/tests/unit/test_utils_entity_maps.py @@ -0,0 +1,280 @@ +# Copyright 2025 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +""" +Unit tests for _get_entity_maps function in feast/utils.py + +These tests verify that the fix for issue #6012 correctly eliminates +redundant registry.get_entity() calls by using a local lookup dict. + +Related issue: https://github.com/feast-dev/feast/issues/6012 +""" + +from unittest.mock import MagicMock + +from feast.entity import Entity +from feast.utils import _get_entity_maps + + +class MockFeatureViewProjection: + """Mock FeatureViewProjection for testing.""" + + def __init__(self, join_key_map=None): + self.join_key_map = join_key_map or {} + + +class MockEntityColumn: + """Mock entity column for testing.""" + + def __init__(self, name: str, dtype): + self.name = name + self.dtype = dtype + + +class MockDtype: + """Mock dtype with to_value_type method.""" + + def __init__(self, value_type): + self._value_type = value_type + + def to_value_type(self): + return self._value_type + + +class MockFeatureView: + """Mock FeatureView for testing.""" + + def __init__(self, entities=None, entity_columns=None, join_key_map=None): + self.entities = entities or [] + self.entity_columns = entity_columns or [] + self.projection = MockFeatureViewProjection(join_key_map) + + +def create_mock_entity(name: str, join_key: str) -> Entity: + """Create a mock Entity with the specified name and join_key.""" + entity = MagicMock(spec=Entity) + entity.name = name + entity.join_key = join_key + return entity + + +class TestGetEntityMaps: + """Tests for _get_entity_maps function.""" + + def test_no_redundant_get_entity_calls(self): + """ + Verify that get_entity is NOT called after list_entities fetches all entities. + This is the core fix for issue #6012. 
+ """ + # Create mock entities + entity1 = create_mock_entity("driver", "driver_id") + entity2 = create_mock_entity("customer", "customer_id") + + # Create mock registry + registry = MagicMock() + registry.list_entities.return_value = [entity1, entity2] + + # Create feature views that reference the entities + fv1 = MockFeatureView(entities=["driver"]) + fv2 = MockFeatureView(entities=["customer"]) + fv3 = MockFeatureView(entities=["driver", "customer"]) + + # Call the function under test + _get_entity_maps(registry, "test_project", [fv1, fv2, fv3]) + + # Verify list_entities was called once + registry.list_entities.assert_called_once_with("test_project", allow_cache=True) + + # Verify get_entity was NEVER called (this is the fix) + registry.get_entity.assert_not_called() + + def test_entity_name_to_join_key_mapping(self): + """Test that entity names are correctly mapped to join keys.""" + entity1 = create_mock_entity("driver", "driver_id") + entity2 = create_mock_entity("customer", "customer_id") + + registry = MagicMock() + registry.list_entities.return_value = [entity1, entity2] + + fv = MockFeatureView(entities=["driver", "customer"]) + + entity_name_to_join_key, _, _ = _get_entity_maps(registry, "test_project", [fv]) + + assert "driver" in entity_name_to_join_key + assert entity_name_to_join_key["driver"] == "driver_id" + assert "customer" in entity_name_to_join_key + assert entity_name_to_join_key["customer"] == "customer_id" + + def test_join_keys_set(self): + """Test that the join keys set is correctly returned.""" + entity1 = create_mock_entity("driver", "driver_id") + entity2 = create_mock_entity("customer", "customer_id") + + registry = MagicMock() + registry.list_entities.return_value = [entity1, entity2] + + fv = MockFeatureView(entities=["driver", "customer"]) + + _, _, join_keys = _get_entity_maps(registry, "test_project", [fv]) + + assert "driver_id" in join_keys + assert "customer_id" in join_keys + assert len(join_keys) == 2 + + def 
test_missing_entity_raises_exception(self): + """ + Test that missing entities (not in registry) raise EntityNotFoundException. + This maintains the original error behavior for misconfigured registries. + """ + import pytest + + from feast.errors import EntityNotFoundException + + entity1 = create_mock_entity("driver", "driver_id") + + registry = MagicMock() + registry.list_entities.return_value = [entity1] + + # Feature view references entity that doesn't exist in registry + fv = MockFeatureView(entities=["driver", "nonexistent_entity"]) + + # Should raise EntityNotFoundException for the missing entity + with pytest.raises(EntityNotFoundException) as exc_info: + _get_entity_maps(registry, "test_project", [fv]) + + assert "nonexistent_entity" in str(exc_info.value) + + def test_join_key_remapping(self): + """Test that join_key_map correctly remaps entity names and join keys.""" + entity = create_mock_entity("driver", "driver_id") + + registry = MagicMock() + registry.list_entities.return_value = [entity] + + # Feature view with join key mapping + fv = MockFeatureView( + entities=["driver"], + join_key_map={"driver_id": "remapped_driver_id"}, + ) + + entity_name_to_join_key, _, join_keys = _get_entity_maps( + registry, "test_project", [fv] + ) + + # The remapped join key should be in the mapping + assert "remapped_driver_id" in join_keys + + def test_empty_feature_views(self): + """Test with no feature views.""" + entity1 = create_mock_entity("driver", "driver_id") + + registry = MagicMock() + registry.list_entities.return_value = [entity1] + + entity_name_to_join_key, entity_type_map, join_keys = _get_entity_maps( + registry, "test_project", [] + ) + + # Should still have the base entity mapping from list_entities + assert "driver" in entity_name_to_join_key + assert entity_name_to_join_key["driver"] == "driver_id" + + def test_empty_registry_and_feature_views(self): + """Test with no entities and no feature views returns empty maps.""" + registry = MagicMock() + 
registry.list_entities.return_value = [] + + entity_name_to_join_key, entity_type_map, join_keys = _get_entity_maps( + registry, "test_project", [] + ) + + assert len(entity_name_to_join_key) == 0 + assert len(join_keys) == 0 + + def test_entity_type_map_from_entity_columns(self): + """Test that entity_type_map is populated from entity_columns.""" + from feast.value_type import ValueType + + entity = create_mock_entity("driver", "driver_id") + + registry = MagicMock() + registry.list_entities.return_value = [entity] + + # Create entity columns with dtype + driver_col = MockEntityColumn("driver_id", MockDtype(ValueType.INT64)) + rating_col = MockEntityColumn("rating", MockDtype(ValueType.FLOAT)) + + fv = MockFeatureView( + entities=["driver"], + entity_columns=[driver_col, rating_col], + ) + + _, entity_type_map, _ = _get_entity_maps(registry, "test_project", [fv]) + + assert "driver_id" in entity_type_map + assert entity_type_map["driver_id"] == ValueType.INT64 + assert "rating" in entity_type_map + assert entity_type_map["rating"] == ValueType.FLOAT + + +class TestGetEntityMapsPerformance: + """Performance-related tests for _get_entity_maps.""" + + def test_linear_scaling_with_feature_views(self): + """ + Verify that increasing feature views doesn't increase registry calls. 
+ With N feature views referencing M entities, we should have: + - 1 list_entities call (not N*M get_entity calls) + """ + # Create many entities + entities = [create_mock_entity(f"entity_{i}", f"key_{i}") for i in range(10)] + + registry = MagicMock() + registry.list_entities.return_value = entities + + # Create many feature views, each referencing multiple entities + feature_views = [ + MockFeatureView(entities=[f"entity_{j}" for j in range(i % 10 + 1)]) + for i in range(50) + ] + + _get_entity_maps(registry, "test_project", feature_views) + + # Regardless of 50 feature views with varying entity counts: + # - list_entities should be called exactly once + # - get_entity should NEVER be called + registry.list_entities.assert_called_once() + registry.get_entity.assert_not_called() + + def test_duplicate_entity_references(self): + """ + Test that duplicate entity references across feature views + don't cause any issues or duplicate lookups. + """ + entity = create_mock_entity("driver", "driver_id") + + registry = MagicMock() + registry.list_entities.return_value = [entity] + + # Multiple feature views all referencing the same entity + feature_views = [MockFeatureView(entities=["driver"]) for _ in range(20)] + + entity_name_to_join_key, _, join_keys = _get_entity_maps( + registry, "test_project", feature_views + ) + + # Should work correctly with just one entity in the result + assert entity_name_to_join_key["driver"] == "driver_id" + assert "driver_id" in join_keys + registry.get_entity.assert_not_called() diff --git a/sdk/python/tests/unit/transformation/test_spark_transformation.py b/sdk/python/tests/unit/transformation/test_spark_transformation.py deleted file mode 100644 index 63d9b520ce9..00000000000 --- a/sdk/python/tests/unit/transformation/test_spark_transformation.py +++ /dev/null @@ -1,127 +0,0 @@ -from unittest.mock import patch - -import pytest -from pyspark.sql import SparkSession -from pyspark.sql.functions import col, regexp_replace -from 
pyspark.testing.utils import assertDataFrameEqual - -from feast.transformation.base import Transformation -from feast.transformation.mode import TransformationMode -from feast.transformation.spark_transformation import SparkTransformation - - -def get_sample_df(spark): - sample_data = [ - {"name": "John D.", "age": 30}, - {"name": "Alice G.", "age": 25}, - {"name": "Bob T.", "age": 35}, - {"name": "Eve A.", "age": 28}, - ] - df = spark.createDataFrame(sample_data) - return df - - -def get_expected_df(spark): - expected_data = [ - {"name": "John D.", "age": 30}, - {"name": "Alice G.", "age": 25}, - {"name": "Bob T.", "age": 35}, - {"name": "Eve A.", "age": 28}, - ] - - expected_df = spark.createDataFrame(expected_data) - return expected_df - - -def remove_extra_spaces(df, column_name): - df_transformed = df.withColumn( - column_name, regexp_replace(col(column_name), "\\s+", " ") - ) - return df_transformed - - -def remove_extra_spaces_sql(df, column_name): - sql = f""" - SELECT - age, - regexp_replace({column_name}, '\\\\s+', ' ') as {column_name} - FROM {df} - """ - return sql - - -@pytest.fixture -def spark_fixture(): - spark = ( - SparkSession.builder.appName("Testing PySpark Example") - .config("spark.driver.host", "127.0.0.1") - .config("spark.driver.bindAddress", "127.0.0.1") - .getOrCreate() - ) - try: - yield spark - finally: - spark.stop() - - -@patch("feast.infra.compute_engines.spark.utils.get_or_create_new_spark_session") -def test_spark_transformation(spark_fixture): - spark = ( - SparkSession.builder.appName("Testing PySpark Example") - .config("spark.driver.host", "127.0.0.1") - .config("spark.driver.bindAddress", "127.0.0.1") - .getOrCreate() - ) - df = get_sample_df(spark) - - spark_transformation = Transformation( - mode=TransformationMode.SPARK, - udf=remove_extra_spaces, - udf_string="remove extra spaces", - ) - - transformed_df = spark_transformation.transform(df, "name") - expected_df = get_expected_df(spark) - 
assertDataFrameEqual(transformed_df, expected_df) - - -@patch("feast.infra.compute_engines.spark.utils.get_or_create_new_spark_session") -def test_spark_transformation_init_transformation(spark_fixture): - spark = ( - SparkSession.builder.appName("Testing PySpark Example") - .config("spark.driver.host", "127.0.0.1") - .config("spark.driver.bindAddress", "127.0.0.1") - .getOrCreate() - ) - df = get_sample_df(spark) - - spark_transformation = SparkTransformation( - mode=TransformationMode.SPARK, - udf=remove_extra_spaces, - udf_string="remove extra spaces", - ) - - transformed_df = spark_transformation.transform(df, "name") - expected_df = get_expected_df(spark) - assertDataFrameEqual(transformed_df, expected_df) - - -@patch("feast.infra.compute_engines.spark.utils.get_or_create_new_spark_session") -def test_spark_transformation_sql(spark_fixture): - spark = ( - SparkSession.builder.appName("Testing PySpark Example") - .config("spark.driver.host", "127.0.0.1") - .config("spark.driver.bindAddress", "127.0.0.1") - .getOrCreate() - ) - df = get_sample_df(spark) - - spark_transformation = SparkTransformation( - mode=TransformationMode.SPARK_SQL, - udf=remove_extra_spaces_sql, - udf_string="remove extra spaces sql", - ) - - transformed_df = spark_transformation.transform(df, "name") - expected_df = get_expected_df(spark) - assertDataFrameEqual(transformed_df, expected_df) diff --git a/sdk/python/tests/universal/__init__.py b/sdk/python/tests/universal/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/tests/universal/feature_repos/duckdb_repo_configuration.py b/sdk/python/tests/universal/feature_repos/duckdb_repo_configuration.py new file mode 100644 index 00000000000..5e03065cc3b --- /dev/null +++ b/sdk/python/tests/universal/feature_repos/duckdb_repo_configuration.py @@ -0,0 +1,35 @@ +from feast.infra.offline_stores.duckdb import DuckDBOfflineStoreConfig +from tests.universal.feature_repos.universal.data_sources.file import ( + 
DeltaFileSourceCreator, + DeltaS3FileSourceCreator, + FileDataSourceCreator, +) + + +class DuckDBDataSourceCreator(FileDataSourceCreator): + def create_offline_store_config(self): + self.duckdb_offline_store_config = DuckDBOfflineStoreConfig() + return self.duckdb_offline_store_config + + +class DuckDBDeltaDataSourceCreator(DeltaFileSourceCreator): + def create_offline_store_config(self): + self.duckdb_offline_store_config = DuckDBOfflineStoreConfig() + return self.duckdb_offline_store_config + + +class DuckDBDeltaS3DataSourceCreator(DeltaS3FileSourceCreator): + def create_offline_store_config(self): + self.duckdb_offline_store_config = DuckDBOfflineStoreConfig( + staging_location="s3://test/staging", + staging_location_endpoint_override=self.endpoint_url, + ) + return self.duckdb_offline_store_config + + +AVAILABLE_OFFLINE_STORES = [ + ("local", DuckDBDataSourceCreator), + ("local", DuckDBDeltaDataSourceCreator), +] + +AVAILABLE_ONLINE_STORES = {"sqlite": ({"type": "sqlite"}, None)} diff --git a/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py b/sdk/python/tests/universal/feature_repos/integration_test_repo_config.py similarity index 91% rename from sdk/python/tests/integration/feature_repos/integration_test_repo_config.py rename to sdk/python/tests/universal/feature_repos/integration_test_repo_config.py index 309f92005a3..f635d2e9c17 100644 --- a/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py +++ b/sdk/python/tests/universal/feature_repos/integration_test_repo_config.py @@ -3,13 +3,13 @@ from enum import Enum from typing import Dict, Optional, Type, Union -from tests.integration.feature_repos.universal.data_source_creator import ( +from tests.universal.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) -from tests.integration.feature_repos.universal.data_sources.file import ( +from tests.universal.feature_repos.universal.data_sources.file import ( FileDataSourceCreator, ) -from 
tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) diff --git a/sdk/python/tests/universal/feature_repos/ray_repo_configuration.py b/sdk/python/tests/universal/feature_repos/ray_repo_configuration.py new file mode 100644 index 00000000000..39e43ae7b12 --- /dev/null +++ b/sdk/python/tests/universal/feature_repos/ray_repo_configuration.py @@ -0,0 +1,6 @@ +from feast.infra.offline_stores.contrib.ray_repo_configuration import ( + RayDataSourceCreator, +) + +AVAILABLE_OFFLINE_STORES = [("local", RayDataSourceCreator)] +AVAILABLE_ONLINE_STORES = {"sqlite": ({"type": "sqlite"}, None)} diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/universal/feature_repos/repo_configuration.py similarity index 90% rename from sdk/python/tests/integration/feature_repos/repo_configuration.py rename to sdk/python/tests/universal/feature_repos/repo_configuration.py index 1f53df48f3e..33d189583b2 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/universal/feature_repos/repo_configuration.py @@ -27,40 +27,26 @@ FeatureLoggingConfig, ) from feast.infra.feature_servers.local_process.config import LocalFeatureServerConfig -from feast.infra.offline_stores.contrib.ray_repo_configuration import ( - RayDataSourceCreator, -) from feast.permissions.action import AuthzedAction from feast.permissions.auth_model import OidcClientAuthConfig from feast.permissions.permission import Permission from feast.permissions.policy import RoleBasedPolicy from feast.repo_config import MaterializationConfig, RegistryConfig, RepoConfig from feast.utils import _utc_now -from tests.integration.feature_repos.integration_test_repo_config import ( +from tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, RegistryLocation, ) -from 
tests.integration.feature_repos.universal.data_source_creator import ( +from tests.universal.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) -from tests.integration.feature_repos.universal.data_sources.bigquery import ( - BigQueryDataSourceCreator, -) -from tests.integration.feature_repos.universal.data_sources.file import ( - DuckDBDataSourceCreator, - DuckDBDeltaDataSourceCreator, +from tests.universal.feature_repos.universal.data_sources.file import ( FileDataSourceCreator, RemoteOfflineOidcAuthStoreDataSourceCreator, RemoteOfflineStoreDataSourceCreator, RemoteOfflineTlsStoreDataSourceCreator, ) -from tests.integration.feature_repos.universal.data_sources.redshift import ( - RedshiftDataSourceCreator, -) -from tests.integration.feature_repos.universal.data_sources.snowflake import ( - SnowflakeDataSourceCreator, -) -from tests.integration.feature_repos.universal.feature_views import ( +from tests.universal.feature_repos.universal.feature_views import ( conv_rate_plus_100_feature_view, create_conv_rate_request_source, create_customer_daily_profile_feature_view, @@ -72,22 +58,7 @@ create_order_feature_view, create_pushable_feature_view, ) -from tests.integration.feature_repos.universal.online_store.bigtable import ( - BigtableOnlineStoreCreator, -) -from tests.integration.feature_repos.universal.online_store.datastore import ( - DatastoreOnlineStoreCreator, -) -from tests.integration.feature_repos.universal.online_store.dynamodb import ( - DynamoDBOnlineStoreCreator, -) -from tests.integration.feature_repos.universal.online_store.milvus import ( - MilvusOnlineStoreCreator, -) -from tests.integration.feature_repos.universal.online_store.redis import ( - RedisOnlineStoreCreator, -) -from tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) @@ -118,29 +89,15 @@ "instance": os.getenv("BIGTABLE_INSTANCE_ID", 
"feast-integration-tests"), } -IKV_CONFIG = { - "type": "ikv", - "account_id": os.getenv("IKV_ACCOUNT_ID", ""), - "account_passkey": os.getenv("IKV_ACCOUNT_PASSKEY", ""), - "store_name": os.getenv("IKV_STORE_NAME", ""), - "mount_directory": os.getenv("IKV_MOUNT_DIR", ""), -} - OFFLINE_STORE_TO_PROVIDER_CONFIG: Dict[str, Tuple[str, Type[DataSourceCreator]]] = { "file": ("local", FileDataSourceCreator), - "bigquery": ("gcp", BigQueryDataSourceCreator), - "redshift": ("aws", RedshiftDataSourceCreator), - "snowflake": ("aws", SnowflakeDataSourceCreator), } AVAILABLE_OFFLINE_STORES: List[Tuple[str, Type[DataSourceCreator]]] = [ ("local", FileDataSourceCreator), - ("local", DuckDBDataSourceCreator), - ("local", DuckDBDeltaDataSourceCreator), ("local", RemoteOfflineStoreDataSourceCreator), ("local", RemoteOfflineOidcAuthStoreDataSourceCreator), ("local", RemoteOfflineTlsStoreDataSourceCreator), - ("local", RayDataSourceCreator), ] if os.getenv("FEAST_IS_LOCAL_TEST", "False") == "True": @@ -157,6 +114,16 @@ # Only configure Cloud DWH if running full integration tests if os.getenv("FEAST_IS_LOCAL_TEST", "False") != "True": + from tests.universal.feature_repos.universal.data_sources.bigquery import ( + BigQueryDataSourceCreator, + ) + from tests.universal.feature_repos.universal.data_sources.redshift import ( + RedshiftDataSourceCreator, + ) + from tests.universal.feature_repos.universal.data_sources.snowflake import ( + SnowflakeDataSourceCreator, + ) + AVAILABLE_OFFLINE_STORES.extend( [ ("gcp", BigQueryDataSourceCreator), @@ -165,6 +132,10 @@ ] ) + OFFLINE_STORE_TO_PROVIDER_CONFIG["bigquery"] = ("gcp", BigQueryDataSourceCreator) + OFFLINE_STORE_TO_PROVIDER_CONFIG["redshift"] = ("aws", RedshiftDataSourceCreator) + OFFLINE_STORE_TO_PROVIDER_CONFIG["snowflake"] = ("aws", SnowflakeDataSourceCreator) + AVAILABLE_ONLINE_STORES["redis"] = (REDIS_CONFIG, None) AVAILABLE_ONLINE_STORES["dynamodb"] = (DYNAMO_CONFIG, None) AVAILABLE_ONLINE_STORES["datastore"] = ("datastore", None) @@ 
-172,10 +143,6 @@ AVAILABLE_ONLINE_STORES["bigtable"] = (BIGTABLE_CONFIG, None) AVAILABLE_ONLINE_STORES["milvus"] = (MILVUS_CONFIG, None) - # Uncomment to test using private IKV account. Currently not enabled as - # there is no dedicated IKV instance for CI testing and there is no - # containerized version of IKV. - # AVAILABLE_ONLINE_STORES["ikv"] = (IKV_CONFIG, None) full_repo_configs_module = os.environ.get(FULL_REPO_CONFIGS_MODULE_ENV_NAME) if full_repo_configs_module is not None: @@ -214,6 +181,22 @@ # Replace online stores with emulated online stores if we're running local integration tests if os.getenv("FEAST_LOCAL_ONLINE_CONTAINER", "False").lower() == "true": + from tests.universal.feature_repos.universal.online_store.bigtable import ( + BigtableOnlineStoreCreator, + ) + from tests.universal.feature_repos.universal.online_store.datastore import ( + DatastoreOnlineStoreCreator, + ) + from tests.universal.feature_repos.universal.online_store.dynamodb import ( + DynamoDBOnlineStoreCreator, + ) + from tests.universal.feature_repos.universal.online_store.milvus import ( + MilvusOnlineStoreCreator, + ) + from tests.universal.feature_repos.universal.online_store.redis import ( + RedisOnlineStoreCreator, + ) + replacements: Dict[ str, Tuple[Union[str, Dict[str, Any]], Optional[Type[OnlineStoreCreator]]] ] = { diff --git a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py b/sdk/python/tests/universal/feature_repos/universal/data_source_creator.py similarity index 73% rename from sdk/python/tests/integration/feature_repos/universal/data_source_creator.py rename to sdk/python/tests/universal/feature_repos/universal/data_source_creator.py index 467db4dddce..6c0bc39b353 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py +++ b/sdk/python/tests/universal/feature_repos/universal/data_source_creator.py @@ -1,3 +1,4 @@ +import json from abc import ABC, abstractmethod from typing import Dict, Optional @@ -14,6 
+15,29 @@ class DataSourceCreator(ABC): def __init__(self, project_name: str, *args, **kwargs): self.project_name = project_name + @staticmethod + def serialize_complex_columns(df: pd.DataFrame) -> pd.DataFrame: + """Serialize dict columns (Map/Struct types) to JSON strings. + + Backends like Snowflake, BigQuery, and Redshift cannot natively + ingest Python dicts via their bulk-load paths (VARIANT, STRUCT, + super types cause issues). Converting them to JSON strings lets + the data be stored as VARCHAR/STRING instead. + + List columns with primitive values (int, float, str, bool) are + left untouched since backends handle those as native ARRAY types. + """ + df = df.copy() + for col in df.columns: + if df[col].dropna().empty: + continue + sample = df[col].dropna().iloc[0] + if isinstance(sample, dict): + df[col] = df[col].apply( + lambda v: json.dumps(v) if v is not None else None + ) + return df + @abstractmethod def create_data_source( self, diff --git a/sdk/python/tests/universal/feature_repos/universal/data_sources/__init__.py b/sdk/python/tests/universal/feature_repos/universal/data_sources/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py b/sdk/python/tests/universal/feature_repos/universal/data_sources/bigquery.py similarity index 96% rename from sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py rename to sdk/python/tests/universal/feature_repos/universal/data_sources/bigquery.py index 4fcd9533e8e..39595acf536 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py +++ b/sdk/python/tests/universal/feature_repos/universal/data_sources/bigquery.py @@ -15,7 +15,7 @@ SavedDatasetBigQueryStorage, ) from feast.utils import make_df_tzaware -from tests.integration.feature_repos.universal.data_source_creator import ( +from tests.universal.feature_repos.universal.data_source_creator import ( 
DataSourceCreator, ) @@ -80,6 +80,7 @@ def create_data_source( # `BigQueryOfflineStore.offline_write_batch`, but since we're bypassing that API here, we should follow the same # rule. The schema of this initial dataframe determines the schema in the newly created BigQuery table. df = make_df_tzaware(df) + df = self.serialize_complex_columns(df) job = self.client.load_table_from_dataframe(df, destination_name) job.result() diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py b/sdk/python/tests/universal/feature_repos/universal/data_sources/file.py similarity index 95% rename from sdk/python/tests/integration/feature_repos/universal/data_sources/file.py rename to sdk/python/tests/universal/feature_repos/universal/data_sources/file.py index a592dfc54a5..1084685e361 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py +++ b/sdk/python/tests/universal/feature_repos/universal/data_sources/file.py @@ -24,7 +24,6 @@ from feast.data_source import DataSource from feast.feature_logging import LoggingDestination from feast.infra.offline_stores.dask import DaskOfflineStoreConfig -from feast.infra.offline_stores.duckdb import DuckDBOfflineStoreConfig from feast.infra.offline_stores.file_source import ( FileLoggingDestination, SavedDatasetFileStorage, @@ -32,7 +31,7 @@ from feast.infra.offline_stores.remote import RemoteOfflineStoreConfig from feast.repo_config import FeastConfigBaseModel, RegistryConfig from feast.wait import wait_retry_backoff # noqa: E402 -from tests.integration.feature_repos.universal.data_source_creator import ( +from tests.universal.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) from tests.utils.auth_permissions_util import include_auth_config @@ -346,28 +345,6 @@ def teardown(self): self.f.close() -# TODO split up DataSourceCreator and OfflineStoreCreator -class DuckDBDataSourceCreator(FileDataSourceCreator): - def create_offline_store_config(self): - 
self.duckdb_offline_store_config = DuckDBOfflineStoreConfig() - return self.duckdb_offline_store_config - - -class DuckDBDeltaDataSourceCreator(DeltaFileSourceCreator): - def create_offline_store_config(self): - self.duckdb_offline_store_config = DuckDBOfflineStoreConfig() - return self.duckdb_offline_store_config - - -class DuckDBDeltaS3DataSourceCreator(DeltaS3FileSourceCreator): - def create_offline_store_config(self): - self.duckdb_offline_store_config = DuckDBOfflineStoreConfig( - staging_location="s3://test/staging", - staging_location_endpoint_override=self.endpoint_url, - ) - return self.duckdb_offline_store_config - - class RemoteOfflineStoreDataSourceCreator(FileDataSourceCreator): def __init__(self, project_name: str, *args, **kwargs): super().__init__(project_name) diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py b/sdk/python/tests/universal/feature_repos/universal/data_sources/redshift.py similarity index 97% rename from sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py rename to sdk/python/tests/universal/feature_repos/universal/data_sources/redshift.py index 91d1a74f071..000bc226694 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py +++ b/sdk/python/tests/universal/feature_repos/universal/data_sources/redshift.py @@ -14,7 +14,7 @@ ) from feast.infra.utils import aws_utils from feast.repo_config import FeastConfigBaseModel -from tests.integration.feature_repos.universal.data_source_creator import ( +from tests.universal.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) @@ -55,6 +55,7 @@ def create_data_source( ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) + df = self.serialize_complex_columns(df) aws_utils.upload_df_to_redshift( self.client, self.offline_store_config.cluster_id, diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py 
b/sdk/python/tests/universal/feature_repos/universal/data_sources/snowflake.py similarity index 96% rename from sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py rename to sdk/python/tests/universal/feature_repos/universal/data_sources/snowflake.py index e9c4ad21a31..8e456168786 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py +++ b/sdk/python/tests/universal/feature_repos/universal/data_sources/snowflake.py @@ -18,7 +18,7 @@ write_pandas, ) from feast.repo_config import FeastConfigBaseModel -from tests.integration.feature_repos.universal.data_source_creator import ( +from tests.universal.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) @@ -53,6 +53,7 @@ def create_data_source( ) -> DataSource: destination_name = self.get_prefixed_table_name(destination_name) + df = self.serialize_complex_columns(df) with GetSnowflakeConnection(self.offline_store_config) as conn: write_pandas(conn, df, destination_name, auto_create_table=True) diff --git a/sdk/python/tests/integration/feature_repos/universal/entities.py b/sdk/python/tests/universal/feature_repos/universal/entities.py similarity index 100% rename from sdk/python/tests/integration/feature_repos/universal/entities.py rename to sdk/python/tests/universal/feature_repos/universal/entities.py diff --git a/sdk/python/tests/integration/feature_repos/universal/feature_views.py b/sdk/python/tests/universal/feature_repos/universal/feature_views.py similarity index 93% rename from sdk/python/tests/integration/feature_repos/universal/feature_views.py rename to sdk/python/tests/universal/feature_repos/universal/feature_views.py index 8b663252a02..2d1e62ea617 100644 --- a/sdk/python/tests/integration/feature_repos/universal/feature_views.py +++ b/sdk/python/tests/universal/feature_repos/universal/feature_views.py @@ -16,8 +16,19 @@ from feast.data_source import DataSource, RequestSource from feast.feature_view_projection import 
FeatureViewProjection from feast.on_demand_feature_view import PandasTransformation -from feast.types import Array, FeastType, Float32, Float64, Int32, Int64, String -from tests.integration.feature_repos.universal.entities import ( +from feast.types import ( + Array, + FeastType, + Float32, + Float64, + Int32, + Int64, + Json, + Map, + String, + Struct, +) +from tests.universal.feature_repos.universal.entities import ( customer, driver, item, @@ -193,6 +204,12 @@ def create_driver_hourly_stats_feature_view(source, infer_features: bool = False Field(name="acc_rate", dtype=Float32), Field(name="avg_daily_trips", dtype=Int32), Field(name=d.join_key, dtype=Int64), + Field(name="driver_metadata", dtype=Map), + Field(name="driver_config", dtype=Json), + Field( + name="driver_profile", + dtype=Struct({"name": String, "age": String}), + ), ], source=source, ttl=timedelta(hours=2), @@ -213,6 +230,12 @@ def create_driver_hourly_stats_batch_feature_view( Field(name="conv_rate", dtype=Float32), Field(name="acc_rate", dtype=Float32), Field(name="avg_daily_trips", dtype=Int32), + Field(name="driver_metadata", dtype=Map), + Field(name="driver_config", dtype=Json), + Field( + name="driver_profile", + dtype=Struct({"name": String, "age": String}), + ), ], source=source, ttl=timedelta(hours=2), diff --git a/sdk/python/tests/universal/feature_repos/universal/online_store/__init__.py b/sdk/python/tests/universal/feature_repos/universal/online_store/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/bigtable.py b/sdk/python/tests/universal/feature_repos/universal/online_store/bigtable.py similarity index 95% rename from sdk/python/tests/integration/feature_repos/universal/online_store/bigtable.py rename to sdk/python/tests/universal/feature_repos/universal/online_store/bigtable.py index c06143e245b..a6f6ceb12da 100644 --- 
a/sdk/python/tests/integration/feature_repos/universal/online_store/bigtable.py +++ b/sdk/python/tests/universal/feature_repos/universal/online_store/bigtable.py @@ -5,7 +5,7 @@ from testcontainers.core.container import DockerContainer from testcontainers.core.waiting_utils import wait_for_logs -from tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/cassandra.py b/sdk/python/tests/universal/feature_repos/universal/online_store/cassandra.py similarity index 96% rename from sdk/python/tests/integration/feature_repos/universal/online_store/cassandra.py rename to sdk/python/tests/universal/feature_repos/universal/online_store/cassandra.py index 190d94a8305..c7ca91e18b2 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/cassandra.py +++ b/sdk/python/tests/universal/feature_repos/universal/online_store/cassandra.py @@ -20,7 +20,7 @@ from testcontainers.core.container import DockerContainer from testcontainers.core.waiting_utils import wait_for_logs -from tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/couchbase.py b/sdk/python/tests/universal/feature_repos/universal/online_store/couchbase.py similarity index 98% rename from sdk/python/tests/integration/feature_repos/universal/online_store/couchbase.py rename to sdk/python/tests/universal/feature_repos/universal/online_store/couchbase.py index 2723ff13a30..cb030b708c6 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/couchbase.py +++ b/sdk/python/tests/universal/feature_repos/universal/online_store/couchbase.py @@ -5,7 +5,7 @@ from testcontainers.core.container import 
DockerContainer from testcontainers.core.waiting_utils import wait_for_logs -from tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/datastore.py b/sdk/python/tests/universal/feature_repos/universal/online_store/datastore.py similarity index 94% rename from sdk/python/tests/integration/feature_repos/universal/online_store/datastore.py rename to sdk/python/tests/universal/feature_repos/universal/online_store/datastore.py index b5bbb94f7c1..8eaa5139e43 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/datastore.py +++ b/sdk/python/tests/universal/feature_repos/universal/online_store/datastore.py @@ -5,7 +5,7 @@ from testcontainers.core.container import DockerContainer from testcontainers.core.waiting_utils import wait_for_logs -from tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/dynamodb.py b/sdk/python/tests/universal/feature_repos/universal/online_store/dynamodb.py similarity index 93% rename from sdk/python/tests/integration/feature_repos/universal/online_store/dynamodb.py rename to sdk/python/tests/universal/feature_repos/universal/online_store/dynamodb.py index 1aefdffb24b..04a1ffac355 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/dynamodb.py +++ b/sdk/python/tests/universal/feature_repos/universal/online_store/dynamodb.py @@ -3,7 +3,7 @@ from testcontainers.core.container import DockerContainer from testcontainers.core.waiting_utils import wait_for_logs -from tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( 
OnlineStoreCreator, ) diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/elasticsearch.py b/sdk/python/tests/universal/feature_repos/universal/online_store/elasticsearch.py similarity index 90% rename from sdk/python/tests/integration/feature_repos/universal/online_store/elasticsearch.py rename to sdk/python/tests/universal/feature_repos/universal/online_store/elasticsearch.py index 1e8088a997e..8ef467ae796 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/elasticsearch.py +++ b/sdk/python/tests/universal/feature_repos/universal/online_store/elasticsearch.py @@ -2,7 +2,7 @@ from testcontainers.elasticsearch import ElasticSearchContainer -from tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/hazelcast.py b/sdk/python/tests/universal/feature_repos/universal/online_store/hazelcast.py similarity index 95% rename from sdk/python/tests/integration/feature_repos/universal/online_store/hazelcast.py rename to sdk/python/tests/universal/feature_repos/universal/online_store/hazelcast.py index d50f2b75a3d..22ea5dcc09d 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/hazelcast.py +++ b/sdk/python/tests/universal/feature_repos/universal/online_store/hazelcast.py @@ -6,7 +6,7 @@ from testcontainers.core.container import DockerContainer from testcontainers.core.waiting_utils import wait_for_logs -from tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/hbase.py b/sdk/python/tests/universal/feature_repos/universal/online_store/hbase.py similarity index 92% rename from 
sdk/python/tests/integration/feature_repos/universal/online_store/hbase.py rename to sdk/python/tests/universal/feature_repos/universal/online_store/hbase.py index dba611b30bc..d350828d08a 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/hbase.py +++ b/sdk/python/tests/universal/feature_repos/universal/online_store/hbase.py @@ -3,7 +3,7 @@ from testcontainers.core.container import DockerContainer from testcontainers.core.waiting_utils import wait_for_logs -from tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/hybrid_online_store.py b/sdk/python/tests/universal/feature_repos/universal/online_store/hybrid_online_store.py similarity index 90% rename from sdk/python/tests/integration/feature_repos/universal/online_store/hybrid_online_store.py rename to sdk/python/tests/universal/feature_repos/universal/online_store/hybrid_online_store.py index f0efbd11044..619702330e6 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/hybrid_online_store.py +++ b/sdk/python/tests/universal/feature_repos/universal/online_store/hybrid_online_store.py @@ -1,4 +1,4 @@ -from sdk.python.tests.integration.feature_repos.universal.online_store_creator import ( +from sdk.python.tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/init.sql b/sdk/python/tests/universal/feature_repos/universal/online_store/init.sql similarity index 100% rename from sdk/python/tests/integration/feature_repos/universal/online_store/init.sql rename to sdk/python/tests/universal/feature_repos/universal/online_store/init.sql diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/milvus.py 
b/sdk/python/tests/universal/feature_repos/universal/online_store/milvus.py similarity index 91% rename from sdk/python/tests/integration/feature_repos/universal/online_store/milvus.py rename to sdk/python/tests/universal/feature_repos/universal/online_store/milvus.py index a8f2839bf8b..cfe6aec3677 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/milvus.py +++ b/sdk/python/tests/universal/feature_repos/universal/online_store/milvus.py @@ -1,6 +1,6 @@ from typing import Any -from tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) diff --git a/sdk/python/tests/universal/feature_repos/universal/online_store/mongodb.py b/sdk/python/tests/universal/feature_repos/universal/online_store/mongodb.py new file mode 100644 index 00000000000..0c0afd4908a --- /dev/null +++ b/sdk/python/tests/universal/feature_repos/universal/online_store/mongodb.py @@ -0,0 +1,31 @@ +from typing import Any, Dict + +from testcontainers.mongodb import MongoDbContainer + +from tests.universal.feature_repos.universal.online_store_creator import ( + OnlineStoreCreator, +) + + +class MongoDBOnlineStoreCreator(OnlineStoreCreator): + def __init__(self, project_name: str, **kwargs): + super().__init__(project_name) + # MongoDbContainer from testcontainers sets up authentication by default + # with username and password from the constructor + self.container = MongoDbContainer( + "mongo:latest", + username="test", + password="test", # pragma: allowlist secret + ).with_exposed_ports(27017) + + def create_online_store(self) -> Dict[str, Any]: + self.container.start() + exposed_port = self.container.get_exposed_port(27017) + # Include authentication in the connection string + return { + "type": "mongodb", + "connection_string": f"mongodb://test:test@localhost:{exposed_port}", # pragma: allowlist secret + } + + def teardown(self): + self.container.stop() diff --git 
a/sdk/python/tests/integration/feature_repos/universal/online_store/mysql.py b/sdk/python/tests/universal/feature_repos/universal/online_store/mysql.py similarity index 95% rename from sdk/python/tests/integration/feature_repos/universal/online_store/mysql.py rename to sdk/python/tests/universal/feature_repos/universal/online_store/mysql.py index c0ba91d15a4..c1ebdf6c984 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/mysql.py +++ b/sdk/python/tests/universal/feature_repos/universal/online_store/mysql.py @@ -2,7 +2,7 @@ from testcontainers.mysql import MySqlContainer -from tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/postgres.py b/sdk/python/tests/universal/feature_repos/universal/online_store/postgres.py similarity index 96% rename from sdk/python/tests/integration/feature_repos/universal/online_store/postgres.py rename to sdk/python/tests/universal/feature_repos/universal/online_store/postgres.py index d11f563cb5c..a3f8d28ef19 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/postgres.py +++ b/sdk/python/tests/universal/feature_repos/universal/online_store/postgres.py @@ -5,7 +5,7 @@ from testcontainers.core.waiting_utils import wait_for_logs from testcontainers.postgres import PostgresContainer -from tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/qdrant.py b/sdk/python/tests/universal/feature_repos/universal/online_store/qdrant.py similarity index 90% rename from sdk/python/tests/integration/feature_repos/universal/online_store/qdrant.py rename to 
sdk/python/tests/universal/feature_repos/universal/online_store/qdrant.py index 82a027b416d..a9036252711 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/qdrant.py +++ b/sdk/python/tests/universal/feature_repos/universal/online_store/qdrant.py @@ -2,7 +2,7 @@ from testcontainers.qdrant import QdrantContainer -from tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py b/sdk/python/tests/universal/feature_repos/universal/online_store/redis.py similarity index 92% rename from sdk/python/tests/integration/feature_repos/universal/online_store/redis.py rename to sdk/python/tests/universal/feature_repos/universal/online_store/redis.py index 8e18f7fb172..715755471df 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py +++ b/sdk/python/tests/universal/feature_repos/universal/online_store/redis.py @@ -3,7 +3,7 @@ from testcontainers.core.container import DockerContainer from testcontainers.core.waiting_utils import wait_for_logs -from tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/singlestore.py b/sdk/python/tests/universal/feature_repos/universal/online_store/singlestore.py similarity index 95% rename from sdk/python/tests/integration/feature_repos/universal/online_store/singlestore.py rename to sdk/python/tests/universal/feature_repos/universal/online_store/singlestore.py index d3a02421d0a..3c9a835dfe2 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/singlestore.py +++ b/sdk/python/tests/universal/feature_repos/universal/online_store/singlestore.py @@ -4,7 +4,7 @@ from 
testcontainers.core.container import DockerContainer -from tests.integration.feature_repos.universal.online_store_creator import ( +from tests.universal.feature_repos.universal.online_store_creator import ( OnlineStoreCreator, ) diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py b/sdk/python/tests/universal/feature_repos/universal/online_store_creator.py similarity index 100% rename from sdk/python/tests/integration/feature_repos/universal/online_store_creator.py rename to sdk/python/tests/universal/feature_repos/universal/online_store_creator.py diff --git a/sdk/python/tests/universal/offline_store/.gitkeep b/sdk/python/tests/universal/offline_store/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/tests/universal/online_store/.gitkeep b/sdk/python/tests/universal/online_store/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/tests/utils/cli_repo_creator.py b/sdk/python/tests/utils/cli_repo_creator.py index 554c5af99f2..3aa96768f61 100644 --- a/sdk/python/tests/utils/cli_repo_creator.py +++ b/sdk/python/tests/utils/cli_repo_creator.py @@ -148,6 +148,19 @@ def local_repo( entity_key_serialization_version: 3 """ ) + elif online_store: # Added for mongodb, but very general + yaml_config = dedent( + f""" + project: {project_id} + registry: {data_path / "registry.db"} + provider: local + online_store: + type: {online_store} + offline_store: + type: {offline_store} + entity_key_serialization_version: 3 + """ + ) else: pass diff --git a/sdk/python/tests/utils/e2e_test_validation.py b/sdk/python/tests/utils/e2e_test_validation.py index 6d887b8bb07..c89d61b9d5a 100644 --- a/sdk/python/tests/utils/e2e_test_validation.py +++ b/sdk/python/tests/utils/e2e_test_validation.py @@ -10,22 +10,16 @@ from feast import FeatureStore, FeatureView, RepoConfig from feast.utils import _utc_now -from tests.integration.feature_repos.integration_test_repo_config import ( +from 
tests.universal.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.universal.data_source_creator import ( +from tests.universal.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) -from tests.integration.feature_repos.universal.data_sources.bigquery import ( - BigQueryDataSourceCreator, -) -from tests.integration.feature_repos.universal.data_sources.file import ( +from tests.universal.feature_repos.universal.data_sources.file import ( FileDataSourceCreator, FileParquetDatasetSourceCreator, ) -from tests.integration.feature_repos.universal.data_sources.redshift import ( - RedshiftDataSourceCreator, -) def validate_offline_online_store_consistency( @@ -225,6 +219,13 @@ def make_feature_store_yaml( # Only test if this is NOT a local test if os.getenv("FEAST_IS_LOCAL_TEST", "False") != "True": + from tests.universal.feature_repos.universal.data_sources.bigquery import ( + BigQueryDataSourceCreator, + ) + from tests.universal.feature_repos.universal.data_sources.redshift import ( + RedshiftDataSourceCreator, + ) + NULLABLE_ONLINE_STORE_CONFIGS.extend( [ IntegrationTestRepoConfig( diff --git a/sdk/python/tests/utils/type_test_utils.py b/sdk/python/tests/utils/type_test_utils.py new file mode 100644 index 00000000000..5abf069b928 --- /dev/null +++ b/sdk/python/tests/utils/type_test_utils.py @@ -0,0 +1,103 @@ +from dataclasses import dataclass +from typing import List, Optional + +from feast.types import ( + Array, + Bool, + FeastType, + Float32, + Int32, + Int64, + UnixTimestamp, +) +from tests.data.data_creator import create_basic_driver_dataset +from tests.universal.feature_repos.universal.feature_views import driver_feature_view + + +@dataclass(frozen=True, repr=True) +class TypeTestConfig: + feature_dtype: str + feature_is_list: bool + has_empty_list: bool + + +def get_feast_type(feature_dtype: str, feature_is_list: bool) -> FeastType: + dtype: Optional[FeastType] = None + if 
feature_is_list is True: + if feature_dtype == "int32": + dtype = Array(Int32) + elif feature_dtype == "int64": + dtype = Array(Int64) + elif feature_dtype == "float": + dtype = Array(Float32) + elif feature_dtype == "bool": + dtype = Array(Bool) + elif feature_dtype == "datetime": + dtype = Array(UnixTimestamp) + else: + if feature_dtype == "int32": + dtype = Int32 + elif feature_dtype == "int64": + dtype = Int64 + elif feature_dtype == "float": + dtype = Float32 + elif feature_dtype == "bool": + dtype = Bool + elif feature_dtype == "datetime": + dtype = UnixTimestamp + assert dtype + return dtype + + +def populate_test_configs(): + feature_dtypes = [ + "int32", + "int64", + "float", + "bool", + "datetime", + ] + configs: List[TypeTestConfig] = [] + for feature_dtype in feature_dtypes: + for feature_is_list in [True, False]: + for has_empty_list in [True, False]: + # For non list features `has_empty_list` does nothing + if feature_is_list is False and has_empty_list is True: + continue + + configs.append( + TypeTestConfig( + feature_dtype=feature_dtype, + feature_is_list=feature_is_list, + has_empty_list=has_empty_list, + ) + ) + return configs + + +def get_type_test_fixtures(request, environment): + config: TypeTestConfig = request.param + # Lower case needed because Redshift lower-cases all table names + destination_name = ( + f"feature_type_{config.feature_dtype}{config.feature_is_list}".replace( + ".", "" + ).lower() + ) + df = create_basic_driver_dataset( + Int64, + config.feature_dtype, + config.feature_is_list, + config.has_empty_list, + ) + data_source = environment.data_source_creator.create_data_source( + df, + destination_name=destination_name, + field_mapping={"ts_1": "ts"}, + ) + fv = driver_feature_view( + data_source=data_source, + name=destination_name, + dtype=get_feast_type(config.feature_dtype, config.feature_is_list), + ) + + return config, data_source, fv diff --git a/ui/feature_repo/apply_permissions.py b/ui/feature_repo/apply_permissions.py 
index b7d39733634..4a62b8de9c3 100644 --- a/ui/feature_repo/apply_permissions.py +++ b/ui/feature_repo/apply_permissions.py @@ -11,15 +11,17 @@ store = FeatureStore(repo_path=".") -store.apply([ - zipcode_features_permission, - zipcode_source_permission, - model_v1_permission, - risky_features_permission, - document_embeddings_permission, - document_metadata_permission, - rag_model_permission, -]) +store.apply( + [ + zipcode_features_permission, + zipcode_source_permission, + model_v1_permission, + risky_features_permission, + document_embeddings_permission, + document_metadata_permission, + rag_model_permission, + ] +) print("Permissions applied successfully!") print("Current permissions:", store.list_permissions()) diff --git a/ui/feature_repo/apply_rag_data.py b/ui/feature_repo/apply_rag_data.py index 67d000f9d6c..be20dab38ab 100644 --- a/ui/feature_repo/apply_rag_data.py +++ b/ui/feature_repo/apply_rag_data.py @@ -5,28 +5,32 @@ now = datetime.now() embeddings = [] for i in range(10): - embeddings.append({ - 'document_id': f'doc_{i}', - 'embedding': np.random.rand(768).astype(np.float32), - 'event_timestamp': now - timedelta(days=i), - 'created_timestamp': now - timedelta(days=i, hours=1) - }) + embeddings.append( + { + "document_id": f"doc_{i}", + "embedding": np.random.rand(768).astype(np.float32), + "event_timestamp": now - timedelta(days=i), + "created_timestamp": now - timedelta(days=i, hours=1), + } + ) df_embeddings = pd.DataFrame(embeddings) -df_embeddings.to_parquet('data/document_embeddings.parquet', index=False) +df_embeddings.to_parquet("data/document_embeddings.parquet", index=False) metadata = [] for i in range(10): - metadata.append({ - 'document_id': f'doc_{i}', - 'title': f'Document {i}', - 'content': f'This is the content of document {i}', - 'source': 'web', - 'author': f'author_{i}', - 'publish_date': (now - timedelta(days=i*30)).strftime('%Y-%m-%d'), - 'event_timestamp': now - timedelta(days=i), - 'created_timestamp': now - timedelta(days=i, 
hours=1) - }) + metadata.append( + { + "document_id": f"doc_{i}", + "title": f"Document {i}", + "content": f"This is the content of document {i}", + "source": "web", + "author": f"author_{i}", + "publish_date": (now - timedelta(days=i * 30)).strftime("%Y-%m-%d"), + "event_timestamp": now - timedelta(days=i), + "created_timestamp": now - timedelta(days=i, hours=1), + } + ) df_metadata = pd.DataFrame(metadata) -df_metadata.to_parquet('data/document_metadata.parquet', index=False) +df_metadata.to_parquet("data/document_metadata.parquet", index=False) -print('Created RAG data files successfully!') +print("Created RAG data files successfully!") diff --git a/ui/feature_repo/features.py b/ui/feature_repo/features.py index 102dec74c7b..1c6854e257a 100644 --- a/ui/feature_repo/features.py +++ b/ui/feature_repo/features.py @@ -1,7 +1,6 @@ from datetime import timedelta import pandas as pd -import numpy as np from feast import Entity, FeatureService, FeatureView, Field, FileSource from feast.data_source import RequestSource @@ -139,6 +138,7 @@ ], ) + # Define an on demand feature view which can generate new features based on # existing feature views and RequestSource features @on_demand_feature_view( @@ -306,6 +306,7 @@ def transaction_gt_last_credit_card_due(inputs: pd.DataFrame) -> pd.DataFrame: ], ) + # Define an on-demand feature view for similarity calculation @on_demand_feature_view( sources=[document_embeddings_view, query_request], @@ -319,6 +320,7 @@ def document_similarity(inputs: pd.DataFrame) -> pd.DataFrame: df["similarity_score"] = 0.95 # Placeholder value return df + rag_model = FeatureService( name="rag_retriever", features=[ diff --git a/ui/package.json b/ui/package.json index 910ca25c2dd..d2d8814ca7f 100644 --- a/ui/package.json +++ b/ui/package.json @@ -1,6 +1,6 @@ { "name": "@feast-dev/feast-ui", - "version": "0.60.0", + "version": "0.61.0", "private": false, "files": [ "dist"