diff --git a/.github/workflows/java_pr.yml b/.github/workflows/java_pr.yml index 9906c246ecd..a87542b9454 100644 --- a/.github/workflows/java_pr.yml +++ b/.github/workflows/java_pr.yml @@ -54,7 +54,7 @@ jobs: integration-test: # all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. if: - (github.event.action == 'labeled' && (github.event.label.name == 'lgtm' || github.event.label.name == 'approved' || github.event.label.name == 'ok-to-test')) || + (github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'ok-to-test')) || (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved'))) runs-on: ubuntu-latest needs: unit-test-java diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml index 971282966c5..b57126ca6c1 100644 --- a/.github/workflows/linter.yml +++ b/.github/workflows/linter.yml @@ -22,7 +22,7 @@ jobs: go-version: 1.17.7 - name: Upgrade pip version run: | - pip install --upgrade "pip>=21.3.1" + pip install --upgrade "pip>=21.3.1,<22.1" - name: Get pip cache dir id: pip-cache run: | @@ -62,7 +62,7 @@ jobs: python-version: "3.7" - name: Upgrade pip version run: | - pip install --upgrade "pip>=21.3.1" + pip install --upgrade "pip>=21.3.1,<22.1" - name: Install dependencies run: make install-go-proto-dependencies - name: Lint go diff --git a/.github/workflows/master_only.yml b/.github/workflows/master_only.yml index 5546686b247..2042987617b 100644 --- a/.github/workflows/master_only.yml +++ b/.github/workflows/master_only.yml @@ -107,7 +107,7 @@ jobs: run: aws sts get-caller-identity - name: Upgrade pip version run: | - pip install --upgrade "pip>=21.3.1" + pip install --upgrade "pip>=21.3.1,<22.1" - name: Get pip cache dir id: pip-cache run: | diff --git a/.github/workflows/pr_integration_tests.yml b/.github/workflows/pr_integration_tests.yml index 
2373f96ba7a..089d9f47336 100644 --- a/.github/workflows/pr_integration_tests.yml +++ b/.github/workflows/pr_integration_tests.yml @@ -14,10 +14,10 @@ on: jobs: build-docker-image: - # all jobs MUST have this if check for 'ok-to-test' or 'approved' or 'lgtm' for security purposes. + # all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. if: - (github.event.action == 'labeled' && (github.event.label.name == 'lgtm' || github.event.label.name == 'approved' || github.event.label.name == 'ok-to-test')) || - (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved'))) + (github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || + (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm'))) runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 @@ -72,10 +72,10 @@ jobs: outputs: DOCKER_IMAGE_TAG: ${{ steps.image-tag.outputs.DOCKER_IMAGE_TAG }} integration-test-python: - # all jobs MUST have this if check for 'ok-to-test' or 'approved' or 'lgtm' for security purposes. + # all jobs MUST have this if check for 'ok-to-test' or 'approved' for security purposes. 
if: - (github.event.action == 'labeled' && (github.event.label.name == 'lgtm' || github.event.label.name == 'approved' || github.event.label.name == 'ok-to-test')) || - (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved'))) + (github.event.action == 'labeled' && (github.event.label.name == 'approved' || github.event.label.name == 'lgtm' || github.event.label.name == 'ok-to-test')) || + (github.event.action != 'labeled' && (contains(github.event.pull_request.labels.*.name, 'ok-to-test') || contains(github.event.pull_request.labels.*.name, 'approved') || contains(github.event.pull_request.labels.*.name, 'lgtm'))) needs: build-docker-image runs-on: ${{ matrix.os }} strategy: @@ -133,7 +133,7 @@ jobs: run: aws sts get-caller-identity - name: Upgrade pip version run: | - pip install --upgrade "pip>=21.3.1" + pip install --upgrade "pip>=21.3.1,<22.1" - name: Get pip cache dir id: pip-cache run: | diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index f8a8d40fb34..dad6eddc862 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -126,95 +126,160 @@ jobs: run: ./infra/scripts/helm/push-helm-charts.sh $VERSION_WITHOUT_PREFIX publish-python-sdk: + runs-on: ubuntu-latest + needs: [verify-python-wheel] + steps: + - uses: actions/download-artifact@v2 + with: + name: wheels + path: dist + - uses: pypa/gh-action-pypi-publish@v1.4.2 + with: + user: __token__ + password: ${{ secrets.PYPI_PASSWORD }} + + + verify-python-wheel: runs-on: ${{ matrix.os }} + needs: [build-python-sdk, build-python-sdk-macos-py310] strategy: - fail-fast: false matrix: - python-version: [ "3.7", "3.8", "3.9", "3.10" ] - os: [ ubuntu-latest, macOS-latest ] - compile-go: [ True ] - include: - - python-version: "3.7" - os: ubuntu-latest - compile-go: False + os: [ ubuntu-latest, macos-10.15 ] + python-version: [ "3.7", "3.8", "3.9", "3.10"] + 
from-source: [ True, False ] env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - COMPILE_GO: ${{ matrix.compile-go }} + # this script is for testing servers + # it starts server with timeout and checks whether process killed by timeout (started healthy) or died by itself + TEST_SCRIPT: | + timeout 10s $@ & pid=$! + wait $pid + ret=$? + if [[ $ret -ne 124 ]] + then + exit $ret + else + echo "Succeeded!" + fi steps: - - uses: actions/checkout@v2 - name: Setup Python id: setup-python uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} architecture: x64 - - name: Setup Go - id: setup-go - uses: actions/setup-go@v2 + - uses: actions/setup-go@v3 + with: + go-version: '>=1.17.0' + - uses: actions/download-artifact@v2 with: - go-version: 1.17.7 - - name: Upgrade pip version + name: wheels + path: dist + - name: Install wheel + if: ${{ !matrix.from-source }} + # try to install all wheels; only the current platform wheel should be actually installed run: | - pip install --upgrade "pip>=21.3.1" - - name: Install pip-tools - run: pip install pip-tools - - name: Install dependencies - run: make install-python-ci-dependencies PYTHON=${{ matrix.python-version }} - - name: Publish Python Package + cd dist/ + pip install wheel + for f in *.whl; do pip install $f || true; done + - name: Install sdist + if: ${{ matrix.from-source }} + env: + COMPILE_GO: "True" run: | - cd sdk/python - python3 -m pip install --user --upgrade setuptools wheel twine - python3 setup.py sdist bdist_wheel - python3 -m twine upload --verbose dist/*.whl + pip install 'grpcio-tools==1.44.0' 'pybindgen==0.22.0' + go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.26.0 + go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.1.0 + pip install dist/*tar.gz + - name: Install OS X dependencies + if: matrix.os == 'macos-10.15' + run: brew install coreutils + - name: Smoke test + run: | + feast init test_repo + cd test_repo/ + feast apply + echo 
"$TEST_SCRIPT" > run-and-wait.sh + bash run-and-wait.sh feast serve + bash run-and-wait.sh feast ui + + pip install cffi + printf "\ngo_feature_retrieval: True" >> feature_store.yaml + bash run-and-wait.sh feast serve - publish-python-sdk-no-telemetry: + build-python-sdk: + name: Build wheels on ${{ matrix.os }} runs-on: ${{ matrix.os }} strategy: - fail-fast: false matrix: - python-version: [ "3.7", "3.8", "3.9", "3.10" ] - os: [ ubuntu-latest, macOS-latest ] - compile-go: [ True ] - include: - - python-version: "3.7" - os: ubuntu-latest - compile-go: False - needs: get-version + os: [ ubuntu-latest, macos-10.15 ] + steps: + - uses: actions/checkout@v2 + - name: Setup Node + uses: actions/setup-node@v2 + with: + node-version: '17.x' + registry-url: 'https://registry.npmjs.org' + - name: Build UI + run: make build-ui + - name: Build wheels + uses: pypa/cibuildwheel@v2.4.0 + env: + CIBW_BUILD: "cp3*_x86_64" + CIBW_SKIP: "cp36-* *-musllinux_x86_64 cp310-macosx_x86_64" + CIBW_ARCHS: "native" + CIBW_ENVIRONMENT: > + COMPILE_GO=True PATH=$PATH:/usr/local/go/bin + CIBW_BEFORE_ALL_LINUX: | + curl -o go.tar.gz https://dl.google.com/go/go1.18.2.linux-amd64.tar.gz + tar -C /usr/local -xzf go.tar.gz + go version + CIBW_BEFORE_ALL_MACOS: | + curl -o python.pkg https://www.python.org/ftp/python/3.9.12/python-3.9.12-macosx10.9.pkg + sudo installer -pkg python.pkg -target / + CIBW_BEFORE_BUILD: | + make install-protoc-dependencies + make install-go-proto-dependencies + make install-go-ci-dependencies + + - uses: actions/upload-artifact@v2 + with: + name: wheels + path: ./wheelhouse/*.whl + + + build-python-sdk-macos-py310: + runs-on: macos-10.15 env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - COMPILE_GO: ${{ matrix.compile-go }} + COMPILE_GO: True steps: - uses: actions/checkout@v2 - name: Setup Python id: setup-python uses: actions/setup-python@v2 with: - python-version: ${{ matrix.python-version }} + python-version: "3.10" architecture: x64 - - 
name: Setup Go - id: setup-go - uses: actions/setup-go@v2 + - name: Setup Node + uses: actions/setup-node@v2 with: - go-version: 1.17.7 - - name: Upgrade pip version + node-version: '17.x' + registry-url: 'https://registry.npmjs.org' + - name: Build and install dependencies run: | - pip install --upgrade "pip>=21.3.1" - - name: Install pip-tools - run: pip install pip-tools - - name: Install dependencies - run: make install-python-ci-dependencies PYTHON=${{ matrix.python-version }} - - name: Publish Python Package - env: - SETUPTOOLS_SCM_PRETEND_VERSION: ${{ needs.get-version.outputs.version_without_prefix }} + pip install -U pip setuptools wheel twine + make install-protoc-dependencies + make install-go-proto-dependencies + make install-go-ci-dependencies + make build-ui + - name: Build run: | - cd sdk/python - sed -i 's/DEFAULT_FEAST_USAGE_VALUE = "True"/DEFAULT_FEAST_USAGE_VALUE = "False"/g' feast/constants.py - sed -i 's/NAME = "feast"/NAME = "feast-no-telemetry"/g' setup.py - python3 -m pip install --user --upgrade setuptools wheel twine python3 setup.py sdist bdist_wheel - python3 -m twine upload --verbose dist/*.whl + + - uses: actions/upload-artifact@v2 + with: + name: wheels + path: dist/* + publish-java-sdk: container: maven:3.6-jdk-11 @@ -268,6 +333,7 @@ jobs: working-directory: ./ui run: yarn build:lib - name: Publish UI package + working-directory: ./ui run: npm publish env: NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}} diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index 8f37e2e2a36..3573fe26568 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -9,6 +9,13 @@ jobs: matrix: python-version: [ "3.7", "3.8", "3.9", "3.10" ] os: [ ubuntu-latest, macOS-latest] + exclude: + - os: macOS-latest + python-version: "3.8" + - os: macOS-latest + python-version: "3.9" + - os: macOS-latest + python-version: "3.10" env: OS: ${{ matrix.os }} PYTHON: ${{ matrix.python-version }} @@ -27,7 +34,7 @@ jobs: 
go-version: 1.17.7 - name: Upgrade pip version run: | - pip install --upgrade "pip>=21.3.1" + pip install --upgrade "pip>=21.3.1,<22.1" - name: Get pip cache dir id: pip-cache run: | @@ -75,7 +82,7 @@ jobs: python-version: "3.7" - name: Upgrade pip version run: | - pip install --upgrade "pip>=21.3.1" + pip install --upgrade "pip>=21.3.1,<22.1" - name: Setup Go id: setup-go uses: actions/setup-go@v2 diff --git a/.gitignore b/.gitignore index 0f3165e8414..6a86eb2682b 100644 --- a/.gitignore +++ b/.gitignore @@ -105,7 +105,7 @@ coverage.xml .hypothesis/ .pytest_cache/ infra/scripts/*.conf -go/cmd/server/logging/feature_repo/data/ +go/internal/test/feature_repo/data/ # Translations *.mo @@ -200,6 +200,8 @@ go/protos/ # Feast UI dependencies +sdk/python/feast/ui/node_modules +sdk/python/feast/ui/build ui/node_modules ui/.pnp ui/.pnp.js diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 82721e21e30..f46f2af604f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,13 +8,16 @@ repos: stages: [ push ] language: system entry: make format + pass_filenames: false - id: lint name: Lint stages: [ push ] language: system entry: make lint + pass_filenames: false - id: template name: Build Templates stages: [ commit ] language: system - entry: make build-templates \ No newline at end of file + entry: make build-templates + pass_filenames: false \ No newline at end of file diff --git a/.releaserc.js b/.releaserc.js index 8cdcc1f2779..2acf9b73506 100644 --- a/.releaserc.js +++ b/.releaserc.js @@ -57,7 +57,8 @@ module.exports = { assets: [ "CHANGELOG.md", "java/pom.xml", - "infra/charts/**/*.*" + "infra/charts/**/*.*", + "ui/package.json" ], message: "chore(release): release ${nextRelease.version}\n\n${nextRelease.notes}" } diff --git a/CHANGELOG.md b/CHANGELOG.md index bcb6f8cde3e..9ef9d0ec369 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,70 @@ # Changelog +# [0.21.0](https://github.com/feast-dev/feast/compare/v0.20.0...v0.21.0) 
(2022-05-13) + + +### Bug Fixes + +* Addresses ZeroDivisionError when materializing file source with same timestamps ([#2551](https://github.com/feast-dev/feast/issues/2551)) ([1e398d9](https://github.com/feast-dev/feast/commit/1e398d9089dc4674be73ea0869efb0aede50cd93)) +* Asynchronously refresh registry for the feast ui command ([#2672](https://github.com/feast-dev/feast/issues/2672)) ([1b09ca2](https://github.com/feast-dev/feast/commit/1b09ca26e0ab7ba5593278e039bc64d293634e67)) +* Build platform specific python packages with ci-build-wheel ([#2555](https://github.com/feast-dev/feast/issues/2555)) ([b10a4cf](https://github.com/feast-dev/feast/commit/b10a4cfec40514d5c36cefdb4be24d39fdb1e74a)) +* Delete data sources from registry when using the diffing logic ([#2669](https://github.com/feast-dev/feast/issues/2669)) ([fc00ca8](https://github.com/feast-dev/feast/commit/fc00ca8fc091ab2642121de69d4624783f11445c)) +* Enforce kw args featureservice ([#2575](https://github.com/feast-dev/feast/issues/2575)) ([160d7b7](https://github.com/feast-dev/feast/commit/160d7b71a74531909fab575cb263d02b3996fac2)) +* Enforce kw args in datasources ([#2567](https://github.com/feast-dev/feast/issues/2567)) ([0b7ec53](https://github.com/feast-dev/feast/commit/0b7ec5386e66d21ac9e6d728e14e3663f27cf146)) +* Feature logging to Redshift is broken ([#2655](https://github.com/feast-dev/feast/issues/2655)) ([479cd51](https://github.com/feast-dev/feast/commit/479cd512213635551e70ae599564a898046461b2)) +* Feature service to templates ([#2649](https://github.com/feast-dev/feast/issues/2649)) ([1e02066](https://github.com/feast-dev/feast/commit/1e0206627fef891aa3a3f0b0f46507449e3663e7)) +* Feature with timestamp type is incorrectly interpreted by Go FS ([#2588](https://github.com/feast-dev/feast/issues/2588)) ([e3d9588](https://github.com/feast-dev/feast/commit/e3d958863b32df0158b278fb97d392397cdff672)) +* Fix `__hash__` methods ([#2556](https://github.com/feast-dev/feast/issues/2556)) 
([ebb7dfe](https://github.com/feast-dev/feast/commit/ebb7dfeaee9dc337a90f5d6b984ad07365018d42)) +* Fix AWS bootstrap template ([#2604](https://github.com/feast-dev/feast/issues/2604)) ([c94a69c](https://github.com/feast-dev/feast/commit/c94a69cff541feb1557770bdaf77cd7cc4f2f919)) +* Fix broken proto conversion methods for data sources ([#2603](https://github.com/feast-dev/feast/issues/2603)) ([00ed65a](https://github.com/feast-dev/feast/commit/00ed65a77177cfe04877e9550d1c8c1e903dadf8)) +* Fix case where on demand feature view tab is broken if no custom tabs are passed. ([#2682](https://github.com/feast-dev/feast/issues/2682)) ([01d3568](https://github.com/feast-dev/feast/commit/01d3568168bb9febb9fbda4988283b3886c32a31)) +* Fix DynamoDB fetches when there are entities that are not found ([#2573](https://github.com/feast-dev/feast/issues/2573)) ([7076fe0](https://github.com/feast-dev/feast/commit/7076fe0483de50af21fe7d7e7da192823f66c3da)) +* Fix Feast UI parser to work with new APIs ([#2668](https://github.com/feast-dev/feast/issues/2668)) ([8d76751](https://github.com/feast-dev/feast/commit/8d76751d1e702ee7059def7721b97cfcdf9f6300)) +* Fix java server after odfv update ([#2602](https://github.com/feast-dev/feast/issues/2602)) ([0ca6297](https://github.com/feast-dev/feast/commit/0ca62970dd6bc33c00bd5d8b828752814d480588)) +* Fix materialization with ttl=0 bug ([#2666](https://github.com/feast-dev/feast/issues/2666)) ([ab78702](https://github.com/feast-dev/feast/commit/ab78702e20b4aaff13497dc8d29495f58ae5ee36)) +* Fix push sources and add docs / tests pushing via the python feature server ([#2561](https://github.com/feast-dev/feast/issues/2561)) ([e8e418e](https://github.com/feast-dev/feast/commit/e8e418ed87481638e098d52ee2ca2d0327b63cc9)) +* Fixed data mapping errors for Snowflake ([#2558](https://github.com/feast-dev/feast/issues/2558)) ([53c2ce2](https://github.com/feast-dev/feast/commit/53c2ce2c2255791fdd85aa80d69ab6214106e169)) +* Forcing ODFV udfs to be __main__ 
module and fixing false positive duplicate data source warning ([#2677](https://github.com/feast-dev/feast/issues/2677)) ([2ce33cd](https://github.com/feast-dev/feast/commit/2ce33cdd5e0a6a855e60c067aadafddf10ea2359)) +* Include the ui/build directory, and remove package data ([#2681](https://github.com/feast-dev/feast/issues/2681)) ([0384f5f](https://github.com/feast-dev/feast/commit/0384f5f78765a3d43534919428c4cc4744a101f2)) +* Infer features for feature services when they depend on feature views without schemas ([#2653](https://github.com/feast-dev/feast/issues/2653)) ([87c194c](https://github.com/feast-dev/feast/commit/87c194c22b2732c7060a130bb8861eff0d168c0b)) +* Pin dependencies to nearest major version ([#2647](https://github.com/feast-dev/feast/issues/2647)) ([bb72b7c](https://github.com/feast-dev/feast/commit/bb72b7c9f13442d6350867359ab764fe67c74c4c)) +* Pin pip<22.1 to get around breaking change in pip==22.1 ([#2678](https://github.com/feast-dev/feast/issues/2678)) ([d3e01bc](https://github.com/feast-dev/feast/commit/d3e01bc74da9f4678d3cf384afd4616a299f32fd)) +* Punt deprecation warnings and clean up some warnings. 
([#2670](https://github.com/feast-dev/feast/issues/2670)) ([f775d2e](https://github.com/feast-dev/feast/commit/f775d2e8a4a185e59031504e6e8eda8b330d066c)) +* Reject undefined features when using `get_historical_features` or `get_online_features` ([#2665](https://github.com/feast-dev/feast/issues/2665)) ([36849fb](https://github.com/feast-dev/feast/commit/36849fb76716ba663d347369dfec3f3bbfcac3f8)) +* Remove ci extra from the feature transformation server dockerfile ([#2618](https://github.com/feast-dev/feast/issues/2618)) ([25613b4](https://github.com/feast-dev/feast/commit/25613b414470e8559b9d7b1f9bb74e269fea08c8)) +* Remove incorrect call to logging.basicConfig ([#2676](https://github.com/feast-dev/feast/issues/2676)) ([8cbf51c](https://github.com/feast-dev/feast/commit/8cbf51ca2ba3f672b6dc0e820ac18d4608220abf)) +* Small typo in CLI ([#2578](https://github.com/feast-dev/feast/issues/2578)) ([f372981](https://github.com/feast-dev/feast/commit/f3729812d0666a10290d3a5614729845058f835d)) +* Switch from `join_key` to `join_keys` in tests and docs ([#2580](https://github.com/feast-dev/feast/issues/2580)) ([d66c931](https://github.com/feast-dev/feast/commit/d66c931026823f77cbfd40bbe82035eb337e417d)) +* Teardown trino container correctly after tests ([#2562](https://github.com/feast-dev/feast/issues/2562)) ([72f1558](https://github.com/feast-dev/feast/commit/72f155882c95f21573b31a613edf066bdb55f630)) +* Update build_go_protos to use a consistent python path ([#2550](https://github.com/feast-dev/feast/issues/2550)) ([f136f8c](https://github.com/feast-dev/feast/commit/f136f8cc6c7feade73466aeb6267500377089485)) +* Update data source timestamp inference error message to make sense ([#2636](https://github.com/feast-dev/feast/issues/2636)) ([3eaf6b7](https://github.com/feast-dev/feast/commit/3eaf6b79a420ced39f46ca25dddd14b2c69c8ae4)) +* Update field api to add tag parameter corresponding to labels in Feature. 
([#2610](https://github.com/feast-dev/feast/issues/2610)) ([689d20b](https://github.com/feast-dev/feast/commit/689d20bd0e4da91b9d24878d86787dede0c68f6d)) +* Update java integration tests and add more logging ([#2637](https://github.com/feast-dev/feast/issues/2637)) ([10e23b4](https://github.com/feast-dev/feast/commit/10e23b437cd83fe62c8e10c0f577781322970947)) +* Update on demand feature view api ([#2587](https://github.com/feast-dev/feast/issues/2587)) ([38cd7f9](https://github.com/feast-dev/feast/commit/38cd7f9ec3d4bf8d81f7f5d65110529b646ba09f)) +* Update RedisCluster to use redis-py official implementation ([#2554](https://github.com/feast-dev/feast/issues/2554)) ([ce5606f](https://github.com/feast-dev/feast/commit/ce5606f335e11f6efa13d638072c71062e63420d)) +* Use cwd when getting module path ([#2577](https://github.com/feast-dev/feast/issues/2577)) ([b550e59](https://github.com/feast-dev/feast/commit/b550e591f815aff53accdd064589ef06b6607d97)) +* Use ParquetDataset for Schema Inference ([#2686](https://github.com/feast-dev/feast/issues/2686)) ([4f85e3e](https://github.com/feast-dev/feast/commit/4f85e3e6b3bb5ca92595aed28db68ada58abbb75)) +* Use timestamp type when converting unixtimestamp feature type to arrow ([#2593](https://github.com/feast-dev/feast/issues/2593)) ([c439611](https://github.com/feast-dev/feast/commit/c4396118a99392a7b284b635b80486c94e0c86c8)) + + +### Features + +* Add hbase online store support in feast ([#2590](https://github.com/feast-dev/feast/issues/2590)) ([c9eda79](https://github.com/feast-dev/feast/commit/c9eda79c7b1169ef05a481a96f07960c014e88b9)) +* Adding SSL options for Postgres ([#2644](https://github.com/feast-dev/feast/issues/2644)) ([0e809c2](https://github.com/feast-dev/feast/commit/0e809c2c86a8687397b54a9e073d6d2abcafd35f)) +* Allow Feast UI to be spun up with CLI command: feast ui ([#2667](https://github.com/feast-dev/feast/issues/2667)) 
([44ca9f5](https://github.com/feast-dev/feast/commit/44ca9f5f7eb495e23dd9a603b92ca6fd348e9ddb)) +* Allow to pass secrets and environment variables to transformation service ([#2632](https://github.com/feast-dev/feast/issues/2632)) ([ffa33ad](https://github.com/feast-dev/feast/commit/ffa33ad9860e3220f8c2bb5374c1fb86f716a0c7)) +* CLI command 'feast serve' should start go-based server if flag is enabled ([#2617](https://github.com/feast-dev/feast/issues/2617)) ([f3ff812](https://github.com/feast-dev/feast/commit/f3ff812a15d46287ed12918396167b6ad295cdc9)) +* Create stream and batch feature view abstractions ([#2559](https://github.com/feast-dev/feast/issues/2559)) ([d1f76e5](https://github.com/feast-dev/feast/commit/d1f76e5aa5d2e2006296b3ca2cd9b08816ea0d9b)) +* Postgres supported as Registry, Online store, and Offline store ([#2401](https://github.com/feast-dev/feast/issues/2401)) ([ed2f979](https://github.com/feast-dev/feast/commit/ed2f979fab0bb9ca4ea6f8e56cc55b0b9ad107a6)) +* Support entity fields in feature view `schema` parameter by dropping them ([#2568](https://github.com/feast-dev/feast/issues/2568)) ([c8fcc35](https://github.com/feast-dev/feast/commit/c8fcc3504c7438961d0dc05869e3a2fca5b11f20)) +* Write logged features to an offline store (Python API) ([#2574](https://github.com/feast-dev/feast/issues/2574)) ([134dc5f](https://github.com/feast-dev/feast/commit/134dc5f7e86e7c88df2b3b313c41eb8f89def6e2)) +* Write logged features to Offline Store (Go - Python integration) ([#2621](https://github.com/feast-dev/feast/issues/2621)) ([ccad832](https://github.com/feast-dev/feast/commit/ccad832a4457f5e6cbc001cbadd91b43d98d282d)) + + +### Reverts + +* Revert "chore: Deprecate value type (#2611)" (#2643) ([4fbdfb1](https://github.com/feast-dev/feast/commit/4fbdfb1da90b83978c431ce140ce113bf8284b9a)), closes [#2611](https://github.com/feast-dev/feast/issues/2611) [#2643](https://github.com/feast-dev/feast/issues/2643) + # 
[0.20.0](https://github.com/feast-dev/feast/compare/v0.19.0...v0.20.0) (2022-04-14) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index cb17012eea3..9cce520d346 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -89,9 +89,14 @@ source venv/bin/activate pip install --upgrade pip ``` -4. Install development dependencies for Feast Python SDK / CLI +4. (Optional): Install Node & Yarn. Then run the following to build Feast UI artifacts for use in `feast ui` +``` +make build-ui +``` + +5Install development dependencies for Feast Python SDK / CLI ```sh -pip install -e "sdk/python[dev]" +pip install -e ".[dev]" ``` ### Code Style & Linting @@ -176,6 +181,9 @@ The services with containerized replacements currently implemented are: - Datastore - DynamoDB - Redis +- Trino +- HBase +- Postgres You can run `make test-python-integration-container` to run tests against the containerized versions of dependencies. diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 00000000000..96f7c38c8a5 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,8 @@ +prune sdk/python/tests +prune ui +prune java/ +prune docs +prune infra +prune examples + +graft sdk/python/feast/ui/build diff --git a/Makefile b/Makefile index 4d961a04725..e208ab3c451 100644 --- a/Makefile +++ b/Makefile @@ -37,24 +37,24 @@ build: protos build-java build-docker # Python SDK install-python-ci-dependencies: install-go-proto-dependencies install-go-ci-dependencies - cd sdk/python && python -m piptools sync requirements/py$(PYTHON)-ci-requirements.txt - cd sdk/python && COMPILE_GO=true python setup.py develop + python -m piptools sync sdk/python/requirements/py$(PYTHON)-ci-requirements.txt + COMPILE_GO=true python setup.py develop lock-python-ci-dependencies: - cd sdk/python && python -m piptools compile -U --extra ci --output-file requirements/py$(PYTHON)-ci-requirements.txt + python -m piptools compile -U --extra ci --output-file sdk/python/requirements/py$(PYTHON)-ci-requirements.txt package-protos: cp -r 
${ROOT_DIR}/protos ${ROOT_DIR}/sdk/python/feast/protos compile-protos-python: - cd sdk/python && python setup.py build_python_protos + python setup.py build_python_protos install-python: - cd sdk/python && python -m piptools sync requirements/py$(PYTHON)-requirements.txt - cd sdk/python && python setup.py develop + python -m piptools sync sdk/python/requirements/py$(PYTHON)-requirements.txt + python setup.py develop lock-python-dependencies: - cd sdk/python && python -m piptools compile -U --output-file requirements/py$(PYTHON)-requirements.txt + python -m piptools compile -U --output-file sdk/python/requirements/py$(PYTHON)-requirements.txt benchmark-python: FEAST_USAGE=False IS_TEST=True python -m pytest --integration --benchmark --benchmark-autosave --benchmark-save-data sdk/python/tests @@ -72,13 +72,43 @@ test-python-integration-container: FEAST_USAGE=False IS_TEST=True FEAST_LOCAL_ONLINE_CONTAINER=True python -m pytest -n 8 --integration sdk/python/tests test-python-universal-contrib: - PYTHONPATH='.' FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.contrib_repo_configuration FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 --integration --universal sdk/python/tests + PYTHONPATH='.' \ + FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.contrib_repo_configuration \ + PYTEST_PLUGINS=feast.infra.offline_stores.contrib.trino_offline_store.tests \ + FEAST_USAGE=False IS_TEST=True \ + python -m pytest -n 8 --integration \ + -k "not test_historical_retrieval_fails_on_validation and \ + not test_historical_retrieval_with_validation and \ + not test_historical_features_persisting and \ + not test_historical_retrieval_fails_on_validation and \ + not test_universal_cli and \ + not test_go_feature_server and \ + not test_feature_logging and \ + not test_universal_types" \ + sdk/python/tests + +test-python-universal-postgres: + PYTHONPATH='.' 
\ + FULL_REPO_CONFIGS_MODULE=sdk.python.feast.infra.offline_stores.contrib.postgres_repo_configuration \ + PYTEST_PLUGINS=sdk.python.feast.infra.offline_stores.contrib.postgres_offline_store.tests \ + FEAST_USAGE=False \ + IS_TEST=True \ + python -m pytest -x --integration \ + -k "not test_historical_retrieval_fails_on_validation and \ + not test_historical_retrieval_with_validation and \ + not test_historical_features_persisting and \ + not test_historical_retrieval_fails_on_validation and \ + not test_universal_cli and \ + not test_go_feature_server and \ + not test_feature_logging and \ + not test_universal_types" \ + sdk/python/tests test-python-universal-local: - FEAST_USAGE=False IS_TEST=True FEAST_IS_LOCAL_TEST=True python -m pytest -n 8 --integration --universal sdk/python/tests + FEAST_USAGE=False IS_TEST=True FEAST_IS_LOCAL_TEST=True python -m pytest -n 8 --integration sdk/python/tests test-python-universal: - FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 --integration --universal sdk/python/tests + FEAST_USAGE=False IS_TEST=True python -m pytest -n 8 --integration sdk/python/tests test-python-go-server: compile-go-lib FEAST_USAGE=False IS_TEST=True FEAST_GO_FEATURE_RETRIEVAL=True pytest --integration --goserver sdk/python/tests @@ -128,7 +158,7 @@ start-trino-locally: sleep 15 test-trino-plugin-locally: - cd ${ROOT_DIR}/sdk/python; FULL_REPO_CONFIGS_MODULE=feast.infra.offline_stores.contrib.trino_offline_store.test_config.manual_tests FEAST_USAGE=False IS_TEST=True python -m pytest --integration --universal tests/ + cd ${ROOT_DIR}/sdk/python; FULL_REPO_CONFIGS_MODULE=feast.infra.offline_stores.contrib.trino_offline_store.test_config.manual_tests FEAST_USAGE=False IS_TEST=True python -m pytest --integration tests/ kill-trino-locally: cd ${ROOT_DIR}; docker stop trino @@ -145,27 +175,27 @@ install-go-ci-dependencies: go get github.com/go-python/gopy go install golang.org/x/tools/cmd/goimports go install github.com/go-python/gopy + python -m pip 
install pybindgen==0.22.0 install-protoc-dependencies: - pip install grpcio-tools==1.34.0 + pip install grpcio-tools==1.44.0 mypy-protobuf==3.1.0 compile-protos-go: install-go-proto-dependencies install-protoc-dependencies - cd sdk/python && python setup.py build_go_protos + python setup.py build_go_protos compile-go-lib: install-go-proto-dependencies install-go-ci-dependencies - python -m pip install pybindgen==0.22.0 - cd sdk/python && python setup.py build_go_lib + COMPILE_GO=True python setup.py build_ext --inplace # Needs feast package to setup the feature store test-go: compile-protos-go - pip install -e "sdk/python[ci]" + pip install -e ".[ci]" go test ./... format-go: gofmt -s -w go/ lint-go: compile-protos-go - go vet ./go/internal/feast ./go/cmd/server + go vet ./go/internal/feast ./go/embedded # Docker @@ -231,3 +261,9 @@ build-sphinx: compile-protos-python build-templates: python infra/scripts/compile-templates.py + +# Web UI + +# Note: requires node and yarn to be installed +build-ui: + cd $(ROOT_DIR)/sdk/python/feast/ui && yarn install && npm run build --omit=dev diff --git a/README.md b/README.md index b0cc61c91dc..5905bbd647f 100644 --- a/README.md +++ b/README.md @@ -48,6 +48,9 @@ feast apply ### 4. Explore your data in the web UI (experimental) ![Web UI](ui/sample.png) +```commandline +feast ui +``` ### 5. 
Build a training dataset ```python @@ -146,9 +149,9 @@ The list below contains the functionality that contributors are planning to deve * [x] [Parquet file source](https://docs.feast.dev/reference/data-sources/file) * [x] [Synapse source (community plugin)](https://github.com/Azure/feast-azure) * [x] [Hive (community plugin)](https://github.com/baineng/feast-hive) - * [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres) - * [x] [Spark (community plugin)](https://docs.feast.dev/reference/data-sources/spark) - * [x] Kafka / Kinesis sources (via [push support into the online store](https://docs.feast.dev/reference/data-sources/push) + * [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/data-sources/postgres) + * [x] [Spark (contrib plugin)](https://docs.feast.dev/reference/data-sources/spark) + * [x] Kafka / Kinesis sources (via [push support into the online store](https://docs.feast.dev/reference/data-sources/push)) * [ ] HTTP source * **Offline Stores** * [x] [Snowflake](https://docs.feast.dev/reference/offline-stores/snowflake) @@ -156,9 +159,9 @@ The list below contains the functionality that contributors are planning to deve * [x] [BigQuery](https://docs.feast.dev/reference/offline-stores/bigquery) * [x] [Synapse (community plugin)](https://github.com/Azure/feast-azure) * [x] [Hive (community plugin)](https://github.com/baineng/feast-hive) - * [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres) - * [x] [Trino (community plugin)](https://github.com/Shopify/feast-trino) - * [x] [Spark (community plugin)](https://docs.feast.dev/reference/offline-stores/spark) + * [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/offline-stores/postgres) + * [x] [Trino (contrib plugin)](https://github.com/Shopify/feast-trino) + * [x] [Spark (contrib plugin)](https://docs.feast.dev/reference/offline-stores/spark) * [x] [In-memory / Pandas](https://docs.feast.dev/reference/offline-stores/file) * [x] 
[Custom offline store support](https://docs.feast.dev/how-to-guides/adding-a-new-offline-store) * **Online Stores** @@ -167,7 +170,7 @@ The list below contains the functionality that contributors are planning to deve * [x] [Datastore](https://docs.feast.dev/reference/online-stores/datastore) * [x] [SQLite](https://docs.feast.dev/reference/online-stores/sqlite) * [x] [Azure Cache for Redis (community plugin)](https://github.com/Azure/feast-azure) - * [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres) + * [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/online-stores/postgres) * [x] [Custom online store support](https://docs.feast.dev/how-to-guides/adding-support-for-a-new-online-store) * [ ] Bigtable (in progress) * [ ] Cassandra @@ -204,7 +207,7 @@ The list below contains the functionality that contributors are planning to deve * [x] CLI for browsing feature registry * [x] Model-centric feature tracking (feature services) * [x] Amundsen integration (see [Feast extractor](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/feast_extractor.py)) - * [x] Feast Web UI (alpha) + * [x] Feast Web UI (Alpha release. 
See [documentation](https://docs.feast.dev/reference/alpha-web-ui.md)) * [ ] REST API for browsing feature registry * [ ] Feature versioning diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index e73996665e5..f6f12e04d04 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -13,7 +13,6 @@ * [Data source](getting-started/concepts/data-source.md) * [Entity](getting-started/concepts/entity.md) * [Feature view](getting-started/concepts/feature-view.md) - * [Feature service](getting-started/concepts/feature-service.md) * [Feature retrieval](getting-started/concepts/feature-retrieval.md) * [Point-in-time joins](getting-started/concepts/point-in-time-joins.md) * [Dataset](getting-started/concepts/dataset.md) @@ -60,19 +59,22 @@ * [Snowflake](reference/data-sources/snowflake.md) * [BigQuery](reference/data-sources/bigquery.md) * [Redshift](reference/data-sources/redshift.md) - * [Spark](reference/data-sources/spark.md) * [Push](reference/data-sources/push.md) + * [Spark (contrib)](reference/data-sources/spark.md) + * [PostgreSQL (contrib)](reference/data-sources/postgres.md) * [Offline stores](reference/offline-stores/README.md) * [File](reference/offline-stores/file.md) * [Snowflake](reference/offline-stores/snowflake.md) * [BigQuery](reference/offline-stores/bigquery.md) * [Redshift](reference/offline-stores/redshift.md) - * [Spark](reference/offline-stores/spark.md) + * [Spark (contrib)](reference/offline-stores/spark.md) + * [PostgreSQL (contrib)](reference/offline-stores/postgres.md) * [Online stores](reference/online-stores/README.md) * [SQLite](reference/online-stores/sqlite.md) * [Redis](reference/online-stores/redis.md) * [Datastore](reference/online-stores/datastore.md) * [DynamoDB](reference/online-stores/dynamodb.md) + * [PostgreSQL (contrib)](reference/online-stores/postgres.md) * [Providers](reference/providers/README.md) * [Local](reference/providers/local.md) * [Google Cloud Platform](reference/providers/google-cloud-platform.md) @@ -81,11 +83,11 @@ * 
[feature\_store.yaml](reference/feature-repository/feature-store-yaml.md) * [.feastignore](reference/feature-repository/feast-ignore.md) * [Feature servers](reference/feature-servers/README.md) - * [Local feature server](reference/feature-servers/local-feature-server.md) + * [Python feature server](reference/feature-servers/python-feature-server.md) * [Go-based feature retrieval](reference/feature-servers/go-feature-retrieval.md) +* [\[Alpha\] Web UI](reference/alpha-web-ui.md) * [\[Alpha\] Data quality monitoring](reference/dqm.md) * [\[Alpha\] On demand feature view](reference/alpha-on-demand-feature-view.md) -* [\[Alpha\] Stream ingestion](reference/alpha-stream-ingestion.md) * [\[Alpha\] AWS Lambda feature server](reference/alpha-aws-lambda-feature-server.md) * [Feast CLI reference](reference/feast-cli-commands.md) * [Python API reference](http://rtd.feast.dev) diff --git a/docs/getting-started/concepts/README.md b/docs/getting-started/concepts/README.md index 7ad0115a72b..e7b29eb0047 100644 --- a/docs/getting-started/concepts/README.md +++ b/docs/getting-started/concepts/README.md @@ -8,8 +8,6 @@ {% page-ref page="feature-view.md" %} -{% page-ref page="feature-service.md" %} - {% page-ref page="feature-retrieval.md" %} {% page-ref page="point-in-time-joins.md" %} diff --git a/docs/getting-started/concepts/entity.md b/docs/getting-started/concepts/entity.md index bc8aa2ac995..77cfc0aff2e 100644 --- a/docs/getting-started/concepts/entity.md +++ b/docs/getting-started/concepts/entity.md @@ -3,7 +3,7 @@ An entity is a collection of semantically related features. Users define entities to map to the domain of their use case. For example, a ride-hailing service could have customers and drivers as their entities, which group related features that correspond to these customers and drivers. 
```python -driver = Entity(name='driver', value_type=ValueType.STRING, join_key='driver_id') +driver = Entity(name='driver', value_type=ValueType.STRING, join_keys=['driver_id']) ``` Entities are typically defined as part of feature views. Entity name is used to reference the entity from a feature view definition and join key is used to identify the physical primary key on which feature values should be stored and retrieved. These keys are used during the lookup of feature values from the online store and the join process in point-in-time joins. It is possible to define composite entities \(more than one entity object\) in a feature view. It is also possible for feature views to have zero entities. See [feature view](feature-view.md) for more details. diff --git a/docs/getting-started/concepts/feature-retrieval.md b/docs/getting-started/concepts/feature-retrieval.md index bece0f55271..85b7d9c5b77 100644 --- a/docs/getting-started/concepts/feature-retrieval.md +++ b/docs/getting-started/concepts/feature-retrieval.md @@ -8,8 +8,59 @@ A dataset is a collection of rows that is produced by a historical retrieval fro **Dataset vs Data Source:** Datasets are the output of historical retrieval, whereas data sources are the inputs. One or more data sources can be used in the creation of a dataset. +## Feature Services +A feature service is an object that represents a logical group of features from one or more [feature views](feature-view.md#feature-view). Feature Services allows features from within a feature view to be used as needed by an ML model. Users can expect to create one feature service per model version, allowing for tracking of the features used by models. 
+ +{% tabs %} +{% tab title="driver_trips_feature_service.py" %} +```python +from driver_ratings_feature_view import driver_ratings_fv +from driver_trips_feature_view import driver_stats_fv + +driver_stats_fs = FeatureService( + name="driver_activity", + features=[driver_stats_fv, driver_ratings_fv[["lifetime_rating"]]] +) +``` +{% endtab %} +{% endtabs %} + +Feature services are used during + +* The generation of training datasets when querying feature views in order to find historical feature values. A single training dataset may consist of features from multiple feature views. +* Retrieval of features for batch scoring from the offline store (e.g. with an entity dataframe where all timestamps are `now()`) +* Retrieval of features from the online store for online inference (with smaller batch sizes). The features retrieved from the online store may also belong to multiple feature views. + +{% hint style="info" %} +Applying a feature service does not result in an actual service being deployed. +{% endhint %} + +Feature services enable referencing all or some features from a feature view. + +Retrieving from the online store with a feature service +```python +from feast import FeatureStore +feature_store = FeatureStore('.') # Initialize the feature store + +feature_service = feature_store.get_feature_service("driver_activity") +features = feature_store.get_online_features( + features=feature_service, entity_rows=[entity_dict] +) +``` + +Retrieving from the offline store with a feature service +```python +from feast import FeatureStore +feature_store = FeatureStore('.') # Initialize the feature store + +feature_service = feature_store.get_feature_service("driver_activity") +feature_store.get_historical_features(features=feature_service, entity_df=entity_df) +``` + ## Feature References +This mechanism of retrieving features is only recommended as you're experimenting. Once you want to launch experiments or serve models, feature services are recommended. 
+ Feature references uniquely identify feature values in Feast. The structure of a feature reference in string form is as follows: `:` Feature references are used for the retrieval of features from Feast: diff --git a/docs/getting-started/concepts/feature-service.md b/docs/getting-started/concepts/feature-service.md deleted file mode 100644 index adb4927113f..00000000000 --- a/docs/getting-started/concepts/feature-service.md +++ /dev/null @@ -1,48 +0,0 @@ -# Feature service - -A feature service is an object that represents a logical group of features from one or more [feature views](feature-view.md#feature-view). Feature Services allows features from within a feature view to be used as needed by an ML model. Users can expect to create one feature service per model, allowing for tracking of the features used by models. - -{% tabs %} -{% tab title="driver_trips_feature_service.py" %} -```python -from driver_ratings_feature_view import driver_ratings_fv -from driver_trips_feature_view import driver_stats_fv - -driver_stats_fs = FeatureService( - name="driver_activity", - features=[driver_stats_fv, driver_ratings_fv[["lifetime_rating"]]] -) -``` -{% endtab %} -{% endtabs %} - -Feature services are used during - -* The generation of training datasets when querying feature views in order to find historical feature values. A single training dataset may consist of features from multiple feature views. -* Retrieval of features from the online store. The features retrieved from the online store may also belong to multiple feature views. - -{% hint style="info" %} -Applying a feature service does not result in an actual service being deployed. -{% endhint %} - -Feature services can be retrieved from the feature store, and referenced when retrieving features from the online store. 
- -```python -from feast import FeatureStore -feature_store = FeatureStore('.') # Initialize the feature store - -feature_service = feature_store.get_feature_service("driver_activity") -features = feature_store.get_online_features( - features=feature_service, entity_rows=[entity_dict] -) -``` - -Feature services can also be used when retrieving historical features from the offline store. - -```python -from feast import FeatureStore -feature_store = FeatureStore('.') # Initialize the feature store - -feature_service = feature_store.get_feature_service("driver_activity") -feature_store.get_historical_features(features=feature_service, entity_df=entity_df) -``` diff --git a/docs/getting-started/concepts/feature-view.md b/docs/getting-started/concepts/feature-view.md index 80fd803d1f2..d0b80048284 100644 --- a/docs/getting-started/concepts/feature-view.md +++ b/docs/getting-started/concepts/feature-view.md @@ -79,7 +79,7 @@ It is suggested that you dynamically specify the new FeatureView name using `.wi from feast import BigQuerySource, Entity, FeatureView, Field, ValueType from feast.types import Int32 -location = Entity(name="location", join_key="location_id", value_type=ValueType.INT64) +location = Entity(name="location", join_keys=["location_id"], value_type=ValueType.INT64) location_stats_fv= FeatureView( name="location_stats", @@ -157,10 +157,10 @@ input_request = RequestSource( # Use the input data and feature view features to create new features @on_demand_feature_view( - sources={ - 'driver_hourly_stats': driver_hourly_stats_view, - 'vals_to_add': input_request - }, + sources=[ + driver_hourly_stats_view, + input_request + ], schema=[ Field(name='conv_rate_plus_val1', dtype=Float64), Field(name='conv_rate_plus_val2', dtype=Float64) diff --git a/docs/getting-started/faq.md b/docs/getting-started/faq.md index ebae9129622..b2438fdf7a3 100644 --- a/docs/getting-started/faq.md +++ b/docs/getting-started/faq.md @@ -3,7 +3,7 @@ {% hint style="info" %} **Don't see 
your question?** -We encourage you to ask questions on [Slack](https://slack.feast.dev) or [Github](https://github.com/feast-dev/feast). Even better, once you get an answer, add the answer to this FAQ via a [pull request](../project/development-guide.md)! +We encourage you to ask questions on [Slack](https://slack.feast.dev) or [GitHub](https://github.com/feast-dev/feast). Even better, once you get an answer, add the answer to this FAQ via a [pull request](../project/development-guide.md)! {% endhint %} ## Getting started @@ -14,14 +14,15 @@ The [quickstart](quickstart.md) is the easiest way to learn about Feast. For mor ## Concepts -### What is the difference between feature tables and feature views? - -Feature tables from Feast 0.9 have been renamed to feature views in Feast 0.10+. For more details, please see the discussion [here](https://github.com/feast-dev/feast/issues/1583). - ### Do feature views have to include entities? No, there are [feature views without entities](concepts/feature-view.md#feature-views-without-entities). +### How does Feast handle model or feature versioning? +Feast expects that each version of a model corresponds to a different feature service. + +Feature views once they are used by a feature service are intended to be immutable and not deleted (until a feature service is removed). In the future, `feast plan` and `feast apply` will throw errors if it sees this kind of behavior. + ### What is the difference between data sources and the offline store? The data source itself defines the underlying data warehouse table in which the features are stored. The offline store interface defines the APIs required to make an arbitrary compute layer work for Feast (e.g. pulling features given a set of feature views from their sources, exporting the data set results to different formats). Please see [data sources](concepts/data-source.md) and [offline store](architecture-and-components/offline-store.md) for more details.
@@ -32,13 +33,31 @@ Yes, this is possible. For example, you can use BigQuery as an offline store and ## Functionality +### How do I run `get_historical_features` without providing an entity dataframe? +Feast does not provide a way to do this right now. This is an area we're actively interested in contributions for. See [GitHub issue](https://github.com/feast-dev/feast/issues/1611) + ### Does Feast provide security or access control? Feast currently does not support any access control other than the access control required for the Provider's environment (for example, GCP and AWS permissions). +It is a good idea though to lock down the registry file so only the CI/CD pipeline can modify it. That way data scientists and other users cannot accidentally modify the registry and lose other teams' data. + ### Does Feast support streaming sources? -Yes. In earlier versions of Feast, we used Feast Spark to manage ingestion from stream sources. In the current version of Feast, we support [push based ingestion](../reference/alpha-stream-ingestion.md). +Yes. In earlier versions of Feast, we used Feast Spark to manage ingestion from stream sources. In the current version of Feast, we support [push based ingestion](../reference/data-sources/push.md). Streaming transformations are actively being worked on. + +### Does Feast support feature transformation? + +There are several kinds of transformations: +- On demand transformations (See [docs](../reference/alpha-on-demand-feature-view.md)) + - These transformations are Pandas transformations run on batch data when you call `get_historical_features` and at online serving time when you call `get_online_features`.
+ - Note that if you use push sources to ingest streaming features, these transformations will execute on the fly as well +- Batch transformations (WIP, see [RFC](https://docs.google.com/document/d/1964OkzuBljifDvkV-0fakp2uaijnVzdwWNGdz7Vz50A/edit#)) + - These will include SQL + PySpark based transformations on batch data sources. +- Streaming transformations (RFC in progress) + +### Does Feast have a Web UI? +Yes. See [documentation](../reference/alpha-web-ui.md). ### Does Feast support composite keys? @@ -50,7 +69,7 @@ Please see a detailed comparison of Feast vs. Tecton [here](https://www.tecton.a ### What are the performance/latency characteristics of Feast? -Feast is designed to work at scale and support low latency online serving. Benchmarks ([RFC](https://docs.google.com/document/d/12UuvTQnTTCJhdRgy6h10zSbInNGSyEJkIxpOcgOen1I/edit)) will be released soon, and active work is underway to support very latency sensitive use cases. +Feast is designed to work at scale and support low latency online serving. See our [benchmark blog post](https://feast.dev/blog/feast-benchmarks/) for details. ### Does Feast support embeddings and list features? @@ -77,18 +96,18 @@ Please follow the instructions [here](../how-to-guides/adding-support-for-a-new- ### Can the same storage engine be used for both the offline and online store? -Yes. For example, the Postgres [connector](https://github.com/nossrannug/feast-postgres) can be used as both an offline and online store. +Yes. For example, the Postgres connector can be used as both an offline and online store (as well as the registry). ### Does Feast support S3 as a data source? Yes. There are two ways to use S3 in Feast: -* Using Redshift as a data source via Spectrum ([AWS tutorial](https://docs.aws.amazon.com/redshift/latest/dg/tutorial-nested-data-create-table.html)), and then continuing with the [Running Feast with GCP/AWS](../how-to-guides/feast-gcp-aws/) guide. 
See a [presentation](https://youtu.be/pMFbRJ7AnBk?t=9463) we did on this at our apply() meetup. +* Using Redshift as a data source via Spectrum ([AWS tutorial](https://docs.aws.amazon.com/redshift/latest/dg/tutorial-nested-data-create-table.html)), and then continuing with the [Running Feast with Snowflake/GCP/AWS](../how-to-guides/feast-snowflake-gcp-aws/) guide. See a [presentation](https://youtu.be/pMFbRJ7AnBk?t=9463) we did on this at our apply() meetup. +* Using the `s3_endpoint_override` in a `FileSource` data source. This endpoint is more suitable for quick proof of concepts that won't necessarily scale for production use cases. ### How can I use Spark with Feast? -Feast does not support Spark natively. However, you can create a [custom provider](../how-to-guides/creating-a-custom-provider.md) that will support Spark, which can help with more scalable materialization and ingestion. +Feast supports ingestion via Spark (see the [Spark data source](../reference/data-sources/spark.md) and [Spark offline store](../reference/offline-stores/spark.md)), but does not support Spark natively for materialization. However, you can create a [custom provider](../how-to-guides/creating-a-custom-provider.md) that will support Spark, which can help with more scalable materialization and ingestion. ### Is Feast planning on supporting X functionality? @@ -96,6 +115,13 @@ Please see the [roadmap](../roadmap.md). ## Project +### How do I contribute to Feast? + +For more details on contributing to the Feast community, see [here](../community.md) and [here](../project/contributing.md). + + +## Feast 0.9 (legacy) + ### What is the difference between Feast 0.9 and Feast 0.10+? Feast 0.10+ is much lighter weight and more extensible than Feast 0.9. It is designed to be simple to install and use. Please see this [document](https://docs.google.com/document/d/1AOsr\_baczuARjCpmZgVd8mCqTF4AZ49OEyU4Cn-uTT0) for more details. @@ -104,9 +130,6 @@ Feast 0.10+ is much lighter weight and more extensible than Feast 0.9. It is des Please see this [document](https://docs.google.com/document/d/1AOsr\_baczuARjCpmZgVd8mCqTF4AZ49OEyU4Cn-uTT0).
If you have any questions or suggestions, feel free to leave a comment on the document! -### How do I contribute to Feast? - -For more details on contributing to the Feast community, see [here](../community.md) and this [here](../project/contributing.md). ### What are the plans for Feast Core, Feast Serving, and Feast Spark? diff --git a/docs/getting-started/quickstart.md b/docs/getting-started/quickstart.md index b188e0189d6..41449b77e39 100644 --- a/docs/getting-started/quickstart.md +++ b/docs/getting-started/quickstart.md @@ -98,7 +98,7 @@ driver_hourly_stats = FileSource( # fetch features. # Entity has a name used for later reference (in a feature view, eg) # and join_key to identify physical field name used in storages -driver = Entity(name="driver", value_type=ValueType.INT64, join_key="driver_id", description="driver id",) +driver = Entity(name="driver", value_type=ValueType.INT64, join_keys=["driver_id"], description="driver id",) # Our parquet files contain sample data that includes a driver_id column, timestamps and # three feature column. Here we define a Feature View that will allow us to serve this @@ -116,6 +116,11 @@ driver_hourly_stats_view = FeatureView( source=driver_hourly_stats, tags={}, ) + +driver_stats_fs = FeatureService( + name="driver_activity", + features=[driver_hourly_stats_view] +) ``` {% endtab %} {% endtabs %} @@ -168,7 +173,7 @@ driver_hourly_stats = FileSource( # fetch features. # Entity has a name used for later reference (in a feature view, eg) # and join_key to identify physical field name used in storages -driver = Entity(name="driver", value_type=ValueType.INT64, join_key="driver_id", description="driver id",) +driver = Entity(name="driver", value_type=ValueType.INT64, join_keys=["driver_id"], description="driver id",) # Our parquet files contain sample data that includes a driver_id column, timestamps and # three feature column. 
Here we define a Feature View that will allow us to serve this @@ -186,6 +191,11 @@ driver_hourly_stats_view = FeatureView( source=driver_hourly_stats, tags={}, ) + +driver_stats_fs = FeatureService( + name="driver_activity", + features=[driver_hourly_stats_view] +) ``` {% endtab %} {% endtabs %} @@ -223,7 +233,7 @@ entity_df = pd.DataFrame.from_dict( "driver_id": [1001, 1002, 1003], # label name -> label values - "label_driver_reported_satisfaction": [1, 5, 3], + "label_driver_reported_satisfaction": [1, 5, 3], # "event_timestamp" (reserved key) -> timestamps "event_timestamp": [ @@ -263,14 +273,14 @@ print(training_df.head()) Int64Index: 3 entries, 0 to 2 Data columns (total 6 columns): - # Column Non-Null Count Dtype ---- ------ -------------- ----- + # Column Non-Null Count Dtype +--- ------ -------------- ----- 0 event_timestamp 3 non-null datetime64[ns, UTC] - 1 driver_id 3 non-null int64 - 2 label_driver_reported_satisfaction 3 non-null int64 - 3 conv_rate 3 non-null float32 - 4 acc_rate 3 non-null float32 - 5 avg_daily_trips 3 non-null int32 + 1 driver_id 3 non-null int64 + 2 label_driver_reported_satisfaction 3 non-null int64 + 3 conv_rate 3 non-null float32 + 4 acc_rate 3 non-null float32 + 5 avg_daily_trips 3 non-null int32 dtypes: datetime64[ns, UTC](1), float32(2), int32(1), int64(2) memory usage: 132.0 bytes None @@ -303,7 +313,7 @@ feast materialize-incremental $CURRENT_TIME {% tabs %} {% tab title="Output" %} ```bash -Materializing 1 feature views to 2021-08-23 16:25:46+00:00 into the sqlite online +Materializing 1 feature views to 2021-08-23 16:25:46+00:00 into the sqlite online store. driver_hourly_stats from 2021-08-22 16:25:47+00:00 to 2021-08-23 16:25:46+00:00: @@ -355,6 +365,48 @@ pprint(feature_vector) {% endtab %} {% endtabs %} +## Step 7: Using a feature service to fetch online features instead. 
+ +You can also use feature services to manage multiple features, and decouple feature view definitions and the features needed by end applications. The feature store can also be used to fetch either online or historical features using the same api below. More information can be found [here](https://docs.feast.dev/getting-started/concepts/feature-service). + +{% tabs %} +{% tab title="Python" %} +```python +from feast import FeatureStore +feature_store = FeatureStore('.') # Initialize the feature store + +feature_service = feature_store.get_feature_service("driver_activity") +features = feature_store.get_online_features( + features=feature_service, + entity_rows=[ + # {join_key: entity_value} + {"driver_id": 1004}, + {"driver_id": 1005}, + ], +).to_dict() +``` + +{% tabs %} +{% tab title="Output" %} +```bash +{ + 'acc_rate': [0.5732735991477966, 0.7828438878059387], + 'avg_daily_trips': [33, 984], + 'conv_rate': [0.15498852729797363, 0.6263588070869446], + 'driver_id': [1004, 1005] +} +``` +{% endtab %} +{% endtabs %} + +## Step 8: Browse your features with the Web UI (experimental) + +View all registered features, data sources, entities, and feature services with the Web UI. + +One of the ways to view this is with the `feast ui` command. + +![](../reference/ui.png) + ## Next steps * Read the [Concepts](concepts/) page to understand the Feast data model. diff --git a/docs/getting-started/third-party-integrations.md b/docs/getting-started/third-party-integrations.md index a731fd85dc3..ab926682662 100644 --- a/docs/getting-started/third-party-integrations.md +++ b/docs/getting-started/third-party-integrations.md @@ -19,9 +19,9 @@ Don't see your offline store or online store of choice here? 
Check out our guide * [x] [Parquet file source](https://docs.feast.dev/reference/data-sources/file) * [x] [Synapse source (community plugin)](https://github.com/Azure/feast-azure) * [x] [Hive (community plugin)](https://github.com/baineng/feast-hive) -* [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres) -* [x] [Spark (community plugin)](https://docs.feast.dev/reference/data-sources/spark) -* [x] Kafka / Kinesis sources (via [push support into the online store](https://docs.feast.dev/reference/data-sources/push) +* [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/data-sources/postgres) +* [x] [Spark (contrib plugin)](https://docs.feast.dev/reference/data-sources/spark) +* [x] Kafka / Kinesis sources (via [push support into the online store](https://docs.feast.dev/reference/data-sources/push)) * [ ] HTTP source ### Offline Stores @@ -31,9 +31,9 @@ Don't see your offline store or online store of choice here? Check out our guide * [x] [BigQuery](https://docs.feast.dev/reference/offline-stores/bigquery) * [x] [Synapse (community plugin)](https://github.com/Azure/feast-azure) * [x] [Hive (community plugin)](https://github.com/baineng/feast-hive) -* [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres) -* [x] [Trino (community plugin)](https://github.com/Shopify/feast-trino) -* [x] [Spark (community plugin)](https://docs.feast.dev/reference/offline-stores/spark) +* [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/offline-stores/postgres) +* [x] [Trino (contrib plugin)](https://github.com/Shopify/feast-trino) +* [x] [Spark (contrib plugin)](https://docs.feast.dev/reference/offline-stores/spark) * [x] [In-memory / Pandas](https://docs.feast.dev/reference/offline-stores/file) * [x] [Custom offline store support](https://docs.feast.dev/how-to-guides/adding-a-new-offline-store) @@ -44,7 +44,7 @@ Don't see your offline store or online store of choice here? 
Check out our guide * [x] [Datastore](https://docs.feast.dev/reference/online-stores/datastore) * [x] [SQLite](https://docs.feast.dev/reference/online-stores/sqlite) * [x] [Azure Cache for Redis (community plugin)](https://github.com/Azure/feast-azure) -* [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres) +* [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/online-stores/postgres) * [x] [Custom online store support](https://docs.feast.dev/how-to-guides/adding-support-for-a-new-online-store) * [ ] Bigtable (in progress) * [ ] Cassandra diff --git a/docs/how-to-guides/running-feast-in-production.md b/docs/how-to-guides/running-feast-in-production.md index 8518bb28d07..53808326091 100644 --- a/docs/how-to-guides/running-feast-in-production.md +++ b/docs/how-to-guides/running-feast-in-production.md @@ -274,7 +274,7 @@ For seamless integration with Kubernetes (including services created by Feast He ## 5. Ingesting features from a stream source -Recently Feast added functionality for [stream ingestion](../reference/alpha-stream-ingestion.md). +Recently Feast added functionality for [stream ingestion](../reference/data-sources/push.md). Please note that this is still in an early phase and new incompatible changes may be introduced. ### 5.1. Using Python SDK in your Apache Spark / Beam pipeline diff --git a/docs/reference/alpha-aws-lambda-feature-server.md b/docs/reference/alpha-aws-lambda-feature-server.md index 58a0f6862b7..eadcf40bb48 100644 --- a/docs/reference/alpha-aws-lambda-feature-server.md +++ b/docs/reference/alpha-aws-lambda-feature-server.md @@ -8,7 +8,7 @@ To enable this feature, run **`feast alpha enable aws_lambda_feature_server`** ## Overview -The AWS Lambda feature server is an HTTP endpoint that serves features with JSON I/O, deployed as a Docker image through AWS Lambda and AWS API Gateway. This enables users to get features from Feast using any programming language that can make HTTP requests. 
A [local feature server](feature-server.md) is also available. A remote feature server on GCP Cloud Run is currently being developed. +The AWS Lambda feature server is an HTTP endpoint that serves features with JSON I/O, deployed as a Docker image through AWS Lambda and AWS API Gateway. This enables users to get features from Feast using any programming language that can make HTTP requests. A [local feature server](feature-servers/python-feature-server.md) is also available. A remote feature server on GCP Cloud Run is currently being developed. ## Deployment diff --git a/docs/reference/alpha-on-demand-feature-view.md b/docs/reference/alpha-on-demand-feature-view.md index 6be55fb735e..eb8c4f62914 100644 --- a/docs/reference/alpha-on-demand-feature-view.md +++ b/docs/reference/alpha-on-demand-feature-view.md @@ -21,7 +21,7 @@ There are new CLI commands: ## Example -See [https://github.com/feast-dev/on-demand-feature-views-demo](https://github.com/feast-dev/on-demand-feature-views-demo) for an example on how to use on demand feature views. +See [https://github.com/feast-dev/on-demand-feature-views-demo](https://github.com/feast-dev/on-demand-feature-views-demo) for an example on how to use on demand feature views. ### **Registering transformations** @@ -32,7 +32,7 @@ from feast import Field, RequestSource from feast.types import Float64, Int64 import pandas as pd -# Define a request data source which encodes features / information only +# Define a request data source which encodes features / information only # available at request time (e.g. 
part of the user initiated HTTP request) input_request = RequestSource( name="vals_to_add", @@ -44,10 +44,10 @@ input_request = RequestSource( # Use the input data and feature view features to create new features @on_demand_feature_view( - sources={ - 'driver_hourly_stats': driver_hourly_stats_view, - 'vals_to_add': input_request - }, + sources=[ + driver_hourly_stats_view, + input_request + ], schema=[ Field(name='conv_rate_plus_val1', dtype=Float64), Field(name='conv_rate_plus_val2', dtype=Float64) @@ -63,7 +63,7 @@ def transformed_conv_rate(features_df: pd.DataFrame) -> pd.DataFrame: ### **Feature retrieval** {% hint style="info" %} -The on demand feature view's name is the function name \(i.e. `transformed_conv_rate`\). +The on demand feature view's name is the function name \(i.e. `transformed_conv_rate`\). {% endhint %} And then to retrieve historical or online features, we can call this in a feature service or reference individual features: diff --git a/docs/reference/alpha-web-ui.md b/docs/reference/alpha-web-ui.md new file mode 100644 index 00000000000..182f9fb13df --- /dev/null +++ b/docs/reference/alpha-web-ui.md @@ -0,0 +1,136 @@ +# \[Alpha\] Feast Web UI + +**Warning**: This is an _experimental_ feature. It's intended for early testing and feedback, and could change without warnings in future releases. + +## Overview + +The Feast Web UI allows users to explore their feature repository through a Web UI. It includes functionality such as: +- Browsing Feast objects (feature views, entities, data sources, feature services, and saved datasets) and their relationships +- Searching and filtering for Feast objects by tags + +![Sample UI](ui.png) + +## Usage + +There are several ways to use the Feast Web UI. 
+ +### Feast CLI + +The easiest way to get started is to run the `feast ui` command within a feature repository: + +Output of `feast ui --help`: +```bash +Usage: feast ui [OPTIONS] + +Shows the Feast UI over the current directory + +Options: +-h, --host TEXT Specify a host for the server [default: 0.0.0.0] +-p, --port INTEGER Specify a port for the server [default: 8888] +-r, --registry_ttl_sec INTEGER Number of seconds after which the registry is refreshed. Default is 5 seconds. +--help Show this message and exit. +``` + +This will spin up a Web UI on localhost which automatically refreshes its view of the registry every `registry_ttl_sec` + +### Importing as a module to integrate with an existing React App + +This is the recommended way to use Feast UI for teams maintaining their own internal UI for their deployment of Feast. + +Start with bootstrapping a React app with `create-react-app` + +``` +npx create-react-app your-feast-ui +``` + +Then, in your app folder, install Feast UI and its peer dependencies. Assuming you use yarn + +``` +yarn add @feast-dev/feast-ui +yarn add @elastic/eui @elastic/datemath @emotion/react moment prop-types inter-ui react-query react-router-dom use-query-params zod typescript query-string d3 @types/d3 +``` + +Edit `index.js` in the React app to use Feast UI. + +```js +import React from "react"; +import ReactDOM from "react-dom"; +import "./index.css"; + +import FeastUI from "@feast-dev/feast-ui"; +import "@feast-dev/feast-ui/dist/feast-ui.css"; + +ReactDOM.render( + + + , + document.getElementById("root") +); +``` + +When you start the React app, it will look for `project-list.json` to find a list of your projects. The JSON should looks something like this. 
+ +```json +{ + "projects": [ + { + "name": "Credit Score Project", + "description": "Project for credit scoring team and associated models.", + "id": "credit_score_project", + "registryPath": "/registry.json" + } + ] +} +``` + +Then start the React App +```bash +yarn start +``` + +#### Customization + +The advantage of importing Feast UI as a module is in the ease of customization. The `` component exposes a `feastUIConfigs` prop thorough which you can customize the UI. Currently it supports a few parameters. + +##### Fetching the Project List + +You can use `projectListPromise` to provide a promise that overrides where the Feast UI fetches the project list from. + +```jsx + { + return res.json(); + }) + }} +/> +``` + +##### Custom Tabs + +You can add custom tabs for any of the core Feast objects through the `tabsRegistry`. + +``` +const tabsRegistry = { + RegularFeatureViewCustomTabs: [ + { + label: "Custom Tab Demo", // Navigation Label for the tab + path: "demo-tab", // Subpath for the tab + Component: RFVDemoCustomTab, // a React Component + }, + ] +} + + +``` + +Examples of custom tabs can be found in the `ui/custom-tabs` folder. 
+ diff --git a/docs/reference/data-sources/README.md b/docs/reference/data-sources/README.md index 43adf0d5936..89656b25702 100644 --- a/docs/reference/data-sources/README.md +++ b/docs/reference/data-sources/README.md @@ -18,10 +18,14 @@ Please see [Data Source](../../getting-started/concepts/feature-view.md#data-sou [redshift.md](redshift.md) {% endcontent-ref %} +{% content-ref url="push.md" %} +[push.md](push.md) +{% endcontent-ref %} + {% content-ref url="spark.md" %} [spark.md](spark.md) {% endcontent-ref %} -{% content-ref url="push.md" %} -[push.md](push.md) +{% content-ref url="postgres.md" %} +[postgres.md]([postgres].md) {% endcontent-ref %} diff --git a/docs/reference/data-sources/postgres.md b/docs/reference/data-sources/postgres.md new file mode 100644 index 00000000000..759cb50bbd7 --- /dev/null +++ b/docs/reference/data-sources/postgres.md @@ -0,0 +1,25 @@ +# PostgreSQL + +## Description + +**NOTE**: The Postgres plugin is a contrib plugin. This means it may not be fully stable. + + +The PostgreSQL data source allows for the retrieval of historical feature values from a PostgreSQL database for building training datasets as well as materializing features into an online store. + +## Examples + +Defining a Postgres source + +```python +from feast.infra.offline_stores.contrib.postgres_offline_store.postgres_source import ( + PostgreSQLSource, +) + +driver_stats_source = PostgreSQLSource( + name="feast_driver_hourly_stats", + query="SELECT * FROM feast_driver_hourly_stats", + timestamp_field="event_timestamp", + created_timestamp_column="created", +) +``` diff --git a/docs/reference/data-sources/push.md b/docs/reference/data-sources/push.md index 9f377d20998..e6eff312ec1 100644 --- a/docs/reference/data-sources/push.md +++ b/docs/reference/data-sources/push.md @@ -1,5 +1,7 @@ # Push source +**Warning**: This is an _experimental_ feature. It's intended for early testing and feedback, and could change without warnings in future releases. 
+ ## Description Push sources allow feature values to be pushed to the online store in real time. This allows fresh feature values to be made available to applications. Push sources supercede the @@ -31,10 +33,6 @@ from feast.types import Int64 push_source = PushSource( name="push_source", - schema=[ - Field(name="user_id", dtype=Int64), - Field(name="life_time_value", dtype=Int64) - ], batch_source=BigQuerySource(table="test.test"), ) @@ -42,7 +40,7 @@ fv = FeatureView( name="feature view", entities=["user_id"], schema=[Field(name="life_time_value", dtype=Int64)], - stream_source=push_source, + source=push_source, ) ``` @@ -53,6 +51,8 @@ import pandas as pd fs = FeatureStore(...) feature_data_frame = pd.DataFrame() -fs.push("push_source", feature_data_frame) +fs.push("push_source_name", feature_data_frame) ``` +See also [Python feature server](../feature-servers/python-feature-server.md) for instructions on how to push data to a deployed feature server. + diff --git a/docs/reference/data-sources/spark.md b/docs/reference/data-sources/spark.md index 2c1d1ec879d..266a401a512 100644 --- a/docs/reference/data-sources/spark.md +++ b/docs/reference/data-sources/spark.md @@ -1,4 +1,4 @@ -# Spark +# Spark (contrib) ## Description diff --git a/docs/reference/feast-cli-commands.md b/docs/reference/feast-cli-commands.md index 7fb2ccbeb5f..38e85843d41 100644 --- a/docs/reference/feast-cli-commands.md +++ b/docs/reference/feast-cli-commands.md @@ -2,7 +2,7 @@ ## Overview -The Feast CLI comes bundled with the Feast Python package. It is immediately available after [installing Feast](../how-to-guides/feast-gcp-aws/install-feast.md). +The Feast CLI comes bundled with the Feast Python package. It is immediately available after [installing Feast](../how-to-guides/feast-snowflake-gcp-aws/install-feast.md). ```text Usage: feast [OPTIONS] COMMAND [ARGS]... @@ -54,7 +54,7 @@ feast apply **What does Feast apply do?** 1. 
Feast will scan Python files in your feature repository and find all Feast object definitions, such as feature views, entities, and data sources. -2. Feast will validate your feature definitions +2. Feast will validate your feature definitions (e.g. for uniqueness of features) 3. Feast will sync the metadata about Feast objects to the registry. If a registry does not exist, then it will be instantiated. The standard registry is a simple protobuf binary file that is stored on disk \(locally or in an object store\). 4. Feast CLI will create all necessary feature store infrastructure. The exact infrastructure that is deployed or configured depends on the `provider` configuration that you have set in `feature_store.yaml`. For example, setting `local` as your provider will result in a `sqlite` online store being created. diff --git a/docs/reference/feature-servers/README.md b/docs/reference/feature-servers/README.md index e9e3afa4c09..301cea372c4 100644 --- a/docs/reference/feature-servers/README.md +++ b/docs/reference/feature-servers/README.md @@ -2,4 +2,4 @@ Feast users can choose to retrieve features from a feature server, as opposed to through the Python SDK. -{% page-ref page="local-feature-server.md" %} +{% page-ref page="python-feature-server.md" %} diff --git a/docs/reference/feature-servers/go-feature-retrieval.md b/docs/reference/feature-servers/go-feature-retrieval.md index 999a142c07d..30c1a9ca721 100644 --- a/docs/reference/feature-servers/go-feature-retrieval.md +++ b/docs/reference/feature-servers/go-feature-retrieval.md @@ -2,7 +2,7 @@ ## Overview -The Go Feature Retrieval component is a Go implementation of the core feature serving logic, embedded in the Python SDK. It supports retrieval of feature references, feature services, and on demand feature views, and can be used either through the Python SDK or the [Python feature server](local-feature-server.md). 
+The Go Feature Retrieval component is a Go implementation of the core feature serving logic, embedded in the Python SDK. It supports retrieval of feature references, feature services, and on demand feature views, and can be used either through the Python SDK or the [Python feature server](python-feature-server.md). Currently, this component only supports online serving and does not have an offline component including APIs to create feast feature repositories or apply configuration to the registry to facilitate online materialization. It also does not expose its own dedicated cli to perform feast actions. Furthermore, this component is only meant to expose an online serving API that can be called through the python SDK to facilitate faster online feature retrieval. @@ -10,7 +10,12 @@ The Go Feature Retrieval component currently only supports Redis and Sqlite as o ## Installation -As long as you are running macOS or linux x86 with python version 3.7-3.10, the go component comes pre-compiled when you run install feast. +As long as you are running macOS or linux, on x86, with python version 3.7-3.10, the go component comes pre-compiled when you install feast. + +However, some additional dependencies are required for Go <-> Python interoperability. To install these dependencies run the following command in your console: +``` +pip install feast[go] +``` For developers, if you want to build from source, run `make compile-go-lib` to build and compile the go server. 
diff --git a/docs/reference/feature-servers/local-feature-server.md b/docs/reference/feature-servers/python-feature-server.md similarity index 64% rename from docs/reference/feature-servers/local-feature-server.md rename to docs/reference/feature-servers/python-feature-server.md index 4ea37d4f1eb..352f0edc167 100644 --- a/docs/reference/feature-servers/local-feature-server.md +++ b/docs/reference/feature-servers/python-feature-server.md @@ -1,15 +1,23 @@ -# Local feature server +# Python feature server ## Overview -The local feature server is an HTTP endpoint that serves features with JSON I/O. This enables users to get features from Feast using any programming language that can make HTTP requests. A [remote feature server](../alpha-aws-lambda-feature-server.md) on AWS Lambda is also available. A remote feature server on GCP Cloud Run is currently being developed. +The feature server is an HTTP endpoint that serves features with JSON I/O. This enables users to write + read features from Feast online stores using any programming language that can make HTTP requests. ## CLI -There is a new CLI command that starts the server: `feast serve`. By default Feast uses port 6566; the port be overridden by a `--port` flag. +There is a CLI command that starts the server: `feast serve`. By default, Feast uses port 6566; the port be overridden by a `--port` flag. + +## Deploying as a service + +One can also deploy a feature server by building a docker image that bundles in the project's `feature_store.yaml`. See [helm chart](https://github.com/feast-dev/feast/blob/master/infra/charts/feast-python-server) for example. + +A [remote feature server](../alpha-aws-lambda-feature-server.md) on AWS Lambda is available. A remote feature server on GCP Cloud Run is currently being developed. 
+ ## Example +### Initializing a feature server Here's the local feature server usage example with the local template: ```bash @@ -41,6 +49,7 @@ INFO: Uvicorn running on http://127.0.0.1:6566 (Press CTRL+C to quit) 09/10/2021 10:42:11 AM INFO:Uvicorn running on http://127.0.0.1:6566 (Press CTRL+C to quit) ``` +### Retrieving features from the online store After the server starts, we can execute cURL commands from another terminal tab: ```bash @@ -142,3 +151,45 @@ curl -X POST \ } }' | jq ``` + +### Pushing features to the online store +You can push data corresponding to a push source to the online store (note that timestamps need to be strings): + +```text +curl -X POST "http://localhost:6566/push" -d '{ + "push_source_name": "driver_hourly_stats_push_source", + "df": { + "driver_id": [1001], + "event_timestamp": ["2022-05-13 10:59:42"], + "created": ["2022-05-13 10:59:42"], + "conv_rate": [1.0], + "acc_rate": [1.0], + "avg_daily_trips": [1000] + } + }' | jq +``` + +or equivalently from Python: +```python +import json +import requests +import pandas as pd +from datetime import datetime + +event_dict = { + "driver_id": [1001], + "event_timestamp": [str(datetime(2021, 5, 13, 10, 59, 42))], + "created": [str(datetime(2021, 5, 13, 10, 59, 42))], + "conv_rate": [1.0], + "acc_rate": [1.0], + "avg_daily_trips": [1000], + "string_feature": "test2", +} +push_data = { + "push_source_name":"driver_stats_push_source", + "df":event_dict +} +requests.post( + "http://localhost:6566/push", + data=json.dumps(push_data)) +``` diff --git a/docs/reference/offline-stores/README.md b/docs/reference/offline-stores/README.md index b3c85470b9d..57d7f35dea1 100644 --- a/docs/reference/offline-stores/README.md +++ b/docs/reference/offline-stores/README.md @@ -11,3 +11,6 @@ Please see [Offline Store](../../getting-started/architecture-and-components/off {% page-ref page="redshift.md" %} {% page-ref page="spark.md" %} + +{% page-ref page="postgres.md" %} + diff --git 
a/docs/reference/offline-stores/postgres.md b/docs/reference/offline-stores/postgres.md new file mode 100644 index 00000000000..9bd472673af --- /dev/null +++ b/docs/reference/offline-stores/postgres.md @@ -0,0 +1,41 @@ +# PostgreSQL (contrib) + +## Description + +The PostgreSQL offline store is an offline store that provides support for reading [PostgreSQL](../data-sources/postgres.md) data sources. + + +**DISCLAIMER**: This PostgreSQL offline store still does not achieve full test coverage. + +* Entity dataframes can be provided as a SQL query or can be provided as a Pandas dataframe. Pandas dataframes will be converted to a Spark dataframe and processed as a temporary view. +* A `PostgreSQLRetrievalJob` is returned when calling `get_historical_features()`. + * This allows you to call + * `to_df` to retrieve the pandas dataframe. + * `to_arrow` to retrieve the dataframe as a PyArrow table. + * `to_sql` to get the SQL query used to pull the features. + +* sslmode, sslkey_path, sslcert_path, and sslrootcert_path are optional + +## Example + +{% code title="feature_store.yaml" %} +```yaml +project: my_project +registry: data/registry.db +provider: local +offline_store: + type: postgres + host: DB_HOST + port: DB_PORT + database: DB_NAME + db_schema: DB_SCHEMA + user: DB_USERNAME + password: DB_PASSWORD + sslmode: verify-ca + sslkey_path: /path/to/client-key.pem + sslcert_path: /path/to/client-cert.pem + sslrootcert_path: /path/to/server-ca.pem +online_store: + path: data/online_store.db +``` +{% endcode %} diff --git a/docs/reference/offline-stores/spark.md b/docs/reference/offline-stores/spark.md index 48ddf46d179..7eec8d7b73d 100644 --- a/docs/reference/offline-stores/spark.md +++ b/docs/reference/offline-stores/spark.md @@ -1,4 +1,4 @@ -# Spark +# Spark (contrib) ## Description diff --git a/docs/reference/online-stores/README.md b/docs/reference/online-stores/README.md index 2c2902bc579..b3578b85394 100644 --- a/docs/reference/online-stores/README.md +++ 
b/docs/reference/online-stores/README.md @@ -9,3 +9,5 @@ Please see [Online Store](../../getting-started/architecture-and-components/onli {% page-ref page="datastore.md" %} {% page-ref page="dynamodb.md" %} + +{% page-ref page="postgres.md" %} diff --git a/docs/reference/online-stores/postgres.md b/docs/reference/online-stores/postgres.md new file mode 100644 index 00000000000..7d24079da9d --- /dev/null +++ b/docs/reference/online-stores/postgres.md @@ -0,0 +1,33 @@ +# PostgreSQL (contrib) + +## Description + +The PostgreSQL online store provides support for materializing feature values into a PostgreSQL database for serving online features. + +* Only the latest feature values are persisted + +* sslmode, sslkey_path, sslcert_path, and sslrootcert_path are optional + +## Example + +{% code title="feature_store.yaml" %} +```yaml +project: my_feature_repo +registry: data/registry.db +provider: local +online_store: + type: postgres + host: DB_HOST + port: DB_PORT + database: DB_NAME + db_schema: DB_SCHEMA + user: DB_USERNAME + password: DB_PASSWORD + sslmode: verify-ca + sslkey_path: /path/to/client-key.pem + sslcert_path: /path/to/client-cert.pem + sslrootcert_path: /path/to/server-ca.pem +``` +{% endcode %} + +Configuration options are available [here](https://rtd.feast.dev/en/latest/feast.infra.utils.postgres.html#module-feast.infra.utils.postgres.postgres_config). 
diff --git a/docs/reference/ui.png b/docs/reference/ui.png new file mode 100644 index 00000000000..360d57186d3 Binary files /dev/null and b/docs/reference/ui.png differ diff --git a/docs/roadmap.md b/docs/roadmap.md index 080cf16c02f..c2f5511f1ed 100644 --- a/docs/roadmap.md +++ b/docs/roadmap.md @@ -14,9 +14,9 @@ The list below contains the functionality that contributors are planning to deve * [x] [Parquet file source](https://docs.feast.dev/reference/data-sources/file) * [x] [Synapse source (community plugin)](https://github.com/Azure/feast-azure) * [x] [Hive (community plugin)](https://github.com/baineng/feast-hive) - * [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres) - * [x] [Spark (community plugin)](https://docs.feast.dev/reference/data-sources/spark) - * [x] Kafka / Kinesis sources (via [push support into the online store](https://docs.feast.dev/reference/data-sources/push) + * [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/data-sources/postgres) + * [x] [Spark (contrib plugin)](https://docs.feast.dev/reference/data-sources/spark) + * [x] Kafka / Kinesis sources (via [push support into the online store](https://docs.feast.dev/reference/data-sources/push)) * [ ] HTTP source * **Offline Stores** * [x] [Snowflake](https://docs.feast.dev/reference/offline-stores/snowflake) @@ -24,9 +24,9 @@ The list below contains the functionality that contributors are planning to deve * [x] [BigQuery](https://docs.feast.dev/reference/offline-stores/bigquery) * [x] [Synapse (community plugin)](https://github.com/Azure/feast-azure) * [x] [Hive (community plugin)](https://github.com/baineng/feast-hive) - * [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres) - * [x] [Trino (community plugin)](https://github.com/Shopify/feast-trino) - * [x] [Spark (community plugin)](https://docs.feast.dev/reference/offline-stores/spark) + * [x] [Postgres (contrib 
plugin)](https://docs.feast.dev/reference/offline-stores/postgres) + * [x] [Trino (contrib plugin)](https://github.com/Shopify/feast-trino) + * [x] [Spark (contrib plugin)](https://docs.feast.dev/reference/offline-stores/spark) * [x] [In-memory / Pandas](https://docs.feast.dev/reference/offline-stores/file) * [x] [Custom offline store support](https://docs.feast.dev/how-to-guides/adding-a-new-offline-store) * **Online Stores** @@ -35,7 +35,7 @@ The list below contains the functionality that contributors are planning to deve * [x] [Datastore](https://docs.feast.dev/reference/online-stores/datastore) * [x] [SQLite](https://docs.feast.dev/reference/online-stores/sqlite) * [x] [Azure Cache for Redis (community plugin)](https://github.com/Azure/feast-azure) - * [x] [Postgres (community plugin)](https://github.com/nossrannug/feast-postgres) + * [x] [Postgres (contrib plugin)](https://docs.feast.dev/reference/online-stores/postgres) * [x] [Custom online store support](https://docs.feast.dev/how-to-guides/adding-support-for-a-new-online-store) * [ ] Bigtable (in progress) * [ ] Cassandra @@ -72,6 +72,6 @@ The list below contains the functionality that contributors are planning to deve * [x] CLI for browsing feature registry * [x] Model-centric feature tracking (feature services) * [x] Amundsen integration (see [Feast extractor](https://github.com/amundsen-io/amundsen/blob/main/databuilder/databuilder/extractor/feast_extractor.py)) - * [x] Feast Web UI (alpha) + * [x] Feast Web UI (Alpha release. 
See [documentation](https://docs.feast.dev/reference/alpha-web-ui))
Entity(name="driver_id", value_type=ValueType.INT64, description="driver id",) -driver_hourly_stats_view = FeatureView( +driver_hourly_stats_view = BatchFeatureView( name="driver_hourly_stats", entities=["driver_id"], ttl=Duration(seconds=86400000), @@ -43,10 +43,10 @@ # Define an on demand feature view which can generate new features based on # existing feature views and RequestSource features @on_demand_feature_view( - inputs={ - "driver_hourly_stats": driver_hourly_stats_view, - "vals_to_add": input_request, - }, + sources=[ + driver_hourly_stats_view, + input_request, + ], schema=[ Field(name="conv_rate_plus_val1", dtype=Float64), Field(name="conv_rate_plus_val2", dtype=Float64), diff --git a/examples/quickstart/quickstart.ipynb b/examples/quickstart/quickstart.ipynb index 3679fcc7788..60974d27513 100644 --- a/examples/quickstart/quickstart.ipynb +++ b/examples/quickstart/quickstart.ipynb @@ -59,7 +59,7 @@ "base_uri": "https://localhost:8080/" }, "id": "rXNMAAJKQPG5", - "outputId": "b27420ac-c6ba-4d9f-cae8-51a2007b4189" + "outputId": "52297709-380b-4200-8e7c-3d0102a82ea4" }, "source": [ "%%sh\n", @@ -67,14 +67,14 @@ "pip install Pygments -q\n", "echo \"Please restart your runtime now (Runtime -> Restart runtime). This ensures that the correct dependencies are loaded.\"" ], - "execution_count": null, + "execution_count": 1, "outputs": [ { "output_type": "stream", + "name": "stdout", "text": [ "Please restart your runtime now (Runtime -> Restart runtime). 
This ensures that the correct dependencies are loaded.\n" - ], - "name": "stdout" + ] } ] }, @@ -112,22 +112,22 @@ "base_uri": "https://localhost:8080/" }, "id": "IhirSkgUvYau", - "outputId": "a2a5631e-1703-4957-b896-9c432851a261" + "outputId": "df90af1a-06bd-48a1-94e6-7def19e87d5f" }, "source": [ "!feast init feature_repo" ], - "execution_count": null, + "execution_count": 1, "outputs": [ { "output_type": "stream", + "name": "stdout", "text": [ "Feast is an open source project that collects anonymized error reporting and usage statistics. To opt out or learn more see https://docs.feast.dev/reference/usage\n", "\n", "Creating a new Feast repository in \u001b[1m\u001b[32m/content/feature_repo\u001b[0m.\n", "\n" - ], - "name": "stdout" + ] } ] }, @@ -155,25 +155,25 @@ "base_uri": "https://localhost:8080/" }, "id": "9jXuzt4ovzA3", - "outputId": "1ef1bf42-2306-4cc0-c959-1ea2d62e3149" + "outputId": "bff15f0c-9f8e-4a3c-e605-5ad84be30709" }, "source": [ "%cd feature_repo\n", "!ls -R" ], - "execution_count": null, + "execution_count": 2, "outputs": [ { "output_type": "stream", + "name": "stdout", "text": [ "/content/feature_repo\n", ".:\n", - "data example.py feature_store.yaml\n", + "data example.py feature_store.yaml __init__.py\n", "\n", "./data:\n", "driver_stats.parquet\n" - ], - "name": "stdout" + ] } ] }, @@ -192,8 +192,7 @@ "* gcp: use BigQuery/Snowflake with Google Cloud Datastore/Redis\n", "* aws: use Redshift/Snowflake with DynamoDB/Redis\n", "\n", - "Note that there are many other sources Feast works with, including Azure, Hive, Trino, and PostgreSQL via community plugins. See https://docs.feast.dev/getting-started/third-party-integrations for all supported datasources.", - "\n", + "Note that there are many other sources Feast works with, including Azure, Hive, Trino, and PostgreSQL via community plugins. 
See https://docs.feast.dev/getting-started/third-party-integrations for all supported datasources.\n", "A custom setup can also be made by following https://docs.feast.dev/v/master/how-to-guides/creating-a-custom-provider" ] }, @@ -204,23 +203,23 @@ "base_uri": "https://localhost:8080/" }, "id": "9_YJ--uYdtcP", - "outputId": "8d772619-aa4d-4cb4-e7e0-2ed45bc09a87" + "outputId": "89268e31-6be0-43fb-e576-6d335a2c1dd9" }, "source": [ "!pygmentize feature_store.yaml" ], - "execution_count": null, + "execution_count": 3, "outputs": [ { "output_type": "stream", + "name": "stdout", "text": [ - "project: feature_repo\n", - "registry: data/registry.db\n", - "provider: local\n", - "online_store:\n", - " path: data/online_store.db\n" - ], - "name": "stdout" + "\u001b[94mproject\u001b[39;49;00m: feature_repo\n", + "\u001b[94mregistry\u001b[39;49;00m: data/registry.db\n", + "\u001b[94mprovider\u001b[39;49;00m: local\n", + "\u001b[94monline_store\u001b[39;49;00m:\n", + " \u001b[94mpath\u001b[39;49;00m: data/online_store.db\n" + ] } ] }, @@ -240,23 +239,55 @@ "metadata": { "colab": { "base_uri": "https://localhost:8080/", - "height": 419 + "height": 424 }, "id": "sIF2lO59dwzi", - "outputId": "3e7ff19e-1052-49a6-a889-de76cce61714" + "outputId": "80e798d5-df21-4ebd-de1c-9bde282bd742" }, "source": [ "import pandas as pd\n", "\n", "pd.read_parquet(\"data/driver_stats.parquet\")" ], - "execution_count": null, + "execution_count": 4, "outputs": [ { "output_type": "execute_result", "data": { + "text/plain": [ + " event_timestamp driver_id conv_rate acc_rate \\\n", + "0 2022-03-31 14:00:00+00:00 1005 0.313336 0.231481 \n", + "1 2022-03-31 15:00:00+00:00 1005 0.959499 0.942614 \n", + "2 2022-03-31 16:00:00+00:00 1005 0.231786 0.313516 \n", + "3 2022-03-31 17:00:00+00:00 1005 0.886911 0.531613 \n", + "4 2022-03-31 18:00:00+00:00 1005 0.574945 0.718223 \n", + "... ... ... ... ... 
\n", + "1802 2022-04-15 12:00:00+00:00 1001 0.521622 0.266667 \n", + "1803 2022-04-15 13:00:00+00:00 1001 0.003188 0.535501 \n", + "1804 2021-04-12 07:00:00+00:00 1001 0.709081 0.823138 \n", + "1805 2022-04-08 02:00:00+00:00 1003 0.033297 0.053268 \n", + "1806 2022-04-08 02:00:00+00:00 1003 0.033297 0.053268 \n", + "\n", + " avg_daily_trips created \n", + "0 303 2022-04-15 14:34:10.056 \n", + "1 842 2022-04-15 14:34:10.056 \n", + "2 782 2022-04-15 14:34:10.056 \n", + "3 634 2022-04-15 14:34:10.056 \n", + "4 441 2022-04-15 14:34:10.056 \n", + "... ... ... \n", + "1802 406 2022-04-15 14:34:10.056 \n", + "1803 593 2022-04-15 14:34:10.056 \n", + "1804 997 2022-04-15 14:34:10.056 \n", + "1805 534 2022-04-15 14:34:10.056 \n", + "1806 534 2022-04-15 14:34:10.056 \n", + "\n", + "[1807 rows x 6 columns]" + ], "text/html": [ - "
\n", + "\n", + "
\n", + "
\n", + "
\n", "\n", + "\n", + " \n", + "
\n", + "
\n", + " " ] }, "metadata": {}, - "execution_count": 5 + "execution_count": 4 } ] }, @@ -438,53 +529,54 @@ "base_uri": "https://localhost:8080/" }, "id": "DPqXCoNpL0SX", - "outputId": "a252e224-61da-48ee-92b8-1780def99244" + "outputId": "be1308b2-0c83-4dd3-eb88-e79ffcbd20d6" }, "source": [ "!pygmentize -f terminal16m example.py" ], - "execution_count": null, + "execution_count": 5, "outputs": [ { "output_type": "stream", + "name": "stdout", "text": [ - "\u001b[38;2;64;128;128m# This is an example feature definition file\u001b[39m\n", + "\u001b[38;2;64;128;128;03m# This is an example feature definition file\u001b[39;00m\n", "\n", - "\u001b[38;2;0;128;0;01mfrom\u001b[39;00m \u001b[38;2;0;0;255;01mgoogle.protobuf.duration_pb2\u001b[39;00m \u001b[38;2;0;128;0;01mimport\u001b[39;00m Duration\n", + "\u001b[38;2;0;128;0;01mfrom\u001b[39;00m \u001b[38;2;0;0;255;01mdatetime\u001b[39;00m \u001b[38;2;0;128;0;01mimport\u001b[39;00m timedelta\n", "\n", - "\u001b[38;2;0;128;0;01mfrom\u001b[39;00m \u001b[38;2;0;0;255;01mfeast\u001b[39;00m \u001b[38;2;0;128;0;01mimport\u001b[39;00m Entity, Feature, FeatureView, FileSource, ValueType\n", + "\u001b[38;2;0;128;0;01mfrom\u001b[39;00m \u001b[38;2;0;0;255;01mfeast\u001b[39;00m \u001b[38;2;0;128;0;01mimport\u001b[39;00m Entity, FeatureView, Field, FileSource, ValueType\n", + "\u001b[38;2;0;128;0;01mfrom\u001b[39;00m \u001b[38;2;0;0;255;01mfeast\u001b[39;00m\u001b[38;2;0;0;255;01m.\u001b[39;00m\u001b[38;2;0;0;255;01mtypes\u001b[39;00m \u001b[38;2;0;128;0;01mimport\u001b[39;00m Float32, Int64\n", "\n", - "\u001b[38;2;64;128;128m# Read data from parquet files. Parquet is convenient for local development mode. For\u001b[39m\n", - "\u001b[38;2;64;128;128m# production, you can use your favorite DWH, such as BigQuery. See Feast documentation\u001b[39m\n", - "\u001b[38;2;64;128;128m# for more info.\u001b[39m\n", + "\u001b[38;2;64;128;128;03m# Read data from parquet files. Parquet is convenient for local development mode. 
For\u001b[39;00m\n", + "\u001b[38;2;64;128;128;03m# production, you can use your favorite DWH, such as BigQuery. See Feast documentation\u001b[39;00m\n", + "\u001b[38;2;64;128;128;03m# for more info.\u001b[39;00m\n", "driver_hourly_stats \u001b[38;2;102;102;102m=\u001b[39m FileSource(\n", " path\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33m/content/feature_repo/data/driver_stats.parquet\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m,\n", - " event_timestamp_column\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mevent_timestamp\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m,\n", + " timestamp_field\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mevent_timestamp\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m,\n", " created_timestamp_column\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mcreated\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m,\n", ")\n", "\n", - "\u001b[38;2;64;128;128m# Define an entity for the driver. You can think of entity as a primary key used to\u001b[39m\n", - "\u001b[38;2;64;128;128m# fetch features.\u001b[39m\n", - "driver \u001b[38;2;102;102;102m=\u001b[39m Entity(name\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mdriver_id\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m, value_type\u001b[38;2;102;102;102m=\u001b[39mValueType\u001b[38;2;102;102;102m.\u001b[39mINT64, description\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mdriver id\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m,)\n", + "\u001b[38;2;64;128;128;03m# Define an entity for the driver. 
You can think of entity as a primary key used to\u001b[39;00m\n", + "\u001b[38;2;64;128;128;03m# fetch features.\u001b[39;00m\n", + "driver \u001b[38;2;102;102;102m=\u001b[39m Entity(name\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mdriver\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m, value_type\u001b[38;2;102;102;102m=\u001b[39mValueType\u001b[38;2;102;102;102m.\u001b[39mINT64, join_key\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mdriver_id\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m,)\n", "\n", - "\u001b[38;2;64;128;128m# Our parquet files contain sample data that includes a driver_id column, timestamps and\u001b[39m\n", - "\u001b[38;2;64;128;128m# three feature column. Here we define a Feature View that will allow us to serve this\u001b[39m\n", - "\u001b[38;2;64;128;128m# data to our model online.\u001b[39m\n", + "\u001b[38;2;64;128;128;03m# Our parquet files contain sample data that includes a driver_id column, timestamps and\u001b[39;00m\n", + "\u001b[38;2;64;128;128;03m# three feature column. 
Here we define a Feature View that will allow us to serve this\u001b[39;00m\n", + "\u001b[38;2;64;128;128;03m# data to our model online.\u001b[39;00m\n", "driver_hourly_stats_view \u001b[38;2;102;102;102m=\u001b[39m FeatureView(\n", " name\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mdriver_hourly_stats\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m,\n", - " entities\u001b[38;2;102;102;102m=\u001b[39m[\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mdriver_id\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m],\n", - " ttl\u001b[38;2;102;102;102m=\u001b[39mDuration(seconds\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;102;102;102m86400\u001b[39m \u001b[38;2;102;102;102m*\u001b[39m \u001b[38;2;102;102;102m1\u001b[39m),\n", - " features\u001b[38;2;102;102;102m=\u001b[39m[\n", - " Feature(name\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mconv_rate\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m, dtype\u001b[38;2;102;102;102m=\u001b[39mValueType\u001b[38;2;102;102;102m.\u001b[39mFLOAT),\n", - " Feature(name\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33macc_rate\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m, dtype\u001b[38;2;102;102;102m=\u001b[39mValueType\u001b[38;2;102;102;102m.\u001b[39mFLOAT),\n", - " Feature(name\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mavg_daily_trips\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m, dtype\u001b[38;2;102;102;102m=\u001b[39mValueType\u001b[38;2;102;102;102m.\u001b[39mINT64),\n", + " entities\u001b[38;2;102;102;102m=\u001b[39m[\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mdriver\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m],\n", + " ttl\u001b[38;2;102;102;102m=\u001b[39mtimedelta(days\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;102;102;102m1\u001b[39m),\n", + " schema\u001b[38;2;102;102;102m=\u001b[39m[\n", + " 
Field(name\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mconv_rate\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m, dtype\u001b[38;2;102;102;102m=\u001b[39mFloat32),\n", + " Field(name\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33macc_rate\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m, dtype\u001b[38;2;102;102;102m=\u001b[39mFloat32),\n", + " Field(name\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m\u001b[38;2;186;33;33mavg_daily_trips\u001b[39m\u001b[38;2;186;33;33m\"\u001b[39m, dtype\u001b[38;2;102;102;102m=\u001b[39mInt64),\n", " ],\n", - " online\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;0;128;0mTrue\u001b[39m,\n", - " batch_source\u001b[38;2;102;102;102m=\u001b[39mdriver_hourly_stats,\n", + " online\u001b[38;2;102;102;102m=\u001b[39m\u001b[38;2;0;128;0;01mTrue\u001b[39;00m,\n", + " source\u001b[38;2;102;102;102m=\u001b[39mdriver_hourly_stats,\n", " tags\u001b[38;2;102;102;102m=\u001b[39m{},\n", ")\n" - ], - "name": "stdout" + ] } ] }, @@ -505,21 +597,23 @@ "base_uri": "https://localhost:8080/" }, "id": "RYKCKKrcxYZG", - "outputId": "d36b3fb2-9292-4b43-f26a-5441c301c92d" + "outputId": "9745d7eb-b4b8-4a43-bf47-189bbf07ae09" }, "source": [ "!feast apply" ], - "execution_count": null, + "execution_count": 6, "outputs": [ { "output_type": "stream", + "name": "stdout", "text": [ - "Registered entity \u001b[1m\u001b[32mdriver_id\u001b[0m\n", - "Registered feature view \u001b[1m\u001b[32mdriver_hourly_stats\u001b[0m\n", - "Deploying infrastructure for \u001b[1m\u001b[32mdriver_hourly_stats\u001b[0m\n" - ], - "name": "stdout" + "Created entity \u001b[1m\u001b[32mdriver_id\u001b[0m\n", + "Created feature view \u001b[1m\u001b[32mdriver_hourly_stats\u001b[0m\n", + "\n", + "Created sqlite table \u001b[1m\u001b[32mfeature_repo_driver_hourly_stats\u001b[0m\n", + "\n" + ] } ] }, @@ -544,7 +638,7 @@ "base_uri": "https://localhost:8080/" }, "id": "C6Fzia7YwBzz", - 
"outputId": "250e9be1-2283-4d74-cf48-297b8ae0d23a" + "outputId": "b99aedae-9c47-4b9f-acdd-cd02e2e091b7" }, "source": [ "from datetime import datetime, timedelta\n", @@ -583,21 +677,22 @@ "print(\"----- Example features -----\\n\")\n", "print(training_df.head())" ], - "execution_count": null, + "execution_count": 7, "outputs": [ { "output_type": "stream", + "name": "stdout", "text": [ "----- Feature schema -----\n", "\n", "\n", - "Int64Index: 3 entries, 0 to 2\n", + "Int64Index: 3 entries, 720 to 1081\n", "Data columns (total 6 columns):\n", " # Column Non-Null Count Dtype \n", "--- ------ -------------- ----- \n", - " 0 event_timestamp 3 non-null datetime64[ns, UTC]\n", - " 1 driver_id 3 non-null int64 \n", - " 2 label_driver_reported_satisfaction 3 non-null int64 \n", + " 0 driver_id 3 non-null int64 \n", + " 1 label_driver_reported_satisfaction 3 non-null int64 \n", + " 2 event_timestamp 3 non-null datetime64[ns, UTC]\n", " 3 conv_rate 3 non-null float32 \n", " 4 acc_rate 3 non-null float32 \n", " 5 avg_daily_trips 3 non-null int32 \n", @@ -607,14 +702,16 @@ "\n", "----- Example features -----\n", "\n", - " event_timestamp driver_id ... acc_rate avg_daily_trips\n", - "0 2021-08-23 15:12:55.489091+00:00 1003 ... 0.120588 938\n", - "1 2021-08-23 15:49:55.489089+00:00 1002 ... 0.504881 635\n", - "2 2021-08-23 16:14:55.489075+00:00 1001 ... 
0.138416 606\n", + " driver_id label_driver_reported_satisfaction \\\n", + "720 1002 5 \n", + "359 1001 1 \n", + "1081 1003 3 \n", "\n", - "[3 rows x 6 columns]\n" - ], - "name": "stdout" + " event_timestamp conv_rate acc_rate avg_daily_trips \n", + "720 2022-04-15 13:58:30.900257+00:00 0.368052 0.417923 346 \n", + "359 2022-04-15 14:23:30.900240+00:00 0.003188 0.535501 593 \n", + "1081 2022-04-15 13:21:30.900260+00:00 0.214944 0.788695 904 \n" + ] } ] }, @@ -645,23 +742,23 @@ "colab": { "base_uri": "https://localhost:8080/" }, - "outputId": "efbb493a-89a2-41ce-b3b4-d0d05131a8ff" + "outputId": "d38a0e0a-2802-4408-ab92-a26baf82752e" }, "source": [ "from datetime import datetime\n", "!feast materialize-incremental {datetime.now().isoformat()}" ], - "execution_count": null, + "execution_count": 8, "outputs": [ { "output_type": "stream", + "name": "stdout", "text": [ - "Materializing \u001b[1m\u001b[32m1\u001b[0m feature views to \u001b[1m\u001b[32m2021-08-23 16:25:46+00:00\u001b[0m into the \u001b[1m\u001b[32msqlite\u001b[0m online store.\n", + "Materializing \u001b[1m\u001b[32m1\u001b[0m feature views to \u001b[1m\u001b[32m2022-04-15 14:34:37+00:00\u001b[0m into the \u001b[1m\u001b[32msqlite\u001b[0m online store.\n", "\n", - "\u001b[1m\u001b[32mdriver_hourly_stats\u001b[0m from \u001b[1m\u001b[32m2021-08-22 16:25:47+00:00\u001b[0m to \u001b[1m\u001b[32m2021-08-23 16:25:46+00:00\u001b[0m:\n", - "\r 0%| | 0/5 [00:00 github.com/feast-dev/gopy v0.4.1-0.20220329011409-d705e6cd1d9b +replace github.com/go-python/gopy v0.4.0 => github.com/feast-dev/gopy v0.4.1-0.20220429180328-4257ac71a4d0 diff --git a/go.sum b/go.sum index ef9a0c6989b..e1f2436b296 100644 --- a/go.sum +++ b/go.sum @@ -85,8 +85,8 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.m github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod 
h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/feast-dev/gopy v0.4.1-0.20220329011409-d705e6cd1d9b h1:C/oK6gi12Q7fiiVCI3e62tqWCSXqsTz9OpyK249XI84= -github.com/feast-dev/gopy v0.4.1-0.20220329011409-d705e6cd1d9b/go.mod h1:ZO6vpitQ61NVoQP/2yOubPS6ET5pP3CAWCiMYn5eqCc= +github.com/feast-dev/gopy v0.4.1-0.20220429180328-4257ac71a4d0 h1:Go714ObVP1O+a6qK7haXVL28QNm6WMD8bwnN9EA8PlM= +github.com/feast-dev/gopy v0.4.1-0.20220429180328-4257ac71a4d0/go.mod h1:ZO6vpitQ61NVoQP/2yOubPS6ET5pP3CAWCiMYn5eqCc= github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= @@ -168,8 +168,6 @@ github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/ github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.2.0 h1:qJYtXnJRWmpe7m/3XlyhrsLrEURqHRM2kxzoxXqyUDs= -github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= diff --git a/go/cmd/server/logging/feature_repo/__init__.py b/go/__init__.py similarity index 100% rename from go/cmd/server/logging/feature_repo/__init__.py rename to go/__init__.py diff --git a/go/cmd/server/logging/feature_repo/data/online_store.db 
b/go/cmd/server/logging/feature_repo/data/online_store.db deleted file mode 100644 index b6ccea139e5..00000000000 Binary files a/go/cmd/server/logging/feature_repo/data/online_store.db and /dev/null differ diff --git a/go/cmd/server/logging/filelogstorage.go b/go/cmd/server/logging/filelogstorage.go deleted file mode 100644 index 19e9569e69d..00000000000 --- a/go/cmd/server/logging/filelogstorage.go +++ /dev/null @@ -1,86 +0,0 @@ -package logging - -import ( - "errors" - "fmt" - "io" - "os" - "path/filepath" - - "github.com/apache/arrow/go/v8/arrow/array" - "github.com/apache/arrow/go/v8/parquet" - "github.com/apache/arrow/go/v8/parquet/pqarrow" - "github.com/feast-dev/feast/go/internal/feast/registry" -) - -type FileLogStorage struct { - // Feast project name - project string - path string -} - -func GetFileConfig(config *registry.RepoConfig) (*OfflineLogStoreConfig, error) { - fileConfig := OfflineLogStoreConfig{ - storeType: "file", - } - if onlineStorePath, ok := config.OfflineStore["path"]; ok { - path, success := onlineStorePath.(string) - if !success { - return &fileConfig, fmt.Errorf("path, %s, cannot be converted to string", path) - } - fileConfig.path = path - } else { - return nil, errors.New("need path for file log storage") - } - return &fileConfig, nil -} - -// This offline store is currently only used for testing. It will be instantiated during go unit tests to log to file -// and the parquet files will be cleaned up after the test is run. -func NewFileOfflineStore(project string, offlineStoreConfig *OfflineLogStoreConfig) (*FileLogStorage, error) { - store := FileLogStorage{project: project} - var absPath string - var err error - // TODO(kevjumba) remove this default catch. 
- if offlineStoreConfig.path != "" { - absPath, err = filepath.Abs(offlineStoreConfig.path) - } else { - return nil, errors.New("need path for file log storage") - } - if err != nil { - return nil, err - } - store.path = absPath - return &store, nil -} - -func openLogFile(absPath string) (*os.File, error) { - var _, err = os.Stat(absPath) - - // create file if not exists - if os.IsNotExist(err) { - var file, err = os.Create(absPath) - if err != nil { - return nil, err - } - return file, nil - } else { - return nil, fmt.Errorf("path %s already exists", absPath) - } -} - -func (f *FileLogStorage) FlushToStorage(tbl array.Table) error { - w, err := openLogFile(f.path) - var writer io.Writer = w - if err != nil { - return err - } - props := parquet.NewWriterProperties(parquet.WithDictionaryDefault(false)) - arrProps := pqarrow.DefaultWriterProps() - err = pqarrow.WriteTable(tbl, writer, 100, props, arrProps) - if err != nil { - return err - } - return nil - -} diff --git a/go/cmd/server/logging/filelogstorage_test.go b/go/cmd/server/logging/filelogstorage_test.go deleted file mode 100644 index 1da7dd38ad2..00000000000 --- a/go/cmd/server/logging/filelogstorage_test.go +++ /dev/null @@ -1,70 +0,0 @@ -package logging - -import ( - "context" - "path/filepath" - - "testing" - - "github.com/apache/arrow/go/v8/arrow/array" - "github.com/apache/arrow/go/v8/arrow/memory" - "github.com/apache/arrow/go/v8/parquet/file" - "github.com/apache/arrow/go/v8/parquet/pqarrow" - "github.com/feast-dev/feast/go/internal/test" - "github.com/stretchr/testify/assert" -) - -func TestFlushToStorage(t *testing.T) { - ctx := context.Background() - table, expectedSchema, expectedColumns, err := GetTestArrowTableAndExpectedResults() - defer table.Release() - assert.Nil(t, err) - offlineStoreConfig := OfflineLogStoreConfig{ - storeType: "file", - path: "./log.parquet", - } - fileStore, err := NewFileOfflineStore("test", &offlineStoreConfig) - assert.Nil(t, err) - err = 
fileStore.FlushToStorage(array.Table(table)) - assert.Nil(t, err) - logPath, err := filepath.Abs(offlineStoreConfig.path) - assert.Nil(t, err) - pf, err := file.OpenParquetFile(logPath, false) - assert.Nil(t, err) - - reader, err := pqarrow.NewFileReader(pf, pqarrow.ArrowReadProperties{}, memory.DefaultAllocator) - assert.Nil(t, err) - - tbl, err := reader.ReadTable(ctx) - assert.Nil(t, err) - tr := array.NewTableReader(tbl, -1) - defer tbl.Release() - - defer tr.Release() - for tr.Next() { - rec := tr.Record() - assert.NotNil(t, rec) - for _, field := range rec.Schema().Fields() { - assert.Contains(t, expectedSchema, field.Name) - assert.Equal(t, field.Type, expectedSchema[field.Name]) - } - values, err := test.GetProtoFromRecord(rec) - - assert.Nil(t, err) - for name, val := range values { - if name == "RequestId" { - // Ensure there are request ids in record. - assert.Greater(t, len(val.Val), 0) - } else { - assert.Equal(t, len(val.Val), len(expectedColumns[name].Val)) - for idx, featureVal := range val.Val { - assert.Equal(t, featureVal.Val, expectedColumns[name].Val[idx].Val) - } - } - } - } - - err = test.CleanUpFile(logPath) - assert.Nil(t, err) - -} diff --git a/go/cmd/server/logging/logging.go b/go/cmd/server/logging/logging.go deleted file mode 100644 index 010644709ae..00000000000 --- a/go/cmd/server/logging/logging.go +++ /dev/null @@ -1,392 +0,0 @@ -package logging - -import ( - "errors" - "fmt" - "log" - "time" - - "github.com/apache/arrow/go/v8/arrow" - "github.com/apache/arrow/go/v8/arrow/array" - "github.com/apache/arrow/go/v8/arrow/memory" - "github.com/feast-dev/feast/go/internal/feast" - "github.com/feast-dev/feast/go/internal/feast/model" - "github.com/feast-dev/feast/go/protos/feast/serving" - "github.com/feast-dev/feast/go/protos/feast/types" - gotypes "github.com/feast-dev/feast/go/types" - "google.golang.org/protobuf/types/known/timestamppb" -) - -const DEFAULT_LOG_FLUSH_INTERVAL = 100 * time.Millisecond -const DEFAULT_LOG_INSERT_TIMEOUT = 
20 * time.Millisecond - -type Log struct { - // Example: val{int64_val: 5017}, val{int64_val: 1003} - EntityValue []*types.Value - - FeatureValues []*types.Value - FeatureStatuses []serving.FieldStatus - EventTimestamps []*timestamppb.Timestamp - RequestContext map[string]*types.Value - RequestId string -} - -type MemoryBuffer struct { - featureService *model.FeatureService - logs []*Log -} - -type LoggingService struct { - memoryBuffer *MemoryBuffer - logChannel chan *Log - fs *feast.FeatureStore - offlineLogStorage OfflineLogStorage - logInsertTTl time.Duration - logFlushInterval time.Duration -} - -func NewLoggingService(fs *feast.FeatureStore, logChannelCapacity int, featureServiceName string, enableLogProcessing bool) (*LoggingService, error) { - var featureService *model.FeatureService = nil - var err error - if fs != nil { - featureService, err = fs.GetFeatureService(featureServiceName) - if err != nil { - return nil, err - } - - } - - loggingService := &LoggingService{ - logChannel: make(chan *Log, logChannelCapacity), - memoryBuffer: &MemoryBuffer{ - logs: make([]*Log, 0), - featureService: featureService, - }, - fs: fs, - logInsertTTl: DEFAULT_LOG_INSERT_TIMEOUT, - logFlushInterval: DEFAULT_LOG_FLUSH_INTERVAL, - } - - if fs != nil { - offlineLogStorage, err := NewOfflineStore(fs.GetRepoConfig()) - loggingService.offlineLogStorage = offlineLogStorage - if err != nil { - return nil, err - } - } - - // Start goroutine to process logs - if enableLogProcessing { - go loggingService.processLogs() - - } - return loggingService, nil -} - -func (s *LoggingService) EmitLog(l *Log) error { - select { - case s.logChannel <- l: - return nil - case <-time.After(s.logInsertTTl): - return fmt.Errorf("could not add to log channel with capacity %d. Operation timed out. 
Current log channel length is %d", cap(s.logChannel), len(s.logChannel)) - } -} - -func (s *LoggingService) processLogs() { - // start a periodic flush - // TODO(kevjumba): set param so users can configure flushing duration - ticker := time.NewTicker(s.logFlushInterval) - defer ticker.Stop() - - for { - s.PerformPeriodicAppendToMemoryBufferAndLogFlush(ticker) - } -} - -// Select that either ingests new logs that are added to the logging channel, one at a time to add -// to the in-memory buffer or flushes all of them synchronously to the OfflineStorage on a time interval. -func (s *LoggingService) PerformPeriodicAppendToMemoryBufferAndLogFlush(t *time.Ticker) { - select { - case t := <-t.C: - s.flushLogsToOfflineStorage(t) - case new_log := <-s.logChannel: - log.Printf("Adding %s to memory.\n", new_log.FeatureValues) - s.memoryBuffer.logs = append(s.memoryBuffer.logs, new_log) - } -} - -// Acquires the logging schema from the feature service, converts the memory buffer array of rows of logs and flushes -// them to the offline storage. -func (s *LoggingService) flushLogsToOfflineStorage(t time.Time) error { - offlineStoreType, ok := getOfflineStoreType(s.fs.GetRepoConfig().OfflineStore) - if !ok { - return fmt.Errorf("could not get offline storage type for config: %s", s.fs.GetRepoConfig().OfflineStore) - } - if offlineStoreType == "file" { - entityMap, featureViews, odfvs, err := s.GetFcos() - if err != nil { - return err - } - schema, err := GetSchemaFromFeatureService(s.memoryBuffer.featureService, entityMap, featureViews, odfvs) - if err != nil { - return err - } - table, err := ConvertMemoryBufferToArrowTable(s.memoryBuffer, schema) - if err != nil { - return err - } - s.offlineLogStorage.FlushToStorage(table) - if err != nil { - return err - } - s.memoryBuffer.logs = s.memoryBuffer.logs[:0] - } else { - // Currently don't support any other offline flushing. 
- return errors.New("currently only file type is supported for offline log storage") - } - return nil -} - -// Takes memory buffer of logs in array row and converts them to columnar with generated fcoschema generated by GetFcoSchema -// and writes them to arrow table. -// Returns arrow table that contains all of the logs in columnar format. -func ConvertMemoryBufferToArrowTable(memoryBuffer *MemoryBuffer, fcoSchema *Schema) (array.Table, error) { - arrowMemory := memory.NewGoAllocator() - - columnNameToProtoValueArray := make(map[string][]*types.Value) - columnNameToStatus := make(map[string][]int32) - columnNameToTimestamp := make(map[string][]int64) - entityNameToEntityValues := make(map[string][]*types.Value) - - strBuilder := array.NewStringBuilder(arrowMemory) - - for _, l := range memoryBuffer.logs { - // EntityTypes maps an entity name to the specific type and also which index in the entityValues array it is - // e.g if an Entity Key is {driver_id, customer_id}, then the driver_id entitytype would be dtype=int64, index=0. - // It's in the order of the entities as given by the schema. 
- for idx, joinKey := range fcoSchema.Entities { - if _, ok := entityNameToEntityValues[joinKey]; !ok { - entityNameToEntityValues[joinKey] = make([]*types.Value, 0) - } - entityNameToEntityValues[joinKey] = append(entityNameToEntityValues[joinKey], l.EntityValue[idx]) - } - - // Contains both fv and odfv feature value types => they are processed in order of how the appear in the featureService - for idx, featureName := range fcoSchema.Features { - // populate the proto value arrays with values from memory buffer in separate columns one for each feature name - if _, ok := columnNameToProtoValueArray[featureName]; !ok { - columnNameToProtoValueArray[featureName] = make([]*types.Value, 0) - columnNameToStatus[featureName] = make([]int32, 0) - columnNameToTimestamp[featureName] = make([]int64, 0) - } - columnNameToProtoValueArray[featureName] = append(columnNameToProtoValueArray[featureName], l.FeatureValues[idx]) - columnNameToStatus[featureName] = append(columnNameToStatus[featureName], int32(l.FeatureStatuses[idx])) - columnNameToTimestamp[featureName] = append(columnNameToTimestamp[featureName], l.EventTimestamps[idx].AsTime().UnixNano()/int64(time.Millisecond)) - } - strBuilder.Append(l.RequestId) - } - - fields := make([]arrow.Field, 0) - columns := make([]array.Interface, 0) - for _, entityName := range fcoSchema.Entities { - protoArr := entityNameToEntityValues[entityName] - if len(protoArr) == 0 { - break - } - valArrowArray, err := gotypes.ProtoValuesToArrowArray(protoArr, arrowMemory, len(columnNameToProtoValueArray)) - if err != nil { - return nil, err - } - arrowType, err := gotypes.ValueTypeEnumToArrowType(fcoSchema.EntityTypes[entityName]) - if err != nil { - return nil, err - } - fields = append(fields, arrow.Field{ - Name: entityName, - Type: arrowType, - }) - columns = append(columns, valArrowArray) - } - - for _, featureName := range fcoSchema.Features { - - protoArr := columnNameToProtoValueArray[featureName] - if len(protoArr) == 0 { - break - } - 
arrowArray, err := gotypes.ProtoValuesToArrowArray(protoArr, arrowMemory, len(columnNameToProtoValueArray)) - if err != nil { - return nil, err - } - - arrowType, err := gotypes.ValueTypeEnumToArrowType(fcoSchema.FeaturesTypes[featureName]) - - if err != nil { - return nil, err - } - fields = append(fields, arrow.Field{ - Name: featureName, - Type: arrowType, - }) - columns = append(columns, arrowArray) - } - fields = append(fields, arrow.Field{ - Name: "RequestId", - Type: &arrow.StringType{}, - }) - - columns = append(columns, strBuilder.NewArray()) - schema := arrow.NewSchema( - fields, - nil, - ) - - result := array.Record(array.NewRecord(schema, columns, int64(len(memoryBuffer.logs)))) - - tbl := array.NewTableFromRecords(schema, []array.Record{result}) - return array.Table(tbl), nil -} - -type Schema struct { - Entities []string - Features []string - EntityTypes map[string]types.ValueType_Enum - FeaturesTypes map[string]types.ValueType_Enum -} - -func GetSchemaFromFeatureService(featureService *model.FeatureService, entityMap map[string]*model.Entity, featureViews []*model.FeatureView, onDemandFeatureViews []*model.OnDemandFeatureView) (*Schema, error) { - fvs := make(map[string]*model.FeatureView) - odFvs := make(map[string]*model.OnDemandFeatureView) - - joinKeys := make([]string, 0) - // All joinkeys in the featureService are put in here - joinKeysSet := make(map[string]interface{}) - entityJoinKeyToType := make(map[string]types.ValueType_Enum) - var entities []string - for _, featureView := range featureViews { - fvs[featureView.Base.Name] = featureView - entities = featureView.Entities - } - - for _, onDemandFeatureView := range onDemandFeatureViews { - odFvs[onDemandFeatureView.Base.Name] = onDemandFeatureView - } - - allFeatureTypes := make(map[string]types.ValueType_Enum) - features := make([]string, 0) - for _, featureProjection := range featureService.Projections { - // Create copies of FeatureView that may contains the same *FeatureView but - // 
each differentiated by a *FeatureViewProjection - featureViewName := featureProjection.Name - if fv, ok := fvs[featureViewName]; ok { - for _, f := range featureProjection.Features { - features = append(features, GetFullFeatureName(featureViewName, f.Name)) - allFeatureTypes[GetFullFeatureName(featureViewName, f.Name)] = f.Dtype - } - for _, entityName := range fv.Entities { - entity := entityMap[entityName] - if joinKeyAlias, ok := featureProjection.JoinKeyMap[entity.JoinKey]; ok { - joinKeysSet[joinKeyAlias] = nil - } else { - joinKeysSet[entity.JoinKey] = nil - } - } - } else if _, ok := odFvs[featureViewName]; ok { - for _, f := range featureProjection.Features { - // TODO(kevjumba) check in test here. - features = append(features, GetFullFeatureName(featureViewName, f.Name)) - allFeatureTypes[GetFullFeatureName(featureViewName, f.Name)] = f.Dtype - } - } else { - return nil, fmt.Errorf("no such feature view found in feature service %s", featureViewName) - } - } - - // Only get entities in the current feature service. 
- for _, entity := range entities { - if _, ok := joinKeysSet[entity]; ok { - joinKeys = append(joinKeys, entityMap[entity].JoinKey) - entityJoinKeyToType[entityMap[entity].JoinKey] = entityMap[entity].ValueType - } - } - - schema := &Schema{ - Entities: joinKeys, - Features: features, - EntityTypes: entityJoinKeyToType, - FeaturesTypes: allFeatureTypes, - } - return schema, nil -} - -func GetFullFeatureName(featureViewName string, featureName string) string { - return fmt.Sprintf("%s__%s", featureViewName, featureName) -} - -func (s *LoggingService) GetFcos() (map[string]*model.Entity, []*model.FeatureView, []*model.OnDemandFeatureView, error) { - odfvs, err := s.fs.ListOnDemandFeatureViews() - if err != nil { - return nil, nil, nil, err - } - fvs, err := s.fs.ListFeatureViews() - if err != nil { - return nil, nil, nil, err - } - entities, err := s.fs.ListEntities(true) - if err != nil { - return nil, nil, nil, err - } - entityMap := make(map[string]*model.Entity) - for _, entity := range entities { - entityMap[entity.Name] = entity - } - return entityMap, fvs, odfvs, nil -} - -func (l *LoggingService) GenerateLogs(featureService *model.FeatureService, joinKeyToEntityValues map[string][]*types.Value, features []*serving.GetOnlineFeaturesResponse_FeatureVector, requestData map[string]*types.RepeatedValue, requestId string) error { - if len(features) <= 0 { - return nil - } - - entitySet, featureViews, odfvs, err := l.GetFcos() - if err != nil { - return err - } - schema, err := GetSchemaFromFeatureService(featureService, entitySet, featureViews, odfvs) - - if err != nil { - return err - } - - numFeatures := len(schema.Features) - // Should be equivalent to how many entities there are(each feature row has (entity) number of features) - numRows := len(features[0].Values) - - for row_idx := 0; row_idx < numRows; row_idx++ { - featureValueLogRow := make([]*types.Value, numFeatures) - featureStatusLogRow := make([]serving.FieldStatus, numFeatures) - eventTimestampLogRow 
:= make([]*timestamppb.Timestamp, numFeatures) - for idx := 0; idx < len(features); idx++ { - featureValueLogRow[idx] = features[idx].Values[row_idx] - featureStatusLogRow[idx] = features[idx].Statuses[row_idx] - eventTimestampLogRow[idx] = features[idx].EventTimestamps[row_idx] - } - valuesPerEntityRow := make([]*types.Value, 0) - // ensure that the entity values are in the order that the schema defines which is the order that ListEntities returns the entities - for _, joinKey := range schema.Entities { - valuesPerEntityRow = append(valuesPerEntityRow, joinKeyToEntityValues[joinKey][row_idx]) - } - newLog := Log{ - EntityValue: valuesPerEntityRow, - FeatureValues: featureValueLogRow, - FeatureStatuses: featureStatusLogRow, - EventTimestamps: eventTimestampLogRow, - RequestId: requestId, - } - err := l.EmitLog(&newLog) - if err != nil { - return err - } - } - return nil -} diff --git a/go/cmd/server/logging/logging_test.go b/go/cmd/server/logging/logging_test.go deleted file mode 100644 index 68da0bf498f..00000000000 --- a/go/cmd/server/logging/logging_test.go +++ /dev/null @@ -1,402 +0,0 @@ -package logging - -import ( - "math/rand" - "reflect" - "testing" - "time" - - "github.com/apache/arrow/go/v8/arrow" - "github.com/apache/arrow/go/v8/arrow/array" - "github.com/feast-dev/feast/go/internal/feast/model" - "github.com/feast-dev/feast/go/internal/test" - "github.com/feast-dev/feast/go/protos/feast/serving" - "github.com/feast-dev/feast/go/protos/feast/types" - gotypes "github.com/feast-dev/feast/go/types" - "github.com/stretchr/testify/assert" - "google.golang.org/protobuf/types/known/timestamppb" -) - -func TestLoggingChannelTimeout(t *testing.T) { - // Pregenerated using `feast init`. 
- loggingService, err := NewLoggingService(nil, 1, "", false) - assert.Nil(t, err) - assert.Empty(t, loggingService.memoryBuffer.logs) - ts := timestamppb.New(time.Now()) - newLog := Log{ - FeatureStatuses: []serving.FieldStatus{serving.FieldStatus_PRESENT}, - EventTimestamps: []*timestamppb.Timestamp{ts, ts}, - } - loggingService.EmitLog(&newLog) - newTs := timestamppb.New(time.Now()) - - newLog2 := Log{ - FeatureStatuses: []serving.FieldStatus{serving.FieldStatus_PRESENT}, - EventTimestamps: []*timestamppb.Timestamp{newTs, newTs}, - } - err = loggingService.EmitLog(&newLog2) - // The channel times out and doesn't hang. - assert.NotNil(t, err) -} - -func TestSchemaTypeRetrieval(t *testing.T) { - featureService, entities, featureViews, odfvs := InitializeFeatureRepoVariablesForTest() - entityMap := make(map[string]*model.Entity) - expectedEntityNames := make([]string, 0) - expectedFeatureNames := make([]string, 0) - for _, entity := range entities { - entityMap[entity.Name] = entity - expectedEntityNames = append(expectedEntityNames, entity.Name) - } - for _, featureView := range featureViews { - for _, f := range featureView.Base.Features { - expectedFeatureNames = append(expectedFeatureNames, GetFullFeatureName(featureView.Base.Name, f.Name)) - } - } - for _, featureView := range odfvs { - for _, f := range featureView.Base.Features { - expectedFeatureNames = append(expectedFeatureNames, GetFullFeatureName(featureView.Base.Name, f.Name)) - } - } - - schema, err := GetSchemaFromFeatureService(featureService, entityMap, featureViews, odfvs) - assert.Nil(t, err) - - assert.Equal(t, expectedFeatureNames, schema.Features) - assert.Equal(t, expectedEntityNames, schema.Entities) - for _, entityName := range expectedEntityNames { - assert.Contains(t, schema.EntityTypes, entityName) - } - assert.True(t, reflect.DeepEqual(schema.EntityTypes["driver_id"], types.ValueType_INT64)) - - types := []types.ValueType_Enum{*types.ValueType_INT64.Enum(), 
*types.ValueType_FLOAT.Enum(), *types.ValueType_INT32.Enum(), *types.ValueType_DOUBLE.Enum(), *types.ValueType_INT32.Enum(), *types.ValueType_DOUBLE.Enum()} - for idx, featureName := range expectedFeatureNames { - assert.Contains(t, schema.FeaturesTypes, featureName) - assert.Equal(t, schema.FeaturesTypes[featureName], types[idx]) - } -} - -func TestSchemaRetrievalIgnoresEntitiesNotInFeatureService(t *testing.T) { - featureService, entities, featureViews, odfvs := InitializeFeatureRepoVariablesForTest() - //Remove entities in featureservice - for _, featureView := range featureViews { - featureView.Entities = []string{} - } - entityMap := make(map[string]*model.Entity) - for _, entity := range entities { - entityMap[entity.Name] = entity - } - schema, err := GetSchemaFromFeatureService(featureService, entityMap, featureViews, odfvs) - assert.Nil(t, err) - assert.Empty(t, schema.EntityTypes) -} - -func TestSchemaUsesOrderInFeatureService(t *testing.T) { - featureService, entities, featureViews, odfvs := InitializeFeatureRepoVariablesForTest() - expectedEntityNames := make([]string, 0) - expectedFeatureNames := make([]string, 0) - entityMap := make(map[string]*model.Entity) - for _, entity := range entities { - entityMap[entity.Name] = entity - } - for _, entity := range entities { - entityMap[entity.Name] = entity - expectedEntityNames = append(expectedEntityNames, entity.Name) - } - // Source of truth for order of featureNames - for _, featureView := range featureViews { - for _, f := range featureView.Base.Features { - expectedFeatureNames = append(expectedFeatureNames, GetFullFeatureName(featureView.Base.Name, f.Name)) - } - } - for _, featureView := range odfvs { - for _, f := range featureView.Base.Features { - expectedFeatureNames = append(expectedFeatureNames, GetFullFeatureName(featureView.Base.Name, f.Name)) - } - } - - rand.Seed(time.Now().UnixNano()) - // Shuffle the featureNames in incorrect order - for _, featureView := range featureViews { - 
rand.Shuffle(len(featureView.Base.Features), func(i, j int) { - featureView.Base.Features[i], featureView.Base.Features[j] = featureView.Base.Features[j], featureView.Base.Features[i] - }) - } - for _, featureView := range odfvs { - rand.Shuffle(len(featureView.Base.Features), func(i, j int) { - featureView.Base.Features[i], featureView.Base.Features[j] = featureView.Base.Features[j], featureView.Base.Features[i] - }) - } - - schema, err := GetSchemaFromFeatureService(featureService, entityMap, featureViews, odfvs) - assert.Nil(t, err) - - // Ensure the same results - assert.Equal(t, expectedFeatureNames, schema.Features) - assert.Equal(t, expectedEntityNames, schema.Entities) - for _, entityName := range expectedEntityNames { - assert.Contains(t, schema.EntityTypes, entityName) - } - assert.True(t, reflect.DeepEqual(schema.EntityTypes["driver_id"], types.ValueType_INT64)) - - types := []types.ValueType_Enum{*types.ValueType_INT64.Enum(), *types.ValueType_FLOAT.Enum(), *types.ValueType_INT32.Enum(), *types.ValueType_DOUBLE.Enum(), *types.ValueType_INT32.Enum(), *types.ValueType_DOUBLE.Enum()} - for idx, featureName := range expectedFeatureNames { - assert.Contains(t, schema.FeaturesTypes, featureName) - assert.Equal(t, schema.FeaturesTypes[featureName], types[idx]) - } -} - -func TestSerializeToArrowTable(t *testing.T) { - table, expectedSchema, expectedColumns, err := GetTestArrowTableAndExpectedResults() - assert.Nil(t, err) - defer table.Release() - tr := array.NewTableReader(table, -1) - - defer tr.Release() - for tr.Next() { - rec := tr.Record() - assert.NotNil(t, rec) - for _, field := range rec.Schema().Fields() { - assert.Contains(t, expectedSchema, field.Name) - assert.Equal(t, field.Type, expectedSchema[field.Name]) - } - values, err := test.GetProtoFromRecord(rec) - - assert.Nil(t, err) - for name, val := range values { - if name == "RequestId" { - continue - } - assert.Equal(t, len(val.Val), len(expectedColumns[name].Val)) - for idx, featureVal := range 
val.Val { - assert.Equal(t, featureVal.Val, expectedColumns[name].Val[idx].Val) - } - } - } -} - -// Initialize all dummy featureservice, entities and featureviews/on demand featureviews for testing. -func InitializeFeatureRepoVariablesForTest() (*model.FeatureService, []*model.Entity, []*model.FeatureView, []*model.OnDemandFeatureView) { - f1 := test.CreateNewFeature( - "int64", - types.ValueType_INT64, - ) - f2 := test.CreateNewFeature( - "float32", - types.ValueType_FLOAT, - ) - projection1 := test.CreateNewFeatureViewProjection( - "featureView1", - "", - []*model.Feature{f1, f2}, - map[string]string{}, - ) - baseFeatureView1 := test.CreateBaseFeatureView( - "featureView1", - []*model.Feature{f1, f2}, - projection1, - ) - featureView1 := test.CreateFeatureView(baseFeatureView1, nil, []string{"driver_id"}) - entity1 := test.CreateNewEntity("driver_id", types.ValueType_INT64, "driver_id") - f3 := test.CreateNewFeature( - "int32", - types.ValueType_INT32, - ) - f4 := test.CreateNewFeature( - "double", - types.ValueType_DOUBLE, - ) - projection2 := test.CreateNewFeatureViewProjection( - "featureView2", - "", - []*model.Feature{f3, f4}, - map[string]string{}, - ) - baseFeatureView2 := test.CreateBaseFeatureView( - "featureView2", - []*model.Feature{f3, f4}, - projection2, - ) - featureView2 := test.CreateFeatureView(baseFeatureView2, nil, []string{"driver_id"}) - - f5 := test.CreateNewFeature( - "odfv_f1", - types.ValueType_INT32, - ) - f6 := test.CreateNewFeature( - "odfv_f2", - types.ValueType_DOUBLE, - ) - projection3 := test.CreateNewFeatureViewProjection( - "od_bf1", - "", - []*model.Feature{f5, f6}, - map[string]string{}, - ) - od_bf1 := test.CreateBaseFeatureView( - "od_bf1", - []*model.Feature{f5, f6}, - projection3, - ) - odfv := model.NewOnDemandFeatureViewFromBase(od_bf1) - featureService := test.CreateNewFeatureService( - "test_service", - "test_project", - nil, - nil, - []*model.FeatureViewProjection{projection1, projection2, projection3}, - ) - return 
featureService, []*model.Entity{entity1}, []*model.FeatureView{featureView1, featureView2}, []*model.OnDemandFeatureView{odfv} -} - -// Create dummy FeatureService, Entities, and FeatureViews add them to the logger and convert the logs to Arrow table. -// Returns arrow table, expected test schema, and expected columns. -func GetTestArrowTableAndExpectedResults() (array.Table, map[string]arrow.DataType, map[string]*types.RepeatedValue, error) { - featureService, entities, featureViews, odfvs := InitializeFeatureRepoVariablesForTest() - entityMap := make(map[string]*model.Entity) - for _, entity := range entities { - entityMap[entity.Name] = entity - } - schema, err := GetSchemaFromFeatureService(featureService, entityMap, featureViews, odfvs) - if err != nil { - return nil, nil, nil, err - } - - ts := timestamppb.New(time.Now()) - log1 := Log{ - EntityValue: []*types.Value{ - {Val: &types.Value_Int64Val{Int64Val: 1001}}, - }, - FeatureValues: []*types.Value{ - /* normal feature values */ - {Val: &types.Value_Int64Val{Int64Val: rand.Int63()}}, - {Val: &types.Value_FloatVal{FloatVal: rand.Float32()}}, - {Val: &types.Value_Int32Val{Int32Val: rand.Int31()}}, - {Val: &types.Value_DoubleVal{DoubleVal: rand.Float64()}}, - /* odfv values */ - {Val: &types.Value_Int32Val{Int32Val: rand.Int31()}}, - {Val: &types.Value_DoubleVal{DoubleVal: rand.Float64()}}, - }, - FeatureStatuses: []serving.FieldStatus{ - serving.FieldStatus_PRESENT, - serving.FieldStatus_PRESENT, - serving.FieldStatus_PRESENT, - serving.FieldStatus_PRESENT, - serving.FieldStatus_PRESENT, - serving.FieldStatus_PRESENT, - }, - EventTimestamps: []*timestamppb.Timestamp{ - ts, ts, ts, ts, ts, ts, - }, - } - log2 := Log{ - EntityValue: []*types.Value{ - {Val: &types.Value_Int64Val{Int64Val: 1003}}, - }, - FeatureValues: []*types.Value{ - /* normal feature values */ - {Val: &types.Value_Int64Val{Int64Val: rand.Int63()}}, - {Val: &types.Value_FloatVal{FloatVal: rand.Float32()}}, - {Val: 
&types.Value_Int32Val{Int32Val: rand.Int31()}}, - {Val: &types.Value_DoubleVal{DoubleVal: rand.Float64()}}, - /* odfv values */ - {Val: &types.Value_Int32Val{Int32Val: rand.Int31()}}, - {Val: &types.Value_DoubleVal{DoubleVal: rand.Float64()}}, - }, - FeatureStatuses: []serving.FieldStatus{ - serving.FieldStatus_PRESENT, - serving.FieldStatus_PRESENT, - serving.FieldStatus_PRESENT, - serving.FieldStatus_PRESENT, - serving.FieldStatus_PRESENT, - serving.FieldStatus_PRESENT, - }, - EventTimestamps: []*timestamppb.Timestamp{ - ts, ts, ts, ts, ts, ts, - }, - } - - expectedSchema := make(map[string]arrow.DataType) - for joinKey, entityType := range schema.EntityTypes { - arrowType, err := gotypes.ValueTypeEnumToArrowType(entityType) - if err != nil { - return nil, nil, nil, err - } - expectedSchema[joinKey] = arrowType - } - expectedSchema["RequestId"] = arrow.BinaryTypes.String - for featureName, featureType := range schema.FeaturesTypes { - arrowType, err := gotypes.ValueTypeEnumToArrowType(featureType) - if err != nil { - return nil, nil, nil, err - } - expectedSchema[featureName] = arrowType - } - - expectedColumns := map[string]*types.RepeatedValue{ - "driver_id": { - Val: []*types.Value{ - log1.EntityValue[0], - log2.EntityValue[0]}, - }, - "featureView1__int64": { - Val: []*types.Value{ - log1.FeatureValues[0], - log2.FeatureValues[0]}, - }, - "featureView1__float32": { - Val: []*types.Value{ - log1.FeatureValues[1], - log2.FeatureValues[1]}, - }, - "featureView2__int32": { - Val: []*types.Value{ - log1.FeatureValues[2], - log2.FeatureValues[2]}, - }, - "featureView2__double": { - Val: []*types.Value{ - log1.FeatureValues[3], - log2.FeatureValues[3]}, - }, - "od_bf1__odfv_f1": { - Val: []*types.Value{ - log1.FeatureValues[4], - log2.FeatureValues[4]}, - }, - "od_bf1__odfv_f2": { - Val: []*types.Value{ - log1.FeatureValues[5], - log2.FeatureValues[5]}, - }, - } - loggingService, err := SetupLoggingServiceWithLogs([]*Log{&log1, &log2}) - if err != nil { - return 
nil, nil, nil, err - } - - table, err := ConvertMemoryBufferToArrowTable(loggingService.memoryBuffer, schema) - - if err != nil { - return nil, nil, nil, err - } - return table, expectedSchema, expectedColumns, nil -} - -func SetupLoggingServiceWithLogs(logs []*Log) (*LoggingService, error) { - loggingService, err := NewLoggingService(nil, len(logs), "", false) - if err != nil { - return nil, err - } - dummyTicker := time.NewTicker(10 * time.Second) - // stop the ticker so that the logs are not flushed to offline storage - dummyTicker.Stop() - for _, log := range logs { - loggingService.EmitLog(log) - } - // manually handle flushing logs - for i := 0; i < len(logs); i++ { - loggingService.PerformPeriodicAppendToMemoryBufferAndLogFlush(dummyTicker) - } - return loggingService, nil -} diff --git a/go/cmd/server/logging/offlinelogstorage.go b/go/cmd/server/logging/offlinelogstorage.go deleted file mode 100644 index 1a0f4142554..00000000000 --- a/go/cmd/server/logging/offlinelogstorage.go +++ /dev/null @@ -1,46 +0,0 @@ -package logging - -import ( - "errors" - - "github.com/apache/arrow/go/v8/arrow/array" - "github.com/feast-dev/feast/go/internal/feast/registry" -) - -type OfflineLogStoreConfig struct { - storeType string - project string - path string -} - -type OfflineLogStorage interface { - // Todo: Maybe we can add a must implement function that retrieves the correct config based on type - FlushToStorage(array.Table) error -} - -func getOfflineStoreType(offlineStoreConfig map[string]interface{}) (string, bool) { - if onlineStoreType, ok := offlineStoreConfig["storeType"]; !ok { - // Assume file for case of no specified. - return "", true - } else { - result, ok := onlineStoreType.(string) - return result, ok - } -} - -func NewOfflineStore(config *registry.RepoConfig) (OfflineLogStorage, error) { - offlineStoreType, _ := getOfflineStoreType(config.OfflineStore) - if offlineStoreType == "" { - // No offline store specified. 
- return nil, nil - } else if offlineStoreType == "file" { - fileConfig, err := GetFileConfig(config) - if err != nil { - return nil, err - } - offlineStore, err := NewFileOfflineStore(config.Project, fileConfig) - return offlineStore, err - } else { - return nil, errors.New("no offline storage besides file is currently supported") - } -} diff --git a/go/cmd/server/main.go b/go/cmd/server/main.go deleted file mode 100644 index 33d56e0a7a2..00000000000 --- a/go/cmd/server/main.go +++ /dev/null @@ -1,74 +0,0 @@ -package main - -import ( - "fmt" - "log" - "net" - "os" - - "github.com/feast-dev/feast/go/cmd/server/logging" - "github.com/feast-dev/feast/go/internal/feast" - "github.com/feast-dev/feast/go/internal/feast/registry" - "github.com/feast-dev/feast/go/protos/feast/serving" - "google.golang.org/grpc" -) - -const ( - flagFeastRepoPath = "FEAST_REPO_PATH" - flagFeastRepoConfig = "FEAST_REPO_CONFIG" - flagFeastSockFile = "FEAST_GRPC_SOCK_FILE" - feastServerVersion = "0.18.0" -) - -// TODO: Add a proper logging library such as https://github.com/Sirupsen/logrus -func main() { - repoPath := os.Getenv(flagFeastRepoPath) - repoConfigJSON := os.Getenv(flagFeastRepoConfig) - sockFile := os.Getenv(flagFeastSockFile) - if repoPath == "" && repoConfigJSON == "" { - log.Fatalln(fmt.Sprintf("One of %s of %s environment variables must be set", flagFeastRepoPath, flagFeastRepoConfig)) - } - - var repoConfig *registry.RepoConfig - var err error - if repoConfigJSON != "" { - repoConfig, err = registry.NewRepoConfigFromJSON(repoPath, repoConfigJSON) - if err != nil { - log.Fatalln(err) - } - } else { - repoConfig, err = registry.NewRepoConfigFromFile(repoPath) - if err != nil { - log.Fatalln(err) - } - } - - log.Println("Initializing feature store...") - fs, err := feast.NewFeatureStore(repoConfig, nil) - if err != nil { - log.Fatalln(err) - } - // Disable logging for now - loggingService, err := logging.NewLoggingService(fs, 1000, "", false) - if err != nil { - log.Fatalln(err) 
- } - defer fs.DestructOnlineStore() - startGrpcServer(fs, loggingService, sockFile) -} - -func startGrpcServer(fs *feast.FeatureStore, loggingService *logging.LoggingService, sockFile string) { - server := newServingServiceServer(fs, loggingService) - log.Printf("Starting a gRPC server listening on %s\n", sockFile) - lis, err := net.Listen("unix", sockFile) - if err != nil { - log.Fatalln(err) - } - grpcServer := grpc.NewServer() - defer grpcServer.Stop() - serving.RegisterServingServiceServer(grpcServer, server) - err = grpcServer.Serve(lis) - if err != nil { - log.Fatalln(err) - } -} diff --git a/go/embedded/online_features.go b/go/embedded/online_features.go index 24a54894306..7cd1e4ed815 100644 --- a/go/embedded/online_features.go +++ b/go/embedded/online_features.go @@ -4,22 +4,33 @@ import ( "context" "fmt" "log" + "net" + "os" + "os/signal" + "syscall" + "time" "github.com/apache/arrow/go/v8/arrow" "github.com/apache/arrow/go/v8/arrow/array" "github.com/apache/arrow/go/v8/arrow/cdata" "github.com/apache/arrow/go/v8/arrow/memory" + "google.golang.org/grpc" + "github.com/feast-dev/feast/go/internal/feast" "github.com/feast-dev/feast/go/internal/feast/model" "github.com/feast-dev/feast/go/internal/feast/onlineserving" "github.com/feast-dev/feast/go/internal/feast/registry" + "github.com/feast-dev/feast/go/internal/feast/server" + "github.com/feast-dev/feast/go/internal/feast/server/logging" "github.com/feast-dev/feast/go/internal/feast/transformation" + "github.com/feast-dev/feast/go/protos/feast/serving" prototypes "github.com/feast-dev/feast/go/protos/feast/types" "github.com/feast-dev/feast/go/types" ) type OnlineFeatureService struct { - fs *feast.FeatureStore + fs *feast.FeatureStore + grpcStopCh chan os.Signal } type OnlineFeatureServiceConfig struct { @@ -32,6 +43,15 @@ type DataTable struct { SchemaPtr uintptr } +// LoggingOptions is a public (embedded) copy of logging.LoggingOptions struct. 
+// See logging.LoggingOptions for properties description +type LoggingOptions struct { + ChannelCapacity int + EmitTimeout time.Duration + WriteInterval time.Duration + FlushInterval time.Duration +} + func NewOnlineFeatureService(conf *OnlineFeatureServiceConfig, transformationCallback transformation.TransformationCallback) *OnlineFeatureService { repoConfig, err := registry.NewRepoConfigFromJSON(conf.RepoPath, conf.RepoConfig) if err != nil { @@ -43,7 +63,11 @@ func NewOnlineFeatureService(conf *OnlineFeatureServiceConfig, transformationCal log.Fatalln(err) } - return &OnlineFeatureService{fs: fs} + // Notify this channel when receiving interrupt or termination signals from OS + c := make(chan os.Signal, 1) + signal.Notify(c, syscall.SIGINT, syscall.SIGTERM) + + return &OnlineFeatureService{fs: fs, grpcStopCh: c} } func (s *OnlineFeatureService) GetEntityTypesMap(featureRefs []string) (map[string]int32, error) { @@ -198,6 +222,76 @@ func (s *OnlineFeatureService) GetOnlineFeatures( return nil } +// StartGprcServer starts gRPC server with disabled feature logging and blocks the thread +func (s *OnlineFeatureService) StartGprcServer(host string, port int) error { + return s.StartGprcServerWithLogging(host, port, nil, LoggingOptions{}) +} + +// StartGprcServerWithLoggingDefaultOpts starts gRPC server with enabled feature logging but default configuration for logging +// Caller of this function must provide Python callback to flush buffered logs +func (s *OnlineFeatureService) StartGprcServerWithLoggingDefaultOpts(host string, port int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback) error { + defaultOpts := LoggingOptions{ + ChannelCapacity: logging.DefaultOptions.ChannelCapacity, + EmitTimeout: logging.DefaultOptions.EmitTimeout, + WriteInterval: logging.DefaultOptions.WriteInterval, + FlushInterval: logging.DefaultOptions.FlushInterval, + } + return s.StartGprcServerWithLogging(host, port, writeLoggedFeaturesCallback, defaultOpts) +} + +// 
StartGprcServerWithLogging starts gRPC server with enabled feature logging +// Caller of this function must provide Python callback to flush buffered logs as well as logging configuration (loggingOpts) +func (s *OnlineFeatureService) StartGprcServerWithLogging(host string, port int, writeLoggedFeaturesCallback logging.OfflineStoreWriteCallback, loggingOpts LoggingOptions) error { + var loggingService *logging.LoggingService = nil + var err error + if writeLoggedFeaturesCallback != nil { + sink, err := logging.NewOfflineStoreSink(writeLoggedFeaturesCallback) + if err != nil { + return err + } + + loggingService, err = logging.NewLoggingService(s.fs, sink, logging.LoggingOptions{ + ChannelCapacity: loggingOpts.ChannelCapacity, + EmitTimeout: loggingOpts.EmitTimeout, + WriteInterval: loggingOpts.WriteInterval, + FlushInterval: loggingOpts.FlushInterval, + }) + if err != nil { + return err + } + } + ser := server.NewGrpcServingServiceServer(s.fs, loggingService) + log.Printf("Starting a gRPC server on host %s port %d\n", host, port) + lis, err := net.Listen("tcp", fmt.Sprintf("%s:%d", host, port)) + if err != nil { + return err + } + + grpcServer := grpc.NewServer() + serving.RegisterServingServiceServer(grpcServer, ser) + + go func() { + // As soon as these signals are received from OS, try to gracefully stop the gRPC server + <-s.grpcStopCh + fmt.Println("Stopping the gRPC server...") + grpcServer.GracefulStop() + if loggingService != nil { + loggingService.Stop() + } + fmt.Println("gRPC server terminated") + }() + + err = grpcServer.Serve(lis) + if err != nil { + return err + } + return nil +} + +func (s *OnlineFeatureService) Stop() { + s.grpcStopCh <- syscall.SIGINT +} + /* Read Record Batch from memory managed by Python caller. 
Python part uses C ABI interface to export this record into C Data Interface, diff --git a/sdk/python/feast/embedded_go/lib/__init__.py b/go/internal/__init__.py similarity index 100% rename from sdk/python/feast/embedded_go/lib/__init__.py rename to go/internal/__init__.py diff --git a/go/internal/feast/__init__.py b/go/internal/feast/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/go/internal/feast/featurestore.go b/go/internal/feast/featurestore.go index 5e10f4978e0..4ecd781b746 100644 --- a/go/internal/feast/featurestore.go +++ b/go/internal/feast/featurestore.go @@ -5,6 +5,7 @@ import ( "errors" "github.com/apache/arrow/go/v8/arrow/memory" + "github.com/feast-dev/feast/go/internal/feast/model" "github.com/feast-dev/feast/go/internal/feast/onlineserving" "github.com/feast-dev/feast/go/internal/feast/onlinestore" @@ -287,3 +288,32 @@ func (fs *FeatureStore) readFromOnlineStore(ctx context.Context, entityRows []*p } return fs.onlineStore.OnlineRead(ctx, entityRowsValue, requestedFeatureViewNames, requestedFeatureNames) } + +func (fs *FeatureStore) GetFcosMap() (map[string]*model.Entity, map[string]*model.FeatureView, map[string]*model.OnDemandFeatureView, error) { + odfvs, err := fs.ListOnDemandFeatureViews() + if err != nil { + return nil, nil, nil, err + } + fvs, err := fs.ListFeatureViews() + if err != nil { + return nil, nil, nil, err + } + entities, err := fs.ListEntities(true) + if err != nil { + return nil, nil, nil, err + } + + entityMap := make(map[string]*model.Entity) + for _, entity := range entities { + entityMap[entity.Name] = entity + } + fvMap := make(map[string]*model.FeatureView) + for _, fv := range fvs { + fvMap[fv.Base.Name] = fv + } + odfvMap := make(map[string]*model.OnDemandFeatureView) + for _, odfv := range odfvs { + odfvMap[odfv.Base.Name] = odfv + } + return entityMap, fvMap, odfvMap, nil +} diff --git a/go/internal/feast/featurestore_test.go b/go/internal/feast/featurestore_test.go index 
c8f9049c4a5..dd08bc287e9 100644 --- a/go/internal/feast/featurestore_test.go +++ b/go/internal/feast/featurestore_test.go @@ -6,10 +6,11 @@ import ( "runtime" "testing" + "github.com/stretchr/testify/assert" + "github.com/feast-dev/feast/go/internal/feast/onlinestore" "github.com/feast-dev/feast/go/internal/feast/registry" "github.com/feast-dev/feast/go/protos/feast/types" - "github.com/stretchr/testify/assert" ) // Return absolute path to the test_repo registry regardless of the working directory diff --git a/go/internal/feast/model/featureservice.go b/go/internal/feast/model/featureservice.go index 5619dd90426..ce2781efc28 100644 --- a/go/internal/feast/model/featureservice.go +++ b/go/internal/feast/model/featureservice.go @@ -1,8 +1,9 @@ package model import ( - "github.com/feast-dev/feast/go/protos/feast/core" timestamppb "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/feast-dev/feast/go/protos/feast/core" ) type FeatureService struct { @@ -11,6 +12,11 @@ type FeatureService struct { CreatedTimestamp *timestamppb.Timestamp LastUpdatedTimestamp *timestamppb.Timestamp Projections []*FeatureViewProjection + LoggingConfig *FeatureServiceLoggingConfig +} + +type FeatureServiceLoggingConfig struct { + SampleRate float32 } func NewFeatureServiceFromProto(proto *core.FeatureService) *FeatureService { @@ -18,10 +24,17 @@ func NewFeatureServiceFromProto(proto *core.FeatureService) *FeatureService { for index, projectionProto := range proto.Spec.Features { projections[index] = NewFeatureViewProjectionFromProto(projectionProto) } + var loggingConfig *FeatureServiceLoggingConfig + if proto.GetSpec().GetLoggingConfig() != nil { + loggingConfig = &FeatureServiceLoggingConfig{ + SampleRate: proto.GetSpec().GetLoggingConfig().SampleRate, + } + } return &FeatureService{Name: proto.Spec.Name, Project: proto.Spec.Project, CreatedTimestamp: proto.Meta.CreatedTimestamp, LastUpdatedTimestamp: proto.Meta.LastUpdatedTimestamp, Projections: projections, + 
LoggingConfig: loggingConfig, } } diff --git a/go/internal/feast/model/featureview.go b/go/internal/feast/model/featureview.go index 85fc7a60eeb..6c198f99947 100644 --- a/go/internal/feast/model/featureview.go +++ b/go/internal/feast/model/featureview.go @@ -1,9 +1,10 @@ package model import ( + durationpb "google.golang.org/protobuf/types/known/durationpb" + "github.com/feast-dev/feast/go/protos/feast/core" "github.com/feast-dev/feast/go/protos/feast/types" - durationpb "google.golang.org/protobuf/types/known/durationpb" ) const ( diff --git a/go/internal/feast/model/ondemandfeatureview.go b/go/internal/feast/model/ondemandfeatureview.go index b7a352cc205..b637cd75ed0 100644 --- a/go/internal/feast/model/ondemandfeatureview.go +++ b/go/internal/feast/model/ondemandfeatureview.go @@ -49,8 +49,10 @@ func (fs *OnDemandFeatureView) NewWithProjection(projection *FeatureViewProjecti } func NewOnDemandFeatureViewFromBase(base *BaseFeatureView) *OnDemandFeatureView { - - featureView := &OnDemandFeatureView{Base: base} + featureView := &OnDemandFeatureView{ + Base: base, + SourceFeatureViewProjections: map[string]*FeatureViewProjection{}, + SourceRequestDataSources: map[string]*core.DataSource_RequestDataOptions{}} return featureView } diff --git a/go/internal/feast/onlineserving/serving.go b/go/internal/feast/onlineserving/serving.go index 381ba5f0f2f..1d0567c3547 100644 --- a/go/internal/feast/onlineserving/serving.go +++ b/go/internal/feast/onlineserving/serving.go @@ -9,14 +9,15 @@ import ( "github.com/apache/arrow/go/v8/arrow" "github.com/apache/arrow/go/v8/arrow/memory" + "github.com/golang/protobuf/proto" + "google.golang.org/protobuf/types/known/durationpb" + "google.golang.org/protobuf/types/known/timestamppb" + "github.com/feast-dev/feast/go/internal/feast/model" "github.com/feast-dev/feast/go/internal/feast/onlinestore" "github.com/feast-dev/feast/go/protos/feast/serving" prototypes "github.com/feast-dev/feast/go/protos/feast/types" 
"github.com/feast-dev/feast/go/types" - "github.com/golang/protobuf/proto" - "google.golang.org/protobuf/types/known/durationpb" - "google.golang.org/protobuf/types/known/timestamppb" ) /* diff --git a/go/internal/feast/onlineserving/serving_test.go b/go/internal/feast/onlineserving/serving_test.go index 2f4cf8eabaa..0a00f546f9e 100644 --- a/go/internal/feast/onlineserving/serving_test.go +++ b/go/internal/feast/onlineserving/serving_test.go @@ -3,12 +3,13 @@ package onlineserving import ( "testing" - "github.com/feast-dev/feast/go/internal/feast/model" - "github.com/feast-dev/feast/go/protos/feast/core" - "github.com/feast-dev/feast/go/protos/feast/types" "github.com/stretchr/testify/assert" "google.golang.org/protobuf/types/known/durationpb" "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/feast-dev/feast/go/internal/feast/model" + "github.com/feast-dev/feast/go/protos/feast/core" + "github.com/feast-dev/feast/go/protos/feast/types" ) func TestGroupingFeatureRefs(t *testing.T) { diff --git a/go/internal/feast/onlinestore/onlinestore.go b/go/internal/feast/onlinestore/onlinestore.go index b4a25714808..64a05f144ce 100644 --- a/go/internal/feast/onlinestore/onlinestore.go +++ b/go/internal/feast/onlinestore/onlinestore.go @@ -3,11 +3,13 @@ package onlinestore import ( "context" "fmt" + "github.com/feast-dev/feast/go/internal/feast/registry" + "github.com/golang/protobuf/ptypes/timestamp" + "github.com/feast-dev/feast/go/protos/feast/serving" "github.com/feast-dev/feast/go/protos/feast/types" - "github.com/golang/protobuf/ptypes/timestamp" ) type FeatureData struct { @@ -43,7 +45,8 @@ type OnlineStore interface { func getOnlineStoreType(onlineStoreConfig map[string]interface{}) (string, bool) { if onlineStoreType, ok := onlineStoreConfig["type"]; !ok { - return "", false + // If online store type isn't specified, default to sqlite + return "sqlite", true } else { result, ok := onlineStoreType.(string) return result, ok @@ -53,10 +56,11 @@ func 
getOnlineStoreType(onlineStoreConfig map[string]interface{}) (string, bool) func NewOnlineStore(config *registry.RepoConfig) (OnlineStore, error) { onlineStoreType, ok := getOnlineStoreType(config.OnlineStore) if !ok { + return nil, fmt.Errorf("could not get online store type from online store config: %+v", config.OnlineStore) + } else if onlineStoreType == "sqlite" { onlineStore, err := NewSqliteOnlineStore(config.Project, config, config.OnlineStore) return onlineStore, err - } - if onlineStoreType == "redis" { + } else if onlineStoreType == "redis" { onlineStore, err := NewRedisOnlineStore(config.Project, config.OnlineStore) return onlineStore, err } else { diff --git a/go/internal/feast/onlinestore/redisonlinestore.go b/go/internal/feast/onlinestore/redisonlinestore.go index 9049eae1033..df04856cbfa 100644 --- a/go/internal/feast/onlinestore/redisonlinestore.go +++ b/go/internal/feast/onlinestore/redisonlinestore.go @@ -9,12 +9,13 @@ import ( "strconv" "strings" - "github.com/feast-dev/feast/go/protos/feast/serving" - "github.com/feast-dev/feast/go/protos/feast/types" "github.com/go-redis/redis/v8" "github.com/golang/protobuf/proto" "github.com/spaolacci/murmur3" timestamppb "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/feast-dev/feast/go/protos/feast/serving" + "github.com/feast-dev/feast/go/protos/feast/types" ) type redisType int diff --git a/go/internal/feast/onlinestore/sqliteonlinestore.go b/go/internal/feast/onlinestore/sqliteonlinestore.go index f8c53255455..94ba0c0d568 100644 --- a/go/internal/feast/onlinestore/sqliteonlinestore.go +++ b/go/internal/feast/onlinestore/sqliteonlinestore.go @@ -5,19 +5,21 @@ import ( "database/sql" "encoding/hex" "errors" - "github.com/feast-dev/feast/go/internal/feast/registry" "strings" "sync" "time" + "github.com/feast-dev/feast/go/internal/feast/registry" + "context" "fmt" - "github.com/feast-dev/feast/go/protos/feast/serving" - "github.com/feast-dev/feast/go/protos/feast/types" _ 
"github.com/mattn/go-sqlite3" "google.golang.org/protobuf/proto" timestamppb "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/feast-dev/feast/go/protos/feast/serving" + "github.com/feast-dev/feast/go/protos/feast/types" ) type SqliteOnlineStore struct { diff --git a/go/internal/feast/onlinestore/sqliteonlinestore_test.go b/go/internal/feast/onlinestore/sqliteonlinestore_test.go index cbee9cd91c2..5af1c1f4ce4 100644 --- a/go/internal/feast/onlinestore/sqliteonlinestore_test.go +++ b/go/internal/feast/onlinestore/sqliteonlinestore_test.go @@ -8,17 +8,18 @@ import ( "github.com/feast-dev/feast/go/internal/feast/registry" + "github.com/stretchr/testify/assert" + "github.com/feast-dev/feast/go/internal/test" "github.com/feast-dev/feast/go/protos/feast/types" - "github.com/stretchr/testify/assert" ) func TestSqliteAndFeatureRepoSetup(t *testing.T) { - dir := "../../test" + dir := t.TempDir() feature_repo_path := filepath.Join(dir, "feature_repo") err := test.SetupCleanFeatureRepo(dir) assert.Nil(t, err) - defer test.CleanUpRepo(dir) + config, err := registry.NewRepoConfigFromFile(feature_repo_path) assert.Nil(t, err) assert.Equal(t, "feature_repo", config.Project) @@ -33,10 +34,10 @@ func TestSqliteAndFeatureRepoSetup(t *testing.T) { } func TestSqliteOnlineRead(t *testing.T) { - dir := "../../test" + dir := t.TempDir() feature_repo_path := filepath.Join(dir, "feature_repo") test.SetupCleanFeatureRepo(dir) - defer test.CleanUpRepo(dir) + config, err := registry.NewRepoConfigFromFile(feature_repo_path) assert.Nil(t, err) store, err := NewSqliteOnlineStore("feature_repo", config, config.OnlineStore) diff --git a/go/internal/feast/registry/local.go b/go/internal/feast/registry/local.go index 22db73a3206..8b35e5756b6 100644 --- a/go/internal/feast/registry/local.go +++ b/go/internal/feast/registry/local.go @@ -1,13 +1,15 @@ package registry import ( - "github.com/feast-dev/feast/go/protos/feast/core" - "github.com/golang/protobuf/proto" - 
"github.com/google/uuid" - "google.golang.org/protobuf/types/known/timestamppb" "io/ioutil" "os" "path/filepath" + + "github.com/golang/protobuf/proto" + "github.com/google/uuid" + "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/feast-dev/feast/go/protos/feast/core" ) // A LocalRegistryStore is a file-based implementation of the RegistryStore interface. diff --git a/go/internal/feast/registry/repoconfig.go b/go/internal/feast/registry/repoconfig.go index e5efd899de9..59d125b1bfc 100644 --- a/go/internal/feast/registry/repoconfig.go +++ b/go/internal/feast/registry/repoconfig.go @@ -2,9 +2,10 @@ package registry import ( "encoding/json" - "github.com/ghodss/yaml" "io/ioutil" "path/filepath" + + "github.com/ghodss/yaml" ) const ( diff --git a/go/internal/feast/registry/repoconfig_test.go b/go/internal/feast/registry/repoconfig_test.go index c3336fd618f..848977886c9 100644 --- a/go/internal/feast/registry/repoconfig_test.go +++ b/go/internal/feast/registry/repoconfig_test.go @@ -1,10 +1,11 @@ package registry import ( - "github.com/stretchr/testify/assert" "os" "path/filepath" "testing" + + "github.com/stretchr/testify/assert" ) func TestNewRepoConfig(t *testing.T) { diff --git a/go/cmd/server/server.go b/go/internal/feast/server/grpc_server.go similarity index 62% rename from go/cmd/server/server.go rename to go/internal/feast/server/grpc_server.go index 3708689268b..60408809594 100644 --- a/go/cmd/server/server.go +++ b/go/internal/feast/server/grpc_server.go @@ -1,27 +1,31 @@ -package main +package server import ( "context" + "fmt" + + "github.com/google/uuid" - "github.com/feast-dev/feast/go/cmd/server/logging" "github.com/feast-dev/feast/go/internal/feast" + "github.com/feast-dev/feast/go/internal/feast/server/logging" "github.com/feast-dev/feast/go/protos/feast/serving" prototypes "github.com/feast-dev/feast/go/protos/feast/types" "github.com/feast-dev/feast/go/types" - "github.com/google/uuid" ) -type servingServiceServer struct { +const 
feastServerVersion = "0.0.1" + +type grpcServingServiceServer struct { fs *feast.FeatureStore loggingService *logging.LoggingService serving.UnimplementedServingServiceServer } -func newServingServiceServer(fs *feast.FeatureStore, loggingService *logging.LoggingService) *servingServiceServer { - return &servingServiceServer{fs: fs, loggingService: loggingService} +func NewGrpcServingServiceServer(fs *feast.FeatureStore, loggingService *logging.LoggingService) *grpcServingServiceServer { + return &grpcServingServiceServer{fs: fs, loggingService: loggingService} } -func (s *servingServiceServer) GetFeastServingInfo(ctx context.Context, request *serving.GetFeastServingInfoRequest) (*serving.GetFeastServingInfoResponse, error) { +func (s *grpcServingServiceServer) GetFeastServingInfo(ctx context.Context, request *serving.GetFeastServingInfoRequest) (*serving.GetFeastServingInfoResponse, error) { return &serving.GetFeastServingInfoResponse{ Version: feastServerVersion, }, nil @@ -30,7 +34,7 @@ func (s *servingServiceServer) GetFeastServingInfo(ctx context.Context, request // Returns an object containing the response to GetOnlineFeatures. // Metadata contains featurenames that corresponds to the number of rows in response.Results. // Results contains values including the value of the feature, the event timestamp, and feature status in a columnar format. 
-func (s *servingServiceServer) GetOnlineFeatures(ctx context.Context, request *serving.GetOnlineFeaturesRequest) (*serving.GetOnlineFeaturesResponse, error) { +func (s *grpcServingServiceServer) GetOnlineFeatures(ctx context.Context, request *serving.GetOnlineFeaturesRequest) (*serving.GetOnlineFeaturesResponse, error) { requestId := GenerateRequestId() featuresOrService, err := s.fs.ParseFeatures(request.GetKind()) if err != nil { @@ -53,7 +57,7 @@ func (s *servingServiceServer) GetOnlineFeatures(ctx context.Context, request *s FeatureNames: &serving.FeatureList{Val: make([]string, 0)}, }, } - // Entities are currently part of the features as a value and the order that we add it to the resp MetaData + // JoinKeys are currently part of the features as a value and the order that we add it to the resp MetaData // Need to figure out a way to map the correct entities to the correct ordering entityValuesMap := make(map[string][]*prototypes.Value, 0) featureNames := make([]string, len(featureVectors)) @@ -74,8 +78,18 @@ func (s *servingServiceServer) GetOnlineFeatures(ctx context.Context, request *s EventTimestamps: vector.Timestamps, }) } - if featuresOrService.FeatureService != nil { - go s.loggingService.GenerateLogs(featuresOrService.FeatureService, entityValuesMap, resp.Results[len(request.Entities):], request.RequestContext, requestId) + + featureService := featuresOrService.FeatureService + if featureService != nil && featureService.LoggingConfig != nil && s.loggingService != nil { + logger, err := s.loggingService.GetOrCreateLogger(featureService) + if err != nil { + fmt.Printf("Couldn't instantiate logger for feature service %s: %+v", featuresOrService.FeatureService.Name, err) + } + + err = logger.Log(entityValuesMap, resp.Results[len(request.Entities):], resp.Metadata.FeatureNames.Val[len(request.Entities):], request.RequestContext, requestId) + if err != nil { + fmt.Printf("LoggerImpl error[%s]: %+v", featuresOrService.FeatureService.Name, err) + } } return 
resp, nil } diff --git a/go/cmd/server/server_test.go b/go/internal/feast/server/grpc_server_test.go similarity index 80% rename from go/cmd/server/server_test.go rename to go/internal/feast/server/grpc_server_test.go index 9d4ffb50bf8..52960321319 100644 --- a/go/cmd/server/server_test.go +++ b/go/internal/feast/server/grpc_server_test.go @@ -1,7 +1,8 @@ -package main +package server import ( "context" + "io/ioutil" "net" "os" "path/filepath" @@ -10,20 +11,23 @@ import ( "testing" "time" + "github.com/stretchr/testify/require" + "github.com/feast-dev/feast/go/internal/feast/registry" "github.com/apache/arrow/go/v8/arrow/array" "github.com/apache/arrow/go/v8/arrow/memory" "github.com/apache/arrow/go/v8/parquet/file" "github.com/apache/arrow/go/v8/parquet/pqarrow" - "github.com/feast-dev/feast/go/cmd/server/logging" + "github.com/stretchr/testify/assert" + "google.golang.org/grpc" + "google.golang.org/grpc/test/bufconn" + "github.com/feast-dev/feast/go/internal/feast" + "github.com/feast-dev/feast/go/internal/feast/server/logging" "github.com/feast-dev/feast/go/internal/test" "github.com/feast-dev/feast/go/protos/feast/serving" "github.com/feast-dev/feast/go/protos/feast/types" - "github.com/stretchr/testify/assert" - "google.golang.org/grpc" - "google.golang.org/grpc/test/bufconn" ) // Return absolute path to the test_repo directory regardless of the working directory @@ -41,27 +45,12 @@ func getRepoPath(basePath string) string { } // Starts a new grpc server, registers the serving service and returns a client. 
-func getClient(ctx context.Context, offlineStoreType string, basePath string, enableLogging bool) (serving.ServingServiceClient, func()) { +func getClient(ctx context.Context, offlineStoreType string, basePath string, logPath string) (serving.ServingServiceClient, func()) { buffer := 1024 * 1024 listener := bufconn.Listen(buffer) server := grpc.NewServer() config, err := registry.NewRepoConfigFromFile(getRepoPath(basePath)) - - // TODO(kevjumba): either add this officially or talk in design review about what the correct solution for what do with path. - // Currently in python we use the path in FileSource but it is not specified in configuration unless it is using file_url? - if enableLogging { - if config.OfflineStore == nil { - config.OfflineStore = map[string]interface{}{} - } - absPath, err := filepath.Abs(filepath.Join(getRepoPath(basePath), "log.parquet")) - if err != nil { - panic(err) - } - config.OfflineStore["path"] = absPath - config.OfflineStore["storeType"] = offlineStoreType - } - if err != nil { panic(err) } @@ -69,11 +58,24 @@ func getClient(ctx context.Context, offlineStoreType string, basePath string, en if err != nil { panic(err) } - loggingService, err := logging.NewLoggingService(fs, 1000, "test_service", enableLogging) + + var logSink logging.LogSink + if logPath != "" { + logSink, err = logging.NewFileLogSink(logPath) + if err != nil { + panic(err) + } + } + loggingService, err := logging.NewLoggingService(fs, logSink, logging.LoggingOptions{ + WriteInterval: 10 * time.Millisecond, + FlushInterval: logging.DefaultOptions.FlushInterval, + EmitTimeout: logging.DefaultOptions.EmitTimeout, + ChannelCapacity: logging.DefaultOptions.ChannelCapacity, + }) if err != nil { panic(err) } - servingServiceServer := newServingServiceServer(fs, loggingService) + servingServiceServer := NewGrpcServingServiceServer(fs, loggingService) serving.RegisterServingServiceServer(server, servingServiceServer) go func() { @@ -99,12 +101,13 @@ func getClient(ctx 
context.Context, offlineStoreType string, basePath string, en func TestGetFeastServingInfo(t *testing.T) { ctx := context.Background() // Pregenerated using `feast init`. - dir := "logging/" + dir := "../../test/" err := test.SetupInitializedRepo(dir) - assert.Nil(t, err) defer test.CleanUpInitializedRepo(dir) - client, closer := getClient(ctx, "", dir, false) + require.Nil(t, err) + + client, closer := getClient(ctx, "", dir, "") defer closer() response, err := client.GetFeastServingInfo(ctx, &serving.GetFeastServingInfoRequest{}) assert.Nil(t, err) @@ -114,12 +117,13 @@ func TestGetFeastServingInfo(t *testing.T) { func TestGetOnlineFeaturesSqlite(t *testing.T) { ctx := context.Background() // Pregenerated using `feast init`. - dir := "logging/" + dir := "../../test/" err := test.SetupInitializedRepo(dir) - assert.Nil(t, err) defer test.CleanUpInitializedRepo(dir) - client, closer := getClient(ctx, "", dir, false) + require.Nil(t, err) + + client, closer := getClient(ctx, "", dir, "") defer closer() entities := make(map[string]*types.RepeatedValue) entities["driver_id"] = &types.RepeatedValue{ @@ -173,12 +177,14 @@ func TestGetOnlineFeaturesSqlite(t *testing.T) { func TestGetOnlineFeaturesSqliteWithLogging(t *testing.T) { ctx := context.Background() // Pregenerated using `feast init`. 
- dir := "logging/" + dir := "../../test/" err := test.SetupInitializedRepo(dir) - assert.Nil(t, err) defer test.CleanUpInitializedRepo(dir) - client, closer := getClient(ctx, "file", dir, true) + require.Nil(t, err) + + logPath := t.TempDir() + client, closer := getClient(ctx, "file", dir, logPath) defer closer() entities := make(map[string]*types.RepeatedValue) entities["driver_id"] = &types.RepeatedValue{ @@ -207,18 +213,20 @@ func TestGetOnlineFeaturesSqliteWithLogging(t *testing.T) { // TODO(kevjumba): implement for timestamp and status expectedLogValues, _, _ := GetExpectedLogRows(featureNames, response.Results[len(request.Entities):]) expectedLogValues["driver_id"] = entities["driver_id"] - logPath, err := filepath.Abs(filepath.Join(dir, "feature_repo", "log.parquet")) + // Wait for logger to flush. - assert.Eventually(t, func() bool { - var _, err = os.Stat(logPath) - if os.IsNotExist(err) { + require.Eventually(t, func() bool { + files, err := ioutil.ReadDir(logPath) + if err != nil || len(files) == 0 { return false - } else { - return true } - }, 1*time.Second, logging.DEFAULT_LOG_FLUSH_INTERVAL) - assert.Nil(t, err) - pf, err := file.OpenParquetFile(logPath, false) + stat, err := os.Stat(filepath.Join(logPath, files[0].Name())) + return err == nil && stat.Size() > 0 + }, 1*time.Second, 100*time.Millisecond) + + files, err := ioutil.ReadDir(logPath) + logFile := filepath.Join(logPath, files[0].Name()) + pf, err := file.OpenParquetFile(logFile, false) assert.Nil(t, err) reader, err := pqarrow.NewFileReader(pf, pqarrow.ArrowReadProperties{}, memory.DefaultAllocator) @@ -232,26 +240,23 @@ func TestGetOnlineFeaturesSqliteWithLogging(t *testing.T) { for tr.Next() { rec := tr.Record() assert.NotNil(t, rec) - values, err := test.GetProtoFromRecord(rec) + actualValues, err := test.GetProtoFromRecord(rec) assert.Nil(t, err) - assert.Equal(t, len(values)-1 /*request id column not counted*/, len(expectedLogValues)) - // Need to iterate through and compare because 
certain values in types.RepeatedValues aren't accurately being compared. - for name, val := range values { + // Need to iterate through and compare because certain actualValues in types.RepeatedValues aren't accurately being compared. + for name, val := range expectedLogValues { if name == "RequestId" { // Ensure there are request ids for each entity. - assert.Equal(t, len(val.Val), len(response.Results[0].Values)) + assert.Equal(t, len(val.Val), len(actualValues[name].Val)) } else { - assert.Equal(t, len(val.Val), len(expectedLogValues[name].Val)) + assert.Equal(t, len(val.Val), len(actualValues[name].Val)) for idx, featureVal := range val.Val { - assert.Equal(t, featureVal.Val, expectedLogValues[name].Val[idx].Val) + assert.Equal(t, featureVal.Val, actualValues[name].Val[idx].Val) } } } } - err = test.CleanUpFile(logPath) - assert.Nil(t, err) } // Generate the expected log rows based on the resulting feature vector returned from GetOnlineFeatures. diff --git a/go/internal/feast/server/logging/featureserviceschema.go b/go/internal/feast/server/logging/featureserviceschema.go new file mode 100644 index 00000000000..5047346c2ca --- /dev/null +++ b/go/internal/feast/server/logging/featureserviceschema.go @@ -0,0 +1,97 @@ +package logging + +import ( + "fmt" + + "github.com/feast-dev/feast/go/internal/feast/model" + "github.com/feast-dev/feast/go/protos/feast/types" +) + +type FeatureServiceSchema struct { + JoinKeys []string + Features []string + RequestData []string + + JoinKeysTypes map[string]types.ValueType_Enum + FeaturesTypes map[string]types.ValueType_Enum + RequestDataTypes map[string]types.ValueType_Enum +} + +func GenerateSchemaFromFeatureService(fs FeatureStore, featureServiceName string) (*FeatureServiceSchema, error) { + entityMap, fvMap, odFvMap, err := fs.GetFcosMap() + if err != nil { + return nil, err + } + + featureService, err := fs.GetFeatureService(featureServiceName) + if err != nil { + return nil, err + } + + return 
generateSchema(featureService, entityMap, fvMap, odFvMap) +} + +func generateSchema(featureService *model.FeatureService, entityMap map[string]*model.Entity, fvMap map[string]*model.FeatureView, odFvMap map[string]*model.OnDemandFeatureView) (*FeatureServiceSchema, error) { + joinKeys := make([]string, 0) + features := make([]string, 0) + requestData := make([]string, 0) + + joinKeysSet := make(map[string]interface{}) + + entityJoinKeyToType := make(map[string]types.ValueType_Enum) + allFeatureTypes := make(map[string]types.ValueType_Enum) + requestDataTypes := make(map[string]types.ValueType_Enum) + + for _, featureProjection := range featureService.Projections { + // Create copies of FeatureView that may contains the same *FeatureView but + // each differentiated by a *FeatureViewProjection + featureViewName := featureProjection.Name + if fv, ok := fvMap[featureViewName]; ok { + for _, f := range featureProjection.Features { + fullFeatureName := getFullFeatureName(featureProjection.NameToUse(), f.Name) + features = append(features, fullFeatureName) + allFeatureTypes[fullFeatureName] = f.Dtype + } + for _, entityName := range fv.Entities { + entity := entityMap[entityName] + var joinKey string + if joinKeyAlias, ok := featureProjection.JoinKeyMap[entity.JoinKey]; ok { + joinKey = joinKeyAlias + } else { + joinKey = entity.JoinKey + } + + if _, ok := joinKeysSet[joinKey]; !ok { + joinKeys = append(joinKeys, joinKey) + } + + joinKeysSet[joinKey] = nil + entityJoinKeyToType[joinKey] = entity.ValueType + } + } else if odFv, ok := odFvMap[featureViewName]; ok { + for _, f := range featureProjection.Features { + fullFeatureName := getFullFeatureName(featureProjection.NameToUse(), f.Name) + features = append(features, fullFeatureName) + allFeatureTypes[fullFeatureName] = f.Dtype + } + for paramName, paramType := range odFv.GetRequestDataSchema() { + requestData = append(requestData, paramName) + requestDataTypes[paramName] = paramType + } + } else { + return nil, 
fmt.Errorf("no such feature view %s found (referenced from feature service %s)", + featureViewName, featureService.Name) + } + } + + schema := &FeatureServiceSchema{ + JoinKeys: joinKeys, + Features: features, + RequestData: requestData, + + JoinKeysTypes: entityJoinKeyToType, + FeaturesTypes: allFeatureTypes, + RequestDataTypes: requestDataTypes, + } + return schema, nil +} diff --git a/go/internal/feast/server/logging/featureserviceschema_test.go b/go/internal/feast/server/logging/featureserviceschema_test.go new file mode 100644 index 00000000000..efcd5ec7fcc --- /dev/null +++ b/go/internal/feast/server/logging/featureserviceschema_test.go @@ -0,0 +1,204 @@ +package logging + +import ( + "math/rand" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/feast-dev/feast/go/internal/feast/model" + "github.com/feast-dev/feast/go/internal/test" + "github.com/feast-dev/feast/go/protos/feast/core" + "github.com/feast-dev/feast/go/protos/feast/types" +) + +func buildFCOMaps(entities []*model.Entity, fvs []*model.FeatureView, odFvs []*model.OnDemandFeatureView) (map[string]*model.Entity, map[string]*model.FeatureView, map[string]*model.OnDemandFeatureView) { + entityMap := make(map[string]*model.Entity) + fvMap := make(map[string]*model.FeatureView) + odFvMap := make(map[string]*model.OnDemandFeatureView) + + for _, entity := range entities { + entityMap[entity.Name] = entity + } + + for _, fv := range fvs { + fvMap[fv.Base.Name] = fv + } + + for _, fv := range odFvs { + odFvMap[fv.Base.Name] = fv + } + + return entityMap, fvMap, odFvMap +} + +func TestSchemaTypeRetrieval(t *testing.T) { + featureService, entities, fvs, odfvs := InitializeFeatureRepoVariablesForTest() + entityMap, fvMap, odFvMap := buildFCOMaps(entities, fvs, odfvs) + + expectedFeatureNames := make([]string, 0) + expectedRequestData := make([]string, 0) + + for _, featureView := range fvs { + for _, f := range featureView.Base.Features { + expectedFeatureNames = 
append(expectedFeatureNames, getFullFeatureName(featureView.Base.Name, f.Name)) + } + } + for _, odFv := range odfvs { + for _, f := range odFv.Base.Features { + expectedFeatureNames = append(expectedFeatureNames, getFullFeatureName(odFv.Base.Name, f.Name)) + } + for _, dataSource := range odFv.SourceRequestDataSources { + for _, field := range dataSource.Schema { + expectedRequestData = append(expectedRequestData, field.Name) + } + } + } + + schema, err := generateSchema(featureService, entityMap, fvMap, odFvMap) + assert.Nil(t, err) + + assert.Equal(t, expectedFeatureNames, schema.Features) + assert.Equal(t, []string{"driver_id"}, schema.JoinKeys) + assert.Equal(t, schema.JoinKeysTypes["driver_id"], types.ValueType_INT64) + + types := []types.ValueType_Enum{*types.ValueType_INT64.Enum(), *types.ValueType_FLOAT.Enum(), *types.ValueType_INT32.Enum(), *types.ValueType_DOUBLE.Enum(), *types.ValueType_INT32.Enum(), *types.ValueType_DOUBLE.Enum()} + for idx, featureName := range expectedFeatureNames { + assert.Contains(t, schema.FeaturesTypes, featureName) + assert.Equal(t, schema.FeaturesTypes[featureName], types[idx]) + } +} + +func TestSchemaRetrievalIgnoresEntitiesNotInFeatureService(t *testing.T) { + featureService, entities, fvs, odfvs := InitializeFeatureRepoVariablesForTest() + entityMap, fvMap, odFvMap := buildFCOMaps(entities, fvs, odfvs) + + //Remove entities in featureservice + for _, featureView := range fvs { + featureView.Entities = []string{} + } + + schema, err := generateSchema(featureService, entityMap, fvMap, odFvMap) + assert.Nil(t, err) + assert.Empty(t, schema.JoinKeysTypes) +} + +func TestSchemaUsesOrderInFeatureService(t *testing.T) { + featureService, entities, fvs, odfvs := InitializeFeatureRepoVariablesForTest() + entityMap, fvMap, odFvMap := buildFCOMaps(entities, fvs, odfvs) + + expectedFeatureNames := make([]string, 0) + + // Source of truth for order of featureNames + for _, featureView := range fvs { + for _, f := range 
featureView.Base.Features { + expectedFeatureNames = append(expectedFeatureNames, getFullFeatureName(featureView.Base.Name, f.Name)) + } + } + for _, featureView := range odfvs { + for _, f := range featureView.Base.Features { + expectedFeatureNames = append(expectedFeatureNames, getFullFeatureName(featureView.Base.Name, f.Name)) + } + } + + rand.Seed(time.Now().UnixNano()) + // Shuffle the featureNames in incorrect order + for _, featureView := range fvs { + rand.Shuffle(len(featureView.Base.Features), func(i, j int) { + featureView.Base.Features[i], featureView.Base.Features[j] = featureView.Base.Features[j], featureView.Base.Features[i] + }) + } + for _, featureView := range odfvs { + rand.Shuffle(len(featureView.Base.Features), func(i, j int) { + featureView.Base.Features[i], featureView.Base.Features[j] = featureView.Base.Features[j], featureView.Base.Features[i] + }) + } + + schema, err := generateSchema(featureService, entityMap, fvMap, odFvMap) + assert.Nil(t, err) + + // Ensure the same results + assert.Equal(t, expectedFeatureNames, schema.Features) + assert.Equal(t, []string{"driver_id"}, schema.JoinKeys) + +} + +// Initialize all dummy featureservice, entities and featureviews/on demand featureviews for testing. 
+func InitializeFeatureRepoVariablesForTest() (*model.FeatureService, []*model.Entity, []*model.FeatureView, []*model.OnDemandFeatureView) { + f1 := test.CreateNewFeature( + "int64", + types.ValueType_INT64, + ) + f2 := test.CreateNewFeature( + "float32", + types.ValueType_FLOAT, + ) + projection1 := test.CreateNewFeatureViewProjection( + "featureView1", + "", + []*model.Feature{f1, f2}, + map[string]string{}, + ) + baseFeatureView1 := test.CreateBaseFeatureView( + "featureView1", + []*model.Feature{f1, f2}, + projection1, + ) + featureView1 := test.CreateFeatureView(baseFeatureView1, nil, []string{"driver_id"}) + entity1 := test.CreateNewEntity("driver_id", types.ValueType_INT64, "driver_id") + f3 := test.CreateNewFeature( + "int32", + types.ValueType_INT32, + ) + f4 := test.CreateNewFeature( + "double", + types.ValueType_DOUBLE, + ) + projection2 := test.CreateNewFeatureViewProjection( + "featureView2", + "", + []*model.Feature{f3, f4}, + map[string]string{}, + ) + baseFeatureView2 := test.CreateBaseFeatureView( + "featureView2", + []*model.Feature{f3, f4}, + projection2, + ) + featureView2 := test.CreateFeatureView(baseFeatureView2, nil, []string{"driver_id"}) + + f5 := test.CreateNewFeature( + "odfv_f1", + types.ValueType_INT32, + ) + f6 := test.CreateNewFeature( + "odfv_f2", + types.ValueType_DOUBLE, + ) + projection3 := test.CreateNewFeatureViewProjection( + "od_bf1", + "", + []*model.Feature{f5, f6}, + map[string]string{}, + ) + od_bf1 := test.CreateBaseFeatureView( + "od_bf1", + []*model.Feature{f5, f6}, + projection3, + ) + odfv := model.NewOnDemandFeatureViewFromBase(od_bf1) + odfv.SourceRequestDataSources["input"] = &core.DataSource_RequestDataOptions{ + Schema: []*core.FeatureSpecV2{ + {Name: "param1", ValueType: types.ValueType_FLOAT}, + }, + } + featureService := test.CreateNewFeatureService( + "test_service", + "test_project", + nil, + nil, + []*model.FeatureViewProjection{projection1, projection2, projection3}, + ) + return featureService, 
[]*model.Entity{entity1}, []*model.FeatureView{featureView1, featureView2}, []*model.OnDemandFeatureView{odfv} +} diff --git a/go/internal/feast/server/logging/filelogsink.go b/go/internal/feast/server/logging/filelogsink.go new file mode 100644 index 00000000000..c9f2049a041 --- /dev/null +++ b/go/internal/feast/server/logging/filelogsink.go @@ -0,0 +1,55 @@ +package logging + +import ( + "fmt" + "io" + "os" + "path/filepath" + + "github.com/pkg/errors" + + "github.com/apache/arrow/go/v8/arrow" + "github.com/google/uuid" + + "github.com/apache/arrow/go/v8/arrow/array" + "github.com/apache/arrow/go/v8/parquet" + "github.com/apache/arrow/go/v8/parquet/pqarrow" +) + +type FileLogSink struct { + path string +} + +// FileLogSink is currently only used for testing. It will be instantiated during go unit tests to log to file +// and the parquet files will be cleaned up after the test is run. +func NewFileLogSink(path string) (*FileLogSink, error) { + if path == "" { + return nil, errors.New("need path for file log sink") + } + + absPath, err := filepath.Abs(path) + if err != nil { + return nil, err + } + return &FileLogSink{path: absPath}, nil +} + +func (s *FileLogSink) Write(record arrow.Record) error { + fileName, _ := uuid.NewUUID() + + var writer io.Writer + writer, err := os.Create(filepath.Join(s.path, fmt.Sprintf("%s.parquet", fileName.String()))) + if err != nil { + return err + } + table := array.NewTableFromRecords(record.Schema(), []arrow.Record{record}) + + props := parquet.NewWriterProperties(parquet.WithDictionaryDefault(false)) + arrProps := pqarrow.DefaultWriterProps() + return pqarrow.WriteTable(table, writer, 100, props, arrProps) +} + +func (s *FileLogSink) Flush(featureServiceName string) error { + // files are already flushed during Write + return nil +} diff --git a/go/internal/feast/server/logging/logger.go b/go/internal/feast/server/logging/logger.go new file mode 100644 index 00000000000..d7ed1fbe189 --- /dev/null +++ 
b/go/internal/feast/server/logging/logger.go @@ -0,0 +1,288 @@ +package logging + +import ( + "fmt" + "log" + "math/rand" + "strings" + "sync" + "time" + + "github.com/apache/arrow/go/v8/arrow" + "github.com/pkg/errors" + "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/feast-dev/feast/go/protos/feast/serving" + "github.com/feast-dev/feast/go/protos/feast/types" +) + +type Log struct { + // Example: val{int64_val: 5017}, val{int64_val: 1003} + EntityValue []*types.Value + RequestData []*types.Value + + FeatureValues []*types.Value + FeatureStatuses []serving.FieldStatus + EventTimestamps []*timestamppb.Timestamp + + RequestId string + LogTimestamp time.Time +} + +type LogSink interface { + // Write is used to unload logs from memory buffer. + // Logs are not guaranteed to be flushed to sink on this point. + // The data can just be written to local disk (depending on implementation). + Write(data arrow.Record) error + + // Flush actually send data to a sink. + // We want to control amount to interaction with sink, since it could be a costly operation. + // Also, some sinks like BigQuery might have quotes and physically limit amount of write requests per day. 
+ Flush(featureServiceName string) error +} + +type Logger interface { + Log(joinKeyToEntityValues map[string][]*types.Value, featureVectors []*serving.GetOnlineFeaturesResponse_FeatureVector, featureNames []string, requestData map[string]*types.RepeatedValue, requestId string) error +} + +type LoggerImpl struct { + featureServiceName string + + buffer *MemoryBuffer + schema *FeatureServiceSchema + + logCh chan *Log + signalCh chan interface{} + + sink LogSink + config LoggerConfig + + isStopped bool + cond *sync.Cond +} + +type LoggerConfig struct { + LoggingOptions + + SampleRate float32 +} + +func NewLoggerConfig(sampleRate float32, opts LoggingOptions) LoggerConfig { + return LoggerConfig{ + LoggingOptions: opts, + SampleRate: sampleRate, + } +} + +func NewLogger(schema *FeatureServiceSchema, featureServiceName string, sink LogSink, config LoggerConfig) (*LoggerImpl, error) { + logger := &LoggerImpl{ + featureServiceName: featureServiceName, + + logCh: make(chan *Log, config.ChannelCapacity), + signalCh: make(chan interface{}, 2), + sink: sink, + + buffer: &MemoryBuffer{ + logs: make([]*Log, 0), + schema: schema, + }, + schema: schema, + config: config, + + isStopped: false, + cond: sync.NewCond(&sync.Mutex{}), + } + + logger.startLoggerLoop() + return logger, nil +} + +func (l *LoggerImpl) EmitLog(log *Log) error { + select { + case l.logCh <- log: + return nil + case <-time.After(l.config.EmitTimeout): + return fmt.Errorf("could not add to log channel with capacity %d. Operation timed out. 
Current log channel length is %d", cap(l.logCh), len(l.logCh)) + } +} + +func (l *LoggerImpl) startLoggerLoop() { + go func() { + for { + if err := l.loggerLoop(); err != nil { + log.Printf("LoggerImpl[%s] recovered from panic: %+v", l.featureServiceName, err) + + // Sleep for a couple of milliseconds to avoid CPU load from a potential infinite panic-recovery loop + time.Sleep(5 * time.Millisecond) + continue // try again + } + + // graceful stop + return + } + }() +} + +// Select that either ingests new logs that are added to the logging channel, one at a time to add +// to the in-memory buffer or flushes all of them synchronously to the OfflineStorage on a time interval. +func (l *LoggerImpl) loggerLoop() (lErr error) { + defer func() { + // Recover from panic in the logger loop, so that it doesn't bring down the entire feature server + if r := recover(); r != nil { + rErr, ok := r.(error) + if !ok { + rErr = fmt.Errorf("%v", r) + } + lErr = errors.WithStack(rErr) + } + }() + + writeTicker := time.NewTicker(l.config.WriteInterval) + flushTicker := time.NewTicker(l.config.FlushInterval) + + for { + shouldStop := false + + select { + case <-l.signalCh: + err := l.buffer.writeBatch(l.sink) + if err != nil { + log.Printf("Log write failed: %+v", err) + } + err = l.sink.Flush(l.featureServiceName) + if err != nil { + log.Printf("Log flush failed: %+v", err) + } + shouldStop = true + case <-writeTicker.C: + err := l.buffer.writeBatch(l.sink) + if err != nil { + log.Printf("Log write failed: %+v", err) + } + case <-flushTicker.C: + err := l.sink.Flush(l.featureServiceName) + if err != nil { + log.Printf("Log flush failed: %+v", err) + } + case logItem := <-l.logCh: + err := l.buffer.Append(logItem) + if err != nil { + log.Printf("Append log failed: %+v", err) + } + } + + if shouldStop { + break + } + } + + writeTicker.Stop() + flushTicker.Stop() + + // Notify all waiters for graceful stop + l.cond.L.Lock() + l.isStopped = true + l.cond.Broadcast() + l.cond.L.Unlock() + 
return nil +} + +// Stop the loop goroutine gracefully +func (l *LoggerImpl) Stop() { + select { + case l.signalCh <- nil: + default: + } +} + +func (l *LoggerImpl) WaitUntilStopped() { + l.cond.L.Lock() + defer l.cond.L.Unlock() + for !l.isStopped { + l.cond.Wait() + } +} + +func getFullFeatureName(featureViewName string, featureName string) string { + return fmt.Sprintf("%s__%s", featureViewName, featureName) +} + +func (l *LoggerImpl) Log(joinKeyToEntityValues map[string][]*types.Value, featureVectors []*serving.GetOnlineFeaturesResponse_FeatureVector, featureNames []string, requestData map[string]*types.RepeatedValue, requestId string) error { + if len(featureVectors) == 0 { + return nil + } + + if rand.Float32() > l.config.SampleRate { + return nil + } + + numFeatures := len(l.schema.Features) + // Should be equivalent to how many entities there are(each feature row has (entity) number of features) + numRows := len(featureVectors[0].Values) + + featureNameToVectorIdx := make(map[string]int) + for idx, name := range featureNames { + featureNameToVectorIdx[name] = idx + } + + for rowIdx := 0; rowIdx < numRows; rowIdx++ { + featureValues := make([]*types.Value, numFeatures) + featureStatuses := make([]serving.FieldStatus, numFeatures) + eventTimestamps := make([]*timestamppb.Timestamp, numFeatures) + + for idx, featureName := range l.schema.Features { + featureIdx, ok := featureNameToVectorIdx[featureName] + if !ok { + featureNameParts := strings.Split(featureName, "__") + featureIdx, ok = featureNameToVectorIdx[featureNameParts[1]] + if !ok { + return errors.Errorf("Missing feature %s in log data", featureName) + } + } + featureValues[idx] = featureVectors[featureIdx].Values[rowIdx] + featureStatuses[idx] = featureVectors[featureIdx].Statuses[rowIdx] + eventTimestamps[idx] = featureVectors[featureIdx].EventTimestamps[rowIdx] + } + + entityValues := make([]*types.Value, len(l.schema.JoinKeys)) + for idx, joinKey := range l.schema.JoinKeys { + rows, ok := 
joinKeyToEntityValues[joinKey] + if !ok { + return errors.Errorf("Missing join key %s in log data", joinKey) + } + entityValues[idx] = rows[rowIdx] + } + + requestDataValues := make([]*types.Value, len(l.schema.RequestData)) + for idx, requestParam := range l.schema.RequestData { + rows, ok := requestData[requestParam] + if !ok { + return errors.Errorf("Missing request parameter %s in log data", requestParam) + } + requestDataValues[idx] = rows.Val[rowIdx] + } + + newLog := Log{ + EntityValue: entityValues, + RequestData: requestDataValues, + + FeatureValues: featureValues, + FeatureStatuses: featureStatuses, + EventTimestamps: eventTimestamps, + + RequestId: requestId, + LogTimestamp: time.Now().UTC(), + } + err := l.EmitLog(&newLog) + if err != nil { + return err + } + } + return nil +} + +type DummyLoggerImpl struct{} + +func (l *DummyLoggerImpl) Log(joinKeyToEntityValues map[string][]*types.Value, featureVectors []*serving.GetOnlineFeaturesResponse_FeatureVector, featureNames []string, requestData map[string]*types.RepeatedValue, requestId string) error { + return nil +} diff --git a/go/internal/feast/server/logging/logger_test.go b/go/internal/feast/server/logging/logger_test.go new file mode 100644 index 00000000000..5625b05a766 --- /dev/null +++ b/go/internal/feast/server/logging/logger_test.go @@ -0,0 +1,137 @@ +package logging + +import ( + "context" + "io/ioutil" + "path/filepath" + "testing" + "time" + + "github.com/apache/arrow/go/v8/arrow" + "github.com/apache/arrow/go/v8/arrow/array" + "github.com/apache/arrow/go/v8/arrow/memory" + "github.com/apache/arrow/go/v8/parquet/file" + "github.com/apache/arrow/go/v8/parquet/pqarrow" + "github.com/stretchr/testify/require" + + "github.com/feast-dev/feast/go/protos/feast/types" + + "github.com/stretchr/testify/assert" + "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/feast-dev/feast/go/protos/feast/serving" +) + +type DummySink struct{} + +func (s *DummySink) Write(rec arrow.Record) error { 
+ return nil +} + +func (s *DummySink) Flush(featureServiceName string) error { + return nil +} + +func TestLoggingChannelTimeout(t *testing.T) { + config := LoggerConfig{ + SampleRate: 1.0, + LoggingOptions: LoggingOptions{ + ChannelCapacity: 1, + EmitTimeout: DefaultOptions.EmitTimeout, + FlushInterval: DefaultOptions.FlushInterval, + WriteInterval: DefaultOptions.WriteInterval, + }, + } + logger, err := NewLogger(nil, "testFS", &DummySink{}, config) + + // stop log processing to check buffering channel + logger.Stop() + logger.WaitUntilStopped() + + assert.Nil(t, err) + assert.Empty(t, logger.buffer.logs) + ts := timestamppb.New(time.Now()) + newLog := Log{ + FeatureStatuses: []serving.FieldStatus{serving.FieldStatus_PRESENT}, + EventTimestamps: []*timestamppb.Timestamp{ts, ts}, + } + err = logger.EmitLog(&newLog) + assert.Nil(t, err) + + newLog2 := Log{ + FeatureStatuses: []serving.FieldStatus{serving.FieldStatus_PRESENT}, + EventTimestamps: []*timestamppb.Timestamp{ts, ts}, + } + err = logger.EmitLog(&newLog2) + // The channel times out and doesn't hang. 
+ assert.NotNil(t, err) +} + +func TestLogAndFlushToFile(t *testing.T) { + sink, err := NewFileLogSink(t.TempDir()) + assert.Nil(t, err) + + schema := &FeatureServiceSchema{ + JoinKeys: []string{"driver_id"}, + Features: []string{"view__feature"}, + JoinKeysTypes: map[string]types.ValueType_Enum{"driver_id": types.ValueType_INT32}, + FeaturesTypes: map[string]types.ValueType_Enum{"view__feature": types.ValueType_DOUBLE}, + } + config := LoggerConfig{ + SampleRate: 1.0, + LoggingOptions: LoggingOptions{ + ChannelCapacity: DefaultOptions.ChannelCapacity, + EmitTimeout: DefaultOptions.EmitTimeout, + FlushInterval: DefaultOptions.FlushInterval, + WriteInterval: 10 * time.Millisecond, + }, + } + logger, err := NewLogger(schema, "testFS", sink, config) + assert.Nil(t, err) + + assert.Nil(t, logger.Log( + map[string][]*types.Value{"driver_id": {{Val: &types.Value_Int32Val{Int32Val: 111}}}}, + []*serving.GetOnlineFeaturesResponse_FeatureVector{ + { + Values: []*types.Value{{Val: &types.Value_DoubleVal{DoubleVal: 2.0}}}, + Statuses: []serving.FieldStatus{serving.FieldStatus_PRESENT}, + EventTimestamps: []*timestamppb.Timestamp{timestamppb.Now()}, + }, + }, + []string{"view__feature"}, + map[string]*types.RepeatedValue{}, + "req-id", + )) + + require.Eventually(t, func() bool { + files, _ := ioutil.ReadDir(sink.path) + return len(files) > 0 + }, 60*time.Second, 100*time.Millisecond) + + files, _ := ioutil.ReadDir(sink.path) + + pf, err := file.OpenParquetFile(filepath.Join(sink.path, files[0].Name()), false) + assert.Nil(t, err) + + reader, err := pqarrow.NewFileReader(pf, pqarrow.ArrowReadProperties{}, memory.DefaultAllocator) + assert.Nil(t, err) + + tbl, err := reader.ReadTable(context.Background()) + assert.Nil(t, err) + tr := array.NewTableReader(tbl, -1) + defer tbl.Release() + + fieldNameToIdx := make(map[string]int) + for idx, field := range tbl.Schema().Fields() { + fieldNameToIdx[field.Name] = idx + } + + tr.Next() + rec := tr.Record() + + assert.Equal(t, "req-id", 
rec.Column(fieldNameToIdx[LOG_REQUEST_ID_FIELD]).(*array.String).Value(0)) + assert.EqualValues(t, 111, rec.Column(fieldNameToIdx["driver_id"]).(*array.Int32).Value(0)) + assert.EqualValues(t, 2.0, rec.Column(fieldNameToIdx["view__feature"]).(*array.Float64).Value(0)) + assert.EqualValues(t, serving.FieldStatus_PRESENT, rec.Column(fieldNameToIdx["view__feature__status"]).(*array.Int32).Value(0)) + +} diff --git a/go/internal/feast/server/logging/memorybuffer.go b/go/internal/feast/server/logging/memorybuffer.go new file mode 100644 index 00000000000..36eb7118cb4 --- /dev/null +++ b/go/internal/feast/server/logging/memorybuffer.go @@ -0,0 +1,161 @@ +package logging + +import ( + "fmt" + + "github.com/apache/arrow/go/v8/arrow" + "github.com/apache/arrow/go/v8/arrow/array" + "github.com/apache/arrow/go/v8/arrow/memory" + + "github.com/feast-dev/feast/go/protos/feast/types" + gotypes "github.com/feast-dev/feast/go/types" +) + +type MemoryBuffer struct { + logs []*Log + schema *FeatureServiceSchema +} + +const ( + LOG_TIMESTAMP_FIELD = "__log_timestamp" + LOG_DATE_FIELD = "__log_date" + LOG_REQUEST_ID_FIELD = "__request_id" +) + +// Acquires the logging schema from the feature service, converts the memory buffer array of rows of logs and flushes +// them to the offline storage. 
+func (b *MemoryBuffer) writeBatch(sink LogSink) error { + if len(b.logs) == 0 { + return nil + } + + record, err := b.convertToArrowRecord() + + if err != nil { + return err + } + err = sink.Write(record) + if err != nil { + return err + } + + b.logs = b.logs[:0] + return nil +} + +func (b *MemoryBuffer) Append(log *Log) error { + b.logs = append(b.logs, log) + return nil +} + +func (b *MemoryBuffer) getArrowSchema() (*arrow.Schema, error) { + fields := make([]arrow.Field, 0) + + for _, joinKey := range b.schema.JoinKeys { + arrowType, err := gotypes.ValueTypeEnumToArrowType(b.schema.JoinKeysTypes[joinKey]) + if err != nil { + return nil, err + } + + fields = append(fields, arrow.Field{Name: joinKey, Type: arrowType}) + } + + for _, requestParam := range b.schema.RequestData { + arrowType, err := gotypes.ValueTypeEnumToArrowType(b.schema.RequestDataTypes[requestParam]) + if err != nil { + return nil, err + } + + fields = append(fields, arrow.Field{Name: requestParam, Type: arrowType}) + } + + for _, featureName := range b.schema.Features { + arrowType, err := gotypes.ValueTypeEnumToArrowType(b.schema.FeaturesTypes[featureName]) + if err != nil { + return nil, err + } + + fields = append(fields, arrow.Field{Name: featureName, Type: arrowType}) + fields = append(fields, arrow.Field{ + Name: fmt.Sprintf("%s__timestamp", featureName), + Type: arrow.FixedWidthTypes.Timestamp_s}) + fields = append(fields, arrow.Field{ + Name: fmt.Sprintf("%s__status", featureName), + Type: arrow.PrimitiveTypes.Int32}) + } + + fields = append(fields, arrow.Field{Name: LOG_TIMESTAMP_FIELD, Type: arrow.FixedWidthTypes.Timestamp_us}) + fields = append(fields, arrow.Field{Name: LOG_DATE_FIELD, Type: arrow.FixedWidthTypes.Date32}) + fields = append(fields, arrow.Field{Name: LOG_REQUEST_ID_FIELD, Type: arrow.BinaryTypes.String}) + + return arrow.NewSchema(fields, nil), nil +} + +// convertToArrowRecord Takes memory buffer of logs in array row and converts them to columnar with generated 
fcoschema generated by GetFcoSchema +// and writes them to arrow table. +// Returns arrow table that contains all of the logs in columnar format. +func (b *MemoryBuffer) convertToArrowRecord() (arrow.Record, error) { + arrowMemory := memory.NewGoAllocator() + numRows := len(b.logs) + + arrowSchema, err := b.getArrowSchema() + if err != nil { + return nil, err + } + + columns := make(map[string][]*types.Value) + fieldNameToIdx := make(map[string]int) + for idx, field := range arrowSchema.Fields() { + fieldNameToIdx[field.Name] = idx + } + + builder := array.NewRecordBuilder(arrowMemory, arrowSchema) + defer builder.Release() + + builder.Reserve(numRows) + + for rowIdx, logRow := range b.logs { + for colIdx, joinKey := range b.schema.JoinKeys { + if _, ok := columns[joinKey]; !ok { + columns[joinKey] = make([]*types.Value, numRows) + } + columns[joinKey][rowIdx] = logRow.EntityValue[colIdx] + } + for colIdx, requestParam := range b.schema.RequestData { + if _, ok := columns[requestParam]; !ok { + columns[requestParam] = make([]*types.Value, numRows) + } + columns[requestParam][rowIdx] = logRow.RequestData[colIdx] + } + for colIdx, featureName := range b.schema.Features { + if _, ok := columns[featureName]; !ok { + columns[featureName] = make([]*types.Value, numRows) + } + columns[featureName][rowIdx] = logRow.FeatureValues[colIdx] + + timestamp := arrow.Timestamp(logRow.EventTimestamps[colIdx].GetSeconds()) + timestampFieldIdx := fieldNameToIdx[fmt.Sprintf("%s__timestamp", featureName)] + statusFieldIdx := fieldNameToIdx[fmt.Sprintf("%s__status", featureName)] + + builder.Field(timestampFieldIdx).(*array.TimestampBuilder).UnsafeAppend(timestamp) + builder.Field(statusFieldIdx).(*array.Int32Builder).UnsafeAppend(int32(logRow.FeatureStatuses[colIdx])) + } + + logTimestamp := arrow.Timestamp(logRow.LogTimestamp.UnixMicro()) + logDate := arrow.Date32FromTime(logRow.LogTimestamp) + + 
builder.Field(fieldNameToIdx[LOG_TIMESTAMP_FIELD]).(*array.TimestampBuilder).UnsafeAppend(logTimestamp) + builder.Field(fieldNameToIdx[LOG_DATE_FIELD]).(*array.Date32Builder).UnsafeAppend(logDate) + builder.Field(fieldNameToIdx[LOG_REQUEST_ID_FIELD]).(*array.StringBuilder).Append(logRow.RequestId) + } + + for columnName, protoArray := range columns { + fieldIdx := fieldNameToIdx[columnName] + err := gotypes.CopyProtoValuesToArrowArray(builder.Field(fieldIdx), protoArray) + if err != nil { + return nil, err + } + } + + return builder.NewRecord(), nil +} diff --git a/go/internal/feast/server/logging/memorybuffer_test.go b/go/internal/feast/server/logging/memorybuffer_test.go new file mode 100644 index 00000000000..59f035799bb --- /dev/null +++ b/go/internal/feast/server/logging/memorybuffer_test.go @@ -0,0 +1,174 @@ +package logging + +import ( + "math/rand" + "testing" + "time" + + "github.com/apache/arrow/go/v8/arrow" + "github.com/apache/arrow/go/v8/arrow/array" + "github.com/apache/arrow/go/v8/arrow/memory" + "github.com/stretchr/testify/assert" + "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/feast-dev/feast/go/protos/feast/serving" + "github.com/feast-dev/feast/go/protos/feast/types" +) + +func TestArrowSchemaGeneration(t *testing.T) { + schema := &FeatureServiceSchema{ + JoinKeys: []string{ + "driver_id", + }, + Features: []string{ + "featureView1__int64", + "featureView1__float32", + "featureView2__int32", + "featureView2__double", + }, + JoinKeysTypes: map[string]types.ValueType_Enum{ + "driver_id": types.ValueType_INT32, + }, + FeaturesTypes: map[string]types.ValueType_Enum{ + "featureView1__int64": types.ValueType_INT64, + "featureView1__float32": types.ValueType_FLOAT, + "featureView2__int32": types.ValueType_INT32, + "featureView2__double": types.ValueType_DOUBLE, + }, + } + + b := &MemoryBuffer{ + schema: schema, + } + + expectedArrowSchema := []arrow.Field{ + {Name: "driver_id", Type: arrow.PrimitiveTypes.Int32}, + {Name: 
"featureView1__int64", Type: arrow.PrimitiveTypes.Int64}, + {Name: "featureView1__int64__timestamp", Type: arrow.FixedWidthTypes.Timestamp_s}, + {Name: "featureView1__int64__status", Type: arrow.PrimitiveTypes.Int32}, + {Name: "featureView1__float32", Type: arrow.PrimitiveTypes.Float32}, + {Name: "featureView1__float32__timestamp", Type: arrow.FixedWidthTypes.Timestamp_s}, + {Name: "featureView1__float32__status", Type: arrow.PrimitiveTypes.Int32}, + {Name: "featureView2__int32", Type: arrow.PrimitiveTypes.Int32}, + {Name: "featureView2__int32__timestamp", Type: arrow.FixedWidthTypes.Timestamp_s}, + {Name: "featureView2__int32__status", Type: arrow.PrimitiveTypes.Int32}, + {Name: "featureView2__double", Type: arrow.PrimitiveTypes.Float64}, + {Name: "featureView2__double__timestamp", Type: arrow.FixedWidthTypes.Timestamp_s}, + {Name: "featureView2__double__status", Type: arrow.PrimitiveTypes.Int32}, + {Name: "__log_timestamp", Type: arrow.FixedWidthTypes.Timestamp_us}, + {Name: "__log_date", Type: arrow.FixedWidthTypes.Date32}, + {Name: "__request_id", Type: arrow.BinaryTypes.String}, + } + + actualSchema, err := b.getArrowSchema() + assert.Nil(t, err) + assert.Equal(t, expectedArrowSchema, actualSchema.Fields()) +} + +func TestSerializeToArrowTable(t *testing.T) { + schema := &FeatureServiceSchema{ + JoinKeys: []string{ + "driver_id", + }, + Features: []string{ + "featureView1__int64", + "featureView1__float32", + }, + JoinKeysTypes: map[string]types.ValueType_Enum{ + "driver_id": types.ValueType_INT32, + }, + FeaturesTypes: map[string]types.ValueType_Enum{ + "featureView1__int64": types.ValueType_INT64, + "featureView1__float32": types.ValueType_FLOAT, + }, + } + + ts := timestamppb.New(time.Now()) + b := &MemoryBuffer{ + schema: schema, + logs: []*Log{ + { + EntityValue: []*types.Value{ + {Val: &types.Value_Int64Val{Int64Val: 1001}}, + }, + FeatureValues: []*types.Value{ + {Val: &types.Value_Int64Val{Int64Val: rand.Int63()}}, + {Val: 
&types.Value_FloatVal{FloatVal: rand.Float32()}}, + }, + FeatureStatuses: []serving.FieldStatus{ + serving.FieldStatus_PRESENT, + serving.FieldStatus_OUTSIDE_MAX_AGE, + }, + EventTimestamps: []*timestamppb.Timestamp{ + ts, ts, + }, + RequestId: "aaa", + LogTimestamp: time.Now(), + }, + { + EntityValue: []*types.Value{ + {Val: &types.Value_Int64Val{Int64Val: 1003}}, + }, + FeatureValues: []*types.Value{ + {Val: &types.Value_Int64Val{Int64Val: rand.Int63()}}, + {Val: &types.Value_FloatVal{FloatVal: rand.Float32()}}, + }, + FeatureStatuses: []serving.FieldStatus{ + serving.FieldStatus_PRESENT, + serving.FieldStatus_PRESENT, + }, + EventTimestamps: []*timestamppb.Timestamp{ + ts, ts, + }, + RequestId: "bbb", + LogTimestamp: time.Now(), + }, + }, + } + + pool := memory.NewGoAllocator() + arrowSchema, _ := b.getArrowSchema() + builder := array.NewRecordBuilder(pool, arrowSchema) + defer builder.Release() + + // join key: driver_id + builder.Field(0).(*array.Int32Builder).AppendValues( + []int32{b.logs[0].EntityValue[0].GetInt32Val(), b.logs[1].EntityValue[0].GetInt32Val()}, []bool{true, true}) + + // feature int64 + builder.Field(1).(*array.Int64Builder).AppendValues( + []int64{b.logs[0].FeatureValues[0].GetInt64Val(), b.logs[1].FeatureValues[0].GetInt64Val()}, []bool{true, true}) + builder.Field(2).(*array.TimestampBuilder).AppendValues( + []arrow.Timestamp{arrow.Timestamp(ts.GetSeconds()), arrow.Timestamp(ts.GetSeconds())}, []bool{true, true}) + builder.Field(3).(*array.Int32Builder).AppendValues( + []int32{int32(serving.FieldStatus_PRESENT), int32(serving.FieldStatus_PRESENT)}, []bool{true, true}) + + // feature float + builder.Field(4).(*array.Float32Builder).AppendValues( + []float32{b.logs[0].FeatureValues[1].GetFloatVal(), b.logs[1].FeatureValues[1].GetFloatVal()}, []bool{true, true}) + builder.Field(5).(*array.TimestampBuilder).AppendValues( + []arrow.Timestamp{arrow.Timestamp(ts.GetSeconds()), arrow.Timestamp(ts.GetSeconds())}, []bool{true, true}) + 
builder.Field(6).(*array.Int32Builder).AppendValues( + []int32{int32(serving.FieldStatus_OUTSIDE_MAX_AGE), int32(serving.FieldStatus_PRESENT)}, []bool{true, true}) + + // log timestamp + builder.Field(7).(*array.TimestampBuilder).AppendValues( + []arrow.Timestamp{arrow.Timestamp(b.logs[0].LogTimestamp.UnixMicro()), arrow.Timestamp(b.logs[1].LogTimestamp.UnixMicro())}, []bool{true, true}) + + // log date + today := time.Now().Truncate(24 * time.Hour) + builder.Field(8).(*array.Date32Builder).AppendValues( + []arrow.Date32{arrow.Date32FromTime(today), arrow.Date32FromTime(today)}, []bool{true, true}) + + // request id + builder.Field(9).(*array.StringBuilder).AppendValues( + []string{b.logs[0].RequestId, b.logs[1].RequestId}, []bool{true, true}) + + record, err := b.convertToArrowRecord() + expectedRecord := builder.NewRecord() + assert.Nil(t, err) + for colIdx := 0; colIdx < int(record.NumCols()); colIdx++ { + assert.Equal(t, expectedRecord.Column(colIdx), record.Column(colIdx), "Columns with idx %d are not equal", colIdx) + } + +} diff --git a/go/internal/feast/server/logging/offlinestoresink.go b/go/internal/feast/server/logging/offlinestoresink.go new file mode 100644 index 00000000000..ee4c646a9be --- /dev/null +++ b/go/internal/feast/server/logging/offlinestoresink.go @@ -0,0 +1,83 @@ +package logging + +import ( + "errors" + "fmt" + "io" + "io/ioutil" + "os" + "path/filepath" + + "github.com/apache/arrow/go/v8/arrow" + "github.com/apache/arrow/go/v8/arrow/array" + "github.com/apache/arrow/go/v8/parquet" + "github.com/apache/arrow/go/v8/parquet/pqarrow" + "github.com/google/uuid" +) + +type OfflineStoreWriteCallback func(featureServiceName, datasetDir string) string + +type OfflineStoreSink struct { + datasetDir string + writeCallback OfflineStoreWriteCallback +} + +func NewOfflineStoreSink(writeCallback OfflineStoreWriteCallback) (*OfflineStoreSink, error) { + return &OfflineStoreSink{ + datasetDir: "", + writeCallback: writeCallback, + }, nil +} + +func (s 
*OfflineStoreSink) getOrCreateDatasetDir() (string, error) { + if s.datasetDir != "" { + return s.datasetDir, nil + } + dir, err := ioutil.TempDir("", "*") + if err != nil { + return "", err + } + s.datasetDir = dir + return s.datasetDir, nil +} + +func (s *OfflineStoreSink) cleanCurrentDatasetDir() error { + if s.datasetDir == "" { + return nil + } + datasetDir := s.datasetDir + s.datasetDir = "" + return os.RemoveAll(datasetDir) +} + +func (s *OfflineStoreSink) Write(record arrow.Record) error { + fileName, _ := uuid.NewUUID() + datasetDir, err := s.getOrCreateDatasetDir() + if err != nil { + return err + } + + var writer io.Writer + writer, err = os.Create(filepath.Join(datasetDir, fmt.Sprintf("%s.parquet", fileName.String()))) + if err != nil { + return err + } + table := array.NewTableFromRecords(record.Schema(), []arrow.Record{record}) + + props := parquet.NewWriterProperties(parquet.WithDictionaryDefault(false)) + arrProps := pqarrow.DefaultWriterProps() + return pqarrow.WriteTable(table, writer, 1000, props, arrProps) +} + +func (s *OfflineStoreSink) Flush(featureServiceName string) error { + if s.datasetDir == "" { + return nil + } + + errMsg := s.writeCallback(featureServiceName, s.datasetDir) + if errMsg != "" { + return errors.New(errMsg) + } + + return s.cleanCurrentDatasetDir() +} diff --git a/go/internal/feast/server/logging/service.go b/go/internal/feast/server/logging/service.go new file mode 100644 index 00000000000..9249ad4f2f1 --- /dev/null +++ b/go/internal/feast/server/logging/service.go @@ -0,0 +1,107 @@ +package logging + +import ( + "sync" + "time" + + "github.com/pkg/errors" + + "github.com/feast-dev/feast/go/internal/feast/model" +) + +type FeatureStore interface { + GetFcosMap() (map[string]*model.Entity, map[string]*model.FeatureView, map[string]*model.OnDemandFeatureView, error) + GetFeatureService(name string) (*model.FeatureService, error) +} + +type LoggingOptions struct { + // How many log items can be buffered in channel + 
ChannelCapacity int + + // Waiting time when inserting new log into the channel + EmitTimeout time.Duration + + // Interval on which logs buffered in memory will be written to sink + WriteInterval time.Duration + + // Interval on which sink will be flushed + // (see LogSink interface for better explanation on differences with Write) + FlushInterval time.Duration +} + +type LoggingService struct { + // feature service name -> LoggerImpl + loggers map[string]*LoggerImpl + + fs FeatureStore + sink LogSink + opts LoggingOptions + + creationLock *sync.Mutex +} + +var ( + DefaultOptions = LoggingOptions{ + ChannelCapacity: 1000, + FlushInterval: 10 * time.Minute, + WriteInterval: 10 * time.Second, + EmitTimeout: 10 * time.Millisecond, + } +) + +func NewLoggingService(fs FeatureStore, sink LogSink, opts ...LoggingOptions) (*LoggingService, error) { + if len(opts) == 0 { + opts = append(opts, DefaultOptions) + } + + return &LoggingService{ + fs: fs, + loggers: make(map[string]*LoggerImpl), + sink: sink, + opts: opts[0], + creationLock: &sync.Mutex{}, + }, nil +} + +func (s *LoggingService) GetOrCreateLogger(featureService *model.FeatureService) (Logger, error) { + if logger, ok := s.loggers[featureService.Name]; ok { + return logger, nil + } + + if featureService.LoggingConfig == nil { + return nil, errors.New("Only feature services with configured logging can be used") + } + + s.creationLock.Lock() + defer s.creationLock.Unlock() + + // could be created by another go-routine on this point + if logger, ok := s.loggers[featureService.Name]; ok { + return logger, nil + } + + if s.sink == nil { + return &DummyLoggerImpl{}, nil + } + + config := NewLoggerConfig(featureService.LoggingConfig.SampleRate, s.opts) + schema, err := GenerateSchemaFromFeatureService(s.fs, featureService.Name) + if err != nil { + return nil, err + } + + logger, err := NewLogger(schema, featureService.Name, s.sink, config) + if err != nil { + return nil, err + } + s.loggers[featureService.Name] = logger 
+ + return logger, nil +} + +func (s *LoggingService) Stop() { + for _, logger := range s.loggers { + logger.Stop() + logger.WaitUntilStopped() + } +} diff --git a/go/internal/feast/transformation/transformation.go b/go/internal/feast/transformation/transformation.go index 319bed3b2c2..3dfbdc7f1b7 100644 --- a/go/internal/feast/transformation/transformation.go +++ b/go/internal/feast/transformation/transformation.go @@ -3,18 +3,20 @@ package transformation import ( "errors" "fmt" + "strings" + "unsafe" + "github.com/apache/arrow/go/v8/arrow" "github.com/apache/arrow/go/v8/arrow/array" "github.com/apache/arrow/go/v8/arrow/cdata" "github.com/apache/arrow/go/v8/arrow/memory" + "google.golang.org/protobuf/types/known/timestamppb" + "github.com/feast-dev/feast/go/internal/feast/model" "github.com/feast-dev/feast/go/internal/feast/onlineserving" "github.com/feast-dev/feast/go/protos/feast/serving" prototypes "github.com/feast-dev/feast/go/protos/feast/types" "github.com/feast-dev/feast/go/types" - "google.golang.org/protobuf/types/known/timestamppb" - "strings" - "unsafe" ) /* diff --git a/go/internal/test/feature_repo/__init__.py b/go/internal/test/feature_repo/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/go/cmd/server/logging/feature_repo/driver_stats.parquet b/go/internal/test/feature_repo/driver_stats.parquet similarity index 100% rename from go/cmd/server/logging/feature_repo/driver_stats.parquet rename to go/internal/test/feature_repo/driver_stats.parquet diff --git a/go/cmd/server/logging/feature_repo/example.py b/go/internal/test/feature_repo/example.py similarity index 82% rename from go/cmd/server/logging/feature_repo/example.py rename to go/internal/test/feature_repo/example.py index f3ca6123083..2b1d74ad32e 100644 --- a/go/cmd/server/logging/feature_repo/example.py +++ b/go/internal/test/feature_repo/example.py @@ -3,13 +3,15 @@ from google.protobuf.duration_pb2 import Duration from feast import Entity, Feature, FeatureView, 
FileSource, ValueType, FeatureService +from feast.feature_logging import LoggingConfig +from feast.infra.offline_stores.file_source import FileLoggingDestination # Read data from parquet files. Parquet is convenient for local development mode. For # production, you can use your favorite DWH, such as BigQuery. See Feast documentation # for more info. driver_hourly_stats = FileSource( path="driver_stats.parquet", - event_timestamp_column="event_timestamp", + timestamp_field="event_timestamp", created_timestamp_column="created", ) @@ -36,5 +38,6 @@ driver_stats_fs = FeatureService( name="test_service", - features=[driver_hourly_stats_view] + features=[driver_hourly_stats_view], + logging_config=LoggingConfig(destination=FileLoggingDestination(path="")) ) \ No newline at end of file diff --git a/go/cmd/server/logging/feature_repo/feature_store.yaml b/go/internal/test/feature_repo/feature_store.yaml similarity index 100% rename from go/cmd/server/logging/feature_repo/feature_store.yaml rename to go/internal/test/feature_repo/feature_store.yaml diff --git a/go/internal/test/go_integration_test_utils.go b/go/internal/test/go_integration_test_utils.go index d66a5461930..eb727ba1db9 100644 --- a/go/internal/test/go_integration_test_utils.go +++ b/go/internal/test/go_integration_test_utils.go @@ -19,6 +19,7 @@ import ( "time" "github.com/apache/arrow/go/v8/arrow/array" + "github.com/feast-dev/feast/go/internal/feast/model" "github.com/feast-dev/feast/go/protos/feast/types" gotypes "github.com/feast-dev/feast/go/types" @@ -137,10 +138,10 @@ func SetupInitializedRepo(basePath string) error { // var stderr bytes.Buffer // var stdout bytes.Buffer applyCommand.Dir = featureRepoPath - err = applyCommand.Run() + out, err := applyCommand.CombinedOutput() if err != nil { + log.Println(string(out)) return err - } t := time.Now() @@ -151,7 +152,7 @@ func SetupInitializedRepo(basePath string) error { materializeCommand := exec.Command("feast", "materialize-incremental", formattedTime) 
materializeCommand.Env = os.Environ() materializeCommand.Dir = featureRepoPath - out, err := materializeCommand.Output() + out, err = materializeCommand.CombinedOutput() if err != nil { log.Println(string(out)) return err @@ -175,22 +176,14 @@ func CleanUpInitializedRepo(basePath string) { } } -func CleanUpRepo(basePath string) { - featureRepoPath, err := filepath.Abs(filepath.Join(basePath, "feature_repo")) - if err != nil { - log.Fatal(err) - } - err = os.RemoveAll(featureRepoPath) - if err != nil { - log.Fatal(err) - } -} - -func GetProtoFromRecord(rec array.Record) (map[string]*types.RepeatedValue, error) { +func GetProtoFromRecord(rec arrow.Record) (map[string]*types.RepeatedValue, error) { r := make(map[string]*types.RepeatedValue) schema := rec.Schema() for idx, column := range rec.Columns() { field := schema.Field(idx) + if field.Type.ID() == arrow.FixedWidthTypes.Timestamp_ms.ID() || field.Type.ID() == arrow.FixedWidthTypes.Date32.ID() { + continue + } values, err := gotypes.ArrowValuesToProtoValues(column) if err != nil { return nil, err @@ -200,10 +193,6 @@ func GetProtoFromRecord(rec array.Record) (map[string]*types.RepeatedValue, erro return r, nil } -func CleanUpFile(absPath string) error { - return os.Remove(absPath) -} - func CreateBaseFeatureView(name string, features []*model.Feature, projection *model.FeatureViewProjection) *model.BaseFeatureView { return &model.BaseFeatureView{ Name: name, diff --git a/go/types/typeconversion.go b/go/types/typeconversion.go index c02768c755d..30fcc1e393c 100644 --- a/go/types/typeconversion.go +++ b/go/types/typeconversion.go @@ -6,6 +6,7 @@ import ( "github.com/apache/arrow/go/v8/arrow" "github.com/apache/arrow/go/v8/arrow/array" "github.com/apache/arrow/go/v8/arrow/memory" + "github.com/feast-dev/feast/go/protos/feast/types" ) @@ -40,9 +41,9 @@ func ProtoTypeToArrowType(sample *types.Value) (arrow.DataType, error) { case *types.Value_DoubleListVal: return arrow.ListOf(arrow.PrimitiveTypes.Float64), nil case 
*types.Value_UnixTimestampVal: - return arrow.FixedWidthTypes.Time64ns, nil + return arrow.FixedWidthTypes.Timestamp_s, nil case *types.Value_UnixTimestampListVal: - return arrow.ListOf(arrow.FixedWidthTypes.Time64ns), nil + return arrow.ListOf(arrow.FixedWidthTypes.Timestamp_s), nil default: return nil, fmt.Errorf("unsupported proto type in proto to arrow conversion: %s", sample.Val) @@ -80,16 +81,16 @@ func ValueTypeEnumToArrowType(t types.ValueType_Enum) (arrow.DataType, error) { case types.ValueType_DOUBLE_LIST: return arrow.ListOf(arrow.PrimitiveTypes.Float64), nil case types.ValueType_UNIX_TIMESTAMP: - return arrow.FixedWidthTypes.Time64ns, nil + return arrow.FixedWidthTypes.Timestamp_s, nil case types.ValueType_UNIX_TIMESTAMP_LIST: - return arrow.ListOf(arrow.FixedWidthTypes.Time64ns), nil + return arrow.ListOf(arrow.FixedWidthTypes.Timestamp_s), nil default: return nil, fmt.Errorf("unsupported value type enum in enum to arrow type conversion: %s", t) } } -func copyProtoValuesToArrowArray(builder array.Builder, values []*types.Value) error { +func CopyProtoValuesToArrowArray(builder array.Builder, values []*types.Value) error { switch fieldBuilder := builder.(type) { case *array.BooleanBuilder: for _, v := range values { @@ -119,9 +120,9 @@ func copyProtoValuesToArrowArray(builder array.Builder, values []*types.Value) e for _, v := range values { fieldBuilder.Append(v.GetDoubleVal()) } - case *array.Time64Builder: + case *array.TimestampBuilder: for _, v := range values { - fieldBuilder.Append(arrow.Time64(v.GetUnixTimestampVal())) + fieldBuilder.Append(arrow.Timestamp(v.GetUnixTimestampVal())) } case *array.ListBuilder: for _, list := range values { @@ -157,9 +158,9 @@ func copyProtoValuesToArrowArray(builder array.Builder, values []*types.Value) e for _, v := range list.GetDoubleListVal().GetVal() { valueBuilder.Append(v) } - case *array.Time64Builder: + case *array.TimestampBuilder: for _, v := range list.GetUnixTimestampListVal().GetVal() { - 
valueBuilder.Append(arrow.Time64(v)) + valueBuilder.Append(arrow.Timestamp(v)) } } } @@ -227,10 +228,10 @@ func ArrowValuesToProtoValues(arr arrow.Array) ([]*types.Value, error) { } values = append(values, &types.Value{Val: &types.Value_BoolListVal{BoolListVal: &types.BoolList{Val: vals}}}) - case arrow.FixedWidthTypes.Time64ns: + case arrow.FixedWidthTypes.Timestamp_s: vals := make([]int64, int(offsets[idx])-pos) for j := pos; j < int(offsets[idx]); j++ { - vals[j-pos] = int64(listValues.(*array.Time64).Value(j)) + vals[j-pos] = int64(listValues.(*array.Timestamp).Value(j)) } values = append(values, @@ -278,11 +279,11 @@ func ArrowValuesToProtoValues(arr arrow.Array) ([]*types.Value, error) { values = append(values, &types.Value{Val: &types.Value_StringVal{StringVal: arr.(*array.String).Value(idx)}}) } - case arrow.FixedWidthTypes.Time64ns: + case arrow.FixedWidthTypes.Timestamp_s: for idx := 0; idx < arr.Len(); idx++ { values = append(values, &types.Value{Val: &types.Value_UnixTimestampVal{ - UnixTimestampVal: int64(arr.(*array.Time64).Value(idx))}}) + UnixTimestampVal: int64(arr.(*array.Timestamp).Value(idx))}}) } default: return nil, fmt.Errorf("unsupported arrow to proto conversion for type %s", arr.DataType()) @@ -307,7 +308,7 @@ func ProtoValuesToArrowArray(protoValues []*types.Value, arrowAllocator memory.A if fieldType != nil { builder := array.NewBuilder(arrowAllocator, fieldType) - err = copyProtoValuesToArrowArray(builder, protoValues) + err = CopyProtoValuesToArrowArray(builder, protoValues) if err != nil { return nil, err } diff --git a/go/types/typeconversion_test.go b/go/types/typeconversion_test.go index 05fc32f63ac..1f89593ea01 100644 --- a/go/types/typeconversion_test.go +++ b/go/types/typeconversion_test.go @@ -1,12 +1,14 @@ package types import ( + "testing" + "time" + "github.com/apache/arrow/go/v8/arrow/memory" - "github.com/feast-dev/feast/go/protos/feast/types" "github.com/golang/protobuf/proto" "github.com/stretchr/testify/assert" - 
"testing" - "time" + + "github.com/feast-dev/feast/go/protos/feast/types" ) var ( diff --git a/infra/charts/feast-python-server/Chart.yaml b/infra/charts/feast-python-server/Chart.yaml index 1c1271b11e4..bd0140485c4 100644 --- a/infra/charts/feast-python-server/Chart.yaml +++ b/infra/charts/feast-python-server/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: feast-python-server description: Feast Feature Server in Python type: application -version: 0.20.0 +version: 0.21.0 keywords: - machine learning - big data diff --git a/infra/charts/feast-python-server/README.md b/infra/charts/feast-python-server/README.md index 02a8054799b..af844bf7d35 100644 --- a/infra/charts/feast-python-server/README.md +++ b/infra/charts/feast-python-server/README.md @@ -1,6 +1,6 @@ # feast-python-server -![Version: 0.20.0](https://img.shields.io/badge/Version-0.20.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) +![Version: 0.21.0](https://img.shields.io/badge/Version-0.21.0-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) Feast Feature Server in Python diff --git a/infra/charts/feast/Chart.yaml b/infra/charts/feast/Chart.yaml index f526e0a7386..bf1020ffa28 100644 --- a/infra/charts/feast/Chart.yaml +++ b/infra/charts/feast/Chart.yaml @@ -1,7 +1,7 @@ apiVersion: v1 description: Feature store for machine learning name: feast -version: 0.20.0 +version: 0.21.0 keywords: - machine learning - big data diff --git a/infra/charts/feast/README.md b/infra/charts/feast/README.md index de28d54d8e3..8d2865f9e09 100644 --- a/infra/charts/feast/README.md +++ b/infra/charts/feast/README.md @@ -8,7 +8,7 @@ This repo contains Helm charts for Feast components that are being installed on ## Chart: Feast -Feature store for machine learning Current chart version is `0.20.0` +Feature store for machine learning Current chart version is `0.21.0` 
## Installation @@ -55,8 +55,8 @@ For more details, please see: https://docs.feast.dev/how-to-guides/running-feast | Repository | Name | Version | |------------|------|---------| | https://charts.helm.sh/stable | redis | 10.5.6 | -| https://feast-helm-charts.storage.googleapis.com | feature-server(feature-server) | 0.20.0 | -| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.20.0 | +| https://feast-helm-charts.storage.googleapis.com | feature-server(feature-server) | 0.21.0 | +| https://feast-helm-charts.storage.googleapis.com | transformation-service(transformation-service) | 0.21.0 | ## Values diff --git a/infra/charts/feast/charts/feature-server/Chart.yaml b/infra/charts/feast/charts/feature-server/Chart.yaml index 492a0f3b616..812719b5f09 100644 --- a/infra/charts/feast/charts/feature-server/Chart.yaml +++ b/infra/charts/feast/charts/feature-server/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: "Feast Feature Server: Online feature serving service for Feast" name: feature-server -version: 0.20.0 -appVersion: v0.20.0 +version: 0.21.0 +appVersion: v0.21.0 keywords: - machine learning - big data diff --git a/infra/charts/feast/charts/feature-server/README.md b/infra/charts/feast/charts/feature-server/README.md index 43d4787d7b2..5577b94dcf1 100644 --- a/infra/charts/feast/charts/feature-server/README.md +++ b/infra/charts/feast/charts/feature-server/README.md @@ -1,6 +1,6 @@ # feature-server -![Version: 0.20.0](https://img.shields.io/badge/Version-0.20.0-informational?style=flat-square) ![AppVersion: v0.20.0](https://img.shields.io/badge/AppVersion-v0.20.0-informational?style=flat-square) +![Version: 0.21.0](https://img.shields.io/badge/Version-0.21.0-informational?style=flat-square) ![AppVersion: v0.21.0](https://img.shields.io/badge/AppVersion-v0.21.0-informational?style=flat-square) Feast Feature Server: Online feature serving service for Feast @@ -17,7 +17,7 @@ Feast Feature Server: Online feature 
serving service for Feast | envOverrides | object | `{}` | Extra environment variables to set | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | image.repository | string | `"feastdev/feature-server-java"` | Docker image for Feature Server repository | -| image.tag | string | `"0.20.0"` | Image tag | +| image.tag | string | `"0.21.0"` | Image tag | | ingress.grpc.annotations | object | `{}` | Extra annotations for the ingress | | ingress.grpc.auth.enabled | bool | `false` | Flag to enable auth | | ingress.grpc.class | string | `"nginx"` | Which ingress controller to use | diff --git a/infra/charts/feast/charts/feature-server/values.yaml b/infra/charts/feast/charts/feature-server/values.yaml index c8089b4d5e6..457e83875af 100644 --- a/infra/charts/feast/charts/feature-server/values.yaml +++ b/infra/charts/feast/charts/feature-server/values.yaml @@ -5,7 +5,7 @@ image: # image.repository -- Docker image for Feature Server repository repository: feastdev/feature-server-java # image.tag -- Image tag - tag: 0.20.0 + tag: 0.21.0 # image.pullPolicy -- Image pull policy pullPolicy: IfNotPresent diff --git a/infra/charts/feast/charts/transformation-service/Chart.yaml b/infra/charts/feast/charts/transformation-service/Chart.yaml index 8fd50e76641..7922a648c85 100644 --- a/infra/charts/feast/charts/transformation-service/Chart.yaml +++ b/infra/charts/feast/charts/transformation-service/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v1 description: "Transformation service: to compute on-demand features" name: transformation-service -version: 0.20.0 -appVersion: v0.20.0 +version: 0.21.0 +appVersion: v0.21.0 keywords: - machine learning - big data diff --git a/infra/charts/feast/charts/transformation-service/README.md b/infra/charts/feast/charts/transformation-service/README.md index 5324c191eec..73a738f1ed6 100644 --- a/infra/charts/feast/charts/transformation-service/README.md +++ b/infra/charts/feast/charts/transformation-service/README.md @@ -1,6 +1,6 @@ # 
transformation-service -![Version: 0.20.0](https://img.shields.io/badge/Version-0.20.0-informational?style=flat-square) ![AppVersion: v0.20.0](https://img.shields.io/badge/AppVersion-v0.20.0-informational?style=flat-square) +![Version: 0.21.0](https://img.shields.io/badge/Version-0.21.0-informational?style=flat-square) ![AppVersion: v0.21.0](https://img.shields.io/badge/AppVersion-v0.21.0-informational?style=flat-square) Transformation service: to compute on-demand features @@ -13,7 +13,7 @@ Transformation service: to compute on-demand features | envOverrides | object | `{}` | Extra environment variables to set | | image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | | image.repository | string | `"feastdev/feature-transformation-server"` | Docker image for Transformation Server repository | -| image.tag | string | `"0.20.0"` | Image tag | +| image.tag | string | `"0.21.0"` | Image tag | | nodeSelector | object | `{}` | Node labels for pod assignment | | podLabels | object | `{}` | Labels to be added to Feast Serving pods | | replicaCount | int | `1` | Number of pods that will be created | diff --git a/infra/charts/feast/charts/transformation-service/templates/deployment.yaml b/infra/charts/feast/charts/transformation-service/templates/deployment.yaml index 1b2172c305b..ac3e0c1f70d 100644 --- a/infra/charts/feast/charts/transformation-service/templates/deployment.yaml +++ b/infra/charts/feast/charts/transformation-service/templates/deployment.yaml @@ -30,6 +30,14 @@ spec: nodeSelector: {{- toYaml . 
| nindent 8 }} {{- end }} + + volumes: + {{- range $secret := .Values.secrets }} + - name: {{ $secret }} + secret: + secretName: {{ $secret }} + {{- end }} + containers: - name: {{ .Chart.Name }} image: {{ .Values.image.repository }}:{{ .Values.image.tag }} @@ -37,12 +45,29 @@ spec: ports: - name: grpc containerPort: {{ .Values.service.grpc.targetPort }} - + + volumeMounts: + {{- range $secret := .Values.secrets }} + - name: {{ $secret }} + mountPath: "/etc/secrets/{{ $secret }}" + readOnly: true + {{- end }} + env: - name: FEATURE_TRANSFORMATION_SERVER_PORT value: {{ .Values.service.grpc.targetPort | quote }} - name: FEATURE_STORE_YAML_BASE64 value: {{ tpl (.Files.Get "config/feature_store.yaml") . | b64enc | quote }} - + + {{- range $key, $value := .Values.envOverrides }} + - name: {{ printf "%s" $key | replace "." "_" | upper | quote }} + {{- if eq (kindOf $value) "map" }} + valueFrom: + {{- toYaml $value | nindent 12 }} + {{- else }} + value: {{ $value | quote }} + {{- end }} + {{- end }} + resources: {{- toYaml .Values.resources | nindent 10 }} diff --git a/infra/charts/feast/charts/transformation-service/values.yaml b/infra/charts/feast/charts/transformation-service/values.yaml index 248f9faac3d..3d111984e51 100644 --- a/infra/charts/feast/charts/transformation-service/values.yaml +++ b/infra/charts/feast/charts/transformation-service/values.yaml @@ -5,7 +5,7 @@ image: # image.repository -- Docker image for Transformation Server repository repository: feastdev/feature-transformation-server # image.tag -- Image tag - tag: 0.20.0 + tag: 0.21.0 # image.pullPolicy -- Image pull policy pullPolicy: IfNotPresent @@ -30,5 +30,8 @@ nodeSelector: {} # envOverrides -- Extra environment variables to set envOverrides: {} +# secrets -- List of Kubernetes secrets to be mounted. These secrets will be mounted on /etc/secrets/. 
+secrets: [] + # podLabels -- Labels to be added to Feast Serving pods podLabels: {} \ No newline at end of file diff --git a/infra/charts/feast/requirements.yaml b/infra/charts/feast/requirements.yaml index e0eb8368af6..ad0cb850bb3 100644 --- a/infra/charts/feast/requirements.yaml +++ b/infra/charts/feast/requirements.yaml @@ -1,12 +1,12 @@ dependencies: - name: feature-server alias: feature-server - version: 0.20.0 + version: 0.21.0 condition: feature-server.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: transformation-service alias: transformation-service - version: 0.20.0 + version: 0.21.0 condition: transformation-service.enabled repository: https://feast-helm-charts.storage.googleapis.com - name: redis diff --git a/infra/docker-compose/.env.sample b/infra/docker-compose/.env.sample deleted file mode 100644 index 8c48fbc976b..00000000000 --- a/infra/docker-compose/.env.sample +++ /dev/null @@ -1,6 +0,0 @@ -COMPOSE_PROJECT_NAME=feast -FEAST_VERSION=develop -FEAST_CORE_CONFIG=./core/core.yml -FEAST_ONLINE_SERVING_CONFIG=./serving/online-serving.yml -GCP_SERVICE_ACCOUNT=./gcp-service-accounts/placeholder.json -INGESTION_JAR_PATH=https://storage.googleapis.com/feast-jobs/spark/ingestion/feast-ingestion-spark-develop.jar \ No newline at end of file diff --git a/infra/docker-compose/core/core.yml b/infra/docker-compose/core/core.yml deleted file mode 100644 index 517b2649ca2..00000000000 --- a/infra/docker-compose/core/core.yml +++ /dev/null @@ -1,5 +0,0 @@ -spring: - datasource: - url: jdbc:postgresql://${DB_HOST:127.0.0.1}:${DB_PORT:5432}/${DB_DATABASE:postgres} - username: ${DB_USERNAME:postgres} - password: ${DB_PASSWORD:password} diff --git a/infra/docker-compose/docker-compose.dev.yml b/infra/docker-compose/docker-compose.dev.yml deleted file mode 100644 index 50c48874939..00000000000 --- a/infra/docker-compose/docker-compose.dev.yml +++ /dev/null @@ -1,67 +0,0 @@ -version: "3.7" - -services: - core: - image: maven:3.6-openjdk-11 
- volumes: - - ${HOME}/.m2:/root/.m2:delegated - - ../../.:/code:cached - environment: - DB_HOST: db - FEAST_STREAM_OPTIONS_BOOTSTRAPSERVERS: kafka:9092 - GOOGLE_APPLICATION_CREDENTIALS: /etc/gcloud/service-accounts/key.json - restart: on-failure - depends_on: - - db - - kafka - ports: - - 6565:6565 - - working_dir: /code - command: - - mvn - - -pl - - core - - spring-boot:run - - jupyter: - image: gcr.io/kf-feast/feast-jupyter:${FEAST_VERSION} - volumes: - - ${GCP_SERVICE_ACCOUNT}:/etc/gcloud/service-accounts/key.json - depends_on: - - core - environment: - FEAST_CORE_URL: core:6565 - FEAST_SERVING_URL: online_serving:6566 - FEAST_HISTORICAL_SERVING_URL: historical_serving:6567 - GOOGLE_APPLICATION_CREDENTIALS: /etc/gcloud/service-accounts/key.json - ports: - - 8888:8888 - - kafka: - image: confluentinc/cp-kafka:5.2.1 - environment: - KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 - KAFKA_ADVERTISED_LISTENERS: INSIDE://kafka:9092,OUTSIDE://localhost:9094 - KAFKA_LISTENERS: INSIDE://:9092,OUTSIDE://:9094 - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT - KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE - ports: - - "9092:9092" - - "9094:9094" - - depends_on: - - zookeeper - - zookeeper: - image: confluentinc/cp-zookeeper:5.2.1 - environment: - ZOOKEEPER_CLIENT_PORT: 2181 - - db: - image: postgres:12-alpine - environment: - POSTGRES_PASSWORD: password - ports: - - "5432:5432" diff --git a/infra/docker-compose/docker-compose.yml b/infra/docker-compose/docker-compose.yml deleted file mode 100644 index 579dc6d65fb..00000000000 --- a/infra/docker-compose/docker-compose.yml +++ /dev/null @@ -1,118 +0,0 @@ -version: "3.7" - -services: - core: - image: gcr.io/kf-feast/feast-core:${FEAST_VERSION} - volumes: - - ${FEAST_CORE_CONFIG}:/etc/feast/application.yml - environment: - DB_HOST: db - restart: on-failure - depends_on: - - db - ports: - - 6565:6565 - command: - - java - - -jar - - /opt/feast/feast-core.jar - - 
classpath:/application.yml,file:/etc/feast/application.yml - - jobservice: - image: gcr.io/kf-feast/feast-jobservice:${FEAST_VERSION} - depends_on: - - core - ports: - - 6568:6568 - volumes: - - $PWD:/shared - environment: - FEAST_CORE_URL: core:6565 - FEAST_SERVING_URL: online_serving:6566 - FEAST_SPARK_LAUNCHER: standalone - FEAST_SPARK_STANDALONE_MASTER: local - FEAST_SPARK_HOME: /usr/local/spark - FEAST_SPARK_STAGING_LOCATION: file:///shared/staging - FEAST_HISTORICAL_FEATURE_OUTPUT_LOCATION: file:///shared/historical_feature_output - FEAST_HISTORICAL_FEATURE_OUTPUT_FORMAT: parquet - FEAST_REDIS_HOST: redis - FEAST_SPARK_INGESTION_JAR: ${INGESTION_JAR_PATH} - FEAST_STATSD_ENABLED: "true" - FEAST_STATSD_HOST: prometheus_statsd - FEAST_STATSD_PORT: 9125 - - jupyter: - image: gcr.io/kf-feast/feast-jupyter:${FEAST_VERSION} - volumes: - - ${GCP_SERVICE_ACCOUNT}:/etc/gcloud/service-accounts/key.json - - $PWD:/shared - depends_on: - - core - environment: - FEAST_CORE_URL: core:6565 - FEAST_SERVING_URL: online_serving:6566 - FEAST_SPARK_LAUNCHER: standalone - FEAST_SPARK_STANDALONE_MASTER: local - FEAST_SPARK_HOME: /usr/local/spark - FEAST_SPARK_STAGING_LOCATION: file:///shared/staging - FEAST_HISTORICAL_FEATURE_OUTPUT_LOCATION: file:///shared/historical_feature_output - FEAST_HISTORICAL_FEATURE_OUTPUT_FORMAT: parquet - FEAST_REDIS_HOST: redis - GOOGLE_APPLICATION_CREDENTIALS: /etc/gcloud/service-accounts/key.json - ports: - - 8888:8888 - - kafka: - image: confluentinc/cp-kafka:5.2.1 - environment: - KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 - KAFKA_ADVERTISED_LISTENERS: INSIDE://kafka:9092,OUTSIDE://localhost:9094 - KAFKA_LISTENERS: INSIDE://:9092,OUTSIDE://:9094 - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT - KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE - ports: - - "9092:9092" - - "9094:9094" - - depends_on: - - zookeeper - - zookeeper: - image: confluentinc/cp-zookeeper:5.2.1 - environment: - 
ZOOKEEPER_CLIENT_PORT: 2181 - - db: - image: postgres:12-alpine - environment: - POSTGRES_PASSWORD: password - ports: - - "5432:5432" - - online_serving: - image: gcr.io/kf-feast/feast-serving:${FEAST_VERSION} - volumes: - - ${FEAST_ONLINE_SERVING_CONFIG}:/etc/feast/application.yml - depends_on: - - core - - redis - ports: - - 6566:6566 - restart: on-failure - command: - - java - - -jar - - /opt/feast/feast-serving.jar - - classpath:/application.yml,file:/etc/feast/application.yml - - redis: - image: redis:5-alpine - ports: - - "6379:6379" - - prometheus_statsd: - image: prom/statsd-exporter:v0.12.1 - ports: - - "9125:9125" - - "9102:9102" \ No newline at end of file diff --git a/infra/docker-compose/gcp-service-accounts/placeholder.json b/infra/docker-compose/gcp-service-accounts/placeholder.json deleted file mode 100644 index 5609d6923cf..00000000000 --- a/infra/docker-compose/gcp-service-accounts/placeholder.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "type": "service_account", - "project_id": "just-some-project" -} \ No newline at end of file diff --git a/infra/docker-compose/serving/online-serving.yml b/infra/docker-compose/serving/online-serving.yml deleted file mode 100644 index 7ca1e7113f3..00000000000 --- a/infra/docker-compose/serving/online-serving.yml +++ /dev/null @@ -1,12 +0,0 @@ -feast: - core-host: core - active_store: online - stores: - - name: online - type: REDIS - config: - host: redis - port: 6379 - subscriptions: - - name: "*" - project: "*" \ No newline at end of file diff --git a/infra/docker/ci/Dockerfile b/infra/docker/ci/Dockerfile deleted file mode 100644 index f365c6339e6..00000000000 --- a/infra/docker/ci/Dockerfile +++ /dev/null @@ -1,87 +0,0 @@ -FROM ubuntu:18.04 - -ARG REVISION -ENV DEBIAN_FRONTEND=noninteractive - -RUN apt-get update && apt-get install -y curl unzip locales software-properties-common && \ - apt-add-repository ppa:git-core/ppa && \ - apt update && apt install -y git - -# Install Java (by default openjdk-11) -RUN apt-get 
install -y default-jdk - -RUN locale-gen en_US.UTF-8 && update-locale LANG=en_US.utf8 -ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' LC_ALL='en_US.UTF-8' - -# Install maven -ARG MAVEN_VERSION=3.6.3 -ARG SHA=c35a1803a6e70a126e80b2b3ae33eed961f83ed74d18fcd16909b2d44d7dada3203f1ffe726c17ef8dcca2dcaa9fca676987befeadc9b9f759967a8cb77181c0 -ARG BASE_URL=https://apache.osuosl.org/maven/maven-3/${MAVEN_VERSION}/binaries - -RUN mkdir -p /usr/share/maven /usr/share/maven/ref \ - && curl -fsSL -o /tmp/apache-maven.tar.gz ${BASE_URL}/apache-maven-${MAVEN_VERSION}-bin.tar.gz \ - && echo "${SHA} /tmp/apache-maven.tar.gz" | sha512sum -c - \ - && tar -xzf /tmp/apache-maven.tar.gz -C /usr/share/maven --strip-components=1 \ - && rm -f /tmp/apache-maven.tar.gz \ - && ln -s /usr/share/maven/bin/mvn /usr/bin/mvn - -ENV MAVEN_HOME /usr/share/maven - -# Install Make and Python -ENV PYTHON_VERSION 3.7 - -RUN apt-get install -y build-essential curl python${PYTHON_VERSION} \ - python${PYTHON_VERSION}-dev python${PYTHON_VERSION}-distutils && \ - update-alternatives --install /usr/bin/python python /usr/bin/python${PYTHON_VERSION} 1 && \ - update-alternatives --set python /usr/bin/python${PYTHON_VERSION} && \ - curl -s https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \ - python get-pip.py --force-reinstall && \ - rm get-pip.py - -# Install Google Cloud SDK -RUN echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] http://packages.cloud.google.com/apt cloud-sdk main" \ - | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && \ - curl https://packages.cloud.google.com/apt/doc/apt-key.gpg \ - | apt-key --keyring /usr/share/keyrings/cloud.google.gpg \ - add - && apt-get update -y && apt-get install google-cloud-sdk -y - -# Instal boto3 -RUN pip install boto3==1.16.10 - -# Install Go -ENV GOLANG_VERSION 1.14.1 - -RUN curl -O https://storage.googleapis.com/golang/go${GOLANG_VERSION}.linux-amd64.tar.gz && \ - tar -xvf go${GOLANG_VERSION}.linux-amd64.tar.gz && chown -R root:root ./go && mv 
go /usr/local - -ENV GOPATH /go -ENV PATH $GOPATH/bin:/usr/local/go/bin:$PATH -ENV PATH="$HOME/bin:${PATH}" - -# Install Protoc and Plugins -ENV PROTOC_VERSION 3.12.2 - -RUN PROTOC_ZIP=protoc-${PROTOC_VERSION}-linux-x86_64.zip && \ - curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v${PROTOC_VERSION}/$PROTOC_ZIP && \ - unzip -o $PROTOC_ZIP -d /usr/local bin/protoc && \ - unzip -o $PROTOC_ZIP -d /usr/local 'include/*' && \ - rm -f $PROTOC_ZIP && \ - go get github.com/golang/protobuf/proto && \ - go get gopkg.in/russross/blackfriday.v2 && \ - git clone https://github.com/istio/tools/ && \ - cd tools/cmd/protoc-gen-docs && \ - go build && mkdir -p $HOME/bin && cp protoc-gen-docs $HOME/bin - -# Install AZ CLI -RUN curl -sL https://aka.ms/InstallAzureCLIDeb | bash - -# Install kubectl -RUN apt-get install -y kubectl=1.20.4-00 - -# Install helm -RUN curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 && \ - chmod 700 get_helm.sh && \ - ./get_helm.sh --version v3.4.2 - -# Install jq -RUN apt-get install -y jq diff --git a/infra/scripts/build-ingestion-py-dependencies.sh b/infra/scripts/build-ingestion-py-dependencies.sh deleted file mode 100755 index dd4e1992445..00000000000 --- a/infra/scripts/build-ingestion-py-dependencies.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail -PLATFORM=$1 -DESTINATION=$2 -PACKAGES=${PACKAGES:-"great-expectations==0.13.2 pyarrow==2.0.0 datadog==0.39.0 numpy<1.20.0"} - -tmp_dir=$(mktemp -d) - -pip3 install -t ${tmp_dir}/libs $PACKAGES - -cd $tmp_dir -tar -czf pylibs-ge-$PLATFORM.tar.gz libs/ -if [[ $DESTINATION == gs* ]]; then - gsutil cp pylibs-ge-$PLATFORM.tar.gz $DESTINATION -else - mv pylibs-ge-$PLATFORM.tar.gz $DESTINATION -fi diff --git a/infra/scripts/publish-python-sdk.sh b/infra/scripts/publish-python-sdk.sh deleted file mode 100755 index 582d9072b2a..00000000000 --- a/infra/scripts/publish-python-sdk.sh +++ /dev/null @@ -1,47 +0,0 @@ 
-#!/usr/bin/env bash - -set -e -set -o pipefail - -usage() -{ - echo "usage: publish-python-sdk.sh - - --directory-path absolute path to the python package, this directory - should contain 'setup.py' file - - --repository the repository name where the package will be uploaded, - check your .pypirc configuration file for the list of - valid repositories, usually it's 'pypi' or 'testpypi' -" -} - -while [ "$1" != "" ]; do - case "$1" in - --directory-path ) DIRECTORY_PATH="$2"; shift;; - --repository ) REPOSITORY="$2"; shift;; - -h | --help ) usage; exit;; - * ) usage; exit 1 - esac - shift -done - -if [ -z $DIRECTORY_PATH ]; then usage; exit 1; fi -if [ -z $REPOSITORY ]; then usage; exit 1; fi - -ORIGINAL_DIR=$PWD -cd $DIRECTORY_PATH - -echo "============================================================" -echo "Generating distribution archives" -echo "============================================================" -python3 -m pip install --user --upgrade setuptools wheel -python3 setup.py sdist bdist_wheel - -echo "============================================================" -echo "Uploading distribution archives" -echo "============================================================" -python3 -m pip install --user --upgrade twine -python3 -m twine upload --repository $REPOSITORY dist/* - -cd $ORIGINAL_DIR diff --git a/infra/scripts/test-docker-compose.sh b/infra/scripts/test-docker-compose.sh deleted file mode 100755 index 69cdaac82fa..00000000000 --- a/infra/scripts/test-docker-compose.sh +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env bash - -set -e - -echo " -============================================================ -Running Docker Compose tests with pytest at 'tests/e2e' -============================================================ -" -LATEST_GH_COMMIT_SHA=$1 - -clean_up () { - ARG=$? 
- - # Shut down docker-compose images - - docker-compose down - - exit $ARG -} - -trap clean_up EXIT - -export PROJECT_ROOT_DIR=$(git rev-parse --show-toplevel) -export COMPOSE_INTERACTIVE_NO_CLI=1 - -# Create Docker Compose configuration file -cd ${PROJECT_ROOT_DIR}/infra/docker-compose/ -cp .env.sample .env - -# Replace FEAST_VERSION with latest github image SHA -export FEAST_VERSION=$LATEST_GH_COMMIT_SHA -echo "Testing docker-compose setup with version SHA, $FEAST_VERSION." - -# Start Docker Compose containers -docker-compose up -d - -# Get Jupyter container IP address -export JUPYTER_DOCKER_CONTAINER_IP_ADDRESS=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' feast_jupyter_1) - -# Print Jupyter container information -docker inspect feast_jupyter_1 - -# Wait for Jupyter Notebook Container to come online -${PROJECT_ROOT_DIR}/infra/scripts/wait-for-it.sh ${JUPYTER_DOCKER_CONTAINER_IP_ADDRESS}:8888 --timeout=60 - -# Get Feast Core container IP address -export FEAST_CORE_CONTAINER_IP_ADDRESS=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' feast_core_1) - -# Wait for Feast Core to be ready -${PROJECT_ROOT_DIR}/infra/scripts/wait-for-it.sh ${FEAST_CORE_CONTAINER_IP_ADDRESS}:6565 --timeout=120 - -# Get Feast Online Serving container IP address -export FEAST_ONLINE_SERVING_CONTAINER_IP_ADDRESS=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' feast_online_serving_1) - -# Wait for Feast Online Serving to be ready -${PROJECT_ROOT_DIR}/infra/scripts/wait-for-it.sh ${FEAST_ONLINE_SERVING_CONTAINER_IP_ADDRESS}:6566 --timeout=120 - - -# Get Feast Job Service container IP address -export FEAST_JOB_SERVICE_CONTAINER_IP_ADDRESS=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' feast_jobservice_1) - -# Wait for Feast Job Service to be ready -${PROJECT_ROOT_DIR}/infra/scripts/wait-for-it.sh ${FEAST_JOB_SERVICE_CONTAINER_IP_ADDRESS}:6568 --timeout=120 - -# Run e2e 
tests for Redis -docker exec \ - -e FEAST_VERSION=${FEAST_VERSION} \ - -e DISABLE_SERVICE_FIXTURES=true \ - -e DISABLE_FEAST_SERVICE_FIXTURES=true \ - --user root \ - feast_jupyter_1 bash \ - -c 'cd /feast/tests && python -m pip install -r requirements.txt && FEAST_USAGE=False pytest e2e/ --ingestion-jar https://storage.googleapis.com/feast-jobs/spark/ingestion/feast-ingestion-spark-${FEAST_VERSION}.jar --redis-url redis:6379 --core-url core:6565 --serving-url online_serving:6566 --job-service-url jobservice:6568 --staging-path file:///shared/staging/ --kafka-brokers kafka:9092 --statsd-url prometheus_statsd:9125 --prometheus-url prometheus_statsd:9102 --feast-version develop' diff --git a/infra/scripts/test-integration.sh b/infra/scripts/test-integration.sh deleted file mode 100755 index 6f50dd14504..00000000000 --- a/infra/scripts/test-integration.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env bash - -python -m pip install --upgrade pip setuptools wheel pip-tools -make install-python -python -m pip install -qr tests/requirements.txt - -export FEAST_USAGE="False" -pytest tests/integration --dataproc-cluster-name feast-e2e --dataproc-project kf-feast --dataproc-region us-central1 --dataproc-staging-location gs://feast-templocation-kf-feast diff --git a/infra/scripts/test-java-core-ingestion.sh b/infra/scripts/test-java-core-ingestion.sh deleted file mode 100755 index a812edc6166..00000000000 --- a/infra/scripts/test-java-core-ingestion.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env bash - -apt-get -qq update -apt-get -y install build-essential - -make lint-java - -infra/scripts/download-maven-cache.sh \ - --archive-uri gs://feast-templocation-kf-feast/.m2.2019-10-24.tar \ - --output-dir /root/ - -# Core depends on Ingestion so they are tested together -# Skip Maven enforcer: https://stackoverflow.com/questions/50647223/maven-enforcer-issue-when-running-from-reactor-level -mvn -f java/pom.xml --projects core,ingestion --batch-mode --define skipTests=true \ - 
--define enforcer.skip=true clean install -mvn -f java/pom.xml --projects core,ingestion --define enforcer.skip=true test -TEST_EXIT_CODE=$? - -# Default artifact location setting in Prow jobs -LOGS_ARTIFACT_PATH=/logs/artifacts -mkdir -p ${LOGS_ARTIFACT_PATH}/surefire-reports -cp core/target/surefire-reports/* ${LOGS_ARTIFACT_PATH}/surefire-reports/ -cp ingestion/target/surefire-reports/* ${LOGS_ARTIFACT_PATH}/surefire-reports/ - -exit ${TEST_EXIT_CODE} \ No newline at end of file diff --git a/infra/scripts/test-java-sdk.sh b/infra/scripts/test-java-sdk.sh deleted file mode 100755 index 58dc5b1a832..00000000000 --- a/infra/scripts/test-java-sdk.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env bash - -# Skip Maven enforcer: https://stackoverflow.com/questions/50647223/maven-enforcer-issue-when-running-from-reactor-level -mvn -f java/pom.xml --projects sdk/java --batch-mode --define skipTests=true \ - --define enforcer.skip=true clean install -mvn -f java/pom.xml --projects sdk/java --define enforcer.skip=true test -TEST_EXIT_CODE=$? - -# Default artifact location setting in Prow jobs -LOGS_ARTIFACT_PATH=/logs/artifacts -cp -r java/sdk/java/target/surefire-reports ${LOGS_ARTIFACT_PATH}/surefire-reports - -exit ${TEST_EXIT_CODE} \ No newline at end of file diff --git a/infra/scripts/test-load.sh b/infra/scripts/test-load.sh deleted file mode 100755 index 51a24691067..00000000000 --- a/infra/scripts/test-load.sh +++ /dev/null @@ -1,116 +0,0 @@ -#!/usr/bin/env bash - -set -e - -echo " -============================================================ -Running Load Tests -============================================================ -" - -clean_up() { - ARG=$? 
- - # Shut down docker-compose images - cd "${PROJECT_ROOT_DIR}"/infra/docker-compose - - docker-compose down - - exit $ARG -} - -# Get Feast project repository root and scripts directory -export PROJECT_ROOT_DIR=$(git rev-parse --show-toplevel) -export SCRIPTS_DIR=${PROJECT_ROOT_DIR}/infra/scripts -export COMPOSE_INTERACTIVE_NO_CLI=1 -source ${SCRIPTS_DIR}/setup-common-functions.sh - -if [ -z "$1" ] ; then - echo "No SHA/FEAST_VERSION provided as argument, using local HEAD"; - FEAST_VERSION=$(git rev-parse HEAD); - export FEAST_VERSION -else - echo "Using ${1} as SHA/FEAST_VERSION to test"; - FEAST_VERSION=${1} - export FEAST_VERSION -fi - -wait_for_docker_image gcr.io/kf-feast/feast-core:"${FEAST_VERSION}" -wait_for_docker_image gcr.io/kf-feast/feast-jobcontroller:"${FEAST_VERSION}" -wait_for_docker_image gcr.io/kf-feast/feast-serving:"${FEAST_VERSION}" -wait_for_docker_image gcr.io/kf-feast/feast-jupyter:"${FEAST_VERSION}" - -# Clean up Docker Compose if failure -trap clean_up EXIT - -# Create Docker Compose configuration file -cd "${PROJECT_ROOT_DIR}"/infra/docker-compose/ -cp .env.sample .env - -# Start Docker Compose containers -FEAST_VERSION=${FEAST_VERSION} docker-compose up -d - -# Get Jupyter container IP address -export JUPYTER_DOCKER_CONTAINER_IP_ADDRESS=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' feast_jupyter_1) - -# Print Jupyter container information -docker inspect feast_jupyter_1 -docker logs feast_jupyter_1 - -# Wait for Jupyter Notebook Container to come online -"${PROJECT_ROOT_DIR}"/infra/scripts/wait-for-it.sh ${JUPYTER_DOCKER_CONTAINER_IP_ADDRESS}:8888 --timeout=60 - -# Get Feast Core container IP address -export FEAST_CORE_CONTAINER_IP_ADDRESS=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' feast_core_1) - -# Wait for Feast Core to be ready -"${PROJECT_ROOT_DIR}"/infra/scripts/wait-for-it.sh ${FEAST_CORE_CONTAINER_IP_ADDRESS}:6565 --timeout=120 - -# Get Feast Job Controller 
container IP address -export FEAST_JOB_CONTROLLER_CONTAINER_IP_ADDRESS=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' feast_jobcontroller_1) - -# Wait for Feast Job Controller to be ready -"${PROJECT_ROOT_DIR}"/infra/scripts/wait-for-it.sh ${FEAST_JOB_CONTROLLER_CONTAINER_IP_ADDRESS}:6570 --timeout=120 - -# Get Feast Online Serving container IP address -export FEAST_ONLINE_SERVING_CONTAINER_IP_ADDRESS=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' feast_online_serving_1) - -# Wait for Feast Online Serving to be ready -"${PROJECT_ROOT_DIR}"/infra/scripts/wait-for-it.sh ${FEAST_ONLINE_SERVING_CONTAINER_IP_ADDRESS}:6566 --timeout=120 - -# Ingest data into Feast -pip install --user matplotlib feast pytz --upgrade -python "${PROJECT_ROOT_DIR}"/tests/load/ingest.py "${FEAST_CORE_CONTAINER_IP_ADDRESS}":6565 "${FEAST_ONLINE_SERVING_CONTAINER_IP_ADDRESS}":6566 - -# Download load test tool and proxy -cd $(mktemp -d) -wget -c https://github.com/feast-dev/feast-load-test-proxy/releases/download/v0.1.1/feast-load-test-proxy_0.1.1_Linux_x86_64.tar.gz -O - | tar -xz -git clone https://github.com/giltene/wrk2.git -cd wrk2 -make -cd .. -cp wrk2/wrk . 
- -# Start load test server -LOAD_FEAST_SERVING_HOST=${FEAST_ONLINE_SERVING_CONTAINER_IP_ADDRESS} LOAD_FEAST_SERVING_PORT=6566 ./feast-load-test-proxy & -sleep 5 - -# Run load tests -./wrk -t2 -c10 -d30s -R20 --latency http://localhost:8080/echo -./wrk -t2 -c10 -d30s -R20 --latency http://localhost:8080/send?entity_count=10 > load_test_results_1fs_13f_10e_20rps -./wrk -t2 -c10 -d30s -R50 --latency http://localhost:8080/send?entity_count=10 > load_test_results_1fs_13f_10e_50rps -./wrk -t2 -c10 -d30s -R250 --latency http://localhost:8080/send?entity_count=10 > load_test_results_1fs_13f_10e_250rps -./wrk -t2 -c10 -d30s -R20 --latency http://localhost:8080/send?entity_count=50 > load_test_results_1fs_13f_50e_20rps -./wrk -t2 -c10 -d30s -R50 --latency http://localhost:8080/send?entity_count=50 > load_test_results_1fs_13f_50e_50rps -./wrk -t2 -c10 -d30s -R250 --latency http://localhost:8080/send?entity_count=50 > load_test_results_1fs_13f_50e_250rps - -# Print load test results -cat $(ls -lah | grep load_test_results | awk '{print $9}' | tr '\n' ' ') - -# Create hdr-plot of load tests -export PLOT_FILE_NAME="load_test_graph_${FEAST_VERSION}"_$(date "+%Y%m%d-%H%M%S").png -python $PROJECT_ROOT_DIR/tests/load/hdr_plot.py --output "$PLOT_FILE_NAME" --title "Load test: ${FEAST_VERSION}" $(ls -lah | grep load_test_results | awk '{print $9}' | tr '\n' ' ') - -# Persist artifact -mkdir -p "${PROJECT_ROOT_DIR}"/load-test-output/ -cp -r load_test_* "${PROJECT_ROOT_DIR}"/load-test-output/ \ No newline at end of file diff --git a/infra/scripts/test-usage.sh b/infra/scripts/test-usage.sh deleted file mode 100755 index f70fdd62479..00000000000 --- a/infra/scripts/test-usage.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env bash - -set -e - -# Default artifact location setting in Prow jobs -LOGS_ARTIFACT_PATH=/logs/artifacts - -cd sdk/python/ -pip install -e ".[ci]" -cd usage_tests/ -pytest --junitxml=${LOGS_ARTIFACT_PATH}/python-sdk-test-report.xml diff --git 
a/infra/scripts/wait-for-it.sh b/infra/scripts/wait-for-it.sh deleted file mode 100755 index 51942ce6dc4..00000000000 --- a/infra/scripts/wait-for-it.sh +++ /dev/null @@ -1,183 +0,0 @@ -#!/usr/bin/env bash -# Use this script to test if a given TCP host/port are available -# Source: https://github.com/vishnubob/wait-for-it - -WAITFORIT_cmdname=${0##*/} - -echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } - -usage() -{ - cat << USAGE >&2 -Usage: - $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args] - -h HOST | --host=HOST Host or IP under test - -p PORT | --port=PORT TCP port under test - Alternatively, you specify the host and port as host:port - -s | --strict Only execute subcommand if the test succeeds - -q | --quiet Don't output any status messages - -t TIMEOUT | --timeout=TIMEOUT - Timeout in seconds, zero for no timeout - -- COMMAND ARGS Execute command with args after the test finishes -USAGE - exit 1 -} - -wait_for() -{ - if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then - echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" - else - echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout" - fi - WAITFORIT_start_ts=$(date +%s) - while : - do - if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then - nc -z $WAITFORIT_HOST $WAITFORIT_PORT - WAITFORIT_result=$? - else - (echo > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1 - WAITFORIT_result=$? 
- fi - if [[ $WAITFORIT_result -eq 0 ]]; then - WAITFORIT_end_ts=$(date +%s) - echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds" - break - fi - sleep 1 - done - return $WAITFORIT_result -} - -wait_for_wrapper() -{ - # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 - if [[ $WAITFORIT_QUIET -eq 1 ]]; then - timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT & - else - timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT & - fi - WAITFORIT_PID=$! - trap "kill -INT -$WAITFORIT_PID" INT - wait $WAITFORIT_PID - WAITFORIT_RESULT=$? - if [[ $WAITFORIT_RESULT -ne 0 ]]; then - echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" - fi - return $WAITFORIT_RESULT -} - -# process arguments -while [[ $# -gt 0 ]] -do - case "$1" in - *:* ) - WAITFORIT_hostport=(${1//:/ }) - WAITFORIT_HOST=${WAITFORIT_hostport[0]} - WAITFORIT_PORT=${WAITFORIT_hostport[1]} - shift 1 - ;; - --child) - WAITFORIT_CHILD=1 - shift 1 - ;; - -q | --quiet) - WAITFORIT_QUIET=1 - shift 1 - ;; - -s | --strict) - WAITFORIT_STRICT=1 - shift 1 - ;; - -h) - WAITFORIT_HOST="$2" - if [[ $WAITFORIT_HOST == "" ]]; then break; fi - shift 2 - ;; - --host=*) - WAITFORIT_HOST="${1#*=}" - shift 1 - ;; - -p) - WAITFORIT_PORT="$2" - if [[ $WAITFORIT_PORT == "" ]]; then break; fi - shift 2 - ;; - --port=*) - WAITFORIT_PORT="${1#*=}" - shift 1 - ;; - -t) - WAITFORIT_TIMEOUT="$2" - if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi - shift 2 - ;; - --timeout=*) - WAITFORIT_TIMEOUT="${1#*=}" - shift 1 - ;; - --) - shift - WAITFORIT_CLI=("$@") - break - ;; - --help) - usage - ;; - *) - echoerr "Unknown argument: $1" - usage - ;; - esac -done - -if [[ "$WAITFORIT_HOST" == 
"" || "$WAITFORIT_PORT" == "" ]]; then - echoerr "Error: you need to provide a host and port to test." - usage -fi - -WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15} -WAITFORIT_STRICT=${WAITFORIT_STRICT:-0} -WAITFORIT_CHILD=${WAITFORIT_CHILD:-0} -WAITFORIT_QUIET=${WAITFORIT_QUIET:-0} - -# Check to see if timeout is from busybox? -WAITFORIT_TIMEOUT_PATH=$(type -p timeout) -WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH) - -WAITFORIT_BUSYTIMEFLAG="" -if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then - WAITFORIT_ISBUSY=1 - # Check if busybox timeout uses -t flag - # (recent Alpine versions don't support -t anymore) - if timeout &>/dev/stdout | grep -q -e '-t '; then - WAITFORIT_BUSYTIMEFLAG="-t" - fi -else - WAITFORIT_ISBUSY=0 -fi - -if [[ $WAITFORIT_CHILD -gt 0 ]]; then - wait_for - WAITFORIT_RESULT=$? - exit $WAITFORIT_RESULT -else - if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then - wait_for_wrapper - WAITFORIT_RESULT=$? - else - wait_for - WAITFORIT_RESULT=$? - fi -fi - -if [[ $WAITFORIT_CLI != "" ]]; then - if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then - echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess" - exit $WAITFORIT_RESULT - fi - exec "${WAITFORIT_CLI[@]}" -else - exit $WAITFORIT_RESULT -fi \ No newline at end of file diff --git a/infra/templates/README.md.jinja2 b/infra/templates/README.md.jinja2 index 9d3b5649ce9..cd6e42c1d1b 100644 --- a/infra/templates/README.md.jinja2 +++ b/infra/templates/README.md.jinja2 @@ -46,6 +46,9 @@ feast apply ### 4. Explore your data in the web UI (experimental) ![Web UI](ui/sample.png) +```commandline +feast ui +``` ### 5. 
Build a training dataset ```python diff --git a/java/pom.xml b/java/pom.xml index 4a4049305b5..3b794d97ed0 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -38,7 +38,7 @@ - 0.20.0 + 0.21.0 https://github.com/feast-dev/feast UTF-8 @@ -68,7 +68,7 @@ 2.3.1 1.3.2 2.0.1.Final - 0.20.0 + 0.21.0 1.6.6 29.0-jre diff --git a/java/serving/src/test/java/feast/serving/it/ServingEnvironment.java b/java/serving/src/test/java/feast/serving/it/ServingEnvironment.java index c00dc7b1f31..ebbfa89d37b 100644 --- a/java/serving/src/test/java/feast/serving/it/ServingEnvironment.java +++ b/java/serving/src/test/java/feast/serving/it/ServingEnvironment.java @@ -33,6 +33,7 @@ import java.io.File; import java.io.IOException; import java.net.ServerSocket; +import java.time.Duration; import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; @@ -45,7 +46,7 @@ @Testcontainers abstract class ServingEnvironment { static DockerComposeContainer environment; - + static int serverPort = getFreePort(); ServingServiceGrpc.ServingServiceBlockingStub servingStub; Injector injector; String serverName; @@ -53,16 +54,15 @@ abstract class ServingEnvironment { Server server; MutableHandlerRegistry serviceRegistry; - static int serverPort = getFreePort(); - @BeforeAll static void globalSetup() { environment = new DockerComposeContainer( new File("src/test/resources/docker-compose/docker-compose-redis-it.yml")) .withExposedService("redis", 6379) - .withExposedService("feast", 8080) - .waitingFor("feast", Wait.forListeningPort()); + .withExposedService( + "feast", 8080, Wait.forListeningPort().withStartupTimeout(Duration.ofSeconds(180))) + .withTailChildContainers(true); environment.start(); } @@ -71,6 +71,20 @@ static void globalTeardown() { environment.stop(); } + private static int getFreePort() { + ServerSocket serverSocket; + try { + serverSocket = new ServerSocket(0); + } catch (IOException e) { + throw new RuntimeException("Couldn't allocate 
port"); + } + + assertThat(serverSocket, is(notNullValue())); + assertThat(serverSocket.getLocalPort(), greaterThan(0)); + + return serverSocket.getLocalPort(); + } + @BeforeEach public void envSetUp() throws Exception { AbstractModule appPropertiesModule = @@ -155,18 +169,4 @@ public void envTeardown() throws Exception { AbstractModule registryConfig() { return null; } - - private static int getFreePort() { - ServerSocket serverSocket; - try { - serverSocket = new ServerSocket(0); - } catch (IOException e) { - throw new RuntimeException("Couldn't allocate port"); - } - - assertThat(serverSocket, is(notNullValue())); - assertThat(serverSocket.getLocalPort(), greaterThan(0)); - - return serverSocket.getLocalPort(); - } } diff --git a/java/serving/src/test/java/feast/serving/util/DataGenerator.java b/java/serving/src/test/java/feast/serving/util/DataGenerator.java index e38d1ce4596..7a310828d2e 100644 --- a/java/serving/src/test/java/feast/serving/util/DataGenerator.java +++ b/java/serving/src/test/java/feast/serving/util/DataGenerator.java @@ -126,11 +126,11 @@ public static EntityProto.EntitySpecV2 createEntitySpecV2( } public static FeatureProto.FeatureSpecV2 createFeatureSpecV2( - String name, ValueProto.ValueType.Enum valueType, Map labels) { + String name, ValueProto.ValueType.Enum valueType, Map tags) { return FeatureProto.FeatureSpecV2.newBuilder() .setName(name) .setValueType(valueType) - .putAllLabels(labels) + .putAllTags(tags) .build(); } @@ -140,7 +140,7 @@ public static FeatureTableSpec createFeatureTableSpec( List entities, Map features, int maxAgeSecs, - Map labels) { + Map tags) { return FeatureTableSpec.newBuilder() .setName(name) @@ -152,7 +152,7 @@ public static FeatureTableSpec createFeatureTableSpec( FeatureSpecV2.newBuilder() .setName(entry.getKey()) .setValueType(entry.getValue()) - .putAllLabels(labels) + .putAllTags(tags) .build()) .collect(Collectors.toList())) .setMaxAge(Duration.newBuilder().setSeconds(3600).build()) @@ -169,7 +169,7 @@ 
public static FeatureTableSpec createFeatureTableSpec( .setUri("/dev/null") .build()) .build()) - .putAllLabels(labels) + .putAllLabels(tags) .build(); } @@ -178,7 +178,7 @@ public static FeatureTableSpec createFeatureTableSpec( List entities, ImmutableMap features, int maxAgeSecs, - Map labels) { + Map tags) { return FeatureTableSpec.newBuilder() .setName(name) @@ -190,11 +190,11 @@ public static FeatureTableSpec createFeatureTableSpec( FeatureSpecV2.newBuilder() .setName(entry.getKey()) .setValueType(entry.getValue()) - .putAllLabels(labels) + .putAllTags(tags) .build()) .collect(Collectors.toList())) .setMaxAge(Duration.newBuilder().setSeconds(maxAgeSecs).build()) - .putAllLabels(labels) + .putAllLabels(tags) .build(); } diff --git a/java/serving/src/test/resources/docker-compose/docker-compose-redis-it.yml b/java/serving/src/test/resources/docker-compose/docker-compose-redis-it.yml index 1dee243cb80..0522750d996 100644 --- a/java/serving/src/test/resources/docker-compose/docker-compose-redis-it.yml +++ b/java/serving/src/test/resources/docker-compose/docker-compose-redis-it.yml @@ -5,11 +5,13 @@ services: image: redis:6.2 command: redis-server --requirepass testpw ports: - - "6379:6379" + - "6379" feast: build: feast10 ports: - - "8080:8080" + - "8080" links: - redis + volumes: + - $PWD/../../../../../../:/mnt/feast diff --git a/java/serving/src/test/resources/docker-compose/feast10/Dockerfile b/java/serving/src/test/resources/docker-compose/feast10/Dockerfile index df14bb592b4..dc26c804a9a 100644 --- a/java/serving/src/test/resources/docker-compose/feast10/Dockerfile +++ b/java/serving/src/test/resources/docker-compose/feast10/Dockerfile @@ -5,11 +5,8 @@ WORKDIR /usr/src/ COPY requirements.txt ./ RUN pip install --no-cache-dir -r requirements.txt -RUN git clone https://github.com/feast-dev/feast.git /root/feast -RUN cd /root/feast/sdk/python && pip install -e '.[redis]' - WORKDIR /app COPY . . 
EXPOSE 8080 -CMD ["/bin/sh", "-c", "python materialize.py && feast serve_transformations --port 8080"] +CMD ["./entrypoint.sh"] diff --git a/java/serving/src/test/resources/docker-compose/feast10/definitions.py b/java/serving/src/test/resources/docker-compose/feast10/definitions.py index 374408a20bd..e514ef0d239 100644 --- a/java/serving/src/test/resources/docker-compose/feast10/definitions.py +++ b/java/serving/src/test/resources/docker-compose/feast10/definitions.py @@ -50,10 +50,10 @@ @on_demand_feature_view( - sources={ - "driver_hourly_stats": driver_hourly_stats_view, - "vals_to_add": input_request, - }, + sources=[ + driver_hourly_stats_view, + input_request, + ], schema=[ Field(name="conv_rate_plus_val1", dtype=Float64), Field(name="conv_rate_plus_val2", dtype=Float64), diff --git a/java/serving/src/test/resources/docker-compose/feast10/entrypoint.sh b/java/serving/src/test/resources/docker-compose/feast10/entrypoint.sh new file mode 100755 index 00000000000..d7dcd03c5fb --- /dev/null +++ b/java/serving/src/test/resources/docker-compose/feast10/entrypoint.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env sh + +set -e + +# feast root directory is expected to be mounted (eg, by docker compose) +cd /mnt/feast +pip install -e '.[redis]' + +cd /app +python materialize.py +feast serve_transformations --port 8080 \ No newline at end of file diff --git a/protos/feast/core/DataSource.proto b/protos/feast/core/DataSource.proto index d958281ca2c..9e6028ccfa4 100644 --- a/protos/feast/core/DataSource.proto +++ b/protos/feast/core/DataSource.proto @@ -222,8 +222,7 @@ message DataSource { // Defines options for DataSource that supports pushing data to it. This allows data to be pushed to // the online store on-demand, such as by stream consumers. 
message PushOptions { - // Mapping of feature name to type - map schema = 1; + reserved 1; } diff --git a/protos/feast/core/Feature.proto b/protos/feast/core/Feature.proto index a96423bfbde..f6826bef810 100644 --- a/protos/feast/core/Feature.proto +++ b/protos/feast/core/Feature.proto @@ -31,6 +31,6 @@ message FeatureSpecV2 { // Value type of the feature. Not updatable. feast.types.ValueType.Enum value_type = 2; - // Labels for user defined metadata on a feature - map labels = 3; + // Tags for user defined metadata on a feature + map tags = 3; } diff --git a/protos/feast/core/FeatureService.proto b/protos/feast/core/FeatureService.proto index 2295677583a..c04fa97507d 100644 --- a/protos/feast/core/FeatureService.proto +++ b/protos/feast/core/FeatureService.proto @@ -5,6 +5,7 @@ option go_package = "github.com/feast-dev/feast/go/protos/feast/core"; option java_outer_classname = "FeatureServiceProto"; option java_package = "feast.proto.core"; +import "google/protobuf/duration.proto"; import "google/protobuf/timestamp.proto"; import "feast/core/FeatureViewProjection.proto"; @@ -35,6 +36,9 @@ message FeatureServiceSpec { // Owner of the feature service. string owner = 6; + + // (optional) if provided logging will be enabled for this feature service. 
+ LoggingConfig logging_config = 7; } @@ -46,3 +50,45 @@ message FeatureServiceMeta { google.protobuf.Timestamp last_updated_timestamp = 2; } + + +message LoggingConfig { + float sample_rate = 1; + google.protobuf.Duration partition_interval = 2; + + oneof destination { + FileDestination file_destination = 3; + BigQueryDestination bigquery_destination = 4; + RedshiftDestination redshift_destination = 5; + SnowflakeDestination snowflake_destination = 6; + CustomDestination custom_destination = 7; + } + + message FileDestination { + string path = 1; + string s3_endpoint_override = 2; + + // column names to use for partitioning + repeated string partition_by = 3; + } + + message BigQueryDestination { + // Full table reference in the form of [project:dataset.table] + string table_ref = 1; + } + + message RedshiftDestination { + // Destination table name. ClusterId and database will be taken from an offline store config + string table_name = 1; + } + + message SnowflakeDestination { + // Destination table name. Schema and database will be taken from an offline store config + string table_name = 1; + } + + message CustomDestination { + string kind = 1; + map config = 2; + } +} diff --git a/sdk/python/pyproject.toml b/pyproject.toml similarity index 50% rename from sdk/python/pyproject.toml rename to pyproject.toml index 93727dc5689..64394a558a6 100644 --- a/sdk/python/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,13 @@ +[build-system] +requires = ["setuptools>=60", "wheel", "setuptools_scm>=6.2", "grpcio", "grpcio-tools==1.44.0", "mypy-protobuf==3.1", "sphinx!=4.0.0"] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +# Including this section is comparable to supplying use_scm_version=True in setup.py. 
+ [tool.black] line-length = 88 -target-version = ['py37'] +target-version = ['py38'] include = '\.pyi?$' exclude = ''' ( diff --git a/sdk/python/.gitignore b/sdk/python/.gitignore index f4e09ad9d62..9cab5dff201 100644 --- a/sdk/python/.gitignore +++ b/sdk/python/.gitignore @@ -115,4 +115,4 @@ dmypy.json .pyre/ .vscode/* -playground \ No newline at end of file +playground diff --git a/sdk/python/MANIFEST.in b/sdk/python/MANIFEST.in deleted file mode 100644 index 0eeaa181b23..00000000000 --- a/sdk/python/MANIFEST.in +++ /dev/null @@ -1,4 +0,0 @@ -recursive-include feast/protos/ *.py -recursive-include feast py.typed *.pyi - -recursive-include feast/embedded_go/lib/ *.py *.so diff --git a/sdk/python/README.md b/sdk/python/README.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/docs/index.rst b/sdk/python/docs/index.rst index 52783b40e3d..ca746707eac 100644 --- a/sdk/python/docs/index.rst +++ b/sdk/python/docs/index.rst @@ -59,6 +59,12 @@ Trino Source :members: :exclude-members: TrinoOptions +PostgreSQL Source +------------------ + +.. automodule:: feast.infra.offline_stores.contrib.postgres_offline_store.postgres_source + :members: + :exclude-members: PostgreSQLOptions File Source ------------------ @@ -192,6 +198,12 @@ Trino Offline Store .. automodule:: feast.infra.offline_stores.contrib.trino_offline_store.trino :members: +PostgreSQL Offline Store +------------------------ + +.. automodule:: feast.infra.offline_stores.contrib.postgres_offline_store.postgres + :members: + Online Store ================== @@ -223,4 +235,11 @@ Redis Online Store .. automodule:: feast.infra.online_stores.redis :members: - :noindex: \ No newline at end of file + :noindex: + +PostgreSQL Online Store +----------------------- + +.. 
automodule:: feast.infra.online_stores.contrib.postgres + :members: + :noindex: diff --git a/sdk/python/docs/source/feast.infra.offline_stores.contrib.postgres_offline_store.rst b/sdk/python/docs/source/feast.infra.offline_stores.contrib.postgres_offline_store.rst new file mode 100644 index 00000000000..95afafb5b80 --- /dev/null +++ b/sdk/python/docs/source/feast.infra.offline_stores.contrib.postgres_offline_store.rst @@ -0,0 +1,29 @@ +feast.infra.offline\_stores.contrib.postgres\_offline\_store package +==================================================================== + +Submodules +---------- + +feast.infra.offline\_stores.contrib.postgres\_offline\_store.postgres module +---------------------------------------------------------------------------- + +.. automodule:: feast.infra.offline_stores.contrib.postgres_offline_store.postgres + :members: + :undoc-members: + :show-inheritance: + +feast.infra.offline\_stores.contrib.postgres\_offline\_store.postgres\_source module +------------------------------------------------------------------------------------ + +.. automodule:: feast.infra.offline_stores.contrib.postgres_offline_store.postgres_source + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.infra.offline_stores.contrib.postgres_offline_store + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.offline_stores.contrib.rst b/sdk/python/docs/source/feast.infra.offline_stores.contrib.rst index 4fabad68446..39902da1304 100644 --- a/sdk/python/docs/source/feast.infra.offline_stores.contrib.rst +++ b/sdk/python/docs/source/feast.infra.offline_stores.contrib.rst @@ -7,6 +7,7 @@ Subpackages .. 
toctree:: :maxdepth: 4 + feast.infra.offline_stores.contrib.postgres_offline_store feast.infra.offline_stores.contrib.spark_offline_store feast.infra.offline_stores.contrib.trino_offline_store @@ -21,6 +22,14 @@ feast.infra.offline\_stores.contrib.contrib\_repo\_configuration module :undoc-members: :show-inheritance: +feast.infra.offline\_stores.contrib.postgres\_repo\_configuration module +------------------------------------------------------------------------ + +.. automodule:: feast.infra.offline_stores.contrib.postgres_repo_configuration + :members: + :undoc-members: + :show-inheritance: + Module contents --------------- diff --git a/sdk/python/docs/source/feast.infra.online_stores.contrib.rst b/sdk/python/docs/source/feast.infra.online_stores.contrib.rst new file mode 100644 index 00000000000..6ee9b1a532d --- /dev/null +++ b/sdk/python/docs/source/feast.infra.online_stores.contrib.rst @@ -0,0 +1,21 @@ +feast.infra.online\_stores.contrib package +========================================== + +Submodules +---------- + +feast.infra.online\_stores.contrib.postgres module +-------------------------------------------------- + +.. automodule:: feast.infra.online_stores.contrib.postgres + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.infra.online_stores.contrib + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.online_stores.rst b/sdk/python/docs/source/feast.infra.online_stores.rst index 5c23796bf8a..842522c9d7c 100644 --- a/sdk/python/docs/source/feast.infra.online_stores.rst +++ b/sdk/python/docs/source/feast.infra.online_stores.rst @@ -1,6 +1,14 @@ feast.infra.online\_stores package ================================== +Subpackages +----------- + +.. 
toctree:: + :maxdepth: 4 + + feast.infra.online_stores.contrib + Submodules ---------- diff --git a/sdk/python/docs/source/feast.infra.utils.postgres.rst b/sdk/python/docs/source/feast.infra.utils.postgres.rst new file mode 100644 index 00000000000..119c8c1dee9 --- /dev/null +++ b/sdk/python/docs/source/feast.infra.utils.postgres.rst @@ -0,0 +1,29 @@ +feast.infra.utils.postgres package +================================== + +Submodules +---------- + +feast.infra.utils.postgres.connection\_utils module +--------------------------------------------------- + +.. automodule:: feast.infra.utils.postgres.connection_utils + :members: + :undoc-members: + :show-inheritance: + +feast.infra.utils.postgres.postgres\_config module +-------------------------------------------------- + +.. automodule:: feast.infra.utils.postgres.postgres_config + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: feast.infra.utils.postgres + :members: + :undoc-members: + :show-inheritance: diff --git a/sdk/python/docs/source/feast.infra.utils.rst b/sdk/python/docs/source/feast.infra.utils.rst index 9655acc206d..30ec6166bf4 100644 --- a/sdk/python/docs/source/feast.infra.utils.rst +++ b/sdk/python/docs/source/feast.infra.utils.rst @@ -1,6 +1,14 @@ feast.infra.utils package ========================= +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + feast.infra.utils.postgres + Submodules ---------- diff --git a/sdk/python/docs/source/feast.rst b/sdk/python/docs/source/feast.rst index 35220913be4..fb1e77a3736 100644 --- a/sdk/python/docs/source/feast.rst +++ b/sdk/python/docs/source/feast.rst @@ -24,6 +24,14 @@ feast.base\_feature\_view module :undoc-members: :show-inheritance: +feast.batch\_feature\_view module +--------------------------------- + +.. 
automodule:: feast.batch_feature_view + :members: + :undoc-members: + :show-inheritance: + feast.cli module ---------------- @@ -264,6 +272,14 @@ feast.saved\_dataset module :undoc-members: :show-inheritance: +feast.stream\_feature\_view module +---------------------------------- + +.. automodule:: feast.stream_feature_view + :members: + :undoc-members: + :show-inheritance: + feast.transformation\_server module ----------------------------------- diff --git a/sdk/python/docs/source/index.rst b/sdk/python/docs/source/index.rst index 52783b40e3d..ca746707eac 100644 --- a/sdk/python/docs/source/index.rst +++ b/sdk/python/docs/source/index.rst @@ -59,6 +59,12 @@ Trino Source :members: :exclude-members: TrinoOptions +PostgreSQL Source +------------------ + +.. automodule:: feast.infra.offline_stores.contrib.postgres_offline_store.postgres_source + :members: + :exclude-members: PostgreSQLOptions File Source ------------------ @@ -192,6 +198,12 @@ Trino Offline Store .. automodule:: feast.infra.offline_stores.contrib.trino_offline_store.trino :members: +PostgreSQL Offline Store +------------------------ + +.. automodule:: feast.infra.offline_stores.contrib.postgres_offline_store.postgres + :members: + Online Store ================== @@ -223,4 +235,11 @@ Redis Online Store .. automodule:: feast.infra.online_stores.redis :members: - :noindex: \ No newline at end of file + :noindex: + +PostgreSQL Online Store +----------------------- + +.. automodule:: feast.infra.online_stores.contrib.postgres + :members: + :noindex: diff --git a/sdk/python/docs/source/modules.rst b/sdk/python/docs/source/modules.rst new file mode 100644 index 00000000000..3a6f8333abd --- /dev/null +++ b/sdk/python/docs/source/modules.rst @@ -0,0 +1,7 @@ +feast +===== + +.. 
toctree:: + :maxdepth: 4 + + feast diff --git a/sdk/python/feast/__init__.py b/sdk/python/feast/__init__.py index 5127e03b560..8461cf31b62 100644 --- a/sdk/python/feast/__init__.py +++ b/sdk/python/feast/__init__.py @@ -1,12 +1,15 @@ -import logging - -from pkg_resources import DistributionNotFound, get_distribution +try: + from importlib.metadata import PackageNotFoundError + from importlib.metadata import version as _version +except ModuleNotFoundError: + from importlib_metadata import PackageNotFoundError, version as _version # type: ignore from feast.infra.offline_stores.bigquery_source import BigQuerySource from feast.infra.offline_stores.file_source import FileSource from feast.infra.offline_stores.redshift_source import RedshiftSource from feast.infra.offline_stores.snowflake_source import SnowflakeSource +from .batch_feature_view import BatchFeatureView from .data_source import ( KafkaSource, KinesisSource, @@ -23,21 +26,17 @@ from .on_demand_feature_view import OnDemandFeatureView from .repo_config import RepoConfig from .request_feature_view import RequestFeatureView +from .stream_feature_view import StreamFeatureView from .value_type import ValueType -logging.basicConfig( - format="%(asctime)s %(levelname)s:%(message)s", - datefmt="%m/%d/%Y %I:%M:%S %p", - level=logging.INFO, -) - try: - __version__ = get_distribution(__name__).version -except DistributionNotFound: + __version__ = _version("feast") +except PackageNotFoundError: # package is not installed pass __all__ = [ + "BatchFeatureView", "Entity", "KafkaSource", "KinesisSource", @@ -49,6 +48,7 @@ "OnDemandFeatureView", "RepoConfig", "SourceType", + "StreamFeatureView", "ValueType", "BigQuerySource", "FileSource", diff --git a/sdk/python/feast/base_feature_view.py b/sdk/python/feast/base_feature_view.py index 67435fa44c8..80b3b0cec82 100644 --- a/sdk/python/feast/base_feature_view.py +++ b/sdk/python/feast/base_feature_view.py @@ -110,7 +110,7 @@ def __str__(self): return 
str(MessageToJson(self.to_proto())) def __hash__(self): - return hash((id(self), self.name)) + return hash((self.name)) def __getitem__(self, item): assert isinstance(item, list) @@ -134,6 +134,7 @@ def __eq__(self, other): if ( self.name != other.name or sorted(self.features) != sorted(other.features) + or self.projection != other.projection or self.description != other.description or self.tags != other.tags or self.owner != other.owner diff --git a/sdk/python/feast/batch_feature_view.py b/sdk/python/feast/batch_feature_view.py new file mode 100644 index 00000000000..2f9fb080dbd --- /dev/null +++ b/sdk/python/feast/batch_feature_view.py @@ -0,0 +1,58 @@ +from datetime import timedelta +from typing import Dict, List, Optional, Union + +from feast.data_source import DataSource +from feast.entity import Entity +from feast.feature_view import FeatureView +from feast.field import Field +from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto + +SUPPORTED_BATCH_SOURCES = { + "BigQuerySource", + "FileSource", + "RedshiftSource", + "SnowflakeSource", + "SparkSource", + "TrinoSource", +} + + +class BatchFeatureView(FeatureView): + def __init__( + self, + *, + name: Optional[str] = None, + entities: Optional[Union[List[Entity], List[str]]] = None, + ttl: Optional[timedelta] = None, + tags: Optional[Dict[str, str]] = None, + online: bool = True, + description: str = "", + owner: str = "", + schema: Optional[List[Field]] = None, + source: Optional[DataSource] = None, + ): + + if source is None: + raise ValueError("Feature views need a source specified") + if ( + type(source).__name__ not in SUPPORTED_BATCH_SOURCES + and source.to_proto().type != DataSourceProto.SourceType.CUSTOM_SOURCE + ): + raise ValueError( + f"Batch feature views need a batch source, expected one of {SUPPORTED_BATCH_SOURCES} " + f"or CUSTOM_SOURCE, got {type(source).__name__}: {source.name} instead " + ) + + super().__init__( + name=name, + entities=entities, + ttl=ttl, + 
batch_source=None, + stream_source=None, + tags=tags, + online=online, + description=description, + owner=owner, + schema=schema, + source=source, + ) diff --git a/sdk/python/feast/cli.py b/sdk/python/feast/cli.py index 7dc8e198599..a4407132e4e 100644 --- a/sdk/python/feast/cli.py +++ b/sdk/python/feast/cli.py @@ -109,11 +109,50 @@ def version(): print(f'Feast SDK Version: "{pkg_resources.get_distribution("feast")}"') +@cli.command() +@click.option( + "--host", + "-h", + type=click.STRING, + default="0.0.0.0", + help="Specify a host for the server [default: 0.0.0.0]", +) +@click.option( + "--port", + "-p", + type=click.INT, + default=8888, + help="Specify a port for the server [default: 8888]", +) +@click.option( + "--registry_ttl_sec", + "-r", + help="Number of seconds after which the registry is refreshed. Default is 5 seconds.", + type=int, + default=5, +) +@click.pass_context +def ui(ctx: click.Context, host: str, port: int, registry_ttl_sec: int): + """ + Shows the Feast UI over the current directory + """ + repo = ctx.obj["CHDIR"] + cli_check_repo(repo) + store = FeatureStore(repo_path=str(repo)) + # Pass in the registry_dump method to get around a circular dependency + store.serve_ui( + host=host, + port=port, + get_registry_dump=registry_dump, + registry_ttl_sec=registry_ttl_sec, + ) + + @cli.command() @click.pass_context def endpoint(ctx: click.Context): """ - Display feature server endpoints. + Display feature server endpoints """ repo = ctx.obj["CHDIR"] cli_check_repo(repo) @@ -154,7 +193,7 @@ def data_source_describe(ctx: click.Context, name: str): warnings.warn( "Describing data sources will only work properly if all data sources have names or table names specified. 
" - "Starting Feast 0.21, data source unique names will be required to encourage data source discovery.", + "Starting Feast 0.23, data source unique names will be required to encourage data source discovery.", RuntimeWarning, ) print( @@ -181,7 +220,7 @@ def data_source_list(ctx: click.Context): warnings.warn( "Listing data sources will only work properly if all data sources have names or table names specified. " - "Starting Feast 0.21, data source unique names will be required to encourage data source discovery", + "Starting Feast 0.23, data source unique names will be required to encourage data source discovery", RuntimeWarning, ) print(tabulate(table, headers=["NAME", "CLASS"], tablefmt="plain")) @@ -475,7 +514,7 @@ def registry_dump_command(ctx: click.Context): cli_check_repo(repo) repo_config = load_repo_config(repo) - registry_dump(repo_config, repo_path=repo) + click.echo(registry_dump(repo_config, repo_path=repo)) @cli.command("materialize") @@ -539,7 +578,8 @@ def materialize_incremental_command(ctx: click.Context, end_ts: str, views: List "--template", "-t", type=click.Choice( - ["local", "gcp", "aws", "snowflake", "spark"], case_sensitive=False + ["local", "gcp", "aws", "snowflake", "spark", "postgres", "hbase"], + case_sensitive=False, ), help="Specify a template for the created project", default="local", @@ -593,7 +633,7 @@ def serve_command(ctx: click.Context, host: str, port: int, no_access_log: bool) ) @click.pass_context def serve_transformations_command(ctx: click.Context, port: int): - """[Experimental] Start a the feature consumption server locally on a given port.""" + """[Experimental] Start a feature consumption server locally on a given port.""" repo = ctx.obj["CHDIR"] cli_check_repo(repo) store = FeatureStore(repo_path=str(repo)) diff --git a/sdk/python/feast/data_source.py b/sdk/python/feast/data_source.py index 0e264117ae5..6a2b9a0d148 100644 --- a/sdk/python/feast/data_source.py +++ b/sdk/python/feast/data_source.py @@ -21,7 +21,7 @@ 
from feast import type_map from feast.data_format import StreamFormat -from feast.field import Field, from_value_type +from feast.field import Field from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto from feast.repo_config import RepoConfig, get_data_source_class_from_type from feast.types import VALUE_TYPES_TO_FEAST_TYPES @@ -186,6 +186,7 @@ class DataSource(ABC): def __init__( self, + *, event_timestamp_column: Optional[str] = None, created_timestamp_column: Optional[str] = None, field_mapping: Optional[Dict[str, str]] = None, @@ -245,7 +246,7 @@ def __init__( self.owner = owner or "" def __hash__(self): - return hash((id(self), self.name)) + return hash((self.name, self.timestamp_field)) def __str__(self): return str(MessageToJson(self.to_proto())) @@ -263,9 +264,9 @@ def __eq__(self, other): or self.created_timestamp_column != other.created_timestamp_column or self.field_mapping != other.field_mapping or self.date_partition_column != other.date_partition_column + or self.description != other.description or self.tags != other.tags or self.owner != other.owner - or self.description != other.description ): return False @@ -354,11 +355,12 @@ def get_table_column_names_and_types( def __init__( self, - name: str, - event_timestamp_column: str, - bootstrap_servers: str, - message_format: StreamFormat, - topic: str, + *args, + name: Optional[str] = None, + event_timestamp_column: Optional[str] = "", + bootstrap_servers: Optional[str] = None, + message_format: Optional[StreamFormat] = None, + topic: Optional[str] = None, created_timestamp_column: Optional[str] = "", field_mapping: Optional[Dict[str, str]] = None, date_partition_column: Optional[str] = "", @@ -368,22 +370,62 @@ def __init__( timestamp_field: Optional[str] = "", batch_source: Optional[DataSource] = None, ): + positional_attributes = [ + "name", + "event_timestamp_column", + "bootstrap_servers", + "message_format", + "topic", + ] + _name = name + _event_timestamp_column = 
event_timestamp_column + _bootstrap_servers = bootstrap_servers or "" + _message_format = message_format + _topic = topic or "" + + if args: + warnings.warn( + ( + "Kafka parameters should be specified as a keyword argument instead of a positional arg." + "Feast 0.23+ will not support positional arguments to construct Kafka sources" + ), + DeprecationWarning, + ) + if len(args) > len(positional_attributes): + raise ValueError( + f"Only {', '.join(positional_attributes)} are allowed as positional args when defining " + f"Kafka sources, for backwards compatibility." + ) + if len(args) >= 1: + _name = args[0] + if len(args) >= 2: + _event_timestamp_column = args[1] + if len(args) >= 3: + _bootstrap_servers = args[2] + if len(args) >= 4: + _message_format = args[3] + if len(args) >= 5: + _topic = args[4] + + if _message_format is None: + raise ValueError("Message format must be specified for Kafka source") + super().__init__( - event_timestamp_column=event_timestamp_column, + event_timestamp_column=_event_timestamp_column, created_timestamp_column=created_timestamp_column, field_mapping=field_mapping, date_partition_column=date_partition_column, description=description, tags=tags, owner=owner, - name=name, + name=_name, timestamp_field=timestamp_field, ) self.batch_source = batch_source self.kafka_options = KafkaOptions( - bootstrap_servers=bootstrap_servers, - message_format=message_format, - topic=topic, + bootstrap_servers=_bootstrap_servers, + message_format=_message_format, + topic=_topic, ) def __eq__(self, other): @@ -392,6 +434,9 @@ def __eq__(self, other): "Comparisons should only involve KafkaSource class objects." 
) + if not super().__eq__(other): + return False + if ( self.kafka_options.bootstrap_servers != other.kafka_options.bootstrap_servers @@ -402,6 +447,9 @@ def __eq__(self, other): return True + def __hash__(self): + return super().__hash__() + @staticmethod def from_proto(data_source: DataSourceProto): return KafkaSource( @@ -419,7 +467,9 @@ def from_proto(data_source: DataSourceProto): description=data_source.description, tags=dict(data_source.tags), owner=data_source.owner, - batch_source=DataSource.from_proto(data_source.batch_source), + batch_source=DataSource.from_proto(data_source.batch_source) + if data_source.batch_source + else None, ) def to_proto(self) -> DataSourceProto: @@ -452,46 +502,73 @@ class RequestSource(DataSource): """ RequestSource that can be used to provide input features for on demand transforms - Args: + Attributes: name: Name of the request data source - schema Union[Dict[str, ValueType], List[Field]]: Schema mapping from the input feature name to a ValueType - description (optional): A human-readable description. - tags (optional): A dictionary of key-value pairs to store arbitrary metadata. - owner (optional): The owner of the request data source, typically the email of the primary + schema: Schema mapping from the input feature name to a ValueType + description: A human-readable description. + tags: A dictionary of key-value pairs to store arbitrary metadata. + owner: The owner of the request data source, typically the email of the primary maintainer. 
""" name: str schema: List[Field] + description: str + tags: Dict[str, str] + owner: str def __init__( self, - name: str, - schema: Union[Dict[str, ValueType], List[Field]], + *args, + name: Optional[str] = None, + schema: Optional[Union[Dict[str, ValueType], List[Field]]] = None, description: Optional[str] = "", tags: Optional[Dict[str, str]] = None, owner: Optional[str] = "", ): """Creates a RequestSource object.""" - super().__init__(name=name, description=description, tags=tags, owner=owner) - if isinstance(schema, Dict): + positional_attributes = ["name", "schema"] + _name = name + _schema = schema + if args: + warnings.warn( + ( + "Request source parameters should be specified as a keyword argument instead of a positional arg." + "Feast 0.23+ will not support positional arguments to construct request sources" + ), + DeprecationWarning, + ) + if len(args) > len(positional_attributes): + raise ValueError( + f"Only {', '.join(positional_attributes)} are allowed as positional args when defining " + f"feature views, for backwards compatibility." + ) + if len(args) >= 1: + _name = args[0] + if len(args) >= 2: + _schema = args[1] + + super().__init__(name=_name, description=description, tags=tags, owner=owner) + if not _schema: + raise ValueError("Schema needs to be provided for Request Source") + if isinstance(_schema, Dict): warnings.warn( "Schema in RequestSource is changing type. The schema data type Dict[str, ValueType] is being deprecated in Feast 0.23. 
" "Please use List[Field] instead for the schema", DeprecationWarning, ) schemaList = [] - for key, valueType in schema.items(): + for key, valueType in _schema.items(): schemaList.append( Field(name=key, dtype=VALUE_TYPES_TO_FEAST_TYPES[valueType]) ) self.schema = schemaList - elif isinstance(schema, List): - self.schema = schema + elif isinstance(_schema, List): + self.schema = _schema else: raise Exception( "Schema type must be either dictionary or list, not " - + str(type(schema)) + + str(type(_schema)) ) def validate(self, config: RepoConfig): @@ -507,13 +584,10 @@ def __eq__(self, other): raise TypeError( "Comparisons should only involve RequestSource class objects." ) - if ( - self.name != other.name - or self.description != other.description - or self.owner != other.owner - or self.tags != other.tags - ): + + if not super().__eq__(other): return False + if isinstance(self.schema, List) and isinstance(other.schema, List): for field1, field2 in zip(self.schema, other.schema): if field1 != field2: @@ -628,7 +702,9 @@ def from_proto(data_source: DataSourceProto): description=data_source.description, tags=dict(data_source.tags), owner=data_source.owner, - batch_source=DataSource.from_proto(data_source.batch_source), + batch_source=DataSource.from_proto(data_source.batch_source) + if data_source.batch_source + else None, ) @staticmethod @@ -640,12 +716,13 @@ def get_table_query_string(self) -> str: def __init__( self, - name: str, - event_timestamp_column: str, - created_timestamp_column: str, - record_format: StreamFormat, - region: str, - stream_name: str, + *args, + name: Optional[str] = None, + event_timestamp_column: Optional[str] = "", + created_timestamp_column: Optional[str] = "", + record_format: Optional[StreamFormat] = None, + region: Optional[str] = "", + stream_name: Optional[str] = "", field_mapping: Optional[Dict[str, str]] = None, date_partition_column: Optional[str] = "", description: Optional[str] = "", @@ -654,10 +731,53 @@ def __init__( 
timestamp_field: Optional[str] = "", batch_source: Optional[DataSource] = None, ): + positional_attributes = [ + "name", + "event_timestamp_column", + "created_timestamp_column", + "record_format", + "region", + "stream_name", + ] + _name = name + _event_timestamp_column = event_timestamp_column + _created_timestamp_column = created_timestamp_column + _record_format = record_format + _region = region or "" + _stream_name = stream_name or "" + if args: + warnings.warn( + ( + "Kinesis parameters should be specified as a keyword argument instead of a positional arg." + "Feast 0.23+ will not support positional arguments to construct kinesis sources" + ), + DeprecationWarning, + ) + if len(args) > len(positional_attributes): + raise ValueError( + f"Only {', '.join(positional_attributes)} are allowed as positional args when defining " + f"kinesis sources, for backwards compatibility." + ) + if len(args) >= 1: + _name = args[0] + if len(args) >= 2: + _event_timestamp_column = args[1] + if len(args) >= 3: + _created_timestamp_column = args[2] + if len(args) >= 4: + _record_format = args[3] + if len(args) >= 5: + _region = args[4] + if len(args) >= 6: + _stream_name = args[5] + + if _record_format is None: + raise ValueError("Record format must be specified for kinesis source") + super().__init__( - name=name, - event_timestamp_column=event_timestamp_column, - created_timestamp_column=created_timestamp_column, + name=_name, + event_timestamp_column=_event_timestamp_column, + created_timestamp_column=_created_timestamp_column, field_mapping=field_mapping, date_partition_column=date_partition_column, description=description, @@ -667,21 +787,20 @@ def __init__( ) self.batch_source = batch_source self.kinesis_options = KinesisOptions( - record_format=record_format, region=region, stream_name=stream_name + record_format=_record_format, region=_region, stream_name=_stream_name ) def __eq__(self, other): - if other is None: - return False - if not isinstance(other, KinesisSource): 
raise TypeError( "Comparisons should only involve KinesisSource class objects." ) + if not super().__eq__(other): + return False + if ( - self.name != other.name - or self.kinesis_options.record_format != other.kinesis_options.record_format + self.kinesis_options.record_format != other.kinesis_options.record_format or self.kinesis_options.region != other.kinesis_options.region or self.kinesis_options.stream_name != other.kinesis_options.stream_name ): @@ -689,6 +808,9 @@ def __eq__(self, other): return True + def __hash__(self): + return super().__hash__() + def to_proto(self) -> DataSourceProto: data_source_proto = DataSourceProto( name=self.name, @@ -714,45 +836,73 @@ class PushSource(DataSource): A source that can be used to ingest features on request """ - name: str - schema: List[Field] + # TODO(adchia): consider adding schema here in case where Feast manages pushing events to the offline store + # TODO(adchia): consider a "mode" to support pushing raw vs transformed events batch_source: DataSource - timestamp_field: str def __init__( self, - *, - name: str, - schema: List[Field], - batch_source: DataSource, + *args, + name: Optional[str] = None, + batch_source: Optional[DataSource] = None, description: Optional[str] = "", tags: Optional[Dict[str, str]] = None, owner: Optional[str] = "", - timestamp_field: Optional[str] = "", ): """ Creates a PushSource object. Args: name: Name of the push source - schema: Schema mapping from the input feature name to a ValueType batch_source: The batch source that backs this push source. It's used when materializing from the offline store to the online store, and when retrieving historical features. description (optional): A human-readable description. tags (optional): A dictionary of key-value pairs to store arbitrary metadata. owner (optional): The owner of the data source, typically the email of the primary maintainer. - timestamp_field (optional): Event timestamp foe;d used for point in time - joins of feature values. 
""" - super().__init__(name=name, description=description, tags=tags, owner=owner) - self.schema = sorted(schema) # TODO: add schema inference from a batch source - self.batch_source = batch_source - if not self.batch_source: - raise ValueError(f"batch_source is needed for push source {self.name}") - if not timestamp_field: - raise ValueError(f"timestamp field is needed for push source {self.name}") - self.timestamp_field = timestamp_field + positional_attributes = ["name", "batch_source"] + _name = name + _batch_source = batch_source + if args: + warnings.warn( + ( + "Push source parameters should be specified as a keyword argument instead of a positional arg." + "Feast 0.23+ will not support positional arguments to construct push sources" + ), + DeprecationWarning, + ) + if len(args) > len(positional_attributes): + raise ValueError( + f"Only {', '.join(positional_attributes)} are allowed as positional args when defining " + f"push sources, for backwards compatibility." + ) + if len(args) >= 1: + _name = args[0] + if len(args) >= 2: + _batch_source = args[1] + + super().__init__(name=_name, description=description, tags=tags, owner=owner) + if not _batch_source: + raise ValueError( + f"batch_source parameter is needed for push source {self.name}" + ) + self.batch_source = _batch_source + + def __eq__(self, other): + if not isinstance(other, PushSource): + raise TypeError("Comparisons should only involve PushSource class objects.") + + if not super().__eq__(other): + return False + + if self.batch_source != other.batch_source: + return False + + return True + + def __hash__(self): + return super().__hash__() def validate(self, config: RepoConfig): pass @@ -764,38 +914,25 @@ def get_table_column_names_and_types( @staticmethod def from_proto(data_source: DataSourceProto): - schema_pb = data_source.push_options.schema - schema = [] - for key, val in schema_pb.items(): - schema.append(Field(name=key, dtype=from_value_type(ValueType(val)))) - assert 
data_source.HasField("batch_source") batch_source = DataSource.from_proto(data_source.batch_source) return PushSource( name=data_source.name, - schema=sorted(schema), batch_source=batch_source, - timestamp_field=data_source.timestamp_field, description=data_source.description, tags=dict(data_source.tags), owner=data_source.owner, ) def to_proto(self) -> DataSourceProto: - schema_pb = {} - for field in self.schema: - schema_pb[field.name] = field.dtype.to_value_type().value batch_source_proto = None if self.batch_source: batch_source_proto = self.batch_source.to_proto() - options = DataSourceProto.PushOptions(schema=schema_pb,) data_source_proto = DataSourceProto( name=self.name, type=DataSourceProto.PUSH_SOURCE, - push_options=options, - timestamp_field=self.timestamp_field, description=self.description, tags=self.tags, owner=self.owner, diff --git a/sdk/python/feast/diff/registry_diff.py b/sdk/python/feast/diff/registry_diff.py index 10bd88c56f8..197bdfcefaf 100644 --- a/sdk/python/feast/diff/registry_diff.py +++ b/sdk/python/feast/diff/registry_diff.py @@ -61,7 +61,7 @@ def to_string(self): if feast_object_diff.transition_type == TransitionType.UNCHANGED: continue if feast_object_diff.feast_object_type == FeastObjectType.DATA_SOURCE: - # TODO(adchia): Print statements out starting in Feast 0.21 + # TODO(adchia): Print statements out starting in Feast 0.23 continue action, color = message_action_map[feast_object_diff.transition_type] log_string += f"{action} {feast_object_diff.feast_object_type.value} {Style.BRIGHT + color}{feast_object_diff.name}{Style.RESET_ALL}\n" @@ -177,7 +177,7 @@ def extract_objects_for_keep_delete_update_add( FeastObjectType, List[Any] ] = FeastObjectType.get_objects_from_registry(registry, current_project) registry_object_type_to_repo_contents: Dict[ - FeastObjectType, Set[Any] + FeastObjectType, List[Any] ] = FeastObjectType.get_objects_from_repo_contents(desired_repo_contents) for object_type in FEAST_OBJECT_TYPES: @@ -295,6 +295,11 @@ 
def apply_diff_to_registry( registry.delete_feature_view( feature_view_obj.name, project, commit=False, ) + elif feast_object_diff.feast_object_type == FeastObjectType.DATA_SOURCE: + ds_obj = cast(DataSource, feast_object_diff.current_feast_object) + registry.delete_data_source( + ds_obj.name, project, commit=False, + ) if feast_object_diff.transition_type in [ TransitionType.CREATE, diff --git a/sdk/python/feast/embedded_go/online_features_service.py b/sdk/python/feast/embedded_go/online_features_service.py index e2da2dfbd8f..48e31766cb3 100644 --- a/sdk/python/feast/embedded_go/online_features_service.py +++ b/sdk/python/feast/embedded_go/online_features_service.py @@ -1,4 +1,5 @@ from functools import partial +from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union import pyarrow as pa @@ -14,65 +15,29 @@ from feast.protos.feast.serving.ServingService_pb2 import GetOnlineFeaturesResponse from feast.protos.feast.types import Value_pb2 from feast.repo_config import RepoConfig +from feast.types import from_value_type from feast.value_type import ValueType -from .lib.embedded import DataTable, NewOnlineFeatureService, OnlineFeatureServiceConfig +from .lib.embedded import ( + DataTable, + LoggingOptions, + NewOnlineFeatureService, + OnlineFeatureServiceConfig, +) from .lib.go import Slice_string +from .type_map import FEAST_TYPE_TO_ARROW_TYPE, arrow_array_to_array_of_proto if TYPE_CHECKING: from feast.feature_store import FeatureStore -ARROW_TYPE_TO_PROTO_FIELD = { - pa.int32(): "int32_val", - pa.int64(): "int64_val", - pa.float32(): "float_val", - pa.float64(): "double_val", - pa.bool_(): "bool_val", - pa.string(): "string_val", - pa.binary(): "bytes_val", - pa.time64("ns"): "unix_timestamp_val", -} - -ARROW_LIST_TYPE_TO_PROTO_FIELD = { - pa.int32(): "int32_list_val", - pa.int64(): "int64_list_val", - pa.float32(): "float_list_val", - pa.float64(): "double_list_val", - pa.bool_(): "bool_list_val", - pa.string(): 
"string_list_val", - pa.binary(): "bytes_list_val", - pa.time64("ns"): "unix_timestamp_list_val", -} - -ARROW_LIST_TYPE_TO_PROTO_LIST_CLASS = { - pa.int32(): Value_pb2.Int32List, - pa.int64(): Value_pb2.Int64List, - pa.float32(): Value_pb2.FloatList, - pa.float64(): Value_pb2.DoubleList, - pa.bool_(): Value_pb2.BoolList, - pa.string(): Value_pb2.StringList, - pa.binary(): Value_pb2.BytesList, - pa.time64("ns"): Value_pb2.Int64List, -} - -# used for entity types only -PROTO_TYPE_TO_ARROW_TYPE = { - ValueType.INT32: pa.int32(), - ValueType.INT64: pa.int64(), - ValueType.FLOAT: pa.float32(), - ValueType.DOUBLE: pa.float64(), - ValueType.STRING: pa.string(), - ValueType.BYTES: pa.binary(), -} - - class EmbeddedOnlineFeatureServer: def __init__( self, repo_path: str, repo_config: RepoConfig, feature_store: "FeatureStore" ): # keep callback in self to prevent it from GC self._transformation_callback = partial(transformation_callback, feature_store) + self._logging_callback = partial(logging_callback, feature_store) self._service = NewOnlineFeatureService( OnlineFeatureServiceConfig( @@ -174,13 +139,37 @@ def get_online_features( resp = record_batch_to_online_response(record_batch) return OnlineResponse(resp) + def start_grpc_server( + self, + host: str, + port: int, + enable_logging: bool = True, + logging_options: Optional[LoggingOptions] = None, + ): + if enable_logging: + if logging_options: + self._service.StartGprcServerWithLogging( + host, port, self._logging_callback, logging_options + ) + else: + self._service.StartGprcServerWithLoggingDefaultOpts( + host, port, self._logging_callback + ) + else: + self._service.StartGprcServer(host, port) + + def stop_grpc_server(self): + self._service.Stop() + def _to_arrow(value, type_hint: Optional[ValueType]) -> pa.Array: if isinstance(value, Value_pb2.RepeatedValue): _proto_to_arrow(value) - if type_hint in PROTO_TYPE_TO_ARROW_TYPE: - return pa.array(value, PROTO_TYPE_TO_ARROW_TYPE[type_hint]) + if type_hint: + feast_type = 
from_value_type(type_hint) + if feast_type in FEAST_TYPE_TO_ARROW_TYPE: + return pa.array(value, FEAST_TYPE_TO_ARROW_TYPE[feast_type]) return pa.array(value) @@ -216,6 +205,18 @@ def transformation_callback( return output_record.num_rows +def logging_callback( + fs: "FeatureStore", feature_service_name: str, dataset_dir: str, +) -> bytes: + feature_service = fs.get_feature_service(feature_service_name, allow_cache=True) + try: + fs.write_logged_features(logs=Path(dataset_dir), source=feature_service) + except Exception as exc: + return repr(exc).encode() + + return "".encode() # no error + + def allocate_schema_and_array(): c_schema = ffi.new("struct ArrowSchema*") ptr_schema = int(ffi.cast("uintptr_t", c_schema)) @@ -263,31 +264,9 @@ def record_batch_to_online_response(record_batch): [Value_pb2.Value()] * len(record_batch.columns[idx]) ) else: - if isinstance(field.type, pa.ListType): - proto_list_class = ARROW_LIST_TYPE_TO_PROTO_LIST_CLASS[ - field.type.value_type - ] - proto_field_name = ARROW_LIST_TYPE_TO_PROTO_FIELD[field.type.value_type] - - column = record_batch.columns[idx] - if field.type.value_type == pa.time64("ns"): - column = column.cast(pa.list_(pa.int64())) - - for v in column.tolist(): - feature_vector.values.append( - Value_pb2.Value(**{proto_field_name: proto_list_class(val=v)}) - ) - else: - proto_field_name = ARROW_TYPE_TO_PROTO_FIELD[field.type] - - column = record_batch.columns[idx] - if field.type == pa.time64("ns"): - column = column.cast(pa.int64()) - - for v in column.tolist(): - feature_vector.values.append( - Value_pb2.Value(**{proto_field_name: v}) - ) + feature_vector.values.extend( + arrow_array_to_array_of_proto(field.type, record_batch.columns[idx]) + ) resp.results.append(feature_vector) resp.metadata.feature_names.val.append(field.name) diff --git a/sdk/python/feast/embedded_go/type_map.py b/sdk/python/feast/embedded_go/type_map.py new file mode 100644 index 00000000000..e70dc3be865 --- /dev/null +++ 
b/sdk/python/feast/embedded_go/type_map.py @@ -0,0 +1,88 @@ +from typing import List + +import pyarrow as pa +import pytz + +from feast.protos.feast.types import Value_pb2 +from feast.types import Array, PrimitiveFeastType + +PA_TIMESTAMP_TYPE = pa.timestamp("s", tz=pytz.UTC) + +ARROW_TYPE_TO_PROTO_FIELD = { + pa.int32(): "int32_val", + pa.int64(): "int64_val", + pa.float32(): "float_val", + pa.float64(): "double_val", + pa.bool_(): "bool_val", + pa.string(): "string_val", + pa.binary(): "bytes_val", + PA_TIMESTAMP_TYPE: "unix_timestamp_val", +} + +ARROW_LIST_TYPE_TO_PROTO_FIELD = { + pa.int32(): "int32_list_val", + pa.int64(): "int64_list_val", + pa.float32(): "float_list_val", + pa.float64(): "double_list_val", + pa.bool_(): "bool_list_val", + pa.string(): "string_list_val", + pa.binary(): "bytes_list_val", + PA_TIMESTAMP_TYPE: "unix_timestamp_list_val", +} + +ARROW_LIST_TYPE_TO_PROTO_LIST_CLASS = { + pa.int32(): Value_pb2.Int32List, + pa.int64(): Value_pb2.Int64List, + pa.float32(): Value_pb2.FloatList, + pa.float64(): Value_pb2.DoubleList, + pa.bool_(): Value_pb2.BoolList, + pa.string(): Value_pb2.StringList, + pa.binary(): Value_pb2.BytesList, + PA_TIMESTAMP_TYPE: Value_pb2.Int64List, +} + +FEAST_TYPE_TO_ARROW_TYPE = { + PrimitiveFeastType.INT32: pa.int32(), + PrimitiveFeastType.INT64: pa.int64(), + PrimitiveFeastType.FLOAT32: pa.float32(), + PrimitiveFeastType.FLOAT64: pa.float64(), + PrimitiveFeastType.STRING: pa.string(), + PrimitiveFeastType.BYTES: pa.binary(), + PrimitiveFeastType.BOOL: pa.bool_(), + PrimitiveFeastType.UNIX_TIMESTAMP: pa.timestamp("s"), + Array(PrimitiveFeastType.INT32): pa.list_(pa.int32()), + Array(PrimitiveFeastType.INT64): pa.list_(pa.int64()), + Array(PrimitiveFeastType.FLOAT32): pa.list_(pa.float32()), + Array(PrimitiveFeastType.FLOAT64): pa.list_(pa.float64()), + Array(PrimitiveFeastType.STRING): pa.list_(pa.string()), + Array(PrimitiveFeastType.BYTES): pa.list_(pa.binary()), + Array(PrimitiveFeastType.BOOL): pa.list_(pa.bool_()), 
+ Array(PrimitiveFeastType.UNIX_TIMESTAMP): pa.list_(pa.timestamp("s")), +} + + +def arrow_array_to_array_of_proto( + arrow_type: pa.DataType, arrow_array: pa.Array +) -> List[Value_pb2.Value]: + values = [] + if isinstance(arrow_type, pa.ListType): + proto_list_class = ARROW_LIST_TYPE_TO_PROTO_LIST_CLASS[arrow_type.value_type] + proto_field_name = ARROW_LIST_TYPE_TO_PROTO_FIELD[arrow_type.value_type] + + if arrow_type.value_type == PA_TIMESTAMP_TYPE: + arrow_array = arrow_array.cast(pa.list_(pa.int64())) + + for v in arrow_array.tolist(): + values.append( + Value_pb2.Value(**{proto_field_name: proto_list_class(val=v)}) + ) + else: + proto_field_name = ARROW_TYPE_TO_PROTO_FIELD[arrow_type] + + if arrow_type == PA_TIMESTAMP_TYPE: + arrow_array = arrow_array.cast(pa.int64()) + + for v in arrow_array.tolist(): + values.append(Value_pb2.Value(**{proto_field_name: v})) + + return values diff --git a/sdk/python/feast/entity.py b/sdk/python/feast/entity.py index 39886268609..1e7c7cf3079 100644 --- a/sdk/python/feast/entity.py +++ b/sdk/python/feast/entity.py @@ -40,7 +40,7 @@ class Entity: owner: The owner of the entity, typically the email of the primary maintainer. created_timestamp: The time when the entity was created. last_updated_timestamp: The time when the entity was last updated. - join_keys: A list of property that uniquely identifies different entities within the + join_keys: A list of properties that uniquely identifies different entities within the collection. This is meant to replace the `join_key` parameter, but currently only supports a list of size one. """ @@ -67,7 +67,25 @@ def __init__( owner: str = "", join_keys: Optional[List[str]] = None, ): - """Creates an Entity object.""" + """ + Creates an Entity object. + + Args: + name: The unique name of the entity. + value_type: The type of the entity, such as string or float. + description: A human-readable description. 
+ join_key (deprecated): A property that uniquely identifies different entities within the + collection. The join_key property is typically used for joining entities + with their associated features. If not specified, defaults to the name. + tags: A dictionary of key-value pairs to store arbitrary metadata. + owner: The owner of the entity, typically the email of the primary maintainer. + join_keys: A list of properties that uniquely identifies different entities within the + collection. This is meant to replace the `join_key` parameter, but currently only + supports a list of size one. + + Raises: + ValueError: Parameters are specified incorrectly. + """ if len(args) == 1: warnings.warn( ( @@ -88,6 +106,15 @@ def __init__( self.value_type = value_type + if join_key: + warnings.warn( + ( + "The `join_key` parameter is being deprecated in favor of the `join_keys` parameter. " + "Please switch from using `join_key` to `join_keys`. Feast 0.23 and onwards will not " + "support the `join_key` parameter." 
+ ), + DeprecationWarning, + ) self.join_keys = join_keys or [] if join_keys and len(join_keys) > 1: raise ValueError( @@ -105,7 +132,7 @@ def __init__( self.last_updated_timestamp = None def __hash__(self) -> int: - return hash((id(self), self.name)) + return hash((self.name, self.join_key)) def __eq__(self, other): if not isinstance(other, Entity): @@ -153,7 +180,7 @@ def from_proto(cls, entity_proto: EntityProto): entity = cls( name=entity_proto.spec.name, value_type=ValueType(entity_proto.spec.value_type), - join_key=entity_proto.spec.join_key, + join_keys=[entity_proto.spec.join_key], description=entity_proto.spec.description, tags=entity_proto.spec.tags, owner=entity_proto.spec.owner, diff --git a/sdk/python/feast/feature.py b/sdk/python/feast/feature.py index 57f75c90d76..d1f96c302ae 100644 --- a/sdk/python/feast/feature.py +++ b/sdk/python/feast/feature.py @@ -91,7 +91,7 @@ def to_proto(self) -> FeatureSpecProto: value_type = ValueTypeProto.Enum.Value(self.dtype.name) return FeatureSpecProto( - name=self.name, value_type=value_type, labels=self.labels, + name=self.name, value_type=value_type, tags=self.labels, ) @classmethod @@ -106,7 +106,7 @@ def from_proto(cls, feature_proto: FeatureSpecProto): feature = cls( name=feature_proto.name, dtype=ValueType(feature_proto.value_type), - labels=dict(feature_proto.labels), + labels=dict(feature_proto.tags), ) return feature diff --git a/sdk/python/feast/feature_logging.py b/sdk/python/feast/feature_logging.py new file mode 100644 index 00000000000..e2982988366 --- /dev/null +++ b/sdk/python/feast/feature_logging.py @@ -0,0 +1,176 @@ +import abc +from typing import TYPE_CHECKING, Dict, Optional, Type, cast + +import pyarrow as pa +from pytz import UTC + +from feast.data_source import DataSource +from feast.embedded_go.type_map import FEAST_TYPE_TO_ARROW_TYPE, PA_TIMESTAMP_TYPE +from feast.errors import ( + FeastObjectNotFoundException, + FeatureViewNotFoundException, + OnDemandFeatureViewNotFoundException, +) +from 
feast.protos.feast.core.FeatureService_pb2 import ( + LoggingConfig as LoggingConfigProto, +) +from feast.types import from_value_type + +if TYPE_CHECKING: + from feast import FeatureService + from feast.registry import Registry + + +REQUEST_ID_FIELD = "__request_id" +LOG_TIMESTAMP_FIELD = "__log_timestamp" +LOG_DATE_FIELD = "__log_date" + + +class LoggingSource: + """ + Logging source describes object that produces logs (eg, feature service produces logs of served features). + It should be able to provide schema of produced logs table and additional metadata that describes logs data. + """ + + @abc.abstractmethod + def get_schema(self, registry: "Registry") -> pa.Schema: + """ Generate schema for logs destination. """ + raise NotImplementedError + + @abc.abstractmethod + def get_log_timestamp_column(self) -> str: + """ Return timestamp column that must exist in generated schema. """ + raise NotImplementedError + + +class FeatureServiceLoggingSource(LoggingSource): + def __init__(self, feature_service: "FeatureService", project: str): + self._feature_service = feature_service + self._project = project + + def get_schema(self, registry: "Registry") -> pa.Schema: + fields: Dict[str, pa.DataType] = {} + + for projection in self._feature_service.feature_view_projections: + # The order of fields in the generated schema should match + # the order created on the other side (inside Go logger). + # Otherwise, some offline stores might not accept parquet files (produced by Go). 
+ # Go code can be found here: + # https://github.com/feast-dev/feast/blob/master/go/internal/feast/server/logging/memorybuffer.go#L51 + try: + feature_view = registry.get_feature_view(projection.name, self._project) + except FeatureViewNotFoundException: + try: + on_demand_feature_view = registry.get_on_demand_feature_view( + projection.name, self._project + ) + except OnDemandFeatureViewNotFoundException: + raise FeastObjectNotFoundException( + f"Can't recognize feature view with a name {projection.name}" + ) + + for ( + request_source + ) in on_demand_feature_view.source_request_sources.values(): + for field in request_source.schema: + fields[field.name] = FEAST_TYPE_TO_ARROW_TYPE[field.dtype] + + else: + for entity_name in feature_view.entities: + entity = registry.get_entity(entity_name, self._project) + join_key = projection.join_key_map.get( + entity.join_key, entity.join_key + ) + fields[join_key] = FEAST_TYPE_TO_ARROW_TYPE[ + from_value_type(entity.value_type) + ] + + for feature in projection.features: + fields[ + f"{projection.name_to_use()}__{feature.name}" + ] = FEAST_TYPE_TO_ARROW_TYPE[feature.dtype] + fields[ + f"{projection.name_to_use()}__{feature.name}__timestamp" + ] = PA_TIMESTAMP_TYPE + fields[ + f"{projection.name_to_use()}__{feature.name}__status" + ] = pa.int32() + + # system columns + fields[LOG_TIMESTAMP_FIELD] = pa.timestamp("us", tz=UTC) + fields[LOG_DATE_FIELD] = pa.date32() + fields[REQUEST_ID_FIELD] = pa.string() + + return pa.schema( + [pa.field(name, data_type) for name, data_type in fields.items()] + ) + + def get_log_timestamp_column(self) -> str: + return LOG_TIMESTAMP_FIELD + + +class _DestinationRegistry(type): + classes_by_proto_attr_name: Dict[str, Type["LoggingDestination"]] = {} + + def __new__(cls, name, bases, dct): + kls = type.__new__(cls, name, bases, dct) + if dct.get("_proto_kind"): + cls.classes_by_proto_attr_name[dct["_proto_kind"]] = kls + return kls + + +class LoggingDestination(metaclass=_DestinationRegistry): + 
""" + Logging destination contains details about where exactly logs should be written inside an offline store. + It is implementation specific - each offline store must implement LoggingDestination subclass. + + Kind of logging destination will be determined by matching attribute name in LoggingConfig protobuf message + and "_proto_kind" property of each subclass. + """ + + _proto_kind: str + + @classmethod + @abc.abstractmethod + def from_proto(cls, config_proto: LoggingConfigProto) -> "LoggingDestination": + raise NotImplementedError + + @abc.abstractmethod + def to_proto(self) -> LoggingConfigProto: + raise NotImplementedError + + @abc.abstractmethod + def to_data_source(self) -> DataSource: + """ + Convert this object into a data source to read logs from an offline store. + """ + raise NotImplementedError + + +class LoggingConfig: + destination: LoggingDestination + sample_rate: float + + def __init__(self, destination: LoggingDestination, sample_rate: float = 1.0): + self.destination = destination + self.sample_rate = sample_rate + + @classmethod + def from_proto(cls, config_proto: LoggingConfigProto) -> Optional["LoggingConfig"]: + proto_kind = cast(str, config_proto.WhichOneof("destination")) + if proto_kind is None: + return + + if proto_kind == "custom_destination": + proto_kind = config_proto.custom_destination.kind + + destination_class = _DestinationRegistry.classes_by_proto_attr_name[proto_kind] + return LoggingConfig( + destination=destination_class.from_proto(config_proto), + sample_rate=config_proto.sample_rate, + ) + + def to_proto(self) -> LoggingConfigProto: + proto = self.destination.to_proto() + proto.sample_rate = self.sample_rate + return proto diff --git a/sdk/python/feast/feature_server.py b/sdk/python/feast/feature_server.py index 20fcd410c20..8347bed6da3 100644 --- a/sdk/python/feast/feature_server.py +++ b/sdk/python/feast/feature_server.py @@ -94,9 +94,7 @@ def push(body=Depends(get_body)): @app.post("/write-to-online-store") def 
write_to_online_store(body=Depends(get_body)): warnings.warn( - "write_to_online_store is an experimental feature. " - "This API is unstable and it could be changed in the future. " - "We do not guarantee that future changes will maintain backward compatibility.", + "write_to_online_store is deprecated. Please consider using /push instead", RuntimeWarning, ) try: diff --git a/sdk/python/feast/feature_service.py b/sdk/python/feast/feature_service.py index 40030b34ceb..9490de38c93 100644 --- a/sdk/python/feast/feature_service.py +++ b/sdk/python/feast/feature_service.py @@ -1,9 +1,11 @@ +import warnings from datetime import datetime from typing import Dict, List, Optional, Union from google.protobuf.json_format import MessageToJson from feast.base_feature_view import BaseFeatureView +from feast.feature_logging import LoggingConfig from feast.feature_view import FeatureView from feast.feature_view_projection import FeatureViewProjection from feast.on_demand_feature_view import OnDemandFeatureView @@ -37,21 +39,25 @@ class FeatureService: """ name: str + _features: List[Union[FeatureView, OnDemandFeatureView]] feature_view_projections: List[FeatureViewProjection] description: str tags: Dict[str, str] owner: str created_timestamp: Optional[datetime] = None last_updated_timestamp: Optional[datetime] = None + logging_config: Optional[LoggingConfig] = None @log_exceptions def __init__( self, - name: str, - features: List[Union[FeatureView, OnDemandFeatureView]], + *args, + name: Optional[str] = None, + features: Optional[List[Union[FeatureView, OnDemandFeatureView]]] = None, tags: Dict[str, str] = None, description: str = "", owner: str = "", + logging_config: Optional[LoggingConfig] = None, ): """ Creates a FeatureService object. @@ -59,23 +65,64 @@ def __init__( Raises: ValueError: If one of the specified features is not a valid type. 
""" - self.name = name - self.feature_view_projections = [] - - for feature_grouping in features: - if isinstance(feature_grouping, BaseFeatureView): - self.feature_view_projections.append(feature_grouping.projection) - else: + positional_attributes = ["name", "features"] + _name = name + _features = features + if args: + warnings.warn( + ( + "Feature service parameters should be specified as a keyword argument instead of a positional arg." + "Feast 0.23+ will not support positional arguments to construct feature service" + ), + DeprecationWarning, + ) + if len(args) > len(positional_attributes): raise ValueError( - f"The feature service {name} has been provided with an invalid type " - f'{type(feature_grouping)} as part of the "features" argument.)' + f"Only {', '.join(positional_attributes)} are allowed as positional args when defining " + f"feature service, for backwards compatibility." ) + if len(args) >= 1: + _name = args[0] + if len(args) >= 2: + _features = args[1] + + if not _name: + raise ValueError("Feature service name needs to be specified") + + if not _features: + # Technically, legal to create feature service with no feature views before. 
+ _features = [] + self.name = _name + self._features = _features + self.feature_view_projections = [] self.description = description self.tags = tags or {} self.owner = owner self.created_timestamp = None self.last_updated_timestamp = None + self.logging_config = logging_config + self.infer_features() + + def infer_features(self, fvs_to_update: Optional[Dict[str, FeatureView]] = None): + self.feature_view_projections = [] + for feature_grouping in self._features: + if isinstance(feature_grouping, BaseFeatureView): + # For feature services that depend on an unspecified feature view, apply inferred schema + if ( + fvs_to_update is not None + and len(feature_grouping.projection.features) == 0 + and feature_grouping.name in fvs_to_update + ): + feature_grouping.projection.features = fvs_to_update[ + feature_grouping.name + ].features + self.feature_view_projections.append(feature_grouping.projection) + else: + raise ValueError( + f"The feature service {self.name} has been provided with an invalid type " + f'{type(feature_grouping)} as part of the "features" argument.)' + ) def __repr__(self): items = (f"{k} = {v}" for k, v in self.__dict__.items()) @@ -85,7 +132,7 @@ def __str__(self): return str(MessageToJson(self.to_proto())) def __hash__(self): - return hash((id(self), self.name)) + return hash(self.name) def __eq__(self, other): if not isinstance(other, FeatureService): @@ -122,6 +169,9 @@ def from_proto(cls, feature_service_proto: FeatureServiceProto): tags=dict(feature_service_proto.spec.tags), description=feature_service_proto.spec.description, owner=feature_service_proto.spec.owner, + logging_config=LoggingConfig.from_proto( + feature_service_proto.spec.logging_config + ), ) fs.feature_view_projections.extend( [ @@ -162,6 +212,9 @@ def to_proto(self) -> FeatureServiceProto: tags=self.tags, description=self.description, owner=self.owner, + logging_config=self.logging_config.to_proto() + if self.logging_config + else None, ) return FeatureServiceProto(spec=spec, 
meta=meta) diff --git a/sdk/python/feast/feature_store.py b/sdk/python/feast/feature_store.py index 33d297f3ca8..73af4741ef3 100644 --- a/sdk/python/feast/feature_store.py +++ b/sdk/python/feast/feature_store.py @@ -16,11 +16,12 @@ import os import warnings from collections import Counter, defaultdict -from datetime import datetime +from datetime import datetime, timedelta from pathlib import Path from typing import ( TYPE_CHECKING, Any, + Callable, Dict, Iterable, List, @@ -34,11 +35,12 @@ ) import pandas as pd +import pyarrow as pa from colorama import Fore, Style from google.protobuf.timestamp_pb2 import Timestamp from tqdm import tqdm -from feast import feature_server, flags, flags_helper, utils +from feast import feature_server, flags, flags_helper, ui_server, utils from feast.base_feature_view import BaseFeatureView from feast.data_source import DataSource from feast.diff.infra_diff import InfraDiff, diff_infra_protos @@ -92,7 +94,6 @@ warnings.simplefilter("once", DeprecationWarning) - if TYPE_CHECKING: from feast.embedded_go.online_features_service import EmbeddedOnlineFeatureServer @@ -111,7 +112,7 @@ class FeatureStore: repo_path: Path _registry: Registry _provider: Provider - _go_server: Optional["EmbeddedOnlineFeatureServer"] + _go_server: "EmbeddedOnlineFeatureServer" @log_exceptions def __init__( @@ -475,8 +476,9 @@ def _make_inferences( entities_to_update: List[Entity], views_to_update: List[FeatureView], odfvs_to_update: List[OnDemandFeatureView], + feature_services_to_update: List[FeatureService], ): - """Makes inferences for entities, feature views, and odfvs.""" + """Makes inferences for entities, feature views, odfvs, and feature services.""" update_entities_with_inferred_types_from_feature_views( entities_to_update, views_to_update, self.config ) @@ -498,6 +500,10 @@ def _make_inferences( for odfv in odfvs_to_update: odfv.infer_features() + fvs_to_update_map = {view.name: view for view in views_to_update} + for feature_service in 
feature_services_to_update: + feature_service.infer_features(fvs_to_update=fvs_to_update_map) + @log_exceptions_and_usage def _plan( self, desired_repo_contents: RepoContents @@ -534,25 +540,26 @@ def _plan( ... batch_source=driver_hourly_stats, ... ) >>> registry_diff, infra_diff, new_infra = fs._plan(RepoContents( - ... data_sources={driver_hourly_stats}, - ... feature_views={driver_hourly_stats_view}, - ... on_demand_feature_views=set(), - ... request_feature_views=set(), - ... entities={driver}, - ... feature_services=set())) # register entity and feature view + ... data_sources=[driver_hourly_stats], + ... feature_views=[driver_hourly_stats_view], + ... on_demand_feature_views=list(), + ... request_feature_views=list(), + ... entities=[driver], + ... feature_services=list())) # register entity and feature view """ # Validate and run inference on all the objects to be registered. self._validate_all_feature_views( - list(desired_repo_contents.feature_views), - list(desired_repo_contents.on_demand_feature_views), - list(desired_repo_contents.request_feature_views), + desired_repo_contents.feature_views, + desired_repo_contents.on_demand_feature_views, + desired_repo_contents.request_feature_views, ) - _validate_data_sources(list(desired_repo_contents.data_sources)) + _validate_data_sources(desired_repo_contents.data_sources) self._make_inferences( - list(desired_repo_contents.data_sources), - list(desired_repo_contents.entities), - list(desired_repo_contents.feature_views), - list(desired_repo_contents.on_demand_feature_views), + desired_repo_contents.data_sources, + desired_repo_contents.entities, + desired_repo_contents.feature_views, + desired_repo_contents.on_demand_feature_views, + desired_repo_contents.feature_services, ) # Compute the desired difference between the current objects in the registry and @@ -692,7 +699,11 @@ def apply( views_to_update, odfvs_to_update, request_views_to_update ) self._make_inferences( - data_sources_to_update, 
entities_to_update, views_to_update, odfvs_to_update + data_sources_to_update, + entities_to_update, + views_to_update, + odfvs_to_update, + services_to_update, ) # Handle all entityless feature views by using DUMMY_ENTITY as a placeholder entity. @@ -865,7 +876,7 @@ def get_historical_features( DeprecationWarning, ) - # TODO(achal): _group_feature_refs returns the on demand feature views, but it's no passed into the provider. + # TODO(achal): _group_feature_refs returns the on demand feature views, but it's not passed into the provider. # This is a weird interface quirk - we should revisit the `get_historical_features` to # pass in the on demand feature views as well. fvs, odfvs, request_fvs, request_fv_refs = _group_feature_refs( @@ -1070,7 +1081,16 @@ def materialize_incremental( f"No start time found for feature view {feature_view.name}. materialize_incremental() requires" f" either a ttl to be set or for materialize() to have been run at least once." ) - start_date = datetime.utcnow() - feature_view.ttl + elif feature_view.ttl.total_seconds() > 0: + start_date = datetime.utcnow() - feature_view.ttl + else: + # TODO(felixwang9817): Find the earliest timestamp for this specific feature + # view from the offline store, and set the start date to that timestamp. + print( + f"Since the ttl is 0 for feature view {Style.BRIGHT + Fore.GREEN}{feature_view.name}{Style.RESET_ALL}, " + "the start date will be set to 1 year before the current time." + ) + start_date = datetime.utcnow() - timedelta(weeks=52) provider = self._get_provider() print( f"{Style.BRIGHT + Fore.GREEN}{feature_view.name}{Style.RESET_ALL}" @@ -1186,16 +1206,25 @@ def tqdm_builder(length): ) @log_exceptions_and_usage - def push(self, push_source_name: str, df: pd.DataFrame): + def push( + self, push_source_name: str, df: pd.DataFrame, allow_registry_cache: bool = True + ): """ Push features to a push source. This updates all the feature views that have the push source as stream source. 
Args: push_source_name: The name of the push source we want to push data to. df: the data being pushed. + allow_registry_cache: whether to allow cached versions of the registry. """ + warnings.warn( + "Push source is an experimental feature. " + "This API is unstable and it could and might change in the future. " + "We do not guarantee that future changes will maintain backward compatibility.", + RuntimeWarning, + ) from feast.data_source import PushSource - all_fvs = self.list_feature_views(allow_cache=True) + all_fvs = self.list_feature_views(allow_cache=allow_registry_cache) fvs_with_push_sources = { fv @@ -1208,7 +1237,9 @@ def push(self, push_source_name: str, df: pd.DataFrame): } for fv in fvs_with_push_sources: - self.write_to_online_store(fv.name, df, allow_registry_cache=True) + self.write_to_online_store( + fv.name, df, allow_registry_cache=allow_registry_cache + ) @log_exceptions_and_usage def write_to_online_store( @@ -1295,6 +1326,18 @@ def get_online_features( native_entity_values=True, ) + def _lazy_init_go_server(self): + """Lazily initialize self._go_server if it hasn't been initialized before.""" + from feast.embedded_go.online_features_service import ( + EmbeddedOnlineFeatureServer, + ) + + # Lazily start the go server on the first request + if self._go_server is None: + self._go_server = EmbeddedOnlineFeatureServer( + str(self.repo_path.absolute()), self.config, self + ) + def _get_online_features( self, features: Union[List[str], FeatureService], @@ -1312,15 +1355,7 @@ def _get_online_features( # If Go feature server is enabled, send request to it instead of going through regular Python logic if self.config.go_feature_retrieval: - from feast.embedded_go.online_features_service import ( - EmbeddedOnlineFeatureServer, - ) - - # Lazily start the go server on the first request - if self._go_server is None: - self._go_server = EmbeddedOnlineFeatureServer( - str(self.repo_path.absolute()), self.config, self - ) + self._lazy_init_go_server() 
entity_native_values: Dict[str, List[Any]] if not native_entity_values: @@ -1946,13 +1981,39 @@ def _get_feature_views_to_use( @log_exceptions_and_usage def serve(self, host: str, port: int, no_access_log: bool) -> None: """Start the feature consumption server locally on a given port.""" - feature_server.start_server(self, host, port, no_access_log) + if self.config.go_feature_retrieval: + # Start go server instead of python if the flag is enabled + self._lazy_init_go_server() + # TODO(tsotne) add http/grpc flag in CLI and call appropriate method here depending on that + self._go_server.start_grpc_server(host, port) + else: + # Start the python server if go server isn't enabled + feature_server.start_server(self, host, port, no_access_log) @log_exceptions_and_usage def get_feature_server_endpoint(self) -> Optional[str]: """Returns endpoint for the feature server, if it exists.""" return self._provider.get_feature_server_endpoint() + @log_exceptions_and_usage + def serve_ui( + self, host: str, port: int, get_registry_dump: Callable, registry_ttl_sec: int + ) -> None: + """Start the UI server locally""" + warnings.warn( + "The Feast UI is an experimental feature. " + "We do not guarantee that future changes will maintain backward compatibility.", + RuntimeWarning, + ) + ui_server.start_server( + self, + host=host, + port=port, + get_registry_dump=get_registry_dump, + project_id=self.config.project, + registry_ttl_sec=registry_ttl_sec, + ) + @log_exceptions_and_usage def serve_transformations(self, port: int) -> None: """Start the feature transformation server locally on a given port.""" @@ -1966,6 +2027,33 @@ def serve_transformations(self, port: int) -> None: def _teardown_go_server(self): self._go_server = None + def write_logged_features( + self, logs: Union[pa.Table, Path], source: Union[FeatureService] + ): + """ + Write logs produced by a source (currently only feature service is supported as a source) + to an offline store. 
+ + Args: + logs: Arrow Table or path to parquet dataset directory on disk + source: Object that produces logs + """ + if not isinstance(source, FeatureService): + raise ValueError("Only feature service is currently supported as a source") + + assert ( + source.logging_config is not None + ), "Feature service must be configured with logging config in order to use this functionality" + + assert isinstance(logs, (pa.Table, Path)) + + self._get_provider().write_feature_service_logs( + feature_service=source, + logs=logs, + config=self.config, + registry=self._registry, + ) + def _validate_entity_values(join_key_values: Dict[str, List[Value]]): set_of_row_lengths = {len(v) for v in join_key_values.values()} @@ -2047,8 +2135,12 @@ def _group_feature_refs( for ref in features: view_name, feat_name = ref.split(":") if view_name in view_index: + view_index[view_name].projection.get_feature(feat_name) # For validation views_features[view_name].add(feat_name) elif view_name in on_demand_view_index: + on_demand_view_index[view_name].projection.get_feature( + feat_name + ) # For validation on_demand_view_features[view_name].add(feat_name) # Let's also add in any FV Feature dependencies here. for input_fv_projection in on_demand_view_index[ @@ -2057,6 +2149,9 @@ def _group_feature_refs( for input_feat in input_fv_projection.features: views_features[input_fv_projection.name].add(input_feat.name) elif view_name in request_view_index: + request_view_index[view_name].projection.get_feature( + feat_name + ) # For validation request_views_features[view_name].add(feat_name) request_view_refs.add(ref) else: @@ -2119,7 +2214,7 @@ def _validate_data_sources(data_sources: List[DataSource]): f"More than one data source with name {case_insensitive_ds_name} found. " f"Please ensure that all data source names are case-insensitively unique. " f"It may be necessary to ignore certain files in your feature repository by using a .feastignore " - f"file. 
Starting in Feast 0.21, unique names (perhaps inferred from the table name) will be " + f"file. Starting in Feast 0.23, unique names (perhaps inferred from the table name) will be " f"required in data sources to encourage data source discovery" ) else: diff --git a/sdk/python/feast/feature_view.py b/sdk/python/feast/feature_view.py index 7d29a4b69bb..4ff7618f2e1 100644 --- a/sdk/python/feast/feature_view.py +++ b/sdk/python/feast/feature_view.py @@ -45,7 +45,7 @@ DUMMY_ENTITY_NAME = "__dummy" DUMMY_ENTITY_VAL = "" DUMMY_ENTITY = Entity( - name=DUMMY_ENTITY_NAME, join_key=DUMMY_ENTITY_ID, value_type=ValueType.STRING, + name=DUMMY_ENTITY_NAME, join_keys=[DUMMY_ENTITY_ID], value_type=ValueType.STRING, ) @@ -137,7 +137,7 @@ def __init__( ValueError: A field mapping conflicts with an Entity or a Feature. """ - positional_attributes = ["name, entities, ttl"] + positional_attributes = ["name", "entities", "ttl"] _name = name _entities = entities @@ -180,7 +180,7 @@ def __init__( warnings.warn( ( "The option to pass a Duration object to the ttl parameter is being deprecated. " - "Please pass a timedelta object instead. Feast 0.21 and onwards will not support " + "Please pass a timedelta object instead. Feast 0.23 and onwards will not support " "Duration objects." ), DeprecationWarning, @@ -195,7 +195,7 @@ def __init__( ( "The `features` parameter is being deprecated in favor of the `schema` parameter. " "Please switch from using `features` to `schema`. This will also requiring switching " - "feature definitions from using `Feature` to `Field`. Feast 0.21 and onwards will not " + "feature definitions from using `Feature` to `Field`. Feast 0.23 and onwards will not " "support the `features` parameter." 
), DeprecationWarning, @@ -270,7 +270,6 @@ def _initialize_sources(self, name, batch_source, stream_source, source): self.batch_source = batch_source self.source = source - # Note: Python requires redefining hash in child classes that override __eq__ def __hash__(self): return super().__hash__() @@ -298,19 +297,15 @@ def __eq__(self, other): return False if ( - self.tags != other.tags + sorted(self.entities) != sorted(other.entities) or self.ttl != other.ttl or self.online != other.online + or self.batch_source != other.batch_source + or self.stream_source != other.stream_source + or self.schema != other.schema ): return False - if sorted(self.entities) != sorted(other.entities): - return False - if self.batch_source != other.batch_source: - return False - if self.stream_source != other.stream_source: - return False - return True def ensure_valid(self): @@ -444,8 +439,9 @@ def from_proto(cls, feature_view_proto: FeatureViewProto): else feature_view_proto.spec.ttl.ToTimedelta() ), source=batch_source, - stream_source=stream_source, ) + if stream_source: + feature_view.stream_source = stream_source # FeatureViewProjections are not saved in the FeatureView proto. # Create the default projection. 
diff --git a/sdk/python/feast/feature_view_projection.py b/sdk/python/feast/feature_view_projection.py index a8e0e8cfe52..fbf0db5ccdf 100644 --- a/sdk/python/feast/feature_view_projection.py +++ b/sdk/python/feast/feature_view_projection.py @@ -64,3 +64,11 @@ def from_definition(base_feature_view: "BaseFeatureView"): name_alias=None, features=base_feature_view.features, ) + + def get_feature(self, feature_name: str) -> Field: + try: + return next(field for field in self.features if field.name == feature_name) + except StopIteration: + raise KeyError( + f"Feature {feature_name} not found in projection {self.name_to_use()}" + ) diff --git a/sdk/python/feast/field.py b/sdk/python/feast/field.py index f6c88f1850d..77011e6758c 100644 --- a/sdk/python/feast/field.py +++ b/sdk/python/feast/field.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import Dict, Optional + from feast.feature import Feature from feast.protos.feast.core.Feature_pb2 import FeatureSpecV2 as FieldProto from feast.types import FeastType, from_value_type @@ -25,13 +27,15 @@ class Field: Attributes: name: The name of the field. dtype: The type of the field, such as string or float. + tags: User-defined metadata in dictionary form. """ name: str dtype: FeastType + tags: Dict[str, str] def __init__( - self, *, name: str, dtype: FeastType, + self, *, name: str, dtype: FeastType, tags: Optional[Dict[str, str]] = None, ): """ Creates a Field object. @@ -39,12 +43,18 @@ def __init__( Args: name: The name of the field. dtype: The type of the field, such as string or float. + tags (optional): User-defined metadata in dictionary form. 
""" self.name = name self.dtype = dtype + self.tags = tags or {} def __eq__(self, other): - if self.name != other.name or self.dtype != other.dtype: + if ( + self.name != other.name + or self.dtype != other.dtype + or self.tags != other.tags + ): return False return True @@ -58,12 +68,12 @@ def __repr__(self): return f"{self.name}-{self.dtype}" def __str__(self): - return f"Field(name={self.name}, dtype={self.dtype})" + return f"Field(name={self.name}, dtype={self.dtype}, tags={self.tags})" def to_proto(self) -> FieldProto: """Converts a Field object to its protobuf representation.""" value_type = self.dtype.to_value_type() - return FieldProto(name=self.name, value_type=value_type.value) + return FieldProto(name=self.name, value_type=value_type.value, tags=self.tags) @classmethod def from_proto(cls, field_proto: FieldProto): @@ -74,7 +84,11 @@ def from_proto(cls, field_proto: FieldProto): field_proto: FieldProto protobuf object """ value_type = ValueType(field_proto.value_type) - return cls(name=field_proto.name, dtype=from_value_type(value_type=value_type)) + return cls( + name=field_proto.name, + dtype=from_value_type(value_type=value_type), + tags=dict(field_proto.tags), + ) @classmethod def from_feature(cls, feature: Feature): @@ -84,4 +98,6 @@ def from_feature(cls, feature: Feature): Args: feature: Feature object to convert. 
""" - return cls(name=feature.name, dtype=from_value_type(feature.dtype)) + return cls( + name=feature.name, dtype=from_value_type(feature.dtype), tags=feature.labels + ) diff --git a/sdk/python/feast/inference.py b/sdk/python/feast/inference.py index 9d15a6a25f7..076cbc86ceb 100644 --- a/sdk/python/feast/inference.py +++ b/sdk/python/feast/inference.py @@ -2,7 +2,7 @@ from typing import List from feast import BigQuerySource, Entity, FileSource, RedshiftSource, SnowflakeSource -from feast.data_source import DataSource, RequestSource +from feast.data_source import DataSource, PushSource, RequestSource from feast.errors import RegistryInferenceFailure from feast.feature_view import FeatureView from feast.field import Field, from_value_type @@ -32,7 +32,9 @@ def update_entities_with_inferred_types_from_feature_views( if not (incomplete_entities_keys & set(view.entities)): continue # skip if view doesn't contain any entities that need inference - col_names_and_types = view.batch_source.get_table_column_names_and_types(config) + col_names_and_types = list( + view.batch_source.get_table_column_names_and_types(config) + ) for entity_name in view.entities: if entity_name in incomplete_entities: entity = incomplete_entities[entity_name] @@ -69,11 +71,13 @@ def update_entities_with_inferred_types_from_feature_views( def update_data_sources_with_inferred_event_timestamp_col( data_sources: List[DataSource], config: RepoConfig ) -> None: - ERROR_MSG_PREFIX = "Unable to infer DataSource event_timestamp_column" + ERROR_MSG_PREFIX = "Unable to infer DataSource timestamp_field" for data_source in data_sources: if isinstance(data_source, RequestSource): continue + if isinstance(data_source, PushSource): + data_source = data_source.batch_source if data_source.timestamp_field is None or data_source.timestamp_field == "": # prepare right match pattern for data source ts_column_type_regex_pattern = "" @@ -108,30 +112,30 @@ def update_data_sources_with_inferred_event_timestamp_col( ) # 
loop through table columns to find singular match - timestamp_field, matched_flag = None, False + timestamp_fields = [] for ( col_name, col_datatype, ) in data_source.get_table_column_names_and_types(config): if re.match(ts_column_type_regex_pattern, col_datatype): - if matched_flag: - raise RegistryInferenceFailure( - "DataSource", - f""" - {ERROR_MSG_PREFIX} due to multiple possible columns satisfying - the criteria. {ts_column_type_regex_pattern} {col_name} - """, - ) - matched_flag = True - timestamp_field = col_name - if matched_flag: - assert timestamp_field - data_source.timestamp_field = timestamp_field + timestamp_fields.append(col_name) + + if len(timestamp_fields) > 1: + raise RegistryInferenceFailure( + "DataSource", + f"""{ERROR_MSG_PREFIX}; found multiple possible columns of timestamp type. + Data source type: {data_source.__class__.__name__}, + Timestamp regex: `{ts_column_type_regex_pattern}`, columns: {timestamp_fields}""", + ) + elif len(timestamp_fields) == 1: + data_source.timestamp_field = timestamp_fields[0] else: raise RegistryInferenceFailure( "DataSource", f""" - {ERROR_MSG_PREFIX} due to an absence of columns that satisfy the criteria. + {ERROR_MSG_PREFIX}; Found no columns of timestamp type. + Data source type: {data_source.__class__.__name__}, + Timestamp regex: `{ts_column_type_regex_pattern}`. """, ) @@ -150,8 +154,13 @@ def update_feature_views_with_inferred_features( config: The config for the current feature store. """ entity_name_to_join_key_map = {entity.name: entity.join_key for entity in entities} + join_keys = entity_name_to_join_key_map.values() for fv in fvs: + # First drop all Entity fields. Then infer features if necessary. 
+ fv.schema = [field for field in fv.schema if field.name not in join_keys] + fv.features = [field for field in fv.features if field.name not in join_keys] + if not fv.features: columns_to_exclude = { fv.batch_source.timestamp_field, diff --git a/sdk/python/feast/infra/aws.py b/sdk/python/feast/infra/aws.py index b7cc61de0e5..14301faf192 100644 --- a/sdk/python/feast/infra/aws.py +++ b/sdk/python/feast/infra/aws.py @@ -347,7 +347,7 @@ def _get_docker_image_version() -> str: f"version {get_version()}. If you want to update the Feast SDK version, make " "sure to first fetch all new release tags from Github and then reinstall the library:\n" "> git fetch --all --tags\n" - "> pip install -e sdk/python" + "> pip install -e '.'" ) return version diff --git a/sdk/python/feast/infra/feature_servers/aws_lambda/Dockerfile b/sdk/python/feast/infra/feature_servers/aws_lambda/Dockerfile index 5b685dbcf61..fe997afcae4 100644 --- a/sdk/python/feast/infra/feature_servers/aws_lambda/Dockerfile +++ b/sdk/python/feast/infra/feature_servers/aws_lambda/Dockerfile @@ -7,10 +7,16 @@ COPY sdk/python/feast/infra/feature_servers/aws_lambda/app.py ${LAMBDA_TASK_ROOT COPY sdk/python sdk/python COPY protos protos COPY go go +COPY setup.py setup.py +COPY pyproject.toml pyproject.toml COPY README.md README.md # Install Feast for AWS with Lambda dependencies -RUN pip3 install -e 'sdk/python[aws,redis]' +# We need this mount thingy because setuptools_scm needs access to the +# git dir to infer the version of feast we're installing. +# https://github.com/pypa/setuptools_scm#usage-from-docker +# I think it also assumes that this dockerfile is being built from the root of the directory. 
+RUN SETUPTOOLS_SCM_PRETEND_VERSION=1 pip3 install --no-cache-dir -e '.[aws,redis]' RUN pip3 install -r sdk/python/feast/infra/feature_servers/aws_lambda/requirements.txt --target "${LAMBDA_TASK_ROOT}" # Set the CMD to your handler (could also be done as a parameter override outside of the Dockerfile) diff --git a/sdk/python/feast/infra/feature_servers/gcp_cloudrun/Dockerfile b/sdk/python/feast/infra/feature_servers/gcp_cloudrun/Dockerfile index b4e1e4adb97..e0e16f6a14f 100644 --- a/sdk/python/feast/infra/feature_servers/gcp_cloudrun/Dockerfile +++ b/sdk/python/feast/infra/feature_servers/gcp_cloudrun/Dockerfile @@ -13,11 +13,13 @@ COPY sdk/python/feast/infra/feature_servers/gcp_cloudrun/app.py ./app.py # Copy necessary parts of the Feast codebase COPY sdk/python ./sdk/python COPY protos ./protos +COPY setup.py setup.py +COPY pyproject.toml pyproject.toml COPY README.md ./README.md # Install production dependencies. RUN pip install --no-cache-dir \ - -e 'sdk/python[gcp,redis]' \ + -e '.[gcp,redis]' \ -r ./sdk/python/feast/infra/feature_servers/gcp_cloudrun/requirements.txt # Run the web service on container startup. 
Here we use the gunicorn diff --git a/sdk/python/feast/infra/offline_stores/bigquery.py b/sdk/python/feast/infra/offline_stores/bigquery.py index 1e27fc326ba..e9d8bdccbfe 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery.py +++ b/sdk/python/feast/infra/offline_stores/bigquery.py @@ -1,6 +1,8 @@ import contextlib +import tempfile import uuid from datetime import date, datetime, timedelta +from pathlib import Path from typing import ( Callable, ContextManager, @@ -28,6 +30,7 @@ FeastProviderLoginError, InvalidEntityType, ) +from feast.feature_logging import LoggingConfig, LoggingSource from feast.feature_view import DUMMY_ENTITY_ID, DUMMY_ENTITY_VAL, FeatureView from feast.infra.offline_stores import offline_utils from feast.infra.offline_stores.offline_store import ( @@ -41,13 +44,18 @@ from ...saved_dataset import SavedDatasetStorage from ...usage import log_exceptions_and_usage -from .bigquery_source import BigQuerySource, SavedDatasetBigQueryStorage +from .bigquery_source import ( + BigQueryLoggingDestination, + BigQuerySource, + SavedDatasetBigQueryStorage, +) try: from google.api_core.exceptions import NotFound from google.auth.exceptions import DefaultCredentialsError from google.cloud import bigquery - from google.cloud.bigquery import Client, Table + from google.cloud.bigquery import Client, SchemaField, Table + from google.cloud.bigquery._pandas_helpers import ARROW_SCALAR_IDS_TO_BQ except ImportError as e: from feast.errors import FeastExtrasDependencyImportError @@ -83,7 +91,7 @@ def pull_latest_from_table_or_query( data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, created_timestamp_column: Optional[str], start_date: datetime, end_date: datetime, @@ -96,7 +104,7 @@ def pull_latest_from_table_or_query( partition_by_join_key_string = ( "PARTITION BY " + partition_by_join_key_string ) - timestamps = [event_timestamp_column] + timestamps = [timestamp_field] 
if created_timestamp_column: timestamps.append(created_timestamp_column) timestamp_desc_string = " DESC, ".join(timestamps) + " DESC" @@ -114,7 +122,7 @@ def pull_latest_from_table_or_query( SELECT {field_string}, ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS _feast_row FROM {from_expression} - WHERE {event_timestamp_column} BETWEEN TIMESTAMP('{start_date}') AND TIMESTAMP('{end_date}') + WHERE {timestamp_field} BETWEEN TIMESTAMP('{start_date}') AND TIMESTAMP('{end_date}') ) WHERE _feast_row = 1 """ @@ -131,7 +139,7 @@ def pull_all_from_table_or_query( data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, start_date: datetime, end_date: datetime, ) -> RetrievalJob: @@ -143,12 +151,12 @@ def pull_all_from_table_or_query( location=config.offline_store.location, ) field_string = ", ".join( - join_key_columns + feature_name_columns + [event_timestamp_column] + join_key_columns + feature_name_columns + [timestamp_field] ) query = f""" SELECT {field_string} FROM {from_expression} - WHERE {event_timestamp_column} BETWEEN TIMESTAMP('{start_date}') AND TIMESTAMP('{end_date}') + WHERE {timestamp_field} BETWEEN TIMESTAMP('{start_date}') AND TIMESTAMP('{end_date}') """ return BigQueryRetrievalJob( query=query, client=client, config=config, full_feature_names=False, @@ -248,6 +256,53 @@ def query_generator() -> Iterator[str]: ), ) + @staticmethod + def write_logged_features( + config: RepoConfig, + data: Union[pyarrow.Table, Path], + source: LoggingSource, + logging_config: LoggingConfig, + registry: Registry, + ): + destination = logging_config.destination + assert isinstance(destination, BigQueryLoggingDestination) + + client = _get_bigquery_client( + project=config.offline_store.project_id, + location=config.offline_store.location, + ) + + job_config = bigquery.LoadJobConfig( + source_format=bigquery.SourceFormat.PARQUET, + 
schema=arrow_schema_to_bq_schema(source.get_schema(registry)), + time_partitioning=bigquery.TimePartitioning( + type_=bigquery.TimePartitioningType.DAY, + field=source.get_log_timestamp_column(), + ), + ) + + if isinstance(data, Path): + for file in data.iterdir(): + with file.open("rb") as f: + client.load_table_from_file( + file_obj=f, + destination=destination.table, + job_config=job_config, + ) + + return + + with tempfile.TemporaryFile() as parquet_temp_file: + pyarrow.parquet.write_table(table=data, where=parquet_temp_file) + + parquet_temp_file.seek(0) + + client.load_table_from_file( + file_obj=parquet_temp_file, + destination=destination.table, + job_config=job_config, + ) + class BigQueryRetrievalJob(RetrievalJob): def __init__( @@ -513,7 +568,9 @@ def _get_entity_df_event_timestamp_range( return entity_df_event_timestamp_range -def _get_bigquery_client(project: Optional[str] = None, location: Optional[str] = None): +def _get_bigquery_client( + project: Optional[str] = None, location: Optional[str] = None +) -> bigquery.Client: try: client = bigquery.Client(project=project, location=location) except DefaultCredentialsError as e: @@ -533,6 +590,24 @@ def _get_bigquery_client(project: Optional[str] = None, location: Optional[str] return client +def arrow_schema_to_bq_schema(arrow_schema: pyarrow.Schema) -> List[SchemaField]: + bq_schema = [] + + for field in arrow_schema: + if pyarrow.types.is_list(field.type): + detected_mode = "REPEATED" + detected_type = ARROW_SCALAR_IDS_TO_BQ[field.type.value_type.id] + else: + detected_mode = "NULLABLE" + detected_type = ARROW_SCALAR_IDS_TO_BQ[field.type.id] + + bq_schema.append( + SchemaField(name=field.name, field_type=detected_type, mode=detected_mode) + ) + + return bq_schema + + # TODO: Optimizations # * Use GENERATE_UUID() instead of ROW_NUMBER(), or join on entity columns directly # * Precompute ROW_NUMBER() so that it doesn't have to be recomputed for every query on entity_dataframe @@ -583,9 +658,9 @@ def 
_get_bigquery_client(project: Optional[str] = None, location: Optional[str] 1. We first join the current feature_view to the entity dataframe that has been passed. This JOIN has the following logic: - - For each row of the entity dataframe, only keep the rows where the `event_timestamp_column` + - For each row of the entity dataframe, only keep the rows where the `timestamp_field` is less than the one provided in the entity dataframe - - If there a TTL for the current feature_view, also keep the rows where the `event_timestamp_column` + - If there a TTL for the current feature_view, also keep the rows where the `timestamp_field` is higher the the one provided minus the TTL - For each row, Join on the entity key and retrieve the `entity_row_unique_id` that has been computed previously @@ -596,16 +671,16 @@ def _get_bigquery_client(project: Optional[str] = None, location: Optional[str] {{ featureview.name }}__subquery AS ( SELECT - {{ featureview.event_timestamp_column }} as event_timestamp, + {{ featureview.timestamp_field }} as event_timestamp, {{ featureview.created_timestamp_column ~ ' as created_timestamp,' if featureview.created_timestamp_column else '' }} {{ featureview.entity_selections | join(', ')}}{% if featureview.entity_selections %},{% else %}{% endif %} {% for feature in featureview.features %} {{ feature }} as {% if full_feature_names %}{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}{% else %}{{ featureview.field_mapping.get(feature, feature) }}{% endif %}{% if loop.last %}{% else %}, {% endif %} {% endfor %} FROM {{ featureview.table_subquery }} - WHERE {{ featureview.event_timestamp_column }} <= '{{ featureview.max_event_timestamp }}' + WHERE {{ featureview.timestamp_field }} <= '{{ featureview.max_event_timestamp }}' {% if featureview.ttl == 0 %}{% else %} - AND {{ featureview.event_timestamp_column }} >= '{{ featureview.min_event_timestamp }}' + AND {{ featureview.timestamp_field }} >= '{{ 
featureview.min_event_timestamp }}' {% endif %} ), diff --git a/sdk/python/feast/infra/offline_stores/bigquery_source.py b/sdk/python/feast/infra/offline_stores/bigquery_source.py index 31b0ed617e9..06e9ce9f624 100644 --- a/sdk/python/feast/infra/offline_stores/bigquery_source.py +++ b/sdk/python/feast/infra/offline_stores/bigquery_source.py @@ -4,7 +4,11 @@ from feast import type_map from feast.data_source import DataSource from feast.errors import DataSourceNotFoundException +from feast.feature_logging import LoggingDestination from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto +from feast.protos.feast.core.FeatureService_pb2 import ( + LoggingConfig as LoggingConfigProto, +) from feast.protos.feast.core.SavedDataset_pb2 import ( SavedDatasetStorage as SavedDatasetStorageProto, ) @@ -16,6 +20,7 @@ class BigQuerySource(DataSource): def __init__( self, + *, event_timestamp_column: Optional[str] = "", table: Optional[str] = None, created_timestamp_column: Optional[str] = "", @@ -58,7 +63,7 @@ def __init__( warnings.warn( ( "The argument 'date_partition_column' is not supported for BigQuery sources. 
" - "It will be removed in Feast 0.21+" + "It will be removed in Feast 0.23+" ), DeprecationWarning, ) @@ -71,7 +76,7 @@ def __init__( else: warnings.warn( ( - f"Starting in Feast 0.21, Feast will require either a name for a data source (if using query) or `table`: {self.query}" + f"Starting in Feast 0.23, Feast will require either a name for a data source (if using query) or `table`: {self.query}" ), DeprecationWarning, ) @@ -98,15 +103,9 @@ def __eq__(self, other): ) return ( - self.name == other.name - and self.bigquery_options.table == other.bigquery_options.table - and self.bigquery_options.query == other.bigquery_options.query - and self.timestamp_field == other.timestamp_field - and self.created_timestamp_column == other.created_timestamp_column - and self.field_mapping == other.field_mapping - and self.description == other.description - and self.tags == other.tags - and self.owner == other.owner + super().__eq__(other) + and self.table == other.table + and self.query == other.query ) @property @@ -119,7 +118,6 @@ def query(self): @staticmethod def from_proto(data_source: DataSourceProto): - assert data_source.HasField("bigquery_options") return BigQuerySource( @@ -143,11 +141,10 @@ def to_proto(self) -> DataSourceProto: description=self.description, tags=self.tags, owner=self.owner, + timestamp_field=self.timestamp_field, + created_timestamp_column=self.created_timestamp_column, ) - data_source_proto.timestamp_field = self.timestamp_field - data_source_proto.created_timestamp_column = self.created_timestamp_column - return data_source_proto def validate(self, config: RepoConfig): @@ -178,7 +175,7 @@ def get_table_column_names_and_types( from google.cloud import bigquery client = bigquery.Client() - if self.table is not None: + if self.table: schema = client.get_table(self.table).schema if not isinstance(schema[0], bigquery.schema.SchemaField): raise TypeError("Could not parse BigQuery table schema.") @@ -199,42 +196,14 @@ def 
get_table_column_names_and_types( class BigQueryOptions: """ - DataSource BigQuery options used to source features from BigQuery query + Configuration options for a BigQuery data source. """ def __init__( self, table: Optional[str], query: Optional[str], ): - self._table = table - self._query = query - - @property - def query(self): - """ - Returns the BigQuery SQL query referenced by this source - """ - return self._query - - @query.setter - def query(self, query): - """ - Sets the BigQuery SQL query referenced by this source - """ - self._query = query - - @property - def table(self): - """ - Returns the table ref of this BQ table - """ - return self._table - - @table.setter - def table(self, table): - """ - Sets the table ref of this BQ table - """ - self._table = table + self.table = table or "" + self.query = query or "" @classmethod def from_proto(cls, bigquery_options_proto: DataSourceProto.BigQueryOptions): @@ -247,7 +216,6 @@ def from_proto(cls, bigquery_options_proto: DataSourceProto.BigQueryOptions): Returns: Returns a BigQueryOptions object based on the bigquery_options protobuf """ - bigquery_options = cls( table=bigquery_options_proto.table, query=bigquery_options_proto.query, ) @@ -261,7 +229,6 @@ def to_proto(self) -> DataSourceProto.BigQueryOptions: Returns: BigQueryOptionsProto protobuf """ - bigquery_options_proto = DataSourceProto.BigQueryOptions( table=self.table, query=self.query, ) @@ -290,3 +257,28 @@ def to_proto(self) -> SavedDatasetStorageProto: def to_data_source(self) -> DataSource: return BigQuerySource(table=self.bigquery_options.table) + + +class BigQueryLoggingDestination(LoggingDestination): + _proto_kind = "bigquery_destination" + + table: str + + def __init__(self, *, table_ref): + self.table = table_ref + + @classmethod + def from_proto(cls, config_proto: LoggingConfigProto) -> "LoggingDestination": + return BigQueryLoggingDestination( + table_ref=config_proto.bigquery_destination.table_ref, + ) + + def to_data_source(self) -> 
DataSource: + return BigQuerySource(table=self.table) + + def to_proto(self) -> LoggingConfigProto: + return LoggingConfigProto( + bigquery_destination=LoggingConfigProto.BigQueryDestination( + table_ref=self.table + ) + ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/contrib_repo_configuration.py b/sdk/python/feast/infra/offline_stores/contrib/contrib_repo_configuration.py index 370ed0f47cb..083ec2b2109 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/contrib_repo_configuration.py +++ b/sdk/python/feast/infra/offline_stores/contrib/contrib_repo_configuration.py @@ -1,14 +1,17 @@ -from tests.integration.feature_repos.integration_test_repo_config import ( - IntegrationTestRepoConfig, -) -from tests.integration.feature_repos.universal.data_sources.spark_data_source_creator import ( +from feast.infra.offline_stores.contrib.spark_offline_store.tests.data_source import ( SparkDataSourceCreator, ) -from tests.integration.feature_repos.universal.data_sources.trino import ( +from feast.infra.offline_stores.contrib.trino_offline_store.tests.data_source import ( TrinoSourceCreator, ) +from tests.integration.feature_repos.repo_configuration import REDIS_CONFIG +from tests.integration.feature_repos.universal.online_store.redis import ( + RedisOnlineStoreCreator, +) -FULL_REPO_CONFIGS = [ - IntegrationTestRepoConfig(offline_store_creator=SparkDataSourceCreator), - IntegrationTestRepoConfig(offline_store_creator=TrinoSourceCreator), +AVAILABLE_OFFLINE_STORES = [ + ("local", SparkDataSourceCreator), + ("local", TrinoSourceCreator), ] + +AVAILABLE_ONLINE_STORES = {"redis": (REDIS_CONFIG, RedisOnlineStoreCreator)} diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/__init__.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py 
b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py new file mode 100644 index 00000000000..f2aa535c1d4 --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres.py @@ -0,0 +1,545 @@ +import contextlib +from dataclasses import asdict +from datetime import datetime +from typing import ( + Any, + Callable, + ContextManager, + Iterator, + KeysView, + List, + Optional, + Tuple, + Union, +) + +import pandas as pd +import pyarrow as pa +from jinja2 import BaseLoader, Environment +from psycopg2 import sql +from pydantic.typing import Literal +from pytz import utc + +from feast.data_source import DataSource +from feast.errors import InvalidEntityType +from feast.feature_view import DUMMY_ENTITY_ID, DUMMY_ENTITY_VAL, FeatureView +from feast.infra.offline_stores import offline_utils +from feast.infra.offline_stores.offline_store import ( + OfflineStore, + RetrievalJob, + RetrievalMetadata, +) +from feast.infra.utils.postgres.connection_utils import ( + _get_conn, + df_to_postgres_table, + get_query_schema, +) +from feast.infra.utils.postgres.postgres_config import PostgreSQLConfig +from feast.on_demand_feature_view import OnDemandFeatureView +from feast.registry import Registry +from feast.repo_config import RepoConfig +from feast.saved_dataset import SavedDatasetStorage +from feast.type_map import pg_type_code_to_arrow +from feast.usage import log_exceptions_and_usage + +from .postgres_source import PostgreSQLSource + + +class PostgreSQLOfflineStoreConfig(PostgreSQLConfig): + type: Literal["postgres"] = "postgres" + + +class PostgreSQLOfflineStore(OfflineStore): + @staticmethod + @log_exceptions_and_usage(offline_store="postgres") + def pull_latest_from_table_or_query( + config: RepoConfig, + data_source: DataSource, + join_key_columns: List[str], + feature_name_columns: List[str], + timestamp_field: str, + created_timestamp_column: Optional[str], + start_date: datetime, + end_date: datetime, + ) -> 
RetrievalJob: + assert isinstance(data_source, PostgreSQLSource) + from_expression = data_source.get_table_query_string() + + partition_by_join_key_string = ", ".join(_append_alias(join_key_columns, "a")) + if partition_by_join_key_string != "": + partition_by_join_key_string = ( + "PARTITION BY " + partition_by_join_key_string + ) + timestamps = [timestamp_field] + if created_timestamp_column: + timestamps.append(created_timestamp_column) + timestamp_desc_string = " DESC, ".join(_append_alias(timestamps, "a")) + " DESC" + a_field_string = ", ".join( + _append_alias(join_key_columns + feature_name_columns + timestamps, "a") + ) + b_field_string = ", ".join( + _append_alias(join_key_columns + feature_name_columns + timestamps, "b") + ) + + query = f""" + SELECT + {b_field_string} + {f", {repr(DUMMY_ENTITY_VAL)} AS {DUMMY_ENTITY_ID}" if not join_key_columns else ""} + FROM ( + SELECT {a_field_string}, + ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS _feast_row + FROM ({from_expression}) a + WHERE a."{timestamp_field}" BETWEEN '{start_date}'::timestamptz AND '{end_date}'::timestamptz + ) b + WHERE _feast_row = 1 + """ + + return PostgreSQLRetrievalJob( + query=query, + config=config, + full_feature_names=False, + on_demand_feature_views=None, + ) + + @staticmethod + @log_exceptions_and_usage(offline_store="postgres") + def get_historical_features( + config: RepoConfig, + feature_views: List[FeatureView], + feature_refs: List[str], + entity_df: Union[pd.DataFrame, str], + registry: Registry, + project: str, + full_feature_names: bool = False, + ) -> RetrievalJob: + @contextlib.contextmanager + def query_generator() -> Iterator[str]: + table_name = None + if isinstance(entity_df, pd.DataFrame): + table_name = offline_utils.get_temp_entity_table_name() + entity_schema = df_to_postgres_table( + config.offline_store, entity_df, table_name + ) + df_query = table_name + elif isinstance(entity_df, str): + df_query = f"({entity_df}) AS 
sub" + entity_schema = get_query_schema(config.offline_store, df_query) + else: + raise TypeError(entity_df) + + entity_df_event_timestamp_col = offline_utils.infer_event_timestamp_from_entity_df( + entity_schema + ) + + expected_join_keys = offline_utils.get_expected_join_keys( + project, feature_views, registry + ) + + offline_utils.assert_expected_columns_in_entity_df( + entity_schema, expected_join_keys, entity_df_event_timestamp_col + ) + + entity_df_event_timestamp_range = _get_entity_df_event_timestamp_range( + entity_df, entity_df_event_timestamp_col, config, df_query, + ) + + query_context = offline_utils.get_feature_view_query_context( + feature_refs, + feature_views, + registry, + project, + entity_df_event_timestamp_range, + ) + + query_context_dict = [asdict(context) for context in query_context] + # Hack for query_context.entity_selections to support uppercase in columns + for context in query_context_dict: + context["entity_selections"] = [ + f'''"{entity_selection.replace(' AS ', '" AS "')}\"''' + for entity_selection in context["entity_selections"] + ] + + try: + yield build_point_in_time_query( + query_context_dict, + left_table_query_string=df_query, + entity_df_event_timestamp_col=entity_df_event_timestamp_col, + entity_df_columns=entity_schema.keys(), + query_template=MULTIPLE_FEATURE_VIEW_POINT_IN_TIME_JOIN, + full_feature_names=full_feature_names, + ) + finally: + if table_name: + with _get_conn(config.offline_store) as conn, conn.cursor() as cur: + cur.execute( + sql.SQL( + """ + DROP TABLE IF EXISTS {}; + """ + ).format(sql.Identifier(table_name)), + ) + + return PostgreSQLRetrievalJob( + query=query_generator, + config=config, + full_feature_names=full_feature_names, + on_demand_feature_views=OnDemandFeatureView.get_requested_odfvs( + feature_refs, project, registry + ), + ) + + @staticmethod + @log_exceptions_and_usage(offline_store="postgres") + def pull_all_from_table_or_query( + config: RepoConfig, + data_source: DataSource, + 
join_key_columns: List[str], + feature_name_columns: List[str], + timestamp_field: str, + start_date: datetime, + end_date: datetime, + ) -> RetrievalJob: + assert isinstance(data_source, PostgreSQLSource) + from_expression = data_source.get_table_query_string() + + field_string = ", ".join( + join_key_columns + feature_name_columns + [timestamp_field] + ) + + start_date = start_date.astimezone(tz=utc) + end_date = end_date.astimezone(tz=utc) + + query = f""" + SELECT {field_string} + FROM {from_expression} + WHERE "{timestamp_field}" BETWEEN '{start_date}'::timestamptz AND '{end_date}'::timestamptz + """ + + return PostgreSQLRetrievalJob( + query=query, + config=config, + full_feature_names=False, + on_demand_feature_views=None, + ) + + +class PostgreSQLRetrievalJob(RetrievalJob): + def __init__( + self, + query: Union[str, Callable[[], ContextManager[str]]], + config: RepoConfig, + full_feature_names: bool, + on_demand_feature_views: Optional[List[OnDemandFeatureView]], + metadata: Optional[RetrievalMetadata] = None, + ): + if not isinstance(query, str): + self._query_generator = query + else: + + @contextlib.contextmanager + def query_generator() -> Iterator[str]: + assert isinstance(query, str) + yield query + + self._query_generator = query_generator + self.config = config + self._full_feature_names = full_feature_names + self._on_demand_feature_views = on_demand_feature_views + self._metadata = metadata + + @property + def full_feature_names(self) -> bool: + return self._full_feature_names + + @property + def on_demand_feature_views(self) -> Optional[List[OnDemandFeatureView]]: + return self._on_demand_feature_views + + def _to_df_internal(self) -> pd.DataFrame: + # We use arrow format because it gives better control of the table schema + return self._to_arrow_internal().to_pandas() + + def to_sql(self) -> str: + with self._query_generator() as query: + return query + + def _to_arrow_internal(self) -> pa.Table: + with self._query_generator() as query: + with 
_get_conn(self.config.offline_store) as conn, conn.cursor() as cur: + conn.set_session(readonly=True) + cur.execute(query) + fields = [ + (c.name, pg_type_code_to_arrow(c.type_code)) + for c in cur.description + ] + data = cur.fetchall() + schema = pa.schema(fields) + # TODO: Fix... + data_transposed: List[List[Any]] = [] + for col in range(len(fields)): + data_transposed.append([]) + for row in range(len(data)): + data_transposed[col].append(data[row][col]) + + table = pa.Table.from_arrays( + [pa.array(row) for row in data_transposed], schema=schema + ) + return table + + @property + def metadata(self) -> Optional[RetrievalMetadata]: + return self._metadata + + def persist(self, storage: SavedDatasetStorage): + pass + + +def _get_entity_df_event_timestamp_range( + entity_df: Union[pd.DataFrame, str], + entity_df_event_timestamp_col: str, + config: RepoConfig, + table_name: str, +) -> Tuple[datetime, datetime]: + if isinstance(entity_df, pd.DataFrame): + entity_df_event_timestamp = entity_df.loc[ + :, entity_df_event_timestamp_col + ].infer_objects() + if pd.api.types.is_string_dtype(entity_df_event_timestamp): + entity_df_event_timestamp = pd.to_datetime( + entity_df_event_timestamp, utc=True + ) + entity_df_event_timestamp_range = ( + entity_df_event_timestamp.min(), + entity_df_event_timestamp.max(), + ) + elif isinstance(entity_df, str): + # If the entity_df is a string (SQL query), determine range + # from table + with _get_conn(config.offline_store) as conn, conn.cursor() as cur: + cur.execute( + f"SELECT MIN({entity_df_event_timestamp_col}) AS min, MAX({entity_df_event_timestamp_col}) AS max FROM {table_name}" + ), + res = cur.fetchone() + entity_df_event_timestamp_range = (res[0], res[1]) + else: + raise InvalidEntityType(type(entity_df)) + + return entity_df_event_timestamp_range + + +def _append_alias(field_names: List[str], alias: str) -> List[str]: + return [f'{alias}."{field_name}"' for field_name in field_names] + + +def build_point_in_time_query( + 
feature_view_query_contexts: List[dict], + left_table_query_string: str, + entity_df_event_timestamp_col: str, + entity_df_columns: KeysView[str], + query_template: str, + full_feature_names: bool = False, +) -> str: + """Build point-in-time query between each feature view table and the entity dataframe for PostgreSQL""" + template = Environment(loader=BaseLoader()).from_string(source=query_template) + + final_output_feature_names = list(entity_df_columns) + final_output_feature_names.extend( + [ + ( + f'{fv["name"]}__{fv["field_mapping"].get(feature, feature)}' + if full_feature_names + else fv["field_mapping"].get(feature, feature) + ) + for fv in feature_view_query_contexts + for feature in fv["features"] + ] + ) + + # Add additional fields to dict + template_context = { + "left_table_query_string": left_table_query_string, + "entity_df_event_timestamp_col": entity_df_event_timestamp_col, + "unique_entity_keys": set( + [entity for fv in feature_view_query_contexts for entity in fv["entities"]] + ), + "featureviews": feature_view_query_contexts, + "full_feature_names": full_feature_names, + "final_output_feature_names": final_output_feature_names, + } + + query = template.render(template_context) + return query + + +# Copied from the Feast Redshift offline store implementation +# Note: Keep this in sync with sdk/python/feast/infra/offline_stores/redshift.py: +# MULTIPLE_FEATURE_VIEW_POINT_IN_TIME_JOIN +# https://github.com/feast-dev/feast/blob/master/sdk/python/feast/infra/offline_stores/redshift.py + +MULTIPLE_FEATURE_VIEW_POINT_IN_TIME_JOIN = """ +/* + Compute a deterministic hash for the `left_table_query_string` that will be used throughout + all the logic as the field to GROUP BY the data +*/ +WITH entity_dataframe AS ( + SELECT *, + {{entity_df_event_timestamp_col}} AS entity_timestamp + {% for featureview in featureviews %} + {% if featureview.entities %} + ,( + {% for entity in featureview.entities %} + CAST("{{entity}}" as VARCHAR) || + {% endfor %} + 
CAST("{{entity_df_event_timestamp_col}}" AS VARCHAR) + ) AS "{{featureview.name}}__entity_row_unique_id" + {% else %} + ,CAST("{{entity_df_event_timestamp_col}}" AS VARCHAR) AS "{{featureview.name}}__entity_row_unique_id" + {% endif %} + {% endfor %} + FROM {{ left_table_query_string }} +), + +{% for featureview in featureviews %} + +"{{ featureview.name }}__entity_dataframe" AS ( + SELECT + {% if featureview.entities %}"{{ featureview.entities | join('", "') }}",{% endif %} + entity_timestamp, + "{{featureview.name}}__entity_row_unique_id" + FROM entity_dataframe + GROUP BY + {% if featureview.entities %}"{{ featureview.entities | join('", "')}}",{% endif %} + entity_timestamp, + "{{featureview.name}}__entity_row_unique_id" +), + +/* + This query template performs the point-in-time correctness join for a single feature set table + to the provided entity table. + + 1. We first join the current feature_view to the entity dataframe that has been passed. + This JOIN has the following logic: + - For each row of the entity dataframe, only keep the rows where the `timestamp_field` + is less than the one provided in the entity dataframe + - If there a TTL for the current feature_view, also keep the rows where the `timestamp_field` + is higher the the one provided minus the TTL + - For each row, Join on the entity key and retrieve the `entity_row_unique_id` that has been + computed previously + + The output of this CTE will contain all the necessary information and already filtered out most + of the data that is not relevant. 
+*/ + +"{{ featureview.name }}__subquery" AS ( + SELECT + "{{ featureview.timestamp_field }}" as event_timestamp, + {{ '"' ~ featureview.created_timestamp_column ~ '" as created_timestamp,' if featureview.created_timestamp_column else '' }} + {{ featureview.entity_selections | join(', ')}}{% if featureview.entity_selections %},{% else %}{% endif %} + {% for feature in featureview.features %} + "{{ feature }}" as {% if full_feature_names %}"{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}"{% else %}"{{ featureview.field_mapping.get(feature, feature) }}"{% endif %}{% if loop.last %}{% else %}, {% endif %} + {% endfor %} + FROM {{ featureview.table_subquery }} AS sub + WHERE "{{ featureview.timestamp_field }}" <= (SELECT MAX(entity_timestamp) FROM entity_dataframe) + {% if featureview.ttl == 0 %}{% else %} + AND "{{ featureview.timestamp_field }}" >= (SELECT MIN(entity_timestamp) FROM entity_dataframe) - {{ featureview.ttl }} * interval '1' second + {% endif %} +), + +"{{ featureview.name }}__base" AS ( + SELECT + subquery.*, + entity_dataframe.entity_timestamp, + entity_dataframe."{{featureview.name}}__entity_row_unique_id" + FROM "{{ featureview.name }}__subquery" AS subquery + INNER JOIN "{{ featureview.name }}__entity_dataframe" AS entity_dataframe + ON TRUE + AND subquery.event_timestamp <= entity_dataframe.entity_timestamp + + {% if featureview.ttl == 0 %}{% else %} + AND subquery.event_timestamp >= entity_dataframe.entity_timestamp - {{ featureview.ttl }} * interval '1' second + {% endif %} + + {% for entity in featureview.entities %} + AND subquery."{{ entity }}" = entity_dataframe."{{ entity }}" + {% endfor %} +), + +/* + 2. If the `created_timestamp_column` has been set, we need to + deduplicate the data first. This is done by calculating the + `MAX(created_at_timestamp)` for each event_timestamp. 
+ We then join the data on the next CTE +*/ +{% if featureview.created_timestamp_column %} +"{{ featureview.name }}__dedup" AS ( + SELECT + "{{featureview.name}}__entity_row_unique_id", + event_timestamp, + MAX(created_timestamp) as created_timestamp + FROM "{{ featureview.name }}__base" + GROUP BY "{{featureview.name}}__entity_row_unique_id", event_timestamp +), +{% endif %} + +/* + 3. The data has been filtered during the first CTE "*__base" + Thus we only need to compute the latest timestamp of each feature. +*/ +"{{ featureview.name }}__latest" AS ( + SELECT + event_timestamp, + {% if featureview.created_timestamp_column %}created_timestamp,{% endif %} + "{{featureview.name}}__entity_row_unique_id" + FROM + ( + SELECT *, + ROW_NUMBER() OVER( + PARTITION BY "{{featureview.name}}__entity_row_unique_id" + ORDER BY event_timestamp DESC{% if featureview.created_timestamp_column %},created_timestamp DESC{% endif %} + ) AS row_number + FROM "{{ featureview.name }}__base" + {% if featureview.created_timestamp_column %} + INNER JOIN "{{ featureview.name }}__dedup" + USING ("{{featureview.name}}__entity_row_unique_id", event_timestamp, created_timestamp) + {% endif %} + ) AS sub + WHERE row_number = 1 +), + +/* + 4. Once we know the latest value of each feature for a given timestamp, + we can join again the data back to the original "base" dataset +*/ +"{{ featureview.name }}__cleaned" AS ( + SELECT base.* + FROM "{{ featureview.name }}__base" as base + INNER JOIN "{{ featureview.name }}__latest" + USING( + "{{featureview.name}}__entity_row_unique_id", + event_timestamp + {% if featureview.created_timestamp_column %} + ,created_timestamp + {% endif %} + ) +){% if loop.last %}{% else %}, {% endif %} + + +{% endfor %} +/* + Joins the outputs of multiple time travel joins to a single table. + The entity_dataframe dataset being our source of truth here. 
+ */ + +SELECT "{{ final_output_feature_names | join('", "')}}" +FROM entity_dataframe +{% for featureview in featureviews %} +LEFT JOIN ( + SELECT + "{{featureview.name}}__entity_row_unique_id" + {% for feature in featureview.features %} + ,"{% if full_feature_names %}{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}{% else %}{{ featureview.field_mapping.get(feature, feature) }}{% endif %}" + {% endfor %} + FROM "{{ featureview.name }}__cleaned" +) AS "{{featureview.name}}" USING ("{{featureview.name}}__entity_row_unique_id") +{% endfor %} +""" diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres_source.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres_source.py new file mode 100644 index 00000000000..c76bd7d2f9d --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/postgres_source.py @@ -0,0 +1,117 @@ +import json +from typing import Callable, Dict, Iterable, Optional, Tuple + +from feast.data_source import DataSource +from feast.infra.utils.postgres.connection_utils import _get_conn +from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto +from feast.repo_config import RepoConfig +from feast.type_map import pg_type_code_to_pg_type, pg_type_to_feast_value_type +from feast.value_type import ValueType + + +class PostgreSQLSource(DataSource): + def __init__( + self, + name: str, + query: str, + timestamp_field: Optional[str] = "", + created_timestamp_column: Optional[str] = "", + field_mapping: Optional[Dict[str, str]] = None, + date_partition_column: Optional[str] = "", + ): + self._postgres_options = PostgreSQLOptions(name=name, query=query) + + super().__init__( + name=name, + timestamp_field=timestamp_field, + created_timestamp_column=created_timestamp_column, + field_mapping=field_mapping, + date_partition_column=date_partition_column, + ) + + def __hash__(self): + return super().__hash__() + + def 
__eq__(self, other): + if not isinstance(other, PostgreSQLSource): + raise TypeError( + "Comparisons should only involve PostgreSQLSource class objects." + ) + + return ( + self._postgres_options._query == other._postgres_options._query + and self.timestamp_field == other.timestamp_field + and self.created_timestamp_column == other.created_timestamp_column + and self.field_mapping == other.field_mapping + ) + + @staticmethod + def from_proto(data_source: DataSourceProto): + assert data_source.HasField("custom_options") + + postgres_options = json.loads(data_source.custom_options.configuration) + return PostgreSQLSource( + name=postgres_options["name"], + query=postgres_options["query"], + field_mapping=dict(data_source.field_mapping), + timestamp_field=data_source.timestamp_field, + created_timestamp_column=data_source.created_timestamp_column, + date_partition_column=data_source.date_partition_column, + ) + + def to_proto(self) -> DataSourceProto: + data_source_proto = DataSourceProto( + type=DataSourceProto.CUSTOM_SOURCE, + data_source_class_type="feast.infra.offline_stores.contrib.postgres_offline_store.postgres_source.PostgreSQLSource", + field_mapping=self.field_mapping, + custom_options=self._postgres_options.to_proto(), + ) + + data_source_proto.timestamp_field = self.timestamp_field + data_source_proto.created_timestamp_column = self.created_timestamp_column + data_source_proto.date_partition_column = self.date_partition_column + + return data_source_proto + + def validate(self, config: RepoConfig): + pass + + @staticmethod + def source_datatype_to_feast_value_type() -> Callable[[str], ValueType]: + return pg_type_to_feast_value_type + + def get_table_column_names_and_types( + self, config: RepoConfig + ) -> Iterable[Tuple[str, str]]: + with _get_conn(config.offline_store) as conn, conn.cursor() as cur: + cur.execute( + f"SELECT * FROM ({self.get_table_query_string()}) AS sub LIMIT 0" + ) + return ( + (c.name, pg_type_code_to_pg_type(c.type_code)) for c in 
cur.description + ) + + def get_table_query_string(self) -> str: + return f"({self._postgres_options._query})" + + +class PostgreSQLOptions: + def __init__(self, name: str, query: Optional[str]): + self._name = name + self._query = query + + @classmethod + def from_proto(cls, postgres_options_proto: DataSourceProto.CustomSourceOptions): + config = json.loads(postgres_options_proto.configuration.decode("utf8")) + postgres_options = cls(name=config["name"], query=config["query"]) + + return postgres_options + + def to_proto(self) -> DataSourceProto.CustomSourceOptions: + postgres_options_proto = DataSourceProto.CustomSourceOptions( + configuration=json.dumps( + {"name": self._name, "query": self._query} + ).encode() + ) + + return postgres_options_proto diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/__init__.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/__init__.py new file mode 100644 index 00000000000..ba9bbbfd257 --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/__init__.py @@ -0,0 +1 @@ +from .data_source import postgres_container # noqa diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py new file mode 100644 index 00000000000..6671a477654 --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_offline_store/tests/data_source.py @@ -0,0 +1,122 @@ +import logging +from typing import Dict, Optional + +import pandas as pd +import pytest +from testcontainers.core.container import DockerContainer +from testcontainers.core.waiting_utils import wait_for_logs + +from feast.data_source import DataSource +from feast.infra.offline_stores.contrib.postgres_offline_store.postgres import ( + PostgreSQLOfflineStoreConfig, + PostgreSQLSource, +) +from feast.infra.utils.postgres.connection_utils import 
df_to_postgres_table +from tests.integration.feature_repos.universal.data_source_creator import ( + DataSourceCreator, +) +from tests.integration.feature_repos.universal.online_store_creator import ( + OnlineStoreCreator, +) + +logger = logging.getLogger(__name__) + +POSTGRES_USER = "test" +POSTGRES_PASSWORD = "test" +POSTGRES_DB = "test" + + +@pytest.fixture(scope="session") +def postgres_container(): + container = ( + DockerContainer("postgres:latest") + .with_exposed_ports(5432) + .with_env("POSTGRES_USER", POSTGRES_USER) + .with_env("POSTGRES_PASSWORD", POSTGRES_PASSWORD) + .with_env("POSTGRES_DB", POSTGRES_DB) + ) + + container.start() + + log_string_to_wait_for = "database system is ready to accept connections" + waited = wait_for_logs( + container=container, predicate=log_string_to_wait_for, timeout=30, interval=10, + ) + logger.info("Waited for %s seconds until postgres container was up", waited) + + yield container + container.stop() + + +class PostgreSQLDataSourceCreator(DataSourceCreator, OnlineStoreCreator): + def __init__( + self, project_name: str, fixture_request: pytest.FixtureRequest, **kwargs + ): + super().__init__(project_name,) + + self.project_name = project_name + self.container = fixture_request.getfixturevalue("postgres_container") + if not self.container: + raise RuntimeError( + "In order to use this data source " + "'feast.infra.offline_stores.contrib.postgres_offline_store.tests' " + "must be include into pytest plugins" + ) + + self.offline_store_config = PostgreSQLOfflineStoreConfig( + type="postgres", + host="localhost", + port=self.container.get_exposed_port(5432), + database=self.container.env["POSTGRES_DB"], + db_schema="public", + user=self.container.env["POSTGRES_USER"], + password=self.container.env["POSTGRES_PASSWORD"], + ) + + def create_data_source( + self, + df: pd.DataFrame, + destination_name: str, + suffix: Optional[str] = None, + timestamp_field="ts", + created_timestamp_column="created_ts", + field_mapping: Dict[str, 
str] = None, + ) -> DataSource: + destination_name = self.get_prefixed_table_name(destination_name) + + if self.offline_store_config: + df_to_postgres_table(self.offline_store_config, df, destination_name) + + return PostgreSQLSource( + name=destination_name, + query=f"SELECT * FROM {destination_name}", + timestamp_field=timestamp_field, + created_timestamp_column=created_timestamp_column, + field_mapping=field_mapping or {"ts_1": "ts"}, + ) + + def create_offline_store_config(self) -> PostgreSQLOfflineStoreConfig: + assert self.offline_store_config + return self.offline_store_config + + def get_prefixed_table_name(self, suffix: str) -> str: + return f"{self.project_name}_{suffix}" + + def create_online_store(self) -> Dict[str, str]: + assert self.container + return { + "type": "postgres", + "host": "localhost", + "port": self.container.get_exposed_port(5432), + "database": POSTGRES_DB, + "db_schema": "feature_store", + "user": POSTGRES_USER, + "password": POSTGRES_PASSWORD, + } + + def create_saved_dataset_destination(self): + # FIXME: ... 
+ return None + + def teardown(self): + pass diff --git a/sdk/python/feast/infra/offline_stores/contrib/postgres_repo_configuration.py b/sdk/python/feast/infra/offline_stores/contrib/postgres_repo_configuration.py new file mode 100644 index 00000000000..9b107aa7a30 --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/contrib/postgres_repo_configuration.py @@ -0,0 +1,7 @@ +from feast.infra.offline_stores.contrib.postgres_offline_store.tests.data_source import ( + PostgreSQLDataSourceCreator, +) + +AVAILABLE_OFFLINE_STORES = [("local", PostgreSQLDataSourceCreator)] + +AVAILABLE_ONLINE_STORES = {"postgres": (None, PostgreSQLDataSourceCreator)} diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py index 1b977ba622f..2a0925d9294 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py @@ -32,6 +32,9 @@ from feast.type_map import spark_schema_to_np_dtypes from feast.usage import log_exceptions_and_usage +# Make sure spark warning doesn't raise more than once. 
+warnings.simplefilter("once", RuntimeWarning) + class SparkOfflineStoreConfig(FeastConfigBaseModel): type: StrictStr = "spark" @@ -50,7 +53,7 @@ def pull_latest_from_table_or_query( data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, created_timestamp_column: Optional[str], start_date: datetime, end_date: datetime, @@ -76,7 +79,7 @@ def pull_latest_from_table_or_query( partition_by_join_key_string = ( "PARTITION BY " + partition_by_join_key_string ) - timestamps = [event_timestamp_column] + timestamps = [timestamp_field] if created_timestamp_column: timestamps.append(created_timestamp_column) timestamp_desc_string = " DESC, ".join(timestamps) + " DESC" @@ -92,7 +95,7 @@ def pull_latest_from_table_or_query( SELECT {field_string}, ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS feast_row_ FROM {from_expression} t1 - WHERE {event_timestamp_column} BETWEEN TIMESTAMP('{start_date_str}') AND TIMESTAMP('{end_date_str}') + WHERE {timestamp_field} BETWEEN TIMESTAMP('{start_date_str}') AND TIMESTAMP('{end_date_str}') ) t2 WHERE feast_row_ = 1 """ @@ -190,12 +193,12 @@ def pull_all_from_table_or_query( data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, start_date: datetime, end_date: datetime, ) -> RetrievalJob: """ - Note that join_key_columns, feature_name_columns, event_timestamp_column, and + Note that join_key_columns, feature_name_columns, timestamp_field, and created_timestamp_column have all already been mapped to column names of the source table and those column names are the values passed into this function. 
""" @@ -210,9 +213,7 @@ def pull_all_from_table_or_query( store_config=config.offline_store ) - fields = ", ".join( - join_key_columns + feature_name_columns + [event_timestamp_column] - ) + fields = ", ".join(join_key_columns + feature_name_columns + [timestamp_field]) from_expression = data_source.get_table_query_string() start_date = start_date.astimezone(tz=utc) end_date = end_date.astimezone(tz=utc) @@ -220,7 +221,7 @@ def pull_all_from_table_or_query( query = f""" SELECT {fields} FROM {from_expression} - WHERE {event_timestamp_column} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' + WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' """ return SparkRetrievalJob( @@ -422,9 +423,9 @@ def _format_datetime(t: datetime) -> str: 1. We first join the current feature_view to the entity dataframe that has been passed. This JOIN has the following logic: - - For each row of the entity dataframe, only keep the rows where the `event_timestamp_column` + - For each row of the entity dataframe, only keep the rows where the `timestamp_field` is less than the one provided in the entity dataframe - - If there a TTL for the current feature_view, also keep the rows where the `event_timestamp_column` + - If there a TTL for the current feature_view, also keep the rows where the `timestamp_field` is higher the the one provided minus the TTL - For each row, Join on the entity key and retrieve the `entity_row_unique_id` that has been computed previously @@ -435,16 +436,16 @@ def _format_datetime(t: datetime) -> str: {{ featureview.name }}__subquery AS ( SELECT - {{ featureview.event_timestamp_column }} as event_timestamp, + {{ featureview.timestamp_field }} as event_timestamp, {{ featureview.created_timestamp_column ~ ' as created_timestamp,' if featureview.created_timestamp_column else '' }} {{ featureview.entity_selections | join(', ')}}{% if featureview.entity_selections %},{% else %}{% endif %} {% for feature in featureview.features %} 
{{ feature }} as {% if full_feature_names %}{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}{% else %}{{ featureview.field_mapping.get(feature, feature) }}{% endif %}{% if loop.last %}{% else %}, {% endif %} {% endfor %} FROM {{ featureview.table_subquery }} - WHERE {{ featureview.event_timestamp_column }} <= '{{ featureview.max_event_timestamp }}' + WHERE {{ featureview.timestamp_field }} <= '{{ featureview.max_event_timestamp }}' {% if featureview.ttl == 0 %}{% else %} - AND {{ featureview.event_timestamp_column }} >= '{{ featureview.min_event_timestamp }}' + AND {{ featureview.timestamp_field }} >= '{{ featureview.min_event_timestamp }}' {% endif %} ), diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py index 65997040cc0..c94b1913598 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark_source.py @@ -30,6 +30,7 @@ class SparkSourceFormat(Enum): class SparkSource(DataSource): def __init__( self, + *, name: Optional[str] = None, table: Optional[str] = None, query: Optional[str] = None, @@ -56,7 +57,7 @@ def __init__( warnings.warn( ( "The argument 'date_partition_column' is not supported for Spark sources." 
- "It will be removed in Feast 0.21+" + "It will be removed in Feast 0.23+" ), DeprecationWarning, ) diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/__init__.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/spark_data_source_creator.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py similarity index 100% rename from sdk/python/tests/integration/feature_repos/universal/data_sources/spark_data_source_creator.py rename to sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/tests/data_source.py diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py index 5228ed84a9d..9c73f018197 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/test_config/manual_tests.py @@ -1,9 +1,9 @@ +from feast.infra.offline_stores.contrib.trino_offline_store.tests.data_source import ( + TrinoSourceCreator, +) from tests.integration.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) -from tests.integration.feature_repos.universal.data_sources.trino import ( - TrinoSourceCreator, -) FULL_REPO_CONFIGS = [ IntegrationTestRepoConfig( diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/__init__.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/__init__.py new file mode 100644 index 00000000000..b3190925d57 --- /dev/null +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/__init__.py @@ -0,0 +1 @@ +from .data_source import trino_container # noqa diff --git 
a/sdk/python/tests/integration/feature_repos/universal/data_sources/catalog/memory.properties b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/catalog/memory.properties similarity index 100% rename from sdk/python/tests/integration/feature_repos/universal/data_sources/catalog/memory.properties rename to sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/catalog/memory.properties diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/trino.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py similarity index 71% rename from sdk/python/tests/integration/feature_repos/universal/data_sources/trino.py rename to sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py index 07ae210b12c..f2b9f785a05 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/trino.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/tests/data_source.py @@ -3,6 +3,7 @@ from typing import Dict, List, Optional import pandas as pd +import pytest from testcontainers.core.container import DockerContainer from testcontainers.core.waiting_utils import wait_for_logs @@ -24,46 +25,49 @@ ) +@pytest.fixture(scope="session") +def trino_container(): + current_file = pathlib.Path(__file__).parent.resolve() + catalog_dir = current_file.parent.joinpath("catalog") + container = ( + DockerContainer("trinodb/trino:376") + .with_volume_mapping(catalog_dir, "/etc/catalog/") + .with_exposed_ports("8080") + ) + + container.start() + + log_string_to_wait_for = "SERVER STARTED" + wait_for_logs(container=container, predicate=log_string_to_wait_for, timeout=30) + + yield container + + container.stop() + + class TrinoSourceCreator(DataSourceCreator): tables: List[str] = [] - def __init__(self, project_name: str, **kwargs): + def __init__( + self, project_name: str, fixture_request: pytest.FixtureRequest, **kwargs + ): 
super().__init__(project_name) self.tables_created: List[str] = [] - - if "offline_container" not in kwargs or not kwargs.get( - "offline_container", None - ): - # If we don't get an offline container provided, we try to create it on the fly. - # the problem here is that each test creates its own conatiner, which basically - # browns out developer laptops. - current_file = pathlib.Path(__file__).parent.resolve() - catalog_dir = current_file.parent.joinpath("catalog") - self.container = ( - DockerContainer("trinodb/trino:376") - .with_volume_mapping(catalog_dir, "/etc/catalog/") - .with_exposed_ports("8080") + self.container = fixture_request.getfixturevalue("trino_container") + if not self.container: + raise RuntimeError( + "In order to use this data source " + "'feast.infra.offline_stores.contrib.trino_offline_store.tests' " + "must be include into pytest plugins" ) - - self.container.start() - self.provided_container = False - log_string_to_wait_for = "SERVER STARTED" - wait_for_logs( - container=self.container, predicate=log_string_to_wait_for, timeout=30 - ) - else: - self.provided_container = True - self.container = kwargs["offline_container"] - self.exposed_port = self.container.get_exposed_port("8080") self.client = Trino( user="user", catalog="memory", host="localhost", port=self.exposed_port, ) def teardown(self): - if not self.provided_container: - self.container.stop() + pass def create_data_source( self, @@ -92,7 +96,7 @@ def create_data_source( return TrinoSource( name="ci_trino_offline_store", table=destination_name, - event_timestamp_column=timestamp_field, + timestamp_field=timestamp_field, created_timestamp_column=created_timestamp_column, query=f"SELECT * FROM {destination_name}", field_mapping=field_mapping or {"ts_1": "ts"}, diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py index 442bdf66569..87a99b820e8 100644 --- 
a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino.py @@ -153,7 +153,7 @@ def pull_latest_from_table_or_query( data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, created_timestamp_column: Optional[str], start_date: datetime, end_date: datetime, @@ -177,7 +177,7 @@ def pull_latest_from_table_or_query( partition_by_join_key_string = ( "PARTITION BY " + partition_by_join_key_string ) - timestamps = [event_timestamp_column] + timestamps = [timestamp_field] if created_timestamp_column: timestamps.append(created_timestamp_column) timestamp_desc_string = " DESC, ".join(timestamps) + " DESC" @@ -195,7 +195,7 @@ def pull_latest_from_table_or_query( SELECT {field_string}, ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS _feast_row FROM {from_expression} - WHERE {event_timestamp_column} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' + WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' ) WHERE _feast_row = 1 """ @@ -302,7 +302,7 @@ def pull_all_from_table_or_query( data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, start_date: datetime, end_date: datetime, user: str = "user", @@ -319,12 +319,12 @@ def pull_all_from_table_or_query( config=config, user=user, auth=auth, http_scheme=http_scheme ) field_string = ", ".join( - join_key_columns + feature_name_columns + [event_timestamp_column] + join_key_columns + feature_name_columns + [timestamp_field] ) query = f""" SELECT {field_string} FROM {from_expression} - WHERE {event_timestamp_column} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' + WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' """ return TrinoRetrievalJob( 
query=query, client=client, config=config, full_feature_names=False, @@ -458,9 +458,9 @@ def _get_entity_df_event_timestamp_range( to the provided entity table. 1. We first join the current feature_view to the entity dataframe that has been passed. This JOIN has the following logic: - - For each row of the entity dataframe, only keep the rows where the `event_timestamp_column` + - For each row of the entity dataframe, only keep the rows where the `timestamp_field` is less than the one provided in the entity dataframe - - If there a TTL for the current feature_view, also keep the rows where the `event_timestamp_column` + - If there a TTL for the current feature_view, also keep the rows where the `timestamp_field` is higher the the one provided minus the TTL - For each row, Join on the entity key and retrieve the `entity_row_unique_id` that has been computed previously @@ -469,16 +469,16 @@ def _get_entity_df_event_timestamp_range( */ {{ featureview.name }}__subquery AS ( SELECT - {{ featureview.event_timestamp_column }} as event_timestamp, + {{ featureview.timestamp_field }} as event_timestamp, {{ featureview.created_timestamp_column ~ ' as created_timestamp,' if featureview.created_timestamp_column else '' }} {{ featureview.entity_selections | join(', ')}}{% if featureview.entity_selections %},{% else %}{% endif %} {% for feature in featureview.features %} {{ feature }} as {% if full_feature_names %}{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}{% else %}{{ featureview.field_mapping.get(feature, feature) }}{% endif %}{% if loop.last %}{% else %}, {% endif %} {% endfor %} FROM {{ featureview.table_subquery }} - WHERE {{ featureview.event_timestamp_column }} <= from_iso8601_timestamp('{{ featureview.max_event_timestamp }}') + WHERE {{ featureview.timestamp_field }} <= from_iso8601_timestamp('{{ featureview.max_event_timestamp }}') {% if featureview.ttl == 0 %}{% else %} - AND {{ featureview.event_timestamp_column }} >= 
from_iso8601_timestamp('{{ featureview.min_event_timestamp }}') + AND {{ featureview.timestamp_field }} >= from_iso8601_timestamp('{{ featureview.min_event_timestamp }}') {% endif %} ), {{ featureview.name }}__base AS ( diff --git a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py index 7d6280746ec..b559d0e59ea 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py +++ b/sdk/python/feast/infra/offline_stores/contrib/trino_offline_store/trino_source.py @@ -88,7 +88,6 @@ def __init__( table: Optional[str] = None, created_timestamp_column: Optional[str] = "", field_mapping: Optional[Dict[str, str]] = None, - date_partition_column: Optional[str] = None, query: Optional[str] = None, name: Optional[str] = None, description: Optional[str] = "", @@ -206,7 +205,7 @@ def get_table_column_names_and_types( host=config.offline_store.host, port=config.offline_store.port, ) - if self.table is not None: + if self.table: table_schema = client.execute_query( f"SELECT * FROM {self.table} LIMIT 1" ).schema diff --git a/sdk/python/feast/infra/offline_stores/file.py b/sdk/python/feast/infra/offline_stores/file.py index cb6e874f8a9..f36eb383d8e 100644 --- a/sdk/python/feast/infra/offline_stores/file.py +++ b/sdk/python/feast/infra/offline_stores/file.py @@ -1,17 +1,23 @@ from datetime import datetime +from pathlib import Path from typing import Callable, List, Optional, Tuple, Union import dask.dataframe as dd import pandas as pd import pyarrow +import pyarrow.parquet import pytz from pydantic.typing import Literal from feast import FileSource, OnDemandFeatureView from feast.data_source import DataSource from feast.errors import FeastJoinKeysDuringMaterialization +from feast.feature_logging import LoggingConfig, LoggingSource from feast.feature_view import DUMMY_ENTITY_ID, DUMMY_ENTITY_VAL, FeatureView -from 
feast.infra.offline_stores.file_source import SavedDatasetFileStorage +from feast.infra.offline_stores.file_source import ( + FileLoggingDestination, + SavedDatasetFileStorage, +) from feast.infra.offline_stores.offline_store import ( OfflineStore, RetrievalJob, @@ -181,12 +187,11 @@ def evaluate_historical_retrieval(): entity_df_event_timestamp_col ) - join_keys = [] all_join_keys = [] # Load feature view data from sources and join them incrementally for feature_view, features in feature_views_to_features.items(): - event_timestamp_column = feature_view.batch_source.timestamp_field + timestamp_field = feature_view.batch_source.timestamp_field created_timestamp_column = ( feature_view.batch_source.created_timestamp_column ) @@ -202,7 +207,7 @@ def evaluate_historical_retrieval(): join_keys.append(join_key) right_entity_key_columns = [ - event_timestamp_column, + timestamp_field, created_timestamp_column, ] + join_keys right_entity_key_columns = [c for c in right_entity_key_columns if c] @@ -211,39 +216,39 @@ def evaluate_historical_retrieval(): df_to_join = _read_datasource(feature_view.batch_source) - df_to_join, event_timestamp_column = _field_mapping( + df_to_join, timestamp_field = _field_mapping( df_to_join, feature_view, features, right_entity_key_columns, entity_df_event_timestamp_col, - event_timestamp_column, + timestamp_field, full_feature_names, ) df_to_join = _merge(entity_df_with_features, df_to_join, join_keys) df_to_join = _normalize_timestamp( - df_to_join, event_timestamp_column, created_timestamp_column + df_to_join, timestamp_field, created_timestamp_column ) df_to_join = _filter_ttl( df_to_join, feature_view, entity_df_event_timestamp_col, - event_timestamp_column, + timestamp_field, ) df_to_join = _drop_duplicates( df_to_join, all_join_keys, - event_timestamp_column, + timestamp_field, created_timestamp_column, entity_df_event_timestamp_col, ) entity_df_with_features = _drop_columns( - df_to_join, event_timestamp_column, 
created_timestamp_column + df_to_join, timestamp_field, created_timestamp_column ) # Ensure that we delete dataframes to free up memory @@ -273,7 +278,7 @@ def pull_latest_from_table_or_query( data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, created_timestamp_column: Optional[str], start_date: datetime, end_date: datetime, @@ -285,7 +290,7 @@ def evaluate_offline_job(): source_df = _read_datasource(data_source) source_df = _normalize_timestamp( - source_df, event_timestamp_column, created_timestamp_column + source_df, timestamp_field, created_timestamp_column ) source_columns = set(source_df.columns) @@ -295,19 +300,31 @@ def evaluate_offline_job(): ) ts_columns = ( - [event_timestamp_column, created_timestamp_column] + [timestamp_field, created_timestamp_column] if created_timestamp_column - else [event_timestamp_column] + else [timestamp_field] ) + # try-catch block is added to deal with this issue https://github.com/dask/dask/issues/8939. + # TODO(kevjumba): remove try catch when fix is merged upstream in Dask. + try: + if created_timestamp_column: + source_df = source_df.sort_values(by=created_timestamp_column,) + + source_df = source_df.sort_values(by=timestamp_field) + + except ZeroDivisionError: + # Use 1 partition to get around case where everything in timestamp column is the same so the partition algorithm doesn't + # try to divide by zero. 
+ if created_timestamp_column: + source_df = source_df.sort_values( + by=created_timestamp_column, npartitions=1 + ) - if created_timestamp_column: - source_df = source_df.sort_values(by=created_timestamp_column) - - source_df = source_df.sort_values(by=event_timestamp_column) + source_df = source_df.sort_values(by=timestamp_field, npartitions=1) source_df = source_df[ - (source_df[event_timestamp_column] >= start_date) - & (source_df[event_timestamp_column] < end_date) + (source_df[timestamp_field] >= start_date) + & (source_df[timestamp_field] < end_date) ] source_df = source_df.persist() @@ -339,7 +356,7 @@ def pull_all_from_table_or_query( data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, start_date: datetime, end_date: datetime, ) -> RetrievalJob: @@ -347,14 +364,39 @@ def pull_all_from_table_or_query( config=config, data_source=data_source, join_key_columns=join_key_columns - + [event_timestamp_column], # avoid deduplication + + [timestamp_field], # avoid deduplication feature_name_columns=feature_name_columns, - event_timestamp_column=event_timestamp_column, + timestamp_field=timestamp_field, created_timestamp_column=None, start_date=start_date, end_date=end_date, ) + @staticmethod + def write_logged_features( + config: RepoConfig, + data: Union[pyarrow.Table, Path], + source: LoggingSource, + logging_config: LoggingConfig, + registry: Registry, + ): + destination = logging_config.destination + assert isinstance(destination, FileLoggingDestination) + + if isinstance(data, Path): + data = pyarrow.parquet.read_table(data) + + filesystem, path = FileSource.create_filesystem_and_path( + destination.path, destination.s3_endpoint_override, + ) + + pyarrow.parquet.write_to_dataset( + data, + root_path=path, + partition_cols=destination.partition_by, + filesystem=filesystem, + ) + def _get_entity_df_event_timestamp_range( entity_df: Union[pd.DataFrame, str], 
entity_df_event_timestamp_col: str, @@ -396,7 +438,7 @@ def _field_mapping( features: List[str], right_entity_key_columns: List[str], entity_df_event_timestamp_col: str, - event_timestamp_column: str, + timestamp_field: str, full_feature_names: bool, ) -> dd.DataFrame: # Rename columns by the field mapping dictionary if it exists @@ -435,13 +477,13 @@ def _field_mapping( df_to_join = df_to_join.persist() # Make sure to not have duplicated columns - if entity_df_event_timestamp_col == event_timestamp_column: + if entity_df_event_timestamp_col == timestamp_field: df_to_join = _run_dask_field_mapping( - df_to_join, {event_timestamp_column: f"__{event_timestamp_column}"}, + df_to_join, {timestamp_field: f"__{timestamp_field}"}, ) - event_timestamp_column = f"__{event_timestamp_column}" + timestamp_field = f"__{timestamp_field}" - return df_to_join.persist(), event_timestamp_column + return df_to_join.persist(), timestamp_field def _merge( @@ -475,24 +517,19 @@ def _merge( def _normalize_timestamp( - df_to_join: dd.DataFrame, - event_timestamp_column: str, - created_timestamp_column: str, + df_to_join: dd.DataFrame, timestamp_field: str, created_timestamp_column: str, ) -> dd.DataFrame: df_to_join_types = df_to_join.dtypes - event_timestamp_column_type = df_to_join_types[event_timestamp_column] + timestamp_field_type = df_to_join_types[timestamp_field] if created_timestamp_column: created_timestamp_column_type = df_to_join_types[created_timestamp_column] - if ( - not hasattr(event_timestamp_column_type, "tz") - or event_timestamp_column_type.tz != pytz.UTC - ): + if not hasattr(timestamp_field_type, "tz") or timestamp_field_type.tz != pytz.UTC: # Make sure all timestamp fields are tz-aware. 
We default tz-naive fields to UTC - df_to_join[event_timestamp_column] = df_to_join[event_timestamp_column].apply( + df_to_join[timestamp_field] = df_to_join[timestamp_field].apply( lambda x: x if x.tzinfo is not None else x.replace(tzinfo=pytz.utc), - meta=(event_timestamp_column, "datetime64[ns, UTC]"), + meta=(timestamp_field, "datetime64[ns, UTC]"), ) if created_timestamp_column and ( @@ -503,7 +540,7 @@ def _normalize_timestamp( created_timestamp_column ].apply( lambda x: x if x.tzinfo is not None else x.replace(tzinfo=pytz.utc), - meta=(event_timestamp_column, "datetime64[ns, UTC]"), + meta=(timestamp_field, "datetime64[ns, UTC]"), ) return df_to_join.persist() @@ -513,19 +550,16 @@ def _filter_ttl( df_to_join: dd.DataFrame, feature_view: FeatureView, entity_df_event_timestamp_col: str, - event_timestamp_column: str, + timestamp_field: str, ) -> dd.DataFrame: # Filter rows by defined timestamp tolerance if feature_view.ttl and feature_view.ttl.total_seconds() != 0: df_to_join = df_to_join[ ( - df_to_join[event_timestamp_column] + df_to_join[timestamp_field] >= df_to_join[entity_df_event_timestamp_col] - feature_view.ttl ) - & ( - df_to_join[event_timestamp_column] - <= df_to_join[entity_df_event_timestamp_col] - ) + & (df_to_join[timestamp_field] <= df_to_join[entity_df_event_timestamp_col]) ] df_to_join = df_to_join.persist() @@ -536,7 +570,7 @@ def _filter_ttl( def _drop_duplicates( df_to_join: dd.DataFrame, all_join_keys: List[str], - event_timestamp_column: str, + timestamp_field: str, created_timestamp_column: str, entity_df_event_timestamp_col: str, ) -> dd.DataFrame: @@ -546,7 +580,7 @@ def _drop_duplicates( ) df_to_join = df_to_join.persist() - df_to_join = df_to_join.sort_values(by=event_timestamp_column, na_position="first") + df_to_join = df_to_join.sort_values(by=timestamp_field, na_position="first") df_to_join = df_to_join.persist() df_to_join = df_to_join.drop_duplicates( @@ -557,13 +591,9 @@ def _drop_duplicates( def _drop_columns( - 
df_to_join: dd.DataFrame, - event_timestamp_column: str, - created_timestamp_column: str, + df_to_join: dd.DataFrame, timestamp_field: str, created_timestamp_column: str, ) -> dd.DataFrame: - entity_df_with_features = df_to_join.drop( - [event_timestamp_column], axis=1 - ).persist() + entity_df_with_features = df_to_join.drop([timestamp_field], axis=1).persist() if created_timestamp_column: entity_df_with_features = entity_df_with_features.drop( diff --git a/sdk/python/feast/infra/offline_stores/file_source.py b/sdk/python/feast/infra/offline_stores/file_source.py index 3df0db69b1c..5c2a521ac0d 100644 --- a/sdk/python/feast/infra/offline_stores/file_source.py +++ b/sdk/python/feast/infra/offline_stores/file_source.py @@ -1,14 +1,18 @@ import warnings -from typing import Callable, Dict, Iterable, Optional, Tuple +from typing import Callable, Dict, Iterable, List, Optional, Tuple from pyarrow._fs import FileSystem from pyarrow._s3fs import S3FileSystem -from pyarrow.parquet import ParquetFile +from pyarrow.parquet import ParquetDataset from feast import type_map from feast.data_format import FileFormat, ParquetFormat from feast.data_source import DataSource +from feast.feature_logging import LoggingDestination from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto +from feast.protos.feast.core.FeatureService_pb2 import ( + LoggingConfig as LoggingConfigProto, +) from feast.protos.feast.core.SavedDataset_pb2 import ( SavedDatasetStorage as SavedDatasetStorageProto, ) @@ -20,7 +24,8 @@ class FileSource(DataSource): def __init__( self, - path: str, + *args, + path: Optional[str] = None, event_timestamp_column: Optional[str] = "", file_format: Optional[FileFormat] = None, created_timestamp_column: Optional[str] = "", @@ -58,13 +63,31 @@ def __init__( >>> from feast import FileSource >>> file_source = FileSource(path="my_features.parquet", timestamp_field="event_timestamp") """ - if path is None: + positional_attributes = ["path"] + _path = path 
+ if args: + if args: + warnings.warn( + ( + "File Source parameters should be specified as a keyword argument instead of a positional arg." + "Feast 0.23+ will not support positional arguments to construct File sources" + ), + DeprecationWarning, + ) + if len(args) > len(positional_attributes): + raise ValueError( + f"Only {', '.join(positional_attributes)} are allowed as positional args when defining " + f"File sources, for backwards compatibility." + ) + if len(args) >= 1: + _path = args[0] + if _path is None: raise ValueError( 'No "path" argument provided. Please set "path" to the location of your file source.' ) self.file_options = FileOptions( file_format=file_format, - uri=path, + uri=_path, s3_endpoint_override=s3_endpoint_override, ) @@ -72,7 +95,7 @@ def __init__( warnings.warn( ( "The argument 'date_partition_column' is not supported for File sources." - "It will be removed in Feast 0.21+" + "It will be removed in Feast 0.23+" ), DeprecationWarning, ) @@ -97,16 +120,11 @@ def __eq__(self, other): raise TypeError("Comparisons should only involve FileSource class objects.") return ( - self.name == other.name + super().__eq__(other) + and self.path == other.path and self.file_options.file_format == other.file_options.file_format - and self.timestamp_field == other.timestamp_field - and self.created_timestamp_column == other.created_timestamp_column - and self.field_mapping == other.field_mapping and self.file_options.s3_endpoint_override == other.file_options.s3_endpoint_override - and self.description == other.description - and self.tags == other.tags - and self.owner == other.owner ) @property @@ -161,9 +179,9 @@ def get_table_column_names_and_types( filesystem, path = FileSource.create_filesystem_and_path( self.path, self.file_options.s3_endpoint_override ) - schema = ParquetFile( + schema = ParquetDataset( path if filesystem is None else filesystem.open_input_file(path) - ).schema_arrow + ).schema.to_arrow_schema() return zip(schema.names, map(str, 
schema.types)) @staticmethod @@ -184,7 +202,7 @@ def get_table_query_string(self) -> str: class FileOptions: """ - DataSource File options used to source features from a file + Configuration options for a file data source. """ def __init__( @@ -194,66 +212,23 @@ def __init__( uri: Optional[str], ): """ - FileOptions initialization method + Initializes a FileOptions object. Args: - file_format (FileFormat, optional): file source format eg. parquet - s3_endpoint_override (str, optional): custom s3 endpoint (used only with s3 uri) - uri (str, optional): file source url eg. s3:// or local file - - """ - self._file_format = file_format - self._uri = uri - self._s3_endpoint_override = s3_endpoint_override - - @property - def file_format(self): - """ - Returns the file format of this file - """ - return self._file_format - - @file_format.setter - def file_format(self, file_format): - """ - Sets the file format of this file + file_format (optional): File source format, e.g. parquet. + s3_endpoint_override (optional): Custom s3 endpoint (used only with s3 uri). + uri (optional): File source url, e.g. s3:// or local file. 
""" - self._file_format = file_format - - @property - def uri(self): - """ - Returns the file url of this file - """ - return self._uri - - @uri.setter - def uri(self, uri): - """ - Sets the file url of this file - """ - self._uri = uri - - @property - def s3_endpoint_override(self): - """ - Returns the s3 endpoint override - """ - return None if self._s3_endpoint_override == "" else self._s3_endpoint_override - - @s3_endpoint_override.setter - def s3_endpoint_override(self, s3_endpoint_override): - """ - Sets the s3 endpoint override - """ - self._s3_endpoint_override = s3_endpoint_override + self.file_format = file_format + self.uri = uri or "" + self.s3_endpoint_override = s3_endpoint_override or "" @classmethod def from_proto(cls, file_options_proto: DataSourceProto.FileOptions): """ Creates a FileOptions from a protobuf representation of a file option - args: + Args: file_options_proto: a protobuf representation of a datasource Returns: @@ -319,3 +294,48 @@ def to_data_source(self) -> DataSource: file_format=self.file_options.file_format, s3_endpoint_override=self.file_options.s3_endpoint_override, ) + + +class FileLoggingDestination(LoggingDestination): + _proto_kind = "file_destination" + + path: str + s3_endpoint_override: str + partition_by: Optional[List[str]] + + def __init__( + self, + *, + path: str, + s3_endpoint_override="", + partition_by: Optional[List[str]] = None, + ): + self.path = path + self.s3_endpoint_override = s3_endpoint_override + self.partition_by = partition_by + + @classmethod + def from_proto(cls, config_proto: LoggingConfigProto) -> "LoggingDestination": + return FileLoggingDestination( + path=config_proto.file_destination.path, + s3_endpoint_override=config_proto.file_destination.s3_endpoint_override, + partition_by=list(config_proto.file_destination.partition_by) + if config_proto.file_destination.partition_by + else None, + ) + + def to_proto(self) -> LoggingConfigProto: + return LoggingConfigProto( + 
file_destination=LoggingConfigProto.FileDestination( + path=self.path, + s3_endpoint_override=self.s3_endpoint_override, + partition_by=self.partition_by, + ) + ) + + def to_data_source(self) -> DataSource: + return FileSource( + path=self.path, + file_format=ParquetFormat(), + s3_endpoint_override=self.s3_endpoint_override, + ) diff --git a/sdk/python/feast/infra/offline_stores/offline_store.py b/sdk/python/feast/infra/offline_stores/offline_store.py index e5937712f69..2996a1ed590 100644 --- a/sdk/python/feast/infra/offline_stores/offline_store.py +++ b/sdk/python/feast/infra/offline_stores/offline_store.py @@ -14,6 +14,7 @@ import warnings from abc import ABC, abstractmethod from datetime import datetime +from pathlib import Path from typing import TYPE_CHECKING, List, Optional, Union import pandas as pd @@ -21,6 +22,7 @@ from feast.data_source import DataSource from feast.dqm.errors import ValidationFailed +from feast.feature_logging import LoggingConfig, LoggingSource from feast.feature_view import FeatureView from feast.on_demand_feature_view import OnDemandFeatureView from feast.registry import Registry @@ -30,6 +32,8 @@ if TYPE_CHECKING: from feast.saved_dataset import ValidationReference +warnings.simplefilter("once", RuntimeWarning) + class RetrievalMetadata: min_event_timestamp: Optional[datetime] @@ -173,7 +177,7 @@ def pull_latest_from_table_or_query( data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, created_timestamp_column: Optional[str], start_date: datetime, end_date: datetime, @@ -185,7 +189,7 @@ def pull_latest_from_table_or_query( FeatureStore.materialize() method. This method pulls data from the offline store, and the FeatureStore class is used to write this data into the online store. 
- Note that join_key_columns, feature_name_columns, event_timestamp_column, and created_timestamp_column + Note that join_key_columns, feature_name_columns, timestamp_field, and created_timestamp_column have all already been mapped to column names of the source table and those column names are the values passed into this function. @@ -194,7 +198,7 @@ class is used to write this data into the online store. data_source: Data source to pull all of the columns from join_key_columns: Columns of the join keys feature_name_columns: Columns of the feature names needed - event_timestamp_column: Timestamp column + timestamp_field: Timestamp column start_date: Starting date of query end_date: Ending date of query """ @@ -220,14 +224,14 @@ def pull_all_from_table_or_query( data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, start_date: datetime, end_date: datetime, ) -> RetrievalJob: """ Returns a Retrieval Job for all join key columns, feature name columns, and the event timestamp columns that occur between the start_date and end_date. - Note that join_key_columns, feature_name_columns, event_timestamp_column, and created_timestamp_column + Note that join_key_columns, feature_name_columns, timestamp_field, and created_timestamp_column have all already been mapped to column names of the source table and those column names are the values passed into this function. 
@@ -236,8 +240,33 @@ def pull_all_from_table_or_query( data_source: Data source to pull all of the columns from join_key_columns: Columns of the join keys feature_name_columns: Columns of the feature names needed - event_timestamp_column: Timestamp column + timestamp_field: Timestamp column start_date: Starting date of query end_date: Ending date of query """ pass + + @staticmethod + def write_logged_features( + config: RepoConfig, + data: Union[pyarrow.Table, Path], + source: LoggingSource, + logging_config: LoggingConfig, + registry: Registry, + ): + """ + Write logged features to a specified destination (taken from logging_config) in the offline store. + Data can be appended to an existing table (destination) or a new one will be created automatically + (if it doesn't exist). + Hence, this function can be called repeatedly with the same destination to flush logs in chunks. + + Args: + config: Repo configuration object + data: Arrow table or path to parquet directory that contains logs dataset. + source: Logging source that provides schema and some additional metadata. + logging_config: used to determine destination + registry: Feast registry + + This is an optional method that could be supported only be some stores. 
+ """ + raise NotImplementedError() diff --git a/sdk/python/feast/infra/offline_stores/offline_utils.py b/sdk/python/feast/infra/offline_stores/offline_utils.py index c62d0223a03..b6c3d300d49 100644 --- a/sdk/python/feast/infra/offline_stores/offline_utils.py +++ b/sdk/python/feast/infra/offline_stores/offline_utils.py @@ -86,7 +86,7 @@ class FeatureViewQueryContext: entities: List[str] features: List[str] # feature reference format field_mapping: Dict[str, str] - event_timestamp_column: str + timestamp_field: str created_timestamp_column: Optional[str] table_subquery: str entity_selections: List[str] @@ -154,7 +154,7 @@ def get_feature_view_query_context( entities=join_keys, features=features, field_mapping=feature_view.batch_source.field_mapping, - event_timestamp_column=timestamp_field, + timestamp_field=timestamp_field, created_timestamp_column=created_timestamp_column, # TODO: Make created column optional and not hardcoded table_subquery=feature_view.batch_source.get_table_query_string(), diff --git a/sdk/python/feast/infra/offline_stores/redshift.py b/sdk/python/feast/infra/offline_stores/redshift.py index e67cf13f5c4..74ba83cb004 100644 --- a/sdk/python/feast/infra/offline_stores/redshift.py +++ b/sdk/python/feast/infra/offline_stores/redshift.py @@ -1,6 +1,7 @@ import contextlib import uuid from datetime import datetime +from pathlib import Path from typing import ( Callable, ContextManager, @@ -14,6 +15,7 @@ import numpy as np import pandas as pd +import pyarrow import pyarrow as pa from dateutil import parser from pydantic import StrictStr @@ -23,6 +25,7 @@ from feast import OnDemandFeatureView, RedshiftSource from feast.data_source import DataSource from feast.errors import InvalidEntityType +from feast.feature_logging import LoggingConfig, LoggingSource from feast.feature_view import DUMMY_ENTITY_ID, DUMMY_ENTITY_VAL, FeatureView from feast.infra.offline_stores import offline_utils from feast.infra.offline_stores.offline_store import ( @@ -30,7 +33,10 
@@ RetrievalJob, RetrievalMetadata, ) -from feast.infra.offline_stores.redshift_source import SavedDatasetRedshiftStorage +from feast.infra.offline_stores.redshift_source import ( + RedshiftLoggingDestination, + SavedDatasetRedshiftStorage, +) from feast.infra.utils import aws_utils from feast.registry import Registry from feast.repo_config import FeastConfigBaseModel, RepoConfig @@ -71,7 +77,7 @@ def pull_latest_from_table_or_query( data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, created_timestamp_column: Optional[str], start_date: datetime, end_date: datetime, @@ -86,7 +92,7 @@ def pull_latest_from_table_or_query( partition_by_join_key_string = ( "PARTITION BY " + partition_by_join_key_string ) - timestamp_columns = [event_timestamp_column] + timestamp_columns = [timestamp_field] if created_timestamp_column: timestamp_columns.append(created_timestamp_column) timestamp_desc_string = " DESC, ".join(timestamp_columns) + " DESC" @@ -110,7 +116,7 @@ def pull_latest_from_table_or_query( SELECT {field_string}, ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS _feast_row FROM {from_expression} - WHERE {event_timestamp_column} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' + WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' ) WHERE _feast_row = 1 """ @@ -130,7 +136,7 @@ def pull_all_from_table_or_query( data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, start_date: datetime, end_date: datetime, ) -> RetrievalJob: @@ -138,7 +144,7 @@ def pull_all_from_table_or_query( from_expression = data_source.get_table_query_string() field_string = ", ".join( - join_key_columns + feature_name_columns + [event_timestamp_column] + join_key_columns + feature_name_columns + [timestamp_field] ) redshift_client = 
aws_utils.get_redshift_data_client( @@ -152,7 +158,7 @@ def pull_all_from_table_or_query( query = f""" SELECT {field_string} FROM {from_expression} - WHERE {event_timestamp_column} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' + WHERE {timestamp_field} BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' """ return RedshiftRetrievalJob( @@ -257,6 +263,40 @@ def query_generator() -> Iterator[str]: ), ) + @staticmethod + def write_logged_features( + config: RepoConfig, + data: Union[pyarrow.Table, Path], + source: LoggingSource, + logging_config: LoggingConfig, + registry: Registry, + ): + destination = logging_config.destination + assert isinstance(destination, RedshiftLoggingDestination) + + redshift_client = aws_utils.get_redshift_data_client( + config.offline_store.region + ) + s3_resource = aws_utils.get_s3_resource(config.offline_store.region) + if isinstance(data, Path): + s3_path = f"{config.offline_store.s3_staging_location}/logged_features/{uuid.uuid4()}" + else: + s3_path = f"{config.offline_store.s3_staging_location}/logged_features/{uuid.uuid4()}.parquet" + + aws_utils.upload_arrow_table_to_redshift( + table=data, + redshift_data_client=redshift_client, + cluster_id=config.offline_store.cluster_id, + database=config.offline_store.database, + user=config.offline_store.user, + s3_resource=s3_resource, + s3_path=s3_path, + iam_role=config.offline_store.iam_role, + table_name=destination.table_name, + schema=source.get_schema(registry), + fail_if_exists=False, + ) + class RedshiftRetrievalJob(RetrievalJob): def __init__( @@ -546,9 +586,9 @@ def _get_entity_df_event_timestamp_range( 1. We first join the current feature_view to the entity dataframe that has been passed. 
This JOIN has the following logic: - - For each row of the entity dataframe, only keep the rows where the `event_timestamp_column` + - For each row of the entity dataframe, only keep the rows where the `timestamp_field` is less than the one provided in the entity dataframe - - If there a TTL for the current feature_view, also keep the rows where the `event_timestamp_column` + - If there a TTL for the current feature_view, also keep the rows where the `timestamp_field` is higher the the one provided minus the TTL - For each row, Join on the entity key and retrieve the `entity_row_unique_id` that has been computed previously @@ -559,16 +599,16 @@ def _get_entity_df_event_timestamp_range( {{ featureview.name }}__subquery AS ( SELECT - {{ featureview.event_timestamp_column }} as event_timestamp, + {{ featureview.timestamp_field }} as event_timestamp, {{ featureview.created_timestamp_column ~ ' as created_timestamp,' if featureview.created_timestamp_column else '' }} {{ featureview.entity_selections | join(', ')}}{% if featureview.entity_selections %},{% else %}{% endif %} {% for feature in featureview.features %} {{ feature }} as {% if full_feature_names %}{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}{% else %}{{ featureview.field_mapping.get(feature, feature) }}{% endif %}{% if loop.last %}{% else %}, {% endif %} {% endfor %} FROM {{ featureview.table_subquery }} - WHERE {{ featureview.event_timestamp_column }} <= '{{ featureview.max_event_timestamp }}' + WHERE {{ featureview.timestamp_field }} <= '{{ featureview.max_event_timestamp }}' {% if featureview.ttl == 0 %}{% else %} - AND {{ featureview.event_timestamp_column }} >= '{{ featureview.min_event_timestamp }}' + AND {{ featureview.timestamp_field }} >= '{{ featureview.min_event_timestamp }}' {% endif %} ), diff --git a/sdk/python/feast/infra/offline_stores/redshift_source.py b/sdk/python/feast/infra/offline_stores/redshift_source.py index f099e307cc8..ae9d8bab5c8 100644 --- 
a/sdk/python/feast/infra/offline_stores/redshift_source.py +++ b/sdk/python/feast/infra/offline_stores/redshift_source.py @@ -4,7 +4,11 @@ from feast import type_map from feast.data_source import DataSource from feast.errors import DataSourceNotFoundException, RedshiftCredentialsError +from feast.feature_logging import LoggingDestination from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto +from feast.protos.feast.core.FeatureService_pb2 import ( + LoggingConfig as LoggingConfigProto, +) from feast.protos.feast.core.SavedDataset_pb2 import ( SavedDatasetStorage as SavedDatasetStorageProto, ) @@ -16,6 +20,7 @@ class RedshiftSource(DataSource): def __init__( self, + *, event_timestamp_column: Optional[str] = "", table: Optional[str] = None, schema: Optional[str] = None, @@ -68,7 +73,7 @@ def __init__( else: warnings.warn( ( - f"Starting in Feast 0.21, Feast will require either a name for a data source (if using query) " + f"Starting in Feast 0.23, Feast will require either a name for a data source (if using query) " f"or `table`: {self.query}" ), DeprecationWarning, @@ -77,7 +82,7 @@ def __init__( warnings.warn( ( "The argument 'date_partition_column' is not supported for Redshift sources." - "It will be removed in Feast 0.21+" + "It will be removed in Feast 0.23+" ), DeprecationWarning, ) @@ -105,6 +110,7 @@ def from_proto(data_source: DataSourceProto): A RedshiftSource object based on the data_source protobuf. 
""" return RedshiftSource( + name=data_source.name, field_mapping=dict(data_source.field_mapping), table=data_source.redshift_options.table, schema=data_source.redshift_options.schema, @@ -128,17 +134,11 @@ def __eq__(self, other): ) return ( - self.name == other.name + super().__eq__(other) and self.redshift_options.table == other.redshift_options.table and self.redshift_options.schema == other.redshift_options.schema and self.redshift_options.query == other.redshift_options.query and self.redshift_options.database == other.redshift_options.database - and self.timestamp_field == other.timestamp_field - and self.created_timestamp_column == other.created_timestamp_column - and self.field_mapping == other.field_mapping - and self.description == other.description - and self.tags == other.tags - and self.owner == other.owner ) @property @@ -169,17 +169,17 @@ def to_proto(self) -> DataSourceProto: A DataSourceProto object. """ data_source_proto = DataSourceProto( + name=self.name, type=DataSourceProto.BATCH_REDSHIFT, field_mapping=self.field_mapping, redshift_options=self.redshift_options.to_proto(), description=self.description, tags=self.tags, owner=self.owner, + timestamp_field=self.timestamp_field, + created_timestamp_column=self.created_timestamp_column, ) - data_source_proto.timestamp_field = self.timestamp_field - data_source_proto.created_timestamp_column = self.created_timestamp_column - return data_source_proto def validate(self, config: RepoConfig): @@ -215,7 +215,7 @@ def get_table_column_names_and_types( assert isinstance(config.offline_store, RedshiftOfflineStoreConfig) client = aws_utils.get_redshift_data_client(config.offline_store.region) - if self.table is not None: + if self.table: try: table = client.describe_table( ClusterIdentifier=config.offline_store.cluster_id, @@ -255,7 +255,7 @@ def get_table_column_names_and_types( class RedshiftOptions: """ - DataSource Redshift options used to source features from Redshift query. 
+ Configuration options for a Redshift data source. """ def __init__( @@ -265,50 +265,10 @@ def __init__( query: Optional[str], database: Optional[str], ): - self._table = table - self._schema = schema - self._query = query - self._database = database - - @property - def query(self): - """Returns the Redshift SQL query referenced by this source.""" - return self._query - - @query.setter - def query(self, query): - """Sets the Redshift SQL query referenced by this source.""" - self._query = query - - @property - def table(self): - """Returns the table name of this Redshift table.""" - return self._table - - @table.setter - def table(self, table_name): - """Sets the table ref of this Redshift table.""" - self._table = table_name - - @property - def schema(self): - """Returns the schema name of this Redshift table.""" - return self._schema - - @schema.setter - def schema(self, schema): - """Sets the schema of this Redshift table.""" - self._schema = schema - - @property - def database(self): - """Returns the schema name of this Redshift table.""" - return self._database - - @database.setter - def database(self, database): - """Sets the database name of this Redshift table.""" - self._database = database + self.table = table or "" + self.schema = schema or "" + self.query = query or "" + self.database = database or "" @classmethod def from_proto(cls, redshift_options_proto: DataSourceProto.RedshiftOptions): @@ -371,3 +331,28 @@ def to_proto(self) -> SavedDatasetStorageProto: def to_data_source(self) -> DataSource: return RedshiftSource(table=self.redshift_options.table) + + +class RedshiftLoggingDestination(LoggingDestination): + _proto_kind = "redshift_destination" + + table_name: str + + def __init__(self, *, table_name: str): + self.table_name = table_name + + @classmethod + def from_proto(cls, config_proto: LoggingConfigProto) -> "LoggingDestination": + return RedshiftLoggingDestination( + table_name=config_proto.redshift_destination.table_name, + ) + + def 
to_proto(self) -> LoggingConfigProto: + return LoggingConfigProto( + redshift_destination=LoggingConfigProto.RedshiftDestination( + table_name=self.table_name + ) + ) + + def to_data_source(self) -> DataSource: + return RedshiftSource(table=self.table_name) diff --git a/sdk/python/feast/infra/offline_stores/snowflake.py b/sdk/python/feast/infra/offline_stores/snowflake.py index c88e1b1844c..d39acc9f08d 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake.py +++ b/sdk/python/feast/infra/offline_stores/snowflake.py @@ -16,6 +16,7 @@ import numpy as np import pandas as pd +import pyarrow import pyarrow as pa from pydantic import Field from pydantic.typing import Literal @@ -24,6 +25,7 @@ from feast import OnDemandFeatureView from feast.data_source import DataSource from feast.errors import InvalidEntityType +from feast.feature_logging import LoggingConfig, LoggingSource from feast.feature_view import DUMMY_ENTITY_ID, DUMMY_ENTITY_VAL, FeatureView from feast.infra.offline_stores import offline_utils from feast.infra.offline_stores.offline_store import ( @@ -33,12 +35,14 @@ ) from feast.infra.offline_stores.snowflake_source import ( SavedDatasetSnowflakeStorage, + SnowflakeLoggingDestination, SnowflakeSource, ) from feast.infra.utils.snowflake_utils import ( execute_snowflake_statement, get_snowflake_conn, write_pandas, + write_parquet, ) from feast.registry import Registry from feast.repo_config import FeastConfigBaseModel, RepoConfig @@ -97,7 +101,7 @@ def pull_latest_from_table_or_query( data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, created_timestamp_column: Optional[str], start_date: datetime, end_date: datetime, @@ -117,7 +121,7 @@ def pull_latest_from_table_or_query( else: partition_by_join_key_string = "" - timestamp_columns = [event_timestamp_column] + timestamp_columns = [timestamp_field] if created_timestamp_column: 
timestamp_columns.append(created_timestamp_column) @@ -141,7 +145,7 @@ def pull_latest_from_table_or_query( SELECT {field_string}, ROW_NUMBER() OVER({partition_by_join_key_string} ORDER BY {timestamp_desc_string}) AS "_feast_row" FROM {from_expression} - WHERE "{event_timestamp_column}" BETWEEN TO_TIMESTAMP_NTZ({start_date.timestamp()}) AND TO_TIMESTAMP_NTZ({end_date.timestamp()}) + WHERE "{timestamp_field}" BETWEEN TO_TIMESTAMP_NTZ({start_date.timestamp()}) AND TO_TIMESTAMP_NTZ({end_date.timestamp()}) ) WHERE "_feast_row" = 1 """ @@ -161,7 +165,7 @@ def pull_all_from_table_or_query( data_source: DataSource, join_key_columns: List[str], feature_name_columns: List[str], - event_timestamp_column: str, + timestamp_field: str, start_date: datetime, end_date: datetime, ) -> RetrievalJob: @@ -170,9 +174,7 @@ def pull_all_from_table_or_query( field_string = ( '"' - + '", "'.join( - join_key_columns + feature_name_columns + [event_timestamp_column] - ) + + '", "'.join(join_key_columns + feature_name_columns + [timestamp_field]) + '"' ) @@ -187,7 +189,7 @@ def pull_all_from_table_or_query( query = f""" SELECT {field_string} FROM {from_expression} - WHERE "{event_timestamp_column}" BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' + WHERE "{timestamp_field}" BETWEEN TIMESTAMP '{start_date}' AND TIMESTAMP '{end_date}' """ return SnowflakeRetrievalJob( @@ -276,6 +278,34 @@ def query_generator() -> Iterator[str]: ), ) + @staticmethod + def write_logged_features( + config: RepoConfig, + data: Union[pyarrow.Table, Path], + source: LoggingSource, + logging_config: LoggingConfig, + registry: Registry, + ): + assert isinstance(logging_config.destination, SnowflakeLoggingDestination) + + snowflake_conn = get_snowflake_conn(config.offline_store) + + if isinstance(data, Path): + write_parquet( + snowflake_conn, + data, + source.get_schema(registry), + table_name=logging_config.destination.table_name, + auto_create_table=True, + ) + else: + write_pandas( + snowflake_conn, + 
data.to_pandas(), + table_name=logging_config.destination.table_name, + auto_create_table=True, + ) + class SnowflakeRetrievalJob(RetrievalJob): def __init__( @@ -512,9 +542,9 @@ def _get_entity_df_event_timestamp_range( 1. We first join the current feature_view to the entity dataframe that has been passed. This JOIN has the following logic: - - For each row of the entity dataframe, only keep the rows where the `event_timestamp_column` + - For each row of the entity dataframe, only keep the rows where the `timestamp_field` is less than the one provided in the entity dataframe - - If there a TTL for the current feature_view, also keep the rows where the `event_timestamp_column` + - If there a TTL for the current feature_view, also keep the rows where the `timestamp_field` is higher the the one provided minus the TTL - For each row, Join on the entity key and retrieve the `entity_row_unique_id` that has been computed previously @@ -525,16 +555,16 @@ def _get_entity_df_event_timestamp_range( "{{ featureview.name }}__subquery" AS ( SELECT - "{{ featureview.event_timestamp_column }}" as "event_timestamp", + "{{ featureview.timestamp_field }}" as "event_timestamp", {{'"' ~ featureview.created_timestamp_column ~ '" as "created_timestamp",' if featureview.created_timestamp_column else '' }} {{featureview.entity_selections | join(', ')}}{% if featureview.entity_selections %},{% else %}{% endif %} {% for feature in featureview.features %} "{{ feature }}" as {% if full_feature_names %}"{{ featureview.name }}__{{featureview.field_mapping.get(feature, feature)}}"{% else %}"{{ featureview.field_mapping.get(feature, feature) }}"{% endif %}{% if loop.last %}{% else %}, {% endif %} {% endfor %} FROM {{ featureview.table_subquery }} - WHERE "{{ featureview.event_timestamp_column }}" <= '{{ featureview.max_event_timestamp }}' + WHERE "{{ featureview.timestamp_field }}" <= '{{ featureview.max_event_timestamp }}' {% if featureview.ttl == 0 %}{% else %} - AND "{{ 
featureview.event_timestamp_column }}" >= '{{ featureview.min_event_timestamp }}' + AND "{{ featureview.timestamp_field }}" >= '{{ featureview.min_event_timestamp }}' {% endif %} ), diff --git a/sdk/python/feast/infra/offline_stores/snowflake_source.py b/sdk/python/feast/infra/offline_stores/snowflake_source.py index 1d24cba44ae..d76131f837b 100644 --- a/sdk/python/feast/infra/offline_stores/snowflake_source.py +++ b/sdk/python/feast/infra/offline_stores/snowflake_source.py @@ -3,7 +3,11 @@ from feast import type_map from feast.data_source import DataSource +from feast.feature_logging import LoggingDestination from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto +from feast.protos.feast.core.FeatureService_pb2 import ( + LoggingConfig as LoggingConfigProto, +) from feast.protos.feast.core.SavedDataset_pb2 import ( SavedDatasetStorage as SavedDatasetStorageProto, ) @@ -15,6 +19,7 @@ class SnowflakeSource(DataSource): def __init__( self, + *, database: Optional[str] = None, warehouse: Optional[str] = None, schema: Optional[str] = None, @@ -54,6 +59,7 @@ def __init__( """ if table is None and query is None: raise ValueError('No "table" argument provided.') + # The default Snowflake schema is named "PUBLIC". _schema = "PUBLIC" if (database and table and not schema) else schema @@ -73,7 +79,7 @@ def __init__( else: warnings.warn( ( - f"Starting in Feast 0.21, Feast will require either a name for a data source (if using query) " + f"Starting in Feast 0.23, Feast will require either a name for a data source (if using query) " f"or `table`: {self.query}" ), DeprecationWarning, @@ -83,7 +89,7 @@ def __init__( warnings.warn( ( "The argument 'date_partition_column' is not supported for Snowflake sources." - "It will be removed in Feast 0.21+" + "It will be removed in Feast 0.23+" ), DeprecationWarning, ) @@ -111,6 +117,7 @@ def from_proto(data_source: DataSourceProto): A SnowflakeSource object based on the data_source protobuf. 
""" return SnowflakeSource( + name=data_source.name, field_mapping=dict(data_source.field_mapping), database=data_source.snowflake_options.database, schema=data_source.snowflake_options.schema, @@ -135,18 +142,12 @@ def __eq__(self, other): ) return ( - self.name == other.name - and self.snowflake_options.database == other.snowflake_options.database - and self.snowflake_options.schema == other.snowflake_options.schema - and self.snowflake_options.table == other.snowflake_options.table - and self.snowflake_options.query == other.snowflake_options.query - and self.snowflake_options.warehouse == other.snowflake_options.warehouse - and self.timestamp_field == other.timestamp_field - and self.created_timestamp_column == other.created_timestamp_column - and self.field_mapping == other.field_mapping - and self.description == other.description - and self.tags == other.tags - and self.owner == other.owner + super().__eq__(other) + and self.database == other.database + and self.schema == other.schema + and self.table == other.table + and self.query == other.query + and self.warehouse == other.warehouse ) @property @@ -182,6 +183,7 @@ def to_proto(self) -> DataSourceProto: A DataSourceProto object. """ data_source_proto = DataSourceProto( + name=self.name, type=DataSourceProto.BATCH_SNOWFLAKE, field_mapping=self.field_mapping, snowflake_options=self.snowflake_options.to_proto(), @@ -251,7 +253,7 @@ def get_table_column_names_and_types( class SnowflakeOptions: """ - DataSource snowflake options used to source features from snowflake query. + Configuration options for a Snowflake data source. 
""" def __init__( @@ -262,61 +264,11 @@ def __init__( query: Optional[str], warehouse: Optional[str], ): - self._database = database - self._schema = schema - self._table = table - self._query = query - self._warehouse = warehouse - - @property - def query(self): - """Returns the snowflake SQL query referenced by this source.""" - return self._query - - @query.setter - def query(self, query): - """Sets the snowflake SQL query referenced by this source.""" - self._query = query - - @property - def database(self): - """Returns the database name of this snowflake table.""" - return self._database - - @database.setter - def database(self, database): - """Sets the database ref of this snowflake table.""" - self._database = database - - @property - def schema(self): - """Returns the schema name of this snowflake table.""" - return self._schema - - @schema.setter - def schema(self, schema): - """Sets the schema of this snowflake table.""" - self._schema = schema - - @property - def table(self): - """Returns the table name of this snowflake table.""" - return self._table - - @table.setter - def table(self, table): - """Sets the table ref of this snowflake table.""" - self._table = table - - @property - def warehouse(self): - """Returns the warehouse name of this snowflake table.""" - return self._warehouse - - @warehouse.setter - def warehouse(self, warehouse): - """Sets the warehouse name of this snowflake table.""" - self._warehouse = warehouse + self.database = database or "" + self.schema = schema or "" + self.table = table or "" + self.query = query or "" + self.warehouse = warehouse or "" @classmethod def from_proto(cls, snowflake_options_proto: DataSourceProto.SnowflakeOptions): @@ -381,3 +333,28 @@ def to_proto(self) -> SavedDatasetStorageProto: def to_data_source(self) -> DataSource: return SnowflakeSource(table=self.snowflake_options.table) + + +class SnowflakeLoggingDestination(LoggingDestination): + table_name: str + + _proto_kind = "snowflake_destination" + + 
def __init__(self, *, table_name: str): + self.table_name = table_name + + @classmethod + def from_proto(cls, config_proto: LoggingConfigProto) -> "LoggingDestination": + return SnowflakeLoggingDestination( + table_name=config_proto.snowflake_destination.table_name, + ) + + def to_proto(self) -> LoggingConfigProto: + return LoggingConfigProto( + snowflake_destination=LoggingConfigProto.SnowflakeDestination( + table_name=self.table_name, + ) + ) + + def to_data_source(self) -> DataSource: + return SnowflakeSource(table=self.table_name,) diff --git a/sdk/python/feast/infra/online_stores/contrib/__init__.py b/sdk/python/feast/infra/online_stores/contrib/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/feast/infra/online_stores/contrib/hbase_online_store/README.md b/sdk/python/feast/infra/online_stores/contrib/hbase_online_store/README.md new file mode 100644 index 00000000000..651e4e90b80 --- /dev/null +++ b/sdk/python/feast/infra/online_stores/contrib/hbase_online_store/README.md @@ -0,0 +1,78 @@ +# HBase Online Store +HBase is not included in current [Feast](https://github.com/feast-dev/feast) roadmap, this project intends to add HBase support for Online Store. 
+We create a table _ which gets updated with data on every materialize call + + +#### Create a feature repository + +```shell +feast init feature_repo +cd feature_repo +``` + +#### Edit `feature_store.yaml` + +set `online_store` type to be `hbase` + +```yaml +project: feature_repo +registry: data/registry.db +provider: local +online_store: + type: hbase + host: 127.0.0.1 # hbase thrift endpoint + port: 9090 # hbase thrift api port +``` + +#### Apply the feature definitions in `example.py` + +```shell +feast -c feature_repo apply +``` +##### Output +``` +Registered entity driver_id +Registered feature view driver_hourly_stats_view +Deploying infrastructure for driver_hourly_stats_view +``` + +### Materialize Latest Data to Online Feature Store (HBase) +``` +$ CURRENT_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S") +$ feast -c feature_repo materialize-incremental $CURRENT_TIME +``` +#### Output +``` +Materializing 1 feature views from 2022-04-16 15:30:39+05:30 to 2022-04-19 15:31:04+05:30 into the hbase online store. 
+ +driver_hourly_stats_view from 2022-04-16 15:30:39+05:30 to 2022-04-19 15:31:04+05:30: +100%|████████████████████████████████████████████████████████████████| 5/5 [00:00<00:00, 120.59it/s] +``` + +### Fetch the latest features for some entity id +```python +from pprint import pprint +from feast import FeatureStore + +store = FeatureStore(repo_path=".") +feature_vector = store.get_online_features( + features=[ + "driver_hourly_stats:conv_rate", + "driver_hourly_stats:acc_rate", + "driver_hourly_stats:avg_daily_trips", + ], + entity_rows=[ + {"driver_id": 1004}, + {"driver_id": 1005}, + ], +).to_dict() +pprint(feature_vector) + +``` +#### Output +``` +{'acc_rate': [0.01390857808291912, 0.4063614010810852], + 'avg_daily_trips': [69, 706], + 'conv_rate': [0.6624961495399475, 0.7595928311347961], + 'driver_id': [1004, 1005]} +``` \ No newline at end of file diff --git a/sdk/python/feast/infra/online_stores/contrib/hbase_online_store/__init__.py b/sdk/python/feast/infra/online_stores/contrib/hbase_online_store/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/feast/infra/online_stores/contrib/hbase_online_store/hbase.py b/sdk/python/feast/infra/online_stores/contrib/hbase_online_store/hbase.py new file mode 100644 index 00000000000..d95e83f4290 --- /dev/null +++ b/sdk/python/feast/infra/online_stores/contrib/hbase_online_store/hbase.py @@ -0,0 +1,239 @@ +import calendar +import struct +from datetime import datetime +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple + +from happybase import Connection +from pydantic.typing import Literal + +from feast import Entity +from feast.feature_view import FeatureView +from feast.infra.key_encoding_utils import serialize_entity_key +from feast.infra.online_stores.online_store import OnlineStore +from feast.infra.utils.hbase_utils import HbaseConstants, HbaseUtils +from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto +from 
feast.protos.feast.types.Value_pb2 import Value as ValueProto +from feast.repo_config import FeastConfigBaseModel, RepoConfig +from feast.usage import log_exceptions_and_usage + + +class HbaseOnlineStoreConfig(FeastConfigBaseModel): + """Online store config for Hbase store""" + + type: Literal["hbase"] = "hbase" + """Online store type selector""" + + host: str + """Hostname of Hbase Thrift server""" + + port: str + """Port in which Hbase Thrift server is running""" + + +class HbaseConnection: + """ + Hbase connecttion to connect to hbase. + + Attributes: + store_config: Online store config for Hbase store. + """ + + def __init__(self, store_config: HbaseOnlineStoreConfig): + self._store_config = store_config + self._real_conn = Connection( + host=store_config.host, port=int(store_config.port) + ) + + @property + def real_conn(self) -> Connection: + """Stores the real happybase Connection to connect to hbase.""" + return self._real_conn + + def close(self) -> None: + """Close the happybase connection.""" + self.real_conn.close() + + +class HbaseOnlineStore(OnlineStore): + """ + Online feature store for Hbase. + + Attributes: + _conn: Happybase Connection to connect to hbase thrift server. + """ + + _conn: Connection = None + + def _get_conn(self, config: RepoConfig): + """ + Get or Create Hbase Connection from Repoconfig. + + Args: + config: The RepoConfig for the current FeatureStore. + """ + + store_config = config.online_store + assert isinstance(store_config, HbaseOnlineStoreConfig) + + if not self._conn: + self._conn = Connection(host=store_config.host, port=int(store_config.port)) + return self._conn + + @log_exceptions_and_usage(online_store="hbase") + def online_write_batch( + self, + config: RepoConfig, + table: FeatureView, + data: List[ + Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] + ], + progress: Optional[Callable[[int], Any]], + ) -> None: + """ + Write a batch of feature rows to Hbase online store. 
+ + Args: + config: The RepoConfig for the current FeatureStore. + table: Feast FeatureView. + data: a list of quadruplets containing Feature data. Each quadruplet contains an Entity Key, + a dict containing feature values, an event timestamp for the row, and + the created timestamp for the row if it exists. + progress: Optional function to be called once every mini-batch of rows is written to + the online store. Can be used to display progress. + """ + + hbase = HbaseUtils(self._get_conn(config)) + project = config.project + table_name = _table_id(project, table) + + b = hbase.batch(table_name) + for entity_key, values, timestamp, created_ts in data: + row_key = serialize_entity_key(entity_key).hex() + values_dict = {} + for feature_name, val in values.items(): + values_dict[ + HbaseConstants.get_col_from_feature(feature_name) + ] = val.SerializeToString() + if isinstance(timestamp, datetime): + values_dict[HbaseConstants.DEFAULT_EVENT_TS] = struct.pack( + ">L", int(calendar.timegm(timestamp.timetuple())) + ) + else: + values_dict[HbaseConstants.DEFAULT_EVENT_TS] = timestamp + if created_ts is not None: + if isinstance(created_ts, datetime): + values_dict[HbaseConstants.DEFAULT_CREATED_TS] = struct.pack( + ">L", int(calendar.timegm(created_ts.timetuple())) + ) + else: + values_dict[HbaseConstants.DEFAULT_CREATED_TS] = created_ts + b.put(row_key, values_dict) + b.send() + + @log_exceptions_and_usage(online_store="hbase") + def online_read( + self, + config: RepoConfig, + table: FeatureView, + entity_keys: List[EntityKeyProto], + requested_features: Optional[List[str]] = None, + ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: + """ + Retrieve feature values from the Hbase online store. + + Args: + config: The RepoConfig for the current FeatureStore. + table: Feast FeatureView. + entity_keys: a list of entity keys that should be read from the FeatureStore. + requested_features: a list of requested feature names. 
+ """ + hbase = HbaseUtils(self._get_conn(config)) + project = config.project + table_name = _table_id(project, table) + + result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = [] + + row_keys = [ + serialize_entity_key(entity_key).hex() for entity_key in entity_keys + ] + rows = hbase.rows(table_name, row_keys=row_keys) + + for _, row in rows: + res = {} + res_ts = None + for feature_name, feature_value in row.items(): + f_name = HbaseConstants.get_feature_from_col(feature_name) + if requested_features is not None and f_name in requested_features: + v = ValueProto() + v.ParseFromString(feature_value) + res[f_name] = v + if f_name is HbaseConstants.EVENT_TS: + ts = struct.unpack(">L", feature_value)[0] + res_ts = datetime.fromtimestamp(ts) + if not res: + result.append((None, None)) + else: + result.append((res_ts, res)) + return result + + @log_exceptions_and_usage(online_store="hbase") + def update( + self, + config: RepoConfig, + tables_to_delete: Sequence[FeatureView], + tables_to_keep: Sequence[FeatureView], + entities_to_delete: Sequence[Entity], + entities_to_keep: Sequence[Entity], + partial: bool, + ): + """ + Update tables from the Hbase Online Store. + + Args: + config: The RepoConfig for the current FeatureStore. + tables_to_delete: Tables to delete from the Hbase Online Store. + tables_to_keep: Tables to keep in the Hbase Online Store. + """ + hbase = HbaseUtils(self._get_conn(config)) + project = config.project + + # We don't create any special state for the entites in this implementation. + for table in tables_to_keep: + table_name = _table_id(project, table) + if not hbase.check_if_table_exist(table_name): + hbase.create_table_with_default_cf(table_name) + + for table in tables_to_delete: + table_name = _table_id(project, table) + hbase.delete_table(table_name) + + def teardown( + self, + config: RepoConfig, + tables: Sequence[FeatureView], + entities: Sequence[Entity], + ): + """ + Delete tables from the Hbase Online Store. 
+ + Args: + config: The RepoConfig for the current FeatureStore. + tables: Tables to delete from the feature repo. + """ + hbase = HbaseUtils(self._get_conn(config)) + project = config.project + + for table in tables: + table_name = _table_id(project, table) + hbase.delete_table(table_name) + + +def _table_id(project: str, table: FeatureView) -> str: + """ + Returns table name given the project_name and the feature_view. + + Args: + project: Name of the feast project. + table: Feast FeatureView. + """ + return f"{project}_{table.name}" diff --git a/sdk/python/feast/infra/online_stores/contrib/hbase_repo_configuration.py b/sdk/python/feast/infra/online_stores/contrib/hbase_repo_configuration.py new file mode 100644 index 00000000000..4e32a654b55 --- /dev/null +++ b/sdk/python/feast/infra/online_stores/contrib/hbase_repo_configuration.py @@ -0,0 +1,10 @@ +from tests.integration.feature_repos.integration_test_repo_config import ( + IntegrationTestRepoConfig, +) +from tests.integration.feature_repos.universal.online_store.hbase import ( + HbaseOnlineStoreCreator, +) + +FULL_REPO_CONFIGS = [ + IntegrationTestRepoConfig(online_store_creator=HbaseOnlineStoreCreator), +] diff --git a/sdk/python/feast/infra/online_stores/contrib/postgres.py b/sdk/python/feast/infra/online_stores/contrib/postgres.py new file mode 100644 index 00000000000..81727067f5a --- /dev/null +++ b/sdk/python/feast/infra/online_stores/contrib/postgres.py @@ -0,0 +1,238 @@ +import logging +from collections import defaultdict +from datetime import datetime +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple + +import psycopg2 +import pytz +from psycopg2 import sql +from psycopg2.extras import execute_values +from pydantic.schema import Literal + +from feast import Entity +from feast.feature_view import FeatureView +from feast.infra.key_encoding_utils import serialize_entity_key +from feast.infra.online_stores.online_store import OnlineStore +from 
feast.infra.utils.postgres.connection_utils import _get_conn +from feast.infra.utils.postgres.postgres_config import PostgreSQLConfig +from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto +from feast.protos.feast.types.Value_pb2 import Value as ValueProto +from feast.repo_config import RepoConfig +from feast.usage import log_exceptions_and_usage + + +class PostgreSQLOnlineStoreConfig(PostgreSQLConfig): + type: Literal["postgres"] = "postgres" + + +class PostgreSQLOnlineStore(OnlineStore): + _conn: Optional[psycopg2._psycopg.connection] = None + + def _get_conn(self, config: RepoConfig): + if not self._conn: + assert config.online_store.type == "postgres" + self._conn = _get_conn(config.online_store) + return self._conn + + @log_exceptions_and_usage(online_store="postgres") + def online_write_batch( + self, + config: RepoConfig, + table: FeatureView, + data: List[ + Tuple[EntityKeyProto, Dict[str, ValueProto], datetime, Optional[datetime]] + ], + progress: Optional[Callable[[int], Any]], + ) -> None: + project = config.project + + with self._get_conn(config) as conn, conn.cursor() as cur: + insert_values = [] + for entity_key, values, timestamp, created_ts in data: + entity_key_bin = serialize_entity_key(entity_key) + timestamp = _to_naive_utc(timestamp) + if created_ts is not None: + created_ts = _to_naive_utc(created_ts) + + for feature_name, val in values.items(): + insert_values.append( + ( + entity_key_bin, + feature_name, + val.SerializeToString(), + timestamp, + created_ts, + ) + ) + # Control the batch so that we can update the progress + batch_size = 5000 + for i in range(0, len(insert_values), batch_size): + cur_batch = insert_values[i : i + batch_size] + execute_values( + cur, + sql.SQL( + """ + INSERT INTO {} + (entity_key, feature_name, value, event_ts, created_ts) + VALUES %s + ON CONFLICT (entity_key, feature_name) DO + UPDATE SET + value = EXCLUDED.value, + event_ts = EXCLUDED.event_ts, + created_ts = EXCLUDED.created_ts; + 
""", + ).format(sql.Identifier(_table_id(project, table))), + cur_batch, + page_size=batch_size, + ) + if progress: + progress(len(cur_batch)) + + @log_exceptions_and_usage(online_store="postgres") + def online_read( + self, + config: RepoConfig, + table: FeatureView, + entity_keys: List[EntityKeyProto], + requested_features: Optional[List[str]] = None, + ) -> List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]]: + result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = [] + + project = config.project + with self._get_conn(config) as conn, conn.cursor() as cur: + # Collecting all the keys to a list allows us to make fewer round trips + # to PostgreSQL + keys = [] + for entity_key in entity_keys: + keys.append(serialize_entity_key(entity_key)) + + cur.execute( + sql.SQL( + """ + SELECT entity_key, feature_name, value, event_ts + FROM {} WHERE entity_key = ANY(%s); + """ + ).format(sql.Identifier(_table_id(project, table)),), + (keys,), + ) + + rows = cur.fetchall() + + # Since we don't know the order returned from PostgreSQL we'll need + # to construct a dict to be able to quickly look up the correct row + # when we iterate through the keys since they are in the correct order + values_dict = defaultdict(list) + for row in rows if rows is not None else []: + values_dict[row[0].tobytes()].append(row[1:]) + + for key in keys: + if key in values_dict: + value = values_dict[key] + res = {} + for feature_name, value_bin, event_ts in value: + val = ValueProto() + val.ParseFromString(bytes(value_bin)) + res[feature_name] = val + result.append((event_ts, res)) + else: + result.append((None, None)) + + return result + + @log_exceptions_and_usage(online_store="postgres") + def update( + self, + config: RepoConfig, + tables_to_delete: Sequence[FeatureView], + tables_to_keep: Sequence[FeatureView], + entities_to_delete: Sequence[Entity], + entities_to_keep: Sequence[Entity], + partial: bool, + ): + project = config.project + schema_name = 
config.online_store.db_schema or config.online_store.user + with self._get_conn(config) as conn, conn.cursor() as cur: + # If a db_schema is provided, then that schema gets created if it doesn't + # exist. Else a schema is created for the feature store user. + + cur.execute( + """ + SELECT schema_name + FROM information_schema.schemata + WHERE schema_name = %s + """, + (schema_name,), + ) + schema_exists = cur.fetchone() + if not schema_exists: + cur.execute( + sql.SQL("CREATE SCHEMA IF NOT EXISTS {} AUTHORIZATION {}").format( + sql.Identifier(schema_name), + sql.Identifier(config.online_store.user), + ), + ) + + for table in tables_to_delete: + table_name = _table_id(project, table) + cur.execute(_drop_table_and_index(table_name)) + + for table in tables_to_keep: + table_name = _table_id(project, table) + cur.execute( + sql.SQL( + """ + CREATE TABLE IF NOT EXISTS {} + ( + entity_key BYTEA, + feature_name TEXT, + value BYTEA, + event_ts TIMESTAMPTZ, + created_ts TIMESTAMPTZ, + PRIMARY KEY(entity_key, feature_name) + ); + CREATE INDEX IF NOT EXISTS {} ON {} (entity_key); + """ + ).format( + sql.Identifier(table_name), + sql.Identifier(f"{table_name}_ek"), + sql.Identifier(table_name), + ) + ) + + conn.commit() + + def teardown( + self, + config: RepoConfig, + tables: Sequence[FeatureView], + entities: Sequence[Entity], + ): + project = config.project + try: + with self._get_conn(config) as conn, conn.cursor() as cur: + for table in tables: + table_name = _table_id(project, table) + cur.execute(_drop_table_and_index(table_name)) + except Exception: + logging.exception("Teardown failed") + raise + + +def _table_id(project: str, table: FeatureView) -> str: + return f"{project}_{table.name}" + + +def _drop_table_and_index(table_name): + return sql.SQL( + """ + DROP TABLE IF EXISTS {}; + DROP INDEX IF EXISTS {}; + """ + ).format(sql.Identifier(table_name), sql.Identifier(f"{table_name}_ek"),) + + +def _to_naive_utc(ts: datetime): + if ts.tzinfo is None: + return ts + 
else: + return ts.astimezone(pytz.utc).replace(tzinfo=None) diff --git a/sdk/python/feast/infra/online_stores/datastore.py b/sdk/python/feast/infra/online_stores/datastore.py index e975ce138ca..fc3659ea1ae 100644 --- a/sdk/python/feast/infra/online_stores/datastore.py +++ b/sdk/python/feast/infra/online_stores/datastore.py @@ -15,7 +15,7 @@ import logging from datetime import datetime from multiprocessing.pool import ThreadPool -from queue import Queue +from queue import Empty, Queue from threading import Lock, Thread from typing import Any, Callable, Dict, Iterator, List, Optional, Sequence, Tuple @@ -292,22 +292,24 @@ def increment(self): def worker(shared_counter): while True: - client.delete_multi(deletion_queue.get()) + try: + job = deletion_queue.get(block=False) + except Empty: + return + + client.delete_multi(job) shared_counter.increment() LOGGER.debug( f"batch deletions completed: {shared_counter.value} ({shared_counter.value * BATCH_SIZE} total entries) & outstanding queue size: {deletion_queue.qsize()}" ) deletion_queue.task_done() - for _ in range(NUM_THREADS): - Thread(target=worker, args=(status_info_counter,), daemon=True).start() - query = client.query(kind="Row", ancestor=key) - while True: - entities = list(query.fetch(limit=BATCH_SIZE)) - if not entities: - break - deletion_queue.put([entity.key for entity in entities]) + for page in query.fetch().pages: + deletion_queue.put([entity.key for entity in page]) + + for _ in range(NUM_THREADS): + Thread(target=worker, args=(status_info_counter,)).start() deletion_queue.join() diff --git a/sdk/python/feast/infra/online_stores/dynamodb.py b/sdk/python/feast/infra/online_stores/dynamodb.py index 01562ad900c..406bee525f8 100644 --- a/sdk/python/feast/infra/online_stores/dynamodb.py +++ b/sdk/python/feast/infra/online_stores/dynamodb.py @@ -59,9 +59,6 @@ class DynamoDBOnlineStoreConfig(FeastConfigBaseModel): region: StrictStr """AWS Region Name""" - sort_response: bool = True - """Whether or not to sort 
BatchGetItem response.""" - table_name_template: StrictStr = "{project}.{table_name}" """DynamoDB table name template""" @@ -204,9 +201,6 @@ def online_read( """ Retrieve feature values from the online DynamoDB store. - Note: This method is currently not optimized to retrieve a lot of data at a time - as it does sequential gets from the DynamoDB table. - Args: config: The RepoConfig for the current FeatureStore. table: Feast FeatureView. @@ -224,7 +218,6 @@ def online_read( result: List[Tuple[Optional[datetime], Optional[Dict[str, ValueProto]]]] = [] entity_ids = [compute_entity_id(entity_key) for entity_key in entity_keys] batch_size = online_config.batch_size - sort_response = online_config.sort_response entity_ids_iter = iter(entity_ids) while True: batch = list(itertools.islice(entity_ids_iter, batch_size)) @@ -243,20 +236,27 @@ def online_read( response = response.get("Responses") table_responses = response.get(table_instance.name) if table_responses: - if sort_response: - table_responses = self._sort_dynamodb_response( - table_responses, entity_ids - ) + table_responses = self._sort_dynamodb_response( + table_responses, entity_ids + ) + entity_idx = 0 for tbl_res in table_responses: + entity_id = tbl_res["entity_id"] + while entity_id != batch[entity_idx]: + result.append((None, None)) + entity_idx += 1 res = {} for feature_name, value_bin in tbl_res["values"].items(): val = ValueProto() val.ParseFromString(value_bin.value) res[feature_name] = val result.append((datetime.fromisoformat(tbl_res["event_ts"]), res)) - else: - batch_size_nones = ((None, None),) * len(batch) - result.extend(batch_size_nones) + entity_idx += 1 + + # Not all entities in a batch may have responses + # Pad with remaining values in batch that were not found + batch_size_nones = ((None, None),) * (len(batch) - len(result)) + result.extend(batch_size_nones) return result def _get_dynamodb_client(self, region: str, endpoint_url: Optional[str] = None): diff --git 
a/sdk/python/feast/infra/online_stores/online_store.py b/sdk/python/feast/infra/online_stores/online_store.py index 1f177996dea..04c6a065fb2 100644 --- a/sdk/python/feast/infra/online_stores/online_store.py +++ b/sdk/python/feast/infra/online_stores/online_store.py @@ -76,9 +76,9 @@ def online_read( entity_keys: a list of entity keys that should be read from the FeatureStore. requested_features: (Optional) A subset of the features that should be read from the FeatureStore. Returns: - Data is returned as a list, one item per entity key. Each item in the list is a tuple - of event_ts for the row, and the feature data as a dict from feature names to values. - Values are returned as Value proto message. + Data is returned as a list, one item per entity key in the original order as the entity_keys argument. + Each item in the list is a tuple of event_ts for the row, and the feature data as a dict from feature names + to values. Values are returned as Value proto message. """ ... diff --git a/sdk/python/feast/infra/online_stores/redis.py b/sdk/python/feast/infra/online_stores/redis.py index a2e8e27d807..9ceceff0ac0 100644 --- a/sdk/python/feast/infra/online_stores/redis.py +++ b/sdk/python/feast/infra/online_stores/redis.py @@ -42,7 +42,7 @@ try: from redis import Redis - from rediscluster import RedisCluster + from redis.cluster import ClusterNode, RedisCluster except ImportError as e: from feast.errors import FeastExtrasDependencyImportError @@ -164,7 +164,9 @@ def _get_client(self, online_store_config: RedisOnlineStoreConfig): online_store_config.connection_string ) if online_store_config.redis_type == RedisType.redis_cluster: - kwargs["startup_nodes"] = startup_nodes + kwargs["startup_nodes"] = [ + ClusterNode(**node) for node in startup_nodes + ] self._client = RedisCluster(**kwargs) else: kwargs["host"] = startup_nodes[0]["host"] diff --git a/sdk/python/feast/infra/online_stores/sqlite.py b/sdk/python/feast/infra/online_stores/sqlite.py index 
710f4c386a6..5657fbe3722 100644 --- a/sdk/python/feast/infra/online_stores/sqlite.py +++ b/sdk/python/feast/infra/online_stores/sqlite.py @@ -230,7 +230,9 @@ def teardown( def _initialize_conn(db_path: str): Path(db_path).parent.mkdir(exist_ok=True) return sqlite3.connect( - db_path, detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES, + db_path, + detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES, + check_same_thread=False, ) diff --git a/sdk/python/feast/infra/passthrough_provider.py b/sdk/python/feast/infra/passthrough_provider.py index 3468b9dc927..b5965c91bfb 100644 --- a/sdk/python/feast/infra/passthrough_provider.py +++ b/sdk/python/feast/infra/passthrough_provider.py @@ -2,10 +2,13 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import pandas +import pyarrow import pyarrow as pa from tqdm import tqdm +from feast import FeatureService from feast.entity import Entity +from feast.feature_logging import FeatureServiceLoggingSource from feast.feature_view import FeatureView from feast.infra.offline_stores.offline_store import RetrievalJob from feast.infra.offline_stores.offline_utils import get_offline_store_from_config @@ -36,12 +39,24 @@ def __init__(self, config: RepoConfig): super().__init__(config) self.repo_config = config - self.offline_store = get_offline_store_from_config(config.offline_store) - self.online_store = ( - get_online_store_from_config(config.online_store) - if config.online_store - else None - ) + self._offline_store = None + self._online_store = None + + @property + def online_store(self): + if not self._online_store and self.repo_config.online_store: + self._online_store = get_online_store_from_config( + self.repo_config.online_store + ) + return self._online_store + + @property + def offline_store(self): + if not self._offline_store: + self._offline_store = get_offline_store_from_config( + self.repo_config.offline_store + ) + return self._offline_store def update_infra( self, @@ 
-136,7 +151,7 @@ def materialize_single_feature_view( ( join_key_columns, feature_name_columns, - event_timestamp_column, + timestamp_field, created_timestamp_column, ) = _get_column_names(feature_view, entities) @@ -145,7 +160,7 @@ def materialize_single_feature_view( data_source=feature_view.batch_source, join_key_columns=join_key_columns, feature_name_columns=feature_name_columns, - event_timestamp_column=event_timestamp_column, + timestamp_field=timestamp_field, created_timestamp_column=created_timestamp_column, start_date=start_date, end_date=end_date, @@ -210,7 +225,54 @@ def retrieve_saved_dataset( data_source=dataset.storage.to_data_source(), join_key_columns=dataset.join_keys, feature_name_columns=feature_name_columns, - event_timestamp_column=event_ts_column, + timestamp_field=event_ts_column, start_date=make_tzaware(dataset.min_event_timestamp), # type: ignore end_date=make_tzaware(dataset.max_event_timestamp + timedelta(seconds=1)), # type: ignore ) + + def write_feature_service_logs( + self, + feature_service: FeatureService, + logs: Union[pyarrow.Table, str], + config: RepoConfig, + registry: Registry, + ): + assert ( + feature_service.logging_config is not None + ), "Logging should be configured for the feature service before calling this function" + + self.offline_store.write_logged_features( + config=config, + data=logs, + source=FeatureServiceLoggingSource(feature_service, config.project), + logging_config=feature_service.logging_config, + registry=registry, + ) + + def retrieve_feature_service_logs( + self, + feature_service: FeatureService, + start_date: datetime, + end_date: datetime, + config: RepoConfig, + registry: Registry, + ) -> RetrievalJob: + assert ( + feature_service.logging_config is not None + ), "Logging should be configured for the feature service before calling this function" + + logging_source = FeatureServiceLoggingSource(feature_service, config.project) + schema = logging_source.get_schema(registry) + logging_config = 
feature_service.logging_config + ts_column = logging_source.get_log_timestamp_column() + columns = list(set(schema.names) - {ts_column}) + + return self.offline_store.pull_all_from_table_or_query( + config=config, + data_source=logging_config.destination.to_data_source(), + join_key_columns=[], + feature_name_columns=columns, + timestamp_field=ts_column, + start_date=start_date, + end_date=end_date, + ) diff --git a/sdk/python/feast/infra/provider.py b/sdk/python/feast/infra/provider.py index b379193ba38..7754a58319c 100644 --- a/sdk/python/feast/infra/provider.py +++ b/sdk/python/feast/infra/provider.py @@ -9,7 +9,7 @@ import pyarrow from tqdm import tqdm -from feast import errors +from feast import FeatureService, errors from feast.entity import Entity from feast.feature_view import DUMMY_ENTITY_ID, FeatureView from feast.importer import import_class @@ -186,6 +186,43 @@ def retrieve_saved_dataset( """ ... + @abc.abstractmethod + def write_feature_service_logs( + self, + feature_service: FeatureService, + logs: Union[pyarrow.Table, Path], + config: RepoConfig, + registry: Registry, + ): + """ + Write features and entities logged by a feature server to an offline store. + + Schema of logs table is being inferred from the provided feature service. + Only feature services with configured logging are accepted. + + Logs dataset can be passed as Arrow Table or path to parquet directory. + """ + ... + + @abc.abstractmethod + def retrieve_feature_service_logs( + self, + feature_service: FeatureService, + start_date: datetime, + end_date: datetime, + config: RepoConfig, + registry: Registry, + ) -> RetrievalJob: + """ + Read logged features from an offline store for a given time window [from, to). + Target table is determined based on logging configuration from the feature service. + + Returns: + RetrievalJob object, which wraps the query to the offline store. + + """ + ... 
+ def get_feature_server_endpoint(self) -> Optional[str]: """Returns endpoint for the feature server, if it exists.""" return None @@ -258,7 +295,7 @@ def _get_column_names( the query to the offline store. """ # if we have mapped fields, use the original field names in the call to the offline store - event_timestamp_column = feature_view.batch_source.timestamp_field + timestamp_field = feature_view.batch_source.timestamp_field feature_names = [feature.name for feature in feature_view.features] created_timestamp_column = feature_view.batch_source.created_timestamp_column join_keys = [ @@ -268,10 +305,10 @@ def _get_column_names( reverse_field_mapping = { v: k for k, v in feature_view.batch_source.field_mapping.items() } - event_timestamp_column = ( - reverse_field_mapping[event_timestamp_column] - if event_timestamp_column in reverse_field_mapping.keys() - else event_timestamp_column + timestamp_field = ( + reverse_field_mapping[timestamp_field] + if timestamp_field in reverse_field_mapping.keys() + else timestamp_field ) created_timestamp_column = ( reverse_field_mapping[created_timestamp_column] @@ -294,13 +331,13 @@ def _get_column_names( name for name in feature_names if name not in join_keys - and name != event_timestamp_column + and name != timestamp_field and name != created_timestamp_column ] return ( join_keys, feature_names, - event_timestamp_column, + timestamp_field, created_timestamp_column, ) diff --git a/sdk/python/feast/infra/registry_stores/contrib/postgres/registry_store.py b/sdk/python/feast/infra/registry_stores/contrib/postgres/registry_store.py new file mode 100644 index 00000000000..b3c0c6bd365 --- /dev/null +++ b/sdk/python/feast/infra/registry_stores/contrib/postgres/registry_store.py @@ -0,0 +1,122 @@ +from typing import Optional + +import psycopg2 +from psycopg2 import sql + +from feast.infra.utils.postgres.connection_utils import _get_conn +from feast.infra.utils.postgres.postgres_config import PostgreSQLConfig +from 
feast.protos.feast.core.Registry_pb2 import Registry as RegistryProto +from feast.registry_store import RegistryStore +from feast.repo_config import RegistryConfig + + +class PostgresRegistryConfig(RegistryConfig): + host: str + port: int + database: str + db_schema: str + user: str + password: str + sslmode: Optional[str] + sslkey_path: Optional[str] + sslcert_path: Optional[str] + sslrootcert_path: Optional[str] + + +class PostgreSQLRegistryStore(RegistryStore): + def __init__(self, config: PostgresRegistryConfig, registry_path: str): + self.db_config = PostgreSQLConfig( + host=config.host, + port=config.port, + database=config.database, + db_schema=config.db_schema, + user=config.user, + password=config.password, + sslmode=getattr(config, "sslmode", None), + sslkey_path=getattr(config, "sslkey_path", None), + sslcert_path=getattr(config, "sslcert_path", None), + sslrootcert_path=getattr(config, "sslrootcert_path", None), + ) + self.table_name = config.path + self.cache_ttl_seconds = config.cache_ttl_seconds + + def get_registry_proto(self) -> RegistryProto: + registry_proto = RegistryProto() + try: + with _get_conn(self.db_config) as conn, conn.cursor() as cur: + cur.execute( + sql.SQL( + """ + SELECT registry + FROM {} + WHERE version = (SELECT max(version) FROM {}) + """ + ).format( + sql.Identifier(self.table_name), + sql.Identifier(self.table_name), + ) + ) + row = cur.fetchone() + if row: + registry_proto = registry_proto.FromString(row[0]) + except psycopg2.errors.UndefinedTable: + pass + return registry_proto + + def update_registry_proto(self, registry_proto: RegistryProto): + """ + Overwrites the current registry proto with the proto passed in. This method + writes to the registry path. 
+ + Args: + registry_proto: the new RegistryProto + """ + schema_name = self.db_config.db_schema or self.db_config.user + with _get_conn(self.db_config) as conn, conn.cursor() as cur: + cur.execute( + """ + SELECT schema_name + FROM information_schema.schemata + WHERE schema_name = %s + """, + (schema_name,), + ) + schema_exists = cur.fetchone() + if not schema_exists: + cur.execute( + sql.SQL("CREATE SCHEMA IF NOT EXISTS {} AUTHORIZATION {}").format( + sql.Identifier(schema_name), + sql.Identifier(self.db_config.user), + ), + ) + + cur.execute( + sql.SQL( + """ + CREATE TABLE IF NOT EXISTS {} ( + version BIGSERIAL PRIMARY KEY, + registry BYTEA NOT NULL + ); + """ + ).format(sql.Identifier(self.table_name)), + ) + # Do we want to keep track of the history or just keep the latest? + cur.execute( + sql.SQL( + """ + INSERT INTO {} (registry) + VALUES (%s); + """ + ).format(sql.Identifier(self.table_name)), + [registry_proto.SerializeToString()], + ) + + def teardown(self): + with _get_conn(self.db_config) as conn, conn.cursor() as cur: + cur.execute( + sql.SQL( + """ + DROP TABLE IF EXISTS {}; + """ + ).format(sql.Identifier(self.table_name)) + ) diff --git a/sdk/python/feast/infra/transformation_servers/Dockerfile b/sdk/python/feast/infra/transformation_servers/Dockerfile index 653e34cdf54..ff276c7a70d 100644 --- a/sdk/python/feast/infra/transformation_servers/Dockerfile +++ b/sdk/python/feast/infra/transformation_servers/Dockerfile @@ -6,10 +6,14 @@ COPY sdk/python/feast/infra/transformation_servers/app.py app.py # Copy necessary parts of the Feast codebase COPY sdk/python sdk/python COPY protos protos +COPY go go +COPY setup.py setup.py +COPY pyproject.toml pyproject.toml COPY README.md README.md + # Install dependencies -RUN pip3 install -e 'sdk/python[ci]' +RUN pip3 install -e '.' 
# Start feature transformation server CMD [ "python", "app.py" ] diff --git a/sdk/python/feast/infra/utils/aws_utils.py b/sdk/python/feast/infra/utils/aws_utils.py index fe5eed774ec..bb75160a873 100644 --- a/sdk/python/feast/infra/utils/aws_utils.py +++ b/sdk/python/feast/infra/utils/aws_utils.py @@ -2,9 +2,11 @@ import os import tempfile import uuid -from typing import Any, Dict, Iterator, Optional, Tuple +from pathlib import Path +from typing import Any, Dict, Iterator, Optional, Tuple, Union import pandas as pd +import pyarrow import pyarrow as pa import pyarrow.parquet as pq from tenacity import ( @@ -194,13 +196,6 @@ def upload_df_to_redshift( The caller is responsible for deleting the table when no longer necessary. - Here's how the upload process works: - 1. Pandas DataFrame is converted to PyArrow Table - 2. PyArrow Table is serialized into a Parquet format on local disk - 3. The Parquet file is uploaded to S3 - 4. The S3 file is uploaded to Redshift as a new table through COPY command - 5. The local disk & s3 paths are cleaned up - Args: redshift_data_client: Redshift Data API Service client cluster_id: Redshift Cluster Identifier @@ -216,10 +211,6 @@ def upload_df_to_redshift( Raises: RedshiftTableNameTooLong: The specified table name is too long. 
""" - if len(table_name) > REDSHIFT_TABLE_NAME_MAX_LENGTH: - raise RedshiftTableNameTooLong(table_name) - - bucket, key = get_bucket_and_key(s3_path) # Drop the index so that we dont have unnecessary columns df.reset_index(drop=True, inplace=True) @@ -231,35 +222,110 @@ def upload_df_to_redshift( # More details at: # https://pandas.pydata.org/pandas-docs/stable/user_guide/missing_data.html#values-considered-missing table = pa.Table.from_pandas(df) - column_names, column_types = [], [] - for field in table.schema: - column_names.append(field.name) - column_types.append(pa_to_redshift_value_type(field.type)) + upload_arrow_table_to_redshift( + table, + redshift_data_client, + cluster_id=cluster_id, + database=database, + user=user, + s3_resource=s3_resource, + iam_role=iam_role, + s3_path=s3_path, + table_name=table_name, + ) + + +def upload_arrow_table_to_redshift( + table: Union[pyarrow.Table, Path], + redshift_data_client, + cluster_id: str, + database: str, + user: str, + s3_resource, + iam_role: str, + s3_path: str, + table_name: str, + schema: Optional[pyarrow.Schema] = None, + fail_if_exists: bool = True, +): + """Uploads an Arrow Table to Redshift to a new or existing table. + + Here's how the upload process works: + 1. PyArrow Table is serialized into a Parquet format on local disk + 2. The Parquet file is uploaded to S3 + 3. The S3 file is uploaded to Redshift as a new table through COPY command + 4. The local disk & s3 paths are cleaned up + + Args: + redshift_data_client: Redshift Data API Service client + cluster_id: Redshift Cluster Identifier + database: Redshift Database Name + user: Redshift username + s3_resource: S3 Resource object + s3_path: S3 path where the Parquet file is temporarily uploaded + iam_role: IAM Role for Redshift to assume during the COPY command. + The role must grant permission to read the S3 location. 
+ table_name: The name of the new Redshift table where we copy the dataframe + table: The Arrow Table or Path to parquet dataset to upload + schema: (Optionally) client may provide arrow Schema which will be converted into redshift table schema + fail_if_exists: fail if table with such name exists or append data to existing table + + Raises: + RedshiftTableNameTooLong: The specified table name is too long. + """ + if len(table_name) > REDSHIFT_TABLE_NAME_MAX_LENGTH: + raise RedshiftTableNameTooLong(table_name) + + if isinstance(table, pyarrow.Table) and not schema: + schema = table.schema + + if not schema: + raise ValueError("Schema must be specified when data is passed as a Path") + + bucket, key = get_bucket_and_key(s3_path) + column_query_list = ", ".join( - [ - f"{column_name} {column_type}" - for column_name, column_type in zip(column_names, column_types) - ] + [f"{field.name} {pa_to_redshift_value_type(field.type)}" for field in schema] ) - # Write the PyArrow Table on disk in Parquet format and upload it to S3 - with tempfile.TemporaryDirectory() as temp_dir: - file_path = f"{temp_dir}/{uuid.uuid4()}.parquet" - pq.write_table(table, file_path) - s3_resource.Object(bucket, key).put(Body=open(file_path, "rb")) + uploaded_files = [] - # Create the table with the desired schema and - # copy the Parquet file contents to the Redshift table - create_and_copy_query = ( - f"CREATE TABLE {table_name}({column_query_list}); " - + f"COPY {table_name} FROM '{s3_path}' IAM_ROLE '{iam_role}' FORMAT AS PARQUET" + if isinstance(table, Path): + for file in table.iterdir(): + file_key = os.path.join(key, file.name) + with file.open("rb") as f: + s3_resource.Object(bucket, file_key).put(Body=f) + + uploaded_files.append(file_key) + else: + # Write the PyArrow Table on disk in Parquet format and upload it to S3 + with tempfile.TemporaryFile(suffix=".parquet") as parquet_temp_file: + pq.write_table(table, parquet_temp_file) + parquet_temp_file.seek(0) + s3_resource.Object(bucket, 
key).put(Body=parquet_temp_file) + + uploaded_files.append(key) + + copy_query = ( + f"COPY {table_name} FROM '{s3_path}' IAM_ROLE '{iam_role}' FORMAT AS PARQUET" ) - execute_redshift_statement( - redshift_data_client, cluster_id, database, user, create_and_copy_query + create_query = ( + f"CREATE TABLE {'IF NOT EXISTS' if not fail_if_exists else ''}" + f" {table_name}({column_query_list})" ) - # Clean up S3 temporary data - s3_resource.Object(bucket, key).delete() + try: + execute_redshift_statement( + redshift_data_client, + cluster_id, + database, + user, + f"{create_query}; {copy_query}", + ) + finally: + # Clean up S3 temporary data + for file_pah in uploaded_files: + s3_resource.Object(bucket, file_pah).delete() @contextlib.contextmanager diff --git a/sdk/python/feast/infra/utils/hbase_utils.py b/sdk/python/feast/infra/utils/hbase_utils.py new file mode 100644 index 00000000000..78a39caed8b --- /dev/null +++ b/sdk/python/feast/infra/utils/hbase_utils.py @@ -0,0 +1,189 @@ +from typing import List + +from happybase import Connection + +from feast.infra.key_encoding_utils import serialize_entity_key +from feast.protos.feast.types.EntityKey_pb2 import EntityKey + + +class HbaseConstants: + """Constants to be used by the Hbase Online Store.""" + + DEFAULT_COLUMN_FAMILY = "default" + EVENT_TS = "event_ts" + CREATED_TS = "created_ts" + DEFAULT_EVENT_TS = DEFAULT_COLUMN_FAMILY + ":" + EVENT_TS + DEFAULT_CREATED_TS = DEFAULT_COLUMN_FAMILY + ":" + CREATED_TS + + @staticmethod + def get_feature_from_col(col): + """Given the column name, exclude the column family to get the feature name.""" + return col.decode("utf-8").split(":")[1] + + @staticmethod + def get_col_from_feature(feature): + """Given the feature name, add the column family to get the column name.""" + if isinstance(feature, bytes): + feature = feature.decode("utf-8") + return HbaseConstants.DEFAULT_COLUMN_FAMILY + ":" + feature + + +class HbaseUtils: + """ + Utils class to manage different Hbase operations. 
+ + Attributes: + conn: happybase Connection to connect to hbase. + host: hostname of the hbase thrift server. + port: port in which thrift server is running. + timeout: socket timeout in milliseconds. + """ + + def __init__( + self, conn: Connection = None, host: str = None, port: int = None, timeout=None + ): + if conn is None: + self.host = host + self.port = port + self.conn = Connection(host=host, port=port, timeout=timeout) + else: + self.conn = conn + + def create_table(self, table_name: str, colm_family: List[str]): + """ + Create table in hbase online store. + + Arguments: + table_name: Name of the Hbase table. + colm_family: List of names of column families to be created in the hbase table. + """ + cf_dict: dict = {} + for cf in colm_family: + cf_dict[cf] = dict() + return self.conn.create_table(table_name, cf_dict) + + def create_table_with_default_cf(self, table_name: str): + """ + Create table in hbase online store with one column family "default". + + Arguments: + table_name: Name of the Hbase table. + """ + return self.conn.create_table(table_name, {"default": dict()}) + + def check_if_table_exist(self, table_name: str): + """ + Check if table exists in hbase. + + Arguments: + table_name: Name of the Hbase table. + """ + return bytes(table_name, "utf-8") in self.conn.tables() + + def batch(self, table_name: str): + """ + Returns a 'Batch' instance that can be used for mass data manipulation in the hbase table. + + Arguments: + table_name: Name of the Hbase table. + """ + return self.conn.table(table_name).batch() + + def put(self, table_name: str, row_key: str, data: dict): + """ + Store data in the hbase table. + + Arguments: + table_name: Name of the Hbase table. + row_key: Row key of the row to be inserted to hbase table. 
+ data: Mapping of column family name:column name to column values + """ + table = self.conn.table(table_name) + table.put(row_key, data) + + def row( + self, + table_name: str, + row_key, + columns=None, + timestamp=None, + include_timestamp=False, + ): + """ + Fetch a row of data from the hbase table. + + Arguments: + table_name: Name of the Hbase table. + row_key: Row key of the row to be inserted to hbase table. + columns: the name of columns that needs to be fetched. + timestamp: timestamp specifies the maximum version the cells can have. + include_timestamp: specifies if (column, timestamp) to be return instead of only column. + """ + table = self.conn.table(table_name) + return table.row(row_key, columns, timestamp, include_timestamp) + + def rows( + self, + table_name: str, + row_keys, + columns=None, + timestamp=None, + include_timestamp=False, + ): + """ + Fetch multiple rows of data from the hbase table. + + Arguments: + table_name: Name of the Hbase table. + row_keys: List of row key of the row to be inserted to hbase table. + columns: the name of columns that needs to be fetched. + timestamp: timestamp specifies the maximum version the cells can have. + include_timestamp: specifies if (column, timestamp) to be return instead of only column. 
+ """ + table = self.conn.table(table_name) + return table.rows(row_keys, columns, timestamp, include_timestamp) + + def print_table(self, table_name): + """Prints the table scanning all the rows of the hbase table.""" + table = self.conn.table(table_name) + scan_data = table.scan() + for row_key, cols in scan_data: + print(row_key.decode("utf-8"), cols) + + def delete_table(self, table: str): + """Deletes the hbase table given the table name.""" + if self.check_if_table_exist(table): + self.conn.delete_table(table, disable=True) + + def close_conn(self): + """Closes the happybase connection.""" + self.conn.close() + + +def main(): + from feast.protos.feast.types.Value_pb2 import Value + + connection = Connection(host="localhost", port=9090) + table = connection.table("test_hbase_driver_hourly_stats") + row_keys = [ + serialize_entity_key( + EntityKey(join_keys=["driver_id"], entity_values=[Value(int64_val=1004)]) + ).hex(), + serialize_entity_key( + EntityKey(join_keys=["driver_id"], entity_values=[Value(int64_val=1005)]) + ).hex(), + serialize_entity_key( + EntityKey(join_keys=["driver_id"], entity_values=[Value(int64_val=1024)]) + ).hex(), + ] + rows = table.rows(row_keys) + + for row_key, row in rows: + for key, value in row.items(): + col_name = bytes.decode(key, "utf-8").split(":")[1] + print(col_name, value) + print() + + +if __name__ == "__main__": + main() diff --git a/sdk/python/feast/infra/utils/postgres/__init__.py b/sdk/python/feast/infra/utils/postgres/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/feast/infra/utils/postgres/connection_utils.py b/sdk/python/feast/infra/utils/postgres/connection_utils.py new file mode 100644 index 00000000000..6dbb4a4bc01 --- /dev/null +++ b/sdk/python/feast/infra/utils/postgres/connection_utils.py @@ -0,0 +1,68 @@ +from typing import Dict + +import numpy as np +import pandas as pd +import psycopg2 +import psycopg2.extras +import pyarrow as pa + +from 
feast.infra.utils.postgres.postgres_config import PostgreSQLConfig +from feast.type_map import arrow_to_pg_type + + +def _get_conn(config: PostgreSQLConfig): + conn = psycopg2.connect( + dbname=config.database, + host=config.host, + port=int(config.port), + user=config.user, + password=config.password, + sslmode=config.sslmode, + sslkey=config.sslkey_path, + sslcert=config.sslcert_path, + sslrootcert=config.sslrootcert_path, + options="-c search_path={}".format(config.db_schema or config.user), + ) + return conn + + +def _df_to_create_table_sql(entity_df, table_name) -> str: + pa_table = pa.Table.from_pandas(entity_df) + columns = [ + f""""{f.name}" {arrow_to_pg_type(str(f.type))}""" for f in pa_table.schema + ] + return f""" + CREATE TABLE "{table_name}" ( + {", ".join(columns)} + ); + """ + + +def df_to_postgres_table( + config: PostgreSQLConfig, df: pd.DataFrame, table_name: str +) -> Dict[str, np.dtype]: + """ + Create a table for the data frame, insert all the values, and return the table schema + """ + with _get_conn(config) as conn, conn.cursor() as cur: + cur.execute(_df_to_create_table_sql(df, table_name)) + psycopg2.extras.execute_values( + cur, + f""" + INSERT INTO {table_name} + VALUES %s + """, + df.replace({np.NaN: None}).to_numpy(), + ) + return dict(zip(df.columns, df.dtypes)) + + +def get_query_schema(config: PostgreSQLConfig, sql_query: str) -> Dict[str, np.dtype]: + """ + We'll use the statement when we perform the query rather than copying data to a + new table + """ + with _get_conn(config) as conn: + conn.set_session(readonly=True) + df = pd.read_sql(f"SELECT * FROM {sql_query} LIMIT 0", conn,) + return dict(zip(df.columns, df.dtypes)) diff --git a/sdk/python/feast/infra/utils/postgres/postgres_config.py b/sdk/python/feast/infra/utils/postgres/postgres_config.py new file mode 100644 index 00000000000..f22cc6c204e --- /dev/null +++ b/sdk/python/feast/infra/utils/postgres/postgres_config.py @@ -0,0 +1,18 @@ +from typing import Optional + +from 
pydantic import StrictStr + +from feast.repo_config import FeastConfigBaseModel + + +class PostgreSQLConfig(FeastConfigBaseModel): + host: StrictStr + port: int = 5432 + database: StrictStr + db_schema: StrictStr = "public" + user: StrictStr + password: StrictStr + sslmode: Optional[StrictStr] = None + sslkey_path: Optional[StrictStr] = None + sslcert_path: Optional[StrictStr] = None + sslrootcert_path: Optional[StrictStr] = None diff --git a/sdk/python/feast/infra/utils/snowflake_utils.py b/sdk/python/feast/infra/utils/snowflake_utils.py index a467a9de421..05834ae4369 100644 --- a/sdk/python/feast/infra/utils/snowflake_utils.py +++ b/sdk/python/feast/infra/utils/snowflake_utils.py @@ -3,10 +3,12 @@ import random import string from logging import getLogger +from pathlib import Path from tempfile import TemporaryDirectory from typing import Any, Dict, Iterator, List, Optional, Tuple, cast import pandas as pd +import pyarrow from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from tenacity import ( @@ -138,6 +140,73 @@ def write_pandas( the passed in DataFrame. 
The table will not be created if it already exists create_temp_table: Will make the auto-created table as a temporary table """ + + cursor: SnowflakeCursor = conn.cursor() + stage_name = create_temporary_sfc_stage(cursor) + + upload_df(df, cursor, stage_name, chunk_size, parallel, compression) + copy_uploaded_data_to_table( + cursor, + stage_name, + list(df.columns), + table_name, + database, + schema, + compression, + on_error, + quote_identifiers, + auto_create_table, + create_temp_table, + ) + + +def write_parquet( + conn: SnowflakeConnection, + path: Path, + dataset_schema: pyarrow.Schema, + table_name: str, + database: Optional[str] = None, + schema: Optional[str] = None, + compression: str = "gzip", + on_error: str = "abort_statement", + parallel: int = 4, + quote_identifiers: bool = True, + auto_create_table: bool = False, + create_temp_table: bool = False, +): + cursor: SnowflakeCursor = conn.cursor() + stage_name = create_temporary_sfc_stage(cursor) + + columns = [field.name for field in dataset_schema] + upload_local_pq(path, cursor, stage_name, parallel) + copy_uploaded_data_to_table( + cursor, + stage_name, + columns, + table_name, + database, + schema, + compression, + on_error, + quote_identifiers, + auto_create_table, + create_temp_table, + ) + + +def copy_uploaded_data_to_table( + cursor: SnowflakeCursor, + stage_name: str, + columns: List[str], + table_name: str, + database: Optional[str] = None, + schema: Optional[str] = None, + compression: str = "gzip", + on_error: str = "abort_statement", + quote_identifiers: bool = True, + auto_create_table: bool = False, + create_temp_table: bool = False, +): if database is not None and schema is None: raise ProgrammingError( "Schema has to be provided to write_pandas when a database is provided" @@ -163,37 +232,11 @@ def write_pandas( + (schema + "." 
if schema else "") + (table_name) ) - if chunk_size is None: - chunk_size = len(df) - cursor: SnowflakeCursor = conn.cursor() - stage_name = create_temporary_sfc_stage(cursor) - with TemporaryDirectory() as tmp_folder: - for i, chunk in chunk_helper(df, chunk_size): - chunk_path = os.path.join(tmp_folder, "file{}.txt".format(i)) - # Dump chunk into parquet file - chunk.to_parquet( - chunk_path, - compression=compression, - use_deprecated_int96_timestamps=True, - ) - # Upload parquet file - upload_sql = ( - "PUT /* Python:snowflake.connector.pandas_tools.write_pandas() */ " - "'file://{path}' @\"{stage_name}\" PARALLEL={parallel}" - ).format( - path=chunk_path.replace("\\", "\\\\").replace("'", "\\'"), - stage_name=stage_name, - parallel=parallel, - ) - logger.debug(f"uploading files with '{upload_sql}'") - cursor.execute(upload_sql, _is_internal=True) - # Remove chunk file - os.remove(chunk_path) if quote_identifiers: - columns = '"' + '","'.join(list(df.columns)) + '"' + quoted_columns = '"' + '","'.join(columns) + '"' else: - columns = ",".join(list(df.columns)) + quoted_columns = ",".join(columns) if auto_create_table: file_format_name = create_file_format(compression, compression_map, cursor) @@ -209,7 +252,7 @@ def write_pandas( # columns in order quote = '"' if quote_identifiers else "" create_table_columns = ", ".join( - [f"{quote}{c}{quote} {column_type_mapping[c]}" for c in df.columns] + [f"{quote}{c}{quote} {column_type_mapping[c]}" for c in columns] ) create_table_sql = ( f"CREATE {'TEMP ' if create_temp_table else ''}TABLE IF NOT EXISTS {location} " @@ -225,9 +268,9 @@ def write_pandas( # in Snowflake, all parquet data is stored in a single column, $1, so we must select columns explicitly # see (https://docs.snowflake.com/en/user-guide/script-data-load-transform-parquet.html) if quote_identifiers: - parquet_columns = "$1:" + ",$1:".join(f'"{c}"' for c in df.columns) + parquet_columns = "$1:" + ",$1:".join(f'"{c}"' for c in columns) else: - 
parquet_columns = "$1:" + ",$1:".join(df.columns) + parquet_columns = "$1:" + ",$1:".join(columns) copy_into_sql = ( "COPY INTO {location} /* Python:snowflake.connector.pandas_tools.write_pandas() */ " "({columns}) " @@ -236,7 +279,7 @@ def write_pandas( "PURGE=TRUE ON_ERROR={on_error}" ).format( location=location, - columns=columns, + columns=quoted_columns, parquet_columns=parquet_columns, stage_name=stage_name, compression=compression_map[compression], @@ -250,6 +293,78 @@ def write_pandas( result_cursor.close() +def upload_df( + df: pd.DataFrame, + cursor: SnowflakeCursor, + stage_name: str, + chunk_size: Optional[int] = None, + parallel: int = 4, + compression: str = "gzip", +): + """ + Args: + df: Dataframe we'd like to write back. + cursor: cursor to be used to communicate with Snowflake. + stage_name: stage name in Snowflake connection. + chunk_size: Number of elements to be inserted once, if not provided all elements will be dumped once + (Default value = None). + parallel: Number of threads to be used when uploading chunks, default follows documentation at: + https://docs.snowflake.com/en/sql-reference/sql/put.html#optional-parameters (Default value = 4). + compression: The compression used on the Parquet files, can only be gzip, or snappy. Gzip gives supposedly a + better compression, while snappy is faster. Use whichever is more appropriate (Default value = 'gzip'). 
+ + """ + if chunk_size is None: + chunk_size = len(df) + + with TemporaryDirectory() as tmp_folder: + for i, chunk in chunk_helper(df, chunk_size): + chunk_path = os.path.join(tmp_folder, "file{}.txt".format(i)) + # Dump chunk into parquet file + chunk.to_parquet( + chunk_path, + compression=compression, + use_deprecated_int96_timestamps=True, + ) + # Upload parquet file + upload_sql = ( + "PUT /* Python:feast.infra.utils.snowflake_utils.upload_df() */ " + "'file://{path}' @\"{stage_name}\" PARALLEL={parallel}" + ).format( + path=chunk_path.replace("\\", "\\\\").replace("'", "\\'"), + stage_name=stage_name, + parallel=parallel, + ) + logger.debug(f"uploading files with '{upload_sql}'") + cursor.execute(upload_sql, _is_internal=True) + # Remove chunk file + os.remove(chunk_path) + + +def upload_local_pq( + path: Path, cursor: SnowflakeCursor, stage_name: str, parallel: int = 4, +): + """ + Args: + path: Path to parquet dataset on disk + cursor: cursor to be used to communicate with Snowflake. + stage_name: stage name in Snowflake connection. + parallel: Number of threads to be used when uploading chunks, default follows documentation at: + https://docs.snowflake.com/en/sql-reference/sql/put.html#optional-parameters (Default value = 4). 
+ """ + for file in path.iterdir(): + upload_sql = ( + "PUT /* Python:feast.infra.utils.snowflake_utils.upload_local_pq() */ " + "'file://{path}' @\"{stage_name}\" PARALLEL={parallel}" + ).format( + path=str(file).replace("\\", "\\\\").replace("'", "\\'"), + stage_name=stage_name, + parallel=parallel, + ) + logger.debug(f"uploading files with '{upload_sql}'") + cursor.execute(upload_sql, _is_internal=True) + + @retry( wait=wait_exponential(multiplier=1, max=4), retry=retry_if_exception_type(ProgrammingError), diff --git a/sdk/python/feast/on_demand_feature_view.py b/sdk/python/feast/on_demand_feature_view.py index 790891b0781..1cddc0b8814 100644 --- a/sdk/python/feast/on_demand_feature_view.py +++ b/sdk/python/feast/on_demand_feature_view.py @@ -8,6 +8,7 @@ import pandas as pd from feast.base_feature_view import BaseFeatureView +from feast.batch_feature_view import BatchFeatureView from feast.data_source import RequestSource from feast.errors import RegistryInferenceFailure, SpecifiedFeaturesNotPresentError from feast.feature import Feature @@ -25,6 +26,7 @@ from feast.protos.feast.core.OnDemandFeatureView_pb2 import ( UserDefinedFunction as UserDefinedFunctionProto, ) +from feast.stream_feature_view import StreamFeatureView from feast.type_map import ( feast_value_type_to_pandas_type, python_type_to_feast_value_type, @@ -66,14 +68,21 @@ class OnDemandFeatureView(BaseFeatureView): tags: Dict[str, str] owner: str - @log_exceptions - def __init__( + @log_exceptions # noqa: C901 + def __init__( # noqa: C901 self, *args, name: Optional[str] = None, features: Optional[List[Feature]] = None, sources: Optional[ - Dict[str, Union[FeatureView, FeatureViewProjection, RequestSource]] + List[ + Union[ + BatchFeatureView, + StreamFeatureView, + RequestSource, + FeatureViewProjection, + ] + ] ] = None, udf: Optional[MethodType] = None, inputs: Optional[ @@ -92,11 +101,11 @@ def __init__( features (deprecated): The list of features in the output of the on demand feature view, 
after the transformation has been applied. sources (optional): A map from input source names to the actual input sources, - which may be feature views, feature view projections, or request data sources. + which may be feature views, or request data sources. These sources serve as inputs to the udf, which will refer to them by name. udf (optional): The user defined transformation function, which must take pandas dataframes as inputs. - inputs (optional): A map from input source names to the actual input sources, + inputs (optional): (Deprecated) A map from input source names to the actual input sources, which may be feature views, feature view projections, or request data sources. These sources serve as inputs to the udf, which will refer to them by name. schema (optional): The list of features in the output of the on demand feature @@ -118,24 +127,33 @@ def __init__( ( "The `features` parameter is being deprecated in favor of the `schema` parameter. " "Please switch from using `features` to `schema`. This will also requiring switching " - "feature definitions from using `Feature` to `Field`. Feast 0.21 and onwards will not " + "feature definitions from using `Feature` to `Field`. Feast 0.23 and onwards will not " "support the `features` parameter." ), DeprecationWarning, ) - - _sources = sources or inputs + _sources = sources or [] if inputs and sources: raise ValueError("At most one of `sources` or `inputs` can be specified.") elif inputs: warnings.warn( ( "The `inputs` parameter is being deprecated. Please use `sources` instead. " - "Feast 0.21 and onwards will not support the `inputs` parameter." + "Feast 0.23 and onwards will not support the `inputs` parameter." 
), DeprecationWarning, ) - + for _, source in inputs.items(): + if isinstance(source, FeatureView): + _sources.append(feature_view_to_batch_feature_view(source)) + elif isinstance(source, RequestSource) or isinstance( + source, FeatureViewProjection + ): + _sources.append(source) + else: + raise ValueError( + "input can only accept FeatureView, FeatureViewProjection, or RequestSource" + ) _udf = udf if args: @@ -163,17 +181,28 @@ def __init__( ( "The `features` parameter is being deprecated in favor of the `schema` parameter. " "Please switch from using `features` to `schema`. This will also requiring switching " - "feature definitions from using `Feature` to `Field`. Feast 0.21 and onwards will not " + "feature definitions from using `Feature` to `Field`. Feast 0.23 and onwards will not " "support the `features` parameter." ), DeprecationWarning, ) if len(args) >= 3: - _sources = args[2] + _inputs = args[2] + for _, source in _inputs.items(): + if isinstance(source, FeatureView): + _sources.append(feature_view_to_batch_feature_view(source)) + elif isinstance(source, RequestSource) or isinstance( + source, FeatureViewProjection + ): + _sources.append(source) + else: + raise ValueError( + "input can only accept FeatureView, FeatureViewProjection, or RequestSource" + ) warnings.warn( ( "The `inputs` parameter is being deprecated. Please use `sources` instead. " - "Feast 0.21 and onwards will not support the `inputs` parameter." + "Feast 0.23 and onwards will not support the `inputs` parameter." 
), DeprecationWarning, ) @@ -195,18 +224,17 @@ def __init__( tags=tags, owner=owner, ) - assert _sources is not None self.source_feature_view_projections: Dict[str, FeatureViewProjection] = {} self.source_request_sources: Dict[str, RequestSource] = {} - for source_name, odfv_source in _sources.items(): + for odfv_source in _sources: if isinstance(odfv_source, RequestSource): - self.source_request_sources[source_name] = odfv_source + self.source_request_sources[odfv_source.name] = odfv_source elif isinstance(odfv_source, FeatureViewProjection): - self.source_feature_view_projections[source_name] = odfv_source + self.source_feature_view_projections[odfv_source.name] = odfv_source else: self.source_feature_view_projections[ - source_name + odfv_source.name ] = odfv_source.projection if _udf is None: @@ -219,12 +247,12 @@ def proto_class(self) -> Type[OnDemandFeatureViewProto]: return OnDemandFeatureViewProto def __copy__(self): + fv = OnDemandFeatureView( name=self.name, schema=self.features, - sources=dict( - **self.source_feature_view_projections, **self.source_request_sources, - ), + sources=list(self.source_feature_view_projections.values()) + + list(self.source_request_sources.values()), udf=self.udf, description=self.description, tags=self.tags, @@ -234,14 +262,19 @@ def __copy__(self): return fv def __eq__(self, other): + if not isinstance(other, OnDemandFeatureView): + raise TypeError( + "Comparisons should only involve OnDemandFeatureView class objects." 
+ ) + if not super().__eq__(other): return False if ( - not self.source_feature_view_projections - == other.source_feature_view_projections - or not self.source_request_sources == other.source_request_sources - or not self.udf.__code__.co_code == other.udf.__code__.co_code + self.source_feature_view_projections + != other.source_feature_view_projections + or self.source_request_sources != other.source_request_sources + or self.udf.__code__.co_code != other.udf.__code__.co_code ): return False @@ -297,22 +330,21 @@ def from_proto(cls, on_demand_feature_view_proto: OnDemandFeatureViewProto): Returns: A OnDemandFeatureView object based on the on-demand feature view protobuf. """ - sources = {} - for ( - source_name, - on_demand_source, - ) in on_demand_feature_view_proto.spec.sources.items(): + sources = [] + for (_, on_demand_source,) in on_demand_feature_view_proto.spec.sources.items(): if on_demand_source.WhichOneof("source") == "feature_view": - sources[source_name] = FeatureView.from_proto( - on_demand_source.feature_view - ).projection + sources.append( + FeatureView.from_proto(on_demand_source.feature_view).projection + ) elif on_demand_source.WhichOneof("source") == "feature_view_projection": - sources[source_name] = FeatureViewProjection.from_proto( - on_demand_source.feature_view_projection + sources.append( + FeatureViewProjection.from_proto( + on_demand_source.feature_view_projection + ) ) else: - sources[source_name] = RequestSource.from_proto( - on_demand_source.request_data_source + sources.append( + RequestSource.from_proto(on_demand_source.request_data_source) ) on_demand_feature_view_obj = cls( name=on_demand_feature_view_proto.spec.name, @@ -471,7 +503,16 @@ def get_requested_odfvs(feature_refs, project, registry): def on_demand_feature_view( *args, features: Optional[List[Feature]] = None, - sources: Optional[Dict[str, Union[FeatureView, RequestSource]]] = None, + sources: Optional[ + List[ + Union[ + BatchFeatureView, + StreamFeatureView, + 
RequestSource, + FeatureViewProjection, + ] + ] + ] = None, inputs: Optional[Dict[str, Union[FeatureView, RequestSource]]] = None, schema: Optional[List[Field]] = None, description: str = "", @@ -485,7 +526,7 @@ def on_demand_feature_view( features (deprecated): The list of features in the output of the on demand feature view, after the transformation has been applied. sources (optional): A map from input source names to the actual input sources, - which may be feature views, feature view projections, or request data sources. + which may be feature views, or request data sources. These sources serve as inputs to the udf, which will refer to them by name. inputs (optional): A map from input source names to the actual input sources, which may be feature views, feature view projections, or request data sources. @@ -507,23 +548,33 @@ def on_demand_feature_view( ( "The `features` parameter is being deprecated in favor of the `schema` parameter. " "Please switch from using `features` to `schema`. This will also requiring switching " - "feature definitions from using `Feature` to `Field`. Feast 0.21 and onwards will not " + "feature definitions from using `Feature` to `Field`. Feast 0.23 and onwards will not " "support the `features` parameter." ), DeprecationWarning, ) - - _sources = sources or inputs + _sources = sources or [] if inputs and sources: raise ValueError("At most one of `sources` or `inputs` can be specified.") elif inputs: warnings.warn( ( "The `inputs` parameter is being deprecated. Please use `sources` instead. " - "Feast 0.21 and onwards will not support the `inputs` parameter." + "Feast 0.23 and onwards will not support the `inputs` parameter." 
), DeprecationWarning, ) + for _, source in inputs.items(): + if isinstance(source, FeatureView): + _sources.append(feature_view_to_batch_feature_view(source)) + elif isinstance(source, RequestSource) or isinstance( + source, FeatureViewProjection + ): + _sources.append(source) + else: + raise ValueError( + "input can only accept FeatureView, FeatureViewProjection, or RequestSource" + ) if args: warnings.warn( @@ -548,25 +599,43 @@ def on_demand_feature_view( ( "The `features` parameter is being deprecated in favor of the `schema` parameter. " "Please switch from using `features` to `schema`. This will also requiring switching " - "feature definitions from using `Feature` to `Field`. Feast 0.21 and onwards will not " + "feature definitions from using `Feature` to `Field`. Feast 0.23 and onwards will not " "support the `features` parameter." ), DeprecationWarning, ) if len(args) >= 2: - _sources = args[1] - warnings.warn( - ( - "The `inputs` parameter is being deprecated. Please use `sources` instead. " - "Feast 0.21 and onwards will not support the `inputs` parameter." - ), - DeprecationWarning, - ) + _inputs = args[1] + for _, source in _inputs.items(): + if isinstance(source, FeatureView): + _sources.append(feature_view_to_batch_feature_view(source)) + elif isinstance(source, RequestSource) or isinstance( + source, FeatureViewProjection + ): + _sources.append(source) + else: + raise ValueError( + "input can only accept FeatureView, FeatureViewProjection, or RequestSource" + ) + warnings.warn( + ( + "The `inputs` parameter is being deprecated. Please use `sources` instead. " + "Feast 0.23 and onwards will not support the `inputs` parameter." + ), + DeprecationWarning, + ) if not _sources: raise ValueError("The `sources` parameter must be specified.") + def mainify(obj): + # Needed to allow dill to properly serialize the udf. Otherwise, clients will need to have a file with the same + # name as the original file defining the ODFV. 
+ if obj.__module__ != "__main__": + obj.__module__ = "__main__" + def decorator(user_function): + mainify(user_function) on_demand_feature_view_obj = OnDemandFeatureView( name=user_function.__name__, sources=_sources, @@ -582,3 +651,16 @@ def decorator(user_function): return on_demand_feature_view_obj return decorator + + +def feature_view_to_batch_feature_view(fv: FeatureView) -> BatchFeatureView: + return BatchFeatureView( + name=fv.name, + entities=fv.entities, + ttl=fv.ttl, + tags=fv.tags, + online=fv.online, + owner=fv.owner, + schema=fv.schema, + source=fv.source, + ) diff --git a/sdk/python/feast/registry.py b/sdk/python/feast/registry.py index da9c6c6b217..be009566d05 100644 --- a/sdk/python/feast/registry.py +++ b/sdk/python/feast/registry.py @@ -18,7 +18,7 @@ from enum import Enum from pathlib import Path from threading import Lock -from typing import Any, Dict, List, Optional, Set +from typing import Any, Dict, List, Optional from urllib.parse import urlparse import dill @@ -58,6 +58,7 @@ "GCSRegistryStore": "feast.infra.gcp.GCSRegistryStore", "S3RegistryStore": "feast.infra.aws.S3RegistryStore", "LocalRegistryStore": "feast.infra.local.LocalRegistryStore", + "PostgreSQLRegistryStore": "feast.infra.registry_stores.contrib.postgres.registry_store.PostgreSQLRegistryStore", } REGISTRY_STORE_CLASS_FOR_SCHEME = { @@ -98,7 +99,7 @@ def get_objects_from_registry( @staticmethod def get_objects_from_repo_contents( repo_contents: RepoContents, - ) -> Dict["FeastObjectType", Set[Any]]: + ) -> Dict["FeastObjectType", List[Any]]: return { FeastObjectType.DATA_SOURCE: repo_contents.data_sources, FeastObjectType.ENTITY: repo_contents.entities, diff --git a/sdk/python/feast/repo_config.py b/sdk/python/feast/repo_config.py index c86a42a8bd8..62d799a2b6b 100644 --- a/sdk/python/feast/repo_config.py +++ b/sdk/python/feast/repo_config.py @@ -6,6 +6,7 @@ import yaml from pydantic import ( BaseModel, + Field, StrictInt, StrictStr, ValidationError, @@ -35,6 +36,8 @@ "redis": 
"feast.infra.online_stores.redis.RedisOnlineStore", "dynamodb": "feast.infra.online_stores.dynamodb.DynamoDBOnlineStore", "snowflake.online": "feast.infra.online_stores.snowflake.SnowflakeOnlineStore", + "postgres": "feast.infra.online_stores.contrib.postgres.PostgreSQLOnlineStore", + "hbase": "feast.infra.online_stores.contrib.hbase_online_store.hbase.HbaseOnlineStore", } OFFLINE_STORE_CLASS_FOR_TYPE = { @@ -44,6 +47,7 @@ "snowflake.offline": "feast.infra.offline_stores.snowflake.SnowflakeOfflineStore", "spark": "feast.infra.offline_stores.contrib.spark_offline_store.spark.SparkOfflineStore", "trino": "feast.infra.offline_stores.contrib.trino_offline_store.trino.TrinoOfflineStore", + "postgres": "feast.infra.offline_stores.contrib.postgres_offline_store.postgres.PostgreSQLOfflineStore", } FEATURE_SERVER_CONFIG_CLASS_FOR_TYPE = { @@ -104,10 +108,10 @@ class RepoConfig(FeastBaseModel): provider: StrictStr """ str: local or gcp or aws """ - online_store: Any + _online_config: Any = Field(alias="online_store") """ OnlineStoreConfig: Online store configuration (optional depending on provider) """ - offline_store: Any + _offline_config: Any = Field(alias="offline_store") """ OfflineStoreConfig: Offline store configuration (optional depending on provider) """ feature_server: Optional[Any] @@ -123,19 +127,27 @@ class RepoConfig(FeastBaseModel): def __init__(self, **data: Any): super().__init__(**data) - if isinstance(self.online_store, Dict): - self.online_store = get_online_config_from_type(self.online_store["type"])( - **self.online_store - ) - elif isinstance(self.online_store, str): - self.online_store = get_online_config_from_type(self.online_store)() - - if isinstance(self.offline_store, Dict): - self.offline_store = get_offline_config_from_type( - self.offline_store["type"] - )(**self.offline_store) - elif isinstance(self.offline_store, str): - self.offline_store = get_offline_config_from_type(self.offline_store)() + self._offline_store = None + if "offline_store" 
in data: + self._offline_config = data["offline_store"] + else: + if data["provider"] == "local": + self._offline_config = "file" + elif data["provider"] == "gcp": + self._offline_config = "bigquery" + elif data["provider"] == "aws": + self._offline_config = "redshift" + + self._online_store = None + if "online_store" in data: + self._online_config = data["online_store"] + else: + if data["provider"] == "local": + self._online_config = "sqlite" + elif data["provider"] == "gcp": + self._online_config = "datastore" + elif data["provider"] == "aws": + self._online_config = "dynamodb" if isinstance(self.feature_server, Dict): self.feature_server = get_feature_server_config_from_type( @@ -148,6 +160,35 @@ def get_registry_config(self): else: return self.registry + @property + def offline_store(self): + if not self._offline_store: + if isinstance(self._offline_config, Dict): + self._offline_store = get_offline_config_from_type( + self._offline_config["type"] + )(**self._offline_config) + elif isinstance(self._offline_config, str): + self._offline_store = get_offline_config_from_type( + self._offline_config + )() + elif self._offline_config: + self._offline_store = self._offline_config + return self._offline_store + + @property + def online_store(self): + if not self._online_store: + if isinstance(self._online_config, Dict): + self._online_store = get_online_config_from_type( + self._online_config["type"] + )(**self._online_config) + elif isinstance(self._online_config, str): + self._online_store = get_online_config_from_type(self._online_config)() + elif self._online_config: + self._online_store = self._online_config + + return self._online_store + @root_validator(pre=True) @log_exceptions def _validate_online_store_config(cls, values): @@ -301,6 +342,9 @@ def write_to_path(self, repo_path: Path): sort_keys=False, ) + class Config: + allow_population_by_field_name = True + class FeastConfigError(Exception): def __init__(self, error_message, config_path): diff --git 
a/sdk/python/feast/repo_contents.py b/sdk/python/feast/repo_contents.py index b59adc34db4..4d7c92f2a6d 100644 --- a/sdk/python/feast/repo_contents.py +++ b/sdk/python/feast/repo_contents.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from typing import NamedTuple, Set +from typing import List, NamedTuple from feast.data_source import DataSource from feast.entity import Entity @@ -27,12 +27,12 @@ class RepoContents(NamedTuple): Represents the objects in a Feast feature repo. """ - data_sources: Set[DataSource] - feature_views: Set[FeatureView] - on_demand_feature_views: Set[OnDemandFeatureView] - request_feature_views: Set[RequestFeatureView] - entities: Set[Entity] - feature_services: Set[FeatureService] + data_sources: List[DataSource] + feature_views: List[FeatureView] + on_demand_feature_views: List[OnDemandFeatureView] + request_feature_views: List[RequestFeatureView] + entities: List[Entity] + feature_services: List[FeatureService] def to_registry_proto(self) -> RegistryProto: registry_proto = RegistryProto() diff --git a/sdk/python/feast/repo_operations.py b/sdk/python/feast/repo_operations.py index 5e223aac8af..0e82fdf47ad 100644 --- a/sdk/python/feast/repo_operations.py +++ b/sdk/python/feast/repo_operations.py @@ -28,9 +28,9 @@ from feast.usage import log_exceptions_and_usage -def py_path_to_module(path: Path, repo_root: Path) -> str: +def py_path_to_module(path: Path) -> str: return ( - str(path.relative_to(repo_root))[: -len(".py")] + str(path.relative_to(os.getcwd()))[: -len(".py")] .replace("./", "") .replace("/", ".") ) @@ -94,36 +94,61 @@ def get_repo_files(repo_root: Path) -> List[Path]: def parse_repo(repo_root: Path) -> RepoContents: - """Collect feature table definitions from feature repo""" + """ + Collects unique Feast object definitions from the given feature repo. 
+ + Specifically, if an object foo has already been added, bar will still be added if + (bar == foo), but not if (bar is foo). This ensures that import statements will + not result in duplicates, but defining two equal objects will. + """ res = RepoContents( - data_sources=set(), - entities=set(), - feature_views=set(), - feature_services=set(), - on_demand_feature_views=set(), - request_feature_views=set(), + data_sources=[], + entities=[], + feature_views=[], + feature_services=[], + on_demand_feature_views=[], + request_feature_views=[], ) + data_sources_set = set() for repo_file in get_repo_files(repo_root): - module_path = py_path_to_module(repo_file, repo_root) + module_path = py_path_to_module(repo_file) module = importlib.import_module(module_path) for attr_name in dir(module): obj = getattr(module, attr_name) - if isinstance(obj, DataSource): - res.data_sources.add(obj) - if isinstance(obj, FeatureView): - res.feature_views.add(obj) - if isinstance(obj.stream_source, PushSource): - res.data_sources.add(obj.stream_source.batch_source) - elif isinstance(obj, Entity): - res.entities.add(obj) - elif isinstance(obj, FeatureService): - res.feature_services.add(obj) - elif isinstance(obj, OnDemandFeatureView): - res.on_demand_feature_views.add(obj) - elif isinstance(obj, RequestFeatureView): - res.request_feature_views.add(obj) - res.entities.add(DUMMY_ENTITY) + if isinstance(obj, DataSource) and not any( + (obj is ds) for ds in res.data_sources + ): + res.data_sources.append(obj) + data_sources_set.add(obj) + if isinstance(obj, FeatureView) and not any( + (obj is fv) for fv in res.feature_views + ): + res.feature_views.append(obj) + if isinstance(obj.stream_source, PushSource) and not any( + (obj is ds) for ds in res.data_sources + ): + push_source_dep = obj.stream_source.batch_source + # Don't add if the push source's batch source is a duplicate of an existing batch source + if push_source_dep not in data_sources_set: + res.data_sources.append(push_source_dep) 
+ elif isinstance(obj, Entity) and not any( + (obj is entity) for entity in res.entities + ): + res.entities.append(obj) + elif isinstance(obj, FeatureService) and not any( + (obj is fs) for fs in res.feature_services + ): + res.feature_services.append(obj) + elif isinstance(obj, OnDemandFeatureView) and not any( + (obj is odfv) for odfv in res.on_demand_feature_views + ): + res.on_demand_feature_views.append(obj) + elif isinstance(obj, RequestFeatureView) and not any( + (obj is rfv) for rfv in res.request_feature_views + ): + res.request_feature_views.append(obj) + res.entities.append(DUMMY_ENTITY) return res @@ -261,14 +286,13 @@ def teardown(repo_config: RepoConfig, repo_path: Path): @log_exceptions_and_usage -def registry_dump(repo_config: RepoConfig, repo_path: Path): +def registry_dump(repo_config: RepoConfig, repo_path: Path) -> str: """For debugging only: output contents of the metadata registry""" registry_config = repo_config.get_registry_config() project = repo_config.project registry = Registry(registry_config=registry_config, repo_path=repo_path) registry_dict = registry.to_dict(project=project) - - click.echo(json.dumps(registry_dict, indent=2, sort_keys=True)) + return json.dumps(registry_dict, indent=2, sort_keys=True) def cli_check_repo(repo_path: Path): diff --git a/sdk/python/feast/saved_dataset.py b/sdk/python/feast/saved_dataset.py index 7a05a9ca221..aead7fe8eff 100644 --- a/sdk/python/feast/saved_dataset.py +++ b/sdk/python/feast/saved_dataset.py @@ -92,17 +92,23 @@ def __str__(self): return str(MessageToJson(self.to_proto())) def __hash__(self): - return hash((id(self), self.name)) + return hash((self.name)) def __eq__(self, other): if not isinstance(other, SavedDataset): raise TypeError( - "Comparisons should only involve FeatureService class objects." + "Comparisons should only involve SavedDataset class objects." 
) - if self.name != other.name: - return False - if sorted(self.features) != sorted(other.features): + if ( + self.name != other.name + or sorted(self.features) != sorted(other.features) + or sorted(self.join_keys) != sorted(other.join_keys) + or self.storage != other.storage + or self.full_feature_names != other.full_feature_names + or self.tags != other.tags + or self.feature_service_name != other.feature_service_name + ): return False return True diff --git a/sdk/python/feast/stream_feature_view.py b/sdk/python/feast/stream_feature_view.py new file mode 100644 index 00000000000..1c51b94a7cf --- /dev/null +++ b/sdk/python/feast/stream_feature_view.py @@ -0,0 +1,54 @@ +from datetime import timedelta +from typing import Dict, List, Optional, Union + +from feast.data_source import DataSource +from feast.entity import Entity +from feast.feature_view import FeatureView +from feast.field import Field +from feast.protos.feast.core.DataSource_pb2 import DataSource as DataSourceProto + +SUPPORTED_STREAM_SOURCES = { + "KafkaSource", + "KinesisSource", +} + + +class StreamFeatureView(FeatureView): + def __init__( + self, + *, + name: Optional[str] = None, + entities: Optional[Union[List[Entity], List[str]]] = None, + ttl: Optional[timedelta] = None, + tags: Optional[Dict[str, str]] = None, + online: bool = True, + description: str = "", + owner: str = "", + schema: Optional[List[Field]] = None, + source: Optional[DataSource] = None, + ): + + if source is None: + raise ValueError("Feature views need a source specified") + if ( + type(source).__name__ not in SUPPORTED_STREAM_SOURCES + and source.to_proto().type != DataSourceProto.SourceType.CUSTOM_SOURCE + ): + raise ValueError( + f"Stream feature views need a stream source, expected one of {SUPPORTED_STREAM_SOURCES} " + f"or CUSTOM_SOURCE, got {type(source).__name__}: {source.name} instead " + ) + + super().__init__( + name=name, + entities=entities, + ttl=ttl, + batch_source=None, + stream_source=None, + tags=tags, + 
online=online, + description=description, + owner=owner, + schema=schema, + source=source, + ) diff --git a/sdk/python/feast/templates/aws/bootstrap.py b/sdk/python/feast/templates/aws/bootstrap.py index 80c2480d254..456c6e9b709 100644 --- a/sdk/python/feast/templates/aws/bootstrap.py +++ b/sdk/python/feast/templates/aws/bootstrap.py @@ -61,9 +61,8 @@ def bootstrap(): replace_str_in_file(driver_file, "%REDSHIFT_DATABASE%", database) replace_str_in_file(config_file, "%REDSHIFT_USER%", user) replace_str_in_file( - driver_file, config_file, "%REDSHIFT_S3_STAGING_LOCATION%", s3_staging_location + config_file, "%REDSHIFT_S3_STAGING_LOCATION%", s3_staging_location ) - replace_str_in_file(config_file,) replace_str_in_file(config_file, "%REDSHIFT_IAM_ROLE%", iam_role) diff --git a/sdk/python/feast/templates/aws/driver_repo.py b/sdk/python/feast/templates/aws/driver_repo.py index 19ba44807b9..8ebe0b6e927 100644 --- a/sdk/python/feast/templates/aws/driver_repo.py +++ b/sdk/python/feast/templates/aws/driver_repo.py @@ -1,6 +1,6 @@ from datetime import timedelta -from feast import Entity, FeatureView, Field, RedshiftSource, ValueType +from feast import Entity, FeatureService, FeatureView, Field, RedshiftSource, ValueType from feast.types import Float32, Int64 # Define an entity for the driver. Entities can be thought of as primary keys used to @@ -8,11 +8,11 @@ # construction of feature vectors driver = Entity( # Name of the entity. Must be unique within a project - name="driver_id", - # The join key of an entity describes the storage level field/column on which - # features can be looked up. The join key is also used to join feature + name="driver", + # The join keys of an entity describe the storage level field/column on which + # features can be looked up. 
The join keys are also used to join feature # tables/views when building feature vectors - join_key="driver_id", + join_keys=["driver_id"], # The storage level type for an entity value_type=ValueType.INT64, ) @@ -41,7 +41,7 @@ # The list of entities specifies the keys required for joining or looking # up features from this feature view. The reference provided in this field # correspond to the name of a defined entity (or entities) - entities=["driver_id"], + entities=["driver"], # The timedelta is the maximum age that each feature value may have # relative to its lookup time. For historical features (used in training), # TTL is relative to each timestamp provided in the entity dataframe. @@ -60,8 +60,10 @@ # Batch sources are used to find feature values. In the case of this feature # view we will query a source table on Redshift for driver statistics # features - batch_source=driver_stats_source, + source=driver_stats_source, # Tags are user defined key/value pairs that are attached to each # feature view tags={"team": "driver_performance"}, ) + +driver_stats_fs = FeatureService(name="driver_activity", features=[driver_stats_fv]) diff --git a/sdk/python/feast/templates/gcp/driver_repo.py b/sdk/python/feast/templates/gcp/driver_repo.py index e494e021f2f..a4517516b56 100644 --- a/sdk/python/feast/templates/gcp/driver_repo.py +++ b/sdk/python/feast/templates/gcp/driver_repo.py @@ -1,6 +1,6 @@ from datetime import timedelta -from feast import BigQuerySource, Entity, FeatureView, Field, ValueType +from feast import BigQuerySource, Entity, FeatureService, FeatureView, Field, ValueType from feast.types import Float32, Int64 # Define an entity for the driver. Entities can be thought of as primary keys used to @@ -8,11 +8,11 @@ # construction of feature vectors driver = Entity( # Name of the entity. Must be unique within a project - name="driver_id", - # The join key of an entity describes the storage level field/column on which - # features can be looked up. 
The join key is also used to join feature + name="driver", + # The join keys of an entity describe the storage level field/column on which + # features can be looked up. The join keys are also used to join feature # tables/views when building feature vectors - join_key="driver_id", + join_keys=["driver_id"], # The storage level type for an entity value_type=ValueType.INT64, ) @@ -39,7 +39,7 @@ # The list of entities specifies the keys required for joining or looking # up features from this feature view. The reference provided in this field # correspond to the name of a defined entity (or entities) - entities=["driver_id"], + entities=["driver"], # The timedelta is the maximum age that each feature value may have # relative to its lookup time. For historical features (used in training), # TTL is relative to each timestamp provided in the entity dataframe. @@ -58,8 +58,10 @@ # Batch sources are used to find feature values. In the case of this feature # view we will query a source table on BigQuery for driver statistics # features - batch_source=driver_stats_source, + source=driver_stats_source, # Tags are user defined key/value pairs that are attached to each # feature view tags={"team": "driver_performance"}, ) + +driver_stats_fs = FeatureService(name="driver_activity", features=[driver_stats_fv]) diff --git a/sdk/python/feast/templates/hbase/__init__.py b/sdk/python/feast/templates/hbase/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/feast/templates/hbase/bootstrap.py b/sdk/python/feast/templates/hbase/bootstrap.py new file mode 100644 index 00000000000..4013ca5a8d6 --- /dev/null +++ b/sdk/python/feast/templates/hbase/bootstrap.py @@ -0,0 +1,35 @@ +def bootstrap(): + # Bootstrap() will automatically be called from the init_repo() during `feast init` + + import pathlib + from datetime import datetime, timedelta + + from feast.driver_test_data import create_driver_hourly_stats_df + + repo_path = 
pathlib.Path(__file__).parent.absolute() + data_path = repo_path / "data" + data_path.mkdir(exist_ok=True) + + end_date = datetime.now().replace(microsecond=0, second=0, minute=0) + start_date = end_date - timedelta(days=15) + + driver_entities = [1001, 1002, 1003, 1004, 1005] + driver_df = create_driver_hourly_stats_df(driver_entities, start_date, end_date) + + driver_stats_path = data_path / "driver_stats.parquet" + driver_df.to_parquet(path=str(driver_stats_path), allow_truncated_timestamps=True) + + example_py_file = repo_path / "example.py" + replace_str_in_file(example_py_file, "%PARQUET_PATH%", str(driver_stats_path)) + + +def replace_str_in_file(file_path, match_str, sub_str): + with open(file_path, "r") as f: + contents = f.read() + contents = contents.replace(match_str, sub_str) + with open(file_path, "wt") as f: + f.write(contents) + + +if __name__ == "__main__": + bootstrap() diff --git a/sdk/python/feast/templates/hbase/example.py b/sdk/python/feast/templates/hbase/example.py new file mode 100644 index 00000000000..1d441e0e995 --- /dev/null +++ b/sdk/python/feast/templates/hbase/example.py @@ -0,0 +1,36 @@ +# This is an example feature definition file + +from datetime import timedelta + +from feast import Entity, FeatureView, Field, FileSource, ValueType +from feast.types import Float32, Int64 + +# Read data from parquet files. Parquet is convenient for local development mode. For +# production, you can use your favorite DWH, such as BigQuery. See Feast documentation +# for more info. +driver_hourly_stats = FileSource( + path="%PARQUET_PATH%", + timestamp_field="event_timestamp", + created_timestamp_column="created", +) + +# Define an entity for the driver. You can think of entity as a primary key used to +# fetch features. +driver = Entity(name="driver", join_keys=["driver_id"], value_type=ValueType.INT64,) + +# Our parquet files contain sample data that includes a driver_id column, timestamps and +# three feature column. 
Here we define a Feature View that will allow us to serve this +# data to our model online. +driver_hourly_stats_view = FeatureView( + name="driver_hourly_stats", + entities=["driver"], + ttl=timedelta(days=1), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + online=True, + source=driver_hourly_stats, + tags={}, +) diff --git a/sdk/python/feast/templates/hbase/feature_store.yaml b/sdk/python/feast/templates/hbase/feature_store.yaml new file mode 100644 index 00000000000..83ce237b715 --- /dev/null +++ b/sdk/python/feast/templates/hbase/feature_store.yaml @@ -0,0 +1,7 @@ +project: my_project +registry: data/registry.db +provider: local +online_store: + type: hbase + host: 127.0.0.1 + port: 9090 diff --git a/sdk/python/feast/templates/local/example.py b/sdk/python/feast/templates/local/example.py index 076a331f91a..7633947e6e4 100644 --- a/sdk/python/feast/templates/local/example.py +++ b/sdk/python/feast/templates/local/example.py @@ -2,7 +2,7 @@ from datetime import timedelta -from feast import Entity, FeatureView, Field, FileSource, ValueType +from feast import Entity, FeatureService, FeatureView, Field, FileSource, ValueType from feast.types import Float32, Int64 # Read data from parquet files. Parquet is convenient for local development mode. For @@ -16,14 +16,14 @@ # Define an entity for the driver. You can think of entity as a primary key used to # fetch features. -driver = Entity(name="driver_id", value_type=ValueType.INT64, description="driver id",) +driver = Entity(name="driver", join_keys=["driver_id"], value_type=ValueType.INT64,) # Our parquet files contain sample data that includes a driver_id column, timestamps and # three feature column. Here we define a Feature View that will allow us to serve this # data to our model online. 
driver_hourly_stats_view = FeatureView( name="driver_hourly_stats", - entities=["driver_id"], + entities=["driver"], ttl=timedelta(days=1), schema=[ Field(name="conv_rate", dtype=Float32), @@ -31,6 +31,10 @@ Field(name="avg_daily_trips", dtype=Int64), ], online=True, - batch_source=driver_hourly_stats, + source=driver_hourly_stats, tags={}, ) + +driver_stats_fs = FeatureService( + name="driver_activity", features=[driver_hourly_stats_view] +) diff --git a/sdk/python/feast/templates/postgres/__init__.py b/sdk/python/feast/templates/postgres/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/feast/templates/postgres/bootstrap.py b/sdk/python/feast/templates/postgres/bootstrap.py new file mode 100644 index 00000000000..078d7cdc689 --- /dev/null +++ b/sdk/python/feast/templates/postgres/bootstrap.py @@ -0,0 +1,78 @@ +import click +import psycopg2 + +from feast.infra.utils.postgres.connection_utils import df_to_postgres_table +from feast.infra.utils.postgres.postgres_config import PostgreSQLConfig + + +def bootstrap(): + # Bootstrap() will automatically be called from the init_repo() during `feast init` + + import pathlib + from datetime import datetime, timedelta + + from feast.driver_test_data import create_driver_hourly_stats_df + + repo_path = pathlib.Path(__file__).parent.absolute() + config_file = repo_path / "feature_store.yaml" + + end_date = datetime.now().replace(microsecond=0, second=0, minute=0) + start_date = end_date - timedelta(days=15) + + driver_entities = [1001, 1002, 1003, 1004, 1005] + driver_df = create_driver_hourly_stats_df(driver_entities, start_date, end_date) + + postgres_host = click.prompt("Postgres host", default="localhost") + postgres_port = click.prompt("Postgres port", default="5432") + postgres_database = click.prompt("Postgres DB name", default="postgres") + postgres_schema = click.prompt("Postgres schema", default="public") + postgres_user = click.prompt("Postgres user") + postgres_password = 
click.prompt("Postgres password", hide_input=True) + + if click.confirm( + 'Should I upload example data to Postgres (overwriting "feast_driver_hourly_stats" table)?', + default=True, + ): + db_connection = psycopg2.connect( + dbname=postgres_database, + host=postgres_host, + port=int(postgres_port), + user=postgres_user, + password=postgres_password, + options=f"-c search_path={postgres_schema}", + ) + + with db_connection as conn, conn.cursor() as cur: + cur.execute('DROP TABLE IF EXISTS "feast_driver_hourly_stats"') + + df_to_postgres_table( + config=PostgreSQLConfig( + host=postgres_host, + port=int(postgres_port), + database=postgres_database, + db_schema=postgres_schema, + user=postgres_user, + password=postgres_password, + ), + df=driver_df, + table_name="feast_driver_hourly_stats", + ) + + replace_str_in_file(config_file, "DB_HOST", postgres_host) + replace_str_in_file(config_file, "DB_PORT", postgres_port) + replace_str_in_file(config_file, "DB_NAME", postgres_database) + replace_str_in_file(config_file, "DB_SCHEMA", postgres_schema) + replace_str_in_file(config_file, "DB_USERNAME", postgres_user) + replace_str_in_file(config_file, "DB_PASSWORD", postgres_password) + + +def replace_str_in_file(file_path, match_str, sub_str): + with open(file_path, "r") as f: + contents = f.read() + contents = contents.replace(match_str, sub_str) + with open(file_path, "wt") as f: + f.write(contents) + + +if __name__ == "__main__": + bootstrap() diff --git a/sdk/python/feast/templates/postgres/driver_repo.py b/sdk/python/feast/templates/postgres/driver_repo.py new file mode 100644 index 00000000000..34bc0022e23 --- /dev/null +++ b/sdk/python/feast/templates/postgres/driver_repo.py @@ -0,0 +1,29 @@ +from datetime import timedelta + +from feast import Entity, FeatureView, Field +from feast.infra.offline_stores.contrib.postgres_offline_store.postgres_source import ( + PostgreSQLSource, +) +from feast.types import Float32, Int64 + +driver = Entity(name="driver_id", 
join_keys=["driver_id"],) + + +driver_stats_source = PostgreSQLSource( + name="feast_driver_hourly_stats", + query="SELECT * FROM feast_driver_hourly_stats", + timestamp_field="event_timestamp", + created_timestamp_column="created", +) + +driver_stats_fv = FeatureView( + name="driver_hourly_stats", + entities=["driver_id"], + ttl=timedelta(weeks=52), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + source=driver_stats_source, +) diff --git a/sdk/python/feast/templates/postgres/feature_store.yaml b/sdk/python/feast/templates/postgres/feature_store.yaml new file mode 100644 index 00000000000..53b86b70641 --- /dev/null +++ b/sdk/python/feast/templates/postgres/feature_store.yaml @@ -0,0 +1,27 @@ +project: my_project +provider: local +registry: + registry_store_type: PostgreSQLRegistryStore + path: feast_registry + host: DB_HOST + port: DB_PORT + database: DB_NAME + db_schema: DB_SCHEMA + user: DB_USERNAME + password: DB_PASSWORD +online_store: + type: postgres + host: DB_HOST + port: DB_PORT + database: DB_NAME + db_schema: DB_SCHEMA + user: DB_USERNAME + password: DB_PASSWORD +offline_store: + type: postgres + host: DB_HOST + port: DB_PORT + database: DB_NAME + db_schema: DB_SCHEMA + user: DB_USERNAME + password: DB_PASSWORD diff --git a/sdk/python/feast/templates/postgres/test.py b/sdk/python/feast/templates/postgres/test.py new file mode 100644 index 00000000000..81ac2996985 --- /dev/null +++ b/sdk/python/feast/templates/postgres/test.py @@ -0,0 +1,63 @@ +from datetime import datetime, timedelta + +import pandas as pd +from driver_repo import driver, driver_stats_fv + +from feast import FeatureStore + + +def main(): + pd.set_option("display.max_columns", None) + pd.set_option("display.width", 1000) + + # Load the feature store from the current path + fs = FeatureStore(repo_path=".") + + print("Deploying feature store to Postgres...") + fs.apply([driver, 
driver_stats_fv]) + + # Select features + features = ["driver_hourly_stats:conv_rate", "driver_hourly_stats:acc_rate"] + + # Create an entity dataframe. This is the dataframe that will be enriched with historical features + entity_df = pd.DataFrame( + { + "event_timestamp": [ + pd.Timestamp(dt, unit="ms", tz="UTC").round("ms") + for dt in pd.date_range( + start=datetime.now() - timedelta(days=3), + end=datetime.now(), + periods=3, + ) + ], + "driver_id": [1001, 1002, 1003], + } + ) + + print("Retrieving training data...") + + training_df = fs.get_historical_features( + features=features, entity_df=entity_df + ).to_df() + + print() + print(training_df) + + print() + print("Loading features into the online store...") + fs.materialize_incremental(end_date=datetime.now()) + + print() + print("Retrieving online features...") + + # Retrieve features from the online store + online_features = fs.get_online_features( + features=features, entity_rows=[{"driver_id": 1001}, {"driver_id": 1002}], + ).to_dict() + + print() + print(pd.DataFrame.from_dict(online_features)) + + +if __name__ == "__main__": + main() diff --git a/sdk/python/feast/templates/snowflake/driver_repo.py b/sdk/python/feast/templates/snowflake/driver_repo.py index c14e4c38cc9..ceaf0ba8de8 100644 --- a/sdk/python/feast/templates/snowflake/driver_repo.py +++ b/sdk/python/feast/templates/snowflake/driver_repo.py @@ -2,7 +2,7 @@ import yaml -from feast import Entity, FeatureView, Field, SnowflakeSource +from feast import Entity, FeatureService, FeatureView, Field, SnowflakeSource from feast.types import Float32, Int64 # Define an entity for the driver. Entities can be thought of as primary keys used to @@ -10,11 +10,11 @@ # construction of feature vectors driver = Entity( # Name of the entity. Must be unique within a project - name="driver_id", - # The join key of an entity describes the storage level field/column on which - # features can be looked up. 
The join key is also used to join feature + name="driver", + # The join keys of an entity describe the storage level field/column on which + # features can be looked up. The join keys are also used to join feature # tables/views when building feature vectors - join_key="driver_id", + join_keys=["driver_id"], ) # Indicates a data source from which feature values can be retrieved. Sources are queried when building training @@ -43,7 +43,7 @@ # The list of entities specifies the keys required for joining or looking # up features from this feature view. The reference provided in this field # correspond to the name of a defined entity (or entities) - entities=["driver_id"], + entities=["driver"], # The timedelta is the maximum age that each feature value may have # relative to its lookup time. For historical features (used in training), # TTL is relative to each timestamp provided in the entity dataframe. @@ -64,3 +64,5 @@ # features batch_source=driver_stats_source, ) + +driver_stats_fs = FeatureService(name="driver_activity", features=[driver_stats_fv]) diff --git a/sdk/python/feast/templates/spark/example.py b/sdk/python/feast/templates/spark/example.py index c8c1c1257eb..da334dd83ca 100644 --- a/sdk/python/feast/templates/spark/example.py +++ b/sdk/python/feast/templates/spark/example.py @@ -5,7 +5,7 @@ from datetime import timedelta from pathlib import Path -from feast import Entity, FeatureView, Field, ValueType +from feast import Entity, FeatureService, FeatureView, Field, ValueType from feast.infra.offline_stores.contrib.spark_offline_store.spark_source import ( SparkSource, ) @@ -16,9 +16,9 @@ # Entity definitions -driver = Entity(name="driver_id", value_type=ValueType.INT64, description="driver id",) +driver = Entity(name="driver", value_type=ValueType.INT64, description="driver id",) customer = Entity( - name="customer_id", value_type=ValueType.INT64, description="customer id", + name="customer", value_type=ValueType.INT64, description="customer id", ) # 
Sources @@ -40,7 +40,7 @@ # Feature Views driver_hourly_stats_view = FeatureView( name="driver_hourly_stats", - entities=["driver_id"], + entities=["driver"], ttl=timedelta(days=7), schema=[ Field(name="conv_rate", dtype=Float32), @@ -48,12 +48,12 @@ Field(name="avg_daily_trips", dtype=Int64), ], online=True, - batch_source=driver_hourly_stats, + source=driver_hourly_stats, tags={}, ) customer_daily_profile_view = FeatureView( name="customer_daily_profile", - entities=["customer_id"], + entities=["customer"], ttl=timedelta(days=7), schema=[ Field(name="current_balance", dtype=Float32), @@ -61,6 +61,11 @@ Field(name="lifetime_trip_count", dtype=Int64), ], online=True, - batch_source=customer_daily_profile, + source=customer_daily_profile, tags={}, ) + +driver_stats_fs = FeatureService( + name="driver_activity", + features=[driver_hourly_stats_view, customer_daily_profile_view], +) diff --git a/sdk/python/feast/type_map.py b/sdk/python/feast/type_map.py index 9798faf508b..d22c51d5963 100644 --- a/sdk/python/feast/type_map.py +++ b/sdk/python/feast/type_map.py @@ -529,7 +529,8 @@ def snowflake_python_type_to_feast_value_type( "uint8": ValueType.INT32, "int8": ValueType.INT32, "datetime64[ns]": ValueType.UNIX_TIMESTAMP, - "object": ValueType.UNKNOWN, + "object": ValueType.STRING, + "bool": ValueType.BOOL, } return type_map[snowflake_python_type_as_str.lower()] @@ -638,3 +639,143 @@ def spark_schema_to_np_dtypes(dtypes: List[Tuple[str, str]]) -> Iterator[np.dtyp ) return (type_map[t] for _, t in dtypes) + + +def arrow_to_pg_type(t_str: str) -> str: + try: + if t_str.startswith("timestamp") or t_str.startswith("datetime"): + return "timestamptz" if "tz=" in t_str else "timestamp" + return { + "null": "null", + "bool": "boolean", + "int8": "tinyint", + "int16": "smallint", + "int32": "int", + "int64": "bigint", + "list": "int[]", + "list": "bigint[]", + "list": "boolean[]", + "list": "double precision[]", + "list": "timestamp[]", + "uint8": "smallint", + "uint16": "int", 
+ "uint32": "bigint", + "uint64": "bigint", + "float": "float", + "double": "double precision", + "binary": "binary", + "string": "text", + }[t_str] + except KeyError: + raise ValueError(f"Unsupported type: {t_str}") + + +def pg_type_to_feast_value_type(type_str: str) -> ValueType: + type_map: Dict[str, ValueType] = { + "boolean": ValueType.BOOL, + "bytea": ValueType.BYTES, + "char": ValueType.STRING, + "bigint": ValueType.INT64, + "smallint": ValueType.INT32, + "integer": ValueType.INT32, + "real": ValueType.DOUBLE, + "double precision": ValueType.DOUBLE, + "boolean[]": ValueType.BOOL_LIST, + "bytea[]": ValueType.BYTES_LIST, + "char[]": ValueType.STRING_LIST, + "smallint[]": ValueType.INT32_LIST, + "integer[]": ValueType.INT32_LIST, + "text": ValueType.STRING, + "text[]": ValueType.STRING_LIST, + "character[]": ValueType.STRING_LIST, + "bigint[]": ValueType.INT64_LIST, + "real[]": ValueType.DOUBLE_LIST, + "double precision[]": ValueType.DOUBLE_LIST, + "character": ValueType.STRING, + "character varying": ValueType.STRING, + "date": ValueType.UNIX_TIMESTAMP, + "time without time zone": ValueType.UNIX_TIMESTAMP, + "timestamp without time zone": ValueType.UNIX_TIMESTAMP, + "timestamp without time zone[]": ValueType.UNIX_TIMESTAMP_LIST, + "date[]": ValueType.UNIX_TIMESTAMP_LIST, + "time without time zone[]": ValueType.UNIX_TIMESTAMP_LIST, + "timestamp with time zone": ValueType.UNIX_TIMESTAMP, + "timestamp with time zone[]": ValueType.UNIX_TIMESTAMP_LIST, + "numeric[]": ValueType.DOUBLE_LIST, + "numeric": ValueType.DOUBLE, + "uuid": ValueType.STRING, + "uuid[]": ValueType.STRING_LIST, + } + value = ( + type_map[type_str.lower()] + if type_str.lower() in type_map + else ValueType.UNKNOWN + ) + if value == ValueType.UNKNOWN: + print("unknown type:", type_str) + return value + + +def feast_value_type_to_pa(feast_type: ValueType) -> pyarrow.DataType: + type_map = { + ValueType.INT32: pyarrow.int32(), + ValueType.INT64: pyarrow.int64(), + ValueType.DOUBLE: 
pyarrow.float64(), + ValueType.FLOAT: pyarrow.float32(), + ValueType.STRING: pyarrow.string(), + ValueType.BYTES: pyarrow.binary(), + ValueType.BOOL: pyarrow.bool_(), + ValueType.UNIX_TIMESTAMP: pyarrow.timestamp("us"), + ValueType.INT32_LIST: pyarrow.list_(pyarrow.int32()), + ValueType.INT64_LIST: pyarrow.list_(pyarrow.int64()), + ValueType.DOUBLE_LIST: pyarrow.list_(pyarrow.float64()), + ValueType.FLOAT_LIST: pyarrow.list_(pyarrow.float32()), + ValueType.STRING_LIST: pyarrow.list_(pyarrow.string()), + ValueType.BYTES_LIST: pyarrow.list_(pyarrow.binary()), + ValueType.BOOL_LIST: pyarrow.list_(pyarrow.bool_()), + ValueType.UNIX_TIMESTAMP_LIST: pyarrow.list_(pyarrow.timestamp("us")), + ValueType.NULL: pyarrow.null(), + } + return type_map[feast_type] + + +def pg_type_code_to_pg_type(code: int) -> str: + return { + 16: "boolean", + 17: "bytea", + 20: "bigint", + 21: "smallint", + 23: "integer", + 25: "text", + 700: "real", + 701: "double precision", + 1000: "boolean[]", + 1001: "bytea[]", + 1005: "smallint[]", + 1007: "integer[]", + 1009: "text[]", + 1014: "character[]", + 1016: "bigint[]", + 1021: "real[]", + 1022: "double precision[]", + 1042: "character", + 1043: "character varying", + 1082: "date", + 1083: "time without time zone", + 1114: "timestamp without time zone", + 1115: "timestamp without time zone[]", + 1182: "date[]", + 1183: "time without time zone[]", + 1184: "timestamp with time zone", + 1185: "timestamp with time zone[]", + 1231: "numeric[]", + 1700: "numeric", + 2950: "uuid", + 2951: "uuid[]", + }[code] + + +def pg_type_code_to_arrow(code: int) -> str: + return feast_value_type_to_pa( + pg_type_to_feast_value_type(pg_type_code_to_pg_type(code)) + ) diff --git a/sdk/python/feast/ui/README.md b/sdk/python/feast/ui/README.md new file mode 100644 index 00000000000..ea5243ca4c6 --- /dev/null +++ b/sdk/python/feast/ui/README.md @@ -0,0 +1,33 @@ +# Example Feast UI App + +This is an example React App that imports the Feast UI module and relies on a 
"/projects-list" endpoint to get projects. + +See the module import in `src/index.js`. The main change this implements on top of a vanilla create-react-app is adding: + +```tsx +import ReactDOM from "react-dom"; +import FeastUI from "@feast-dev/feast-ui"; +import "@feast-dev/feast-ui/dist/feast-ui.css"; + +ReactDOM.render( + + { + return res.json(); + }) + }} + /> + , + document.getElementById("root") +); +``` + +It is used by the `feast ui` command to scaffold a local UI server. The feast python package bundles in resources produced from `npm run build --omit=dev + + +**Note**: yarn start will not work on this because of the above dependency. diff --git a/sdk/python/feast/ui/__init__.py b/sdk/python/feast/ui/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/feast/ui/package.json b/sdk/python/feast/ui/package.json new file mode 100644 index 00000000000..556637aaae2 --- /dev/null +++ b/sdk/python/feast/ui/package.json @@ -0,0 +1,53 @@ +{ + "name": "example-feast-ui", + "version": "0.1.0", + "private": true, + "dependencies": { + "@elastic/datemath": "^5.0.3", + "@elastic/eui": "^57.0.0", + "@emotion/react": "^11.9.0", + "@feast-dev/feast-ui": "^0.20.4", + "@testing-library/jest-dom": "^5.16.4", + "@testing-library/react": "^13.2.0", + "@testing-library/user-event": "^13.5.0", + "@types/d3": "^7.1.0", + "d3": "^7.4.4", + "inter-ui": "^3.19.3", + "moment": "^2.29.3", + "prop-types": "^15.8.1", + "query-string": "^7.1.1", + "react": "^18.1.0", + "react-dom": "^18.1.0", + "react-query": "^3.39.0", + "react-router-dom": "^6.3.0", + "react-scripts": "5.0.1", + "typescript": "^4.6.4", + "use-query-params": "^1.2.3", + "web-vitals": "^2.1.4", + "zod": "^3.15.1" + }, + "scripts": { + "start": "react-scripts start", + "build": "react-scripts build", + "test": "react-scripts test", + "eject": "react-scripts eject" + }, + "eslintConfig": { + "extends": [ + "react-app", + "react-app/jest" + ] + }, + "browserslist": { + "production": [ + 
">0.2%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + } +} diff --git a/sdk/python/feast/ui/public/favicon.ico b/sdk/python/feast/ui/public/favicon.ico new file mode 100644 index 00000000000..a11777cc471 Binary files /dev/null and b/sdk/python/feast/ui/public/favicon.ico differ diff --git a/sdk/python/feast/ui/public/feast-favicon-300x300.png b/sdk/python/feast/ui/public/feast-favicon-300x300.png new file mode 100644 index 00000000000..e9f43491f6b Binary files /dev/null and b/sdk/python/feast/ui/public/feast-favicon-300x300.png differ diff --git a/sdk/python/feast/ui/public/feast-favicon-36x36.png b/sdk/python/feast/ui/public/feast-favicon-36x36.png new file mode 100644 index 00000000000..ed39c60c1bf Binary files /dev/null and b/sdk/python/feast/ui/public/feast-favicon-36x36.png differ diff --git a/sdk/python/feast/ui/public/index.html b/sdk/python/feast/ui/public/index.html new file mode 100644 index 00000000000..9571ac3a7d9 --- /dev/null +++ b/sdk/python/feast/ui/public/index.html @@ -0,0 +1,46 @@ + + + + + + + + + + + + + Feast Feature Store + + + +
+ + + diff --git a/sdk/python/feast/ui/public/manifest.json b/sdk/python/feast/ui/public/manifest.json new file mode 100644 index 00000000000..f9608db7384 --- /dev/null +++ b/sdk/python/feast/ui/public/manifest.json @@ -0,0 +1,25 @@ +{ + "short_name": "Feast UI", + "name": "Feast UI", + "icons": [ + { + "src": "favicon.ico", + "sizes": "64x64 32x32 24x24 16x16", + "type": "image/x-icon" + }, + { + "src": "feast-favicon-36x36.png", + "type": "image/png", + "sizes": "36x36" + }, + { + "src": "feast-favicon-300x300.png", + "type": "image/png", + "sizes": "300x300" + } + ], + "start_url": ".", + "display": "standalone", + "theme_color": "#000000", + "background_color": "#ffffff" +} diff --git a/sdk/python/feast/ui/public/robots.txt b/sdk/python/feast/ui/public/robots.txt new file mode 100644 index 00000000000..e9e57dc4d41 --- /dev/null +++ b/sdk/python/feast/ui/public/robots.txt @@ -0,0 +1,3 @@ +# https://www.robotstxt.org/robotstxt.html +User-agent: * +Disallow: diff --git a/sdk/python/feast/ui/src/index.css b/sdk/python/feast/ui/src/index.css new file mode 100644 index 00000000000..ec2585e8c0b --- /dev/null +++ b/sdk/python/feast/ui/src/index.css @@ -0,0 +1,13 @@ +body { + margin: 0; + font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen', + 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue', + sans-serif; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +code { + font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New', + monospace; +} diff --git a/sdk/python/feast/ui/src/index.tsx b/sdk/python/feast/ui/src/index.tsx new file mode 100644 index 00000000000..4191de17a06 --- /dev/null +++ b/sdk/python/feast/ui/src/index.tsx @@ -0,0 +1,22 @@ +import React from 'react'; +import ReactDOM from "react-dom"; +import './index.css'; +import FeastUI from "@feast-dev/feast-ui"; +import "@feast-dev/feast-ui/dist/feast-ui.css"; + +ReactDOM.render( + + { + return res.json(); + }) + }} + /> + , + 
document.getElementById("root") +); \ No newline at end of file diff --git a/sdk/python/feast/ui/yarn.lock b/sdk/python/feast/ui/yarn.lock new file mode 100644 index 00000000000..f6301957c82 --- /dev/null +++ b/sdk/python/feast/ui/yarn.lock @@ -0,0 +1,10625 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +"@ampproject/remapping@^2.1.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" + integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== + dependencies: + "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@apideck/better-ajv-errors@^0.3.1": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@apideck/better-ajv-errors/-/better-ajv-errors-0.3.3.tgz#ab0b1e981e1749bf59736cf7ebe25cfc9f949c15" + integrity sha512-9o+HO2MbJhJHjDYZaDxJmSDckvDpiuItEsrIShV0DXeCshXWRHhqYyU/PKHMkuClOmFnZhRd6wzv4vpDu/dRKg== + dependencies: + json-schema "^0.4.0" + jsonpointer "^5.0.0" + leven "^3.1.0" + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.16.7", "@babel/code-frame@^7.8.3": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.16.7.tgz#44416b6bd7624b998f5b1af5d470856c40138789" + integrity sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg== + dependencies: + "@babel/highlight" "^7.16.7" + +"@babel/compat-data@^7.13.11", "@babel/compat-data@^7.17.0", "@babel/compat-data@^7.17.10": + version "7.17.10" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.17.10.tgz#711dc726a492dfc8be8220028b1b92482362baab" + integrity sha512-GZt/TCsG70Ms19gfZO1tM4CVnXsPgEPBCpJu+Qz3L0LUDsY5nZqFZglIoPC1kIYOtNBZlrnFT+klg12vFGZXrw== + +"@babel/core@^7.1.0", "@babel/core@^7.11.1", 
"@babel/core@^7.12.3", "@babel/core@^7.16.0", "@babel/core@^7.7.2", "@babel/core@^7.8.0": + version "7.17.10" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.17.10.tgz#74ef0fbf56b7dfc3f198fc2d927f4f03e12f4b05" + integrity sha512-liKoppandF3ZcBnIYFjfSDHZLKdLHGJRkoWtG8zQyGJBQfIYobpnVGI5+pLBNtS6psFLDzyq8+h5HiVljW9PNA== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.16.7" + "@babel/generator" "^7.17.10" + "@babel/helper-compilation-targets" "^7.17.10" + "@babel/helper-module-transforms" "^7.17.7" + "@babel/helpers" "^7.17.9" + "@babel/parser" "^7.17.10" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.17.10" + "@babel/types" "^7.17.10" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + +"@babel/eslint-parser@^7.16.3": + version "7.17.0" + resolved "https://registry.yarnpkg.com/@babel/eslint-parser/-/eslint-parser-7.17.0.tgz#eabb24ad9f0afa80e5849f8240d0e5facc2d90d6" + integrity sha512-PUEJ7ZBXbRkbq3qqM/jZ2nIuakUBqCYc7Qf52Lj7dlZ6zERnqisdHioL0l4wwQZnmskMeasqUNzLBFKs3nylXA== + dependencies: + eslint-scope "^5.1.1" + eslint-visitor-keys "^2.1.0" + semver "^6.3.0" + +"@babel/generator@^7.17.10", "@babel/generator@^7.7.2": + version "7.17.10" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.17.10.tgz#c281fa35b0c349bbe9d02916f4ae08fc85ed7189" + integrity sha512-46MJZZo9y3o4kmhBVc7zW7i8dtR1oIK/sdO5NcfcZRhTGYi+KKJRtHNgsU6c4VUcJmUNV/LQdebD/9Dlv4K+Tg== + dependencies: + "@babel/types" "^7.17.10" + "@jridgewell/gen-mapping" "^0.1.0" + jsesc "^2.5.1" + +"@babel/helper-annotate-as-pure@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.16.7.tgz#bb2339a7534a9c128e3102024c60760a3a7f3862" + integrity sha512-s6t2w/IPQVTAET1HitoowRGXooX8mCgtuP5195wD/QJPV6wYjpujCGF7JuMODVX2ZAJOf1GT6DT9MHEZvLOFSw== + dependencies: + "@babel/types" "^7.16.7" + 
+"@babel/helper-builder-binary-assignment-operator-visitor@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.16.7.tgz#38d138561ea207f0f69eb1626a418e4f7e6a580b" + integrity sha512-C6FdbRaxYjwVu/geKW4ZeQ0Q31AftgRcdSnZ5/jsH6BzCJbtvXvhpfkbkThYSuutZA7nCXpPR6AD9zd1dprMkA== + dependencies: + "@babel/helper-explode-assignable-expression" "^7.16.7" + "@babel/types" "^7.16.7" + +"@babel/helper-compilation-targets@^7.13.0", "@babel/helper-compilation-targets@^7.16.7", "@babel/helper-compilation-targets@^7.17.10": + version "7.17.10" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.17.10.tgz#09c63106d47af93cf31803db6bc49fef354e2ebe" + integrity sha512-gh3RxjWbauw/dFiU/7whjd0qN9K6nPJMqe6+Er7rOavFh0CQUSwhAE3IcTho2rywPJFxej6TUUHDkWcYI6gGqQ== + dependencies: + "@babel/compat-data" "^7.17.10" + "@babel/helper-validator-option" "^7.16.7" + browserslist "^4.20.2" + semver "^6.3.0" + +"@babel/helper-create-class-features-plugin@^7.16.10", "@babel/helper-create-class-features-plugin@^7.16.7", "@babel/helper-create-class-features-plugin@^7.17.6", "@babel/helper-create-class-features-plugin@^7.17.9": + version "7.17.9" + resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.17.9.tgz#71835d7fb9f38bd9f1378e40a4c0902fdc2ea49d" + integrity sha512-kUjip3gruz6AJKOq5i3nC6CoCEEF/oHH3cp6tOZhB+IyyyPyW0g1Gfsxn3mkk6S08pIA2y8GQh609v9G/5sHVQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.16.7" + "@babel/helper-environment-visitor" "^7.16.7" + "@babel/helper-function-name" "^7.17.9" + "@babel/helper-member-expression-to-functions" "^7.17.7" + "@babel/helper-optimise-call-expression" "^7.16.7" + "@babel/helper-replace-supers" "^7.16.7" + "@babel/helper-split-export-declaration" "^7.16.7" + 
+"@babel/helper-create-regexp-features-plugin@^7.16.7", "@babel/helper-create-regexp-features-plugin@^7.17.0": + version "7.17.0" + resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.17.0.tgz#1dcc7d40ba0c6b6b25618997c5dbfd310f186fe1" + integrity sha512-awO2So99wG6KnlE+TPs6rn83gCz5WlEePJDTnLEqbchMVrBeAujURVphRdigsk094VhvZehFoNOihSlcBjwsXA== + dependencies: + "@babel/helper-annotate-as-pure" "^7.16.7" + regexpu-core "^5.0.1" + +"@babel/helper-define-polyfill-provider@^0.3.1": + version "0.3.1" + resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.1.tgz#52411b445bdb2e676869e5a74960d2d3826d2665" + integrity sha512-J9hGMpJQmtWmj46B3kBHmL38UhJGhYX7eqkcq+2gsstyYt341HmPeWspihX43yVRA0mS+8GGk2Gckc7bY/HCmA== + dependencies: + "@babel/helper-compilation-targets" "^7.13.0" + "@babel/helper-module-imports" "^7.12.13" + "@babel/helper-plugin-utils" "^7.13.0" + "@babel/traverse" "^7.13.0" + debug "^4.1.1" + lodash.debounce "^4.0.8" + resolve "^1.14.2" + semver "^6.1.2" + +"@babel/helper-environment-visitor@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.16.7.tgz#ff484094a839bde9d89cd63cba017d7aae80ecd7" + integrity sha512-SLLb0AAn6PkUeAfKJCCOl9e1R53pQlGAfc4y4XuMRZfqeMYLE0dM1LMhqbGAlGQY0lfw5/ohoYWAe9V1yibRag== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-explode-assignable-expression@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.16.7.tgz#12a6d8522fdd834f194e868af6354e8650242b7a" + integrity sha512-KyUenhWMC8VrxzkGP0Jizjo4/Zx+1nNZhgocs+gLzyZyB8SHidhoq9KK/8Ato4anhwsivfkBLftky7gvzbZMtQ== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-function-name@^7.16.7", "@babel/helper-function-name@^7.17.9": + version "7.17.9" + resolved 
"https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.17.9.tgz#136fcd54bc1da82fcb47565cf16fd8e444b1ff12" + integrity sha512-7cRisGlVtiVqZ0MW0/yFB4atgpGLWEHUVYnb448hZK4x+vih0YO5UoS11XIYtZYqHd0dIPMdUSv8q5K4LdMnIg== + dependencies: + "@babel/template" "^7.16.7" + "@babel/types" "^7.17.0" + +"@babel/helper-hoist-variables@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.16.7.tgz#86bcb19a77a509c7b77d0e22323ef588fa58c246" + integrity sha512-m04d/0Op34H5v7pbZw6pSKP7weA6lsMvfiIAMeIvkY/R4xQtBSMFEigu9QTZ2qB/9l22vsxtM8a+Q8CzD255fg== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-member-expression-to-functions@^7.16.7", "@babel/helper-member-expression-to-functions@^7.17.7": + version "7.17.7" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.17.7.tgz#a34013b57d8542a8c4ff8ba3f747c02452a4d8c4" + integrity sha512-thxXgnQ8qQ11W2wVUObIqDL4p148VMxkt5T/qpN5k2fboRyzFGFmKsTGViquyM5QHKUy48OZoca8kw4ajaDPyw== + dependencies: + "@babel/types" "^7.17.0" + +"@babel/helper-module-imports@^7.10.4", "@babel/helper-module-imports@^7.12.13", "@babel/helper-module-imports@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.16.7.tgz#25612a8091a999704461c8a222d0efec5d091437" + integrity sha512-LVtS6TqjJHFc+nYeITRo6VLXve70xmq7wPhWTqDJusJEgGmkAACWwMiTNrvfoQo6hEhFwAIixNkvB0jPXDL8Wg== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-module-transforms@^7.16.7", "@babel/helper-module-transforms@^7.17.7": + version "7.17.7" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.17.7.tgz#3943c7f777139e7954a5355c815263741a9c1cbd" + integrity sha512-VmZD99F3gNTYB7fJRDTi+u6l/zxY0BE6OIxPSU7a50s6ZUQkHwSDmV92FfM+oCG0pZRVojGYhkR8I0OGeCVREw== + dependencies: + 
"@babel/helper-environment-visitor" "^7.16.7" + "@babel/helper-module-imports" "^7.16.7" + "@babel/helper-simple-access" "^7.17.7" + "@babel/helper-split-export-declaration" "^7.16.7" + "@babel/helper-validator-identifier" "^7.16.7" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.17.3" + "@babel/types" "^7.17.0" + +"@babel/helper-optimise-call-expression@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.16.7.tgz#a34e3560605abbd31a18546bd2aad3e6d9a174f2" + integrity sha512-EtgBhg7rd/JcnpZFXpBy0ze1YRfdm7BnBX4uKMBd3ixa3RGAE002JZB66FJyNH7g0F38U05pXmA5P8cBh7z+1w== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.13.0", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.16.7.tgz#aa3a8ab4c3cceff8e65eb9e73d87dc4ff320b2f5" + integrity sha512-Qg3Nk7ZxpgMrsox6HreY1ZNKdBq7K72tDSliA6dCl5f007jR4ne8iD5UzuNnCJH2xBf2BEEVGr+/OL6Gdp7RxA== + +"@babel/helper-remap-async-to-generator@^7.16.8": + version "7.16.8" + resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.16.8.tgz#29ffaade68a367e2ed09c90901986918d25e57e3" + integrity sha512-fm0gH7Flb8H51LqJHy3HJ3wnE1+qtYR2A99K06ahwrawLdOFsCEWjZOrYricXJHoPSudNKxrMBUPEIPxiIIvBw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.16.7" + "@babel/helper-wrap-function" "^7.16.8" + "@babel/types" "^7.16.8" + +"@babel/helper-replace-supers@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.16.7.tgz#e9f5f5f32ac90429c1a4bdec0f231ef0c2838ab1" + integrity 
sha512-y9vsWilTNaVnVh6xiJfABzsNpgDPKev9HnAgz6Gb1p6UUwf9NepdlsV7VXGCftJM+jqD5f7JIEubcpLjZj5dBw== + dependencies: + "@babel/helper-environment-visitor" "^7.16.7" + "@babel/helper-member-expression-to-functions" "^7.16.7" + "@babel/helper-optimise-call-expression" "^7.16.7" + "@babel/traverse" "^7.16.7" + "@babel/types" "^7.16.7" + +"@babel/helper-simple-access@^7.17.7": + version "7.17.7" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.17.7.tgz#aaa473de92b7987c6dfa7ce9a7d9674724823367" + integrity sha512-txyMCGroZ96i+Pxr3Je3lzEJjqwaRC9buMUgtomcrLe5Nd0+fk1h0LLA+ixUF5OW7AhHuQ7Es1WcQJZmZsz2XA== + dependencies: + "@babel/types" "^7.17.0" + +"@babel/helper-skip-transparent-expression-wrappers@^7.16.0": + version "7.16.0" + resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.16.0.tgz#0ee3388070147c3ae051e487eca3ebb0e2e8bb09" + integrity sha512-+il1gTy0oHwUsBQZyJvukbB4vPMdcYBrFHa0Uc4AizLxbq6BOYC51Rv4tWocX9BLBDLZ4kc6qUFpQ6HRgL+3zw== + dependencies: + "@babel/types" "^7.16.0" + +"@babel/helper-split-export-declaration@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.16.7.tgz#0b648c0c42da9d3920d85ad585f2778620b8726b" + integrity sha512-xbWoy/PFoxSWazIToT9Sif+jJTlrMcndIsaOKvTA6u7QEo7ilkRZpjew18/W3c7nm8fXdUDXh02VXTbZ0pGDNw== + dependencies: + "@babel/types" "^7.16.7" + +"@babel/helper-validator-identifier@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz#e8c602438c4a8195751243da9031d1607d247cad" + integrity sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw== + +"@babel/helper-validator-option@^7.16.7": + version "7.16.7" + resolved 
"https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz#b203ce62ce5fe153899b617c08957de860de4d23" + integrity sha512-TRtenOuRUVo9oIQGPC5G9DgK4743cdxvtOw0weQNpZXaS16SCBi5MNjZF8vba3ETURjZpTbVn7Vvcf2eAwFozQ== + +"@babel/helper-wrap-function@^7.16.8": + version "7.16.8" + resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.16.8.tgz#58afda087c4cd235de92f7ceedebca2c41274200" + integrity sha512-8RpyRVIAW1RcDDGTA+GpPAwV22wXCfKOoM9bet6TLkGIFTkRQSkH1nMQ5Yet4MpoXe1ZwHPVtNasc2w0uZMqnw== + dependencies: + "@babel/helper-function-name" "^7.16.7" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.16.8" + "@babel/types" "^7.16.8" + +"@babel/helpers@^7.17.9": + version "7.17.9" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.17.9.tgz#b2af120821bfbe44f9907b1826e168e819375a1a" + integrity sha512-cPCt915ShDWUEzEp3+UNRktO2n6v49l5RSnG9M5pS24hA+2FAc5si+Pn1i4VVbQQ+jh+bIZhPFQOJOzbrOYY1Q== + dependencies: + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.17.9" + "@babel/types" "^7.17.0" + +"@babel/highlight@^7.16.7": + version "7.17.9" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.17.9.tgz#61b2ee7f32ea0454612def4fccdae0de232b73e3" + integrity sha512-J9PfEKCbFIv2X5bjTMiZu6Vf341N05QIY+d6FvVKynkG1S7G0j3I0QoRtWIrXhZ+/Nlb5Q0MzqL7TokEJ5BNHg== + dependencies: + "@babel/helper-validator-identifier" "^7.16.7" + chalk "^2.0.0" + js-tokens "^4.0.0" + +"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.16.7", "@babel/parser@^7.17.10": + version "7.17.10" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.17.10.tgz#873b16db82a8909e0fbd7f115772f4b739f6ce78" + integrity sha512-n2Q6i+fnJqzOaq2VkdXxy2TCPCWQZHiCo0XqmrCvDWcZQKRyZzYi4Z0yxlBuN0w+r2ZHmre+Q087DSrw3pbJDQ== + +"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.16.7": + version "7.16.7" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.16.7.tgz#4eda6d6c2a0aa79c70fa7b6da67763dfe2141050" + integrity sha512-anv/DObl7waiGEnC24O9zqL0pSuI9hljihqiDuFHC8d7/bjr/4RLGPWuc8rYOff/QPzbEPSkzG8wGG9aDuhHRg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.16.7.tgz#cc001234dfc139ac45f6bcf801866198c8c72ff9" + integrity sha512-di8vUHRdf+4aJ7ltXhaDbPoszdkh59AQtJM5soLsuHpQJdFQZOA4uGj0V2u/CZ8bJ/u8ULDL5yq6FO/bCXnKHw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-skip-transparent-expression-wrappers" "^7.16.0" + "@babel/plugin-proposal-optional-chaining" "^7.16.7" + +"@babel/plugin-proposal-async-generator-functions@^7.16.8": + version "7.16.8" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.16.8.tgz#3bdd1ebbe620804ea9416706cd67d60787504bc8" + integrity sha512-71YHIvMuiuqWJQkebWJtdhQTfd4Q4mF76q2IX37uZPkG9+olBxsX+rH1vkhFto4UeJZ9dPY2s+mDvhDm1u2BGQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-remap-async-to-generator" "^7.16.8" + "@babel/plugin-syntax-async-generators" "^7.8.4" + +"@babel/plugin-proposal-class-properties@^7.16.0", "@babel/plugin-proposal-class-properties@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.16.7.tgz#925cad7b3b1a2fcea7e59ecc8eb5954f961f91b0" + integrity sha512-IobU0Xme31ewjYOShSIqd/ZGM/r/cuOz2z0MDbNrhF5FW+ZVgi0f2lyeoj9KFPDOAqsYxmLWZte1WOwlvY9aww== + dependencies: + "@babel/helper-create-class-features-plugin" 
"^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-proposal-class-static-block@^7.17.6": + version "7.17.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.17.6.tgz#164e8fd25f0d80fa48c5a4d1438a6629325ad83c" + integrity sha512-X/tididvL2zbs7jZCeeRJ8167U/+Ac135AM6jCAx6gYXDUviZV5Ku9UDvWS2NCuWlFjIRXklYhwo6HhAC7ETnA== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.17.6" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + +"@babel/plugin-proposal-decorators@^7.16.4": + version "7.17.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.17.9.tgz#67a1653be9c77ce5b6c318aa90c8287b87831619" + integrity sha512-EfH2LZ/vPa2wuPwJ26j+kYRkaubf89UlwxKXtxqEm57HrgSEYDB8t4swFP+p8LcI9yiP9ZRJJjo/58hS6BnaDA== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.17.9" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-replace-supers" "^7.16.7" + "@babel/helper-split-export-declaration" "^7.16.7" + "@babel/plugin-syntax-decorators" "^7.17.0" + charcodes "^0.2.0" + +"@babel/plugin-proposal-dynamic-import@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.16.7.tgz#c19c897eaa46b27634a00fee9fb7d829158704b2" + integrity sha512-I8SW9Ho3/8DRSdmDdH3gORdyUuYnk1m4cMxUAdu5oy4n3OfN8flDEH+d60iG7dUfi0KkYwSvoalHzzdRzpWHTg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + +"@babel/plugin-proposal-export-namespace-from@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.16.7.tgz#09de09df18445a5786a305681423ae63507a6163" + integrity sha512-ZxdtqDXLRGBL64ocZcs7ovt71L3jhC1RGSyR996svrCi3PYqHNkb3SwPJCs8RIzD86s+WPpt2S73+EHCGO+NUA== 
+ dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + +"@babel/plugin-proposal-json-strings@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.16.7.tgz#9732cb1d17d9a2626a08c5be25186c195b6fa6e8" + integrity sha512-lNZ3EEggsGY78JavgbHsK9u5P3pQaW7k4axlgFLYkMd7UBsiNahCITShLjNQschPyjtO6dADrL24757IdhBrsQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-json-strings" "^7.8.3" + +"@babel/plugin-proposal-logical-assignment-operators@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.16.7.tgz#be23c0ba74deec1922e639832904be0bea73cdea" + integrity sha512-K3XzyZJGQCr00+EtYtrDjmwX7o7PLK6U9bi1nCwkQioRFVUv6dJoxbQjtWVtP+bCPy82bONBKG8NPyQ4+i6yjg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + +"@babel/plugin-proposal-nullish-coalescing-operator@^7.16.0", "@babel/plugin-proposal-nullish-coalescing-operator@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.16.7.tgz#141fc20b6857e59459d430c850a0011e36561d99" + integrity sha512-aUOrYU3EVtjf62jQrCj63pYZ7k6vns2h/DQvHPWGmsJRYzWXZ6/AsfgpiRy6XiuIDADhJzP2Q9MwSMKauBQ+UQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + +"@babel/plugin-proposal-numeric-separator@^7.16.0", "@babel/plugin-proposal-numeric-separator@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.16.7.tgz#d6b69f4af63fb38b6ca2558442a7fb191236eba9" + integrity 
sha512-vQgPMknOIgiuVqbokToyXbkY/OmmjAzr/0lhSIbG/KmnzXPGwW/AdhdKpi+O4X/VkWiWjnkKOBiqJrTaC98VKw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + +"@babel/plugin-proposal-object-rest-spread@^7.17.3": + version "7.17.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.17.3.tgz#d9eb649a54628a51701aef7e0ea3d17e2b9dd390" + integrity sha512-yuL5iQA/TbZn+RGAfxQXfi7CNLmKi1f8zInn4IgobuCWcAb7i+zj4TYzQ9l8cEzVyJ89PDGuqxK1xZpUDISesw== + dependencies: + "@babel/compat-data" "^7.17.0" + "@babel/helper-compilation-targets" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.16.7" + +"@babel/plugin-proposal-optional-catch-binding@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.16.7.tgz#c623a430674ffc4ab732fd0a0ae7722b67cb74cf" + integrity sha512-eMOH/L4OvWSZAE1VkHbr1vckLG1WUcHGJSLqqQwl2GaUqG6QjddvrOaTUMNYiv77H5IKPMZ9U9P7EaHwvAShfA== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + +"@babel/plugin-proposal-optional-chaining@^7.16.0", "@babel/plugin-proposal-optional-chaining@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.16.7.tgz#7cd629564724816c0e8a969535551f943c64c39a" + integrity sha512-eC3xy+ZrUcBtP7x+sq62Q/HYd674pPTb/77XZMb5wbDPGWIdUbSr4Agr052+zaUPSb+gGRnjxXfKFvx5iMJ+DA== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-skip-transparent-expression-wrappers" "^7.16.0" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + +"@babel/plugin-proposal-private-methods@^7.16.0", "@babel/plugin-proposal-private-methods@^7.16.11": + version "7.16.11" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.16.11.tgz#e8df108288555ff259f4527dbe84813aac3a1c50" + integrity sha512-F/2uAkPlXDr8+BHpZvo19w3hLFKge+k75XUprE6jaqKxjGkSYcK+4c+bup5PdW/7W/Rpjwql7FTVEDW+fRAQsw== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.16.10" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-proposal-private-property-in-object@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.16.7.tgz#b0b8cef543c2c3d57e59e2c611994861d46a3fce" + integrity sha512-rMQkjcOFbm+ufe3bTZLyOfsOUOxyvLXZJCTARhJr+8UMSoZmqTe1K1BgkFcrW37rAchWg57yI69ORxiWvUINuQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.16.7" + "@babel/helper-create-class-features-plugin" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + +"@babel/plugin-proposal-unicode-property-regex@^7.16.7", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.16.7.tgz#635d18eb10c6214210ffc5ff4932552de08188a2" + integrity sha512-QRK0YI/40VLhNVGIjRNAAQkEHws0cswSdFFjpFyt943YmJIU1da9uW63Iu6NFV6CxTZW5eTDCrwZUstBWgp/Rg== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-syntax-async-generators@^7.8.4": + version "7.8.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-bigint@^7.8.3": + version "7.8.3" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" + integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.12.13", "@babel/plugin-syntax-class-properties@^7.8.3": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== + dependencies: + "@babel/helper-plugin-utils" "^7.12.13" + +"@babel/plugin-syntax-class-static-block@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406" + integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-decorators@^7.17.0": + version "7.17.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-decorators/-/plugin-syntax-decorators-7.17.0.tgz#a2be3b2c9fe7d78bd4994e790896bc411e2f166d" + integrity sha512-qWe85yCXsvDEluNP0OyeQjH63DlhAR3W7K9BxxU1MvbDb48tgBG+Ao6IJJ6smPDrrVzSQZrbF6donpkFBMcs3A== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-syntax-dynamic-import@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" + integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-export-namespace-from@^7.8.3": + version 
"7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" + integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.3" + +"@babel/plugin-syntax-flow@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.16.7.tgz#202b147e5892b8452bbb0bb269c7ed2539ab8832" + integrity sha512-UDo3YGQO0jH6ytzVwgSLv9i/CzMcUjbKenL67dTrAZPPv6GFAtDhe6jqnvmoKzC/7htNTohhos+onPtDMqJwaQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-syntax-import-meta@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-json-strings@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-jsx@^7.12.13", "@babel/plugin-syntax-jsx@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.16.7.tgz#50b6571d13f764266a113d77c82b4a6508bbe665" + integrity sha512-Esxmk7YjA8QysKeT3VhTXvF6y77f/a91SIs4pWb4H2eWGQkCKFgQaG6hdoEVZtGsrAcb2K5BW66XsOErD4WU3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-syntax-logical-assignment-operators@^7.10.4", 
"@babel/plugin-syntax-logical-assignment-operators@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.10.4", "@babel/plugin-syntax-numeric-separator@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + integrity 
sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-private-property-in-object@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad" + integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-top-level-await@^7.14.5", "@babel/plugin-syntax-top-level-await@^7.8.3": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-typescript@^7.16.7", "@babel/plugin-syntax-typescript@^7.7.2": + version "7.17.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.17.10.tgz#80031e6042cad6a95ed753f672ebd23c30933195" + integrity sha512-xJefea1DWXW09pW4Tm9bjwVlPDyYA2it3fWlmEjpYz6alPvTUjL0EOzNzI/FEOyI3r4/J7uVH5UqKgl1TQ5hqQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-arrow-functions@^7.16.7": + version "7.16.7" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.16.7.tgz#44125e653d94b98db76369de9c396dc14bef4154" + integrity sha512-9ffkFFMbvzTvv+7dTp/66xvZAWASuPD5Tl9LK3Z9vhOmANo6j94rik+5YMBt4CwHVMWLWpMsriIc2zsa3WW3xQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-async-to-generator@^7.16.8": + version "7.16.8" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.16.8.tgz#b83dff4b970cf41f1b819f8b49cc0cfbaa53a808" + integrity sha512-MtmUmTJQHCnyJVrScNzNlofQJ3dLFuobYn3mwOTKHnSCMtbNsqvF71GQmJfFjdrXSsAA7iysFmYWw4bXZ20hOg== + dependencies: + "@babel/helper-module-imports" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-remap-async-to-generator" "^7.16.8" + +"@babel/plugin-transform-block-scoped-functions@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.16.7.tgz#4d0d57d9632ef6062cdf354bb717102ee042a620" + integrity sha512-JUuzlzmF40Z9cXyytcbZEZKckgrQzChbQJw/5PuEHYeqzCsvebDx0K0jWnIIVcmmDOAVctCgnYs0pMcrYj2zJg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-block-scoping@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.16.7.tgz#f50664ab99ddeaee5bc681b8f3a6ea9d72ab4f87" + integrity sha512-ObZev2nxVAYA4bhyusELdo9hb3H+A56bxH3FZMbEImZFiEDYVHXQSJ1hQKFlDnlt8G9bBrCZ5ZpURZUrV4G5qQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-classes@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.16.7.tgz#8f4b9562850cd973de3b498f1218796eb181ce00" + integrity sha512-WY7og38SFAGYRe64BrjKf8OrE6ulEHtr5jEYaZMwox9KebgqPi67Zqz8K53EKk1fFEJgm96r32rkKZ3qA2nCWQ== + 
dependencies: + "@babel/helper-annotate-as-pure" "^7.16.7" + "@babel/helper-environment-visitor" "^7.16.7" + "@babel/helper-function-name" "^7.16.7" + "@babel/helper-optimise-call-expression" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-replace-supers" "^7.16.7" + "@babel/helper-split-export-declaration" "^7.16.7" + globals "^11.1.0" + +"@babel/plugin-transform-computed-properties@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.16.7.tgz#66dee12e46f61d2aae7a73710f591eb3df616470" + integrity sha512-gN72G9bcmenVILj//sv1zLNaPyYcOzUho2lIJBMh/iakJ9ygCo/hEF9cpGb61SCMEDxbbyBoVQxrt+bWKu5KGw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-destructuring@^7.17.7": + version "7.17.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.17.7.tgz#49dc2675a7afa9a5e4c6bdee636061136c3408d1" + integrity sha512-XVh0r5yq9sLR4vZ6eVZe8FKfIcSgaTBxVBRSYokRj2qksf6QerYnTxz9/GTuKTH/n/HwLP7t6gtlybHetJ/6hQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-dotall-regex@^7.16.7", "@babel/plugin-transform-dotall-regex@^7.4.4": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.16.7.tgz#6b2d67686fab15fb6a7fd4bd895d5982cfc81241" + integrity sha512-Lyttaao2SjZF6Pf4vk1dVKv8YypMpomAbygW+mU5cYP3S5cWTfCJjG8xV6CFdzGFlfWK81IjL9viiTvpb6G7gQ== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-duplicate-keys@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.16.7.tgz#2207e9ca8f82a0d36a5a67b6536e7ef8b08823c9" + integrity 
sha512-03DvpbRfvWIXyK0/6QiR1KMTWeT6OcQ7tbhjrXyFS02kjuX/mu5Bvnh5SDSWHxyawit2g5aWhKwI86EE7GUnTw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-exponentiation-operator@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.16.7.tgz#efa9862ef97e9e9e5f653f6ddc7b665e8536fe9b" + integrity sha512-8UYLSlyLgRixQvlYH3J2ekXFHDFLQutdy7FfFAMm3CPZ6q9wHCwnUyiXpQCe3gVVnQlHc5nsuiEVziteRNTXEA== + dependencies: + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-flow-strip-types@^7.16.0": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.16.7.tgz#291fb140c78dabbf87f2427e7c7c332b126964b8" + integrity sha512-mzmCq3cNsDpZZu9FADYYyfZJIOrSONmHcop2XEKPdBNMa4PDC4eEvcOvzZaCNcjKu72v0XQlA5y1g58aLRXdYg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-flow" "^7.16.7" + +"@babel/plugin-transform-for-of@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.16.7.tgz#649d639d4617dff502a9a158c479b3b556728d8c" + integrity sha512-/QZm9W92Ptpw7sjI9Nx1mbcsWz33+l8kuMIQnDwgQBG5s3fAfQvkRjQ7NqXhtNcKOnPkdICmUHyCaWW06HCsqg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-function-name@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.16.7.tgz#5ab34375c64d61d083d7d2f05c38d90b97ec65cf" + integrity sha512-SU/C68YVwTRxqWj5kgsbKINakGag0KTgq9f2iZEXdStoAbOzLHEBRYzImmA6yFo8YZhJVflvXmIHUO7GWHmxxA== + dependencies: + "@babel/helper-compilation-targets" "^7.16.7" + "@babel/helper-function-name" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + 
+"@babel/plugin-transform-literals@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.16.7.tgz#254c9618c5ff749e87cb0c0cef1a0a050c0bdab1" + integrity sha512-6tH8RTpTWI0s2sV6uq3e/C9wPo4PTqqZps4uF0kzQ9/xPLFQtipynvmT1g/dOfEJ+0EQsHhkQ/zyRId8J2b8zQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-member-expression-literals@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.16.7.tgz#6e5dcf906ef8a098e630149d14c867dd28f92384" + integrity sha512-mBruRMbktKQwbxaJof32LT9KLy2f3gH+27a5XSuXo6h7R3vqltl0PgZ80C8ZMKw98Bf8bqt6BEVi3svOh2PzMw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-modules-amd@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.16.7.tgz#b28d323016a7daaae8609781d1f8c9da42b13186" + integrity sha512-KaaEtgBL7FKYwjJ/teH63oAmE3lP34N3kshz8mm4VMAw7U3PxjVwwUmxEFksbgsNUaO3wId9R2AVQYSEGRa2+g== + dependencies: + "@babel/helper-module-transforms" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-commonjs@^7.17.9": + version "7.17.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.17.9.tgz#274be1a2087beec0254d4abd4d86e52442e1e5b6" + integrity sha512-2TBFd/r2I6VlYn0YRTz2JdazS+FoUuQ2rIFHoAxtyP/0G3D82SBLaRq9rnUkpqlLg03Byfl/+M32mpxjO6KaPw== + dependencies: + "@babel/helper-module-transforms" "^7.17.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-simple-access" "^7.17.7" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-systemjs@^7.17.8": + version "7.17.8" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.17.8.tgz#81fd834024fae14ea78fbe34168b042f38703859" + integrity sha512-39reIkMTUVagzgA5x88zDYXPCMT6lcaRKs1+S9K6NKBPErbgO/w/kP8GlNQTC87b412ZTlmNgr3k2JrWgHH+Bw== + dependencies: + "@babel/helper-hoist-variables" "^7.16.7" + "@babel/helper-module-transforms" "^7.17.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-validator-identifier" "^7.16.7" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-umd@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.16.7.tgz#23dad479fa585283dbd22215bff12719171e7618" + integrity sha512-EMh7uolsC8O4xhudF2F6wedbSHm1HHZ0C6aJ7K67zcDNidMzVcxWdGr+htW9n21klm+bOn+Rx4CBsAntZd3rEQ== + dependencies: + "@babel/helper-module-transforms" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-named-capturing-groups-regex@^7.17.10": + version "7.17.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.17.10.tgz#715dbcfafdb54ce8bccd3d12e8917296a4ba66a4" + integrity sha512-v54O6yLaJySCs6mGzaVOUw9T967GnH38T6CQSAtnzdNPwu84l2qAjssKzo/WSO8Yi7NF+7ekm5cVbF/5qiIgNA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.17.0" + +"@babel/plugin-transform-new-target@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.16.7.tgz#9967d89a5c243818e0800fdad89db22c5f514244" + integrity sha512-xiLDzWNMfKoGOpc6t3U+etCE2yRnn3SM09BXqWPIZOBpL2gvVrBWUKnsJx0K/ADi5F5YC5f8APFfWrz25TdlGg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-object-super@^7.16.7": + version "7.16.7" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.16.7.tgz#ac359cf8d32cf4354d27a46867999490b6c32a94" + integrity sha512-14J1feiQVWaGvRxj2WjyMuXS2jsBkgB3MdSN5HuC2G5nRspa5RK9COcs82Pwy5BuGcjb+fYaUj94mYcOj7rCvw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-replace-supers" "^7.16.7" + +"@babel/plugin-transform-parameters@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.16.7.tgz#a1721f55b99b736511cb7e0152f61f17688f331f" + integrity sha512-AT3MufQ7zZEhU2hwOA11axBnExW0Lszu4RL/tAlUJBuNoRak+wehQW8h6KcXOcgjY42fHtDxswuMhMjFEuv/aw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-property-literals@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.16.7.tgz#2dadac85155436f22c696c4827730e0fe1057a55" + integrity sha512-z4FGr9NMGdoIl1RqavCqGG+ZuYjfZ/hkCIeuH6Do7tXmSm0ls11nYVSJqFEUOSJbDab5wC6lRE/w6YjVcr6Hqw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-react-constant-elements@^7.12.1": + version "7.17.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.17.6.tgz#6cc273c2f612a6a50cb657e63ee1303e5e68d10a" + integrity sha512-OBv9VkyyKtsHZiHLoSfCn+h6yU7YKX8nrs32xUmOa1SRSk+t03FosB6fBZ0Yz4BpD1WV7l73Nsad+2Tz7APpqw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-react-display-name@^7.16.0", "@babel/plugin-transform-react-display-name@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.16.7.tgz#7b6d40d232f4c0f550ea348593db3b21e2404340" + integrity 
sha512-qgIg8BcZgd0G/Cz916D5+9kqX0c7nPZyXaP8R2tLNN5tkyIZdG5fEwBrxwplzSnjC1jvQmyMNVwUCZPcbGY7Pg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-react-jsx-development@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.16.7.tgz#43a00724a3ed2557ed3f276a01a929e6686ac7b8" + integrity sha512-RMvQWvpla+xy6MlBpPlrKZCMRs2AGiHOGHY3xRwl0pEeim348dDyxeH4xBsMPbIMhujeq7ihE702eM2Ew0Wo+A== + dependencies: + "@babel/plugin-transform-react-jsx" "^7.16.7" + +"@babel/plugin-transform-react-jsx@^7.16.7": + version "7.17.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.17.3.tgz#eac1565da176ccb1a715dae0b4609858808008c1" + integrity sha512-9tjBm4O07f7mzKSIlEmPdiE6ub7kfIe6Cd+w+oQebpATfTQMAgW+YOuWxogbKVTulA+MEO7byMeIUtQ1z+z+ZQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.16.7" + "@babel/helper-module-imports" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-jsx" "^7.16.7" + "@babel/types" "^7.17.0" + +"@babel/plugin-transform-react-pure-annotations@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.16.7.tgz#232bfd2f12eb551d6d7d01d13fe3f86b45eb9c67" + integrity sha512-hs71ToC97k3QWxswh2ElzMFABXHvGiJ01IB1TbYQDGeWRKWz/MPUTh5jGExdHvosYKpnJW5Pm3S4+TA3FyX+GA== + dependencies: + "@babel/helper-annotate-as-pure" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-regenerator@^7.17.9": + version "7.17.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.17.9.tgz#0a33c3a61cf47f45ed3232903683a0afd2d3460c" + integrity sha512-Lc2TfbxR1HOyn/c6b4Y/b6NHoTb67n/IoWLxTu4kC7h4KQnWlhCq2S8Tx0t2SVvv5Uu87Hs+6JEJ5kt2tYGylQ== + dependencies: + regenerator-transform "^0.15.0" + 
+"@babel/plugin-transform-reserved-words@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.16.7.tgz#1d798e078f7c5958eec952059c460b220a63f586" + integrity sha512-KQzzDnZ9hWQBjwi5lpY5v9shmm6IVG0U9pB18zvMu2i4H90xpT4gmqwPYsn8rObiadYe2M0gmgsiOIF5A/2rtg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-runtime@^7.16.4": + version "7.17.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.17.10.tgz#b89d821c55d61b5e3d3c3d1d636d8d5a81040ae1" + integrity sha512-6jrMilUAJhktTr56kACL8LnWC5hx3Lf27BS0R0DSyW/OoJfb/iTHeE96V3b1dgKG3FSFdd/0culnYWMkjcKCig== + dependencies: + "@babel/helper-module-imports" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + babel-plugin-polyfill-corejs2 "^0.3.0" + babel-plugin-polyfill-corejs3 "^0.5.0" + babel-plugin-polyfill-regenerator "^0.3.0" + semver "^6.3.0" + +"@babel/plugin-transform-shorthand-properties@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.16.7.tgz#e8549ae4afcf8382f711794c0c7b6b934c5fbd2a" + integrity sha512-hah2+FEnoRoATdIb05IOXf+4GzXYTq75TVhIn1PewihbpyrNWUt2JbudKQOETWw6QpLe+AIUpJ5MVLYTQbeeUg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-spread@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.16.7.tgz#a303e2122f9f12e0105daeedd0f30fb197d8ff44" + integrity sha512-+pjJpgAngb53L0iaA5gU/1MLXJIfXcYepLgXB3esVRf4fqmj8f2cxM3/FKaHsZms08hFQJkFccEWuIpm429TXg== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-skip-transparent-expression-wrappers" "^7.16.0" + +"@babel/plugin-transform-sticky-regex@^7.16.7": + version "7.16.7" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.16.7.tgz#c84741d4f4a38072b9a1e2e3fd56d359552e8660" + integrity sha512-NJa0Bd/87QV5NZZzTuZG5BPJjLYadeSZ9fO6oOUoL4iQx+9EEuw/eEM92SrsT19Yc2jgB1u1hsjqDtH02c3Drw== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-template-literals@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.16.7.tgz#f3d1c45d28967c8e80f53666fc9c3e50618217ab" + integrity sha512-VwbkDDUeenlIjmfNeDX/V0aWrQH2QiVyJtwymVQSzItFDTpxfyJh3EVaQiS0rIN/CqbLGr0VcGmuwyTdZtdIsA== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-typeof-symbol@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.16.7.tgz#9cdbe622582c21368bd482b660ba87d5545d4f7e" + integrity sha512-p2rOixCKRJzpg9JB4gjnG4gjWkWa89ZoYUnl9snJ1cWIcTH/hvxZqfO+WjG6T8DRBpctEol5jw1O5rA8gkCokQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-typescript@^7.16.7": + version "7.16.8" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.16.8.tgz#591ce9b6b83504903fa9dd3652c357c2ba7a1ee0" + integrity sha512-bHdQ9k7YpBDO2d0NVfkj51DpQcvwIzIusJ7mEUaMlbZq3Kt/U47j24inXZHQ5MDiYpCs+oZiwnXyKedE8+q7AQ== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/plugin-syntax-typescript" "^7.16.7" + +"@babel/plugin-transform-unicode-escapes@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.16.7.tgz#da8717de7b3287a2c6d659750c964f302b31ece3" + integrity sha512-TAV5IGahIz3yZ9/Hfv35TV2xEm+kaBDaZQCn2S/hG9/CZ0DktxJv9eKfPc7yYCvOYR4JGx1h8C+jcSOvgaaI/Q== + dependencies: + 
"@babel/helper-plugin-utils" "^7.16.7" + +"@babel/plugin-transform-unicode-regex@^7.16.7": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.16.7.tgz#0f7aa4a501198976e25e82702574c34cfebe9ef2" + integrity sha512-oC5tYYKw56HO75KZVLQ+R/Nl3Hro9kf8iG0hXoaHP7tjAyCpvqBiSNe6vGrZni1Z6MggmUOC6A7VP7AVmw225Q== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.16.7" + "@babel/helper-plugin-utils" "^7.16.7" + +"@babel/preset-env@^7.11.0", "@babel/preset-env@^7.12.1", "@babel/preset-env@^7.16.4": + version "7.17.10" + resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.17.10.tgz#a81b093669e3eb6541bb81a23173c5963c5de69c" + integrity sha512-YNgyBHZQpeoBSRBg0xixsZzfT58Ze1iZrajvv0lJc70qDDGuGfonEnMGfWeSY0mQ3JTuCWFbMkzFRVafOyJx4g== + dependencies: + "@babel/compat-data" "^7.17.10" + "@babel/helper-compilation-targets" "^7.17.10" + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-validator-option" "^7.16.7" + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.16.7" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.16.7" + "@babel/plugin-proposal-async-generator-functions" "^7.16.8" + "@babel/plugin-proposal-class-properties" "^7.16.7" + "@babel/plugin-proposal-class-static-block" "^7.17.6" + "@babel/plugin-proposal-dynamic-import" "^7.16.7" + "@babel/plugin-proposal-export-namespace-from" "^7.16.7" + "@babel/plugin-proposal-json-strings" "^7.16.7" + "@babel/plugin-proposal-logical-assignment-operators" "^7.16.7" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.16.7" + "@babel/plugin-proposal-numeric-separator" "^7.16.7" + "@babel/plugin-proposal-object-rest-spread" "^7.17.3" + "@babel/plugin-proposal-optional-catch-binding" "^7.16.7" + "@babel/plugin-proposal-optional-chaining" "^7.16.7" + "@babel/plugin-proposal-private-methods" "^7.16.11" + "@babel/plugin-proposal-private-property-in-object" 
"^7.16.7" + "@babel/plugin-proposal-unicode-property-regex" "^7.16.7" + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-class-properties" "^7.12.13" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + "@babel/plugin-syntax-top-level-await" "^7.14.5" + "@babel/plugin-transform-arrow-functions" "^7.16.7" + "@babel/plugin-transform-async-to-generator" "^7.16.8" + "@babel/plugin-transform-block-scoped-functions" "^7.16.7" + "@babel/plugin-transform-block-scoping" "^7.16.7" + "@babel/plugin-transform-classes" "^7.16.7" + "@babel/plugin-transform-computed-properties" "^7.16.7" + "@babel/plugin-transform-destructuring" "^7.17.7" + "@babel/plugin-transform-dotall-regex" "^7.16.7" + "@babel/plugin-transform-duplicate-keys" "^7.16.7" + "@babel/plugin-transform-exponentiation-operator" "^7.16.7" + "@babel/plugin-transform-for-of" "^7.16.7" + "@babel/plugin-transform-function-name" "^7.16.7" + "@babel/plugin-transform-literals" "^7.16.7" + "@babel/plugin-transform-member-expression-literals" "^7.16.7" + "@babel/plugin-transform-modules-amd" "^7.16.7" + "@babel/plugin-transform-modules-commonjs" "^7.17.9" + "@babel/plugin-transform-modules-systemjs" "^7.17.8" + "@babel/plugin-transform-modules-umd" "^7.16.7" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.17.10" + "@babel/plugin-transform-new-target" "^7.16.7" + "@babel/plugin-transform-object-super" "^7.16.7" + 
"@babel/plugin-transform-parameters" "^7.16.7" + "@babel/plugin-transform-property-literals" "^7.16.7" + "@babel/plugin-transform-regenerator" "^7.17.9" + "@babel/plugin-transform-reserved-words" "^7.16.7" + "@babel/plugin-transform-shorthand-properties" "^7.16.7" + "@babel/plugin-transform-spread" "^7.16.7" + "@babel/plugin-transform-sticky-regex" "^7.16.7" + "@babel/plugin-transform-template-literals" "^7.16.7" + "@babel/plugin-transform-typeof-symbol" "^7.16.7" + "@babel/plugin-transform-unicode-escapes" "^7.16.7" + "@babel/plugin-transform-unicode-regex" "^7.16.7" + "@babel/preset-modules" "^0.1.5" + "@babel/types" "^7.17.10" + babel-plugin-polyfill-corejs2 "^0.3.0" + babel-plugin-polyfill-corejs3 "^0.5.0" + babel-plugin-polyfill-regenerator "^0.3.0" + core-js-compat "^3.22.1" + semver "^6.3.0" + +"@babel/preset-modules@^0.1.5": + version "0.1.5" + resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.5.tgz#ef939d6e7f268827e1841638dc6ff95515e115d9" + integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" + "@babel/plugin-transform-dotall-regex" "^7.4.4" + "@babel/types" "^7.4.4" + esutils "^2.0.2" + +"@babel/preset-react@^7.12.5", "@babel/preset-react@^7.16.0": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.16.7.tgz#4c18150491edc69c183ff818f9f2aecbe5d93852" + integrity sha512-fWpyI8UM/HE6DfPBzD8LnhQ/OcH8AgTaqcqP2nGOXEUV+VKBR5JRN9hCk9ai+zQQ57vtm9oWeXguBCPNUjytgA== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-validator-option" "^7.16.7" + "@babel/plugin-transform-react-display-name" "^7.16.7" + "@babel/plugin-transform-react-jsx" "^7.16.7" + "@babel/plugin-transform-react-jsx-development" "^7.16.7" + "@babel/plugin-transform-react-pure-annotations" "^7.16.7" + 
+"@babel/preset-typescript@^7.16.0": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.16.7.tgz#ab114d68bb2020afc069cd51b37ff98a046a70b9" + integrity sha512-WbVEmgXdIyvzB77AQjGBEyYPZx+8tTsO50XtfozQrkW8QB2rLJpH2lgx0TRw5EJrBxOZQ+wCcyPVQvS8tjEHpQ== + dependencies: + "@babel/helper-plugin-utils" "^7.16.7" + "@babel/helper-validator-option" "^7.16.7" + "@babel/plugin-transform-typescript" "^7.16.7" + +"@babel/runtime-corejs3@^7.10.2": + version "7.17.9" + resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.17.9.tgz#3d02d0161f0fbf3ada8e88159375af97690f4055" + integrity sha512-WxYHHUWF2uZ7Hp1K+D1xQgbgkGUfA+5UPOegEXGt2Y5SMog/rYCVaifLZDbw8UkNXozEqqrZTy6bglL7xTaCOw== + dependencies: + core-js-pure "^3.20.2" + regenerator-runtime "^0.13.4" + +"@babel/runtime@^7.0.0", "@babel/runtime@^7.10.2", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.13.10", "@babel/runtime@^7.15.4", "@babel/runtime@^7.16.3", "@babel/runtime@^7.5.5", "@babel/runtime@^7.6.2", "@babel/runtime@^7.7.2", "@babel/runtime@^7.7.6", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": + version "7.17.9" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.17.9.tgz#d19fbf802d01a8cb6cf053a64e472d42c434ba72" + integrity sha512-lSiBBvodq29uShpWGNbgFdKYNiFDo5/HIYsaCEY9ff4sb10x9jizo2+pRrSyF4jKZCXqgzuqBOQKbUm90gQwJg== + dependencies: + regenerator-runtime "^0.13.4" + +"@babel/template@^7.16.7", "@babel/template@^7.3.3": + version "7.16.7" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.7.tgz#8d126c8701fde4d66b264b3eba3d96f07666d155" + integrity sha512-I8j/x8kHUrbYRTUxXrrMbfCa7jxkE7tZre39x3kjr9hvI82cK1FfqLygotcWN5kdPGWcLdWMHpSBavse5tWw3w== + dependencies: + "@babel/code-frame" "^7.16.7" + "@babel/parser" "^7.16.7" + "@babel/types" "^7.16.7" + +"@babel/traverse@^7.13.0", "@babel/traverse@^7.16.7", "@babel/traverse@^7.16.8", 
"@babel/traverse@^7.17.10", "@babel/traverse@^7.17.3", "@babel/traverse@^7.17.9", "@babel/traverse@^7.7.2": + version "7.17.10" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.17.10.tgz#1ee1a5ac39f4eac844e6cf855b35520e5eb6f8b5" + integrity sha512-VmbrTHQteIdUUQNTb+zE12SHS/xQVIShmBPhlNP12hD5poF2pbITW1Z4172d03HegaQWhLffdkRJYtAzp0AGcw== + dependencies: + "@babel/code-frame" "^7.16.7" + "@babel/generator" "^7.17.10" + "@babel/helper-environment-visitor" "^7.16.7" + "@babel/helper-function-name" "^7.17.9" + "@babel/helper-hoist-variables" "^7.16.7" + "@babel/helper-split-export-declaration" "^7.16.7" + "@babel/parser" "^7.17.10" + "@babel/types" "^7.17.10" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/types@^7.0.0", "@babel/types@^7.12.6", "@babel/types@^7.16.0", "@babel/types@^7.16.7", "@babel/types@^7.16.8", "@babel/types@^7.17.0", "@babel/types@^7.17.10", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": + version "7.17.10" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.17.10.tgz#d35d7b4467e439fcf06d195f8100e0fea7fc82c4" + integrity sha512-9O26jG0mBYfGkUYCYZRnBwbVLd1UZOICEr2Em6InB6jVfsAv1GKgwXHmrSg+WFWDmeKTA6vyTZiN8tCSM5Oo3A== + dependencies: + "@babel/helper-validator-identifier" "^7.16.7" + to-fast-properties "^2.0.0" + +"@base2/pretty-print-object@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@base2/pretty-print-object/-/pretty-print-object-1.0.1.tgz#371ba8be66d556812dc7fb169ebc3c08378f69d4" + integrity sha512-4iri8i1AqYHJE2DstZYkyEprg6Pq6sKx3xn5FpySk9sNhH7qN2LLlHJCfDTZRILNwQNPD7mATWM0TBui7uC1pA== + +"@bcoe/v8-coverage@^0.2.3": + version "0.2.3" + resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== + +"@csstools/normalize.css@*": + version "12.0.0" + resolved 
"https://registry.yarnpkg.com/@csstools/normalize.css/-/normalize.css-12.0.0.tgz#a9583a75c3f150667771f30b60d9f059473e62c4" + integrity sha512-M0qqxAcwCsIVfpFQSlGN5XjXWu8l5JDZN+fPt1LeW5SZexQTgnaEvgXAY+CeygRw0EeppWHi12JxESWiWrB0Sg== + +"@csstools/postcss-color-function@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@csstools/postcss-color-function/-/postcss-color-function-1.1.0.tgz#229966327747f58fbe586de35daa139db3ce1e5d" + integrity sha512-5D5ND/mZWcQoSfYnSPsXtuiFxhzmhxt6pcjrFLJyldj+p0ZN2vvRpYNX+lahFTtMhAYOa2WmkdGINr0yP0CvGA== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +"@csstools/postcss-font-format-keywords@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@csstools/postcss-font-format-keywords/-/postcss-font-format-keywords-1.0.0.tgz#7e7df948a83a0dfb7eb150a96e2390ac642356a1" + integrity sha512-oO0cZt8do8FdVBX8INftvIA4lUrKUSCcWUf9IwH9IPWOgKT22oAZFXeHLoDK7nhB2SmkNycp5brxfNMRLIhd6Q== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-hwb-function@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@csstools/postcss-hwb-function/-/postcss-hwb-function-1.0.0.tgz#d6785c1c5ba8152d1d392c66f3a6a446c6034f6d" + integrity sha512-VSTd7hGjmde4rTj1rR30sokY3ONJph1reCBTUXqeW1fKwETPy1x4t/XIeaaqbMbC5Xg4SM/lyXZ2S8NELT2TaA== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-ic-unit@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@csstools/postcss-ic-unit/-/postcss-ic-unit-1.0.0.tgz#f484db59fc94f35a21b6d680d23b0ec69b286b7f" + integrity sha512-i4yps1mBp2ijrx7E96RXrQXQQHm6F4ym1TOD0D69/sjDjZvQ22tqiEvaNw7pFZTUO5b9vWRHzbHzP9+UKuw+bA== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +"@csstools/postcss-is-pseudo-class@^2.0.2": + version "2.0.3" + resolved 
"https://registry.yarnpkg.com/@csstools/postcss-is-pseudo-class/-/postcss-is-pseudo-class-2.0.3.tgz#98c827ca88598e598dcd726a9d9e21e0475eb487" + integrity sha512-wMQ3GMWrJyRQfvBJsD38ndF/nwHT32xevSn8w2X+iCoWqmhhoj0K7HgdGW8XQhah6sdENBa8yS9gRosdezaQZw== + dependencies: + "@csstools/selector-specificity" "^1.0.0" + postcss-selector-parser "^6.0.10" + +"@csstools/postcss-normalize-display-values@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@csstools/postcss-normalize-display-values/-/postcss-normalize-display-values-1.0.0.tgz#ce698f688c28517447aedf15a9037987e3d2dc97" + integrity sha512-bX+nx5V8XTJEmGtpWTO6kywdS725t71YSLlxWt78XoHUbELWgoCXeOFymRJmL3SU1TLlKSIi7v52EWqe60vJTQ== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-oklab-function@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@csstools/postcss-oklab-function/-/postcss-oklab-function-1.1.0.tgz#e9a269487a292e0930760948e923e1d46b638ee6" + integrity sha512-e/Q5HopQzmnQgqimG9v3w2IG4VRABsBq3itOcn4bnm+j4enTgQZ0nWsaH/m9GV2otWGQ0nwccYL5vmLKyvP1ww== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +"@csstools/postcss-progressive-custom-properties@^1.1.0", "@csstools/postcss-progressive-custom-properties@^1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@csstools/postcss-progressive-custom-properties/-/postcss-progressive-custom-properties-1.3.0.tgz#542292558384361776b45c85226b9a3a34f276fa" + integrity sha512-ASA9W1aIy5ygskZYuWams4BzafD12ULvSypmaLJT2jvQ8G0M3I8PRQhC0h7mG0Z3LI05+agZjqSR9+K9yaQQjA== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-stepped-value-functions@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@csstools/postcss-stepped-value-functions/-/postcss-stepped-value-functions-1.0.0.tgz#f8ffc05e163ba7bcbefc5fdcaf264ce9fd408c16" + integrity 
sha512-q8c4bs1GumAiRenmFjASBcWSLKrbzHzWl6C2HcaAxAXIiL2rUlUWbqQZUjwVG5tied0rld19j/Mm90K3qI26vw== + dependencies: + postcss-value-parser "^4.2.0" + +"@csstools/postcss-unset-value@^1.0.0": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@csstools/postcss-unset-value/-/postcss-unset-value-1.0.1.tgz#2cc020785db5ec82cc9444afe4cdae2a65445f89" + integrity sha512-f1G1WGDXEU/RN1TWAxBPQgQudtLnLQPyiWdtypkPC+mVYNKFKH/HYXSxH4MVNqwF8M0eDsoiU7HumJHCg/L/jg== + +"@csstools/selector-specificity@1.0.0", "@csstools/selector-specificity@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@csstools/selector-specificity/-/selector-specificity-1.0.0.tgz#91c560df2ed8d9700e4c7ed4ac21a3a322c9d975" + integrity sha512-RkYG5KiGNX0fJ5YoI0f4Wfq2Yo74D25Hru4fxTOioYdQvHBxcrrtTTyT5Ozzh2ejcNrhFy7IEts2WyEY7yi5yw== + +"@elastic/datemath@^5.0.3": + version "5.0.3" + resolved "https://registry.yarnpkg.com/@elastic/datemath/-/datemath-5.0.3.tgz#7baccdab672b9a3ecb7fe8387580670936b58573" + integrity sha512-8Hbr1Uyjm5OcYBfEB60K7sCP6U3IXuWDaLaQmYv3UxgI4jqBWbakoemwWvsqPVUvnwEjuX6z7ghPZbefs8xiaA== + dependencies: + tslib "^1.9.3" + +"@elastic/eui@^55.0.1": + version "55.1.2" + resolved "https://registry.yarnpkg.com/@elastic/eui/-/eui-55.1.2.tgz#dd0b42f5b26c5800d6a9cb2d4c2fe1afce9d3f07" + integrity sha512-wwZz5KxMIMFlqEsoCRiQBJDc4CrluS1d0sCOmQ5lhIzKhYc91MdxnqCk2i6YkhL4sSDf2Y9KAEuMXa+uweOWUA== + dependencies: + "@types/chroma-js" "^2.0.0" + "@types/lodash" "^4.14.160" + "@types/numeral" "^0.0.28" + "@types/react-beautiful-dnd" "^13.1.2" + "@types/react-input-autosize" "^2.2.1" + "@types/react-virtualized-auto-sizer" "^1.0.1" + "@types/react-window" "^1.8.5" + "@types/refractor" "^3.0.0" + "@types/resize-observer-browser" "^0.1.5" + "@types/vfile-message" "^2.0.0" + chroma-js "^2.1.0" + classnames "^2.2.6" + lodash "^4.17.21" + mdast-util-to-hast "^10.0.0" + numeral "^2.0.6" + prop-types "^15.6.0" + react-beautiful-dnd "^13.1.0" + react-dropzone "^11.5.3" + react-element-to-jsx-string 
"^14.3.4" + react-focus-on "^3.5.4" + react-input-autosize "^3.0.0" + react-is "^17.0.2" + react-virtualized-auto-sizer "^1.0.6" + react-window "^1.8.6" + refractor "^3.5.0" + rehype-raw "^5.0.0" + rehype-react "^6.0.0" + rehype-stringify "^8.0.0" + remark-breaks "^2.0.2" + remark-emoji "^2.1.0" + remark-parse "^8.0.3" + remark-rehype "^8.0.0" + tabbable "^5.2.1" + text-diff "^1.0.1" + unified "^9.2.0" + unist-util-visit "^2.0.3" + url-parse "^1.5.10" + uuid "^8.3.0" + vfile "^4.2.0" + +"@elastic/eui@^57.0.0": + version "57.0.0" + resolved "https://registry.yarnpkg.com/@elastic/eui/-/eui-57.0.0.tgz#86d43e27196f9997ef44d2a4c701d092ce99e132" + integrity sha512-VBgW6Pr0JJB3JhJ59MV8guxb2v4Gd3SJEmsMGKGyIY+KcvSMWbVEGa44Ep12VAJYynIA05Z3OXXc/ge5dMycpA== + dependencies: + "@types/chroma-js" "^2.0.0" + "@types/lodash" "^4.14.160" + "@types/numeral" "^0.0.28" + "@types/react-beautiful-dnd" "^13.1.2" + "@types/react-input-autosize" "^2.2.1" + "@types/react-virtualized-auto-sizer" "^1.0.1" + "@types/react-window" "^1.8.5" + "@types/refractor" "^3.0.0" + "@types/resize-observer-browser" "^0.1.5" + "@types/vfile-message" "^2.0.0" + chroma-js "^2.1.0" + classnames "^2.2.6" + lodash "^4.17.21" + mdast-util-to-hast "^10.0.0" + numeral "^2.0.6" + prop-types "^15.6.0" + react-beautiful-dnd "^13.1.0" + react-dropzone "^11.5.3" + react-element-to-jsx-string "^14.3.4" + react-focus-on "^3.5.4" + react-input-autosize "^3.0.0" + react-is "^17.0.2" + react-virtualized-auto-sizer "^1.0.6" + react-window "^1.8.6" + refractor "^3.5.0" + rehype-raw "^5.0.0" + rehype-react "^6.0.0" + rehype-stringify "^8.0.0" + remark-breaks "^2.0.2" + remark-emoji "^2.1.0" + remark-parse "^8.0.3" + remark-rehype "^8.0.0" + tabbable "^5.2.1" + text-diff "^1.0.1" + unified "^9.2.0" + unist-util-visit "^2.0.3" + url-parse "^1.5.10" + uuid "^8.3.0" + vfile "^4.2.0" + +"@emotion/babel-plugin@^11.7.1": + version "11.9.2" + resolved 
"https://registry.yarnpkg.com/@emotion/babel-plugin/-/babel-plugin-11.9.2.tgz#723b6d394c89fb2ef782229d92ba95a740576e95" + integrity sha512-Pr/7HGH6H6yKgnVFNEj2MVlreu3ADqftqjqwUvDy/OJzKFgxKeTQ+eeUf20FOTuHVkDON2iNa25rAXVYtWJCjw== + dependencies: + "@babel/helper-module-imports" "^7.12.13" + "@babel/plugin-syntax-jsx" "^7.12.13" + "@babel/runtime" "^7.13.10" + "@emotion/hash" "^0.8.0" + "@emotion/memoize" "^0.7.5" + "@emotion/serialize" "^1.0.2" + babel-plugin-macros "^2.6.1" + convert-source-map "^1.5.0" + escape-string-regexp "^4.0.0" + find-root "^1.1.0" + source-map "^0.5.7" + stylis "4.0.13" + +"@emotion/cache@^11.7.1": + version "11.7.1" + resolved "https://registry.yarnpkg.com/@emotion/cache/-/cache-11.7.1.tgz#08d080e396a42e0037848214e8aa7bf879065539" + integrity sha512-r65Zy4Iljb8oyjtLeCuBH8Qjiy107dOYC6SJq7g7GV5UCQWMObY4SJDPGFjiiVpPrOJ2hmJOoBiYTC7hwx9E2A== + dependencies: + "@emotion/memoize" "^0.7.4" + "@emotion/sheet" "^1.1.0" + "@emotion/utils" "^1.0.0" + "@emotion/weak-memoize" "^0.2.5" + stylis "4.0.13" + +"@emotion/hash@^0.8.0": + version "0.8.0" + resolved "https://registry.yarnpkg.com/@emotion/hash/-/hash-0.8.0.tgz#bbbff68978fefdbe68ccb533bc8cbe1d1afb5413" + integrity sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow== + +"@emotion/memoize@^0.7.4", "@emotion/memoize@^0.7.5": + version "0.7.5" + resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.7.5.tgz#2c40f81449a4e554e9fc6396910ed4843ec2be50" + integrity sha512-igX9a37DR2ZPGYtV6suZ6whr8pTFtyHL3K/oLUotxpSVO2ASaprmAe2Dkq7tBo7CRY7MMDrAa9nuQP9/YG8FxQ== + +"@emotion/react@^11.7.1", "@emotion/react@^11.9.0": + version "11.9.0" + resolved "https://registry.yarnpkg.com/@emotion/react/-/react-11.9.0.tgz#b6d42b1db3bd7511e7a7c4151dc8bc82e14593b8" + integrity sha512-lBVSF5d0ceKtfKCDQJveNAtkC7ayxpVlgOohLgXqRwqWr9bOf4TZAFFyIcNngnV6xK6X4x2ZeXq7vliHkoVkxQ== + dependencies: + "@babel/runtime" "^7.13.10" + "@emotion/babel-plugin" "^11.7.1" + "@emotion/cache" 
"^11.7.1" + "@emotion/serialize" "^1.0.3" + "@emotion/utils" "^1.1.0" + "@emotion/weak-memoize" "^0.2.5" + hoist-non-react-statics "^3.3.1" + +"@emotion/serialize@^1.0.2", "@emotion/serialize@^1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@emotion/serialize/-/serialize-1.0.3.tgz#99e2060c26c6292469fb30db41f4690e1c8fea63" + integrity sha512-2mSSvgLfyV3q+iVh3YWgNlUc2a9ZlDU7DjuP5MjK3AXRR0dYigCrP99aeFtaB2L/hjfEZdSThn5dsZ0ufqbvsA== + dependencies: + "@emotion/hash" "^0.8.0" + "@emotion/memoize" "^0.7.4" + "@emotion/unitless" "^0.7.5" + "@emotion/utils" "^1.0.0" + csstype "^3.0.2" + +"@emotion/sheet@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@emotion/sheet/-/sheet-1.1.0.tgz#56d99c41f0a1cda2726a05aa6a20afd4c63e58d2" + integrity sha512-u0AX4aSo25sMAygCuQTzS+HsImZFuS8llY8O7b9MDRzbJM0kVJlAz6KNDqcG7pOuQZJmj/8X/rAW+66kMnMW+g== + +"@emotion/unitless@^0.7.5": + version "0.7.5" + resolved "https://registry.yarnpkg.com/@emotion/unitless/-/unitless-0.7.5.tgz#77211291c1900a700b8a78cfafda3160d76949ed" + integrity sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg== + +"@emotion/utils@^1.0.0", "@emotion/utils@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@emotion/utils/-/utils-1.1.0.tgz#86b0b297f3f1a0f2bdb08eeac9a2f49afd40d0cf" + integrity sha512-iRLa/Y4Rs5H/f2nimczYmS5kFJEbpiVvgN3XVfZ022IYhuNA1IRSHEizcof88LtCTXtl9S2Cxt32KgaXEu72JQ== + +"@emotion/weak-memoize@^0.2.5": + version "0.2.5" + resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.2.5.tgz#8eed982e2ee6f7f4e44c253e12962980791efd46" + integrity sha512-6U71C2Wp7r5XtFtQzYrW5iKFT67OixrSxjI4MptCHzdSVlgabczzqLe0ZSgnub/5Kp4hSbpDB1tMytZY9pwxxA== + +"@eslint/eslintrc@^1.2.3": + version "1.2.3" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-1.2.3.tgz#fcaa2bcef39e13d6e9e7f6271f4cc7cae1174886" + integrity 
sha512-uGo44hIwoLGNyduRpjdEpovcbMdd+Nv7amtmJxnKmI8xj6yd5LncmSwDa5NgX/41lIFJtkjD6YdVfgEzPfJ5UA== + dependencies: + ajv "^6.12.4" + debug "^4.3.2" + espree "^9.3.2" + globals "^13.9.0" + ignore "^5.2.0" + import-fresh "^3.2.1" + js-yaml "^4.1.0" + minimatch "^3.1.2" + strip-json-comments "^3.1.1" + +"@feast-dev/feast-ui@^0.20.4": + version "0.20.4" + resolved "https://registry.yarnpkg.com/@feast-dev/feast-ui/-/feast-ui-0.20.4.tgz#4b918f8922f3eecd9e3e7323f25ba9cac78a4567" + integrity sha512-KTUhKni7t++G6UwXyPbGWXwWHnTOVTH8ouYCoHXbGorgRL3K4fbq5tCSCJzP9L5FAo+cF1AjVZNRgwzPe6vAgA== + dependencies: + "@elastic/datemath" "^5.0.3" + "@elastic/eui" "^55.0.1" + "@emotion/react" "^11.7.1" + "@types/d3" "^7.1.0" + "@types/jest" "^27.0.1" + "@types/node" "^16.7.13" + "@types/react" "^17.0.20" + "@types/react-dom" "^17.0.9" + d3 "^7.3.0" + inter-ui "^3.19.3" + moment "^2.29.1" + prop-types "^15.8.1" + query-string "^7.1.1" + react-query "^3.34.12" + react-router-dom "6" + react-scripts "^5.0.0" + use-query-params "^1.2.3" + zod "^3.11.6" + +"@humanwhocodes/config-array@^0.9.2": + version "0.9.5" + resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.9.5.tgz#2cbaf9a89460da24b5ca6531b8bbfc23e1df50c7" + integrity sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw== + dependencies: + "@humanwhocodes/object-schema" "^1.2.1" + debug "^4.1.1" + minimatch "^3.0.4" + +"@humanwhocodes/object-schema@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== + +"@istanbuljs/load-nyc-config@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + integrity 
sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== + dependencies: + camelcase "^5.3.1" + find-up "^4.1.0" + get-package-type "^0.1.0" + js-yaml "^3.13.1" + resolve-from "^5.0.0" + +"@istanbuljs/schema@^0.1.2": + version "0.1.3" + resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + +"@jest/console@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/console/-/console-27.5.1.tgz#260fe7239602fe5130a94f1aa386eff54b014bba" + integrity sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg== + dependencies: + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^27.5.1" + jest-util "^27.5.1" + slash "^3.0.0" + +"@jest/console@^28.1.0": + version "28.1.0" + resolved "https://registry.yarnpkg.com/@jest/console/-/console-28.1.0.tgz#db78222c3d3b0c1db82f1b9de51094c2aaff2176" + integrity sha512-tscn3dlJFGay47kb4qVruQg/XWlmvU0xp3EJOjzzY+sBaI+YgwKcvAmTcyYU7xEiLLIY5HCdWRooAL8dqkFlDA== + dependencies: + "@jest/types" "^28.1.0" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^28.1.0" + jest-util "^28.1.0" + slash "^3.0.0" + +"@jest/core@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/core/-/core-27.5.1.tgz#267ac5f704e09dc52de2922cbf3af9edcd64b626" + integrity sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ== + dependencies: + "@jest/console" "^27.5.1" + "@jest/reporters" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + emittery "^0.8.1" + exit "^0.1.2" + graceful-fs "^4.2.9" + jest-changed-files "^27.5.1" + jest-config "^27.5.1" + jest-haste-map "^27.5.1" + jest-message-util 
"^27.5.1" + jest-regex-util "^27.5.1" + jest-resolve "^27.5.1" + jest-resolve-dependencies "^27.5.1" + jest-runner "^27.5.1" + jest-runtime "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + jest-validate "^27.5.1" + jest-watcher "^27.5.1" + micromatch "^4.0.4" + rimraf "^3.0.0" + slash "^3.0.0" + strip-ansi "^6.0.0" + +"@jest/environment@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-27.5.1.tgz#d7425820511fe7158abbecc010140c3fd3be9c74" + integrity sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA== + dependencies: + "@jest/fake-timers" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + jest-mock "^27.5.1" + +"@jest/fake-timers@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-27.5.1.tgz#76979745ce0579c8a94a4678af7a748eda8ada74" + integrity sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ== + dependencies: + "@jest/types" "^27.5.1" + "@sinonjs/fake-timers" "^8.0.1" + "@types/node" "*" + jest-message-util "^27.5.1" + jest-mock "^27.5.1" + jest-util "^27.5.1" + +"@jest/globals@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-27.5.1.tgz#7ac06ce57ab966566c7963431cef458434601b2b" + integrity sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/types" "^27.5.1" + expect "^27.5.1" + +"@jest/reporters@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-27.5.1.tgz#ceda7be96170b03c923c37987b64015812ffec04" + integrity sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw== + dependencies: + "@bcoe/v8-coverage" "^0.2.3" + "@jest/console" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + 
"@types/node" "*" + chalk "^4.0.0" + collect-v8-coverage "^1.0.0" + exit "^0.1.2" + glob "^7.1.2" + graceful-fs "^4.2.9" + istanbul-lib-coverage "^3.0.0" + istanbul-lib-instrument "^5.1.0" + istanbul-lib-report "^3.0.0" + istanbul-lib-source-maps "^4.0.0" + istanbul-reports "^3.1.3" + jest-haste-map "^27.5.1" + jest-resolve "^27.5.1" + jest-util "^27.5.1" + jest-worker "^27.5.1" + slash "^3.0.0" + source-map "^0.6.0" + string-length "^4.0.1" + terminal-link "^2.0.0" + v8-to-istanbul "^8.1.0" + +"@jest/schemas@^28.0.2": + version "28.0.2" + resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-28.0.2.tgz#08c30df6a8d07eafea0aef9fb222c5e26d72e613" + integrity sha512-YVDJZjd4izeTDkij00vHHAymNXQ6WWsdChFRK86qck6Jpr3DCL5W3Is3vslviRlP+bLuMYRLbdp98amMvqudhA== + dependencies: + "@sinclair/typebox" "^0.23.3" + +"@jest/source-map@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-27.5.1.tgz#6608391e465add4205eae073b55e7f279e04e8cf" + integrity sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg== + dependencies: + callsites "^3.0.0" + graceful-fs "^4.2.9" + source-map "^0.6.0" + +"@jest/test-result@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-27.5.1.tgz#56a6585fa80f7cdab72b8c5fc2e871d03832f5bb" + integrity sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag== + dependencies: + "@jest/console" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/istanbul-lib-coverage" "^2.0.0" + collect-v8-coverage "^1.0.0" + +"@jest/test-result@^28.1.0": + version "28.1.0" + resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-28.1.0.tgz#fd149dee123510dd2fcadbbf5f0020f98ad7f12c" + integrity sha512-sBBFIyoPzrZho3N+80P35A5oAkSKlGfsEFfXFWuPGBsW40UAjCkGakZhn4UQK4iQlW2vgCDMRDOob9FGKV8YoQ== + dependencies: + "@jest/console" "^28.1.0" + "@jest/types" "^28.1.0" + "@types/istanbul-lib-coverage" 
"^2.0.0" + collect-v8-coverage "^1.0.0" + +"@jest/test-sequencer@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-27.5.1.tgz#4057e0e9cea4439e544c6353c6affe58d095745b" + integrity sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ== + dependencies: + "@jest/test-result" "^27.5.1" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-runtime "^27.5.1" + +"@jest/transform@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-27.5.1.tgz#6c3501dcc00c4c08915f292a600ece5ecfe1f409" + integrity sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw== + dependencies: + "@babel/core" "^7.1.0" + "@jest/types" "^27.5.1" + babel-plugin-istanbul "^6.1.1" + chalk "^4.0.0" + convert-source-map "^1.4.0" + fast-json-stable-stringify "^2.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-regex-util "^27.5.1" + jest-util "^27.5.1" + micromatch "^4.0.4" + pirates "^4.0.4" + slash "^3.0.0" + source-map "^0.6.1" + write-file-atomic "^3.0.0" + +"@jest/types@^27.5.1": + version "27.5.1" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-27.5.1.tgz#3c79ec4a8ba61c170bf937bcf9e98a9df175ec80" + integrity sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^16.0.0" + chalk "^4.0.0" + +"@jest/types@^28.1.0": + version "28.1.0" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-28.1.0.tgz#508327a89976cbf9bd3e1cc74641a29fd7dfd519" + integrity sha512-xmEggMPr317MIOjjDoZ4ejCSr9Lpbt/u34+dvc99t7DS8YirW5rwZEhzKPC2BMUFkUhI48qs6qLUSGw5FuL0GA== + dependencies: + "@jest/schemas" "^28.0.2" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + 
chalk "^4.0.0" + +"@jridgewell/gen-mapping@^0.1.0": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" + integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/resolve-uri@^3.0.3": + version "3.0.7" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.0.7.tgz#30cd49820a962aff48c8fffc5cd760151fca61fe" + integrity sha512-8cXDaBBHOr2pQ7j77Y6Vp5VDT2sIqWyWQ56TjEq4ih/a4iST3dItRe8Q9fp0rrIl9DoKhWQtUQz/YpOxLkXbNA== + +"@jridgewell/set-array@^1.0.0": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.1.tgz#36a6acc93987adcf0ba50c66908bd0b70de8afea" + integrity sha512-Ct5MqZkLGEXTVmQYbGtx9SVqD2fqwvdubdps5D3djjAkgkKwT918VNOz65pEHFaYTeWcukmJmH5SwsA9Tn2ObQ== + +"@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.13" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.13.tgz#b6461fb0c2964356c469e115f504c95ad97ab88c" + integrity sha512-GryiOJmNcWbovBxTfZSF71V/mXbgcV3MewDe3kIMCLyIh5e7SKAeUZs+rMnJ8jkMolZ/4/VsdBmMrw3l+VdZ3w== + +"@jridgewell/trace-mapping@^0.3.9": + version "0.3.12" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.12.tgz#ccd8cd83ad894bae98a79eecd6a885b211bfe217" + integrity sha512-6GMdw8fZlZjs9CJONrWeWyjl8zYqbyOMSxS9FABnEw3i+wz99SESjWMWRRIsbIp8HVsMeXggi5b7+a9qO6W1fQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@leichtgewicht/ip-codec@^2.0.1": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" + integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== + 
+"@mapbox/hast-util-table-cell-style@^0.2.0": + version "0.2.0" + resolved "https://registry.yarnpkg.com/@mapbox/hast-util-table-cell-style/-/hast-util-table-cell-style-0.2.0.tgz#1003f59d54fae6f638cb5646f52110fb3da95b4d" + integrity sha512-gqaTIGC8My3LVSnU38IwjHVKJC94HSonjvFHDk8/aSrApL8v4uWgm8zJkK7MJIIbHuNOr/+Mv2KkQKcxs6LEZA== + dependencies: + unist-util-visit "^1.4.1" + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@pmmmwh/react-refresh-webpack-plugin@^0.5.3": + version "0.5.6" + resolved "https://registry.yarnpkg.com/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.6.tgz#9ced74cb23dae31ab385f775e237ce4c50422a1d" + integrity sha512-IIWxofIYt/AbMwoeBgj+O2aAXLrlCQVg+A4a2zfpXFNHgP8o8rvi3v+oe5t787Lj+KXlKOh8BAiUp9bhuELXhg== + dependencies: + ansi-html-community "^0.0.8" + common-path-prefix "^3.0.0" + core-js-pure "^3.8.1" + error-stack-parser "^2.0.6" + find-up "^5.0.0" + html-entities "^2.1.0" + loader-utils "^2.0.0" + schema-utils "^3.0.0" + source-map "^0.7.3" + +"@rollup/plugin-babel@^5.2.0": + version 
"5.3.1" + resolved "https://registry.yarnpkg.com/@rollup/plugin-babel/-/plugin-babel-5.3.1.tgz#04bc0608f4aa4b2e4b1aebf284344d0f68fda283" + integrity sha512-WFfdLWU/xVWKeRQnKmIAQULUI7Il0gZnBIH/ZFO069wYIfPu+8zrfp/KMW0atmELoRDq8FbiP3VCss9MhCut7Q== + dependencies: + "@babel/helper-module-imports" "^7.10.4" + "@rollup/pluginutils" "^3.1.0" + +"@rollup/plugin-node-resolve@^11.2.1": + version "11.2.1" + resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-11.2.1.tgz#82aa59397a29cd4e13248b106e6a4a1880362a60" + integrity sha512-yc2n43jcqVyGE2sqV5/YCmocy9ArjVAP/BeXyTtADTBBX6V0e5UMqwO8CdQ0kzjb6zu5P1qMzsScCMRvE9OlVg== + dependencies: + "@rollup/pluginutils" "^3.1.0" + "@types/resolve" "1.17.1" + builtin-modules "^3.1.0" + deepmerge "^4.2.2" + is-module "^1.0.0" + resolve "^1.19.0" + +"@rollup/plugin-replace@^2.4.1": + version "2.4.2" + resolved "https://registry.yarnpkg.com/@rollup/plugin-replace/-/plugin-replace-2.4.2.tgz#a2d539314fbc77c244858faa523012825068510a" + integrity sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg== + dependencies: + "@rollup/pluginutils" "^3.1.0" + magic-string "^0.25.7" + +"@rollup/pluginutils@^3.1.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-3.1.0.tgz#706b4524ee6dc8b103b3c995533e5ad680c02b9b" + integrity sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg== + dependencies: + "@types/estree" "0.0.39" + estree-walker "^1.0.1" + picomatch "^2.2.2" + +"@rushstack/eslint-patch@^1.1.0": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.1.3.tgz#6801033be7ff87a6b7cadaf5b337c9f366a3c4b0" + integrity sha512-WiBSI6JBIhC6LRIsB2Kwh8DsGTlbBU+mLRxJmAe3LjHTdkDpwIbEOZgoXBbZilk/vlfjK8i6nKRAvIRn1XaIMw== + +"@sinclair/typebox@^0.23.3": + version "0.23.5" + resolved 
"https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.23.5.tgz#93f7b9f4e3285a7a9ade7557d9a8d36809cbc47d" + integrity sha512-AFBVi/iT4g20DHoujvMH1aEDn8fGJh4xsRGCP6d8RpLPMqsNPvW01Jcn0QysXTsg++/xj25NmJsGyH9xug/wKg== + +"@sinonjs/commons@^1.7.0": + version "1.8.3" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" + integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ== + dependencies: + type-detect "4.0.8" + +"@sinonjs/fake-timers@^8.0.1": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-8.1.0.tgz#3fdc2b6cb58935b21bfb8d1625eb1300484316e7" + integrity sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg== + dependencies: + "@sinonjs/commons" "^1.7.0" + +"@surma/rollup-plugin-off-main-thread@^2.2.3": + version "2.2.3" + resolved "https://registry.yarnpkg.com/@surma/rollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-2.2.3.tgz#ee34985952ca21558ab0d952f00298ad2190c053" + integrity sha512-lR8q/9W7hZpMWweNiAKU7NQerBnzQQLvi8qnTDU/fxItPhtZVMbPV3lbCwjhIlNBe9Bbr5V+KHshvWmVSG9cxQ== + dependencies: + ejs "^3.1.6" + json5 "^2.2.0" + magic-string "^0.25.0" + string.prototype.matchall "^4.0.6" + +"@svgr/babel-plugin-add-jsx-attribute@^5.4.0": + version "5.4.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-5.4.0.tgz#81ef61947bb268eb9d50523446f9c638fb355906" + integrity sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg== + +"@svgr/babel-plugin-remove-jsx-attribute@^5.4.0": + version "5.4.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-5.4.0.tgz#6b2c770c95c874654fd5e1d5ef475b78a0a962ef" + integrity 
sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg== + +"@svgr/babel-plugin-remove-jsx-empty-expression@^5.0.1": + version "5.0.1" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-5.0.1.tgz#25621a8915ed7ad70da6cea3d0a6dbc2ea933efd" + integrity sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA== + +"@svgr/babel-plugin-replace-jsx-attribute-value@^5.0.1": + version "5.0.1" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-5.0.1.tgz#0b221fc57f9fcd10e91fe219e2cd0dd03145a897" + integrity sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ== + +"@svgr/babel-plugin-svg-dynamic-title@^5.4.0": + version "5.4.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-5.4.0.tgz#139b546dd0c3186b6e5db4fefc26cb0baea729d7" + integrity sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg== + +"@svgr/babel-plugin-svg-em-dimensions@^5.4.0": + version "5.4.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-5.4.0.tgz#6543f69526632a133ce5cabab965deeaea2234a0" + integrity sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw== + +"@svgr/babel-plugin-transform-react-native-svg@^5.4.0": + version "5.4.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-5.4.0.tgz#00bf9a7a73f1cad3948cdab1f8dfb774750f8c80" + integrity sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q== + +"@svgr/babel-plugin-transform-svg-component@^5.5.0": + version "5.5.0" + resolved 
"https://registry.yarnpkg.com/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-5.5.0.tgz#583a5e2a193e214da2f3afeb0b9e8d3250126b4a" + integrity sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ== + +"@svgr/babel-preset@^5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-preset/-/babel-preset-5.5.0.tgz#8af54f3e0a8add7b1e2b0fcd5a882c55393df327" + integrity sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig== + dependencies: + "@svgr/babel-plugin-add-jsx-attribute" "^5.4.0" + "@svgr/babel-plugin-remove-jsx-attribute" "^5.4.0" + "@svgr/babel-plugin-remove-jsx-empty-expression" "^5.0.1" + "@svgr/babel-plugin-replace-jsx-attribute-value" "^5.0.1" + "@svgr/babel-plugin-svg-dynamic-title" "^5.4.0" + "@svgr/babel-plugin-svg-em-dimensions" "^5.4.0" + "@svgr/babel-plugin-transform-react-native-svg" "^5.4.0" + "@svgr/babel-plugin-transform-svg-component" "^5.5.0" + +"@svgr/core@^5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@svgr/core/-/core-5.5.0.tgz#82e826b8715d71083120fe8f2492ec7d7874a579" + integrity sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ== + dependencies: + "@svgr/plugin-jsx" "^5.5.0" + camelcase "^6.2.0" + cosmiconfig "^7.0.0" + +"@svgr/hast-util-to-babel-ast@^5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-5.5.0.tgz#5ee52a9c2533f73e63f8f22b779f93cd432a5461" + integrity sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ== + dependencies: + "@babel/types" "^7.12.6" + +"@svgr/plugin-jsx@^5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@svgr/plugin-jsx/-/plugin-jsx-5.5.0.tgz#1aa8cd798a1db7173ac043466d7b52236b369000" + integrity sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA== 
+ dependencies: + "@babel/core" "^7.12.3" + "@svgr/babel-preset" "^5.5.0" + "@svgr/hast-util-to-babel-ast" "^5.5.0" + svg-parser "^2.0.2" + +"@svgr/plugin-svgo@^5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@svgr/plugin-svgo/-/plugin-svgo-5.5.0.tgz#02da55d85320549324e201c7b2e53bf431fcc246" + integrity sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ== + dependencies: + cosmiconfig "^7.0.0" + deepmerge "^4.2.2" + svgo "^1.2.2" + +"@svgr/webpack@^5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@svgr/webpack/-/webpack-5.5.0.tgz#aae858ee579f5fa8ce6c3166ef56c6a1b381b640" + integrity sha512-DOBOK255wfQxguUta2INKkzPj6AIS6iafZYiYmHn6W3pHlycSRRlvWKCfLDG10fXfLWqE3DJHgRUOyJYmARa7g== + dependencies: + "@babel/core" "^7.12.3" + "@babel/plugin-transform-react-constant-elements" "^7.12.1" + "@babel/preset-env" "^7.12.1" + "@babel/preset-react" "^7.12.5" + "@svgr/core" "^5.5.0" + "@svgr/plugin-jsx" "^5.5.0" + "@svgr/plugin-svgo" "^5.5.0" + loader-utils "^2.0.0" + +"@testing-library/dom@^8.5.0": + version "8.13.0" + resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-8.13.0.tgz#bc00bdd64c7d8b40841e27a70211399ad3af46f5" + integrity sha512-9VHgfIatKNXQNaZTtLnalIy0jNZzY35a4S3oi08YAt9Hv1VsfZ/DfA45lM8D/UhtHBGJ4/lGwp0PZkVndRkoOQ== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/runtime" "^7.12.5" + "@types/aria-query" "^4.2.0" + aria-query "^5.0.0" + chalk "^4.1.0" + dom-accessibility-api "^0.5.9" + lz-string "^1.4.4" + pretty-format "^27.0.2" + +"@testing-library/jest-dom@^5.16.4": + version "5.16.4" + resolved "https://registry.yarnpkg.com/@testing-library/jest-dom/-/jest-dom-5.16.4.tgz#938302d7b8b483963a3ae821f1c0808f872245cd" + integrity sha512-Gy+IoFutbMQcky0k+bqqumXZ1cTGswLsFqmNLzNdSKkU9KGV2u9oXhukCbbJ9/LRPKiqwxEE8VpV/+YZlfkPUA== + dependencies: + "@babel/runtime" "^7.9.2" + "@types/testing-library__jest-dom" "^5.9.1" + aria-query "^5.0.0" + chalk "^3.0.0" + css "^3.0.0" + 
css.escape "^1.5.1" + dom-accessibility-api "^0.5.6" + lodash "^4.17.15" + redent "^3.0.0" + +"@testing-library/react@^13.2.0": + version "13.2.0" + resolved "https://registry.yarnpkg.com/@testing-library/react/-/react-13.2.0.tgz#2db00bc94d71c4e90e5c25582e90a650ae2925bf" + integrity sha512-Bprbz/SZVONCJy5f7hcihNCv313IJXdYiv0nSJklIs1SQCIHHNlnGNkosSXnGZTmesyGIcBGNppYhXcc11pb7g== + dependencies: + "@babel/runtime" "^7.12.5" + "@testing-library/dom" "^8.5.0" + "@types/react-dom" "^18.0.0" + +"@testing-library/user-event@^13.5.0": + version "13.5.0" + resolved "https://registry.yarnpkg.com/@testing-library/user-event/-/user-event-13.5.0.tgz#69d77007f1e124d55314a2b73fd204b333b13295" + integrity sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg== + dependencies: + "@babel/runtime" "^7.12.5" + +"@tootallnate/once@1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" + integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== + +"@trysound/sax@0.2.0": + version "0.2.0" + resolved "https://registry.yarnpkg.com/@trysound/sax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad" + integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== + +"@types/aria-query@^4.2.0": + version "4.2.2" + resolved "https://registry.yarnpkg.com/@types/aria-query/-/aria-query-4.2.2.tgz#ed4e0ad92306a704f9fb132a0cfcf77486dbe2bc" + integrity sha512-HnYpAE1Y6kRyKM/XkEuiRQhTHvkzMBurTHnpFLYLBGPIylZNPs9jJcuOOYWxPLJCSEtmZT0Y8rHDokKN7rRTig== + +"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.14": + version "7.1.19" + resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.19.tgz#7b497495b7d1b4812bdb9d02804d0576f43ee460" + integrity sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw== + dependencies: + 
"@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + "@types/babel__generator" "*" + "@types/babel__template" "*" + "@types/babel__traverse" "*" + +"@types/babel__generator@*": + version "7.6.4" + resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" + integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== + dependencies: + "@babel/types" "^7.0.0" + +"@types/babel__template@*": + version "7.4.1" + resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" + integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + +"@types/babel__traverse@*", "@types/babel__traverse@^7.0.4", "@types/babel__traverse@^7.0.6": + version "7.17.1" + resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.17.1.tgz#1a0e73e8c28c7e832656db372b779bfd2ef37314" + integrity sha512-kVzjari1s2YVi77D3w1yuvohV2idweYXMCDzqBiVNN63TcDWrIlTVOYpqVrvbbyOE/IyzBoTKF0fdnLPEORFxA== + dependencies: + "@babel/types" "^7.3.0" + +"@types/body-parser@*": + version "1.19.2" + resolved "https://registry.yarnpkg.com/@types/body-parser/-/body-parser-1.19.2.tgz#aea2059e28b7658639081347ac4fab3de166e6f0" + integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g== + dependencies: + "@types/connect" "*" + "@types/node" "*" + +"@types/bonjour@^3.5.9": + version "3.5.10" + resolved "https://registry.yarnpkg.com/@types/bonjour/-/bonjour-3.5.10.tgz#0f6aadfe00ea414edc86f5d106357cda9701e275" + integrity sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw== + dependencies: + "@types/node" "*" + +"@types/chroma-js@^2.0.0": + version "2.1.3" + resolved 
"https://registry.yarnpkg.com/@types/chroma-js/-/chroma-js-2.1.3.tgz#0b03d737ff28fad10eb884e0c6cedd5ffdc4ba0a" + integrity sha512-1xGPhoSGY1CPmXLCBcjVZSQinFjL26vlR8ZqprsBWiFyED4JacJJ9zHhh5aaUXqbY9B37mKQ73nlydVAXmr1+g== + +"@types/connect-history-api-fallback@^1.3.5": + version "1.3.5" + resolved "https://registry.yarnpkg.com/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.3.5.tgz#d1f7a8a09d0ed5a57aee5ae9c18ab9b803205dae" + integrity sha512-h8QJa8xSb1WD4fpKBDcATDNGXghFj6/3GRWG6dhmRcu0RX1Ubasur2Uvx5aeEwlf0MwblEC2bMzzMQntxnw/Cw== + dependencies: + "@types/express-serve-static-core" "*" + "@types/node" "*" + +"@types/connect@*": + version "3.4.35" + resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.35.tgz#5fcf6ae445e4021d1fc2219a4873cc73a3bb2ad1" + integrity sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ== + dependencies: + "@types/node" "*" + +"@types/d3-array@*": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-array/-/d3-array-3.0.2.tgz#71c35bca8366a40d1b8fce9279afa4a77fb0065d" + integrity sha512-5mjGjz6XOXKOCdTajXTZ/pMsg236RdiwKPrRPWAEf/2S/+PzwY+LLYShUpeysWaMvsdS7LArh6GdUefoxpchsQ== + +"@types/d3-axis@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-axis/-/d3-axis-3.0.1.tgz#6afc20744fa5cc0cbc3e2bd367b140a79ed3e7a8" + integrity sha512-zji/iIbdd49g9WN0aIsGcwcTBUkgLsCSwB+uH+LPVDAiKWENMtI3cJEWt+7/YYwelMoZmbBfzA3qCdrZ2XFNnw== + dependencies: + "@types/d3-selection" "*" + +"@types/d3-brush@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-brush/-/d3-brush-3.0.1.tgz#ae5f17ce391935ca88b29000e60ee20452c6357c" + integrity sha512-B532DozsiTuQMHu2YChdZU0qsFJSio3Q6jmBYGYNp3gMDzBmuFFgPt9qKA4VYuLZMp4qc6eX7IUFUEsvHiXZAw== + dependencies: + "@types/d3-selection" "*" + +"@types/d3-chord@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-chord/-/d3-chord-3.0.1.tgz#54c8856c19c8e4ab36a53f73ba737de4768ad248" + 
integrity sha512-eQfcxIHrg7V++W8Qxn6QkqBNBokyhdWSAS73AbkbMzvLQmVVBviknoz2SRS/ZJdIOmhcmmdCRE/NFOm28Z1AMw== + +"@types/d3-color@*": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-color/-/d3-color-3.0.2.tgz#53f2d6325f66ee79afd707c05ac849e8ae0edbb0" + integrity sha512-WVx6zBiz4sWlboCy7TCgjeyHpNjMsoF36yaagny1uXfbadc9f+5BeBf7U+lRmQqY3EHbGQpP8UdW8AC+cywSwQ== + +"@types/d3-contour@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-contour/-/d3-contour-3.0.1.tgz#9ff4e2fd2a3910de9c5097270a7da8a6ef240017" + integrity sha512-C3zfBrhHZvrpAAK3YXqLWVAGo87A4SvJ83Q/zVJ8rFWJdKejUnDYaWZPkA8K84kb2vDA/g90LTQAz7etXcgoQQ== + dependencies: + "@types/d3-array" "*" + "@types/geojson" "*" + +"@types/d3-delaunay@*": + version "6.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-delaunay/-/d3-delaunay-6.0.0.tgz#c09953ac7e5460997f693d2d7bf3522e0d4a88e6" + integrity sha512-iGm7ZaGLq11RK3e69VeMM6Oqj2SjKUB9Qhcyd1zIcqn2uE8w9GFB445yCY46NOQO3ByaNyktX1DK+Etz7ZaX+w== + +"@types/d3-dispatch@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-dispatch/-/d3-dispatch-3.0.1.tgz#a1b18ae5fa055a6734cb3bd3cbc6260ef19676e3" + integrity sha512-NhxMn3bAkqhjoxabVJWKryhnZXXYYVQxaBnbANu0O94+O/nX9qSjrA1P1jbAQJxJf+VC72TxDX/YJcKue5bRqw== + +"@types/d3-drag@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-drag/-/d3-drag-3.0.1.tgz#fb1e3d5cceeee4d913caa59dedf55c94cb66e80f" + integrity sha512-o1Va7bLwwk6h03+nSM8dpaGEYnoIG19P0lKqlic8Un36ymh9NSkNFX1yiXMKNMx8rJ0Kfnn2eovuFaL6Jvj0zA== + dependencies: + "@types/d3-selection" "*" + +"@types/d3-dsv@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-dsv/-/d3-dsv-3.0.0.tgz#f3c61fb117bd493ec0e814856feb804a14cfc311" + integrity sha512-o0/7RlMl9p5n6FQDptuJVMxDf/7EDEv2SYEO/CwdG2tr1hTfUVi0Iavkk2ax+VpaQ/1jVhpnj5rq1nj8vwhn2A== + +"@types/d3-ease@*": + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/@types/d3-ease/-/d3-ease-3.0.0.tgz#c29926f8b596f9dadaeca062a32a45365681eae0" + integrity sha512-aMo4eaAOijJjA6uU+GIeW018dvy9+oH5Y2VPPzjjfxevvGQ/oRDs+tfYC9b50Q4BygRR8yE2QCLsrT0WtAVseA== + +"@types/d3-fetch@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-fetch/-/d3-fetch-3.0.1.tgz#f9fa88b81aa2eea5814f11aec82ecfddbd0b8fe0" + integrity sha512-toZJNOwrOIqz7Oh6Q7l2zkaNfXkfR7mFSJvGvlD/Ciq/+SQ39d5gynHJZ/0fjt83ec3WL7+u3ssqIijQtBISsw== + dependencies: + "@types/d3-dsv" "*" + +"@types/d3-force@*": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@types/d3-force/-/d3-force-3.0.3.tgz#76cb20d04ae798afede1ea6e41750763ff5a9c82" + integrity sha512-z8GteGVfkWJMKsx6hwC3SiTSLspL98VNpmvLpEFJQpZPq6xpA1I8HNBDNSpukfK0Vb0l64zGFhzunLgEAcBWSA== + +"@types/d3-format@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-format/-/d3-format-3.0.1.tgz#194f1317a499edd7e58766f96735bdc0216bb89d" + integrity sha512-5KY70ifCCzorkLuIkDe0Z9YTf9RR2CjBX1iaJG+rgM/cPP+sO+q9YdQ9WdhQcgPj1EQiJ2/0+yUkkziTG6Lubg== + +"@types/d3-geo@*": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-geo/-/d3-geo-3.0.2.tgz#e7ec5f484c159b2c404c42d260e6d99d99f45d9a" + integrity sha512-DbqK7MLYA8LpyHQfv6Klz0426bQEf7bRTvhMy44sNGVyZoWn//B0c+Qbeg8Osi2Obdc9BLLXYAKpyWege2/7LQ== + dependencies: + "@types/geojson" "*" + +"@types/d3-hierarchy@*": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-hierarchy/-/d3-hierarchy-3.0.2.tgz#ca63f2f4da15b8f129c5b7dffd71d904cba6aca2" + integrity sha512-+krnrWOZ+aQB6v+E+jEkmkAx9HvsNAD+1LCD0vlBY3t+HwjKnsBFbpVLx6WWzDzCIuiTWdAxXMEnGnVXpB09qQ== + +"@types/d3-interpolate@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-interpolate/-/d3-interpolate-3.0.1.tgz#e7d17fa4a5830ad56fe22ce3b4fac8541a9572dc" + integrity sha512-jx5leotSeac3jr0RePOH1KdR9rISG91QIE4Q2PYTu4OymLTZfA3SrnURSLzKH48HmXVUru50b8nje4E79oQSQw== + dependencies: + "@types/d3-color" "*" + 
+"@types/d3-path@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-path/-/d3-path-3.0.0.tgz#939e3a784ae4f80b1fde8098b91af1776ff1312b" + integrity sha512-0g/A+mZXgFkQxN3HniRDbXMN79K3CdTpLsevj+PXiTcb2hVyvkZUBg37StmgCQkaD84cUJ4uaDAWq7UJOQy2Tg== + +"@types/d3-polygon@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-polygon/-/d3-polygon-3.0.0.tgz#5200a3fa793d7736fa104285fa19b0dbc2424b93" + integrity sha512-D49z4DyzTKXM0sGKVqiTDTYr+DHg/uxsiWDAkNrwXYuiZVd9o9wXZIo+YsHkifOiyBkmSWlEngHCQme54/hnHw== + +"@types/d3-quadtree@*": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-quadtree/-/d3-quadtree-3.0.2.tgz#433112a178eb7df123aab2ce11c67f51cafe8ff5" + integrity sha512-QNcK8Jguvc8lU+4OfeNx+qnVy7c0VrDJ+CCVFS9srBo2GL9Y18CnIxBdTF3v38flrGy5s1YggcoAiu6s4fLQIw== + +"@types/d3-random@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-random/-/d3-random-3.0.1.tgz#5c8d42b36cd4c80b92e5626a252f994ca6bfc953" + integrity sha512-IIE6YTekGczpLYo/HehAy3JGF1ty7+usI97LqraNa8IiDur+L44d0VOjAvFQWJVdZOJHukUJw+ZdZBlgeUsHOQ== + +"@types/d3-scale-chromatic@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.0.0.tgz#103124777e8cdec85b20b51fd3397c682ee1e954" + integrity sha512-dsoJGEIShosKVRBZB0Vo3C8nqSDqVGujJU6tPznsBJxNJNwMF8utmS83nvCBKQYPpjCzaaHcrf66iTRpZosLPw== + +"@types/d3-scale@*": + version "4.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-scale/-/d3-scale-4.0.2.tgz#41be241126af4630524ead9cb1008ab2f0f26e69" + integrity sha512-Yk4htunhPAwN0XGlIwArRomOjdoBFXC3+kCxK2Ubg7I9shQlVSJy/pG/Ht5ASN+gdMIalpk8TJ5xV74jFsetLA== + dependencies: + "@types/d3-time" "*" + +"@types/d3-selection@*": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-selection/-/d3-selection-3.0.2.tgz#23e48a285b24063630bbe312cc0cfe2276de4a59" + integrity sha512-d29EDd0iUBrRoKhPndhDY6U/PYxOWqgIZwKTooy2UkBfU7TNZNpRho0yLWPxlatQrFWk2mnTu71IZQ4+LRgKlQ== + 
+"@types/d3-shape@*": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/d3-shape/-/d3-shape-3.0.2.tgz#4b1ca4ddaac294e76b712429726d40365cd1e8ca" + integrity sha512-5+ButCmIfNX8id5seZ7jKj3igdcxx+S9IDBiT35fQGTLZUfkFgTv+oBH34xgeoWDKpWcMITSzBILWQtBoN5Piw== + dependencies: + "@types/d3-path" "*" + +"@types/d3-time-format@*": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-time-format/-/d3-time-format-4.0.0.tgz#ee7b6e798f8deb2d9640675f8811d0253aaa1946" + integrity sha512-yjfBUe6DJBsDin2BMIulhSHmr5qNR5Pxs17+oW4DoVPyVIXZ+m6bs7j1UVKP08Emv6jRmYrYqxYzO63mQxy1rw== + +"@types/d3-time@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-time/-/d3-time-3.0.0.tgz#e1ac0f3e9e195135361fa1a1d62f795d87e6e819" + integrity sha512-sZLCdHvBUcNby1cB6Fd3ZBrABbjz3v1Vm90nysCQ6Vt7vd6e/h9Lt7SiJUoEX0l4Dzc7P5llKyhqSi1ycSf1Hg== + +"@types/d3-timer@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/d3-timer/-/d3-timer-3.0.0.tgz#e2505f1c21ec08bda8915238e397fb71d2fc54ce" + integrity sha512-HNB/9GHqu7Fo8AQiugyJbv6ZxYz58wef0esl4Mv828w1ZKpAshw/uFWVDUcIB9KKFeFKoxS3cHY07FFgtTRZ1g== + +"@types/d3-transition@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-transition/-/d3-transition-3.0.1.tgz#c9a96125567173d6163a6985b874f79154f4cc3d" + integrity sha512-Sv4qEI9uq3bnZwlOANvYK853zvpdKEm1yz9rcc8ZTsxvRklcs9Fx4YFuGA3gXoQN/c/1T6QkVNjhaRO/cWj94g== + dependencies: + "@types/d3-selection" "*" + +"@types/d3-zoom@*": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/d3-zoom/-/d3-zoom-3.0.1.tgz#4bfc7e29625c4f79df38e2c36de52ec3e9faf826" + integrity sha512-7s5L9TjfqIYQmQQEUcpMAcBOahem7TRoSO/+Gkz02GbMVuULiZzjF2BOdw291dbO2aNon4m2OdFsRGaCq2caLQ== + dependencies: + "@types/d3-interpolate" "*" + "@types/d3-selection" "*" + +"@types/d3@^7.1.0": + version "7.1.0" + resolved "https://registry.yarnpkg.com/@types/d3/-/d3-7.1.0.tgz#8f32a7e7f434d8f920c8b1ebdfed55e18c033720" + integrity 
sha512-gYWvgeGjEl+zmF8c+U1RNIKqe7sfQwIXeLXO5Os72TjDjCEtgpvGBvZ8dXlAuSS1m6B90Y1Uo6Bm36OGR/OtCA== + dependencies: + "@types/d3-array" "*" + "@types/d3-axis" "*" + "@types/d3-brush" "*" + "@types/d3-chord" "*" + "@types/d3-color" "*" + "@types/d3-contour" "*" + "@types/d3-delaunay" "*" + "@types/d3-dispatch" "*" + "@types/d3-drag" "*" + "@types/d3-dsv" "*" + "@types/d3-ease" "*" + "@types/d3-fetch" "*" + "@types/d3-force" "*" + "@types/d3-format" "*" + "@types/d3-geo" "*" + "@types/d3-hierarchy" "*" + "@types/d3-interpolate" "*" + "@types/d3-path" "*" + "@types/d3-polygon" "*" + "@types/d3-quadtree" "*" + "@types/d3-random" "*" + "@types/d3-scale" "*" + "@types/d3-scale-chromatic" "*" + "@types/d3-selection" "*" + "@types/d3-shape" "*" + "@types/d3-time" "*" + "@types/d3-time-format" "*" + "@types/d3-timer" "*" + "@types/d3-transition" "*" + "@types/d3-zoom" "*" + +"@types/eslint-scope@^3.7.3": + version "3.7.3" + resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.3.tgz#125b88504b61e3c8bc6f870882003253005c3224" + integrity sha512-PB3ldyrcnAicT35TWPs5IcwKD8S333HMaa2VVv4+wdvebJkjWuW/xESoB8IwRcog8HYVYamb1g/R31Qv5Bx03g== + dependencies: + "@types/eslint" "*" + "@types/estree" "*" + +"@types/eslint@*": + version "8.4.2" + resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.4.2.tgz#48f2ac58ab9c631cb68845c3d956b28f79fad575" + integrity sha512-Z1nseZON+GEnFjJc04sv4NSALGjhFwy6K0HXt7qsn5ArfAKtb63dXNJHf+1YW6IpOIYRBGUbu3GwJdj8DGnCjA== + dependencies: + "@types/estree" "*" + "@types/json-schema" "*" + +"@types/eslint@^7.28.2": + version "7.29.0" + resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-7.29.0.tgz#e56ddc8e542815272720bb0b4ccc2aff9c3e1c78" + integrity sha512-VNcvioYDH8/FxaeTKkM4/TiTwt6pBV9E3OfGmvaw8tPl0rrHCJ4Ll15HRT+pMiFAf/MLQvAzC+6RzUMEL9Ceng== + dependencies: + "@types/estree" "*" + "@types/json-schema" "*" + +"@types/estree@*", "@types/estree@^0.0.51": + version "0.0.51" + resolved 
"https://registry.yarnpkg.com/@types/estree/-/estree-0.0.51.tgz#cfd70924a25a3fd32b218e5e420e6897e1ac4f40" + integrity sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ== + +"@types/estree@0.0.39": + version "0.0.39" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" + integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== + +"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": + version "4.17.28" + resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.28.tgz#c47def9f34ec81dc6328d0b1b5303d1ec98d86b8" + integrity sha512-P1BJAEAW3E2DJUlkgq4tOL3RyMunoWXqbSCygWo5ZIWTjUgN1YnaXWW4VWl/oc8vs/XoYibEGBKP0uZyF4AHig== + dependencies: + "@types/node" "*" + "@types/qs" "*" + "@types/range-parser" "*" + +"@types/express@*", "@types/express@^4.17.13": + version "4.17.13" + resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.13.tgz#a76e2995728999bab51a33fabce1d705a3709034" + integrity sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA== + dependencies: + "@types/body-parser" "*" + "@types/express-serve-static-core" "^4.17.18" + "@types/qs" "*" + "@types/serve-static" "*" + +"@types/geojson@*": + version "7946.0.8" + resolved "https://registry.yarnpkg.com/@types/geojson/-/geojson-7946.0.8.tgz#30744afdb385e2945e22f3b033f897f76b1f12ca" + integrity sha512-1rkryxURpr6aWP7R786/UQOkJ3PcpQiWkAXBmdWc7ryFWqN6a4xfK7BtjXvFBKO9LjQ+MWQSWxYeZX1OApnArA== + +"@types/graceful-fs@^4.1.2": + version "4.1.5" + resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" + integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== + dependencies: + "@types/node" "*" + +"@types/hast@^2.0.0": + version 
"2.3.4" + resolved "https://registry.yarnpkg.com/@types/hast/-/hast-2.3.4.tgz#8aa5ef92c117d20d974a82bdfb6a648b08c0bafc" + integrity sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g== + dependencies: + "@types/unist" "*" + +"@types/hoist-non-react-statics@^3.3.0": + version "3.3.1" + resolved "https://registry.yarnpkg.com/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.1.tgz#1124aafe5118cb591977aeb1ceaaed1070eb039f" + integrity sha512-iMIqiko6ooLrTh1joXodJK5X9xeEALT1kM5G3ZLhD3hszxBdIEd5C75U834D9mLcINgD4OyZf5uQXjkuYydWvA== + dependencies: + "@types/react" "*" + hoist-non-react-statics "^3.3.0" + +"@types/html-minifier-terser@^6.0.0": + version "6.1.0" + resolved "https://registry.yarnpkg.com/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35" + integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== + +"@types/http-proxy@^1.17.8": + version "1.17.9" + resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.9.tgz#7f0e7931343761efde1e2bf48c40f02f3f75705a" + integrity sha512-QsbSjA/fSk7xB+UXlCT3wHBy5ai9wOcNDWwZAtud+jXhwOM3l+EYZh8Lng4+/6n8uar0J7xILzqftJdJ/Wdfkw== + dependencies: + "@types/node" "*" + +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" + integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== + +"@types/istanbul-lib-report@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" + integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + dependencies: + 
"@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" + integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== + dependencies: + "@types/istanbul-lib-report" "*" + +"@types/jest@*", "@types/jest@^27.0.1": + version "27.5.0" + resolved "https://registry.yarnpkg.com/@types/jest/-/jest-27.5.0.tgz#e04ed1824ca6b1dd0438997ba60f99a7405d4c7b" + integrity sha512-9RBFx7r4k+msyj/arpfaa0WOOEcaAZNmN+j80KFbFCoSqCJGHTz7YMAMGQW9Xmqm5w6l5c25vbSjMwlikJi5+g== + dependencies: + jest-matcher-utils "^27.0.0" + pretty-format "^27.0.0" + +"@types/json-schema@*", "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": + version "7.0.11" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" + integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== + +"@types/json5@^0.0.29": + version "0.0.29" + resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + integrity sha1-7ihweulOEdK4J7y+UnC86n8+ce4= + +"@types/lodash@^4.14.160": + version "4.14.182" + resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.182.tgz#05301a4d5e62963227eaafe0ce04dd77c54ea5c2" + integrity sha512-/THyiqyQAP9AfARo4pF+aCGcyiQ94tX/Is2I7HofNRqoYLgN1PBoOWu2/zTA5zMxzP5EFutMtWtGAFRKUe961Q== + +"@types/mdast@^3.0.0": + version "3.0.10" + resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.10.tgz#4724244a82a4598884cbbe9bcfd73dff927ee8af" + integrity sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA== + dependencies: + "@types/unist" "*" + +"@types/mime@^1": + version "1.3.2" + resolved 
"https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a" + integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw== + +"@types/node@*": + version "17.0.32" + resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.32.tgz#51d59d7a90ef2d0ae961791e0900cad2393a0149" + integrity sha512-eAIcfAvhf/BkHcf4pkLJ7ECpBAhh9kcxRBpip9cTiO+hf+aJrsxYxBeS6OXvOd9WqNAJmavXVpZvY1rBjNsXmw== + +"@types/node@^16.7.13": + version "16.11.34" + resolved "https://registry.yarnpkg.com/@types/node/-/node-16.11.34.tgz#520224e4be4448c279ecad09639ab460cc441a50" + integrity sha512-UrWGDyLAlQ2Z8bNOGWTsqbP9ZcBeTYBVuTRNxXTztBy5KhWUFI3BaeDWoCP/CzV/EVGgO1NTYzv9ZytBI9GAEw== + +"@types/numeral@^0.0.28": + version "0.0.28" + resolved "https://registry.yarnpkg.com/@types/numeral/-/numeral-0.0.28.tgz#e43928f0bda10b169b6f7ecf99e3ddf836b8ebe4" + integrity sha512-Sjsy10w6XFHDktJJdXzBJmoondAKW+LcGpRFH+9+zXEDj0cOH8BxJuZA9vUDSMAzU1YRJlsPKmZEEiTYDlICLw== + +"@types/parse-json@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" + integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== + +"@types/parse5@^5.0.0": + version "5.0.3" + resolved "https://registry.yarnpkg.com/@types/parse5/-/parse5-5.0.3.tgz#e7b5aebbac150f8b5fdd4a46e7f0bd8e65e19109" + integrity sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw== + +"@types/prettier@^2.1.5": + version "2.6.0" + resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.6.0.tgz#efcbd41937f9ae7434c714ab698604822d890759" + integrity sha512-G/AdOadiZhnJp0jXCaBQU449W2h716OW/EoXeYkCytxKL06X1WCXB4DZpp8TpZ8eyIJVS1cw4lrlkkSYU21cDw== + +"@types/prismjs@*": + version "1.26.0" + resolved 
"https://registry.yarnpkg.com/@types/prismjs/-/prismjs-1.26.0.tgz#a1c3809b0ad61c62cac6d4e0c56d610c910b7654" + integrity sha512-ZTaqn/qSqUuAq1YwvOFQfVW1AR/oQJlLSZVustdjwI+GZ8kr0MSHBj0tsXPW1EqHubx50gtBEjbPGsdZwQwCjQ== + +"@types/prop-types@*": + version "15.7.5" + resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" + integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== + +"@types/q@^1.5.1": + version "1.5.5" + resolved "https://registry.yarnpkg.com/@types/q/-/q-1.5.5.tgz#75a2a8e7d8ab4b230414505d92335d1dcb53a6df" + integrity sha512-L28j2FcJfSZOnL1WBjDYp2vUHCeIFlyYI/53EwD/rKUBQ7MtUUfbQWiyKJGpcnv4/WgrhWsFKrcPstcAt/J0tQ== + +"@types/qs@*": + version "6.9.7" + resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb" + integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw== + +"@types/range-parser@*": + version "1.2.4" + resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc" + integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== + +"@types/react-beautiful-dnd@^13.1.2": + version "13.1.2" + resolved "https://registry.yarnpkg.com/@types/react-beautiful-dnd/-/react-beautiful-dnd-13.1.2.tgz#510405abb09f493afdfd898bf83995dc6385c130" + integrity sha512-+OvPkB8CdE/bGdXKyIhc/Lm2U7UAYCCJgsqmopFmh9gbAudmslkI8eOrPDjg4JhwSE6wytz4a3/wRjKtovHVJg== + dependencies: + "@types/react" "*" + +"@types/react-dom@^17.0.9": + version "17.0.16" + resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-17.0.16.tgz#7caba93cf2806c51e64d620d8dff4bae57e06cc4" + integrity sha512-DWcXf8EbMrO/gWnQU7Z88Ws/p16qxGpPyjTKTpmBSFKeE+HveVubqGO1CVK7FrwlWD5MuOcvh8gtd0/XO38NdQ== + dependencies: + "@types/react" "^17" + +"@types/react-dom@^18.0.0": + version 
"18.0.3" + resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-18.0.3.tgz#a022ea08c75a476fe5e96b675c3e673363853831" + integrity sha512-1RRW9kst+67gveJRYPxGmVy8eVJ05O43hg77G2j5m76/RFJtMbcfAs2viQ2UNsvvDg8F7OfQZx8qQcl6ymygaQ== + dependencies: + "@types/react" "*" + +"@types/react-input-autosize@^2.2.1": + version "2.2.1" + resolved "https://registry.yarnpkg.com/@types/react-input-autosize/-/react-input-autosize-2.2.1.tgz#6a335212e7fce1e1a4da56ae2095c8c5c35fbfe6" + integrity sha512-RxzEjd4gbLAAdLQ92Q68/AC+TfsAKTc4evsArUH1aIShIMqQMIMjsxoSnwyjtbFTO/AGIW/RQI94XSdvOxCz/w== + dependencies: + "@types/react" "*" + +"@types/react-redux@^7.1.20": + version "7.1.24" + resolved "https://registry.yarnpkg.com/@types/react-redux/-/react-redux-7.1.24.tgz#6caaff1603aba17b27d20f8ad073e4c077e975c0" + integrity sha512-7FkurKcS1k0FHZEtdbbgN8Oc6b+stGSfZYjQGicofJ0j4U0qIn/jaSvnP2pLwZKiai3/17xqqxkkrxTgN8UNbQ== + dependencies: + "@types/hoist-non-react-statics" "^3.3.0" + "@types/react" "*" + hoist-non-react-statics "^3.3.0" + redux "^4.0.0" + +"@types/react-virtualized-auto-sizer@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@types/react-virtualized-auto-sizer/-/react-virtualized-auto-sizer-1.0.1.tgz#b3187dae1dfc4c15880c9cfc5b45f2719ea6ebd4" + integrity sha512-GH8sAnBEM5GV9LTeiz56r4ZhMOUSrP43tAQNSRVxNexDjcNKLCEtnxusAItg1owFUFE6k0NslV26gqVClVvong== + dependencies: + "@types/react" "*" + +"@types/react-window@^1.8.5": + version "1.8.5" + resolved "https://registry.yarnpkg.com/@types/react-window/-/react-window-1.8.5.tgz#285fcc5cea703eef78d90f499e1457e9b5c02fc1" + integrity sha512-V9q3CvhC9Jk9bWBOysPGaWy/Z0lxYcTXLtLipkt2cnRj1JOSFNF7wqGpkScSXMgBwC+fnVRg/7shwgddBG5ICw== + dependencies: + "@types/react" "*" + +"@types/react@*": + version "18.0.9" + resolved "https://registry.yarnpkg.com/@types/react/-/react-18.0.9.tgz#d6712a38bd6cd83469603e7359511126f122e878" + integrity 
sha512-9bjbg1hJHUm4De19L1cHiW0Jvx3geel6Qczhjd0qY5VKVE2X5+x77YxAepuCwVh4vrgZJdgEJw48zrhRIeF4Nw== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/react@^17", "@types/react@^17.0.20": + version "17.0.45" + resolved "https://registry.yarnpkg.com/@types/react/-/react-17.0.45.tgz#9b3d5b661fd26365fefef0e766a1c6c30ccf7b3f" + integrity sha512-YfhQ22Lah2e3CHPsb93tRwIGNiSwkuz1/blk4e6QrWS0jQzCSNbGLtOEYhPg02W0yGTTmpajp7dCTbBAMN3qsg== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/refractor@^3.0.0": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/refractor/-/refractor-3.0.2.tgz#2d42128d59f78f84d2c799ffc5ab5cadbcba2d82" + integrity sha512-2HMXuwGuOqzUG+KUTm9GDJCHl0LCBKsB5cg28ujEmVi/0qgTb6jOmkVSO5K48qXksyl2Fr3C0Q2VrgD4zbwyXg== + dependencies: + "@types/prismjs" "*" + +"@types/resize-observer-browser@^0.1.5": + version "0.1.7" + resolved "https://registry.yarnpkg.com/@types/resize-observer-browser/-/resize-observer-browser-0.1.7.tgz#294aaadf24ac6580b8fbd1fe3ab7b59fe85f9ef3" + integrity sha512-G9eN0Sn0ii9PWQ3Vl72jDPgeJwRWhv2Qk/nQkJuWmRmOB4HX3/BhD5SE1dZs/hzPZL/WKnvF0RHdTSG54QJFyg== + +"@types/resolve@1.17.1": + version "1.17.1" + resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-1.17.1.tgz#3afd6ad8967c77e4376c598a82ddd58f46ec45d6" + integrity sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw== + dependencies: + "@types/node" "*" + +"@types/retry@0.12.0": + version "0.12.0" + resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d" + integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA== + +"@types/scheduler@*": + version "0.16.2" + resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" + integrity 
sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== + +"@types/serve-index@^1.9.1": + version "1.9.1" + resolved "https://registry.yarnpkg.com/@types/serve-index/-/serve-index-1.9.1.tgz#1b5e85370a192c01ec6cec4735cf2917337a6278" + integrity sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg== + dependencies: + "@types/express" "*" + +"@types/serve-static@*": + version "1.13.10" + resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.13.10.tgz#f5e0ce8797d2d7cc5ebeda48a52c96c4fa47a8d9" + integrity sha512-nCkHGI4w7ZgAdNkrEu0bv+4xNV/XDqW+DydknebMOQwkpDGx8G+HTlj7R7ABI8i8nKxVw0wtKPi1D+lPOkh4YQ== + dependencies: + "@types/mime" "^1" + "@types/node" "*" + +"@types/sockjs@^0.3.33": + version "0.3.33" + resolved "https://registry.yarnpkg.com/@types/sockjs/-/sockjs-0.3.33.tgz#570d3a0b99ac995360e3136fd6045113b1bd236f" + integrity sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw== + dependencies: + "@types/node" "*" + +"@types/stack-utils@^2.0.0": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" + integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== + +"@types/testing-library__jest-dom@^5.9.1": + version "5.14.3" + resolved "https://registry.yarnpkg.com/@types/testing-library__jest-dom/-/testing-library__jest-dom-5.14.3.tgz#ee6c7ffe9f8595882ee7bda8af33ae7b8789ef17" + integrity sha512-oKZe+Mf4ioWlMuzVBaXQ9WDnEm1+umLx0InILg+yvZVBBDmzV5KfZyLrCvadtWcx8+916jLmHafcmqqffl+iIw== + dependencies: + "@types/jest" "*" + +"@types/trusted-types@^2.0.2": + version "2.0.2" + resolved "https://registry.yarnpkg.com/@types/trusted-types/-/trusted-types-2.0.2.tgz#fc25ad9943bcac11cceb8168db4f275e0e72e756" + integrity 
sha512-F5DIZ36YVLE+PN+Zwws4kJogq47hNgX3Nx6WyDJ3kcplxyke3XIzB8uK5n/Lpm1HBsbGzd6nmGehL8cPekP+Tg== + +"@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2", "@types/unist@^2.0.3": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.6.tgz#250a7b16c3b91f672a24552ec64678eeb1d3a08d" + integrity sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ== + +"@types/vfile-message@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@types/vfile-message/-/vfile-message-2.0.0.tgz#690e46af0fdfc1f9faae00cd049cc888957927d5" + integrity sha512-GpTIuDpb9u4zIO165fUy9+fXcULdD8HFRNli04GehoMVbeNq7D6OBnqSmg3lxZnC+UvgUhEWKxdKiwYUkGltIw== + dependencies: + vfile-message "*" + +"@types/ws@^8.5.1": + version "8.5.3" + resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.3.tgz#7d25a1ffbecd3c4f2d35068d0b283c037003274d" + integrity sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w== + dependencies: + "@types/node" "*" + +"@types/yargs-parser@*": + version "21.0.0" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" + integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== + +"@types/yargs@^16.0.0": + version "16.0.4" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-16.0.4.tgz#26aad98dd2c2a38e421086ea9ad42b9e51642977" + integrity sha512-T8Yc9wt/5LbJyCaLiHPReJa0kApcIgJ7Bn735GjItUfh08Z1pJvu8QZqb9s+mMvKV6WUQRV7K2R46YbjMXTTJw== + dependencies: + "@types/yargs-parser" "*" + +"@types/yargs@^17.0.8": + version "17.0.10" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.10.tgz#591522fce85d8739bca7b8bb90d048e4478d186a" + integrity sha512-gmEaFwpj/7f/ROdtIlci1R1VYU1J4j95m8T+Tj3iBgiBFKg1foE/PSl93bBd5T9LDXNPo8UlNN6W0qwD8O5OaA== + dependencies: + "@types/yargs-parser" "*" + 
+"@typescript-eslint/eslint-plugin@^5.5.0": + version "5.23.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.23.0.tgz#bc4cbcf91fbbcc2e47e534774781b82ae25cc3d8" + integrity sha512-hEcSmG4XodSLiAp1uxv/OQSGsDY6QN3TcRU32gANp+19wGE1QQZLRS8/GV58VRUoXhnkuJ3ZxNQ3T6Z6zM59DA== + dependencies: + "@typescript-eslint/scope-manager" "5.23.0" + "@typescript-eslint/type-utils" "5.23.0" + "@typescript-eslint/utils" "5.23.0" + debug "^4.3.2" + functional-red-black-tree "^1.0.1" + ignore "^5.1.8" + regexpp "^3.2.0" + semver "^7.3.5" + tsutils "^3.21.0" + +"@typescript-eslint/experimental-utils@^5.0.0": + version "5.23.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-5.23.0.tgz#ea03860fa612dadf272789988f2ce41f0b7bb2f7" + integrity sha512-I+3YGQztH1DM9kgWzjslpZzJCBMRz0KhYG2WP62IwpooeZ1L6Qt0mNK8zs+uP+R2HOsr+TeDW35Pitc3PfVv8Q== + dependencies: + "@typescript-eslint/utils" "5.23.0" + +"@typescript-eslint/parser@^5.5.0": + version "5.23.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.23.0.tgz#443778e1afc9a8ff180f91b5e260ac3bec5e2de1" + integrity sha512-V06cYUkqcGqpFjb8ttVgzNF53tgbB/KoQT/iB++DOIExKmzI9vBJKjZKt/6FuV9c+zrDsvJKbJ2DOCYwX91cbw== + dependencies: + "@typescript-eslint/scope-manager" "5.23.0" + "@typescript-eslint/types" "5.23.0" + "@typescript-eslint/typescript-estree" "5.23.0" + debug "^4.3.2" + +"@typescript-eslint/scope-manager@5.23.0": + version "5.23.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.23.0.tgz#4305e61c2c8e3cfa3787d30f54e79430cc17ce1b" + integrity sha512-EhjaFELQHCRb5wTwlGsNMvzK9b8Oco4aYNleeDlNuL6qXWDF47ch4EhVNPh8Rdhf9tmqbN4sWDk/8g+Z/J8JVw== + dependencies: + "@typescript-eslint/types" "5.23.0" + "@typescript-eslint/visitor-keys" "5.23.0" + +"@typescript-eslint/type-utils@5.23.0": + version "5.23.0" + resolved 
"https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.23.0.tgz#f852252f2fc27620d5bb279d8fed2a13d2e3685e" + integrity sha512-iuI05JsJl/SUnOTXA9f4oI+/4qS/Zcgk+s2ir+lRmXI+80D8GaGwoUqs4p+X+4AxDolPpEpVUdlEH4ADxFy4gw== + dependencies: + "@typescript-eslint/utils" "5.23.0" + debug "^4.3.2" + tsutils "^3.21.0" + +"@typescript-eslint/types@5.23.0": + version "5.23.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.23.0.tgz#8733de0f58ae0ed318dbdd8f09868cdbf9f9ad09" + integrity sha512-NfBsV/h4dir/8mJwdZz7JFibaKC3E/QdeMEDJhiAE3/eMkoniZ7MjbEMCGXw6MZnZDMN3G9S0mH/6WUIj91dmw== + +"@typescript-eslint/typescript-estree@5.23.0": + version "5.23.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.23.0.tgz#dca5f10a0a85226db0796e8ad86addc9aee52065" + integrity sha512-xE9e0lrHhI647SlGMl+m+3E3CKPF1wzvvOEWnuE3CCjjT7UiRnDGJxmAcVKJIlFgK6DY9RB98eLr1OPigPEOGg== + dependencies: + "@typescript-eslint/types" "5.23.0" + "@typescript-eslint/visitor-keys" "5.23.0" + debug "^4.3.2" + globby "^11.0.4" + is-glob "^4.0.3" + semver "^7.3.5" + tsutils "^3.21.0" + +"@typescript-eslint/utils@5.23.0", "@typescript-eslint/utils@^5.13.0": + version "5.23.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.23.0.tgz#4691c3d1b414da2c53d8943310df36ab1c50648a" + integrity sha512-dbgaKN21drqpkbbedGMNPCtRPZo1IOUr5EI9Jrrh99r5UW5Q0dz46RKXeSBoPV+56R6dFKpbrdhgUNSJsDDRZA== + dependencies: + "@types/json-schema" "^7.0.9" + "@typescript-eslint/scope-manager" "5.23.0" + "@typescript-eslint/types" "5.23.0" + "@typescript-eslint/typescript-estree" "5.23.0" + eslint-scope "^5.1.1" + eslint-utils "^3.0.0" + +"@typescript-eslint/visitor-keys@5.23.0": + version "5.23.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.23.0.tgz#057c60a7ca64667a39f991473059377a8067c87b" + integrity 
sha512-Vd4mFNchU62sJB8pX19ZSPog05B0Y0CE2UxAZPT5k4iqhRYjPnqyY3woMxCd0++t9OTqkgjST+1ydLBi7e2Fvg== + dependencies: + "@typescript-eslint/types" "5.23.0" + eslint-visitor-keys "^3.0.0" + +"@webassemblyjs/ast@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" + integrity sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw== + dependencies: + "@webassemblyjs/helper-numbers" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + +"@webassemblyjs/floating-point-hex-parser@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz#f6c61a705f0fd7a6aecaa4e8198f23d9dc179e4f" + integrity sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ== + +"@webassemblyjs/helper-api-error@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz#1a63192d8788e5c012800ba6a7a46c705288fd16" + integrity sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg== + +"@webassemblyjs/helper-buffer@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz#832a900eb444884cde9a7cad467f81500f5e5ab5" + integrity sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA== + +"@webassemblyjs/helper-numbers@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz#64d81da219fbbba1e3bd1bfc74f6e8c4e10a62ae" + integrity sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ== + dependencies: + "@webassemblyjs/floating-point-hex-parser" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@xtuc/long" "4.2.2" + 
+"@webassemblyjs/helper-wasm-bytecode@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz#f328241e41e7b199d0b20c18e88429c4433295e1" + integrity sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q== + +"@webassemblyjs/helper-wasm-section@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz#21ee065a7b635f319e738f0dd73bfbda281c097a" + integrity sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + +"@webassemblyjs/ieee754@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz#963929e9bbd05709e7e12243a099180812992614" + integrity sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ== + dependencies: + "@xtuc/ieee754" "^1.2.0" + +"@webassemblyjs/leb128@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.1.tgz#ce814b45574e93d76bae1fb2644ab9cdd9527aa5" + integrity sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw== + dependencies: + "@xtuc/long" "4.2.2" + +"@webassemblyjs/utf8@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.1.tgz#d1f8b764369e7c6e6bae350e854dec9a59f0a3ff" + integrity sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ== + +"@webassemblyjs/wasm-edit@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz#ad206ebf4bf95a058ce9880a8c092c5dec8193d6" + integrity 
sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/helper-wasm-section" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-opt" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + "@webassemblyjs/wast-printer" "1.11.1" + +"@webassemblyjs/wasm-gen@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz#86c5ea304849759b7d88c47a32f4f039ae3c8f76" + integrity sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wasm-opt@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz#657b4c2202f4cf3b345f8a4c6461c8c2418985f2" + integrity sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-buffer" "1.11.1" + "@webassemblyjs/wasm-gen" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + +"@webassemblyjs/wasm-parser@1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz#86ca734534f417e9bd3c67c7a1c75d8be41fb199" + integrity sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/helper-api-error" "1.11.1" + "@webassemblyjs/helper-wasm-bytecode" "1.11.1" + "@webassemblyjs/ieee754" "1.11.1" + "@webassemblyjs/leb128" "1.11.1" + "@webassemblyjs/utf8" "1.11.1" + +"@webassemblyjs/wast-printer@1.11.1": + version "1.11.1" 
+ resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz#d0c73beda8eec5426f10ae8ef55cee5e7084c2f0" + integrity sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg== + dependencies: + "@webassemblyjs/ast" "1.11.1" + "@xtuc/long" "4.2.2" + +"@xtuc/ieee754@^1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" + integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== + +"@xtuc/long@4.2.2": + version "4.2.2" + resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" + integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== + +abab@^2.0.3, abab@^2.0.5: + version "2.0.6" + resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291" + integrity sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA== + +accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: + version "1.3.8" + resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" + integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== + dependencies: + mime-types "~2.1.34" + negotiator "0.6.3" + +acorn-globals@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" + integrity sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== + dependencies: + acorn "^7.1.1" + acorn-walk "^7.1.1" + +acorn-import-assertions@^1.7.6: + version "1.8.0" + resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9" + integrity 
sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw== + +acorn-jsx@^5.3.2: + version "5.3.2" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + +acorn-node@^1.6.1: + version "1.8.2" + resolved "https://registry.yarnpkg.com/acorn-node/-/acorn-node-1.8.2.tgz#114c95d64539e53dede23de8b9d96df7c7ae2af8" + integrity sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A== + dependencies: + acorn "^7.0.0" + acorn-walk "^7.0.0" + xtend "^4.0.2" + +acorn-walk@^7.0.0, acorn-walk@^7.1.1: + version "7.2.0" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" + integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== + +acorn@^7.0.0, acorn@^7.1.1: + version "7.4.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" + integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== + +acorn@^8.2.4, acorn@^8.4.1, acorn@^8.5.0, acorn@^8.7.1: + version "8.7.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.1.tgz#0197122c843d1bf6d0a5e83220a788f278f63c30" + integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A== + +address@^1.0.1, address@^1.1.2: + version "1.2.0" + resolved "https://registry.yarnpkg.com/address/-/address-1.2.0.tgz#d352a62c92fee90f89a693eccd2a8b2139ab02d9" + integrity sha512-tNEZYz5G/zYunxFm7sfhAxkXEuLj3K6BKwv6ZURlsF6yiUQ65z0Q2wZW9L5cPUl9ocofGvXOdFYbFHp0+6MOig== + +adjust-sourcemap-loader@^4.0.0: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/adjust-sourcemap-loader/-/adjust-sourcemap-loader-4.0.0.tgz#fc4a0fd080f7d10471f30a7320f25560ade28c99" + integrity sha512-OXwN5b9pCUXNQHJpwwD2qP40byEmSgzj8B4ydSN0uMNYWiFmJ6x6KwUllMmfk8Rwu/HJDFR7U8ubsWBoN0Xp0A== + dependencies: + loader-utils "^2.0.0" + regex-parser "^2.2.11" + +agent-base@6: + version "6.0.2" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" + integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== + dependencies: + debug "4" + +ajv-formats@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" + integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== + dependencies: + ajv "^8.0.0" + +ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: + version "3.5.2" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" + integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== + +ajv-keywords@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16" + integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== + dependencies: + fast-deep-equal "^3.1.3" + +ajv@^6.10.0, ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ajv@^8.0.0, ajv@^8.6.0, ajv@^8.8.0: + version "8.11.0" + resolved 
"https://registry.yarnpkg.com/ajv/-/ajv-8.11.0.tgz#977e91dd96ca669f54a11e23e378e33b884a565f" + integrity sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg== + dependencies: + fast-deep-equal "^3.1.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + uri-js "^4.2.2" + +ansi-escapes@^4.2.1, ansi-escapes@^4.3.1: + version "4.3.2" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== + dependencies: + type-fest "^0.21.3" + +ansi-html-community@^0.0.8: + version "0.0.8" + resolved "https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" + integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-regex@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" + integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity 
sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansi-styles@^5.0.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== + +anymatch@^3.0.3, anymatch@~3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" + integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +arg@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.1.tgz#eb0c9a8f77786cad2af8ff2b862899842d7b6adb" + integrity sha512-e0hDa9H2Z9AwFkk2qDlwhoMYE4eToKarchkQHovNdLTCYMHZHeRjI71crOh+dio4K6u1IcwubQqo79Ga4CyAQA== + +argparse@^1.0.7: + version "1.0.10" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +aria-hidden@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/aria-hidden/-/aria-hidden-1.1.3.tgz#bb48de18dc84787a3c6eee113709c473c64ec254" + integrity sha512-RhVWFtKH5BiGMycI72q2RAFMLQi8JP9bLuQXgR5a8Znp7P5KOIADSJeyfI8PCVxLEp067B2HbP5JIiI/PXIZeA== + dependencies: + tslib "^1.0.0" + +aria-query@^4.2.2: + version "4.2.2" + resolved 
"https://registry.yarnpkg.com/aria-query/-/aria-query-4.2.2.tgz#0d2ca6c9aceb56b8977e9fed6aed7e15bbd2f83b" + integrity sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA== + dependencies: + "@babel/runtime" "^7.10.2" + "@babel/runtime-corejs3" "^7.10.2" + +aria-query@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.0.0.tgz#210c21aaf469613ee8c9a62c7f86525e058db52c" + integrity sha512-V+SM7AbUwJ+EBnB8+DXs0hPZHO0W6pqBcc0dW90OwtVG02PswOu/teuARoLQjdDOH+t9pJgGnW5/Qmouf3gPJg== + +array-flatten@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= + +array-flatten@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" + integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== + +array-includes@^3.1.4: + version "3.1.5" + resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.5.tgz#2c320010db8d31031fd2a5f6b3bbd4b1aad31bdb" + integrity sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + get-intrinsic "^1.1.1" + is-string "^1.0.7" + +array-union@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +array.prototype.flat@^1.2.5: + version "1.3.0" + resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz#0b0c1567bf57b38b56b4c97b8aa72ab45e4adc7b" + integrity 
sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" + +array.prototype.flatmap@^1.2.5: + version "1.3.0" + resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz#a7e8ed4225f4788a70cd910abcf0791e76a5534f" + integrity sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" + +asap@~2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" + integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= + +ast-types-flow@^0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" + integrity sha1-9wtzXGvKGlycItmCw+Oef+ujva0= + +async@^3.2.3: + version "3.2.3" + resolved "https://registry.yarnpkg.com/async/-/async-3.2.3.tgz#ac53dafd3f4720ee9e8a160628f18ea91df196c9" + integrity sha512-spZRyzKL5l5BZQrr/6m/SqFdBN0q3OCI0f9rjfBzCMBIP4p75P620rR3gTmaksNOhmzgdxcaxdNfMy6anrbM0g== + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= + +at-least-node@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" + integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== + +atob@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" + integrity 
sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== + +attr-accept@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/attr-accept/-/attr-accept-2.2.2.tgz#646613809660110749e92f2c10833b70968d929b" + integrity sha512-7prDjvt9HmqiZ0cl5CRjtS84sEyhsHP2coDkaZKRKVfCDo9s7iw7ChVmar78Gu9pC4SoR/28wFu/G5JJhTnqEg== + +autoprefixer@^10.4.6: + version "10.4.7" + resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.7.tgz#1db8d195f41a52ca5069b7593be167618edbbedf" + integrity sha512-ypHju4Y2Oav95SipEcCcI5J7CGPuvz8oat7sUtYj3ClK44bldfvtvcxK6IEK++7rqB7YchDGzweZIBG+SD0ZAA== + dependencies: + browserslist "^4.20.3" + caniuse-lite "^1.0.30001335" + fraction.js "^4.2.0" + normalize-range "^0.1.2" + picocolors "^1.0.0" + postcss-value-parser "^4.2.0" + +axe-core@^4.3.5: + version "4.4.1" + resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.4.1.tgz#7dbdc25989298f9ad006645cd396782443757413" + integrity sha512-gd1kmb21kwNuWr6BQz8fv6GNECPBnUasepcoLbekws23NVBLODdsClRZ+bQ8+9Uomf3Sm3+Vwn0oYG9NvwnJCw== + +axobject-query@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-2.2.0.tgz#943d47e10c0b704aa42275e20edf3722648989be" + integrity sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA== + +babel-jest@^27.4.2, babel-jest@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-27.5.1.tgz#a1bf8d61928edfefd21da27eb86a695bfd691444" + integrity sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg== + dependencies: + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/babel__core" "^7.1.14" + babel-plugin-istanbul "^6.1.1" + babel-preset-jest "^27.5.1" + chalk "^4.0.0" + graceful-fs "^4.2.9" + slash "^3.0.0" + +babel-loader@^8.2.3: + version "8.2.5" + resolved 
"https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.2.5.tgz#d45f585e654d5a5d90f5350a779d7647c5ed512e" + integrity sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ== + dependencies: + find-cache-dir "^3.3.1" + loader-utils "^2.0.0" + make-dir "^3.1.0" + schema-utils "^2.6.5" + +babel-plugin-dynamic-import-node@^2.3.3: + version "2.3.3" + resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3" + integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ== + dependencies: + object.assign "^4.1.0" + +babel-plugin-istanbul@^6.1.1: + version "6.1.1" + resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@istanbuljs/load-nyc-config" "^1.0.0" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-instrument "^5.0.4" + test-exclude "^6.0.0" + +babel-plugin-jest-hoist@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz#9be98ecf28c331eb9f5df9c72d6f89deb8181c2e" + integrity sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ== + dependencies: + "@babel/template" "^7.3.3" + "@babel/types" "^7.3.3" + "@types/babel__core" "^7.0.0" + "@types/babel__traverse" "^7.0.6" + +babel-plugin-macros@^2.6.1: + version "2.8.0" + resolved "https://registry.yarnpkg.com/babel-plugin-macros/-/babel-plugin-macros-2.8.0.tgz#0f958a7cc6556b1e65344465d99111a1e5e10138" + integrity sha512-SEP5kJpfGYqYKpBrj5XU3ahw5p5GOHJ0U5ssOSQ/WBVdwkD2Dzlce95exQTs3jOVWPPKLBN2rlEWkCK7dSmLvg== + dependencies: + "@babel/runtime" "^7.7.2" + cosmiconfig "^6.0.0" + 
resolve "^1.12.0" + +babel-plugin-macros@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz#9ef6dc74deb934b4db344dc973ee851d148c50c1" + integrity sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg== + dependencies: + "@babel/runtime" "^7.12.5" + cosmiconfig "^7.0.0" + resolve "^1.19.0" + +babel-plugin-named-asset-import@^0.3.8: + version "0.3.8" + resolved "https://registry.yarnpkg.com/babel-plugin-named-asset-import/-/babel-plugin-named-asset-import-0.3.8.tgz#6b7fa43c59229685368683c28bc9734f24524cc2" + integrity sha512-WXiAc++qo7XcJ1ZnTYGtLxmBCVbddAml3CEXgWaBzNzLNoxtQ8AiGEFDMOhot9XjTCQbvP5E77Fj9Gk924f00Q== + +babel-plugin-polyfill-corejs2@^0.3.0: + version "0.3.1" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.1.tgz#440f1b70ccfaabc6b676d196239b138f8a2cfba5" + integrity sha512-v7/T6EQcNfVLfcN2X8Lulb7DjprieyLWJK/zOWH5DUYcAgex9sP3h25Q+DLsX9TloXe3y1O8l2q2Jv9q8UVB9w== + dependencies: + "@babel/compat-data" "^7.13.11" + "@babel/helper-define-polyfill-provider" "^0.3.1" + semver "^6.1.1" + +babel-plugin-polyfill-corejs3@^0.5.0: + version "0.5.2" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.2.tgz#aabe4b2fa04a6e038b688c5e55d44e78cd3a5f72" + integrity sha512-G3uJih0XWiID451fpeFaYGVuxHEjzKTHtc9uGFEjR6hHrvNzeS/PX+LLLcetJcytsB5m4j+K3o/EpXJNb/5IEQ== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.1" + core-js-compat "^3.21.0" + +babel-plugin-polyfill-regenerator@^0.3.0: + version "0.3.1" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.3.1.tgz#2c0678ea47c75c8cc2fbb1852278d8fb68233990" + integrity sha512-Y2B06tvgHYt1x0yz17jGkGeeMr5FeKUu+ASJ+N6nB5lQ8Dapfg42i0OVrf8PNGJ3zKL4A23snMi1IRwrqqND7A== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.1" + 
+babel-plugin-transform-react-remove-prop-types@^0.4.24: + version "0.4.24" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz#f2edaf9b4c6a5fbe5c1d678bfb531078c1555f3a" + integrity sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA== + +babel-preset-current-node-syntax@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" + integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== + dependencies: + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-bigint" "^7.8.3" + "@babel/plugin-syntax-class-properties" "^7.8.3" + "@babel/plugin-syntax-import-meta" "^7.8.3" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.8.3" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-top-level-await" "^7.8.3" + +babel-preset-jest@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz#91f10f58034cb7989cb4f962b69fa6eef6a6bc81" + integrity sha512-Nptf2FzlPCWYuJg41HBqXVT8ym6bXOevuCTbhxlUpjwtysGaIWFvDEjp4y+G7fl13FgOdjs7P/DmErqH7da0Ag== + dependencies: + babel-plugin-jest-hoist "^27.5.1" + babel-preset-current-node-syntax "^1.0.0" + +babel-preset-react-app@^10.0.1: + version "10.0.1" + resolved "https://registry.yarnpkg.com/babel-preset-react-app/-/babel-preset-react-app-10.0.1.tgz#ed6005a20a24f2c88521809fa9aea99903751584" + integrity 
sha512-b0D9IZ1WhhCWkrTXyFuIIgqGzSkRIH5D5AmB0bXbzYAB1OBAwHcUeyWW2LorutLWF5btNo/N7r/cIdmvvKJlYg== + dependencies: + "@babel/core" "^7.16.0" + "@babel/plugin-proposal-class-properties" "^7.16.0" + "@babel/plugin-proposal-decorators" "^7.16.4" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.16.0" + "@babel/plugin-proposal-numeric-separator" "^7.16.0" + "@babel/plugin-proposal-optional-chaining" "^7.16.0" + "@babel/plugin-proposal-private-methods" "^7.16.0" + "@babel/plugin-transform-flow-strip-types" "^7.16.0" + "@babel/plugin-transform-react-display-name" "^7.16.0" + "@babel/plugin-transform-runtime" "^7.16.4" + "@babel/preset-env" "^7.16.4" + "@babel/preset-react" "^7.16.0" + "@babel/preset-typescript" "^7.16.0" + "@babel/runtime" "^7.16.3" + babel-plugin-macros "^3.1.0" + babel-plugin-transform-react-remove-prop-types "^0.4.24" + +bail@^1.0.0: + version "1.0.5" + resolved "https://registry.yarnpkg.com/bail/-/bail-1.0.5.tgz#b6fa133404a392cbc1f8c4bf63f5953351e7a776" + integrity sha512-xFbRxM1tahm08yHBP16MMjVUAvDaBMD38zsM9EMAUN61omwLmKlOpB/Zku5QkjZ8TZ4vn53pj+t518cH0S03RQ== + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +batch@0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" + integrity sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY= + +bfj@^7.0.2: + version "7.0.2" + resolved "https://registry.yarnpkg.com/bfj/-/bfj-7.0.2.tgz#1988ce76f3add9ac2913fd8ba47aad9e651bfbb2" + integrity sha512-+e/UqUzwmzJamNF50tBV6tZPTORow7gQ96iFow+8b562OdMpEK0BcJEq2OSPEDmAbSMBQ7PKZ87ubFkgxpYWgw== + dependencies: + bluebird "^3.5.5" + check-types "^11.1.1" + hoopy "^0.1.4" + tryer "^1.0.1" + +big-integer@^1.6.16: + version "1.6.51" + resolved 
"https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.51.tgz#0df92a5d9880560d3ff2d5fd20245c889d130686" + integrity sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg== + +big.js@^5.2.2: + version "5.2.2" + resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" + integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== + +binary-extensions@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" + integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== + +bluebird@^3.5.5: + version "3.7.2" + resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" + integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== + +body-parser@1.20.0: + version "1.20.0" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5" + integrity sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg== + dependencies: + bytes "3.1.2" + content-type "~1.0.4" + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + http-errors "2.0.0" + iconv-lite "0.4.24" + on-finished "2.4.1" + qs "6.10.3" + raw-body "2.5.1" + type-is "~1.6.18" + unpipe "1.0.0" + +bonjour-service@^1.0.11: + version "1.0.12" + resolved "https://registry.yarnpkg.com/bonjour-service/-/bonjour-service-1.0.12.tgz#28fbd4683f5f2e36feedb833e24ba661cac960c3" + integrity sha512-pMmguXYCu63Ug37DluMKEHdxc+aaIf/ay4YbF8Gxtba+9d3u+rmEWy61VK3Z3hp8Rskok3BunHYnG0dUHAsblw== + dependencies: + array-flatten "^2.1.2" + dns-equal "^1.0.0" + fast-deep-equal "^3.1.3" + multicast-dns "^7.2.4" + +boolbase@^1.0.0, boolbase@~1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" + integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + +braces@^3.0.2, braces@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +broadcast-channel@^3.4.1: + version "3.7.0" + resolved "https://registry.yarnpkg.com/broadcast-channel/-/broadcast-channel-3.7.0.tgz#2dfa5c7b4289547ac3f6705f9c00af8723889937" + integrity sha512-cIAKJXAxGJceNZGTZSBzMxzyOn72cVgPnKx4dc6LRjQgbaJUQqhy5rzL3zbMxkMWsGKkv2hSFkPRMEXfoMZ2Mg== + dependencies: + "@babel/runtime" "^7.7.2" + detect-node "^2.1.0" + js-sha3 "0.8.0" + microseconds "0.2.0" + nano-time "1.0.0" + oblivious-set "1.0.0" + rimraf "3.0.2" + unload "2.2.0" + +browser-process-hrtime@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" + integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== + +browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.6, browserslist@^4.18.1, 
browserslist@^4.20.2, browserslist@^4.20.3: + version "4.20.3" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.20.3.tgz#eb7572f49ec430e054f56d52ff0ebe9be915f8bf" + integrity sha512-NBhymBQl1zM0Y5dQT/O+xiLP9/rzOIQdKM/eMJBAq7yBgaB6krIYLGejrwVYnSHZdqjscB1SPuAjHwxjvN6Wdg== + dependencies: + caniuse-lite "^1.0.30001332" + electron-to-chromium "^1.4.118" + escalade "^3.1.1" + node-releases "^2.0.3" + picocolors "^1.0.0" + +bser@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== + dependencies: + node-int64 "^0.4.0" + +buffer-from@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== + +builtin-modules@^3.1.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.2.0.tgz#45d5db99e7ee5e6bc4f362e008bf917ab5049887" + integrity sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA== + +bytes@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" + integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg= + +bytes@3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" + integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== + +call-bind@^1.0.0, call-bind@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + 
dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camel-case@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" + integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw== + dependencies: + pascal-case "^3.1.2" + tslib "^2.0.3" + +camelcase-css@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5" + integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== + +camelcase@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +camelcase@^6.2.0, camelcase@^6.2.1: + version "6.3.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== + +caniuse-api@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0" + integrity sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw== + dependencies: + browserslist "^4.0.0" + caniuse-lite "^1.0.0" + lodash.memoize "^4.1.2" + lodash.uniq "^4.5.0" + +caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001332, caniuse-lite@^1.0.30001335: + version "1.0.30001339" + resolved 
"https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001339.tgz#f9aece4ea8156071613b27791547ba0b33f176cf" + integrity sha512-Es8PiVqCe+uXdms0Gu5xP5PF2bxLR7OBp3wUzUnuO7OHzhOfCyg3hdiGWVPVxhiuniOzng+hTc1u3fEQ0TlkSQ== + +case-sensitive-paths-webpack-plugin@^2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" + integrity sha512-roIFONhcxog0JSSWbvVAh3OocukmSgpqOH6YpMkCvav/ySIV3JKg4Dc8vYtQjYi/UxpNE36r/9v+VqTQqgkYmw== + +ccount@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.1.0.tgz#246687debb6014735131be8abab2d93898f8d043" + integrity sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg== + +chalk@^2.0.0, chalk@^2.4.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" + integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +char-regex@^1.0.2: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== + +char-regex@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-2.0.1.tgz#6dafdb25f9d3349914079f010ba8d0e6ff9cd01e" + integrity sha512-oSvEeo6ZUD7NepqAat3RqoucZ5SeqLJgOvVIwkafu6IP3V0pO38s/ypdVUmDDK6qIIHNlYHJAKX9E7R7HoKElw== + +character-entities-html4@^1.0.0: + version "1.1.4" + resolved "https://registry.yarnpkg.com/character-entities-html4/-/character-entities-html4-1.1.4.tgz#0e64b0a3753ddbf1fdc044c5fd01d0199a02e125" + integrity sha512-HRcDxZuZqMx3/a+qrzxdBKBPUpxWEq9xw2OPZ3a/174ihfrQKVsFhqtthBInFy1zZ9GgZyFXOatNujm8M+El3g== + +character-entities-legacy@^1.0.0: + version "1.1.4" + resolved "https://registry.yarnpkg.com/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz#94bc1845dce70a5bb9d2ecc748725661293d8fc1" + integrity sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA== + +character-entities@^1.0.0: + version "1.2.4" + resolved "https://registry.yarnpkg.com/character-entities/-/character-entities-1.2.4.tgz#e12c3939b7eaf4e5b15e7ad4c5e28e1d48c5b16b" + integrity sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw== + +character-reference-invalid@^1.0.0: + version "1.1.4" + resolved "https://registry.yarnpkg.com/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz#083329cda0eae272ab3dbbf37e9a382c13af1560" + integrity sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg== + +charcodes@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/charcodes/-/charcodes-0.2.0.tgz#5208d327e6cc05f99eb80ffc814707572d1f14e4" + integrity sha512-Y4kiDb+AM4Ecy58YkuZrrSRJBDQdQ2L+NyS1vHHFtNtUjgutcZfx3yp1dAONI/oPaPmyGfCLx5CxL+zauIMyKQ== + +check-types@^11.1.1: + version 
"11.1.2" + resolved "https://registry.yarnpkg.com/check-types/-/check-types-11.1.2.tgz#86a7c12bf5539f6324eb0e70ca8896c0e38f3e2f" + integrity sha512-tzWzvgePgLORb9/3a0YenggReLKAIb2owL03H2Xdoe5pKcUyWRSEQ8xfCar8t2SIAuEDwtmx2da1YB52YuHQMQ== + +chokidar@^3.4.2, chokidar@^3.5.3: + version "3.5.3" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" + integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== + dependencies: + anymatch "~3.1.2" + braces "~3.0.2" + glob-parent "~5.1.2" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.6.0" + optionalDependencies: + fsevents "~2.3.2" + +chroma-js@^2.1.0: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chroma-js/-/chroma-js-2.4.2.tgz#dffc214ed0c11fa8eefca2c36651d8e57cbfb2b0" + integrity sha512-U9eDw6+wt7V8z5NncY2jJfZa+hUH8XEj8FQHgFJTrUFnJfXYf4Ml4adI2vXZOjqRDpFWtYVWypDfZwnJ+HIR4A== + +chrome-trace-event@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" + integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== + +ci-info@^3.2.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.3.0.tgz#b4ed1fb6818dea4803a55c623041f9165d2066b2" + integrity sha512-riT/3vI5YpVH6/qomlDnJow6TBee2PBKSEpx3O32EGPYbWGIRsIlGRms3Sm74wYE1JMo8RnO04Hb12+v1J5ICw== + +cjs-module-lexer@^1.0.0: + version "1.2.2" + resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" + integrity sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== + +classnames@^2.2.6: + version "2.3.1" + resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.1.tgz#dfcfa3891e306ec1dad105d0e88f4417b8535e8e" + integrity 
sha512-OlQdbZ7gLfGarSqxesMesDa5uz7KFbID8Kpq/SxIoNGDqY8lSYs0D+hhtBXhcdB3rcbXArFr7vlHheLk1voeNA== + +clean-css@^5.2.2: + version "5.3.0" + resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-5.3.0.tgz#ad3d8238d5f3549e83d5f87205189494bc7cbb59" + integrity sha512-YYuuxv4H/iNb1Z/5IbMRoxgrzjWGhOEFfd+groZ5dMCVkpENiMZmwspdrzBo9286JjM1gZJPAyL7ZIdzuvu2AQ== + dependencies: + source-map "~0.6.0" + +cliui@^7.0.2: + version "7.0.4" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" + integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^7.0.0" + +co@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= + +coa@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3" + integrity sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA== + dependencies: + "@types/q" "^1.5.1" + chalk "^2.4.1" + q "^1.1.2" + +collapse-white-space@^1.0.2: + version "1.0.6" + resolved "https://registry.yarnpkg.com/collapse-white-space/-/collapse-white-space-1.0.6.tgz#e63629c0016665792060dbbeb79c42239d2c5287" + integrity sha512-jEovNnrhMuqyCcjfEJA56v0Xq8SkIoPKDyaHahwo3POf4qcSXqMYuwNcOTzp74vTsR9Tn08z4MxWqAhcekogkQ== + +collect-v8-coverage@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" + integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== + +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity 
sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= + +color-name@^1.1.4, color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +colord@^2.9.1: + version "2.9.2" + resolved "https://registry.yarnpkg.com/colord/-/colord-2.9.2.tgz#25e2bacbbaa65991422c07ea209e2089428effb1" + integrity sha512-Uqbg+J445nc1TKn4FoDPS6ZZqAvEDnwrH42yo8B40JSOgSLxMZ/gt3h4nmCtPLQeXhjJJkqBx7SCY35WnIixaQ== + +colorette@^2.0.10: + version "2.0.16" + resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.16.tgz#713b9af84fdb000139f04546bd4a93f62a5085da" + integrity sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g== + +combined-stream@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +comma-separated-tokens@^1.0.0: + version "1.0.8" + resolved "https://registry.yarnpkg.com/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz#632b80b6117867a158f1080ad498b2fbe7e3f5ea" + integrity 
sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw== + +commander@7, commander@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" + integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== + +commander@^2.20.0: + version "2.20.3" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" + integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== + +commander@^8.3.0: + version "8.3.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" + integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== + +common-path-prefix@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" + integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== + +common-tags@^1.8.0: + version "1.8.2" + resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.2.tgz#94ebb3c076d26032745fd54face7f688ef5ac9c6" + integrity sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA== + +commondir@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" + integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= + +compressible@~2.0.16: + version "2.0.18" + resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" + integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== + dependencies: + mime-db ">= 1.43.0 < 2" + +compression@^1.7.4: + 
version "1.7.4" + resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" + integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== + dependencies: + accepts "~1.3.5" + bytes "3.0.0" + compressible "~2.0.16" + debug "2.6.9" + on-headers "~1.0.2" + safe-buffer "5.1.2" + vary "~1.1.2" + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= + +confusing-browser-globals@^1.0.11: + version "1.0.11" + resolved "https://registry.yarnpkg.com/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz#ae40e9b57cdd3915408a2805ebd3a5585608dc81" + integrity sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA== + +connect-history-api-fallback@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz#8b32089359308d111115d81cad3fceab888f97bc" + integrity sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg== + +content-disposition@0.5.4: + version "0.5.4" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" + integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== + dependencies: + safe-buffer "5.2.1" + +content-type@~1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" + integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== + +convert-source-map@^1.4.0, convert-source-map@^1.5.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: + version "1.8.0" + resolved 
"https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" + integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== + dependencies: + safe-buffer "~5.1.1" + +cookie-signature@1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= + +cookie@0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" + integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== + +core-js-compat@^3.21.0, core-js-compat@^3.22.1: + version "3.22.5" + resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.22.5.tgz#7fffa1d20cb18405bd22756ca1353c6f1a0e8614" + integrity sha512-rEF75n3QtInrYICvJjrAgV03HwKiYvtKHdPtaba1KucG+cNZ4NJnH9isqt979e67KZlhpbCOTwnsvnIr+CVeOg== + dependencies: + browserslist "^4.20.3" + semver "7.0.0" + +core-js-pure@^3.20.2, core-js-pure@^3.8.1: + version "3.22.5" + resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.22.5.tgz#bdee0ed2f9b78f2862cda4338a07b13a49b6c9a9" + integrity sha512-8xo9R00iYD7TcV7OrC98GwxiUEAabVWO3dix+uyWjnYrx9fyASLlIX+f/3p5dW5qByaP2bcZ8X/T47s55et/tA== + +core-js@^3.19.2: + version "3.22.5" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.22.5.tgz#a5f5a58e663d5c0ebb4e680cd7be37536fb2a9cf" + integrity sha512-VP/xYuvJ0MJWRAobcmQ8F2H6Bsn+s7zqAAjFaHGBMc5AQm7zaelhD1LGduFn2EehEcQcU+br6t+fwbpQ5d1ZWA== + +core-util-is@~1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" + integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== + +cosmiconfig@^6.0.0: + version "6.0.0" + resolved 
"https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982" + integrity sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.1.0" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.7.2" + +cosmiconfig@^7.0.0: + version "7.0.1" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.0.1.tgz#714d756522cace867867ccb4474c5d01bbae5d6d" + integrity sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.2.1" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.10.0" + +cross-spawn@^7.0.2, cross-spawn@^7.0.3: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +crypto-random-string@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5" + integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== + +css-blank-pseudo@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/css-blank-pseudo/-/css-blank-pseudo-3.0.3.tgz#36523b01c12a25d812df343a32c322d2a2324561" + integrity sha512-VS90XWtsHGqoM0t4KpH053c4ehxZ2E6HtGI7x68YFV0pTo/QmkV/YFA+NnlvK8guxZVNWGQhVNJGC39Q8XF4OQ== + dependencies: + postcss-selector-parser "^6.0.9" + +css-box-model@^1.2.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/css-box-model/-/css-box-model-1.2.1.tgz#59951d3b81fd6b2074a62d49444415b0d2b4d7c1" + integrity 
sha512-a7Vr4Q/kd/aw96bnJG332W9V9LkJO69JRcaCYDUqjp6/z0w6VcZjgAcTbgFxEPfBgdnAwlh3iwu+hLopa+flJw== + dependencies: + tiny-invariant "^1.0.6" + +css-declaration-sorter@^6.2.2: + version "6.2.2" + resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-6.2.2.tgz#bfd2f6f50002d6a3ae779a87d3a0c5d5b10e0f02" + integrity sha512-Ufadglr88ZLsrvS11gjeu/40Lw74D9Am/Jpr3LlYm5Q4ZP5KdlUhG+6u2EjyXeZcxmZ2h1ebCKngDjolpeLHpg== + +css-has-pseudo@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/css-has-pseudo/-/css-has-pseudo-3.0.4.tgz#57f6be91ca242d5c9020ee3e51bbb5b89fc7af73" + integrity sha512-Vse0xpR1K9MNlp2j5w1pgWIJtm1a8qS0JwS9goFYcImjlHEmywP9VUF05aGBXzGpDJF86QXk4L0ypBmwPhGArw== + dependencies: + postcss-selector-parser "^6.0.9" + +css-loader@^6.5.1: + version "6.7.1" + resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-6.7.1.tgz#e98106f154f6e1baf3fc3bc455cb9981c1d5fd2e" + integrity sha512-yB5CNFa14MbPJcomwNh3wLThtkZgcNyI2bNMRt8iE5Z8Vwl7f8vQXFAzn2HDOJvtDq2NTZBUGMSUNNyrv3/+cw== + dependencies: + icss-utils "^5.1.0" + postcss "^8.4.7" + postcss-modules-extract-imports "^3.0.0" + postcss-modules-local-by-default "^4.0.0" + postcss-modules-scope "^3.0.0" + postcss-modules-values "^4.0.0" + postcss-value-parser "^4.2.0" + semver "^7.3.5" + +css-minimizer-webpack-plugin@^3.2.0: + version "3.4.1" + resolved "https://registry.yarnpkg.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz#ab78f781ced9181992fe7b6e4f3422e76429878f" + integrity sha512-1u6D71zeIfgngN2XNRJefc/hY7Ybsxd74Jm4qngIXyUEk7fss3VUzuHxLAq/R8NAba4QU9OUSaMZlbpRc7bM4Q== + dependencies: + cssnano "^5.0.6" + jest-worker "^27.0.2" + postcss "^8.3.5" + schema-utils "^4.0.0" + serialize-javascript "^6.0.0" + source-map "^0.6.1" + +css-prefers-color-scheme@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/css-prefers-color-scheme/-/css-prefers-color-scheme-6.0.3.tgz#ca8a22e5992c10a5b9d315155e7caee625903349" + integrity 
sha512-4BqMbZksRkJQx2zAjrokiGMd07RqOa2IxIrrN10lyBe9xhn9DEvjUK79J6jkeiv9D9hQFXKb6g1jwU62jziJZA== + +css-select-base-adapter@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz#3b2ff4972cc362ab88561507a95408a1432135d7" + integrity sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w== + +css-select@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/css-select/-/css-select-2.1.0.tgz#6a34653356635934a81baca68d0255432105dbef" + integrity sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ== + dependencies: + boolbase "^1.0.0" + css-what "^3.2.1" + domutils "^1.7.0" + nth-check "^1.0.2" + +css-select@^4.1.3: + version "4.3.0" + resolved "https://registry.yarnpkg.com/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b" + integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ== + dependencies: + boolbase "^1.0.0" + css-what "^6.0.1" + domhandler "^4.3.1" + domutils "^2.8.0" + nth-check "^2.0.1" + +css-tree@1.0.0-alpha.37: + version "1.0.0-alpha.37" + resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.37.tgz#98bebd62c4c1d9f960ec340cf9f7522e30709a22" + integrity sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg== + dependencies: + mdn-data "2.0.4" + source-map "^0.6.1" + +css-tree@^1.1.2, css-tree@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.1.3.tgz#eb4870fb6fd7707327ec95c2ff2ab09b5e8db91d" + integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== + dependencies: + mdn-data "2.0.14" + source-map "^0.6.1" + +css-what@^3.2.1: + version "3.4.2" + resolved "https://registry.yarnpkg.com/css-what/-/css-what-3.4.2.tgz#ea7026fcb01777edbde52124e21f327e7ae950e4" + integrity 
sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ== + +css-what@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" + integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== + +css.escape@^1.5.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/css.escape/-/css.escape-1.5.1.tgz#42e27d4fa04ae32f931a4b4d4191fa9cddee97cb" + integrity sha1-QuJ9T6BK4y+TGktNQZH6nN3ul8s= + +css@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/css/-/css-3.0.0.tgz#4447a4d58fdd03367c516ca9f64ae365cee4aa5d" + integrity sha512-DG9pFfwOrzc+hawpmqX/dHYHJG+Bsdb0klhyi1sDneOgGOXy9wQIC8hzyVp1e4NRYDBdxcylvywPkkXCHAzTyQ== + dependencies: + inherits "^2.0.4" + source-map "^0.6.1" + source-map-resolve "^0.6.0" + +cssdb@^6.6.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/cssdb/-/cssdb-6.6.1.tgz#2637fdc57eab452849488de7e8d961ec06f2fe8f" + integrity sha512-0/nZEYfp8SFEzJkMud8NxZJsGfD7RHDJti6GRBLZptIwAzco6RTx1KgwFl4mGWsYS0ZNbCrsY9QryhQ4ldF3Mg== + +cssesc@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" + integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== + +cssnano-preset-default@^5.2.7: + version "5.2.7" + resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-5.2.7.tgz#791e3603fb8f1b46717ac53b47e3c418e950f5f3" + integrity sha512-JiKP38ymZQK+zVKevphPzNSGHSlTI+AOwlasoSRtSVMUU285O7/6uZyd5NbW92ZHp41m0sSHe6JoZosakj63uA== + dependencies: + css-declaration-sorter "^6.2.2" + cssnano-utils "^3.1.0" + postcss-calc "^8.2.3" + postcss-colormin "^5.3.0" + postcss-convert-values "^5.1.0" + postcss-discard-comments "^5.1.1" + postcss-discard-duplicates "^5.1.0" + postcss-discard-empty "^5.1.1" + postcss-discard-overridden "^5.1.0" 
+ postcss-merge-longhand "^5.1.4" + postcss-merge-rules "^5.1.1" + postcss-minify-font-values "^5.1.0" + postcss-minify-gradients "^5.1.1" + postcss-minify-params "^5.1.2" + postcss-minify-selectors "^5.2.0" + postcss-normalize-charset "^5.1.0" + postcss-normalize-display-values "^5.1.0" + postcss-normalize-positions "^5.1.0" + postcss-normalize-repeat-style "^5.1.0" + postcss-normalize-string "^5.1.0" + postcss-normalize-timing-functions "^5.1.0" + postcss-normalize-unicode "^5.1.0" + postcss-normalize-url "^5.1.0" + postcss-normalize-whitespace "^5.1.1" + postcss-ordered-values "^5.1.1" + postcss-reduce-initial "^5.1.0" + postcss-reduce-transforms "^5.1.0" + postcss-svgo "^5.1.0" + postcss-unique-selectors "^5.1.1" + +cssnano-utils@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/cssnano-utils/-/cssnano-utils-3.1.0.tgz#95684d08c91511edfc70d2636338ca37ef3a6861" + integrity sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA== + +cssnano@^5.0.6: + version "5.1.7" + resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-5.1.7.tgz#99858bef6c76c9240f0cdc9239570bc7db8368be" + integrity sha512-pVsUV6LcTXif7lvKKW9ZrmX+rGRzxkEdJuVJcp5ftUjWITgwam5LMZOgaTvUrWPkcORBey6he7JKb4XAJvrpKg== + dependencies: + cssnano-preset-default "^5.2.7" + lilconfig "^2.0.3" + yaml "^1.10.2" + +csso@^4.0.2, csso@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/csso/-/csso-4.2.0.tgz#ea3a561346e8dc9f546d6febedd50187cf389529" + integrity sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA== + dependencies: + css-tree "^1.1.2" + +cssom@^0.4.4: + version "0.4.4" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" + integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== + +cssom@~0.3.6: + version "0.3.8" + resolved 
"https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" + integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== + +cssstyle@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" + integrity sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== + dependencies: + cssom "~0.3.6" + +csstype@^3.0.2: + version "3.0.11" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.0.11.tgz#d66700c5eacfac1940deb4e3ee5642792d85cd33" + integrity sha512-sa6P2wJ+CAbgyy4KFssIb/JNMLxFvKF1pCYCSXS8ZMuqZnMsrxqI2E5sPyoTpxoPU/gVZMzr2zjOfg8GIZOMsw== + +"d3-array@2 - 3", "d3-array@2.10.0 - 3", "d3-array@2.5.0 - 3", d3-array@3: + version "3.1.6" + resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-3.1.6.tgz#0342c835925826f49b4d16eb7027aec334ffc97d" + integrity sha512-DCbBBNuKOeiR9h04ySRBMW52TFVc91O9wJziuyXw6Ztmy8D3oZbmCkOO3UHKC7ceNJsN2Mavo9+vwV8EAEUXzA== + dependencies: + internmap "1 - 2" + +d3-axis@3: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-axis/-/d3-axis-3.0.0.tgz#c42a4a13e8131d637b745fc2973824cfeaf93322" + integrity sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw== + +d3-brush@3: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-brush/-/d3-brush-3.0.0.tgz#6f767c4ed8dcb79de7ede3e1c0f89e63ef64d31c" + integrity sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ== + dependencies: + d3-dispatch "1 - 3" + d3-drag "2 - 3" + d3-interpolate "1 - 3" + d3-selection "3" + d3-transition "3" + +d3-chord@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-chord/-/d3-chord-3.0.1.tgz#d156d61f485fce8327e6abf339cb41d8cbba6966" + integrity sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g== + 
dependencies: + d3-path "1 - 3" + +"d3-color@1 - 3", d3-color@3: + version "3.1.0" + resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-3.1.0.tgz#395b2833dfac71507f12ac2f7af23bf819de24e2" + integrity sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA== + +d3-contour@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-contour/-/d3-contour-3.0.1.tgz#2c64255d43059599cd0dba8fe4cc3d51ccdd9bbd" + integrity sha512-0Oc4D0KyhwhM7ZL0RMnfGycLN7hxHB8CMmwZ3+H26PWAG0ozNuYG5hXSDNgmP1SgJkQMrlG6cP20HoaSbvcJTQ== + dependencies: + d3-array "2 - 3" + +d3-delaunay@6: + version "6.0.2" + resolved "https://registry.yarnpkg.com/d3-delaunay/-/d3-delaunay-6.0.2.tgz#7fd3717ad0eade2fc9939f4260acfb503f984e92" + integrity sha512-IMLNldruDQScrcfT+MWnazhHbDJhcRJyOEBAJfwQnHle1RPh6WDuLvxNArUju2VSMSUuKlY5BGHRJ2cYyoFLQQ== + dependencies: + delaunator "5" + +"d3-dispatch@1 - 3", d3-dispatch@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-dispatch/-/d3-dispatch-3.0.1.tgz#5fc75284e9c2375c36c839411a0cf550cbfc4d5e" + integrity sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg== + +"d3-drag@2 - 3", d3-drag@3: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-drag/-/d3-drag-3.0.0.tgz#994aae9cd23c719f53b5e10e3a0a6108c69607ba" + integrity sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg== + dependencies: + d3-dispatch "1 - 3" + d3-selection "3" + +"d3-dsv@1 - 3", d3-dsv@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-dsv/-/d3-dsv-3.0.1.tgz#c63af978f4d6a0d084a52a673922be2160789b73" + integrity sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q== + dependencies: + commander "7" + iconv-lite "0.6" + rw "1" + +"d3-ease@1 - 3", d3-ease@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-ease/-/d3-ease-3.0.1.tgz#9658ac38a2140d59d346160f1f6c30fda0bd12f4" + integrity 
sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w== + +d3-fetch@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-fetch/-/d3-fetch-3.0.1.tgz#83141bff9856a0edb5e38de89cdcfe63d0a60a22" + integrity sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw== + dependencies: + d3-dsv "1 - 3" + +d3-force@3: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-force/-/d3-force-3.0.0.tgz#3e2ba1a61e70888fe3d9194e30d6d14eece155c4" + integrity sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg== + dependencies: + d3-dispatch "1 - 3" + d3-quadtree "1 - 3" + d3-timer "1 - 3" + +"d3-format@1 - 3", d3-format@3: + version "3.1.0" + resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-3.1.0.tgz#9260e23a28ea5cb109e93b21a06e24e2ebd55641" + integrity sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA== + +d3-geo@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-geo/-/d3-geo-3.0.1.tgz#4f92362fd8685d93e3b1fae0fd97dc8980b1ed7e" + integrity sha512-Wt23xBych5tSy9IYAM1FR2rWIBFWa52B/oF/GYe5zbdHrg08FU8+BuI6X4PvTwPDdqdAdq04fuWJpELtsaEjeA== + dependencies: + d3-array "2.5.0 - 3" + +d3-hierarchy@3: + version "3.1.2" + resolved "https://registry.yarnpkg.com/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz#b01cd42c1eed3d46db77a5966cf726f8c09160c6" + integrity sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA== + +"d3-interpolate@1 - 3", "d3-interpolate@1.2.0 - 3", d3-interpolate@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-3.0.1.tgz#3c47aa5b32c5b3dfb56ef3fd4342078a632b400d" + integrity sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g== + dependencies: + d3-color "1 - 3" + +"d3-path@1 - 3", d3-path@3: + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/d3-path/-/d3-path-3.0.1.tgz#f09dec0aaffd770b7995f1a399152bf93052321e" + integrity sha512-gq6gZom9AFZby0YLduxT1qmrp4xpBA1YZr19OI717WIdKE2OM5ETq5qrHLb301IgxhLwcuxvGZVLeeWc/k1I6w== + +d3-polygon@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-polygon/-/d3-polygon-3.0.1.tgz#0b45d3dd1c48a29c8e057e6135693ec80bf16398" + integrity sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg== + +"d3-quadtree@1 - 3", d3-quadtree@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-quadtree/-/d3-quadtree-3.0.1.tgz#6dca3e8be2b393c9a9d514dabbd80a92deef1a4f" + integrity sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw== + +d3-random@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-random/-/d3-random-3.0.1.tgz#d4926378d333d9c0bfd1e6fa0194d30aebaa20f4" + integrity sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ== + +d3-scale-chromatic@3: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-scale-chromatic/-/d3-scale-chromatic-3.0.0.tgz#15b4ceb8ca2bb0dcb6d1a641ee03d59c3b62376a" + integrity sha512-Lx9thtxAKrO2Pq6OO2Ua474opeziKr279P/TKZsMAhYyNDD3EnCffdbgeSYN5O7m2ByQsxtuP2CSDczNUIZ22g== + dependencies: + d3-color "1 - 3" + d3-interpolate "1 - 3" + +d3-scale@4: + version "4.0.2" + resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-4.0.2.tgz#82b38e8e8ff7080764f8dcec77bd4be393689396" + integrity sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ== + dependencies: + d3-array "2.10.0 - 3" + d3-format "1 - 3" + d3-interpolate "1.2.0 - 3" + d3-time "2.1.1 - 3" + d3-time-format "2 - 4" + +"d3-selection@2 - 3", d3-selection@3: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-selection/-/d3-selection-3.0.0.tgz#c25338207efa72cc5b9bd1458a1a41901f1e1b31" + integrity 
sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ== + +d3-shape@3: + version "3.1.0" + resolved "https://registry.yarnpkg.com/d3-shape/-/d3-shape-3.1.0.tgz#c8a495652d83ea6f524e482fca57aa3f8bc32556" + integrity sha512-tGDh1Muf8kWjEDT/LswZJ8WF85yDZLvVJpYU9Nq+8+yW1Z5enxrmXOhTArlkaElU+CTn0OTVNli+/i+HP45QEQ== + dependencies: + d3-path "1 - 3" + +"d3-time-format@2 - 4", d3-time-format@4: + version "4.1.0" + resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-4.1.0.tgz#7ab5257a5041d11ecb4fe70a5c7d16a195bb408a" + integrity sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg== + dependencies: + d3-time "1 - 3" + +"d3-time@1 - 3", "d3-time@2.1.1 - 3", d3-time@3: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-3.0.0.tgz#65972cb98ae2d4954ef5c932e8704061335d4975" + integrity sha512-zmV3lRnlaLI08y9IMRXSDshQb5Nj77smnfpnd2LrBa/2K281Jijactokeak14QacHs/kKq0AQ121nidNYlarbQ== + dependencies: + d3-array "2 - 3" + +"d3-timer@1 - 3", d3-timer@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-timer/-/d3-timer-3.0.1.tgz#6284d2a2708285b1abb7e201eda4380af35e63b0" + integrity sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA== + +"d3-transition@2 - 3", d3-transition@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/d3-transition/-/d3-transition-3.0.1.tgz#6869fdde1448868077fdd5989200cb61b2a1645f" + integrity sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w== + dependencies: + d3-color "1 - 3" + d3-dispatch "1 - 3" + d3-ease "1 - 3" + d3-interpolate "1 - 3" + d3-timer "1 - 3" + +d3-zoom@3: + version "3.0.0" + resolved "https://registry.yarnpkg.com/d3-zoom/-/d3-zoom-3.0.0.tgz#d13f4165c73217ffeaa54295cd6969b3e7aee8f3" + integrity sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw== + dependencies: + 
d3-dispatch "1 - 3" + d3-drag "2 - 3" + d3-interpolate "1 - 3" + d3-selection "2 - 3" + d3-transition "2 - 3" + +d3@^7.3.0, d3@^7.4.4: + version "7.4.4" + resolved "https://registry.yarnpkg.com/d3/-/d3-7.4.4.tgz#bfbf87487c37d3196efebd5a63e3a0ed8299d8ff" + integrity sha512-97FE+MYdAlV3R9P74+R3Uar7wUKkIFu89UWMjEaDhiJ9VxKvqaMxauImy8PC2DdBkdM2BxJOIoLxPrcZUyrKoQ== + dependencies: + d3-array "3" + d3-axis "3" + d3-brush "3" + d3-chord "3" + d3-color "3" + d3-contour "3" + d3-delaunay "6" + d3-dispatch "3" + d3-drag "3" + d3-dsv "3" + d3-ease "3" + d3-fetch "3" + d3-force "3" + d3-format "3" + d3-geo "3" + d3-hierarchy "3" + d3-interpolate "3" + d3-path "3" + d3-polygon "3" + d3-quadtree "3" + d3-random "3" + d3-scale "4" + d3-scale-chromatic "3" + d3-selection "3" + d3-shape "3" + d3-time "3" + d3-time-format "4" + d3-timer "3" + d3-transition "3" + d3-zoom "3" + +damerau-levenshtein@^1.0.7: + version "1.0.8" + resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7" + integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== + +data-urls@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" + integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== + dependencies: + abab "^2.0.3" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.0.0" + +debug@2.6.9, debug@^2.6.0, debug@^2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity 
sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +debug@^3.2.7: + version "3.2.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +decimal.js@^10.2.1: + version "10.3.1" + resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.3.1.tgz#d8c3a444a9c6774ba60ca6ad7261c3a94fd5e783" + integrity sha512-V0pfhfr8suzyPGOx3nmq4aHqabehUZn6Ch9kyFpV79TGDTWFmHqUqXdabR7QHqxzrYolF4+tVmJhUG4OURg5dQ== + +decode-uri-component@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" + integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= + +dedent@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" + integrity sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw= + +deep-is@^0.1.3, deep-is@~0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== + +deepmerge@^4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" + integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + +default-gateway@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71" + integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== + dependencies: + execa "^5.0.0" + +define-lazy-prop@^2.0.0: + version "2.0.0" + 
resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" + integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== + +define-properties@^1.1.3, define-properties@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1" + integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA== + dependencies: + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" + +defined@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" + integrity sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM= + +delaunator@5: + version "5.0.0" + resolved "https://registry.yarnpkg.com/delaunator/-/delaunator-5.0.0.tgz#60f052b28bd91c9b4566850ebf7756efe821d81b" + integrity sha512-AyLvtyJdbv/U1GkiS6gUUzclRoAY4Gs75qkMygJJhU75LW4DNuSF2RMzpxs9jw9Oz1BobHjTdkG3zdP55VxAqw== + dependencies: + robust-predicates "^3.0.0" + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= + +depd@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + +depd@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" + integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= + +destroy@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" + integrity 
sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== + +detect-newline@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" + integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== + +detect-node-es@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/detect-node-es/-/detect-node-es-1.1.0.tgz#163acdf643330caa0b4cd7c21e7ee7755d6fa493" + integrity sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ== + +detect-node@^2.0.4, detect-node@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" + integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== + +detect-port-alt@^1.1.6: + version "1.1.6" + resolved "https://registry.yarnpkg.com/detect-port-alt/-/detect-port-alt-1.1.6.tgz#24707deabe932d4a3cf621302027c2b266568275" + integrity sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q== + dependencies: + address "^1.0.1" + debug "^2.6.0" + +detective@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/detective/-/detective-5.2.0.tgz#feb2a77e85b904ecdea459ad897cc90a99bd2a7b" + integrity sha512-6SsIx+nUUbuK0EthKjv0zrdnajCCXVYGmbYYiYjFVpzcjwEs/JMDZ8tPRG29J/HhN56t3GJp2cGSWDRjjot8Pg== + dependencies: + acorn-node "^1.6.1" + defined "^1.0.0" + minimist "^1.1.1" + +didyoumean@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" + integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== + +diff-sequences@^27.5.1: + version "27.5.1" + resolved 
"https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-27.5.1.tgz#eaecc0d327fd68c8d9672a1e64ab8dccb2ef5327" + integrity sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ== + +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +dlv@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79" + integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== + +dns-equal@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" + integrity sha1-s55/HabrCnW6nBcySzR1PEfgZU0= + +dns-packet@^5.2.2: + version "5.3.1" + resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-5.3.1.tgz#eb94413789daec0f0ebe2fcc230bdc9d7c91b43d" + integrity sha512-spBwIj0TK0Ey3666GwIdWVfUpLyubpU53BTCu8iPn4r4oXd9O14Hjg3EHw3ts2oed77/SeckunUYCyRlSngqHw== + dependencies: + "@leichtgewicht/ip-codec" "^2.0.1" + +doctrine@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + dependencies: + esutils "^2.0.2" + +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +dom-accessibility-api@^0.5.6, dom-accessibility-api@^0.5.9: + version "0.5.14" + resolved 
"https://registry.yarnpkg.com/dom-accessibility-api/-/dom-accessibility-api-0.5.14.tgz#56082f71b1dc7aac69d83c4285eef39c15d93f56" + integrity sha512-NMt+m9zFMPZe0JcY9gN224Qvk6qLIdqex29clBvc/y75ZBX9YA9wNK3frsYvu2DI1xcCIwxwnX+TlsJ2DSOADg== + +dom-converter@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" + integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== + dependencies: + utila "~0.4" + +dom-serializer@0: + version "0.2.2" + resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" + integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== + dependencies: + domelementtype "^2.0.1" + entities "^2.0.0" + +dom-serializer@^1.0.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-1.4.1.tgz#de5d41b1aea290215dc45a6dae8adcf1d32e2d30" + integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.2.0" + entities "^2.0.0" + +domelementtype@1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" + integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== + +domelementtype@^2.0.1, domelementtype@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" + integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== + +domexception@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" + integrity 
sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== + dependencies: + webidl-conversions "^5.0.0" + +domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: + version "4.3.1" + resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c" + integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== + dependencies: + domelementtype "^2.2.0" + +domutils@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" + integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== + dependencies: + dom-serializer "0" + domelementtype "1" + +domutils@^2.5.2, domutils@^2.8.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135" + integrity sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== + dependencies: + dom-serializer "^1.0.1" + domelementtype "^2.2.0" + domhandler "^4.2.0" + +dot-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751" + integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + +dotenv-expand@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0" + integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== + +dotenv@^10.0.0: + version "10.0.0" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" + integrity 
sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== + +duplexer@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" + integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== + +ee-first@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= + +ejs@^3.1.6: + version "3.1.7" + resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.7.tgz#c544d9c7f715783dd92f0bddcf73a59e6962d006" + integrity sha512-BIar7R6abbUxDA3bfXrO4DSgwo8I+fB5/1zgujl3HLLjwd6+9iOnrT+t3grn2qbk9vOgBubXOFwX2m9axoFaGw== + dependencies: + jake "^10.8.5" + +electron-to-chromium@^1.4.118: + version "1.4.137" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.137.tgz#186180a45617283f1c012284458510cd99d6787f" + integrity sha512-0Rcpald12O11BUogJagX3HsCN3FE83DSqWjgXoHo5a72KUKMSfI39XBgJpgNNxS9fuGzytaFjE06kZkiVFy2qA== + +emittery@^0.10.2: + version "0.10.2" + resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" + integrity sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw== + +emittery@^0.8.1: + version "0.8.1" + resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" + integrity sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +emoji-regex@^9.2.2: + version "9.2.2" + resolved 
"https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== + +emojis-list@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" + integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== + +emoticon@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/emoticon/-/emoticon-3.2.0.tgz#c008ca7d7620fac742fe1bf4af8ff8fed154ae7f" + integrity sha512-SNujglcLTTg+lDAcApPNgEdudaqQFiAbJCqzjNxJkvN9vAwCGi0uu8IUVvx+f16h+V44KCY6Y2yboroc9pilHg== + +encodeurl@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= + +enhanced-resolve@^5.9.3: + version "5.9.3" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.9.3.tgz#44a342c012cbc473254af5cc6ae20ebd0aae5d88" + integrity sha512-Bq9VSor+kjvW3f9/MiiR4eE3XYgOl7/rS8lnSxbRbF3kS0B2r+Y9w5krBWxZgDxASVZbdYrn5wT4j/Wb0J9qow== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.2.0" + +entities@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" + integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== + +error-ex@^1.3.1: + version "1.3.2" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + is-arrayish "^0.2.1" + +error-stack-parser@^2.0.6: + version "2.0.7" + resolved 
"https://registry.yarnpkg.com/error-stack-parser/-/error-stack-parser-2.0.7.tgz#b0c6e2ce27d0495cf78ad98715e0cad1219abb57" + integrity sha512-chLOW0ZGRf4s8raLrDxa5sdkvPec5YdvwbFnqJme4rk0rFajP8mPtrDL1+I+CwrQDCjswDA5sREX7jYQDQs9vA== + dependencies: + stackframe "^1.1.1" + +es-abstract@^1.17.2, es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5: + version "1.20.0" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.0.tgz#b2d526489cceca004588296334726329e0a6bfb6" + integrity sha512-URbD8tgRthKD3YcC39vbvSDrX23upXnPcnGAjQfgxXF5ID75YcENawc9ZX/9iTP9ptUyfCLIxTTuMYoRfiOVKA== + dependencies: + call-bind "^1.0.2" + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + function.prototype.name "^1.1.5" + get-intrinsic "^1.1.1" + get-symbol-description "^1.0.0" + has "^1.0.3" + has-property-descriptors "^1.0.0" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + is-callable "^1.2.4" + is-negative-zero "^2.0.2" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.2" + is-string "^1.0.7" + is-weakref "^1.0.2" + object-inspect "^1.12.0" + object-keys "^1.1.1" + object.assign "^4.1.2" + regexp.prototype.flags "^1.4.1" + string.prototype.trimend "^1.0.5" + string.prototype.trimstart "^1.0.5" + unbox-primitive "^1.0.2" + +es-module-lexer@^0.9.0: + version "0.9.3" + resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19" + integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== + +es-shim-unscopables@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" + integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== + dependencies: + has "^1.0.3" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved 
"https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +escalade@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +escape-html@~1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= + +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +escodegen@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-2.0.0.tgz#5e32b12833e8aa8fa35e1bf0befa89380484c7dd" + integrity sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw== + dependencies: + esprima "^4.0.1" + estraverse "^5.2.0" + esutils "^2.0.2" + optionator "^0.8.1" + optionalDependencies: + source-map 
"~0.6.1" + +eslint-config-react-app@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/eslint-config-react-app/-/eslint-config-react-app-7.0.1.tgz#73ba3929978001c5c86274c017ea57eb5fa644b4" + integrity sha512-K6rNzvkIeHaTd8m/QEh1Zko0KI7BACWkkneSs6s9cKZC/J27X3eZR6Upt1jkmZ/4FK+XUOPPxMEN7+lbUXfSlA== + dependencies: + "@babel/core" "^7.16.0" + "@babel/eslint-parser" "^7.16.3" + "@rushstack/eslint-patch" "^1.1.0" + "@typescript-eslint/eslint-plugin" "^5.5.0" + "@typescript-eslint/parser" "^5.5.0" + babel-preset-react-app "^10.0.1" + confusing-browser-globals "^1.0.11" + eslint-plugin-flowtype "^8.0.3" + eslint-plugin-import "^2.25.3" + eslint-plugin-jest "^25.3.0" + eslint-plugin-jsx-a11y "^6.5.1" + eslint-plugin-react "^7.27.1" + eslint-plugin-react-hooks "^4.3.0" + eslint-plugin-testing-library "^5.0.1" + +eslint-import-resolver-node@^0.3.6: + version "0.3.6" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd" + integrity sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw== + dependencies: + debug "^3.2.7" + resolve "^1.20.0" + +eslint-module-utils@^2.7.3: + version "2.7.3" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.7.3.tgz#ad7e3a10552fdd0642e1e55292781bd6e34876ee" + integrity sha512-088JEC7O3lDZM9xGe0RerkOMd0EjFl+Yvd1jPWIkMT5u3H9+HC34mWWPnqPrN13gieT9pBOO+Qt07Nb/6TresQ== + dependencies: + debug "^3.2.7" + find-up "^2.1.0" + +eslint-plugin-flowtype@^8.0.3: + version "8.0.3" + resolved "https://registry.yarnpkg.com/eslint-plugin-flowtype/-/eslint-plugin-flowtype-8.0.3.tgz#e1557e37118f24734aa3122e7536a038d34a4912" + integrity sha512-dX8l6qUL6O+fYPtpNRideCFSpmWOUVx5QcaGLVqe/vlDiBSe4vYljDWDETwnyFzpl7By/WVIu6rcrniCgH9BqQ== + dependencies: + lodash "^4.17.21" + string-natural-compare "^3.0.1" + +eslint-plugin-import@^2.25.3: + version "2.26.0" + resolved 
"https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz#f812dc47be4f2b72b478a021605a59fc6fe8b88b" + integrity sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA== + dependencies: + array-includes "^3.1.4" + array.prototype.flat "^1.2.5" + debug "^2.6.9" + doctrine "^2.1.0" + eslint-import-resolver-node "^0.3.6" + eslint-module-utils "^2.7.3" + has "^1.0.3" + is-core-module "^2.8.1" + is-glob "^4.0.3" + minimatch "^3.1.2" + object.values "^1.1.5" + resolve "^1.22.0" + tsconfig-paths "^3.14.1" + +eslint-plugin-jest@^25.3.0: + version "25.7.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-25.7.0.tgz#ff4ac97520b53a96187bad9c9814e7d00de09a6a" + integrity sha512-PWLUEXeeF7C9QGKqvdSbzLOiLTx+bno7/HC9eefePfEb257QFHg7ye3dh80AZVkaa/RQsBB1Q/ORQvg2X7F0NQ== + dependencies: + "@typescript-eslint/experimental-utils" "^5.0.0" + +eslint-plugin-jsx-a11y@^6.5.1: + version "6.5.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.5.1.tgz#cdbf2df901040ca140b6ec14715c988889c2a6d8" + integrity sha512-sVCFKX9fllURnXT2JwLN5Qgo24Ug5NF6dxhkmxsMEUZhXRcGg+X3e1JbJ84YePQKBl5E0ZjAH5Q4rkdcGY99+g== + dependencies: + "@babel/runtime" "^7.16.3" + aria-query "^4.2.2" + array-includes "^3.1.4" + ast-types-flow "^0.0.7" + axe-core "^4.3.5" + axobject-query "^2.2.0" + damerau-levenshtein "^1.0.7" + emoji-regex "^9.2.2" + has "^1.0.3" + jsx-ast-utils "^3.2.1" + language-tags "^1.0.5" + minimatch "^3.0.4" + +eslint-plugin-react-hooks@^4.3.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.5.0.tgz#5f762dfedf8b2cf431c689f533c9d3fa5dcf25ad" + integrity sha512-8k1gRt7D7h03kd+SAAlzXkQwWK22BnK6GKZG+FJA6BAGy22CFvl8kCIXKpVux0cCxMWDQUPqSok0LKaZ0aOcCw== + +eslint-plugin-react@^7.27.1: + version "7.29.4" + resolved 
"https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.29.4.tgz#4717de5227f55f3801a5fd51a16a4fa22b5914d2" + integrity sha512-CVCXajliVh509PcZYRFyu/BoUEz452+jtQJq2b3Bae4v3xBUWPLCmtmBM+ZinG4MzwmxJgJ2M5rMqhqLVn7MtQ== + dependencies: + array-includes "^3.1.4" + array.prototype.flatmap "^1.2.5" + doctrine "^2.1.0" + estraverse "^5.3.0" + jsx-ast-utils "^2.4.1 || ^3.0.0" + minimatch "^3.1.2" + object.entries "^1.1.5" + object.fromentries "^2.0.5" + object.hasown "^1.1.0" + object.values "^1.1.5" + prop-types "^15.8.1" + resolve "^2.0.0-next.3" + semver "^6.3.0" + string.prototype.matchall "^4.0.6" + +eslint-plugin-testing-library@^5.0.1: + version "5.5.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-testing-library/-/eslint-plugin-testing-library-5.5.0.tgz#ce43113dac5a5d93e8b0a8d9937983cdbf63f049" + integrity sha512-eWQ19l6uWL7LW8oeMyQVSGjVYFnBqk7DMHjadm0yOHBvX3Xi9OBrsNuxoAMdX4r7wlQ5WWpW46d+CB6FWFL/PQ== + dependencies: + "@typescript-eslint/utils" "^5.13.0" + +eslint-scope@5.1.1, eslint-scope@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +eslint-scope@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642" + integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw== + dependencies: + esrecurse "^4.3.0" + estraverse "^5.2.0" + +eslint-utils@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" + integrity sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== + dependencies: + eslint-visitor-keys "^2.0.0" + 
+eslint-visitor-keys@^2.0.0, eslint-visitor-keys@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" + integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== + +eslint-visitor-keys@^3.0.0, eslint-visitor-keys@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" + integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== + +eslint-webpack-plugin@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/eslint-webpack-plugin/-/eslint-webpack-plugin-3.1.1.tgz#83dad2395e5f572d6f4d919eedaa9cf902890fcb" + integrity sha512-xSucskTN9tOkfW7so4EaiFIkulWLXwCB/15H917lR6pTv0Zot6/fetFucmENRb7J5whVSFKIvwnrnsa78SG2yg== + dependencies: + "@types/eslint" "^7.28.2" + jest-worker "^27.3.1" + micromatch "^4.0.4" + normalize-path "^3.0.0" + schema-utils "^3.1.1" + +eslint@^8.3.0: + version "8.15.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.15.0.tgz#fea1d55a7062da48d82600d2e0974c55612a11e9" + integrity sha512-GG5USZ1jhCu8HJkzGgeK8/+RGnHaNYZGrGDzUtigK3BsGESW/rs2az23XqE0WVwDxy1VRvvjSSGu5nB0Bu+6SA== + dependencies: + "@eslint/eslintrc" "^1.2.3" + "@humanwhocodes/config-array" "^0.9.2" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.3.2" + doctrine "^3.0.0" + escape-string-regexp "^4.0.0" + eslint-scope "^7.1.1" + eslint-utils "^3.0.0" + eslint-visitor-keys "^3.3.0" + espree "^9.3.2" + esquery "^1.4.0" + esutils "^2.0.2" + fast-deep-equal "^3.1.3" + file-entry-cache "^6.0.1" + functional-red-black-tree "^1.0.1" + glob-parent "^6.0.1" + globals "^13.6.0" + ignore "^5.2.0" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + js-yaml "^4.1.0" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash.merge 
"^4.6.2" + minimatch "^3.1.2" + natural-compare "^1.4.0" + optionator "^0.9.1" + regexpp "^3.2.0" + strip-ansi "^6.0.1" + strip-json-comments "^3.1.0" + text-table "^0.2.0" + v8-compile-cache "^2.0.3" + +espree@^9.3.2: + version "9.3.2" + resolved "https://registry.yarnpkg.com/espree/-/espree-9.3.2.tgz#f58f77bd334731182801ced3380a8cc859091596" + integrity sha512-D211tC7ZwouTIuY5x9XnS0E9sWNChB7IYKX/Xp5eQj3nFXhqmiUDB9q27y76oFl8jTg3pXcQx/bpxMfs3CIZbA== + dependencies: + acorn "^8.7.1" + acorn-jsx "^5.3.2" + eslint-visitor-keys "^3.3.0" + +esprima@^4.0.0, esprima@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esquery@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" + integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^4.1.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +estree-walker@^1.0.1: + 
version "1.0.1" + resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-1.0.1.tgz#31bc5d612c96b704106b477e6dd5d8aa138cb700" + integrity sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +etag@~1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= + +eventemitter3@^4.0.0: + version "4.0.7" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" + integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== + +events@^3.2.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" + integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== + +execa@^5.0.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" + strip-final-newline "^2.0.0" + +exit@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + integrity sha1-BjJjj42HfMghB9MKD/8aF8uhzQw= + +expect@^27.5.1: + version "27.5.1" + resolved 
"https://registry.yarnpkg.com/expect/-/expect-27.5.1.tgz#83ce59f1e5bdf5f9d2b94b61d2050db48f3fef74" + integrity sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw== + dependencies: + "@jest/types" "^27.5.1" + jest-get-type "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + +express@^4.17.3: + version "4.18.1" + resolved "https://registry.yarnpkg.com/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf" + integrity sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q== + dependencies: + accepts "~1.3.8" + array-flatten "1.1.1" + body-parser "1.20.0" + content-disposition "0.5.4" + content-type "~1.0.4" + cookie "0.5.0" + cookie-signature "1.0.6" + debug "2.6.9" + depd "2.0.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "1.2.0" + fresh "0.5.2" + http-errors "2.0.0" + merge-descriptors "1.0.1" + methods "~1.1.2" + on-finished "2.4.1" + parseurl "~1.3.3" + path-to-regexp "0.1.7" + proxy-addr "~2.0.7" + qs "6.10.3" + range-parser "~1.2.1" + safe-buffer "5.2.1" + send "0.18.0" + serve-static "1.15.0" + setprototypeof "1.2.0" + statuses "2.0.1" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + +extend@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== + +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-glob@^3.2.11, fast-glob@^3.2.9: + version "3.2.11" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.11.tgz#a1172ad95ceb8a16e20caa5c5e56480e5129c1d9" + 
integrity sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= + +fastq@^1.6.0: + version "1.13.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" + integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== + dependencies: + reusify "^1.0.4" + +faye-websocket@^0.11.3: + version "0.11.4" + resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.4.tgz#7f0d9275cfdd86a1c963dc8b65fcc451edcbb1da" + integrity sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g== + dependencies: + websocket-driver ">=0.5.1" + +fb-watchman@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.1.tgz#fc84fb39d2709cf3ff6d743706157bb5708a8a85" + integrity sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg== + dependencies: + bser "2.1.1" + +file-entry-cache@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity 
sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + dependencies: + flat-cache "^3.0.4" + +file-loader@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-6.2.0.tgz#baef7cf8e1840df325e4390b4484879480eebe4d" + integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw== + dependencies: + loader-utils "^2.0.0" + schema-utils "^3.0.0" + +file-selector@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/file-selector/-/file-selector-0.4.0.tgz#59ec4f27aa5baf0841e9c6385c8386bef4d18b17" + integrity sha512-iACCiXeMYOvZqlF1kTiYINzgepRBymz1wwjiuup9u9nayhb6g4fSwiyJ/6adli+EPwrWtpgQAh2PoS7HukEGEg== + dependencies: + tslib "^2.0.3" + +filelist@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/filelist/-/filelist-1.0.3.tgz#448607750376484932f67ef1b9ff07386b036c83" + integrity sha512-LwjCsruLWQULGYKy7TX0OPtrL9kLpojOFKc5VCTxdFTV7w5zbsgqVKfnkKG7Qgjtq50gKfO56hJv88OfcGb70Q== + dependencies: + minimatch "^5.0.1" + +filesize@^8.0.6: + version "8.0.7" + resolved "https://registry.yarnpkg.com/filesize/-/filesize-8.0.7.tgz#695e70d80f4e47012c132d57a059e80c6b580bd8" + integrity sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ== + +fill-range@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +filter-obj@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/filter-obj/-/filter-obj-1.1.0.tgz#9b311112bc6c6127a16e016c6c5d7f19e0805c5b" + integrity sha1-mzERErxsYSehbgFsbF1/GeCAXFs= + +finalhandler@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" 
+ integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== + dependencies: + debug "2.6.9" + encodeurl "~1.0.2" + escape-html "~1.0.3" + on-finished "2.4.1" + parseurl "~1.3.3" + statuses "2.0.1" + unpipe "~1.0.0" + +find-cache-dir@^3.3.1: + version "3.3.2" + resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b" + integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== + dependencies: + commondir "^1.0.1" + make-dir "^3.0.2" + pkg-dir "^4.1.0" + +find-root@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/find-root/-/find-root-1.1.0.tgz#abcfc8ba76f708c42a97b3d685b7e9450bfb9ce4" + integrity sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng== + +find-up@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" + integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c= + dependencies: + locate-path "^2.0.0" + +find-up@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" + integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== + dependencies: + locate-path "^3.0.0" + +find-up@^4.0.0, find-up@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +find-up@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== + 
dependencies: + locate-path "^6.0.0" + path-exists "^4.0.0" + +flat-cache@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + dependencies: + flatted "^3.1.0" + rimraf "^3.0.2" + +flatted@^3.1.0: + version "3.2.5" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.5.tgz#76c8584f4fc843db64702a6bd04ab7a8bd666da3" + integrity sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg== + +focus-lock@^0.11.2: + version "0.11.2" + resolved "https://registry.yarnpkg.com/focus-lock/-/focus-lock-0.11.2.tgz#aeef3caf1cea757797ac8afdebaec8fd9ab243ed" + integrity sha512-pZ2bO++NWLHhiKkgP1bEXHhR1/OjVcSvlCJ98aNJDFeb7H5OOQaO+SKOZle6041O9rv2tmbrO4JzClAvDUHf0g== + dependencies: + tslib "^2.0.3" + +follow-redirects@^1.0.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.0.tgz#06441868281c86d0dda4ad8bdaead2d02dca89d4" + integrity sha512-aExlJShTV4qOUOL7yF1U5tvLCB0xQuudbf6toyYA0E/acBNw71mvjFTnLaRp50aQaYocMR0a/RMMBIHeZnGyjQ== + +fork-ts-checker-webpack-plugin@^6.5.0: + version "6.5.2" + resolved "https://registry.yarnpkg.com/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.2.tgz#4f67183f2f9eb8ba7df7177ce3cf3e75cdafb340" + integrity sha512-m5cUmF30xkZ7h4tWUgTAcEaKmUW7tfyUyTqNNOz7OxWJ0v1VWKTcOvH8FWHUwSjlW/356Ijc9vi3XfcPstpQKA== + dependencies: + "@babel/code-frame" "^7.8.3" + "@types/json-schema" "^7.0.5" + chalk "^4.1.0" + chokidar "^3.4.2" + cosmiconfig "^6.0.0" + deepmerge "^4.2.2" + fs-extra "^9.0.0" + glob "^7.1.6" + memfs "^3.1.2" + minimatch "^3.0.4" + schema-utils "2.7.0" + semver "^7.3.2" + tapable "^1.0.0" + +form-data@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + 
integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +forwarded@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" + integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== + +fraction.js@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.2.0.tgz#448e5109a313a3527f5a3ab2119ec4cf0e0e2950" + integrity sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA== + +fresh@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= + +fs-extra@^10.0.0: + version "10.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" + integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs-extra@^9.0.0, fs-extra@^9.0.1: + version "9.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" + integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== + dependencies: + at-least-node "^1.0.0" + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs-monkey@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.3.tgz#ae3ac92d53bb328efe0e9a1d9541f6ad8d48e2d3" + integrity sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q== + +fs.realpath@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + +fsevents@^2.3.2, fsevents@~2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +function.prototype.name@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" + integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.0" + functions-have-names "^1.2.2" + +functional-red-black-tree@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" + integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= + +functions-have-names@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== + +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-caller-file@^2.0.5: + version "2.0.5" + 
resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6" + integrity sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.1" + +get-nonce@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/get-nonce/-/get-nonce-1.0.1.tgz#fdf3f0278073820d2ce9426c18f07481b1e0cdf3" + integrity sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q== + +get-own-enumerable-property-symbols@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664" + integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g== + +get-package-type@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + +get-stream@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + +get-symbol-description@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" + integrity 
sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.1" + +glob-parent@^5.1.2, glob-parent@~5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob-parent@^6.0.1, glob-parent@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== + dependencies: + is-glob "^4.0.3" + +glob-to-regexp@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" + integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== + +glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: + version "7.2.0" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023" + integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +global-modules@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" + integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== + dependencies: + global-prefix "^3.0.0" + +global-prefix@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" + integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== + dependencies: + ini "^1.3.5" + kind-of "^6.0.2" + which "^1.3.1" + +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globals@^13.6.0, globals@^13.9.0: + version "13.14.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.14.0.tgz#daf3ff9b4336527cf56e98330b6f64bea9aff9df" + integrity sha512-ERO68sOYwm5UuLvSJTY7w7NP2c8S4UcXs3X1GBX8cwOr+ShOcDBbCY5mH4zxz0jsYCdJ8ve8Mv9n2YGJMB1aeg== + dependencies: + type-fest "^0.20.2" + +globby@^11.0.4: + version "11.1.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + +graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: + version "4.2.10" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + +gzip-size@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462" + integrity sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q== + dependencies: + duplexer "^0.1.2" + +handle-thing@^2.0.0: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e" + integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== + +harmony-reflect@^1.4.6: + version "1.6.2" + resolved "https://registry.yarnpkg.com/harmony-reflect/-/harmony-reflect-1.6.2.tgz#31ecbd32e648a34d030d86adb67d4d47547fe710" + integrity sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g== + +has-bigints@^1.0.1, has-bigints@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" + integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-property-descriptors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" + integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== + dependencies: + get-intrinsic "^1.1.1" + +has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +has-tostringtag@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== + dependencies: + has-symbols "^1.0.2" + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +hast-to-hyperscript@^9.0.0: + version "9.0.1" + resolved "https://registry.yarnpkg.com/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz#9b67fd188e4c81e8ad66f803855334173920218d" + integrity sha512-zQgLKqF+O2F72S1aa4y2ivxzSlko3MAvxkwG8ehGmNiqd98BIN3JM1rAJPmplEyLmGLO2QZYJtIneOSZ2YbJuA== + dependencies: + "@types/unist" "^2.0.3" + comma-separated-tokens "^1.0.0" + property-information "^5.3.0" + space-separated-tokens "^1.0.0" + style-to-object "^0.3.0" + unist-util-is "^4.0.0" + web-namespaces "^1.0.0" + +hast-util-from-parse5@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/hast-util-from-parse5/-/hast-util-from-parse5-6.0.1.tgz#554e34abdeea25ac76f5bd950a1f0180e0b3bc2a" + integrity sha512-jeJUWiN5pSxW12Rh01smtVkZgZr33wBokLzKLwinYOUfSzm1Nl/c3GUGebDyOKjdsRgMvoVbV0VpAcpjF4NrJA== + dependencies: + "@types/parse5" "^5.0.0" + hastscript "^6.0.0" + property-information "^5.0.0" + vfile "^4.0.0" + vfile-location "^3.2.0" + web-namespaces "^1.0.0" + +hast-util-is-element@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/hast-util-is-element/-/hast-util-is-element-1.1.0.tgz#3b3ed5159a2707c6137b48637fbfe068e175a425" + integrity sha512-oUmNua0bFbdrD/ELDSSEadRVtWZOf3iF6Lbv81naqsIV99RnSCieTbWuWCY8BAeEfKJTKl0gRdokv+dELutHGQ== + +hast-util-parse-selector@^2.0.0: + version "2.2.5" + resolved 
"https://registry.yarnpkg.com/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz#d57c23f4da16ae3c63b3b6ca4616683313499c3a" + integrity sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ== + +hast-util-raw@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/hast-util-raw/-/hast-util-raw-6.1.0.tgz#e16a3c2642f65cc7c480c165400a40d604ab75d0" + integrity sha512-5FoZLDHBpka20OlZZ4I/+RBw5piVQ8iI1doEvffQhx5CbCyTtP8UCq8Tw6NmTAMtXgsQxmhW7Ly8OdFre5/YMQ== + dependencies: + "@types/hast" "^2.0.0" + hast-util-from-parse5 "^6.0.0" + hast-util-to-parse5 "^6.0.0" + html-void-elements "^1.0.0" + parse5 "^6.0.0" + unist-util-position "^3.0.0" + unist-util-visit "^2.0.0" + vfile "^4.0.0" + web-namespaces "^1.0.0" + xtend "^4.0.0" + zwitch "^1.0.0" + +hast-util-to-html@^7.1.1: + version "7.1.3" + resolved "https://registry.yarnpkg.com/hast-util-to-html/-/hast-util-to-html-7.1.3.tgz#9f339ca9bea71246e565fc79ff7dbfe98bb50f5e" + integrity sha512-yk2+1p3EJTEE9ZEUkgHsUSVhIpCsL/bvT8E5GzmWc+N1Po5gBw+0F8bo7dpxXR0nu0bQVxVZGX2lBGF21CmeDw== + dependencies: + ccount "^1.0.0" + comma-separated-tokens "^1.0.0" + hast-util-is-element "^1.0.0" + hast-util-whitespace "^1.0.0" + html-void-elements "^1.0.0" + property-information "^5.0.0" + space-separated-tokens "^1.0.0" + stringify-entities "^3.0.1" + unist-util-is "^4.0.0" + xtend "^4.0.0" + +hast-util-to-parse5@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/hast-util-to-parse5/-/hast-util-to-parse5-6.0.0.tgz#1ec44650b631d72952066cea9b1445df699f8479" + integrity sha512-Lu5m6Lgm/fWuz8eWnrKezHtVY83JeRGaNQ2kn9aJgqaxvVkFCZQBEhgodZUDUvoodgyROHDb3r5IxAEdl6suJQ== + dependencies: + hast-to-hyperscript "^9.0.0" + property-information "^5.0.0" + web-namespaces "^1.0.0" + xtend "^4.0.0" + zwitch "^1.0.0" + +hast-util-whitespace@^1.0.0: + version "1.0.4" + resolved 
"https://registry.yarnpkg.com/hast-util-whitespace/-/hast-util-whitespace-1.0.4.tgz#e4fe77c4a9ae1cb2e6c25e02df0043d0164f6e41" + integrity sha512-I5GTdSfhYfAPNztx2xJRQpG8cuDSNt599/7YUn7Gx/WxNMsG+a835k97TDkFgk123cwjfwINaZknkKkphx/f2A== + +hastscript@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/hastscript/-/hastscript-6.0.0.tgz#e8768d7eac56c3fdeac8a92830d58e811e5bf640" + integrity sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w== + dependencies: + "@types/hast" "^2.0.0" + comma-separated-tokens "^1.0.0" + hast-util-parse-selector "^2.0.0" + property-information "^5.0.0" + space-separated-tokens "^1.0.0" + +he@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" + integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== + +history@^5.2.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/history/-/history-5.3.0.tgz#1548abaa245ba47992f063a0783db91ef201c73b" + integrity sha512-ZqaKwjjrAYUYfLG+htGaIIZ4nioX2L70ZUMIFysS3xvBsSG4x/n1V6TXV3N8ZYNuFGlDirFg32T7B6WOUPDYcQ== + dependencies: + "@babel/runtime" "^7.7.6" + +hoist-non-react-statics@^3.3.0, hoist-non-react-statics@^3.3.1, hoist-non-react-statics@^3.3.2: + version "3.3.2" + resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz#ece0acaf71d62c2969c2ec59feff42a4b1a85b45" + integrity sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw== + dependencies: + react-is "^16.7.0" + +hoopy@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/hoopy/-/hoopy-0.1.4.tgz#609207d661100033a9a9402ad3dea677381c1b1d" + integrity sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ== + +hpack.js@^2.1.6: + version "2.1.6" + resolved 
"https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" + integrity sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI= + dependencies: + inherits "^2.0.1" + obuf "^1.0.0" + readable-stream "^2.0.1" + wbuf "^1.1.0" + +html-encoding-sniffer@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" + integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== + dependencies: + whatwg-encoding "^1.0.5" + +html-entities@^2.1.0, html-entities@^2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-2.3.3.tgz#117d7626bece327fc8baace8868fa6f5ef856e46" + integrity sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA== + +html-escaper@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + +html-minifier-terser@^6.0.2: + version "6.1.0" + resolved "https://registry.yarnpkg.com/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#bfc818934cc07918f6b3669f5774ecdfd48f32ab" + integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw== + dependencies: + camel-case "^4.1.2" + clean-css "^5.2.2" + commander "^8.3.0" + he "^1.2.0" + param-case "^3.0.4" + relateurl "^0.2.7" + terser "^5.10.0" + +html-void-elements@^1.0.0: + version "1.0.5" + resolved "https://registry.yarnpkg.com/html-void-elements/-/html-void-elements-1.0.5.tgz#ce9159494e86d95e45795b166c2021c2cfca4483" + integrity sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w== + +html-webpack-plugin@^5.5.0: + version "5.5.0" + resolved 
"https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz#c3911936f57681c1f9f4d8b68c158cd9dfe52f50" + integrity sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw== + dependencies: + "@types/html-minifier-terser" "^6.0.0" + html-minifier-terser "^6.0.2" + lodash "^4.17.21" + pretty-error "^4.0.0" + tapable "^2.0.0" + +htmlparser2@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7" + integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A== + dependencies: + domelementtype "^2.0.1" + domhandler "^4.0.0" + domutils "^2.5.2" + entities "^2.0.0" + +http-deceiver@^1.2.7: + version "1.2.7" + resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" + integrity sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc= + +http-errors@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" + integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== + dependencies: + depd "2.0.0" + inherits "2.0.4" + setprototypeof "1.2.0" + statuses "2.0.1" + toidentifier "1.0.1" + +http-errors@~1.6.2: + version "1.6.3" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" + integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0= + dependencies: + depd "~1.1.2" + inherits "2.0.3" + setprototypeof "1.1.0" + statuses ">= 1.4.0 < 2" + +http-parser-js@>=0.5.1: + version "0.5.6" + resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.6.tgz#2e02406ab2df8af8a7abfba62e0da01c62b95afd" + integrity sha512-vDlkRPDJn93swjcjqMSaGSPABbIarsr1TLAui/gLDXzV5VsJNdXNzMYDyNBLQkjWQCJ1uizu8T2oDMhmGt0PRA== + +http-proxy-agent@^4.0.1: + version "4.0.1" + resolved 
"https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" + integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== + dependencies: + "@tootallnate/once" "1" + agent-base "6" + debug "4" + +http-proxy-middleware@^2.0.3: + version "2.0.6" + resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz#e1a4dd6979572c7ab5a4e4b55095d1f32a74963f" + integrity sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw== + dependencies: + "@types/http-proxy" "^1.17.8" + http-proxy "^1.18.1" + is-glob "^4.0.1" + is-plain-obj "^3.0.0" + micromatch "^4.0.2" + +http-proxy@^1.18.1: + version "1.18.1" + resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" + integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== + dependencies: + eventemitter3 "^4.0.0" + follow-redirects "^1.0.0" + requires-port "^1.0.0" + +https-proxy-agent@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" + integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== + dependencies: + agent-base "6" + debug "4" + +human-signals@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + +iconv-lite@0.4.24: + version "0.4.24" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 
2.1.2 < 3" + +iconv-lite@0.6, iconv-lite@^0.6.3: + version "0.6.3" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" + integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== + dependencies: + safer-buffer ">= 2.1.2 < 3.0.0" + +icss-utils@^5.0.0, icss-utils@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" + integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== + +idb@^6.1.4: + version "6.1.5" + resolved "https://registry.yarnpkg.com/idb/-/idb-6.1.5.tgz#dbc53e7adf1ac7c59f9b2bf56e00b4ea4fce8c7b" + integrity sha512-IJtugpKkiVXQn5Y+LteyBCNk1N8xpGV3wWZk9EVtZWH8DYkjBn0bX1XnGP9RkyZF0sAcywa6unHqSWKe7q4LGw== + +identity-obj-proxy@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/identity-obj-proxy/-/identity-obj-proxy-3.0.0.tgz#94d2bda96084453ef36fbc5aaec37e0f79f1fc14" + integrity sha1-lNK9qWCERT7zb7xarsN+D3nx/BQ= + dependencies: + harmony-reflect "^1.4.6" + +ignore@^5.1.8, ignore@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" + integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== + +immer@^9.0.7: + version "9.0.12" + resolved "https://registry.yarnpkg.com/immer/-/immer-9.0.12.tgz#2d33ddf3ee1d247deab9d707ca472c8c942a0f20" + integrity sha512-lk7UNmSbAukB5B6dh9fnh5D0bJTOFKxVg2cyJWTYrWRfhLrLMBquONcUs3aFq507hNoIZEDDh8lb8UtOizSMhA== + +import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + 
parent-module "^1.0.0" + resolve-from "^4.0.0" + +import-local@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" + integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== + dependencies: + pkg-dir "^4.2.0" + resolve-cwd "^3.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= + +indent-string@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@2.0.4, inherits@^2.0.0, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inherits@2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= + +ini@^1.3.5: + version "1.3.8" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== + +inline-style-parser@0.1.1: + version "0.1.1" + resolved 
"https://registry.yarnpkg.com/inline-style-parser/-/inline-style-parser-0.1.1.tgz#ec8a3b429274e9c0a1f1c4ffa9453a7fef72cea1" + integrity sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q== + +inter-ui@^3.19.3: + version "3.19.3" + resolved "https://registry.yarnpkg.com/inter-ui/-/inter-ui-3.19.3.tgz#cf4b4b6d30de8d5463e2462588654b325206488c" + integrity sha512-5FG9fjuYOXocIfjzcCBhICL5cpvwEetseL3FU6tP3d6Bn7g8wODhB+I9RNGRTizCT7CUG4GOK54OPxqq3msQgg== + +internal-slot@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" + integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== + dependencies: + get-intrinsic "^1.1.0" + has "^1.0.3" + side-channel "^1.0.4" + +"internmap@1 - 2": + version "2.0.3" + resolved "https://registry.yarnpkg.com/internmap/-/internmap-2.0.3.tgz#6685f23755e43c524e251d29cbc97248e3061009" + integrity sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg== + +invariant@^2.2.4: + version "2.2.4" + resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" + integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== + dependencies: + loose-envify "^1.0.0" + +ipaddr.js@1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + +ipaddr.js@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.0.1.tgz#eca256a7a877e917aeb368b0a7497ddf42ef81c0" + integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== + +is-alphabetical@^1.0.0: + version "1.0.4" + resolved 
"https://registry.yarnpkg.com/is-alphabetical/-/is-alphabetical-1.0.4.tgz#9e7d6b94916be22153745d184c298cbf986a686d" + integrity sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg== + +is-alphanumerical@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz#7eb9a2431f855f6b1ef1a78e326df515696c4dbf" + integrity sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A== + dependencies: + is-alphabetical "^1.0.0" + is-decimal "^1.0.0" + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= + +is-bigint@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== + dependencies: + has-bigints "^1.0.1" + +is-binary-path@~2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + dependencies: + binary-extensions "^2.0.0" + +is-boolean-object@^1.1.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-buffer@^2.0.0: + version "2.0.5" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" + integrity 
sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ== + +is-callable@^1.1.4, is-callable@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.4.tgz#47301d58dd0259407865547853df6d61fe471945" + integrity sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w== + +is-core-module@^2.2.0, is-core-module@^2.8.1: + version "2.9.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.9.0.tgz#e1c34429cd51c6dd9e09e0799e396e27b19a9c69" + integrity sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A== + dependencies: + has "^1.0.3" + +is-date-object@^1.0.1: + version "1.0.5" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== + dependencies: + has-tostringtag "^1.0.0" + +is-decimal@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-decimal/-/is-decimal-1.0.4.tgz#65a3a5958a1c5b63a706e1b333d7cd9f630d3fa5" + integrity sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw== + +is-docker@^2.0.0, is-docker@^2.1.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" + integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== + +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity 
sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-generator-fn@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" + integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== + +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: + version "4.0.3" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-hexadecimal@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz#cc35c97588da4bd49a8eedd6bc4082d44dcb23a7" + integrity sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw== + +is-module@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" + integrity sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE= + +is-negative-zero@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" + integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== + +is-number-object@^1.0.4: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" + integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== + dependencies: + has-tostringtag "^1.0.0" + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity 
sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-obj@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" + integrity sha1-PkcprB9f3gJc19g6iW2rn09n2w8= + +is-plain-obj@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" + integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== + +is-plain-obj@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-3.0.0.tgz#af6f2ea14ac5a646183a5bbdb5baabbc156ad9d7" + integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA== + +is-plain-object@5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344" + integrity sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q== + +is-potential-custom-element-name@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz#171ed6f19e3ac554394edf78caa05784a45bebb5" + integrity sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ== + +is-regex@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-regexp@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069" + integrity sha1-/S2INUXEa6xaYz57mgnof6LLUGk= + +is-root@^2.1.0: + version "2.1.0" 
+ resolved "https://registry.yarnpkg.com/is-root/-/is-root-2.1.0.tgz#809e18129cf1129644302a4f8544035d51984a9c" + integrity sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg== + +is-shared-array-buffer@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" + integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== + dependencies: + call-bind "^1.0.2" + +is-stream@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +is-string@^1.0.5, is-string@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== + dependencies: + has-tostringtag "^1.0.0" + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== + dependencies: + has-symbols "^1.0.2" + +is-typedarray@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= + +is-weakref@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" + integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== + dependencies: + call-bind "^1.0.2" + 
+is-whitespace-character@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz#0858edd94a95594c7c9dd0b5c174ec6e45ee4aa7" + integrity sha512-SDweEzfIZM0SJV0EUga669UTKlmL0Pq8Lno0QDQsPnvECB3IM2aP0gdx5TrU0A01MAPfViaZiI2V1QMZLaKK5w== + +is-word-character@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-word-character/-/is-word-character-1.0.4.tgz#ce0e73216f98599060592f62ff31354ddbeb0230" + integrity sha512-5SMO8RVennx3nZrqtKwCGyyetPE9VDba5ugvKLaD4KopPG5kR4mQ7tNt/r7feL5yt5h3lpuBbIUmCOG2eSzXHA== + +is-wsl@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" + integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== + dependencies: + is-docker "^2.0.0" + +isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= + +istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== + +istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.0.tgz#31d18bdd127f825dd02ea7bfdfd906f8ab840e9f" + integrity sha512-6Lthe1hqXHBNsqvgDzGO6l03XNeu3CrG4RqQ1KM9+l5+jNGpEJfIELx1NS3SEHmJQA8np/u+E4EPRKRiu6m19A== + dependencies: + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" + 
"@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.2.0" + semver "^6.3.0" + +istanbul-lib-report@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" + integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^3.0.0" + supports-color "^7.1.0" + +istanbul-lib-source-maps@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== + dependencies: + debug "^4.1.1" + istanbul-lib-coverage "^3.0.0" + source-map "^0.6.1" + +istanbul-reports@^3.1.3: + version "3.1.4" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.4.tgz#1b6f068ecbc6c331040aab5741991273e609e40c" + integrity sha512-r1/DshN4KSE7xWEknZLLLLDn5CJybV3nw01VTkp6D5jzLuELlcbudfj/eSQFvrKsJuTVCGnePO7ho82Nw9zzfw== + dependencies: + html-escaper "^2.0.0" + istanbul-lib-report "^3.0.0" + +jake@^10.8.5: + version "10.8.5" + resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46" + integrity sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw== + dependencies: + async "^3.2.3" + chalk "^4.0.2" + filelist "^1.0.1" + minimatch "^3.0.4" + +jest-changed-files@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-27.5.1.tgz#a348aed00ec9bf671cc58a66fcbe7c3dfd6a68f5" + integrity sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw== + dependencies: + "@jest/types" "^27.5.1" + execa "^5.0.0" + throat "^6.0.1" + +jest-circus@^27.5.1: + version "27.5.1" + resolved 
"https://registry.yarnpkg.com/jest-circus/-/jest-circus-27.5.1.tgz#37a5a4459b7bf4406e53d637b49d22c65d125ecc" + integrity sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + dedent "^0.7.0" + expect "^27.5.1" + is-generator-fn "^2.0.0" + jest-each "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + jest-runtime "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + pretty-format "^27.5.1" + slash "^3.0.0" + stack-utils "^2.0.3" + throat "^6.0.1" + +jest-cli@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-27.5.1.tgz#278794a6e6458ea8029547e6c6cbf673bd30b145" + integrity sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw== + dependencies: + "@jest/core" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.9" + import-local "^3.0.2" + jest-config "^27.5.1" + jest-util "^27.5.1" + jest-validate "^27.5.1" + prompts "^2.0.1" + yargs "^16.2.0" + +jest-config@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-27.5.1.tgz#5c387de33dca3f99ad6357ddeccd91bf3a0e4a41" + integrity sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA== + dependencies: + "@babel/core" "^7.8.0" + "@jest/test-sequencer" "^27.5.1" + "@jest/types" "^27.5.1" + babel-jest "^27.5.1" + chalk "^4.0.0" + ci-info "^3.2.0" + deepmerge "^4.2.2" + glob "^7.1.1" + graceful-fs "^4.2.9" + jest-circus "^27.5.1" + jest-environment-jsdom "^27.5.1" + jest-environment-node "^27.5.1" + jest-get-type "^27.5.1" + jest-jasmine2 "^27.5.1" + jest-regex-util "^27.5.1" + jest-resolve "^27.5.1" + jest-runner "^27.5.1" + jest-util "^27.5.1" + jest-validate "^27.5.1" + 
micromatch "^4.0.4" + parse-json "^5.2.0" + pretty-format "^27.5.1" + slash "^3.0.0" + strip-json-comments "^3.1.1" + +jest-diff@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-27.5.1.tgz#a07f5011ac9e6643cf8a95a462b7b1ecf6680def" + integrity sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw== + dependencies: + chalk "^4.0.0" + diff-sequences "^27.5.1" + jest-get-type "^27.5.1" + pretty-format "^27.5.1" + +jest-docblock@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-27.5.1.tgz#14092f364a42c6108d42c33c8cf30e058e25f6c0" + integrity sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ== + dependencies: + detect-newline "^3.0.0" + +jest-each@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-27.5.1.tgz#5bc87016f45ed9507fed6e4702a5b468a5b2c44e" + integrity sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ== + dependencies: + "@jest/types" "^27.5.1" + chalk "^4.0.0" + jest-get-type "^27.5.1" + jest-util "^27.5.1" + pretty-format "^27.5.1" + +jest-environment-jsdom@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz#ea9ccd1fc610209655a77898f86b2b559516a546" + integrity sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/fake-timers" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + jest-mock "^27.5.1" + jest-util "^27.5.1" + jsdom "^16.6.0" + +jest-environment-node@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-27.5.1.tgz#dedc2cfe52fab6b8f5714b4808aefa85357a365e" + integrity 
sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/fake-timers" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + jest-mock "^27.5.1" + jest-util "^27.5.1" + +jest-get-type@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-27.5.1.tgz#3cd613c507b0f7ace013df407a1c1cd578bcb4f1" + integrity sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw== + +jest-haste-map@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-27.5.1.tgz#9fd8bd7e7b4fa502d9c6164c5640512b4e811e7f" + integrity sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng== + dependencies: + "@jest/types" "^27.5.1" + "@types/graceful-fs" "^4.1.2" + "@types/node" "*" + anymatch "^3.0.3" + fb-watchman "^2.0.0" + graceful-fs "^4.2.9" + jest-regex-util "^27.5.1" + jest-serializer "^27.5.1" + jest-util "^27.5.1" + jest-worker "^27.5.1" + micromatch "^4.0.4" + walker "^1.0.7" + optionalDependencies: + fsevents "^2.3.2" + +jest-jasmine2@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz#a037b0034ef49a9f3d71c4375a796f3b230d1ac4" + integrity sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/source-map" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + expect "^27.5.1" + is-generator-fn "^2.0.0" + jest-each "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + jest-runtime "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + pretty-format "^27.5.1" + throat "^6.0.1" + +jest-leak-detector@^27.5.1: + version "27.5.1" + resolved 
"https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz#6ec9d54c3579dd6e3e66d70e3498adf80fde3fb8" + integrity sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ== + dependencies: + jest-get-type "^27.5.1" + pretty-format "^27.5.1" + +jest-matcher-utils@^27.0.0, jest-matcher-utils@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz#9c0cdbda8245bc22d2331729d1091308b40cf8ab" + integrity sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw== + dependencies: + chalk "^4.0.0" + jest-diff "^27.5.1" + jest-get-type "^27.5.1" + pretty-format "^27.5.1" + +jest-message-util@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-27.5.1.tgz#bdda72806da10d9ed6425e12afff38cd1458b6cf" + integrity sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^27.5.1" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^27.5.1" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-message-util@^28.1.0: + version "28.1.0" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-28.1.0.tgz#7e8f0b9049e948e7b94c2a52731166774ba7d0af" + integrity sha512-RpA8mpaJ/B2HphDMiDlrAZdDytkmwFqgjDZovM21F35lHGeUeCvYmm6W+sbQ0ydaLpg5bFAUuWG1cjqOl8vqrw== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^28.1.0" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^28.1.0" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-mock@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-27.5.1.tgz#19948336d49ef4d9c52021d34ac7b5f36ff967d6" + integrity 
sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og== + dependencies: + "@jest/types" "^27.5.1" + "@types/node" "*" + +jest-pnp-resolver@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" + integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== + +jest-regex-util@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-27.5.1.tgz#4da143f7e9fd1e542d4aa69617b38e4a78365b95" + integrity sha512-4bfKq2zie+x16okqDXjXn9ql2B0dScQu+vcwe4TvFVhkVyuWLqpZrZtXxLLWoXYgn0E87I6r6GRYHF7wFZBUvg== + +jest-regex-util@^28.0.0: + version "28.0.2" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-28.0.2.tgz#afdc377a3b25fb6e80825adcf76c854e5bf47ead" + integrity sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw== + +jest-resolve-dependencies@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz#d811ecc8305e731cc86dd79741ee98fed06f1da8" + integrity sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg== + dependencies: + "@jest/types" "^27.5.1" + jest-regex-util "^27.5.1" + jest-snapshot "^27.5.1" + +jest-resolve@^27.4.2, jest-resolve@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-27.5.1.tgz#a2f1c5a0796ec18fe9eb1536ac3814c23617b384" + integrity sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw== + dependencies: + "@jest/types" "^27.5.1" + chalk "^4.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-pnp-resolver "^1.2.2" + jest-util "^27.5.1" + jest-validate "^27.5.1" + resolve "^1.20.0" + resolve.exports "^1.1.0" + slash "^3.0.0" + +jest-runner@^27.5.1: + version 
"27.5.1" + resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-27.5.1.tgz#071b27c1fa30d90540805c5645a0ec167c7b62e5" + integrity sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ== + dependencies: + "@jest/console" "^27.5.1" + "@jest/environment" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + emittery "^0.8.1" + graceful-fs "^4.2.9" + jest-docblock "^27.5.1" + jest-environment-jsdom "^27.5.1" + jest-environment-node "^27.5.1" + jest-haste-map "^27.5.1" + jest-leak-detector "^27.5.1" + jest-message-util "^27.5.1" + jest-resolve "^27.5.1" + jest-runtime "^27.5.1" + jest-util "^27.5.1" + jest-worker "^27.5.1" + source-map-support "^0.5.6" + throat "^6.0.1" + +jest-runtime@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-27.5.1.tgz#4896003d7a334f7e8e4a53ba93fb9bcd3db0a1af" + integrity sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A== + dependencies: + "@jest/environment" "^27.5.1" + "@jest/fake-timers" "^27.5.1" + "@jest/globals" "^27.5.1" + "@jest/source-map" "^27.5.1" + "@jest/test-result" "^27.5.1" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + chalk "^4.0.0" + cjs-module-lexer "^1.0.0" + collect-v8-coverage "^1.0.0" + execa "^5.0.0" + glob "^7.1.3" + graceful-fs "^4.2.9" + jest-haste-map "^27.5.1" + jest-message-util "^27.5.1" + jest-mock "^27.5.1" + jest-regex-util "^27.5.1" + jest-resolve "^27.5.1" + jest-snapshot "^27.5.1" + jest-util "^27.5.1" + slash "^3.0.0" + strip-bom "^4.0.0" + +jest-serializer@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-27.5.1.tgz#81438410a30ea66fd57ff730835123dea1fb1f64" + integrity sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w== + dependencies: + "@types/node" "*" + graceful-fs 
"^4.2.9" + +jest-snapshot@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-27.5.1.tgz#b668d50d23d38054a51b42c4039cab59ae6eb6a1" + integrity sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA== + dependencies: + "@babel/core" "^7.7.2" + "@babel/generator" "^7.7.2" + "@babel/plugin-syntax-typescript" "^7.7.2" + "@babel/traverse" "^7.7.2" + "@babel/types" "^7.0.0" + "@jest/transform" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/babel__traverse" "^7.0.4" + "@types/prettier" "^2.1.5" + babel-preset-current-node-syntax "^1.0.0" + chalk "^4.0.0" + expect "^27.5.1" + graceful-fs "^4.2.9" + jest-diff "^27.5.1" + jest-get-type "^27.5.1" + jest-haste-map "^27.5.1" + jest-matcher-utils "^27.5.1" + jest-message-util "^27.5.1" + jest-util "^27.5.1" + natural-compare "^1.4.0" + pretty-format "^27.5.1" + semver "^7.3.2" + +jest-util@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-27.5.1.tgz#3ba9771e8e31a0b85da48fe0b0891fb86c01c2f9" + integrity sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw== + dependencies: + "@jest/types" "^27.5.1" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-util@^28.1.0: + version "28.1.0" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-28.1.0.tgz#d54eb83ad77e1dd441408738c5a5043642823be5" + integrity sha512-qYdCKD77k4Hwkose2YBEqQk7PzUf/NSE+rutzceduFveQREeH6b+89Dc9+wjX9dAwHcgdx4yedGA3FQlU/qCTA== + dependencies: + "@jest/types" "^28.1.0" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-validate@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-27.5.1.tgz#9197d54dc0bdb52260b8db40b46ae668e04df067" + integrity 
sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ== + dependencies: + "@jest/types" "^27.5.1" + camelcase "^6.2.0" + chalk "^4.0.0" + jest-get-type "^27.5.1" + leven "^3.1.0" + pretty-format "^27.5.1" + +jest-watch-typeahead@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/jest-watch-typeahead/-/jest-watch-typeahead-1.1.0.tgz#b4a6826dfb9c9420da2f7bc900de59dad11266a9" + integrity sha512-Va5nLSJTN7YFtC2jd+7wsoe1pNe5K4ShLux/E5iHEwlB9AxaxmggY7to9KUqKojhaJw3aXqt5WAb4jGPOolpEw== + dependencies: + ansi-escapes "^4.3.1" + chalk "^4.0.0" + jest-regex-util "^28.0.0" + jest-watcher "^28.0.0" + slash "^4.0.0" + string-length "^5.0.1" + strip-ansi "^7.0.1" + +jest-watcher@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-27.5.1.tgz#71bd85fb9bde3a2c2ec4dc353437971c43c642a2" + integrity sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw== + dependencies: + "@jest/test-result" "^27.5.1" + "@jest/types" "^27.5.1" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + jest-util "^27.5.1" + string-length "^4.0.1" + +jest-watcher@^28.0.0: + version "28.1.0" + resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-28.1.0.tgz#aaa7b4164a4e77eeb5f7d7b25ede5e7b4e9c9aaf" + integrity sha512-tNHMtfLE8Njcr2IRS+5rXYA4BhU90gAOwI9frTGOqd+jX0P/Au/JfRSNqsf5nUTcWdbVYuLxS1KjnzILSoR5hA== + dependencies: + "@jest/test-result" "^28.1.0" + "@jest/types" "^28.1.0" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + emittery "^0.10.2" + jest-util "^28.1.0" + string-length "^4.0.1" + +jest-worker@^26.2.1: + version "26.6.2" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed" + integrity sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color 
"^7.0.0" + +jest-worker@^27.0.2, jest-worker@^27.3.1, jest-worker@^27.4.5, jest-worker@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" + integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest@^27.4.3: + version "27.5.1" + resolved "https://registry.yarnpkg.com/jest/-/jest-27.5.1.tgz#dadf33ba70a779be7a6fc33015843b51494f63fc" + integrity sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ== + dependencies: + "@jest/core" "^27.5.1" + import-local "^3.0.2" + jest-cli "^27.5.1" + +js-sha3@0.8.0: + version "0.8.0" + resolved "https://registry.yarnpkg.com/js-sha3/-/js-sha3-0.8.0.tgz#b9b7a5da73afad7dedd0f8c463954cbde6818840" + integrity sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q== + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^3.13.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +js-yaml@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +jsdom@^16.6.0: + version "16.7.0" + resolved 
"https://registry.yarnpkg.com/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" + integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== + dependencies: + abab "^2.0.5" + acorn "^8.2.4" + acorn-globals "^6.0.0" + cssom "^0.4.4" + cssstyle "^2.3.0" + data-urls "^2.0.0" + decimal.js "^10.2.1" + domexception "^2.0.1" + escodegen "^2.0.0" + form-data "^3.0.0" + html-encoding-sniffer "^2.0.1" + http-proxy-agent "^4.0.1" + https-proxy-agent "^5.0.0" + is-potential-custom-element-name "^1.0.1" + nwsapi "^2.2.0" + parse5 "6.0.1" + saxes "^5.0.1" + symbol-tree "^3.2.4" + tough-cookie "^4.0.0" + w3c-hr-time "^1.0.2" + w3c-xmlserializer "^2.0.0" + webidl-conversions "^6.1.0" + whatwg-encoding "^1.0.5" + whatwg-mimetype "^2.3.0" + whatwg-url "^8.5.0" + ws "^7.4.6" + xml-name-validator "^3.0.0" + +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +jsesc@~0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" + integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= + +json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema-traverse@^1.0.0: + version 
"1.0.0" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" + integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== + +json-schema@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" + integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE= + +json5@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" + integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== + dependencies: + minimist "^1.2.0" + +json5@^2.1.2, json5@^2.2.0, json5@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" + integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== + +jsonfile@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== + dependencies: + universalify "^2.0.0" + optionalDependencies: + graceful-fs "^4.1.6" + +jsonpointer@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-5.0.0.tgz#f802669a524ec4805fa7389eadbc9921d5dc8072" + integrity sha512-PNYZIdMjVIvVgDSYKTT63Y+KZ6IZvGRNNWcxwD+GNnUz1MKPfv30J8ueCjdwcN0nDx2SlshgyB7Oy0epAzVRRg== + 
+"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.3.0.tgz#e624f259143b9062c92b6413ff92a164c80d3ccb" + integrity sha512-XzO9luP6L0xkxwhIJMTJQpZo/eeN60K08jHdexfD569AGxeNug6UketeHXEhROoM8aR7EcUoOQmIhcJQjcuq8Q== + dependencies: + array-includes "^3.1.4" + object.assign "^4.1.2" + +kind-of@^6.0.2: + version "6.0.3" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== + +kleur@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + +klona@^2.0.4, klona@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/klona/-/klona-2.0.5.tgz#d166574d90076395d9963aa7a928fabb8d76afbc" + integrity sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ== + +language-subtag-registry@~0.3.2: + version "0.3.21" + resolved "https://registry.yarnpkg.com/language-subtag-registry/-/language-subtag-registry-0.3.21.tgz#04ac218bea46f04cb039084602c6da9e788dd45a" + integrity sha512-L0IqwlIXjilBVVYKFT37X9Ih11Um5NEl9cbJIuU/SwP/zEEAbBPOnEeeuxVMf45ydWQRDQN3Nqc96OgbH1K+Pg== + +language-tags@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/language-tags/-/language-tags-1.0.5.tgz#d321dbc4da30ba8bf3024e040fa5c14661f9193a" + integrity sha1-0yHbxNowuovzAk4ED6XBRmH5GTo= + dependencies: + language-subtag-registry "~0.3.2" + +leven@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + +levn@^0.4.1: + version "0.4.1" 
+ resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +levn@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= + dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +lilconfig@^2.0.3, lilconfig@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.0.5.tgz#19e57fd06ccc3848fd1891655b5a447092225b25" + integrity sha512-xaYmXZtTHPAw5m+xLN8ab9C+3a8YmV3asNSPOATITbtwrfbwaLJj8h66H1WMIpALCkqsIzK3h7oQ+PdX+LQ9Eg== + +lines-and-columns@^1.1.6: + version "1.2.4" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== + +loader-runner@^4.2.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" + integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== + +loader-utils@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.2.tgz#d6e3b4fb81870721ae4e0868ab11dd638368c129" + integrity sha512-TM57VeHptv569d/GKh6TAYdzKblwDNiumOdkFnejjD0XwTH87K90w3O7AiJRqdQoXygvi1VQTJTLGhJl7WqA7A== + dependencies: + big.js "^5.2.2" + emojis-list "^3.0.0" + json5 "^2.1.2" + +loader-utils@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-3.2.0.tgz#bcecc51a7898bee7473d4bc6b845b23af8304d4f" + integrity sha512-HVl9ZqccQihZ7JM85dco1MvO9G+ONvxoGa9rkhzFsneGLKSUg1gJf9bWzhRhcvm2qChhWpebQhP44qxjKIUCaQ== + +locate-path@^2.0.0: + 
version "2.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" + integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4= + dependencies: + p-locate "^2.0.0" + path-exists "^3.0.0" + +locate-path@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" + integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== + dependencies: + p-locate "^3.0.0" + path-exists "^3.0.0" + +locate-path@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +locate-path@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== + dependencies: + p-locate "^5.0.0" + +lodash.debounce@^4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" + integrity sha1-gteb/zCmfEAF/9XiUVMArZyk168= + +lodash.memoize@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" + integrity sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4= + +lodash.merge@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +lodash.sortby@^4.7.0: + version "4.7.0" + resolved 
"https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" + integrity sha1-7dFMgk4sycHgsKG0K7UhBRakJDg= + +lodash.uniq@^4.5.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" + integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M= + +lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0: + version "4.17.21" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +lower-case@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" + integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== + dependencies: + tslib "^2.0.3" + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +lz-string@^1.4.4: + version "1.4.4" + resolved "https://registry.yarnpkg.com/lz-string/-/lz-string-1.4.4.tgz#c0d8eaf36059f705796e1e344811cf4c498d3a26" + integrity sha1-wNjq82BZ9wV5bh40SBHPTEmNOiY= + +magic-string@^0.25.0, magic-string@^0.25.7: + version "0.25.9" + resolved 
"https://registry.yarnpkg.com/magic-string/-/magic-string-0.25.9.tgz#de7f9faf91ef8a1c91d02c2e5314c8277dbcdd1c" + integrity sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ== + dependencies: + sourcemap-codec "^1.4.8" + +make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + +makeerror@1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== + dependencies: + tmpl "1.0.5" + +markdown-escapes@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/markdown-escapes/-/markdown-escapes-1.0.4.tgz#c95415ef451499d7602b91095f3c8e8975f78535" + integrity sha512-8z4efJYk43E0upd0NbVXwgSTQs6cT3T06etieCMEg7dRbzCbxUCK/GHlX8mhHRDcp+OLlHkPKsvqQTCvsRl2cg== + +match-sorter@^6.0.2: + version "6.3.1" + resolved "https://registry.yarnpkg.com/match-sorter/-/match-sorter-6.3.1.tgz#98cc37fda756093424ddf3cbc62bfe9c75b92bda" + integrity sha512-mxybbo3pPNuA+ZuCUhm5bwNkXrJTbsk5VWbR5wiwz/GC6LIiegBGn2w3O08UG/jdbYLinw51fSQ5xNU1U3MgBw== + dependencies: + "@babel/runtime" "^7.12.5" + remove-accents "0.4.2" + +mdast-util-definitions@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/mdast-util-definitions/-/mdast-util-definitions-4.0.0.tgz#c5c1a84db799173b4dcf7643cda999e440c24db2" + integrity sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ== + dependencies: + unist-util-visit "^2.0.0" + +mdast-util-to-hast@^10.0.0, mdast-util-to-hast@^10.2.0: + version "10.2.0" + resolved 
"https://registry.yarnpkg.com/mdast-util-to-hast/-/mdast-util-to-hast-10.2.0.tgz#61875526a017d8857b71abc9333942700b2d3604" + integrity sha512-JoPBfJ3gBnHZ18icCwHR50orC9kNH81tiR1gs01D8Q5YpV6adHNO9nKNuFBCJQ941/32PT1a63UF/DitmS3amQ== + dependencies: + "@types/mdast" "^3.0.0" + "@types/unist" "^2.0.0" + mdast-util-definitions "^4.0.0" + mdurl "^1.0.0" + unist-builder "^2.0.0" + unist-util-generated "^1.0.0" + unist-util-position "^3.0.0" + unist-util-visit "^2.0.0" + +mdn-data@2.0.14: + version "2.0.14" + resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50" + integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== + +mdn-data@2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.4.tgz#699b3c38ac6f1d728091a64650b65d388502fd5b" + integrity sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA== + +mdurl@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/mdurl/-/mdurl-1.0.1.tgz#fe85b2ec75a59037f2adfec100fd6c601761152e" + integrity sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4= + +media-typer@0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= + +memfs@^3.1.2, memfs@^3.4.1: + version "3.4.1" + resolved "https://registry.yarnpkg.com/memfs/-/memfs-3.4.1.tgz#b78092f466a0dce054d63d39275b24c71d3f1305" + integrity sha512-1c9VPVvW5P7I85c35zAdEr1TD5+F11IToIHIlrVIcflfnzPkJa0ZoYEoEdYDP8KgPFoSZ/opDrUsAoZWym3mtw== + dependencies: + fs-monkey "1.0.3" + +"memoize-one@>=3.1.1 <6", memoize-one@^5.1.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/memoize-one/-/memoize-one-5.2.1.tgz#8337aa3c4335581839ec01c3d594090cebe8f00e" + integrity sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q== + +merge-descriptors@1.0.1: 
+ version "1.0.1" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= + +merge-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + +merge2@^1.3.0, merge2@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +methods@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= + +micromatch@^4.0.2, micromatch@^4.0.4: + version "4.0.5" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + dependencies: + braces "^3.0.2" + picomatch "^2.3.1" + +microseconds@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/microseconds/-/microseconds-0.2.0.tgz#233b25f50c62a65d861f978a4a4f8ec18797dc39" + integrity sha512-n7DHHMjR1avBbSpsTBj6fmMGh2AGrifVV4e+WYc3Q9lO+xnSZ3NyhcBND3vzzatt05LFhoKFRxrIyklmLlUtyA== + +mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@^2.1.12, mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.24, mime-types@~2.1.34: + version "2.1.35" + resolved 
"https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime@1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +mimic-fn@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +min-indent@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" + integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== + +mini-css-extract-plugin@^2.4.5: + version "2.6.0" + resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-2.6.0.tgz#578aebc7fc14d32c0ad304c2c34f08af44673f5e" + integrity sha512-ndG8nxCEnAemsg4FSgS+yNyHKgkTB4nPKqCOgh65j3/30qqC5RaSQQXMm++Y6sb6E1zRSxPkztj9fqxhS1Eo6w== + dependencies: + schema-utils "^4.0.0" + +minimalistic-assert@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" + integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== + +minimatch@3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== + dependencies: + brace-expansion "^1.1.7" + +minimatch@^3.0.4, minimatch@^3.1.2: 
+ version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimatch@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.0.1.tgz#fb9022f7528125187c92bd9e9b6366be1cf3415b" + integrity sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g== + dependencies: + brace-expansion "^2.0.1" + +minimist@^1.1.1, minimist@^1.2.0, minimist@^1.2.6: + version "1.2.6" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" + integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== + +mkdirp@~0.5.1: + version "0.5.6" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" + integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== + dependencies: + minimist "^1.2.6" + +moment@^2.29.1, moment@^2.29.3: + version "2.29.3" + resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.3.tgz#edd47411c322413999f7a5940d526de183c031f3" + integrity sha512-c6YRvhEo//6T2Jz/vVtYzqBzwvPT95JBQ+smCytzf7c50oMZRsR/a4w88aD34I+/QVSfnoAnSBFPJHItlOMJVw== + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@2.1.3, ms@^2.1.1: + version "2.1.3" + resolved 
"https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +multicast-dns@^7.2.4: + version "7.2.4" + resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-7.2.4.tgz#cf0b115c31e922aeb20b64e6556cbeb34cf0dd19" + integrity sha512-XkCYOU+rr2Ft3LI6w4ye51M3VK31qJXFIxu0XLw169PtKG0Zx47OrXeVW/GCYOfpC9s1yyyf1S+L8/4LY0J9Zw== + dependencies: + dns-packet "^5.2.2" + thunky "^1.0.2" + +nano-time@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/nano-time/-/nano-time-1.0.0.tgz#b0554f69ad89e22d0907f7a12b0993a5d96137ef" + integrity sha1-sFVPaa2J4i0JB/ehKwmTpdlhN+8= + dependencies: + big-integer "^1.6.16" + +nanoid@^3.3.3: + version "3.3.4" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab" + integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= + +negotiator@0.6.3: + version "0.6.3" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" + integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== + +neo-async@^2.6.2: + version "2.6.2" + resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== + +no-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" + integrity 
sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== + dependencies: + lower-case "^2.0.2" + tslib "^2.0.3" + +node-emoji@^1.10.0: + version "1.11.0" + resolved "https://registry.yarnpkg.com/node-emoji/-/node-emoji-1.11.0.tgz#69a0150e6946e2f115e9d7ea4df7971e2628301c" + integrity sha512-wo2DpQkQp7Sjm2A0cq+sN7EHKO6Sl0ctXeBdFZrL9T9+UywORbufTcTZxom8YqpLQt/FqNMUkOpkZrJVYSKD3A== + dependencies: + lodash "^4.17.21" + +node-forge@^1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" + integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== + +node-int64@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + integrity sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs= + +node-releases@^2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.4.tgz#f38252370c43854dc48aa431c766c6c398f40476" + integrity sha512-gbMzqQtTtDz/00jQzZ21PQzdI9PyLYqUSvD0p3naOhX4odFji0ZxYdnVwPTxmSwkmxhcFImpozceidSG+AgoPQ== + +normalize-path@^3.0.0, normalize-path@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +normalize-range@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" + integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= + +normalize-url@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" + integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== + 
+npm-run-path@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + +nth-check@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" + integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== + dependencies: + boolbase "~1.0.0" + +nth-check@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.0.1.tgz#2efe162f5c3da06a28959fbd3db75dbeea9f0fc2" + integrity sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w== + dependencies: + boolbase "^1.0.0" + +numeral@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/numeral/-/numeral-2.0.6.tgz#4ad080936d443c2561aed9f2197efffe25f4e506" + integrity sha1-StCAk21EPCVhrtnyGX7//iX05QY= + +nwsapi@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.0.tgz#204879a9e3d068ff2a55139c2c772780681a38b7" + integrity sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ== + +object-assign@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= + +object-hash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9" + integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== + +object-inspect@^1.12.0, object-inspect@^1.9.0: + version "1.12.0" + resolved 
"https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.0.tgz#6e2c120e868fd1fd18cb4f18c31741d0d6e776f0" + integrity sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g== + +object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object.assign@^4.1.0, object.assign@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" + integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + has-symbols "^1.0.1" + object-keys "^1.1.1" + +object.entries@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.5.tgz#e1acdd17c4de2cd96d5a08487cfb9db84d881861" + integrity sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.fromentries@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.5.tgz#7b37b205109c21e741e605727fe8b0ad5fa08251" + integrity sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.getownpropertydescriptors@^2.1.0: + version "2.1.3" + resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.3.tgz#b223cf38e17fefb97a63c10c91df72ccb386df9e" + integrity sha512-VdDoCwvJI4QdC6ndjpqFmoL3/+HxffFBbcJzKi5hwLLqqx3mdbedRpfZDdK0SrOSauj8X4GzBvnDZl4vTN7dOw== + dependencies: + 
call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.hasown@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object.hasown/-/object.hasown-1.1.1.tgz#ad1eecc60d03f49460600430d97f23882cf592a3" + integrity sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A== + dependencies: + define-properties "^1.1.4" + es-abstract "^1.19.5" + +object.values@^1.1.0, object.values@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.5.tgz#959f63e3ce9ef108720333082131e4a459b716ac" + integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +oblivious-set@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/oblivious-set/-/oblivious-set-1.0.0.tgz#c8316f2c2fb6ff7b11b6158db3234c49f733c566" + integrity sha512-z+pI07qxo4c2CulUHCDf9lcqDlMSo72N/4rLUpRXf6fu+q8vjt8y0xS+Tlf8NTJDdTXHbdeO1n3MlbctwEoXZw== + +obuf@^1.0.0, obuf@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" + integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== + +on-finished@2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" + integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== + dependencies: + ee-first "1.1.1" + +on-headers@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" + integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== + +once@^1.3.0: + version "1.4.0" + resolved 
"https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + dependencies: + wrappy "1" + +onetime@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + dependencies: + mimic-fn "^2.1.0" + +open@^8.0.9, open@^8.4.0: + version "8.4.0" + resolved "https://registry.yarnpkg.com/open/-/open-8.4.0.tgz#345321ae18f8138f82565a910fdc6b39e8c244f8" + integrity sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q== + dependencies: + define-lazy-prop "^2.0.0" + is-docker "^2.1.1" + is-wsl "^2.2.0" + +optionator@^0.8.1: + version "0.8.3" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" + integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.6" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + word-wrap "~1.2.3" + +optionator@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + +p-limit@^1.1.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8" + integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q== + dependencies: + p-try "^1.0.0" + +p-limit@^2.0.0, p-limit@^2.2.0: + version "2.3.0" + resolved 
"https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-limit@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== + dependencies: + yocto-queue "^0.1.0" + +p-locate@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" + integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM= + dependencies: + p-limit "^1.1.0" + +p-locate@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" + integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== + dependencies: + p-limit "^2.0.0" + +p-locate@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + +p-locate@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== + dependencies: + p-limit "^3.0.2" + +p-retry@^4.5.0: + version "4.6.2" + resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-4.6.2.tgz#9baae7184057edd4e17231cee04264106e092a16" + integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ== + dependencies: + "@types/retry" "0.12.0" + retry "^0.13.1" + +p-try@^1.0.0: + version 
"1.0.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" + integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M= + +p-try@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +param-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" + integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== + dependencies: + dot-case "^3.0.4" + tslib "^2.0.3" + +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parse-entities@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/parse-entities/-/parse-entities-2.0.0.tgz#53c6eb5b9314a1f4ec99fa0fdf7ce01ecda0cbe8" + integrity sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ== + dependencies: + character-entities "^1.0.0" + character-entities-legacy "^1.0.0" + character-reference-invalid "^1.0.0" + is-alphanumerical "^1.0.0" + is-decimal "^1.0.0" + is-hexadecimal "^1.0.0" + +parse-json@^5.0.0, parse-json@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" + integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== + dependencies: + "@babel/code-frame" "^7.0.0" + error-ex "^1.3.1" + json-parse-even-better-errors "^2.3.0" + lines-and-columns "^1.1.6" + +parse5@6.0.1, parse5@^6.0.0: + version "6.0.1" + resolved 
"https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" + integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== + +parseurl@~1.3.2, parseurl@~1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + +pascal-case@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb" + integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + +path-exists@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= + +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= + +path-key@^3.0.0, path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.6, path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity 
sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-to-regexp@0.1.7: + version "0.1.7" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= + +path-type@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +performance-now@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= + +picocolors@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-0.2.1.tgz#570670f793646851d1ba135996962abad587859f" + integrity sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA== + +picocolors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +pirates@^4.0.4: + version "4.0.5" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" + integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== + +pkg-dir@^4.1.0, pkg-dir@^4.2.0: + version "4.2.0" + resolved 
"https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + +pkg-up@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5" + integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== + dependencies: + find-up "^3.0.0" + +postcss-attribute-case-insensitive@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-5.0.0.tgz#39cbf6babf3ded1e4abf37d09d6eda21c644105c" + integrity sha512-b4g9eagFGq9T5SWX4+USfVyjIb3liPnjhHHRMP7FMB2kFVpYyfEscV0wP3eaXhKlcHKUut8lt5BGoeylWA/dBQ== + dependencies: + postcss-selector-parser "^6.0.2" + +postcss-browser-comments@^4: + version "4.0.0" + resolved "https://registry.yarnpkg.com/postcss-browser-comments/-/postcss-browser-comments-4.0.0.tgz#bcfc86134df5807f5d3c0eefa191d42136b5e72a" + integrity sha512-X9X9/WN3KIvY9+hNERUqX9gncsgBA25XaeR+jshHz2j8+sYyHktHw1JdKuMjeLpGktXidqDhA7b/qm1mrBDmgg== + +postcss-calc@^8.2.3: + version "8.2.4" + resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-8.2.4.tgz#77b9c29bfcbe8a07ff6693dc87050828889739a5" + integrity sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q== + dependencies: + postcss-selector-parser "^6.0.9" + postcss-value-parser "^4.2.0" + +postcss-clamp@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/postcss-clamp/-/postcss-clamp-4.1.0.tgz#7263e95abadd8c2ba1bd911b0b5a5c9c93e02363" + integrity sha512-ry4b1Llo/9zz+PKC+030KUnPITTJAHeOwjfAyyB60eT0AorGLdzp52s31OsPRHRf8NchkgFoG2y6fCfn1IV1Ow== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-color-functional-notation@^4.2.2: + version "4.2.2" + resolved 
"https://registry.yarnpkg.com/postcss-color-functional-notation/-/postcss-color-functional-notation-4.2.2.tgz#f59ccaeb4ee78f1b32987d43df146109cc743073" + integrity sha512-DXVtwUhIk4f49KK5EGuEdgx4Gnyj6+t2jBSEmxvpIK9QI40tWrpS2Pua8Q7iIZWBrki2QOaeUdEaLPPa91K0RQ== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-color-hex-alpha@^8.0.3: + version "8.0.3" + resolved "https://registry.yarnpkg.com/postcss-color-hex-alpha/-/postcss-color-hex-alpha-8.0.3.tgz#61a0fd151d28b128aa6a8a21a2dad24eebb34d52" + integrity sha512-fESawWJCrBV035DcbKRPAVmy21LpoyiXdPTuHUfWJ14ZRjY7Y7PA6P4g8z6LQGYhU1WAxkTxjIjurXzoe68Glw== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-color-rebeccapurple@^7.0.2: + version "7.0.2" + resolved "https://registry.yarnpkg.com/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-7.0.2.tgz#5d397039424a58a9ca628762eb0b88a61a66e079" + integrity sha512-SFc3MaocHaQ6k3oZaFwH8io6MdypkUtEy/eXzXEB1vEQlO3S3oDc/FSZA8AsS04Z25RirQhlDlHLh3dn7XewWw== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-colormin@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-5.3.0.tgz#3cee9e5ca62b2c27e84fce63affc0cfb5901956a" + integrity sha512-WdDO4gOFG2Z8n4P8TWBpshnL3JpmNmJwdnfP2gbk2qBA8PWwOYcmjmI/t3CmMeL72a7Hkd+x/Mg9O2/0rD54Pg== + dependencies: + browserslist "^4.16.6" + caniuse-api "^3.0.0" + colord "^2.9.1" + postcss-value-parser "^4.2.0" + +postcss-convert-values@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-5.1.0.tgz#f8d3abe40b4ce4b1470702a0706343eac17e7c10" + integrity sha512-GkyPbZEYJiWtQB0KZ0X6qusqFHUepguBCNFi9t5JJc7I2OTXG7C0twbTLvCfaKOLl3rSXmpAwV7W5txd91V84g== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-custom-media@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/postcss-custom-media/-/postcss-custom-media-8.0.0.tgz#1be6aff8be7dc9bf1fe014bde3b71b92bb4552f1" + integrity 
sha512-FvO2GzMUaTN0t1fBULDeIvxr5IvbDXcIatt6pnJghc736nqNgsGao5NT+5+WVLAQiTt6Cb3YUms0jiPaXhL//g== + +postcss-custom-properties@^12.1.7: + version "12.1.7" + resolved "https://registry.yarnpkg.com/postcss-custom-properties/-/postcss-custom-properties-12.1.7.tgz#ca470fd4bbac5a87fd868636dafc084bc2a78b41" + integrity sha512-N/hYP5gSoFhaqxi2DPCmvto/ZcRDVjE3T1LiAMzc/bg53hvhcHOLpXOHb526LzBBp5ZlAUhkuot/bfpmpgStJg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-custom-selectors@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/postcss-custom-selectors/-/postcss-custom-selectors-6.0.0.tgz#022839e41fbf71c47ae6e316cb0e6213012df5ef" + integrity sha512-/1iyBhz/W8jUepjGyu7V1OPcGbc636snN1yXEQCinb6Bwt7KxsiU7/bLQlp8GwAXzCh7cobBU5odNn/2zQWR8Q== + dependencies: + postcss-selector-parser "^6.0.4" + +postcss-dir-pseudo-class@^6.0.4: + version "6.0.4" + resolved "https://registry.yarnpkg.com/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-6.0.4.tgz#9afe49ea631f0cb36fa0076e7c2feb4e7e3f049c" + integrity sha512-I8epwGy5ftdzNWEYok9VjW9whC4xnelAtbajGv4adql4FIF09rnrxnA9Y8xSHN47y7gqFIv10C5+ImsLeJpKBw== + dependencies: + postcss-selector-parser "^6.0.9" + +postcss-discard-comments@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-5.1.1.tgz#e90019e1a0e5b99de05f63516ce640bd0df3d369" + integrity sha512-5JscyFmvkUxz/5/+TB3QTTT9Gi9jHkcn8dcmmuN68JQcv3aQg4y88yEHHhwFB52l/NkaJ43O0dbksGMAo49nfQ== + +postcss-discard-duplicates@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz#9eb4fe8456706a4eebd6d3b7b777d07bad03e848" + integrity sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw== + +postcss-discard-empty@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz#e57762343ff7f503fe53fca553d18d7f0c369c6c" + integrity 
sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A== + +postcss-discard-overridden@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz#7e8c5b53325747e9d90131bb88635282fb4a276e" + integrity sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw== + +postcss-double-position-gradients@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/postcss-double-position-gradients/-/postcss-double-position-gradients-3.1.1.tgz#a12cfdb7d11fa1a99ccecc747f0c19718fb37152" + integrity sha512-jM+CGkTs4FcG53sMPjrrGE0rIvLDdCrqMzgDC5fLI7JHDO7o6QG8C5TQBtExb13hdBdoH9C2QVbG4jo2y9lErQ== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +postcss-env-function@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/postcss-env-function/-/postcss-env-function-4.0.6.tgz#7b2d24c812f540ed6eda4c81f6090416722a8e7a" + integrity sha512-kpA6FsLra+NqcFnL81TnsU+Z7orGtDTxcOhl6pwXeEq1yFPpRMkCDpHhrz8CFQDr/Wfm0jLiNQ1OsGGPjlqPwA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-flexbugs-fixes@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-5.0.2.tgz#2028e145313074fc9abe276cb7ca14e5401eb49d" + integrity sha512-18f9voByak7bTktR2QgDveglpn9DTbBWPUzSOe9g0N4WR/2eSt6Vrcbf0hmspvMI6YWGywz6B9f7jzpFNJJgnQ== + +postcss-focus-visible@^6.0.4: + version "6.0.4" + resolved "https://registry.yarnpkg.com/postcss-focus-visible/-/postcss-focus-visible-6.0.4.tgz#50c9ea9afa0ee657fb75635fabad25e18d76bf9e" + integrity sha512-QcKuUU/dgNsstIK6HELFRT5Y3lbrMLEOwG+A4s5cA+fx3A3y/JTq3X9LaOj3OC3ALH0XqyrgQIgey/MIZ8Wczw== + dependencies: + postcss-selector-parser "^6.0.9" + +postcss-focus-within@^5.0.4: + version "5.0.4" + resolved 
"https://registry.yarnpkg.com/postcss-focus-within/-/postcss-focus-within-5.0.4.tgz#5b1d2ec603195f3344b716c0b75f61e44e8d2e20" + integrity sha512-vvjDN++C0mu8jz4af5d52CB184ogg/sSxAFS+oUJQq2SuCe7T5U2iIsVJtsCp2d6R4j0jr5+q3rPkBVZkXD9fQ== + dependencies: + postcss-selector-parser "^6.0.9" + +postcss-font-variant@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/postcss-font-variant/-/postcss-font-variant-5.0.0.tgz#efd59b4b7ea8bb06127f2d031bfbb7f24d32fa66" + integrity sha512-1fmkBaCALD72CK2a9i468mA/+tr9/1cBxRRMXOUaZqO43oWPR5imcyPjXwuv7PXbCid4ndlP5zWhidQVVa3hmA== + +postcss-gap-properties@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/postcss-gap-properties/-/postcss-gap-properties-3.0.3.tgz#6401bb2f67d9cf255d677042928a70a915e6ba60" + integrity sha512-rPPZRLPmEKgLk/KlXMqRaNkYTUpE7YC+bOIQFN5xcu1Vp11Y4faIXv6/Jpft6FMnl6YRxZqDZG0qQOW80stzxQ== + +postcss-image-set-function@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/postcss-image-set-function/-/postcss-image-set-function-4.0.6.tgz#bcff2794efae778c09441498f40e0c77374870a9" + integrity sha512-KfdC6vg53GC+vPd2+HYzsZ6obmPqOk6HY09kttU19+Gj1nC3S3XBVEXDHxkhxTohgZqzbUb94bKXvKDnYWBm/A== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-initial@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/postcss-initial/-/postcss-initial-4.0.1.tgz#529f735f72c5724a0fb30527df6fb7ac54d7de42" + integrity sha512-0ueD7rPqX8Pn1xJIjay0AZeIuDoF+V+VvMt/uOnn+4ezUKhZM/NokDeP6DwMNyIoYByuN/94IQnt5FEkaN59xQ== + +postcss-js@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/postcss-js/-/postcss-js-4.0.0.tgz#31db79889531b80dc7bc9b0ad283e418dce0ac00" + integrity sha512-77QESFBwgX4irogGVPgQ5s07vLvFqWr228qZY+w6lW599cRlK/HmnlivnnVUxkjHnCu4J16PDMHcH+e+2HbvTQ== + dependencies: + camelcase-css "^2.0.1" + +postcss-lab-function@^4.2.0: + version "4.2.0" + resolved 
"https://registry.yarnpkg.com/postcss-lab-function/-/postcss-lab-function-4.2.0.tgz#e054e662c6480202f5760887ec1ae0d153357123" + integrity sha512-Zb1EO9DGYfa3CP8LhINHCcTTCTLI+R3t7AX2mKsDzdgVQ/GkCpHOTgOr6HBHslP7XDdVbqgHW5vvRPMdVANQ8w== + dependencies: + "@csstools/postcss-progressive-custom-properties" "^1.1.0" + postcss-value-parser "^4.2.0" + +postcss-load-config@^3.1.4: + version "3.1.4" + resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-3.1.4.tgz#1ab2571faf84bb078877e1d07905eabe9ebda855" + integrity sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg== + dependencies: + lilconfig "^2.0.5" + yaml "^1.10.2" + +postcss-loader@^6.2.1: + version "6.2.1" + resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-6.2.1.tgz#0895f7346b1702103d30fdc66e4d494a93c008ef" + integrity sha512-WbbYpmAaKcux/P66bZ40bpWsBucjx/TTgVVzRZ9yUO8yQfVBlameJ0ZGVaPfH64hNSBh63a+ICP5nqOpBA0w+Q== + dependencies: + cosmiconfig "^7.0.0" + klona "^2.0.5" + semver "^7.3.5" + +postcss-logical@^5.0.4: + version "5.0.4" + resolved "https://registry.yarnpkg.com/postcss-logical/-/postcss-logical-5.0.4.tgz#ec75b1ee54421acc04d5921576b7d8db6b0e6f73" + integrity sha512-RHXxplCeLh9VjinvMrZONq7im4wjWGlRJAqmAVLXyZaXwfDWP73/oq4NdIp+OZwhQUMj0zjqDfM5Fj7qby+B4g== + +postcss-media-minmax@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/postcss-media-minmax/-/postcss-media-minmax-5.0.0.tgz#7140bddec173e2d6d657edbd8554a55794e2a5b5" + integrity sha512-yDUvFf9QdFZTuCUg0g0uNSHVlJ5X1lSzDZjPSFaiCWvjgsvu8vEVxtahPrLMinIDEEGnx6cBe6iqdx5YWz08wQ== + +postcss-merge-longhand@^5.1.4: + version "5.1.4" + resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-5.1.4.tgz#0f46f8753989a33260efc47de9a0cdc571f2ec5c" + integrity sha512-hbqRRqYfmXoGpzYKeW0/NCZhvNyQIlQeWVSao5iKWdyx7skLvCfQFGIUsP9NUs3dSbPac2IC4Go85/zG+7MlmA== + dependencies: + postcss-value-parser "^4.2.0" + stylehacks "^5.1.0" + 
+postcss-merge-rules@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-5.1.1.tgz#d327b221cd07540bcc8d9ff84446d8b404d00162" + integrity sha512-8wv8q2cXjEuCcgpIB1Xx1pIy8/rhMPIQqYKNzEdyx37m6gpq83mQQdCxgIkFgliyEnKvdwJf/C61vN4tQDq4Ww== + dependencies: + browserslist "^4.16.6" + caniuse-api "^3.0.0" + cssnano-utils "^3.1.0" + postcss-selector-parser "^6.0.5" + +postcss-minify-font-values@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz#f1df0014a726083d260d3bd85d7385fb89d1f01b" + integrity sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-minify-gradients@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz#f1fe1b4f498134a5068240c2f25d46fcd236ba2c" + integrity sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw== + dependencies: + colord "^2.9.1" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-minify-params@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-5.1.2.tgz#77e250780c64198289c954884ebe3ee4481c3b1c" + integrity sha512-aEP+p71S/urY48HWaRHasyx4WHQJyOYaKpQ6eXl8k0kxg66Wt/30VR6/woh8THgcpRbonJD5IeD+CzNhPi1L8g== + dependencies: + browserslist "^4.16.6" + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-minify-selectors@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-5.2.0.tgz#17c2be233e12b28ffa8a421a02fc8b839825536c" + integrity sha512-vYxvHkW+iULstA+ctVNx0VoRAR4THQQRkG77o0oa4/mBS0OzGvvzLIvHDv/nNEM0crzN2WIyFU5X7wZhaUK3RA== + dependencies: + postcss-selector-parser "^6.0.5" + +postcss-modules-extract-imports@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d" + integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== + +postcss-modules-local-by-default@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz#ebbb54fae1598eecfdf691a02b3ff3b390a5a51c" + integrity sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ== + dependencies: + icss-utils "^5.0.0" + postcss-selector-parser "^6.0.2" + postcss-value-parser "^4.1.0" + +postcss-modules-scope@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz#9ef3151456d3bbfa120ca44898dfca6f2fa01f06" + integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== + dependencies: + postcss-selector-parser "^6.0.4" + +postcss-modules-values@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c" + integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== + dependencies: + icss-utils "^5.0.0" + +postcss-nested@5.0.6: + version "5.0.6" + resolved "https://registry.yarnpkg.com/postcss-nested/-/postcss-nested-5.0.6.tgz#466343f7fc8d3d46af3e7dba3fcd47d052a945bc" + integrity sha512-rKqm2Fk0KbA8Vt3AdGN0FB9OBOMDVajMG6ZCf/GoHgdxUJ4sBFp0A/uMIRm+MJUdo33YXEtjqIz8u7DAp8B7DA== + dependencies: + postcss-selector-parser "^6.0.6" + +postcss-nesting@^10.1.4: + version "10.1.5" + resolved "https://registry.yarnpkg.com/postcss-nesting/-/postcss-nesting-10.1.5.tgz#0711260e394cd0e117ff1f879eb1ee9a94550352" + integrity sha512-+NyBBE/wUcJ+NJgVd2FyKIZ414lul6ExqkOt1qXXw7oRzpQ0iT68cVpx+QfHh42QUMHXNoVLlN9InFY9XXK8ng== + 
dependencies: + "@csstools/selector-specificity" "1.0.0" + postcss-selector-parser "^6.0.10" + +postcss-normalize-charset@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz#9302de0b29094b52c259e9b2cf8dc0879879f0ed" + integrity sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg== + +postcss-normalize-display-values@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz#72abbae58081960e9edd7200fcf21ab8325c3da8" + integrity sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-positions@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-positions/-/postcss-normalize-positions-5.1.0.tgz#902a7cb97cf0b9e8b1b654d4a43d451e48966458" + integrity sha512-8gmItgA4H5xiUxgN/3TVvXRoJxkAWLW6f/KKhdsH03atg0cB8ilXnrB5PpSshwVu/dD2ZsRFQcR1OEmSBDAgcQ== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-repeat-style@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.0.tgz#f6d6fd5a54f51a741cc84a37f7459e60ef7a6398" + integrity sha512-IR3uBjc+7mcWGL6CtniKNQ4Rr5fTxwkaDHwMBDGGs1x9IVRkYIT/M4NelZWkAOBdV6v3Z9S46zqaKGlyzHSchw== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-string@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz#411961169e07308c82c1f8c55f3e8a337757e228" + integrity sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-timing-functions@^5.1.0: + version "5.1.0" + resolved 
"https://registry.yarnpkg.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz#d5614410f8f0b2388e9f240aa6011ba6f52dafbb" + integrity sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize-unicode@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.0.tgz#3d23aede35e160089a285e27bf715de11dc9db75" + integrity sha512-J6M3MizAAZ2dOdSjy2caayJLQT8E8K9XjLce8AUQMwOrCvjCHv24aLC/Lps1R1ylOfol5VIDMaM/Lo9NGlk1SQ== + dependencies: + browserslist "^4.16.6" + postcss-value-parser "^4.2.0" + +postcss-normalize-url@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz#ed9d88ca82e21abef99f743457d3729a042adcdc" + integrity sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew== + dependencies: + normalize-url "^6.0.1" + postcss-value-parser "^4.2.0" + +postcss-normalize-whitespace@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz#08a1a0d1ffa17a7cc6efe1e6c9da969cc4493cfa" + integrity sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-normalize@^10.0.1: + version "10.0.1" + resolved "https://registry.yarnpkg.com/postcss-normalize/-/postcss-normalize-10.0.1.tgz#464692676b52792a06b06880a176279216540dd7" + integrity sha512-+5w18/rDev5mqERcG3W5GZNMJa1eoYYNGo8gB7tEwaos0ajk3ZXAI4mHGcNT47NE+ZnZD1pEpUOFLvltIwmeJA== + dependencies: + "@csstools/normalize.css" "*" + postcss-browser-comments "^4" + sanitize.css "*" + +postcss-opacity-percentage@^1.1.2: + version "1.1.2" + resolved 
"https://registry.yarnpkg.com/postcss-opacity-percentage/-/postcss-opacity-percentage-1.1.2.tgz#bd698bb3670a0a27f6d657cc16744b3ebf3b1145" + integrity sha512-lyUfF7miG+yewZ8EAk9XUBIlrHyUE6fijnesuz+Mj5zrIHIEw6KcIZSOk/elVMqzLvREmXB83Zi/5QpNRYd47w== + +postcss-ordered-values@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-5.1.1.tgz#0b41b610ba02906a3341e92cab01ff8ebc598adb" + integrity sha512-7lxgXF0NaoMIgyihL/2boNAEZKiW0+HkMhdKMTD93CjW8TdCy2hSdj8lsAo+uwm7EDG16Da2Jdmtqpedl0cMfw== + dependencies: + cssnano-utils "^3.1.0" + postcss-value-parser "^4.2.0" + +postcss-overflow-shorthand@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/postcss-overflow-shorthand/-/postcss-overflow-shorthand-3.0.3.tgz#ebcfc0483a15bbf1b27fdd9b3c10125372f4cbc2" + integrity sha512-CxZwoWup9KXzQeeIxtgOciQ00tDtnylYIlJBBODqkgS/PU2jISuWOL/mYLHmZb9ZhZiCaNKsCRiLp22dZUtNsg== + +postcss-page-break@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/postcss-page-break/-/postcss-page-break-3.0.4.tgz#7fbf741c233621622b68d435babfb70dd8c1ee5f" + integrity sha512-1JGu8oCjVXLa9q9rFTo4MbeeA5FMe00/9C7lN4va606Rdb+HkxXtXsmEDrIraQ11fGz/WvKWa8gMuCKkrXpTsQ== + +postcss-place@^7.0.4: + version "7.0.4" + resolved "https://registry.yarnpkg.com/postcss-place/-/postcss-place-7.0.4.tgz#eb026650b7f769ae57ca4f938c1addd6be2f62c9" + integrity sha512-MrgKeiiu5OC/TETQO45kV3npRjOFxEHthsqGtkh3I1rPbZSbXGD/lZVi9j13cYh+NA8PIAPyk6sGjT9QbRyvSg== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-preset-env@^7.0.1: + version "7.5.0" + resolved "https://registry.yarnpkg.com/postcss-preset-env/-/postcss-preset-env-7.5.0.tgz#0c1f23933597d55dab4a90f61eda30b76e710658" + integrity sha512-0BJzWEfCdTtK2R3EiKKSdkE51/DI/BwnhlnicSW482Ym6/DGHud8K0wGLcdjip1epVX0HKo4c8zzTeV/SkiejQ== + dependencies: + "@csstools/postcss-color-function" "^1.1.0" + "@csstools/postcss-font-format-keywords" "^1.0.0" + "@csstools/postcss-hwb-function" "^1.0.0" + 
"@csstools/postcss-ic-unit" "^1.0.0" + "@csstools/postcss-is-pseudo-class" "^2.0.2" + "@csstools/postcss-normalize-display-values" "^1.0.0" + "@csstools/postcss-oklab-function" "^1.1.0" + "@csstools/postcss-progressive-custom-properties" "^1.3.0" + "@csstools/postcss-stepped-value-functions" "^1.0.0" + "@csstools/postcss-unset-value" "^1.0.0" + autoprefixer "^10.4.6" + browserslist "^4.20.3" + css-blank-pseudo "^3.0.3" + css-has-pseudo "^3.0.4" + css-prefers-color-scheme "^6.0.3" + cssdb "^6.6.1" + postcss-attribute-case-insensitive "^5.0.0" + postcss-clamp "^4.1.0" + postcss-color-functional-notation "^4.2.2" + postcss-color-hex-alpha "^8.0.3" + postcss-color-rebeccapurple "^7.0.2" + postcss-custom-media "^8.0.0" + postcss-custom-properties "^12.1.7" + postcss-custom-selectors "^6.0.0" + postcss-dir-pseudo-class "^6.0.4" + postcss-double-position-gradients "^3.1.1" + postcss-env-function "^4.0.6" + postcss-focus-visible "^6.0.4" + postcss-focus-within "^5.0.4" + postcss-font-variant "^5.0.0" + postcss-gap-properties "^3.0.3" + postcss-image-set-function "^4.0.6" + postcss-initial "^4.0.1" + postcss-lab-function "^4.2.0" + postcss-logical "^5.0.4" + postcss-media-minmax "^5.0.0" + postcss-nesting "^10.1.4" + postcss-opacity-percentage "^1.1.2" + postcss-overflow-shorthand "^3.0.3" + postcss-page-break "^3.0.4" + postcss-place "^7.0.4" + postcss-pseudo-class-any-link "^7.1.2" + postcss-replace-overflow-wrap "^4.0.0" + postcss-selector-not "^5.0.0" + postcss-value-parser "^4.2.0" + +postcss-pseudo-class-any-link@^7.1.2: + version "7.1.3" + resolved "https://registry.yarnpkg.com/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-7.1.3.tgz#0e4753518b9f6caa8b649c75b56e69e391d0c12f" + integrity sha512-I9Yp1VV2r8xFwg/JrnAlPCcKmutv6f6Ig6/CHFPqGJiDgYXM9C+0kgLfK4KOXbKNw+63QYl4agRUB0Wi9ftUIg== + dependencies: + postcss-selector-parser "^6.0.10" + +postcss-reduce-initial@^5.1.0: + version "5.1.0" + resolved 
"https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-5.1.0.tgz#fc31659ea6e85c492fb2a7b545370c215822c5d6" + integrity sha512-5OgTUviz0aeH6MtBjHfbr57tml13PuedK/Ecg8szzd4XRMbYxH4572JFG067z+FqBIf6Zp/d+0581glkvvWMFw== + dependencies: + browserslist "^4.16.6" + caniuse-api "^3.0.0" + +postcss-reduce-transforms@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz#333b70e7758b802f3dd0ddfe98bb1ccfef96b6e9" + integrity sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ== + dependencies: + postcss-value-parser "^4.2.0" + +postcss-replace-overflow-wrap@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-4.0.0.tgz#d2df6bed10b477bf9c52fab28c568b4b29ca4319" + integrity sha512-KmF7SBPphT4gPPcKZc7aDkweHiKEEO8cla/GjcBK+ckKxiZslIu3C4GCRW3DNfL0o7yW7kMQu9xlZ1kXRXLXtw== + +postcss-selector-not@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/postcss-selector-not/-/postcss-selector-not-5.0.0.tgz#ac5fc506f7565dd872f82f5314c0f81a05630dc7" + integrity sha512-/2K3A4TCP9orP4TNS7u3tGdRFVKqz/E6pX3aGnriPG0jU78of8wsUcqE4QAhWEU0d+WnMSF93Ah3F//vUtK+iQ== + dependencies: + balanced-match "^1.0.0" + +postcss-selector-parser@^6.0.10, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5, postcss-selector-parser@^6.0.6, postcss-selector-parser@^6.0.9: + version "6.0.10" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz#79b61e2c0d1bfc2602d549e11d0876256f8df88d" + integrity sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w== + dependencies: + cssesc "^3.0.0" + util-deprecate "^1.0.2" + +postcss-svgo@^5.1.0: + version "5.1.0" + resolved 
"https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-5.1.0.tgz#0a317400ced789f233a28826e77523f15857d80d" + integrity sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA== + dependencies: + postcss-value-parser "^4.2.0" + svgo "^2.7.0" + +postcss-unique-selectors@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz#a9f273d1eacd09e9aa6088f4b0507b18b1b541b6" + integrity sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA== + dependencies: + postcss-selector-parser "^6.0.5" + +postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" + integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== + +postcss@^7.0.35: + version "7.0.39" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.39.tgz#9624375d965630e2e1f2c02a935c82a59cb48309" + integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== + dependencies: + picocolors "^0.2.1" + source-map "^0.6.1" + +postcss@^8.3.5, postcss@^8.4.12, postcss@^8.4.4, postcss@^8.4.7: + version "8.4.13" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.13.tgz#7c87bc268e79f7f86524235821dfdf9f73e5d575" + integrity sha512-jtL6eTBrza5MPzy8oJLFuUscHDXTV5KcLlqAWHl5q5WYRfnNRGSmOZmOZ1T6Gy7A99mOZfqungmZMpMmCVJ8ZA== + dependencies: + nanoid "^3.3.3" + picocolors "^1.0.0" + source-map-js "^1.0.2" + +prelude-ls@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prelude-ls@~1.1.2: + version "1.1.2" + resolved 
"https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= + +pretty-bytes@^5.3.0, pretty-bytes@^5.4.1: + version "5.6.0" + resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-5.6.0.tgz#356256f643804773c82f64723fe78c92c62beaeb" + integrity sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg== + +pretty-error@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-4.0.0.tgz#90a703f46dd7234adb46d0f84823e9d1cb8f10d6" + integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw== + dependencies: + lodash "^4.17.20" + renderkid "^3.0.0" + +pretty-format@^27.0.0, pretty-format@^27.0.2, pretty-format@^27.5.1: + version "27.5.1" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-27.5.1.tgz#2181879fdea51a7a5851fb39d920faa63f01d88e" + integrity sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ== + dependencies: + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^17.0.1" + +pretty-format@^28.1.0: + version "28.1.0" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-28.1.0.tgz#8f5836c6a0dfdb834730577ec18029052191af55" + integrity sha512-79Z4wWOYCdvQkEoEuSlBhHJqWeZ8D8YRPiPctJFCtvuaClGpiwiQYSCUOE6IEKUbbFukKOTFIUAXE8N4EQTo1Q== + dependencies: + "@jest/schemas" "^28.0.2" + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^18.0.0" + +prismjs@~1.27.0: + version "1.27.0" + resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.27.0.tgz#bb6ee3138a0b438a3653dd4d6ce0cc6510a45057" + integrity sha512-t13BGPUlFDR7wRB5kQDG4jjl7XeuH6jbJGt11JHPL96qwsEHNX2+68tFXqc1/k+/jALsbSWJKUOT/hcYAZ5LkA== + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +promise@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/promise/-/promise-8.1.0.tgz#697c25c3dfe7435dd79fcd58c38a135888eaf05e" + integrity sha512-W04AqnILOL/sPRXziNicCjSNRruLAuIHEOVBazepu0545DDNGYHz7ar9ZgZ1fMU8/MA4mVxp5rkBWRi6OXIy3Q== + dependencies: + asap "~2.0.6" + +prompts@^2.0.1, prompts@^2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + +prop-types@^15.5.8, prop-types@^15.6.0, prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1: + version "15.8.1" + resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" + integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== + dependencies: + loose-envify "^1.4.0" + object-assign "^4.1.1" + react-is "^16.13.1" + +property-information@^5.0.0, property-information@^5.3.0: + version "5.6.0" + resolved "https://registry.yarnpkg.com/property-information/-/property-information-5.6.0.tgz#61675545fb23002f245c6540ec46077d4da3ed69" + integrity sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA== + dependencies: + xtend "^4.0.0" + +proxy-addr@~2.0.7: + version "2.0.7" + resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" + integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== + dependencies: + forwarded "0.2.0" + ipaddr.js "1.9.1" + +psl@^1.1.33: + version "1.8.0" + 
resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" + integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== + +punycode@^2.1.0, punycode@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +q@^1.1.2: + version "1.5.1" + resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" + integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc= + +qs@6.10.3: + version "6.10.3" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" + integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== + dependencies: + side-channel "^1.0.4" + +query-string@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/query-string/-/query-string-7.1.1.tgz#754620669db978625a90f635f12617c271a088e1" + integrity sha512-MplouLRDHBZSG9z7fpuAAcI7aAYjDLhtsiVZsevsfaHWDS2IDdORKbSd1kWUA+V4zyva/HZoSfpwnYMMQDhb0w== + dependencies: + decode-uri-component "^0.2.0" + filter-obj "^1.1.0" + split-on-first "^1.0.0" + strict-uri-encode "^2.0.0" + +querystringify@^2.1.1: + version "2.2.0" + resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" + integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== + +queue-microtask@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +quick-lru@^5.1.1: + version "5.1.1" + resolved 
"https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" + integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== + +raf-schd@^4.0.2: + version "4.0.3" + resolved "https://registry.yarnpkg.com/raf-schd/-/raf-schd-4.0.3.tgz#5d6c34ef46f8b2a0e880a8fcdb743efc5bfdbc1a" + integrity sha512-tQkJl2GRWh83ui2DiPTJz9wEiMN20syf+5oKfB03yYP7ioZcJwsIK8FjrtLwH1m7C7e+Tt2yYBlrOpdT+dyeIQ== + +raf@^3.4.1: + version "3.4.1" + resolved "https://registry.yarnpkg.com/raf/-/raf-3.4.1.tgz#0742e99a4a6552f445d73e3ee0328af0ff1ede39" + integrity sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA== + dependencies: + performance-now "^2.1.0" + +randombytes@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" + integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== + dependencies: + safe-buffer "^5.1.0" + +range-parser@^1.2.1, range-parser@~1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +raw-body@2.5.1: + version "2.5.1" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" + integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== + dependencies: + bytes "3.1.2" + http-errors "2.0.0" + iconv-lite "0.4.24" + unpipe "1.0.0" + +react-app-polyfill@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz#95221e0a9bd259e5ca6b177c7bb1cb6768f68fd7" + integrity sha512-sZ41cxiU5llIB003yxxQBYrARBqe0repqPTTYBTmMqTz9szeBbE37BehCE891NZsmdZqqP+xWKdT3eo3vOzN8w== + 
dependencies: + core-js "^3.19.2" + object-assign "^4.1.1" + promise "^8.1.0" + raf "^3.4.1" + regenerator-runtime "^0.13.9" + whatwg-fetch "^3.6.2" + +react-beautiful-dnd@^13.1.0: + version "13.1.0" + resolved "https://registry.yarnpkg.com/react-beautiful-dnd/-/react-beautiful-dnd-13.1.0.tgz#ec97c81093593526454b0de69852ae433783844d" + integrity sha512-aGvblPZTJowOWUNiwd6tNfEpgkX5OxmpqxHKNW/4VmvZTNTbeiq7bA3bn5T+QSF2uibXB0D1DmJsb1aC/+3cUA== + dependencies: + "@babel/runtime" "^7.9.2" + css-box-model "^1.2.0" + memoize-one "^5.1.1" + raf-schd "^4.0.2" + react-redux "^7.2.0" + redux "^4.0.4" + use-memo-one "^1.1.1" + +react-clientside-effect@^1.2.6: + version "1.2.6" + resolved "https://registry.yarnpkg.com/react-clientside-effect/-/react-clientside-effect-1.2.6.tgz#29f9b14e944a376b03fb650eed2a754dd128ea3a" + integrity sha512-XGGGRQAKY+q25Lz9a/4EPqom7WRjz3z9R2k4jhVKA/puQFH/5Nt27vFZYql4m4NVNdUvX8PS3O7r/Zzm7cjUlg== + dependencies: + "@babel/runtime" "^7.12.13" + +react-dev-utils@^12.0.1: + version "12.0.1" + resolved "https://registry.yarnpkg.com/react-dev-utils/-/react-dev-utils-12.0.1.tgz#ba92edb4a1f379bd46ccd6bcd4e7bc398df33e73" + integrity sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ== + dependencies: + "@babel/code-frame" "^7.16.0" + address "^1.1.2" + browserslist "^4.18.1" + chalk "^4.1.2" + cross-spawn "^7.0.3" + detect-port-alt "^1.1.6" + escape-string-regexp "^4.0.0" + filesize "^8.0.6" + find-up "^5.0.0" + fork-ts-checker-webpack-plugin "^6.5.0" + global-modules "^2.0.0" + globby "^11.0.4" + gzip-size "^6.0.0" + immer "^9.0.7" + is-root "^2.1.0" + loader-utils "^3.2.0" + open "^8.4.0" + pkg-up "^3.1.0" + prompts "^2.4.2" + react-error-overlay "^6.0.11" + recursive-readdir "^2.2.2" + shell-quote "^1.7.3" + strip-ansi "^6.0.1" + text-table "^0.2.0" + +react-dom@^18.1.0: + version "18.1.0" + resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-18.1.0.tgz#7f6dd84b706408adde05e1df575b3a024d7e8a2f" + 
integrity sha512-fU1Txz7Budmvamp7bshe4Zi32d0ll7ect+ccxNu9FlObT605GOEB8BfO4tmRJ39R5Zj831VCpvQ05QPBW5yb+w== + dependencies: + loose-envify "^1.1.0" + scheduler "^0.22.0" + +react-dropzone@^11.5.3: + version "11.7.1" + resolved "https://registry.yarnpkg.com/react-dropzone/-/react-dropzone-11.7.1.tgz#3851bb75b26af0bf1b17ce1449fd980e643b9356" + integrity sha512-zxCMwhfPy1olUEbw3FLNPLhAm/HnaYH5aELIEglRbqabizKAdHs0h+WuyOpmA+v1JXn0++fpQDdNfUagWt5hJQ== + dependencies: + attr-accept "^2.2.2" + file-selector "^0.4.0" + prop-types "^15.8.1" + +react-element-to-jsx-string@^14.3.4: + version "14.3.4" + resolved "https://registry.yarnpkg.com/react-element-to-jsx-string/-/react-element-to-jsx-string-14.3.4.tgz#709125bc72f06800b68f9f4db485f2c7d31218a8" + integrity sha512-t4ZwvV6vwNxzujDQ+37bspnLwA4JlgUPWhLjBJWsNIDceAf6ZKUTCjdm08cN6WeZ5pTMKiCJkmAYnpmR4Bm+dg== + dependencies: + "@base2/pretty-print-object" "1.0.1" + is-plain-object "5.0.0" + react-is "17.0.2" + +react-error-overlay@^6.0.11: + version "6.0.11" + resolved "https://registry.yarnpkg.com/react-error-overlay/-/react-error-overlay-6.0.11.tgz#92835de5841c5cf08ba00ddd2d677b6d17ff9adb" + integrity sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg== + +react-focus-lock@^2.9.0: + version "2.9.1" + resolved "https://registry.yarnpkg.com/react-focus-lock/-/react-focus-lock-2.9.1.tgz#094cfc19b4f334122c73bb0bff65d77a0c92dd16" + integrity sha512-pSWOQrUmiKLkffPO6BpMXN7SNKXMsuOakl652IBuALAu1esk+IcpJyM+ALcYzPTTFz1rD0R54aB9A4HuP5t1Wg== + dependencies: + "@babel/runtime" "^7.0.0" + focus-lock "^0.11.2" + prop-types "^15.6.2" + react-clientside-effect "^1.2.6" + use-callback-ref "^1.3.0" + use-sidecar "^1.1.2" + +react-focus-on@^3.5.4: + version "3.6.0" + resolved "https://registry.yarnpkg.com/react-focus-on/-/react-focus-on-3.6.0.tgz#159e13082dad4ea1f07abe11254f0e981d5a7b79" + integrity sha512-onIRjpd9trAUenXNdDcvjc8KJUSklty4X/Gr7hAm/MzM7ekSF2pg9D8KBKL7ipige22IAPxLRRf/EmJji9KD6Q== + 
dependencies: + aria-hidden "^1.1.3" + react-focus-lock "^2.9.0" + react-remove-scroll "^2.5.2" + react-style-singleton "^2.2.0" + tslib "^2.3.1" + use-callback-ref "^1.3.0" + use-sidecar "^1.1.2" + +react-input-autosize@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/react-input-autosize/-/react-input-autosize-3.0.0.tgz#6b5898c790d4478d69420b55441fcc31d5c50a85" + integrity sha512-nL9uS7jEs/zu8sqwFE5MAPx6pPkNAriACQ2rGLlqmKr2sPGtN7TXTyDdQt4lbNXVx7Uzadb40x8qotIuru6Rhg== + dependencies: + prop-types "^15.5.8" + +react-is@17.0.2, react-is@^17.0.1, react-is@^17.0.2: + version "17.0.2" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0" + integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w== + +react-is@^16.13.1, react-is@^16.7.0: + version "16.13.1" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" + integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== + +react-is@^18.0.0: + version "18.1.0" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.1.0.tgz#61aaed3096d30eacf2a2127118b5b41387d32a67" + integrity sha512-Fl7FuabXsJnV5Q1qIOQwx/sagGF18kogb4gpfcG4gjLBWO0WDiiz1ko/ExayuxE7InyQkBLkxRFG5oxY6Uu3Kg== + +react-query@^3.34.12, react-query@^3.39.0: + version "3.39.0" + resolved "https://registry.yarnpkg.com/react-query/-/react-query-3.39.0.tgz#0caca7b0da98e65008bbcd4df0d25618c2100050" + integrity sha512-Od0IkSuS79WJOhzWBx/ys0x13+7wFqgnn64vBqqAAnZ9whocVhl/y1padD5uuZ6EIkXbFbInax0qvY7zGM0thA== + dependencies: + "@babel/runtime" "^7.5.5" + broadcast-channel "^3.4.1" + match-sorter "^6.0.2" + +react-redux@^7.2.0: + version "7.2.8" + resolved "https://registry.yarnpkg.com/react-redux/-/react-redux-7.2.8.tgz#a894068315e65de5b1b68899f9c6ee0923dd28de" + integrity 
sha512-6+uDjhs3PSIclqoCk0kd6iX74gzrGc3W5zcAjbrFgEdIjRSQObdIwfx80unTkVUYvbQ95Y8Av3OvFHq1w5EOUw== + dependencies: + "@babel/runtime" "^7.15.4" + "@types/react-redux" "^7.1.20" + hoist-non-react-statics "^3.3.2" + loose-envify "^1.4.0" + prop-types "^15.7.2" + react-is "^17.0.2" + +react-refresh@^0.11.0: + version "0.11.0" + resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.11.0.tgz#77198b944733f0f1f1a90e791de4541f9f074046" + integrity sha512-F27qZr8uUqwhWZboondsPx8tnC3Ct3SxZA3V5WyEvujRyyNv0VYPhoBg1gZ8/MV5tubQp76Trw8lTv9hzRBa+A== + +react-remove-scroll-bar@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.1.tgz#9f13b05b249eaa57c8d646c1ebb83006b3581f5f" + integrity sha512-IvGX3mJclEF7+hga8APZczve1UyGMkMG+tjS0o/U1iLgvZRpjFAQEUBJ4JETfvbNlfNnZnoDyWJCICkA15Mghg== + dependencies: + react-style-singleton "^2.2.0" + tslib "^2.0.0" + +react-remove-scroll@^2.5.2: + version "2.5.3" + resolved "https://registry.yarnpkg.com/react-remove-scroll/-/react-remove-scroll-2.5.3.tgz#a152196e710e8e5811be39dc352fd8a90b05c961" + integrity sha512-NQ1bXrxKrnK5pFo/GhLkXeo3CrK5steI+5L+jynwwIemvZyfXqaL0L5BzwJd7CSwNCU723DZaccvjuyOdoy3Xw== + dependencies: + react-remove-scroll-bar "^2.3.1" + react-style-singleton "^2.2.0" + tslib "^2.0.0" + use-callback-ref "^1.3.0" + use-sidecar "^1.1.2" + +react-router-dom@6, react-router-dom@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-6.3.0.tgz#a0216da813454e521905b5fa55e0e5176123f43d" + integrity sha512-uaJj7LKytRxZNQV8+RbzJWnJ8K2nPsOOEuX7aQstlMZKQT0164C+X2w6bnkqU3sjtLvpd5ojrezAyfZ1+0sStw== + dependencies: + history "^5.2.0" + react-router "6.3.0" + +react-router@6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/react-router/-/react-router-6.3.0.tgz#3970cc64b4cb4eae0c1ea5203a80334fdd175557" + integrity sha512-7Wh1DzVQ+tlFjkeo+ujvjSqSJmkt1+8JO+T5xklPlgrh70y7ogx75ODRW0ThWhY7S+6yEDks8TYrtQe/aoboBQ== 
+ dependencies: + history "^5.2.0" + +react-scripts@5.0.1, react-scripts@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/react-scripts/-/react-scripts-5.0.1.tgz#6285dbd65a8ba6e49ca8d651ce30645a6d980003" + integrity sha512-8VAmEm/ZAwQzJ+GOMLbBsTdDKOpuZh7RPs0UymvBR2vRk4iZWCskjbFnxqjrzoIvlNNRZ3QJFx6/qDSi6zSnaQ== + dependencies: + "@babel/core" "^7.16.0" + "@pmmmwh/react-refresh-webpack-plugin" "^0.5.3" + "@svgr/webpack" "^5.5.0" + babel-jest "^27.4.2" + babel-loader "^8.2.3" + babel-plugin-named-asset-import "^0.3.8" + babel-preset-react-app "^10.0.1" + bfj "^7.0.2" + browserslist "^4.18.1" + camelcase "^6.2.1" + case-sensitive-paths-webpack-plugin "^2.4.0" + css-loader "^6.5.1" + css-minimizer-webpack-plugin "^3.2.0" + dotenv "^10.0.0" + dotenv-expand "^5.1.0" + eslint "^8.3.0" + eslint-config-react-app "^7.0.1" + eslint-webpack-plugin "^3.1.1" + file-loader "^6.2.0" + fs-extra "^10.0.0" + html-webpack-plugin "^5.5.0" + identity-obj-proxy "^3.0.0" + jest "^27.4.3" + jest-resolve "^27.4.2" + jest-watch-typeahead "^1.0.0" + mini-css-extract-plugin "^2.4.5" + postcss "^8.4.4" + postcss-flexbugs-fixes "^5.0.2" + postcss-loader "^6.2.1" + postcss-normalize "^10.0.1" + postcss-preset-env "^7.0.1" + prompts "^2.4.2" + react-app-polyfill "^3.0.0" + react-dev-utils "^12.0.1" + react-refresh "^0.11.0" + resolve "^1.20.0" + resolve-url-loader "^4.0.0" + sass-loader "^12.3.0" + semver "^7.3.5" + source-map-loader "^3.0.0" + style-loader "^3.3.1" + tailwindcss "^3.0.2" + terser-webpack-plugin "^5.2.5" + webpack "^5.64.4" + webpack-dev-server "^4.6.0" + webpack-manifest-plugin "^4.0.2" + workbox-webpack-plugin "^6.4.1" + optionalDependencies: + fsevents "^2.3.2" + +react-style-singleton@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/react-style-singleton/-/react-style-singleton-2.2.0.tgz#70f45f5fef97fdb9a52eed98d1839fa6b9032b22" + integrity sha512-nK7mN92DMYZEu3cQcAhfwE48NpzO5RpxjG4okbSqRRbfal9Pk+fG2RdQXTMp+f6all1hB9LIJSt+j7dCYrU11g== + 
dependencies: + get-nonce "^1.0.0" + invariant "^2.2.4" + tslib "^2.0.0" + +react-virtualized-auto-sizer@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/react-virtualized-auto-sizer/-/react-virtualized-auto-sizer-1.0.6.tgz#66c5b1c9278064c5ef1699ed40a29c11518f97ca" + integrity sha512-7tQ0BmZqfVF6YYEWcIGuoR3OdYe8I/ZFbNclFlGOC3pMqunkYF/oL30NCjSGl9sMEb17AnzixDz98Kqc3N76HQ== + +react-window@^1.8.6: + version "1.8.7" + resolved "https://registry.yarnpkg.com/react-window/-/react-window-1.8.7.tgz#5e9fd0d23f48f432d7022cdb327219353a15f0d4" + integrity sha512-JHEZbPXBpKMmoNO1bNhoXOOLg/ujhL/BU4IqVU9r8eQPcy5KQnGHIHDRkJ0ns9IM5+Aq5LNwt3j8t3tIrePQzA== + dependencies: + "@babel/runtime" "^7.0.0" + memoize-one ">=3.1.1 <6" + +react@^18.1.0: + version "18.1.0" + resolved "https://registry.yarnpkg.com/react/-/react-18.1.0.tgz#6f8620382decb17fdc5cc223a115e2adbf104890" + integrity sha512-4oL8ivCz5ZEPyclFQXaNksK3adutVS8l2xzZU0cqEFrE9Sb7fC0EFK5uEk74wIreL1DERyjvsU915j1pcT2uEQ== + dependencies: + loose-envify "^1.1.0" + +readable-stream@^2.0.1: + version "2.3.7" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" + integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@^3.0.6: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" + integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +readdirp@~3.6.0: + version "3.6.0" + resolved 
"https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== + dependencies: + picomatch "^2.2.1" + +recursive-readdir@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/recursive-readdir/-/recursive-readdir-2.2.2.tgz#9946fb3274e1628de6e36b2f6714953b4845094f" + integrity sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg== + dependencies: + minimatch "3.0.4" + +redent@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" + integrity sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg== + dependencies: + indent-string "^4.0.0" + strip-indent "^3.0.0" + +redux@^4.0.0, redux@^4.0.4: + version "4.2.0" + resolved "https://registry.yarnpkg.com/redux/-/redux-4.2.0.tgz#46f10d6e29b6666df758780437651eeb2b969f13" + integrity sha512-oSBmcKKIuIR4ME29/AeNUnl5L+hvBq7OaJWzaptTQJAntaPvxIJqfnjbaEiCzzaIz+XmVILfqAM3Ob0aXLPfjA== + dependencies: + "@babel/runtime" "^7.9.2" + +refractor@^3.5.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/refractor/-/refractor-3.6.0.tgz#ac318f5a0715ead790fcfb0c71f4dd83d977935a" + integrity sha512-MY9W41IOWxxk31o+YvFCNyNzdkc9M20NoZK5vq6jkv4I/uh2zkWcfudj0Q1fovjUQJrNewS9NMzeTtqPf+n5EA== + dependencies: + hastscript "^6.0.0" + parse-entities "^2.0.0" + prismjs "~1.27.0" + +regenerate-unicode-properties@^10.0.1: + version "10.0.1" + resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.0.1.tgz#7f442732aa7934a3740c779bb9b3340dccc1fb56" + integrity sha512-vn5DU6yg6h8hP/2OkQo3K7uVILvY4iu0oI4t3HFa81UPkhGJwkRwM10JEc3upjdhHjs/k8GJY1sRBhk5sr69Bw== + dependencies: + regenerate "^1.4.2" + +regenerate@^1.4.2: + version "1.4.2" + resolved 
"https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" + integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== + +regenerator-runtime@^0.13.4, regenerator-runtime@^0.13.9: + version "0.13.9" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" + integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== + +regenerator-transform@^0.15.0: + version "0.15.0" + resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.15.0.tgz#cbd9ead5d77fae1a48d957cf889ad0586adb6537" + integrity sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg== + dependencies: + "@babel/runtime" "^7.8.4" + +regex-parser@^2.2.11: + version "2.2.11" + resolved "https://registry.yarnpkg.com/regex-parser/-/regex-parser-2.2.11.tgz#3b37ec9049e19479806e878cabe7c1ca83ccfe58" + integrity sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q== + +regexp.prototype.flags@^1.4.1: + version "1.4.3" + resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" + integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + functions-have-names "^1.2.2" + +regexpp@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" + integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== + +regexpu-core@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.0.1.tgz#c531122a7840de743dcf9c83e923b5560323ced3" + integrity 
sha512-CriEZlrKK9VJw/xQGJpQM5rY88BtuL8DM+AEwvcThHilbxiTAy8vq4iJnd2tqq8wLmjbGZzP7ZcKFjbGkmEFrw== + dependencies: + regenerate "^1.4.2" + regenerate-unicode-properties "^10.0.1" + regjsgen "^0.6.0" + regjsparser "^0.8.2" + unicode-match-property-ecmascript "^2.0.0" + unicode-match-property-value-ecmascript "^2.0.0" + +regjsgen@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.6.0.tgz#83414c5354afd7d6627b16af5f10f41c4e71808d" + integrity sha512-ozE883Uigtqj3bx7OhL1KNbCzGyW2NQZPl6Hs09WTvCuZD5sTI4JY58bkbQWa/Y9hxIsvJ3M8Nbf7j54IqeZbA== + +regjsparser@^0.8.2: + version "0.8.4" + resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.8.4.tgz#8a14285ffcc5de78c5b95d62bbf413b6bc132d5f" + integrity sha512-J3LABycON/VNEu3abOviqGHuB/LOtOQj8SKmfP9anY5GfAVw/SPjwzSjxGjbZXIxbGfqTHtJw58C2Li/WkStmA== + dependencies: + jsesc "~0.5.0" + +rehype-raw@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/rehype-raw/-/rehype-raw-5.1.0.tgz#66d5e8d7188ada2d31bc137bc19a1000cf2c6b7e" + integrity sha512-MDvHAb/5mUnif2R+0IPCYJU8WjHa9UzGtM/F4AVy5GixPlDZ1z3HacYy4xojDU+uBa+0X/3PIfyQI26/2ljJNA== + dependencies: + hast-util-raw "^6.1.0" + +rehype-react@^6.0.0: + version "6.2.1" + resolved "https://registry.yarnpkg.com/rehype-react/-/rehype-react-6.2.1.tgz#9b9bf188451ad6f63796b784fe1f51165c67b73a" + integrity sha512-f9KIrjktvLvmbGc7si25HepocOg4z0MuNOtweigKzBcDjiGSTGhyz6VSgaV5K421Cq1O+z4/oxRJ5G9owo0KVg== + dependencies: + "@mapbox/hast-util-table-cell-style" "^0.2.0" + hast-to-hyperscript "^9.0.0" + +rehype-stringify@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/rehype-stringify/-/rehype-stringify-8.0.0.tgz#9b6afb599bcf3165f10f93fc8548f9a03d2ec2ba" + integrity sha512-VkIs18G0pj2xklyllrPSvdShAV36Ff3yE5PUO9u36f6+2qJFnn22Z5gKwBOwgXviux4UC7K+/j13AnZfPICi/g== + dependencies: + hast-util-to-html "^7.1.1" + +relateurl@^0.2.7: + version "0.2.7" + resolved 
"https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" + integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= + +remark-breaks@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/remark-breaks/-/remark-breaks-2.0.2.tgz#55fdec6c7da84f659aa7fdb1aa95b632870cee8d" + integrity sha512-LsQnPPQ7Fzp9RTjj4IwdEmjPOr9bxe9zYKWhs9ZQOg9hMg8rOfeeqQ410cvVdIK87Famqza1CKRxNkepp2EvUA== + dependencies: + unist-util-visit "^2.0.0" + +remark-emoji@^2.1.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/remark-emoji/-/remark-emoji-2.2.0.tgz#1c702090a1525da5b80e15a8f963ef2c8236cac7" + integrity sha512-P3cj9s5ggsUvWw5fS2uzCHJMGuXYRb0NnZqYlNecewXt8QBU9n5vW3DUUKOhepS8F9CwdMx9B8a3i7pqFWAI5w== + dependencies: + emoticon "^3.2.0" + node-emoji "^1.10.0" + unist-util-visit "^2.0.3" + +remark-parse@^8.0.3: + version "8.0.3" + resolved "https://registry.yarnpkg.com/remark-parse/-/remark-parse-8.0.3.tgz#9c62aa3b35b79a486454c690472906075f40c7e1" + integrity sha512-E1K9+QLGgggHxCQtLt++uXltxEprmWzNfg+MxpfHsZlrddKzZ/hZyWHDbK3/Ap8HJQqYJRXP+jHczdL6q6i85Q== + dependencies: + ccount "^1.0.0" + collapse-white-space "^1.0.2" + is-alphabetical "^1.0.0" + is-decimal "^1.0.0" + is-whitespace-character "^1.0.0" + is-word-character "^1.0.0" + markdown-escapes "^1.0.0" + parse-entities "^2.0.0" + repeat-string "^1.5.4" + state-toggle "^1.0.0" + trim "0.0.1" + trim-trailing-lines "^1.0.0" + unherit "^1.0.4" + unist-util-remove-position "^2.0.0" + vfile-location "^3.0.0" + xtend "^4.0.1" + +remark-rehype@^8.0.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/remark-rehype/-/remark-rehype-8.1.0.tgz#610509a043484c1e697437fa5eb3fd992617c945" + integrity sha512-EbCu9kHgAxKmW1yEYjx3QafMyGY3q8noUbNUI5xyKbaFP89wbhDrKxyIQNukNYthzjNHZu6J7hwFg7hRm1svYA== + dependencies: + mdast-util-to-hast "^10.2.0" + +remove-accents@0.4.2: + version "0.4.2" + resolved 
"https://registry.yarnpkg.com/remove-accents/-/remove-accents-0.4.2.tgz#0a43d3aaae1e80db919e07ae254b285d9e1c7bb5" + integrity sha1-CkPTqq4egNuRngeuJUsoXZ4ce7U= + +renderkid@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-3.0.0.tgz#5fd823e4d6951d37358ecc9a58b1f06836b6268a" + integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg== + dependencies: + css-select "^4.1.3" + dom-converter "^0.2.0" + htmlparser2 "^6.1.0" + lodash "^4.17.21" + strip-ansi "^6.0.1" + +repeat-string@^1.5.4: + version "1.6.1" + resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" + integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= + +require-from-string@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + +requires-port@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" + integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= + +resolve-cwd@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" + integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== + dependencies: + resolve-from "^5.0.0" + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity 
sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve-from@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + +resolve-url-loader@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-url-loader/-/resolve-url-loader-4.0.0.tgz#d50d4ddc746bb10468443167acf800dcd6c3ad57" + integrity sha512-05VEMczVREcbtT7Bz+C+96eUO5HDNvdthIiMB34t7FcF8ehcu4wC0sSgPUubs3XW2Q3CNLJk/BJrCU9wVRymiA== + dependencies: + adjust-sourcemap-loader "^4.0.0" + convert-source-map "^1.7.0" + loader-utils "^2.0.0" + postcss "^7.0.35" + source-map "0.6.1" + +resolve.exports@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" + integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== + +resolve@^1.12.0, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.22.0: + version "1.22.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.0.tgz#5e0b8c67c15df57a89bdbabe603a002f21731198" + integrity sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw== + dependencies: + is-core-module "^2.8.1" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +resolve@^2.0.0-next.3: + version "2.0.0-next.3" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.3.tgz#d41016293d4a8586a39ca5d9b5f15cbea1f55e46" + integrity sha512-W8LucSynKUIDu9ylraa7ueVZ7hc0uAgJBxVsQSKOXOyle8a93qXhcz+XAXZ8bIq2d6i4Ehddn6Evt+0/UwKk6Q== + dependencies: + is-core-module "^2.2.0" + path-parse "^1.0.6" + +retry@^0.13.1: + version "0.13.1" + resolved 
"https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" + integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@3.0.2, rimraf@^3.0.0, rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +robust-predicates@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/robust-predicates/-/robust-predicates-3.0.1.tgz#ecde075044f7f30118682bd9fb3f123109577f9a" + integrity sha512-ndEIpszUHiG4HtDsQLeIuMvRsDnn8c8rYStabochtUeCvfuvNptb5TUbVD68LRAILPX7p9nqQGh4xJgn3EHS/g== + +rollup-plugin-terser@^7.0.0: + version "7.0.2" + resolved "https://registry.yarnpkg.com/rollup-plugin-terser/-/rollup-plugin-terser-7.0.2.tgz#e8fbba4869981b2dc35ae7e8a502d5c6c04d324d" + integrity sha512-w3iIaU4OxcF52UUXiZNsNeuXIMDvFrr+ZXK6bFZ0Q60qyVfq4uLptoS4bbq3paG3x216eQllFZX7zt6TIImguQ== + dependencies: + "@babel/code-frame" "^7.10.4" + jest-worker "^26.2.1" + serialize-javascript "^4.0.0" + terser "^5.0.0" + +rollup@^2.43.1: + version "2.72.1" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.72.1.tgz#861c94790537b10008f0ca0fbc60e631aabdd045" + integrity sha512-NTc5UGy/NWFGpSqF1lFY8z9Adri6uhyMLI6LvPAXdBKoPRFhIIiBUpt+Qg2awixqO3xvzSijjhnb4+QEZwJmxA== + optionalDependencies: + fsevents "~2.3.2" + +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity 
sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +rw@1: + version "1.3.3" + resolved "https://registry.yarnpkg.com/rw/-/rw-1.3.3.tgz#3f862dfa91ab766b14885ef4d01124bfda074fb4" + integrity sha1-P4Yt+pGrdmsUiF700BEkv9oHT7Q= + +safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.2.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0": + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +sanitize.css@*: + version "13.0.0" + resolved "https://registry.yarnpkg.com/sanitize.css/-/sanitize.css-13.0.0.tgz#2675553974b27964c75562ade3bd85d79879f173" + integrity sha512-ZRwKbh/eQ6w9vmTjkuG0Ioi3HBwPFce0O+v//ve+aOq1oeCy7jMV2qzzAlpsNuqpqCBjjriM1lbtZbF/Q8jVyA== + +sass-loader@^12.3.0: + version "12.6.0" + resolved "https://registry.yarnpkg.com/sass-loader/-/sass-loader-12.6.0.tgz#5148362c8e2cdd4b950f3c63ac5d16dbfed37bcb" + integrity sha512-oLTaH0YCtX4cfnJZxKSLAyglED0naiYfNG1iXfU5w1LNZ+ukoA5DtyDIN5zmKVZwYNJP4KRc5Y3hkWga+7tYfA== + dependencies: + klona "^2.0.4" + neo-async "^2.6.2" + +sax@~1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + 
integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + +saxes@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" + integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== + dependencies: + xmlchars "^2.2.0" + +scheduler@^0.22.0: + version "0.22.0" + resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.22.0.tgz#83a5d63594edf074add9a7198b1bae76c3db01b8" + integrity sha512-6QAm1BgQI88NPYymgGQLCZgvep4FyePDWFpXVK+zNSUgHwlqpJy8VEh8Et0KxTACS4VWwMousBElAZOH9nkkoQ== + dependencies: + loose-envify "^1.1.0" + +schema-utils@2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.0.tgz#17151f76d8eae67fbbf77960c33c676ad9f4efc7" + integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== + dependencies: + "@types/json-schema" "^7.0.4" + ajv "^6.12.2" + ajv-keywords "^3.4.1" + +schema-utils@^2.6.5: + version "2.7.1" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7" + integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== + dependencies: + "@types/json-schema" "^7.0.5" + ajv "^6.12.4" + ajv-keywords "^3.5.2" + +schema-utils@^3.0.0, schema-utils@^3.1.0, schema-utils@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.1.1.tgz#bc74c4b6b6995c1d88f76a8b77bea7219e0c8281" + integrity sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + +schema-utils@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.0.0.tgz#60331e9e3ae78ec5d16353c467c34b3a0a1d3df7" + integrity 
sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg== + dependencies: + "@types/json-schema" "^7.0.9" + ajv "^8.8.0" + ajv-formats "^2.1.1" + ajv-keywords "^5.0.0" + +select-hose@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" + integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= + +selfsigned@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-2.0.1.tgz#8b2df7fa56bf014d19b6007655fff209c0ef0a56" + integrity sha512-LmME957M1zOsUhG+67rAjKfiWFox3SBxE/yymatMZsAx+oMrJ0YQ8AToOnyCm7xbeg2ep37IHLxdu0o2MavQOQ== + dependencies: + node-forge "^1" + +semver@7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e" + integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A== + +semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +semver@^7.3.2, semver@^7.3.5: + version "7.3.7" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" + integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== + dependencies: + lru-cache "^6.0.0" + +send@0.18.0: + version "0.18.0" + resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" + integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== + dependencies: + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "2.0.0" + mime "1.6.0" + ms 
"2.1.3" + on-finished "2.4.1" + range-parser "~1.2.1" + statuses "2.0.1" + +serialize-javascript@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa" + integrity sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw== + dependencies: + randombytes "^2.1.0" + +serialize-javascript@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8" + integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag== + dependencies: + randombytes "^2.1.0" + +serialize-query-params@^1.3.5: + version "1.3.6" + resolved "https://registry.yarnpkg.com/serialize-query-params/-/serialize-query-params-1.3.6.tgz#5dd5225db85ce747fe6fbc4897628504faafec6d" + integrity sha512-VlH7sfWNyPVZClPkRacopn6sn5uQMXBsjPVz1+pBHX895VpcYVznfJtZ49e6jymcrz+l/vowkepCZn/7xEAEdw== + +serve-index@^1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" + integrity sha1-03aNabHn2C5c4FD/9bRTvqEqkjk= + dependencies: + accepts "~1.3.4" + batch "0.6.1" + debug "2.6.9" + escape-html "~1.0.3" + http-errors "~1.6.2" + mime-types "~2.1.17" + parseurl "~1.3.2" + +serve-static@1.15.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" + integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== + dependencies: + encodeurl "~1.0.2" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.18.0" + +setprototypeof@1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" + integrity 
sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== + +setprototypeof@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" + integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +shell-quote@^1.7.3: + version "1.7.3" + resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.7.3.tgz#aa40edac170445b9a431e17bb62c0b881b9c4123" + integrity sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw== + +side-channel@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +signal-exit@^3.0.2, signal-exit@^3.0.3: + version "3.0.7" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + +sisteransi@^1.0.5: + version "1.0.5" + resolved 
"https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +slash@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" + integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== + +sockjs@^0.3.21: + version "0.3.24" + resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.24.tgz#c9bc8995f33a111bea0395ec30aa3206bdb5ccce" + integrity sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ== + dependencies: + faye-websocket "^0.11.3" + uuid "^8.3.2" + websocket-driver "^0.7.4" + +source-list-map@^2.0.0, source-list-map@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" + integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== + +source-map-js@^1.0.1, source-map-js@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" + integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== + +source-map-loader@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/source-map-loader/-/source-map-loader-3.0.1.tgz#9ae5edc7c2d42570934be4c95d1ccc6352eba52d" + integrity sha512-Vp1UsfyPvgujKQzi4pyDiTOnE3E4H+yHvkVRN3c/9PJmQS4CQJExvcDvaX/D+RV+xQben9HJ56jMJS3CgUeWyA== + dependencies: + abab "^2.0.5" + iconv-lite "^0.6.3" + 
source-map-js "^1.0.1" + +source-map-resolve@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.6.0.tgz#3d9df87e236b53f16d01e58150fc7711138e5ed2" + integrity sha512-KXBr9d/fO/bWo97NXsPIAW1bFSBOuCnjbNTBMO7N59hsv5i9yzRDfcYwwt0l04+VqnKC+EwzvJZIP/qkuMgR/w== + dependencies: + atob "^2.1.2" + decode-uri-component "^0.2.0" + +source-map-support@^0.5.6, source-map-support@~0.5.20: + version "0.5.21" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" + integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map@0.6.1, source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +source-map@^0.5.7: + version "0.5.7" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= + +source-map@^0.7.3: + version "0.7.3" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" + integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== + +source-map@^0.8.0-beta.0, source-map@~0.8.0-beta.0: + version "0.8.0-beta.0" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.8.0-beta.0.tgz#d4c1bb42c3f7ee925f005927ba10709e0d1d1f11" + integrity sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA== + dependencies: + whatwg-url "^7.0.0" + +sourcemap-codec@^1.4.8: + version "1.4.8" + resolved 
"https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" + integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== + +space-separated-tokens@^1.0.0: + version "1.1.5" + resolved "https://registry.yarnpkg.com/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz#85f32c3d10d9682007e917414ddc5c26d1aa6899" + integrity sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA== + +spdy-transport@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" + integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== + dependencies: + debug "^4.1.0" + detect-node "^2.0.4" + hpack.js "^2.1.6" + obuf "^1.1.2" + readable-stream "^3.0.6" + wbuf "^1.7.3" + +spdy@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.2.tgz#b74f466203a3eda452c02492b91fb9e84a27677b" + integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA== + dependencies: + debug "^4.1.0" + handle-thing "^2.0.0" + http-deceiver "^1.2.7" + select-hose "^2.0.0" + spdy-transport "^3.0.0" + +split-on-first@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/split-on-first/-/split-on-first-1.1.0.tgz#f610afeee3b12bce1d0c30425e76398b78249a5f" + integrity sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw== + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= + +stable@^0.1.8: + version "0.1.8" + resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" + integrity 
sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== + +stack-utils@^2.0.3: + version "2.0.5" + resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5" + integrity sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA== + dependencies: + escape-string-regexp "^2.0.0" + +stackframe@^1.1.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/stackframe/-/stackframe-1.2.1.tgz#1033a3473ee67f08e2f2fc8eba6aef4f845124e1" + integrity sha512-h88QkzREN/hy8eRdyNhhsO7RSJ5oyTqxxmmn0dzBIMUclZsjpfmrsg81vp8mjjAs2vAZ72nyWxRUwSwmh0e4xg== + +state-toggle@^1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/state-toggle/-/state-toggle-1.0.3.tgz#e123b16a88e143139b09c6852221bc9815917dfe" + integrity sha512-d/5Z4/2iiCnHw6Xzghyhb+GcmF89bxwgXG60wjIiZaxnymbyOmI8Hk4VqHXiVVp6u2ysaskFfXg3ekCj4WNftQ== + +statuses@2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + +"statuses@>= 1.4.0 < 2": + version "1.5.0" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" + integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= + +strict-uri-encode@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz#b9c7330c7042862f6b142dc274bbcc5866ce3546" + integrity sha1-ucczDHBChi9rFC3CdLvMWGbONUY= + +string-length@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" + integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== + dependencies: + char-regex "^1.0.2" + strip-ansi "^6.0.0" + +string-length@^5.0.1: + 
version "5.0.1" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-5.0.1.tgz#3d647f497b6e8e8d41e422f7e0b23bc536c8381e" + integrity sha512-9Ep08KAMUn0OadnVaBuRdE2l615CQ508kr0XMadjClfYpdCyvrbFp6Taebo8yyxokQ4viUd/xPPUA4FGgUa0ow== + dependencies: + char-regex "^2.0.0" + strip-ansi "^7.0.1" + +string-natural-compare@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4" + integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw== + +string-width@^4.1.0, string-width@^4.2.0: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string.prototype.matchall@^4.0.6: + version "4.0.7" + resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz#8e6ecb0d8a1fb1fda470d81acecb2dba057a481d" + integrity sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + get-intrinsic "^1.1.1" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + regexp.prototype.flags "^1.4.1" + side-channel "^1.0.4" + +string.prototype.trimend@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz#914a65baaab25fbdd4ee291ca7dde57e869cb8d0" + integrity sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string.prototype.trimstart@^1.0.5: + version "1.0.5" + resolved 
"https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz#5466d93ba58cfa2134839f81d7f42437e8c01fef" + integrity sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +stringify-entities@^3.0.1: + version "3.1.0" + resolved "https://registry.yarnpkg.com/stringify-entities/-/stringify-entities-3.1.0.tgz#b8d3feac256d9ffcc9fa1fefdcf3ca70576ee903" + integrity sha512-3FP+jGMmMV/ffZs86MoghGqAoqXAdxLrJP4GUdrDN1aIScYih5tuIO3eF4To5AJZ79KDZ8Fpdy7QJnK8SsL1Vg== + dependencies: + character-entities-html4 "^1.0.0" + character-entities-legacy "^1.0.0" + xtend "^4.0.0" + +stringify-object@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/stringify-object/-/stringify-object-3.3.0.tgz#703065aefca19300d3ce88af4f5b3956d7556629" + integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw== + dependencies: + get-own-enumerable-property-symbols "^3.0.0" + is-obj "^1.0.1" + is-regexp "^1.0.0" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity 
sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.0.1.tgz#61740a08ce36b61e50e65653f07060d000975fb2" + integrity sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw== + dependencies: + ansi-regex "^6.0.1" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM= + +strip-bom@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== + +strip-comments@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-comments/-/strip-comments-2.0.1.tgz#4ad11c3fbcac177a67a40ac224ca339ca1c1ba9b" + integrity sha512-ZprKx+bBLXv067WTCALv8SSz5l2+XhpYCsVtSqlMnkAXMWDq+/ekVbl1ghqP9rUHTzv6sm/DwCOiYutU/yp1fw== + +strip-final-newline@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + +strip-indent@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" + integrity sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ== + dependencies: + min-indent "^1.0.0" + +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + 
integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +style-loader@^3.3.1: + version "3.3.1" + resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-3.3.1.tgz#057dfa6b3d4d7c7064462830f9113ed417d38575" + integrity sha512-GPcQ+LDJbrcxHORTRes6Jy2sfvK2kS6hpSfI/fXhPt+spVzxF6LJ1dHLN9zIGmVaaP044YKaIatFaufENRiDoQ== + +style-to-object@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/style-to-object/-/style-to-object-0.3.0.tgz#b1b790d205991cc783801967214979ee19a76e46" + integrity sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA== + dependencies: + inline-style-parser "0.1.1" + +stylehacks@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-5.1.0.tgz#a40066490ca0caca04e96c6b02153ddc39913520" + integrity sha512-SzLmvHQTrIWfSgljkQCw2++C9+Ne91d/6Sp92I8c5uHTcy/PgeHamwITIbBW9wnFTY/3ZfSXR9HIL6Ikqmcu6Q== + dependencies: + browserslist "^4.16.6" + postcss-selector-parser "^6.0.4" + +stylis@4.0.13: + version "4.0.13" + resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.0.13.tgz#f5db332e376d13cc84ecfe5dace9a2a51d954c91" + integrity sha512-xGPXiFVl4YED9Jh7Euv2V220mriG9u4B2TA6Ybjc1catrstKD2PpIdU3U0RKpkVBC2EhmL/F0sPCr9vrFTNRag== + +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^7.0.0, supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-color@^8.0.0: + version "8.1.1" + 
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + +supports-hyperlinks@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz#4f77b42488765891774b70c79babd87f9bd594bb" + integrity sha512-6sXEzV5+I5j8Bmq9/vUphGRM/RJNT9SCURJLjwfOg51heRtguGWDzcaBlgAzKhQa0EVNpPEKzQuBwZ8S8WaCeQ== + dependencies: + has-flag "^4.0.0" + supports-color "^7.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +svg-parser@^2.0.2: + version "2.0.4" + resolved "https://registry.yarnpkg.com/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5" + integrity sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ== + +svgo@^1.2.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/svgo/-/svgo-1.3.2.tgz#b6dc511c063346c9e415b81e43401145b96d4167" + integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw== + dependencies: + chalk "^2.4.1" + coa "^2.0.2" + css-select "^2.0.0" + css-select-base-adapter "^0.1.1" + css-tree "1.0.0-alpha.37" + csso "^4.0.2" + js-yaml "^3.13.1" + mkdirp "~0.5.1" + object.values "^1.1.0" + sax "~1.2.4" + stable "^0.1.8" + unquote "~1.1.1" + util.promisify "~1.0.0" + +svgo@^2.7.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/svgo/-/svgo-2.8.0.tgz#4ff80cce6710dc2795f0c7c74101e6764cfccd24" + integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg== + 
dependencies: + "@trysound/sax" "0.2.0" + commander "^7.2.0" + css-select "^4.1.3" + css-tree "^1.1.3" + csso "^4.2.0" + picocolors "^1.0.0" + stable "^0.1.8" + +symbol-tree@^3.2.4: + version "3.2.4" + resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" + integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== + +tabbable@^5.2.1: + version "5.3.2" + resolved "https://registry.yarnpkg.com/tabbable/-/tabbable-5.3.2.tgz#66d6119ee8a533634c3f17deb0caa1c379e36ac7" + integrity sha512-6G/8EWRFx8CiSe2++/xHhXkmCRq2rHtDtZbQFHx34cvDfZzIBfvwG9zGUNTWMXWLCYvDj3aQqOzdl3oCxKuBkQ== + +tailwindcss@^3.0.2: + version "3.0.24" + resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.0.24.tgz#22e31e801a44a78a1d9a81ecc52e13b69d85704d" + integrity sha512-H3uMmZNWzG6aqmg9q07ZIRNIawoiEcNFKDfL+YzOPuPsXuDXxJxB9icqzLgdzKNwjG3SAro2h9SYav8ewXNgig== + dependencies: + arg "^5.0.1" + chokidar "^3.5.3" + color-name "^1.1.4" + detective "^5.2.0" + didyoumean "^1.2.2" + dlv "^1.1.3" + fast-glob "^3.2.11" + glob-parent "^6.0.2" + is-glob "^4.0.3" + lilconfig "^2.0.5" + normalize-path "^3.0.0" + object-hash "^3.0.0" + picocolors "^1.0.0" + postcss "^8.4.12" + postcss-js "^4.0.0" + postcss-load-config "^3.1.4" + postcss-nested "5.0.6" + postcss-selector-parser "^6.0.10" + postcss-value-parser "^4.2.0" + quick-lru "^5.1.1" + resolve "^1.22.0" + +tapable@^1.0.0: + version "1.1.3" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" + integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== + +tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" + integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== + 
+temp-dir@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-2.0.0.tgz#bde92b05bdfeb1516e804c9c00ad45177f31321e" + integrity sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg== + +tempy@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/tempy/-/tempy-0.6.0.tgz#65e2c35abc06f1124a97f387b08303442bde59f3" + integrity sha512-G13vtMYPT/J8A4X2SjdtBTphZlrp1gKv6hZiOjw14RCWg6GbHuQBGtjlx75xLbYV/wEc0D7G5K4rxKP/cXk8Bw== + dependencies: + is-stream "^2.0.0" + temp-dir "^2.0.0" + type-fest "^0.16.0" + unique-string "^2.0.0" + +terminal-link@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" + integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== + dependencies: + ansi-escapes "^4.2.1" + supports-hyperlinks "^2.0.0" + +terser-webpack-plugin@^5.1.3, terser-webpack-plugin@^5.2.5: + version "5.3.1" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.1.tgz#0320dcc270ad5372c1e8993fabbd927929773e54" + integrity sha512-GvlZdT6wPQKbDNW/GDQzZFg/j4vKU96yl2q6mcUkzKOgW4gwf1Z8cZToUCrz31XHlPWH8MVb1r2tFtdDtTGJ7g== + dependencies: + jest-worker "^27.4.5" + schema-utils "^3.1.1" + serialize-javascript "^6.0.0" + source-map "^0.6.1" + terser "^5.7.2" + +terser@^5.0.0, terser@^5.10.0, terser@^5.7.2: + version "5.13.1" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.13.1.tgz#66332cdc5a01b04a224c9fad449fc1a18eaa1799" + integrity sha512-hn4WKOfwnwbYfe48NgrQjqNOH9jzLqRcIfbYytOXCOv46LBfWr9bDS17MQqOi+BWGD0sJK3Sj5NC/gJjiojaoA== + dependencies: + acorn "^8.5.0" + commander "^2.20.0" + source-map "~0.8.0-beta.0" + source-map-support "~0.5.20" + +test-exclude@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity 
sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + +text-diff@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/text-diff/-/text-diff-1.0.1.tgz#6c105905435e337857375c9d2f6ca63e453ff565" + integrity sha1-bBBZBUNeM3hXN1ydL2ymPkU/9WU= + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= + +throat@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375" + integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w== + +thunky@^1.0.2: + version "1.1.0" + resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" + integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== + +tiny-invariant@^1.0.6: + version "1.2.0" + resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.2.0.tgz#a1141f86b672a9148c72e978a19a73b9b94a15a9" + integrity sha512-1Uhn/aqw5C6RI4KejVeTg6mIS7IqxnLJ8Mv2tV5rTc0qWobay7pDUz6Wi392Cnc8ak1H0F2cjoRzb2/AW4+Fvg== + +tmpl@1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= + +to-regex-range@^5.0.1: + version "5.0.1" + resolved 
"https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +toidentifier@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" + integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== + +tough-cookie@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.0.0.tgz#d822234eeca882f991f0f908824ad2622ddbece4" + integrity sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg== + dependencies: + psl "^1.1.33" + punycode "^2.1.1" + universalify "^0.1.2" + +tr46@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" + integrity sha1-qLE/1r/SSJUZZ0zN5VujaTtwbQk= + dependencies: + punycode "^2.1.0" + +tr46@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" + integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw== + dependencies: + punycode "^2.1.1" + +trim-trailing-lines@^1.0.0: + version "1.1.4" + resolved "https://registry.yarnpkg.com/trim-trailing-lines/-/trim-trailing-lines-1.1.4.tgz#bd4abbec7cc880462f10b2c8b5ce1d8d1ec7c2c0" + integrity sha512-rjUWSqnfTNrjbB9NQWfPMH/xRK1deHeGsHoVfpxJ++XeYXE0d6B1En37AHfw3jtfTU7dzMzZL2jjpe8Qb5gLIQ== + +trim@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/trim/-/trim-0.0.1.tgz#5858547f6b290757ee95cccc666fb50084c460dd" + integrity sha1-WFhUf2spB1fulczMZm+1AITEYN0= + +trough@^1.0.0: + version "1.0.5" + resolved 
"https://registry.yarnpkg.com/trough/-/trough-1.0.5.tgz#b8b639cefad7d0bb2abd37d433ff8293efa5f406" + integrity sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA== + +tryer@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/tryer/-/tryer-1.0.1.tgz#f2c85406800b9b0f74c9f7465b81eaad241252f8" + integrity sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA== + +tsconfig-paths@^3.14.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz#ba0734599e8ea36c862798e920bcf163277b137a" + integrity sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ== + dependencies: + "@types/json5" "^0.0.29" + json5 "^1.0.1" + minimist "^1.2.6" + strip-bom "^3.0.0" + +tslib@^1.0.0, tslib@^1.8.1, tslib@^1.9.3: + version "1.14.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tslib@^2.0.0, tslib@^2.0.3, tslib@^2.3.1: + version "2.4.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" + integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== + +tsutils@^3.21.0: + version "3.21.0" + resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" + integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== + dependencies: + tslib "^1.8.1" + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls 
"^1.2.1" + +type-check@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= + dependencies: + prelude-ls "~1.1.2" + +type-detect@4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + +type-fest@^0.16.0: + version "0.16.0" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.16.0.tgz#3240b891a78b0deae910dbeb86553e552a148860" + integrity sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg== + +type-fest@^0.20.2: + version "0.20.2" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + +type-fest@^0.21.3: + version "0.21.3" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== + +type-is@~1.6.18: + version "1.6.18" + resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" + integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + dependencies: + media-typer "0.3.0" + mime-types "~2.1.24" + +typedarray-to-buffer@^3.1.5: + version "3.1.5" + resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" + integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== + dependencies: + is-typedarray "^1.0.0" + +typescript@^4.6.4: + version 
"4.6.4" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.6.4.tgz#caa78bbc3a59e6a5c510d35703f6a09877ce45e9" + integrity sha512-9ia/jWHIEbo49HfjrLGfKbZSuWo9iTMwXO+Ca3pRsSpbsMbc7/IU8NKdCZVRRBafVPGnoJeFL76ZOAA84I9fEg== + +unbox-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" + integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== + dependencies: + call-bind "^1.0.2" + has-bigints "^1.0.2" + has-symbols "^1.0.3" + which-boxed-primitive "^1.0.2" + +unherit@^1.0.4: + version "1.1.3" + resolved "https://registry.yarnpkg.com/unherit/-/unherit-1.1.3.tgz#6c9b503f2b41b262330c80e91c8614abdaa69c22" + integrity sha512-Ft16BJcnapDKp0+J/rqFC3Rrk6Y/Ng4nzsC028k2jdDII/rdZ7Wd3pPT/6+vIIxRagwRc9K0IUX0Ra4fKvw+WQ== + dependencies: + inherits "^2.0.0" + xtend "^4.0.0" + +unicode-canonical-property-names-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc" + integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== + +unicode-match-property-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3" + integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== + dependencies: + unicode-canonical-property-names-ecmascript "^2.0.0" + unicode-property-aliases-ecmascript "^2.0.0" + +unicode-match-property-value-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz#1a01aa57247c14c568b89775a54938788189a714" + 
integrity sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw== + +unicode-property-aliases-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.0.0.tgz#0a36cb9a585c4f6abd51ad1deddb285c165297c8" + integrity sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ== + +unified@^9.2.0: + version "9.2.2" + resolved "https://registry.yarnpkg.com/unified/-/unified-9.2.2.tgz#67649a1abfc3ab85d2969502902775eb03146975" + integrity sha512-Sg7j110mtefBD+qunSLO1lqOEKdrwBFBrR6Qd8f4uwkhWNlbkaqwHse6e7QvD3AP/MNoJdEDLaf8OxYyoWgorQ== + dependencies: + bail "^1.0.0" + extend "^3.0.0" + is-buffer "^2.0.0" + is-plain-obj "^2.0.0" + trough "^1.0.0" + vfile "^4.0.0" + +unique-string@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" + integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== + dependencies: + crypto-random-string "^2.0.0" + +unist-builder@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/unist-builder/-/unist-builder-2.0.3.tgz#77648711b5d86af0942f334397a33c5e91516436" + integrity sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw== + +unist-util-generated@^1.0.0: + version "1.1.6" + resolved "https://registry.yarnpkg.com/unist-util-generated/-/unist-util-generated-1.1.6.tgz#5ab51f689e2992a472beb1b35f2ce7ff2f324d4b" + integrity sha512-cln2Mm1/CZzN5ttGK7vkoGw+RZ8VcUH6BtGbq98DDtRGquAAOXig1mrBQYelOwMXYS8rK+vZDyyojSjp7JX+Lg== + +unist-util-is@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/unist-util-is/-/unist-util-is-3.0.0.tgz#d9e84381c2468e82629e4a5be9d7d05a2dd324cd" + integrity sha512-sVZZX3+kspVNmLWBPAB6r+7D9ZgAFPNWm66f7YNb420RlQSbn+n8rG8dGZSkrER7ZIXGQYNm5pqC3v3HopH24A== + 
+unist-util-is@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/unist-util-is/-/unist-util-is-4.1.0.tgz#976e5f462a7a5de73d94b706bac1b90671b57797" + integrity sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg== + +unist-util-position@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/unist-util-position/-/unist-util-position-3.1.0.tgz#1c42ee6301f8d52f47d14f62bbdb796571fa2d47" + integrity sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA== + +unist-util-remove-position@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/unist-util-remove-position/-/unist-util-remove-position-2.0.1.tgz#5d19ca79fdba712301999b2b73553ca8f3b352cc" + integrity sha512-fDZsLYIe2uT+oGFnuZmy73K6ZxOPG/Qcm+w7jbEjaFcJgbQ6cqjs/eSPzXhsmGpAsWPkqZM9pYjww5QTn3LHMA== + dependencies: + unist-util-visit "^2.0.0" + +unist-util-stringify-position@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/unist-util-stringify-position/-/unist-util-stringify-position-2.0.3.tgz#cce3bfa1cdf85ba7375d1d5b17bdc4cada9bd9da" + integrity sha512-3faScn5I+hy9VleOq/qNbAd6pAx7iH5jYBMS9I1HgQVijz/4mv5Bvw5iw1sC/90CODiKo81G/ps8AJrISn687g== + dependencies: + "@types/unist" "^2.0.2" + +unist-util-stringify-position@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/unist-util-stringify-position/-/unist-util-stringify-position-3.0.2.tgz#5c6aa07c90b1deffd9153be170dce628a869a447" + integrity sha512-7A6eiDCs9UtjcwZOcCpM4aPII3bAAGv13E96IkawkOAW0OhH+yRxtY0lzo8KiHpzEMfH7Q+FizUmwp8Iqy5EWg== + dependencies: + "@types/unist" "^2.0.0" + +unist-util-visit-parents@^2.0.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/unist-util-visit-parents/-/unist-util-visit-parents-2.1.2.tgz#25e43e55312166f3348cae6743588781d112c1e9" + integrity sha512-DyN5vD4NE3aSeB+PXYNKxzGsfocxp6asDc2XXE3b0ekO2BaRUpBicbbUygfSvYfUz1IkmjFR1YF7dPklraMZ2g== + dependencies: + unist-util-is "^3.0.0" + 
+unist-util-visit-parents@^3.0.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz#65a6ce698f78a6b0f56aa0e88f13801886cdaef6" + integrity sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg== + dependencies: + "@types/unist" "^2.0.0" + unist-util-is "^4.0.0" + +unist-util-visit@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/unist-util-visit/-/unist-util-visit-1.4.1.tgz#4724aaa8486e6ee6e26d7ff3c8685960d560b1e3" + integrity sha512-AvGNk7Bb//EmJZyhtRUnNMEpId/AZ5Ph/KUpTI09WHQuDZHKovQ1oEv3mfmKpWKtoMzyMC4GLBm1Zy5k12fjIw== + dependencies: + unist-util-visit-parents "^2.0.0" + +unist-util-visit@^2.0.0, unist-util-visit@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/unist-util-visit/-/unist-util-visit-2.0.3.tgz#c3703893146df47203bb8a9795af47d7b971208c" + integrity sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q== + dependencies: + "@types/unist" "^2.0.0" + unist-util-is "^4.0.0" + unist-util-visit-parents "^3.0.0" + +universalify@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" + integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== + +universalify@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" + integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== + +unload@2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/unload/-/unload-2.2.0.tgz#ccc88fdcad345faa06a92039ec0f80b488880ef7" + integrity sha512-B60uB5TNBLtN6/LsgAf3udH9saB5p7gqJwcFfbOEZ8BcBHnGwCf6G/TGiEqkRAxX7zAFIUtzdrXQSdL3Q/wqNA== + dependencies: + "@babel/runtime" "^7.6.2" + detect-node "^2.0.4" + +unpipe@1.0.0, 
unpipe@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= + +unquote@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/unquote/-/unquote-1.1.1.tgz#8fded7324ec6e88a0ff8b905e7c098cdc086d544" + integrity sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ= + +upath@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" + integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +url-parse@^1.5.10: + version "1.5.10" + resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" + integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== + dependencies: + querystringify "^2.1.1" + requires-port "^1.0.0" + +use-callback-ref@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/use-callback-ref/-/use-callback-ref-1.3.0.tgz#772199899b9c9a50526fedc4993fc7fa1f7e32d5" + integrity sha512-3FT9PRuRdbB9HfXhEq35u4oZkvpJ5kuYbpqhCfmiZyReuRgpnhDlbr2ZEnnuS0RrJAPn6l23xjFg9kpDM+Ms7w== + dependencies: + tslib "^2.0.0" + +use-memo-one@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/use-memo-one/-/use-memo-one-1.1.2.tgz#0c8203a329f76e040047a35a1197defe342fab20" + integrity sha512-u2qFKtxLsia/r8qG0ZKkbytbztzRb317XCkT7yP8wxL0tZ/CzK2G+WWie5vWvpyeP7+YoPIwbJoIHJ4Ba4k0oQ== + +use-query-params@^1.2.3: + version "1.2.3" + resolved 
"https://registry.yarnpkg.com/use-query-params/-/use-query-params-1.2.3.tgz#306c31a0cbc714e8a3b4bd7e91a6a9aaccaa5e22" + integrity sha512-cdG0tgbzK+FzsV6DAt2CN8Saa3WpRnze7uC4Rdh7l15epSFq7egmcB/zuREvPNwO5Yk80nUpDZpiyHsoq50d8w== + dependencies: + serialize-query-params "^1.3.5" + +use-sidecar@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/use-sidecar/-/use-sidecar-1.1.2.tgz#2f43126ba2d7d7e117aa5855e5d8f0276dfe73c2" + integrity sha512-epTbsLuzZ7lPClpz2TyryBfztm7m+28DlEv2ZCQ3MDr5ssiwyOwGH/e5F9CkfWjJ1t4clvI58yF822/GUkjjhw== + dependencies: + detect-node-es "^1.1.0" + tslib "^2.0.0" + +util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + +util.promisify@~1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.1.tgz#6baf7774b80eeb0f7520d8b81d07982a59abbaee" + integrity sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.2" + has-symbols "^1.0.1" + object.getownpropertydescriptors "^2.1.0" + +utila@~0.4: + version "0.4.0" + resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" + integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= + +utils-merge@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= + +uuid@^8.3.0, uuid@^8.3.2: + version "8.3.2" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" + integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== + +v8-compile-cache@^2.0.3: + version "2.3.0" + resolved 
"https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" + integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== + +v8-to-istanbul@^8.1.0: + version "8.1.1" + resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz#77b752fd3975e31bbcef938f85e9bd1c7a8d60ed" + integrity sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w== + dependencies: + "@types/istanbul-lib-coverage" "^2.0.1" + convert-source-map "^1.6.0" + source-map "^0.7.3" + +vary@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= + +vfile-location@^3.0.0, vfile-location@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/vfile-location/-/vfile-location-3.2.0.tgz#d8e41fbcbd406063669ebf6c33d56ae8721d0f3c" + integrity sha512-aLEIZKv/oxuCDZ8lkJGhuhztf/BW4M+iHdCwglA/eWc+vtuRFJj8EtgceYFX4LRjOhCAAiNHsKGssC6onJ+jbA== + +vfile-message@*: + version "3.1.2" + resolved "https://registry.yarnpkg.com/vfile-message/-/vfile-message-3.1.2.tgz#a2908f64d9e557315ec9d7ea3a910f658ac05f7d" + integrity sha512-QjSNP6Yxzyycd4SVOtmKKyTsSvClqBPJcd00Z0zuPj3hOIjg0rUPG6DbFGPvUKRgYyaIWLPKpuEclcuvb3H8qA== + dependencies: + "@types/unist" "^2.0.0" + unist-util-stringify-position "^3.0.0" + +vfile-message@^2.0.0: + version "2.0.4" + resolved "https://registry.yarnpkg.com/vfile-message/-/vfile-message-2.0.4.tgz#5b43b88171d409eae58477d13f23dd41d52c371a" + integrity sha512-DjssxRGkMvifUOJre00juHoP9DPWuzjxKuMDrhNbk2TdaYYBNMStsNhEOt3idrtI12VQYM/1+iM0KOzXi4pxwQ== + dependencies: + "@types/unist" "^2.0.0" + unist-util-stringify-position "^2.0.0" + +vfile@^4.0.0, vfile@^4.2.0: + version "4.2.1" + resolved "https://registry.yarnpkg.com/vfile/-/vfile-4.2.1.tgz#03f1dce28fc625c625bc6514350fbdb00fa9e624" + integrity 
sha512-O6AE4OskCG5S1emQ/4gl8zK586RqA3srz3nfK/Viy0UPToBc5Trp9BVFb1u0CjsKrAWwnpr4ifM/KBXPWwJbCA== + dependencies: + "@types/unist" "^2.0.0" + is-buffer "^2.0.0" + unist-util-stringify-position "^2.0.0" + vfile-message "^2.0.0" + +w3c-hr-time@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" + integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== + dependencies: + browser-process-hrtime "^1.0.0" + +w3c-xmlserializer@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" + integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== + dependencies: + xml-name-validator "^3.0.0" + +walker@^1.0.7: + version "1.0.8" + resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== + dependencies: + makeerror "1.0.12" + +watchpack@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.3.1.tgz#4200d9447b401156eeca7767ee610f8809bc9d25" + integrity sha512-x0t0JuydIo8qCNctdDrn1OzH/qDzk2+rdCOC3YzumZ42fiMqmQ7T3xQurykYMhYfHaPHTp4ZxAx2NfUo1K6QaA== + dependencies: + glob-to-regexp "^0.4.1" + graceful-fs "^4.1.2" + +wbuf@^1.1.0, wbuf@^1.7.3: + version "1.7.3" + resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" + integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== + dependencies: + minimalistic-assert "^1.0.0" + +web-namespaces@^1.0.0: + version "1.1.4" + resolved "https://registry.yarnpkg.com/web-namespaces/-/web-namespaces-1.1.4.tgz#bc98a3de60dadd7faefc403d1076d529f5e030ec" + integrity 
sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw== + +web-vitals@^2.1.4: + version "2.1.4" + resolved "https://registry.yarnpkg.com/web-vitals/-/web-vitals-2.1.4.tgz#76563175a475a5e835264d373704f9dde718290c" + integrity sha512-sVWcwhU5mX6crfI5Vd2dC4qchyTqxV8URinzt25XqVh+bHEPGH4C3NPrNionCP7Obx59wrYEbNlw4Z8sjALzZg== + +webidl-conversions@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" + integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== + +webidl-conversions@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" + integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== + +webidl-conversions@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" + integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== + +webpack-dev-middleware@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.1.tgz#aa079a8dedd7e58bfeab358a9af7dab304cee57f" + integrity sha512-81EujCKkyles2wphtdrnPg/QqegC/AtqNH//mQkBYSMqwFVCQrxM6ktB2O/SPlZy7LqeEfTbV3cZARGQz6umhg== + dependencies: + colorette "^2.0.10" + memfs "^3.4.1" + mime-types "^2.1.31" + range-parser "^1.2.1" + schema-utils "^4.0.0" + +webpack-dev-server@^4.6.0: + version "4.9.0" + resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-4.9.0.tgz#737dbf44335bb8bde68f8f39127fc401c97a1557" + integrity sha512-+Nlb39iQSOSsFv0lWUuUTim3jDQO8nhK3E68f//J2r5rIcp4lULHXz2oZ0UVdEeWXEh5lSzYUlzarZhDAeAVQw== + dependencies: + "@types/bonjour" "^3.5.9" + 
"@types/connect-history-api-fallback" "^1.3.5" + "@types/express" "^4.17.13" + "@types/serve-index" "^1.9.1" + "@types/sockjs" "^0.3.33" + "@types/ws" "^8.5.1" + ansi-html-community "^0.0.8" + bonjour-service "^1.0.11" + chokidar "^3.5.3" + colorette "^2.0.10" + compression "^1.7.4" + connect-history-api-fallback "^1.6.0" + default-gateway "^6.0.3" + express "^4.17.3" + graceful-fs "^4.2.6" + html-entities "^2.3.2" + http-proxy-middleware "^2.0.3" + ipaddr.js "^2.0.1" + open "^8.0.9" + p-retry "^4.5.0" + rimraf "^3.0.2" + schema-utils "^4.0.0" + selfsigned "^2.0.1" + serve-index "^1.9.1" + sockjs "^0.3.21" + spdy "^4.0.2" + webpack-dev-middleware "^5.3.1" + ws "^8.4.2" + +webpack-manifest-plugin@^4.0.2: + version "4.1.1" + resolved "https://registry.yarnpkg.com/webpack-manifest-plugin/-/webpack-manifest-plugin-4.1.1.tgz#10f8dbf4714ff93a215d5a45bcc416d80506f94f" + integrity sha512-YXUAwxtfKIJIKkhg03MKuiFAD72PlrqCiwdwO4VEXdRO5V0ORCNwaOwAZawPZalCbmH9kBDmXnNeQOw+BIEiow== + dependencies: + tapable "^2.0.0" + webpack-sources "^2.2.0" + +webpack-sources@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" + integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== + dependencies: + source-list-map "^2.0.0" + source-map "~0.6.1" + +webpack-sources@^2.2.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-2.3.1.tgz#570de0af163949fe272233c2cefe1b56f74511fd" + integrity sha512-y9EI9AO42JjEcrTJFOYmVywVZdKVUfOvDUPsJea5GIr1JOEGFVqwlY2K098fFoIjOkDzHn2AjRvM8dsBZu+gCA== + dependencies: + source-list-map "^2.0.1" + source-map "^0.6.1" + +webpack-sources@^3.2.3: + version "3.2.3" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" + integrity 
sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== + +webpack@^5.64.4: + version "5.72.1" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.72.1.tgz#3500fc834b4e9ba573b9f430b2c0a61e1bb57d13" + integrity sha512-dXG5zXCLspQR4krZVR6QgajnZOjW2K/djHvdcRaDQvsjV9z9vaW6+ja5dZOYbqBBjF6kGXka/2ZyxNdc+8Jung== + dependencies: + "@types/eslint-scope" "^3.7.3" + "@types/estree" "^0.0.51" + "@webassemblyjs/ast" "1.11.1" + "@webassemblyjs/wasm-edit" "1.11.1" + "@webassemblyjs/wasm-parser" "1.11.1" + acorn "^8.4.1" + acorn-import-assertions "^1.7.6" + browserslist "^4.14.5" + chrome-trace-event "^1.0.2" + enhanced-resolve "^5.9.3" + es-module-lexer "^0.9.0" + eslint-scope "5.1.1" + events "^3.2.0" + glob-to-regexp "^0.4.1" + graceful-fs "^4.2.9" + json-parse-even-better-errors "^2.3.1" + loader-runner "^4.2.0" + mime-types "^2.1.27" + neo-async "^2.6.2" + schema-utils "^3.1.0" + tapable "^2.1.1" + terser-webpack-plugin "^5.1.3" + watchpack "^2.3.1" + webpack-sources "^3.2.3" + +websocket-driver@>=0.5.1, websocket-driver@^0.7.4: + version "0.7.4" + resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.4.tgz#89ad5295bbf64b480abcba31e4953aca706f5760" + integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== + dependencies: + http-parser-js ">=0.5.1" + safe-buffer ">=5.1.0" + websocket-extensions ">=0.1.1" + +websocket-extensions@>=0.1.1: + version "0.1.4" + resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" + integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== + +whatwg-encoding@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" + integrity 
sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== + dependencies: + iconv-lite "0.4.24" + +whatwg-fetch@^3.6.2: + version "3.6.2" + resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" + integrity sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== + +whatwg-mimetype@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" + integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== + +whatwg-url@^7.0.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" + integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== + dependencies: + lodash.sortby "^4.7.0" + tr46 "^1.0.1" + webidl-conversions "^4.0.2" + +whatwg-url@^8.0.0, whatwg-url@^8.5.0: + version "8.7.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.7.0.tgz#656a78e510ff8f3937bc0bcbe9f5c0ac35941b77" + integrity sha512-gAojqb/m9Q8a5IV96E3fHJM70AzCkgt4uXYX2O7EmuyOnLrViCQlsEBmF9UQIu3/aeAIp2U17rtbpZWNntQqdg== + dependencies: + lodash "^4.7.0" + tr46 "^2.1.0" + webidl-conversions "^6.1.0" + +which-boxed-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +which@^1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity 
sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: + isexe "^2.0.0" + +which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +word-wrap@^1.2.3, word-wrap@~1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +workbox-background-sync@6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-background-sync/-/workbox-background-sync-6.5.3.tgz#7c66c1836aeca6f3762dc48d17a1852a33b3168c" + integrity sha512-0DD/V05FAcek6tWv9XYj2w5T/plxhDSpclIcAGjA/b7t/6PdaRkQ7ZgtAX6Q/L7kV7wZ8uYRJUoH11VjNipMZw== + dependencies: + idb "^6.1.4" + workbox-core "6.5.3" + +workbox-broadcast-update@6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-broadcast-update/-/workbox-broadcast-update-6.5.3.tgz#fc2ad79cf507e22950cda9baf1e9a0ccc43f31bc" + integrity sha512-4AwCIA5DiDrYhlN+Miv/fp5T3/whNmSL+KqhTwRBTZIL6pvTgE4lVuRzAt1JltmqyMcQ3SEfCdfxczuI4kwFQg== + dependencies: + workbox-core "6.5.3" + +workbox-build@6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-build/-/workbox-build-6.5.3.tgz#38e3f286d63d2745bff4d1478bb3a6ab5c8b1170" + integrity sha512-8JNHHS7u13nhwIYCDea9MNXBNPHXCs5KDZPKI/ZNTr3f4sMGoD7hgFGecbyjX1gw4z6e9bMpMsOEJNyH5htA/w== + dependencies: + "@apideck/better-ajv-errors" "^0.3.1" + "@babel/core" "^7.11.1" + "@babel/preset-env" "^7.11.0" + "@babel/runtime" "^7.11.2" + "@rollup/plugin-babel" "^5.2.0" + "@rollup/plugin-node-resolve" "^11.2.1" + "@rollup/plugin-replace" "^2.4.1" + "@surma/rollup-plugin-off-main-thread" "^2.2.3" + ajv "^8.6.0" + 
common-tags "^1.8.0" + fast-json-stable-stringify "^2.1.0" + fs-extra "^9.0.1" + glob "^7.1.6" + lodash "^4.17.20" + pretty-bytes "^5.3.0" + rollup "^2.43.1" + rollup-plugin-terser "^7.0.0" + source-map "^0.8.0-beta.0" + stringify-object "^3.3.0" + strip-comments "^2.0.1" + tempy "^0.6.0" + upath "^1.2.0" + workbox-background-sync "6.5.3" + workbox-broadcast-update "6.5.3" + workbox-cacheable-response "6.5.3" + workbox-core "6.5.3" + workbox-expiration "6.5.3" + workbox-google-analytics "6.5.3" + workbox-navigation-preload "6.5.3" + workbox-precaching "6.5.3" + workbox-range-requests "6.5.3" + workbox-recipes "6.5.3" + workbox-routing "6.5.3" + workbox-strategies "6.5.3" + workbox-streams "6.5.3" + workbox-sw "6.5.3" + workbox-window "6.5.3" + +workbox-cacheable-response@6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-cacheable-response/-/workbox-cacheable-response-6.5.3.tgz#b1f8c2bc599a7be8f7e3c262535629c558738e47" + integrity sha512-6JE/Zm05hNasHzzAGKDkqqgYtZZL2H06ic2GxuRLStA4S/rHUfm2mnLFFXuHAaGR1XuuYyVCEey1M6H3PdZ7SQ== + dependencies: + workbox-core "6.5.3" + +workbox-core@6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-core/-/workbox-core-6.5.3.tgz#bca038a9ef0d7a634a6db2a60f45313ed22ac249" + integrity sha512-Bb9ey5n/M9x+l3fBTlLpHt9ASTzgSGj6vxni7pY72ilB/Pb3XtN+cZ9yueboVhD5+9cNQrC9n/E1fSrqWsUz7Q== + +workbox-expiration@6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-expiration/-/workbox-expiration-6.5.3.tgz#efc0811f371a2ede1052b9de1c4f072b71d50503" + integrity sha512-jzYopYR1zD04ZMdlbn/R2Ik6ixiXbi15c9iX5H8CTi6RPDz7uhvMLZPKEndZTpfgmUk8mdmT9Vx/AhbuCl5Sqw== + dependencies: + idb "^6.1.4" + workbox-core "6.5.3" + +workbox-google-analytics@6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-google-analytics/-/workbox-google-analytics-6.5.3.tgz#cc8c3a61f449131660a4ed2f5362d9a3599b18fe" + integrity 
sha512-3GLCHotz5umoRSb4aNQeTbILETcrTVEozSfLhHSBaegHs1PnqCmN0zbIy2TjTpph2AGXiNwDrWGF0AN+UgDNTw== + dependencies: + workbox-background-sync "6.5.3" + workbox-core "6.5.3" + workbox-routing "6.5.3" + workbox-strategies "6.5.3" + +workbox-navigation-preload@6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-navigation-preload/-/workbox-navigation-preload-6.5.3.tgz#81b74f598b11aa07e2cf1c21af7a826a4f0f70b3" + integrity sha512-bK1gDFTc5iu6lH3UQ07QVo+0ovErhRNGvJJO/1ngknT0UQ702nmOUhoN9qE5mhuQSrnK+cqu7O7xeaJ+Rd9Tmg== + dependencies: + workbox-core "6.5.3" + +workbox-precaching@6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-precaching/-/workbox-precaching-6.5.3.tgz#c870312b2ef901d790ab9e48da084e776c62af47" + integrity sha512-sjNfgNLSsRX5zcc63H/ar/hCf+T19fRtTqvWh795gdpghWb5xsfEkecXEvZ8biEi1QD7X/ljtHphdaPvXDygMQ== + dependencies: + workbox-core "6.5.3" + workbox-routing "6.5.3" + workbox-strategies "6.5.3" + +workbox-range-requests@6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-range-requests/-/workbox-range-requests-6.5.3.tgz#e624ac82ff266a5e4f236d055797def07949d941" + integrity sha512-pGCP80Bpn/0Q0MQsfETSfmtXsQcu3M2QCJwSFuJ6cDp8s2XmbUXkzbuQhCUzKR86ZH2Vex/VUjb2UaZBGamijA== + dependencies: + workbox-core "6.5.3" + +workbox-recipes@6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-recipes/-/workbox-recipes-6.5.3.tgz#15beac9d8ae7a3a1c100218094a824b4dd3fd59a" + integrity sha512-IcgiKYmbGiDvvf3PMSEtmwqxwfQ5zwI7OZPio3GWu4PfehA8jI8JHI3KZj+PCfRiUPZhjQHJ3v1HbNs+SiSkig== + dependencies: + workbox-cacheable-response "6.5.3" + workbox-core "6.5.3" + workbox-expiration "6.5.3" + workbox-precaching "6.5.3" + workbox-routing "6.5.3" + workbox-strategies "6.5.3" + +workbox-routing@6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-routing/-/workbox-routing-6.5.3.tgz#a0a699d8cc90b5692bd3df24679acbbda3913777" + integrity 
sha512-DFjxcuRAJjjt4T34RbMm3MCn+xnd36UT/2RfPRfa8VWJGItGJIn7tG+GwVTdHmvE54i/QmVTJepyAGWtoLPTmg== + dependencies: + workbox-core "6.5.3" + +workbox-strategies@6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-strategies/-/workbox-strategies-6.5.3.tgz#4bea9a48fee16cf43766e0d8138296773c8a9783" + integrity sha512-MgmGRrDVXs7rtSCcetZgkSZyMpRGw8HqL2aguszOc3nUmzGZsT238z/NN9ZouCxSzDu3PQ3ZSKmovAacaIhu1w== + dependencies: + workbox-core "6.5.3" + +workbox-streams@6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-streams/-/workbox-streams-6.5.3.tgz#b6860290031caa7d0e46ad7142315c94359c780b" + integrity sha512-vN4Qi8o+b7zj1FDVNZ+PlmAcy1sBoV7SC956uhqYvZ9Sg1fViSbOpydULOssVJ4tOyKRifH/eoi6h99d+sJ33w== + dependencies: + workbox-core "6.5.3" + workbox-routing "6.5.3" + +workbox-sw@6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-sw/-/workbox-sw-6.5.3.tgz#cd2f0c086f4496acd25774ed02c48504189bebdd" + integrity sha512-BQBzm092w+NqdIEF2yhl32dERt9j9MDGUTa2Eaa+o3YKL4Qqw55W9yQC6f44FdAHdAJrJvp0t+HVrfh8AiGj8A== + +workbox-webpack-plugin@^6.4.1: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-webpack-plugin/-/workbox-webpack-plugin-6.5.3.tgz#c37bb323be4952311565c07db51054fe59c87d73" + integrity sha512-Es8Xr02Gi6Kc3zaUwR691ZLy61hz3vhhs5GztcklQ7kl5k2qAusPh0s6LF3wEtlpfs9ZDErnmy5SErwoll7jBA== + dependencies: + fast-json-stable-stringify "^2.1.0" + pretty-bytes "^5.4.1" + upath "^1.2.0" + webpack-sources "^1.4.3" + workbox-build "6.5.3" + +workbox-window@6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/workbox-window/-/workbox-window-6.5.3.tgz#4ade70056cb73477ef1cd8fea7cfd0ecbd825c7f" + integrity sha512-GnJbx1kcKXDtoJBVZs/P7ddP0Yt52NNy4nocjBpYPiRhMqTpJCNrSL+fGHZ/i/oP6p/vhE8II0sA6AZGKGnssw== + dependencies: + "@types/trusted-types" "^2.0.2" + workbox-core "6.5.3" + +wrap-ansi@^7.0.0: + version "7.0.0" + resolved 
"https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + +write-file-atomic@^3.0.0: + version "3.0.3" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" + integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== + dependencies: + imurmurhash "^0.1.4" + is-typedarray "^1.0.0" + signal-exit "^3.0.2" + typedarray-to-buffer "^3.1.5" + +ws@^7.4.6: + version "7.5.7" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.7.tgz#9e0ac77ee50af70d58326ecff7e85eb3fa375e67" + integrity sha512-KMvVuFzpKBuiIXW3E4u3mySRO2/mCHSyZDJQM5NQ9Q9KHWHWh0NHgfbRMLLrceUK5qAL4ytALJbpRMjixFZh8A== + +ws@^8.4.2: + version "8.6.0" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.6.0.tgz#e5e9f1d9e7ff88083d0c0dd8281ea662a42c9c23" + integrity sha512-AzmM3aH3gk0aX7/rZLYvjdvZooofDu3fFOzGqcSnQ1tOcTWwhM/o+q++E8mAyVVIyUdajrkzWUGftaVSDLn1bw== + +xml-name-validator@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" + integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== + +xmlchars@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" + integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== + +xtend@^4.0.0, xtend@^4.0.1, xtend@^4.0.2: + version "4.0.2" + resolved 
"https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" + integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== + +y18n@^5.0.5: + version "5.0.8" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: + version "1.10.2" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" + integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== + +yargs-parser@^20.2.2: + version "20.2.9" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" + integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== + +yargs@^16.2.0: + version "16.2.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" + integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== + dependencies: + cliui "^7.0.2" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.0" + y18n "^5.0.5" + yargs-parser "^20.2.2" + +yocto-queue@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== + +zod@^3.11.6, zod@^3.15.1: + version 
"3.15.1" + resolved "https://registry.yarnpkg.com/zod/-/zod-3.15.1.tgz#9e404cd8002ccffb03baa94cff2e1638ed49d82f" + integrity sha512-WAdjcoOxa4S9oc/u7fTbC3CC7uVqptLLU0LKqS8RDBOrCXp2t5avM8BUfgNVZJymGWAx6SEUYxWPPoYuQ5rgwQ== + +zwitch@^1.0.0: + version "1.0.5" + resolved "https://registry.yarnpkg.com/zwitch/-/zwitch-1.0.5.tgz#d11d7381ffed16b742f6af7b3f223d5cd9fe9920" + integrity sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw== diff --git a/sdk/python/feast/ui_server.py b/sdk/python/feast/ui_server.py new file mode 100644 index 00000000000..cc546f53716 --- /dev/null +++ b/sdk/python/feast/ui_server.py @@ -0,0 +1,102 @@ +import json +import threading +from typing import Callable, Optional + +import pkg_resources +import uvicorn +from fastapi import FastAPI, Response +from fastapi.middleware.cors import CORSMiddleware +from fastapi.staticfiles import StaticFiles + +import feast + + +def get_app( + store: "feast.FeatureStore", + get_registry_dump: Callable, + project_id: str, + registry_ttl_secs: int, +): + ui_dir = pkg_resources.resource_filename(__name__, "ui/build/") + + app = FastAPI() + + app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + + # Asynchronously refresh registry, notifying shutdown and canceling the active timer if the app is shutting down + registry_json = "" + shutting_down = False + active_timer: Optional[threading.Timer] = None + + def async_refresh(): + store.refresh_registry() + nonlocal registry_json + registry_json = get_registry_dump(store.config, store.repo_path) + if shutting_down: + return + nonlocal active_timer + active_timer = threading.Timer(registry_ttl_secs, async_refresh) + active_timer.start() + + @app.on_event("shutdown") + def shutdown_event(): + nonlocal shutting_down + shutting_down = True + if active_timer: + active_timer.cancel() + + async_refresh() + + @app.get("/registry") + def read_registry(): 
+ return json.loads(registry_json) + + # Generate projects-list json that points to the current repo's project + # TODO(adchia): Enable users to also add project name + description fields in feature_store.yaml + @app.get("/projects-list") + def projects_list(): + projects = { + "projects": [ + { + "name": "Project", + "description": "Test project", + "id": project_id, + "registryPath": "http://0.0.0.0:8888/registry", + } + ] + } + return projects + + # For all other paths (such as paths that would otherwise be handled by react router), pass to React + @app.api_route("/p/{path_name:path}", methods=["GET"]) + def catch_all(): + filename = ui_dir + "index.html" + + with open(filename) as f: + content = f.read() + + return Response(content, media_type="text/html") + + app.mount( + "/", StaticFiles(directory=ui_dir, html=True), name="site", + ) + + return app + + +def start_server( + store: "feast.FeatureStore", + host: str, + port: int, + get_registry_dump: Callable, + project_id: str, + registry_ttl_sec: int, +): + app = get_app(store, get_registry_dump, project_id, registry_ttl_sec) + uvicorn.run(app, host=host, port=port) diff --git a/sdk/python/feast/version.py b/sdk/python/feast/version.py index cc5190e3392..3e42643ccbe 100644 --- a/sdk/python/feast/version.py +++ b/sdk/python/feast/version.py @@ -1,10 +1,13 @@ -import pkg_resources +try: + from importlib.metadata import PackageNotFoundError, version +except ModuleNotFoundError: + from importlib_metadata import PackageNotFoundError, version # type: ignore def get_version(): """Returns version information of the Feast Python Package.""" try: - sdk_version = pkg_resources.get_distribution("feast").version - except pkg_resources.DistributionNotFound: + sdk_version = version("feast") + except PackageNotFoundError: sdk_version = "unknown" return sdk_version diff --git a/sdk/python/requirements/py3.10-ci-requirements.txt b/sdk/python/requirements/py3.10-ci-requirements.txt index e120f8c58e4..0655eb9c769 100644 --- 
a/sdk/python/requirements/py3.10-ci-requirements.txt +++ b/sdk/python/requirements/py3.10-ci-requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with python 3.10 # To update, run: # -# pip-compile --extra=ci --output-file=requirements/py3.10-ci-requirements.txt +# pip-compile --extra=ci --output-file=sdk/python/requirements/py3.10-ci-requirements.txt # absl-py==1.0.0 # via tensorflow-metadata @@ -23,20 +23,16 @@ alabaster==0.7.12 altair==4.2.0 # via great-expectations anyio==3.5.0 - # via starlette + # via + # starlette + # watchgod appdirs==1.4.4 # via black -appnope==0.1.2 - # via - # ipykernel - # ipython -argon2-cffi==21.3.0 - # via notebook -argon2-cffi-bindings==21.2.0 - # via argon2-cffi -asgiref==3.5.0 +appnope==0.1.3 + # via ipython +asgiref==3.5.1 # via uvicorn -asn1crypto==1.4.0 +asn1crypto==1.5.1 # via # oscrypto # snowflake-connector-python @@ -45,7 +41,9 @@ assertpy==1.1 asttokens==2.0.5 # via stack-data async-timeout==4.0.2 - # via aiohttp + # via + # aiohttp + # redis attrs==21.4.0 # via # aiohttp @@ -54,35 +52,35 @@ attrs==21.4.0 # pytest avro==1.10.0 # via feast (setup.py) -azure-core==1.23.0 +azure-core==1.23.1 # via # adlfs # azure-identity # azure-storage-blob azure-datalake-store==0.0.52 # via adlfs -azure-identity==1.8.0 +azure-identity==1.10.0 # via adlfs -azure-storage-blob==12.9.0 +azure-storage-blob==12.11.0 # via adlfs -babel==2.9.1 +babel==2.10.1 # via sphinx backcall==0.2.0 # via ipython black==19.10b0 # via feast (setup.py) -bleach==4.1.0 - # via nbconvert -boto3==1.21.11 +boto3==1.22.8 # via # feast (setup.py) # moto -botocore==1.24.11 +botocore==1.25.8 # via # boto3 # moto # s3transfer -cachecontrol==0.12.10 +build==0.7.0 + # via feast (setup.py) +cachecontrol==0.12.11 # via firebase-admin cachetools==4.2.4 # via google-auth @@ -94,7 +92,6 @@ certifi==2021.10.8 # snowflake-connector-python cffi==1.15.0 # via - # argon2-cffi-bindings # azure-datalake-store # cryptography # snowflake-connector-python @@ -105,7 
+102,7 @@ charset-normalizer==2.0.12 # aiohttp # requests # snowflake-connector-python -click==8.0.4 +click==8.0.1 # via # black # feast (setup.py) @@ -115,30 +112,32 @@ click==8.0.4 cloudpickle==2.0.0 # via dask colorama==0.4.4 - # via feast (setup.py) + # via + # feast (setup.py) + # great-expectations coverage[toml]==6.3.2 # via pytest-cov -cryptography==3.3.2 +cryptography==3.4.8 # via # adal # azure-identity # azure-storage-blob # feast (setup.py) + # great-expectations # moto # msal - # pyjwt # pyopenssl # snowflake-connector-python dask==2022.1.1 # via feast (setup.py) -debugpy==1.5.1 - # via ipykernel +dataclasses==0.6 + # via great-expectations decorator==5.1.1 # via # gcsfs # ipython -defusedxml==0.7.1 - # via nbconvert +deprecated==1.2.13 + # via redis deprecation==2.1.0 # via testcontainers dill==0.3.4 @@ -154,20 +153,19 @@ docutils==0.17.1 # sphinx # sphinx-rtd-theme entrypoints==0.4 - # via - # altair - # jupyter-client - # nbconvert + # via altair execnet==1.9.0 # via pytest-xdist executing==0.8.3 # via stack-data -fastapi==0.74.1 +fastapi==0.76.0 # via feast (setup.py) -fastavro==1.4.9 +fastavro==1.4.11 # via # feast (setup.py) # pandavro +fastjsonschema==2.15.3 + # via nbformat filelock==3.6.0 # via virtualenv firebase-admin==4.5.2 @@ -178,12 +176,12 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal -fsspec==2022.2.0 +fsspec==2022.3.0 # via # adlfs # dask # gcsfs -gcsfs==2022.2.0 +gcsfs==2022.3.0 # via feast (setup.py) google-api-core[grpc]==1.31.5 # via @@ -195,7 +193,7 @@ google-api-core[grpc]==1.31.5 # google-cloud-core # google-cloud-datastore # google-cloud-firestore -google-api-python-client==2.39.0 +google-api-python-client==2.47.0 # via firebase-admin google-auth==1.35.0 # via @@ -208,11 +206,11 @@ google-auth==1.35.0 # google-cloud-storage google-auth-httplib2==0.1.0 # via google-api-python-client -google-auth-oauthlib==0.5.0 +google-auth-oauthlib==0.5.1 # via gcsfs -google-cloud-bigquery==2.34.1 +google-cloud-bigquery==2.34.3 # via feast 
(setup.py) -google-cloud-bigquery-storage==2.12.0 +google-cloud-bigquery-storage==2.13.1 # via feast (setup.py) google-cloud-core==1.7.2 # via @@ -221,9 +219,9 @@ google-cloud-core==1.7.2 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-cloud-datastore==2.5.0 +google-cloud-datastore==2.5.1 # via feast (setup.py) -google-cloud-firestore==2.3.4 +google-cloud-firestore==2.4.0 # via firebase-admin google-cloud-storage==1.40.0 # via @@ -241,9 +239,9 @@ googleapis-common-protos==1.52.0 # feast (setup.py) # google-api-core # tensorflow-metadata -great-expectations==0.14.8 +great-expectations==0.14.13 # via feast (setup.py) -grpcio==1.44.0 +grpcio==1.46.0 # via # feast (setup.py) # google-api-core @@ -251,7 +249,7 @@ grpcio==1.44.0 # grpcio-reflection # grpcio-testing # grpcio-tools -grpcio-reflection==1.44.0 +grpcio-reflection==1.46.0 # via feast (setup.py) grpcio-testing==1.44.0 # via feast (setup.py) @@ -259,15 +257,17 @@ grpcio-tools==1.44.0 # via feast (setup.py) h11==0.13.0 # via uvicorn +happybase==1.2.0 + # via feast (setup.py) hiredis==2.0.0 # via feast (setup.py) httplib2==0.20.4 # via # google-api-python-client # google-auth-httplib2 -httptools==0.3.0 +httptools==0.4.0 # via uvicorn -identify==2.4.11 +identify==2.5.0 # via pre-commit idna==3.3 # via @@ -277,24 +277,11 @@ idna==3.3 # yarl imagesize==1.3.0 # via sphinx -importlib-metadata==4.11.2 +importlib-metadata==4.11.3 # via great-expectations iniconfig==1.1.1 # via pytest -ipykernel==6.9.1 - # via - # ipywidgets - # notebook -ipython==8.1.1 - # via - # ipykernel - # ipywidgets -ipython-genutils==0.2.0 - # via - # ipywidgets - # nbformat - # notebook -ipywidgets==7.6.5 +ipython==8.3.0 # via great-expectations isodate==0.6.1 # via msrest @@ -308,67 +295,48 @@ jinja2==3.0.3 # feast (setup.py) # great-expectations # moto - # nbconvert - # notebook # sphinx -jmespath==0.10.0 +jmespath==1.0.0 # via # boto3 # botocore jsonpatch==1.32 # via great-expectations -jsonpointer==2.2 
+jsonpointer==2.3 # via jsonpatch -jsonschema==4.4.0 +jsonschema==4.5.1 # via # altair # feast (setup.py) # great-expectations # nbformat -jupyter-client==7.1.2 - # via - # ipykernel - # nbclient - # notebook -jupyter-core==4.9.2 - # via - # jupyter-client - # nbconvert - # nbformat - # notebook -jupyterlab-pygments==0.1.2 - # via nbconvert -jupyterlab-widgets==1.0.2 - # via ipywidgets -locket==0.2.1 +jupyter-core==4.10.0 + # via nbformat +locket==1.0.0 # via partd -markupsafe==2.1.0 +markupsafe==2.1.1 # via # jinja2 # moto matplotlib-inline==0.1.3 - # via - # ipykernel - # ipython + # via ipython mccabe==0.6.1 # via flake8 minio==7.1.0 # via feast (setup.py) -mistune==0.8.4 - # via - # great-expectations - # nbconvert +mistune==2.0.2 + # via great-expectations mmh3==3.0.0 # via feast (setup.py) mock==2.0.0 # via feast (setup.py) -moto==3.0.5 +moto==3.1.7 # via feast (setup.py) msal==1.17.0 # via # azure-identity # msal-extensions -msal-extensions==0.3.1 +msal-extensions==1.0.0 # via azure-identity msgpack==1.0.3 # via cachecontrol @@ -386,31 +354,16 @@ mypy==0.931 # via feast (setup.py) mypy-extensions==0.4.3 # via mypy -mypy-protobuf==3.1.0 - # via feast (setup.py) -nbclient==0.5.11 - # via nbconvert -nbconvert==6.4.2 - # via notebook -nbformat==5.1.3 - # via - # ipywidgets - # nbclient - # nbconvert - # notebook -nest-asyncio==1.5.4 - # via - # ipykernel - # jupyter-client - # nbclient - # notebook +mypy-protobuf==3.1 + # via feast (setup.py) +nbformat==5.4.0 + # via great-expectations nodeenv==1.6.0 # via pre-commit -notebook==6.4.10 - # via widgetsnbextension -numpy==1.22.2 +numpy==1.21.6 # via # altair + # feast (setup.py) # great-expectations # pandas # pandavro @@ -418,19 +371,20 @@ numpy==1.22.2 # scipy oauthlib==3.2.0 # via requests-oauthlib -oscrypto==1.2.1 +oscrypto==1.3.0 # via snowflake-connector-python packaging==21.3 # via - # bleach + # build # dask # deprecation # google-api-core # google-cloud-bigquery - # google-cloud-firestore + # 
great-expectations # pytest + # redis # sphinx -pandas==1.3.5 +pandas==1.4.2 # via # altair # feast (setup.py) @@ -439,35 +393,35 @@ pandas==1.3.5 # snowflake-connector-python pandavro==1.5.2 # via feast (setup.py) -pandocfilters==1.5.0 - # via nbconvert parso==0.8.3 # via jedi partd==1.2.0 # via dask pathspec==0.9.0 # via black -pbr==5.8.1 +pbr==5.9.0 # via mock pep517==0.12.0 - # via pip-tools + # via + # build + # pip-tools pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==6.5.1 +pip-tools==6.6.0 # via feast (setup.py) -platformdirs==2.5.1 +platformdirs==2.5.2 # via virtualenv pluggy==1.0.0 # via pytest +ply==3.11 + # via thriftpy2 portalocker==2.4.0 # via msal-extensions -pre-commit==2.17.0 +pre-commit==2.19.0 # via feast (setup.py) -prometheus-client==0.13.1 - # via notebook -prompt-toolkit==3.0.28 +prompt-toolkit==3.0.29 # via ipython proto-plus==1.19.6 # via @@ -490,10 +444,10 @@ protobuf==3.19.4 # tensorflow-metadata psutil==5.9.0 # via feast (setup.py) +psycopg2-binary==2.9.3 + # via feast (setup.py) ptyprocess==0.7.0 - # via - # pexpect - # terminado + # via pexpect pure-eval==0.2.2 # via stack-data py==1.11.0 @@ -514,7 +468,7 @@ pyasn1==0.4.8 # rsa pyasn1-modules==0.2.8 # via google-auth -pybindgen==0.22.0 +pybindgen==0.22.1 # via feast (setup.py) pycodestyle==2.8.0 # via flake8 @@ -528,11 +482,9 @@ pydantic==1.9.0 # feast (setup.py) pyflakes==2.4.0 # via flake8 -pygments==2.11.2 +pygments==2.12.0 # via # ipython - # jupyterlab-pygments - # nbconvert # sphinx pyjwt[crypto]==2.3.0 # via @@ -550,7 +502,7 @@ pyrsistent==0.18.1 # via jsonschema pyspark==3.2.1 # via feast (setup.py) -pytest==7.0.1 +pytest==7.1.2 # via # feast (setup.py) # pytest-benchmark @@ -583,12 +535,11 @@ python-dateutil==2.8.2 # botocore # google-cloud-bigquery # great-expectations - # jupyter-client # moto # pandas -python-dotenv==0.19.2 +python-dotenv==0.20.0 # via uvicorn -pytz==2021.3 +pytz==2022.1 # via # babel # google-api-core @@ -596,6 +547,7 @@ 
pytz==2021.3 # moto # pandas # snowflake-connector-python + # trino pytz-deprecation-shim==0.1.0.post0 # via tzlocal pyyaml==6.0 @@ -604,17 +556,9 @@ pyyaml==6.0 # feast (setup.py) # pre-commit # uvicorn -pyzmq==22.3.0 - # via - # jupyter-client - # notebook -redis==3.5.3 - # via - # feast (setup.py) - # redis-py-cluster -redis-py-cluster==2.1.3 +redis==4.2.2 # via feast (setup.py) -regex==2022.3.2 +regex==2022.4.24 # via black requests==2.27.1 # via @@ -636,39 +580,33 @@ requests==2.27.1 # responses # snowflake-connector-python # sphinx + # trino requests-oauthlib==1.3.1 # via # google-auth-oauthlib # msrest -responses==0.18.0 +responses==0.20.0 # via moto rsa==4.8 # via google-auth -ruamel.yaml==0.17.17 +ruamel-yaml==0.17.17 # via great-expectations -ruamel.yaml.clib==0.2.6 - # via ruamel.yaml s3transfer==0.5.2 # via boto3 scipy==1.8.0 # via great-expectations -send2trash==1.8.0 - # via notebook six==1.16.0 # via # absl-py - # asttokens # azure-core # azure-identity - # bleach - # cryptography # google-api-core # google-auth # google-auth-httplib2 # google-cloud-core # google-resumable-media # grpcio - # isodate + # happybase # mock # msrestazure # pandavro @@ -679,7 +617,7 @@ sniffio==1.2.0 # via anyio snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==2.7.4 +snowflake-connector-python[pandas]==2.7.7 # via feast (setup.py) sphinx==4.3.2 # via @@ -701,7 +639,7 @@ sphinxcontrib-serializinghtml==1.1.5 # via sphinx stack-data==0.2.0 # via ipython -starlette==0.17.1 +starlette==0.18.0 # via fastapi tabulate==0.8.9 # via feast (setup.py) @@ -711,12 +649,10 @@ tensorflow-metadata==1.7.0 # via feast (setup.py) termcolor==1.1.0 # via great-expectations -terminado==0.13.2 - # via notebook -testcontainers==3.4.2 +testcontainers==3.5.3 # via feast (setup.py) -testpath==0.6.0 - # via nbconvert +thriftpy2==0.4.14 + # via happybase toml==0.10.2 # via # black @@ -724,6 +660,7 @@ toml==0.10.2 # pre-commit tomli==2.0.1 # via + # build # coverage # mypy # 
pep517 @@ -733,98 +670,87 @@ toolz==0.11.2 # altair # dask # partd -tornado==6.1 - # via - # ipykernel - # jupyter-client - # notebook - # terminado -tqdm==4.63.0 +tqdm==4.64.0 # via # feast (setup.py) # great-expectations traitlets==5.1.1 # via - # ipykernel # ipython - # ipywidgets - # jupyter-client # jupyter-core # matplotlib-inline - # nbclient - # nbconvert # nbformat - # notebook -typed-ast==1.5.2 +trino==0.313.0 + # via feast (setup.py) +typed-ast==1.5.3 # via black -types-protobuf==3.19.12 +types-protobuf==3.19.18 # via # feast (setup.py) # mypy-protobuf -types-python-dateutil==2.8.9 +types-python-dateutil==2.8.14 # via feast (setup.py) -types-pytz==2021.3.5 +types-pytz==2021.3.7 # via feast (setup.py) -types-pyyaml==6.0.4 +types-pyyaml==6.0.7 # via feast (setup.py) -types-redis==4.1.17 +types-redis==4.2.1 # via feast (setup.py) -types-requests==2.27.11 +types-requests==2.27.25 # via feast (setup.py) -types-setuptools==57.4.9 +types-setuptools==57.4.14 # via feast (setup.py) -types-tabulate==0.8.5 +types-tabulate==0.8.8 # via feast (setup.py) -types-urllib3==1.26.10 +types-urllib3==1.26.14 # via types-requests -typing-extensions==4.1.1 +typing-extensions==4.2.0 # via # azure-core # great-expectations # mypy # pydantic -tzdata==2021.5 +tzdata==2022.1 # via pytz-deprecation-shim -tzlocal==4.1 +tzlocal==4.2 # via great-expectations uritemplate==4.1.1 # via google-api-python-client -urllib3==1.26.8 +urllib3==1.26.9 # via # botocore # feast (setup.py) + # great-expectations # minio # requests # responses -uvicorn[standard]==0.17.5 +uvicorn[standard]==0.17.6 # via feast (setup.py) uvloop==0.16.0 # via uvicorn -virtualenv==20.13.2 +virtualenv==20.14.1 # via pre-commit -watchgod==0.7 +watchgod==0.8.2 # via uvicorn wcwidth==0.2.5 # via prompt-toolkit -webencodings==0.5.1 - # via bleach -websocket-client==1.3.1 +websocket-client==1.3.2 # via docker -websockets==10.2 +websockets==10.3 # via uvicorn -werkzeug==2.0.3 +werkzeug==2.1.2 # via moto wheel==0.37.1 # via 
pip-tools -widgetsnbextension==3.5.2 - # via ipywidgets -wrapt==1.13.3 - # via testcontainers +wrapt==1.14.1 + # via + # deprecated + # testcontainers xmltodict==0.12.0 # via moto yarl==1.7.2 # via aiohttp -zipp==3.7.0 +zipp==3.8.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: diff --git a/sdk/python/requirements/py3.10-requirements.txt b/sdk/python/requirements/py3.10-requirements.txt index e21a4ba4129..369c3b8e60b 100644 --- a/sdk/python/requirements/py3.10-requirements.txt +++ b/sdk/python/requirements/py3.10-requirements.txt @@ -2,13 +2,15 @@ # This file is autogenerated by pip-compile with python 3.10 # To update, run: # -# pip-compile --output-file=requirements/py3.10-requirements.txt +# pip-compile --output-file=sdk/python/requirements/py3.10-requirements.txt # absl-py==1.0.0 # via tensorflow-metadata anyio==3.5.0 - # via starlette -asgiref==3.4.1 + # via + # starlette + # watchgod +asgiref==3.5.1 # via uvicorn attrs==21.4.0 # via jsonschema @@ -16,9 +18,9 @@ cachetools==4.2.4 # via google-auth certifi==2021.10.8 # via requests -charset-normalizer==2.0.10 +charset-normalizer==2.0.12 # via requests -click==8.0.3 +click==8.0.1 # via # feast (setup.py) # uvicorn @@ -30,55 +32,58 @@ dask==2022.1.1 # via feast (setup.py) dill==0.3.4 # via feast (setup.py) -fastapi==0.72.0 +fastapi==0.76.0 # via feast (setup.py) -fastavro==1.4.9 +fastavro==1.4.11 # via # feast (setup.py) # pandavro -fsspec==2022.2.0 +fsspec==2022.3.0 # via dask -google-api-core==2.4.0 +google-api-core==1.31.5 # via feast (setup.py) -google-auth==2.3.3 +google-auth==1.35.0 # via google-api-core googleapis-common-protos==1.52.0 # via # feast (setup.py) # google-api-core # tensorflow-metadata -grpcio==1.43.0 +grpcio==1.46.0 # via # feast (setup.py) # grpcio-reflection -grpcio-reflection==1.43.0 +grpcio-reflection==1.46.0 # via feast (setup.py) h11==0.13.0 # via uvicorn -httptools==0.3.0 +httptools==0.4.0 # via uvicorn idna==3.3 # via # anyio # 
requests -jinja2==3.0.3 +jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.4.0 +jsonschema==4.5.1 # via feast (setup.py) -locket==0.2.1 +locket==1.0.0 # via partd -markupsafe==2.0.1 +markupsafe==2.1.1 # via jinja2 mmh3==3.0.0 # via feast (setup.py) -numpy==1.21.5 +numpy==1.21.6 # via + # feast (setup.py) # pandas # pandavro # pyarrow packaging==21.3 - # via dask -pandas==1.3.5 + # via + # dask + # google-api-core +pandas==1.4.2 # via # feast (setup.py) # pandavro @@ -88,7 +93,7 @@ partd==1.2.0 # via dask proto-plus==1.19.6 # via feast (setup.py) -protobuf==3.19.3 +protobuf==3.19.4 # via # feast (setup.py) # google-api-core @@ -108,16 +113,18 @@ pydantic==1.9.0 # via # fastapi # feast (setup.py) -pyparsing==3.0.7 +pyparsing==3.0.8 # via packaging pyrsistent==0.18.1 # via jsonschema python-dateutil==2.8.2 # via pandas -python-dotenv==0.19.2 +python-dotenv==0.20.0 # via uvicorn -pytz==2021.3 - # via pandas +pytz==2022.1 + # via + # google-api-core + # pandas pyyaml==6.0 # via # dask @@ -130,19 +137,20 @@ rsa==4.8 six==1.16.0 # via # absl-py + # google-api-core # google-auth # grpcio # pandavro # python-dateutil sniffio==1.2.0 # via anyio -starlette==0.17.1 +starlette==0.18.0 # via fastapi tabulate==0.8.9 # via feast (setup.py) tenacity==8.0.1 # via feast (setup.py) -tensorflow-metadata==1.6.0 +tensorflow-metadata==1.7.0 # via feast (setup.py) toml==0.10.2 # via feast (setup.py) @@ -150,19 +158,19 @@ toolz==0.11.2 # via # dask # partd -tqdm==4.62.3 +tqdm==4.64.0 # via feast (setup.py) -typing-extensions==4.0.1 +typing-extensions==4.2.0 # via pydantic -urllib3==1.26.8 +urllib3==1.26.9 # via requests -uvicorn[standard]==0.17.0 +uvicorn[standard]==0.17.6 # via feast (setup.py) uvloop==0.16.0 # via uvicorn -watchgod==0.7 +watchgod==0.8.2 # via uvicorn -websockets==10.1 +websockets==10.3 # via uvicorn # The following packages are considered to be unsafe in a requirements file: diff --git a/sdk/python/requirements/py3.7-ci-requirements.txt 
b/sdk/python/requirements/py3.7-ci-requirements.txt index 4ec4bde7c42..4232f5b2941 100644 --- a/sdk/python/requirements/py3.7-ci-requirements.txt +++ b/sdk/python/requirements/py3.7-ci-requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with python 3.7 # To update, run: # -# pip-compile --extra=ci --output-file=requirements/py3.7-ci-requirements.txt +# pip-compile --extra=ci --output-file=sdk/python/requirements/py3.7-ci-requirements.txt # absl-py==1.0.0 # via tensorflow-metadata @@ -23,27 +23,25 @@ alabaster==0.7.12 altair==4.2.0 # via great-expectations anyio==3.5.0 - # via starlette + # via + # starlette + # watchgod appdirs==1.4.4 # via black -appnope==0.1.2 - # via - # ipykernel - # ipython -argon2-cffi==21.3.0 - # via notebook -argon2-cffi-bindings==21.2.0 - # via argon2-cffi -asgiref==3.5.0 +appnope==0.1.3 + # via ipython +asgiref==3.5.1 # via uvicorn -asn1crypto==1.4.0 +asn1crypto==1.5.1 # via # oscrypto # snowflake-connector-python assertpy==1.1 # via feast (setup.py) async-timeout==4.0.2 - # via aiohttp + # via + # aiohttp + # redis asynctest==0.13.0 # via aiohttp attrs==21.4.0 @@ -54,18 +52,18 @@ attrs==21.4.0 # pytest avro==1.10.0 # via feast (setup.py) -azure-core==1.23.0 +azure-core==1.23.1 # via # adlfs # azure-identity # azure-storage-blob azure-datalake-store==0.0.52 # via adlfs -azure-identity==1.8.0 +azure-identity==1.10.0 # via adlfs -azure-storage-blob==12.9.0 +azure-storage-blob==12.11.0 # via adlfs -babel==2.9.1 +babel==2.10.1 # via sphinx backcall==0.2.0 # via ipython @@ -75,18 +73,18 @@ backports-zoneinfo==0.2.1 # tzlocal black==19.10b0 # via feast (setup.py) -bleach==4.1.0 - # via nbconvert -boto3==1.21.11 +boto3==1.22.8 # via # feast (setup.py) # moto -botocore==1.24.11 +botocore==1.25.8 # via # boto3 # moto # s3transfer -cachecontrol==0.12.10 +build==0.7.0 + # via feast (setup.py) +cachecontrol==0.12.11 # via firebase-admin cachetools==4.2.4 # via google-auth @@ -98,7 +96,6 @@ certifi==2021.10.8 # 
snowflake-connector-python cffi==1.15.0 # via - # argon2-cffi-bindings # azure-datalake-store # cryptography # snowflake-connector-python @@ -109,7 +106,7 @@ charset-normalizer==2.0.12 # aiohttp # requests # snowflake-connector-python -click==8.0.4 +click==8.0.1 # via # black # feast (setup.py) @@ -119,30 +116,32 @@ click==8.0.4 cloudpickle==2.0.0 # via dask colorama==0.4.4 - # via feast (setup.py) + # via + # feast (setup.py) + # great-expectations coverage[toml]==6.3.2 # via pytest-cov -cryptography==3.3.2 +cryptography==3.4.8 # via # adal # azure-identity # azure-storage-blob # feast (setup.py) + # great-expectations # moto # msal - # pyjwt # pyopenssl # snowflake-connector-python dask==2022.1.1 # via feast (setup.py) -debugpy==1.5.1 - # via ipykernel +dataclasses==0.6 + # via great-expectations decorator==5.1.1 # via # gcsfs # ipython -defusedxml==0.7.1 - # via nbconvert +deprecated==1.2.13 + # via redis deprecation==2.1.0 # via testcontainers dill==0.3.4 @@ -158,18 +157,17 @@ docutils==0.17.1 # sphinx # sphinx-rtd-theme entrypoints==0.4 - # via - # altair - # jupyter-client - # nbconvert + # via altair execnet==1.9.0 # via pytest-xdist -fastapi==0.74.1 +fastapi==0.76.0 # via feast (setup.py) -fastavro==1.4.9 +fastavro==1.4.11 # via # feast (setup.py) # pandavro +fastjsonschema==2.15.3 + # via nbformat filelock==3.6.0 # via virtualenv firebase-admin==4.5.2 @@ -180,12 +178,12 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal -fsspec==2022.2.0 +fsspec==2022.3.0 # via # adlfs # dask # gcsfs -gcsfs==2022.2.0 +gcsfs==2022.3.0 # via feast (setup.py) google-api-core[grpc]==1.31.5 # via @@ -197,7 +195,7 @@ google-api-core[grpc]==1.31.5 # google-cloud-core # google-cloud-datastore # google-cloud-firestore -google-api-python-client==2.39.0 +google-api-python-client==2.47.0 # via firebase-admin google-auth==1.35.0 # via @@ -210,11 +208,11 @@ google-auth==1.35.0 # google-cloud-storage google-auth-httplib2==0.1.0 # via google-api-python-client -google-auth-oauthlib==0.5.0 
+google-auth-oauthlib==0.5.1 # via gcsfs -google-cloud-bigquery==2.34.1 +google-cloud-bigquery==2.34.3 # via feast (setup.py) -google-cloud-bigquery-storage==2.12.0 +google-cloud-bigquery-storage==2.13.1 # via feast (setup.py) google-cloud-core==1.7.2 # via @@ -223,9 +221,9 @@ google-cloud-core==1.7.2 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-cloud-datastore==2.5.0 +google-cloud-datastore==2.5.1 # via feast (setup.py) -google-cloud-firestore==2.3.4 +google-cloud-firestore==2.4.0 # via firebase-admin google-cloud-storage==1.40.0 # via @@ -243,9 +241,9 @@ googleapis-common-protos==1.52.0 # feast (setup.py) # google-api-core # tensorflow-metadata -great-expectations==0.14.8 +great-expectations==0.14.13 # via feast (setup.py) -grpcio==1.44.0 +grpcio==1.46.0 # via # feast (setup.py) # google-api-core @@ -253,7 +251,7 @@ grpcio==1.44.0 # grpcio-reflection # grpcio-testing # grpcio-tools -grpcio-reflection==1.44.0 +grpcio-reflection==1.46.0 # via feast (setup.py) grpcio-testing==1.44.0 # via feast (setup.py) @@ -261,15 +259,17 @@ grpcio-tools==1.44.0 # via feast (setup.py) h11==0.13.0 # via uvicorn +happybase==1.2.0 + # via feast (setup.py) hiredis==2.0.0 # via feast (setup.py) httplib2==0.20.4 # via # google-api-python-client # google-auth-httplib2 -httptools==0.3.0 +httptools==0.4.0 # via uvicorn -identify==2.4.11 +identify==2.5.0 # via pre-commit idna==3.3 # via @@ -281,6 +281,7 @@ imagesize==1.3.0 # via sphinx importlib-metadata==4.2.0 # via + # build # click # flake8 # great-expectations @@ -290,25 +291,13 @@ importlib-metadata==4.2.0 # pluggy # pre-commit # pytest + # redis # virtualenv -importlib-resources==5.4.0 +importlib-resources==5.7.1 # via jsonschema iniconfig==1.1.1 # via pytest -ipykernel==6.9.1 - # via - # ipywidgets - # notebook -ipython==7.32.0 - # via - # ipykernel - # ipywidgets -ipython-genutils==0.2.0 - # via - # ipywidgets - # nbformat - # notebook -ipywidgets==7.6.5 +ipython==7.33.0 # via great-expectations 
isodate==0.6.1 # via msrest @@ -322,67 +311,48 @@ jinja2==3.0.3 # feast (setup.py) # great-expectations # moto - # nbconvert - # notebook # sphinx -jmespath==0.10.0 +jmespath==1.0.0 # via # boto3 # botocore jsonpatch==1.32 # via great-expectations -jsonpointer==2.2 +jsonpointer==2.3 # via jsonpatch -jsonschema==4.4.0 +jsonschema==4.5.1 # via # altair # feast (setup.py) # great-expectations # nbformat -jupyter-client==7.1.2 - # via - # ipykernel - # nbclient - # notebook -jupyter-core==4.9.2 - # via - # jupyter-client - # nbconvert - # nbformat - # notebook -jupyterlab-pygments==0.1.2 - # via nbconvert -jupyterlab-widgets==1.0.2 - # via ipywidgets -locket==0.2.1 +jupyter-core==4.10.0 + # via nbformat +locket==1.0.0 # via partd -markupsafe==2.1.0 +markupsafe==2.1.1 # via # jinja2 # moto matplotlib-inline==0.1.3 - # via - # ipykernel - # ipython + # via ipython mccabe==0.6.1 # via flake8 minio==7.1.0 # via feast (setup.py) -mistune==0.8.4 - # via - # great-expectations - # nbconvert +mistune==2.0.2 + # via great-expectations mmh3==3.0.0 # via feast (setup.py) mock==2.0.0 # via feast (setup.py) -moto==3.0.5 +moto==3.1.7 # via feast (setup.py) msal==1.17.0 # via # azure-identity # msal-extensions -msal-extensions==0.3.1 +msal-extensions==1.0.0 # via azure-identity msgpack==1.0.3 # via cachecontrol @@ -402,29 +372,14 @@ mypy-extensions==0.4.3 # via mypy mypy-protobuf==3.1 # via feast (setup.py) -nbclient==0.5.11 - # via nbconvert -nbconvert==6.4.2 - # via notebook -nbformat==5.1.3 - # via - # ipywidgets - # nbclient - # nbconvert - # notebook -nest-asyncio==1.5.4 - # via - # ipykernel - # jupyter-client - # nbclient - # notebook +nbformat==5.4.0 + # via great-expectations nodeenv==1.6.0 # via pre-commit -notebook==6.4.10 - # via widgetsnbextension -numpy==1.21.5 +numpy==1.21.6 # via # altair + # feast (setup.py) # great-expectations # pandas # pandavro @@ -432,17 +387,18 @@ numpy==1.21.5 # scipy oauthlib==3.2.0 # via requests-oauthlib -oscrypto==1.2.1 +oscrypto==1.3.0 # 
via snowflake-connector-python packaging==21.3 # via - # bleach + # build # dask # deprecation # google-api-core # google-cloud-bigquery - # google-cloud-firestore + # great-expectations # pytest + # redis # sphinx pandas==1.3.5 # via @@ -453,35 +409,35 @@ pandas==1.3.5 # snowflake-connector-python pandavro==1.5.2 # via feast (setup.py) -pandocfilters==1.5.0 - # via nbconvert parso==0.8.3 # via jedi partd==1.2.0 # via dask pathspec==0.9.0 # via black -pbr==5.8.1 +pbr==5.9.0 # via mock pep517==0.12.0 - # via pip-tools + # via + # build + # pip-tools pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==6.5.1 +pip-tools==6.6.0 # via feast (setup.py) -platformdirs==2.5.1 +platformdirs==2.5.2 # via virtualenv pluggy==1.0.0 # via pytest +ply==3.11 + # via thriftpy2 portalocker==2.4.0 # via msal-extensions -pre-commit==2.17.0 +pre-commit==2.19.0 # via feast (setup.py) -prometheus-client==0.13.1 - # via notebook -prompt-toolkit==3.0.28 +prompt-toolkit==3.0.29 # via ipython proto-plus==1.19.6 # via @@ -504,10 +460,10 @@ protobuf==3.19.4 # tensorflow-metadata psutil==5.9.0 # via feast (setup.py) +psycopg2-binary==2.9.3 + # via feast (setup.py) ptyprocess==0.7.0 - # via - # pexpect - # terminado + # via pexpect py==1.11.0 # via # pytest @@ -526,7 +482,7 @@ pyasn1==0.4.8 # rsa pyasn1-modules==0.2.8 # via google-auth -pybindgen==0.22.0 +pybindgen==0.22.1 # via feast (setup.py) pycodestyle==2.8.0 # via flake8 @@ -540,11 +496,9 @@ pydantic==1.9.0 # feast (setup.py) pyflakes==2.4.0 # via flake8 -pygments==2.11.2 +pygments==2.12.0 # via # ipython - # jupyterlab-pygments - # nbconvert # sphinx pyjwt[crypto]==2.3.0 # via @@ -562,7 +516,7 @@ pyrsistent==0.18.1 # via jsonschema pyspark==3.2.1 # via feast (setup.py) -pytest==7.0.1 +pytest==7.1.2 # via # feast (setup.py) # pytest-benchmark @@ -595,12 +549,11 @@ python-dateutil==2.8.2 # botocore # google-cloud-bigquery # great-expectations - # jupyter-client # moto # pandas -python-dotenv==0.19.2 
+python-dotenv==0.20.0 # via uvicorn -pytz==2021.3 +pytz==2022.1 # via # babel # google-api-core @@ -608,6 +561,7 @@ pytz==2021.3 # moto # pandas # snowflake-connector-python + # trino pytz-deprecation-shim==0.1.0.post0 # via tzlocal pyyaml==6.0 @@ -616,17 +570,9 @@ pyyaml==6.0 # feast (setup.py) # pre-commit # uvicorn -pyzmq==22.3.0 - # via - # jupyter-client - # notebook -redis==3.5.3 - # via - # feast (setup.py) - # redis-py-cluster -redis-py-cluster==2.1.3 +redis==4.2.2 # via feast (setup.py) -regex==2022.3.2 +regex==2022.4.24 # via black requests==2.27.1 # via @@ -648,11 +594,12 @@ requests==2.27.1 # responses # snowflake-connector-python # sphinx + # trino requests-oauthlib==1.3.1 # via # google-auth-oauthlib # msrest -responses==0.18.0 +responses==0.20.0 # via moto rsa==4.8 # via google-auth @@ -664,22 +611,18 @@ s3transfer==0.5.2 # via boto3 scipy==1.7.3 # via great-expectations -send2trash==1.8.0 - # via notebook six==1.16.0 # via # absl-py # azure-core # azure-identity - # bleach - # cryptography # google-api-core # google-auth # google-auth-httplib2 # google-cloud-core # google-resumable-media # grpcio - # isodate + # happybase # mock # msrestazure # pandavro @@ -690,7 +633,7 @@ sniffio==1.2.0 # via anyio snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==2.7.4 +snowflake-connector-python[pandas]==2.7.7 # via feast (setup.py) sphinx==4.3.2 # via @@ -710,7 +653,7 @@ sphinxcontrib-qthelp==1.0.3 # via sphinx sphinxcontrib-serializinghtml==1.1.5 # via sphinx -starlette==0.17.1 +starlette==0.18.0 # via fastapi tabulate==0.8.9 # via feast (setup.py) @@ -720,12 +663,10 @@ tensorflow-metadata==1.7.0 # via feast (setup.py) termcolor==1.1.0 # via great-expectations -terminado==0.13.2 - # via notebook -testcontainers==3.4.2 +testcontainers==3.5.3 # via feast (setup.py) -testpath==0.6.0 - # via nbconvert +thriftpy2==0.4.14 + # via happybase toml==0.10.2 # via # black @@ -733,6 +674,7 @@ toml==0.10.2 # pre-commit tomli==2.0.1 # via + # build # 
coverage # mypy # pep517 @@ -742,57 +684,46 @@ toolz==0.11.2 # altair # dask # partd -tornado==6.1 - # via - # ipykernel - # jupyter-client - # notebook - # terminado -tqdm==4.63.0 +tqdm==4.64.0 # via # feast (setup.py) # great-expectations traitlets==5.1.1 # via - # ipykernel # ipython - # ipywidgets - # jupyter-client # jupyter-core # matplotlib-inline - # nbclient - # nbconvert # nbformat - # notebook -typed-ast==1.5.2 +trino==0.313.0 + # via feast (setup.py) +typed-ast==1.5.3 # via # black # mypy -types-protobuf==3.19.12 +types-protobuf==3.19.18 # via # feast (setup.py) # mypy-protobuf -types-python-dateutil==2.8.9 +types-python-dateutil==2.8.14 # via feast (setup.py) -types-pytz==2021.3.5 +types-pytz==2021.3.7 # via feast (setup.py) -types-pyyaml==6.0.4 +types-pyyaml==6.0.7 # via feast (setup.py) -types-redis==4.1.17 +types-redis==4.2.1 # via feast (setup.py) -types-requests==2.27.11 +types-requests==2.27.25 # via feast (setup.py) -types-setuptools==57.4.9 +types-setuptools==57.4.14 # via feast (setup.py) -types-tabulate==0.8.5 +types-tabulate==0.8.8 # via feast (setup.py) -types-urllib3==1.26.10 +types-urllib3==1.26.14 # via types-requests -typing-extensions==4.1.1 +typing-extensions==4.2.0 # via # aiohttp # anyio - # argon2-cffi # asgiref # async-timeout # azure-core @@ -802,51 +733,51 @@ typing-extensions==4.1.1 # jsonschema # mypy # pydantic + # redis # starlette # uvicorn # yarl -tzdata==2021.5 +tzdata==2022.1 # via pytz-deprecation-shim -tzlocal==4.1 +tzlocal==4.2 # via great-expectations uritemplate==4.1.1 # via google-api-python-client -urllib3==1.26.8 +urllib3==1.26.9 # via # botocore # feast (setup.py) + # great-expectations # minio # requests # responses -uvicorn[standard]==0.17.5 +uvicorn[standard]==0.17.6 # via feast (setup.py) uvloop==0.16.0 # via uvicorn -virtualenv==20.13.2 +virtualenv==20.14.1 # via pre-commit -watchgod==0.7 +watchgod==0.8.2 # via uvicorn wcwidth==0.2.5 # via prompt-toolkit -webencodings==0.5.1 - # via bleach 
-websocket-client==1.3.1 +websocket-client==1.3.2 # via docker -websockets==10.2 +websockets==10.3 # via uvicorn -werkzeug==2.0.3 +werkzeug==2.1.2 # via moto wheel==0.37.1 # via pip-tools -widgetsnbextension==3.5.2 - # via ipywidgets -wrapt==1.13.3 - # via testcontainers +wrapt==1.14.1 + # via + # deprecated + # testcontainers xmltodict==0.12.0 # via moto yarl==1.7.2 # via aiohttp -zipp==3.7.0 +zipp==3.8.0 # via # importlib-metadata # importlib-resources diff --git a/sdk/python/requirements/py3.7-requirements.txt b/sdk/python/requirements/py3.7-requirements.txt index f3c90a6e3bd..dfdd86e39a1 100644 --- a/sdk/python/requirements/py3.7-requirements.txt +++ b/sdk/python/requirements/py3.7-requirements.txt @@ -2,23 +2,25 @@ # This file is autogenerated by pip-compile with python 3.7 # To update, run: # -# pip-compile --output-file=requirements/py3.7-requirements.txt +# pip-compile --output-file=sdk/python/requirements/py3.7-requirements.txt # absl-py==1.0.0 # via tensorflow-metadata anyio==3.5.0 - # via starlette -asgiref==3.5.0 + # via + # starlette + # watchgod +asgiref==3.5.1 # via uvicorn attrs==21.4.0 # via jsonschema -cachetools==5.0.0 +cachetools==4.2.4 # via google-auth certifi==2021.10.8 # via requests charset-normalizer==2.0.12 # via requests -click==8.0.4 +click==8.0.1 # via # feast (setup.py) # uvicorn @@ -30,60 +32,63 @@ dask==2022.1.1 # via feast (setup.py) dill==0.3.4 # via feast (setup.py) -fastapi==0.74.1 +fastapi==0.76.0 # via feast (setup.py) -fastavro==1.4.9 +fastavro==1.4.11 # via # feast (setup.py) # pandavro -fsspec==2022.2.0 +fsspec==2022.3.0 # via dask -google-api-core==2.5.0 +google-api-core==1.31.5 # via feast (setup.py) -google-auth==2.6.0 +google-auth==1.35.0 # via google-api-core googleapis-common-protos==1.52.0 # via # feast (setup.py) # google-api-core # tensorflow-metadata -grpcio==1.44.0 +grpcio==1.46.0 # via # feast (setup.py) # grpcio-reflection -grpcio-reflection==1.44.0 +grpcio-reflection==1.46.0 # via feast (setup.py) h11==0.13.0 
# via uvicorn -httptools==0.3.0 +httptools==0.4.0 # via uvicorn idna==3.3 # via # anyio # requests -importlib-metadata==4.11.1 +importlib-metadata==4.11.3 # via # click # jsonschema -importlib-resources==5.4.0 +importlib-resources==5.7.1 # via jsonschema -jinja2==3.0.3 +jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.4.0 +jsonschema==4.5.1 # via feast (setup.py) -locket==0.2.1 +locket==1.0.0 # via partd -markupsafe==2.1.0 +markupsafe==2.1.1 # via jinja2 mmh3==3.0.0 # via feast (setup.py) -numpy==1.21.5 +numpy==1.21.6 # via + # feast (setup.py) # pandas # pandavro # pyarrow packaging==21.3 - # via dask + # via + # dask + # google-api-core pandas==1.3.5 # via # feast (setup.py) @@ -102,7 +107,7 @@ protobuf==3.19.4 # grpcio-reflection # proto-plus # tensorflow-metadata -pyarrow==7.0.0 +pyarrow==6.0.1 # via feast (setup.py) pyasn1==0.4.8 # via @@ -114,16 +119,18 @@ pydantic==1.9.0 # via # fastapi # feast (setup.py) -pyparsing==3.0.7 +pyparsing==3.0.8 # via packaging pyrsistent==0.18.1 # via jsonschema python-dateutil==2.8.2 # via pandas -python-dotenv==0.19.2 +python-dotenv==0.20.0 # via uvicorn -pytz==2021.3 - # via pandas +pytz==2022.1 + # via + # google-api-core + # pandas pyyaml==6.0 # via # dask @@ -136,19 +143,20 @@ rsa==4.8 six==1.16.0 # via # absl-py + # google-api-core # google-auth # grpcio # pandavro # python-dateutil sniffio==1.2.0 # via anyio -starlette==0.17.1 +starlette==0.18.0 # via fastapi tabulate==0.8.9 # via feast (setup.py) tenacity==8.0.1 # via feast (setup.py) -tensorflow-metadata==1.6.0 +tensorflow-metadata==1.7.0 # via feast (setup.py) toml==0.10.2 # via feast (setup.py) @@ -156,9 +164,9 @@ toolz==0.11.2 # via # dask # partd -tqdm==4.62.3 +tqdm==4.64.0 # via feast (setup.py) -typing-extensions==4.1.1 +typing-extensions==4.2.0 # via # anyio # asgiref @@ -168,17 +176,20 @@ typing-extensions==4.1.1 # pydantic # starlette # uvicorn -urllib3==1.26.8 +urllib3==1.26.9 # via requests -uvicorn[standard]==0.17.5 +uvicorn[standard]==0.17.6 # via feast 
(setup.py) uvloop==0.16.0 # via uvicorn -watchgod==0.7 +watchgod==0.8.2 # via uvicorn -websockets==10.2 +websockets==10.3 # via uvicorn -zipp==3.7.0 +zipp==3.8.0 # via # importlib-metadata # importlib-resources + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/sdk/python/requirements/py3.8-ci-requirements.txt b/sdk/python/requirements/py3.8-ci-requirements.txt index 5e2da9baa7d..db3eca4b538 100644 --- a/sdk/python/requirements/py3.8-ci-requirements.txt +++ b/sdk/python/requirements/py3.8-ci-requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with python 3.8 # To update, run: # -# pip-compile --extra=ci --output-file=requirements/py3.8-ci-requirements.txt +# pip-compile --extra=ci --output-file=sdk/python/requirements/py3.8-ci-requirements.txt # absl-py==1.0.0 # via tensorflow-metadata @@ -23,20 +23,16 @@ alabaster==0.7.12 altair==4.2.0 # via great-expectations anyio==3.5.0 - # via starlette + # via + # starlette + # watchgod appdirs==1.4.4 # via black -appnope==0.1.2 - # via - # ipykernel - # ipython -argon2-cffi==21.3.0 - # via notebook -argon2-cffi-bindings==21.2.0 - # via argon2-cffi -asgiref==3.5.0 +appnope==0.1.3 + # via ipython +asgiref==3.5.1 # via uvicorn -asn1crypto==1.4.0 +asn1crypto==1.5.1 # via # oscrypto # snowflake-connector-python @@ -45,7 +41,9 @@ assertpy==1.1 asttokens==2.0.5 # via stack-data async-timeout==4.0.2 - # via aiohttp + # via + # aiohttp + # redis attrs==21.4.0 # via # aiohttp @@ -54,18 +52,18 @@ attrs==21.4.0 # pytest avro==1.10.0 # via feast (setup.py) -azure-core==1.23.0 +azure-core==1.23.1 # via # adlfs # azure-identity # azure-storage-blob azure-datalake-store==0.0.52 # via adlfs -azure-identity==1.8.0 +azure-identity==1.10.0 # via adlfs -azure-storage-blob==12.9.0 +azure-storage-blob==12.11.0 # via adlfs -babel==2.9.1 +babel==2.10.1 # via sphinx backcall==0.2.0 # via ipython @@ -75,18 +73,18 @@ backports-zoneinfo==0.2.1 # tzlocal black==19.10b0 # via 
feast (setup.py) -bleach==4.1.0 - # via nbconvert -boto3==1.21.11 +boto3==1.22.8 # via # feast (setup.py) # moto -botocore==1.24.11 +botocore==1.25.8 # via # boto3 # moto # s3transfer -cachecontrol==0.12.10 +build==0.7.0 + # via feast (setup.py) +cachecontrol==0.12.11 # via firebase-admin cachetools==4.2.4 # via google-auth @@ -98,7 +96,6 @@ certifi==2021.10.8 # snowflake-connector-python cffi==1.15.0 # via - # argon2-cffi-bindings # azure-datalake-store # cryptography # snowflake-connector-python @@ -109,7 +106,7 @@ charset-normalizer==2.0.12 # aiohttp # requests # snowflake-connector-python -click==8.0.4 +click==8.0.1 # via # black # feast (setup.py) @@ -119,30 +116,32 @@ click==8.0.4 cloudpickle==2.0.0 # via dask colorama==0.4.4 - # via feast (setup.py) + # via + # feast (setup.py) + # great-expectations coverage[toml]==6.3.2 # via pytest-cov -cryptography==3.3.2 +cryptography==3.4.8 # via # adal # azure-identity # azure-storage-blob # feast (setup.py) + # great-expectations # moto # msal - # pyjwt # pyopenssl # snowflake-connector-python dask==2022.1.1 # via feast (setup.py) -debugpy==1.5.1 - # via ipykernel +dataclasses==0.6 + # via great-expectations decorator==5.1.1 # via # gcsfs # ipython -defusedxml==0.7.1 - # via nbconvert +deprecated==1.2.13 + # via redis deprecation==2.1.0 # via testcontainers dill==0.3.4 @@ -158,20 +157,19 @@ docutils==0.17.1 # sphinx # sphinx-rtd-theme entrypoints==0.4 - # via - # altair - # jupyter-client - # nbconvert + # via altair execnet==1.9.0 # via pytest-xdist executing==0.8.3 # via stack-data -fastapi==0.74.1 +fastapi==0.76.0 # via feast (setup.py) -fastavro==1.4.9 +fastavro==1.4.11 # via # feast (setup.py) # pandavro +fastjsonschema==2.15.3 + # via nbformat filelock==3.6.0 # via virtualenv firebase-admin==4.5.2 @@ -182,12 +180,12 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal -fsspec==2022.2.0 +fsspec==2022.3.0 # via # adlfs # dask # gcsfs -gcsfs==2022.2.0 +gcsfs==2022.3.0 # via feast (setup.py) 
google-api-core[grpc]==1.31.5 # via @@ -199,7 +197,7 @@ google-api-core[grpc]==1.31.5 # google-cloud-core # google-cloud-datastore # google-cloud-firestore -google-api-python-client==2.39.0 +google-api-python-client==2.47.0 # via firebase-admin google-auth==1.35.0 # via @@ -212,11 +210,11 @@ google-auth==1.35.0 # google-cloud-storage google-auth-httplib2==0.1.0 # via google-api-python-client -google-auth-oauthlib==0.5.0 +google-auth-oauthlib==0.5.1 # via gcsfs -google-cloud-bigquery==2.34.1 +google-cloud-bigquery==2.34.3 # via feast (setup.py) -google-cloud-bigquery-storage==2.12.0 +google-cloud-bigquery-storage==2.13.1 # via feast (setup.py) google-cloud-core==1.7.2 # via @@ -225,9 +223,9 @@ google-cloud-core==1.7.2 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-cloud-datastore==2.5.0 +google-cloud-datastore==2.5.1 # via feast (setup.py) -google-cloud-firestore==2.3.4 +google-cloud-firestore==2.4.0 # via firebase-admin google-cloud-storage==1.40.0 # via @@ -245,9 +243,9 @@ googleapis-common-protos==1.52.0 # feast (setup.py) # google-api-core # tensorflow-metadata -great-expectations==0.14.8 +great-expectations==0.14.13 # via feast (setup.py) -grpcio==1.44.0 +grpcio==1.46.0 # via # feast (setup.py) # google-api-core @@ -255,7 +253,7 @@ grpcio==1.44.0 # grpcio-reflection # grpcio-testing # grpcio-tools -grpcio-reflection==1.44.0 +grpcio-reflection==1.46.0 # via feast (setup.py) grpcio-testing==1.44.0 # via feast (setup.py) @@ -263,15 +261,17 @@ grpcio-tools==1.44.0 # via feast (setup.py) h11==0.13.0 # via uvicorn +happybase==1.2.0 + # via feast (setup.py) hiredis==2.0.0 # via feast (setup.py) httplib2==0.20.4 # via # google-api-python-client # google-auth-httplib2 -httptools==0.3.0 +httptools==0.4.0 # via uvicorn -identify==2.4.11 +identify==2.5.0 # via pre-commit idna==3.3 # via @@ -281,26 +281,13 @@ idna==3.3 # yarl imagesize==1.3.0 # via sphinx -importlib-metadata==4.11.2 +importlib-metadata==4.11.3 # via great-expectations 
-importlib-resources==5.4.0 +importlib-resources==5.7.1 # via jsonschema iniconfig==1.1.1 # via pytest -ipykernel==6.9.1 - # via - # ipywidgets - # notebook -ipython==8.1.1 - # via - # ipykernel - # ipywidgets -ipython-genutils==0.2.0 - # via - # ipywidgets - # nbformat - # notebook -ipywidgets==7.6.5 +ipython==8.3.0 # via great-expectations isodate==0.6.1 # via msrest @@ -314,67 +301,48 @@ jinja2==3.0.3 # feast (setup.py) # great-expectations # moto - # nbconvert - # notebook # sphinx -jmespath==0.10.0 +jmespath==1.0.0 # via # boto3 # botocore jsonpatch==1.32 # via great-expectations -jsonpointer==2.2 +jsonpointer==2.3 # via jsonpatch -jsonschema==4.4.0 +jsonschema==4.5.1 # via # altair # feast (setup.py) # great-expectations # nbformat -jupyter-client==7.1.2 - # via - # ipykernel - # nbclient - # notebook -jupyter-core==4.9.2 - # via - # jupyter-client - # nbconvert - # nbformat - # notebook -jupyterlab-pygments==0.1.2 - # via nbconvert -jupyterlab-widgets==1.0.2 - # via ipywidgets -locket==0.2.1 +jupyter-core==4.10.0 + # via nbformat +locket==1.0.0 # via partd -markupsafe==2.1.0 +markupsafe==2.1.1 # via # jinja2 # moto matplotlib-inline==0.1.3 - # via - # ipykernel - # ipython + # via ipython mccabe==0.6.1 # via flake8 minio==7.1.0 # via feast (setup.py) -mistune==0.8.4 - # via - # great-expectations - # nbconvert +mistune==2.0.2 + # via great-expectations mmh3==3.0.0 # via feast (setup.py) mock==2.0.0 # via feast (setup.py) -moto==3.0.5 +moto==3.1.7 # via feast (setup.py) msal==1.17.0 # via # azure-identity # msal-extensions -msal-extensions==0.3.1 +msal-extensions==1.0.0 # via azure-identity msgpack==1.0.3 # via cachecontrol @@ -392,31 +360,16 @@ mypy==0.931 # via feast (setup.py) mypy-extensions==0.4.3 # via mypy -mypy-protobuf==3.1.0 - # via feast (setup.py) -nbclient==0.5.11 - # via nbconvert -nbconvert==6.4.2 - # via notebook -nbformat==5.1.3 - # via - # ipywidgets - # nbclient - # nbconvert - # notebook -nest-asyncio==1.5.4 - # via - # ipykernel - # 
jupyter-client - # nbclient - # notebook +mypy-protobuf==3.1 + # via feast (setup.py) +nbformat==5.4.0 + # via great-expectations nodeenv==1.6.0 # via pre-commit -notebook==6.4.10 - # via widgetsnbextension -numpy==1.22.2 +numpy==1.21.6 # via # altair + # feast (setup.py) # great-expectations # pandas # pandavro @@ -424,19 +377,20 @@ numpy==1.22.2 # scipy oauthlib==3.2.0 # via requests-oauthlib -oscrypto==1.2.1 +oscrypto==1.3.0 # via snowflake-connector-python packaging==21.3 # via - # bleach + # build # dask # deprecation # google-api-core # google-cloud-bigquery - # google-cloud-firestore + # great-expectations # pytest + # redis # sphinx -pandas==1.3.5 +pandas==1.4.2 # via # altair # feast (setup.py) @@ -445,35 +399,35 @@ pandas==1.3.5 # snowflake-connector-python pandavro==1.5.2 # via feast (setup.py) -pandocfilters==1.5.0 - # via nbconvert parso==0.8.3 # via jedi partd==1.2.0 # via dask pathspec==0.9.0 # via black -pbr==5.8.1 +pbr==5.9.0 # via mock pep517==0.12.0 - # via pip-tools + # via + # build + # pip-tools pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==6.5.1 +pip-tools==6.6.0 # via feast (setup.py) -platformdirs==2.5.1 +platformdirs==2.5.2 # via virtualenv pluggy==1.0.0 # via pytest +ply==3.11 + # via thriftpy2 portalocker==2.4.0 # via msal-extensions -pre-commit==2.17.0 +pre-commit==2.19.0 # via feast (setup.py) -prometheus-client==0.13.1 - # via notebook -prompt-toolkit==3.0.28 +prompt-toolkit==3.0.29 # via ipython proto-plus==1.19.6 # via @@ -496,10 +450,10 @@ protobuf==3.19.4 # tensorflow-metadata psutil==5.9.0 # via feast (setup.py) +psycopg2-binary==2.9.3 + # via feast (setup.py) ptyprocess==0.7.0 - # via - # pexpect - # terminado + # via pexpect pure-eval==0.2.2 # via stack-data py==1.11.0 @@ -520,7 +474,7 @@ pyasn1==0.4.8 # rsa pyasn1-modules==0.2.8 # via google-auth -pybindgen==0.22.0 +pybindgen==0.22.1 # via feast (setup.py) pycodestyle==2.8.0 # via flake8 @@ -534,11 +488,9 @@ pydantic==1.9.0 # feast (setup.py) 
pyflakes==2.4.0 # via flake8 -pygments==2.11.2 +pygments==2.12.0 # via # ipython - # jupyterlab-pygments - # nbconvert # sphinx pyjwt[crypto]==2.3.0 # via @@ -556,7 +508,7 @@ pyrsistent==0.18.1 # via jsonschema pyspark==3.2.1 # via feast (setup.py) -pytest==7.0.1 +pytest==7.1.2 # via # feast (setup.py) # pytest-benchmark @@ -589,12 +541,11 @@ python-dateutil==2.8.2 # botocore # google-cloud-bigquery # great-expectations - # jupyter-client # moto # pandas -python-dotenv==0.19.2 +python-dotenv==0.20.0 # via uvicorn -pytz==2021.3 +pytz==2022.1 # via # babel # google-api-core @@ -602,6 +553,7 @@ pytz==2021.3 # moto # pandas # snowflake-connector-python + # trino pytz-deprecation-shim==0.1.0.post0 # via tzlocal pyyaml==6.0 @@ -610,17 +562,9 @@ pyyaml==6.0 # feast (setup.py) # pre-commit # uvicorn -pyzmq==22.3.0 - # via - # jupyter-client - # notebook -redis==3.5.3 - # via - # feast (setup.py) - # redis-py-cluster -redis-py-cluster==2.1.3 +redis==4.2.2 # via feast (setup.py) -regex==2022.3.2 +regex==2022.4.24 # via black requests==2.27.1 # via @@ -642,39 +586,35 @@ requests==2.27.1 # responses # snowflake-connector-python # sphinx + # trino requests-oauthlib==1.3.1 # via # google-auth-oauthlib # msrest -responses==0.18.0 +responses==0.20.0 # via moto rsa==4.8 # via google-auth ruamel-yaml==0.17.17 # via great-expectations -ruamel.yaml.clib==0.2.6 - # via ruamel.yaml +ruamel-yaml-clib==0.2.6 + # via ruamel-yaml s3transfer==0.5.2 # via boto3 scipy==1.8.0 # via great-expectations -send2trash==1.8.0 - # via notebook six==1.16.0 # via # absl-py - # asttokens # azure-core # azure-identity - # bleach - # cryptography # google-api-core # google-auth # google-auth-httplib2 # google-cloud-core # google-resumable-media # grpcio - # isodate + # happybase # mock # msrestazure # pandavro @@ -685,7 +625,7 @@ sniffio==1.2.0 # via anyio snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==2.7.4 +snowflake-connector-python[pandas]==2.7.7 # via feast (setup.py) 
sphinx==4.3.2 # via @@ -707,7 +647,7 @@ sphinxcontrib-serializinghtml==1.1.5 # via sphinx stack-data==0.2.0 # via ipython -starlette==0.17.1 +starlette==0.18.0 # via fastapi tabulate==0.8.9 # via feast (setup.py) @@ -717,12 +657,10 @@ tensorflow-metadata==1.7.0 # via feast (setup.py) termcolor==1.1.0 # via great-expectations -terminado==0.13.2 - # via notebook -testcontainers==3.4.2 +testcontainers==3.5.3 # via feast (setup.py) -testpath==0.6.0 - # via nbconvert +thriftpy2==0.4.14 + # via happybase toml==0.10.2 # via # black @@ -730,6 +668,7 @@ toml==0.10.2 # pre-commit tomli==2.0.1 # via + # build # coverage # mypy # pep517 @@ -739,102 +678,92 @@ toolz==0.11.2 # altair # dask # partd -tornado==6.1 - # via - # ipykernel - # jupyter-client - # notebook - # terminado -tqdm==4.63.0 +tqdm==4.64.0 # via # feast (setup.py) # great-expectations traitlets==5.1.1 # via - # ipykernel # ipython - # ipywidgets - # jupyter-client # jupyter-core # matplotlib-inline - # nbclient - # nbconvert # nbformat - # notebook -typed-ast==1.5.2 +trino==0.313.0 + # via feast (setup.py) +typed-ast==1.5.3 # via black -types-protobuf==3.19.12 +types-protobuf==3.19.18 # via # feast (setup.py) # mypy-protobuf -types-python-dateutil==2.8.9 +types-python-dateutil==2.8.14 # via feast (setup.py) -types-pytz==2021.3.5 +types-pytz==2021.3.7 # via feast (setup.py) -types-pyyaml==6.0.4 +types-pyyaml==6.0.7 # via feast (setup.py) -types-redis==4.1.17 +types-redis==4.2.1 # via feast (setup.py) -types-requests==2.27.11 +types-requests==2.27.25 # via feast (setup.py) -types-setuptools==57.4.9 +types-setuptools==57.4.14 # via feast (setup.py) -types-tabulate==0.8.5 +types-tabulate==0.8.8 # via feast (setup.py) -types-urllib3==1.26.10 +types-urllib3==1.26.14 # via types-requests -typing-extensions==4.1.1 +typing-extensions==4.2.0 # via # azure-core # great-expectations # mypy # pydantic -tzdata==2021.5 + # starlette +tzdata==2022.1 # via pytz-deprecation-shim -tzlocal==4.1 +tzlocal==4.2 # via 
great-expectations uritemplate==4.1.1 # via google-api-python-client -urllib3==1.26.8 +urllib3==1.26.9 # via # botocore # feast (setup.py) + # great-expectations # minio # requests # responses -uvicorn[standard]==0.17.5 +uvicorn[standard]==0.17.6 # via feast (setup.py) uvloop==0.16.0 # via uvicorn -virtualenv==20.13.2 +virtualenv==20.14.1 # via pre-commit -watchgod==0.7 +watchgod==0.8.2 # via uvicorn wcwidth==0.2.5 # via prompt-toolkit -webencodings==0.5.1 - # via bleach -websocket-client==1.3.1 +websocket-client==1.3.2 # via docker -websockets==10.2 +websockets==10.3 # via uvicorn -werkzeug==2.0.3 +werkzeug==2.1.2 # via moto wheel==0.37.1 # via pip-tools -widgetsnbextension==3.5.2 - # via ipywidgets -wrapt==1.13.3 - # via testcontainers +wrapt==1.14.1 + # via + # deprecated + # testcontainers xmltodict==0.12.0 # via moto yarl==1.7.2 # via aiohttp -zipp==3.7.0 +zipp==3.8.0 # via # importlib-metadata # importlib-resources # The following packages are considered to be unsafe in a requirements file: # pip -# setuptools \ No newline at end of file +# setuptools diff --git a/sdk/python/requirements/py3.8-requirements.txt b/sdk/python/requirements/py3.8-requirements.txt index 4b996ef075a..9e47d0e831d 100644 --- a/sdk/python/requirements/py3.8-requirements.txt +++ b/sdk/python/requirements/py3.8-requirements.txt @@ -2,13 +2,15 @@ # This file is autogenerated by pip-compile with python 3.8 # To update, run: # -# pip-compile --output-file=requirements/py3.8-requirements.txt +# pip-compile --output-file=sdk/python/requirements/py3.8-requirements.txt # absl-py==1.0.0 # via tensorflow-metadata anyio==3.5.0 - # via starlette -asgiref==3.4.1 + # via + # starlette + # watchgod +asgiref==3.5.1 # via uvicorn attrs==21.4.0 # via jsonschema @@ -16,9 +18,9 @@ cachetools==4.2.4 # via google-auth certifi==2021.10.8 # via requests -charset-normalizer==2.0.10 +charset-normalizer==2.0.12 # via requests -click==8.0.3 +click==8.0.1 # via # feast (setup.py) # uvicorn @@ -30,57 +32,60 @@ 
dask==2022.1.1 # via feast (setup.py) dill==0.3.4 # via feast (setup.py) -fastapi==0.72.0 +fastapi==0.76.0 # via feast (setup.py) -fastavro==1.4.9 +fastavro==1.4.11 # via # feast (setup.py) # pandavro -fsspec==2022.2.0 +fsspec==2022.3.0 # via dask -google-api-core==2.4.0 +google-api-core==1.31.5 # via feast (setup.py) -google-auth==2.3.3 +google-auth==1.35.0 # via google-api-core googleapis-common-protos==1.52.0 # via # feast (setup.py) # google-api-core # tensorflow-metadata -grpcio==1.43.0 +grpcio==1.46.0 # via # feast (setup.py) # grpcio-reflection -grpcio-reflection==1.43.0 +grpcio-reflection==1.46.0 # via feast (setup.py) h11==0.13.0 # via uvicorn -httptools==0.3.0 +httptools==0.4.0 # via uvicorn idna==3.3 # via # anyio # requests -importlib-resources==5.4.0 +importlib-resources==5.7.1 # via jsonschema -jinja2==3.0.3 +jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.4.0 +jsonschema==4.5.1 # via feast (setup.py) -locket==0.2.1 +locket==1.0.0 # via partd -markupsafe==2.0.1 +markupsafe==2.1.1 # via jinja2 mmh3==3.0.0 # via feast (setup.py) -numpy==1.21.5 +numpy==1.21.6 # via + # feast (setup.py) # pandas # pandavro # pyarrow packaging==21.3 - # via dask -pandas==1.3.5 + # via + # dask + # google-api-core +pandas==1.4.2 # via # feast (setup.py) # pandavro @@ -90,7 +95,7 @@ partd==1.2.0 # via dask proto-plus==1.19.6 # via feast (setup.py) -protobuf==3.19.3 +protobuf==3.19.4 # via # feast (setup.py) # google-api-core @@ -110,16 +115,18 @@ pydantic==1.9.0 # via # fastapi # feast (setup.py) -pyparsing==3.0.7 +pyparsing==3.0.8 # via packaging pyrsistent==0.18.1 # via jsonschema python-dateutil==2.8.2 # via pandas -python-dotenv==0.19.2 +python-dotenv==0.20.0 # via uvicorn -pytz==2021.3 - # via pandas +pytz==2022.1 + # via + # google-api-core + # pandas pyyaml==6.0 # via # dask @@ -132,19 +139,20 @@ rsa==4.8 six==1.16.0 # via # absl-py + # google-api-core # google-auth # grpcio # pandavro # python-dateutil sniffio==1.2.0 # via anyio -starlette==0.17.1 
+starlette==0.18.0 # via fastapi tabulate==0.8.9 # via feast (setup.py) tenacity==8.0.1 # via feast (setup.py) -tensorflow-metadata==1.6.0 +tensorflow-metadata==1.7.0 # via feast (setup.py) toml==0.10.2 # via feast (setup.py) @@ -152,21 +160,23 @@ toolz==0.11.2 # via # dask # partd -tqdm==4.62.3 +tqdm==4.64.0 # via feast (setup.py) -typing-extensions==4.0.1 - # via pydantic -urllib3==1.26.8 +typing-extensions==4.2.0 + # via + # pydantic + # starlette +urllib3==1.26.9 # via requests -uvicorn[standard]==0.17.0 +uvicorn[standard]==0.17.6 # via feast (setup.py) uvloop==0.16.0 # via uvicorn -watchgod==0.7 +watchgod==0.8.2 # via uvicorn -websockets==10.1 +websockets==10.3 # via uvicorn -zipp==3.7.0 +zipp==3.8.0 # via importlib-resources # The following packages are considered to be unsafe in a requirements file: diff --git a/sdk/python/requirements/py3.9-ci-requirements.txt b/sdk/python/requirements/py3.9-ci-requirements.txt index cf228b9412b..4a96dc6fd2c 100644 --- a/sdk/python/requirements/py3.9-ci-requirements.txt +++ b/sdk/python/requirements/py3.9-ci-requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with python 3.9 # To update, run: # -# pip-compile --extra=ci --output-file=requirements/py3.9-ci-requirements.txt +# pip-compile --extra=ci --output-file=sdk/python/requirements/py3.9-ci-requirements.txt # absl-py==1.0.0 # via tensorflow-metadata @@ -23,20 +23,16 @@ alabaster==0.7.12 altair==4.2.0 # via great-expectations anyio==3.5.0 - # via starlette + # via + # starlette + # watchgod appdirs==1.4.4 # via black -appnope==0.1.2 - # via - # ipykernel - # ipython -argon2-cffi==21.3.0 - # via notebook -argon2-cffi-bindings==21.2.0 - # via argon2-cffi -asgiref==3.5.0 +appnope==0.1.3 + # via ipython +asgiref==3.5.1 # via uvicorn -asn1crypto==1.4.0 +asn1crypto==1.5.1 # via # oscrypto # snowflake-connector-python @@ -45,7 +41,9 @@ assertpy==1.1 asttokens==2.0.5 # via stack-data async-timeout==4.0.2 - # via aiohttp + # via + # aiohttp + # redis 
attrs==21.4.0 # via # aiohttp @@ -54,35 +52,35 @@ attrs==21.4.0 # pytest avro==1.10.0 # via feast (setup.py) -azure-core==1.23.0 +azure-core==1.23.1 # via # adlfs # azure-identity # azure-storage-blob azure-datalake-store==0.0.52 # via adlfs -azure-identity==1.8.0 +azure-identity==1.10.0 # via adlfs -azure-storage-blob==12.9.0 +azure-storage-blob==12.11.0 # via adlfs -babel==2.9.1 +babel==2.10.1 # via sphinx backcall==0.2.0 # via ipython black==19.10b0 # via feast (setup.py) -bleach==4.1.0 - # via nbconvert -boto3==1.21.11 +boto3==1.22.8 # via # feast (setup.py) # moto -botocore==1.24.11 +botocore==1.25.8 # via # boto3 # moto # s3transfer -cachecontrol==0.12.10 +build==0.7.0 + # via feast (setup.py) +cachecontrol==0.12.11 # via firebase-admin cachetools==4.2.4 # via google-auth @@ -94,7 +92,6 @@ certifi==2021.10.8 # snowflake-connector-python cffi==1.15.0 # via - # argon2-cffi-bindings # azure-datalake-store # cryptography # snowflake-connector-python @@ -105,7 +102,7 @@ charset-normalizer==2.0.12 # aiohttp # requests # snowflake-connector-python -click==8.0.4 +click==8.0.1 # via # black # feast (setup.py) @@ -115,30 +112,32 @@ click==8.0.4 cloudpickle==2.0.0 # via dask colorama==0.4.4 - # via feast (setup.py) + # via + # feast (setup.py) + # great-expectations coverage[toml]==6.3.2 # via pytest-cov -cryptography==3.3.2 +cryptography==3.4.8 # via # adal # azure-identity # azure-storage-blob # feast (setup.py) + # great-expectations # moto # msal - # pyjwt # pyopenssl # snowflake-connector-python dask==2022.1.1 # via feast (setup.py) -debugpy==1.5.1 - # via ipykernel +dataclasses==0.6 + # via great-expectations decorator==5.1.1 # via # gcsfs # ipython -defusedxml==0.7.1 - # via nbconvert +deprecated==1.2.13 + # via redis deprecation==2.1.0 # via testcontainers dill==0.3.4 @@ -154,20 +153,19 @@ docutils==0.17.1 # sphinx # sphinx-rtd-theme entrypoints==0.4 - # via - # altair - # jupyter-client - # nbconvert + # via altair execnet==1.9.0 # via pytest-xdist 
executing==0.8.3 # via stack-data -fastapi==0.74.1 +fastapi==0.76.0 # via feast (setup.py) -fastavro==1.4.9 +fastavro==1.4.11 # via # feast (setup.py) # pandavro +fastjsonschema==2.15.3 + # via nbformat filelock==3.6.0 # via virtualenv firebase-admin==4.5.2 @@ -178,12 +176,12 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal -fsspec==2022.2.0 +fsspec==2022.3.0 # via # adlfs # dask # gcsfs -gcsfs==2022.2.0 +gcsfs==2022.3.0 # via feast (setup.py) google-api-core[grpc]==1.31.5 # via @@ -195,7 +193,7 @@ google-api-core[grpc]==1.31.5 # google-cloud-core # google-cloud-datastore # google-cloud-firestore -google-api-python-client==2.39.0 +google-api-python-client==2.47.0 # via firebase-admin google-auth==1.35.0 # via @@ -208,11 +206,11 @@ google-auth==1.35.0 # google-cloud-storage google-auth-httplib2==0.1.0 # via google-api-python-client -google-auth-oauthlib==0.5.0 +google-auth-oauthlib==0.5.1 # via gcsfs -google-cloud-bigquery==2.34.1 +google-cloud-bigquery==2.34.3 # via feast (setup.py) -google-cloud-bigquery-storage==2.12.0 +google-cloud-bigquery-storage==2.13.1 # via feast (setup.py) google-cloud-core==1.7.2 # via @@ -221,9 +219,9 @@ google-cloud-core==1.7.2 # google-cloud-datastore # google-cloud-firestore # google-cloud-storage -google-cloud-datastore==2.5.0 +google-cloud-datastore==2.5.1 # via feast (setup.py) -google-cloud-firestore==2.3.4 +google-cloud-firestore==2.4.0 # via firebase-admin google-cloud-storage==1.40.0 # via @@ -241,9 +239,9 @@ googleapis-common-protos==1.52.0 # feast (setup.py) # google-api-core # tensorflow-metadata -great-expectations==0.14.8 +great-expectations==0.14.13 # via feast (setup.py) -grpcio==1.44.0 +grpcio==1.46.0 # via # feast (setup.py) # google-api-core @@ -251,7 +249,7 @@ grpcio==1.44.0 # grpcio-reflection # grpcio-testing # grpcio-tools -grpcio-reflection==1.44.0 +grpcio-reflection==1.46.0 # via feast (setup.py) grpcio-testing==1.44.0 # via feast (setup.py) @@ -259,15 +257,17 @@ grpcio-tools==1.44.0 # via feast (setup.py) 
h11==0.13.0 # via uvicorn +happybase==1.2.0 + # via feast (setup.py) hiredis==2.0.0 # via feast (setup.py) httplib2==0.20.4 # via # google-api-python-client # google-auth-httplib2 -httptools==0.3.0 +httptools==0.4.0 # via uvicorn -identify==2.4.11 +identify==2.5.0 # via pre-commit idna==3.3 # via @@ -277,24 +277,11 @@ idna==3.3 # yarl imagesize==1.3.0 # via sphinx -importlib-metadata==4.11.2 +importlib-metadata==4.11.3 # via great-expectations iniconfig==1.1.1 # via pytest -ipykernel==6.9.1 - # via - # ipywidgets - # notebook -ipython==8.1.1 - # via - # ipykernel - # ipywidgets -ipython-genutils==0.2.0 - # via - # ipywidgets - # nbformat - # notebook -ipywidgets==7.6.5 +ipython==8.3.0 # via great-expectations isodate==0.6.1 # via msrest @@ -308,67 +295,48 @@ jinja2==3.0.3 # feast (setup.py) # great-expectations # moto - # nbconvert - # notebook # sphinx -jmespath==0.10.0 +jmespath==1.0.0 # via # boto3 # botocore jsonpatch==1.32 # via great-expectations -jsonpointer==2.2 +jsonpointer==2.3 # via jsonpatch -jsonschema==4.4.0 +jsonschema==4.5.1 # via # altair # feast (setup.py) # great-expectations # nbformat -jupyter-client==7.1.2 - # via - # ipykernel - # nbclient - # notebook -jupyter-core==4.9.2 - # via - # jupyter-client - # nbconvert - # nbformat - # notebook -jupyterlab-pygments==0.1.2 - # via nbconvert -jupyterlab-widgets==1.0.2 - # via ipywidgets -locket==0.2.1 +jupyter-core==4.10.0 + # via nbformat +locket==1.0.0 # via partd -markupsafe==2.1.0 +markupsafe==2.1.1 # via # jinja2 # moto matplotlib-inline==0.1.3 - # via - # ipykernel - # ipython + # via ipython mccabe==0.6.1 # via flake8 minio==7.1.0 # via feast (setup.py) -mistune==0.8.4 - # via - # great-expectations - # nbconvert +mistune==2.0.2 + # via great-expectations mmh3==3.0.0 # via feast (setup.py) mock==2.0.0 # via feast (setup.py) -moto==3.0.5 +moto==3.1.7 # via feast (setup.py) msal==1.17.0 # via # azure-identity # msal-extensions -msal-extensions==0.3.1 +msal-extensions==1.0.0 # via azure-identity 
msgpack==1.0.3 # via cachecontrol @@ -386,31 +354,16 @@ mypy==0.931 # via feast (setup.py) mypy-extensions==0.4.3 # via mypy -mypy-protobuf==3.1.0 - # via feast (setup.py) -nbclient==0.5.11 - # via nbconvert -nbconvert==6.4.2 - # via notebook -nbformat==5.1.3 - # via - # ipywidgets - # nbclient - # nbconvert - # notebook -nest-asyncio==1.5.4 - # via - # ipykernel - # jupyter-client - # nbclient - # notebook +mypy-protobuf==3.1 + # via feast (setup.py) +nbformat==5.4.0 + # via great-expectations nodeenv==1.6.0 # via pre-commit -notebook==6.4.10 - # via widgetsnbextension -numpy==1.22.2 +numpy==1.21.6 # via # altair + # feast (setup.py) # great-expectations # pandas # pandavro @@ -418,19 +371,20 @@ numpy==1.22.2 # scipy oauthlib==3.2.0 # via requests-oauthlib -oscrypto==1.2.1 +oscrypto==1.3.0 # via snowflake-connector-python packaging==21.3 # via - # bleach + # build # dask # deprecation # google-api-core # google-cloud-bigquery - # google-cloud-firestore + # great-expectations # pytest + # redis # sphinx -pandas==1.3.5 +pandas==1.4.2 # via # altair # feast (setup.py) @@ -439,35 +393,35 @@ pandas==1.3.5 # snowflake-connector-python pandavro==1.5.2 # via feast (setup.py) -pandocfilters==1.5.0 - # via nbconvert parso==0.8.3 # via jedi partd==1.2.0 # via dask pathspec==0.9.0 # via black -pbr==5.8.1 +pbr==5.9.0 # via mock pep517==0.12.0 - # via pip-tools + # via + # build + # pip-tools pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==6.5.1 +pip-tools==6.6.0 # via feast (setup.py) -platformdirs==2.5.1 +platformdirs==2.5.2 # via virtualenv pluggy==1.0.0 # via pytest +ply==3.11 + # via thriftpy2 portalocker==2.4.0 # via msal-extensions -pre-commit==2.17.0 +pre-commit==2.19.0 # via feast (setup.py) -prometheus-client==0.13.1 - # via notebook -prompt-toolkit==3.0.28 +prompt-toolkit==3.0.29 # via ipython proto-plus==1.19.6 # via @@ -490,10 +444,10 @@ protobuf==3.19.4 # tensorflow-metadata psutil==5.9.0 # via feast (setup.py) +psycopg2-binary==2.9.3 + # 
via feast (setup.py) ptyprocess==0.7.0 - # via - # pexpect - # terminado + # via pexpect pure-eval==0.2.2 # via stack-data py==1.11.0 @@ -514,7 +468,7 @@ pyasn1==0.4.8 # rsa pyasn1-modules==0.2.8 # via google-auth -pybindgen==0.22.0 +pybindgen==0.22.1 # via feast (setup.py) pycodestyle==2.8.0 # via flake8 @@ -528,11 +482,9 @@ pydantic==1.9.0 # feast (setup.py) pyflakes==2.4.0 # via flake8 -pygments==2.11.2 +pygments==2.12.0 # via # ipython - # jupyterlab-pygments - # nbconvert # sphinx pyjwt[crypto]==2.3.0 # via @@ -550,7 +502,7 @@ pyrsistent==0.18.1 # via jsonschema pyspark==3.2.1 # via feast (setup.py) -pytest==7.0.1 +pytest==7.1.2 # via # feast (setup.py) # pytest-benchmark @@ -583,12 +535,11 @@ python-dateutil==2.8.2 # botocore # google-cloud-bigquery # great-expectations - # jupyter-client # moto # pandas -python-dotenv==0.19.2 +python-dotenv==0.20.0 # via uvicorn -pytz==2021.3 +pytz==2022.1 # via # babel # google-api-core @@ -596,6 +547,7 @@ pytz==2021.3 # moto # pandas # snowflake-connector-python + # trino pytz-deprecation-shim==0.1.0.post0 # via tzlocal pyyaml==6.0 @@ -604,17 +556,9 @@ pyyaml==6.0 # feast (setup.py) # pre-commit # uvicorn -pyzmq==22.3.0 - # via - # jupyter-client - # notebook -redis==3.5.3 - # via - # feast (setup.py) - # redis-py-cluster -redis-py-cluster==2.1.3 +redis==4.2.2 # via feast (setup.py) -regex==2022.3.2 +regex==2022.4.24 # via black requests==2.27.1 # via @@ -636,39 +580,35 @@ requests==2.27.1 # responses # snowflake-connector-python # sphinx + # trino requests-oauthlib==1.3.1 # via # google-auth-oauthlib # msrest -responses==0.18.0 +responses==0.20.0 # via moto rsa==4.8 # via google-auth -ruamel.yaml==0.17.17 +ruamel-yaml==0.17.17 # via great-expectations -ruamel.yaml.clib==0.2.6 - # via ruamel.yaml +ruamel-yaml-clib==0.2.6 + # via ruamel-yaml s3transfer==0.5.2 # via boto3 scipy==1.8.0 # via great-expectations -send2trash==1.8.0 - # via notebook six==1.16.0 # via # absl-py - # asttokens # azure-core # azure-identity - # 
bleach - # cryptography # google-api-core # google-auth # google-auth-httplib2 # google-cloud-core # google-resumable-media # grpcio - # isodate + # happybase # mock # msrestazure # pandavro @@ -679,7 +619,7 @@ sniffio==1.2.0 # via anyio snowballstemmer==2.2.0 # via sphinx -snowflake-connector-python[pandas]==2.7.4 +snowflake-connector-python[pandas]==2.7.7 # via feast (setup.py) sphinx==4.3.2 # via @@ -701,7 +641,7 @@ sphinxcontrib-serializinghtml==1.1.5 # via sphinx stack-data==0.2.0 # via ipython -starlette==0.17.1 +starlette==0.18.0 # via fastapi tabulate==0.8.9 # via feast (setup.py) @@ -711,12 +651,10 @@ tensorflow-metadata==1.7.0 # via feast (setup.py) termcolor==1.1.0 # via great-expectations -terminado==0.13.2 - # via notebook -testcontainers==3.4.2 +testcontainers==3.5.3 # via feast (setup.py) -testpath==0.6.0 - # via nbconvert +thriftpy2==0.4.14 + # via happybase toml==0.10.2 # via # black @@ -724,6 +662,7 @@ toml==0.10.2 # pre-commit tomli==2.0.1 # via + # build # coverage # mypy # pep517 @@ -733,98 +672,88 @@ toolz==0.11.2 # altair # dask # partd -tornado==6.1 - # via - # ipykernel - # jupyter-client - # notebook - # terminado -tqdm==4.63.0 +tqdm==4.64.0 # via # feast (setup.py) # great-expectations traitlets==5.1.1 # via - # ipykernel # ipython - # ipywidgets - # jupyter-client # jupyter-core # matplotlib-inline - # nbclient - # nbconvert # nbformat - # notebook -typed-ast==1.5.2 +trino==0.313.0 + # via feast (setup.py) +typed-ast==1.5.3 # via black -types-protobuf==3.19.12 +types-protobuf==3.19.18 # via # feast (setup.py) # mypy-protobuf -types-python-dateutil==2.8.9 +types-python-dateutil==2.8.14 # via feast (setup.py) -types-pytz==2021.3.5 +types-pytz==2021.3.7 # via feast (setup.py) -types-pyyaml==6.0.4 +types-pyyaml==6.0.7 # via feast (setup.py) -types-redis==4.1.17 +types-redis==4.2.1 # via feast (setup.py) -types-requests==2.27.11 +types-requests==2.27.25 # via feast (setup.py) -types-setuptools==57.4.9 +types-setuptools==57.4.14 # via feast 
(setup.py) -types-tabulate==0.8.5 +types-tabulate==0.8.8 # via feast (setup.py) -types-urllib3==1.26.10 +types-urllib3==1.26.14 # via types-requests -typing-extensions==4.1.1 +typing-extensions==4.2.0 # via # azure-core # great-expectations # mypy # pydantic -tzdata==2021.5 + # starlette +tzdata==2022.1 # via pytz-deprecation-shim -tzlocal==4.1 +tzlocal==4.2 # via great-expectations uritemplate==4.1.1 # via google-api-python-client -urllib3==1.26.8 +urllib3==1.26.9 # via # botocore # feast (setup.py) + # great-expectations # minio # requests # responses -uvicorn[standard]==0.17.5 +uvicorn[standard]==0.17.6 # via feast (setup.py) uvloop==0.16.0 # via uvicorn -virtualenv==20.13.2 +virtualenv==20.14.1 # via pre-commit -watchgod==0.7 +watchgod==0.8.2 # via uvicorn wcwidth==0.2.5 # via prompt-toolkit -webencodings==0.5.1 - # via bleach -websocket-client==1.3.1 +websocket-client==1.3.2 # via docker -websockets==10.2 +websockets==10.3 # via uvicorn -werkzeug==2.0.3 +werkzeug==2.1.2 # via moto wheel==0.37.1 # via pip-tools -widgetsnbextension==3.5.2 - # via ipywidgets -wrapt==1.13.3 - # via testcontainers +wrapt==1.14.1 + # via + # deprecated + # testcontainers xmltodict==0.12.0 # via moto yarl==1.7.2 # via aiohttp -zipp==3.7.0 +zipp==3.8.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: diff --git a/sdk/python/requirements/py3.9-requirements.txt b/sdk/python/requirements/py3.9-requirements.txt index 67ef8ada9e0..0ad985b828e 100644 --- a/sdk/python/requirements/py3.9-requirements.txt +++ b/sdk/python/requirements/py3.9-requirements.txt @@ -2,13 +2,15 @@ # This file is autogenerated by pip-compile with python 3.9 # To update, run: # -# pip-compile --output-file=requirements/py3.9-requirements.txt +# pip-compile --output-file=sdk/python/requirements/py3.9-requirements.txt # absl-py==1.0.0 # via tensorflow-metadata anyio==3.5.0 - # via starlette -asgiref==3.4.1 + # via + # starlette + # watchgod +asgiref==3.5.1 # via 
uvicorn attrs==21.4.0 # via jsonschema @@ -16,9 +18,9 @@ cachetools==4.2.4 # via google-auth certifi==2021.10.8 # via requests -charset-normalizer==2.0.10 +charset-normalizer==2.0.12 # via requests -click==8.0.3 +click==8.0.1 # via # feast (setup.py) # uvicorn @@ -30,55 +32,58 @@ dask==2022.1.1 # via feast (setup.py) dill==0.3.4 # via feast (setup.py) -fastapi==0.72.0 +fastapi==0.76.0 # via feast (setup.py) -fastavro==1.4.9 +fastavro==1.4.11 # via # feast (setup.py) # pandavro -fsspec==2022.2.0 +fsspec==2022.3.0 # via dask -google-api-core==2.4.0 +google-api-core==1.31.5 # via feast (setup.py) -google-auth==2.3.3 +google-auth==1.35.0 # via google-api-core googleapis-common-protos==1.52.0 # via # feast (setup.py) # google-api-core # tensorflow-metadata -grpcio==1.43.0 +grpcio==1.46.0 # via # feast (setup.py) # grpcio-reflection -grpcio-reflection==1.43.0 +grpcio-reflection==1.46.0 # via feast (setup.py) h11==0.13.0 # via uvicorn -httptools==0.3.0 +httptools==0.4.0 # via uvicorn idna==3.3 # via # anyio # requests -jinja2==3.0.3 +jinja2==3.1.2 # via feast (setup.py) -jsonschema==4.4.0 +jsonschema==4.5.1 # via feast (setup.py) -locket==0.2.1 +locket==1.0.0 # via partd -markupsafe==2.0.1 +markupsafe==2.1.1 # via jinja2 mmh3==3.0.0 # via feast (setup.py) -numpy==1.21.5 +numpy==1.21.6 # via + # feast (setup.py) # pandas # pandavro # pyarrow packaging==21.3 - # via dask -pandas==1.3.5 + # via + # dask + # google-api-core +pandas==1.4.2 # via # feast (setup.py) # pandavro @@ -88,7 +93,7 @@ partd==1.2.0 # via dask proto-plus==1.19.6 # via feast (setup.py) -protobuf==3.19.3 +protobuf==3.19.4 # via # feast (setup.py) # google-api-core @@ -108,16 +113,18 @@ pydantic==1.9.0 # via # fastapi # feast (setup.py) -pyparsing==3.0.7 +pyparsing==3.0.8 # via packaging pyrsistent==0.18.1 # via jsonschema python-dateutil==2.8.2 # via pandas -python-dotenv==0.19.2 +python-dotenv==0.20.0 # via uvicorn -pytz==2021.3 - # via pandas +pytz==2022.1 + # via + # google-api-core + # pandas 
pyyaml==6.0 # via # dask @@ -130,19 +137,20 @@ rsa==4.8 six==1.16.0 # via # absl-py + # google-api-core # google-auth # grpcio # pandavro # python-dateutil sniffio==1.2.0 # via anyio -starlette==0.17.1 +starlette==0.18.0 # via fastapi tabulate==0.8.9 # via feast (setup.py) tenacity==8.0.1 # via feast (setup.py) -tensorflow-metadata==1.6.0 +tensorflow-metadata==1.7.0 # via feast (setup.py) toml==0.10.2 # via feast (setup.py) @@ -150,19 +158,21 @@ toolz==0.11.2 # via # dask # partd -tqdm==4.62.3 +tqdm==4.64.0 # via feast (setup.py) -typing-extensions==4.0.1 - # via pydantic -urllib3==1.26.8 +typing-extensions==4.2.0 + # via + # pydantic + # starlette +urllib3==1.26.9 # via requests -uvicorn[standard]==0.17.0 +uvicorn[standard]==0.17.6 # via feast (setup.py) uvloop==0.16.0 # via uvicorn -watchgod==0.7 +watchgod==0.8.2 # via uvicorn -websockets==10.1 +websockets==10.3 # via uvicorn # The following packages are considered to be unsafe in a requirements file: diff --git a/sdk/python/tests/benchmarks/test_benchmark_universal_online_retrieval.py b/sdk/python/tests/benchmarks/test_benchmark_universal_online_retrieval.py index a29383a5c9c..6e22c93e5f6 100644 --- a/sdk/python/tests/benchmarks/test_benchmark_universal_online_retrieval.py +++ b/sdk/python/tests/benchmarks/test_benchmark_universal_online_retrieval.py @@ -17,6 +17,7 @@ @pytest.mark.benchmark @pytest.mark.integration +@pytest.mark.universal_online_stores def test_online_retrieval(environment, universal_data_sources, benchmark): fs = environment.feature_store entities, datasets, data_sources = universal_data_sources diff --git a/sdk/python/tests/conftest.py b/sdk/python/tests/conftest.py index c72a3af7543..d492c7ba845 100644 --- a/sdk/python/tests/conftest.py +++ b/sdk/python/tests/conftest.py @@ -13,33 +13,34 @@ # limitations under the License. 
import logging import multiprocessing -import pathlib -import time +import socket +from contextlib import closing from datetime import datetime, timedelta from multiprocessing import Process from sys import platform -from typing import List +from typing import Any, Dict, List import pandas as pd import pytest from _pytest.nodes import Item -from testcontainers.core.container import DockerContainer -from testcontainers.core.waiting_utils import wait_for_logs from feast import FeatureStore +from feast.wait import wait_retry_backoff from tests.data.data_creator import create_dataset from tests.integration.feature_repos.integration_test_repo_config import ( IntegrationTestRepoConfig, ) from tests.integration.feature_repos.repo_configuration import ( - FULL_REPO_CONFIGS, - REDIS_CLUSTER_CONFIG, - REDIS_CONFIG, + AVAILABLE_OFFLINE_STORES, + AVAILABLE_ONLINE_STORES, Environment, TestData, construct_test_environment, construct_universal_test_data, ) +from tests.integration.feature_repos.universal.data_sources.file import ( + FileDataSourceCreator, +) logger = logging.getLogger(__name__) @@ -53,9 +54,6 @@ def pytest_configure(config): "markers", "integration: mark test that has external dependencies" ) config.addinivalue_line("markers", "benchmark: mark benchmarking tests") - config.addinivalue_line( - "markers", "universal: mark tests that use the universal feature repo" - ) config.addinivalue_line( "markers", "goserver: mark tests that use the go feature server" ) @@ -71,9 +69,6 @@ def pytest_addoption(parser): parser.addoption( "--benchmark", action="store_true", default=False, help="Run benchmark tests", ) - parser.addoption( - "--universal", action="store_true", default=False, help="Run universal tests", - ) parser.addoption( "--goserver", action="store_true", @@ -85,7 +80,6 @@ def pytest_addoption(parser): def pytest_collection_modifyitems(config, items: List[Item]): should_run_integration = config.getoption("--integration") is True should_run_benchmark = 
config.getoption("--benchmark") is True - should_run_universal = config.getoption("--universal") is True should_run_goserver = config.getoption("--goserver") is True integration_tests = [t for t in items if "integration" in t.keywords] @@ -106,12 +100,6 @@ def pytest_collection_modifyitems(config, items: List[Item]): for t in benchmark_tests: items.append(t) - universal_tests = [t for t in items if "universal" in t.keywords] - if should_run_universal: - items.clear() - for t in universal_tests: - items.append(t) - goserver_tests = [t for t in items if "goserver" in t.keywords] if should_run_goserver: items.clear() @@ -164,120 +152,168 @@ def start_test_local_server(repo_path: str, port: int): fs.serve("localhost", port, no_access_log=True) -class TrinoContainerSingleton: - current_file = pathlib.Path(__file__).resolve() - catalog_dir = current_file.parent.joinpath( - "integration/feature_repos/universal/data_sources/catalog" +@pytest.fixture(scope="session") +def environment(request, worker_id): + e = construct_test_environment( + request.param, worker_id=worker_id, fixture_request=request ) - container = None - is_running = False - - @classmethod - def get_singleton(cls): - if not cls.is_running: - cls.container = ( - DockerContainer("trinodb/trino:376") - .with_volume_mapping(cls.catalog_dir, "/etc/catalog/") - .with_exposed_ports("8080") - ) - cls.container.start() - log_string_to_wait_for = "SERVER STARTED" - wait_for_logs( - container=cls.container, predicate=log_string_to_wait_for, timeout=30 + yield e + + e.feature_store.teardown() + e.data_source_creator.teardown() + if e.online_store_creator: + e.online_store_creator.teardown() + + +_config_cache = {} + + +def pytest_generate_tests(metafunc: pytest.Metafunc): + """ + This function receives each test function (wrapped in Metafunc) + at the collection stage (before tests started). + Here we can access all fixture requests made by the test as well as its markers. 
+ That allows us to dynamically parametrize the test based on markers and fixtures + by calling metafunc.parametrize(...). + + See more examples at https://docs.pytest.org/en/6.2.x/example/parametrize.html#paramexamples + + We also utilize indirect parametrization here. Since `environment` is a fixture, + when we call metafunc.parametrize("environment", ..., indirect=True) we actually + parametrizing this "environment" fixture and not the test itself. + Moreover, by utilizing `_config_cache` we are able to share `environment` fixture between different tests. + In order for pytest to group tests together (and share environment fixture) + parameter should point to the same Python object (hence, we use _config_cache dict to store those objects). + """ + if "environment" in metafunc.fixturenames: + markers = {m.name: m for m in metafunc.definition.own_markers} + + if "universal_offline_stores" in markers: + offline_stores = AVAILABLE_OFFLINE_STORES + else: + # default offline store for testing online store dimension + offline_stores = [("local", FileDataSourceCreator)] + + online_stores = None + if "universal_online_stores" in markers: + # Online stores are explicitly requested + if "only" in markers["universal_online_stores"].kwargs: + online_stores = [ + AVAILABLE_ONLINE_STORES.get(store_name) + for store_name in markers["universal_online_stores"].kwargs["only"] + if store_name in AVAILABLE_ONLINE_STORES + ] + else: + online_stores = AVAILABLE_ONLINE_STORES.values() + + if online_stores is None: + # No online stores requested -> setting the default or first available + online_stores = [ + AVAILABLE_ONLINE_STORES.get( + "redis", + AVAILABLE_ONLINE_STORES.get( + "sqlite", next(iter(AVAILABLE_ONLINE_STORES.values())) + ), + ) + ] + + extra_dimensions: List[Dict[str, Any]] = [{}] + + if "python_server" in metafunc.fixturenames: + extra_dimensions.extend( + [ + {"python_feature_server": True}, + {"python_feature_server": True, "provider": "aws"}, + ] ) - cls.is_running = 
True - return cls.container - @classmethod - def teardown(cls): - if cls.container: - cls.container.stop() + if "goserver" in markers: + extra_dimensions.append({"go_feature_retrieval": True}) + + configs = [] + for provider, offline_store_creator in offline_stores: + for online_store, online_store_creator in online_stores: + for dim in extra_dimensions: + config = { + "provider": provider, + "offline_store_creator": offline_store_creator, + "online_store": online_store, + "online_store_creator": online_store_creator, + **dim, + } + # temporary Go works only with redis + if config.get("go_feature_retrieval") and ( + not isinstance(online_store, dict) + or online_store["type"] != "redis" + ): + continue + + # aws lambda works only with dynamo + if ( + config.get("python_feature_server") + and config.get("provider") == "aws" + and ( + not isinstance(online_store, dict) + or online_store["type"] != "dynamodb" + ) + ): + continue + + c = IntegrationTestRepoConfig(**config) + + if c not in _config_cache: + _config_cache[c] = c + + configs.append(_config_cache[c]) + + metafunc.parametrize( + "environment", configs, indirect=True, ids=[str(c) for c in configs] + ) -@pytest.fixture( - params=FULL_REPO_CONFIGS, scope="session", ids=[str(c) for c in FULL_REPO_CONFIGS] -) -def environment(request, worker_id: str): - if "TrinoSourceCreator" in request.param.offline_store_creator.__name__: - e = construct_test_environment( - request.param, - worker_id=worker_id, - offline_container=TrinoContainerSingleton.get_singleton(), - ) - else: - e = construct_test_environment(request.param, worker_id=worker_id) +@pytest.fixture(scope="session") +def python_server(environment): proc = Process( target=start_test_local_server, - args=(e.feature_store.repo_path, e.get_local_server_port()), + args=(environment.feature_store.repo_path, environment.get_local_server_port()), daemon=True, ) - if e.python_feature_server and e.test_repo_config.provider == "local": + if ( + 
environment.python_feature_server + and environment.test_repo_config.provider == "local" + ): proc.start() # Wait for server to start - time.sleep(3) - - def cleanup(): - e.feature_store.teardown() - if proc.is_alive(): - proc.kill() - if e.online_store_creator: - e.online_store_creator.teardown() - - request.addfinalizer(cleanup) - - return e + wait_retry_backoff( + lambda: ( + None, + _check_port_open("localhost", environment.get_local_server_port()), + ), + timeout_secs=10, + ) + yield -@pytest.fixture( - params=[REDIS_CONFIG, REDIS_CLUSTER_CONFIG], - scope="session", - ids=[str(c) for c in [REDIS_CONFIG, REDIS_CLUSTER_CONFIG]], -) -def local_redis_environment(request, worker_id): - e = construct_test_environment( - IntegrationTestRepoConfig(online_store=request.param), worker_id=worker_id - ) + if proc.is_alive(): + proc.kill() - def cleanup(): - e.feature_store.teardown() - request.addfinalizer(cleanup) - return e +def _check_port_open(host, port) -> bool: + with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock: + return sock.connect_ex((host, port)) == 0 @pytest.fixture(scope="session") -def universal_data_sources(request, environment) -> TestData: - def cleanup(): - # logger.info("Running cleanup in %s, Request: %s", worker_id, request.param) - environment.data_source_creator.teardown() - - request.addfinalizer(cleanup) +def universal_data_sources(environment) -> TestData: return construct_universal_test_data(environment) @pytest.fixture(scope="session") -def redis_universal_data_sources(request, local_redis_environment): - def cleanup(): - # logger.info("Running cleanup in %s, Request: %s", worker_id, request.param) - local_redis_environment.data_source_creator.teardown() - - request.addfinalizer(cleanup) - return construct_universal_test_data(local_redis_environment) - - -@pytest.fixture(scope="session") -def e2e_data_sources(environment: Environment, request): +def e2e_data_sources(environment: Environment): df = create_dataset() 
data_source = environment.data_source_creator.create_data_source( df, environment.feature_store.project, field_mapping={"ts_1": "ts"}, ) - def cleanup(): - environment.data_source_creator.teardown() - if environment.online_store_creator: - environment.online_store_creator.teardown() - - request.addfinalizer(cleanup) - return df, data_source diff --git a/sdk/python/tests/example_repos/example_feature_repo_1.py b/sdk/python/tests/example_repos/example_feature_repo_1.py index 76b42b22416..d8b6d7c89b1 100644 --- a/sdk/python/tests/example_repos/example_feature_repo_1.py +++ b/sdk/python/tests/example_repos/example_feature_repo_1.py @@ -40,26 +40,19 @@ ) driver_locations_push_source = PushSource( - name="driver_locations_push", - schema=[ - Field(name="driver_id", dtype=String), - Field(name="driver_lat", dtype=Float32), - Field(name="driver_long", dtype=String), - ], - batch_source=driver_locations_source, - timestamp_field="event_timestamp", + name="driver_locations_push", batch_source=driver_locations_source, ) driver = Entity( name="driver", # The name is derived from this argument, not object name. - join_key="driver_id", + join_keys=["driver_id"], value_type=ValueType.INT64, description="driver id", ) customer = Entity( name="customer", # The name is derived from this argument, not object name. 
- join_key="customer_id", + join_keys=["customer_id"], value_type=ValueType.STRING, ) diff --git a/sdk/python/tests/example_repos/example_feature_repo_2.py b/sdk/python/tests/example_repos/example_feature_repo_2.py index 1ca7cc3805c..d4c7976418b 100644 --- a/sdk/python/tests/example_repos/example_feature_repo_2.py +++ b/sdk/python/tests/example_repos/example_feature_repo_2.py @@ -9,7 +9,7 @@ created_timestamp_column="created", ) -driver = Entity(name="driver_id", value_type=ValueType.INT64, description="driver id",) +driver = Entity(name="driver_id", value_type=ValueType.INT64, description="driver id") driver_hourly_stats_view = FeatureView( @@ -22,7 +22,7 @@ Field(name="avg_daily_trips", dtype=Int64), ], online=True, - batch_source=driver_hourly_stats, + source=driver_hourly_stats, tags={}, ) @@ -43,6 +43,6 @@ Field(name="avg_ride_length", dtype=Float32), ], online=True, - batch_source=global_daily_stats, + source=global_daily_stats, tags={}, ) diff --git a/sdk/python/tests/example_repos/example_feature_repo_version_0_19.py b/sdk/python/tests/example_repos/example_feature_repo_version_0_19.py new file mode 100644 index 00000000000..e00a69b867a --- /dev/null +++ b/sdk/python/tests/example_repos/example_feature_repo_version_0_19.py @@ -0,0 +1,52 @@ +from datetime import timedelta + +from feast import Entity, Feature, FeatureView, FileSource, ValueType + +driver_hourly_stats = FileSource( + path="%PARQUET_PATH%", # placeholder to be replaced by the test + event_timestamp_column="event_timestamp", # Changed to `timestamp_field` in 0.20 + created_timestamp_column="created", +) + +driver = Entity( + name="driver_id", + value_type=ValueType.INT64, + description="driver id", + join_key="driver_id", # Changed to `join_keys` in 0.20 +) + + +driver_hourly_stats_view = FeatureView( + name="driver_hourly_stats", + entities=["driver_id"], + ttl=timedelta(days=1), + features=[ # Changed to `schema` in 0.20 + Feature(name="conv_rate", dtype=ValueType.FLOAT), # Changed to `Field` 
in 0.20 + Feature(name="acc_rate", dtype=ValueType.FLOAT), + Feature(name="avg_daily_trips", dtype=ValueType.INT64), + ], + online=True, + batch_source=driver_hourly_stats, # Changed to `source` in 0.20 + tags={}, +) + + +global_daily_stats = FileSource( + path="%PARQUET_PATH_GLOBAL%", # placeholder to be replaced by the test + event_timestamp_column="event_timestamp", # Changed to `timestamp_field` in 0.20 + created_timestamp_column="created", +) + + +global_stats_feature_view = FeatureView( + name="global_daily_stats", + entities=[], + ttl=timedelta(days=1), + features=[ # Changed to `schema` in 0.20 + Feature(name="num_rides", dtype=ValueType.INT32), # Changed to `Field` in 0.20 + Feature(name="avg_ride_length", dtype=ValueType.FLOAT), + ], + online=True, + batch_source=global_daily_stats, # Changed to `source` in 0.20 + tags={}, +) diff --git a/sdk/python/tests/example_repos/example_feature_repo_with_duplicated_featureview_names.py b/sdk/python/tests/example_repos/example_feature_repo_with_duplicated_featureview_names.py index 20ff666bd9c..cbcc3ad172b 100644 --- a/sdk/python/tests/example_repos/example_feature_repo_with_duplicated_featureview_names.py +++ b/sdk/python/tests/example_repos/example_feature_repo_with_duplicated_featureview_names.py @@ -10,7 +10,7 @@ name="driver_hourly_stats", # Intentionally use the same FeatureView name entities=["driver_id"], online=False, - batch_source=driver_hourly_stats, + source=driver_hourly_stats, ttl=timedelta(days=1), tags={}, ) @@ -19,7 +19,7 @@ name="driver_hourly_stats", # Intentionally use the same FeatureView name entities=["driver_id"], online=False, - batch_source=driver_hourly_stats, + source=driver_hourly_stats, ttl=timedelta(days=1), tags={}, ) diff --git a/sdk/python/tests/example_repos/example_feature_repo_with_entity_join_key.py b/sdk/python/tests/example_repos/example_feature_repo_with_entity_join_key.py index 3e1bbbba779..5ba26d2573b 100644 --- 
a/sdk/python/tests/example_repos/example_feature_repo_with_entity_join_key.py +++ b/sdk/python/tests/example_repos/example_feature_repo_with_entity_join_key.py @@ -15,7 +15,7 @@ name="driver_id", value_type=ValueType.INT64, description="driver id", - join_key="driver", + join_keys=["driver"], ) @@ -29,6 +29,6 @@ Field(name="avg_daily_trips", dtype=Int64), ], online=True, - batch_source=driver_hourly_stats, + source=driver_hourly_stats, tags={}, ) diff --git a/sdk/python/tests/example_repos/example_feature_repo_with_ttl_0.py b/sdk/python/tests/example_repos/example_feature_repo_with_ttl_0.py new file mode 100644 index 00000000000..e2bec03f8f3 --- /dev/null +++ b/sdk/python/tests/example_repos/example_feature_repo_with_ttl_0.py @@ -0,0 +1,48 @@ +from datetime import timedelta + +from feast import Entity, FeatureView, Field, FileSource, ValueType +from feast.types import Float32, Int32, Int64 + +driver_hourly_stats = FileSource( + path="%PARQUET_PATH%", # placeholder to be replaced by the test + timestamp_field="event_timestamp", + created_timestamp_column="created", +) + +driver = Entity(name="driver_id", value_type=ValueType.INT64, description="driver id") + + +driver_hourly_stats_view = FeatureView( + name="driver_hourly_stats", + entities=[driver], + ttl=timedelta(days=0), + schema=[ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int64), + ], + online=True, + source=driver_hourly_stats, + tags={}, +) + + +global_daily_stats = FileSource( + path="%PARQUET_PATH_GLOBAL%", # placeholder to be replaced by the test + timestamp_field="event_timestamp", + created_timestamp_column="created", +) + + +global_stats_feature_view = FeatureView( + name="global_daily_stats", + entities=[], + ttl=timedelta(days=0), + schema=[ + Field(name="num_rides", dtype=Int32), + Field(name="avg_ride_length", dtype=Float32), + ], + online=True, + source=global_daily_stats, + tags={}, +) diff --git 
a/sdk/python/tests/foo_provider.py b/sdk/python/tests/foo_provider.py index 1d4ce7d6cb6..2d61c362734 100644 --- a/sdk/python/tests/foo_provider.py +++ b/sdk/python/tests/foo_provider.py @@ -1,10 +1,12 @@ from datetime import datetime +from pathlib import Path from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import pandas +import pyarrow from tqdm import tqdm -from feast import Entity, FeatureView, RepoConfig +from feast import Entity, FeatureService, FeatureView, RepoConfig from feast.infra.offline_stores.offline_store import RetrievalJob from feast.infra.provider import Provider from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto @@ -79,3 +81,22 @@ def online_read( def retrieve_saved_dataset(self, config: RepoConfig, dataset: SavedDataset): pass + + def write_feature_service_logs( + self, + feature_service: FeatureService, + logs: Union[pyarrow.Table, Path], + config: RepoConfig, + registry: Registry, + ): + pass + + def retrieve_feature_service_logs( + self, + feature_service: FeatureService, + start_date: datetime, + end_date: datetime, + config: RepoConfig, + registry: Registry, + ) -> RetrievalJob: + pass diff --git a/sdk/python/tests/integration/e2e/test_go_feature_server.py b/sdk/python/tests/integration/e2e/test_go_feature_server.py new file mode 100644 index 00000000000..e469c90c11f --- /dev/null +++ b/sdk/python/tests/integration/e2e/test_go_feature_server.py @@ -0,0 +1,227 @@ +import socket +import threading +import time +from contextlib import closing +from datetime import datetime +from typing import List + +import grpc +import pandas as pd +import pytest +import pytz + +from feast import FeatureService, ValueType +from feast.embedded_go.lib.embedded import LoggingOptions +from feast.embedded_go.online_features_service import EmbeddedOnlineFeatureServer +from feast.feast_object import FeastObject +from feast.feature_logging import LoggingConfig +from feast.protos.feast.serving.ServingService_pb2 
import ( + FieldStatus, + GetOnlineFeaturesRequest, + GetOnlineFeaturesResponse, +) +from feast.protos.feast.serving.ServingService_pb2_grpc import ServingServiceStub +from feast.protos.feast.types.Value_pb2 import RepeatedValue +from feast.type_map import python_values_to_proto_values +from feast.wait import wait_retry_backoff +from tests.integration.feature_repos.repo_configuration import ( + construct_universal_feature_views, +) +from tests.integration.feature_repos.universal.entities import ( + customer, + driver, + location, +) + +NANOSECOND = 1 +MILLISECOND = 1000_000 * NANOSECOND +SECOND = 1000 * MILLISECOND + + +@pytest.fixture(scope="session") +def initialized_registry(environment, universal_data_sources): + fs = environment.feature_store + + _, _, data_sources = universal_data_sources + feature_views = construct_universal_feature_views(data_sources) + + feature_service = FeatureService( + name="driver_features", + features=[feature_views.driver], + logging_config=LoggingConfig( + destination=environment.data_source_creator.create_logged_features_destination(), + sample_rate=1.0, + ), + ) + feast_objects: List[FeastObject] = [feature_service] + feast_objects.extend(feature_views.values()) + feast_objects.extend([driver(), customer(), location()]) + + fs.apply(feast_objects) + fs.materialize(environment.start_date, environment.end_date) + + +@pytest.fixture +def grpc_server_port(environment, initialized_registry): + if not environment.test_repo_config.go_feature_retrieval: + pytest.skip("Only for Go path") + + fs = environment.feature_store + + embedded = EmbeddedOnlineFeatureServer( + repo_path=str(fs.repo_path.absolute()), repo_config=fs.config, feature_store=fs, + ) + port = free_port() + + t = threading.Thread( + target=embedded.start_grpc_server, + args=("127.0.0.1", port), + kwargs=dict( + enable_logging=True, + logging_options=LoggingOptions( + ChannelCapacity=100, + WriteInterval=100 * MILLISECOND, + FlushInterval=1 * SECOND, + EmitTimeout=10 * 
MILLISECOND, + ), + ), + ) + t.start() + + wait_retry_backoff( + lambda: (None, check_port_open("127.0.0.1", port)), timeout_secs=15 + ) + + yield port + embedded.stop_grpc_server() + # wait for graceful stop + time.sleep(2) + + +@pytest.fixture +def grpc_client(grpc_server_port): + ch = grpc.insecure_channel(f"localhost:{grpc_server_port}") + yield ServingServiceStub(ch) + + +@pytest.mark.integration +@pytest.mark.goserver +def test_go_grpc_server(grpc_client): + resp: GetOnlineFeaturesResponse = grpc_client.GetOnlineFeatures( + GetOnlineFeaturesRequest( + feature_service="driver_features", + entities={ + "driver_id": RepeatedValue( + val=python_values_to_proto_values( + [5001, 5002], feature_type=ValueType.INT64 + ) + ) + }, + full_feature_names=True, + ) + ) + assert list(resp.metadata.feature_names.val) == [ + "driver_id", + "driver_stats__conv_rate", + "driver_stats__acc_rate", + "driver_stats__avg_daily_trips", + ] + for vector in resp.results: + assert all([s == FieldStatus.PRESENT for s in vector.statuses]) + + +@pytest.mark.integration +@pytest.mark.goserver +@pytest.mark.universal_offline_stores +@pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v)) +def test_feature_logging( + grpc_client, environment, universal_data_sources, full_feature_names +): + fs = environment.feature_store + feature_service = fs.get_feature_service("driver_features") + log_start_date = datetime.now().astimezone(pytz.UTC) + driver_ids = list(range(5001, 5011)) + + for driver_id in driver_ids: + # send each driver id in separate request + grpc_client.GetOnlineFeatures( + GetOnlineFeaturesRequest( + feature_service="driver_features", + entities={ + "driver_id": RepeatedValue( + val=python_values_to_proto_values( + [driver_id], feature_type=ValueType.INT64 + ) + ) + }, + full_feature_names=full_feature_names, + ) + ) + # with some pause + time.sleep(0.1) + + _, datasets, _ = universal_data_sources + latest_rows = get_latest_rows(datasets.driver_df, 
"driver_id", driver_ids) + features = [ + feature.name + for proj in feature_service.feature_view_projections + for feature in proj.features + ] + expected_logs = generate_expected_logs( + latest_rows, "driver_stats", features, ["driver_id"], "event_timestamp" + ) + + def retrieve(): + retrieval_job = fs._get_provider().retrieve_feature_service_logs( + feature_service=feature_service, + start_date=log_start_date, + end_date=datetime.now().astimezone(pytz.UTC), + config=fs.config, + registry=fs._registry, + ) + try: + df = retrieval_job.to_df() + except Exception: + # Table or directory was not created yet + return None, False + + return df, df.shape[0] == len(driver_ids) + + persisted_logs = wait_retry_backoff( + retrieve, timeout_secs=60, timeout_msg="Logs retrieval failed" + ) + + persisted_logs = persisted_logs.sort_values(by="driver_id").reset_index(drop=True) + persisted_logs = persisted_logs[expected_logs.columns] + pd.testing.assert_frame_equal(expected_logs, persisted_logs, check_dtype=False) + + +def free_port(): + sock = socket.socket() + sock.bind(("", 0)) + return sock.getsockname()[1] + + +def check_port_open(host, port) -> bool: + with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock: + return sock.connect_ex((host, port)) == 0 + + +def get_latest_rows(df, join_key, entity_values): + rows = df[df[join_key].isin(entity_values)] + return rows.loc[rows.groupby(join_key)["event_timestamp"].idxmax()] + + +def generate_expected_logs( + df, feature_view_name, features, join_keys, timestamp_column +): + logs = pd.DataFrame() + for join_key in join_keys: + logs[join_key] = df[join_key] + + for feature in features: + logs[f"{feature_view_name}__{feature}"] = df[feature] + logs[f"{feature_view_name}__{feature}__timestamp"] = df[timestamp_column] + logs[f"{feature_view_name}__{feature}__status"] = FieldStatus.PRESENT + + return logs.sort_values(by=join_keys).reset_index(drop=True) diff --git 
a/sdk/python/tests/integration/e2e/test_python_feature_server.py b/sdk/python/tests/integration/e2e/test_python_feature_server.py new file mode 100644 index 00000000000..ea4c35a1ca9 --- /dev/null +++ b/sdk/python/tests/integration/e2e/test_python_feature_server.py @@ -0,0 +1,118 @@ +import json +from datetime import datetime +from typing import List + +import pytest +from fastapi.testclient import TestClient + +from feast.feast_object import FeastObject +from feast.feature_server import get_app +from tests.integration.feature_repos.integration_test_repo_config import ( + IntegrationTestRepoConfig, +) +from tests.integration.feature_repos.repo_configuration import ( + construct_test_environment, + construct_universal_feature_views, + construct_universal_test_data, +) +from tests.integration.feature_repos.universal.entities import ( + customer, + driver, + location, +) + + +@pytest.mark.integration +@pytest.mark.universal_online_stores +def test_get_online_features(python_fs_client): + request_data_dict = { + "features": [ + "driver_stats:conv_rate", + "driver_stats:acc_rate", + "driver_stats:avg_daily_trips", + ], + "entities": {"driver_id": [5001, 5002]}, + } + response = python_fs_client.post( + "/get-online-features", data=json.dumps(request_data_dict) + ) + + # Check entities and features are present + parsed_response = json.loads(response.text) + assert "metadata" in parsed_response + metadata = parsed_response["metadata"] + expected_features = ["driver_id", "conv_rate", "acc_rate", "avg_daily_trips"] + response_feature_names = metadata["feature_names"] + assert len(response_feature_names) == len(expected_features) + for expected_feature in expected_features: + assert expected_feature in response_feature_names + assert "results" in parsed_response + results = parsed_response["results"] + for result in results: + # Same order as in metadata + assert len(result["statuses"]) == 2 # Requested two entities + for status in result["statuses"]: + assert status == 
"PRESENT" + results_driver_id_index = response_feature_names.index("driver_id") + assert ( + results[results_driver_id_index]["values"] + == request_data_dict["entities"]["driver_id"] + ) + + +@pytest.mark.integration +@pytest.mark.universal_online_stores +def test_push(python_fs_client): + initial_temp = get_temperatures(python_fs_client, location_ids=[1])[0] + json_data = json.dumps( + { + "push_source_name": "location_stats_push_source", + "df": { + "location_id": [1], + "temperature": [initial_temp * 100], + "event_timestamp": [str(datetime.utcnow())], + "created": [str(datetime.utcnow())], + }, + } + ) + response = python_fs_client.post("/push", data=json_data,) + + # Check new pushed temperature is fetched + assert response.status_code == 200 + assert get_temperatures(python_fs_client, location_ids=[1]) == [initial_temp * 100] + + +def get_temperatures(client, location_ids: List[int]): + get_request_data = { + "features": ["pushable_location_stats:temperature"], + "entities": {"location_id": location_ids}, + } + response = client.post("/get-online-features", data=json.dumps(get_request_data)) + parsed_response = json.loads(response.text) + assert "metadata" in parsed_response + metadata = parsed_response["metadata"] + response_feature_names = metadata["feature_names"] + assert "results" in parsed_response + results = parsed_response["results"] + results_temperature_index = response_feature_names.index("temperature") + return results[results_temperature_index]["values"] + + +@pytest.fixture +def python_fs_client(request): + config = IntegrationTestRepoConfig() + environment = construct_test_environment(config, fixture_request=request) + fs = environment.feature_store + try: + entities, datasets, data_sources = construct_universal_test_data(environment) + feature_views = construct_universal_feature_views(data_sources) + feast_objects: List[FeastObject] = [] + feast_objects.extend(feature_views.values()) + feast_objects.extend([driver(), customer(), location()]) 
+ fs.apply(feast_objects) + fs.materialize(environment.start_date, environment.end_date) + client = TestClient(get_app(fs)) + yield client + finally: + fs.teardown() + environment.data_source_creator.teardown() diff --git a/sdk/python/tests/integration/e2e/test_universal_e2e.py b/sdk/python/tests/integration/e2e/test_universal_e2e.py index 957cf9fba6f..a42a96e594b 100644 --- a/sdk/python/tests/integration/e2e/test_universal_e2e.py +++ b/sdk/python/tests/integration/e2e/test_universal_e2e.py @@ -12,7 +12,7 @@ @pytest.mark.integration -@pytest.mark.universal +@pytest.mark.universal_online_stores @pytest.mark.parametrize("infer_features", [True, False]) def test_e2e_consistency(environment, e2e_data_sources, infer_features): fs = environment.feature_store @@ -50,7 +50,9 @@ def check_offline_and_online_features( ).to_dict() if full_feature_names: + if expected_value: + assert response_dict[f"{fv.name}__value"][0], f"Response: {response_dict}" assert ( abs(response_dict[f"{fv.name}__value"][0] - expected_value) < 1e-6 ), f"Response: {response_dict}, Expected: {expected_value}" @@ -58,6 +60,7 @@ def check_offline_and_online_features( assert response_dict[f"{fv.name}__value"][0] is None else: if expected_value: + assert response_dict["value"][0], f"Response: {response_dict}" assert ( abs(response_dict["value"][0] - expected_value) < 1e-6 ), f"Response: {response_dict}, Expected: {expected_value}" diff --git a/sdk/python/tests/integration/e2e/test_validation.py b/sdk/python/tests/integration/e2e/test_validation.py index 76bbe152c57..e434f1a133f 100644 --- a/sdk/python/tests/integration/e2e/test_validation.py +++ b/sdk/python/tests/integration/e2e/test_validation.py @@ -58,7 +58,7 @@ def profiler_with_unrealistic_expectations(dataset: PandasDataset) -> Expectatio @pytest.mark.integration -@pytest.mark.universal +@pytest.mark.universal_offline_stores def test_historical_retrieval_with_validation(environment, universal_data_sources): store = environment.feature_store 
(entities, datasets, data_sources) = universal_data_sources @@ -88,7 +88,6 @@ def test_historical_retrieval_with_validation(environment, universal_data_source @pytest.mark.integration -@pytest.mark.universal def test_historical_retrieval_fails_on_validation(environment, universal_data_sources): store = environment.feature_store diff --git a/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py b/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py index f8cd66a6196..61920bb03f8 100644 --- a/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py +++ b/sdk/python/tests/integration/feature_repos/integration_test_repo_config.py @@ -1,3 +1,4 @@ +import hashlib from dataclasses import dataclass from typing import Dict, Optional, Type, Union @@ -19,7 +20,7 @@ class IntegrationTestRepoConfig: """ provider: str = "local" - online_store: Union[str, Dict] = "sqlite" + online_store: Optional[Union[str, Dict]] = "sqlite" offline_store_creator: Type[DataSourceCreator] = FileDataSourceCreator online_store_creator: Optional[Type[OnlineStoreCreator]] = None @@ -38,8 +39,10 @@ def __repr__(self) -> str: online_store_type = self.online_store.get("redis_type", "redis") else: online_store_type = self.online_store["type"] - else: + elif self.online_store: online_store_type = self.online_store.__name__ + else: + online_store_type = "none" else: online_store_type = self.online_store_creator.__name__ @@ -48,5 +51,23 @@ def __repr__(self) -> str: f"{self.provider.upper()}", f"{self.offline_store_creator.__name__.split('.')[-1].replace('DataSourceCreator', '')}", online_store_type, + f"python_fs={self.python_feature_server}", + f"go_fs={self.go_feature_retrieval}", ] ) + + def __hash__(self): + return int(hashlib.sha1(repr(self).encode()).hexdigest(), 16) + + def __eq__(self, other): + if not isinstance(other, IntegrationTestRepoConfig): + return False + + return ( + self.provider == other.provider + and self.online_store == 
other.online_store + and self.offline_store_creator == other.offline_store_creator + and self.online_store_creator == other.online_store_creator + and self.go_feature_retrieval == other.go_feature_retrieval + and self.python_feature_server == other.python_feature_server + ) diff --git a/sdk/python/tests/integration/feature_repos/repo_configuration.py b/sdk/python/tests/integration/feature_repos/repo_configuration.py index 9902f7c7b8e..27cf1a52e9d 100644 --- a/sdk/python/tests/integration/feature_repos/repo_configuration.py +++ b/sdk/python/tests/integration/feature_repos/repo_configuration.py @@ -8,11 +8,11 @@ from dataclasses import dataclass from datetime import datetime, timedelta from pathlib import Path -from typing import Any, List, Optional, Tuple, Union +from typing import Any, Dict, List, Optional, Tuple, Type, Union import pandas as pd +import pytest import yaml -from testcontainers.core.container import DockerContainer from feast import FeatureStore, FeatureView, OnDemandFeatureView, driver_test_data from feast.constants import FULL_REPO_CONFIGS_MODULE_ENV_NAME @@ -28,6 +28,9 @@ from tests.integration.feature_repos.universal.data_sources.bigquery import ( BigQueryDataSourceCreator, ) +from tests.integration.feature_repos.universal.data_sources.file import ( + FileDataSourceCreator, +) from tests.integration.feature_repos.universal.data_sources.redshift import ( RedshiftDataSourceCreator, ) @@ -38,6 +41,7 @@ conv_rate_plus_100_feature_view, create_conv_rate_request_source, create_customer_daily_profile_feature_view, + create_driver_hourly_stats_batch_feature_view, create_driver_hourly_stats_feature_view, create_field_mapping_feature_view, create_global_stats_feature_view, @@ -69,94 +73,78 @@ "connection_string": "127.0.0.1:6001,127.0.0.1:6002,127.0.0.1:6003", } -# FULL_REPO_CONFIGS contains the repo configurations (e.g. provider, offline store, -# online store, test data, and more parameters) that most integration tests will test -# against. 
By default, FULL_REPO_CONFIGS uses the three providers (local, GCP, and AWS) -# with their default offline and online stores; it also tests the providers with the -# Redis online store. It can be overwritten by specifying a Python module through the -# FULL_REPO_CONFIGS_MODULE_ENV_NAME environment variable. In this case, that Python -# module will be imported and FULL_REPO_CONFIGS will be extracted from the file. -DEFAULT_FULL_REPO_CONFIGS: List[IntegrationTestRepoConfig] = [ - # Local configurations - IntegrationTestRepoConfig(), - IntegrationTestRepoConfig(python_feature_server=True), +AVAILABLE_OFFLINE_STORES: List[Tuple[str, Type[DataSourceCreator]]] = [ + ("local", FileDataSourceCreator), ] + +AVAILABLE_ONLINE_STORES: Dict[ + str, Tuple[Union[str, Dict[str, str]], Optional[Type[OnlineStoreCreator]]] +] = { + "sqlite": ({"type": "sqlite"}, None), +} + if os.getenv("FEAST_IS_LOCAL_TEST", "False") != "True": - DEFAULT_FULL_REPO_CONFIGS.extend( + AVAILABLE_OFFLINE_STORES.extend( [ - IntegrationTestRepoConfig(online_store=REDIS_CONFIG), - # GCP configurations - IntegrationTestRepoConfig( - provider="gcp", - offline_store_creator=BigQueryDataSourceCreator, - online_store="datastore", - ), - IntegrationTestRepoConfig( - provider="gcp", - offline_store_creator=BigQueryDataSourceCreator, - online_store=REDIS_CONFIG, - ), - # AWS configurations - IntegrationTestRepoConfig( - provider="aws", - offline_store_creator=RedshiftDataSourceCreator, - online_store=DYNAMO_CONFIG, - python_feature_server=True, - ), - IntegrationTestRepoConfig( - provider="aws", - offline_store_creator=RedshiftDataSourceCreator, - online_store=REDIS_CONFIG, - ), - # Snowflake configurations - IntegrationTestRepoConfig( - provider="aws", # no list features, no feature server - offline_store_creator=SnowflakeDataSourceCreator, - online_store=REDIS_CONFIG, - ), - # Go implementation for online retrieval - IntegrationTestRepoConfig( - online_store=REDIS_CONFIG, go_feature_retrieval=True, - ), - # 
TODO(felixwang9817): Enable this test once https://github.com/feast-dev/feast/issues/2544 is resolved. - # IntegrationTestRepoConfig( - # online_store=REDIS_CONFIG, - # python_feature_server=True, - # go_feature_retrieval=True, - # ), + ("gcp", BigQueryDataSourceCreator), + ("aws", RedshiftDataSourceCreator), + ("aws", SnowflakeDataSourceCreator), ] ) -if os.getenv("FEAST_GO_FEATURE_RETRIEVAL", "False") == "True": - DEFAULT_FULL_REPO_CONFIGS = [ - IntegrationTestRepoConfig( - online_store=REDIS_CONFIG, go_feature_retrieval=True, - ), - ] + + AVAILABLE_ONLINE_STORES["redis"] = (REDIS_CONFIG, None) + AVAILABLE_ONLINE_STORES["dynamodb"] = (DYNAMO_CONFIG, None) + AVAILABLE_ONLINE_STORES["datastore"] = ("datastore", None) + + full_repo_configs_module = os.environ.get(FULL_REPO_CONFIGS_MODULE_ENV_NAME) if full_repo_configs_module is not None: try: module = importlib.import_module(full_repo_configs_module) - FULL_REPO_CONFIGS = getattr(module, "FULL_REPO_CONFIGS") - except Exception as e: + except ImportError as e: raise FeastModuleImportError( - "FULL_REPO_CONFIGS", full_repo_configs_module + full_repo_configs_module, "FULL_REPO_CONFIGS" ) from e -else: - FULL_REPO_CONFIGS = DEFAULT_FULL_REPO_CONFIGS + + try: + AVAILABLE_ONLINE_STORES = getattr(module, "AVAILABLE_ONLINE_STORES") + AVAILABLE_OFFLINE_STORES = getattr(module, "AVAILABLE_OFFLINE_STORES") + except AttributeError: + try: + FULL_REPO_CONFIGS: List[IntegrationTestRepoConfig] = getattr( + module, "FULL_REPO_CONFIGS" + ) + except AttributeError as e: + raise FeastModuleImportError( + full_repo_configs_module, "FULL_REPO_CONFIGS" + ) from e + + AVAILABLE_OFFLINE_STORES = [ + (config.provider, config.offline_store_creator) + for config in FULL_REPO_CONFIGS + ] + AVAILABLE_OFFLINE_STORES = list(set(AVAILABLE_OFFLINE_STORES)) # unique only + + AVAILABLE_ONLINE_STORES = { + c.online_store["type"] + if isinstance(c.online_store, dict) + else c.online_store: (c.online_store, c.online_store_creator) + for c in 
FULL_REPO_CONFIGS + } + if os.getenv("FEAST_LOCAL_ONLINE_CONTAINER", "False").lower() == "true": - replacements = {"datastore": DatastoreOnlineStoreCreator} - replacement_dicts = [ - (REDIS_CONFIG, RedisOnlineStoreCreator), - (DYNAMO_CONFIG, DynamoDBOnlineStoreCreator), - ] - for c in FULL_REPO_CONFIGS: - if isinstance(c.online_store, dict): - for _replacement in replacement_dicts: - if c.online_store == _replacement[0]: - c.online_store_creator = _replacement[1] - elif c.online_store in replacements: - c.online_store_creator = replacements[c.online_store] + replacements: Dict[ + str, Tuple[Union[str, Dict[str, str]], Optional[Type[OnlineStoreCreator]]] + ] = { + "redis": (REDIS_CONFIG, RedisOnlineStoreCreator), + "dynamodb": (DYNAMO_CONFIG, DynamoDBOnlineStoreCreator), + "datastore": ("datastore", DatastoreOnlineStoreCreator), + } + + for key, replacement in replacements.items(): + if key in AVAILABLE_ONLINE_STORES: + AVAILABLE_ONLINE_STORES[key] = replacement @dataclass @@ -311,15 +299,15 @@ def construct_universal_feature_views( data_sources: UniversalDataSources, with_odfv: bool = True, ) -> UniversalFeatureViews: driver_hourly_stats = create_driver_hourly_stats_feature_view(data_sources.driver) + driver_hourly_stats_base_feature_view = create_driver_hourly_stats_batch_feature_view( + data_sources.driver + ) return UniversalFeatureViews( customer=create_customer_daily_profile_feature_view(data_sources.customer), global_fv=create_global_stats_feature_view(data_sources.global_ds), driver=driver_hourly_stats, driver_odfv=conv_rate_plus_100_feature_view( - { - "driver": driver_hourly_stats, - "input_request": create_conv_rate_request_source(), - } + [driver_hourly_stats_base_feature_view, create_conv_rate_request_source()] ) if with_odfv else None, @@ -340,10 +328,15 @@ class Environment: worker_id: str online_store_creator: Optional[OnlineStoreCreator] = None + next_id = 0 + def __post_init__(self): self.end_date = datetime.utcnow().replace(microsecond=0, 
second=0, minute=0) self.start_date: datetime = self.end_date - timedelta(days=3) + Environment.next_id += 1 + self.id = Environment.next_id + def get_feature_server_endpoint(self) -> str: if self.python_feature_server and self.test_repo_config.provider == "local": return f"http://localhost:{self.get_local_server_port()}" @@ -356,7 +349,7 @@ def get_local_server_port(self) -> int: worker_id_num = int(parsed_worker_id[0]) else: worker_id_num = 0 - return 6566 + worker_id_num + return 6000 + 100 * worker_id_num + self.id def table_name_from_data_source(ds: DataSource) -> Optional[str]: @@ -369,9 +362,9 @@ def table_name_from_data_source(ds: DataSource) -> Optional[str]: def construct_test_environment( test_repo_config: IntegrationTestRepoConfig, + fixture_request: Optional[pytest.FixtureRequest], test_suite_name: str = "integration_test", worker_id: str = "worker_id", - offline_container: Optional[DockerContainer] = None, ) -> Environment: _uuid = str(uuid.uuid4()).replace("-", "")[:6] @@ -382,12 +375,14 @@ def construct_test_environment( project = f"{test_suite_name}_{run_id}_{run_num}" offline_creator: DataSourceCreator = test_repo_config.offline_store_creator( - project, offline_container=offline_container + project, fixture_request=fixture_request ) offline_store_config = offline_creator.create_offline_store_config() if test_repo_config.online_store_creator: - online_creator = test_repo_config.online_store_creator(project) + online_creator = test_repo_config.online_store_creator( + project, fixture_request=fixture_request + ) online_store = ( test_repo_config.online_store ) = online_creator.create_online_store() diff --git a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py b/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py index e2a700d0677..b36af0db472 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py +++ 
b/sdk/python/tests/integration/feature_repos/universal/data_source_creator.py @@ -4,6 +4,7 @@ import pandas as pd from feast.data_source import DataSource +from feast.feature_logging import LoggingDestination from feast.repo_config import FeastConfigBaseModel from feast.saved_dataset import SavedDatasetStorage @@ -34,7 +35,7 @@ def create_data_source( event_timestamp_column: (Deprecated) Pass through for the underlying data source. created_timestamp_column: Pass through for the underlying data source. field_mapping: Pass through for the underlying data source. - timestamp_field: (Deprecated) Pass through for the underlying data source. + timestamp_field: Pass through for the underlying data source. Returns: @@ -51,6 +52,9 @@ def create_offline_store_config(self) -> FeastConfigBaseModel: def create_saved_dataset_destination(self) -> SavedDatasetStorage: ... + def create_logged_features_destination(self) -> LoggingDestination: + pass + @abstractmethod def teardown(self): ... diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/__init__.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py index 881f547617e..620f444159b 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/bigquery.py @@ -7,8 +7,12 @@ from feast import BigQuerySource from feast.data_source import DataSource +from feast.feature_logging import LoggingDestination from feast.infra.offline_stores.bigquery import BigQueryOfflineStoreConfig -from feast.infra.offline_stores.bigquery_source import SavedDatasetBigQueryStorage +from feast.infra.offline_stores.bigquery_source import ( + BigQueryLoggingDestination, + 
SavedDatasetBigQueryStorage, +) from tests.integration.feature_repos.universal.data_source_creator import ( DataSourceCreator, ) @@ -86,5 +90,11 @@ def create_saved_dataset_destination(self) -> SavedDatasetBigQueryStorage: ) return SavedDatasetBigQueryStorage(table=table) + def create_logged_features_destination(self) -> LoggingDestination: + table = self.get_prefixed_table_name( + f"logged_features_{str(uuid.uuid4()).replace('-', '_')}" + ) + return BigQueryLoggingDestination(table_ref=table) + def get_prefixed_table_name(self, suffix: str) -> str: return f"{self.client.project}.{self.project_name}.{suffix}" diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py index 64c3aeacf3e..ccc1544bb8f 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/file.py @@ -1,3 +1,5 @@ +import os.path +import shutil import tempfile import uuid from typing import Any, Dict, List, Optional @@ -10,8 +12,12 @@ from feast import FileSource from feast.data_format import ParquetFormat from feast.data_source import DataSource +from feast.feature_logging import LoggingDestination from feast.infra.offline_stores.file import FileOfflineStoreConfig -from feast.infra.offline_stores.file_source import SavedDatasetFileStorage +from feast.infra.offline_stores.file_source import ( + FileLoggingDestination, + SavedDatasetFileStorage, +) from feast.repo_config import FeastConfigBaseModel from tests.integration.feature_repos.universal.data_source_creator import ( DataSourceCreator, @@ -20,10 +26,12 @@ class FileDataSourceCreator(DataSourceCreator): files: List[Any] + dirs: List[Any] def __init__(self, project_name: str, *args, **kwargs): super().__init__(project_name) self.files = [] + self.dirs = [] def create_data_source( self, @@ -53,6 +61,7 @@ def create_data_source( def 
create_saved_dataset_destination(self) -> SavedDatasetFileStorage: d = tempfile.mkdtemp(prefix=self.project_name) + self.dirs.append(d) return SavedDatasetFileStorage( path=d, file_format=ParquetFormat(), s3_endpoint_override=None ) @@ -63,10 +72,20 @@ def get_prefixed_table_name(self, suffix: str) -> str: def create_offline_store_config(self) -> FeastConfigBaseModel: return FileOfflineStoreConfig() + def create_logged_features_destination(self) -> LoggingDestination: + d = tempfile.mkdtemp(prefix=self.project_name) + self.dirs.append(d) + return FileLoggingDestination(path=d) + def teardown(self): for f in self.files: f.close() + for d in self.dirs: + if not os.path.exists(d): + continue + shutil.rmtree(d) + class S3FileDataSourceCreator(DataSourceCreator): f: Any @@ -143,6 +162,15 @@ def create_saved_dataset_destination(self) -> SavedDatasetFileStorage: s3_endpoint_override=f"http://{host}:{port}", ) + def create_logged_features_destination(self) -> LoggingDestination: + port = self.minio.get_exposed_port("9000") + host = self.minio.get_container_host_ip() + + return FileLoggingDestination( + path=f"s3://{self.bucket}/logged_features/{str(uuid.uuid4())}", + s3_endpoint_override=f"http://{host}:{port}", + ) + def get_prefixed_table_name(self, suffix: str) -> str: return f"{suffix}" diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py index 7e305fee801..3b2794393fc 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py @@ -5,8 +5,12 @@ from feast import RedshiftSource from feast.data_source import DataSource +from feast.feature_logging import LoggingDestination from feast.infra.offline_stores.redshift import RedshiftOfflineStoreConfig -from feast.infra.offline_stores.redshift_source import SavedDatasetRedshiftStorage +from 
feast.infra.offline_stores.redshift_source import ( + RedshiftLoggingDestination, + SavedDatasetRedshiftStorage, +) from feast.infra.utils import aws_utils from feast.repo_config import FeastConfigBaseModel from tests.integration.feature_repos.universal.data_source_creator import ( @@ -74,6 +78,14 @@ def create_saved_dataset_destination(self) -> SavedDatasetRedshiftStorage: return SavedDatasetRedshiftStorage(table_ref=table) + def create_logged_features_destination(self) -> LoggingDestination: + table = self.get_prefixed_table_name( + f"persisted_ds_{str(uuid.uuid4()).replace('-', '_')}" + ) + self.tables.append(table) + + return RedshiftLoggingDestination(table_name=table) + def create_offline_store_config(self) -> FeastConfigBaseModel: return self.offline_store_config diff --git a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py index 3942444f324..23466bc00c0 100644 --- a/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py +++ b/sdk/python/tests/integration/feature_repos/universal/data_sources/snowflake.py @@ -6,8 +6,12 @@ from feast import SnowflakeSource from feast.data_source import DataSource +from feast.feature_logging import LoggingDestination from feast.infra.offline_stores.snowflake import SnowflakeOfflineStoreConfig -from feast.infra.offline_stores.snowflake_source import SavedDatasetSnowflakeStorage +from feast.infra.offline_stores.snowflake_source import ( + SavedDatasetSnowflakeStorage, + SnowflakeLoggingDestination, +) from feast.infra.utils.snowflake_utils import get_snowflake_conn, write_pandas from feast.repo_config import FeastConfigBaseModel from tests.integration.feature_repos.universal.data_source_creator import ( @@ -66,6 +70,14 @@ def create_saved_dataset_destination(self) -> SavedDatasetSnowflakeStorage: return SavedDatasetSnowflakeStorage(table_ref=table) + def create_logged_features_destination(self) -> 
LoggingDestination: + table = self.get_prefixed_table_name( + f"logged_features_{str(uuid.uuid4()).replace('-', '_')}" + ) + self.tables.append(table) + + return SnowflakeLoggingDestination(table_name=table) + def create_offline_store_config(self) -> FeastConfigBaseModel: return self.offline_store_config diff --git a/sdk/python/tests/integration/feature_repos/universal/entities.py b/sdk/python/tests/integration/feature_repos/universal/entities.py index e8e90a6af62..b7a7583f1b3 100644 --- a/sdk/python/tests/integration/feature_repos/universal/entities.py +++ b/sdk/python/tests/integration/feature_repos/universal/entities.py @@ -6,7 +6,7 @@ def driver(value_type: ValueType = ValueType.INT64): name="driver", # The name is derived from this argument, not object name. value_type=value_type, description="driver id", - join_key="driver_id", + join_keys=["driver_id"], ) diff --git a/sdk/python/tests/integration/feature_repos/universal/feature_views.py b/sdk/python/tests/integration/feature_repos/universal/feature_views.py index 5918e367532..3e05f5d7e5f 100644 --- a/sdk/python/tests/integration/feature_repos/universal/feature_views.py +++ b/sdk/python/tests/integration/feature_repos/universal/feature_views.py @@ -5,6 +5,7 @@ import pandas as pd from feast import ( + BatchFeatureView, Feature, FeatureView, Field, @@ -13,7 +14,7 @@ ValueType, ) from feast.data_source import DataSource, RequestSource -from feast.types import Array, FeastType, Float32, Float64, Int32, Int64 +from feast.types import Array, FeastType, Float32, Float64, Int32 from tests.integration.feature_repos.universal.entities import location @@ -22,10 +23,11 @@ def driver_feature_view( name="test_correctness", infer_features: bool = False, dtype: FeastType = Float32, + entities: Optional[List[str]] = None, ) -> FeatureView: return FeatureView( name=name, - entities=["driver"], + entities=entities or ["driver"], schema=None if infer_features else [Field(name="value", dtype=dtype)], ttl=timedelta(days=5), 
source=data_source, @@ -65,19 +67,19 @@ def conv_rate_plus_100(features_df: pd.DataFrame) -> pd.DataFrame: def conv_rate_plus_100_feature_view( sources: Dict[str, Union[RequestSource, FeatureView]], infer_features: bool = False, - features: Optional[List[Feature]] = None, + features: Optional[List[Field]] = None, ) -> OnDemandFeatureView: # Test that positional arguments and Features still work for ODFVs. _features = features or [ - Feature(name="conv_rate_plus_100", dtype=ValueType.DOUBLE), - Feature(name="conv_rate_plus_val_to_add", dtype=ValueType.DOUBLE), - Feature(name="conv_rate_plus_100_rounded", dtype=ValueType.INT32), + Field(name="conv_rate_plus_100", dtype=Float64), + Field(name="conv_rate_plus_val_to_add", dtype=Float64), + Field(name="conv_rate_plus_100_rounded", dtype=Int32), ] return OnDemandFeatureView( - conv_rate_plus_100.__name__, - [] if infer_features else _features, - sources, - conv_rate_plus_100, + name=conv_rate_plus_100.__name__, + schema=[] if infer_features else _features, + sources=sources, + udf=conv_rate_plus_100, ) @@ -150,6 +152,24 @@ def create_item_embeddings_feature_view(source, infer_features: bool = False): return item_embeddings_feature_view +def create_item_embeddings_batch_feature_view( + source, infer_features: bool = False +) -> BatchFeatureView: + item_embeddings_feature_view = BatchFeatureView( + name="item_embeddings", + entities=["item"], + schema=None + if infer_features + else [ + Field(name="embedding_double", dtype=Array(Float64)), + Field(name="embedding_float", dtype=Array(Float32)), + ], + source=source, + ttl=timedelta(hours=2), + ) + return item_embeddings_feature_view + + def create_driver_hourly_stats_feature_view(source, infer_features: bool = False): driver_stats_feature_view = FeatureView( name="driver_stats", @@ -167,6 +187,25 @@ def create_driver_hourly_stats_feature_view(source, infer_features: bool = False return driver_stats_feature_view +def create_driver_hourly_stats_batch_feature_view( + source, 
infer_features: bool = False +) -> BatchFeatureView: + driver_stats_feature_view = BatchFeatureView( + name="driver_stats", + entities=["driver"], + schema=None + if infer_features + else [ + Field(name="conv_rate", dtype=Float32), + Field(name="acc_rate", dtype=Float32), + Field(name="avg_daily_trips", dtype=Int32), + ], + source=source, + ttl=timedelta(hours=2), + ) + return driver_stats_feature_view + + def create_customer_daily_profile_feature_view(source, infer_features: bool = False): customer_profile_feature_view = FeatureView( name="customer_profile", @@ -237,13 +276,7 @@ def create_field_mapping_feature_view(source): def create_pushable_feature_view(batch_source: DataSource): push_source = PushSource( - name="location_stats_push_source", - schema=[ - Field(name="location_id", dtype=Int64), - Field(name="temperature", dtype=Int32), - ], - timestamp_field="timestamp", - batch_source=batch_source, + name="location_stats_push_source", batch_source=batch_source, ) return FeatureView( name="pushable_location_stats", diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/datastore.py b/sdk/python/tests/integration/feature_repos/universal/online_store/datastore.py index 52851e80d88..6067a1ff4b8 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/datastore.py +++ b/sdk/python/tests/integration/feature_repos/universal/online_store/datastore.py @@ -11,7 +11,7 @@ class DatastoreOnlineStoreCreator(OnlineStoreCreator): - def __init__(self, project_name: str): + def __init__(self, project_name: str, **kwargs): super().__init__(project_name) self.container = ( DockerContainer( diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/dynamodb.py b/sdk/python/tests/integration/feature_repos/universal/online_store/dynamodb.py index e4d8e0c3d02..473b7acee97 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/dynamodb.py +++ 
b/sdk/python/tests/integration/feature_repos/universal/online_store/dynamodb.py @@ -9,7 +9,7 @@ class DynamoDBOnlineStoreCreator(OnlineStoreCreator): - def __init__(self, project_name: str): + def __init__(self, project_name: str, **kwargs): super().__init__(project_name) self.container = DockerContainer( "amazon/dynamodb-local:latest" diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/hbase.py b/sdk/python/tests/integration/feature_repos/universal/online_store/hbase.py new file mode 100644 index 00000000000..ecaace87097 --- /dev/null +++ b/sdk/python/tests/integration/feature_repos/universal/online_store/hbase.py @@ -0,0 +1,28 @@ +from typing import Dict + +from testcontainers.core.container import DockerContainer +from testcontainers.core.waiting_utils import wait_for_logs + +from tests.integration.feature_repos.universal.online_store_creator import ( + OnlineStoreCreator, +) + + +class HbaseOnlineStoreCreator(OnlineStoreCreator): + def __init__(self, project_name: str, **kwargs): + super().__init__(project_name) + self.container = DockerContainer("harisekhon/hbase").with_exposed_ports("9090") + + def create_online_store(self) -> Dict[str, str]: + self.container.start() + log_string_to_wait_for = ( + "Initializing Hbase Local with the following configuration:" + ) + wait_for_logs( + container=self.container, predicate=log_string_to_wait_for, timeout=5 + ) + exposed_port = self.container.get_exposed_port("9090") + return {"type": "hbase", "host": "127.0.0.1", "port": exposed_port} + + def teardown(self): + self.container.stop() diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py b/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py index 073760f5145..49951876652 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py +++ b/sdk/python/tests/integration/feature_repos/universal/online_store/redis.py @@ -9,7 +9,7 @@ class 
RedisOnlineStoreCreator(OnlineStoreCreator): - def __init__(self, project_name: str): + def __init__(self, project_name: str, **kwargs): super().__init__(project_name) self.container = DockerContainer("redis").with_exposed_ports("6379") diff --git a/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py b/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py index 0fa0dbed3e1..c3872ea697f 100644 --- a/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py +++ b/sdk/python/tests/integration/feature_repos/universal/online_store_creator.py @@ -4,7 +4,7 @@ class OnlineStoreCreator(ABC): - def __init__(self, project_name: str): + def __init__(self, project_name: str, **kwargs): self.project_name = project_name def create_online_store(self) -> FeastConfigBaseModel: diff --git a/sdk/python/tests/integration/offline_store/test_feature_logging.py b/sdk/python/tests/integration/offline_store/test_feature_logging.py new file mode 100644 index 00000000000..8e7e9d68bea --- /dev/null +++ b/sdk/python/tests/integration/offline_store/test_feature_logging.py @@ -0,0 +1,145 @@ +import contextlib +import datetime +import tempfile +import uuid +from pathlib import Path +from typing import Iterator, Union + +import numpy as np +import pandas as pd +import pyarrow +import pyarrow as pa +import pytest +from google.api_core.exceptions import NotFound + +from feast.feature_logging import ( + LOG_DATE_FIELD, + LOG_TIMESTAMP_FIELD, + REQUEST_ID_FIELD, + FeatureServiceLoggingSource, + LoggingConfig, +) +from feast.feature_service import FeatureService +from feast.protos.feast.serving.ServingService_pb2 import FieldStatus +from feast.wait import wait_retry_backoff +from tests.integration.feature_repos.repo_configuration import ( + UniversalDatasets, + construct_universal_feature_views, +) +from tests.integration.feature_repos.universal.entities import driver +from tests.integration.feature_repos.universal.feature_views import 
conv_rate_plus_100 + + +@pytest.mark.integration +@pytest.mark.universal_offline_stores +@pytest.mark.parametrize("pass_as_path", [True, False], ids=lambda v: str(v)) +def test_feature_service_logging(environment, universal_data_sources, pass_as_path): + store = environment.feature_store + + (_, datasets, data_sources) = universal_data_sources + + feature_views = construct_universal_feature_views(data_sources) + store.apply([driver(), *feature_views.values()]) + + logs_df = prepare_logs(datasets) + + feature_service = FeatureService( + name="test_service", + features=[ + feature_views.driver[["conv_rate", "avg_daily_trips"]], + feature_views.driver_odfv[ + ["conv_rate_plus_val_to_add", "conv_rate_plus_100_rounded"] + ], + ], + logging_config=LoggingConfig( + destination=environment.data_source_creator.create_logged_features_destination() + ), + ) + + schema = FeatureServiceLoggingSource( + feature_service=feature_service, project=store.project + ).get_schema(store._registry) + + num_rows = logs_df.shape[0] + first_batch = pa.Table.from_pandas(logs_df.iloc[: num_rows // 2, :], schema=schema) + second_batch = pa.Table.from_pandas(logs_df.iloc[num_rows // 2 :, :], schema=schema) + + with to_logs_dataset(first_batch, pass_as_path) as logs: + store.write_logged_features( + source=feature_service, logs=logs, + ) + + with to_logs_dataset(second_batch, pass_as_path) as logs: + store.write_logged_features( + source=feature_service, logs=logs, + ) + expected_columns = list(set(logs_df.columns) - {LOG_DATE_FIELD}) + + def retrieve(): + retrieval_job = store._get_provider().retrieve_feature_service_logs( + feature_service=feature_service, + start_date=logs_df[LOG_TIMESTAMP_FIELD].min(), + end_date=logs_df[LOG_TIMESTAMP_FIELD].max() + datetime.timedelta(seconds=1), + config=store.config, + registry=store._registry, + ) + try: + df = retrieval_job.to_df() + except NotFound: + # Table was not created yet + return None, False + + return df, df.shape[0] == logs_df.shape[0] + + 
persisted_logs = wait_retry_backoff( + retrieve, timeout_secs=60, timeout_msg="Logs retrieval failed" + ) + + persisted_logs = persisted_logs[expected_columns] + logs_df = logs_df[expected_columns] + pd.testing.assert_frame_equal( + logs_df.sort_values(REQUEST_ID_FIELD).reset_index(drop=True), + persisted_logs.sort_values(REQUEST_ID_FIELD).reset_index(drop=True), + check_dtype=False, + ) + + +def prepare_logs(datasets: UniversalDatasets) -> pd.DataFrame: + driver_df = datasets.driver_df + driver_df["val_to_add"] = 50 + driver_df = driver_df.join(conv_rate_plus_100(driver_df)) + num_rows = driver_df.shape[0] + + logs_df = driver_df[["driver_id", "val_to_add"]] + logs_df[REQUEST_ID_FIELD] = [str(uuid.uuid4()) for _ in range(num_rows)] + logs_df[LOG_TIMESTAMP_FIELD] = pd.Series( + np.random.randint(0, 7 * 24 * 3600, num_rows) + ).map(lambda secs: pd.Timestamp.utcnow() - datetime.timedelta(seconds=secs)) + logs_df[LOG_DATE_FIELD] = logs_df[LOG_TIMESTAMP_FIELD].dt.date + + for view, features in ( + ("driver_stats", ("conv_rate", "avg_daily_trips")), + ( + "conv_rate_plus_100", + ("conv_rate_plus_val_to_add", "conv_rate_plus_100_rounded"), + ), + ): + for feature in features: + logs_df[f"{view}__{feature}"] = driver_df[feature] + logs_df[f"{view}__{feature}__timestamp"] = driver_df["event_timestamp"] + logs_df[f"{view}__{feature}__status"] = FieldStatus.PRESENT + + return logs_df + + +@contextlib.contextmanager +def to_logs_dataset( + table: pyarrow.Table, pass_as_path: bool +) -> Iterator[Union[pyarrow.Table, Path]]: + if not pass_as_path: + yield table + return + + with tempfile.TemporaryDirectory() as temp_dir: + pyarrow.parquet.write_to_dataset(table, root_path=temp_dir) + yield Path(temp_dir) diff --git a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py index 0d6ef84ff47..d5f49a1f958 100644 --- 
a/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py +++ b/sdk/python/tests/integration/offline_store/test_universal_historical_retrieval.py @@ -21,7 +21,7 @@ from feast.infra.offline_stores.offline_utils import ( DEFAULT_ENTITY_DF_EVENT_TIMESTAMP_COL, ) -from feast.types import Int32 +from feast.types import Float32, Int32 from feast.value_type import ValueType from tests.integration.feature_repos.repo_configuration import ( construct_universal_feature_views, @@ -280,7 +280,7 @@ def get_expected_training_df( @pytest.mark.integration -@pytest.mark.universal +@pytest.mark.universal_offline_stores @pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v)) def test_historical_features(environment, universal_data_sources, full_feature_names): store = environment.feature_store @@ -413,8 +413,47 @@ def test_historical_features(environment, universal_data_sources, full_feature_n @pytest.mark.integration @pytest.mark.universal @pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v)) -def test_historical_features_with_missing_request_data( +def test_historical_features_with_shared_batch_source( environment, universal_data_sources, full_feature_names +): + # Addresses https://github.com/feast-dev/feast/issues/2576 + + store = environment.feature_store + + entities, datasets, data_sources = universal_data_sources + driver_stats_v1 = FeatureView( + name="driver_stats_v1", + entities=["driver"], + schema=[Field(name="avg_daily_trips", dtype=Int32)], + source=data_sources.driver, + ) + driver_stats_v2 = FeatureView( + name="driver_stats_v2", + entities=["driver"], + schema=[ + Field(name="avg_daily_trips", dtype=Int32), + Field(name="conv_rate", dtype=Float32), + ], + source=data_sources.driver, + ) + + store.apply([driver(), driver_stats_v1, driver_stats_v2]) + + with pytest.raises(KeyError): + store.get_historical_features( + entity_df=datasets.entity_df, + features=[ + # `driver_stats_v1` does 
not have `conv_rate` + "driver_stats_v1:conv_rate", + ], + full_feature_names=full_feature_names, + ).to_df() + + +@pytest.mark.integration +@pytest.mark.universal_offline_stores +def test_historical_features_with_missing_request_data( + environment, universal_data_sources ): store = environment.feature_store @@ -437,12 +476,12 @@ def test_historical_features_with_missing_request_data( "global_stats:avg_ride_length", "field_mapping:feature_name", ], - full_feature_names=full_feature_names, + full_feature_names=True, ) @pytest.mark.integration -@pytest.mark.universal +@pytest.mark.universal_offline_stores @pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v)) def test_historical_features_with_entities_from_query( environment, universal_data_sources, full_feature_names @@ -542,7 +581,7 @@ def test_historical_features_with_entities_from_query( @pytest.mark.integration -@pytest.mark.universal +@pytest.mark.universal_offline_stores @pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v)) def test_historical_features_persisting( environment, universal_data_sources, full_feature_names @@ -621,7 +660,7 @@ def test_historical_features_persisting( @pytest.mark.integration -@pytest.mark.universal +@pytest.mark.universal_offline_stores def test_historical_features_from_bigquery_sources_containing_backfills(environment): store = environment.feature_store @@ -689,7 +728,7 @@ def test_historical_features_from_bigquery_sources_containing_backfills(environm created_timestamp_column="created", ) - driver = Entity(name="driver", join_key="driver_id", value_type=ValueType.INT64) + driver = Entity(name="driver", join_keys=["driver_id"], value_type=ValueType.INT64) driver_fv = FeatureView( name="driver_stats", entities=["driver"], diff --git a/sdk/python/tests/integration/online_store/test_e2e_local.py b/sdk/python/tests/integration/online_store/test_e2e_local.py index c1aa10900ae..d3cb86716a2 100644 --- 
a/sdk/python/tests/integration/online_store/test_e2e_local.py +++ b/sdk/python/tests/integration/online_store/test_e2e_local.py @@ -6,8 +6,11 @@ import pandas as pd from pytz import utc -import feast.driver_test_data as driver_data -from feast import FeatureStore +from feast.driver_test_data import ( + create_driver_hourly_stats_df, + create_global_daily_stats_df, +) +from feast.feature_store import FeatureStore from tests.utils.cli_utils import CliRunner, get_example_repo @@ -65,68 +68,90 @@ def _assert_online_features( assert "global_daily_stats__avg_ride_length" in result +def _test_materialize_and_online_retrieval( + runner: CliRunner, + store: FeatureStore, + start_date: datetime, + end_date: datetime, + driver_df: pd.DataFrame, +): + assert store.repo_path is not None + + # Test `feast materialize` and online retrieval. + r = runner.run( + [ + "materialize", + start_date.isoformat(), + (end_date - timedelta(days=7)).isoformat(), + ], + cwd=Path(store.repo_path), + ) + + assert r.returncode == 0 + _assert_online_features(store, driver_df, end_date - timedelta(days=7)) + + # Test `feast materialize-incremental` and online retrieval. + r = runner.run( + ["materialize-incremental", end_date.isoformat()], cwd=Path(store.repo_path), + ) + + assert r.returncode == 0 + _assert_online_features(store, driver_df, end_date) + + def test_e2e_local() -> None: """ - A more comprehensive than "basic" test, using local provider. + Tests the end-to-end workflow of apply, materialize, and online retrieval. - 1. Create a repo. - 2. Apply - 3. Ingest some data to online store from parquet - 4. Read from the online store to make sure it made it there. + This test runs against several different types of repos: + 1. A repo with a normal FV and an entity-less FV. + 2. A repo using the SDK from version 0.19.0. + 3. A repo with a FV with a ttl of 0. """ - runner = CliRunner() with tempfile.TemporaryDirectory() as data_dir: - - # Generate some test data in parquet format. 
+ # Generate test data. end_date = datetime.now().replace(microsecond=0, second=0, minute=0) start_date = end_date - timedelta(days=15) driver_entities = [1001, 1002, 1003, 1004, 1005] - driver_df = driver_data.create_driver_hourly_stats_df( - driver_entities, start_date, end_date - ) + driver_df = create_driver_hourly_stats_df(driver_entities, start_date, end_date) driver_stats_path = os.path.join(data_dir, "driver_stats.parquet") driver_df.to_parquet(path=driver_stats_path, allow_truncated_timestamps=True) - global_df = driver_data.create_global_daily_stats_df(start_date, end_date) + global_df = create_global_daily_stats_df(start_date, end_date) global_stats_path = os.path.join(data_dir, "global_stats.parquet") global_df.to_parquet(path=global_stats_path, allow_truncated_timestamps=True) - # Note that runner takes care of running apply/teardown for us here. - # We patch python code in example_feature_repo_2.py to set the path to Parquet files. with runner.local_repo( get_example_repo("example_feature_repo_2.py") .replace("%PARQUET_PATH%", driver_stats_path) .replace("%PARQUET_PATH_GLOBAL%", global_stats_path), "file", ) as store: - - assert store.repo_path is not None - - # feast materialize - r = runner.run( - [ - "materialize", - start_date.isoformat(), - (end_date - timedelta(days=7)).isoformat(), - ], - cwd=Path(store.repo_path), + _test_materialize_and_online_retrieval( + runner, store, start_date, end_date, driver_df ) - assert r.returncode == 0 - - _assert_online_features(store, driver_df, end_date - timedelta(days=7)) - - # feast materialize-incremental - r = runner.run( - ["materialize-incremental", end_date.isoformat()], - cwd=Path(store.repo_path), + with runner.local_repo( + get_example_repo("example_feature_repo_version_0_19.py") + .replace("%PARQUET_PATH%", driver_stats_path) + .replace("%PARQUET_PATH_GLOBAL%", global_stats_path), + "file", + ) as store: + _test_materialize_and_online_retrieval( + runner, store, start_date, end_date, driver_df ) - 
assert r.returncode == 0 - - _assert_online_features(store, driver_df, end_date) + with runner.local_repo( + get_example_repo("example_feature_repo_with_ttl_0.py") + .replace("%PARQUET_PATH%", driver_stats_path) + .replace("%PARQUET_PATH_GLOBAL%", global_stats_path), + "file", + ) as store: + _test_materialize_and_online_retrieval( + runner, store, start_date, end_date, driver_df + ) # Test a failure case when the parquet file doesn't include a join key with runner.local_repo( @@ -135,10 +160,8 @@ def test_e2e_local() -> None: ), "file", ) as store: - assert store.repo_path is not None - # feast materialize returncode, output = runner.run_with_output( [ "materialize", diff --git a/sdk/python/tests/integration/online_store/test_push_online_retrieval.py b/sdk/python/tests/integration/online_store/test_push_online_retrieval.py index 9e9ec953c73..aa7e3e7f530 100644 --- a/sdk/python/tests/integration/online_store/test_push_online_retrieval.py +++ b/sdk/python/tests/integration/online_store/test_push_online_retrieval.py @@ -14,7 +14,7 @@ @pytest.mark.integration -@pytest.mark.universal +@pytest.mark.universal_online_stores def test_push_features_and_read(environment, universal_data_sources): store = environment.feature_store diff --git a/sdk/python/tests/integration/online_store/test_universal_online.py b/sdk/python/tests/integration/online_store/test_universal_online.py index 774c3f9a424..259a094426e 100644 --- a/sdk/python/tests/integration/online_store/test_universal_online.py +++ b/sdk/python/tests/integration/online_store/test_universal_online.py @@ -19,7 +19,7 @@ RequestDataNotFoundInEntityRowsException, ) from feast.online_response import TIMESTAMP_POSTFIX -from feast.types import String +from feast.types import Float32, Int32, String from feast.wait import wait_retry_backoff from tests.integration.feature_repos.repo_configuration import ( Environment, @@ -38,13 +38,14 @@ @pytest.mark.integration -def test_entity_ttl_online_store(local_redis_environment, 
redis_universal_data_sources): +@pytest.mark.universal_online_stores(only=["redis"]) +def test_entity_ttl_online_store(environment, universal_data_sources): if os.getenv("FEAST_IS_LOCAL_TEST", "False") == "True": return - fs = local_redis_environment.feature_store + fs = environment.feature_store # setting ttl setting in online store to 1 second fs.config.online_store.key_ttl_seconds = 1 - entities, datasets, data_sources = redis_universal_data_sources + entities, datasets, data_sources = universal_data_sources driver_hourly_stats = create_driver_hourly_stats_feature_view(data_sources.driver) driver_entity = driver() @@ -98,10 +99,11 @@ def test_entity_ttl_online_store(local_redis_environment, redis_universal_data_s # TODO: make this work with all universal (all online store types) @pytest.mark.integration -def test_write_to_online_store_event_check(local_redis_environment): +@pytest.mark.universal_online_stores(only=["redis"]) +def test_write_to_online_store_event_check(environment): if os.getenv("FEAST_IS_LOCAL_TEST", "False") == "True": return - fs = local_redis_environment.feature_store + fs = environment.feature_store # write same data points 3 with different timestamps now = pd.Timestamp(datetime.datetime.utcnow()).round("ms") @@ -114,9 +116,7 @@ def test_write_to_online_store_event_check(local_redis_environment): "ts_1": [hour_ago, now, now], } dataframe_source = pd.DataFrame(data) - with prep_file_source( - df=dataframe_source, event_timestamp_column="ts_1" - ) as file_source: + with prep_file_source(df=dataframe_source, timestamp_field="ts_1") as file_source: e = Entity(name="id", value_type=ValueType.STRING) # Create Feature View @@ -200,7 +200,7 @@ def test_write_to_online_store_event_check(local_redis_environment): @pytest.mark.integration -@pytest.mark.universal +@pytest.mark.universal_online_stores def test_write_to_online_store(environment, universal_data_sources): fs = environment.feature_store entities, datasets, data_sources = 
universal_data_sources @@ -326,6 +326,60 @@ def get_online_features_dict( @pytest.mark.integration @pytest.mark.universal +def test_online_retrieval_with_shared_batch_source(environment, universal_data_sources): + # Addresses https://github.com/feast-dev/feast/issues/2576 + + fs = environment.feature_store + + entities, datasets, data_sources = universal_data_sources + driver_stats_v1 = FeatureView( + name="driver_stats_v1", + entities=["driver"], + schema=[Field(name="avg_daily_trips", dtype=Int32)], + source=data_sources.driver, + ) + driver_stats_v2 = FeatureView( + name="driver_stats_v2", + entities=["driver"], + schema=[ + Field(name="avg_daily_trips", dtype=Int32), + Field(name="conv_rate", dtype=Float32), + ], + source=data_sources.driver, + ) + + fs.apply([driver(), driver_stats_v1, driver_stats_v2]) + + data = pd.DataFrame( + { + "driver_id": [1, 2], + "avg_daily_trips": [4, 5], + "conv_rate": [0.5, 0.3], + "event_timestamp": [ + pd.to_datetime(1646263500, utc=True, unit="s"), + pd.to_datetime(1646263600, utc=True, unit="s"), + ], + "created": [ + pd.to_datetime(1646263500, unit="s"), + pd.to_datetime(1646263600, unit="s"), + ], + } + ) + fs.write_to_online_store("driver_stats_v1", data.drop("conv_rate", axis=1)) + fs.write_to_online_store("driver_stats_v2", data) + + with pytest.raises(KeyError): + fs.get_online_features( + features=[ + # `driver_stats_v1` does not have `conv_rate` + "driver_stats_v1:conv_rate", + ], + entity_rows=[{"driver_id": 1}, {"driver_id": 2}], + ) + + +@pytest.mark.integration +@pytest.mark.universal_online_stores @pytest.mark.parametrize("full_feature_names", [True, False], ids=lambda v: str(v)) def test_online_retrieval_with_event_timestamps( environment, universal_data_sources, full_feature_names @@ -389,10 +443,12 @@ def test_online_retrieval_with_event_timestamps( @pytest.mark.integration -@pytest.mark.universal +@pytest.mark.universal_online_stores @pytest.mark.goserver @pytest.mark.parametrize("full_feature_names", [True, 
False], ids=lambda v: str(v)) -def test_online_retrieval(environment, universal_data_sources, full_feature_names): +def test_online_retrieval( + environment, universal_data_sources, python_server, full_feature_names +): fs = environment.feature_store entities, datasets, data_sources = universal_data_sources feature_views = construct_universal_feature_views(data_sources) @@ -612,7 +668,7 @@ def test_online_retrieval(environment, universal_data_sources, full_feature_name @pytest.mark.integration -@pytest.mark.universal +@pytest.mark.universal_online_stores(only=["redis"]) def test_online_store_cleanup(environment, universal_data_sources): """ Some online store implementations (like Redis) keep features from different features views diff --git a/sdk/python/tests/integration/registration/test_cli.py b/sdk/python/tests/integration/registration/test_cli.py index 655e53e7593..ce23ed66a60 100644 --- a/sdk/python/tests/integration/registration/test_cli.py +++ b/sdk/python/tests/integration/registration/test_cli.py @@ -32,7 +32,7 @@ @pytest.mark.integration -@pytest.mark.universal +@pytest.mark.universal_offline_stores def test_universal_cli(environment: Environment): project = f"test_universal_cli_{str(uuid.uuid4()).replace('-', '')[:8]}" runner = CliRunner() diff --git a/sdk/python/tests/integration/registration/test_feature_store.py b/sdk/python/tests/integration/registration/test_feature_store.py index 39de7fc6888..db4c6700cec 100644 --- a/sdk/python/tests/integration/registration/test_feature_store.py +++ b/sdk/python/tests/integration/registration/test_feature_store.py @@ -216,10 +216,10 @@ def test_apply_feature_view_success(test_feature_store): ) @pytest.mark.parametrize("dataframe_source", [lazy_fixture("simple_dataset_1")]) def test_feature_view_inference_success(test_feature_store, dataframe_source): - with prep_file_source( - df=dataframe_source, event_timestamp_column="ts_1" - ) as file_source: - entity = Entity(name="id", join_key="id_join_key", 
value_type=ValueType.INT64) + with prep_file_source(df=dataframe_source, timestamp_field="ts_1") as file_source: + entity = Entity( + name="id", join_keys=["id_join_key"], value_type=ValueType.INT64 + ) fv1 = FeatureView( name="fv1", @@ -432,11 +432,9 @@ def test_apply_remote_repo(): ) @pytest.mark.parametrize("dataframe_source", [lazy_fixture("simple_dataset_1")]) def test_reapply_feature_view_success(test_feature_store, dataframe_source): - with prep_file_source( - df=dataframe_source, event_timestamp_column="ts_1" - ) as file_source: + with prep_file_source(df=dataframe_source, timestamp_field="ts_1") as file_source: - e = Entity(name="id", join_key="id_join_key", value_type=ValueType.STRING) + e = Entity(name="id", join_keys=["id_join_key"], value_type=ValueType.STRING) # Create Feature View fv1 = FeatureView( diff --git a/sdk/python/tests/integration/registration/test_inference.py b/sdk/python/tests/integration/registration/test_inference.py index 526f422e9d6..6cf49c31db8 100644 --- a/sdk/python/tests/integration/registration/test_inference.py +++ b/sdk/python/tests/integration/registration/test_inference.py @@ -7,6 +7,7 @@ BigQuerySource, Entity, Feature, + FeatureService, FileSource, RedshiftSource, RepoConfig, @@ -24,12 +25,13 @@ from feast.inference import ( update_data_sources_with_inferred_event_timestamp_col, update_entities_with_inferred_types_from_feature_views, + update_feature_views_with_inferred_features, ) from feast.infra.offline_stores.contrib.spark_offline_store.spark_source import ( SparkSource, ) from feast.on_demand_feature_view import on_demand_feature_view -from feast.types import PrimitiveFeastType, String, UnixTimestamp +from feast.types import Float32, String, UnixTimestamp from tests.utils.data_source_utils import ( prep_file_source, simple_bq_source_using_query_arg, @@ -41,9 +43,9 @@ def test_update_entities_with_inferred_types_from_feature_views( simple_dataset_1, simple_dataset_2 ): with prep_file_source( - df=simple_dataset_1, 
event_timestamp_column="ts_1" + df=simple_dataset_1, timestamp_field="ts_1" ) as file_source, prep_file_source( - df=simple_dataset_2, event_timestamp_column="ts_1" + df=simple_dataset_2, timestamp_field="ts_1" ) as file_source_2: fv1 = FeatureView( @@ -53,8 +55,8 @@ def test_update_entities_with_inferred_types_from_feature_views( name="fv2", entities=["id"], batch_source=file_source_2, ttl=None, ) - actual_1 = Entity(name="id", join_key="id_join_key") - actual_2 = Entity(name="id", join_key="id_join_key") + actual_1 = Entity(name="id", join_keys=["id_join_key"]) + actual_2 = Entity(name="id", join_keys=["id_join_key"]) update_entities_with_inferred_types_from_feature_views( [actual_1], [fv1], RepoConfig(provider="local", project="test") @@ -63,16 +65,16 @@ def test_update_entities_with_inferred_types_from_feature_views( [actual_2], [fv2], RepoConfig(provider="local", project="test") ) assert actual_1 == Entity( - name="id", join_key="id_join_key", value_type=ValueType.INT64 + name="id", join_keys=["id_join_key"], value_type=ValueType.INT64 ) assert actual_2 == Entity( - name="id", join_key="id_join_key", value_type=ValueType.STRING + name="id", join_keys=["id_join_key"], value_type=ValueType.STRING ) with pytest.raises(RegistryInferenceFailure): # two viable data types update_entities_with_inferred_types_from_feature_views( - [Entity(name="id", join_key="id_join_key")], + [Entity(name="id", join_keys=["id_join_key"])], [fv1, fv2], RepoConfig(provider="local", project="test"), ) @@ -143,7 +145,6 @@ def test_update_file_data_source_with_inferred_event_timestamp_col(simple_datase @pytest.mark.integration -@pytest.mark.universal def test_update_data_sources_with_inferred_event_timestamp_col(universal_data_sources): (_, _, data_sources) = universal_data_sources data_sources_copy = deepcopy(data_sources) @@ -168,15 +169,14 @@ def test_update_data_sources_with_inferred_event_timestamp_col(universal_data_so def test_on_demand_features_type_inference(): # Create Feature 
Views date_request = RequestSource( - name="date_request", - schema=[Field(name="some_date", dtype=PrimitiveFeastType.UNIX_TIMESTAMP)], + name="date_request", schema=[Field(name="some_date", dtype=UnixTimestamp)], ) @on_demand_feature_view( - sources={"date_request": date_request}, - features=[ - Feature(name="output", dtype=ValueType.UNIX_TIMESTAMP), - Feature(name="string_output", dtype=ValueType.STRING), + sources=[date_request], + schema=[ + Field(name="output", dtype=UnixTimestamp), + Field(name="string_output", dtype=String), ], ) def test_view(features_df: pd.DataFrame) -> pd.DataFrame: @@ -229,7 +229,7 @@ def test_view_with_missing_feature(features_df: pd.DataFrame) -> pd.DataFrame: @pytest.mark.parametrize( "request_source_schema", [ - [Field(name="some_date", dtype=PrimitiveFeastType.UNIX_TIMESTAMP)], + [Field(name="some_date", dtype=UnixTimestamp)], {"some_date": ValueType.UNIX_TIMESTAMP}, ], ) @@ -245,7 +245,7 @@ def test_datasource_inference(request_source_schema): Feature(name="output", dtype=ValueType.UNIX_TIMESTAMP), Feature(name="string_output", dtype=ValueType.STRING), ], - sources={"date_request": date_request}, + sources=[date_request], ) def test_view(features_df: pd.DataFrame) -> pd.DataFrame: data = pd.DataFrame() @@ -256,7 +256,7 @@ def test_view(features_df: pd.DataFrame) -> pd.DataFrame: test_view.infer_features() @on_demand_feature_view( - sources={"date_request": date_request}, + sources=[date_request], schema=[ Field(name="output", dtype=UnixTimestamp), Field(name="object_output", dtype=String), @@ -272,7 +272,7 @@ def invalid_test_view(features_df: pd.DataFrame) -> pd.DataFrame: invalid_test_view.infer_features() @on_demand_feature_view( - sources={"date_request": date_request}, + sources=[date_request], features=[ Feature(name="output", dtype=ValueType.UNIX_TIMESTAMP), Feature(name="missing", dtype=ValueType.STRING), @@ -285,3 +285,72 @@ def test_view_with_missing_feature(features_df: pd.DataFrame) -> pd.DataFrame: with 
pytest.raises(SpecifiedFeaturesNotPresentError): test_view_with_missing_feature.infer_features() + + +def test_update_feature_views_with_inferred_features(): + file_source = FileSource(name="test", path="test path") + entity1 = Entity(name="test1", join_keys=["test_column_1"]) + entity2 = Entity(name="test2", join_keys=["test_column_2"]) + feature_view_1 = FeatureView( + name="test1", + entities=[entity1], + schema=[ + Field(name="feature", dtype=Float32), + Field(name="test_column_1", dtype=String), + ], + source=file_source, + ) + feature_view_2 = FeatureView( + name="test2", + entities=[entity1, entity2], + schema=[ + Field(name="feature", dtype=Float32), + Field(name="test_column_1", dtype=String), + Field(name="test_column_2", dtype=String), + ], + source=file_source, + ) + + assert len(feature_view_1.schema) == 2 + assert len(feature_view_1.features) == 2 + + # The entity field should be deleted from the schema and features of the feature view. + update_feature_views_with_inferred_features( + [feature_view_1], [entity1], RepoConfig(provider="local", project="test") + ) + assert len(feature_view_1.schema) == 1 + assert len(feature_view_1.features) == 1 + + assert len(feature_view_2.schema) == 3 + assert len(feature_view_2.features) == 3 + + # The entity fields should be deleted from the schema and features of the feature view. 
+ update_feature_views_with_inferred_features( + [feature_view_2], + [entity1, entity2], + RepoConfig(provider="local", project="test"), + ) + assert len(feature_view_2.schema) == 1 + assert len(feature_view_2.features) == 1 + + +def test_update_feature_services_with_inferred_features(simple_dataset_1): + with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: + entity1 = Entity(name="test1", join_keys=["id_join_key"]) + feature_view_1 = FeatureView( + name="test1", entities=[entity1], source=file_source, + ) + feature_service = FeatureService(name="fs_1", features=[feature_view_1]) + assert len(feature_service.feature_view_projections) == 1 + assert len(feature_service.feature_view_projections[0].features) == 0 + + update_feature_views_with_inferred_features( + [feature_view_1], [entity1], RepoConfig(provider="local", project="test") + ) + feature_service.infer_features( + fvs_to_update={feature_view_1.name: feature_view_1} + ) + + assert len(feature_view_1.schema) == 3 + assert len(feature_view_1.features) == 3 + assert len(feature_service.feature_view_projections[0].features) == 3 diff --git a/sdk/python/tests/integration/registration/test_registry.py b/sdk/python/tests/integration/registration/test_registry.py index 072be15bfee..f011d73d2dd 100644 --- a/sdk/python/tests/integration/registration/test_registry.py +++ b/sdk/python/tests/integration/registration/test_registry.py @@ -29,7 +29,7 @@ from feast.protos.feast.types import Value_pb2 as ValueProto from feast.registry import Registry from feast.repo_config import RegistryConfig -from feast.types import Array, Bytes, Float32, Int32, Int64, PrimitiveFeastType, String +from feast.types import Array, Bytes, Float32, Int32, Int64, String from feast.value_type import ValueType @@ -240,10 +240,7 @@ def test_apply_feature_view_success(test_registry): # TODO(kevjumba): remove this in feast 0.23 when deprecating @pytest.mark.parametrize( "request_source_schema", - [ - 
[Field(name="my_input_1", dtype=PrimitiveFeastType.INT32)], - {"my_input_1": ValueType.INT32}, - ], + [[Field(name="my_input_1", dtype=Int32)], {"my_input_1": ValueType.INT32}], ) def test_modify_feature_views_success(test_registry, request_source_schema): # Create Feature Views @@ -270,7 +267,7 @@ def test_modify_feature_views_success(test_registry, request_source_schema): Feature(name="odfv1_my_feature_1", dtype=ValueType.STRING), Feature(name="odfv1_my_feature_2", dtype=ValueType.INT32), ], - sources={"request_source": request_source}, + sources=[request_source], ) def odfv1(feature_df: pd.DataFrame) -> pd.DataFrame: data = pd.DataFrame() @@ -290,7 +287,7 @@ def odfv1(feature_df: pd.DataFrame) -> pd.DataFrame: Feature(name="odfv1_my_feature_1", dtype=ValueType.FLOAT), Feature(name="odfv1_my_feature_2", dtype=ValueType.INT32), ], - sources={"request_source": request_source}, + sources=[request_source], ) def odfv1(feature_df: pd.DataFrame) -> pd.DataFrame: data = pd.DataFrame() diff --git a/sdk/python/tests/integration/registration/test_universal_odfv_feature_inference.py b/sdk/python/tests/integration/registration/test_universal_odfv_feature_inference.py index 9dced8f13ad..b7a9a571af6 100644 --- a/sdk/python/tests/integration/registration/test_universal_odfv_feature_inference.py +++ b/sdk/python/tests/integration/registration/test_universal_odfv_feature_inference.py @@ -11,26 +11,27 @@ from tests.integration.feature_repos.universal.feature_views import ( conv_rate_plus_100_feature_view, create_conv_rate_request_source, - create_driver_hourly_stats_feature_view, - create_item_embeddings_feature_view, + create_driver_hourly_stats_batch_feature_view, + create_item_embeddings_batch_feature_view, create_similarity_request_source, similarity_feature_view, ) @pytest.mark.integration -@pytest.mark.universal +@pytest.mark.universal_offline_stores @pytest.mark.parametrize("infer_features", [True, False], ids=lambda v: str(v)) def test_infer_odfv_features(environment, 
universal_data_sources, infer_features): store = environment.feature_store (entities, datasets, data_sources) = universal_data_sources - driver_hourly_stats = create_driver_hourly_stats_feature_view(data_sources.driver) + driver_hourly_stats = create_driver_hourly_stats_batch_feature_view( + data_sources.driver + ) request_source = create_conv_rate_request_source() driver_odfv = conv_rate_plus_100_feature_view( - {"driver": driver_hourly_stats, "input_request": request_source}, - infer_features=infer_features, + [driver_hourly_stats, request_source], infer_features=infer_features, ) feast_objects = [driver_hourly_stats, driver_odfv, driver(), customer()] @@ -59,30 +60,30 @@ def test_infer_odfv_list_features(environment, infer_features, tmp_path): timestamp_field="event_timestamp", created_timestamp_column="created", ) - items = create_item_embeddings_feature_view(fake_items_src) + item_feature_view = create_item_embeddings_batch_feature_view(fake_items_src) sim_odfv = similarity_feature_view( - {"items": items, "input_request": create_similarity_request_source()}, + [item_feature_view, create_similarity_request_source()], infer_features=infer_features, ) store = environment.feature_store - store.apply([item(), items, sim_odfv]) + store.apply([item(), item_feature_view, sim_odfv]) odfv = store.get_on_demand_feature_view("similarity") assert len(odfv.features) == 2 @pytest.mark.integration -@pytest.mark.universal def test_infer_odfv_features_with_error(environment, universal_data_sources): store = environment.feature_store (entities, datasets, data_sources) = universal_data_sources features = [Field(name="conv_rate_plus_200", dtype=Float64)] - driver_hourly_stats = create_driver_hourly_stats_feature_view(data_sources.driver) + driver_hourly_stats = create_driver_hourly_stats_batch_feature_view( + data_sources.driver + ) request_source = create_conv_rate_request_source() driver_odfv = conv_rate_plus_100_feature_view( - {"driver": driver_hourly_stats, "input_request": 
request_source}, - features=features, + [driver_hourly_stats, request_source], features=features, ) feast_objects = [driver_hourly_stats, driver_odfv, driver(), customer()] diff --git a/sdk/python/tests/integration/registration/test_universal_types.py b/sdk/python/tests/integration/registration/test_universal_types.py index 81fa0200fdd..6d016e3e85c 100644 --- a/sdk/python/tests/integration/registration/test_universal_types.py +++ b/sdk/python/tests/integration/registration/test_universal_types.py @@ -8,16 +8,11 @@ import pyarrow as pa import pytest +from feast.entity import Entity from feast.infra.offline_stores.offline_store import RetrievalJob from feast.types import Array, Bool, Float32, Int32, Int64, UnixTimestamp from feast.value_type import ValueType from tests.data.data_creator import create_dataset -from tests.integration.feature_repos.repo_configuration import ( - FULL_REPO_CONFIGS, - REDIS_CONFIG, - IntegrationTestRepoConfig, - construct_test_environment, -) from tests.integration.feature_repos.universal.entities import driver from tests.integration.feature_repos.universal.feature_views import driver_feature_view @@ -25,46 +20,36 @@ def populate_test_configs(offline: bool): - entity_type_feature_dtypes = [ - (ValueType.INT32, "int32"), - (ValueType.INT64, "int64"), - (ValueType.STRING, "float"), - (ValueType.STRING, "bool"), - (ValueType.INT32, "datetime"), + feature_dtypes = [ + "int32", + "int64", + "float", + "bool", + "datetime", ] configs: List[TypeTestConfig] = [] - for test_repo_config in FULL_REPO_CONFIGS: - for entity_type, feature_dtype in entity_type_feature_dtypes: - for feature_is_list in [True, False]: - # Redshift doesn't support list features - if test_repo_config.provider == "aws" and feature_is_list is True: - continue - # For offline tests, don't need to vary for online store - if offline and test_repo_config.online_store == REDIS_CONFIG: + for feature_dtype in feature_dtypes: + for feature_is_list in [True, False]: + for has_empty_list 
in [True, False]: + # For non list features `has_empty_list` does nothing + if feature_is_list is False and has_empty_list is True: continue - for has_empty_list in [True, False]: - # For non list features `has_empty_list` does nothing - if feature_is_list is False and has_empty_list is True: - continue - configs.append( - TypeTestConfig( - entity_type=entity_type, - feature_dtype=feature_dtype, - feature_is_list=feature_is_list, - has_empty_list=has_empty_list, - test_repo_config=test_repo_config, - ) + + configs.append( + TypeTestConfig( + feature_dtype=feature_dtype, + feature_is_list=feature_is_list, + has_empty_list=has_empty_list, ) + ) return configs @dataclass(frozen=True, repr=True) class TypeTestConfig: - entity_type: ValueType feature_dtype: str feature_is_list: bool has_empty_list: bool - test_repo_config: IntegrationTestRepoConfig OFFLINE_TYPE_TEST_CONFIGS: List[TypeTestConfig] = populate_test_configs(offline=True) @@ -76,8 +61,15 @@ class TypeTestConfig: scope="session", ids=[str(c) for c in OFFLINE_TYPE_TEST_CONFIGS], ) -def offline_types_test_fixtures(request): - return get_fixtures(request) +def offline_types_test_fixtures(request, environment): + config: TypeTestConfig = request.param + if ( + environment.test_repo_config.provider == "aws" + and config.feature_is_list is True + ): + pytest.skip("Redshift doesn't support list features") + + return get_fixtures(request, environment) @pytest.fixture( @@ -85,86 +77,91 @@ def offline_types_test_fixtures(request): scope="session", ids=[str(c) for c in ONLINE_TYPE_TEST_CONFIGS], ) -def online_types_test_fixtures(request): - return get_fixtures(request) +def online_types_test_fixtures(request, environment): + return get_fixtures(request, environment) -def get_fixtures(request): +def get_fixtures(request, environment): config: TypeTestConfig = request.param # Lower case needed because Redshift lower-cases all table names - test_project_id = 
f"{config.entity_type}{config.feature_dtype}{config.feature_is_list}".replace( + destination_name = f"feature_type_{config.feature_dtype}{config.feature_is_list}".replace( ".", "" ).lower() - type_test_environment = construct_test_environment( - test_repo_config=config.test_repo_config, - test_suite_name=f"test_{test_project_id}", - ) config = request.param df = create_dataset( - config.entity_type, + ValueType.INT64, config.feature_dtype, config.feature_is_list, config.has_empty_list, ) - data_source = type_test_environment.data_source_creator.create_data_source( - df, - destination_name=type_test_environment.feature_store.project, - field_mapping={"ts_1": "ts"}, + data_source = environment.data_source_creator.create_data_source( + df, destination_name=destination_name, field_mapping={"ts_1": "ts"}, ) fv = create_feature_view( - request.fixturename, + destination_name, config.feature_dtype, config.feature_is_list, config.has_empty_list, data_source, ) - def cleanup(): - try: - type_test_environment.data_source_creator.teardown() - except Exception: # noqa - logger.exception("DataSourceCreator teardown has failed") - - type_test_environment.feature_store.teardown() - - request.addfinalizer(cleanup) - - return type_test_environment, config, data_source, fv + return config, data_source, fv @pytest.mark.integration -@pytest.mark.universal -def test_entity_inference_types_match(offline_types_test_fixtures): - environment, config, data_source, fv = offline_types_test_fixtures +@pytest.mark.universal_offline_stores +@pytest.mark.parametrize( + "entity_type", [ValueType.INT32, ValueType.INT64, ValueType.STRING] +) +def test_entity_inference_types_match(environment, entity_type): fs = environment.feature_store # Don't specify value type in entity to force inference - entity = driver(value_type=ValueType.UNKNOWN) + entity = Entity( + name=f"driver_{entity_type.name.lower()}", + value_type=ValueType.UNKNOWN, + join_key="driver_id", + ) + df = create_dataset(entity_type, 
feature_dtype="int32",) + data_source = environment.data_source_creator.create_data_source( + df, + destination_name=f"entity_type_{entity_type.name.lower()}", + field_mapping={"ts_1": "ts"}, + ) + fv = create_feature_view( + f"fv_entity_type_{entity_type.name.lower()}", + feature_dtype="int32", + feature_is_list=False, + has_empty_list=False, + data_source=data_source, + entity=entity.name, + ) fs.apply([fv, entity]) - entities = fs.list_entities() + inferred_entity = fs.get_entity(entity.name) entity_type_to_expected_inferred_entity_type = { - ValueType.INT32: ValueType.INT64, - ValueType.INT64: ValueType.INT64, - ValueType.FLOAT: ValueType.DOUBLE, - ValueType.STRING: ValueType.STRING, + ValueType.INT32: {ValueType.INT32, ValueType.INT64}, + ValueType.INT64: {ValueType.INT32, ValueType.INT64}, + ValueType.FLOAT: {ValueType.DOUBLE}, + ValueType.STRING: {ValueType.STRING}, } - for entity in entities: - assert ( - entity.value_type - == entity_type_to_expected_inferred_entity_type[config.entity_type] - ) + assert ( + inferred_entity.value_type + in entity_type_to_expected_inferred_entity_type[entity_type] + ) @pytest.mark.integration -@pytest.mark.universal -def test_feature_get_historical_features_types_match(offline_types_test_fixtures): +@pytest.mark.universal_offline_stores +def test_feature_get_historical_features_types_match( + offline_types_test_fixtures, environment +): """ Note: to make sure this test works, we need to ensure that get_historical_features returns at least one non-null row to make sure type inferral works. This can only be achieved by carefully matching entity_df to the data fixtures. 
""" - environment, config, data_source, fv = offline_types_test_fixtures + config, data_source, fv = offline_types_test_fixtures fs = environment.feature_store entity = driver() fv = create_feature_view( @@ -177,9 +174,7 @@ def test_feature_get_historical_features_types_match(offline_types_test_fixtures fs.apply([fv, entity]) entity_df = pd.DataFrame() - entity_df["driver_id"] = ( - ["1", "3"] if config.entity_type == ValueType.STRING else [1, 3] - ) + entity_df["driver_id"] = [1, 3] ts = pd.Timestamp(datetime.utcnow()).round("ms") entity_df["ts"] = [ ts - timedelta(hours=4), @@ -213,9 +208,12 @@ def test_feature_get_historical_features_types_match(offline_types_test_fixtures @pytest.mark.integration -@pytest.mark.universal -def test_feature_get_online_features_types_match(online_types_test_fixtures): - environment, config, data_source, fv = online_types_test_fixtures +@pytest.mark.universal_online_stores(only=["sqlite"]) +def test_feature_get_online_features_types_match( + online_types_test_fixtures, environment +): + config, data_source, fv = online_types_test_fixtures + entity = driver() fv = create_feature_view( "get_online_features_types_match", config.feature_dtype, @@ -225,7 +223,6 @@ def test_feature_get_online_features_types_match(online_types_test_fixtures): ) fs = environment.feature_store features = [fv.name + ":value"] - entity = driver(value_type=config.entity_type) fs.apply([fv, entity]) fs.materialize( environment.start_date, @@ -234,9 +231,8 @@ def test_feature_get_online_features_types_match(online_types_test_fixtures): # we can successfully infer type even from all empty values ) - driver_id_value = "1" if config.entity_type == ValueType.STRING else 1 online_features = fs.get_online_features( - features=features, entity_rows=[{"driver_id": driver_id_value}], + features=features, entity_rows=[{"driver_id": 1}], ).to_dict() feature_list_dtype_to_expected_online_response_value_type = { @@ -267,7 +263,7 @@ def 
test_feature_get_online_features_types_match(online_types_test_fixtures): def create_feature_view( - name, feature_dtype, feature_is_list, has_empty_list, data_source + name, feature_dtype, feature_is_list, has_empty_list, data_source, entity="driver" ): if feature_is_list is True: if feature_dtype == "int32": @@ -292,7 +288,7 @@ def create_feature_view( elif feature_dtype == "datetime": dtype = UnixTimestamp - return driver_feature_view(data_source, name=name, dtype=dtype,) + return driver_feature_view(data_source, name=name, dtype=dtype, entities=[entity]) def assert_expected_historical_feature_types( @@ -301,7 +297,7 @@ def assert_expected_historical_feature_types( print("Asserting historical feature types") feature_dtype_to_expected_historical_feature_dtype = { "int32": (pd.api.types.is_integer_dtype,), - "int64": (pd.api.types.is_int64_dtype,), + "int64": (pd.api.types.is_integer_dtype,), "float": (pd.api.types.is_float_dtype,), "string": (pd.api.types.is_string_dtype,), "bool": (pd.api.types.is_bool_dtype, pd.api.types.is_object_dtype), @@ -310,7 +306,7 @@ def assert_expected_historical_feature_types( dtype_checkers = feature_dtype_to_expected_historical_feature_dtype[feature_dtype] assert any( check(historical_features_df.dtypes["value"]) for check in dtype_checkers - ) + ), f"Failed to match feature type {historical_features_df.dtypes['value']} with checkers {dtype_checkers}" def assert_feature_list_types( @@ -356,8 +352,8 @@ def assert_expected_arrow_types( historical_features_arrow = historical_features.to_arrow() print(historical_features_arrow) feature_list_dtype_to_expected_historical_feature_arrow_type = { - "int32": pa.types.is_int64, - "int64": pa.types.is_int64, + "int32": pa.types.is_signed_integer, # different offline stores could interpret integers differently + "int64": pa.types.is_signed_integer, # eg, Snowflake chooses the smallest possible (like int8) "float": pa.types.is_float64, "string": pa.types.is_string, "bool": pa.types.is_boolean, 
diff --git a/sdk/python/tests/unit/diff/test_registry_diff.py b/sdk/python/tests/unit/diff/test_registry_diff.py index 0322ab47abf..483dae73e26 100644 --- a/sdk/python/tests/unit/diff/test_registry_diff.py +++ b/sdk/python/tests/unit/diff/test_registry_diff.py @@ -7,9 +7,7 @@ def test_tag_objects_for_keep_delete_update_add(simple_dataset_1): - with prep_file_source( - df=simple_dataset_1, event_timestamp_column="ts_1" - ) as file_source: + with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: to_delete = FeatureView( name="to_delete", entities=["id"], batch_source=file_source, ttl=None, ) @@ -53,9 +51,7 @@ def test_tag_objects_for_keep_delete_update_add(simple_dataset_1): def test_diff_registry_objects_feature_views(simple_dataset_1): - with prep_file_source( - df=simple_dataset_1, event_timestamp_column="ts_1" - ) as file_source: + with prep_file_source(df=simple_dataset_1, timestamp_field="ts_1") as file_source: pre_changed = FeatureView( name="fv2", entities=["id"], diff --git a/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py b/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py index 7d6da0dc06d..6275a177e05 100644 --- a/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py +++ b/sdk/python/tests/unit/infra/online_store/test_dynamodb_online_store.py @@ -11,6 +11,8 @@ DynamoDBOnlineStoreConfig, DynamoDBTable, ) +from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto +from feast.protos.feast.types.Value_pb2 import Value as ValueProto from feast.repo_config import RepoConfig from tests.utils.online_store_utils import ( _create_n_customer_test_samples, @@ -37,11 +39,17 @@ def repo_config(): project=PROJECT, provider=PROVIDER, online_store=DynamoDBOnlineStoreConfig(region=REGION), + # online_store={"type": "dynamodb", "region": REGION}, offline_store=FileOfflineStoreConfig(), ) -def test_online_store_config_default(): +@pytest.fixture +def 
dynamodb_online_store(): + return DynamoDBOnlineStore() + + +def test_dynamodb_online_store_config_default(): """Test DynamoDBOnlineStoreConfig default parameters.""" aws_region = "us-west-2" dynamodb_store_config = DynamoDBOnlineStoreConfig(region=aws_region) @@ -49,7 +57,6 @@ def test_online_store_config_default(): assert dynamodb_store_config.batch_size == 40 assert dynamodb_store_config.endpoint_url is None assert dynamodb_store_config.region == aws_region - assert dynamodb_store_config.sort_response is True assert dynamodb_store_config.table_name_template == "{project}.{table_name}" @@ -65,25 +72,22 @@ def test_dynamodb_table_default_params(): assert dynamodb_table._dynamodb_resource is None -def test_online_store_config_custom_params(): +def test_dynamodb_online_store_config_custom_params(): """Test DynamoDBOnlineStoreConfig custom parameters.""" aws_region = "us-west-2" batch_size = 20 endpoint_url = "http://localhost:8000" - sort_response = False table_name_template = "feast_test.dynamodb_table" dynamodb_store_config = DynamoDBOnlineStoreConfig( region=aws_region, batch_size=batch_size, endpoint_url=endpoint_url, - sort_response=sort_response, table_name_template=table_name_template, ) assert dynamodb_store_config.type == "dynamodb" assert dynamodb_store_config.batch_size == batch_size assert dynamodb_store_config.endpoint_url == endpoint_url assert dynamodb_store_config.region == aws_region - assert dynamodb_store_config.sort_response == sort_response assert dynamodb_store_config.table_name_template == table_name_template @@ -100,15 +104,14 @@ def test_dynamodb_table_custom_params(): assert dynamodb_table._dynamodb_resource is None -def test_online_store_config_dynamodb_client(): +def test_dynamodb_online_store_config_dynamodb_client(dynamodb_online_store): """Test DynamoDBOnlineStoreConfig configure DynamoDB client with endpoint_url.""" aws_region = "us-west-2" endpoint_url = "http://localhost:8000" - dynamodb_store = DynamoDBOnlineStore() 
dynamodb_store_config = DynamoDBOnlineStoreConfig( region=aws_region, endpoint_url=endpoint_url ) - dynamodb_client = dynamodb_store._get_dynamodb_client( + dynamodb_client = dynamodb_online_store._get_dynamodb_client( dynamodb_store_config.region, dynamodb_store_config.endpoint_url ) assert dynamodb_client.meta.region_name == aws_region @@ -128,15 +131,14 @@ def test_dynamodb_table_dynamodb_client(): assert dynamodb_client.meta.endpoint_url == endpoint_url -def test_online_store_config_dynamodb_resource(): +def test_dynamodb_online_store_config_dynamodb_resource(dynamodb_online_store): """Test DynamoDBOnlineStoreConfig configure DynamoDB Resource with endpoint_url.""" aws_region = "us-west-2" endpoint_url = "http://localhost:8000" - dynamodb_store = DynamoDBOnlineStore() dynamodb_store_config = DynamoDBOnlineStoreConfig( region=aws_region, endpoint_url=endpoint_url ) - dynamodb_resource = dynamodb_store._get_dynamodb_resource( + dynamodb_resource = dynamodb_online_store._get_dynamodb_resource( dynamodb_store_config.region, dynamodb_store_config.endpoint_url ) assert dynamodb_resource.meta.client.meta.region_name == aws_region @@ -158,17 +160,19 @@ def test_dynamodb_table_dynamodb_resource(): @mock_dynamodb2 @pytest.mark.parametrize("n_samples", [5, 50, 100]) -def test_online_read(repo_config, n_samples): +def test_dynamodb_online_store_online_read( + repo_config, dynamodb_online_store, n_samples +): """Test DynamoDBOnlineStore online_read method.""" - _create_test_table(PROJECT, f"{TABLE_NAME}_{n_samples}", REGION) + db_table_name = f"{TABLE_NAME}_online_read_{n_samples}" + _create_test_table(PROJECT, db_table_name, REGION) data = _create_n_customer_test_samples(n=n_samples) - _insert_data_test_table(data, PROJECT, f"{TABLE_NAME}_{n_samples}", REGION) + _insert_data_test_table(data, PROJECT, db_table_name, REGION) entity_keys, features, *rest = zip(*data) - dynamodb_store = DynamoDBOnlineStore() - returned_items = dynamodb_store.online_read( + returned_items = 
dynamodb_online_store.online_read( config=repo_config, - table=MockFeatureView(name=f"{TABLE_NAME}_{n_samples}"), + table=MockFeatureView(name=db_table_name), entity_keys=entity_keys, ) assert len(returned_items) == len(data) @@ -176,7 +180,128 @@ def test_online_read(repo_config, n_samples): @mock_dynamodb2 -def test_write_batch_non_duplicates(repo_config): +@pytest.mark.parametrize("n_samples", [5, 50, 100]) +def test_dynamodb_online_store_online_write_batch( + repo_config, dynamodb_online_store, n_samples +): + """Test DynamoDBOnlineStore online_write_batch method.""" + db_table_name = f"{TABLE_NAME}_online_write_batch_{n_samples}" + _create_test_table(PROJECT, db_table_name, REGION) + data = _create_n_customer_test_samples() + + entity_keys, features, *rest = zip(*data) + dynamodb_online_store.online_write_batch( + config=repo_config, + table=MockFeatureView(name=db_table_name), + data=data, + progress=None, + ) + stored_items = dynamodb_online_store.online_read( + config=repo_config, + table=MockFeatureView(name=db_table_name), + entity_keys=entity_keys, + ) + assert stored_items is not None + assert len(stored_items) == len(data) + assert [item[1] for item in stored_items] == list(features) + + +@mock_dynamodb2 +def test_dynamodb_online_store_update(repo_config, dynamodb_online_store): + """Test DynamoDBOnlineStore update method.""" + # create dummy table to keep + db_table_keep_name = f"{TABLE_NAME}_keep_update" + _create_test_table(PROJECT, db_table_keep_name, REGION) + # create dummy table to delete + db_table_delete_name = f"{TABLE_NAME}_delete_update" + _create_test_table(PROJECT, db_table_delete_name, REGION) + + dynamodb_online_store.update( + config=repo_config, + tables_to_delete=[MockFeatureView(name=db_table_delete_name)], + tables_to_keep=[MockFeatureView(name=db_table_keep_name)], + entities_to_delete=None, + entities_to_keep=None, + partial=None, + ) + + # check only db_table_keep_name exists + dynamodb_client = 
dynamodb_online_store._get_dynamodb_client(REGION) + existing_tables = dynamodb_client.list_tables() + existing_tables = existing_tables.get("TableNames", None) + + assert existing_tables is not None + assert len(existing_tables) == 1 + assert existing_tables[0] == f"test_aws.{db_table_keep_name}" + + +@mock_dynamodb2 +def test_dynamodb_online_store_teardown(repo_config, dynamodb_online_store): + """Test DynamoDBOnlineStore teardown method.""" + db_table_delete_name_one = f"{TABLE_NAME}_delete_teardown_1" + db_table_delete_name_two = f"{TABLE_NAME}_delete_teardown_2" + _create_test_table(PROJECT, db_table_delete_name_one, REGION) + _create_test_table(PROJECT, db_table_delete_name_two, REGION) + + dynamodb_online_store.teardown( + config=repo_config, + tables=[ + MockFeatureView(name=db_table_delete_name_one), + MockFeatureView(name=db_table_delete_name_two), + ], + entities=None, + ) + + # Check tables non exist + dynamodb_client = dynamodb_online_store._get_dynamodb_client(REGION) + existing_tables = dynamodb_client.list_tables() + existing_tables = existing_tables.get("TableNames", None) + + assert existing_tables is not None + assert len(existing_tables) == 0 + + +@mock_dynamodb2 +def test_dynamodb_online_store_online_read_unknown_entity( + repo_config, dynamodb_online_store +): + """Test DynamoDBOnlineStore online_read method.""" + n_samples = 2 + _create_test_table(PROJECT, f"{TABLE_NAME}_unknown_entity_{n_samples}", REGION) + data = _create_n_customer_test_samples(n=n_samples) + _insert_data_test_table( + data, PROJECT, f"{TABLE_NAME}_unknown_entity_{n_samples}", REGION + ) + + entity_keys, features, *rest = zip(*data) + # Append a nonsensical entity to search for + entity_keys = list(entity_keys) + features = list(features) + + # Have the unknown entity be in the beginning, middle, and end of the list of entities. 
+ for pos in range(len(entity_keys)): + entity_keys_with_unknown = deepcopy(entity_keys) + entity_keys_with_unknown.insert( + pos, + EntityKeyProto( + join_keys=["customer"], entity_values=[ValueProto(string_val="12359")] + ), + ) + features_with_none = deepcopy(features) + features_with_none.insert(pos, None) + returned_items = dynamodb_online_store.online_read( + config=repo_config, + table=MockFeatureView(name=f"{TABLE_NAME}_unknown_entity_{n_samples}"), + entity_keys=entity_keys_with_unknown, + ) + assert len(returned_items) == len(entity_keys_with_unknown) + assert [item[1] for item in returned_items] == list(features_with_none) + # The order should match the original entity key order + assert returned_items[pos] == (None, None) + + +@mock_dynamodb2 +def test_write_batch_non_duplicates(repo_config, dynamodb_online_store): """Test DynamoDBOnline Store deduplicate write batch request items.""" dynamodb_tbl = f"{TABLE_NAME}_batch_non_duplicates" _create_test_table(PROJECT, dynamodb_tbl, REGION) @@ -184,9 +309,8 @@ def test_write_batch_non_duplicates(repo_config): data_duplicate = deepcopy(data) dynamodb_resource = boto3.resource("dynamodb", region_name=REGION) table_instance = dynamodb_resource.Table(f"{PROJECT}.{dynamodb_tbl}") - dynamodb_store = DynamoDBOnlineStore() # Insert duplicate data - dynamodb_store._write_batch_non_duplicates( + dynamodb_online_store._write_batch_non_duplicates( table_instance, data + data_duplicate, progress=None ) # Request more items than inserted diff --git a/sdk/python/tests/unit/test_data_sources.py b/sdk/python/tests/unit/test_data_sources.py index ceb9ff4ce67..7f288d36db9 100644 --- a/sdk/python/tests/unit/test_data_sources.py +++ b/sdk/python/tests/unit/test_data_sources.py @@ -1,32 +1,33 @@ import pytest from feast import ValueType -from feast.data_source import PushSource, RequestDataSource, RequestSource +from feast.data_format import ProtoFormat +from feast.data_source import ( + DataSource, + KafkaSource, + KinesisSource, 
+ PushSource, + RequestDataSource, + RequestSource, +) from feast.field import Field from feast.infra.offline_stores.bigquery_source import BigQuerySource -from feast.types import PrimitiveFeastType +from feast.infra.offline_stores.file_source import FileSource +from feast.infra.offline_stores.redshift_source import RedshiftSource +from feast.infra.offline_stores.snowflake_source import SnowflakeSource +from feast.types import Bool, Float32, Int64 def test_push_with_batch(): push_source = PushSource( - name="test", - schema=[ - Field(name="f1", dtype=PrimitiveFeastType.FLOAT32), - Field(name="f2", dtype=PrimitiveFeastType.BOOL), - ], - timestamp_field="event_timestamp", - batch_source=BigQuerySource(table="test.test"), + name="test", batch_source=BigQuerySource(table="test.test"), ) push_source_proto = push_source.to_proto() assert push_source_proto.HasField("batch_source") - assert push_source_proto.timestamp_field is not None - assert push_source_proto.push_options is not None push_source_unproto = PushSource.from_proto(push_source_proto) assert push_source.name == push_source_unproto.name - assert push_source.schema == push_source_unproto.schema - assert push_source.timestamp_field == push_source_unproto.timestamp_field assert push_source.batch_source.name == push_source_unproto.batch_source.name @@ -43,8 +44,8 @@ def test_request_data_source_deprecation(): def test_request_source_primitive_type_to_proto(): schema = [ - Field(name="f1", dtype=PrimitiveFeastType.FLOAT32), - Field(name="f2", dtype=PrimitiveFeastType.BOOL), + Field(name="f1", dtype=Float32), + Field(name="f2", dtype=Bool), ] request_source = RequestSource( name="source", schema=schema, description="desc", tags={}, owner="feast", @@ -52,3 +53,203 @@ def test_request_source_primitive_type_to_proto(): request_proto = request_source.to_proto() deserialized_request_source = RequestSource.from_proto(request_proto) assert deserialized_request_source == request_source + + +def test_hash(): + push_source_1 
= PushSource( + name="test", batch_source=BigQuerySource(table="test.test"), + ) + push_source_2 = PushSource( + name="test", batch_source=BigQuerySource(table="test.test"), + ) + push_source_3 = PushSource( + name="test", batch_source=BigQuerySource(table="test.test2"), + ) + push_source_4 = PushSource( + name="test", + batch_source=BigQuerySource(table="test.test2"), + description="test", + ) + + s1 = {push_source_1, push_source_2} + assert len(s1) == 1 + + s2 = {push_source_1, push_source_3} + assert len(s2) == 2 + + s3 = {push_source_3, push_source_4} + assert len(s3) == 2 + + s4 = {push_source_1, push_source_2, push_source_3, push_source_4} + assert len(s4) == 3 + + +# TODO(kevjumba): Remove this test in feast 0.23 when positional arguments are removed. +def test_default_data_source_kw_arg_warning(): + # source_class = request.param + with pytest.warns(DeprecationWarning): + source = KafkaSource( + "name", "column", "bootstrap_servers", ProtoFormat("class_path"), "topic" + ) + assert source.name == "name" + assert source.timestamp_field == "column" + assert source.kafka_options.bootstrap_servers == "bootstrap_servers" + assert source.kafka_options.topic == "topic" + with pytest.raises(ValueError): + KafkaSource("name", "column", "bootstrap_servers", topic="topic") + + with pytest.warns(DeprecationWarning): + source = KinesisSource( + "name", + "column", + "c_column", + ProtoFormat("class_path"), + "region", + "stream_name", + ) + assert source.name == "name" + assert source.timestamp_field == "column" + assert source.created_timestamp_column == "c_column" + assert source.kinesis_options.region == "region" + assert source.kinesis_options.stream_name == "stream_name" + + with pytest.raises(ValueError): + KinesisSource( + "name", "column", "c_column", region="region", stream_name="stream_name" + ) + + with pytest.warns(DeprecationWarning): + source = RequestSource( + "name", [Field(name="val_to_add", dtype=Int64)], description="description" + ) + assert 
source.name == "name" + assert source.description == "description" + + with pytest.raises(ValueError): + RequestSource("name") + + with pytest.warns(DeprecationWarning): + source = PushSource( + "name", + BigQuerySource(name="bigquery_source", table="table"), + description="description", + ) + assert source.name == "name" + assert source.description == "description" + assert source.batch_source.name == "bigquery_source" + + with pytest.raises(ValueError): + PushSource("name") + + # No name warning for DataSource + with pytest.warns(UserWarning): + source = KafkaSource( + timestamp_field="column", + bootstrap_servers="bootstrap_servers", + message_format=ProtoFormat("class_path"), + topic="topic", + ) + + +def test_proto_conversion(): + bigquery_source = BigQuerySource( + name="test_source", + table="test_table", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", + field_mapping={"foo": "bar"}, + description="test description", + tags={"test": "test"}, + owner="test@gmail.com", + ) + + file_source = FileSource( + name="test_source", + path="test_path", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", + field_mapping={"foo": "bar"}, + description="test description", + tags={"test": "test"}, + owner="test@gmail.com", + ) + + redshift_source = RedshiftSource( + name="test_source", + database="test_database", + schema="test_schema", + table="test_table", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", + field_mapping={"foo": "bar"}, + description="test description", + tags={"test": "test"}, + owner="test@gmail.com", + ) + + snowflake_source = SnowflakeSource( + name="test_source", + database="test_database", + warehouse="test_warehouse", + schema="test_schema", + table="test_table", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", + field_mapping={"foo": "bar"}, + description="test description", + tags={"test": "test"}, + 
owner="test@gmail.com", + ) + + kafka_source = KafkaSource( + name="test_source", + bootstrap_servers="test_servers", + message_format=ProtoFormat("class_path"), + topic="test_topic", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", + field_mapping={"foo": "bar"}, + description="test description", + tags={"test": "test"}, + owner="test@gmail.com", + batch_source=file_source, + ) + + kinesis_source = KinesisSource( + name="test_source", + region="test_region", + record_format=ProtoFormat("class_path"), + stream_name="test_stream", + timestamp_field="event_timestamp", + created_timestamp_column="created_timestamp", + field_mapping={"foo": "bar"}, + description="test description", + tags={"test": "test"}, + owner="test@gmail.com", + batch_source=file_source, + ) + + push_source = PushSource( + name="test_source", + batch_source=file_source, + description="test description", + tags={"test": "test"}, + owner="test@gmail.com", + ) + + request_source = RequestSource( + name="test_source", + schema=[Field(name="test1", dtype=Float32), Field(name="test1", dtype=Int64)], + description="test description", + tags={"test": "test"}, + owner="test@gmail.com", + ) + + assert DataSource.from_proto(bigquery_source.to_proto()) == bigquery_source + assert DataSource.from_proto(file_source.to_proto()) == file_source + assert DataSource.from_proto(redshift_source.to_proto()) == redshift_source + assert DataSource.from_proto(snowflake_source.to_proto()) == snowflake_source + assert DataSource.from_proto(kafka_source.to_proto()) == kafka_source + assert DataSource.from_proto(kinesis_source.to_proto()) == kinesis_source + assert DataSource.from_proto(push_source.to_proto()) == push_source + assert DataSource.from_proto(request_source.to_proto()) == request_source diff --git a/sdk/python/tests/unit/test_entity.py b/sdk/python/tests/unit/test_entity.py index fee8bd9f009..254a975f678 100644 --- a/sdk/python/tests/unit/test_entity.py +++ 
b/sdk/python/tests/unit/test_entity.py @@ -63,3 +63,22 @@ def test_multiple_args(): def test_name_keyword(recwarn): Entity(name="my-entity", value_type=ValueType.STRING) assert len(recwarn) == 0 + + +def test_hash(): + entity1 = Entity(name="my-entity", value_type=ValueType.STRING) + entity2 = Entity(name="my-entity", value_type=ValueType.STRING) + entity3 = Entity(name="my-entity", value_type=ValueType.FLOAT) + entity4 = Entity(name="my-entity", value_type=ValueType.FLOAT, description="test") + + s1 = {entity1, entity2} + assert len(s1) == 1 + + s2 = {entity1, entity3} + assert len(s2) == 2 + + s3 = {entity3, entity4} + assert len(s3) == 2 + + s4 = {entity1, entity2, entity3, entity4} + assert len(s4) == 3 diff --git a/sdk/python/tests/unit/test_feature_service.py b/sdk/python/tests/unit/test_feature_service.py index 522ac49de13..fc4fd70bcbf 100644 --- a/sdk/python/tests/unit/test_feature_service.py +++ b/sdk/python/tests/unit/test_feature_service.py @@ -1,4 +1,10 @@ -from feast import FeatureService +import pytest + +from feast.feature_service import FeatureService +from feast.feature_view import FeatureView +from feast.field import Field +from feast.infra.offline_stores.file_source import FileSource +from feast.types import Float32 def test_feature_service_with_description(): @@ -12,3 +18,88 @@ def test_feature_service_without_description(): feature_service = FeatureService(name="my-feature-service", features=[]) # assert feature_service.to_proto().spec.description == "" + + +def test_hash(): + file_source = FileSource(name="my-file-source", path="test.parquet") + feature_view = FeatureView( + name="my-feature-view", + entities=[], + schema=[ + Field(name="feature1", dtype=Float32), + Field(name="feature2", dtype=Float32), + ], + source=file_source, + ) + feature_service_1 = FeatureService( + name="my-feature-service", features=[feature_view[["feature1", "feature2"]]] + ) + feature_service_2 = FeatureService( + name="my-feature-service", 
features=[feature_view[["feature1", "feature2"]]] + ) + feature_service_3 = FeatureService( + name="my-feature-service", features=[feature_view[["feature1"]]] + ) + feature_service_4 = FeatureService( + name="my-feature-service", + features=[feature_view[["feature1"]]], + description="test", + ) + + s1 = {feature_service_1, feature_service_2} + assert len(s1) == 1 + + s2 = {feature_service_1, feature_service_3} + assert len(s2) == 2 + + s3 = {feature_service_3, feature_service_4} + assert len(s3) == 2 + + s4 = {feature_service_1, feature_service_2, feature_service_3, feature_service_4} + assert len(s4) == 3 + + +def test_feature_view_kw_args_warning(): + with pytest.warns(DeprecationWarning): + service = FeatureService("name", [], tags={"tag_1": "tag"}, description="desc") + assert service.name == "name" + assert service.tags == {"tag_1": "tag"} + assert service.description == "desc" + + # More positional args than name and features + with pytest.raises(ValueError): + service = FeatureService("name", [], {"tag_1": "tag"}, "desc") + + # No name defined. 
+ with pytest.raises(ValueError): + service = FeatureService(features=[], tags={"tag_1": "tag"}, description="desc") + + +def no_warnings(func): + def wrapper_no_warnings(*args, **kwargs): + with pytest.warns(None) as warnings: + func(*args, **kwargs) + + if len(warnings) > 0: + raise AssertionError( + "Warnings were raised: " + ", ".join([str(w) for w in warnings]) + ) + + return wrapper_no_warnings + + +@no_warnings +def test_feature_view_kw_args_normal(): + file_source = FileSource(name="my-file-source", path="test.parquet") + feature_view = FeatureView( + name="my-feature-view", + entities=[], + schema=[ + Field(name="feature1", dtype=Float32), + Field(name="feature2", dtype=Float32), + ], + source=file_source, + ) + _ = FeatureService( + name="my-feature-service", features=[feature_view[["feature1", "feature2"]]] + ) diff --git a/sdk/python/tests/unit/test_feature_view.py b/sdk/python/tests/unit/test_feature_view.py new file mode 100644 index 00000000000..80a583806e7 --- /dev/null +++ b/sdk/python/tests/unit/test_feature_view.py @@ -0,0 +1,64 @@ +# Copyright 2022 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from feast.feature_view import FeatureView +from feast.field import Field +from feast.infra.offline_stores.file_source import FileSource +from feast.types import Float32 + + +def test_hash(): + file_source = FileSource(name="my-file-source", path="test.parquet") + feature_view_1 = FeatureView( + name="my-feature-view", + entities=[], + schema=[ + Field(name="feature1", dtype=Float32), + Field(name="feature2", dtype=Float32), + ], + source=file_source, + ) + feature_view_2 = FeatureView( + name="my-feature-view", + entities=[], + schema=[ + Field(name="feature1", dtype=Float32), + Field(name="feature2", dtype=Float32), + ], + source=file_source, + ) + feature_view_3 = FeatureView( + name="my-feature-view", + entities=[], + schema=[Field(name="feature1", dtype=Float32)], + source=file_source, + ) + feature_view_4 = FeatureView( + name="my-feature-view", + entities=[], + schema=[Field(name="feature1", dtype=Float32)], + source=file_source, + description="test", + ) + + s1 = {feature_view_1, feature_view_2} + assert len(s1) == 1 + + s2 = {feature_view_1, feature_view_3} + assert len(s2) == 2 + + s3 = {feature_view_3, feature_view_4} + assert len(s3) == 2 + + s4 = {feature_view_1, feature_view_2, feature_view_3, feature_view_4} + assert len(s4) == 3 diff --git a/sdk/python/tests/unit/test_feature_views.py b/sdk/python/tests/unit/test_feature_views.py new file mode 100644 index 00000000000..8708a983c41 --- /dev/null +++ b/sdk/python/tests/unit/test_feature_views.py @@ -0,0 +1,70 @@ +from datetime import timedelta + +import pytest + +from feast.batch_feature_view import BatchFeatureView +from feast.data_format import AvroFormat +from feast.data_source import KafkaSource +from feast.infra.offline_stores.file_source import FileSource +from feast.stream_feature_view import StreamFeatureView + + +def test_create_batch_feature_view(): + batch_source = FileSource(path="some path") + BatchFeatureView( + name="test batch feature view", + entities=[], + ttl=timedelta(days=30), + 
source=batch_source, + ) + + with pytest.raises(ValueError): + BatchFeatureView( + name="test batch feature view", entities=[], ttl=timedelta(days=30) + ) + + stream_source = KafkaSource( + name="kafka", + timestamp_field="", + bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=FileSource(path="some path"), + ) + with pytest.raises(ValueError): + BatchFeatureView( + name="test batch feature view", + entities=[], + ttl=timedelta(days=30), + source=stream_source, + ) + + +def test_create_stream_feature_view(): + stream_source = KafkaSource( + name="kafka", + timestamp_field="", + bootstrap_servers="", + message_format=AvroFormat(""), + topic="topic", + batch_source=FileSource(path="some path"), + ) + StreamFeatureView( + name="test batch feature view", + entities=[], + ttl=timedelta(days=30), + source=stream_source, + ) + + with pytest.raises(ValueError): + StreamFeatureView( + name="test batch feature view", entities=[], ttl=timedelta(days=30) + ) + + with pytest.raises(ValueError): + StreamFeatureView( + name="test batch feature view", + entities=[], + ttl=timedelta(days=30), + source=FileSource(path="some path"), + ) diff --git a/sdk/python/tests/unit/test_on_demand_feature_view.py b/sdk/python/tests/unit/test_on_demand_feature_view.py new file mode 100644 index 00000000000..33435b8557e --- /dev/null +++ b/sdk/python/tests/unit/test_on_demand_feature_view.py @@ -0,0 +1,164 @@ +# Copyright 2022 The Feast Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import pandas as pd +import pytest + +from feast import RequestSource +from feast.feature_view import FeatureView +from feast.field import Field +from feast.infra.offline_stores.file_source import FileSource +from feast.on_demand_feature_view import OnDemandFeatureView, on_demand_feature_view +from feast.types import Float32, String, UnixTimestamp + + +def udf1(features_df: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["output1"] = features_df["feature1"] + df["output2"] = features_df["feature2"] + return df + + +def udf2(features_df: pd.DataFrame) -> pd.DataFrame: + df = pd.DataFrame() + df["output1"] = features_df["feature1"] + 100 + df["output2"] = features_df["feature2"] + 100 + return df + + +def test_hash(): + file_source = FileSource(name="my-file-source", path="test.parquet") + feature_view = FeatureView( + name="my-feature-view", + entities=[], + schema=[ + Field(name="feature1", dtype=Float32), + Field(name="feature2", dtype=Float32), + ], + source=file_source, + ) + sources = [feature_view] + on_demand_feature_view_1 = OnDemandFeatureView( + name="my-on-demand-feature-view", + sources=sources, + schema=[ + Field(name="output1", dtype=Float32), + Field(name="output2", dtype=Float32), + ], + udf=udf1, + ) + on_demand_feature_view_2 = OnDemandFeatureView( + name="my-on-demand-feature-view", + sources=sources, + schema=[ + Field(name="output1", dtype=Float32), + Field(name="output2", dtype=Float32), + ], + udf=udf1, + ) + on_demand_feature_view_3 = OnDemandFeatureView( + name="my-on-demand-feature-view", + sources=sources, + schema=[ + Field(name="output1", dtype=Float32), + Field(name="output2", dtype=Float32), + ], + udf=udf2, + ) + on_demand_feature_view_4 = OnDemandFeatureView( + name="my-on-demand-feature-view", + sources=sources, + schema=[ + Field(name="output1", dtype=Float32), + Field(name="output2", dtype=Float32), + ], + 
udf=udf2, + description="test", + ) + + s1 = {on_demand_feature_view_1, on_demand_feature_view_2} + assert len(s1) == 1 + + s2 = {on_demand_feature_view_1, on_demand_feature_view_3} + assert len(s2) == 2 + + s3 = {on_demand_feature_view_3, on_demand_feature_view_4} + assert len(s3) == 2 + + s4 = { + on_demand_feature_view_1, + on_demand_feature_view_2, + on_demand_feature_view_3, + on_demand_feature_view_4, + } + assert len(s4) == 3 + + +def test_inputs_parameter_deprecation_in_odfv(): + date_request = RequestSource( + name="date_request", schema=[Field(name="some_date", dtype=UnixTimestamp)], + ) + with pytest.warns(DeprecationWarning): + + @on_demand_feature_view( + inputs={"date_request": date_request}, + schema=[ + Field(name="output", dtype=UnixTimestamp), + Field(name="string_output", dtype=String), + ], + ) + def test_view(features_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["output"] = features_df["some_date"] + data["string_output"] = features_df["some_date"].astype(pd.StringDtype()) + return data + + odfv = test_view + assert odfv.name == "test_view" + assert len(odfv.source_request_sources) == 1 + assert odfv.source_request_sources["date_request"].name == "date_request" + assert odfv.source_request_sources["date_request"].schema == date_request.schema + + with pytest.raises(ValueError): + + @on_demand_feature_view( + inputs={"date_request": date_request}, + sources=[date_request], + schema=[ + Field(name="output", dtype=UnixTimestamp), + Field(name="string_output", dtype=String), + ], + ) + def incorrect_testview(features_df: pd.DataFrame) -> pd.DataFrame: + data = pd.DataFrame() + data["output"] = features_df["some_date"] + data["string_output"] = features_df["some_date"].astype(pd.StringDtype()) + return data + + @on_demand_feature_view( + inputs={"odfv": date_request}, + schema=[ + Field(name="output", dtype=UnixTimestamp), + Field(name="string_output", dtype=String), + ], + ) + def test_correct_view(features_df: pd.DataFrame) -> 
pd.DataFrame: + data = pd.DataFrame() + data["output"] = features_df["some_date"] + data["string_output"] = features_df["some_date"].astype(pd.StringDtype()) + return data + + odfv = test_correct_view + assert odfv.name == "test_correct_view" + assert odfv.source_request_sources["date_request"].schema == date_request.schema diff --git a/sdk/python/tests/utils/data_source_utils.py b/sdk/python/tests/utils/data_source_utils.py index 5bb5a622d61..d5f45964ca7 100644 --- a/sdk/python/tests/utils/data_source_utils.py +++ b/sdk/python/tests/utils/data_source_utils.py @@ -11,19 +11,17 @@ @contextlib.contextmanager -def prep_file_source(df, event_timestamp_column=None) -> Iterator[FileSource]: +def prep_file_source(df, timestamp_field=None) -> Iterator[FileSource]: with tempfile.NamedTemporaryFile(suffix=".parquet") as f: f.close() df.to_parquet(f.name) file_source = FileSource( - file_format=ParquetFormat(), - path=f.name, - timestamp_field=event_timestamp_column, + file_format=ParquetFormat(), path=f.name, timestamp_field=timestamp_field, ) yield file_source -def simple_bq_source_using_table_arg(df, event_timestamp_column=None) -> BigQuerySource: +def simple_bq_source_using_table_arg(df, timestamp_field=None) -> BigQuerySource: client = bigquery.Client() gcp_project = client.project bigquery_dataset = f"ds_{time.time_ns()}" @@ -40,13 +38,13 @@ def simple_bq_source_using_table_arg(df, event_timestamp_column=None) -> BigQuer job = client.load_table_from_dataframe(df, table) job.result() - return BigQuerySource(table=table, timestamp_field=event_timestamp_column,) + return BigQuerySource(table=table, timestamp_field=timestamp_field,) -def simple_bq_source_using_query_arg(df, event_timestamp_column=None) -> BigQuerySource: - bq_source_using_table = simple_bq_source_using_table_arg(df, event_timestamp_column) +def simple_bq_source_using_query_arg(df, timestamp_field=None) -> BigQuerySource: + bq_source_using_table = simple_bq_source_using_table_arg(df, timestamp_field) return 
BigQuerySource( name=bq_source_using_table.table, query=f"SELECT * FROM {bq_source_using_table.table}", - timestamp_field=event_timestamp_column, + timestamp_field=timestamp_field, ) diff --git a/sdk/python/tests/utils/online_write_benchmark.py b/sdk/python/tests/utils/online_write_benchmark.py index 6d6b73d5da6..82ffc8e98bf 100644 --- a/sdk/python/tests/utils/online_write_benchmark.py +++ b/sdk/python/tests/utils/online_write_benchmark.py @@ -29,7 +29,7 @@ def create_driver_hourly_stats_feature_view(source): Field(name="acc_rate", dtype=Float32), Field(name="avg_daily_trips", dtype=Int32), ], - batch_source=source, + source=source, ttl=timedelta(hours=2), ) return driver_stats_feature_view diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000000..e2d707e2720 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,22 @@ +[isort] +src_paths = feast,tests +multi_line_output=3 +include_trailing_comma=True +force_grid_wrap=0 +use_parentheses=True +line_length=88 +skip=feast/protos,feast/embedded_go/lib +known_first_party=feast,feast_serving_server,feast_core_server +default_section=THIRDPARTY + +[flake8] +ignore = E203, E266, E501, W503 +max-line-length = 88 +max-complexity = 20 +select = B,C,E,F,W,T4 +exclude = .git,__pycache__,docs/conf.py,dist,feast/protos,feast/embedded_go/lib + +[mypy] +files=feast,tests +ignore_missing_imports=true +exclude=feast/embedded_go/lib diff --git a/sdk/python/setup.py b/setup.py similarity index 61% rename from sdk/python/setup.py rename to setup.py index cc883da95e3..2044de008fc 100644 --- a/sdk/python/setup.py +++ b/setup.py @@ -11,28 +11,33 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import copy import glob +import json import os import pathlib import re import shutil import subprocess +import sys from distutils.cmd import Command +from distutils.dir_util import copy_tree from pathlib import Path from subprocess import CalledProcessError -from setuptools import find_packages +from setuptools import find_packages, Extension try: from setuptools import setup from setuptools.command.build_py import build_py + from setuptools.command.build_ext import build_ext as _build_ext from setuptools.command.develop import develop from setuptools.command.install import install - from setuptools.dist import Distribution + except ImportError: from distutils.command.build_py import build_py + from distutils.command.build_ext import build_ext as _build_ext from distutils.core import setup - from distutils.dist import Distribution NAME = "feast" DESCRIPTION = "Python SDK for Feast" @@ -41,47 +46,47 @@ REQUIRES_PYTHON = ">=3.7.0" REQUIRED = [ - "click>=7.0.0", - "colorama>=0.3.9", + "click>=7.0.0,<8.0.2", + "colorama>=0.3.9,<1", "dill==0.3.*", - "fastavro>=1.1.0", - "google-api-core>=1.23.0", - "googleapis-common-protos==1.52.*", - "grpcio>=1.34.0", - "grpcio-reflection>=1.34.0", - "Jinja2>=2.0.0", + "fastavro>=1.1.0,<2", + "google-api-core>=1.23.0,<3", + "googleapis-common-protos==1.52.*,<2", + "grpcio>=1.34.0,<2", + "grpcio-reflection>=1.34.0,<2", + "Jinja2>=2,<4", "jsonschema", "mmh3", - "pandas>=1.0.0", + "numpy<1.22,<2", + "pandas>=1,<2", "pandavro==1.5.*", "protobuf>=3.10,<3.20", "proto-plus<1.19.7", - "pyarrow>=4.0.0", - "pydantic>=1.0.0", - "PyYAML>=5.4.*", + "pyarrow>=4,<7", + "pydantic>=1,<2", + "PyYAML>=5.4.*,<7", "tabulate==0.8.*", - "tenacity>=7.*", + "tenacity>=7,<9", "toml==0.10.*", "tqdm==4.*", - "fastapi>=0.68.0", - "uvicorn[standard]>=0.14.0", + "fastapi>=0.68.0,<1", + "uvicorn[standard]>=0.14.0,<1", "proto-plus<1.19.7", "tensorflow-metadata>=1.0.0,<2.0.0", "dask>=2021.*,<2022.02.0", ] GCP_REQUIRED = [ - "google-cloud-bigquery>=2.28.1", - 
"google-cloud-bigquery-storage >= 2.0.0", - "google-cloud-datastore>=2.1.*", + "google-cloud-bigquery>=2,<3", + "google-cloud-bigquery-storage >= 2.0.0,<3", + "google-cloud-datastore>=2.1.*,<3", "google-cloud-storage>=1.34.*,<1.41", "google-cloud-core>=1.4.0,<2.0.0", ] REDIS_REQUIRED = [ - "redis==3.5.3", - "redis-py-cluster>=2.1.3", - "hiredis>=2.0.0", + "redis==4.2.2", + "hiredis>=2.0.0,<3", ] AWS_REQUIRED = [ @@ -90,27 +95,38 @@ ] SNOWFLAKE_REQUIRED = [ - "snowflake-connector-python[pandas]>=2.7.3", + "snowflake-connector-python[pandas]>=2.7.3,<3", ] SPARK_REQUIRED = [ - "pyspark>=3.0.0", + "pyspark>=3.0.0,<4", ] TRINO_REQUIRED = [ "trino>=0.305.0,<0.400.0", ] -GE_REQUIRED = [ - "great_expectations>=0.14.0,<0.15.0" +POSTGRES_REQUIRED = [ + "psycopg2-binary>=2.8.3,<3", +] + +HBASE_REQUIRED = [ + "happybase>=1.2.0,<3", +] + +GE_REQUIRED = ["great_expectations>=0.14.0,<0.15.0"] + +GO_REQUIRED = [ + "cffi==1.15.*,<2", ] CI_REQUIRED = ( [ - "cryptography==3.3.2", + "build", + "cryptography==3.4.8", "flake8", "black==19.10b0", - "isort>=5", + "isort>=5,<6", "grpcio-tools==1.44.0", "grpcio-testing==1.44.0", "minio==7.1.0", @@ -120,19 +136,19 @@ "mypy-protobuf==3.1", "avro==1.10.0", "gcsfs", - "urllib3>=1.25.4", + "urllib3>=1.25.4,<2", "psutil==5.9.0", - "pytest>=6.0.0", + "pytest>=6.0.0,<8", "pytest-cov", "pytest-xdist", - "pytest-benchmark>=3.4.1", + "pytest-benchmark>=3.4.1,<4", "pytest-lazy-fixture==0.6.3", "pytest-timeout==1.4.2", "pytest-ordering==0.6.*", "pytest-mock==1.10.4", "Sphinx!=4.0.0,<4.4.0", "sphinx-rtd-theme", - "testcontainers>=3.5", + "testcontainers>=3.5,<4", "adlfs==0.5.9", "firebase-admin==4.5.2", "pre-commit", @@ -148,19 +164,21 @@ "types-setuptools", "types-tabulate", ] - + GCP_REQUIRED - + REDIS_REQUIRED - + AWS_REQUIRED - + SNOWFLAKE_REQUIRED - + SPARK_REQUIRED - + TRINO_REQUIRED - + GE_REQUIRED + + GCP_REQUIRED + + REDIS_REQUIRED + + AWS_REQUIRED + + SNOWFLAKE_REQUIRED + + SPARK_REQUIRED + + POSTGRES_REQUIRED + + TRINO_REQUIRED + + GE_REQUIRED 
+ + HBASE_REQUIRED ) DEV_REQUIRED = ["mypy-protobuf==3.1", "grpcio-testing==1.*"] + CI_REQUIRED # Get git repo root directory -repo_root = str(pathlib.Path(__file__).resolve().parent.parent.parent) +repo_root = str(pathlib.Path(__file__).resolve().parent) # README file from Feast repo root directory README_FILE = os.path.join(repo_root, "README.md") @@ -176,35 +194,46 @@ # Only set use_scm_version if git executable exists (setting this variable causes pip to use git under the hood) if shutil.which("git"): - use_scm_version = {"root": "../..", "relative_to": __file__, "tag_regex": TAG_REGEX} + use_scm_version = {"root": ".", "relative_to": __file__, "tag_regex": TAG_REGEX} else: use_scm_version = None PROTO_SUBDIRS = ["core", "serving", "types", "storage"] +PYTHON_CODE_PREFIX = "sdk/python" class BuildPythonProtosCommand(Command): description = "Builds the proto files into Python files." - user_options = [] + user_options = [ + ("inplace", "i", "Write generated proto files to source directory."), + ] def initialize_options(self): self.python_protoc = [ - "python", + sys.executable, "-m", "grpc_tools.protoc", ] # find_executable("protoc") self.proto_folder = os.path.join(repo_root, "protos") - self.python_folder = os.path.join( - os.path.dirname(__file__) or os.getcwd(), "feast/protos" - ) self.sub_folders = PROTO_SUBDIRS + self.build_lib = None + self.inplace = 0 def finalize_options(self): - pass + self.set_undefined_options("build", ("build_lib", "build_lib")) + + @property + def python_folder(self): + if self.inplace: + return os.path.join( + os.path.dirname(__file__) or os.getcwd(), "sdk/python/feast/protos" + ) + + return os.path.join(self.build_lib, "feast/protos") def _generate_python_protos(self, path: str): proto_files = glob.glob(os.path.join(self.proto_folder, path)) - Path(self.python_folder).mkdir(exist_ok=True) + Path(self.python_folder).mkdir(parents=True, exist_ok=True) subprocess.check_call( self.python_protoc + [ @@ -225,19 +254,16 @@ def 
run(self): self._generate_python_protos(f"feast/{sub_folder}/*.proto") # We need the __init__ files for each of the generated subdirs # so that they are regular packages, and don't need the `--namespace-packages` flags - # when being typechecked using mypy. BUT, we need to exclude `types` because that clashes - # with an existing module in the python standard library. - if sub_folder == "types": - continue - with open(f"{self.python_folder}/feast/{sub_folder}/__init__.py", 'w'): + # when being typechecked using mypy. + with open(f"{self.python_folder}/feast/{sub_folder}/__init__.py", "w"): pass - with open(f"{self.python_folder}/__init__.py", 'w'): + with open(f"{self.python_folder}/__init__.py", "w"): pass - with open(f"{self.python_folder}/feast/__init__.py", 'w'): + with open(f"{self.python_folder}/feast/__init__.py", "w"): pass - for path in Path("feast/protos").rglob("*.py"): + for path in Path(self.python_folder).rglob("*.py"): for folder in self.sub_folders: # Read in the file with open(path, "r") as file: @@ -276,12 +302,10 @@ def _ensure_go_and_proto_toolchain(): path_val = _generate_path_with_gopath() try: - subprocess.check_call(["protoc-gen-go", "--version"], env={ - "PATH": path_val - }) - subprocess.check_call(["protoc-gen-go-grpc", "--version"], env={ - "PATH": path_val - }) + subprocess.check_call(["protoc-gen-go", "--version"], env={"PATH": path_val}) + subprocess.check_call( + ["protoc-gen-go-grpc", "--version"], env={"PATH": path_val} + ) except Exception as e: raise RuntimeError("Unable to find go/grpc extensions for protoc") from e @@ -292,7 +316,7 @@ class BuildGoProtosCommand(Command): def initialize_options(self): self.go_protoc = [ - "python", + sys.executable, "-m", "grpc_tools.protoc", ] # find_executable("protoc") @@ -310,15 +334,18 @@ def _generate_go_protos(self, path: str): try: subprocess.check_call( self.go_protoc - + ["-I", self.proto_folder, - "--go_out", self.go_folder, - "--go_opt=module=github.com/feast-dev/feast/go/protos", - 
"--go-grpc_out", self.go_folder, - "--go-grpc_opt=module=github.com/feast-dev/feast/go/protos"] + + [ + "-I", + self.proto_folder, + "--go_out", + self.go_folder, + "--go_opt=module=github.com/feast-dev/feast/go/protos", + "--go-grpc_out", + self.go_folder, + "--go-grpc_opt=module=github.com/feast-dev/feast/go/protos", + ] + proto_files, - env={ - "PATH": self.path_val - } + env={"PATH": self.path_val}, ) except CalledProcessError as e: print(f"Stderr: {e.stderr}") @@ -331,44 +358,6 @@ def run(self): self._generate_go_protos(f"feast/{sub_folder}/*.proto") -class BuildGoEmbeddedCommand(Command): - description = "Builds Go embedded library" - user_options = [] - - def initialize_options(self) -> None: - self.path_val = _generate_path_with_gopath() - - self.go_env = {} - for var in ("GOCACHE", "GOPATH"): - self.go_env[var] = subprocess \ - .check_output(["go", "env", var]) \ - .decode("utf-8") \ - .strip() - - def finalize_options(self) -> None: - pass - - def _compile_embedded_lib(self): - print("Compile embedded go") - subprocess.check_call([ - "gopy", - "build", - "-output", - "feast/embedded_go/lib", - "-vm", - "python3", - "-no-make", - "github.com/feast-dev/feast/go/embedded" - ], env={ - "PATH": self.path_val, - "CGO_LDFLAGS_ALLOW": ".*", - **self.go_env, - }) - - def run(self): - self._compile_embedded_lib() - - class BuildCommand(build_py): """Custom build command.""" @@ -377,7 +366,8 @@ def run(self): if os.getenv("COMPILE_GO", "false").lower() == "true": _ensure_go_and_proto_toolchain() self.run_command("build_go_protos") - self.run_command("build_go_lib") + + self.run_command("build_ext") build_py.run(self) @@ -385,19 +375,77 @@ class DevelopCommand(develop): """Custom develop command.""" def run(self): + self.reinitialize_command("build_python_protos", inplace=1) self.run_command("build_python_protos") if os.getenv("COMPILE_GO", "false").lower() == "true": _ensure_go_and_proto_toolchain() self.run_command("build_go_protos") - 
self.run_command("build_go_lib") + develop.run(self) -class BinaryDistribution(Distribution): - """Distribution which forces a binary package with platform name - when go compilation is enabled""" - def has_ext_modules(self): - return os.getenv("COMPILE_GO", "false").lower() == "true" +class build_ext(_build_ext): + def finalize_options(self) -> None: + super().finalize_options() + if os.getenv("COMPILE_GO", "false").lower() == "false": + self.extensions = [e for e in self.extensions if not self._is_go_ext(e)] + + def _is_go_ext(self, ext: Extension): + return any( + source.endswith(".go") or source.startswith("github") + for source in ext.sources + ) + + def build_extension(self, ext: Extension): + if not self._is_go_ext(ext): + # the base class may mutate `self.compiler` + compiler = copy.deepcopy(self.compiler) + self.compiler, compiler = compiler, self.compiler + try: + return _build_ext.build_extension(self, ext) + finally: + self.compiler, compiler = compiler, self.compiler + + bin_path = _generate_path_with_gopath() + go_env = json.loads( + subprocess.check_output(["go", "env", "-json"]).decode("utf-8").strip() + ) + + destination = os.path.dirname(os.path.abspath(self.get_ext_fullpath(ext.name))) + subprocess.check_call(["go", "install", "golang.org/x/tools/cmd/goimports"]) + subprocess.check_call(["go", "install", "github.com/go-python/gopy"]) + subprocess.check_call( + [ + "gopy", + "build", + "-output", + destination, + "-vm", + sys.executable, + "-no-make", + *ext.sources, + ], + env={"PATH": bin_path, "CGO_LDFLAGS_ALLOW": ".*", **go_env,}, + ) + + def copy_extensions_to_source(self): + build_py = self.get_finalized_command("build_py") + for ext in self.extensions: + fullname = self.get_ext_fullname(ext.name) + modpath = fullname.split(".") + package = ".".join(modpath[:-1]) + package_dir = build_py.get_package_dir(package) + + src_dir = dest_dir = package_dir + + if src_dir.startswith(PYTHON_CODE_PREFIX): + src_dir = package_dir[len(PYTHON_CODE_PREFIX) 
:] + src_dir = src_dir.lstrip("/") + + src_dir = os.path.join(self.build_lib, src_dir) + + # copy whole directory + copy_tree(src_dir, dest_dir) setup( @@ -408,7 +456,10 @@ def has_ext_modules(self): long_description_content_type="text/markdown", python_requires=REQUIRES_PYTHON, url=URL, - packages=find_packages(exclude=("tests",)), + packages=find_packages( + where=PYTHON_CODE_PREFIX, exclude=("java", "infra", "sdk/python/tests", "ui") + ), + package_dir={"": PYTHON_CODE_PREFIX}, install_requires=REQUIRED, # https://stackoverflow.com/questions/28509965/setuptools-development-requirements # Install dev requirements with: pip install -e .[dev] @@ -421,7 +472,10 @@ def has_ext_modules(self): "snowflake": SNOWFLAKE_REQUIRED, "spark": SPARK_REQUIRED, "trino": TRINO_REQUIRED, + "postgres": POSTGRES_REQUIRED, "ge": GE_REQUIRED, + "hbase": HBASE_REQUIRED, + "go": GO_REQUIRED, }, include_package_data=True, license="Apache", @@ -442,19 +496,17 @@ def has_ext_modules(self): "mypy-protobuf==3.1", "sphinx!=4.0.0", ], - package_data={ - "": [ - "protos/feast/**/*.proto", - "protos/feast/third_party/grpc/health/v1/*.proto", - "feast/protos/feast/**/*.py", - ], - }, cmdclass={ "build_python_protos": BuildPythonProtosCommand, "build_go_protos": BuildGoProtosCommand, - "build_go_lib": BuildGoEmbeddedCommand, "build_py": BuildCommand, "develop": DevelopCommand, + "build_ext": build_ext, }, - distclass=BinaryDistribution, # generate wheel with platform-specific name + ext_modules=[ + Extension( + "feast.embedded_go.lib._embedded", + ["github.com/feast-dev/feast/go/embedded"], + ) + ], ) diff --git a/ui/PUBLISHING_TO_NPM.md b/ui/PUBLISHING_TO_NPM.md index 2ad42fd227b..0ab1af39236 100644 --- a/ui/PUBLISHING_TO_NPM.md +++ b/ui/PUBLISHING_TO_NPM.md @@ -6,7 +6,7 @@ To publish a new version of the module, you will need to be part of the @feast-d ## Steps for Publishing -1. Make sure tests are passing. Run tests with `yarn tests` in the ui directory. +1. Make sure tests are passing. 
Run tests with `yarn test` in the ui directory. 2. Bump the version number in `package.json` as appropriate. 3. Package the modules for distributions. Run the library build script with `yarn build:lib`. We use [Rollup](https://rollupjs.org/) for building the module, and the configs are in the `rollup.config.js` file. 4. Publish the package to NPM. Run `npm publish` diff --git a/ui/README.md b/ui/README.md index 80e20b31643..713d1c17c5c 100644 --- a/ui/README.md +++ b/ui/README.md @@ -1,6 +1,6 @@ -# [WIP] Feast Web UI +# [Experimental] Feast Web UI -![Sample UI](sample.png) +![Sample UI](https://github.com/feast-dev/feast/blob/master/ui/sample.png) This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app). @@ -18,7 +18,10 @@ data to the UI. ## Usage -There are two modes of usage: importing the UI as a module, or running the entire build as a React app. +There are three modes of usage: +- via the 'feast ui' CLI to view the current feature repository +- importing the UI as a module +- running the entire build as a React app. ### Importing the UI as a module diff --git a/ui/docker/Dockerfile b/ui/docker/Dockerfile new file mode 100644 index 00000000000..172f833e12f --- /dev/null +++ b/ui/docker/Dockerfile @@ -0,0 +1,22 @@ +FROM node:17.9.0-slim + +WORKDIR /app +# add `/app/node_modules/.bin` to $PATH +ENV PATH /app/node_modules/.bin:$PATH + +# Install UI dependencies +COPY ui/package.json . +RUN npm install + +# Copy over app +COPY ui/tsconfig.json . +COPY ui/public ./public +COPY ui/src ./src + +# Build for production. 
+RUN npm run build --omit=dev + +# Serve the UI from a port +RUN npm install -g serve +EXPOSE 3000 +CMD ["serve", "-s", "build"] diff --git a/ui/feature_repo/README.md b/ui/feature_repo/README.md index db706e709e4..de4bc1f4dc7 100644 --- a/ui/feature_repo/README.md +++ b/ui/feature_repo/README.md @@ -18,7 +18,7 @@ git clone https://github.com/feast-dev/feast.git Install a dev build of feast ```bash cd feast -pip install -e "sdk/python[dev]" +pip install -e ".[dev]" ``` Then for this demo, you'll actually need to fix a bug by adding this to `type_map.py#L144`: diff --git a/ui/feature_repo/features.py b/ui/feature_repo/features.py index 0f74b424e7b..efdab694e83 100644 --- a/ui/feature_repo/features.py +++ b/ui/feature_repo/features.py @@ -1,27 +1,24 @@ from datetime import timedelta from feast import ( - Bool, Entity, FeatureService, FeatureView, Field, FileSource, - Int64, - String, ValueType, ) from feast.data_source import RequestSource from feast.request_feature_view import RequestFeatureView from feast.on_demand_feature_view import on_demand_feature_view -from feast.field import Field +from feast.types import Bool, Int64, String import pandas as pd zipcode = Entity( name="zipcode", value_type=ValueType.INT64, description="A zipcode", - labels={"owner": "danny@tecton.ai", "team": "hack week",}, + tags={"owner": "danny@tecton.ai", "team": "hack week",}, ) zipcode_source = FileSource( @@ -43,7 +40,7 @@ Field(name="population", dtype=Int64), Field(name="total_wages", dtype=Int64), ], - batch_source=zipcode_source, + source=zipcode_source, tags={ "date_added": "2022-02-7", "experiments": "experiment-A,experiment-B,experiment-C", @@ -64,7 +61,7 @@ Field(name="population", dtype=Int64), Field(name="total_wages", dtype=Int64), ], - batch_source=zipcode_source, + source=zipcode_source, tags={ "date_added": "2022-02-7", "experiments": "experiment-A,experiment-B,experiment-C", @@ -81,7 +78,7 @@ Field(name="tax_returns_filed", dtype=Int64), Field(name="total_wages", 
dtype=Int64), ], - batch_source=zipcode_source, + source=zipcode_source, tags={ "date_added": "2022-02-7", "experiments": "experiment-A,experiment-B,experiment-C", @@ -94,7 +91,7 @@ name="dob_ssn", value_type=ValueType.STRING, description="Date of birth and last four digits of social security number", - labels={"owner": "tony@tecton.ai", "team": "hack week",}, + tags={"owner": "tony@tecton.ai", "team": "hack week",}, ) credit_history_source = FileSource( @@ -119,7 +116,7 @@ Field(name="missed_payments_6m", dtype=Int64), Field(name="bankruptcies", dtype=Int64), ], - batch_source=credit_history_source, + source=credit_history_source, tags={ "date_added": "2022-02-6", "experiments": "experiment-A", @@ -140,7 +137,7 @@ # Define an on demand feature view which can generate new features based on # existing feature views and RequestSource features @on_demand_feature_view( - sources={"credit_history": credit_history, "transaction": input_request,}, + sources=[credit_history, input_request], schema=[ Field(name="transaction_gt_last_credit_card_due", dtype=Bool), ], @@ -152,16 +149,10 @@ def transaction_gt_last_credit_card_due(inputs: pd.DataFrame) -> pd.DataFrame: ) return df - -# Define request feature view -transaction_request_fv = RequestFeatureView( - name="transaction_request_fv", request_data_source=input_request, -) - model_v1 = FeatureService( name="credit_score_v1", features=[ - credit_history[["mortgage_due", "credit_card_due", "missed_payments_1y"]], + credit_history[["credit_card_due", "missed_payments_1y"]], zipcode_features, ], tags={"owner": "tony@tecton.ai", "stage": "staging"}, @@ -173,7 +164,6 @@ def transaction_gt_last_credit_card_due(inputs: pd.DataFrame) -> pd.DataFrame: features=[ credit_history[["mortgage_due", "credit_card_due", "missed_payments_1y"]], zipcode_features, - transaction_request_fv, ], tags={"owner": "tony@tecton.ai", "stage": "prod"}, description="Credit scoring model", diff --git a/ui/package.json b/ui/package.json index 
37a5ddf4318..252faf8613d 100644 --- a/ui/package.json +++ b/ui/package.json @@ -1,6 +1,6 @@ { "name": "@feast-dev/feast-ui", - "version": "0.19.0", + "version": "0.20.4", "private": false, "files": [ "dist" @@ -9,7 +9,7 @@ "module": "./dist/feast-ui.module.js", "peerDependencies": { "@elastic/datemath": "^5.0.3", - "@elastic/eui": "^46.1.0", + "@elastic/eui": "^55.0.1", "@emotion/react": "^11.7.1", "@types/d3": "^7.1.0", "@types/react": "^17.0.20", @@ -30,7 +30,7 @@ }, "dependencies": { "@elastic/datemath": "^5.0.3", - "@elastic/eui": "^46.1.0", + "@elastic/eui": "^55.0.1", "@emotion/react": "^11.7.1", "@types/d3": "^7.1.0", "@types/jest": "^27.0.1", diff --git a/ui/public/registry.json b/ui/public/registry.json index af328979e98..2d5c93c9620 100644 --- a/ui/public/registry.json +++ b/ui/public/registry.json @@ -1,313 +1,104 @@ { - "project": "credit_scoring_aws", "dataSources": [ { - "type": "BATCH_FILE", - "eventTimestampColumn": "event_timestamp", "createdTimestampColumn": "created_timestamp", "fileOptions": { - "fileUrl": "data/credit_history.parquet" + "uri": "data/credit_history.parquet" }, "name": "credit_history", - "meta": { - "latestEventTimestamp": "2021-08-29T22:01:04.746575Z", - "earliestEventTimestamp": "2020-04-26T22:01:04.746575Z" - } + "timestampField": "event_timestamp", + "type": "BATCH_FILE" }, { - "type": "REQUEST_SOURCE", + "name": "transaction", "requestDataOptions": { - "schema": { - "transaction_amt": "INT64" - } + "schema": [ + { + "name": "transaction_amt", + "valueType": "INT64" + } + ] }, - "name": "transaction" + "type": "REQUEST_SOURCE" }, { - "type": "BATCH_FILE", - "eventTimestampColumn": "event_timestamp", "createdTimestampColumn": "created_timestamp", "fileOptions": { - "fileUrl": "data/zipcode_table.parquet" + "uri": "data/zipcode_table.parquet" }, "name": "zipcode", - "meta": { - "latestEventTimestamp": "2017-01-01T12:00:00Z", - "earliestEventTimestamp": "2017-01-01T12:00:00Z" - } + "timestampField": "event_timestamp", + "type": 
"BATCH_FILE" } ], "entities": [ { + "meta": { + "createdTimestamp": "2022-05-11T19:27:03.171062Z", + "lastUpdatedTimestamp": "2022-05-11T19:27:03.171062Z" + }, "spec": { + "joinKey": "__dummy_id", "name": "__dummy", - "valueType": "STRING", - "joinKey": "__dummy_id" - }, - "meta": { - "createdTimestamp": "2022-02-09T20:40:53.101387Z", - "lastUpdatedTimestamp": "2022-02-09T20:40:53.101387Z" + "valueType": "STRING" } }, { - "spec": { - "name": "dob_ssn", - "valueType": "STRING", - "description": "Date of birth and last four digits of social security number", - "joinKey": "dob_ssn", - "labels": { - "team": "hack week", - "owner": "tony@tecton.ai" - } - }, "meta": { - "createdTimestamp": "2022-02-09T20:40:53.101256Z", - "lastUpdatedTimestamp": "2022-02-09T20:40:53.101256Z" - } - }, - { - "spec": { - "name": "zipcode", - "valueType": "INT64", - "description": "A zipcode", - "joinKey": "zipcode", - "labels": { - "owner": "danny@tecton.ai", - "team": "hack week" - } + "createdTimestamp": "2022-05-11T19:27:03.171112Z", + "lastUpdatedTimestamp": "2022-05-11T19:27:03.171112Z" }, - "meta": { - "createdTimestamp": "2022-02-09T20:40:53.101335Z", - "lastUpdatedTimestamp": "2022-02-09T20:40:53.101335Z" - } - } - ], - "featureViews": [ - { "spec": { - "name": "credit_history", - "entities": ["dob_ssn"], - "features": [ - { - "name": "credit_card_due", - "valueType": "INT64" - }, - { - "name": "mortgage_due", - "valueType": "INT64" - }, - { - "name": "student_loan_due", - "valueType": "INT64" - }, - { - "name": "vehicle_loan_due", - "valueType": "INT64" - }, - { - "name": "hard_pulls", - "valueType": "INT64" - }, - { - "name": "missed_payments_2y", - "valueType": "INT64" - }, - { - "name": "missed_payments_1y", - "valueType": "INT64" - }, - { - "name": "missed_payments_6m", - "valueType": "INT64" - }, - { - "name": "bankruptcies", - "valueType": "INT64" - } - ], + "description": "Date of birth and last four digits of social security number", + "joinKey": "dob_ssn", + "name": 
"dob_ssn", "tags": { - "access_group": "feast-team@tecton.ai", - "experiments": "experiment-A", - "date_added": "2022-02-6" - }, - "ttl": "777600000s", - "batchSource": { - "type": "BATCH_FILE", - "eventTimestampColumn": "event_timestamp", - "createdTimestampColumn": "created_timestamp", - "fileOptions": { - "fileUrl": "data/credit_history.parquet" - }, - "dataSourceClassType": "feast.infra.offline_stores.file_source.FileSource", - "name": "credit_history", - "meta": { - "latestEventTimestamp": "2021-08-29T22:01:04.746575Z", - "earliestEventTimestamp": "2020-04-26T22:01:04.746575Z" - } + "owner": "tony@tecton.ai", + "team": "hack week" }, - "online": true - }, - "meta": { - "createdTimestamp": "2022-02-09T20:40:53.101460Z", - "lastUpdatedTimestamp": "2022-02-11T20:15:13.735432Z", - "materializationIntervals": [ - { - "startTime": "1997-06-20T20:41:14.456417Z", - "endTime": "2019-02-09T20:41:11Z" - }, - { - "startTime": "2019-02-09T20:41:11Z", - "endTime": "2022-02-09T20:42:03Z" - }, - { - "startTime": "2022-02-09T20:42:03Z", - "endTime": "2022-02-11T00:18:02Z" - } - ] + "valueType": "STRING" } }, { - "spec": { - "name": "zipcode_features", - "entities": ["zipcode"], - "features": [ - { - "name": "city", - "valueType": "STRING" - }, - { - "name": "state", - "valueType": "STRING" - }, - { - "name": "location_type", - "valueType": "STRING" - }, - { - "name": "tax_returns_filed", - "valueType": "INT64" - }, - { - "name": "population", - "valueType": "INT64" - }, - { - "name": "total_wages", - "valueType": "INT64" - } - ], - "tags": { - "experiments": "experiment-A,experiment-B,experiment-C", - "date_added": "2022-02-7", - "access_group": "feast-team@tecton.ai" - }, - "ttl": "315360000s", - "batchSource": { - "type": "BATCH_FILE", - "eventTimestampColumn": "event_timestamp", - "createdTimestampColumn": "created_timestamp", - "fileOptions": { - "fileUrl": "data/zipcode_table.parquet" - }, - "dataSourceClassType": "feast.infra.offline_stores.file_source.FileSource", - 
"name": "zipcode", - "meta": { - "latestEventTimestamp": "2017-01-01T12:00:00Z", - "earliestEventTimestamp": "2017-01-01T12:00:00Z" - } - }, - "online": true - }, "meta": { - "createdTimestamp": "2022-02-11T20:12:50.182923Z", - "lastUpdatedTimestamp": "2022-02-11T20:15:21.790447Z" - } - }, - { + "createdTimestamp": "2022-05-11T19:27:03.171153Z", + "lastUpdatedTimestamp": "2022-05-11T19:27:03.171153Z" + }, "spec": { - "name": "zipcode_money_features", - "entities": ["zipcode"], - "features": [ - { - "name": "tax_returns_filed", - "valueType": "INT64" - }, - { - "name": "total_wages", - "valueType": "INT64" - } - ], + "description": "A zipcode", + "joinKey": "zipcode", + "name": "zipcode", "tags": { - "experiments": "experiment-A,experiment-B,experiment-C", - "access_group": "feast-team@tecton.ai", - "date_added": "2022-02-7", - "test_apple": "2022-02-7", - "test_banana": "2022-02-7", - "test_cherry": "2022-02-7", - "test_danish": "2022-02-7", - "test_eggplant": "2022-02-7", - "test_figs": "2022-02-7", - "test_grape": "2022-02-7", - "test_honey": "2022-02-7", - "test_ice": "2022-02-7", - "test_jackfruit": "2022-02-7", - "test_kiwi_fruit": "2022-02-7", - "test_lychee": "2022-02-7", - "test_mango": "2022-02-7", - "test_orange": "2022-02-7", - "test_peach": "2022-02-7", - "test_question": "2022-02-7", - "test_ruby": "2022-02-7", - "test_starfruit": "2022-02-7", - "test_tamarind": "2022-02-7" - }, - "ttl": "315360000s", - "batchSource": { - "type": "BATCH_FILE", - "eventTimestampColumn": "event_timestamp", - "createdTimestampColumn": "created_timestamp", - "fileOptions": { - "fileUrl": "data/zipcode_table.parquet" - }, - "dataSourceClassType": "feast.infra.offline_stores.file_source.FileSource", - "name": "zipcode", - "meta": { - "latestEventTimestamp": "2017-01-01T12:00:00Z", - "earliestEventTimestamp": "2017-01-01T12:00:00Z" - } + "owner": "danny@tecton.ai", + "team": "hack week" }, - "online": true - }, - "meta": { - "createdTimestamp": "2022-02-11T20:10:53.228047Z", 
- "lastUpdatedTimestamp": "2022-02-11T20:15:15.949101Z" + "valueType": "INT64" } } ], "featureServices": [ { + "meta": { + "createdTimestamp": "2022-05-11T19:27:03.172623Z", + "lastUpdatedTimestamp": "2022-05-11T19:27:03.172623Z" + }, "spec": { - "name": "credit_score_v1", + "description": "Credit scoring model", "features": [ { - "featureViewName": "credit_history", "featureColumns": [ { "name": "credit_card_due", "valueType": "INT64" }, - { - "name": "mortgage_due", - "valueType": "INT64" - }, { "name": "missed_payments_1y", "valueType": "INT64" } - ] + ], + "featureViewName": "credit_history" }, { - "featureViewName": "zipcode_features", "featureColumns": [ { "name": "city", @@ -333,25 +124,26 @@ "name": "total_wages", "valueType": "INT64" } - ] + ], + "featureViewName": "zipcode_features" } ], + "name": "credit_score_v1", "tags": { "owner": "tony@tecton.ai", "stage": "staging" - }, - "description": "Credit scoring model" - }, - "meta": { - "createdTimestamp": "2022-02-11T20:12:50.186773Z" + } } }, { + "meta": { + "createdTimestamp": "2022-05-11T19:27:03.172405Z", + "lastUpdatedTimestamp": "2022-05-11T19:27:03.172405Z" + }, "spec": { - "name": "credit_score_v2", + "description": "Credit scoring model", "features": [ { - "featureViewName": "credit_history", "featureColumns": [ { "name": "credit_card_due", @@ -365,10 +157,10 @@ "name": "missed_payments_1y", "valueType": "INT64" } - ] + ], + "featureViewName": "credit_history" }, { - "featureViewName": "zipcode_features", "featureColumns": [ { "name": "city", @@ -394,34 +186,26 @@ "name": "total_wages", "valueType": "INT64" } - ] - }, - { - "featureViewName": "transaction_request_fv", - "featureColumns": [ - { - "name": "transaction_amt", - "valueType": "INT64" - } - ] + ], + "featureViewName": "zipcode_features" } ], + "name": "credit_score_v2", "tags": { - "stage": "prod", - "owner": "tony@tecton.ai" - }, - "description": "Credit scoring model" - }, - "meta": { - "createdTimestamp": "2022-02-11T20:12:50.185785Z" 
+ "owner": "tony@tecton.ai", + "stage": "prod" + } } }, { + "meta": { + "createdTimestamp": "2022-05-11T19:27:03.172264Z", + "lastUpdatedTimestamp": "2022-05-11T19:27:03.172264Z" + }, "spec": { - "name": "credit_score_v3", + "description": "Credit scoring model", "features": [ { - "featureViewName": "credit_history", "featureColumns": [ { "name": "credit_card_due", @@ -435,10 +219,10 @@ "name": "missed_payments_1y", "valueType": "INT64" } - ] + ], + "featureViewName": "credit_history" }, { - "featureViewName": "zipcode_features", "featureColumns": [ { "name": "city", @@ -464,34 +248,35 @@ "name": "total_wages", "valueType": "INT64" } - ] + ], + "featureViewName": "zipcode_features" }, { - "featureViewName": "transaction_gt_last_credit_card_due", "featureColumns": [ { "name": "transaction_gt_last_credit_card_due", "valueType": "BOOL" } - ] + ], + "featureViewName": "transaction_gt_last_credit_card_due" } ], + "name": "credit_score_v3", "tags": { - "stage": "dev", - "owner": "tony@tecton.ai" - }, - "description": "Credit scoring model" - }, - "meta": { - "createdTimestamp": "2022-02-11T20:12:50.186367Z" + "owner": "tony@tecton.ai", + "stage": "dev" + } } }, { + "meta": { + "createdTimestamp": "2022-05-11T19:27:03.172530Z", + "lastUpdatedTimestamp": "2022-05-11T19:27:03.172530Z" + }, "spec": { - "name": "zipcode_model", + "description": "Location model", "features": [ { - "featureViewName": "zipcode_features", "featureColumns": [ { "name": "city", @@ -517,25 +302,26 @@ "name": "total_wages", "valueType": "INT64" } - ] + ], + "featureViewName": "zipcode_features" } ], + "name": "zipcode_model", "tags": { - "stage": "dev", - "owner": "amanda@tecton.ai" - }, - "description": "Location model" - }, - "meta": { - "createdTimestamp": "2022-02-11T20:12:50.187069Z" + "owner": "amanda@tecton.ai", + "stage": "dev" + } } }, { + "meta": { + "createdTimestamp": "2022-05-11T19:27:03.172188Z", + "lastUpdatedTimestamp": "2022-05-11T19:27:03.172188Z" + }, "spec": { - "name": 
"zipcode_model_v2", + "description": "Location model", "features": [ { - "featureViewName": "zipcode_money_features", "featureColumns": [ { "name": "tax_returns_filed", @@ -545,45 +331,242 @@ "name": "total_wages", "valueType": "INT64" } - ] + ], + "featureViewName": "zipcode_money_features" } ], + "name": "zipcode_model_v2", "tags": { "owner": "amanda@tecton.ai", "stage": "dev" + } + } + } + ], + "featureViews": [ + { + "meta": { + "createdTimestamp": "2022-05-11T19:27:03.171421Z", + "lastUpdatedTimestamp": "2022-05-11T19:28:21.444739Z", + "materializationIntervals": [ + { + "endTime": "2019-05-11T19:27:05Z", + "startTime": "1997-09-19T19:27:13.273753Z" + }, + { + "endTime": "2022-05-11T19:27:43Z", + "startTime": "2019-05-11T19:27:05Z" + } + ] + }, + "spec": { + "batchSource": { + "createdTimestampColumn": "created_timestamp", + "dataSourceClassType": "feast.infra.offline_stores.file_source.FileSource", + "fileOptions": { + "uri": "data/credit_history.parquet" + }, + "name": "credit_history", + "timestampField": "event_timestamp", + "type": "BATCH_FILE" + }, + "entities": [ + "dob_ssn" + ], + "features": [ + { + "name": "credit_card_due", + "valueType": "INT64" + }, + { + "name": "mortgage_due", + "valueType": "INT64" + }, + { + "name": "student_loan_due", + "valueType": "INT64" + }, + { + "name": "vehicle_loan_due", + "valueType": "INT64" + }, + { + "name": "hard_pulls", + "valueType": "INT64" + }, + { + "name": "missed_payments_2y", + "valueType": "INT64" + }, + { + "name": "missed_payments_1y", + "valueType": "INT64" + }, + { + "name": "missed_payments_6m", + "valueType": "INT64" + }, + { + "name": "bankruptcies", + "valueType": "INT64" + } + ], + "name": "credit_history", + "online": true, + "tags": { + "access_group": "feast-team@tecton.ai", + "date_added": "2022-02-6", + "experiments": "experiment-A" }, - "description": "Location model" + "ttl": "777600000s" + } + }, + { + "meta": { + "createdTimestamp": "2022-05-11T19:27:03.171300Z", + 
"lastUpdatedTimestamp": "2022-05-11T19:27:46.002348Z", + "materializationIntervals": [ + { + "endTime": "2019-05-11T19:27:05Z", + "startTime": "2012-05-13T19:27:08.483036Z" + }, + { + "endTime": "2022-05-11T19:27:43Z", + "startTime": "2019-05-11T19:27:05Z" + } + ] }, + "spec": { + "batchSource": { + "createdTimestampColumn": "created_timestamp", + "dataSourceClassType": "feast.infra.offline_stores.file_source.FileSource", + "fileOptions": { + "uri": "data/zipcode_table.parquet" + }, + "name": "zipcode", + "timestampField": "event_timestamp", + "type": "BATCH_FILE" + }, + "entities": [ + "zipcode" + ], + "features": [ + { + "name": "city", + "valueType": "STRING" + }, + { + "name": "state", + "valueType": "STRING" + }, + { + "name": "location_type", + "valueType": "STRING" + }, + { + "name": "tax_returns_filed", + "valueType": "INT64" + }, + { + "name": "population", + "valueType": "INT64" + }, + { + "name": "total_wages", + "valueType": "INT64" + } + ], + "name": "zipcode_features", + "online": true, + "tags": { + "access_group": "feast-team@tecton.ai", + "date_added": "2022-02-7", + "experiments": "experiment-A,experiment-B,experiment-C" + }, + "ttl": "315360000s" + } + }, + { "meta": { - "createdTimestamp": "2022-02-11T20:17:15.582561Z" + "createdTimestamp": "2022-05-11T19:27:03.171195Z", + "lastUpdatedTimestamp": "2022-05-11T19:27:45.942549Z", + "materializationIntervals": [ + { + "endTime": "2019-05-11T19:27:05Z", + "startTime": "2012-05-13T19:27:06.493847Z" + }, + { + "endTime": "2022-05-11T19:27:43Z", + "startTime": "2019-05-11T19:27:05Z" + } + ] + }, + "spec": { + "batchSource": { + "createdTimestampColumn": "created_timestamp", + "dataSourceClassType": "feast.infra.offline_stores.file_source.FileSource", + "fileOptions": { + "uri": "data/zipcode_table.parquet" + }, + "name": "zipcode", + "timestampField": "event_timestamp", + "type": "BATCH_FILE" + }, + "entities": [ + "zipcode" + ], + "features": [ + { + "name": "tax_returns_filed", + "valueType": "INT64" 
+ }, + { + "name": "total_wages", + "valueType": "INT64" + } + ], + "name": "zipcode_money_features", + "online": true, + "tags": { + "access_group": "feast-team@tecton.ai", + "date_added": "2022-02-7", + "experiments": "experiment-A,experiment-B,experiment-C" + }, + "ttl": "315360000s" } } ], + "infra": [ + { + "name": "credit_scoring_aws_credit_history", + "path": "/Users/dannychiao/GitHub/feast/ui/feature_repo/data/online.db" + }, + { + "name": "credit_scoring_aws_zipcode_features", + "path": "/Users/dannychiao/GitHub/feast/ui/feature_repo/data/online.db" + }, + { + "name": "credit_scoring_aws_zipcode_money_features", + "path": "/Users/dannychiao/GitHub/feast/ui/feature_repo/data/online.db" + } + ], "onDemandFeatureViews": [ { + "meta": { + "createdTimestamp": "2022-05-11T19:27:03.171556Z", + "lastUpdatedTimestamp": "2022-05-11T19:27:03.171556Z" + }, "spec": { - "name": "transaction_gt_last_credit_card_due", "features": [ { "name": "transaction_gt_last_credit_card_due", "valueType": "BOOL" } ], - "inputs": { - "transaction": { - "requestDataSource": { - "type": "REQUEST_SOURCE", - "requestDataOptions": { - "schema": { - "transaction_amt": "INT64" - } - }, - "name": "transaction" - } - }, + "name": "transaction_gt_last_credit_card_due", + "sources": { "credit_history": { "featureViewProjection": { - "featureViewName": "credit_history", "featureColumns": [ { "name": "credit_card_due", @@ -621,169 +604,31 @@ "name": "bankruptcies", "valueType": "INT64" } - ] - } - } - }, - "userDefinedFunction": { - "name": "transaction_gt_last_credit_card_due", - "body": "@on_demand_feature_view(\n sources={\"credit_history\": credit_history, \"transaction\": input_request,},\n schema=[\n Field(name=\"transaction_gt_last_credit_card_due\", dtype=Bool),\n ],\n)\ndef transaction_gt_last_credit_card_due(inputs: pd.DataFrame) -> pd.DataFrame:\n df = pd.DataFrame()\n df[\"transaction_gt_last_credit_card_due\"] = (\n inputs[\"transaction_amt\"] > inputs[\"credit_card_due\"]\n )\n return 
df\n" - } - }, - "meta": { - "createdTimestamp": "2022-02-11T20:17:15.581514Z", - "lastUpdatedTimestamp": "2022-02-11T20:17:15.581514Z" - } - } - ], - "requestFeatureViews": [ - { - "spec": { - "name": "transaction_request_fv", - "requestDataSource": { - "type": "REQUEST_SOURCE", - "requestDataOptions": { - "schema": { - "transaction_amt": "INT64" + ], + "featureViewName": "credit_history" } }, - "name": "transaction" - } - } - } - ], - "savedDatasets": [ - { - "spec": { - "name": "my_training_ds", - "features": [ - "credit_history:credit_card_due", - "credit_history:mortgage_due", - "credit_history:missed_payments_1y", - "zipcode_features:city", - "zipcode_features:state", - "zipcode_features:location_type", - "zipcode_features:tax_returns_filed", - "zipcode_features:population", - "zipcode_features:total_wages" - ], - "joinKeys": [ - "person_income", - "person_emp_length", - "created_timestamp", - "zipcode", - "person_home_ownership", - "loan_amnt", - "person_age", - "loan_int_rate", - "loan_status", - "loan_id", - "dob_ssn", - "loan_intent" - ], - "storage": { - "fileStorage": { - "fileFormat": { - "parquetFormat": {} - }, - "fileUrl": "my_training_ds.parquet" - } - }, - "featureService": { - "spec": { - "name": "credit_score_v1", - "features": [ - { - "featureViewName": "credit_history", - "featureColumns": [ - { - "name": "credit_card_due", - "valueType": "INT64" - }, - { - "name": "mortgage_due", - "valueType": "INT64" - }, + "transaction": { + "requestDataSource": { + "name": "transaction", + "requestDataOptions": { + "schema": [ { - "name": "missed_payments_1y", + "name": "transaction_amt", "valueType": "INT64" } ] }, - { - "featureViewName": "zipcode_features", - "featureColumns": [ - { - "name": "city", - "valueType": "STRING" - }, - { - "name": "state", - "valueType": "STRING" - }, - { - "name": "location_type", - "valueType": "STRING" - }, - { - "name": "tax_returns_filed", - "valueType": "INT64" - }, - { - "name": "population", - "valueType": "INT64" - 
}, - { - "name": "total_wages", - "valueType": "INT64" - } - ] - } - ], - "tags": { - "stage": "staging", - "owner": "tony@tecton.ai" - }, - "description": "Credit scoring model" - }, - "meta": { - "createdTimestamp": "2022-02-09T20:40:53.103078Z" + "type": "REQUEST_SOURCE" + } } }, - "profile": "{\"meta\": {\"great_expectations_version\": \"0.14.4\"}, \"expectations\": [{\"meta\": {}, \"kwargs\": {\"column\": \"credit_card_due\", \"min_value\": 0, \"mostly\": 0.99}, \"expectation_type\": \"expect_column_values_to_be_between\"}, {\"meta\": {}, \"kwargs\": {\"column\": \"missed_payments_1y\", \"min_value\": 0, \"max_value\": 5, \"mostly\": 0.99}, \"expectation_type\": \"expect_column_values_to_be_between\"}], \"data_asset_type\": \"Dataset\", \"expectation_suite_name\": \"default\", \"ge_cloud_id\": null}" - }, - "meta": { - "createdTimestamp": "2022-02-09T20:44:03.377806Z", - "minEventTimestamp": "2020-08-25T20:34:41.361Z", - "maxEventTimestamp": "2021-08-25T20:34:41.361Z" + "userDefinedFunction": { + "body": "@on_demand_feature_view(\n sources=[credit_history, input_request],\n schema=[\n Field(name=\"transaction_gt_last_credit_card_due\", dtype=Bool),\n ],\n)\ndef transaction_gt_last_credit_card_due(inputs: pd.DataFrame) -> pd.DataFrame:\n df = pd.DataFrame()\n df[\"transaction_gt_last_credit_card_due\"] = (\n inputs[\"transaction_amt\"] > inputs[\"credit_card_due\"]\n )\n return df\n", + "name": "transaction_gt_last_credit_card_due" + } } } ], - "infra": [ - { - "infraObjects": [ - { - "infraObjectClassType": "feast.infra.online_stores.sqlite.SqliteTable", - "sqliteTable": { - "path": "/Users/dannychiao/GitHub/feast-ui/feature_repo/data/online.db", - "name": "credit_scoring_aws_credit_history" - } - }, - { - "infraObjectClassType": "feast.infra.online_stores.sqlite.SqliteTable", - "sqliteTable": { - "path": "/Users/dannychiao/GitHub/feast-ui/feature_repo/data/online.db", - "name": "credit_scoring_aws_zipcode_features" - } - }, - { - "infraObjectClassType": 
"feast.infra.online_stores.sqlite.SqliteTable", - "sqliteTable": { - "path": "/Users/dannychiao/GitHub/feast-ui/feature_repo/data/online.db", - "name": "credit_scoring_aws_zipcode_money_features" - } - } - ] - } - ] + "project": "credit_scoring_aws" } diff --git a/ui/src/FeastUISansProviders.tsx b/ui/src/FeastUISansProviders.tsx index a6df45b86df..628068f0f00 100644 --- a/ui/src/FeastUISansProviders.tsx +++ b/ui/src/FeastUISansProviders.tsx @@ -34,7 +34,7 @@ import { } from "./contexts/ProjectListContext"; interface FeastUIConfigs { - tabsRegistry: FeastTabsRegistryInterface; + tabsRegistry?: FeastTabsRegistryInterface; featureFlags?: FeatureFlags; projectListPromise?: Promise; } diff --git a/ui/src/components/TagsDisplay.tsx b/ui/src/components/TagsDisplay.tsx index 6f7f23b0072..87e48638474 100644 --- a/ui/src/components/TagsDisplay.tsx +++ b/ui/src/components/TagsDisplay.tsx @@ -9,11 +9,36 @@ import EuiCustomLink from "./EuiCustomLink"; interface TagsDisplayProps { createLink?: (key: string, value: string) => string; tags: Record; + owner?: string; + description?: string; } -const TagsDisplay = ({ tags, createLink }: TagsDisplayProps) => { +const TagsDisplay = ({ + tags, + createLink, + owner, + description, +}: TagsDisplayProps) => { return ( + {owner ? ( + + owner + {owner} + + ) : ( + "" + )} + {description ? 
( + + description + + {description} + + + ) : ( + "" + )} {Object.entries(tags).map(([key, value]) => { return ( diff --git a/ui/src/custom-tabs/TabsRegistryContext.tsx b/ui/src/custom-tabs/TabsRegistryContext.tsx index 8ad58d7a16f..a5321e9c40b 100644 --- a/ui/src/custom-tabs/TabsRegistryContext.tsx +++ b/ui/src/custom-tabs/TabsRegistryContext.tsx @@ -72,10 +72,12 @@ const useGenericCustomTabsNavigation = < const { pathname } = useLocation(); // Current Location useEffect(() => { + if (entries.length === 0) { + return; + } setTabs( entries.map(({ label, path }) => { const resolvedTabPath = resolvePath(path, featureViewRoot.pathname); - return { label, // Can't use the match hooks here b/c we're in a loop due diff --git a/ui/src/index.tsx b/ui/src/index.tsx index d4f1013503d..3a6269a8b78 100644 --- a/ui/src/index.tsx +++ b/ui/src/index.tsx @@ -79,4 +79,4 @@ ReactDOM.render( /> , document.getElementById("root") -); +); \ No newline at end of file diff --git a/ui/src/pages/data-sources/BatchSourcePropertiesView.tsx b/ui/src/pages/data-sources/BatchSourcePropertiesView.tsx index 6d5e6ea86fd..97f9f58d274 100644 --- a/ui/src/pages/data-sources/BatchSourcePropertiesView.tsx +++ b/ui/src/pages/data-sources/BatchSourcePropertiesView.tsx @@ -1,116 +1,136 @@ import React from "react"; import { - EuiCodeBlock, - EuiDescriptionList, - EuiDescriptionListDescription, - EuiDescriptionListTitle, - EuiFlexGroup, - EuiFlexItem, - EuiHorizontalRule, - EuiSpacer, - EuiTitle, - } from "@elastic/eui"; + EuiCodeBlock, + EuiDescriptionList, + EuiDescriptionListDescription, + EuiDescriptionListTitle, + EuiFlexGroup, + EuiFlexItem, + EuiSpacer, + EuiTitle, +} from "@elastic/eui"; interface BatchSourcePropertiesViewProps { - batchSource: { - type?: string | undefined; - dataSourceClassType?: string | undefined; - fileOptions?: { - fileUrl?: string | undefined; - } | undefined; - meta?: { - latestEventTimestamp?: Date | undefined; - earliestEventTimestamp?: Date | undefined; - } | undefined; - 
bigqueryOptions?: { - dbtModelSerialized?: string | undefined - } | undefined; - } + batchSource: { + type?: string | undefined; + owner?: string | undefined; + description?: string | undefined; + dataSourceClassType?: string | undefined; + fileOptions?: + | { + uri?: string | undefined; + } + | undefined; + meta?: + | { + latestEventTimestamp?: Date | undefined; + earliestEventTimestamp?: Date | undefined; + } + | undefined; + bigqueryOptions?: + | { + dbtModelSerialized?: string | undefined; + } + | undefined; + }; } const BatchSourcePropertiesView = (props: BatchSourcePropertiesViewProps) => { - const batchSource = props.batchSource; - return ( - - - - - {(batchSource.dataSourceClassType || batchSource.type) && - - Source Type - {batchSource.dataSourceClassType ? ( - - {batchSource.dataSourceClassType - .split(".") - .at(-1)} - ) - : batchSource.type ? ( - {batchSource.type} - ) - : ""} - } - - {batchSource.fileOptions && ( - - - File URL - - - {batchSource.fileOptions - ? batchSource.fileOptions.fileUrl - : ""} - - - )} - {batchSource.meta?.latestEventTimestamp && ( - - - Latest Event - - - {batchSource.meta.latestEventTimestamp.toLocaleDateString( - "en-CA" - )} - - - )} - {batchSource.meta?.earliestEventTimestamp && ( - - - Earliest Event - - - {batchSource.meta.earliestEventTimestamp.toLocaleDateString( - "en-CA" - )} - - - )} - - - - {batchSource.bigqueryOptions?.dbtModelSerialized && ( - - - - )} - {batchSource.bigqueryOptions?.dbtModelSerialized && ( - - -

Dbt Transformation

-
- - {batchSource.bigqueryOptions.dbtModelSerialized} - -
+ const batchSource = props.batchSource; + return ( + + + + + {(batchSource.dataSourceClassType || batchSource.type) && ( + + Source Type + {batchSource.dataSourceClassType ? ( + + {batchSource.dataSourceClassType.split(".").at(-1)} + + ) : batchSource.type ? ( + + {batchSource.type} + + ) : ( + "" )} - - ); +
+ )} + + {batchSource.owner && ( + + Owner + + {batchSource.owner} + + + )} + {batchSource.description && ( + + Description + + {batchSource.description} + + + )} + {batchSource.fileOptions && ( + + File URL + + {batchSource.fileOptions ? batchSource.fileOptions.uri : ""} + + + )} + {batchSource.meta?.latestEventTimestamp && ( + + Latest Event + + {batchSource.meta.latestEventTimestamp.toLocaleDateString( + "en-CA" + )} + + + )} + {batchSource.meta?.earliestEventTimestamp && ( + + + Earliest Event + + + {batchSource.meta.earliestEventTimestamp.toLocaleDateString( + "en-CA" + )} + + + )} +
+ + + {batchSource.bigqueryOptions?.dbtModelSerialized && ( + + + + )} + {batchSource.bigqueryOptions?.dbtModelSerialized && ( + + +

Dbt Transformation

+
+ + {batchSource.bigqueryOptions.dbtModelSerialized} + +
+ )} + + + ); }; export default BatchSourcePropertiesView; diff --git a/ui/src/pages/data-sources/DataSourceOverviewTab.tsx b/ui/src/pages/data-sources/DataSourceOverviewTab.tsx index f7c05000e78..124a0e6ab92 100644 --- a/ui/src/pages/data-sources/DataSourceOverviewTab.tsx +++ b/ui/src/pages/data-sources/DataSourceOverviewTab.tsx @@ -27,7 +27,6 @@ const DataSourceOverviewTab = () => { const { isLoading, isSuccess, isError, data, consumingFeatureViews } = useLoadDataSource(dsName); const isEmpty = data === undefined; - console.log(consumingFeatureViews); return ( @@ -51,7 +50,7 @@ const DataSourceOverviewTab = () => { {data.fileOptions || data.bigqueryOptions ? ( - ) : data.requestDataOptions ? ( + ) : data.type ? ( @@ -62,7 +61,7 @@ const DataSourceOverviewTab = () => { - ): ( + ) : ( "" )} @@ -78,12 +77,10 @@ const DataSourceOverviewTab = () => { { + fields={data.requestDataOptions.schema.map((obj) => { return { - fieldName: field, - valueType: type, + fieldName: obj.name, + valueType: obj.valueType, }; })} /> diff --git a/ui/src/pages/entities/EntityOverviewTab.tsx b/ui/src/pages/entities/EntityOverviewTab.tsx index dce0c12824d..dc649c2a9fb 100644 --- a/ui/src/pages/entities/EntityOverviewTab.tsx +++ b/ui/src/pages/entities/EntityOverviewTab.tsx @@ -9,7 +9,6 @@ import { EuiText, EuiFlexItem, EuiSpacer, - EuiStat, EuiDescriptionList, EuiDescriptionListTitle, EuiDescriptionListDescription, diff --git a/ui/src/pages/feature-views/OnDemandFeatureViewOverviewTab.tsx b/ui/src/pages/feature-views/OnDemandFeatureViewOverviewTab.tsx index 9bd725534ee..1ea509d8df9 100644 --- a/ui/src/pages/feature-views/OnDemandFeatureViewOverviewTab.tsx +++ b/ui/src/pages/feature-views/OnDemandFeatureViewOverviewTab.tsx @@ -38,7 +38,7 @@ const whereFSconsumesThisFv = (fvName: string) => { const OnDemandFeatureViewOverviewTab = ({ data, }: OnDemandFeatureViewOverviewTabProps) => { - const inputs = Object.entries(data.spec.inputs); + const inputs = Object.entries(data.spec.sources); const 
relationshipQuery = useLoadRelationshipData(); const fsNames = relationshipQuery.data diff --git a/ui/src/pages/feature-views/RegularFeatureViewOverviewTab.tsx b/ui/src/pages/feature-views/RegularFeatureViewOverviewTab.tsx index 72ea646c95f..d284d697e81 100644 --- a/ui/src/pages/feature-views/RegularFeatureViewOverviewTab.tsx +++ b/ui/src/pages/feature-views/RegularFeatureViewOverviewTab.tsx @@ -60,27 +60,6 @@ const RegularFeatureViewOverviewTab = ({ - {data.spec.batchSource.meta ? ( - - - - ) : ( - No batchSource specified on this feature view. - )} - {data.meta.lastUpdatedTimestamp && ( - - - - )} @@ -96,7 +75,7 @@ const RegularFeatureViewOverviewTab = ({ features={data.spec.features} /> ) : ( - No Tags specified on this feature view. + No features specified on this feature view. )} @@ -156,6 +135,8 @@ const RegularFeatureViewOverviewTab = ({ encodeSearchQueryString(`${key}:${value}`) ); }} + owner={data.spec.owner} + description={data.spec.description} /> ) : ( No Tags specified on this feature view. 
diff --git a/ui/src/parsers/feastDatasources.ts b/ui/src/parsers/feastDatasources.ts index a9f58d716c3..3e1dca72d1f 100644 --- a/ui/src/parsers/feastDatasources.ts +++ b/ui/src/parsers/feastDatasources.ts @@ -1,20 +1,22 @@ import { z } from "zod"; -import { FEAST_FEATURE_VALUE_TYPES } from "./types"; +import { FeastFeatureColumnSchema } from "./feastFeatureViews"; const FeastDatasourceSchema = z.object({ type: z.string(), eventTimestampColumn: z.string().optional(), createdTimestampColumn: z.string().optional(), fileOptions: z.object({ - fileUrl: z.string().optional(), + uri: z.string().optional(), }).optional(), name: z.string(), + description: z.string().optional(), + owner: z.string().optional(), meta: z.object({ latestEventTimestamp: z.string().transform((val) => new Date(val)), earliestEventTimestamp: z.string().transform((val) => new Date(val)), }).optional(), requestDataOptions: z.object({ - schema: z.record(z.nativeEnum(FEAST_FEATURE_VALUE_TYPES)), + schema: z.array(FeastFeatureColumnSchema), }).optional(), bigqueryOptions: z.object({ tableRef: z.string().optional(), diff --git a/ui/src/parsers/feastFeatureViews.ts b/ui/src/parsers/feastFeatureViews.ts index c8cdadd25c3..3e63b5afd0e 100644 --- a/ui/src/parsers/feastFeatureViews.ts +++ b/ui/src/parsers/feastFeatureViews.ts @@ -4,6 +4,7 @@ import { FEAST_FEATURE_VALUE_TYPES } from "./types"; const FeastFeatureColumnSchema = z.object({ name: z.string(), valueType: z.nativeEnum(FEAST_FEATURE_VALUE_TYPES), + tags: z.record(z.string()).optional(), }); const FeastBatchSourceSchema = z.object({ @@ -11,9 +12,11 @@ const FeastBatchSourceSchema = z.object({ eventTimestampColumn: z.string().optional(), createdTimestampColumn: z.string().optional(), fileOptions: z.object({ - fileUrl: z.string().optional(), + uri: z.string().optional(), }).optional(), name: z.string().optional(), + description: z.string().optional(), + owner: z.string().optional(), meta: z.object({ earliestEventTimestamp: z.string().transform((val) => 
new Date(val)), latestEventTimestamp: z.string().transform((val) => new Date(val)), @@ -30,12 +33,14 @@ const FeastBatchSourceSchema = z.object({ const FeastFeatureViewSchema = z.object({ spec: z.object({ + description: z.string().optional(), name: z.string(), entities: z.array(z.string()), features: z.array(FeastFeatureColumnSchema), ttl: z.string().transform((val) => parseInt(val)), batchSource: FeastBatchSourceSchema, online: z.boolean(), + owner: z.string().optional(), tags: z.record(z.string()).optional(), }), meta: z.object({ diff --git a/ui/src/parsers/feastODFVS.ts b/ui/src/parsers/feastODFVS.ts index ebac09e1634..8341438d50c 100644 --- a/ui/src/parsers/feastODFVS.ts +++ b/ui/src/parsers/feastODFVS.ts @@ -14,7 +14,7 @@ const RequestDataSourceSchema = z.object({ type: z.string(), name: z.string(), requestDataOptions: z.object({ - schema: z.record(z.nativeEnum(FEAST_FEATURE_VALUE_TYPES)), + schema: z.array(FeastFeatureColumnSchema), }), }), }); @@ -28,7 +28,7 @@ const FeastODFVSchema = z.object({ spec: z.object({ name: z.string(), features: z.array(FeastFeatureColumnSchema), - inputs: z.record(ODFVInputsSchema), + sources: z.record(ODFVInputsSchema), userDefinedFunction: z.object({ name: z.string(), body: z.string(), diff --git a/ui/src/parsers/parseEntityRelationships.ts b/ui/src/parsers/parseEntityRelationships.ts index bf82e86ff9c..f54bff63a1c 100644 --- a/ui/src/parsers/parseEntityRelationships.ts +++ b/ui/src/parsers/parseEntityRelationships.ts @@ -57,7 +57,7 @@ const parseEntityRelationships = (objects: FeastRegistryType) => { }); objects.onDemandFeatureViews?.forEach((fv) => { - Object.values(fv.spec.inputs).forEach((input: { [key: string]: any }) => { + Object.values(fv.spec.sources).forEach((input: { [key: string]: any }) => { if (input.requestDataSource) { links.push({ source: { diff --git a/ui/yarn.lock b/ui/yarn.lock index 396204ffb4d..8079556b8d3 100644 --- a/ui/yarn.lock +++ b/ui/yarn.lock @@ -3009,11 +3009,6 @@ ast-types-flow@^0.0.7: resolved 
"https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" integrity sha1-9wtzXGvKGlycItmCw+Oef+ujva0= -async@0.9.x: - version "0.9.2" - resolved "https://registry.yarnpkg.com/async/-/async-0.9.2.tgz#aea74d5e61c1f899613bf64bda66d4c78f2fd17d" - integrity sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0= - async@^2.6.2: version "2.6.3" resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" @@ -3021,6 +3016,11 @@ async@^2.6.2: dependencies: lodash "^4.17.14" +async@^3.2.3: + version "3.2.3" + resolved "https://registry.yarnpkg.com/async/-/async-3.2.3.tgz#ac53dafd3f4720ee9e8a160628f18ea91df196c9" + integrity sha512-spZRyzKL5l5BZQrr/6m/SqFdBN0q3OCI0f9rjfBzCMBIP4p75P620rR3gTmaksNOhmzgdxcaxdNfMy6anrbM0g== + asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" @@ -3306,6 +3306,13 @@ brace-expansion@^1.1.7: balanced-match "^1.0.0" concat-map "0.0.1" +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + braces@^3.0.1, braces@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" @@ -3452,7 +3459,7 @@ chalk@4.1.1: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2: +chalk@^2.0.0, chalk@^2.4.1: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== @@ -3469,7 +3476,7 @@ chalk@^3.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^4.0.0, chalk@^4.1.0, 
chalk@^4.1.1, chalk@^4.1.2: +chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.1, chalk@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== @@ -4639,11 +4646,11 @@ ee-first@1.1.1: integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= ejs@^3.1.6: - version "3.1.6" - resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.6.tgz#5bfd0a0689743bb5268b3550cceeebbc1702822a" - integrity sha512-9lt9Zse4hPucPkoP7FHDF0LQAlGyF9JVpnClFLFH3aSSbxmyoqINRpp/9wePWJTUl4KOQwRL72Iw3InHPDkoGw== + version "3.1.7" + resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.7.tgz#c544d9c7f715783dd92f0bddcf73a59e6962d006" + integrity sha512-BIar7R6abbUxDA3bfXrO4DSgwo8I+fB5/1zgujl3HLLjwd6+9iOnrT+t3grn2qbk9vOgBubXOFwX2m9axoFaGw== dependencies: - jake "^10.6.1" + jake "^10.8.5" electron-to-chromium@^1.4.17: version "1.4.57" @@ -5219,11 +5226,11 @@ file-selector@^0.2.2: tslib "^2.0.3" filelist@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/filelist/-/filelist-1.0.2.tgz#80202f21462d4d1c2e214119b1807c1bc0380e5b" - integrity sha512-z7O0IS8Plc39rTCq6i6iHxk43duYOn8uFJiWSewIq0Bww1RNybVHSCjahmcC87ZqAm4OTvFzlzeGu3XAzG1ctQ== + version "1.0.3" + resolved "https://registry.yarnpkg.com/filelist/-/filelist-1.0.3.tgz#448607750376484932f67ef1b9ff07386b036c83" + integrity sha512-LwjCsruLWQULGYKy7TX0OPtrL9kLpojOFKc5VCTxdFTV7w5zbsgqVKfnkKG7Qgjtq50gKfO56hJv88OfcGb70Q== dependencies: - minimatch "^3.0.4" + minimatch "^5.0.1" filesize@^8.0.6: version "8.0.7" @@ -6348,13 +6355,13 @@ istanbul-reports@^3.1.3: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" -jake@^10.6.1: - version "10.8.2" - resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.2.tgz#ebc9de8558160a66d82d0eadc6a2e58fbc500a7b" - integrity sha512-eLpKyrfG3mzvGE2Du8VoPbeSkRry093+tyNjdYaBbJS9v17knImYGNXQCUV0gLxQtF82m3E8iRb/wdSQZLoq7A== +jake@^10.8.5: + version 
"10.8.5" + resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46" + integrity sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw== dependencies: - async "0.9.x" - chalk "^2.4.2" + async "^3.2.3" + chalk "^4.0.2" filelist "^1.0.1" minimatch "^3.0.4" @@ -7274,20 +7281,27 @@ minimalistic-assert@^1.0.0: resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== -minimatch@3.0.4, minimatch@^3.0.4: +minimatch@3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" -minimatch@^3.0.2: +minimatch@^3.0.2, minimatch@^3.0.4: version "3.1.2" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== dependencies: brace-expansion "^1.1.7" +minimatch@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.0.1.tgz#fb9022f7528125187c92bd9e9b6366be1cf3415b" + integrity sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g== + dependencies: + brace-expansion "^2.0.1" + minimist@^1.1.1, minimist@^1.2.0, minimist@^1.2.5: version "1.2.6" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44"