diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..339315ad --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,12 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. +# Please see the documentation for all configuration options: +# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file + +version: 2 +updates: + - package-ecosystem: "github-actions" # See documentation for possible values + directory: "/" # Location of package manifests + schedule: + interval: "weekly" + diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 00000000..c53d2d66 --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,2 @@ +App: + - '/.*/' diff --git a/.github/workflows/ci.yaml b/.github/workflows/dockerbuild.yaml similarity index 78% rename from .github/workflows/ci.yaml rename to .github/workflows/dockerbuild.yaml index f06f556d..eaf2d1b0 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/dockerbuild.yaml @@ -1,4 +1,4 @@ -name: ci +name: dockerbuild on: push: @@ -23,6 +23,13 @@ jobs: ret = [] for i in flist: a,v = i.split('/') + # Look for folders only + if 'unsupported' in v or 'unsupported' in a: + continue + + if '.md' in v: + continue + ret.append({'app':a, 'version':v }) print(json.dumps({'include': ret})) EOF @@ -38,7 +45,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Set up QEMU uses: docker/setup-qemu-action@v1 @@ -47,7 +54,7 @@ jobs: uses: docker/setup-buildx-action@v1 - name: Login to DockerHub - uses: docker/login-action@v1 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} @@ -60,28 +67,28 @@ jobs: name: Build and push Master if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }} id: 
docker_build_master - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v6 with: context: ${{ matrix.app }}/${{ matrix.version }} file: ${{ matrix.app }}/${{ matrix.version }}/Dockerfile - platforms: linux/amd64,linux/arm64,linux/386 + platforms: linux/amd64,linux/arm64 push: true tags: | - ${{ secrets.DOCKERHUB_USERNAME }}/${{ matrix.app }}:${{ matrix.version }} - ${{ secrets.DOCKERHUB_USERNAME }}/${{ matrix.app }}:latest - ${{ secrets.DOCKERHUB_USERNAME }}/shuffle:${{ matrix.app }}_${{ matrix.version }} + frikky/${{ matrix.app }}:${{ matrix.version }} + frikky/${{ matrix.app }}:latest + frikky/shuffle:${{ matrix.app }}_${{ matrix.version }} - name: Build and push Feature PR if: ${{ github.event_name == 'pull_request' }} id: docker_build_feature - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v6 with: context: ${{ matrix.app }}/${{ matrix.version }} file: ${{ matrix.app }}/${{ matrix.version }}/Dockerfile platforms: linux/amd64,linux/arm64,linux/386 push: true tags: | - ${{ secrets.DOCKERHUB_USERNAME }}/${{ matrix.app }}:${{ github.head_ref }} + frikky/${{ matrix.app }}:${{ github.head_ref }} - name: Image digest run: | diff --git a/.github/workflows/project_automation.yml b/.github/workflows/project_automation.yml new file mode 100644 index 00000000..837957f6 --- /dev/null +++ b/.github/workflows/project_automation.yml @@ -0,0 +1,26 @@ +name: Automation - Add all new issues to roadmap project + +on: + issues: + types: + - opened + +jobs: + add-label: + name: Add label to issue + runs-on: ubuntu-latest + steps: + - uses: github/issue-labeler@v3.3 #May not be the latest version + with: + configuration-path: .github/labeler.yml + repo-token: ${{ secrets.ADD_TO_PROJECT_PAT }} + enable-versioned-regex: 0 + + add-to-project: + name: Add issue to project + runs-on: ubuntu-latest + steps: + - uses: actions/add-to-project@v0.5.0 + with: + project-url: https://github.com/orgs/Shuffle/projects/8 + github-token: ${{ 
secrets.ADD_TO_PROJECT_PAT }} diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 00000000..271c0fc2 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,76 @@ +# Repository Guidelines + +## Project Structure & Module Organization +This repository hosts Shuffle app implementations. Each app lives in a top-level folder named after the integration (e.g., `aws-ec2/`), and each release is versioned under a subfolder like `1.0.0/`. A typical app version contains: + +- `src/app.py`: the Shuffle SDK entry point. +- `api.yaml`: OpenAPI definition used by Shuffle. +- `requirements.txt`: Python dependencies for the app. +- `Dockerfile`: container build instructions for the app. +- `README.md`: app-specific usage and action documentation. +- Optional assets such as screenshots (`*.png`). + +In `api.yaml`, prefer an `authentication` block for shared credentials (URL, tokens, keys). Actions should only include auth parameters when they truly differ per call. + +## Build, Test, and Development Commands +Apps are built and run container-first via the Shuffle SDK image. From an app version directory: + +- `docker build -t shuffle-: .`: build the app image. +- `docker run --rm shuffle-:`: run the app container locally. + +For quick iteration on code, you can also run the Python entrypoint in a virtualenv: + +- `pip install -r requirements.txt` +- `python src/app.py --log-level DEBUG` + +## Coding Style & Naming Conventions +Use 4-space indentation and standard Python style. Keep functions `snake_case`, classes `CamelCase`, and constants `UPPER_SNAKE_CASE`. Match existing patterns in `src/app.py` and keep action names aligned with `api.yaml`. + +## Creating New Shuffle Apps (Agent Workflow) +Use an existing app as a template (e.g., `http/1.4.0/` or `aws-ec2/1.0.0/`) and follow the same folder layout. A minimal, working app version should include: + +- `api.yaml`: action definitions, parameters, and examples. +- `src/app.py`: class extending the Shuffle SDK (`shuffle_sdk.AppBase`). 
+- `requirements.txt`: third-party dependencies. +- `Dockerfile`: built on `frikky/shuffle:app_sdk`. + +When adding actions, ensure the `api.yaml` action name matches the method name in `src/app.py` and parameter names align exactly. Keep input parsing defensive (strings vs JSON), and return JSON-serializable results. For HTTP integrations, centralize auth and base URL handling and add a TLS `verify` option. If a service requires special payloads (e.g., ADF for Jira), accept JSON strings and pass through unchanged. Keep `api.yaml` examples realistic because they show up in the Shuffle UI. + +## Authentication & App Configuration +Most apps declare credentials in `api.yaml` under `authentication:` so Shuffle injects them automatically. In code, read those values as normal action arguments (Shuffle passes them into each action). Prefer a single auth helper in `src/app.py` (e.g., `_auth()` for tokens, `_build_api_base()` for base URLs) and reuse it across actions. If an integration supports multiple auth modes (token vs password), accept both and choose the provided one. + +Prefer small, focused actions (create, update, list, search) and document auth requirements and examples in the app `README.md`. + +## Manual Python App Notes (From Shuffle Docs) +- **SDK image choices:** Shuffle provides Alpine (slim), Kali (security tooling), and Blackarch (kitchen‑sink). This repo’s Dockerfiles typically use `frikky/shuffle:app_sdk` (Alpine‑based) unless a toolset requires otherwise. +- **Directory layout:** `api.yaml`, `Dockerfile`, `requirements.txt`, `README.md`, and `src/app.py` are expected in each app version. Complex apps can add additional modules under `src/` and import them from `app.py`. +- **Actions & wiring:** Every action in `api.yaml` must map to a method in `src/app.py` with the same name and argument list. Authentication parameters are passed into each action automatically when declared under `authentication:`. 
+- **Utility helpers:** In `AppBase`, you can use `self.get_file`, `self.set_files`, `self.update_file`, and cache helpers `self.get_cache`, `self.set_cache`, `self.delete_cache` for file and key/value workflows. +- **Prototyping:** Build and test your Python logic locally first, then wire it into `src/app.py`. Keep return values JSON‑serializable so Shuffle can consume them. +- **Upload & hotload:** After a prototype works, upload it to Shuffle (cloud) or hotload locally (on‑prem) by rebuilding the app image. Local Docker rebuilds are faster for iteration. + +## Testing, Hotloading, and CI/CD +- **Cloud upload test:** Use the Upload App API to add the app to your org, then run a workflow to validate actions. +- **Local hotload (on‑prem):** Place the app folder in `shuffle-apps/`, set `SHUFFLE_APP_HOTLOAD_FOLDER=./shuffle-apps`, then use the hot reload button in the UI. Allow ~20 seconds for the reload to complete. +- **Update workflow deps:** If you update an existing app version, remove and re‑add the app in any workflows that reference it. +- **Fast local iteration:** After the first upload, rebuild locally: `docker images | grep ` then `docker build . -t `. +- **CI/CD pattern:** Create a test workflow, upload a test app version, run the workflow via API, and validate `workflowexecution.workflow.validation.valid` before promoting. + +## Publishing Apps +- **OpenAPI apps:** Upload to your instance, then use the `/apps` page to publish so it appears on `shuffler.io`. +- **Python apps:** Fork `https://github.com/frikky/shuffle-apps`, add your app, and open a pull request to upstream. + +## Testing Guidelines +There is no repository-wide test suite. If you add tests for a specific app, keep them alongside the app version (e.g., `aws-ec2/1.0.0/tests/`) and document how to run them in that app’s `README.md`. + +## Commit & Pull Request Guidelines +Commit messages are short and descriptive, sometimes using a prefix like `fix:`. 
Follow that style and keep commits scoped to a single app/version when possible. + +For pull requests, include: + +- A clear description of the change and impacted app/version path. +- Updated `README.md` or `api.yaml` when behavior changes. +- Screenshots/assets if user-facing output or UI-related docs are affected. + +## Security & Configuration Tips +Many apps require API keys or credentials. Do not commit secrets; use environment variables or Shuffle configuration fields instead, and document required inputs in the app’s `README.md`. diff --git a/README.md b/README.md index 3ed3879d..a8f01df6 100644 --- a/README.md +++ b/README.md @@ -1,28 +1,28 @@ # Shuffle Apps -This is a repository for apps to be used in [Shuffle](https://github.com/frikky/shuffle) +All public apps are available in the search, engine either in your local instance or on [https://shuffler.io/search?tab=apps](https://shuffler.io/search). This is a repository for apps to be used in [Shuffle](https://github.com/shuffle/shuffle) -**PS:** These apps should be valid with WALKOFF, but the SDK is different, meaning you have to change the FIRST line in each Dockerfile (FROM frikky/shuffle:app_sdk). +**PS:** These apps should be valid with WALKOFF (from NSA), but the SDK is different, meaning you have to change the FIRST line in each Dockerfile (FROM shuffle/shuffle:app_sdk) to make it compatible with Shuffle. ## App Creation -App creation can be done with the Shuffle App Creator (exports as OpenAPI) or Python, which makes it possible to connect _literally_ any tool. Always prioritize using the App Creator when applicable. +App creation can be done with the Shuffle App Creator (exports as OpenAPI), with AI Generation, or Python - which makes it possible to connect _literally_ any tool. Always prioritize using the App Creator when applicable, as it makes maintaining an app easier. 
-![Shuffle-workflow-categories](https://github.com/frikky/shuffle-workflows/blob/master/images/categories_circle_dark.png) +![Shuffle-workflow-categories](https://github.com/shuffle/shuffle-workflows/blob/master/images/categories_circle_dark.png) ### References -* [App Development Process](https://github.com/frikky/shuffle-docs/blob/master/handbook/engineering/app_development.md) +* [App Development Process](https://github.com/shuffle/shuffle-docs/blob/master/handbook/engineering/app_development.md) * [Python app documentation](https://shuffler.io/docs/app_creation) -* [Apps in progress](https://github.com/frikky/Shuffle-apps/projects/1) +* [Apps in progress](https://github.com/shuffle/shuffle-apps/projects/1) ### Categories We have defined eight (8) "major" categories of tools that are necessary to any cybersecurity threat. Most security-related tools can fit into one of these eight. -1. [Communication](https://github.com/frikky/Shuffle-apps/issues/26) - Any way to chat; WhatsApp, SMS, Email etc. -2. [Case Management](https://github.com/frikky/Shuffle-apps/issues/22) - The central hub for operation teams. -3. [SIEM](https://github.com/frikky/Shuffle-apps/issues/21) - Search engine for logs in an enterprise. Used to find evil. -4. [Assets](https://github.com/frikky/Shuffle-apps/issues/25) - Discover endpoint information. Vulnerabilities, owners, departments etc. -5. [IAM](https://github.com/frikky/Shuffle-apps/issues/86) - Access Management. Active Directory, Google Workspaces, Single Sign-on etc. -6. [Intelligence](https://github.com/frikky/Shuffle-apps/issues/24) - Typically a vendor explaining what you should be looking for. -7. [Network](https://github.com/frikky/Shuffle-apps/issues/27) - Anything BETWEEN your connected devices. Firewalls, WAF, Switches, Bluetooth... -8. [Eradication](https://github.com/frikky/Shuffle-apps/issues/23) - Control machines directly to eradicate evil. Hard and undefined (EDR & AV) +1. 
[Communication](https://github.com/shuffle/shuffle-apps/issues/26) - Any way to chat; WhatsApp, SMS, Email etc. +2. [Case Management](https://github.com/shuffle/shuffle-apps/issues/22) - The central hub for operation teams. +3. [SIEM](https://github.com/shuffle/shuffle-apps/issues/21) - Search engine for logs in an enterprise. Used to find evil. +4. [Assets](https://github.com/shuffle/shuffle-apps/issues/25) - Discover endpoint information. Vulnerabilities, owners, departments etc. +5. [IAM](https://github.com/shuffle/shuffle-apps/issues/86) - Access Management. Active Directory, Google Workspaces, Single Sign-on etc. +6. [Intelligence](https://github.com/shuffle/shuffle-apps/issues/24) - Typically a vendor explaining what you should be looking for. +7. [Network](https://github.com/shuffle/shuffle-apps/issues/27) - Anything BETWEEN your connected devices. Firewalls, WAF, Switches, Bluetooth... +8. [Eradication](https://github.com/shuffle/shuffle-apps/issues/23) - Control machines directly to eradicate evil. Hard and undefined (EDR & AV) ## OpenAPI Apps in this repository are mostly manually made. Shuffle is striving for standardization and accessability, and our effort is focused on OpenAPI rather than manual work. With this in mind, most app creation that supports REST API's will be continued here. @@ -32,18 +32,18 @@ Apps in this repository are mostly manually made. 
Shuffle is striving for standa ## Support * [Discord](https://discord.gg/B2CBzUm) * [Twitter](https://twitter.com/shuffleio) -* [Email](mailto:frikky@shuffler.io) -* [Open issue](https://github.com/frikky/Shuffle/issues/new) +* [Email](mailto:support@shuffler.io) +* [Open issue](https://github.com/shuffle/shuffle/issues/new) * [Shuffler.io](https://shuffler.io/contact) ## External contributions -[**App magicians**](https://github.com/frikky/shuffle-apps) - - +[**App magicians**](https://github.com/shuffle/shuffle-apps) + + [**OpenAPI creators**](https://github.com/frikky/security-openapis) - + @@ -51,4 +51,4 @@ Apps in this repository are mostly manually made. Shuffle is striving for standa All apps, workflows and modular parts of Shuffle including our App SDK is under licensed under MIT, meaning you can freely use it anywhere in any way you want. # Contributing -Contributing guidelines for outlined [here](https://github.com/frikky/Shuffle/blob/master/.github/CONTRIBUTING.md). +Contributing guidelines are outlined [here](https://github.com/shuffle/shuffle/blob/master/.github/CONTRIBUTING.md). diff --git a/active-directory/1.0.0/api.yaml b/active-directory/1.0.0/api.yaml index 526de3fd..187ed2eb 100644 --- a/active-directory/1.0.0/api.yaml +++ b/active-directory/1.0.0/api.yaml @@ -175,4 +175,133 @@ actions: returns: schema: type: string + - name: lock_user + description: Lock User account + parameters: + - name: samaccountname + description: user to lock + required: true + multiline: false + example: 'user01' + schema: + type: string + - name: search_base + description: "If empty it will use the base_dn." 
+ required: false + multiline: false + example: "OU=Users,DC=icplahd,DC=com" + schema: + type: string + returns: + schema: + type: string + - name: unlock_user + description: Unlock User account + parameters: + - name: samaccountname + description: user to unlock + required: true + multiline: false + example: 'user01' + schema: + type: string + - name: search_base + description: "If empty it will use the base_dn." + required: false + multiline: false + example: "OU=Users,DC=icplahd,DC=com" + schema: + type: string + returns: + schema: + type: string + - name: change_user_password_at_next_login + description: Set given password for user at next login + parameters: + - name: samaccountname + description: user to change password for + required: true + multiline: false + example: 'user01' + schema: + type: string + - name: search_base + description: "If empty it will use the base_dn." + required: false + multiline: false + example: "OU=Users,DC=icplahd,DC=com" + schema: + type: string + - name: new_user_password + description: "New password you want to set" + required: true + multiline: false + example: "***" + schema: + type: string + - name: repeat_new_user_password + description: "Repeat new password you want to set" + required: true + multiline: false + example: "***" + schema: + type: string + returns: + schema: + type: string + - name: add_user_to_group + description: Add user to group + parameters: + - name: samaccountname + description: user to change password for + required: true + multiline: false + example: 'user01' + schema: + type: string + - name: search_base + description: "If empty it will use the base_dn." 
+ required: false + multiline: false + example: "OU=Users,DC=icplahd,DC=com" + schema: + type: string + - name: group_name + description: "Group you want to add user to" + required: true + multiline: false + example: "Group name" + schema: + type: string + returns: + schema: + type: string + - name: remove_user_from_group + description: Remove user from group + parameters: + - name: samaccountname + description: user to change password for + required: true + multiline: false + example: 'user01' + schema: + type: string + - name: search_base + description: "If empty it will use the base_dn." + required: false + multiline: false + example: "OU=Users,DC=icplahd,DC=com" + schema: + type: string + - name: group_name + description: "Group you want to remove user from" + required: true + multiline: false + example: "Group name" + schema: + type: string + returns: + schema: + type: string + large_image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAK4AAACuCAYAAACvDDbuAAAAAXNSR0IArs4c6QAAD6lJREFUeF7tnXmQFOUZh3+zO8fOzF5ccoioiIBcsRRQNOKREFAQAaNRMJRXhWi0Ek2iKWMlUVJWqlQ0sSxMJcGT0qLCciqKKBAUFTwW5FpYVFxYEISdY+eenk71zA47O85Ib3/b2927v/kHavd7v+/t533m7W96ZnptsizL4IMELEbARnEtVjGmmyZAcSmCJQlQXEuWjUlTXDpgSQIU15JlY9IUlw5YkgDFtWTZmDTFpQOWJEBxLVk2Jk1x6YAlCVBcS5aNSVNcOmBJAhTXkmVj0hSXDliSAMW1ZNmYNMWlA5YkQHEtWTYmTXHpgCUJUFxLlo1JU1w6YEkCFNeSZWPSFJcOWJIAxbVk2Zg0xaUDliRAcS1ZNiZNcemAJQlQXEuWjUlTXDpgSQIU15JlY9IUt5s4EEsBC+rC2B0CXhrrsfxRU1zLl7D1ABoiKaw7mkREkvHsvgh2hQofnDyr2vJHTXEtVsLGSAqBpIz3j8Uxb1sMkoa7G1NcixXdSukqQv5xRwjPf51EPAX4EzI0OFrwkCmulUwwWa7RaBSNkgN/3RNLd9CPjydwINo5SVLczuFs6VU2Ho1hWxD42h/Bcw1ASDL+cCiu8TUwXQbu5T5EU6ZLq01CFNfc9enw7NY0xnBHbRTNSSAqyUjIwMPDyzB/RNnJtcqW+6BcejLzg+KauToacvu4ScLf6iKIpICdvqSqPWe+uOy4GsBrCOlWl8PqgxJWHVFepctYfSSJ944nNSBrG8KOK4xQ0wRdSlxfQsbeoIS4JOGZ+hiWHNb/nM2Oq8k74aAuI66txicMQ8sE7LhaqInHUFxBhuy4ggA1hlNcjeCyYey4ggA1hlNcjeCKicurCoJAVYZTXJWgig1jxxUEqDGc4moEx62CIDjBcIorCJAvzgQBagynuBrBseMKghMMp7iCANlxBQFqDKe4GsGx4wqCEwynuIIA2XEFAWoMp7gawbHjCoITDKe4ggDZcQUBagynuBrBseMKghMMp
7iCANlxBQFqDKe4GsEV7bjLmhCTbYKz6hvOr+7oy7dds5vl87j8kE27yqZ5MDuuZnSZQH7IRhCgxnCKqxFcsa0CO64gUJXhFFclqGLD2HEFAWoMp7gawbHjCoITDKe4ggDZcQUBagynuBrBFb0cxjvZCBJVF05x1XEqOoodVxCgxnCKqxEc97iC4ATDu4y4Rt1ozm4DSnPeKFNuwtxRN2AWrG3RcFeJXjN33rxdRtzOQ8aVzECA4pqhCsyh3QQobruRMcAMBLqMuI5lxtz07qFhZXgk58bORuXRHpkSM/nnotrDS9exZvl0mFF5tAcuP9bYHlo6jzVKmPzruEbl0R68FLc9tHQea5QwFFfnwhaZvsvscSmueoHYcdWz0n0kxVWPmOKqZ6X7SIqrHjHFVc9K95EUVz1iiquele4jKa56xBRXPSvdR1Jc9YgprnpWuo+kuOoRU1z1rHQfSXHVI6a46lnpPpLiqkdMcdWz0n0kxVWPmOKqZ6X7SIqrHjHFVc9K95EUVz1iiquele4jKa56xBRXPSvdR1Jc9YgprnpWuo+kuOoRU1z1rHQfSXHVI6a46lnpPpLiqkdMcdWz0n0kxVWPmOKqZ6X7SIqrHjHFVc9K95EUVz1iiquele4jKa56xBRXPSvdR5av8CMkdf7t5vK/5VtS4zP9Te8oru46al8gnJSxtUlCsyRjw9EEnt4fR1IHr/PFtS/zwYDnT7tAUdx24TLX4IZgHK80pnAiKqE2IOOdY0lNnTJfXOUWTHo8QTqSHsXtSJomm2ubX8LvtofQLNlwKJJCQ6Rwu84X17nMh4QOnb0j8VDcjqRpkbmUbYDi5crDCfx8awj3n1uG+Tk3vSvj34DolEp2mTvZdAotFYvs9idwOG5DSgZu3hLCt3HztV92XBWF5JBWArGUjBcOxHEkKqPWl8TKI8m04J39oLidTbyLr7f5SAiPfQGEJRm7/UkcietzwBRXH66ctQABWZYxf3cM8+ui6d9m99paYFFcLdQY06EEjkUl7A/LaE6k8MDnYXwWOPX0FPfUjDjCQAKJZBJrv5WxI5DCwVACL3ydRLMEUFwDi8KluzcBXg7r3vW37NFTXMuWrnsnTnG7d/0te/QU17Kl696JU9zuXX/LHj3FtWzpunfiFLd719+yR29acYe84Uf9NVUYstKPPdOrYAdQF5BwZnkp6kMpnOO2IWWzwQ4ZjlIbokkZss2GpV9GMWOgE7VhGRN7llq2MEz8+wmYVtznD8QxprcDA2wy+nhK0NSUQJ8eDuwISBhVWYoVhxOob05hXWMC/72sHO8dS2JrUxKTqkpwUX8njktAL3rbZf03tbgrvoph4ThvWly7DJz1VgCrL/GmxZ25uRmDK0qx5lACj4314t91EYyutmNGz4y4Z6wJoOHqyi5buO5+YKYVt7sXhsdv0a0CC0cC30eAHZd+WJIAxbVk2Zg0xaUDliTQ7cRVvqx465lOQ4p1LJzCrqiMy3l9WZi/JcTt/7ofR2IyDlxbhUEOm+qDrvVJOL+69WLuYV8CA94N6fINgEI33Xvz0nJM7qu8dZJ5jF4XSH8bQa9vIOwJSBjkLYGnVD0j1TBNNtD84sqAbZkPk3uWonelHa9c4FaNUJEpX5J4CnCWqJ5C9UBlrY0TKzCxd2n6FkxNEQmnvRnEDYNdWHJ+a87Klxz18krJYdGlFbitb9d/58X04u75No7z/hfG55d4MHpzuGC32vhNAo/vj+N0Twn+2SLJ4oY4btkaxivjPGn55pyR2R5c90EIKyZ44WtO4p49cbwyNvP77GPa5lD6TY7s477aMPaFZUzu58C9g4tvMXLFzcbubEpi1Prmkzm/3xjH2mYZjwx1pYfM/iiEReO9+NfeKNaekLBqQuu6d20JoSEJTOrrwK/PyVtXljHtg3B6jpfHedDDYUP2eH8xtAwTq0owppcDoz2tnVc5LuVx4yAH5g5snW/t11Ek3Hac77Fh3rYoXhvvwdyPw3h5v
BfenCf4grooenvsmHtG6xlE9bNah4GmF1cRYvUVFZjasxTK/2unVOEHOQW5ZmMQa45LeHCoC182p7D0SBLJGVX46YchLG1M4PoBjkyBx3vhLkF6jmwXzu/IL+6L4tbPo21+f+MZTowuL8Gz+2JIOktwbEpFwTIUElcZqPz8ugFOLL/Yg5+tC2JJWIY8PfOOXlmNDxN621HXnMIQjw3vXF4BZSekxNw52IUBTmBxQwIuhw07r8qs+8ahBKZ+FMJvh7rSn994fG8MJ2ZU444tmeO9sJcdZ7lsaeHnne3Eq1/FMfvTMO471wWnDfhHfQxOVwl8Le8qXrMmAFdvO5Y3xNPjbxrixp2bAriknxMv5ZzdlJx2XV2F89zm2IaYWtyEJMO5wo/YzOo09KnvBlAv2VA3KVPEQFRC1RtBxGdWpwue/yi0Vcj9mXe5D9MHuvDq2Myp/KK1fozs7cSiC9wYuCaAclcJ9lxVfnJaJbZxejX6F2g6xcQ9Z6UPo/s4sHyCt6C4PdwlOJzz1vT1HzTjuGzDhpyur8wtzaqG0gCdNT48PNKNPw3LdO3cR6GtgvKzF8d5MLfljJN9Mr14aQXm9i2FIu6aSNt996t7I5i9I3byCfxNIIF+6/R5baC1GZta3OcPxPDQrtjJwm5qjGPih2GkZlVD8XTBvhge2BVF8rqqol0wf4+bK+6vaiNY+EUsPV+2oNm5y5f78PfzPZjZ0rGV3w9c7cfdI9x4YnhhabJ73Nxk+tX4cHH/4uIuHO/FbQMzZ4VsDnef58b8nO1Br9V+PHNJBe7plznrZHM8lbjBhIzKVf7vbK+Grvbhh/2dWHShJyNu0gb52rZnEmWd6KxqKEc6ZVMQI3o6sGBkmVbPOjzO1OIq8Jx2G4Z7Wjdb2wMSNk+uwgSvDU/ui2H+3hh8Uwt/mOZUHTcriiL3+gNRXPVJ6zZB6WwPDi/DlX3attehlaUY6Ppue/++rcIvh7mxcKSrYMctJO6cwWW4/fS2647tZUdl3lbnVOKeiMtQpM9/8k5cF8CQHvbvFXfue0HsTACfXFmRfrJ8c20VTmvHFZ0ONzX/7CIr9/Yx4UNKpmBfGcD6y1pP1Uqaf/g0hI9CcroY/9kfw53bIkUvL6kR113jw48GubDtaDx9Cp7Xco23zyo/Hhrlxn1nq7vmW0jcxV/GcMtnrfkV2uPmi9tvpQ/XD3Lh2ZwrEfnbgbeurMRPenz30kj+ViEmyShb4UfjtCr0V/ZaLQ97jQ/3DnPjqZGuoh1394kERmwIIT61As7Xg7pdwtOqnmk77twtIbzZlMLRyW1PYfuCEoa+HUTzjOr0q16lWC9f7MUtAxz4JpJCvzWBNi+utv64AhdWlqa3FrkdNgts6cE45nwSgVLk5KxqZC8kPbczjLvq4tg3pRJDWjr+pm+TuKx34VfVSh6Lx3kxtkcpTsRSWN8Qx0NfxLH4Ii9mn57ZCqgR99FdUfx5TxT1UypxTsu6G44lcUVL5x/3dgA7ozIC06pgtwF3fdiMeWO86asCSg5VHjt8U8pxMCpjYJkNfVf5YXOV4NCkivRluNfqI7h5e+v+tdhWIctK+fepsV78ZlDrdkarbB0ZZ1pxlSLcP8qDJ4d+t+OV1vjw2gQvbujvwBdBCee+HURKeQUP4NExbjw8JLMHve/TEJ7+KpH+f2BGNSqKnGqVtUpsgDQzs9fNPu7cEsKig4mTt9hXREnkjcmOLV3mOxmn5KGMjc5oO99N7wSxJCQjlXNV4bnxXtyas8dVJnl0exh/qY8XXTf370zk5hSVZHhW+NNxyt685uLM5bXc2/sr8iovdrNP0Kkte9xU3h5XiXt+Rwi3700gNLMaORdyOtI/zXOZVlzNR8TADiPwxLYQfr8/YbptQvpsYNY9bofR50SaCShnotevqMQ1PXV4q1FzVplAiisIsKuG+8MS5tRG0+8y6vUWtQg7iitCj7GGEaC4hqHnwiIEKK4IPcYaRoDiGoaeC4sQoLgi9
BhrGAGKaxh6LixCgOKK0GOsYQQormHoubAIAYorQo+xhhGguIah58IiBCiuCD3GGkaA4hqGnguLEKC4IvQYaxgBimsYei4sQoDiitBjrGEEKK5h6LmwCAGKK0KPsYYRoLiGoefCIgQorgg9xhpGgOIahp4LixCguCL0GGsYAYprGHouLEKA4orQY6xhBCiuYei5sAgBiitCj7GGEaC4hqHnwiIEKK4IPcYaRoDiGoaeC4sQoLgi9BhrGAGKaxh6LixCgOKK0GOsYQQormHoubAIAYorQo+xhhGguIah58IiBCiuCD3GGkaA4hqGnguLEKC4IvQYaxgBimsYei4sQuD/UVLFb/ZiRHAAAAAASUVORK5CYII= diff --git a/active-directory/1.0.0/requirements.txt b/active-directory/1.0.0/requirements.txt index 5238833e..ad59907e 100644 --- a/active-directory/1.0.0/requirements.txt +++ b/active-directory/1.0.0/requirements.txt @@ -1,2 +1,3 @@ +shuffle-sdk ldap3==2.9.1 -requests==2.25.1 +pycryptodome diff --git a/active-directory/1.0.0/src/app.py b/active-directory/1.0.0/src/app.py index 3930dcf9..67f8ed2c 100644 --- a/active-directory/1.0.0/src/app.py +++ b/active-directory/1.0.0/src/app.py @@ -1,13 +1,23 @@ import json +import hashlib import ldap3 import asyncio -from ldap3 import ( - Server, - Connection, - MODIFY_REPLACE, - ALL_ATTRIBUTES, +from ldap3 import Server, Connection, MODIFY_REPLACE, ALL_ATTRIBUTES, NTLM + +try: + from Crypto.Hash import MD4 as CryptoMD4 +except ImportError: + CryptoMD4 = None + +from ldap3.extend.microsoft.addMembersToGroups import ( + ad_add_members_to_groups as addUsersInGroups, +) +from ldap3.extend.microsoft.removeMembersFromGroups import ( + ad_remove_members_from_groups as removeUsersFromGroups, ) -from walkoff_app_sdk.app_base import AppBase + +from shuffle_sdk import AppBase + class ActiveDirectory(AppBase): __version__ = "1.0.1" @@ -23,12 +33,28 @@ def __init__(self, redis, logger, console_logger=None): super().__init__(redis, logger, console_logger) def __ldap_connection(self, server, port, domain, login_user, password, use_ssl): - use_SSL = False if use_ssl.lower() == "false" else False + use_SSL = False if use_ssl.lower() == "false" else True login_dn = domain + "\\" + login_user s = Server(server, port=int(port), use_ssl=use_SSL) - c = Connection(s, user=login_dn, 
password=password, auto_bind=True) + if CryptoMD4 and not getattr(hashlib, "__active_directory_md4_patch__", False): + try: + import ldap3.utils.ntlm as ldap3_ntlm + + def _md4_hash(data): + md4 = CryptoMD4.new() + md4.update(data) + return md4.digest() + + ldap3_ntlm.hashlib.md4 = _md4_hash + hashlib.__active_directory_md4_patch__ = True + except Exception: + pass + + c = Connection( + s, user=login_dn, password=password, authentication=NTLM, auto_bind=True + ) return c # Decode UserAccountControl code @@ -133,21 +159,28 @@ def user_attributes( result = json.loads(c.response_to_json()) if len(result["entries"]) == 0: - return json.dumps({ - "success": False, - "result": result, - "reason": "No user found for %s" % samaccountname, - }) + return json.dumps( + { + "success": False, + "result": result, + "reason": "No user found for %s" % samaccountname, + } + ) except Exception as e: - return json.dumps({ - "success": False, - "reason": "Failed to get users in user attributes: %s" % e, - }) - + return json.dumps( + { + "success": False, + "reason": "Failed to get users in user attributes: %s" % e, + } + ) result = result["entries"][0] - result["attributes"]["userAccountControl"] = self.__getUserAccountControlAttributes(result["attributes"]["userAccountControl"]) + result["attributes"]["userAccountControl"] = ( + self.__getUserAccountControlAttributes( + result["attributes"]["userAccountControl"] + ) + ) return json.dumps(result) @@ -176,7 +209,19 @@ def set_password( server, port, domain, login_user, password, use_ssl ) - result = json.loads( self.user_attributes( server, port, domain, login_user, password, base_dn, use_ssl, samaccountname, search_base,)) + result = json.loads( + self.user_attributes( + server, + port, + domain, + login_user, + password, + base_dn, + use_ssl, + samaccountname, + search_base, + ) + ) user_dn = result["dn"] c.extend.microsoft.modify_password(user_dn, new_password) @@ -239,7 +284,6 @@ def enable_user( samaccountname, search_base, ): - 
if search_base: base_dn = search_base @@ -295,7 +339,6 @@ def disable_user( samaccountname, search_base, ): - if search_base: base_dn = search_base @@ -322,7 +365,6 @@ def disable_user( "success": False, "reason": "Failed to get result attributes: %s" % e, } - if "ACCOUNTDISABLED" in userAccountControl: try: @@ -358,6 +400,188 @@ def disable_user( "reason": "Failed adding ACCOUNTDISABLED to user: %s" % e, } + def lock_user( + self, + server, + domain, + port, + login_user, + password, + base_dn, + use_ssl, + samaccountname, + search_base, + ): + if search_base: + base_dn = search_base + + c = self.__ldap_connection(server, port, domain, login_user, password, use_ssl) + + c.search(base_dn, f"(SAMAccountName={samaccountname})") + + if len(c.entries) == 0: + return {"success": "false", "message": f"User {samaccountname} not found"} + + user_dn = c.entries[0].entry_dn + + c.modify(user_dn, {"userAccountControl": [(MODIFY_REPLACE, [514])]}) + + result = c.result + result["success"] = True + + return result + + def unlock_user( + self, + server, + domain, + port, + login_user, + password, + base_dn, + use_ssl, + samaccountname, + search_base, + ): + if search_base: + base_dn = search_base + + c = self.__ldap_connection(server, port, domain, login_user, password, use_ssl) + + c.search(base_dn, f"(SAMAccountName={samaccountname})") + + if len(c.entries) == 0: + return {"success": "false", "message": f"User {samaccountname} not found"} + + user_dn = c.entries[0].entry_dn + + c.modify(user_dn, {"userAccountControl": [(MODIFY_REPLACE, [0])]}) + + result = c.result + result["success"] = True + + return result + + def change_user_password_at_next_login( + self, + server, + domain, + port, + login_user, + password, + base_dn, + use_ssl, + samaccountname, + search_base, + new_user_password, + repeat_new_user_password, + ): + if search_base: + base_dn = search_base + + if str(new_user_password) != str(repeat_new_user_password): + return { + "success": "false", + "message": 
"new_user_password and repeat_new_user_password does not match.", + } + + c = self.__ldap_connection(server, port, domain, login_user, password, use_ssl) + + c.search(base_dn, f"(SAMAccountName={samaccountname})") + + if len(c.entries) == 0: + return {"success": "false", "message": f"User {samaccountname} not found"} + + user_dn = c.entries[0].entry_dn + + c.modify(user_dn, {"pwdLastSet": (MODIFY_REPLACE, [0])}) + c.extend.microsoft.modify_password( + user_dn, new_user_password.encode("utf-16-le") + ) + + result = c.result + result["success"] = True + + return result + + def add_user_to_group( + self, + server, + domain, + port, + login_user, + password, + base_dn, + use_ssl, + samaccountname, + search_base, + group_name, + ): + if search_base: + base_dn = search_base + + c = self.__ldap_connection(server, port, domain, login_user, password, use_ssl) + + c.search(base_dn, f"(SAMAccountName={samaccountname})") + if len(c.entries) == 0: + return {"success": "false", "message": f"User {samaccountname} not found"} + user_dn = c.entries[0].entry_dn + + search_filter = f"(&(objectClass=group)(cn={group_name}))" + c.search(base_dn, search_filter, attributes=["distinguishedName"]) + if len(c.entries) == 0: + return {"success": "false", "message": f"Group {group_name} not found"} + group_dn = c.entries[0]["distinguishedName"] + print(group_dn) + + res = addUsersInGroups(c, user_dn, str(group_dn), fix=True) + if res == True: + return { + "success": "true", + "message": f"User {samaccountname} was added to group {group_name}", + } + else: + return {"success": "false", "message": f"Could not add user to group"} + + def remove_user_from_group( + self, + server, + domain, + port, + login_user, + password, + base_dn, + use_ssl, + samaccountname, + search_base, + group_name, + ): + if search_base: + base_dn = search_base + + c = self.__ldap_connection(server, port, domain, login_user, password, use_ssl) + + c.search(base_dn, f"(SAMAccountName={samaccountname})") + if 
len(c.entries) == 0: + return {"success": "false", "message": f"User {samaccountname} not found"} + + user_dn = c.entries[0].entry_dn + search_filter = f"(&(objectClass=group)(cn={group_name}))" + c.search(base_dn, search_filter, attributes=["distinguishedName"]) + if len(c.entries) == 0: + return {"success": "false", "message": f"Group {group_name} not found"} + + group_dn = c.entries[0]["distinguishedName"] + print(group_dn) + res = removeUsersFromGroups(c, user_dn, str(group_dn), fix=True) + if res == True: + return { + "success": "true", + "message": f"User {samaccountname} was removed from group {group_name}", + } + else: + return {"success": "false", "message": f"Could not remove user to group"} + if __name__ == "__main__": ActiveDirectory.run() diff --git a/archive-org/1.0.0/requirements.txt b/archive-org/1.0.0/requirements.txt index 01635895..95feca5f 100644 --- a/archive-org/1.0.0/requirements.txt +++ b/archive-org/1.0.0/requirements.txt @@ -1,2 +1,2 @@ -requests==2.25.1 +requests==2.32.4 savepagenow==1.1.1 \ No newline at end of file diff --git a/archive-today/1.0.0/requirements.txt b/archive-today/1.0.0/requirements.txt index 150b17f3..1f5f6ba3 100644 --- a/archive-today/1.0.0/requirements.txt +++ b/archive-today/1.0.0/requirements.txt @@ -1,2 +1,2 @@ -requests==2.25.1 +requests==2.32.4 archiveis==0.0.9 \ No newline at end of file diff --git a/aws-cloudwatch/1.0.0/requirements.txt b/aws-cloudwatch/1.0.0/requirements.txt index 9c1b76e6..f41ce43e 100644 --- a/aws-cloudwatch/1.0.0/requirements.txt +++ b/aws-cloudwatch/1.0.0/requirements.txt @@ -1,2 +1,2 @@ boto3==1.20.20 -requests==2.25.1 +requests==2.32.4 diff --git a/aws-dynamodb/1.0.0/requirements.txt b/aws-dynamodb/1.0.0/requirements.txt index 97f3f4de..f66253b2 100644 --- a/aws-dynamodb/1.0.0/requirements.txt +++ b/aws-dynamodb/1.0.0/requirements.txt @@ -1,3 +1,3 @@ boto3==1.16.59 bson==0.5.10 -requests==2.25.1 \ No newline at end of file +requests==2.32.4 \ No newline at end of file diff --git 
a/aws-ec2/1.0.0/requirements.txt b/aws-ec2/1.0.0/requirements.txt index 9c1b76e6..f41ce43e 100644 --- a/aws-ec2/1.0.0/requirements.txt +++ b/aws-ec2/1.0.0/requirements.txt @@ -1,2 +1,2 @@ boto3==1.20.20 -requests==2.25.1 +requests==2.32.4 diff --git a/aws-guardduty/1.0.0/requirements.txt b/aws-guardduty/1.0.0/requirements.txt index f9c46b04..70356370 100644 --- a/aws-guardduty/1.0.0/requirements.txt +++ b/aws-guardduty/1.0.0/requirements.txt @@ -1,2 +1,3 @@ boto3==1.16.59 -requests==2.25.1 +requests==2.32.4 +shuffle_sdk diff --git a/aws-guardduty/1.0.0/src/app.py b/aws-guardduty/1.0.0/src/app.py index fd90f85a..f818584f 100644 --- a/aws-guardduty/1.0.0/src/app.py +++ b/aws-guardduty/1.0.0/src/app.py @@ -7,7 +7,9 @@ import botocore from botocore.config import Config -from walkoff_app_sdk.app_base import AppBase +#from walkoff_app_sdk.app_base import AppBase +from shuffle_sdk import AppBase + class AWSGuardduty(AppBase): __version__ = "1.0.0" diff --git a/aws-iam/1.0.0/requirements.txt b/aws-iam/1.0.0/requirements.txt index 06ef1c78..e2a84f1d 100644 --- a/aws-iam/1.0.0/requirements.txt +++ b/aws-iam/1.0.0/requirements.txt @@ -1,2 +1,2 @@ boto3==1.16.59 -requests==2.25.1 \ No newline at end of file +requests==2.32.4 \ No newline at end of file diff --git a/aws-lambda/1.0.0/requirements.txt b/aws-lambda/1.0.0/requirements.txt index f9c46b04..423ace70 100644 --- a/aws-lambda/1.0.0/requirements.txt +++ b/aws-lambda/1.0.0/requirements.txt @@ -1,2 +1,2 @@ boto3==1.16.59 -requests==2.25.1 +requests==2.32.4 diff --git a/aws-s3/1.0.0/requirements.txt b/aws-s3/1.0.0/requirements.txt index 00eb0244..8c127e25 100644 --- a/aws-s3/1.0.0/requirements.txt +++ b/aws-s3/1.0.0/requirements.txt @@ -1,3 +1,3 @@ boto3==1.16.59 bson==0.5.10 -requests==2.25.1 +requests==2.32.4 diff --git a/aws-securityhub/1.0.0/requirements.txt b/aws-securityhub/1.0.0/requirements.txt index 06ef1c78..e2a84f1d 100644 --- a/aws-securityhub/1.0.0/requirements.txt +++ b/aws-securityhub/1.0.0/requirements.txt @@ 
-1,2 +1,2 @@ boto3==1.16.59 -requests==2.25.1 \ No newline at end of file +requests==2.32.4 \ No newline at end of file diff --git a/aws-ses/1.0.0/requirements.txt b/aws-ses/1.0.0/requirements.txt index d46a14e7..216ec100 100644 --- a/aws-ses/1.0.0/requirements.txt +++ b/aws-ses/1.0.0/requirements.txt @@ -1,2 +1,2 @@ -requests==2.25.1 +requests==2.32.4 boto3==1.16.59 \ No newline at end of file diff --git a/aws-waf/1.0.0/requirements.txt b/aws-waf/1.0.0/requirements.txt index f9c46b04..423ace70 100644 --- a/aws-waf/1.0.0/requirements.txt +++ b/aws-waf/1.0.0/requirements.txt @@ -1,2 +1,2 @@ boto3==1.16.59 -requests==2.25.1 +requests==2.32.4 diff --git a/breachsense/1.0.0/requirements.txt b/breachsense/1.0.0/requirements.txt index fd7d3e06..480d0c4b 100644 --- a/breachsense/1.0.0/requirements.txt +++ b/breachsense/1.0.0/requirements.txt @@ -1 +1 @@ -requests==2.25.1 \ No newline at end of file +requests==2.32.4 \ No newline at end of file diff --git a/checkpoint/1.0.0/requirements.txt b/checkpoint/1.0.0/requirements.txt index fd7d3e06..480d0c4b 100644 --- a/checkpoint/1.0.0/requirements.txt +++ b/checkpoint/1.0.0/requirements.txt @@ -1 +1 @@ -requests==2.25.1 \ No newline at end of file +requests==2.32.4 \ No newline at end of file diff --git a/cortex/1.0.0/requirements.txt b/cortex/1.0.0/requirements.txt index 1f296044..fbca76ef 100644 --- a/cortex/1.0.0/requirements.txt +++ b/cortex/1.0.0/requirements.txt @@ -1,3 +1,3 @@ -requests==2.25.1 +requests==2.32.4 python-magic==0.4.18 cortex4py==2.0.1 diff --git a/cylance/1.0.0/requirements.txt b/cylance/1.0.0/requirements.txt deleted file mode 100644 index d7aa9605..00000000 --- a/cylance/1.0.0/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -cryptography==3.3.2 -requests==2.25.1 -PyJWT==1.7.1 diff --git a/databasemanager/1.0.0/requirements.txt b/databasemanager/1.0.0/requirements.txt index fcb1a934..3edf3f44 100644 --- a/databasemanager/1.0.0/requirements.txt +++ b/databasemanager/1.0.0/requirements.txt @@ -1,3 +1,3 @@ 
-requests==2.25.1 -mysql-connector-python==8.0.26 +requests==2.32.4 +mysql-connector-python==9.1.0 diff --git a/email/1.1.0/api.yaml b/email/1.1.0/api.yaml deleted file mode 100644 index 7259f92f..00000000 --- a/email/1.1.0/api.yaml +++ /dev/null @@ -1,253 +0,0 @@ -walkoff_version: 1.1.0 -app_version: 1.1.0 -name: email -description: Email app -tags: - - email -categories: - - communication -contact_info: - name: "@frikkylikeme" - url: https://github.com/frikky - email: "frikky@shuffler.io" -actions: - - name: send_email_shuffle - description: Send an email from Shuffle - parameters: - - name: apikey - description: Your https://shuffler.io apikey - multiline: false - example: "https://shuffler.io apikey" - required: true - schema: - type: string - - name: recipients - description: The recipients of the email - multiline: false - example: "test@example.com,frikky@shuffler.io" - required: true - schema: - type: string - - name: subject - description: The subject to use - multiline: false - example: "SOS this is an alert :o" - required: true - schema: - type: string - - name: body - description: The body to add to the email - multiline: true - example: "This is an email alert from Shuffler.io :)" - required: true - schema: - type: string - returns: - schema: - type: string - - name: send_email_smtp - description: Send an email with SMTP - parameters: - - name: username - description: The SMTP login username - multiline: false - example: "frikky@shuffler.io" - required: true - schema: - type: string - - name: password - description: The password to log in with SMTP - multiline: false - example: "******************" - required: true - schema: - type: string - - name: smtp_host - description: The host of the SMTP - multiline: false - example: "smtp-mail.outlook.com" - required: true - schema: - type: string - - name: smtp_port - description: The port to use for SMTP - multiline: false - example: "587" - required: true - schema: - type: string - - name: recipient - 
description: The receiver(s) of the email - multiline: false - example: "frikky@shuffler.io,frikky@shuffler.io" - required: true - schema: - type: string - - name: subject - description: The subject of the email - multiline: false - example: "This is a subject, hello there :)" - required: true - schema: - type: string - - name: body - description: The body to add to the email - multiline: true - example: "This is an email alert from Shuffler.io :)" - required: true - schema: - type: string - - name: attachments - description: Send files from shuffle as part of the email - multiline: false - example: "file_id1,file_id2,file_id3" - required: false - schema: - type: string - - name: ssl_verify - description: Whether to use TLS or not - example: "true" - required: false - options: - - true - - false - schema: - type: string - returns: - schema: - type: string - - name: get_emails_imap - description: Get emails using IMAP (e.g. imap.gmail.com / Outlook.office365.com) - parameters: - - name: username - description: The SMTP login username - multiline: false - example: "frikky@shuffler.io" - required: true - schema: - type: string - - name: password - description: The password to log in with SMTP - multiline: false - example: "******************" - required: true - schema: - type: string - - name: imap_server - description: The imap server host - multiline: false - example: "Outlook.office365.com" - required: true - schema: - type: string - - name: foldername - description: The folder to use, e.g. 
"inbox" - multiline: false - example: "inbox" - required: true - schema: - type: string - - name: amount - description: Amount of emails to retrieve - multiline: false - example: "10" - required: true - schema: - type: string - - name: unread - description: Retrieve just unread emails - multiline: false - options: - - "false" - - "true" - required: true - schema: - type: bool - - name: fields - description: Comma separated list of fields to be exported - multiline: false - example: "body, header.subject, header.header.message-id" - required: false - schema: - type: string - - name: include_raw_body - description: Include raw body in email export - multiline: false - options: - - "false" - - "true" - required: true - schema: - type: bool - - name: include_attachment_data - description: Include raw attachments in email export - multiline: false - options: - - "false" - - "true" - required: true - schema: - type: bool - - name: upload_email_shuffle - description: Upload email in shuffle, return uid - multiline: false - options: - - "false" - - "true" - required: true - schema: - type: bool - - name: upload_attachments_shuffle - description: Upload attachments in shuffle, return uids - multiline: false - options: - - "false" - - "true" - required: true - schema: - type: bool - - name: ssl_verify - description: Whether to use TLS or not - example: "true" - required: false - options: - - true - - false - schema: - type: string - - name: parse_email_file - description: Takes a file from shuffle and analyzes it if it's a valid .eml or .msg - parameters: - - name: file_id - description: file id - required: true - multiline: true - example: 'adf5e3d0fd85633be17004735a0a119e' - schema: - type: string - - name: file_extension - description: Extension of file you want to convert - required: true - options: - - eml - - msg - example: 'eml' - schema: - type: string - - name: parse_email_headers - description: - parameters: - - name: email_headers - description: Email headers - 
required: true - multiline: true - example: 'Email Headers' - schema: - type: string - returns: - schema: - type: string - returns: - schema: - type: string -large_image: data:image/png;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/4QAYRXhpZgAASUkqAAgAAAAAAAAAAAAAAP/hAytodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoV2luZG93cykiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6Nzg4QTJBMjVEMDI1MTFFN0EwQUVDODc5QjYyQkFCMUQiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6Nzg4QTJBMjZEMDI1MTFFN0EwQUVDODc5QjYyQkFCMUQiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo3ODhBMkEyM0QwMjUxMUU3QTBBRUM4NzlCNjJCQUIxRCIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo3ODhBMkEyNEQwMjUxMUU3QTBBRUM4NzlCNjJCQUIxRCIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/Pv/bAEMAAwICAgICAwICAgMDAwMEBgQEBAQECAYGBQYJCAoKCQgJCQoMDwwKCw4LCQkNEQ0ODxAQERAKDBITEhATDxAQEP/AAAsIAGQAZAEBEQD/xAAeAAABAwUBAQAAAAAAAAAAAAAAAQgJAgQFBwoGA//EAEoQAAECBAMEBwMDEQgDAAAAAAECAwAEBREGBxIIITFRCRMiMkFSYRRicSNCQxUWGBk2U1dYc3WBlJWzwdLTJDNjZXKDkeEmgqH/2gAIAQEAAD8Ak8JKipSlBwuDSpSeDw8qeRguQQrUAQNAV4JH3s+sA7OnT8n1fcv9Bfzc7wWAAToIAOsJ8Uq++H0gI1XSUlYWdSkji6fMnkBAdS9SidesWUpI3OjyjkYXtXCgbEDSFW3JT5D6+sIOzp0/J9X3NX0F/NzvBYABOggA6wnxSr74fSAjVdJSVhZ1KSOLp8yeQEBJUSVKCysaVKHB0eVPIwXIIVqAIGgK8Ej72fWFS640kNtzjcukcGli6k/GENwVagAQO0EcEjmj1g33AATe1wD3SnmffgG/Tp337mv535T+EaB2k9tzInZilVyuMq+up4iUjWxh+mFLs8s23dbv0stnmsgnwBiNPOHpddozHExMSmWsrSsA0tZIaMs0JudCfV50FKf/AFQPjDY
sT7S+0LjJ8zGJc68aTqiSrSqtPoQD6ISoJH6BGFkM5c3qW8Jim5qYvlXArUFNVuZSb89y43Tlv0ju15ltMtKZzUmsRSbZGuSxC0mebcHIrV8r/wALEP02c+l2ywzAmZXDGeND+saqvEIRVWXFP0x1Z3AOE9thPx1JHioQ/un1Kn1eQZqtLn2ZuSmkJdamZVwOIWlQuktKTuUg8xFybgnUACB2gjgkc0e9BvuAAm9rgHulPM+/CpDik3bal1p8FPd8/GEtp7Ojq+r36OPUe96wWv2dF79vR5/8T/qI/ukM6RMZMGcyWyUqDMzjZ5vRWKwmy26UlQ3IQOBmLHx3IFibncIeqvWKtiCqTVbrtSmahUJ11T0zNTLqnHXnFG5UpSiSSeZi0ggggh2GxRt8Y92X69K4cr83N1zLuadCZumLXrcp4Ue0/KXPZPiW+6r0O+JxsF4zwvmFhSl42wXV5eo0Sqy6ZySmWFakJbUO/wDHiCk7wQQd4jN2v2dF79vR5v8AE/6g6rrflPYfar/S69Or9EIAAE6QoAHshfFJ5r9Ibpt3bTrOzBkbPYhpb7f11V5aqZh9le8iZUm65i3i20ntcirQPGIA6nU6jWqlNVirzr05PTzy5iZmHllTjrqyVKWoneSSSSYtoIIIIIIkI6J/axmsA4+Rs84yqZ+tvFb5VQ1vL7EjVCNyN/Bt4C1vOEn5xiYndYghVr3IHeKuY9yKVJbJu63MrV4qY7h+EVA6rEKKwvclSuLp8quQiDnpWM5ZnMrafn8HS04pykZfy6KMw2D2BNEByZUPXWQj/aEM0h3uwvk5PYmoeLM4HcnqHmth/CU1KytdwrNy5M+uUdQtZmZBYIu83oN2z3wbcbWk7ym2Zej6zuwXJ49y5yXwbUqXNgpUPZlpelnh32XmyrU24k7ik7/0WMey+wI2OPxesJfqyv5oPsCNjj8XrCX6sr+aD7AjY4/F6wl+rq/mhs2b2SGzXjPGc3s9bKuzngiq4zZGjEWJ3pRTlKwkyrcVOKCrOzVr6GRex73AiIe65TvqRWqhSet632Kadl9enTq0LKb28L24R86ZUp6jVKUrFMmVy85IvtzMu8g2U24hQUlQPMEAx0h7O+abGdWR+DM0mnAF16lMuzRTxamgNDzQHIOJWI2IpxDZ0Lm3ZdQ4tti6U/CEdeDaHJhxYWAklahwdAF9KeRjmXzRxJMYxzLxXiyacUt2sVqdnlFRuflHlq/jHmIlv6D37is1T/mlM/cvQ5zNnZ7xtl/jWc2htlFUtIYrfs7iXCLy+rpeLGk7zcDczN2vpdFrnvcSTsrIPaHwNtBYcmKlhsv02t0h4ydfw7UE9XUKPOJJC2Xmzv3KBAWNyrbvEDaDjjbTa3XVpQhAKlKUbBIHEk+ENLxdnDj/AGr8T1LJ7Zgra6NgymPmSxhmU0LhB+kkaV4OPkGynu6gG4PAlwGUuT2AMjsDy2BMuqGin06XBcdWTrfm3j3333D2nHFHeVH/AOCwjmnxr92Ve/Oc1+9VGGibDogMVP1vZWmKI+4b4dxJOybS1G4S06ht7QPipxf/ADD4kurbGhE21LpHBtwXUn4xbVNpb9OnGAAFrl3EkI4JukgFHrHMFWpdyUrE/KvAhxmZdbUDxBCyDFnEuHQej/wjNU/5rTP3L0PQ2hs1cR4f+pOUWU/VTGZeOtbFK1jW3SZNO6YqkwPBtlJ7IPfcKUi++NdYh2H5LB1CoWLtnXFD2Fs1sLS6rV+ZUXG8SqWouPtVVP0yXnCo6+8gqFtwAGAbkdpzbFcTgXNLB1Qyay9pBEri1iXm9VQxPNo/vJeVdT/dyJ3XcG9YNgTvt6TFODKZsY4nlc2MsaEmRypn25em45oMi2eqpiUANsVllA8gsiYtvUiyzcpJhz8rOylSkGqjITTUzKzTKXmHmlhSHG1C6VJI3EEEEGOXnGv3ZV785zX71UYaJjehfk3mdn/GM4oHRM4sWEBfcsiUZ1Eeu+JBUhxSbtt
S60+Cnu+fjCAaDbR1fV9rRx6n3vW8c5+2Bl1MZV7TGYmDXmlIaZrkxNyhIsFy0wrr2lD00OJjT0SfdE5mphvJnIrOXHmJutdalatSmZSSlxqmKhNuNOpYlWU8VOOLISAOdzuBiQLZ5yrxJQTVs382Q0/mVjrQ9VAk6m6RJp3y9Llz4NtA9ojvuFSjfdG54N8fGekZOpyUxTajKtTMrNNLYfYdSFIdbULKSpJ3EEEgiG4ZXz07s0Zis7OuJpp1zAmJFvP5cVSYWSJVYut2hurPzkC62Ce83dHFFogGxr92Ve/Oc1+9VGGiezo0MupnLzY/wezPy5bm8RqmMROsqFiUvr+SWf8AaQ2besOl6rrflPYfar/S69Or9EIAAE6QoAHshfFJ5r9Ii76Y7Z4mJgUHaSw7IqWhlCKHiLQN6RcmWmP9Nypsn8mIizj3GWGdOY2T9Xka1gSuJlH6bO/VKWbflm5hlubDam0v9U4lSC4lClBKiLp1G1iY3v8AbSdtr8LTP7Ekf6UL9tJ22vwss/sOR/pQfbSdtr8LLP7Dkf6UH20nba/C0z+w5H+lHmMxOkB2qc1MOKwrjjMNmfkPaGZxrTSZRl1iYZWFtPNOobC21pULhSSDx8DDe5uamJ6aenZt1Tr8w4p11auKlqNyT8SY2ZszZH1raIzqw1ldSG1hqozSXKlMAHTKyLZCn3VHwsi4HvKSPGOjSj0im0CjyVBpMqJen06XalZZhG7Q22kJQE+4AAIulJbJu63MrV4qY7h+EVA6rEKKwvclSuLp8quQjB45wVhrMbB9YwNjGnIn6JW5VyQnWVjihYtoTysbEKHAgGOf7a72U8abKeZszhStMOzVAnVreoNXCfk5uXvuSojcHUAgLTz3jcRGi4IIIIIuqVSqnXKnK0ajSD89PzzyJeWlpdsrcecUbJQlI3kkkAAROd0eGxqjZiy7XiLF8sy5j/FjaFVEiyhJMDeiSB9D2lkbiqw4JEO58CrUQAdJV4pPkHu+sIpxDZ0Lm3ZdQ4tti6U/CFJKiVKUFle5Sk8HR5U8jBcghWoAgaArwSPIfe9Y8PnJkvl1nzgScy7zMoDVQpMyLtlXZekHfmvNucULHgR8DcEiIZtqro1s58gZucxFg6QmsbYJQVOonpFkqnJNrw9pYTcgAfSJuk8Tp4Qz9SSklKgQQbEHwgggjYeTOz9m7n/iFGHMq8Fz1YdCgJiZSjRKSiT8955XYQB6m58AYmN2LejtwJsyoYxri52XxVmC43unA3/ZpAEb0ygVvv4F09ojgEgm7wSSq5Kgsr3KUODo8qeRguQQrUAQNAV4JHkPvesKl1bY0Im2pdI4NuC6k/GENwVagAQO0EcEjmj1g33AATe1wD3SnmffgG/Tp337mv535T+EG4i4KiCbAnvE8j7kaHzf2Hdl/O1+YqONcraexU3jd6p0i8jNFfPU1ZLnxWlUNjxL0LOTs6+tzC+bWLKSkHUWpmXl5xKR4BJAbJjD0/oTcEoeH1Uz5rbzfe0sUZlolH+pTigFelo3Nlr0UuyZgSYZn6vQ6zjKaQQpr6uz3yBI462WQhNvRVxDscMYVwvgujMYfwfh+n0Wly/ZZlJCVQw2k8tCABp9YypsL6iQAe0U8Unkj3YDcE6gAQO0EcEjmj3oN9wAE3tcA90p5n34VIcUm7bUutPgp7vn4wOpS05MNtiyZdAW0PKo+MASkuIbIulbPXKHNfOEa+V9m6zf7Vq633rcIpSoqbQ6T2lvdQo80coVxRbRMLRuVLuBts+VJ4iKnEhtb6ECwl0BxseVR8YAlJcQ2RdK2euUOa+cI18r7N1m/wBq1db71uEUpUVNodJ7S3uoUeaOUK4otomFo3Kl3A22fKk8RFTiQ2t9CBYS6A42PKo+MASkuIbIulbPXKHNfOPtKSkvNy6JiYaC3F71KJO+P//Z diff --git a/email/1.1.0/requirements.txt 
b/email/1.1.0/requirements.txt deleted file mode 100644 index b18be475..00000000 --- a/email/1.1.0/requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -requests==2.25.1 -glom==20.11.0 -requests==2.25.1 -eml-parser==1.17.0 -msg-parser==1.2.0 -mail-parser==3.15.0 -extract-msg==0.23.1 -jsonpickle==2.0.0 diff --git a/email/1.1.0/src/app.py b/email/1.1.0/src/app.py deleted file mode 100644 index 8b2868c8..00000000 --- a/email/1.1.0/src/app.py +++ /dev/null @@ -1,360 +0,0 @@ -import json -import uuid -import socket -import asyncio -import requests -import datetime -import base64 -import imaplib -import smtplib -import eml_parser -import time -import random -import eml_parser -import mailparser -import extract_msg -import jsonpickle - -from glom import glom -from msg_parser import MsOxMessage -from email.mime.multipart import MIMEMultipart -from email.mime.text import MIMEText -from email.mime.application import MIMEApplication - -from walkoff_app_sdk.app_base import AppBase - -def json_serial(obj): - if isinstance(obj, datetime.datetime): - serial = obj.isoformat() - return serial - -def default(o): - """helpers to store item in json - arguments: - - o: field of the object to serialize - returns: - - valid serialized value for unserializable fields - """ - if isinstance(o, (datetime.date, datetime.datetime)): - return o.isoformat() - if isinstance(o, set): - return list(o) - if isinstance(o, bytes): - return o.decode("utf-8") - - -class Email(AppBase): - __version__ = "1.1.0" - app_name = "email" - - def __init__(self, redis, logger, console_logger=None): - """ - Each app should have this __init__ to set up Redis and logging. 
- :param redis: - :param logger: - :param console_logger: - """ - super().__init__(redis, logger, console_logger) - - # This is an email function of Shuffle - def send_email_shuffle(self, apikey, recipients, subject, body): - targets = [recipients] - if ", " in recipients: - targets = recipients.split(", ") - elif "," in recipients: - targets = recipients.split(",") - - data = {"targets": targets, "body": body, "subject": subject, "type": "alert"} - - url = "https://shuffler.io/functions/sendmail" - headers = {"Authorization": "Bearer %s" % apikey} - return requests.post(url, headers=headers, json=data).text - - def send_email_smtp( - self, username, password, smtp_host, recipient, subject, body, smtp_port, attachments="", ssl_verify="True" - ): - if type(smtp_port) == str: - try: - smtp_port = int(smtp_port) - except ValueError: - return "SMTP port needs to be a number (Current: %s)" % smtp_port - - try: - s = smtplib.SMTP(host=smtp_host, port=smtp_port) - except socket.gaierror as e: - return f"Bad SMTP host or port: {e}" - - if ssl_verify == "false" or ssl_verify == "False": - pass - else: - s.starttls() - - if len(username) > 0 or len(password) > 0: - try: - s.login(username, password) - except smtplib.SMTPAuthenticationError as e: - return f"Bad username or password: {e}" - - # setup the parameters of the message - msg = MIMEMultipart() - msg["From"] = username - msg["To"] = recipient - msg["Subject"] = subject - msg.attach(MIMEText(body, "html")) - - # Read the attachments - attachment_count = 0 - try: - if attachments != None and len(attachments) > 0: - print("Got attachments: %s" % attachments) - attachmentsplit = attachments.split(",") - - #attachments = parse_list(attachments, splitter=",") - #print("Got attachments2: %s" % attachmentsplit) - print("Before loop") - files = [] - for file_id in attachmentsplit: - print(f"Looping {file_id}") - file_id = file_id.strip() - new_file = self.get_file(file_id) - print(f"New file: {new_file}") - try: - part = 
MIMEApplication( - new_file["data"], - Name=new_file["filename"], - ) - part["Content-Disposition"] = f"attachment; filename=\"{new_file['filename']}\"" - msg.attach(part) - attachment_count += 1 - except Exception as e: - print(f"[WARNING] Failed to attach {file_id}: {e}") - - - #files.append(new_file) - - #return files - #data["attachments"] = files - except Exception as e: - print(f"Error in attachment parsing for email: {e}") - - - try: - s.send_message(msg) - except smtplib.SMTPDataError as e: - return { - "success": False, - "reason": f"Failed to send mail: {e}" - } - - print("Successfully sent email with subject %s to %s" % (subject, recipient)) - return { - "success": True, - "reason": "Email sent to %s!" % recipient, - "attachments": attachment_count - } - - def get_emails_imap( - self, - username, - password, - imap_server, - foldername, - amount, - unread, - fields, - include_raw_body, - include_attachment_data, - upload_email_shuffle, - upload_attachments_shuffle, - ssl_verify="True" - ): - def path_to_dict(path, value=None): - def pack(parts): - return ( - {parts[0]: pack(parts[1:]) if len(parts) > 1 else value} - if len(parts) > 1 - else {parts[0]: value} - ) - - return pack(path.split(".")) - - def merge(d1, d2): - for k in d2: - if k in d1 and isinstance(d1[k], dict) and isinstance(d2[k], dict): - merge(d1[k], d2[k]) - else: - d1[k] = d2[k] - - if type(amount) == str: - try: - amount = int(amount) - except ValueError: - return "Amount needs to be a number, not %s" % amount - - try: - email = imaplib.IMAP4_SSL(imap_server) - except ConnectionRefusedError as error: - try: - email = imaplib.IMAP4(imap_server) - - if ssl_verify == "false" or ssl_verify == "False": - pass - else: - email.starttls() - except socket.gaierror as error: - return "Can't connect to IMAP server %s: %s" % (imap_server, error) - except socket.gaierror as error: - return "Can't connect to IMAP server %s: %s" % (imap_server, error) - - try: - email.login(username, password) - 
except imaplib.IMAP4.error as error: - return "Failed to log into %s: %s" % (username, error) - - email.select(foldername) - unread = True if unread.lower().strip() == "true" else False - try: - # IMAP search queries, e.g. "seen" or "read" - # https://www.rebex.net/secure-mail.net/features/imap-search.aspx - mode = "(UNSEEN)" if unread else "ALL" - thistype, data = email.search(None, mode) - except imaplib.IMAP4.error as error: - return "Couldn't find folder %s." % (foldername) - - email_ids = data[0] - id_list = email_ids.split() - if id_list == None: - return "Couldn't retrieve email. Data: %s" % data - - try: - print("LIST: ", len(id_list)) - except TypeError: - return "Error getting email. Data: %s" % data - - include_raw_body = True if include_raw_body.lower().strip() == "true" else False - include_attachment_data = ( - True if include_attachment_data.lower().strip() == "true" else False - ) - upload_email_shuffle = ( - True if upload_email_shuffle.lower().strip() == "true" else False - ) - upload_attachments_shuffle = ( - True if upload_attachments_shuffle.lower().strip() == "true" else False - ) - - # Convert of mails in json - emails = [] - ep = eml_parser.EmlParser( - include_attachment_data=include_attachment_data - or upload_attachments_shuffle, - include_raw_body=include_raw_body, - ) - try: - for i in range(len(id_list) - 1, len(id_list) - amount - 1, -1): - resp, data = email.fetch(id_list[i], "(RFC822)") - error = None - - if resp != "OK": - print("Failed getting %s" % id_list[i]) - continue - - if data == None: - continue - - output_dict = {} - parsed_eml = ep.decode_email_bytes(data[0][1]) - - if fields and fields.strip() != "": - for field in fields.split(","): - field = field.strip() - merge( - output_dict, - path_to_dict( - field, - glom(parsed_eml, field, default=None), - ), - ) - else: - output_dict = parsed_eml - - # Add message-id as top returned field - output_dict["message-id"] = parsed_eml["header"]["header"][ - "message-id" - ][0] - - if 
upload_email_shuffle: - email_up = [{"filename": "email.msg", "data": data[0][1]}] - email_id = self.set_files(email_up) - output_dict["email_uid"] = email_id[0] - - if upload_attachments_shuffle: - atts_up = [ - { - "filename": x["filename"], - "data": base64.b64decode(x["raw"]), - } - for x in parsed_eml["attachment"] - ] - atts_ids = self.set_files(atts_up) - output_dict["attachments_uids"] = atts_ids - - emails.append(output_dict) - except Exception as err: - return "Error during email processing: {}".format(err) - return json.dumps(emails, default=default) - - def parse_email_file(self, file_id, file_extension): - file_path = self.get_file(file_id) - if file_path["success"] == False: - return { - "success": False, - "reason": "Couldn't get file with ID %s" % file_id - } - - print("File: %s" % file_path) - if file_extension.lower() == 'eml': - print('working with .eml file') - ep = eml_parser.EmlParser() - try: - parsed_eml = ep.decode_email_bytes(file_path['data']) - return json.dumps(parsed_eml, default=json_serial) - except Exception as e: - return {"Success":"False","Message":f"Exception occured: {e}"} - elif file_extension.lower() == 'msg': - print('working with .msg file') - try: - msg = MsOxMessage(file_path['data']) - msg_properties_dict = msg.get_properties() - print(msg_properties_dict) - frozen = jsonpickle.encode(msg_properties_dict) - return frozen - except Exception as e: - return {"Success":"False","Message":f"Exception occured: {e}"} - else: - return {"Success":"False","Message":f"No file handler for file extension {file_extension}"} - - def parse_email_headers(self, email_headers): - try: - email_headers = bytes(email_headers,'utf-8') - ep = eml_parser.EmlParser() - parsed_headers = ep.decode_email_bytes(email_headers) - return json.dumps(parsed_headers, default=json_serial) - except Exception as e: - raise Exception(e) - - -# Run the actual thing after we've checked params -def run(request): - action = request.get_json() - authorization_key = 
action.get("authorization") - current_execution_id = action.get("execution_id") - - if action and "name" in action and "app_name" in action: - Email.run(action) - return f'Attempting to execute function {action["name"]} in app {action["app_name"]}' - else: - return f"Invalid action" - - -if __name__ == "__main__": - Email.run() diff --git a/email/1.2.0/requirements.txt b/email/1.2.0/requirements.txt deleted file mode 100644 index 926027e8..00000000 --- a/email/1.2.0/requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -requests==2.25.1 -glom==20.11.0 -eml-parser==1.17.0 -msg-parser==1.2.0 -mail-parser==3.15.0 -extract-msg==0.30.9 -jsonpickle==2.0.0 - diff --git a/email/1.2.0/src/app.py b/email/1.2.0/src/app.py deleted file mode 100644 index 9fd12543..00000000 --- a/email/1.2.0/src/app.py +++ /dev/null @@ -1,454 +0,0 @@ -import json -import uuid -import socket -import asyncio -import requests -import datetime -import base64 -import imaplib -import smtplib -import time -import random -import eml_parser -import mailparser -import extract_msg -import jsonpickle - -from glom import glom -from msg_parser import MsOxMessage -from email.mime.multipart import MIMEMultipart -from email.mime.text import MIMEText -from email.mime.application import MIMEApplication - -from walkoff_app_sdk.app_base import AppBase - -def json_serial(obj): - if isinstance(obj, datetime.datetime): - serial = obj.isoformat() - return serial - -def default(o): - """helpers to store item in json - arguments: - - o: field of the object to serialize - returns: - - valid serialized value for unserializable fields - """ - if isinstance(o, (datetime.date, datetime.datetime)): - return o.isoformat() - if isinstance(o, set): - return list(o) - if isinstance(o, bytes): - try: - return o.decode("utf-8") - except: - print("Failed parsing utf-8 string") - return o - - -class Email(AppBase): - __version__ = "1.2.0" - app_name = "email" - - def __init__(self, redis, logger, console_logger=None): - """ - Each app should 
have this __init__ to set up Redis and logging. - :param redis: - :param logger: - :param console_logger: - """ - super().__init__(redis, logger, console_logger) - - # This is an email function of Shuffle - def send_email_shuffle(self, apikey, recipients, subject, body): - targets = [recipients] - if ", " in recipients: - targets = recipients.split(", ") - elif "," in recipients: - targets = recipients.split(",") - - data = {"targets": targets, "body": body, "subject": subject, "type": "alert"} - - url = "https://shuffler.io/functions/sendmail" - headers = {"Authorization": "Bearer %s" % apikey} - return requests.post(url, headers=headers, json=data).text - - def send_email_smtp( - self, username, password, smtp_host, recipient, subject, body, smtp_port, attachments="", ssl_verify="True", body_type="html" - ): - if type(smtp_port) == str: - try: - smtp_port = int(smtp_port) - except ValueError: - return "SMTP port needs to be a number (Current: %s)" % smtp_port - - try: - s = smtplib.SMTP(host=smtp_host, port=smtp_port) - except socket.gaierror as e: - return f"Bad SMTP host or port: {e}" - - # This is not how it should work.. - # Port 465 & 587 = TLS. Sometimes 25. 
- if ssl_verify == "false" or ssl_verify == "False": - pass - else: - s.starttls() - - if len(username) > 0 or len(password) > 0: - try: - s.login(username, password) - except smtplib.SMTPAuthenticationError as e: - return { - "success": False, - "reason": f"Bad username or password: {e}" - } - - if body_type == "" or len(body_type) < 3: - body_type = "html" - - # setup the parameters of the message - msg = MIMEMultipart() - msg["From"] = username - msg["To"] = recipient - msg["Subject"] = subject - msg.attach(MIMEText(body, body_type)) - - # Read the attachments - attachment_count = 0 - try: - if attachments != None and len(attachments) > 0: - print("Got attachments: %s" % attachments) - attachmentsplit = attachments.split(",") - - #attachments = parse_list(attachments, splitter=",") - #print("Got attachments2: %s" % attachmentsplit) - print("Before loop") - files = [] - for file_id in attachmentsplit: - print(f"Looping {file_id}") - file_id = file_id.strip() - new_file = self.get_file(file_id) - print(f"New file: {new_file}") - try: - part = MIMEApplication( - new_file["data"], - Name=new_file["filename"], - ) - part["Content-Disposition"] = f"attachment; filename=\"{new_file['filename']}\"" - msg.attach(part) - attachment_count += 1 - except Exception as e: - print(f"[WARNING] Failed to attach {file_id}: {e}") - - - #files.append(new_file) - - #return files - #data["attachments"] = files - except Exception as e: - self.logger.info(f"Error in attachment parsing for email: {e}") - - - try: - s.send_message(msg) - except smtplib.SMTPDataError as e: - return { - "success": False, - "reason": f"Failed to send mail: {e}" - } - - self.logger.info("Successfully sent email with subject %s to %s" % (subject, recipient)) - return { - "success": True, - "reason": "Email sent to %s!" 
% recipient, - "attachments": attachment_count - } - - def get_emails_imap( - self, - username, - password, - imap_server, - foldername, - amount, - unread, - fields, - include_raw_body, - include_attachment_data, - upload_email_shuffle, - upload_attachments_shuffle, - ssl_verify="True", - mark_as_read="False", - ): - def path_to_dict(path, value=None): - def pack(parts): - return ( - {parts[0]: pack(parts[1:]) if len(parts) > 1 else value} - if len(parts) > 1 - else {parts[0]: value} - ) - - return pack(path.split(".")) - - def merge(d1, d2): - for k in d2: - if k in d1 and isinstance(d1[k], dict) and isinstance(d2[k], dict): - merge(d1[k], d2[k]) - else: - d1[k] = d2[k] - - #if isinstance(mark_as_read, str): - # if str(mark_as_read).lower() == "true": - # mark_as_read = True - # else: - # mark_as_read = False - - if type(amount) == str: - try: - amount = int(amount) - except ValueError: - return { - "success": False, - "reason": "Amount needs to be a number, not %s" % amount, - } - - try: - email = imaplib.IMAP4_SSL(imap_server) - except ConnectionRefusedError as error: - try: - email = imaplib.IMAP4(imap_server) - - if ssl_verify == "false" or ssl_verify == "False" or ssl_verify == False: - pass - else: - email.starttls() - except socket.gaierror as error: - return { - "success": False, - "reason": "Can't connect to IMAP server %s: %s" % (imap_server, error), - } - except socket.gaierror as error: - return { - "success": False, - "reason": "Can't connect to IMAP server %s: %s" % (imap_server, error), - } - - try: - email.login(username, password) - except imaplib.IMAP4.error as error: - return { - "success": False, - "reason": "Failed to log into %s: %s" % (username, error), - } - - email.select(foldername) - unread = True if unread.lower().strip() == "true" else False - - try: - # IMAP search queries, e.g. 
"seen" or "read" - # https://www.rebex.net/secure-mail.net/features/imap-search.aspx - mode = "(UNSEEN)" if unread else "ALL" - thistype, data = email.search(None, mode) - except imaplib.IMAP4.error as error: - return { - "success": False, - "reason": "Couldn't find folder %s." % (foldername), - } - - email_ids = data[0] - id_list = email_ids.split() - if id_list == None: - return { - "success": False, - "reason": f"Couldn't retrieve email. Data: {data}", - } - - #try: - # self.logger.info(f"LIST: {id_list}") - #except TypeError: - # return { - # "success": False, - # "reason": "Error getting email. Data: %s" % data, - # } - - mark_as_read = True if str(mark_as_read).lower().strip() == "true" else False - include_raw_body = True if str(include_raw_body).lower().strip() == "true" else False - include_attachment_data = ( - True if str(include_attachment_data).lower().strip() == "true" else False - ) - upload_email_shuffle = ( - True if str(upload_email_shuffle).lower().strip() == "true" else False - ) - upload_attachments_shuffle = ( - True if str(upload_attachments_shuffle).lower().strip() == "true" else False - ) - - # Convert of mails in json - emails = [] - ep = eml_parser.EmlParser( - include_attachment_data=include_attachment_data - or upload_attachments_shuffle, - include_raw_body=include_raw_body, - ) - - if len(id_list) == 0: - return { - "success": True, - "messages": [], - } - - try: - amount = len(id_list) if len(id_list) 1 or len(password) > 1: + try: + s.login(username, password) + except Exception as e: + if len(password) == 0: + self.logger.info("[WARNING] Auth failed (2). No password provided. Continuing as auth may not be necessary.") + else: + return { + "success": False, + "reason": f"General login exception: {e}" + } + + except smtplib.SMTPAuthenticationError as e: + if len(password) == 0: + self.logger.info("[WARNING] Auth failed. No password provided. 
Continuing as auth may not be necessary.") + else: + return { + "success": False, + "reason": f"Bad username or password: {e}" + } + + if body_type == "" or len(body_type) < 3: + body_type = "html" + + # setup the parameters of the message + self.logger.info("Pre mime multipart") + msg = MIMEMultipart() + msg["From"] = username + if len(username) == 0: + return { + "success": False, + "reason": "No username provided (sender). Please provide a username. Required since January 2025." + } + + msg["To"] = recipient + msg["Subject"] = subject + + if cc_emails: + msg["Cc"] = cc_emails + + self.logger.info("Pre mime check") + msg.attach(MIMEText(body, body_type)) + + # Read the attachments + attachment_count = 0 + self.logger.info("Pre attachments") + try: + if attachments != None and len(attachments) > 0: + print("Got attachments: %s" % attachments) + attachmentsplit = attachments.split(",") + + #attachments = parse_list(attachments, splitter=",") + #print("Got attachments2: %s" % attachmentsplit) + print("Before loop") + files = [] + for file_id in attachmentsplit: + print(f"Looping {file_id}") + file_id = file_id.strip() + new_file = self.get_file(file_id) + print(f"New file: {new_file}") + try: + part = MIMEApplication( + new_file["data"], + Name=new_file["filename"], + ) + part["Content-Disposition"] = f"attachment; filename=\"{new_file['filename']}\"" + msg.attach(part) + attachment_count += 1 + except Exception as e: + print(f"[WARNING] Failed to attach {file_id}: {e}") + + + #files.append(new_file) + + #return files + #data["attachments"] = files + except Exception as e: + self.logger.info(f"Error in attachment parsing for email: {e}") + + self.logger.info(f"Pre send msg: {msg}") + try: + s.send_message(msg) + except smtplib.SMTPDataError as e: + return { + "success": False, + "reason": f"Failed to send mail: {e}" + } + except Exception as e: + return { + "success": False, + "reason": f"Failed to send mail (2): {e}" + } + + self.logger.info("Successfully sent 
email with subject %s to %s" % (subject, recipient)) + return { + "success": True, + "reason": "Email sent to %s, %s!" % (recipient, cc_emails) if cc_emails else "Email sent to %s!" % recipient, + "attachments": attachment_count + } + + def get_emails_imap( + self, + username, + password, + imap_server, + foldername, + amount, + unread, + fields, + include_raw_body, + include_attachment_data, + upload_email_shuffle, + upload_attachments_shuffle, + ssl_verify="True", + mark_as_read="False", + ): + def path_to_dict(path, value=None): + def pack(parts): + return ( + {parts[0]: pack(parts[1:]) if len(parts) > 1 else value} + if len(parts) > 1 + else {parts[0]: value} + ) + + return pack(path.split(".")) + + def merge(d1, d2): + for k in d2: + if k in d1 and isinstance(d1[k], dict) and isinstance(d2[k], dict): + merge(d1[k], d2[k]) + else: + d1[k] = d2[k] + + #if isinstance(mark_as_read, str): + # if str(mark_as_read).lower() == "true": + # mark_as_read = True + # else: + # mark_as_read = False + + if type(amount) == str: + try: + amount = int(amount) + except ValueError: + return { + "success": False, + "reason": "Amount needs to be a number, not %s" % amount, + } + + try: + email = imaplib.IMAP4_SSL(imap_server) + except ConnectionRefusedError as error: + try: + email = imaplib.IMAP4(imap_server) + + if ssl_verify == "false" or ssl_verify == "False" or ssl_verify == False: + pass + else: + email.starttls() + except socket.gaierror as error: + return { + "success": False, + "reason": "Can't connect to IMAP server %s: %s" % (imap_server, error), + } + except socket.gaierror as error: + return { + "success": False, + "reason": "Can't connect to IMAP server %s: %s" % (imap_server, error), + } + + try: + email.login(username, password) + except imaplib.IMAP4.error as error: + return { + "success": False, + "reason": "Failed to log into %s: %s" % (username, error), + } + + email.select(foldername) + unread = True if unread.lower().strip() == "true" else False + + try: + # 
IMAP search queries, e.g. "seen" or "read" + # https://www.rebex.net/secure-mail.net/features/imap-search.aspx + mode = "(UNSEEN)" if unread else "ALL" + thistype, data = email.search(None, mode) + except imaplib.IMAP4.error as error: + return { + "success": False, + "reason": "Couldn't find folder %s." % (foldername), + } + + email_ids = data[0] + id_list = email_ids.split() + if id_list == None: + return { + "success": False, + "reason": f"Couldn't retrieve email. Data: {data}", + } + + #try: + # self.logger.info(f"LIST: {id_list}") + #except TypeError: + # return { + # "success": False, + # "reason": "Error getting email. Data: %s" % data, + # } + + mark_as_read = True if str(mark_as_read).lower().strip() == "true" else False + include_raw_body = True if str(include_raw_body).lower().strip() == "true" else False + include_attachment_data = ( + True if str(include_attachment_data).lower().strip() == "true" else False + ) + upload_email_shuffle = ( + True if str(upload_email_shuffle).lower().strip() == "true" else False + ) + upload_attachments_shuffle = ( + True if str(upload_attachments_shuffle).lower().strip() == "true" else False + ) + + # Convert of mails in json + emails = [] + ep = eml_parser.EmlParser( + include_attachment_data=include_attachment_data + or upload_attachments_shuffle, + include_raw_body=include_raw_body, + ) + + if len(id_list) == 0: + return { + "success": True, + "messages": [], + } + + try: + amount = len(id_list) if len(id_list) 0: + + for i in range(len(parsed_eml["body"])): + if "uri" in parsed_eml["body"][i] and len(parsed_eml["body"][i]["uri"]) > 0: + parsed_eml["body"][i]["uri"] = self.remove_similar_items(parsed_eml["body"][i]["uri"]) + + if "email" in parsed_eml["body"][i] and len(parsed_eml["body"][i]["email"]) > 0: + parsed_eml["body"][i]["email"] = self.remove_similar_items(parsed_eml["body"][i]["email"]) + + if "domain" in parsed_eml["body"][i] and len(parsed_eml["body"][i]["domain"]) > 0: + parsed_eml["body"][i]["domain"] = 
self.remove_similar_items(parsed_eml["body"][i]["domain"]) + + except Exception as e: + self.logger.info(f"[ERROR] Failed to remove similar items: {e}") + + parsed_eml["success"] = True + return json.dumps(parsed_eml, default=json_serial) + except Exception as e: + return {"success":False, "reason": f"An exception occured during EML parsing: {e}. Please contact support"} + + return {"success": False, "reason": "No email has been defined for this file type"} + + + def parse_email_headers(self, email_headers): + try: + email_headers = bytes(email_headers,'utf-8') + ep = eml_parser.EmlParser() + parsed_headers = ep.decode_email_bytes(email_headers) + return json.dumps(parsed_headers, default=json_serial) + except Exception as e: + raise Exception(e) + + # Basic function to check headers in an email + # Can be dumped in in pretty much any format + def analyze_headers(self, headers): + self.logger.info("Input headers: %s" % headers) + + # Raw + if isinstance(headers, str): + headers = self.parse_email_headers(headers) + if isinstance(headers, str): + headers = json.loads(headers) + + headers = headers["header"]["header"] + + # Just a way to parse out shitty email formats + if "header" in headers: + headers = headers["header"] + if "header" in headers: + headers = headers["header"] + + if "headers" in headers: + headers = headers["headers"] + if "headers" in headers: + headers = headers["headers"] + + if not isinstance(headers, list): + newheaders = [] + for key, value in headers.items(): + if isinstance(value, list): + newheaders.append({ + "key": key, + "value": value[0], + }) + else: + newheaders.append({ + "key": key, + "value": value, + }) + + headers = newheaders + + #self.logger.info("Parsed headers: %s" % headers) + + spf = False + dkim = False + dmarc = False + spoofed = False + + analyzed_headers = { + "success": True, + "sender": "", + "receiver": "", + "subject": "", + "date": "", + "details": { + "spf": "", + "dkim": "", + "dmarc": "", + "spoofed": "", + }, 
+ } + + for item in headers: + if "name" in item: + item["key"] = item["name"] + + item["key"] = item["key"].lower() + + # Handle sender/receiver + if item["key"] == "from" or item["key"] == "sender" or item["key"] == "delivered-to": + analyzed_headers["sender"] = item["value"] + + if item["key"] == "to" or item["key"] == "receiver" or item["key"] == "delivered-to": + analyzed_headers["receiver"] = item["value"] + + if item["key"] == "subject" or item["key"] == "title": + analyzed_headers["subject"] = item["value"] + + if item["key"] == "date": + analyzed_headers["date"] = item["value"] + + if "spf" in item["key"]: + analyzed_headers["details"]["spf"] = spf + if "pass " in item["value"].lower(): + spf = True + + if "dkim" in item["key"]: + analyzed_headers["details"]["dkim"] = dkim + if "pass " in item["value"].lower(): + dkim = True + + if "dmarc" in item["key"]: + analyzed_headers["details"]["dmarc"] = dmarc + print("dmarc: ", item["key"]) + + if item["key"].lower() == "authentication-results": + if "spf" in item["value"].lower(): + analyzed_headers["details"]["spf"] = spf + + if "dkim" in item["value"].lower(): + analyzed_headers["details"]["dkim"] = dkim + + if "dmarc" in item["value"].lower(): + analyzed_headers["details"]["dmarc"] = dmarc + + if "spf=pass" in item["value"]: + spf = True + if "dkim=pass" in item["value"]: + dkim = True + if "dmarc=pass" in item["value"]: + dmarc = True + + # Fix spoofed! 
+ if item["key"] == "from": + print("From: " + item["value"]) + + if "<" in item["value"]: + item["value"] = item["value"].split("<")[1] + + for subitem in headers: + if "name" in subitem: + subitem["key"] = subitem["name"] + + subitem["key"] = subitem["key"].lower() + + if subitem["key"] == "reply-to": + + if "<" in subitem["value"]: + subitem["value"] = subitem["value"].split("<")[1] + + if item["value"] != subitem["value"]: + spoofed = True + analyzed_headers["spoofed_reason"] = "Reply-To is different than From" + analyzed_headers["details"]["spoofed"] = subitem["value"] + break + + + if subitem["key"] == "mail-reply-to": + print("Reply-To: " + subitem["value"], item["value"]) + + if "<" in subitem["value"]: + subitem["value"] = subitem["value"].split("<")[1] + + if item["value"] != subitem["value"]: + spoofed = True + analyzed_headers["spoofed_reason"] = "Mail-Reply-To is different than From" + analyzed_headers["details"]["spoofed"] = subitem["value"] + break + + analyzed_headers["spf"] = spf + analyzed_headers["dkim"] = dkim + analyzed_headers["dmarc"] = dmarc + analyzed_headers["spoofed"] = spoofed + + # Should be a dictionary + return analyzed_headers + + # This is an SMS function of Shuffle + def send_sms_shuffle(self, apikey, phone_numbers, body): + phone_numbers = phone_numbers.replace(" ", "") + targets = phone_numbers.split(",") + + data = {"numbers": targets, "body": body} + + url = "https://shuffler.io/api/v1/functions/sendsms" + headers = {"Authorization": "Bearer %s" % apikey} + return requests.post(url, headers=headers, json=data, verify=False).text + + +# Run the actual thing after we've checked params +def run(request): + action = request.get_json() + authorization_key = action.get("authorization") + current_execution_id = action.get("execution_id") + + if action and "name" in action and "app_name" in action: + Email.run(action) + return f'Attempting to execute function {action["name"]} in app {action["app_name"]}' + else: + return f"Invalid 
action" + + +if __name__ == "__main__": + Email.run() diff --git a/gitguardian/1.0.0/requirements.txt b/gitguardian/1.0.0/requirements.txt index 7a453b65..ab60beb1 100644 --- a/gitguardian/1.0.0/requirements.txt +++ b/gitguardian/1.0.0/requirements.txt @@ -1,2 +1,2 @@ -requests==2.25.1 +requests==2.32.4 pygitguardian==1.1.2 \ No newline at end of file diff --git a/google-chat/1.0.0/requirements.txt b/google-chat/1.0.0/requirements.txt index 9d84d358..bd6f2345 100644 --- a/google-chat/1.0.0/requirements.txt +++ b/google-chat/1.0.0/requirements.txt @@ -1 +1 @@ -requests==2.25.1 +requests==2.32.4 diff --git a/gpg-tools/1.0.0/requirements.txt b/gpg-tools/1.0.0/requirements.txt index e5ff88aa..1edb807d 100644 --- a/gpg-tools/1.0.0/requirements.txt +++ b/gpg-tools/1.0.0/requirements.txt @@ -1,2 +1,2 @@ python-gnupg==0.4.6 -requests==2.25.1 \ No newline at end of file +requests==2.32.4 \ No newline at end of file diff --git a/gpg-tools/1.0.0/src/app.py b/gpg-tools/1.0.0/src/app.py index d92b9e53..7db1c28b 100644 --- a/gpg-tools/1.0.0/src/app.py +++ b/gpg-tools/1.0.0/src/app.py @@ -79,9 +79,7 @@ def encrypt_file( ) ) - gpg = gnupg.GPG( - gnupghome=os.path.join("/app/local/", gpg_home), gpgbinary="/usr/bin/gpg" - ) + gpg = gnupg.GPG(gnupghome=os.path.join("/app/local/", gpg_home), gpgbinary="/usr/bin/gpg") with tempfile.NamedTemporaryFile(delete=False) as tmpfile: with open(tmpfile.name, "wb") as f: diff --git a/thehive/1.1.3/Dockerfile b/gpg-tools/1.1.0/Dockerfile similarity index 93% rename from thehive/1.1.3/Dockerfile rename to gpg-tools/1.1.0/Dockerfile index bfa83edc..370fb0d4 100644 --- a/thehive/1.1.3/Dockerfile +++ b/gpg-tools/1.1.0/Dockerfile @@ -5,7 +5,7 @@ FROM frikky/shuffle:app_sdk as base FROM base as builder # Install all alpine build tools needed for our pip installs -RUN apk --no-cache add --update alpine-sdk libffi libffi-dev musl-dev openssl-dev +RUN apk --no-cache add --update alpine-sdk libffi libffi-dev musl-dev openssl-dev # Install all of our pip 
packages in a single directory that we can copy to our base image later RUN mkdir /install @@ -19,7 +19,7 @@ COPY --from=builder /install /usr/local COPY src /app # Install any binary dependencies needed in our final image -RUN apk --no-cache add --update libmagic +RUN apk --no-cache add --update gnupg # Finally, lets run our app! WORKDIR /app diff --git a/gpg-tools/1.1.0/api.yaml b/gpg-tools/1.1.0/api.yaml new file mode 100644 index 00000000..1a1ba296 --- /dev/null +++ b/gpg-tools/1.1.0/api.yaml @@ -0,0 +1,124 @@ +app_version: 1.1.0 +name: Gpg Tools +description: A gpg app for Shuffle +contact_info: + name: "@deb-alex" +authentication: + required: true + parameters: + - name: zip_file_id + description: FileID for the ZIP file containing the GNUPG home directory + required: true + multiline: false + schema: + type: string + - name: password + description: Password to use for key store decryption + required: true + multiline: false + schema: + type: string +tags: + - Encryption +categories: + - Encryption +actions: + - name: encrypt_text + description: Encrypt text with gpg + parameters: + - name: clear_text + description: Clear text to encrypt + required: true + multiline: false + schema: + type: string + - name: recipients + description: List of key fingerprints separated by comma (,) + required: true + multiline: false + schema: + type: string + - name: always_trust + description: Skip key validation and assume that used keys are always fully trusted. + required: true + options: + - "false" + - "true" + schema: + type: bool + + - name: decrypt_text + description: Decrypt text with gpg + parameters: + - name: encrypted_text + description: Encrypted text message to decrypt + required: true + multiline: false + schema: + type: string + - name: always_trust + description: Skip key validation and assume that used keys are always fully trusted. 
+ required: true + options: + - "false" + - "true" + schema: + type: bool + + - name: encrypt_file + description: Encrypt file with gpg + parameters: + - name: file_id + description: FileID of the clear text file to encrypt + required: true + multiline: false + schema: + type: file + - name: output_name + description: Name of the encrypted output file + required: true + multiline: false + schema: + type: string + - name: recipients + description: List of key fingerprints separated by comma (,) + required: true + multiline: false + schema: + type: string + - name: always_trust + description: Skip key validation and assume that used keys are always fully trusted. + required: true + options: + - "false" + - "true" + schema: + type: bool + - name: decrypt_file + description: Decrypt file with gpg + parameters: + - name: file_id + description: FileID of the encrypted file to decrypt + required: true + multiline: false + schema: + type: file + - name: output_name + description: Name of the decrypted output file + required: true + multiline: false + schema: + type: string + - name: always_trust + description: Skip key validation and assume that used keys are always fully trusted. 
+ required: true + options: + - "false" + - "true" + schema: + type: bool + + returns: + schema: + type: string +large_image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAG4AAABuCAIAAABJObGsAAAACXBIWXMAAC4jAAAuIwF4pT92AAAcmElEQVR42u2dCXSU1dmAZ0nCIktFkE0Fi0u17u1vxbUutS61elzaX1utWrRVq7X9a/XXuiFW7fGXZNbsIUBAkkASAkgSIBiTEEIIgbDn22bf929mvn3+c++dGSbJJCSYYILMuYdzMkwy3/d8773ve9/7LrLYWHpJUkw6+SsmxcbiSzY2CEqiGOckJv7lRCnEiR5GDLAiK0hC4n000I9nUfaSQcBFlBhB8jPifhezHqPfa/M8udVxe6XlJ+vM16013fCl6eYKy2Nb7G80u8uOhnbbo86IEOUlXowDHSNMZd+ZGMLBi5IpxFfh9P80uW8tN0/VE8psTJ6NyXIwmSo+5GpMocLkOeB9+fKeCWr82tXGJducK48Ej3o5RkgKtPQ9QimlcBQkCfNxn3f67qu2npdHZqpxBYDVo1wOqCk1+AQtnqnF5Wpcno0plvfIczCFBldowDuKHEypwqbpicXrzO/t9u5zMmJiZUh+y5mMEt2oJEmcIB3zcZ/s8V5WapisweUqPEONzdDhi1YYnoCzWNXlX300WIXTFRhdfDi4fJ/vzzucd26wXlRETdHgShXkC4dMjU/Q4PMKqXd3eY56OTaxgn4nEio7PcKI1K4gSa6IkNsduLbMmKXCFDnYZC1+TZnxb9+4a8kwGeAiPFA1xiC/z8nuMEc2EuGKHvrL46H1GL0Bo9cdp//V5v31JtvCYipLg8tUACVkiilzsB+vMuoOBLwMmu+x0w/zNKFEE3C/i3223jE9l5BDjtevMX3e6e90MowgcqJ0yMPmHQz8qdF5R7n56lLDgiJqZh4xXU9M1REzcokLCsgflxoWrzXdV2N7eJP9qjLTOXpCBgVThuRUhZ2rJ15pdBIB7jtZOk8HSlGSwpxYjdM/LKEyVXimGszlD9u9FpoP86IpxJceCf1qo21uITURLoIKNa5Qn5jFyYHeV6iwrGxsso6YqIsLZnLI1XimGrvpS3OLNcqLSDalMwQlupkAK+Z0+ecWkgoVlqnGH6ix7jRHOFEiA5y6y3/3eotSjQMFDSHK1b3o9B/xVVKFA/2e7n8VOdjVq4yNJvAVaG05M1CCG/lsr29mHqlQ45M0+LMNTszP8aK03RS5e71lug5Imexk+IY7MlT4jWtMzdaoKMVO27opG9V5TXNi9j4/MG7U2HQ98e8OX4gT8QD38k7XVB2BxFA2CEoV0O99ZvEQh1KFXbnScNjDnrZ1UzZK81qUYqwgfXksdEEhpVBj52jxt1o8NCd2e9hfb7QpodpNP3lVYLLLIOVpucTcQmpaLoGIg0VAjcmHhhJ8LAd7YovdFhbGNUpw7d1u9royoxKafn/a7rTSwjfW6B0Vlsy0CyLUwpkqbE4BeXeVZVm7dzMV7nAwB9xsh4OpJcNvt3puq7RM0wJxGzrNqTr8/zr9jHA6BFM2SiIZYMVHN9mgMsEf2GilgvwOU2RhMSXvD0IFPpMFt4Mf7vbud7O82PfO0SRlBGkzFX663jEzj5CrhkrzkhWGb8CiOeo0ZaMikpJUhdPTtcB2OUdHNFuj+93szevM8v7LItxln5tLvNLo2utkWOGE7yf1zqUUh5A3KhQfCl5aapAPSV+B/fuL253JuTJuUKJ7ttL8/TVWBZywrzS6jvm4B2usGekgKlTYwmKqlgyHWDFBcECFi/CKcOvZYo3eXmlWDEU2VfgUPXHYy422XMpGQSRjFRg9M49QqrFLSwztdubFHU5FunmdocYXrzPvMEXiEIfsiUAP7JCHvbncDB/JyfRPNvZmi4cT
xxVKZAC9vNOlUONKNf5MvaOWDJ+j7aevoUhettIIbHVBGu49Jp3EjabIJSWGk66bchV+c4WZCHDCaLIceZTGIH95qQE4wTT4Jx2+hzYC5dNXz6jxaXqiCqeFU93eod9iRanoUHBWHjm4Tleo8dm5xAaMHlXlM5Io0S6t0RyZBh0Ws/PJ15rcF+STCk3fGZelxv+y08UI0re8N1GS3FHht1vtspzBLCTwjRr83TZPFH7jOEApQjNId8CvhLb07ELqpgpzhibNdFtUQn1tjnx70xm5JhvNkZPsPqF+e3KL3RMVxwlKURKl2P+2uBVf9CjUeAb0hPf3OyjU2NP1jiA7ArMtuSl8uLbfMtL3+WHXlRmJADd6XvaRlUpwX3/Z6QLnBwPPtUka/PNO/0j5uhFL/YHARM2gXiUVPr+I6naz40PtIBF5rsE5OMofaIlKjB4pLwPyK3c6mStKDYpBPSMTtXizNTp+UMZif9w2GEqFBj9PRzQYIyPosOFFyRDk7660DG6xKzX4RiI8PlAi8/ClHRClZkCUM3X4DvMIozSG+F+styjUJ0FZg48jlFLs9a9dii8GQzlLjzdZomdRngSlKMXebIYafGCU5+uJFttZlIO+BGgMvdvqOSnK3XZGPIvypCb6R7u9g6OcrSc6nN8BSoUGrx5fKP/T4ZMPqsHn6IkuF3sW5clRgnOxbGwQlHP1RLf7LMohoNR3B2QD7+EUanxeLnDEiiPnozljURYdCg7ii0Uoj/tG0g97BqJEB2Srj4YGCbJQaPD5eSTm50bQQ3OmoZQkEKgmSrHyHhrFRQ60B5+fT+KB70wqpWR44thHuZEIT9LBw1UV1nfAgID5hRQRALEuwggNVgAo76lEKLE03wu/OkODbyTHNkoJHDEKX1HhWjK8hQp/1O6dqCMGkcq5RVS7nTGFeDM9MsMQ5DsczO0V6aUyGQU3Q0+UHg6aQuDzhiBvDwu8OJLnEyOAUpCkSoyeXUBO1hGTdARw9w4eG6XBFxZTi0pGbPywhLq4hJqsSw9RqcJm5ZFP1TmqCNoeFoKcGGTBsEcEmh97KNcdp8/Tg8geOQqYOtnZtEKViNQfkQEDX1N99XJ0qJkN4tV/u8VeidGeqJCMVED8fIxIc2MPZXkPQCkf6di+UxgKFQhWn64jUFx2h4OhOZEXe6VQnEU5cNhqIs4tU41fvsLwxGZ7waEgDk+9+8fMJE+EzqKME1RqgCYBZ7bLQZLPuXnkw7VWVZe/w8HYwwL0UaVx46E3BAkkD9QbIz5GPPNRyvsb+XAVhossPlkLQgQuX2F4sNb6RrN7XQ/tZZKJZZIwgF5GWRqSJNnDwnMNzsc327xnGEo5NJ7RSE5Vee+csgwNfl4+edUq40M11j9vd36217cBp/c4GFckVQAHc5BIiXlNBfkl25wTNfjjW+xnAsqkrSeH2jwzB5uYjU3OwabqidlF5CWlhp+sNd253vL7OscbzR71/kCdIdzpZHp8nD0CrBkUO8inQByK8cuL0nEf96saG8y7wsYrSnTUI0eBgGpsRi5xaQn1szLjL9ZbflfneK/Nm9sdqMLpFmv0sJezhwVORBojOStP5DcPK6MxmZFKc2J5D331aqMchcSosMfGI0qUp5ihxi5ZYXhkk21pu3c9Bqgd93GOiBBgRSRfKJNWTMljTqL8NjsxUZKCrPjxHt+FxRSMTcTQQx0/UgnD1eRqPEONLyiintzqKDoUxP2cNyokNYQI08DF0cmZR48kzEs7zdFHNsFJDXMJUOZAhgr//VbH+EAJE7vwH600vLrT1eVimd6R0aOXdZx8SKwg9fi5d3Z55hWS8kScG4SIzS0g/9DgzO0OjANjSK7CMlXY03WOVluUhu605GwdvVfqMmoL88v3+e8oN0/QQhMKZZ/BfeQzdY5qgm61Rd9odrujwphGqVTjV60yFh4MBlnxNFQRQIpFEMFSG2TFQx42e5//qjLjBBW4EgV0Z0zW4FesMr76tbvdzoR5
0c+IT251PL7ZNqalUq7Cbik3N1ujgiiNXvEAFNyfqtCpIF+N06/udF1WasiAGZPIXTI7n7y/yvrpXt8RLweVWwwkuHX5p+jGql0JPEMw52txubnDEU1mHEkjxy5pG6KYa0SQ5sTtpsg7uzy3VVjm5pNZWpiJlt2jUGGLSg2v7XR9RYVNIR6t1AL89WqCvniFQa4ewyb6rFziwRorOrQZsYC/hC0pwCnMCZIzIvT4uR2miLrL/3y9c14RyKmSLe+Rq7ApOmJBAfnzDZbXmlz1xnA0vrjEEvMjFhWkLWR4Tj5IAJaPWbty7bHQjWtMnU5GGrkKAWirxwhg8tYZwgWHgu+3eX+z2bZ4tXF+HqmEm8tMNTa/iLql3LykwZnd5d9uiphCPCucsEzRI0VWUdmx0BWlxnhw8NhEKUrSNlNkCxUWpdgILo4SNGhabdGl7d6HN9kXV5ivWWO6Ya3p5nXm+6qsz29zLtvj+4oKd7tZY5D3MwInJI38mHRi0w2edISXPtvrm19IKYF9PoZRSpLkZ8UIL46GkpESSkaEEoq8PmKKPS+llHJKce7GbSNWlPY4mD/UO6BnJCUqfmyjHBWLB52t9zbsJRS7kBaClJIQGeJEfXfg+jWmrDQuu+8fyuHqKATRx4hbqfCjm+3TtIQ8MaPPohzGZlEQQfGdemPkmXrHdD2RcAKlz+Q5i7KXuZq6WfQyQhVGP7bZdhGoiwDPPgepJnEWpZSyUxSgInKEhS4X81aL59o1pikacOKYsixi6AxSfhallKLBU3SOFBVEDBrqn3R4H95onVdIydCBuLpXjTFgt2vxm9aZp+m+N2oHnQImBy+C4pV8yvk/kkRelA662bzuwPPbnDetMy/IJydrkWc+Hm6YPIaUZWPAq1Jm1O4PqPb7Z+qJvgHdZypKXgQJtM4IGI6wYKF5MsAd93EdDqaGoHO7/W80uR+ptS0qNQB1nA2GHJ6dKWE9BwUs/zJFi8/NJa4sNfyyxvqvXd4mSzTCi3udzE/XmhSq74cGl6AB+JUh/Ole3/PbnY9tsT9QY/35OvONq42XFlMz9IQShqyAM0hY1ARJ32QdqJtz1WrjXZWWpzbb/97kzu7y15LgNM2ROBo67OXurbYmItm+H1Ipwr9ABLjd9mgNQRccDHy2x7e0zfNOi/sf37jfavEsbff+p9OnORAoORJcdzz0FRXebY8ecLOYnzOHeE9UDPOgQJ4Ay9qiyrYVGP2zL00ZA6c1n7FrZeohhRRLPWVMHDqmjBSb/ESJW+QBojmxzc68uN2JllH59w1lGq09JJRS6gacEaRuN/t6k/tHKw2ZJy2Td9au7COMyHvkjAhVOP1cg3NhEZWpHlJU4vcdZWpUECeAakYNxsiyPd47Ki2TtMQgyUJnPsohVJOPT95YooKvKAEZrDeEP9jtva8KVP/NgFErchU+vIilcY0STUwxcTrIJxRFwkEbS+pfFkbqswJY/sKc6IoIPT622RrN6fI/VGu7dKVhihbPzB5S3dAzDWWqfIV50RDkuz3sLhuzzRT5igpvJMLlPaGVR0L5B8HO5It9/s87fZ/t9f27w7e03fv3JvfjtbYrVxmn6EHBnbh1+W0gjguUUu/gi9RYDJoTtxrC/2rzPF3vuGOD5eoy0yVF1EX55Pw8ck4eOSsXFEaeriem6IlzdGBM1uGTdLA0ugZEeWRAs1yJ/h2JodDgT3w1NlEy4Jw+yktgCIkBfgQdIN5r816x0jhbT8zW4LO1+GwdMTuXmJ2XGLmDDv1ojefrHKc1pCC1vn6foJ9UlDQn2cKCPSLYw72GIyJQQX4DTq/H6GqcriHCcNDVBPjxux3tDoYTpaGrx4Fuf0gokx0KxIS3Bjlskm9KKb0GoFMnlm6c7pc4wEjTbWYoJxzxSE9gfiECKAAvhcCgKJMuQk9UaLVFa0lac8D/4W7vsj1efXdgMxVusUaPeNkgfKxDa5Jzml5BVkTJYn2GMcRbaMETBS1RTsAduNS3
mHDrHfOxTZZoeQ/9SYfv/d3ezzt9a4+Fdpgi7faoOSSkyrUs7b5NlCRHRFh1NPS7OseiEgPMFANZYJkafKqemFNAXlxMLS43/6nRtc/FjJEmLegkcgsVfqDGdtcG610bLKnj7irrA7W2p+ocrzW5P9vr2+dikHxJA7hXeFE64GaX7fHeWmFeUETNyCOzNPHSaOflkRcUkpeXGh7caPt8n48RxAFQQuNuv4u5c4Nlmp5QxmvkA09qXJPCH1F5yil6Ys3xkBQbE22Z0LJTcjg49USyajy7LT7gvWTASmYXFVFvtnjIIN+3Ei78O15GVO8PXLHKOFGD+gOAm81IEEAeUmBO5GB3VlroRBKxrM/SwAjSRiJ8wxqTAppvChDYSd1bbf1rk/t/Wz1vtnhe/dr9q1rbj0oNP9ATk7V42bGxhzIRnjg9j1xYRC0oohYWURcVUvMKyCk6QpEIR85SY0u2O+NV9lIcVBaa/2ezZ7KWgE8Cm6ABAbdP1TnebPG83ep5o9nzwg7n7ZWWCwupLBU+IEpRAkX0f1hqQBuJTA3+8/WWjUSYCnJwOoAVhBHAl+1xMMWHg09ssdcZwmMTpVyNPVnn2GmONlmizVbwb4MpojsQuPFLk/JEqW8iLgrxYw8gSX/9Gpa/h0dDcwvIt1o8u2xRN4hyjJdhCPPgZGmbMfK3JvezDc5QH5RozTYE+RtBnWjwNRO0+DMNjnjidn8VCbd94Xj64LA8kqPFvZ9UYn9tcjMC0LwoEC7ZdGrJdmcm2vPkYPdUWX3xQj3gA5UYPUEbP2i7sJhaeSQY4ePRNX0IoFAkRwRkQvdaK1FHoU/3+iZp48fHN60z434uUUB2AENhgPTBXn7c3jsfcVBLZCjGykAf6I/ydYgS1fc94SSWpHY7c35+vKbtxSsMu2zxqunmEH9bpQUlJEzS4B/s9oLAZTF9Hn7/tBdZ8jqOeLmb1pnkicdVdCg4rCq8Egy863azexzMHgfT7WbZRD3kHj+n3h94r837zi7Pxx2+fS4WXUcfsRUl8Ml2O/j1TifjY8X+XxEVpMOe+FegbIEhokz9mCnEX7zCgOb4+flEDUELid89NxdNbRBwjfu5YUWKypIXkX8wMA2tMjnY4nIzWh2G/hJgZ6Jr15iytEDTLS43e6Jij5/7oM0zI59UwLxOkGu3HJuWS7y600UGeq0Nggjmyx/qHZlqPEsLynZ+3a8EniiBpIdbKs2ZGpjDssJABfmkyAwdpTnELypNoMwDKFno/XxwozWe5paN/afDlwj7Gs7GUYBPe8k2JzoMUYC68E4UgTdclNeUGUGDuxzs/mrrUS/32Cb7JGg2yRO5jOiZT9Lgf4P3mbzSOMo6hyIHWFrzC8n0KH0crIUOLvKiYooKcsNCiWydoz5ubhGFXEQXFlOoLcIBN3vtaiOMkMHOyydhLcPhxdzK0MQJsuJd6y2oa8u5ucSqo0FBGl6x5jjK1SYk17+ssv5miz0L2F/YJDV2bj45I5+cGG/6Ah7Y9DyyLaVe4PBQwhs+FZTwMwUHA1NQdQ8VdnOFxRERYjFQiCY5Ka8tM1pofrjle2ToUnp83OwCCmYJg+QvlOIwrD90AiU0ACbqQLX+aTring2WSozucDB7nUzBoeCVq4yKxHK8tN0bS1T8HT2UfNyMi+ucoz7u9vUWeaLuxPttXrTOqPb7k67MB2usp1CrWob2oV0udiLcJMhV+KUrDLvtzLdBia5pqpb4R5ObCvLJOjg0Jy5t907SxJsRPF3vEKVRQ6nqhRI1MD3sZZ9pcEwGQZdAaK5YZeyJ5yHEPt7jSwZt/bnRRQ+/EJIMrQh7HIwikbp2zWpjl4sdpEdAWpOlD8oMNf7iDleQS7b+jFsPW6jwDD2JInse2WwTRm2Cy9XYvTVW/YGAZn8ge5//43bfU3WOhcWGTE18B3lJqaH0SJBP7MTfbvWg2COlBn+/zTtQi55k2kt/An1RytT49WtMqJLfICE+NCeiwQon9MaJ
tVKFXb7SaETqNbX4hwQ6ksxCNl0O9utNtmRFgdGQStRQSg5zoRSJkwzQcE+N31ph2QgVdxLGOxAl2uMta/eywiCGbSzKxwmEgaz0m+CoZJVcjf94laETKHApvTzGYq3W6EuNrhd2OP+43VlN0PHlPAWlIge7YY0JtBnoFwHQaouef3pQosoaiaGA8UYZGhAj+Har54CbFVJ7P8di/+7wxaVSjb/V6okOKJXxEPcXtjtf2O58eafLFIqfasU1uCnEXwKbASlU+MKE2pHST+3YisPBc7QgD1iZg33U7kUWRh+U1wOUYn+Uu04XSrkau26t6bltzue2OV9udC1t9606GmqzMc4IyKIWU9I54kXquwOKRJGOx2D6ozgASndU/O1WR4YKtJycoMH3gmwlcA2yRMtK6YFqK7qNGblkeU/6JiDovdIjwck6Ark8lkGU0thDmarBk0GaKZvXlAuDo5YMz4QXBlskmGxhYSCUnqj4ZJ0D3f4ELdGZihKZ6C/tcCKLL0OFv9roQhaZlFYqIUpkbH/U7judKI+fgl15siolaFYd9nI/XWNCQewJE11K53wAKP+7zoHisrO0eBylFJ/ggOYGHNRVQ4Bm5JF7E5/4blAWkDvN0T4lqwQJtHC+ad2pm+iDWHLuqPhMvTMDbcxysGcbnAmlNByU6DrIAH9PlTW5YL/R7EHHIKlATxvKufnkNlMkVZhQcUXQqCgxDUcQJfr18h7QBwztcecXUtuMkbSur5OgREdC+u7ANF38b83OJ4sOAW9dr9oWo4kyykvP18dR/kBPrDoaEhKrG1qCqvDwnAIS2q3YyKKUEmeCD9Xa0CqnVOG3VVhabVGht2NQHApKlMn2dIMDuewVsLXqKztBvzs+cWKJTjJLDgcnjTRKFCD59yZ3vJO6Cr+32trtZmkORCoc8nDvt3nPLyAVsKLnaEhlwlZj5hVSCJMiB3Tzq8ZppIKSW0+AcuvAKJM0u1zstWXGeLwsbEB9S4Xl3TZPvSFCBDhDEATcf9rhm6jFRxYl+va1x0LIYyhTYVka/OZy80uNrpd3um6tsEyBPXIXrjBcCTuWKkYcZSLXN787MC/hGJarsDn55ONb7Or9/oNuloLnwHudzC9Ba0VsEJTxad5qY+6psk7QnDily1KDE8v5xdQFJYYLiqhz88iku6yvXVmWaqIPgLIgjvLhFJRJ8/bRTTZlSvqNEvnooIfp/Hwy72DgsS121CV8wQAop8FKmjJ13I83rC55wLvMiyWHg/MLSUWiRJdSjZ+jJeYWUBcUUxeWGEAf88QymB5lqteeCvIvN7oWFINI5HjZztSu5/AoLkONz8ojv+j0JVF2Aak0KnIwZTZ2fZnJnQ5liy06K49Uwpi0h2p7o4Sf6fFzd2+AfXFhLQEFLPM5SUv8bJ257FgI9NXZYEW7l4uK0qAshoe3Svi7rze5TgElWpo34PT91daZ0GmtSDQuTyEALmCCBl9YYjgEmj0n7Mq08TEo7/KNZvc9G6xzCimlJt5lLEODzyogb62wvLbTVYnRwI+dWOxMIf6DXd4lDc4lDY73d3lDvUNy0F8+6uVea3S9sM35xwaHZn+gVxJ3YqE44Gb/2ey+o9KyoIS6bKXhvmrrh7u9nbCNqyMifLrHh77in9/A6ja9QwqaLNGXdzhf2AZG+fEQWuWHH6ANLsMQ5FcdDT6z1fFfX5qn5cbNBuQ/XFRqeHSTbVm7d4cpEubEAQNdkiFXwDHFg7K4PX6uw8m02ZnddqbDwRzzch5GQA9c7OVYBUe7KJ6NEQYKfIglgtxASGr6BwkdYn5GpIK8OcSjenZCXIGCXtknvkJKYyEm/z4vnmoqUWLt5qGJ5ooIBz1su51pskRbbdEuF6iLAF05YuqsOkkkW689f+94eqG/zYWCsJLhvANcZepnBriNWJo4zdTjsIG/IlmzRBBHoGVjshpmyu3HxHR1EYYaX9kr++M0BhCMkWCF1IzfQQj8PyPmru8ZDJqJAAAA
AElFTkSuQmCC diff --git a/shuffle-tools/1.0.0/docker-compose.yml b/gpg-tools/1.1.0/docker-compose.yml similarity index 58% rename from shuffle-tools/1.0.0/docker-compose.yml rename to gpg-tools/1.1.0/docker-compose.yml index 40ee05f6..02b32361 100644 --- a/shuffle-tools/1.0.0/docker-compose.yml +++ b/gpg-tools/1.1.0/docker-compose.yml @@ -1,10 +1,10 @@ -version: '3.4' +version: "3.4" services: hello_world: build: context: . dockerfile: Dockerfile -# image: walkoff_registry:5000/walkoff_app_HelloWorld-v1-0 + # image: walkoff_registry:5000/walkoff_app_HelloWorld-v1-0 deploy: mode: replicated replicas: 10 @@ -13,8 +13,8 @@ services: restart: "no" secrets: - secret1 -secrets: - secret1: - file: ./secret_data - labels: - foo: bar +# secrets: +# secret1: +# file: ./secret_data +# labels: +# foo: bar diff --git a/gpg-tools/1.1.0/requirements.txt b/gpg-tools/1.1.0/requirements.txt new file mode 100644 index 00000000..1edb807d --- /dev/null +++ b/gpg-tools/1.1.0/requirements.txt @@ -0,0 +1,2 @@ +python-gnupg==0.4.6 +requests==2.32.4 \ No newline at end of file diff --git a/gpg-tools/1.1.0/src/app.py b/gpg-tools/1.1.0/src/app.py new file mode 100644 index 00000000..e8932462 --- /dev/null +++ b/gpg-tools/1.1.0/src/app.py @@ -0,0 +1,285 @@ +import os +import socket +import asyncio +import time +import random +import json +import subprocess +import requests +import tempfile +import gnupg +import zipfile +import shutil + + +from walkoff_app_sdk.app_base import AppBase + + +class Gpg(AppBase): + """ + An example of a Walkoff App. + Inherit from the AppBase class to have Redis, logging, and console logging set up behind the scenes. + """ + + __version__ = "1.1.0" + app_name = "Gpg Tools" + + def __init__(self, redis, logger, console_logger=None): + """ + Each app should have this __init__ to set up Redis and logging. 
+ :param redis: + :param logger: + :param console_logger: + """ + super().__init__(redis, logger, console_logger) + + def extract_archive(self, zip_file_id, fileformat="zip", password=None): + try: + return_data = {"success": False, "files": []} + to_be_uploaded = [] + item = self.get_file(zip_file_id) + return_ids = None + + self.logger.info("Working with fileformat %s" % fileformat) + with tempfile.TemporaryDirectory() as tmpdirname: + + # Get archive and save phisically + with open(os.path.join(tmpdirname, "archive"), "wb") as f: + f.write(item["data"]) + + # Grab files before, upload them later + + # Zipfile for zipped archive + if fileformat.strip().lower() == "zip": + try: + self.logger.info("Starting zip extraction") + with zipfile.ZipFile(os.path.join(tmpdirname, "archive")) as z_file: + if password: + self.logger.info("In zip extraction with password") + z_file.setpassword(bytes(password.encode())) + + self.logger.info("Past zip extraction") + for member in z_file.namelist(): + filename = os.path.basename(member) + if not filename: + continue + + source = z_file.open(member) + to_be_uploaded.append( + {"filename": source.name.split("/")[-1], "data": source.read()} + ) + + return_data["success"] = True + except (zipfile.BadZipFile, Exception): + return_data["files"].append( + { + "success": False, + "file_id": zip_file_id, + "filename": item["filename"], + "message": "File is not a valid zip archive", + } + ) + else: + return "No such format: %s" % fileformat + + self.logger.info("Breaking as this only handles one archive at a time.") + if len(to_be_uploaded) > 0: + return_ids = self.set_files(to_be_uploaded) + self.logger.info(f"Got return ids from files: {return_ids}") + + for i in range(len(return_ids)): + return_data["archive_id"] = zip_file_id + try: + return_data["files"].append( + { + "success": True, + "file_id": return_ids[i], + "filename": to_be_uploaded[i]["filename"], + } + ) + except: + return_data["files"].append( + { + "success": True, + 
"file_id": return_ids[i], + } + ) + else: + self.logger.info(f"No file ids to upload.") + return_data["success"] = False + return_data["files"].append( + { + "success": False, + "filename": "No data in archive", + "message": "Archive is empty", + } + ) + + return return_data + + except Exception as excp: + return {"success": False, "message": "%s" % excp} + + def get_auth(self, file_id): + item = self.get_file(file_id) + tmpdirname = f"/tmp/{file_id}" + + # Clean up all old stuff + if os.path.exists(tmpdirname): + shutil.rmtree(tmpdirname, ) + + # Get archive and save physically + os.mkdir(tmpdirname) + with open(os.path.join(tmpdirname, "archive"), "wb") as f: + f.write(item["data"]) + + # Grab files before, upload them later + gpgfound = False + with zipfile.ZipFile(os.path.join(tmpdirname, "archive")) as z_file: + print("Past zip extraction") + for member in z_file.namelist(): + print(member) + if member == ".gnupg/": + gpgfound = True + + z_file.extract(member, tmpdirname) + + os.remove(os.path.join(tmpdirname, "archive")) + + if gpgfound: + tmpdirname = os.path.join(tmpdirname, ".gnupg") + + try: + gpg = gnupg.GPG(gnupghome=tmpdirname) + except TypeError: + gpg = gnupg.GPG(homedir=tmpdirname) + + return gpg + + def cleanup(self, zip_file_id): + + tmpdirname = f"/tmp/{zip_file_id}" + + if os.path.exists(tmpdirname): + shutil.rmtree(tmpdirname) + self.logger.debug(">> Cleanup complete") + + return + + + def decrypt_text( + self, zip_file_id, encrypted_text, password, always_trust + ): + gpg = self.get_auth(zip_file_id) + self.logger.debug(">> Created GPG instance") + + decrypted_text = gpg.decrypt( + encrypted_text, + passphrase=password, + always_trust=always_trust + ) + + # Delete the downloaded keystore + self.cleanup(zip_file_id) + + if decrypted_text.ok: + return {"success": True, "data": decrypted_text.data.decode('utf-8')} + else: + return {"success": False, "error": decrypted_text.stderr } + + + + def encrypt_text( + self, zip_file_id, clear_text, 
recipients, always_trust + ): + gpg = self.get_auth(zip_file_id) + self.logger.debug(">> Created GPG instance") + + # Build list of recipients from comma-separated string + recipients = recipients.split(',') + + self.logger.debug(f">> Recipients: {recipients}") + + encrypted_text = gpg.encrypt( + clear_text, + recipients=recipients, + always_trust=always_trust + ) + + # Delete the downloaded keystore + self.cleanup(zip_file_id) + + if encrypted_text.ok: + return {"success": True, "data": encrypted_text.data.decode('utf-8')} + else: + return {"success": False, "error": encrypted_text.stderr } + + + def decrypt_file( + self, zip_file_id, password, file_id, output_name, always_trust + ): + gpg = self.get_auth(zip_file_id) + self.logger.debug(">> Created GPG instance") + + if file_id["success"] == False: + return "Error managing files." + + always_trust = True if always_trust.lower() == "true" else False + + ret_decrypt = gpg.decrypt( + file_id["data"], + passphrase=password, + always_trust=always_trust, + ) + + # Delete the downloaded keystore + self.cleanup(zip_file_id) + + if ret_decrypt.ok: + self.logger.debug(">> File decrypted") + + file_id = self.set_files([{"filename": output_name, "data": ret_decrypt.data}]) + if len(file_id) == 1: + file_id = file_id[0] + return {"success": True, "id": file_id} + else: + return {"success": False, "error": ret_decrypt.stderr} + + def encrypt_file( + self, zip_file_id, file_id, output_name, recipients, always_trust + ): + gpg = self.get_auth(zip_file_id) + self.logger.debug(">> Created GPG instance") + + if file_id["success"] == False: + return "Error managing files." 
+ + always_trust = True if always_trust.lower() == "true" else False + + # Build list of recipients from comma-separated string + recipients = recipients.split(',') + + self.logger.debug(f">> Recipients: {recipients}") + + ret_encrypt = gpg.encrypt( + file_id['data'], + recipients=recipients, + always_trust=always_trust + ) + + # Delete the downloaded keystore + self.cleanup(zip_file_id) + + if ret_encrypt.ok: + self.logger.debug(">> File encrypted") + + file_id = self.set_files([{"filename": output_name, "data": ret_encrypt.data}]) + if len(file_id) == 1: + file_id = file_id[0] + return {"success": True, "id": file_id} + else: + return {"success": False, "error": ret_encrypt.stderr} + + +if __name__ == "__main__": + Gpg.run() diff --git a/gpg-tools/README.md b/gpg-tools/README.md new file mode 100644 index 00000000..16d42d28 --- /dev/null +++ b/gpg-tools/README.md @@ -0,0 +1,18 @@ +# GPG Tools +GPG tools is a utility app can help with encryption and decryption of text and files. +It requires your own GPG keystore containing private and public keys, along with the password to access the keystore. + +## Authentication +Authentication for the app is necessary in order to decrypt or encrypt files or data. +How this is handled from version 1.1.0 is through a Zip file with all your resources, uploaded to the Shuffle File storage. +The ZIP archive must contain the entire GnuPG Home Directory, named '.gnupg' + +**Required Authentication Arguments:** +- Zip_File_ID: Points to the File ID of the Zip file containing your Private & Public key(s) +- Password: The password that protects your Private Key + +Getting the ZIP's File ID: +1. Create your public & Private key with `gpg --full-gen-key` +2. A GPG Home Dir is created, under `~/.gnupg` +3. Compress the GPH Home Dir `zip -r gpg.zip .gnupg/` +4. Upload the ZIP file `gpg.zip` to Shuffle Files and obtain the FileID for the Zip file. 
\ No newline at end of file diff --git a/hoxhunt/1.0.0/Dockerfile b/gws/1.0.0/Dockerfile similarity index 100% rename from hoxhunt/1.0.0/Dockerfile rename to gws/1.0.0/Dockerfile diff --git a/gws/1.0.0/README.md b/gws/1.0.0/README.md new file mode 100644 index 00000000..5c8eb2e4 --- /dev/null +++ b/gws/1.0.0/README.md @@ -0,0 +1,40 @@ +## Google Workspace +An app for interacting with Google Workspace or GWS. +## Requirements +1) Enable the Admin SDK API from GCP console. + - Login to Google cloud (Make sure you are using the same administrator acount that you're using for Google Workspace) and In the navigation menu on the left-hand side, click on “APIs & Services” > “Library”. + - In the API Library, use the search bar to find the "Admin SDK". Click on it to open the API page. + - Click the “Enable” button to activate the Admin SDK API for your project. + 2) Create a Service account. + - Go to the navigation menu, and select “IAM & Admin” > “Service Accounts”. + - Click on “Create Service Account” at the top of the page. + - Enter a service account name and description, then click “Create”. + - You can skip the permission part here as we will be adding persmissions from GWS console later on. + - In the service account details page, click on “Keys”. + - Click on “Add Key” and select “Create new key”. + - Choose “JSON” as the key type and click “Create”. This will download the JSON key file which contains the “client_id”. Note down this client ID. + + 3) Subject (Email address associated with the service account) + - Note down the email address associated with the service account you just created it'll be used in the authentication in Shuffle. + 4) Adding permissions to the service account from GWS console. + - Signin to the Google Workspace admin console. + - In the Admin console, locate the sidebar and navigate to Security > API controls. This area allows you to manage third-party and internal application access to your Google Workspace data. 
+ - Under the Domain-wide delegation section, click on “Manage Domain Wide Delegation” to view and configure client access. + - If the service account client ID is not listed, you will add it; if it is already listed but you need to update permissions, click on the service account’s client ID. To add a new client ID: + - Click on Add new. + - Enter the Client ID of the service account you noted earlier when creating the service account in GCP. + - In the OAuth Scopes field, enter the scopes required for your service account to function correctly. OAuth Scopes specify the permissions that your application requests. + - Depending on the actions you want to use below are the OAuth scopes required. + +| Action | OAuth Scope | +|---------------------|---------------------------------------------------------------------------------------------------------------------------------------------| +| Reset User Password | `https://www.googleapis.com/auth/admin.directory.user` | +| Suspend User | `https://www.googleapis.com/auth/admin.directory.user` | +| Get User Devices |`https://www.googleapis.com/auth/admin.directory.device.mobile` | +| Reactivate User | `https://www.googleapis.com/auth/admin.directory.user` + +## Authentication +1) Upload the Service account JSON file in to the Shuffle files and copy the file id. +2) Now, Inside the GWS app authentication in Shuffle; use the file id you just copied and in subject use the email address asscoitate with your service account. 
+ + diff --git a/gws/1.0.0/api.yaml b/gws/1.0.0/api.yaml new file mode 100644 index 00000000..a879728b --- /dev/null +++ b/gws/1.0.0/api.yaml @@ -0,0 +1,106 @@ +app_version: 1.0.0 +name: Google Workspace +description: Manage Google Workspace with Shuffle +contact_info: + name: "dhaval055" + url: https://github.com/dhaval055 +tags: + - Assets +categories: + - Assets +authentication: + required: true + parameters: + - name: service_account_file_id + description: Upload a service account file to Shuffle and use the file ID here. + example: "file_id" + required: true + schema: + type: string + - name: subject + description: User email associated with service account. + example: "admin@org.com" + required: true + schema: + type: string +actions: + - name: reset_user_password + description: Change GWS user password. + parameters: + - name: user_email + description: User email you want to reset password of. + required: true + multiline: false + example: 'testuser@testorg.com' + schema: + type: string + - name: new_password + description: Password you want to set. If you do not provide this value then a random password will be generated. + required: false + multiline: false + example: "*******" + schema: + type: string + returns: + schema: + type: string + - name: get_user_devices + description: Get GWS user devices. + parameters: + - name: user_email + description: User email you want to reset password of. + required: true + multiline: false + example: 'testuser@testorg.com' + schema: + type: string + - name: customer_id + description: Customer ID of the account. Can be found in admin console under account -> account settings. + required: true + multiline: false + example: "C02dnh9vw" + schema: + type: string + returns: + schema: + type: string + - name: suspend_user + description: Suspend GWS user. + parameters: + - name: user_email + description: User email you want to reset password of. 
+ required: true + multiline: false + example: 'testuser@testorg.com' + schema: + type: string + returns: + schema: + type: string + - name: reactivate_user + description: Reactivate the suspended GWS user. + parameters: + - name: service_account_file_id + description: Upload a service account file to Shuffle and use the file ID here. + example: "file_id" + required: true + schema: + type: string + - name: subject + description: User email associated with service account. + required: true + multiline: false + example: 'adminuser@testorg.com' + schema: + type: string + - name: user_email + description: User email you want to reset password of. + required: true + multiline: false + example: 'testuser@testorg.com' + schema: + type: string + returns: + schema: + type: string +large_image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAABAAAAAQACAMAAABIw9uxAAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAMAUExURUxpce8+OjWlajqxbq6TADSoU/9gNDuRtOhGLupDNepDNepENTSoU+pDNTSoUzWnU+pDMupDNepDNepDNepDNepDNTSoUzSoUzOoUupDNTKqVTSoUzSoU+tDNepDNTSoUupDNTKnZUGI5kCJ4etDNTSoUzSoUzSoUzSoUjSoU+xEM/u8BTOpUzSpU+hFNupDNTSoU+xCNetDNTyaqOtCNOpDNepDNetFNOpDNTSoU+pCNTOpVOpDNOtDNTOoUzSpUzSoUzSoUzSoUzSoU+tFNDOoUzSoUzSoUzOoU+tCNTSoUz2QwTSrVDSoU/u7BUCK3zSoU+tCNOtDNTKnUelCNOpDNTSpUz6OyUCK2zSpU+pDNepDNetDNT+L1zanUupCNT2YrDyUsulDNTSoU+pCNfy8Bj+M1OpDNexDM/u8BTSpUzOoUz2RuTSoUupDNfu8BPu8BD6PxT6OzD2QvzuWrupDNO5EM/u8BTuVrOpDNfu8BTOoU/u8BP+9APu7BTSpU/qxCu1XLOlEND2WpDyVrTyTr/m7Bvq8Bfu8Bfy8Bfq9BT6OxupCND6Ozvu8BjySt/++APy9BTSpU/u8Bfu8BPq8BepGNPu8BOpDNOtDNPu9Bfu8BetCNDupUP+7BO9pJvu8BOpDNepDNPy8BT+N0DSoUzyUpUaqS/+2ADSoU/u8Bfu8BDuWqfm7Bu1QL/y8BupDNfu8BjyUsvSJGf+7ADyTtfi+B+pCNTqYqvu9BT2RvfmoDDuUqlisRN65EPy9BPigDz6pT/OCHfq+Bfq1CPu8BLG1IlasRs+4Fv+8CHmvOI6xL/J2Ici3GT2UrWiuPjSoU/y8BTuVqfWYFJSxLby2Hfu8BT2SuPWQFtq5EnywN/u8BDyUsz2TtKCzKO9qJqCzKHmvODmfeUyqSdK5FTyTtTelazajcjSoU+pDNUKF9Pu8BUGF8/u8BUGG8UGH7UGI6jWkaOu7CzSnV/S8CDSmXTmbkDiegT+Nzz2QwjehdTuVqT+M2TuYnj2TtTuXokN3Xn4AAADodFJOUwARBgMB/AOOCsH7zP7o9SEG+f7fQ/K86hX1EH/5Zn9E7gr+/H
BmotLDKRn9OuUVtG8zvBce5ZjUKlx2M9ijeC3uVpvJ27XXzeBM3ccMrvT5qSWJGTeSh933kapdr/McVA9/LvLEV+5QDY0/c62Wyap30ePAabkP01eExUhuCfpR6sUiK2B0K6Rg2mfXPOa2nw+bTIBCMo6wYdG9zVjwGcE8a57e6o0j5wVh5uNFJ81R7IWIyxSYIkc46brgTej+S9jpwTbzRuvb+B3s5MP0PeZPljPS8fDspsr53DmMke643tK9wHjeHDUbygXtAAAz4UlEQVR42uzdsU5i2xoH8ENijEJMYMCCAkJBRSGBIBoCnY0lRhowITEkUNk5Ca01tRkTG30Cu3km3+LmnnvPuefOjI4o4N57/X6PwP7+rL3Wt9baf/wBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACQGDupVr/xp8KPzhs3rdTMTwTJsJ26aRSm36rl28eL5iTTzVeKz79XrBzm73YveoN5pzQuNPopPyTEZ4jvL6YP5cHFpLv/vBrpvaPdYXteHRWOd/y+EEWpy9OHeW/3pPi8Vk939XbZPwFExMH5qPzY7K459z/bm/Tm3wqtbU8APmN+f1MonV3c7T9/rly+2a5+7/sfgA3JXo7Lw27uOUpy3Yuz0uLAw4F1Rv9+PuymnyNrr3n7rWF5AFbt66L6eJd7joMvJ8Py+MYjg5VM9o9Hg0nlOW6Kk8Ho2NIAfGTc/16u7z3H11N9Pq15jLC0nUK5fvicBHv1csG6ALzZwfR2UnxOkuLu2VSPAH6rNWp3n5Pp5HHU8oDhBdlFJ9Yz/jfNB4alY08aftQvDZMe/r902/emA/C/1/5wwv+XfHv81YOH1Lidfw5SOnO2sFGAoN/7q83ic8gOH+/dN0KYS36noQ79P+wczpQbqoGw1Er1ouz/Y0XgNKsoCMRNtZkT+h/7g49T/wEk3tZi4MX/pe2C9ZL2IAm2XZD+33QGJlX/ASQz/Ytb6X+DXH2kMUDS3vzPB4ey/fb/gHvHB0nSvF/6l38P8B9AEhyfefN/35rgUG+QmDt4mHwR5fdvFLx1dJDYyo7raSH+oMyDJUHiOPEv9PbFdyV7hZuWA4iZVrkruatT6RW2FBUxsX06tNV35VOB0kxlEYfBf67ntxb7g0vVRbTtlDKSuj5HVa8BRNfNoCKka34NaLs8gIjO/C80/TbRFLg+dZEYUZPq2O+3MSdmAkTKuWX/DR8V6FkQJCrv/uOJRG7eZGwmQBTW/Y+E8XN0S1n1x6eqzZ8E8fPsdRwU4BOn/nVH/T59McB5QT7F1qmpfyTagvWFYkT8A9YsKEg2uvJX1fWPlLuRlgCbi7/jPpFz5C+AjUiduegjmi2Bsv2BrNuso+8XWYcdfwEY/QO2f2ZjAOIfsCcTAdbz8l/28h+PicBDVrWyYl+N/jF6C+j4C2CVsiWNv1g50RRkZbbFP366YxeJs5r42/UXS1c2CPNhW2PH/WOr6dogPubyWoxiLN2uqWHerfbont+YK85tC+Cdnb+Bmz6T0BOsagjwjrW/qn0/SWkInCpnljT2dd8krQa6N4xl9OtCkyi5M0sBvNXszOQ/cfZKlgIw+Q95KeC74ua3Fr7vnVj1lvrmVa2hm/6TvCugnFXjvChbLgpJsp2MlTkvvf3fCUjyDc0D+JVU29t/GPOAjn4AP6/9V0QjmH6Ag8L8v2Mf+grJl57Lg/nH4p+dP6HZGyl7/uv7iUDYFECgZhb/wlQpuTWQP8Z7ohCqK4cEQ1dz7C9kubOsDISspPcXuLuGFASrZfgn7SUgUFuGf/7t6FwYAnRwofT5z0pA2d5gs39CfgmwEhDY8D9U9HgJCNWp3j8/uOrLRSBmPeXOT4pVGwODsPCxX37p2umA5NsuO/jHC/Z8Qijp+u785RU93w9JtGpRjfOavF1ByZXS/ON30hqCVv8IWdNaYDJX/9Jqm7eo3ItL4hxcK2ze6MttVmKSpWDvH0u4Mg
1IkuzAtX8sZd80IDlquwqaZfV2JMfrP+G6czzI6j+mAcTZzOYf3t8NsCko5ho2//ABGd2AWLvfV8N8xKGzATHu/rUVMB+U7ghSTB3o/rECQ/3AeE7/D9Uuq9D1CcEYmpr+syJPBXmKW/ff9J8V9gPLLgyNlZSzf1gICFa/q2JZrd0DuYqLU9N/Vs6OgLh4sPmfNciNZMvyHwEvBZ45GhB5O776zfqWArMSFm2tO1XK+tw5HBRpiyc1ylqXAhtSFl3TogplvSp2BUZW2dWfrF26KmnRXP4fKE42oa0ZYPmfgF37hnDkzJrqkg3JpAQuau0/u//ZlF0Hg6Km4ep/5D9Y353+Qf61/8H8PzhV7X+M/8E6U5XIf6i2bP9B/sPd/tdTlch/qLK2/yH/tv+B/IeX/11Vyabo/0XNwZ2qxPgfqprt/8i//IP8B+cmryqR/1D15R/5D9ax47/If7Au5R/5D3f8P1SVbIr+f+Ty7+sfGP/N/0H+w+v/ef9H/uUf5D84Lf1/5D9YtRNVifwHm3/7/5H/YKUyqpJN0f+PmtmVqsT4H6qs+7+Q/2Btu/8T+Q/Wlvu/kf9w+f4H8h+ujqpE/oNV8v1P5D9Y9/KP/AdrUVSWbIj9P5Fzua8sMf6HquUAMPIfrNSRskT+Q5XdVZbIf6hsAEb+A9ZWlsi/DYAg/8GZ2gDEhuj/R895UV1i/A92A4AvgCD/wfpqAwDyH24D8FpdIv/BulWXyH+wHtQl8h+sQlphIv+hOnYCGPkP1kwDgM2w/0cDAOM/UXKmMJH/YLkCFPkPV6OoMpH/UB3kVSbyH6rsRGUi/8HyDUDkP1wjlckm6P9bAMT4T6SkLAAi/8HaqitN5D9Y7gBG/sO1cAQY+Q9WzR2gyH+wtnfVJvIfrLnaRP6DVXAEkPWz/yeqCwBPihPjvwUAkH87AED+g9HIqc4NKlby+cxV82+ZTKabzz9VKsl+DOb/UTXrCuV67d1d986q38aFRr82e+VJ7LQuF9NRtTzo1Sf5nPyzEY8SuqbcX10MquNFLfu+hZna+bR6O0zIP4H8R9a9pK5Y+uR60Jlezla1RFsrlG7r+bT8swY3vgK0wuh3L+b3jexaHlT2eNp5vKrIPystqyuxXYlcpv1wvv517q3+6HZSlH9WZC66H/bl6LG0pmH/hUnB5bd2Jif/fNi5M8AfU6mXC18/593tvHrxJP98qAPoErCPDPztce2TH2Bt3I76I7T/J8LaYvzeFl/v201EHuJxqXdo/OcdvjsD+B7FZqexFa0n2a82c/LPclKHwry0/OB7NN9pZ+PHJ/lnCRfivGSrr166ifQTPe40I7Wqa/4fZT4DtNyL/8UoDsNZ6yE6/wHG/yjr2wK4xJrfoJCNz9RuVM8Z/3nd1rVYv3Xs751mY/Z0a9XJF+M/rygJdlLT//d7QNr4z0tDREW23zKKPRzEeZY3PzT+80u+A/iGjl+nFffHvF0Y5oz//MQlAL+TvphmE/GoW+VD4z8mAEs5KiWpghvtovGffxiK+GuDf/10O2n/+OU94z9/mQr5y57mrSQ+8+zoTv7504HvAL3oapxN7HNv9DbSF/T+H3WuAX5J83Qr0U++Pyga/4PnQ6AvTP17l8l/+KnOofyHLes7IL/c8df+F3t391pVdsZxHDuHUxMJnJNjLnKRYFuv5kBEaxwJyV1upJBiMExRi1Mx1UCJ6YsjIzFY28mL08lMMDYmFsepUIIhNVYnTsaCMuqFMBRkYCi0xevpks6/0HHGaF7Oy1777LPWetb6fv6FZz2/s/c+61nrbCD1v9hJ/4esmWZfb0t3SzgrIH2gnf4P1m4uAlyndWtg361Se6pzGjzf/9wvfQ39vrb9mzIBLoTN/fR/iDgFJPRf/xc/BUc38fwfnA11tPyqXT9tmXAXQ+poO/0fGLYArPr01xb4M2vqUHK3CvD8L+HFjy0ALzW0HWNFZJrq6P9gpKdp+2X1HTtZEM+09GR5/g9EG32/bN
tulsOynbtq+f0PIus5BeC5zkOshpW21/D7zxfAgF7+MyyGNV8DD9TR/777A18An6ntYbkWsLE5y/O/318AN9H8XzuynaVQ2N4T9L/PLtL8X3650JRmJRT9iWha4Pnf3y+AHAPE0385b/bU8vvvqR76v30zy6CcfdP8/ntpd23o7d/YnGEZlJfR+hhI/0sR/FWgR/ayCKI5vIn+986e0D/+HUixCKJKNUW8OZ73fzHPdYGfA3jwVdaAjrM76H+vhH0XeEMTP/+a0lsbef73x7HWkPu/hrG/GPbW0P/eeCvkj//8/Mf8ErA1y/O/H3Y2htv/R85S/7h2T9P/Xgh3CrC+O0P5K3h17OD53wPbg50CzO2j+pU5usDvv3jB7gHa1kLxK3W2n99/4W6H+vXvALVPQLoty++/ZKn+MPu/k7n/hGyu4/dfsENh9n8HR34npmUHv/9yn+CC3AScbaLySS6ibvpfqiA3Aed4/E/Ynxrpf5EyuQD7v4bRn8Rtz/H+L1GAV4HUN3PsXxUcO8jvv8CqhXcQYMNRyl4VqbZ6+l+aZl7/kZg9PP8LsyG4u8B2sPkPWNYdWv938/oPLGsJbAyYf/+BcB8Attym5ECoDwC53ZQceCmsvwCmOfkPWGFDQ0j9v43hH2Clgd8G1P/7MxQcWOH9LvVpMB8Bmjn5F1jlvFLqXhj3AdRvpdzA2geArz0I4TWgkd3/wNovAOob+SdZ3/u/YTPVBlY7vaSe+7zO7/5fYPoHWOuceuHMX33u/1a2/wBrpa++DAA19EE9/Q8EpFet8k9fx4I72f4HrJO6tDoA1JnXvOz/1zj8D1hvUq019IGP2/85/QMoYEatd9O7bYHt/P4DBVxQhdzp9Oz5n99/oJDjBQNAdXm1LbCd/gcKeTSkivBoOijH93+goD5V1L0c/Q947dsxoCIe+LEtcIH9P0Bhg6qU/GdZ+h/wVnpMlSZ/OqhhH2UGCrurypE+HZRl/hcoZqZsAKi86OmgWs7/AIoZVlHcFHxiMOd/AUVNRAoAwdNBzdQYKOZWV7QAEDsd1EONgaIGVGSfSnwNOMj1v0BR6bnoASBxOujIRmoMFHVX6RC3LTDHABBQwpTSI2s6aAsbAIESFvOaASBqOqj2NhUGSrihtAm6O4gNAEAps9f0A0DOdNAuCgyU0qti+XxBxB8AGQoMlDITLwBETAd1bqC+QCnz+ZgBIGA6qPEw9QVK6lPxOX53UD0TgECZT4BfVBAA6s60ywHwFuUFqvIJ8MV0kMP/B55gAgAoY1xVyNlDAnJ8AATKeJSvNABcnQ7Kbqe6QBkDqnJu3h30IcUFykiNqSQ4OB3UQXGBcq6oZNxr5QMAIM5EQgHg2nQQO4CA8k53JRUAjk0H8QEAKG9UJcihu4MOUlqgvJkkA8Cd6SA+AAARXM4nGgBq6IkT00FZLgEEIhhQSXNiWyAjAEAUlxIPABfuDupnBACIYFhVgfW7gxo5BBiI4ryqCsuvARwCCkSRulqdALA7HbSDFwAgivuqWizeHdS4l8ICUfSp6rE2HdREXYEo0teqGAC27g46kaKwQBSTqqqsTAc17KSuQCQT1Q0AK9NBzAAB0VR2GnC0Q8PreAEA3PRQVZ/hbYH8AwBE9dhAABi+O6iNqgLRpJeUEQbvDppmCxAQ0RVliLHXgHqGgIGo+kwFgLHpoF0UFYgoNafMuWliW+BCC1UFIrqgTDIxHXSRogJR3TAaAAbuDupnCwAQ2ZgyrMrTQfVcBAhENqyMq+500J+pKRDZgPkAUGeq+BqwhS+AQHSnLASAyn9Wyx5AwL5beWVFte4Oas9QUyCyUWVJle4OOkpJgeiO2wqA6kwH1VBRILrZLmVP8tNBDAEAOiaVTYlPB+2nooCGPqsBoIaS/T8wyzEggI4xZVmidwf1UFBAw7yyLsHpoAb2AAE6Bu0HQILTQdwFDmgZVy5IaDpo4U0KCmg4PeREACQ0HcQmYEDLpHJEEleI1m2koICOG6
4EQBJ3B22lnoCWS8odlU4HtWaoJ6DD1iRgVaaDeAAA9PQqp1Q0HcQDAKBpQjmmgukgHgAATVddC4D42wIXjlFOQMuick/cu4OaKSeg55xyUazpoIYNlBPQc9zJAFD3YrwG/JFqAnpSS24GQIzpoOxOygnomVfO0p0O6qCagBefAJ6/BrRqBcBhqgloGnE4ANQDndeAExQT0DXncgBoTQfdppiApkXluMjTQe1cBw7oGnU9ACJPBzVRTEDXhPMBEHE6aIGDQABtY0qAKNsCu6kloOuyEqH83UG1bAICtF2XEQCq60mZANhGLQFtfUqKMq8Be6gloO2UmAAofUhALk0tAV2zQ3ICoOR0EAcBAPruK1GKTgdlX6WWgLZBWQFQdDroIKUE9I0IC4Bi00F8AgRiGJMWAIWng1r5BAjoc+tKkPjTQewCBGK4qyRaNx1Uv5dSAvrOiwyAddNB/VQSiGFcCbV6W+CHVBKI4QupAaDuTK/YBMBtAEAMj5RcK+4OYg4IiOO6kuzFa8ABKgnEcF50ACxPBzVyFBAQx5TsAHg+HbSfQgJxzCnpnm0LPEQhgRhuKfnu5RoyVBKIYdKDAFBnuBEYiGXAhwBQvRQSiGPEh/7vOk0hv/FdyPOx1SVz0ocAmKL1v/VfyPNrmyvm9JAPATBK6xMAYv3c5oq54EP/D92i9QkAsb56xeKKGfUhAGbofAJAsL9bXDE3fAiAQTqfABDsPYsrZsqHAFik8wkAwd6xuGLGPOj/MRqfAJDsJ/YWzGzegwDoo/EJAMme/tDaghn24Q1gksYnAER729qC6fVhG+AsjU8AiPaJtQVz3oMAGKfvCQDZ3rW2YI7zJyABANt+YG3B+DAJMEzfEwDC/c7Sekl7MAmwlKLvCQDhfmZpvSx68AAwQtsTAHwFjMeH44CYBCQAxPuVpfVyjn3ABADC/QrYJ7//r9L1BIB4T/9iZ71M8QmAAIADLO0F9OBfwHN0PQEg3y+sLJdUl/wAmKfrCQD5XreyXC7L7/9rND0B4AE7E8FX5AfAcZqeAPDA/75jY7l4cCDgAE1PAPjAyrmAHhwIeIWmJwB88G8by0X+rUBDXAlEAHjByu0gM+ID4BI9TwB4wcpm4KviA+AxPU8A8DdATB4MAzMJRAB48jfAG2wD4DAQAoC/AQy6L/880DQ9TwD44T/mV8t18QFwipYnAPgbIK5BvgESAAj3bwD5pwEwCkgA+OKn5leL/DPB79PyBIAnvjK/Wk5J7/88+wAJAG+YPxp8TnoAcC0wAeCP37APSNcUHU8AeOM904vllvhPADfoeALAGz8yvVjmxQdALx1PAPA/YFzybwW5QMcTAN4wPg7Uy58ABADc+R/wFTYC6uFOEAKA/wHjE38g2DgNTwB45PeGF8sEkwAEANzxN8OLRfy9YIM0PAHgEdO3A4nfCXyXhicA2AgQm/gTATkOiABgI0B80i8G5F9AAsArhgeCZ6U/AMzR7wSAT75vdq2IPxKU88AIAK88/Z7RtSJ+FIB7QQkAv/zD6FoRfyZwH/1OAHjF7IkAD9kGQADAJWZPBhc/C3SdficA2AkU2zmGgQkAuMTs1QAD0gPgMv1OAHjlHaNrRfowYJ5rwQgAv7xrdK08Fh4AS7Q7AeAXs1sBR4QHwEnanQDwy4+NrpVx4QEwQ7sTAH4xeznQjPAAGKHdCQDPvGFyrVxiIyABAKd8bHKtnBQeAAO0OwHgGaOnAko/D4SrwQkA37xtcq0sCQ8ArgUiAHxj9FhQ6QcCPaTdCQDPfGRyrUi/G/hftDsB4BmT00DiTwSbp90JAM98YnCpvM8sEAEAt5g8GFz8kYCcCUwA+OaXBpfKovRhQLqdAPDN6waXyrDwAOii2wkA3/yfvfN7rfLKwrBE49yEzF1TOoHBhE47pIKQq1JonUppaVpTGJikFykJxYAzKZofELE2vTA2atDoWBEMKYIZFWuRihYrTkWGttqiRalKay0tMxebL4djEP+AmS
Z1qHpST3JO9t7v2s/zJ6xvvS/r23uttX3OA+9mGhgDgLjoxACKZj9qxwCs4XMhgPpWcNYBYADmaPWYKvvEDWAUtWMA1mjGAIqGh8EwAAygBN5lIRAGAHFRjwFgABhAuvh8H1j9ZbDtqB0DwADmzhkMAAOAuJjEAIpmD2rHAKzhcy3wUQwAA4C4yGMARbMGtWMA1shhABgABoABYAD8AmAA/AJwCIgBYAAYAAbANSAGwC0AjUAYAAaAAZQLWoExAEi4EQgDwAAgYQNgHBgDgMjwOQtwnYUgGACkawDqK8G2oHYMAAOYOywFxQAgMuoxgKJhLTgGgAGUAA+DYAAQGT53AvI0GAYACRsAj4NiABAZPt8F4HlwDAAiw+fLQIfUDeAD5I4BGMPn24ALBsQN4DpyxwCMMe4zV1aKG8AZ5I4BGMPn8+ALDosbwCrkjgEYo8VnruwXN4CtyB0DMMawz1zpFzeAtcgdAzBGt89cGRU3gNXIHQMwRpPPXNkrbgBDyB0DMEavz1zZzkogDACi4pTPXBkSN4B+5I4BGKPLZ65cEjeAq8gdAzDGNZ+58qH6OOAh9I4B2GLEZ66sZRoIA4CoOOYzV7aqG8C/0DsGYItNPnNllboBHEXvGIAtBn3mivrbYBMb0DsGYIsjPnNFfS84rYAYgDU2+syVD9QNYA16xwBs0e4zV46rGwCPg2EAtsh7zRX5nWCH0TsGYIpJv8mivhKogb3AGIAp6v0mi/pGkIndCB4DsESr32RZTyMABgAR0ek3WfaoG8BaBI8BWGLcb7LsVDeAnQgeA7BEi99k2aVuAKwEwQBM0e03WTaoG8B+BI8BWKLXb7LITwM17EDxGIAhrvlNln3qBsBAMAZgiht+k0V+GGBiM4rHAAyxyW+yyD8QzjwgBmCKdX6TZZH6+8AT21E8BmCIjZ6z5X2uATAAiIbcEs/ZIt8LzDUABmCISd/ZMiR/CLAPyWMAZmj2nS2r5Q2AtYAYgB3afGeLfCsg0wAYgCHGfWfLUXkDGEXyGIAZhn1ny255AxjgeTAMwAxNvrNFfi0oS4EwAEN0+c6WxfKdQDQDYwB2+M57uuznFBADgFjo854ue+UNoB/NYwBWOOk9XfQ7gQZYDY4BGCHX7j1ddumfAr6L6DEAG9T5T5fN+gawFtFjADZo9Z8u+juBeCEUA7DCuP90eU/fAD5ehOoxABMMB8iXlfoO8A6qxwBM0BsgX0b1DWArqscATDASIF/W6BvAEKrHAEzQESBfPtQ3gMOoHgMwwboA+bJV3wAmPkL2GIAFGgPky7sGDIB5IAzAApMh8uW8AQOgEwADoA9ojhgYCKYTAAMwQU+QhOk3UAKwFAQDMEBTkIQxcA/IOAAGYIFrQRLGwDzgxF50jwHQBjA3VhkwAHYCYAAGOB0kYd4xYAATnyJ8DECdAOtAfuKQgWuAiUsIHwNQpy5Qxli4Bngf4WMA6rQFyhgL1wCMBGMAtAEkfA3ARSAGQBtAwtcAE+tRPgYgzkigjNltwQAGjiN9DECbvkAZ85aFawAmAjEAdQ6EShkL1wAT25E+BiDNZLCUGTLxD7AD7WMA3ALOhQ0WDGBiFdrHAJRpCZYyFpYCNXz9b7SPAShzKljK7GiQ1/+tr1z1EsSPAQhzLFzO7FfX/ycHnXN/QvwYgDCnw+XMHnH9f1/7P/27lxE/BqBLvj1czmg3A9/+wk1R+zDqxwBkqQ+YM2fky/8pHkX9GIAs4wFzRvmJ4Ctjd/TvHkL9GIAs3SGT5qp6+T9F1WvIHwNQZSRk0mxXvf37p/sly5A/BqDKYMikET0FPDd2l/7dY8g/aU4I6z/XGDJyn2o2/1W5e3gKEaTMsLABNAeNnGIv4D3l/xRvIIKUaRY2gJ6wodsiX/5PsZx24IQ5kBM2gN6wsduppv+vK10hnkQG6dLBJMCc2Sp8+/dLnkMG6dKkbABHwsZOay/gNwdn0L+r+i06SJZOYf3fXBg2dodWCun/s1o3I3
9BB6nSflPYANpCR2+9fvk/RU0FSkiUQeU/gOHQ0VutfPv3S9gKkCpdygbQFTp6R0X0f2Hs1/XPRFCy9CgbQF/o6B1v0Cj/q9yDeAYpJEnFJI3ApdBvoPyf4lW0wBEAjcCz5pKB8n+K6kcQQ4qcUjaAlvDxi/6J0AKzP4X5G2JIkU7OAEsi9q1At75yRfLCQtSQHo15zgBLI+7d4OfGXNG8jRzS45iy/vONEUQw6nmg72uL1797HDmkR7eyAbTGEMHNqs1/3ASC9i6AGM4AFyz4SGDxd5G8gh5S42LGGWCpHI518Xf1LPXvKpkJTI1rGWeApbImztu/L6vcrKEZKDW2Kes/1xhFDKM8BCiq+e/+ZiBeCUuLxpucAZbMednmP9YCpM4N6T+A7kiiGN9m0Bk2/z2YmpcQRUpITwJmJyKJ4iUT5f80TyMK/gBUuBhJGCPbCXBubO76d8spARJCug0wq4sljDsGTJT/lADJ0SJtAD3RxDGixYDFz/5wCpA87dp/AF3RBDKeN0K/OetKhRIgGaRfBAn8LvBd7Itm8Xd1yfp3NTwTxh+AAjfjSdRIXgeY7ewPJUDifwCT0gbQFlEot0vO/szA67QDpoF2F1DWFFEoY3gi8Eq1KxP/QBtJ0KltAB0RhfJ88OXgc5r9mWki4M+IIwEu56T1n9sYUzBDLwcvpfnvfp5FHQlwSrsAaI0qmB8KN//dTy17ARKgXtsAuqMK5vWwi78rXXl5GXmYp09b/9mxqKK5+KqV8n+a3yMQ6wxr6z93IK5wDoUr/8+WX//ued4IMI74IGBkRwABHwj6rNrNB7wWbpwTGUcA5STQRODtr9z8sJSGYNu0ihtAR2wB3avc/EdDcGoMius/3xhbRNdKN/8V6AbiKtAyPeIG0BZdRD9Qnf3hlZAEuZwXN4Cm+GLab6f8n+ZNdGKWpowjgHLjeSvIhbF51r97jKtAq7TXcQRQdt7xqv8vK928swylGGVEvQDojDGqHp8HmI/mvwKLAV5DKiZZqH4HmJ2KMaz+BoLOjTkvMBJgk03q+s/WxRjW3b70/32t8wTngCbZpq7/ujiPp/ZrLP6eBS+wI9wgg/IFQEucgV0tsvib7WBp0yNvADfiDKyP7eCfVfvUv6t9Br1Y43ROXf+xjQLfYfHHsrM/MzcDVKAYY7TIFwBtsYZ2tXrzXwF+h2JscUS+AMh6Y43tdVPlPz8BFABx0hdtcOezF+j2F1UuBCvoCLbExby8/ifj/S2dx16gW5+7QCxDNYbo1i8AeuKN7vzNBF84G0r/bAawxGX9AiA7EXF8R+fr3Z9KF47nuQngBIBJwKJYa6z85ybAFoP6VwBxTgLeYV4eCTx3Nqz+XeUTSMcGnfr6z7qijvB65dmfGXmRmQATdBjQf+5k1CEu+0Pht791EfAq4jFARbMBA2iNO8bvlfl9gG8Ouih4G/noM2JA//G2Af7MHv3mv0IsZzuQPI11FgxgXeRRPmqu/J/mcRoC1em1oP/62KN86LCZ2z+eCjLFxpsWDKA7+jiXbSTwwlhM+ne1T6EhaVos6D/bFH2cy7UePGjzX8G7wEcQkTAdJvRfJ9CVOmqv/J/mIY4BhE8A600YwLBAqDfYK/9pCeYEkD+AYjk+YK/8n6aKNeGqnM7zB+CNNQbL/5+7AZgM1mRhZ8YfgDfO2Gj+K9gNwGSwJCcy/gD8UVorQDTNfwV5AzEpngDWZfwBeKSEl8L9L/6eJY8iJz1ajBQA3SLxPj/nY8BPDkauf1f7V/SkxncZfwB+2WOy/J+mhrEgMTZOGtF/nUwjyqdz2/z3bZUTgINAMbZl/AF4ZtEWU7d/9/CHRYhKiBsZfwDemcNy0CtjTgU6AoU4edOK/uuFWtFn3Q3YEGnzX+GOwCfRlQpWWoAEdgHdxZDR8p+rADGumdF/7ohS3PdpLf6eLUvpCdbgiJkfgHgfBS9M/yz0/0OtU+PFhxGXAI3NZvQf+XsA97HBTvNfQVYsQV7x02
NH//mNWqF/62qxsz9nnSTPsR4kekbs6D8bVwv+ajvNf8wFabIub8gAfqMW/aIGAkSa/wqzDIlxAOCLyXa5+K+xMPvzq+0ATAZyAMAqkBl58E3glRonTeXfURkHAH7oE/wCo5qb/2ZBNQ1B0TJo6QAga1Y8ct5sqflvBgd4AqXFyZFJUwVAl+I3+NXVYBfOOgvU/BGtcQBIE0Bhdlku/+84wDOoLT4qxk3pP+vR/AzvDVgu/6kB4qXblv6VNgHcxU4jsz84gBbXjOm/WbXr9KMGI7M/OIASm/LGDOCU7Kco0Ax0+0dnjRcYDo6Ji3XG9J+7LPstdku9+0MNYIGT9cb0n20T/hp77539ed1ZhLuAaDjQak3/WYfw57h7QXjDj1XOJkupAeKgvdOc/uuk99CPGr39wwHQP0eARbBKcvH3HFhOV3B4KrbZ03/+gPQnWbzl/6P/lc401W8iwMAsbLGn/6xF/KNssF/+33EApoMD02tQ/9mg+Ec59L6h2Z8H7AdgQwj6Lzdt8p/lpxLgh2qXAlXLUCH6Ly8n5L/LW4dvf+5S4Vl2BYeiyaT+69r1v8x/Drp0eIXHw9F/Gek18GkqliZkAG4Fbwah//LdAV628HGeTskA3GOvIUf0n/YmkHt4qSYpB1j6FILk/K889Nn4PmmVAK72SSTpkyUtVvXfZuUL/Ze983lpM1vjuCS6C2aXSBIIUZJGiBBwJYIxCTEhNSgIGV0oLYILHRftzKLgwkXbVRkK/g12QOiqMDAM/RsGuZu7mdncVRBRh3I7vTOde29bO63WRN8k57zve87z+fwHec/5npzzfJ8fsq4ArVB8EFm6xuS+rfo/+sWWNRJ2BcAMcJGHq9bqf8GaXRSLSjsBIpgB7vDzgrX6N3MaQHvy0g6A1gTVgW7waNRe/Z9N2rNOgZq4EyBBZYB+7p7aq/+j2zatVLoljxXygjXz64nF+rcjCejTFaAu8ATILqFRnXvq3pHNPLFrtYoCD4BWlCYh+vh+32r9Hz+3a7mCtySeAENbZAToCv8vW63/o33bFmynJRL8QMJ/IjsBXaUs8wSoUxqgga+PLdf/hn1rlpR5ALQSPANUk3pyZDuPLVy2itAToFXhGaCU58vW63/VxnUbH5J6AkRpGKyQH06t17/R48A6U5J6ALRCTaqDFDH5lf3yP1q189U4si72BGjNEQtUwoMFAfo/emHp6sXlHgCtIS4BKqL/JxL0v2xrEvn8hOAToJUNI2Cu/0741doVTEs+AFrraQzBfrg7KkP/y/ZWkclMCL6QF8gloPe//3vHMvRvwTSgzmzKPgBaw3EiAb3xeEyI/G2+ALwjIvwEaGXHEXP33BHz92+vBXBOYajFJQBBd8mjBTHytzUH4BMrLfHMbSLprv7+vzmRo39LkwCxAi9XBzA/zDkH/xEkfzurAC6vJ/pvtaJpGgY6Y63RevWnnACALdPArmEwi/7fvwNoF+aAQHz4/cf6/UyK/vcFLOr0EPL/8A4gKeAmdv7uJfvbWxn6P34gYVlLiP88M3ALP+A6ljKhT9/q1X9FHADfiVjYkVnEf059F5l3IhW/XDz6lwAr4PiljLVNI/2/qWIJtn/8z1z5l/jD/lzAJ0JWV3pJwEVCi4QC2jz+2+2Qf9vuB54+lLK+xAEvMJThCLjMZqcO0n/Z7Qd+I2eJiQNeyg5u0jb0M+ELsb8vsdoPHJ2Us8jkA35xBKxwBJwzvhi67kP99sbeA+CfktZ5CtFfZjYeQ/0D4VLihu9krx+4LMsUrqD5K7cA6dOEC5WQg+/02tK24HeFnfXDSJ4j4FJk2JH8rfUDN6StdxzBt3kIiA0HFjLOnSEb/cDjR9JWPDCH3tvdArYFmoKDe112irKvPvC+QLc3hNzb5gUsFoT9FaS7/y/43bIOwac/Crz1kQzQiciunHYBI/FoL5/IMj/wJ4nPvhjJAJ3LhLZkuIKFUq/B4Fd/WqT/hTsiAz+7CP26YID1/YNTB+V+vp
BFfuCLAZlk0Pl1VNMpixc/udivE2xNm5B9ofrnEXCjLbhiqScQm1FhAlniB548F5v8UUTjN3kCjUP7UkRzK6p6wry2oU3I7QG5LCLxm68BpZxd6ld57/vDfD9QUhXgFZZoD+aEct6SDMG1rariT2O+H/h0QDKUBTqNCOaNLxRYmtGR/Wn62IAN2SMiBikLdEqiMjVv7kKH85GEpg9jdJuQk5cDslmKIm3nIcGskfeAwF6ppvOzmOwH3h6QDo+ALu8BB2bFA1KHJe1nvLltQsYmxR8AOAHd3wNmTKkXKmxV3Gn8YKofeBf9kw7UC7XtHb+nCS5NZVxcWTPbhNxH/u9IUhjc00Wg2kz6NUkoVizVXf4cJqYFnj1E/e/ZRs09Es0c+C5ZeK34LJvw4mOYNzbga7R/HiVmVFAfrEdmcj65CcT24pF1776EaX7gahDtn5NjVFB/zFbiSW87CAQLUyvlhMefwSw/UF4fwM7QIrR/QnOl9LQXV4HAdHq77I8uz0b5gaQAXNhDZQSsJk2gWkrnXPMHguGdfOaWr65v5rQJWb6D7j8TpipIoT8w12hO6b0MrG2mnzXmEj788ab4gcffovqL0BpA+TFQr2xv7RaU1g8El3LFrZVG1c9jXQzxA++h+cvQJFiXVVhurGwVc2t93AhGxjeL+WYmW0sY8YtN8AMXyAH+ghSTQnRbBXPZxnY8f3C4WVi7/l4QWwpP55LFqfTMs0zk1oRxHo3/xwbwALhKgXGBbjI8W6tXy5HPlKvVav0dUQvWwfdtQp6g96ukUSWo8gP9PTaAIsC2NNi4oMwP9HN9IEWA7Z+edfYtKHsGvMUBMC4lOMG+Bev9wGUeAJ3Is23B9mfAyQOE3hGmhYFCfDk24CdkTjYAuPQM8J8fuEER8HWMr7NrQR2+Gxtw+jMipygA3MNnbUKeIvEboEEYKMVXfiBtQG8OA9AgDNQGAvzjB47SBvRm1hgWBGrxix94/Bh5O+CQPuGgFp+0CaELmDNm2LFg4TNgNYC2HTHIuDBQjfdtQs5+RNoOmScQCKrx3A/8BWE7JkwgEFTjcZsQmoB0Q5JZIaAaT8cG0AacQCB4jXdjA05founuAoH0BwL1eOYHMgi060AghYGgHo/8wK8QNIFA8AVe+IEEAHphmkbhoAH3xwacPUfNvVAkJxg04LYfeEwX4B5hbDjowOWxAZQA9GwF0CMQtOCmH7hPE7CeCWTZq6DlGeBam5Cx79Fx74zU2KugA7f8wJNHqBgrAHzI/47JADKAXawA0IMbYwMoAeobpgaDrmeAdj9wI4WAMQPBr+geG0AAUIkZWGKngia0tgk5ZQygEoJUBoIuNPqBxy/QrhpSZTYq6HoGaGsT8g3KVZYOQG0waEPT2ID7ZACqY3yWfQq60OIHLk8iW4VskhAE2tCQFjhGE3C1JBPsU9CG6jYhGADqUwJpFAz6UOsH0gKAlEAwC6VtQqgAICUQDEOhH3gPsWrhGbsUNKKqTch3GIAkBYOBqBkbsHoHqepKCmZuMOhEhR849g+Eqi8pOMsmBZ30XR+IAaiVWJU9Cjrpc2zA6beIVO8JQGEQaKUvP/CEBABOADCbfsYGPEWgvALAdHr2A6kAdoORW2xR0PsMeEsCECcAyKUnP5AOAK6dADQIAd3PgK7bhGyQAOQaazV2KOil2/rAZVoAu0i4zg4Fzc+AN+ifOADIpZuxAcsP0SQnAEh9BlAAwAkAFuLQD6QDIDmBYOczwEmbkNHnqJETAOzkZj8Q/Xt2AmTZn6Cbm9qEnFEAzB0AbA4EvKEA2K/MR9ifoJ1rxgacPUKFXhKgSxjop6MfyP3fa4J0CgXPngHo33sGm2xP0E5bPxD9+4KZEPsTtHO1TQj69wlpTgDQz5d+4Cj69wsHTA4F/VyuD+T/30fsDrM9QT8X/MCxl8jOR0xH2Z2gn09jAxao//EXtAgBN/joB1L/7ztGsuxO0M+HsQGr9P/xH6kKuxNc4PXpxiRy82
NS4DabE1zgX3cQm09TgticoJ3FAErzK3kSAkAzJeZ/kBAAYmkOojI/U6ixR0EbiQMkhh0IUhneQ2C+J5Bho4IWotPIywAGqQ8GHdTGEZcZFAkFgnKqSyjLFDapDQLFZEfQlTmE59ixoJIM6T+EAkEqQ3kkZVwokKxAUMT6DoIyj51Zdi4oCf8XUJORgQAmiIMCyoT/DWW+we6FfmmkUJKpBJvkBEF/PKP6z+icoHW2MPTO8BQaMjwQwAhx6Jk64T/zMwJW2MfQGxGy/2xgitIA6IUmz387GMcPBJ7/gonhB0KXRDfRDc8A4PkPVpCrsanBKSHcf+uYpz4QnF7/k+jFQopUB4ETsmuIxUpICgIH13/cP2sJxKkNgOuZPUQnFrPLMwCu4xatf+1mhCni0Pn6H6f1n+0E4wk2OrS//hfRhwDGq2x1IPovORZIw1D4kqE40X8x5JgbAJeZIPdfEjHyAuEiETp/CiM5wa6Hj6xT+ivQEKRGGM4pY/6LZIdLALRaifwgWhAaCSiRGiyeKn//ki8BNRQg3Pwj9w87AMSaf1T+kxNAToDYv/8mY79gYH6bSIBI6uT+wAcKdAoRGPyf4fUPHwmmmSEozftn6hdcYGkRTQhiOE/lD1xmKoou+PsHuaSaVAmLIDpF6h+0dQQZIiiADIV/0IFB3gG2cyvHNofOxJq0DLSYdYJ/cAPj9A22lgZd/8DBO4AyYSuZoOkvOPMD4kwTt/D2T+Yf8A6QSiXMroYumM4iGoti/9T9QLfs1RGOHUTTxP6BUIBQQmT+QI+QFWA+i/T8g94pRJCQ0Y//HfYw9EWOI8BY6kXKfqBvkjQMMpJZnH9QQnAKQ8A4EtvE/kDZEVDkCDCKoRJp/6CSQJoKAYPkT94fqGY+Pou0jCBCzT9wC0D+AKpvATO0DPK58X/ILgVuAfz7A3AEyKKC/MENUvkaavPfvz+TfsEtgntkB/qK0CKzPsBNBveyyM438m9MsyPBbQoZRgn5geEV0n7AE8IrtAzxmol8jI0IXrHUJDHAU/lvzbMJwUsCB1V06FXWzwEFv+A90yVeAu4zlCHyBz5hZIbkIHeZjVPuDz4ilWa2uHvUtoj8AS8Boa5/JUmrP/DnS4DGQdrv/tvk/AHXAKF//pH/t3c3K41DUQCAB53ZSbPLlEYILhojVMg2FPonbaVYKAiFbtx0Z+kTuOg79B268G18pIHZzDAyjI5Vm+T7niH35Nxzzr23r+7PYbuLFATf6ee/8POnAI63Uy8K7V2an/m0KIibPDyxZvdn0nHLL8VyEV1buHtRG7Q870vxHG2Gzgu+fd43q/uUKKh6ltoKvMH3hXlfCh4DmmLA/65+qT9lEGRODL7WMtlq+VMWX1sL0wGvGfhpGvanXI5bO22Bl/iWZnp+lHQvoB7wr8y/79+PGFDNWd9ka9qPCsSA0Kjw85p/srH6qYijeD6y5n8V/dLOhY8CiUAlt/2z3KwflVR/nPWqvfovk76Lval0ItCcLSta85s1vekDX87i+bhirYHTsNM25wu/JQJVeWTo5Hyt2Q/P3PYXZc8EamEU6/bB39zlg7I2CE/T+YOKH/xLfbNLS3a58OV01XK6D15eFMgXaSnmBHrDKLblh9drxNGwyKXBk1GS+fHDW9y0smR8WrwN/yK/sPZhP7nAfZacFyQKLK8Gq3tJP+zZUbcfTUcHfNNwbZx0toGnO+F9w8BsdGDZwGU4WD0FZvvgYxwHcXM+Pf/0YwS98SzK29r78CnqrTxKwtGHjw3URsP1atO21YeDCATdbXOepJN3jgS1STrbZf3WrY0+HKKzoLVpRutpOtrb9mA5uRomu9XjQ9fNHVCcOkEjaMebxyzaJdMwHU8mvZfkB8ve9WQcTpP1vNPMn+67t7r5UJoEoREE7Z/iP7W7wV3DagcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMrjB5u8iKa7CD0yAAAAAElFTk
SuQmCC diff --git a/gws/1.0.0/requirements.txt b/gws/1.0.0/requirements.txt new file mode 100644 index 00000000..a3b996d6 --- /dev/null +++ b/gws/1.0.0/requirements.txt @@ -0,0 +1,6 @@ +requests==2.32.4 +google-auth==1.28.0 +google-auth-oauthlib==0.4.3 +google-auth-httplib2==0.0.4 +google-api-python-client==2.0.2 + diff --git a/gws/1.0.0/src/app.py b/gws/1.0.0/src/app.py new file mode 100644 index 00000000..211055c3 --- /dev/null +++ b/gws/1.0.0/src/app.py @@ -0,0 +1,130 @@ +import socket +import asyncio +import time +import random +import json +import requests +import secrets +import string + +from walkoff_app_sdk.app_base import AppBase + +from google.oauth2 import service_account +from googleapiclient.discovery import build + + +class Gws(AppBase): + __version__ = "1.0.0" + app_name = "Google Workspace" + + def __init__(self, redis, logger, console_logger=None): + """ + Each app should have this __init__ to set up Redis and logging. + :param redis: + :param logger: + :param console_logger: + """ + super().__init__(redis, logger, console_logger) + + def reset_user_password(self, service_account_file_id, subject ,user_email,new_password): + service_account_file = self.get_file(service_account_file_id) + service_account_info = service_account_file['data'].decode() + + def generate_secure_password(length=12): + characters = string.ascii_letters + string.digits + string.punctuation + secure_password = ''.join(secrets.choice(characters) for i in range(length)) + return secure_password + + if new_password == "": + print("Generating new password") + new_password = generate_secure_password() + + try: + service_account_info = json.loads(service_account_info) + except Exception as e: + print(f"Error loading service account file: {e}") + return {"success": False, "message": f"Error loading service account file: {e}"} + + SCOPES = ['https://www.googleapis.com/auth/admin.directory.user'] + + creds = service_account.Credentials.from_service_account_info(service_account_info, 
scopes=SCOPES,subject=subject) + service = build('admin', 'directory_v1', credentials=creds) + + try: + result = service.users().update(userKey=user_email, body={'password': new_password}).execute() + return {"success": True, "message": f"Password for {user_email} reset successfully.", "new_password": new_password} + except Exception as e: + return {"success": False, "message": f"Error resetting password: {e}"} + + def get_user_devices(self, service_account_file_id, subject ,user_email, customer_id): + service_account_file = self.get_file(service_account_file_id) + service_account_info = service_account_file['data'].decode() + + try: + service_account_info = json.loads(service_account_info) + except Exception as e: + print(f"Error loading service account file: {e}") + return {"success": False, "message": f"Error loading service account file: {e}"} + + SCOPES = ['https://www.googleapis.com/auth/admin.directory.device.mobile'] + + creds = service_account.Credentials.from_service_account_info(service_account_info, scopes=SCOPES,subject=subject) + service = build('admin', 'directory_v1', credentials=creds) + + query = f'email:{user_email}' + + try: + results = service.mobiledevices().list(customerId=customer_id, query=query).execute() + devices = results.get('mobiledevices', []) + except Exception as e: + return {"success": False, "message": f"Error getting devices: {e}"} + + return {"success": True, "message": f"Devices for {user_email} retrieved successfully.", "devices": devices} + + def suspend_user(self, service_account_file_id, subject ,user_email): + service_account_file = self.get_file(service_account_file_id) + service_account_info = service_account_file['data'].decode() + + try: + service_account_info = json.loads(service_account_info) + except Exception as e: + print(f"Error loading service account file: {e}") + return {"success": False, "message": f"Error loading service account file: {e}"} + + SCOPES = 
['https://www.googleapis.com/auth/admin.directory.user'] + + creds = service_account.Credentials.from_service_account_info(service_account_info, scopes=SCOPES,subject=subject) + service = build('admin', 'directory_v1', credentials=creds) + + try: + result = service.users().update(userKey=user_email,body={'suspended': True}).execute() + except Exception as e: + return {"success": False, "message": f"Error suspending user: {e}"} + + return {"success": True, "message": f"{user_email} suspended successfully."} + + def reactivate_user(self, service_account_file_id, subject ,user_email): + service_account_file = self.get_file(service_account_file_id) + service_account_info = service_account_file['data'].decode() + + try: + service_account_info = json.loads(service_account_info) + except Exception as e: + print(f"Error loading service account file: {e}") + return {"success": False, "message": f"Error loading service account file: {e}"} + + SCOPES = ['https://www.googleapis.com/auth/admin.directory.user'] + + creds = service_account.Credentials.from_service_account_info(service_account_info, scopes=SCOPES,subject=subject) + service = build('admin', 'directory_v1', credentials=creds) + + try: + result = service.users().update(userKey=user_email,body={'suspended': False}).execute() + except Exception as e: + return {"success": False, "message": f"Error reactivating user: {e}"} + + return {"success": True, "message": f"{user_email} reactivated successfully."} + + +if __name__ == "__main__": + Gws.run() diff --git a/harfanglab-edr/1.0.0/requirements.txt b/harfanglab-edr/1.0.0/requirements.txt index 414fe428..b26fe9ce 100644 --- a/harfanglab-edr/1.0.0/requirements.txt +++ b/harfanglab-edr/1.0.0/requirements.txt @@ -1,4 +1,4 @@ -requests==2.28.1 +requests==2.32.4 python-dateutil==2.8.2 DateTime==4.7 Markdown==3.4.1 diff --git a/hoxhunt/1.0.0/requirements.txt b/hoxhunt/1.0.0/requirements.txt deleted file mode 100644 index fd7d3e06..00000000 --- a/hoxhunt/1.0.0/requirements.txt 
+++ /dev/null @@ -1 +0,0 @@ -requests==2.25.1 \ No newline at end of file diff --git a/http/1.0.0/api.yaml b/http/1.0.0/api.yaml deleted file mode 100644 index b73b44ed..00000000 --- a/http/1.0.0/api.yaml +++ /dev/null @@ -1,267 +0,0 @@ -walkoff_version: 1.0.0 -app_version: 1.0.0 -name: http -description: HTTP app -tags: - - Testing - - HTTP -categories: - - Testing - - HTTP -contact_info: - name: "@frikkylikeme" - url: https://github.com/frikky - email: "frikky@shuffler.io" -actions: - - name: curl - description: Run a curl command - parameters: - - name: statement - description: The curl command to run - multiline: true - example: "curl https://example.com" - required: true - schema: - type: string - returns: - schema: - type: string - - name: GET - description: Runs a GET request towards the specified endpoint - parameters: - - name: url - description: The URL to get - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: verify - description: Check certificate - multiline: false - options: - - false - - true - required: false - example: "false" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: POST - description: Runs a POST request towards the specified endpoint - parameters: - - name: url - description: The URL to post to - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: body - description: The body to use - multiline: true - example: "{\n\t'json': 'blob'\n}" - required: false - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: verify - description: Whether to check the 
certificate or not - multiline: false - required: false - options: - - false - - true - example: "false" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: PATCH - description: Runs a PATCHrequest towards the specified endpoint - parameters: - - name: url - description: The URL to post to - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: body - description: The body to use - multiline: true - example: "{\n\t'json': 'blob'\n}" - required: false - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - false - - true - example: "false" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: PUT - description: Runs a PUT request towards the specified endpoint - parameters: - - name: url - description: The URL to PUT to - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: body - description: The body to use - multiline: true - example: "{\n\t'json': 'blob'\n}" - required: false - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - false - - true - example: "false" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: DELETE - description: Runs a DELETE request towards the specified endpoint - parameters: - - name: url - description: The URL to post to - multiline: false - example: "https://example.com" - 
required: true - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - true - - false - example: "false" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: HEAD - description: Runs a HEAD request towards the specified endpoint - parameters: - - name: url - description: The URL to HEAD to - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - false - - true - example: "false" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: OPTIONS - description: Runs a OPTIONS request towards the specified endpoint - parameters: - - name: url - description: The URL to HEAD to - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - false - - true - example: "false" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" -large_image: 
data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABjCAIAAADihTK7AAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH4wgeDyYG/VPJzQAAMI9JREFUeNrFvXecJWd1Jvyc81bVzd238+SomZFGYZRRQAkLIQQ2XuEFG4zBXsv2EtYG24vZ9WKC1wmEZbDXWnvx77Mx0ZhgsUgIS8gSoJxnNDnHzuHmW/We8/3xVtW9HUYJyXvV6l9P9e0Kz3vyed5zSVXxar4UEFUoDFN6cLIWHphq7Jlq7ploHJxpnaq0p5u2GSmIiAAiqIKgAMBEVAxoMO+t7M2sL2fOGshtHsiu6w0yHqcntKJE1HWFV+VFrx5YqhDVFKNGaJ86WXvkWOXJE9X9083Jum1ZVcAwPMOGDDGINIGYQABIKT6VFY1EFWCmYsDLCv5Zg9mLlxcvXVE4oz/buSKUiV4l0F4VsERVAUMEoBXJQ0fn7t47/dCRypHZVjOCZyjjk2+YiYigCSJdMBFIAQIAIsAJnBIBBFWySqHVllVR7c2Ysway160r/tS60ub+nDuJ1VcFslcYLFEAykQADk83v7l9/M590/smmpEgF3DGYyJSgqp7dIdDDIsCcDoIAjiWMkIXap3Hd0ADsKKNSNtWe7N80XD2Z7f0vWFDT9bjVwOyVwysbqV79lT1S0+N3rV7eqIR5TzOBx4xRcmViIhASgQih0hiq6AOmhgydN7ewXXRAwDOWkWi9UisYEt/5q1nlt+6pbcv6znIzCuE2CsDlpUYpl1jtdsfPHHX3ql6KKWs5xtWkDg0KX5aItIEgdg2EUgJ7jhUGQATlsTotKLiECaiRiiNSNb0eO/c2veOs/uLgVGFAj+5+f9JwUrvY7oe/tWPj3356dFKW3uzHjOJOrGgLv1agBQTOWlKMCCOn7tLoLoRSnUx1c4lUWNCM0KtbTf1Zd574cBbNpW7V/T/DVjp5b+9Y/zWfzt6cLrRk/M9Jok1i4mgMUBIkXLCpGBnygECE6AAxWChI4NIQHaKuxC45P7Tg+njOMjqoTQjuX5t8XcuGT6jP+t++bKV8uWD5ZAarbQ+8f0Dd+ycygcm4xtRxPeeyFS39oFjJ6cgZUrMF8U/USJOFCMao+FQ7gKx+6f07tN/0nzICDrbsqWA33/BwHvOHQRgFeZl4fVywFKFQpnoB3snf//O/UcrYV/eV4UARKwEAhGTxFqTPn4aOhEI7GJIYgUiRaQQ9xUjEQeZhskw+cweEzOYSGKfm7wvuSl3do3h7eClqoYQis617A1rix9/7fKhvPfyrP5LBks0jgz+8odHPvNvRz3PFAIORTWVDRDxaWWKmcAsSm1rm1YtOGO4mOH+rF/OmnLW9GaMb4gJqmhYnWvrXNtON2W2bRsWqvAMsoYCJiKSVJYAOKFGilH6DarqtHKqEa3tCf70muUXjuRehny9NLAcUs1QPvx/93z9mbH+fEDMoqkrAkBMpESaKJIzWERkmKxSPZKWIO/z2t7MucO5bSO5zf2ZVaWgP+flfbO0IEPnWnaiER2ei3ZOt3ZMtHZPt8cbVhU5nzOGANjuh0geSd1/Gh9TVY9Qi4SBP7hi+D9sKr/UQOwlgOVEd6rW/rV/2vngkdnBQsaqxv6JEgUkQuL+EiEjNtSKtBpJzjfnDudft67nqjWlLQOZwHD3+Z1j7ZKO2PUtfpqZln1movnA8fqDJ5tHqxETih4zkdVEvFLEEsDUnV+VCVZ0thn91oX9771wWDRZ5VcQLIfUqUrrV768Y/totT8fhAJiIpAkuCww6kRkDDWt1tq6vCd40+byz27pO284l/p8q85rIg1FE1FKbq7riCYwUFfEVAujB0+17jhQfXi02bIoBcxQ2y
VNi7+nGfpU3d6yrfy7lwzHJuSVAstp36m55i998dndE61y3kQCYtbEeoOBGDpSQJl8plBoNrTreoN3njf0c1v7BvOBe2yRxL4tupDqPNREl44k40pGkn4C2D7Z/Ore6j3H6i2rPT5LIkpQ0i6gEjFTVTAw3ghvObfvw5cOW1HmF8brhcFySE1UW+/8x2d2jTXLeS9SAMTMnWiTQMQOOmYippmmlHPeL1849O5tQ2WXdiyqoijm4aXqIikKRV3c5TPFochpFEUBURCUCQDtnG79n+fm7jteD5hyHkUSw+8UcB5UzqED4/Xwvdv6Pnjx8Ivxjy8AlvtlrR394j8+88SxSrkQRKJEHGscJXEmwMRCMIbaVhuhvumsvv96xfK15ZyDafG6LXBGqiDSk9XwIz8an2qK71EzwuvX5H7rgr5I4PFC67bgdBKXvgDoD443/vLZ2f1zYV9gREXVPaSqxqghsfeqysBEI/qdiwZ/bVv/C+L1fGA5lWGm3/inHd/ZMT5YyIRxhJCE2pyEUkRExIZmmzJY8H7/mpVvOWsQQCRqFtkDpyBMaIRRYIxxcYZqI5Rfvvvk9ql2T2AE8AxNt+yHLijfcnZfqo+VdlQKGCCrtNjxpzWPWiife3bm6wdqWUMBwUonnlDtIOUwBDDTtJ++ZtmbNpSeHy/G6V8iapg+fe+Bf3l2bLAQhKLJcyfpXleua5gm6tHlq4vffMeWt5w1aEVV4S3yzJKEPN/YM/uXT0w6PXJY3Pbk1Pap1kjB8z3K+RwYWlbwbn+u8sREkykODmbb8j8enTlSDQ3NDxcQS5bziQWff+/C/j9+zUDO44Yl33CcZnW+iNLAkKiUMR/98diOiaYhktNr2mnBctnMd3eMfvb+IwOFTGS1k5NR/Gdx7stERBP18D9dNPyF/7hpVU/W/S0ttfJMaFn70R+e+uC9J96wocdjsqqG8NR445/2VgfzfqRJkQ8gIp/4tqfnIhGCWtVVxaDo89vvmbzneM0QRLH40QxBAat4/er83147uK7Xn4mQMUxJQkFMzA6l+C59pkjxu/efmmlGlBifFwuWq0wdnar/t+/syQdenEXESJEmkqUEJgJophl99HWrP/66tQSyunRyb0WZMFaPfvm7R29/euqnN/WcN5ST2HLp3zw7173miFMZKmZ4+0x497EGU5zGvHtzMefzf3+88n/2zLlAf/GjEWAIkei6UvA3Vw9cNpKZDtX3WGOQQEScQAYiAUo+H5gL//ChMSIIlkZrCbA0Niv6ke/snmpI4MUxOiUVy1QHXS2q0rZ/duPaWy5ebgVEZFz0sBT6JyqtX7zj8JNjraG895YzepAUM58cazx0qtGTNdpVb0h8COU8/trBhlsDUVqW965clgk8/vzuxm3bl8bL/dNjEkUp4D+/vP+nVmWn2up7zEnQ4taGOXZWFhjIeXccrP/znhlDZJeSriXAElFD9IVHj9+7d7o371lRV5sDdQe7scbPtsJP3bju7ecOR6KGQbEvdz9Il/bpWK39njuPHqmEeZ9WFv3XriwATjDxz/tqAuLUlDCBSZlgGMSFwOyZix4fbxHU2anXLw+s6mDOfPlg83PPzTHpfEOjlKDHBFHyDP/RpeXrV2Vn2mo8BjMTJ8+gnYVR9AT8mSemTtZCXsp4LQRLFEx0fKZ56w8OlXK+lfjiScUtKTARmDDVjD5x/dqfO2coEvWYUl2baYYnKi0m1kT/m5H+57uPH5wNB/L+XFuvWl3IeRyJGtKJRvTjk41iYCS5CBETc7LmygwQ3Xm8hSR2v2gwuzznNSIdytIX9te/dKBmCKksiGLfXJiE6+C4r8afvKTvNcNBJYTPpAwX8hEhkS0CkPFouim3Pjq+pEdcLFlKhD+958BErR0Y7gosKF0yBTxDE/XofZctf/eFyzpIqRqmwzONzz4yWvCNC7KtKhP+4IHRx0cbAzkTivqGr1uVR+zp6f7j9cmWBKZT3lJOFxvMrKCCx49PhtMtcU
6w4PMFA34jUlEMZMxf7aw9NNZwjkwBJqq05fZdVaJOIZdAPtEfXdq7vsfUBcbEFj7ReCYiYraKvizfeah2/9EqL3K488ByFuSRwzPffna0nHfxZ0eU4moU4BuabkQ3bi5/5NrVabHUihqiE5XWe7598KYzyr1ZDyBReExf3zX1lZ0zwwUvUrQFK4retuEsANck/bcTDc+QWyRwnHsTEXO85gB8xkTTPjHZdjcJ4LIhPyktIGf4T56pTTQjp31W9YLBbCW0n3hqlkld6kMEAXoD/48uKWc8cpF10iNSmleGJc/w556cCq3yfOM7DywCiepnfnDQ2Z1umUp/MEyNUFf2BH/2xg2apMGiMEwT9dbPfHnPpSuLF68oujiQCccr7T96cLycNVZhiFoW5wzmSoFnRZloshHtmArzHgsSe8sduVpQhn9oPEzv+Nw+vzegSKGKDNN4Uz77XJVIVeMm2ge2lu4/1f7M9jlDlCaSVrGhx//wtlLNqknCh0SgYxkToBjwM5OtO/bPEqHbcs3rgDPh/n1TP9w/3ZP1rKZNhKSnkCDWsvonN64fyAfi2nsKIjRC+yvfOjjXsh++ckVyx0qQP35wdKYlgRc/sFW9eFkWSQXqmcnWVNMGJtWEuK8VO/UuS5o1tH06bFprGKoYzpn1RdOyyoRItTfgfz3ReuBUy1koUWQNf+Cswt/srv/jvmoawRqCVXrDqsLPrM3OhuozJ9dNFSi29Hmf//65mWYkTB3h4q6lA4DPP3TMVQ+65SppV8FnmqpH77pg+Or1fZFIEiUoQf/r9w//4FDl1y8e7s95AriO9ANHa9/dX+nPsZVYALOe2TaUSS/3xHhbnAYS4lycujqoqR0FAqaTDXugEhEQKQBs7fPbogxyxiFj+H/vqbesdXm1Am9YlbtwwL/tueo9JxqpB2CCqH7g7NLKgtfWTsstNl2JJuY93j0dfu9wlZIKRwcsZ62eODr3wP7pUsZbKsogAjUjWV3OfOiqVaJqiBFnefTZB09+bcfUmUPZt2/t18S+ta3e9ug4E6xAQQpuWhopeBt7MwA8IlU8Pdn0DIlCQOJa+UktQVS7vxTaiPTpqSjueIDOLnsKWFVRiUQyBntnom8dbjjdEaWs4beszTLRnz5b3T3TdAi6P+4NvPdtLTasMif2mByVoJPFBYa+umvWmYsYAufvXB3mQ9/Y8ZUnR/vzvtU0LkRaXPeNmaiHt/70hreft6y7B6eqeyfrRJT1aHVvNtXX0MqBmaZvTAq2KDIGK0u+y5dE9Uilra4QNs88Lt15jlSLHoZznis6tKycqEdMHeUQqE+6Mu/H/W1F20anmhopSgZDOS/VbRf6feDH04+NtwoMayVNrUVURFWVoHNN+7evX3H5yoKrkXipdIxVW/fumSwGpttfprUQJqq27fkrim89Z1jULQgUcUK/ebCQ6kvnbpi2DOQXPbVzTjEC63qC+QXRBf2tJV6apPMZw+tL/uI3RAKTUAAynllbjIk4kSZVkuR1y5bC4+MtpCQnpPro5AgCfGt/5fKVBUrV0Onk93ZOjFbagfPnRPMrRsRMrcj++muWe8yakjoAj8nriFjnWZlwmlpHxx7xwrZyt9s97df8sy7xBm9e7SwJxIk8ptRau8j+vIHMlcuCSgTmpN/bRT8RaN7nH5+oTzQi5zc8JDnHnc+NGsOdllznOzGh1rZbRwpv2NyvqoZdTovJeutfdk8HxtRDe+WanrOH80lFCfcdnts3HWY918WIWxoC/PSGUjljREEkPz7ZOFiRrElbQS9QCidCy8rVyzIr854rYDw12dw5K1nTSU2Y0LJ69Yi/ouAn1lPvPt6YbZNC+wO6fmVGlBKODt6+Mf/AqXbcJHehB5TiHygwNFaP7j9Wu3lTr6h6zlodnW48daySD4ykUt71YqJGaH9+20jgmUjUIwjUEP3VQ6duffDkYCEzF9o7fmGL0zHDVA+jj9x36uBsO+cbMJhdf4yzvrl+TbGccc
vAt++oPDzaKmWMMhkmYmJ6vtaUIUy15JMX0VvXepFqQLRjJvqfT1f7M5yaDvee956Zf/9WX+JwgR4ZD798sNEXcCi6PN+7tS/jqh2quHAgc16/v30yzDGJKClApBozVAAYxn1Hqjdv6iWieInu3zc1VQv7i8ESpS9C28pI0X/z1gF3bVEYopOV1jd2Tm3oz4ZKW4Zzl64spqL42KnGWD1a05MRKDGY2TC3BOt7g+G8k2U0rdRCXZE3gWc0zgXx/JLlEYj4VM2mgr+l1x/OcdHjjncn5Ix5dCKMVD0iqwD0hpWZ759o9QU82ZKvHGx+si+jiaIZohtXZZ8Yb+cMYtPl3Jrze6I5j58eb041ov6c5yw1Htg/yadhmBhCtW1fu753qBCIanqirz07OdmIPKZqy24bzhmmdHnvO1yJRK1qpLAK93PLSjnLaXw43bTTLStEoaoFWcULfoUKVZysW6cpAPoz7BFFop33CDzCkZrdOxumq72lN+jLcDXSgkcPjrUPVyMmiKOmANcszw5mjZU05kwqTEQgBIZG6/aZsQZcL6YR2mdPVDIeL1FIczZYceOWgbR5x0zNyN6xazKfNJ2cWKmqqwQ9fKKe9bibkcBEVml5wUfiTyaaUo/UcNq6eVHtS0MYa4kibnb0Z7jow85LzpzY6uOTbXdOBXoD3lDyWqIeUy2Su4833XEiiNJA1jt/0G9EGufy82NMAqzqY6MNOG+4f7x2Yq4VeOw4AQtebavDpcxla3pdaO2K6I8cq+ybauR9DkULPp89nEtV4+hs+/Bs20HfHQiI6kAujmMBjDeipKj/YvvnqvCIZtvSjOL7LBgqeLwghBbAI3pmOnRxgluzs3pNJFAga8wDo+22tdy1RJcNZ6Jk1bohICJRBIa2TzQBZQA7T1VrbWs6jJTuVaJmaM9eVujP+67T7W7s7r0zri4aWowU/dW9QXrh7RONSls86nTS3d8QMJLz0jNPNqVbIl4k48AQqpFWI3Ho+4b6A47md8ZUETAdqthGZFPTsqnXdwYka3CoanfORg5Hp3rnD/gln0N3n2n5PHmkgPlIJZpqCAPYNVYFll5gIrStXrK6hER9PKa2tQ8fq+R8VtW21TW9mbzvpSZ2+1hzyYI/EfqynWh7vGFftEglKMSUPp1rO7AAUMFfWNJUwGdMtuRY3aYH1+ZN3pBbnlD00YkQiSaq6uqit7poml3Flg5jCfAY083o0FyLAeyfqHu8dANRFb6h85cXkeQrAPZPNo/OtB1nPxLZUM4gKbEC2Dfd9Bgp0zg9jyHqDUx6eLZtKb2dhBX0gi8iRIqGRbLq6M8Y1YWwM6ER6ZFqB6yhnCkHsQvymZ6dCpHUXQUwxJt6/VDU0TYpSSHdrbvYbf9Mm0MrJ+daPtNirIgQifZmvfX9OQCcJJLPjtbrbWuS/GutC5wAJkRij1Xaydk69GSB+ky9WZNCONsWg2QjxcJQ/vleVnW6JSlYRS8m+C4Ww6NdklX0MZDhUFSBDNORmp0LJUGEAGzq8RPB6lrjJN8T1UOzbZ6qh5PVtjG8VIBFodWhoj/sUrDkJLvGawCc+TJMy3s6CdpUQybrkWc6OyXi1VEwIWsIcaFDq22hJFJ24fKLcYcEWEU16rw3Y5aGmQgnap2OCUBD2Th29RjTbTlWs531BNYUjXFv1aTXN19Uj861earWTsRkibAhsjJSCgJjnBN0Vu/gZMs35FgXHmMw3zHbs62oFjqRmd9vATyGT52Va1lNLWgM54uLHgAKu2Qp6/HigEcBA5puW9c2cOLTn2GJ928gtHq0GqErd1+eNzmPbPcaa0okARON1kJvotpqhuI7T79gjQiR6HAxQEIGYoKqTtRD38RbRnIel7MdyZppSiSa8TCvxgMACJgCr6NvLuFIm9uuBveCzGsmYpJWV2Gkxyd2dMMuyAjwGbOhuuzNodWfcXzN+C3jTZvKIICeDGc9brQtI+1JJaBBDWGqEXmVprWq/iLBUlUCqa
I/5ydHQIS5lj0622pGYhWRatY3eb/j4ybq4XQjIiLRpCXHMIYiVcPkKI1EENXplm1EEjcqDAwxJQ3j5wHLkNZCTLc7xsgj1CPJmHmdGOfyJhsSKYKk0pA1XAslwwCoHumRmu0+c8HjvEe1VpwzdquGe/BmpJ7bmkWgpdrgEGg556ELyUjkzKFcqDBEFsj5JushlZeCT5esKPRkfUnWzBELrKKc9fx4SwBFIht6/FJgAhM3OZnZsRBoKYPQLVmVtgy4JwYA9GXovAG/5HfSw+QSKAcQEbDn3jmSM+f1e72BUUU1ksEsxRzFGErKGRJHfUrEJf1OQEvgNSMR1a6qPLS78KAIvJTIDwD9ee8rv3Bmd/FeVYB4O+FVa4pXr+05jWtTEdfZIyb69GuH5v/2hct+3acC4DMBuGwwuOyq4PRX1DQ3um559rrlma5fqiNUJU9HHjkLRTH3rZvKC1hRz4r79YKrJAzIRXaE4NKLpK4IdBd2QcbOh36eXHBcYjacZo4L6H8vKoDornsp8ekZZtRNglOgO5ij+b9F3KmiBc4p7msrROElFCr3Jk1aQukFVBbey2K6V1edGM9PnovfqZp2AV5iFL/UFZ/HzHVryeJ3akpMAACIpMFhzAnv2nkAA3jZuAo7LzBKVEIBNMPOHxBhuh7+7ncPNEU9ZlFkPPrUjevKucC1MB46Vrn1odFS2nZMWKai6M2aP75qJO97gIZiP/7w5HhDMj7HjVpOm4ZEnFJ2Fr6YUAn1plXZn1mTjwQe49Hx1uf31EvBwqTHKnp8/r3zinmPHU3w/lOtLx6o9/gMoNKW1wx6v7ylKBrTXK3YtpVY/1S7NdEJl2Hysh6ZOIjuaBYSagMRpuohumh+nuEnTtTG6mHGM04ya6GWc/Efz7bk3kNz/fnAxqyDuHtqgb6817bI+1CFz2bHZHvPdFjIeM5+ctJcdR3p07lFjzHZ0gsHAsCxqGi8qQ+Ot/szHHUFX0RoC5bnnG2Pjcyxmn1orN0fsKhONmVNngHWREqbFvVIieJtBt3kcHfIN/BKOc909TY60gs4fuZ0I+wGsZTxVpcztdDmA6NAqKh2OfJlxWAo72d9J1NMMd2blCggNG1cYGJCX4YLPhc9t5VHCcScyBeDCbQUXoaoLdKf6RibULTk84JCDQMNwVCWHd/ZiWnTasnnvEcq2jS6pphs6FCAUAu11raMVPvm2XirWvCYh4rZTMJ4SZFC0un0mE7Ndbg+TtSHC34rih1NI5SZZgesnoANwYqKqIVYVVcpFdGmjYPJVNMjkUjUioioqFpRtWJFrWgksCJWVNINUNrpvGa6crNqpFYXvQ0IRXsDJuLUqE+2rBWJBJGSKEaSEog711TL1kPhlPqdksABKKzoYM7jgYLveoXxhslu1KAe02il1YyieOuEKoCNA9lIxAXfVmS81k7/pC9negITiSQUCU1PGFqEVhILSlkX1MQWIbb67gjENTtJZGHa5HD2u9xYwyo6S5C8AxCBE8DEF+hkU6CqIlYkQ7qmFKCrunCyFjUiS12s7/hHcRZNhwqGyzl/sBhEVpYwqQqfeaIanpxto+t+tg7nKeaWQxTHO9Vu9GS8wZyJbLJkXWFwZLURdR6rEBgrXbY0lXnXG046xCIqIt3VJUPalbmjZZcuWljVlflY0VzXb6wuBqSq7Uj6M7y6GLco3OvgbLKUibXSuHbk4gZd25tlw7SynA07ZOR5ymgYlVa0d6IGx8omAnDuskJPhq2qQgk4ONOIF1OViVb3ZiKrHDMDnOooAaGVqUaUSBbKPrs5DQkq8XFRURGkTXQnaqKSeDuPqDdwETwBmG27KGShABqmNYXONrNKaMebkSFAtB3K+pKX9zhldALYNdVmQDpt/G4tVCZsLAcMYNNQwS7RAovNfCT61PEqEsOs0DXl3Ib+bDMUQH2mQ9POqMXOe/NAJnJaGPNr4lgwEp3rcgXljHFapvMkqeOtncqISGJBRESsiGHkvHjLOY
CZtvAiqKwiy1hb7IB1qm5nmtaoQKVt9YKBuA7uHqptZfd0y2dIZ0tBZ4eBFc15vKGcYQBbhgvzttSkNw2IamDosSNzSHYVicAwXbmutxkKgMDQsZnWXCtMBfPsobwhgSROuStlGK/bVA2H8txtGZzGqVKCF1RS4yWqKgIVRFazhLIfN51UdS4UpgXioK1IBwJake+AdbASNUJh1chq1uhly7KxwXK139nW0UroM0kXVCqxuISiA1le6+R567JiMestIVyqqprzzXNj1eOzTRdgOFBu2NSf8UgFPmOs2j443UzNxtnD+XLWi0RIiURIHeaqirF6lL5tKOcZitPFLrwk2SEizvFBFFZgRUVFNbQoeCj6TrKoZWWyaSnR01hbFc1INpS8TBcbYdd0KKJQrUeyudffVPZVNaU+PHqqWWtbRrwwqQLGZjGSDeWgJ2MYwLqB/JpythXJ/B5JvMI+01Qt/NGBKSRWSRXnryicPZyvta3HVA/lmZP1VKqXl4KNfZlmJLF3TUBn0rFalNrUobzxSK0LGrr304g6qxWT8RO5UhWIhNb2ehQk5IZqqNWWkMKKM20xxmEk2/rjYglDFdgx1fJIATQj+alVeUMxjq5se/+xmkfosledaIugkZVtwzkAbEV9wxes6mmFktJP5gUQCsN0567J9DlFlYlvPmewFVmPKTD0xImqMyIiSsDlq4qR1cDAEBkmw2xYMgYuyHDqPJTzenwDVY/UIxgiQzDkqoDKIIYy1BAMlAlG1YdCdFmOU27MdFvqoXgQUiEVhjAUagtGLx4K3P0TYaoZHp4LCx5FIv0ZvnFtPjWyRDhSCZ8aa+ZMytKCSkdWVOEzLl6eBxBXhK8+Y+BLj59c0saLaDEwDx2ePTzdWNuXk4Th+h/OGfrLHx0/PN1g5u/vnZ5phOWc78Lo69aXPvWjk6MaAiQER0BuCZ6baDYjcYNi+rKGCccrYd7nlLff2TeQsq45JtggyXX6M3EHAaBDc+HxSrscdIghRKiHcv5gsKEnkzKufniisXuqPZg14/XwP53dO5z3HTlNREF018HKTNMOZDlciJQC2rJ2Rck7d6QAIE6jr9zQP1LKVNtRd6vVpewK9ZgnauE/Pz36oWvXqSozqaI369/2M2fsGm9kfRNajWJ6MgE4d6T42ZvWNSNh7sxjcBIqsTNGYOi/Xz40Wo8Cw9rZ7ULxNKPknxwnqXEHoGn1spGsJtuD1pa8j13SlzWUxG9EhGYkW/p8V/BkQKGriv4nLxvIGW5Gcv2anEIZ5HLXtrV37K/kDKyoyjykVNUjnQvl0g29pcBY532cJbrly898Z8dYOeenlj4lVxvmtuhwMbjrNy4sBjHZUBcFgvpyCy6v6muJ3gKAZHfo9w7Ovf+eE+UMR1YXyBRUGJhp2r++ad0bN/Za0Q754M1nDy0uw2vCOM16vH+y/vWnR4nIxYcuCg1jUwzMRyqysjhQ7Nw+gC4W8Et69vl/tbgnFKeK6SVSO2xd+pm8j6Ci8v9tn/YIIpJGxSlSBLQiXd3jv3Z1EQAzdZh/124aXNOXHa+FvlnYcE0sl/f5h46/7fyRnO/H2wWIPOhopTnTFCYMFvxyLjarzBirtmea1uOkVUUkqqWARgquCqyhlaOVkJcS0yW7rgoEjJXF+BKRyNFKe37dFJGV1T1+JmX9qhyrRqGgbWV9bxAkx90Ohu8fqjx6stYbeNZ25VvxvANlUDW0b1vT53TQEHkAiGBFe3P+TVuH/9cPjwwU/KgrF3P0LlHN+7x/ovF3Dx//wFXr3OYTVRBoumHf9sXnphr2TWf2/++bN4ubskJ8YKb5jq/tK2Y8Rz4zRLOhXLGq+OWbNzovUQ/1Hd8+VAnV43SAhibKn/4PB4fHmGnJWzeV/ueVI6Goz3RwNvqF7570TUITBeqRntmX+fsbl2uifUer4TvuPDFalxvW5P/iumWOuedWJrTy2ccnfeLu1MbJFikUJNC8obee1Zcu3LxmxDsvWd
mT9aLu6LRTV9VItDdnbv/xsaPTDcc9YoJVPXO4+LHXr4Xq/YdmnxurUryVRy5b1XP9ht5KKwqYDIgIBY8OTLdmm9Zxyfpy3qa+TDsSj8BKDJASK1jBUAJIlRQEdbVxUZw/lE1vaudUsxZZQ3AdZENoW/mVc0ouFnV567f2VY7ORetL3scvH0qIe/Feki/smHp2tJH3yAUMiVipSwcYqDbt5SuL5y0rpDSOGCz38GcMFW44c7DajBZsRU0tl0c024g+cfe+tFzoMVvRm88Zef8VK45MN77wxJiLRd1mhd+8fLnPJCpQUdWAMVkLd082AbgluXBZLrRCgPND8UQBcalivGEAKlCJIskbPW8wSJf2qfFmnHGrEnSmaS8eyV6/pigqDDAw24q+vmeuaPBnVw0PJpQpV6Q7Mtf83GOTpYCjBCmoUNyAFoWQClTfvW0QSXkdWLSF7pYrVgfePJvVCVBVrWhfzv+/z03+01MnDFO8m4nJqn74urW/dMGyv3v05MGphvOWVvTskcI7zxuYqoUeQwUEall54mQtPflrVhQ8UtG4+t0pz1iIRXf1oRHZVUVvQzlQt0VV5OmxZsCudiiu3PihC/qZGHADluiLz00/N9H89LXD54/kbZLcuIX46P2js+3I49hIpaV3UgHUU6q15KLl+avX9jj+z0KwnHCdv6r3jVuHZhtht3B11wOsaE/G/MFdB/ZP1BxeFBNs8Bdv2XTZ6t7fv/uAS1BdiPxbV6xY35ept8WQqqpP9MjxKgA3h2bbcG5FwWu51nRXGbczrEIgFqSot+WikZzPJrJKwJFKeHCmlWGNrBhgvB6+a2vP+SN5qwDIEMbq4W2PTd567fI3biiHIu6BrajHfPuTY/ccqpYDE8V2XeapoYCgkchvXDximLr3Sy+ULAU+eN2GYmBkUV4dK6OIx1Rv2/d/Y2cjjLqyawoMf+WdW5uhfGfnmEnC0d6s/4fXr2mGLsOTnEfbR+uT9bYhikRLGe+i5flGS0y8bWfplyNdX7OqkNhgPHKyPtMIScHQ2Va4tT/4zYuGEvutAD7xo9H/ctHAu8/pj0RdZdWKekw/Ola59ZGJ/ixHViievuLyGuvkyzDm2vbqNaXXre8Vnbf1YR5YTrg2DxffdcnK6UbozReujnsWLWW8p49Xf/tbu5jIqmiy2yrnm394+9bdY43RSst1wiKRq9eVP3jF8rFqaIh8xmi1/cjxmiYV/Rs29Eq8vMkImaTy4JAioBnJipJ3yfIckjLWfUeqpICgHalH+NQ1I67IBYUheuDI3FUrc++/cCgScYetwjAdmG5+4O5jQbytJM1uJBYoBZFaUQ/021esWEwkWChZDBLFf7luwxkD+UZol6xzQTWyMpD3v/Hs+Mfv2usxu+aFK0gUAvOBK1dyUityHuA3r1jx1q39Y9W2zwTVf90/TUlX/eo1pVUlvxWKwwsST0FB4hYYWm3ba1fnS4EXiXqMiXr7sZONgsdWdLYlf3rNirMGC5Go21Qiqmf0Z96+td+KeOzGi6ghTNTDX/3u4UoryrALT0VVRSUpf6sCHnimbn9xW/95ywq20wk+DViOY1nOeR994xnNtl3cjErjkUh0qBD89YPH/+zeA4bJipseCVUEnjdUzKTBIhOJ0q03bbhufc/JSrucNT88XJlpRoZjTbxxY0+1FXkxP05UJSmQgCCiEjBu3tyLpNlx39HqWDUMDMYb4SdeO3zTxl4rSPWAiZYXM6JkOB7taohmmtF7vnPo0Eyz6HNo47IkqZCCNOZwsaLesmf0B7952cqUJf98YAFwZvvGrSM/f9HyqVp7aWVUdc5xKO//+X1H/uRfD3jMzpHRIla7Cy0zhv/2Zze9bl1puh6OVcN79s8kD4+fP3ug4LOosCqE4tKUiqoYRbVpX7M8d/6yQjo09jt754h0sh598rUjv3TuYCgyf2xZZyqpG4YzXgvf9e1DO0YbvYFpWxs/h8QwxfVzBSBtG33s2lU9GQPo4o
xy6YkhzpF97KYtm4by9ZbtNnILTK8VHSz4tz1w5MPf2e2asjbJHBcIrChyvvn8zZveds7ATCP86rPjgLqF2TyYe+PG3pmG9ZgIQukeTVUFQpFbzh8ikFUwYe9U896Dc3mD265f8Z5tg6n9nne5xLZ6TPummm/7xr7tY7VylkIrLCBRdlEVACUIoOKRTtXCWy4euXp9uXtD5guD5dgPvTn/L37uHGKyqksm7ileQwX/Hx49+e4v7Ziotd3zL06RXT/KN+bTb9z4mTeuf/BI5e690+xuFfS+1ywreBzZJG6AQsUnzDai160tXbOuKG4wLvRTD55YVfS+evP6t2zpj7q0r/slCqvqMd13aPZt/7zv2GxYznAUKatSQpzUxA+qikeYaURXrin9zpUrreppd+Z87GMfOw1eZEVXlrNDxeBfnh3NB95SdGZKyLwoZcyusdpdOye2jpTW9GUJagUL1ocSob9wRena9T1PnqhcuKLkMQt0IO8T6V17pss5AwUre0yhikf0v960vi/nuXLYwenGeC383I3rVpYy6TyJBQroKCpM+NzDJ//bvcdVNe9TJB22UcKYivuCjgc+nPf+7ubNvdlOZWEJTHQJIei83A194s49n73/0HApG9kO+yLZ7xmXNN3ux0Ykovq+K9d84OpVgfGsKmGJmfbJmCqxQsn8LAD49W/v+/auqVLWqBAIbaufvWntzVsH0wS4FUZZ38NpJtyl06/2TdU/dt/xfztc6c8ZgETUjW9wkeL8DraKwFr9wn/cdMmqnuefn/VCk9ncvDHC+776zNefHhsqBWEXdzMdspNKmVuT6UZ0/srSR65ff9WGPgBWhIgWLJdoOscU6b032+En7zt6vGIDQy0rb95cftu5wwtmIi85z9Cquo3zrUg+/+TY7Y+OVUPbmzGukUEKxLP/OnUBJBvJqy37uTevf9OZAy84fPmFZ/6530civ/qlp+/cOTFUPA1endkI8A1X21ZU33z20HuvXL11pIg4Clw8haB7YZxPmmdGRZVPv9SutejFG7b1jt1Tf/3I2PaxRjnrttbFGHWvfUJWgwEINNOM/viGNe/YNrKkUr9ksJDIfCO0v/blZ7+3a8JxI55HvpBQTGcatpDx3rx14BcvXn7BypJzU1YVp0FNk9Ft6T8XK0VimzU9QzOy39s78/dPTTx+spIxXPA56sjQQmqJK9e7uQSzjfDj1699z0XLXgxSLxasdIXbkfznrz5zx/bxwWLQXfZagJej3JCbeiuYa0bZwFy+vuet5w5fs7G/r4srHrvNtIFDS9T10VUXoPn71PdNNr67Z/qO3VO7JxqB4WLAoouq1d17z+A2RSK02gjtJ29Y987zR1786POXMAHX4WVFfvubO7/42ImBQqDz1y1tcKS23/3omjq1tg0Vq8qZy9eWr91YvnBVcU05h9O0OJ6n99GKot0TjQePVO4/NPfUyfpsyxZ8zvnkmFmLtj2kZ4yTTcPUCIWgn75pw5vPGnxJQ+Jf2mzlmDIP/Pm9Bz997/6cz75nurtBne8JeJocMUTE1LJSD0WBvlywcTB3zkjhzJH8+v7c8p6gL+sVMp6bDZUsjzRCqbXtRD06PtvaN9l8bqz+3Hj96GyrEWpgqBAYz4V1mlKt5rWHNaUzAAA8pplGtLIn+PM3b7x0dc9LHaf/kqd2azLf/I7to7/37Z2zzagn67+gSnYfMUwghKKtSNsWCvUNFTKmGPjFDOd8N7WbItW21VrbVtu21pZGJFbhEWU8zph4WN7z9Ic6TYSYHQtSTNWjq9b33PqmM1b0ZF7GBw+8zOH57kp7x6q/882dDx6aKef9tHC6GC93qPNz8hY3f8+FqlZhgcgJg6omzQpmRwCIybwJySQpLWv3brKuWTPz9/EYRq0tIvKrly7/3WvWeMz/TvPgF+AVWrnt3gO3//hoM5LerJe0lGI8Ohz0rhEkCyBLh9clY4c7aowF7MEln6774KJHceM5ZxrR5sHc/7h+7b
Ub+3UeC//fCyx0BUFPHZv9w7v2PnBgOh94OZ9TyGi+QKXh2Hz2ftq6T1mLHasHINnIoKcDbMlfOC79XCvKGLzzomW/9drVbnrTT/JJPD/xp6Mkg3JV9UuPHf+rBw7vn2gUMibrLQ1Zt1ZinpQhDRziEmUsdV3TGYF0F5IjK4ibWJzeSWf/jNbaVkWv2VD+4NWrt62Ix3z+hB/A88p87k4qYjP18B8ePvqPj584PNXI+SYXGGjM7scCEXNskYV4JZh1JAzoAot0Xp1JOxIXj3SzqtWWZeCS1aVbLlv5+s0DOM0k7P9nYLlX6l+mau2vPnnya0+c2DVaY6J8YDw351ahiyDrpm0soSLzLJiz6NS944aST8pqRVJv20LAV6wrv+ui5a/b1I8u3/2KPOAr/Flh2vUhWI3Qfn/n+DefOfXw4dnJWtszlPM938RD9TpWuws7nX9kwa2m6Dnj5iZUtK00QmHCmr7sdRv7bj5v+PyVPfHivXIffPWqgLUYMgCHphr/umv8nj0TzxyvTNbaAmQ8znjGfUxM6vU699RBrYNnqopWJBRtW4msBoZX92UvWdN7w5bBK9eXSxkTXx2vMEyvIlgxZMknMKSh37HpxpNHZx85PP3syerRmeZMI2xZAOCYTZmQ/5AaecQU3IQtZIjyAQ8W/I2D+W0rS5es7jl3RU8pE/MXrSgRXiml+3cFK325ds2CKfon51qHJmr7Jur7J+pHZ5pjc63persRaWjVuqiTySPOGipkeLiYGenJrOnPbxzMbRoqrClnS9nOlv9kJupPEBS8uNf/DxrLmMA1V9rHAAAAJXRFWHRkYXRlOmNyZWF0ZQAyMDE5LTA4LTMwVDE1OjM4OjA2LTA0OjAwvtdu7gAAACV0RVh0ZGF0ZTptb2RpZnkAMjAxOS0wOC0zMFQxNTozODowNi0wNDowMM+K1lIAAAAASUVORK5CYII= diff --git a/http/1.0.0/requirements.txt b/http/1.0.0/requirements.txt deleted file mode 100644 index ae3e5391..00000000 --- a/http/1.0.0/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -uncurl==0.0.10 -requests==2.25.1 \ No newline at end of file diff --git a/http/1.0.0/src/app.py b/http/1.0.0/src/app.py deleted file mode 100755 index af48e5a8..00000000 --- a/http/1.0.0/src/app.py +++ /dev/null @@ -1,173 +0,0 @@ -import time -import json -import json -import random -import socket -import uncurl -import asyncio -import requests -import subprocess - -from walkoff_app_sdk.app_base import AppBase - -class HTTP(AppBase): - __version__ = "1.0.0" - app_name = "http" - - def __init__(self, redis, logger, console_logger=None): - print("INIT") - """ - Each app should have this __init__ to set up Redis and logging. - :param redis: - :param logger: - :param console_logger: - """ - super().__init__(redis, logger, console_logger) - - # This is dangerously fun :) - # Do we care about arbitrary code execution here? 
- def curl(self, statement): - process = subprocess.Popen(statement, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, shell=True) - stdout = process.communicate() - item = "" - if len(stdout[0]) > 0: - print("Succesfully ran bash!") - item = stdout[0] - else: - print("FAILED to run bash!") - item = stdout[1] - - try: - ret = item.decode("utf-8") - return ret - except: - return item - - return item - #try: - # if not statement.startswith("curl "): - # statement = "curl %s" % statement - - # data = uncurl.parse(statement) - # request = eval(data) - # if isinstance(request, requests.models.Response): - # return request.text - # else: - # return "Unable to parse the curl parameter. Remember to start with curl " - #except: - # return "An error occurred during curl parsing" - - def splitheaders(self, headers): - parsed_headers = {} - if headers: - split_headers = headers.split("\n") - self.logger.info(split_headers) - for header in split_headers: - if ": " in header: - splititem = ": " - elif ":" in header: - splititem = ":" - elif "= " in header: - splititem = "= " - elif "=" in header: - splititem = "=" - else: - self.logger.info("Skipping header %s as its invalid" % header) - continue - - splitheader = header.split(splititem) - if len(splitheader) == 2: - parsed_headers[splitheader[0]] = splitheader[1] - else: - self.logger.info("Skipping header %s with split %s cus only one item" % (header, splititem)) - continue - - return parsed_headers - - def checkverify(self, verify): - if verify == None: - return False - elif verify: - return True - elif not verify: - return False - elif verify.lower().strip() == "false": - return False - else: - return True - - def checkbody(self, body): - # Indicates json - if body.strip().startswith("{"): - body = body.replace("\'", "\"") - - # Not sure if loading is necessary - # Seemed to work with plain string into data=body too, and not parsed json=body - #try: - # body = json.loads(body) - #except json.decoder.JSONDecodeError 
as e: - # return body - - return body - else: - return body - - def GET(self, url, headers="", verify=True): - parsed_headers = self.splitheaders(headers) - verify = self.checkverify(verify) - return requests.get(url, headers=parsed_headers, verify=verify).text - - def POST(self, url, headers="", body="", verify=True): - parsed_headers = self.splitheaders(headers) - verify = self.checkverify(verify) - body = self.checkbody(body) - return requests.post(url, headers=parsed_headers, data=body, verify=verify).text - - # UNTESTED BELOW HERE - def PUT(self, url, headers="", body="", verify=True): - parsed_headers = self.splitheaders(headers) - verify = self.checkverify(verify) - body = self.checkbody(body) - return requests.put(url, headers=parsed_headers, data=body, verify=verify).text - - def PATCH(self, url, headers="", body="", verify=True): - parsed_headers = self.splitheaders(headers) - verify = self.checkverify(verify) - body = self.checkbody(body) - return requests.patch(url, headers=parsed_headers, data=body, verify=verify).text - - def DELETE(self, url, headers="", body="", verify=True): - parsed_headers = self.splitheaders(headers) - verify = self.checkverify(verify) - return requests.delete(url, headers=parsed_headers, verify=verify).text - - def HEAD(self, url, headers="", body="", verify=True): - parsed_headers = self.splitheaders(headers) - verify = self.checkverify(verify) - body = self.checkbody(body) - return requests.head(url, headers=parsed_headers, verify=verify).text - - def OPTIONS(self, url, headers="", body="", verify=True): - parsed_headers = self.splitheaders(headers) - verify = self.checkverify(verify) - body = self.checkbody(body) - return requests.options(url, headers=parsed_headers, verify=verify).text - - -# Run the actual thing after we've checked params -def run(request): - print("Starting cloud!") - action = request.get_json() - print(action) - print(type(action)) - authorization_key = action.get("authorization") - current_execution_id 
= action.get("execution_id") - - if action and "name" in action and "app_name" in action: - HTTP.run(action) - return f'Attempting to execute function {action["name"]} in app {action["app_name"]}' - else: - return f'Invalid action' - -if __name__ == "__main__": - HTTP.run() diff --git a/http/1.1.0/api.yaml b/http/1.1.0/api.yaml deleted file mode 100644 index bd622ff8..00000000 --- a/http/1.1.0/api.yaml +++ /dev/null @@ -1,365 +0,0 @@ -walkoff_version: 1.1.0 -app_version: 1.1.0 -name: http -description: HTTP app -tags: - - Testing - - HTTP -categories: - - Testing - - HTTP -contact_info: - name: "@frikkylikeme" - url: https://github.com/frikky - email: "frikky@shuffler.io" -actions: - - name: GET - description: Runs a GET request towards the specified endpoint - parameters: - - name: url - description: The URL to get - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: username - description: The username to use - multiline: false - required: false - example: "Username" - schema: - type: string - - name: password - description: The password to use - multiline: false - required: false - example: "*****" - schema: - type: string - - name: verify - description: Check certificate - multiline: false - options: - - false - - true - required: false - example: "false" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: POST - description: Runs a POST request towards the specified endpoint - parameters: - - name: url - description: The URL to post to - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: body - description: The body to use - multiline: true - example: "{\n\t'json': 'blob'\n}" - required: false - schema: - type: string - - name: headers - 
description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: username - description: The username to use - multiline: false - required: false - example: "Username" - schema: - type: string - - name: password - description: The password to use - multiline: false - required: false - example: "*****" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - false - - true - example: "false" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: PATCH - description: Runs a PATCHrequest towards the specified endpoint - parameters: - - name: url - description: The URL to post to - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: body - description: The body to use - multiline: true - example: "{\n\t'json': 'blob'\n}" - required: false - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: username - description: The username to use - multiline: false - required: false - example: "Username" - schema: - type: string - - name: password - description: The password to use - multiline: false - required: false - example: "*****" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - false - - true - example: "false" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: PUT - description: Runs a PUT request towards the specified endpoint - parameters: - - name: url - description: The URL to PUT to - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: 
body - description: The body to use - multiline: true - example: "{\n\t'json': 'blob'\n}" - required: false - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: username - description: The username to use - multiline: false - required: false - example: "Username" - schema: - type: string - - name: password - description: The password to use - multiline: false - required: false - example: "*****" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - false - - true - example: "false" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: DELETE - description: Runs a DELETE request towards the specified endpoint - parameters: - - name: url - description: The URL to post to - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: username - description: The username to use - multiline: false - required: false - example: "Username" - schema: - type: string - - name: password - description: The password to use - multiline: false - required: false - example: "*****" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - true - - false - example: "false" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: HEAD - description: Runs a HEAD request towards the specified endpoint - parameters: - - name: url - description: The URL to HEAD to - multiline: false - example: "https://example.com" - required: true - schema: - type: 
string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: username - description: The username to use - multiline: false - required: false - example: "Username" - schema: - type: string - - name: password - description: The password to use - multiline: false - required: false - example: "*****" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - false - - true - example: "false" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: OPTIONS - description: Runs a OPTIONS request towards the specified endpoint - parameters: - - name: url - description: The URL to HEAD to - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: username - description: The username to use - multiline: false - required: false - example: "Username" - schema: - type: string - - name: password - description: The password to use - multiline: false - required: false - example: "*****" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - false - - true - example: "false" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: curl - description: Run a curl command - parameters: - - name: statement - description: The curl command to run - multiline: true - example: "curl https://example.com" - required: true - schema: - type: string - returns: - schema: - type: string -large_image: 
data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABjCAIAAADihTK7AAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH4wgeDyYG/VPJzQAAMI9JREFUeNrFvXecJWd1Jvyc81bVzd238+SomZFGYZRRQAkLIQQ2XuEFG4zBXsv2EtYG24vZ9WKC1wmEZbDXWnvx77Mx0ZhgsUgIS8gSoJxnNDnHzuHmW/We8/3xVtW9HUYJyXvV6l9P9e0Kz3vyed5zSVXxar4UEFUoDFN6cLIWHphq7Jlq7ploHJxpnaq0p5u2GSmIiAAiqIKgAMBEVAxoMO+t7M2sL2fOGshtHsiu6w0yHqcntKJE1HWFV+VFrx5YqhDVFKNGaJ86WXvkWOXJE9X9083Jum1ZVcAwPMOGDDGINIGYQABIKT6VFY1EFWCmYsDLCv5Zg9mLlxcvXVE4oz/buSKUiV4l0F4VsERVAUMEoBXJQ0fn7t47/dCRypHZVjOCZyjjk2+YiYigCSJdMBFIAQIAIsAJnBIBBFWySqHVllVR7c2Ysway160r/tS60ub+nDuJ1VcFslcYLFEAykQADk83v7l9/M590/smmpEgF3DGYyJSgqp7dIdDDIsCcDoIAjiWMkIXap3Hd0ADsKKNSNtWe7N80XD2Z7f0vWFDT9bjVwOyVwysbqV79lT1S0+N3rV7eqIR5TzOBx4xRcmViIhASgQih0hiq6AOmhgydN7ewXXRAwDOWkWi9UisYEt/5q1nlt+6pbcv6znIzCuE2CsDlpUYpl1jtdsfPHHX3ql6KKWs5xtWkDg0KX5aItIEgdg2EUgJ7jhUGQATlsTotKLiECaiRiiNSNb0eO/c2veOs/uLgVGFAj+5+f9JwUrvY7oe/tWPj3356dFKW3uzHjOJOrGgLv1agBQTOWlKMCCOn7tLoLoRSnUx1c4lUWNCM0KtbTf1Zd574cBbNpW7V/T/DVjp5b+9Y/zWfzt6cLrRk/M9Jok1i4mgMUBIkXLCpGBnygECE6AAxWChI4NIQHaKuxC45P7Tg+njOMjqoTQjuX5t8XcuGT6jP+t++bKV8uWD5ZAarbQ+8f0Dd+ycygcm4xtRxPeeyFS39oFjJ6cgZUrMF8U/USJOFCMao+FQ7gKx+6f07tN/0nzICDrbsqWA33/BwHvOHQRgFeZl4fVywFKFQpnoB3snf//O/UcrYV/eV4UARKwEAhGTxFqTPn4aOhEI7GJIYgUiRaQQ9xUjEQeZhskw+cweEzOYSGKfm7wvuSl3do3h7eClqoYQis617A1rix9/7fKhvPfyrP5LBks0jgz+8odHPvNvRz3PFAIORTWVDRDxaWWKmcAsSm1rm1YtOGO4mOH+rF/OmnLW9GaMb4gJqmhYnWvrXNtON2W2bRsWqvAMsoYCJiKSVJYAOKFGilH6DarqtHKqEa3tCf70muUXjuRehny9NLAcUs1QPvx/93z9mbH+fEDMoqkrAkBMpESaKJIzWERkmKxSPZKWIO/z2t7MucO5bSO5zf2ZVaWgP+flfbO0IEPnWnaiER2ei3ZOt3ZMtHZPt8cbVhU5nzOGANjuh0geSd1/Gh9TVY9Qi4SBP7hi+D9sKr/UQOwlgOVEd6rW/rV/2vngkdnBQsaqxv6JEgUkQuL+EiEjNtSKtBpJzjfnDudft67nqjWlLQOZwHD3+Z1j7ZKO2PUtfpqZln1movnA8fqDJ5tHqxETih4zkdVEvFLEEsDUnV+VCVZ0thn91oX9771wWDRZ5VcQLIfUqUrrV768Y/totT8fhAJiIpAkuCww6kRkDDWt1tq6vCd40+byz27pO284l/p8q85rIg1FE1FKbq7riCYwUFfEVAujB0+17jhQfXi02bIoBcxQ2y
VNi7+nGfpU3d6yrfy7lwzHJuSVAstp36m55i998dndE61y3kQCYtbEeoOBGDpSQJl8plBoNrTreoN3njf0c1v7BvOBe2yRxL4tupDqPNREl44k40pGkn4C2D7Z/Ore6j3H6i2rPT5LIkpQ0i6gEjFTVTAw3ghvObfvw5cOW1HmF8brhcFySE1UW+/8x2d2jTXLeS9SAMTMnWiTQMQOOmYippmmlHPeL1849O5tQ2WXdiyqoijm4aXqIikKRV3c5TPFochpFEUBURCUCQDtnG79n+fm7jteD5hyHkUSw+8UcB5UzqED4/Xwvdv6Pnjx8Ivxjy8AlvtlrR394j8+88SxSrkQRKJEHGscJXEmwMRCMIbaVhuhvumsvv96xfK15ZyDafG6LXBGqiDSk9XwIz8an2qK71EzwuvX5H7rgr5I4PFC67bgdBKXvgDoD443/vLZ2f1zYV9gREXVPaSqxqghsfeqysBEI/qdiwZ/bVv/C+L1fGA5lWGm3/inHd/ZMT5YyIRxhJCE2pyEUkRExIZmmzJY8H7/mpVvOWsQQCRqFtkDpyBMaIRRYIxxcYZqI5Rfvvvk9ql2T2AE8AxNt+yHLijfcnZfqo+VdlQKGCCrtNjxpzWPWiife3bm6wdqWUMBwUonnlDtIOUwBDDTtJ++ZtmbNpSeHy/G6V8iapg+fe+Bf3l2bLAQhKLJcyfpXleua5gm6tHlq4vffMeWt5w1aEVV4S3yzJKEPN/YM/uXT0w6PXJY3Pbk1Pap1kjB8z3K+RwYWlbwbn+u8sREkykODmbb8j8enTlSDQ3NDxcQS5bziQWff+/C/j9+zUDO44Yl33CcZnW+iNLAkKiUMR/98diOiaYhktNr2mnBctnMd3eMfvb+IwOFTGS1k5NR/Gdx7stERBP18D9dNPyF/7hpVU/W/S0ttfJMaFn70R+e+uC9J96wocdjsqqG8NR445/2VgfzfqRJkQ8gIp/4tqfnIhGCWtVVxaDo89vvmbzneM0QRLH40QxBAat4/er83147uK7Xn4mQMUxJQkFMzA6l+C59pkjxu/efmmlGlBifFwuWq0wdnar/t+/syQdenEXESJEmkqUEJgJophl99HWrP/66tQSyunRyb0WZMFaPfvm7R29/euqnN/WcN5ST2HLp3zw7173miFMZKmZ4+0x497EGU5zGvHtzMefzf3+88n/2zLlAf/GjEWAIkei6UvA3Vw9cNpKZDtX3WGOQQEScQAYiAUo+H5gL//ChMSIIlkZrCbA0Niv6ke/snmpI4MUxOiUVy1QHXS2q0rZ/duPaWy5ebgVEZFz0sBT6JyqtX7zj8JNjraG895YzepAUM58cazx0qtGTNdpVb0h8COU8/trBhlsDUVqW965clgk8/vzuxm3bl8bL/dNjEkUp4D+/vP+nVmWn2up7zEnQ4taGOXZWFhjIeXccrP/znhlDZJeSriXAElFD9IVHj9+7d7o371lRV5sDdQe7scbPtsJP3bju7ecOR6KGQbEvdz9Il/bpWK39njuPHqmEeZ9WFv3XriwATjDxz/tqAuLUlDCBSZlgGMSFwOyZix4fbxHU2anXLw+s6mDOfPlg83PPzTHpfEOjlKDHBFHyDP/RpeXrV2Vn2mo8BjMTJ8+gnYVR9AT8mSemTtZCXsp4LQRLFEx0fKZ56w8OlXK+lfjiScUtKTARmDDVjD5x/dqfO2coEvWYUl2baYYnKi0m1kT/m5H+57uPH5wNB/L+XFuvWl3IeRyJGtKJRvTjk41iYCS5CBETc7LmygwQ3Xm8hSR2v2gwuzznNSIdytIX9te/dKBmCKksiGLfXJiE6+C4r8afvKTvNcNBJYTPpAwX8hEhkS0CkPFouim3Pjq+pEdcLFlKhD+958BErR0Y7gosKF0yBTxDE/XofZctf/eFyzpIqRqmwzONzz4yWvCNC7KtKhP+4IHRx0cbAzkTivqGr1uVR+zp6f7j9cmWBKZT3lJOFxvMrKCCx49PhtMtcU
6w4PMFA34jUlEMZMxf7aw9NNZwjkwBJqq05fZdVaJOIZdAPtEfXdq7vsfUBcbEFj7ReCYiYraKvizfeah2/9EqL3K488ByFuSRwzPffna0nHfxZ0eU4moU4BuabkQ3bi5/5NrVabHUihqiE5XWe7598KYzyr1ZDyBReExf3zX1lZ0zwwUvUrQFK4retuEsANck/bcTDc+QWyRwnHsTEXO85gB8xkTTPjHZdjcJ4LIhPyktIGf4T56pTTQjp31W9YLBbCW0n3hqlkld6kMEAXoD/48uKWc8cpF10iNSmleGJc/w556cCq3yfOM7DywCiepnfnDQ2Z1umUp/MEyNUFf2BH/2xg2apMGiMEwT9dbPfHnPpSuLF68oujiQCccr7T96cLycNVZhiFoW5wzmSoFnRZloshHtmArzHgsSe8sduVpQhn9oPEzv+Nw+vzegSKGKDNN4Uz77XJVIVeMm2ge2lu4/1f7M9jlDlCaSVrGhx//wtlLNqknCh0SgYxkToBjwM5OtO/bPEqHbcs3rgDPh/n1TP9w/3ZP1rKZNhKSnkCDWsvonN64fyAfi2nsKIjRC+yvfOjjXsh++ckVyx0qQP35wdKYlgRc/sFW9eFkWSQXqmcnWVNMGJtWEuK8VO/UuS5o1tH06bFprGKoYzpn1RdOyyoRItTfgfz3ReuBUy1koUWQNf+Cswt/srv/jvmoawRqCVXrDqsLPrM3OhuozJ9dNFSi29Hmf//65mWYkTB3h4q6lA4DPP3TMVQ+65SppV8FnmqpH77pg+Or1fZFIEiUoQf/r9w//4FDl1y8e7s95AriO9ANHa9/dX+nPsZVYALOe2TaUSS/3xHhbnAYS4lycujqoqR0FAqaTDXugEhEQKQBs7fPbogxyxiFj+H/vqbesdXm1Am9YlbtwwL/tueo9JxqpB2CCqH7g7NLKgtfWTsstNl2JJuY93j0dfu9wlZIKRwcsZ62eODr3wP7pUsZbKsogAjUjWV3OfOiqVaJqiBFnefTZB09+bcfUmUPZt2/t18S+ta3e9ug4E6xAQQpuWhopeBt7MwA8IlU8Pdn0DIlCQOJa+UktQVS7vxTaiPTpqSjueIDOLnsKWFVRiUQyBntnom8dbjjdEaWs4beszTLRnz5b3T3TdAi6P+4NvPdtLTasMif2mByVoJPFBYa+umvWmYsYAufvXB3mQ9/Y8ZUnR/vzvtU0LkRaXPeNmaiHt/70hreft6y7B6eqeyfrRJT1aHVvNtXX0MqBmaZvTAq2KDIGK0u+y5dE9Uilra4QNs88Lt15jlSLHoZznis6tKycqEdMHeUQqE+6Mu/H/W1F20anmhopSgZDOS/VbRf6feDH04+NtwoMayVNrUVURFWVoHNN+7evX3H5yoKrkXipdIxVW/fumSwGpttfprUQJqq27fkrim89Z1jULQgUcUK/ebCQ6kvnbpi2DOQXPbVzTjEC63qC+QXRBf2tJV6apPMZw+tL/uI3RAKTUAAynllbjIk4kSZVkuR1y5bC4+MtpCQnpPro5AgCfGt/5fKVBUrV0Onk93ZOjFbagfPnRPMrRsRMrcj++muWe8yakjoAj8nriFjnWZlwmlpHxx7xwrZyt9s97df8sy7xBm9e7SwJxIk8ptRau8j+vIHMlcuCSgTmpN/bRT8RaN7nH5+oTzQi5zc8JDnHnc+NGsOdllznOzGh1rZbRwpv2NyvqoZdTovJeutfdk8HxtRDe+WanrOH80lFCfcdnts3HWY918WIWxoC/PSGUjljREEkPz7ZOFiRrElbQS9QCidCy8rVyzIr854rYDw12dw5K1nTSU2Y0LJ69Yi/ouAn1lPvPt6YbZNC+wO6fmVGlBKODt6+Mf/AqXbcJHehB5TiHygwNFaP7j9Wu3lTr6h6zlodnW48daySD4ykUt71YqJGaH9+20jgmUjUIwjUEP3VQ6duffDkYCEzF9o7fmGL0zHDVA+jj9x36uBsO+cbMJhdf4yzvrl+TbGccc
vAt++oPDzaKmWMMhkmYmJ6vtaUIUy15JMX0VvXepFqQLRjJvqfT1f7M5yaDvee956Zf/9WX+JwgR4ZD798sNEXcCi6PN+7tS/jqh2quHAgc16/v30yzDGJKClApBozVAAYxn1Hqjdv6iWieInu3zc1VQv7i8ESpS9C28pI0X/z1gF3bVEYopOV1jd2Tm3oz4ZKW4Zzl64spqL42KnGWD1a05MRKDGY2TC3BOt7g+G8k2U0rdRCXZE3gWc0zgXx/JLlEYj4VM2mgr+l1x/OcdHjjncn5Ix5dCKMVD0iqwD0hpWZ759o9QU82ZKvHGx+si+jiaIZohtXZZ8Yb+cMYtPl3Jrze6I5j58eb041ov6c5yw1Htg/yadhmBhCtW1fu753qBCIanqirz07OdmIPKZqy24bzhmmdHnvO1yJRK1qpLAK93PLSjnLaXw43bTTLStEoaoFWcULfoUKVZysW6cpAPoz7BFFop33CDzCkZrdOxumq72lN+jLcDXSgkcPjrUPVyMmiKOmANcszw5mjZU05kwqTEQgBIZG6/aZsQZcL6YR2mdPVDIeL1FIczZYceOWgbR5x0zNyN6xazKfNJ2cWKmqqwQ9fKKe9bibkcBEVml5wUfiTyaaUo/UcNq6eVHtS0MYa4kibnb0Z7jow85LzpzY6uOTbXdOBXoD3lDyWqIeUy2Su4833XEiiNJA1jt/0G9EGufy82NMAqzqY6MNOG+4f7x2Yq4VeOw4AQtebavDpcxla3pdaO2K6I8cq+ybauR9DkULPp89nEtV4+hs+/Bs20HfHQiI6kAujmMBjDeipKj/YvvnqvCIZtvSjOL7LBgqeLwghBbAI3pmOnRxgluzs3pNJFAga8wDo+22tdy1RJcNZ6Jk1bohICJRBIa2TzQBZQA7T1VrbWs6jJTuVaJmaM9eVujP+67T7W7s7r0zri4aWowU/dW9QXrh7RONSls86nTS3d8QMJLz0jNPNqVbIl4k48AQqpFWI3Ho+4b6A47md8ZUETAdqthGZFPTsqnXdwYka3CoanfORg5Hp3rnD/gln0N3n2n5PHmkgPlIJZpqCAPYNVYFll5gIrStXrK6hER9PKa2tQ8fq+R8VtW21TW9mbzvpSZ2+1hzyYI/EfqynWh7vGFftEglKMSUPp1rO7AAUMFfWNJUwGdMtuRY3aYH1+ZN3pBbnlD00YkQiSaq6uqit7poml3Flg5jCfAY083o0FyLAeyfqHu8dANRFb6h85cXkeQrAPZPNo/OtB1nPxLZUM4gKbEC2Dfd9Bgp0zg9jyHqDUx6eLZtKb2dhBX0gi8iRIqGRbLq6M8Y1YWwM6ER6ZFqB6yhnCkHsQvymZ6dCpHUXQUwxJt6/VDU0TYpSSHdrbvYbf9Mm0MrJ+daPtNirIgQifZmvfX9OQCcJJLPjtbrbWuS/GutC5wAJkRij1Xaydk69GSB+ky9WZNCONsWg2QjxcJQ/vleVnW6JSlYRS8m+C4Ww6NdklX0MZDhUFSBDNORmp0LJUGEAGzq8RPB6lrjJN8T1UOzbZ6qh5PVtjG8VIBFodWhoj/sUrDkJLvGawCc+TJMy3s6CdpUQybrkWc6OyXi1VEwIWsIcaFDq22hJFJ24fKLcYcEWEU16rw3Y5aGmQgnap2OCUBD2Th29RjTbTlWs531BNYUjXFv1aTXN19Uj861earWTsRkibAhsjJSCgJjnBN0Vu/gZMs35FgXHmMw3zHbs62oFjqRmd9vATyGT52Va1lNLWgM54uLHgAKu2Qp6/HigEcBA5puW9c2cOLTn2GJ928gtHq0GqErd1+eNzmPbPcaa0okARON1kJvotpqhuI7T79gjQiR6HAxQEIGYoKqTtRD38RbRnIel7MdyZppSiSa8TCvxgMACJgCr6NvLuFIm9uuBveCzGsmYpJWV2Gkxyd2dMMuyAjwGbOhuuzNodWfcXzN+C3jTZvKIICeDGc9brQtI+1JJaBBDWGqEXmVprWq/iLBUlUCqa
I/5ydHQIS5lj0622pGYhWRatY3eb/j4ybq4XQjIiLRpCXHMIYiVcPkKI1EENXplm1EEjcqDAwxJQ3j5wHLkNZCTLc7xsgj1CPJmHmdGOfyJhsSKYKk0pA1XAslwwCoHumRmu0+c8HjvEe1VpwzdquGe/BmpJ7bmkWgpdrgEGg556ELyUjkzKFcqDBEFsj5JushlZeCT5esKPRkfUnWzBELrKKc9fx4SwBFIht6/FJgAhM3OZnZsRBoKYPQLVmVtgy4JwYA9GXovAG/5HfSw+QSKAcQEbDn3jmSM+f1e72BUUU1ksEsxRzFGErKGRJHfUrEJf1OQEvgNSMR1a6qPLS78KAIvJTIDwD9ee8rv3Bmd/FeVYB4O+FVa4pXr+05jWtTEdfZIyb69GuH5v/2hct+3acC4DMBuGwwuOyq4PRX1DQ3um559rrlma5fqiNUJU9HHjkLRTH3rZvKC1hRz4r79YKrJAzIRXaE4NKLpK4IdBd2QcbOh36eXHBcYjacZo4L6H8vKoDornsp8ekZZtRNglOgO5ij+b9F3KmiBc4p7msrROElFCr3Jk1aQukFVBbey2K6V1edGM9PnovfqZp2AV5iFL/UFZ/HzHVryeJ3akpMAACIpMFhzAnv2nkAA3jZuAo7LzBKVEIBNMPOHxBhuh7+7ncPNEU9ZlFkPPrUjevKucC1MB46Vrn1odFS2nZMWKai6M2aP75qJO97gIZiP/7w5HhDMj7HjVpOm4ZEnFJ2Fr6YUAn1plXZn1mTjwQe49Hx1uf31EvBwqTHKnp8/r3zinmPHU3w/lOtLx6o9/gMoNKW1wx6v7ylKBrTXK3YtpVY/1S7NdEJl2Hysh6ZOIjuaBYSagMRpuohumh+nuEnTtTG6mHGM04ya6GWc/Efz7bk3kNz/fnAxqyDuHtqgb6817bI+1CFz2bHZHvPdFjIeM5+ctJcdR3p07lFjzHZ0gsHAsCxqGi8qQ+Ot/szHHUFX0RoC5bnnG2Pjcyxmn1orN0fsKhONmVNngHWREqbFvVIieJtBt3kcHfIN/BKOc909TY60gs4fuZ0I+wGsZTxVpcztdDmA6NAqKh2OfJlxWAo72d9J1NMMd2blCggNG1cYGJCX4YLPhc9t5VHCcScyBeDCbQUXoaoLdKf6RibULTk84JCDQMNwVCWHd/ZiWnTasnnvEcq2jS6pphs6FCAUAu11raMVPvm2XirWvCYh4rZTMJ4SZFC0un0mE7Ndbg+TtSHC34rih1NI5SZZgesnoANwYqKqIVYVVcpFdGmjYPJVNMjkUjUioioqFpRtWJFrWgksCJWVNINUNrpvGa6crNqpFYXvQ0IRXsDJuLUqE+2rBWJBJGSKEaSEog711TL1kPhlPqdksABKKzoYM7jgYLveoXxhslu1KAe02il1YyieOuEKoCNA9lIxAXfVmS81k7/pC9negITiSQUCU1PGFqEVhILSlkX1MQWIbb67gjENTtJZGHa5HD2u9xYwyo6S5C8AxCBE8DEF+hkU6CqIlYkQ7qmFKCrunCyFjUiS12s7/hHcRZNhwqGyzl/sBhEVpYwqQqfeaIanpxto+t+tg7nKeaWQxTHO9Vu9GS8wZyJbLJkXWFwZLURdR6rEBgrXbY0lXnXG046xCIqIt3VJUPalbmjZZcuWljVlflY0VzXb6wuBqSq7Uj6M7y6GLco3OvgbLKUibXSuHbk4gZd25tlw7SynA07ZOR5ymgYlVa0d6IGx8omAnDuskJPhq2qQgk4ONOIF1OViVb3ZiKrHDMDnOooAaGVqUaUSBbKPrs5DQkq8XFRURGkTXQnaqKSeDuPqDdwETwBmG27KGShABqmNYXONrNKaMebkSFAtB3K+pKX9zhldALYNdVmQDpt/G4tVCZsLAcMYNNQwS7RAovNfCT61PEqEsOs0DXl3Ib+bDMUQH2mQ9POqMXOe/NAJnJaGPNr4lgwEp3rcgXljHFapvMkqeOtncqISGJBRESsiGHkvHjLOY
CZtvAiqKwiy1hb7IB1qm5nmtaoQKVt9YKBuA7uHqptZfd0y2dIZ0tBZ4eBFc15vKGcYQBbhgvzttSkNw2IamDosSNzSHYVicAwXbmutxkKgMDQsZnWXCtMBfPsobwhgSROuStlGK/bVA2H8txtGZzGqVKCF1RS4yWqKgIVRFazhLIfN51UdS4UpgXioK1IBwJake+AdbASNUJh1chq1uhly7KxwXK139nW0UroM0kXVCqxuISiA1le6+R567JiMestIVyqqprzzXNj1eOzTRdgOFBu2NSf8UgFPmOs2j443UzNxtnD+XLWi0RIiURIHeaqirF6lL5tKOcZitPFLrwk2SEizvFBFFZgRUVFNbQoeCj6TrKoZWWyaSnR01hbFc1INpS8TBcbYdd0KKJQrUeyudffVPZVNaU+PHqqWWtbRrwwqQLGZjGSDeWgJ2MYwLqB/JpythXJ/B5JvMI+01Qt/NGBKSRWSRXnryicPZyvta3HVA/lmZP1VKqXl4KNfZlmJLF3TUBn0rFalNrUobzxSK0LGrr304g6qxWT8RO5UhWIhNb2ehQk5IZqqNWWkMKKM20xxmEk2/rjYglDFdgx1fJIATQj+alVeUMxjq5se/+xmkfosledaIugkZVtwzkAbEV9wxes6mmFktJP5gUQCsN0567J9DlFlYlvPmewFVmPKTD0xImqMyIiSsDlq4qR1cDAEBkmw2xYMgYuyHDqPJTzenwDVY/UIxgiQzDkqoDKIIYy1BAMlAlG1YdCdFmOU27MdFvqoXgQUiEVhjAUagtGLx4K3P0TYaoZHp4LCx5FIv0ZvnFtPjWyRDhSCZ8aa+ZMytKCSkdWVOEzLl6eBxBXhK8+Y+BLj59c0saLaDEwDx2ePTzdWNuXk4Th+h/OGfrLHx0/PN1g5u/vnZ5phOWc78Lo69aXPvWjk6MaAiQER0BuCZ6baDYjcYNi+rKGCccrYd7nlLff2TeQsq45JtggyXX6M3EHAaBDc+HxSrscdIghRKiHcv5gsKEnkzKufniisXuqPZg14/XwP53dO5z3HTlNREF018HKTNMOZDlciJQC2rJ2Rck7d6QAIE6jr9zQP1LKVNtRd6vVpewK9ZgnauE/Pz36oWvXqSozqaI369/2M2fsGm9kfRNajWJ6MgE4d6T42ZvWNSNh7sxjcBIqsTNGYOi/Xz40Wo8Cw9rZ7ULxNKPknxwnqXEHoGn1spGsJtuD1pa8j13SlzWUxG9EhGYkW/p8V/BkQKGriv4nLxvIGW5Gcv2anEIZ5HLXtrV37K/kDKyoyjykVNUjnQvl0g29pcBY532cJbrly898Z8dYOeenlj4lVxvmtuhwMbjrNy4sBjHZUBcFgvpyCy6v6muJ3gKAZHfo9w7Ovf+eE+UMR1YXyBRUGJhp2r++ad0bN/Za0Q754M1nDy0uw2vCOM16vH+y/vWnR4nIxYcuCg1jUwzMRyqysjhQ7Nw+gC4W8Et69vl/tbgnFKeK6SVSO2xd+pm8j6Ci8v9tn/YIIpJGxSlSBLQiXd3jv3Z1EQAzdZh/124aXNOXHa+FvlnYcE0sl/f5h46/7fyRnO/H2wWIPOhopTnTFCYMFvxyLjarzBirtmea1uOkVUUkqqWARgquCqyhlaOVkJcS0yW7rgoEjJXF+BKRyNFKe37dFJGV1T1+JmX9qhyrRqGgbWV9bxAkx90Ohu8fqjx6stYbeNZ25VvxvANlUDW0b1vT53TQEHkAiGBFe3P+TVuH/9cPjwwU/KgrF3P0LlHN+7x/ovF3Dx//wFXr3OYTVRBoumHf9sXnphr2TWf2/++bN4ubskJ8YKb5jq/tK2Y8Rz4zRLOhXLGq+OWbNzovUQ/1Hd8+VAnV43SAhibKn/4PB4fHmGnJWzeV/ueVI6Goz3RwNvqF7570TUITBeqRntmX+fsbl2uifUer4TvuPDFalxvW5P/iumWOuedWJrTy2ccnfeLu1MbJFikUJNC8obee1Zcu3LxmxDsvWd
mT9aLu6LRTV9VItDdnbv/xsaPTDcc9YoJVPXO4+LHXr4Xq/YdmnxurUryVRy5b1XP9ht5KKwqYDIgIBY8OTLdmm9Zxyfpy3qa+TDsSj8BKDJASK1jBUAJIlRQEdbVxUZw/lE1vaudUsxZZQ3AdZENoW/mVc0ouFnV567f2VY7ORetL3scvH0qIe/Feki/smHp2tJH3yAUMiVipSwcYqDbt5SuL5y0rpDSOGCz38GcMFW44c7DajBZsRU0tl0c024g+cfe+tFzoMVvRm88Zef8VK45MN77wxJiLRd1mhd+8fLnPJCpQUdWAMVkLd082AbgluXBZLrRCgPND8UQBcalivGEAKlCJIskbPW8wSJf2qfFmnHGrEnSmaS8eyV6/pigqDDAw24q+vmeuaPBnVw0PJpQpV6Q7Mtf83GOTpYCjBCmoUNyAFoWQClTfvW0QSXkdWLSF7pYrVgfePJvVCVBVrWhfzv+/z03+01MnDFO8m4nJqn74urW/dMGyv3v05MGphvOWVvTskcI7zxuYqoUeQwUEall54mQtPflrVhQ8UtG4+t0pz1iIRXf1oRHZVUVvQzlQt0VV5OmxZsCudiiu3PihC/qZGHADluiLz00/N9H89LXD54/kbZLcuIX46P2js+3I49hIpaV3UgHUU6q15KLl+avX9jj+z0KwnHCdv6r3jVuHZhtht3B11wOsaE/G/MFdB/ZP1BxeFBNs8Bdv2XTZ6t7fv/uAS1BdiPxbV6xY35ept8WQqqpP9MjxKgA3h2bbcG5FwWu51nRXGbczrEIgFqSot+WikZzPJrJKwJFKeHCmlWGNrBhgvB6+a2vP+SN5qwDIEMbq4W2PTd567fI3biiHIu6BrajHfPuTY/ccqpYDE8V2XeapoYCgkchvXDximLr3Sy+ULAU+eN2GYmBkUV4dK6OIx1Rv2/d/Y2cjjLqyawoMf+WdW5uhfGfnmEnC0d6s/4fXr2mGLsOTnEfbR+uT9bYhikRLGe+i5flGS0y8bWfplyNdX7OqkNhgPHKyPtMIScHQ2Va4tT/4zYuGEvutAD7xo9H/ctHAu8/pj0RdZdWKekw/Ola59ZGJ/ixHViievuLyGuvkyzDm2vbqNaXXre8Vnbf1YR5YTrg2DxffdcnK6UbozReujnsWLWW8p49Xf/tbu5jIqmiy2yrnm394+9bdY43RSst1wiKRq9eVP3jF8rFqaIh8xmi1/cjxmiYV/Rs29Eq8vMkImaTy4JAioBnJipJ3yfIckjLWfUeqpICgHalH+NQ1I67IBYUheuDI3FUrc++/cCgScYetwjAdmG5+4O5jQbytJM1uJBYoBZFaUQ/021esWEwkWChZDBLFf7luwxkD+UZol6xzQTWyMpD3v/Hs+Mfv2usxu+aFK0gUAvOBK1dyUityHuA3r1jx1q39Y9W2zwTVf90/TUlX/eo1pVUlvxWKwwsST0FB4hYYWm3ba1fnS4EXiXqMiXr7sZONgsdWdLYlf3rNirMGC5Go21Qiqmf0Z96+td+KeOzGi6ghTNTDX/3u4UoryrALT0VVRSUpf6sCHnimbn9xW/95ywq20wk+DViOY1nOeR994xnNtl3cjErjkUh0qBD89YPH/+zeA4bJipseCVUEnjdUzKTBIhOJ0q03bbhufc/JSrucNT88XJlpRoZjTbxxY0+1FXkxP05UJSmQgCCiEjBu3tyLpNlx39HqWDUMDMYb4SdeO3zTxl4rSPWAiZYXM6JkOB7taohmmtF7vnPo0Eyz6HNo47IkqZCCNOZwsaLesmf0B7952cqUJf98YAFwZvvGrSM/f9HyqVp7aWVUdc5xKO//+X1H/uRfD3jMzpHRIla7Cy0zhv/2Zze9bl1puh6OVcN79s8kD4+fP3ug4LOosCqE4tKUiqoYRbVpX7M8d/6yQjo09jt754h0sh598rUjv3TuYCgyf2xZZyqpG4YzXgvf9e1DO0YbvYFpWxs/h8QwxfVzBSBtG33s2lU9GQPo4o
xy6YkhzpF97KYtm4by9ZbtNnILTK8VHSz4tz1w5MPf2e2asjbJHBcIrChyvvn8zZveds7ATCP86rPjgLqF2TyYe+PG3pmG9ZgIQukeTVUFQpFbzh8ikFUwYe9U896Dc3mD265f8Z5tg6n9nne5xLZ6TPummm/7xr7tY7VylkIrLCBRdlEVACUIoOKRTtXCWy4euXp9uXtD5guD5dgPvTn/L37uHGKyqksm7ileQwX/Hx49+e4v7Ziotd3zL06RXT/KN+bTb9z4mTeuf/BI5e690+xuFfS+1ywreBzZJG6AQsUnzDai160tXbOuKG4wLvRTD55YVfS+evP6t2zpj7q0r/slCqvqMd13aPZt/7zv2GxYznAUKatSQpzUxA+qikeYaURXrin9zpUrreppd+Z87GMfOw1eZEVXlrNDxeBfnh3NB95SdGZKyLwoZcyusdpdOye2jpTW9GUJagUL1ocSob9wRena9T1PnqhcuKLkMQt0IO8T6V17pss5AwUre0yhikf0v960vi/nuXLYwenGeC383I3rVpYy6TyJBQroKCpM+NzDJ//bvcdVNe9TJB22UcKYivuCjgc+nPf+7ubNvdlOZWEJTHQJIei83A194s49n73/0HApG9kO+yLZ7xmXNN3ux0Ykovq+K9d84OpVgfGsKmGJmfbJmCqxQsn8LAD49W/v+/auqVLWqBAIbaufvWntzVsH0wS4FUZZ38NpJtyl06/2TdU/dt/xfztc6c8ZgETUjW9wkeL8DraKwFr9wn/cdMmqnuefn/VCk9ncvDHC+776zNefHhsqBWEXdzMdspNKmVuT6UZ0/srSR65ff9WGPgBWhIgWLJdoOscU6b032+En7zt6vGIDQy0rb95cftu5wwtmIi85z9Cquo3zrUg+/+TY7Y+OVUPbmzGukUEKxLP/OnUBJBvJqy37uTevf9OZAy84fPmFZ/6530civ/qlp+/cOTFUPA1endkI8A1X21ZU33z20HuvXL11pIg4Clw8haB7YZxPmmdGRZVPv9SutejFG7b1jt1Tf/3I2PaxRjnrttbFGHWvfUJWgwEINNOM/viGNe/YNrKkUr9ksJDIfCO0v/blZ7+3a8JxI55HvpBQTGcatpDx3rx14BcvXn7BypJzU1YVp0FNk9Ft6T8XK0VimzU9QzOy39s78/dPTTx+spIxXPA56sjQQmqJK9e7uQSzjfDj1699z0XLXgxSLxasdIXbkfznrz5zx/bxwWLQXfZagJej3JCbeiuYa0bZwFy+vuet5w5fs7G/r4srHrvNtIFDS9T10VUXoPn71PdNNr67Z/qO3VO7JxqB4WLAoouq1d17z+A2RSK02gjtJ29Y987zR1786POXMAHX4WVFfvubO7/42ImBQqDz1y1tcKS23/3omjq1tg0Vq8qZy9eWr91YvnBVcU05h9O0OJ6n99GKot0TjQePVO4/NPfUyfpsyxZ8zvnkmFmLtj2kZ4yTTcPUCIWgn75pw5vPGnxJQ+Jf2mzlmDIP/Pm9Bz997/6cz75nurtBne8JeJocMUTE1LJSD0WBvlywcTB3zkjhzJH8+v7c8p6gL+sVMp6bDZUsjzRCqbXtRD06PtvaN9l8bqz+3Hj96GyrEWpgqBAYz4V1mlKt5rWHNaUzAAA8pplGtLIn+PM3b7x0dc9LHaf/kqd2azLf/I7to7/37Z2zzagn67+gSnYfMUwghKKtSNsWCvUNFTKmGPjFDOd8N7WbItW21VrbVtu21pZGJFbhEWU8zph4WN7z9Ic6TYSYHQtSTNWjq9b33PqmM1b0ZF7GBw+8zOH57kp7x6q/882dDx6aKef9tHC6GC93qPNz8hY3f8+FqlZhgcgJg6omzQpmRwCIybwJySQpLWv3brKuWTPz9/EYRq0tIvKrly7/3WvWeMz/TvPgF+AVWrnt3gO3//hoM5LerJe0lGI8Ohz0rhEkCyBLh9clY4c7aowF7MEln6774KJHceM5ZxrR5sHc/7h+7b
Ub+3UeC//fCyx0BUFPHZv9w7v2PnBgOh94OZ9TyGi+QKXh2Hz2ftq6T1mLHasHINnIoKcDbMlfOC79XCvKGLzzomW/9drVbnrTT/JJPD/xp6Mkg3JV9UuPHf+rBw7vn2gUMibrLQ1Zt1ZinpQhDRziEmUsdV3TGYF0F5IjK4ibWJzeSWf/jNbaVkWv2VD+4NWrt62Ix3z+hB/A88p87k4qYjP18B8ePvqPj584PNXI+SYXGGjM7scCEXNskYV4JZh1JAzoAot0Xp1JOxIXj3SzqtWWZeCS1aVbLlv5+s0DOM0k7P9nYLlX6l+mau2vPnnya0+c2DVaY6J8YDw351ahiyDrpm0soSLzLJiz6NS944aST8pqRVJv20LAV6wrv+ui5a/b1I8u3/2KPOAr/Flh2vUhWI3Qfn/n+DefOfXw4dnJWtszlPM938RD9TpWuws7nX9kwa2m6Dnj5iZUtK00QmHCmr7sdRv7bj5v+PyVPfHivXIffPWqgLUYMgCHphr/umv8nj0TzxyvTNbaAmQ8znjGfUxM6vU699RBrYNnqopWJBRtW4msBoZX92UvWdN7w5bBK9eXSxkTXx2vMEyvIlgxZMknMKSh37HpxpNHZx85PP3syerRmeZMI2xZAOCYTZmQ/5AaecQU3IQtZIjyAQ8W/I2D+W0rS5es7jl3RU8pE/MXrSgRXiml+3cFK325ds2CKfon51qHJmr7Jur7J+pHZ5pjc63persRaWjVuqiTySPOGipkeLiYGenJrOnPbxzMbRoqrClnS9nOlv9kJupPEBS8uNf/DxrLmMA1V9rHAAAAJXRFWHRkYXRlOmNyZWF0ZQAyMDE5LTA4LTMwVDE1OjM4OjA2LTA0OjAwvtdu7gAAACV0RVh0ZGF0ZTptb2RpZnkAMjAxOS0wOC0zMFQxNTozODowNi0wNDowMM+K1lIAAAAASUVORK5CYII= diff --git a/http/1.1.0/requirements.txt b/http/1.1.0/requirements.txt deleted file mode 100644 index ae3e5391..00000000 --- a/http/1.1.0/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -uncurl==0.0.10 -requests==2.25.1 \ No newline at end of file diff --git a/http/1.1.0/src/app.py b/http/1.1.0/src/app.py deleted file mode 100755 index 6b7fcc87..00000000 --- a/http/1.1.0/src/app.py +++ /dev/null @@ -1,247 +0,0 @@ -import time -import json -import ast -import random -import socket -import uncurl -import asyncio -import requests -import subprocess - -from walkoff_app_sdk.app_base import AppBase - -class HTTP(AppBase): - __version__ = "1.0.0" - app_name = "http" - - def __init__(self, redis, logger, console_logger=None): - print("INIT") - """ - Each app should have this __init__ to set up Redis and logging. - :param redis: - :param logger: - :param console_logger: - """ - super().__init__(redis, logger, console_logger) - - # This is dangerously fun :) - # Do we care about arbitrary code execution here? 
- def curl(self, statement): - process = subprocess.Popen(statement, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, shell=True) - stdout = process.communicate() - item = "" - if len(stdout[0]) > 0: - print("Succesfully ran bash!") - item = stdout[0] - else: - print("FAILED to run bash!") - item = stdout[1] - - try: - ret = item.decode("utf-8") - return ret - except: - return item - - return item - #try: - # if not statement.startswith("curl "): - # statement = "curl %s" % statement - - # data = uncurl.parse(statement) - # request = eval(data) - # if isinstance(request, requests.models.Response): - # return request.text - # else: - # return "Unable to parse the curl parameter. Remember to start with curl " - #except: - # return "An error occurred during curl parsing" - - def splitheaders(self, headers): - parsed_headers = {} - if headers: - split_headers = headers.split("\n") - self.logger.info(split_headers) - for header in split_headers: - if ": " in header: - splititem = ": " - elif ":" in header: - splititem = ":" - elif "= " in header: - splititem = "= " - elif "=" in header: - splititem = "=" - else: - self.logger.info("Skipping header %s as its invalid" % header) - continue - - splitheader = header.split(splititem) - if len(splitheader) == 2: - parsed_headers[splitheader[0]] = splitheader[1] - else: - self.logger.info("Skipping header %s with split %s cus only one item" % (header, splititem)) - continue - - return parsed_headers - - def checkverify(self, verify): - if verify == None: - return False - elif verify: - return True - elif not verify: - return False - elif verify.lower().strip() == "false": - return False - else: - return True - - def checkbody(self, body): - # Indicates json - if body.strip().startswith("{"): - body = json.dumps(ast.literal_eval(body)) - - # Not sure if loading is necessary - # Seemed to work with plain string into data=body too, and not parsed json=body - #try: - # body = json.loads(body) - #except 
json.decoder.JSONDecodeError as e: - # return body - - return body - else: - return body - - def fix_url(self, url): - # Random bugs seen by users - if "hhttp" in url: - url = url.replace("hhttp", "http") - - if "http:/" in url and not "http://" in url: - url = url.replace("http:/", "http://", -1) - if "https:/" in url and not "https://" in url: - url = url.replace("https:/", "https://", -1) - if "http:///" in url: - url = url.replace("http:///", "http://", -1) - if "https:///" in url: - url = url.replace("https:///", "https://", -1) - if not "http://" in url and not "http" in url: - url = f"http://{url}" - - return url - - def GET(self, url, headers="", username="", password="", verify=True): - url = self.fix_url(url) - - parsed_headers = self.splitheaders(headers) - parsed_headers["User-Agent"] = "Shuffle Automation" - verify = self.checkverify(verify) - - auth=None - if username or password: - auth = requests.auth.HTTPBasicAuth(username, password) - - return requests.get(url, headers=parsed_headers, auth=auth, verify=verify).text - - def POST(self, url, headers="", body="", username="", password="", verify=True): - url = self.fix_url(url) - - parsed_headers = self.splitheaders(headers) - parsed_headers["User-Agent"] = "Shuffle Automation" - verify = self.checkverify(verify) - body = self.checkbody(body) - - auth=None - if username or password: - auth = requests.auth.HTTPBasicAuth(username, password) - - return requests.post(url, headers=parsed_headers, auth=auth, data=body, verify=verify).text - - # UNTESTED BELOW HERE - def PUT(self, url, headers="", body="", username="", password="", verify=True): - url = self.fix_url(url) - - parsed_headers = self.splitheaders(headers) - parsed_headers["User-Agent"] = "Shuffle Automation" - verify = self.checkverify(verify) - body = self.checkbody(body) - - auth=None - if username or password: - auth = requests.auth.HTTPBasicAuth(username, password) - - return requests.put(url, headers=parsed_headers, auth=auth, data=body, 
verify=verify).text - - def PATCH(self, url, headers="", body="", username="", password="", verify=True): - url = self.fix_url(url) - - parsed_headers = self.splitheaders(headers) - parsed_headers["User-Agent"] = "Shuffle Automation" - verify = self.checkverify(verify) - body = self.checkbody(body) - - auth=None - if username or password: - auth = requests.auth.HTTPBasicAuth(username, password) - - return requests.patch(url, headers=parsed_headers, data=body, auth=auth, verify=verify).text - - def DELETE(self, url, headers="", body="", username="", password="", verify=True): - url = self.fix_url(url) - - parsed_headers = self.splitheaders(headers) - parsed_headers["User-Agent"] = "Shuffle Automation" - verify = self.checkverify(verify) - - auth=None - if username or password: - auth = requests.auth.HTTPBasicAuth(username, password) - - return requests.delete(url, headers=parsed_headers, auth=auth, verify=verify).text - - def HEAD(self, url, headers="", body="", username="", password="", verify=True): - url = self.fix_url(url) - - parsed_headers = self.splitheaders(headers) - parsed_headers["User-Agent"] = "Shuffle Automation" - verify = self.checkverify(verify) - body = self.checkbody(body) - - auth=None - if username or password: - auth = requests.auth.HTTPBasicAuth(username, password) - - return requests.head(url, headers=parsed_headers, auth=auth, verify=verify).text - - def OPTIONS(self, url, headers="", body="", username="", password="", verify=True): - url = self.fix_url(url) - - parsed_headers = self.splitheaders(headers) - parsed_headers["User-Agent"] = "Shuffle Automation" - verify = self.checkverify(verify) - body = self.checkbody(body) - - auth=None - if username or password: - auth = requests.auth.HTTPBasicAuth(username, password) - - return requests.options(url, headers=parsed_headers, auth=auth, verify=verify).text - - -# Run the actual thing after we've checked params -def run(request): - print("Starting cloud!") - action = request.get_json() - 
print(action) - print(type(action)) - authorization_key = action.get("authorization") - current_execution_id = action.get("execution_id") - - if action and "name" in action and "app_name" in action: - HTTP.run(action) - return f'Attempting to execute function {action["name"]} in app {action["app_name"]}' - else: - return f'Invalid action' - -if __name__ == "__main__": - HTTP.run() diff --git a/http/1.2.0/api.yaml b/http/1.2.0/api.yaml deleted file mode 100644 index 65e22188..00000000 --- a/http/1.2.0/api.yaml +++ /dev/null @@ -1,522 +0,0 @@ -walkoff_version: 1.2.0 -app_version: 1.2.0 -name: http -description: HTTP app -tags: - - Testing - - HTTP -categories: - - Testing - - HTTP -contact_info: - name: "@frikkylikeme" - url: https://github.com/frikky - email: "frikky@shuffler.io" -actions: - - name: GET - description: Runs a GET request towards the specified endpoint - parameters: - - name: url - description: The URL to get - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: username - description: The username to use - multiline: false - required: false - example: "Username" - schema: - type: string - - name: password - description: The password to use - multiline: false - required: false - example: "*****" - schema: - type: string - - name: verify - description: Check certificate - multiline: false - options: - - false - - true - required: false - example: "false" - schema: - type: bool - - name: http_proxy - description: Add a HTTP proxy - multiline: false - required: false - example: "http://192.168.0.1:8080" - schema: - type: bool - - name: https_proxy - description: Add a HTTPS proxy - multiline: false - required: false - example: "http://192.168.0.1:8080" - schema: - type: bool - - name: timeout - description: Add a timeout for the 
request, in seconds - multiline: false - required: false - example: "10" - schema: - type: bool - - name: to_file - description: Makes the response into a file, and returns it as an ID - multiline: false - required: false - options: - - false - - true - example: "true" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: POST - description: Runs a POST request towards the specified endpoint - parameters: - - name: url - description: The URL to post to - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: body - description: The body to use - multiline: true - example: "{\n\t'json': 'blob'\n}" - required: false - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: username - description: The username to use - multiline: false - required: false - example: "Username" - schema: - type: string - - name: password - description: The password to use - multiline: false - required: false - example: "*****" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - false - - true - example: "false" - schema: - type: bool - - name: http_proxy - description: Add a HTTP proxy - multiline: false - required: false - example: "http://192.168.0.1:8080" - schema: - type: bool - - name: https_proxy - description: Add a HTTPS proxy - multiline: false - required: false - example: "http://192.168.0.1:8080" - schema: - type: bool - - name: timeout - description: Add a timeout for the request, in seconds - multiline: false - required: false - example: "10" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: PATCH - description: Runs a PATCHrequest towards the specified endpoint - parameters: - - name: url - 
description: The URL to post to - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: body - description: The body to use - multiline: true - example: "{\n\t'json': 'blob'\n}" - required: false - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: username - description: The username to use - multiline: false - required: false - example: "Username" - schema: - type: string - - name: password - description: The password to use - multiline: false - required: false - example: "*****" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - false - - true - example: "false" - schema: - type: bool - - name: http_proxy - description: Add a HTTP proxy - multiline: false - required: false - example: "http://192.168.0.1:8080" - schema: - type: bool - - name: https_proxy - description: Add a HTTPS proxy - multiline: false - required: false - example: "http://192.168.0.1:8080" - schema: - type: bool - - name: timeout - description: Add a timeout for the request, in seconds - multiline: false - required: false - example: "10" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: PUT - description: Runs a PUT request towards the specified endpoint - parameters: - - name: url - description: The URL to PUT to - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: body - description: The body to use - multiline: true - example: "{\n\t'json': 'blob'\n}" - required: false - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: username - 
description: The username to use - multiline: false - required: false - example: "Username" - schema: - type: string - - name: password - description: The password to use - multiline: false - required: false - example: "*****" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - false - - true - example: "false" - schema: - type: bool - - name: http_proxy - description: Add a HTTP proxy - multiline: false - required: false - example: "http://192.168.0.1:8080" - schema: - type: bool - - name: https_proxy - description: Add a HTTPS proxy - multiline: false - required: false - example: "http://192.168.0.1:8080" - schema: - type: bool - - name: timeout - description: Add a timeout for the request, in seconds - multiline: false - required: false - example: "10" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: DELETE - description: Runs a DELETE request towards the specified endpoint - parameters: - - name: url - description: The URL to post to - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: username - description: The username to use - multiline: false - required: false - example: "Username" - schema: - type: string - - name: password - description: The password to use - multiline: false - required: false - example: "*****" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - true - - false - example: "false" - schema: - type: bool - - name: http_proxy - description: Add a HTTP proxy - multiline: false - required: false - example: "http://192.168.0.1:8080" - schema: - type: bool - - name: https_proxy - 
description: Add a HTTPS proxy - multiline: false - required: false - example: "http://192.168.0.1:8080" - schema: - type: bool - - name: timeout - description: Add a timeout for the request, in seconds - multiline: false - required: false - example: "10" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: HEAD - description: Runs a HEAD request towards the specified endpoint - parameters: - - name: url - description: The URL to HEAD to - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: headers - description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: username - description: The username to use - multiline: false - required: false - example: "Username" - schema: - type: string - - name: password - description: The password to use - multiline: false - required: false - example: "*****" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - false - - true - example: "false" - schema: - type: bool - - name: http_proxy - description: Add a HTTP proxy - multiline: false - required: false - example: "http://192.168.0.1:8080" - schema: - type: bool - - name: https_proxy - description: Add a HTTPS proxy - multiline: false - required: false - example: "http://192.168.0.1:8080" - schema: - type: bool - - name: timeout - description: Add a timeout for the request, in seconds - multiline: false - required: false - example: "10" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: OPTIONS - description: Runs a OPTIONS request towards the specified endpoint - parameters: - - name: url - description: The URL to HEAD to - multiline: false - example: "https://example.com" - required: true - schema: - type: string - - name: headers - 
description: Headers to use - multiline: true - required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" - schema: - type: string - - name: username - description: The username to use - multiline: false - required: false - example: "Username" - schema: - type: string - - name: password - description: The password to use - multiline: false - required: false - example: "*****" - schema: - type: string - - name: verify - description: Whether to check the certificate or not - multiline: false - required: false - options: - - false - - true - example: "false" - schema: - type: bool - - name: http_proxy - description: Add a HTTP proxy - multiline: false - required: false - example: "http://192.168.0.1:8080" - schema: - type: bool - - name: https_proxy - description: Add a HTTPS proxy - multiline: false - required: false - example: "http://192.168.0.1:8080" - schema: - type: bool - - name: timeout - description: Add a timeout for the request, in seconds - multiline: false - required: false - example: "10" - schema: - type: bool - returns: - schema: - type: string - example: "404 NOT FOUND" - - name: curl - description: Run a curl command - parameters: - - name: statement - description: The curl command to run - multiline: true - example: "curl https://example.com" - required: true - schema: - type: string - returns: - schema: - type: string -large_image: 
data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABjCAIAAADihTK7AAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH4wgeDyYG/VPJzQAAMI9JREFUeNrFvXecJWd1Jvyc81bVzd238+SomZFGYZRRQAkLIQQ2XuEFG4zBXsv2EtYG24vZ9WKC1wmEZbDXWnvx77Mx0ZhgsUgIS8gSoJxnNDnHzuHmW/We8/3xVtW9HUYJyXvV6l9P9e0Kz3vyed5zSVXxar4UEFUoDFN6cLIWHphq7Jlq7ploHJxpnaq0p5u2GSmIiAAiqIKgAMBEVAxoMO+t7M2sL2fOGshtHsiu6w0yHqcntKJE1HWFV+VFrx5YqhDVFKNGaJ86WXvkWOXJE9X9083Jum1ZVcAwPMOGDDGINIGYQABIKT6VFY1EFWCmYsDLCv5Zg9mLlxcvXVE4oz/buSKUiV4l0F4VsERVAUMEoBXJQ0fn7t47/dCRypHZVjOCZyjjk2+YiYigCSJdMBFIAQIAIsAJnBIBBFWySqHVllVR7c2Ysway160r/tS60ub+nDuJ1VcFslcYLFEAykQADk83v7l9/M590/smmpEgF3DGYyJSgqp7dIdDDIsCcDoIAjiWMkIXap3Hd0ADsKKNSNtWe7N80XD2Z7f0vWFDT9bjVwOyVwysbqV79lT1S0+N3rV7eqIR5TzOBx4xRcmViIhASgQih0hiq6AOmhgydN7ewXXRAwDOWkWi9UisYEt/5q1nlt+6pbcv6znIzCuE2CsDlpUYpl1jtdsfPHHX3ql6KKWs5xtWkDg0KX5aItIEgdg2EUgJ7jhUGQATlsTotKLiECaiRiiNSNb0eO/c2veOs/uLgVGFAj+5+f9JwUrvY7oe/tWPj3356dFKW3uzHjOJOrGgLv1agBQTOWlKMCCOn7tLoLoRSnUx1c4lUWNCM0KtbTf1Zd574cBbNpW7V/T/DVjp5b+9Y/zWfzt6cLrRk/M9Jok1i4mgMUBIkXLCpGBnygECE6AAxWChI4NIQHaKuxC45P7Tg+njOMjqoTQjuX5t8XcuGT6jP+t++bKV8uWD5ZAarbQ+8f0Dd+ycygcm4xtRxPeeyFS39oFjJ6cgZUrMF8U/USJOFCMao+FQ7gKx+6f07tN/0nzICDrbsqWA33/BwHvOHQRgFeZl4fVywFKFQpnoB3snf//O/UcrYV/eV4UARKwEAhGTxFqTPn4aOhEI7GJIYgUiRaQQ9xUjEQeZhskw+cweEzOYSGKfm7wvuSl3do3h7eClqoYQis617A1rix9/7fKhvPfyrP5LBks0jgz+8odHPvNvRz3PFAIORTWVDRDxaWWKmcAsSm1rm1YtOGO4mOH+rF/OmnLW9GaMb4gJqmhYnWvrXNtON2W2bRsWqvAMsoYCJiKSVJYAOKFGilH6DarqtHKqEa3tCf70muUXjuRehny9NLAcUs1QPvx/93z9mbH+fEDMoqkrAkBMpESaKJIzWERkmKxSPZKWIO/z2t7MucO5bSO5zf2ZVaWgP+flfbO0IEPnWnaiER2ei3ZOt3ZMtHZPt8cbVhU5nzOGANjuh0geSd1/Gh9TVY9Qi4SBP7hi+D9sKr/UQOwlgOVEd6rW/rV/2vngkdnBQsaqxv6JEgUkQuL+EiEjNtSKtBpJzjfnDudft67nqjWlLQOZwHD3+Z1j7ZKO2PUtfpqZln1movnA8fqDJ5tHqxETih4zkdVEvFLEEsDUnV+VCVZ0thn91oX9771wWDRZ5VcQLIfUqUrrV768Y/totT8fhAJiIpAkuCww6kRkDDWt1tq6vCd40+byz27pO284l/p8q85rIg1FE1FKbq7riCYwUFfEVAujB0+17jhQfXi02bIoBcxQ2y
VNi7+nGfpU3d6yrfy7lwzHJuSVAstp36m55i998dndE61y3kQCYtbEeoOBGDpSQJl8plBoNrTreoN3njf0c1v7BvOBe2yRxL4tupDqPNREl44k40pGkn4C2D7Z/Ore6j3H6i2rPT5LIkpQ0i6gEjFTVTAw3ghvObfvw5cOW1HmF8brhcFySE1UW+/8x2d2jTXLeS9SAMTMnWiTQMQOOmYippmmlHPeL1849O5tQ2WXdiyqoijm4aXqIikKRV3c5TPFochpFEUBURCUCQDtnG79n+fm7jteD5hyHkUSw+8UcB5UzqED4/Xwvdv6Pnjx8Ivxjy8AlvtlrR394j8+88SxSrkQRKJEHGscJXEmwMRCMIbaVhuhvumsvv96xfK15ZyDafG6LXBGqiDSk9XwIz8an2qK71EzwuvX5H7rgr5I4PFC67bgdBKXvgDoD443/vLZ2f1zYV9gREXVPaSqxqghsfeqysBEI/qdiwZ/bVv/C+L1fGA5lWGm3/inHd/ZMT5YyIRxhJCE2pyEUkRExIZmmzJY8H7/mpVvOWsQQCRqFtkDpyBMaIRRYIxxcYZqI5Rfvvvk9ql2T2AE8AxNt+yHLijfcnZfqo+VdlQKGCCrtNjxpzWPWiife3bm6wdqWUMBwUonnlDtIOUwBDDTtJ++ZtmbNpSeHy/G6V8iapg+fe+Bf3l2bLAQhKLJcyfpXleua5gm6tHlq4vffMeWt5w1aEVV4S3yzJKEPN/YM/uXT0w6PXJY3Pbk1Pap1kjB8z3K+RwYWlbwbn+u8sREkykODmbb8j8enTlSDQ3NDxcQS5bziQWff+/C/j9+zUDO44Yl33CcZnW+iNLAkKiUMR/98diOiaYhktNr2mnBctnMd3eMfvb+IwOFTGS1k5NR/Gdx7stERBP18D9dNPyF/7hpVU/W/S0ttfJMaFn70R+e+uC9J96wocdjsqqG8NR445/2VgfzfqRJkQ8gIp/4tqfnIhGCWtVVxaDo89vvmbzneM0QRLH40QxBAat4/er83147uK7Xn4mQMUxJQkFMzA6l+C59pkjxu/efmmlGlBifFwuWq0wdnar/t+/syQdenEXESJEmkqUEJgJophl99HWrP/66tQSyunRyb0WZMFaPfvm7R29/euqnN/WcN5ST2HLp3zw7173miFMZKmZ4+0x497EGU5zGvHtzMefzf3+88n/2zLlAf/GjEWAIkei6UvA3Vw9cNpKZDtX3WGOQQEScQAYiAUo+H5gL//ChMSIIlkZrCbA0Niv6ke/snmpI4MUxOiUVy1QHXS2q0rZ/duPaWy5ebgVEZFz0sBT6JyqtX7zj8JNjraG895YzepAUM58cazx0qtGTNdpVb0h8COU8/trBhlsDUVqW965clgk8/vzuxm3bl8bL/dNjEkUp4D+/vP+nVmWn2up7zEnQ4taGOXZWFhjIeXccrP/znhlDZJeSriXAElFD9IVHj9+7d7o371lRV5sDdQe7scbPtsJP3bju7ecOR6KGQbEvdz9Il/bpWK39njuPHqmEeZ9WFv3XriwATjDxz/tqAuLUlDCBSZlgGMSFwOyZix4fbxHU2anXLw+s6mDOfPlg83PPzTHpfEOjlKDHBFHyDP/RpeXrV2Vn2mo8BjMTJ8+gnYVR9AT8mSemTtZCXsp4LQRLFEx0fKZ56w8OlXK+lfjiScUtKTARmDDVjD5x/dqfO2coEvWYUl2baYYnKi0m1kT/m5H+57uPH5wNB/L+XFuvWl3IeRyJGtKJRvTjk41iYCS5CBETc7LmygwQ3Xm8hSR2v2gwuzznNSIdytIX9te/dKBmCKksiGLfXJiE6+C4r8afvKTvNcNBJYTPpAwX8hEhkS0CkPFouim3Pjq+pEdcLFlKhD+958BErR0Y7gosKF0yBTxDE/XofZctf/eFyzpIqRqmwzONzz4yWvCNC7KtKhP+4IHRx0cbAzkTivqGr1uVR+zp6f7j9cmWBKZT3lJOFxvMrKCCx49PhtMtcU
6w4PMFA34jUlEMZMxf7aw9NNZwjkwBJqq05fZdVaJOIZdAPtEfXdq7vsfUBcbEFj7ReCYiYraKvizfeah2/9EqL3K488ByFuSRwzPffna0nHfxZ0eU4moU4BuabkQ3bi5/5NrVabHUihqiE5XWe7598KYzyr1ZDyBReExf3zX1lZ0zwwUvUrQFK4retuEsANck/bcTDc+QWyRwnHsTEXO85gB8xkTTPjHZdjcJ4LIhPyktIGf4T56pTTQjp31W9YLBbCW0n3hqlkld6kMEAXoD/48uKWc8cpF10iNSmleGJc/w556cCq3yfOM7DywCiepnfnDQ2Z1umUp/MEyNUFf2BH/2xg2apMGiMEwT9dbPfHnPpSuLF68oujiQCccr7T96cLycNVZhiFoW5wzmSoFnRZloshHtmArzHgsSe8sduVpQhn9oPEzv+Nw+vzegSKGKDNN4Uz77XJVIVeMm2ge2lu4/1f7M9jlDlCaSVrGhx//wtlLNqknCh0SgYxkToBjwM5OtO/bPEqHbcs3rgDPh/n1TP9w/3ZP1rKZNhKSnkCDWsvonN64fyAfi2nsKIjRC+yvfOjjXsh++ckVyx0qQP35wdKYlgRc/sFW9eFkWSQXqmcnWVNMGJtWEuK8VO/UuS5o1tH06bFprGKoYzpn1RdOyyoRItTfgfz3ReuBUy1koUWQNf+Cswt/srv/jvmoawRqCVXrDqsLPrM3OhuozJ9dNFSi29Hmf//65mWYkTB3h4q6lA4DPP3TMVQ+65SppV8FnmqpH77pg+Or1fZFIEiUoQf/r9w//4FDl1y8e7s95AriO9ANHa9/dX+nPsZVYALOe2TaUSS/3xHhbnAYS4lycujqoqR0FAqaTDXugEhEQKQBs7fPbogxyxiFj+H/vqbesdXm1Am9YlbtwwL/tueo9JxqpB2CCqH7g7NLKgtfWTsstNl2JJuY93j0dfu9wlZIKRwcsZ62eODr3wP7pUsZbKsogAjUjWV3OfOiqVaJqiBFnefTZB09+bcfUmUPZt2/t18S+ta3e9ug4E6xAQQpuWhopeBt7MwA8IlU8Pdn0DIlCQOJa+UktQVS7vxTaiPTpqSjueIDOLnsKWFVRiUQyBntnom8dbjjdEaWs4beszTLRnz5b3T3TdAi6P+4NvPdtLTasMif2mByVoJPFBYa+umvWmYsYAufvXB3mQ9/Y8ZUnR/vzvtU0LkRaXPeNmaiHt/70hreft6y7B6eqeyfrRJT1aHVvNtXX0MqBmaZvTAq2KDIGK0u+y5dE9Uilra4QNs88Lt15jlSLHoZznis6tKycqEdMHeUQqE+6Mu/H/W1F20anmhopSgZDOS/VbRf6feDH04+NtwoMayVNrUVURFWVoHNN+7evX3H5yoKrkXipdIxVW/fumSwGpttfprUQJqq27fkrim89Z1jULQgUcUK/ebCQ6kvnbpi2DOQXPbVzTjEC63qC+QXRBf2tJV6apPMZw+tL/uI3RAKTUAAynllbjIk4kSZVkuR1y5bC4+MtpCQnpPro5AgCfGt/5fKVBUrV0Onk93ZOjFbagfPnRPMrRsRMrcj++muWe8yakjoAj8nriFjnWZlwmlpHxx7xwrZyt9s97df8sy7xBm9e7SwJxIk8ptRau8j+vIHMlcuCSgTmpN/bRT8RaN7nH5+oTzQi5zc8JDnHnc+NGsOdllznOzGh1rZbRwpv2NyvqoZdTovJeutfdk8HxtRDe+WanrOH80lFCfcdnts3HWY918WIWxoC/PSGUjljREEkPz7ZOFiRrElbQS9QCidCy8rVyzIr854rYDw12dw5K1nTSU2Y0LJ69Yi/ouAn1lPvPt6YbZNC+wO6fmVGlBKODt6+Mf/AqXbcJHehB5TiHygwNFaP7j9Wu3lTr6h6zlodnW48daySD4ykUt71YqJGaH9+20jgmUjUIwjUEP3VQ6duffDkYCEzF9o7fmGL0zHDVA+jj9x36uBsO+cbMJhdf4yzvrl+TbGccc
vAt++oPDzaKmWMMhkmYmJ6vtaUIUy15JMX0VvXepFqQLRjJvqfT1f7M5yaDvee956Zf/9WX+JwgR4ZD798sNEXcCi6PN+7tS/jqh2quHAgc16/v30yzDGJKClApBozVAAYxn1Hqjdv6iWieInu3zc1VQv7i8ESpS9C28pI0X/z1gF3bVEYopOV1jd2Tm3oz4ZKW4Zzl64spqL42KnGWD1a05MRKDGY2TC3BOt7g+G8k2U0rdRCXZE3gWc0zgXx/JLlEYj4VM2mgr+l1x/OcdHjjncn5Ix5dCKMVD0iqwD0hpWZ759o9QU82ZKvHGx+si+jiaIZohtXZZ8Yb+cMYtPl3Jrze6I5j58eb041ov6c5yw1Htg/yadhmBhCtW1fu753qBCIanqirz07OdmIPKZqy24bzhmmdHnvO1yJRK1qpLAK93PLSjnLaXw43bTTLStEoaoFWcULfoUKVZysW6cpAPoz7BFFop33CDzCkZrdOxumq72lN+jLcDXSgkcPjrUPVyMmiKOmANcszw5mjZU05kwqTEQgBIZG6/aZsQZcL6YR2mdPVDIeL1FIczZYceOWgbR5x0zNyN6xazKfNJ2cWKmqqwQ9fKKe9bibkcBEVml5wUfiTyaaUo/UcNq6eVHtS0MYa4kibnb0Z7jow85LzpzY6uOTbXdOBXoD3lDyWqIeUy2Su4833XEiiNJA1jt/0G9EGufy82NMAqzqY6MNOG+4f7x2Yq4VeOw4AQtebavDpcxla3pdaO2K6I8cq+ybauR9DkULPp89nEtV4+hs+/Bs20HfHQiI6kAujmMBjDeipKj/YvvnqvCIZtvSjOL7LBgqeLwghBbAI3pmOnRxgluzs3pNJFAga8wDo+22tdy1RJcNZ6Jk1bohICJRBIa2TzQBZQA7T1VrbWs6jJTuVaJmaM9eVujP+67T7W7s7r0zri4aWowU/dW9QXrh7RONSls86nTS3d8QMJLz0jNPNqVbIl4k48AQqpFWI3Ho+4b6A47md8ZUETAdqthGZFPTsqnXdwYka3CoanfORg5Hp3rnD/gln0N3n2n5PHmkgPlIJZpqCAPYNVYFll5gIrStXrK6hER9PKa2tQ8fq+R8VtW21TW9mbzvpSZ2+1hzyYI/EfqynWh7vGFftEglKMSUPp1rO7AAUMFfWNJUwGdMtuRY3aYH1+ZN3pBbnlD00YkQiSaq6uqit7poml3Flg5jCfAY083o0FyLAeyfqHu8dANRFb6h85cXkeQrAPZPNo/OtB1nPxLZUM4gKbEC2Dfd9Bgp0zg9jyHqDUx6eLZtKb2dhBX0gi8iRIqGRbLq6M8Y1YWwM6ER6ZFqB6yhnCkHsQvymZ6dCpHUXQUwxJt6/VDU0TYpSSHdrbvYbf9Mm0MrJ+daPtNirIgQifZmvfX9OQCcJJLPjtbrbWuS/GutC5wAJkRij1Xaydk69GSB+ky9WZNCONsWg2QjxcJQ/vleVnW6JSlYRS8m+C4Ww6NdklX0MZDhUFSBDNORmp0LJUGEAGzq8RPB6lrjJN8T1UOzbZ6qh5PVtjG8VIBFodWhoj/sUrDkJLvGawCc+TJMy3s6CdpUQybrkWc6OyXi1VEwIWsIcaFDq22hJFJ24fKLcYcEWEU16rw3Y5aGmQgnap2OCUBD2Th29RjTbTlWs531BNYUjXFv1aTXN19Uj861earWTsRkibAhsjJSCgJjnBN0Vu/gZMs35FgXHmMw3zHbs62oFjqRmd9vATyGT52Va1lNLWgM54uLHgAKu2Qp6/HigEcBA5puW9c2cOLTn2GJ928gtHq0GqErd1+eNzmPbPcaa0okARON1kJvotpqhuI7T79gjQiR6HAxQEIGYoKqTtRD38RbRnIel7MdyZppSiSa8TCvxgMACJgCr6NvLuFIm9uuBveCzGsmYpJWV2Gkxyd2dMMuyAjwGbOhuuzNodWfcXzN+C3jTZvKIICeDGc9brQtI+1JJaBBDWGqEXmVprWq/iLBUlUCqa
I/5ydHQIS5lj0622pGYhWRatY3eb/j4ybq4XQjIiLRpCXHMIYiVcPkKI1EENXplm1EEjcqDAwxJQ3j5wHLkNZCTLc7xsgj1CPJmHmdGOfyJhsSKYKk0pA1XAslwwCoHumRmu0+c8HjvEe1VpwzdquGe/BmpJ7bmkWgpdrgEGg556ELyUjkzKFcqDBEFsj5JushlZeCT5esKPRkfUnWzBELrKKc9fx4SwBFIht6/FJgAhM3OZnZsRBoKYPQLVmVtgy4JwYA9GXovAG/5HfSw+QSKAcQEbDn3jmSM+f1e72BUUU1ksEsxRzFGErKGRJHfUrEJf1OQEvgNSMR1a6qPLS78KAIvJTIDwD9ee8rv3Bmd/FeVYB4O+FVa4pXr+05jWtTEdfZIyb69GuH5v/2hct+3acC4DMBuGwwuOyq4PRX1DQ3um559rrlma5fqiNUJU9HHjkLRTH3rZvKC1hRz4r79YKrJAzIRXaE4NKLpK4IdBd2QcbOh36eXHBcYjacZo4L6H8vKoDornsp8ekZZtRNglOgO5ij+b9F3KmiBc4p7msrROElFCr3Jk1aQukFVBbey2K6V1edGM9PnovfqZp2AV5iFL/UFZ/HzHVryeJ3akpMAACIpMFhzAnv2nkAA3jZuAo7LzBKVEIBNMPOHxBhuh7+7ncPNEU9ZlFkPPrUjevKucC1MB46Vrn1odFS2nZMWKai6M2aP75qJO97gIZiP/7w5HhDMj7HjVpOm4ZEnFJ2Fr6YUAn1plXZn1mTjwQe49Hx1uf31EvBwqTHKnp8/r3zinmPHU3w/lOtLx6o9/gMoNKW1wx6v7ylKBrTXK3YtpVY/1S7NdEJl2Hysh6ZOIjuaBYSagMRpuohumh+nuEnTtTG6mHGM04ya6GWc/Efz7bk3kNz/fnAxqyDuHtqgb6817bI+1CFz2bHZHvPdFjIeM5+ctJcdR3p07lFjzHZ0gsHAsCxqGi8qQ+Ot/szHHUFX0RoC5bnnG2Pjcyxmn1orN0fsKhONmVNngHWREqbFvVIieJtBt3kcHfIN/BKOc909TY60gs4fuZ0I+wGsZTxVpcztdDmA6NAqKh2OfJlxWAo72d9J1NMMd2blCggNG1cYGJCX4YLPhc9t5VHCcScyBeDCbQUXoaoLdKf6RibULTk84JCDQMNwVCWHd/ZiWnTasnnvEcq2jS6pphs6FCAUAu11raMVPvm2XirWvCYh4rZTMJ4SZFC0un0mE7Ndbg+TtSHC34rih1NI5SZZgesnoANwYqKqIVYVVcpFdGmjYPJVNMjkUjUioioqFpRtWJFrWgksCJWVNINUNrpvGa6crNqpFYXvQ0IRXsDJuLUqE+2rBWJBJGSKEaSEog711TL1kPhlPqdksABKKzoYM7jgYLveoXxhslu1KAe02il1YyieOuEKoCNA9lIxAXfVmS81k7/pC9negITiSQUCU1PGFqEVhILSlkX1MQWIbb67gjENTtJZGHa5HD2u9xYwyo6S5C8AxCBE8DEF+hkU6CqIlYkQ7qmFKCrunCyFjUiS12s7/hHcRZNhwqGyzl/sBhEVpYwqQqfeaIanpxto+t+tg7nKeaWQxTHO9Vu9GS8wZyJbLJkXWFwZLURdR6rEBgrXbY0lXnXG046xCIqIt3VJUPalbmjZZcuWljVlflY0VzXb6wuBqSq7Uj6M7y6GLco3OvgbLKUibXSuHbk4gZd25tlw7SynA07ZOR5ymgYlVa0d6IGx8omAnDuskJPhq2qQgk4ONOIF1OViVb3ZiKrHDMDnOooAaGVqUaUSBbKPrs5DQkq8XFRURGkTXQnaqKSeDuPqDdwETwBmG27KGShABqmNYXONrNKaMebkSFAtB3K+pKX9zhldALYNdVmQDpt/G4tVCZsLAcMYNNQwS7RAovNfCT61PEqEsOs0DXl3Ib+bDMUQH2mQ9POqMXOe/NAJnJaGPNr4lgwEp3rcgXljHFapvMkqeOtncqISGJBRESsiGHkvHjLOY
CZtvAiqKwiy1hb7IB1qm5nmtaoQKVt9YKBuA7uHqptZfd0y2dIZ0tBZ4eBFc15vKGcYQBbhgvzttSkNw2IamDosSNzSHYVicAwXbmutxkKgMDQsZnWXCtMBfPsobwhgSROuStlGK/bVA2H8txtGZzGqVKCF1RS4yWqKgIVRFazhLIfN51UdS4UpgXioK1IBwJake+AdbASNUJh1chq1uhly7KxwXK139nW0UroM0kXVCqxuISiA1le6+R567JiMestIVyqqprzzXNj1eOzTRdgOFBu2NSf8UgFPmOs2j443UzNxtnD+XLWi0RIiURIHeaqirF6lL5tKOcZitPFLrwk2SEizvFBFFZgRUVFNbQoeCj6TrKoZWWyaSnR01hbFc1INpS8TBcbYdd0KKJQrUeyudffVPZVNaU+PHqqWWtbRrwwqQLGZjGSDeWgJ2MYwLqB/JpythXJ/B5JvMI+01Qt/NGBKSRWSRXnryicPZyvta3HVA/lmZP1VKqXl4KNfZlmJLF3TUBn0rFalNrUobzxSK0LGrr304g6qxWT8RO5UhWIhNb2ehQk5IZqqNWWkMKKM20xxmEk2/rjYglDFdgx1fJIATQj+alVeUMxjq5se/+xmkfosledaIugkZVtwzkAbEV9wxes6mmFktJP5gUQCsN0567J9DlFlYlvPmewFVmPKTD0xImqMyIiSsDlq4qR1cDAEBkmw2xYMgYuyHDqPJTzenwDVY/UIxgiQzDkqoDKIIYy1BAMlAlG1YdCdFmOU27MdFvqoXgQUiEVhjAUagtGLx4K3P0TYaoZHp4LCx5FIv0ZvnFtPjWyRDhSCZ8aa+ZMytKCSkdWVOEzLl6eBxBXhK8+Y+BLj59c0saLaDEwDx2ePTzdWNuXk4Th+h/OGfrLHx0/PN1g5u/vnZ5phOWc78Lo69aXPvWjk6MaAiQER0BuCZ6baDYjcYNi+rKGCccrYd7nlLff2TeQsq45JtggyXX6M3EHAaBDc+HxSrscdIghRKiHcv5gsKEnkzKufniisXuqPZg14/XwP53dO5z3HTlNREF018HKTNMOZDlciJQC2rJ2Rck7d6QAIE6jr9zQP1LKVNtRd6vVpewK9ZgnauE/Pz36oWvXqSozqaI369/2M2fsGm9kfRNajWJ6MgE4d6T42ZvWNSNh7sxjcBIqsTNGYOi/Xz40Wo8Cw9rZ7ULxNKPknxwnqXEHoGn1spGsJtuD1pa8j13SlzWUxG9EhGYkW/p8V/BkQKGriv4nLxvIGW5Gcv2anEIZ5HLXtrV37K/kDKyoyjykVNUjnQvl0g29pcBY532cJbrly898Z8dYOeenlj4lVxvmtuhwMbjrNy4sBjHZUBcFgvpyCy6v6muJ3gKAZHfo9w7Ovf+eE+UMR1YXyBRUGJhp2r++ad0bN/Za0Q754M1nDy0uw2vCOM16vH+y/vWnR4nIxYcuCg1jUwzMRyqysjhQ7Nw+gC4W8Et69vl/tbgnFKeK6SVSO2xd+pm8j6Ci8v9tn/YIIpJGxSlSBLQiXd3jv3Z1EQAzdZh/124aXNOXHa+FvlnYcE0sl/f5h46/7fyRnO/H2wWIPOhopTnTFCYMFvxyLjarzBirtmea1uOkVUUkqqWARgquCqyhlaOVkJcS0yW7rgoEjJXF+BKRyNFKe37dFJGV1T1+JmX9qhyrRqGgbWV9bxAkx90Ohu8fqjx6stYbeNZ25VvxvANlUDW0b1vT53TQEHkAiGBFe3P+TVuH/9cPjwwU/KgrF3P0LlHN+7x/ovF3Dx//wFXr3OYTVRBoumHf9sXnphr2TWf2/++bN4ubskJ8YKb5jq/tK2Y8Rz4zRLOhXLGq+OWbNzovUQ/1Hd8+VAnV43SAhibKn/4PB4fHmGnJWzeV/ueVI6Goz3RwNvqF7570TUITBeqRntmX+fsbl2uifUer4TvuPDFalxvW5P/iumWOuedWJrTy2ccnfeLu1MbJFikUJNC8obee1Zcu3LxmxDsvWd
mT9aLu6LRTV9VItDdnbv/xsaPTDcc9YoJVPXO4+LHXr4Xq/YdmnxurUryVRy5b1XP9ht5KKwqYDIgIBY8OTLdmm9Zxyfpy3qa+TDsSj8BKDJASK1jBUAJIlRQEdbVxUZw/lE1vaudUsxZZQ3AdZENoW/mVc0ouFnV567f2VY7ORetL3scvH0qIe/Feki/smHp2tJH3yAUMiVipSwcYqDbt5SuL5y0rpDSOGCz38GcMFW44c7DajBZsRU0tl0c024g+cfe+tFzoMVvRm88Zef8VK45MN77wxJiLRd1mhd+8fLnPJCpQUdWAMVkLd082AbgluXBZLrRCgPND8UQBcalivGEAKlCJIskbPW8wSJf2qfFmnHGrEnSmaS8eyV6/pigqDDAw24q+vmeuaPBnVw0PJpQpV6Q7Mtf83GOTpYCjBCmoUNyAFoWQClTfvW0QSXkdWLSF7pYrVgfePJvVCVBVrWhfzv+/z03+01MnDFO8m4nJqn74urW/dMGyv3v05MGphvOWVvTskcI7zxuYqoUeQwUEall54mQtPflrVhQ8UtG4+t0pz1iIRXf1oRHZVUVvQzlQt0VV5OmxZsCudiiu3PihC/qZGHADluiLz00/N9H89LXD54/kbZLcuIX46P2js+3I49hIpaV3UgHUU6q15KLl+avX9jj+z0KwnHCdv6r3jVuHZhtht3B11wOsaE/G/MFdB/ZP1BxeFBNs8Bdv2XTZ6t7fv/uAS1BdiPxbV6xY35ept8WQqqpP9MjxKgA3h2bbcG5FwWu51nRXGbczrEIgFqSot+WikZzPJrJKwJFKeHCmlWGNrBhgvB6+a2vP+SN5qwDIEMbq4W2PTd567fI3biiHIu6BrajHfPuTY/ccqpYDE8V2XeapoYCgkchvXDximLr3Sy+ULAU+eN2GYmBkUV4dK6OIx1Rv2/d/Y2cjjLqyawoMf+WdW5uhfGfnmEnC0d6s/4fXr2mGLsOTnEfbR+uT9bYhikRLGe+i5flGS0y8bWfplyNdX7OqkNhgPHKyPtMIScHQ2Va4tT/4zYuGEvutAD7xo9H/ctHAu8/pj0RdZdWKekw/Ola59ZGJ/ixHViievuLyGuvkyzDm2vbqNaXXre8Vnbf1YR5YTrg2DxffdcnK6UbozReujnsWLWW8p49Xf/tbu5jIqmiy2yrnm394+9bdY43RSst1wiKRq9eVP3jF8rFqaIh8xmi1/cjxmiYV/Rs29Eq8vMkImaTy4JAioBnJipJ3yfIckjLWfUeqpICgHalH+NQ1I67IBYUheuDI3FUrc++/cCgScYetwjAdmG5+4O5jQbytJM1uJBYoBZFaUQ/021esWEwkWChZDBLFf7luwxkD+UZol6xzQTWyMpD3v/Hs+Mfv2usxu+aFK0gUAvOBK1dyUityHuA3r1jx1q39Y9W2zwTVf90/TUlX/eo1pVUlvxWKwwsST0FB4hYYWm3ba1fnS4EXiXqMiXr7sZONgsdWdLYlf3rNirMGC5Go21Qiqmf0Z96+td+KeOzGi6ghTNTDX/3u4UoryrALT0VVRSUpf6sCHnimbn9xW/95ywq20wk+DViOY1nOeR994xnNtl3cjErjkUh0qBD89YPH/+zeA4bJipseCVUEnjdUzKTBIhOJ0q03bbhufc/JSrucNT88XJlpRoZjTbxxY0+1FXkxP05UJSmQgCCiEjBu3tyLpNlx39HqWDUMDMYb4SdeO3zTxl4rSPWAiZYXM6JkOB7taohmmtF7vnPo0Eyz6HNo47IkqZCCNOZwsaLesmf0B7952cqUJf98YAFwZvvGrSM/f9HyqVp7aWVUdc5xKO//+X1H/uRfD3jMzpHRIla7Cy0zhv/2Zze9bl1puh6OVcN79s8kD4+fP3ug4LOosCqE4tKUiqoYRbVpX7M8d/6yQjo09jt754h0sh598rUjv3TuYCgyf2xZZyqpG4YzXgvf9e1DO0YbvYFpWxs/h8QwxfVzBSBtG33s2lU9GQPo4o
xy6YkhzpF97KYtm4by9ZbtNnILTK8VHSz4tz1w5MPf2e2asjbJHBcIrChyvvn8zZveds7ATCP86rPjgLqF2TyYe+PG3pmG9ZgIQukeTVUFQpFbzh8ikFUwYe9U896Dc3mD265f8Z5tg6n9nne5xLZ6TPummm/7xr7tY7VylkIrLCBRdlEVACUIoOKRTtXCWy4euXp9uXtD5guD5dgPvTn/L37uHGKyqksm7ileQwX/Hx49+e4v7Ziotd3zL06RXT/KN+bTb9z4mTeuf/BI5e690+xuFfS+1ywreBzZJG6AQsUnzDai160tXbOuKG4wLvRTD55YVfS+evP6t2zpj7q0r/slCqvqMd13aPZt/7zv2GxYznAUKatSQpzUxA+qikeYaURXrin9zpUrreppd+Z87GMfOw1eZEVXlrNDxeBfnh3NB95SdGZKyLwoZcyusdpdOye2jpTW9GUJagUL1ocSob9wRena9T1PnqhcuKLkMQt0IO8T6V17pss5AwUre0yhikf0v960vi/nuXLYwenGeC383I3rVpYy6TyJBQroKCpM+NzDJ//bvcdVNe9TJB22UcKYivuCjgc+nPf+7ubNvdlOZWEJTHQJIei83A194s49n73/0HApG9kO+yLZ7xmXNN3ux0Ykovq+K9d84OpVgfGsKmGJmfbJmCqxQsn8LAD49W/v+/auqVLWqBAIbaufvWntzVsH0wS4FUZZ38NpJtyl06/2TdU/dt/xfztc6c8ZgETUjW9wkeL8DraKwFr9wn/cdMmqnuefn/VCk9ncvDHC+776zNefHhsqBWEXdzMdspNKmVuT6UZ0/srSR65ff9WGPgBWhIgWLJdoOscU6b032+En7zt6vGIDQy0rb95cftu5wwtmIi85z9Cquo3zrUg+/+TY7Y+OVUPbmzGukUEKxLP/OnUBJBvJqy37uTevf9OZAy84fPmFZ/6530civ/qlp+/cOTFUPA1endkI8A1X21ZU33z20HuvXL11pIg4Clw8haB7YZxPmmdGRZVPv9SutejFG7b1jt1Tf/3I2PaxRjnrttbFGHWvfUJWgwEINNOM/viGNe/YNrKkUr9ksJDIfCO0v/blZ7+3a8JxI55HvpBQTGcatpDx3rx14BcvXn7BypJzU1YVp0FNk9Ft6T8XK0VimzU9QzOy39s78/dPTTx+spIxXPA56sjQQmqJK9e7uQSzjfDj1699z0XLXgxSLxasdIXbkfznrz5zx/bxwWLQXfZagJej3JCbeiuYa0bZwFy+vuet5w5fs7G/r4srHrvNtIFDS9T10VUXoPn71PdNNr67Z/qO3VO7JxqB4WLAoouq1d17z+A2RSK02gjtJ29Y987zR1786POXMAHX4WVFfvubO7/42ImBQqDz1y1tcKS23/3omjq1tg0Vq8qZy9eWr91YvnBVcU05h9O0OJ6n99GKot0TjQePVO4/NPfUyfpsyxZ8zvnkmFmLtj2kZ4yTTcPUCIWgn75pw5vPGnxJQ+Jf2mzlmDIP/Pm9Bz997/6cz75nurtBne8JeJocMUTE1LJSD0WBvlywcTB3zkjhzJH8+v7c8p6gL+sVMp6bDZUsjzRCqbXtRD06PtvaN9l8bqz+3Hj96GyrEWpgqBAYz4V1mlKt5rWHNaUzAAA8pplGtLIn+PM3b7x0dc9LHaf/kqd2azLf/I7to7/37Z2zzagn67+gSnYfMUwghKKtSNsWCvUNFTKmGPjFDOd8N7WbItW21VrbVtu21pZGJFbhEWU8zph4WN7z9Ic6TYSYHQtSTNWjq9b33PqmM1b0ZF7GBw+8zOH57kp7x6q/882dDx6aKef9tHC6GC93qPNz8hY3f8+FqlZhgcgJg6omzQpmRwCIybwJySQpLWv3brKuWTPz9/EYRq0tIvKrly7/3WvWeMz/TvPgF+AVWrnt3gO3//hoM5LerJe0lGI8Ohz0rhEkCyBLh9clY4c7aowF7MEln6774KJHceM5ZxrR5sHc/7h+7b
Ub+3UeC//fCyx0BUFPHZv9w7v2PnBgOh94OZ9TyGi+QKXh2Hz2ftq6T1mLHasHINnIoKcDbMlfOC79XCvKGLzzomW/9drVbnrTT/JJPD/xp6Mkg3JV9UuPHf+rBw7vn2gUMibrLQ1Zt1ZinpQhDRziEmUsdV3TGYF0F5IjK4ibWJzeSWf/jNbaVkWv2VD+4NWrt62Ix3z+hB/A88p87k4qYjP18B8ePvqPj584PNXI+SYXGGjM7scCEXNskYV4JZh1JAzoAot0Xp1JOxIXj3SzqtWWZeCS1aVbLlv5+s0DOM0k7P9nYLlX6l+mau2vPnnya0+c2DVaY6J8YDw351ahiyDrpm0soSLzLJiz6NS944aST8pqRVJv20LAV6wrv+ui5a/b1I8u3/2KPOAr/Flh2vUhWI3Qfn/n+DefOfXw4dnJWtszlPM938RD9TpWuws7nX9kwa2m6Dnj5iZUtK00QmHCmr7sdRv7bj5v+PyVPfHivXIffPWqgLUYMgCHphr/umv8nj0TzxyvTNbaAmQ8znjGfUxM6vU699RBrYNnqopWJBRtW4msBoZX92UvWdN7w5bBK9eXSxkTXx2vMEyvIlgxZMknMKSh37HpxpNHZx85PP3syerRmeZMI2xZAOCYTZmQ/5AaecQU3IQtZIjyAQ8W/I2D+W0rS5es7jl3RU8pE/MXrSgRXiml+3cFK325ds2CKfon51qHJmr7Jur7J+pHZ5pjc63persRaWjVuqiTySPOGipkeLiYGenJrOnPbxzMbRoqrClnS9nOlv9kJupPEBS8uNf/DxrLmMA1V9rHAAAAJXRFWHRkYXRlOmNyZWF0ZQAyMDE5LTA4LTMwVDE1OjM4OjA2LTA0OjAwvtdu7gAAACV0RVh0ZGF0ZTptb2RpZnkAMjAxOS0wOC0zMFQxNTozODowNi0wNDowMM+K1lIAAAAASUVORK5CYII= diff --git a/http/1.2.0/requirements.txt b/http/1.2.0/requirements.txt deleted file mode 100644 index ae3e5391..00000000 --- a/http/1.2.0/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -uncurl==0.0.10 -requests==2.25.1 \ No newline at end of file diff --git a/http/1.2.0/src/app.py b/http/1.2.0/src/app.py deleted file mode 100755 index de56d29c..00000000 --- a/http/1.2.0/src/app.py +++ /dev/null @@ -1,403 +0,0 @@ -import time -import json -import ast -import random -import socket -import uncurl -import asyncio -import requests -import subprocess - -from walkoff_app_sdk.app_base import AppBase - -class HTTP(AppBase): - __version__ = "1.2.0" - app_name = "http" - - def __init__(self, redis, logger, console_logger=None): - print("INIT") - """ - Each app should have this __init__ to set up Redis and logging. - :param redis: - :param logger: - :param console_logger: - """ - super().__init__(redis, logger, console_logger) - - # This is dangerously fun :) - # Do we care about arbitrary code execution here? 
- def curl(self, statement): - process = subprocess.Popen(statement, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, shell=True) - stdout = process.communicate() - item = "" - if len(stdout[0]) > 0: - print("Succesfully ran bash!") - item = stdout[0] - else: - print("FAILED to run bash!") - item = stdout[1] - - try: - ret = item.decode("utf-8") - return ret - except: - return item - - return item - #try: - # if not statement.startswith("curl "): - # statement = "curl %s" % statement - - # data = uncurl.parse(statement) - # request = eval(data) - # if isinstance(request, requests.models.Response): - # return request.text - # else: - # return "Unable to parse the curl parameter. Remember to start with curl " - #except: - # return "An error occurred during curl parsing" - - def splitheaders(self, headers): - parsed_headers = {} - if headers: - split_headers = headers.split("\n") - self.logger.info(split_headers) - for header in split_headers: - if ": " in header: - splititem = ": " - elif ":" in header: - splititem = ":" - elif "= " in header: - splititem = "= " - elif "=" in header: - splititem = "=" - else: - self.logger.info("Skipping header %s as its invalid" % header) - continue - - splitheader = header.split(splititem) - if len(splitheader) == 2: - parsed_headers[splitheader[0]] = splitheader[1] - else: - self.logger.info("Skipping header %s with split %s cus only one item" % (header, splititem)) - continue - - return parsed_headers - - def checkverify(self, verify): - if verify == None: - return False - elif verify: - return True - elif not verify: - return False - elif verify.lower().strip() == "false": - return False - else: - return True - - def checkbody(self, body): - # Indicates json - if isinstance(body, str): - if body.strip().startswith("{"): - body = json.dumps(ast.literal_eval(body)) - - - # Not sure if loading is necessary - # Seemed to work with plain string into data=body too, and not parsed json=body - #try: - # body = json.loads(body) 
- #except json.decoder.JSONDecodeError as e: - # return body - - return body - else: - return body - - if isinstance(body, dict) or isinstance(body, list): - try: - body = json.dumps(body) - except: - return body - - return body - - def fix_url(self, url): - # Random bugs seen by users - if "hhttp" in url: - url = url.replace("hhttp", "http") - - if "http:/" in url and not "http://" in url: - url = url.replace("http:/", "http://", -1) - if "https:/" in url and not "https://" in url: - url = url.replace("https:/", "https://", -1) - if "http:///" in url: - url = url.replace("http:///", "http://", -1) - if "https:///" in url: - url = url.replace("https:///", "https://", -1) - if not "http://" in url and not "http" in url: - url = f"http://{url}" - - return url - - def return_file(self, requestdata): - filedata = { - "filename": "response.txt", - "data": requestdata, - } - fileret = self.set_files([filedata]) - if len(fileret) == 1: - return {"success": True, "file_id": fileret[0]} - - return fileret - - def GET(self, url, headers="", username="", password="", verify=True, http_proxy="", https_proxy="", timeout=5, to_file=False): - url = self.fix_url(url) - - parsed_headers = self.splitheaders(headers) - parsed_headers["User-Agent"] = "Shuffle Automation" - verify = self.checkverify(verify) - proxies = None - if http_proxy: - proxies["http"] = http_proxy - if https_proxy: - proxies["https"] = https_proxy - - auth=None - if username or password: - auth = requests.auth.HTTPBasicAuth(username, password) - - if not timeout: - timeout = 5 - if timeout: - timeout = int(timeout) - - if to_file == "true": - to_file = True - else: - to_file = False - - request = requests.get(url, headers=parsed_headers, auth=auth, verify=verify, proxies=proxies, timeout=timeout) - if not to_file: - return request.text - - return self.return_file(request.text) - - def POST(self, url, headers="", body="", username="", password="", verify=True, http_proxy="", https_proxy="", timeout=5, 
to_file=False): - url = self.fix_url(url) - - parsed_headers = self.splitheaders(headers) - parsed_headers["User-Agent"] = "Shuffle Automation" - verify = self.checkverify(verify) - body = self.checkbody(body) - proxies = None - if http_proxy: - proxies["http"] = http_proxy - if https_proxy: - proxies["https"] = https_proxy - - auth=None - if username or password: - auth = requests.auth.HTTPBasicAuth(username, password) - - if not timeout: - timeout = 5 - if timeout: - timeout = int(timeout) - - if to_file == "true": - to_file = True - else: - to_file = False - - request = requests.post(url, headers=parsed_headers, auth=auth, data=body, verify=verify, proxies=proxies, timeout=timeout) - if not to_file: - return request.text - - return self.return_file(request.text) - - # UNTESTED BELOW HERE - def PUT(self, url, headers="", body="", username="", password="", verify=True, http_proxy="", https_proxy="", timeout=5, to_file=False): - url = self.fix_url(url) - - parsed_headers = self.splitheaders(headers) - parsed_headers["User-Agent"] = "Shuffle Automation" - verify = self.checkverify(verify) - body = self.checkbody(body) - proxies = None - if http_proxy: - proxies["http"] = http_proxy - if https_proxy: - proxies["https"] = https_proxy - - - auth=None - if username or password: - auth = requests.auth.HTTPBasicAuth(username, password) - - if not timeout: - timeout = 5 - if timeout: - timeout = int(timeout) - - if to_file == "true": - to_file = True - else: - to_file = False - - request = requests.put(url, headers=parsed_headers, auth=auth, data=body, verify=verify, proxies=proxies, timeout=timeout) - if not to_file: - return request.text - - return self.return_file(request.text) - - def PATCH(self, url, headers="", body="", username="", password="", verify=True, http_proxy="", https_proxy="", timeout=5, to_file=False): - url = self.fix_url(url) - - parsed_headers = self.splitheaders(headers) - parsed_headers["User-Agent"] = "Shuffle Automation" - verify = 
self.checkverify(verify) - body = self.checkbody(body) - proxies = None - if http_proxy: - proxies["http"] = http_proxy - if https_proxy: - proxies["https"] = https_proxy - - auth=None - if username or password: - auth = requests.auth.HTTPBasicAuth(username, password) - - if not timeout: - timeout = 5 - if timeout: - timeout = int(timeout) - - if to_file == "true": - to_file = True - else: - to_file = False - - request = requests.patch(url, headers=parsed_headers, data=body, auth=auth, verify=verify, proxies=proxies, timeout=timeout) - if not to_file: - return request.text - - return self.return_file(request.text) - - def DELETE(self, url, headers="", body="", username="", password="", verify=True, http_proxy="", https_proxy="", timeout=5, to_file=False): - url = self.fix_url(url) - - parsed_headers = self.splitheaders(headers) - parsed_headers["User-Agent"] = "Shuffle Automation" - verify = self.checkverify(verify) - proxies = None - if http_proxy: - proxies["http"] = http_proxy - if https_proxy: - proxies["https"] = https_proxy - - auth=None - if username or password: - auth = requests.auth.HTTPBasicAuth(username, password) - - if not timeout: - timeout = 5 - if timeout: - timeout = int(timeout) - - if to_file == "true": - to_file = True - else: - to_file = False - - request = requests.delete(url, headers=parsed_headers, auth=auth, verify=verify, proxies=proxies, timeout=timeout) - if not to_file: - return request.text - - return self.return_file(request.text) - - def HEAD(self, url, headers="", body="", username="", password="", verify=True, http_proxy="", https_proxy="", timeout=5, to_file=False): - url = self.fix_url(url) - - parsed_headers = self.splitheaders(headers) - parsed_headers["User-Agent"] = "Shuffle Automation" - verify = self.checkverify(verify) - body = self.checkbody(body) - proxies = None - if http_proxy: - proxies["http"] = http_proxy - if https_proxy: - proxies["https"] = https_proxy - - auth=None - if username or password: - auth = 
requests.auth.HTTPBasicAuth(username, password) - - if not timeout: - timeout = 5 - if timeout: - timeout = int(timeout) - - if to_file == "true": - to_file = True - else: - to_file = False - - request = requests.head(url, headers=parsed_headers, auth=auth, verify=verify, proxies=proxies, timeout=timeout) - if not to_file: - return request.text - - return self.return_file(request.text) - - def OPTIONS(self, url, headers="", body="", username="", password="", verify=True, http_proxy="", https_proxy="", timeout=5, to_file=False): - url = self.fix_url(url) - - parsed_headers = self.splitheaders(headers) - parsed_headers["User-Agent"] = "Shuffle Automation" - verify = self.checkverify(verify) - body = self.checkbody(body) - proxies = None - if http_proxy: - proxies["http"] = http_proxy - if https_proxy: - proxies["https"] = https_proxy - - auth=None - if username or password: - auth = requests.auth.HTTPBasicAuth(username, password) - - if not timeout: - timeout = 5 - - if timeout: - timeout = int(timeout) - - if to_file == "true": - to_file = True - else: - to_file = False - - request = requests.options(url, headers=parsed_headers, auth=auth, verify=verify, proxies=proxies, timeout=timeout) - if not to_file: - return request.text - - return self.return_file(request.text) - - -# Run the actual thing after we've checked params -def run(request): - print("Starting cloud!") - action = request.get_json() - print(action) - print(type(action)) - authorization_key = action.get("authorization") - current_execution_id = action.get("execution_id") - - if action and "name" in action and "app_name" in action: - HTTP.run(action) - return f'Attempting to execute function {action["name"]} in app {action["app_name"]}' - else: - return f'Invalid action' - -if __name__ == "__main__": - HTTP.run() diff --git a/http/1.3.0/requirements.txt b/http/1.3.0/requirements.txt deleted file mode 100644 index ae3e5391..00000000 --- a/http/1.3.0/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ 
-uncurl==0.0.10 -requests==2.25.1 \ No newline at end of file diff --git a/http/1.0.0/Dockerfile b/http/1.4.0/Dockerfile similarity index 100% rename from http/1.0.0/Dockerfile rename to http/1.4.0/Dockerfile diff --git a/http/1.3.0/api.yaml b/http/1.4.0/api.yaml similarity index 96% rename from http/1.3.0/api.yaml rename to http/1.4.0/api.yaml index a90ed131..0c542673 100644 --- a/http/1.3.0/api.yaml +++ b/http/1.4.0/api.yaml @@ -1,5 +1,5 @@ -walkoff_version: 1.3.0 -app_version: 1.3.0 +walkoff_version: 1.4.0 +app_version: 1.4.0 name: http description: HTTP app tags: @@ -27,7 +27,7 @@ actions: description: Headers to use multiline: true required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" + example: "Content-Type: application/json" schema: type: string - name: username @@ -88,7 +88,20 @@ actions: returns: schema: type: string - example: "404 NOT FOUND" + example: | + { + "status": 200, + "body": { + "example": "json" + "data": "json" + }, + "url": "https://example.com", + "headers": { + "Content-Type": "application/json" + }, + "cookies": {}, + "success": true + } - name: POST description: Runs a POST request towards the specified endpoint parameters: @@ -110,7 +123,7 @@ actions: description: Headers to use multiline: true required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" + example: "Content-Type: application/json" schema: type: string - name: username @@ -163,7 +176,7 @@ actions: type: string example: "404 NOT FOUND" - name: PATCH - description: Runs a PATCHrequest towards the specified endpoint + description: Runs a PATCH request towards the specified endpoint parameters: - name: url description: The URL to post to @@ -183,7 +196,7 @@ actions: description: Headers to use multiline: true required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" + example: "Content-Type: application/json" schema: type: string - name: username @@ -256,7 +269,7 @@ actions: description: 
Headers to use multiline: true required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" + example: "Content-Type: application/json" schema: type: string - name: username @@ -318,11 +331,18 @@ actions: required: true schema: type: string + - name: body + description: The body to use + multiline: true + example: "{\n\t'json': 'blob'\n}" + required: false + schema: + type: string - name: headers description: Headers to use multiline: true required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" + example: "Content-Type: application/json" schema: type: string - name: username @@ -388,7 +408,7 @@ actions: description: Headers to use multiline: true required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" + example: "Content-Type: application/json" schema: type: string - name: username @@ -454,7 +474,7 @@ actions: description: Headers to use multiline: true required: false - example: "Authorization: Bearer asd\nContent-Type: application/json" + example: "Content-Type: application/json" schema: type: string - name: username diff --git a/http/1.4.0/requirements.txt b/http/1.4.0/requirements.txt new file mode 100644 index 00000000..b7101bd6 --- /dev/null +++ b/http/1.4.0/requirements.txt @@ -0,0 +1,2 @@ +uncurl==0.0.10 +shuffle_sdk==0.0.31 diff --git a/http/1.3.0/src/app.py b/http/1.4.0/src/app.py similarity index 88% rename from http/1.3.0/src/app.py rename to http/1.4.0/src/app.py index ddebbf6c..95ee49f8 100755 --- a/http/1.3.0/src/app.py +++ b/http/1.4.0/src/app.py @@ -8,10 +8,10 @@ import requests import subprocess -from walkoff_app_sdk.app_base import AppBase +from shuffle_sdk import AppBase class HTTP(AppBase): - __version__ = "1.3.0" + __version__ = "1.4.0" app_name = "http" def __init__(self, redis, logger, console_logger=None): @@ -26,6 +26,7 @@ def __init__(self, redis, logger, console_logger=None): # This is dangerously fun :) # Do we care about arbitrary code execution 
here? + # Probably not huh def curl(self, statement): process = subprocess.Popen(statement, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, shell=True) stdout = process.communicate() @@ -44,18 +45,6 @@ def curl(self, statement): return item return item - #try: - # if not statement.startswith("curl "): - # statement = "curl %s" % statement - - # data = uncurl.parse(statement) - # request = eval(data) - # if isinstance(request, requests.models.Response): - # return request.text - # else: - # return "Unable to parse the curl parameter. Remember to start with curl " - #except: - # return "An error occurred during curl parsing" def splitheaders(self, headers): parsed_headers = {} @@ -63,12 +52,8 @@ def splitheaders(self, headers): split_headers = headers.split("\n") self.logger.info(split_headers) for header in split_headers: - if ": " in header: - splititem = ": " - elif ":" in header: + if ":" in header: splititem = ":" - elif "= " in header: - splititem = "= " elif "=" in header: splititem = "=" else: @@ -76,8 +61,8 @@ def splitheaders(self, headers): continue splitheader = header.split(splititem) - if len(splitheader) == 2: - parsed_headers[splitheader[0]] = splitheader[1] + if len(splitheader) >= 2: + parsed_headers[splitheader[0].strip()] = splititem.join(splitheader[1:]).strip() else: self.logger.info("Skipping header %s with split %s cus only one item" % (header, splititem)) continue @@ -169,14 +154,16 @@ def prepare_response(self, request): except: pass - return json.dumps({ - "success": True, + parseddata = { "status": request.status_code, + "body": jsondata, "url": request.url, "headers": parsedheaders, - "body": jsondata, "cookies":cookies, - }) + "success": True, + } + + return json.dumps(parseddata) except Exception as e: print(f"[WARNING] Failed in request: {e}") return request.text @@ -187,7 +174,8 @@ def GET(self, url, headers="", username="", password="", verify=True, http_proxy parsed_headers = self.splitheaders(headers) 
parsed_headers["User-Agent"] = "Shuffle Automation" verify = self.checkverify(verify) - proxies = None + + proxies = {} if http_proxy: proxies["http"] = http_proxy if https_proxy: @@ -203,9 +191,12 @@ def GET(self, url, headers="", username="", password="", verify=True, http_proxy auth = requests.auth.HTTPBasicAuth(username, password) if not timeout: - timeout = 5 - if timeout: - timeout = int(timeout) + timeout = 25 + else: + try: + timeout = int(timeout) + except: + timeout = 25 if to_file == "true": to_file = True @@ -225,7 +216,7 @@ def POST(self, url, headers="", body="", username="", password="", verify=True, parsed_headers["User-Agent"] = "Shuffle Automation" verify = self.checkverify(verify) body = self.checkbody(body) - proxies = None + proxies = {} if http_proxy: proxies["http"] = http_proxy if https_proxy: @@ -241,9 +232,12 @@ def POST(self, url, headers="", body="", username="", password="", verify=True, auth = requests.auth.HTTPBasicAuth(username, password) if not timeout: - timeout = 5 - if timeout: - timeout = int(timeout) + timeout = 25 + else: + try: + timeout = int(timeout) + except: + timeout = 25 if to_file == "true": to_file = True @@ -256,7 +250,6 @@ def POST(self, url, headers="", body="", username="", password="", verify=True, return self.return_file(request.text) - # UNTESTED BELOW HERE def PUT(self, url, headers="", body="", username="", password="", verify=True, http_proxy="", https_proxy="", timeout=5, to_file=False): url = self.fix_url(url) @@ -264,7 +257,7 @@ def PUT(self, url, headers="", body="", username="", password="", verify=True, h parsed_headers["User-Agent"] = "Shuffle Automation" verify = self.checkverify(verify) body = self.checkbody(body) - proxies = None + proxies = {} if http_proxy: proxies["http"] = http_proxy if https_proxy: @@ -281,9 +274,12 @@ def PUT(self, url, headers="", body="", username="", password="", verify=True, h auth = requests.auth.HTTPBasicAuth(username, password) if not timeout: - timeout = 5 - if 
timeout: - timeout = int(timeout) + timeout = 25 + else: + try: + timeout = int(timeout) + except: + timeout = 25 if to_file == "true": to_file = True @@ -303,7 +299,7 @@ def PATCH(self, url, headers="", body="", username="", password="", verify=True, parsed_headers["User-Agent"] = "Shuffle Automation" verify = self.checkverify(verify) body = self.checkbody(body) - proxies = None + proxies = {} if http_proxy: proxies["http"] = http_proxy if https_proxy: @@ -318,6 +314,14 @@ def PATCH(self, url, headers="", body="", username="", password="", verify=True, else: auth = requests.auth.HTTPBasicAuth(username, password) + if not timeout: + timeout = 25 + else: + try: + timeout = int(timeout) + except: + timeout = 25 + if to_file == "true": to_file = True else: @@ -335,7 +339,8 @@ def DELETE(self, url, headers="", body="", username="", password="", verify=True parsed_headers = self.splitheaders(headers) parsed_headers["User-Agent"] = "Shuffle Automation" verify = self.checkverify(verify) - proxies = None + body = self.checkbody(body) + proxies = {} if http_proxy: proxies["http"] = http_proxy if https_proxy: @@ -351,16 +356,19 @@ def DELETE(self, url, headers="", body="", username="", password="", verify=True auth = requests.auth.HTTPBasicAuth(username, password) if not timeout: - timeout = 5 - if timeout: - timeout = int(timeout) + timeout = 25 + else: + try: + timeout = int(timeout) + except: + timeout = 25 if to_file == "true": to_file = True else: to_file = False - request = requests.delete(url, headers=parsed_headers, auth=auth, verify=verify, proxies=proxies, timeout=timeout) + request = requests.delete(url, headers=parsed_headers, data=body, auth=auth, verify=verify, proxies=proxies, timeout=timeout) if not to_file: return self.prepare_response(request) @@ -373,7 +381,7 @@ def HEAD(self, url, headers="", body="", username="", password="", verify=True, parsed_headers["User-Agent"] = "Shuffle Automation" verify = self.checkverify(verify) body = self.checkbody(body) - 
proxies = None + proxies = {} if http_proxy: proxies["http"] = http_proxy if https_proxy: @@ -389,9 +397,12 @@ def HEAD(self, url, headers="", body="", username="", password="", verify=True, auth = requests.auth.HTTPBasicAuth(username, password) if not timeout: - timeout = 5 - if timeout: - timeout = int(timeout) + timeout = 25 + else: + try: + timeout = int(timeout) + except: + timeout = 25 if to_file == "true": to_file = True @@ -411,7 +422,7 @@ def OPTIONS(self, url, headers="", body="", username="", password="", verify=Tru parsed_headers["User-Agent"] = "Shuffle Automation" verify = self.checkverify(verify) body = self.checkbody(body) - proxies = None + proxies = {} if http_proxy: proxies["http"] = http_proxy if https_proxy: @@ -427,10 +438,12 @@ def OPTIONS(self, url, headers="", body="", username="", password="", verify=Tru auth = requests.auth.HTTPBasicAuth(username, password) if not timeout: - timeout = 5 - - if timeout: - timeout = int(timeout) + timeout = 25 + else: + try: + timeout = int(timeout) + except: + timeout = 25 if to_file == "true": to_file = True @@ -446,10 +459,7 @@ def OPTIONS(self, url, headers="", body="", username="", password="", verify=Tru # Run the actual thing after we've checked params def run(request): - print("Starting cloud!") action = request.get_json() - print(action) - print(type(action)) authorization_key = action.get("authorization") current_execution_id = action.get("execution_id") diff --git a/microsoft-excel/1.0.0/api.yaml b/microsoft-excel/1.0.0/api.yaml index d52baafd..0c27ae28 100644 --- a/microsoft-excel/1.0.0/api.yaml +++ b/microsoft-excel/1.0.0/api.yaml @@ -30,6 +30,40 @@ authentication: schema: type: string actions: + - name: get_excel_file_data + description: Gets data from all cells in an excel file as a list. If CSV, returns it as a CSV list. Max 25.000 lines in total due to timeouts. 
+ auth_not_required: true + parameters: + - name: file_id + description: The file id of the file + multiline: false + required: true + schema: + type: string + - name: to_list + description: Whether the output should be a list or not + multiline: false + required: false + options: + - true + - false + schema: + type: string + - name: sheets + description: The sheets to use. Comma separated. + multiline: false + required: false + schema: + type: string + - name: max_rows + description: The maximum number of rows to return + multiline: false + required: false + schema: + type: string + returns: + schema: + type: string - name: get_user_id description: Returns all users - name: get_files @@ -176,16 +210,4 @@ actions: # returns: # schema: # type: string - - name: get_excel_file_data - description: Gets data from all cells in an excel file as a list - parameters: - - name: file_id - description: The file id of the file - multiline: false - required: true - schema: - type: string - returns: - schema: - type: string large_image: 
data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAZ8AAAFMCAIAAACbKiZCAAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAAhdEVYdENyZWF0aW9uIFRpbWUAMjAyMTowNToxMyAyMjoyOTozNuzdkIwAAB1CSURBVHhe7d0JXNVlvsdxFQVBQVEWAwwVLdwxrUlcM0UtK53GrNEa7bbYYt20xZqprF63LCundTS7OZnVLTMddZrUyt0WM8lSK3PLNBcSRUVBofuD5/F4grP84fzh/M/j5/06r8Pz+5/9wP/L8/zXms3HZdUAAOPU0j8BwCykGwAzkW4AzES6ATAT6QbATKQbADORbgDMRLoBMBPpBsBMpBsAM5FuAMxEugEwE+kGwEykGwAzkW4AzES6ATAT6QbATKQbADORbgDMRLoBMBPpBsBMpBsAM5FuAMxEugEwE+kGwEykGwAzkW4AzES6ATAT6QbATKQbADORbgDMRLoBMBPpBsBMpBsAM5FuAMxEugEwE+kGwEykGwAzkW4AzES6ATAT6QbATKQbADORbgDMRLoBMBPpBsBMpBsAM5FuAMxEugEwE+kGwEykGwAzkW4AzES6ATAT6QbATKQbADORbgDMRLoBMBPpBsBMpBsAM5FuAMxEugEwE+kGwEykGwAzkW4AzES6ATAT6QbATKQbADORbgDMRLoBMBPpBsBMpBsAM5FuAMxEugEwE+kGwEykGwAzkW4AzES6ATAT6QbATKQbADORbgDMRLoBMBPpBsBMpBsAM5FuAMxEugEwE+kGwEykGwAzkW4AzES6ATAT6QbATKQbADPVbD4uSzeBahQVEZkW37R901ZpCU1bJ6Wd26TZrTMe+3zLen0zEDDSDdWhQWT95vEpzeOTS691I7JOhL651M3/fGTxt6t1AQSMdIP96oTVdsuy5OZxJYnWuH4DfbMXpBvsRbrBBimNEksj7HTXrGmjJvo2y0g32It0Q4U1qtfAvWvWojTRpL+mb64s0g32It3gR906Ea4gk+sWpX20BlHR+mb7kG6wF+mGslLjktQws7RTVpJoZzWM17dVJdIN9iLdznTx0Y10p0x30EoSrVbNIGwISbrBXqTbmaVeRKSKMJ1lcSXX9etG6ZuDinSDvUg3k9WsWVP1xdTCMpVrCTGN9M0OQ7rBXqSbUZo0iFNrME9lWXKzuGR9m+ORbrAX6RbCYiLruxb86+u4lMjw3+0AEEJIN9iLdAsZtcNqn1pYdjrRGtdvqG8OfaQb7EW6OVdKbOLpTlnpdSV2AAghpBvsRbo5RekOAKeDTDXCw+rom88ApBvsRboFjXTNWienpSU0bZ9SchQguYTVCtO3nZGu+cc9Po6A1Ck1Pbx2uC4co/Bk4fqdm4uKi3QNJyHdgmb5AzNSGiXqAv7SzbFfV37BsWc+fH36ijm6hmNwbF4gIFERkQ9eMfr5Efef4V1vByLdABsMyuh976XX6wLOQLrBKWIi6+uWJ3nHj+iWU/2pS1Z6UgtdwAFINzhF3jFf+RVT11f2OUFsvZgGPgMa1Yx0g1OEet9N+P4IqGakGwAzkW4AzES6ATATW/MGDVvzluF7a97nR9wfF231yHRtklq4LwJbt+O7gpOFugjMhWkddMsT3x8B1Yx0CxrSrQwb9zOdOvLhfu0ydVGjxqBnb924e4suApP92Gwfqw7YVdZRGJkCMBPpBsBMpBsAM5FuAMxEugEwE+kGwEykGwAzkW4AzES6ATAT6QbATKQbADORbgDMRLoBMBPHCAkajhFSho1na37witFtktJ0UaPGHTOf2H/4gC4C8/Ytk3TLE46A5CikW9CQbmX4PnzQgrEvuweWM3EEJEdhZArATKQbADMxMg0ah49Mi4qLfjmYI409h3JOFhflHj2UX3C84GTh/sO5MvHnA3vkWtoyZdyAkV2aty15TGB8L7QKiYE8y90chX
QLGofPrrPXLL7nnad14V1qXNJH974aVitM1wHwHQ2cVwEVRboFTYDpJrPrl9s27DlY0rGS7lVpV2t/yfWhkg7XrgN75frn3JLr2WOe65SaXvKYihjw9M0/7NmuC+8mDbv7yvP76SIwNkbDW7dMco+hnv9znfoqArf16YW65Qnp5igsdwtVRcXF05a+J92r+2dNfm7RGy9+9NbsLxfP/eoTmbvkIjOza35+/8tFqmHdvHVLrERbz3M792vXVRcBs/FE7oeP/e7E9TY+c97vn7kMG18IgSPdQlVUeN0BHbrrwqel363ZvHeHLqxZvOFT3fJpUEZv5mc4FukWwga27961ZUddeLcrd9/STWt0YcHy77+0stFW37ZdB2X00gXgPKRbCGsQFT2gfQ9d+LTsuzWFJ0/owh+JNit3lmirWydCF4DzkG6hbWCH7la2xlj9Y7aMT3Xh09c/fW9lWHpJx5503OBwpFtoi4uOHdDe6tI33fJp8YbV+/L87JJZOyxMoq1WTf544Gj8gYa8AR16dGx6ri68k8Hp979s04UX23N2Lf7Wf8dtUEZvi5EKBBHpFvKSGsZbWXn6y8H9frtvEm1+165GhUcyJkVIIN1MIOnW2sLxMyTdjp0o0EU5uUfzZFiqC+8k2vq0/oMuAAcj3UyQ2jhpoIXu2+db1i/zvmmIRNuX2zbowouGUTF03BAqSDdDDGjfvWXi2brwztvgtPi3YmtL3Hp1P+c8XQDORroZQqJtoIUl/ZJum3Zv0YUbibaPN36mCy8SYxrTcUMIId3MMaBDj9S4JF14sS/vV4/dNytL3C7N6HVBi/a6AByPY4QETVUcAemxf02ZvmKOLrxIiU1c/tcZuii1I2f3RRNH6cKLiNrh8qj46FhdVwHfB9gY2KFHVHhdXfgzus+wtISmuqhR44n50w4cPaSLwEy6+m7d8oRjhDgK6RY0VZFu63d+f+vrj+0+uF/XXrx47V8v6dhTF9Yy8ZY+w+655HpdVA3OqwB7MTI1Soem58r4VBfeuQ9ON+3e6ndYKv2gQRm9dQGECNLNNNdmXub3SLlz1n6cezRPtZd9t+bn0kNd+iDR1jqphS6qjJ9Dp9UNgUMt+f4IqGakm2lS45IGn9dHF14UFRfNz16q2guyl6mGNzGR9Uf1GKwLIHSw3C1oqu68Cjtydvd96gaJMF17ck6TZh/ePXXLvp39nrpBT/LiwStGj+oxRBdVyfci+fsvu7FRVANd+NMzvYv7CpD/rF+RX3BcF4HxfZh11io4CukWNFV61pg7Zj6x4FTvzBt5A2+smjdt2Xu69kQyYvlfZ0RYPgl8IGyMBs6rAMHI1Eyj+1ylW97JfPjJps914cXoPsOqJ9oA25FuZmqTlNavXaYuvHjni//IyFQXnqTGJV3X7XJdAKGGdDPWVRf0rxNWWxee+N1n/voef7TlRKUWcU4s2It0M9bFbS4caGHbN2+6terEXqUIaaSbyQI5gu6gjN6x9WJ0AYQg0s1kAzp0t3jO0zIuan0BHTeEOtLNcAOtnRKwDOm41YuI1AUQmkg3w0nf7eI2F+rCmv7tu9FxgwFIN8PVCatd0aVvfdt09b2ytYrYuJNm9O/XXdr4zL7XirKfqaOQbuZbtfkr3bJm8YZPC04U6qIasUUI7EW6Ge6Dr5e7dpi3aNG3qyr6EMCBSDeTFZ48ITlVVFysa8vmr1t6MP+wLoDQRLqZTKJt4TerdFERK35YO3/dEl0AoYljhARNlR4jRBQVF13x9zEbPZ0By4qU2MTF971anbvQ+z7AxqgeQ6wv1bqySz/373b6ijl5x47qIjB3Zo3QLU84RoijkG5BU9Xp9ubqBQ++/4IuKmX8pTfcdNFQXVQ9zqsAezEyNZN03F5dPlsXXnRr1Um3vPjf5bOrcxMHP6sjOfI4Koh0M9OMVfN25OzWhSdpCU39Ht1o/+Hc6Svm6gIINaSbgfILjk355B1dePGHtA4t3M746c30FXMk43QBhB
SWuwVN1S13e27RzOcWvaELL6aOfLhfu0wrh+Qe1WPIg1eM1kVV8r1InvMqoKJIt6CponTLO3ak5+N/8btJ/eq/zYyKiHzr03//bfbzeqoXEbXDP7x7ampckq6rjI3RwHkVIBiZmmbKknf9Ltv+c9dLJdqk0Sv9fL8rIgtOFr62/H1dVCX2xIK9SDejfLV94/x1fnaiSoxpnHXqlAvJsQl923ZVbR/mZS9d/v1aXQAhgnQzyvzspbv8DcGy2nfLODtdFzVqSLqd1TBeF14cyj/MnqcIOaSbOVZtXuc3g+pHRLk6bkq7lJZWum/z1y2p3E5dQLCQbuZYkL30wJFDuvAiq31m+Y14+7a9sG4dP3tcVXqHfCBYSDdDfLzxM79L3GrWrJnVrpsu3PQ4p7OV7lvpwZTYtR4hg3QzhERbfqGfTbpkTFpmWOpiJd2EvMqR4/m6AJyNdDPBguxlfjtuwmPHTZF087vbqViy6QtWLyBUkG4h7/iJgvnrlvxW4zdde9H9nPP6t/eablHhda133/YfPqALwMFIt5AncbN4w6e68K5/u24SYbrwRNLNyiGGPtvytZV+IhB0pFtoyz2aZ2Wo2Cm1dVZ7z0vcXCxu2SvkFX/69RddAE5FuoU2CZqVP/g/5VVWu27x0Y104Z2km98te8XXP31P9w3OR7qFsF8O7reSMuc0adbfX8dNKd2y19KpnednL/n+l226AByJY4QETeDHCJm+Ys5j/5qiC+9u7/vnsQP+ogt/Nu7eMujZW3Xh0429/nT/ZTfqwg6+D7AxsEMP38sN3Y3uMyzN7eh1T8yfduCon+2cLZp09d265QnHCHEU0i1oAky3rft23vnmxA27ftS1F8mxiVNGPtQ2uaWuLbhj5uMLspfpwrumjZo8N/z+jNTTu6wGiPMqwF6MTEPV/OylfqNNZLXLrFC0CYvrFnYe2MOuC3Ay0i0kSa5ZWeLWMCraxzZu3ki6WdmyV8h7WP1jti4AhyHdQlLhyZO90s+/sku/gR16/CGtQ6fU9JTYRLnom0/p2jLjghbtdWFZVHhdeVpd+JRz5OCbqxfoImB+DgzJObFQQSx3C5qqO6/CnkM5J4uK9h/Oled3P72AdQUnCzMfG557NE/XPtm1RMz3InnOq4CKIt2CpurSzRafbVm/afeWvGNH8wuOHTh6KL/wuFwXnizcn5d7srhIAlTfr/S8qG/cPFEXAbAxGjivAgTpFjQOTzcrfj5QEhmSdDI0DqsVpiZWGukGe7HcDZUn6SyXLs3bBh5tgrPGwF6kGwAzkW4AzES6ATAT6QbATKQbADORbgDMRLoBMBPpBsBMpBsAM5FuAMxEugEwE3vRB40Be9Hby/cu6BwBCRVFugUN6VYG51WAvRiZAjAT6QbATKQbADORbgDMRLrBKQw4J9aOnN26BQcg3QB7fLZl/Q97tusCDkC6ATb4ctuGG157SBdwBtINCEjBycJpy967dur4/IJjehKcga15g4atecvwvaF/p9T08NrhunCMwpOFG3dtlYDTNZyEdAsa0q0MNvSHvRiZAjAT6QbATKQbADOFxWY6/bgLpsovPPbrkUO/1SiOjoiqU7uOnnoGe+vTf+/K3asLIGCsVQi+yPC6zeOTm8ellFzHl17HJTeIitY3nzFYqwB7kW5O1KheA510ccnNTkVehPO2h7AX6QZ7kW6hISk2QZJO9+xKUy81LknfZgrSDfYi3UJSrZo1VdI1cxvPJsY01jeHJtIN9iLdDBFVsvDudM9OtWMiQ+C4Gi6kG+xFuhmrcf0G7j07tdYi3MErZ0k32It0O4Mkxya4J520z258lr7NAUg32It0O3PVqlWrzJoKuU6IaaRvrnakG+xFuuG0ehGR7j27ko1R4lJiIuvpm6sY6QZ7kW7wpXH9hrpz59bLqxNWW99sK9IN9iLdUDEpsYkq5lTPThp2Lbwj3WAv0g0BCasVpjp0pZ073b+Lj67MwjvSDfYi3WCz+hFRKuZKO3
c676Lr+l94R7rBXqQbqlxcdKzbYjs9nq1dbuEd6QZ7kW4IgpjI+uc0SW2TlNYioalcS1um+D6vAlBRpBscIT46tuDkCd8nbAYqhGPzwhH2H84l2mAv0g2AmUg3AGYi3QCYiXQDYCbSDYCZSDcAZiLdAJiJdANgJtINgJlINwBmIt0AmIl0A2Am0g2AmUg3AGYi3QCYiXQDYCbSDYCZSDcAZiLdAJiJdANgJtINgJlINwBmIt0AmIl0A2Am0g2AmUg3AGYi3QCYiXQDYCbSDYCZSDcAZiLdAJiJdANgJtINFbD16YVyeWXUBF0bYWT3wepzSUNPghFqNh+XpZsIzCfjX2sWl6wL77bn7Ooz8XpdhBqJALn+aMOnN023J+Du6n9d52ZtkmIT3L86+Ypyj+at/OGryQtn6ElVSULtocG3SOPRuf/458q5aqKSHJvwxs0T5b2t3rxuxNTxeipCBH03BId0ADc8Pm9Mv+GZrTqV+a8gZafU1nLT2kdnTRhym54aDOMGjFTvTd6kmoIQQt/NNqrvtudQzitLZulJnuQcyV2QvUwXocaWvtugjF4PXHZTkwZxqszNz9u0a8uP+3Zu379LymbxyS0TmrZIaOq6g3Tlrp06flfuPlXazkffTd7q8yMekIa8yc4PDVUTESpIN9uodAvpgadfgaeb5MWTV42LDI+QtvpPUCZQXCR0brpoqMo4uefQF++qooDzkW5Cbs1slTFt6Xtrtn2rJyFEMDJF9Tm/eTtXtK3bsSnzseHeok3ITXIHuZu0JeNm3T5ZTa9m8jYkyom2UES6ofo8OWysK9qufOG/1UTf5G6ugDNsXS2qGumGajJhyG1qCb0M3i1GmyJ3zs3Pk0a3VuclxyaoiYBfpBuqSVa7TNW4751nVcO6+euWyrX0+x4efKuaAvjFWgXbVGKtgmuVnO+R2gfjpqSf1fxYYcHIaQ94WwAkPaMLWrRPbNA4NipGTZF3sjt333OLZnp7yMjugy/r1LtZfLJ6iDz/jl93f7H1mwlzXlJ3KK/SaxVcn/S7X7Zd8sxoNbFC1j46S96nx3WXZd7V+c3b3Zk1wrXWVX2udz//0MfqCx9rFXx85ABf1530SccNGJmRmq66t2LPoZyt+3b6+PXBL/puwbQge5nMG9LolNpa5jE1sQyZLtEmjYXfrPT4h35X/+tkzr+u2+VyN1e0CZlPMlt1enLYWF27kXlJElNmaXld10OkZyTPIM8jMW37APDyThepxuJvV6tGRW3atUWu5d1KiKgpHknKv3PbM/LBVcQI9bnkw868eaKaUhUCeV35DS6659XBnS92RZuQ55Fn++eNjwd3i7+QRroF2SNzX1YLlW66yPPmVGOyhsu1dMTGvv2UmuJO5pwx/YbLPC89hdWb10nvY9hL4+QijblrP1bPXIYk16zbJ6vElD7jC4vfdD1ELb+XeUzuYG/AtUw8WzXe/eJD1aiotds3qsalGb1UozwJAkln+Srks8vHaXF3f7mW/x8yRW6VsJAcUfe0VyCvK4+V36BEofyy3B8+Y9U8mSLT5ZkJuMoh3YJsV+6+Fxa9KQ35X13+j/iVURNUcnlcVvXsNffKnCMNSaWsSTeMmDpeBkHSv5OLNCQNZRA3Y+U8dWeXN26eKK8lz3nHzMdlODx54QzXQ6SU+UruI3d48ioPnb5KaxAVLdcy2qr0Nmub9+5QjaSG8apRRkqjJhIE8hLyVchnV+NBuZZh433vPqOCZkS3y0rva6dAXlc65vJYachvUH5Z7g+fMOelyyffJgN5KYee3993jxUekW7BJ3/Krj9i9x6T/EF3a3WeNFZt/qr8mHRQRi8Zy0hDLbPzlhpqbnGRAFXDH5nxPO4yIfdXg2XJTRu7b2r8e/xEyaxeOX538JDeqLeNfuWx8h1KQ96G7d23QF5Xdczlt+9xqas84Y2vPST5KD24O7NG6KmwjHSzmWTH1tIDTni76Pv9nuuP+GG3dYJq6zCZc8ovzxa3Xn
yNXKsumJpixWWdesu1BKKPsJDBsmrc2DvE9j16Zcksbynv+lCtk1qoho0q97ryn0aF/sPvv6imlCdPu25HyZC8dXKamgLrSDdHkD/ihd+slEbftl3VGET+1atO1uPzXym5x+9Jr0otOJNHeZuvypNxkJqdpq+YUzrBM3nC7Tklu3y2T2mlpoSE3Py8Mh1Vd/Kh1FJI1xJAu1T6dXue21mupeNWvmPu7qMNn8m1/OIYnFYU6WYz6Wo9OvcfPi76fuWMffsplSlqLadaUiODRI+dLFev6u3PPlANKzJbZaiG31GeElvv9BpY59t76Ffd8uJQ/mHdslWlX1f99/r5wB5V+tU2uaVuwRrSzWbHTxTIf3IfF30/T5798HW5lj/6D8ZNkf/V8j/fNa4pQy1Zlzv4/rdfRkqjJqpRZrBc/qJmPBupheuBGHRqVemR4/mqUYb1mLBX5V7X9XGkt17myy9zUdvioRJINweRLpVaoq9GnS8setPbqFMNcyraGalbJ1y3qt3evBy5DiQ0u5wal63f+YNqhLS4+rG6hSpDujnL6s3ZqiGdncUbKrnhq29+x86ui+pL2mL3qZj2ttGyX66FgL77vyFHbeNm5VJFfw8GI90cJDk2QW0ioNafTrv+UTW9vOMnCuW6bp2S421UlN+xs+ticfGcFf/3+X9UQ620rYT0s0rWOapNZ0wiXdEyX7u3i/XVR1BINwd58qqxsVEx0rea9MFrUsr41NvGWWpZj2unH4t+3PuTXCfGVOxRtpCgVKsOJaQqsRndhCG3SdxL44ut36gpoU7SSjU6ND1HNWA70s0pZMimdjxQh6tVnZQbev3JYxZs2r1VNSq0berug/vlWmLCtUi7OrmO86F2p7dOvgHV45N89LGHf8jRcV8F299BId0cQWbgey4pObLI6s3r1H911/a9HrNg8sIZai1khQZ605bqEz6M6jFENaqTBJN0S6XRKbV1hXaclBG62kxv5qr5aooZ1HEBpIfOhmxVhHRzBJmBJcgksO57V+9PuuvU9r3eDh+idvFpFpdsPSnkOdV+8vKcfrtvlRg/+nXnzCdUKFvcM1zew+wxf1drkOWdV88JAKvNc4tmqsYjf7xdNbypit/FmYB0Cz4ZXaoZeNaahe5Ljse+/ZTq7IzJGl7+7/uRuS+rWyUpnr3mXjWxDHlUmRCZuOBVlS9PXjXO2+pL6Up8MG6K+z5hdlmz7VvXjuXytuVVfHRb5J3Pu+slCWJpq31p1XRjyLchXXVpyG9fQtxbhMn3sOieV3WBigiLzWT/NXv8pfsVDaNijp8oLCouzjg73cdl/+EDh48fVY+Sv+nJfx5fJ6z29pxdo179m5roIk/VK/38yDolRwp7f+1HemopeYa9eb/2Tr9AHpue1OKK8/o0j0+Rrlz2T99Jv+zSjr1u73vNvZf8V1x0w9dX/ks/pmTR276EmEYdzz5XHiXP3PPcLvIGioqLZLqE3eDOF4/pN1wucdGxW/f/PD+7ZEmZuzuzrpVrjzdZ9MOeHT/9uvvClh3lQ8mrDL2gv7zzbq06tUw8Wz6jfDmSejddNPS+Qf91YVoHuY88RCLg6pfvVg/3yOK7Ur+gg/mH3b8QIS8qX4U0ln33pXx7aqKLjycP8HXF51vXX9KxZ/26UWc1jB/SpW96k+bSkPcgoZ/VLvPWi6+e8Mfb5HuQX5arowfrODavbSyei1486nYMWHXcXWkMe2mcxx0P5L+66r+4P8pFskmdLF3X5Xjs9UiQSX9QLczySB51x8zHy2+CsLWyx+YtQ9629A27tTpPrQn1RhJ/xsp55T91GRbflbeDJ8u3Ycuxeb3xfdBm+SqevGqsWqHkUW5+3sxV8w0blVcPRqbB5BqTyhzibZ8qSRk1lPN4eEsJIJlnZLaUPFLr4BSZl9TBLD0O6GQevnzybTNWzZO7qScX0pBy7tqPJWd9HFLJFvLkkghZk26Q9yDvXA2xFfU25AuRDy4fzW+0hTr5KkZMHS
8ftsz3IG2Z8sLiNzs/NJRoqxz6bgDMRN8NgJlINwBmIt0AmIl0A2Am0g2AmUg3AGYi3QCYiXQDYCbSDYCZSDcAZiLdAJiJdANgJtINgJlINwBmIt0AmIl0A2Am0g2AmUg3AGYi3QCYiXQDYCbSDYCZSDcAZiLdAJiJdANgJtINgJlINwAmqlHj/wETRGXjIqRUogAAAABJRU5ErkJggg== diff --git a/microsoft-excel/1.0.0/requirements.txt b/microsoft-excel/1.0.0/requirements.txt index 451582f8..1cdfbefa 100644 --- a/microsoft-excel/1.0.0/requirements.txt +++ b/microsoft-excel/1.0.0/requirements.txt @@ -1,2 +1,3 @@ -requests==2.25.1 openpyxl==3.0.9 +requests +shuffle-sdk diff --git a/microsoft-excel/1.0.0/src/app.py b/microsoft-excel/1.0.0/src/app.py index 7d0b1a13..7031d916 100644 --- a/microsoft-excel/1.0.0/src/app.py +++ b/microsoft-excel/1.0.0/src/app.py @@ -1,14 +1,10 @@ -import socket -import asyncio -import time -import random import json -import uuid -import time import requests -from openpyxl import Workbook, load_workbook -from walkoff_app_sdk.app_base import AppBase +from shuffle_sdk import AppBase +from shuffle_sdk import csv_parse + +from openpyxl import Workbook, load_workbook class MSExcel(AppBase): __version__ = "1.0.0" @@ -123,7 +119,7 @@ def convert_to_csv(self, tenant_id, client_id, client_secret, file_id, sheet="Sh if filedata["success"] != True: return filedata - basename = "file.xlsx" + basename = "/tmp/file.xlsx" with open(basename, "wb") as tmp: tmp.write(filedata["data"]) @@ -131,13 +127,12 @@ def convert_to_csv(self, tenant_id, client_id, client_secret, file_id, sheet="Sh sheet = "Sheet1" #wb = Workbook(basename) - wb = load_workbook(basename) - print("Sheets: %s" % wb.sheetnames) - + wb = load_workbook(basename, read_only=True) + # grab the active worksheet ws = wb.active - for item in ws.iter_rows(): - print(item) + #for item in ws.iter_rows(): + # print(item) csvdata = "" for row in ws.values: @@ -153,41 +148,81 @@ def convert_to_csv(self, tenant_id, client_id, client_secret, file_id, sheet="Sh csvdata = csvdata[:-1]+"\n" csvdata = csvdata[:-1] - print() - print("Data:\n%s\n" % csvdata) + print("Data length: 
(%s)" % len(csvdata)) return csvdata - def get_excel_file_data(self, file_id): + def get_excel_file_data(self, file_id, to_list=True, sheets="", max_rows=100000, skip_rows=0): filedata = self.get_file(file_id) if filedata["success"] != True: - print(f"Bad info from file: {filedata}") + print(f"[ERROR] Bad info from file: {filedata}") return filedata - #filedata = file_id + + if not sheets: + sheets = "" + + sheets = sheets.lower() + max_rows = int(max_rows) + skip_rows = int(skip_rows) + + try: + #print("Filename: %s" % filedata["filename"]) + if "csv" in filedata["filename"]: + try: + filedata["data"] = filedata["data"].decode("utf-8") + except: + try: + filedata["data"] = filedata["data"].decode("utf-16") + except: + filedata["data"] = filedata["data"].decode("latin-1") + + returndata = csv_parse(filedata["data"]) + return returndata + + except Exception as e: + print("Error parsing file with csv parser for file %s: %s" % (filedata["filename"], e)) - basename = "file.xlsx" + basename = "/tmp/file.xlsx" with open(basename, "wb") as tmp: tmp.write(filedata["data"]) #wb = Workbook(basename) try: - wb = load_workbook(basename) + wb = load_workbook(basename, read_only=True) except Exception as e: return { "success": False, - "reason": "The file is invalid. Are you sure it's a valid excel file?", + "reason": "The file is invalid. Are you sure it's a valid excel file? 
CSV files are not supported.", "exception": "Error: %s" % e, } - - print("Sheets: %s" % wb.sheetnames) + # Default + #max_count = 25000 + #if os.getenv("SHUFFLE_APP_SDK_TIMEOUT") > 240: + # Limits are ~no longer relevant if to_list=True + + cnt = 0 + skipped_cnt = 0 output_data = [] for ws in wb.worksheets: - print(f"Title: {ws.title}") + if ws.title.lower() not in sheets and sheets != "": + continue # grab the active worksheet csvdata = "" + if cnt-skipped_cnt > skip_rows: + break + + list_data = [] for row in ws.values: + cnt += 1 + if cnt < skip_rows: + skipped_cnt += 1 + continue + + if cnt-skipped_cnt > max_rows: + break + for value in row: #print(value) if value == None: @@ -197,15 +232,30 @@ def get_excel_file_data(self, file_id): else: csvdata += str(value)+"," - csvdata = csvdata[:-1]+"\n" - csvdata = csvdata[:-1] - - print() - print("Data:\n%s\n" % csvdata) - output_data.append({ + list_data.append(csvdata) + if to_list == False: + csvdata = csvdata[:-1]+"\n" + else: + csvdata = "" + + #csvdata = csvdata[:-1] + + output = { "sheet": ws.title, "data": csvdata, - }) + } + + if to_list == False: + print("Data len (%s): %d" % (ws.title, len(csvdata))) + output_data.append(output) + else: + print("Data len (%s): %d" % (ws.title, len(list_data))) + output_data.append({ + "sheet": ws.title, + "data": list_data, + }) + + print("Done! 
Returning data of length: %d" % len(output_data)) return output_data diff --git a/microsoft-identity-and-access/1.0.0/requirements.txt b/microsoft-identity-and-access/1.0.0/requirements.txt deleted file mode 100644 index fd7d3e06..00000000 --- a/microsoft-identity-and-access/1.0.0/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -requests==2.25.1 \ No newline at end of file diff --git a/microsoft-intune/1.0.0/requirements.txt b/microsoft-intune/1.0.0/requirements.txt deleted file mode 100644 index fd7d3e06..00000000 --- a/microsoft-intune/1.0.0/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -requests==2.25.1 \ No newline at end of file diff --git a/microsoft-security-and-compliance/1.0.0/requirements.txt b/microsoft-security-and-compliance/1.0.0/requirements.txt deleted file mode 100644 index fd7d3e06..00000000 --- a/microsoft-security-and-compliance/1.0.0/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -requests==2.25.1 \ No newline at end of file diff --git a/microsoft-security-oauth2/1.0.0/requirements.txt b/microsoft-security-oauth2/1.0.0/requirements.txt deleted file mode 100644 index 9d84d358..00000000 --- a/microsoft-security-oauth2/1.0.0/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -requests==2.25.1 diff --git a/microsoft-teams-system-access/1.0.0/requirements.txt b/microsoft-teams-system-access/1.0.0/requirements.txt deleted file mode 100644 index fd7d3e06..00000000 --- a/microsoft-teams-system-access/1.0.0/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -requests==2.25.1 \ No newline at end of file diff --git a/microsoft-teams/1.0.0/requirements.txt b/microsoft-teams/1.0.0/requirements.txt deleted file mode 100644 index fd7d3e06..00000000 --- a/microsoft-teams/1.0.0/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -requests==2.25.1 \ No newline at end of file diff --git a/microsoft-teams/1.0.0/src/__pycache__/teams.cpython-38.pyc b/microsoft-teams/1.0.0/src/__pycache__/teams.cpython-38.pyc deleted file mode 100644 index 1c3e43a2..00000000 Binary files 
a/microsoft-teams/1.0.0/src/__pycache__/teams.cpython-38.pyc and /dev/null differ diff --git a/microsoft-teams/1.0.0/src/__pycache__/teams.cpython-39.pyc b/microsoft-teams/1.0.0/src/__pycache__/teams.cpython-39.pyc deleted file mode 100644 index 90a9ca6d..00000000 Binary files a/microsoft-teams/1.0.0/src/__pycache__/teams.cpython-39.pyc and /dev/null differ diff --git a/mysql/1.0.0/requirements.txt b/mysql/1.0.0/requirements.txt index 28cc0ef7..9ae049c1 100644 --- a/mysql/1.0.0/requirements.txt +++ b/mysql/1.0.0/requirements.txt @@ -1,2 +1,2 @@ -requests==2.25.1 -mysql-connector-python==8.0.23 +requests==2.32.4 +mysql-connector-python==9.1.0 diff --git a/netcraft/1.0.0/requirements.txt b/netcraft/1.0.0/requirements.txt index fd7d3e06..480d0c4b 100644 --- a/netcraft/1.0.0/requirements.txt +++ b/netcraft/1.0.0/requirements.txt @@ -1 +1 @@ -requests==2.25.1 \ No newline at end of file +requests==2.32.4 \ No newline at end of file diff --git a/oauth2-example/1.0.0/api.yaml b/oauth2-example/1.0.0/api.yaml deleted file mode 100644 index 40c44127..00000000 --- a/oauth2-example/1.0.0/api.yaml +++ /dev/null @@ -1,53 +0,0 @@ -walkoff_version: 1.0.0 -app_version: 1.0.0 -name: oauth2-example -description: Oauth2 sample -tags: - - Example -categories: - - Example -contact_info: - name: "@frikkylikeme" - url: https://github.com/frikky -authentication: - required: true - type: oauth2 - redirect_uri: "https://login.microsoftonline.com/common/oauth2/authorize" - token_uri: "https://login.microsoftonline.com/common/oauth2/v2.0/token" - client_id: "dae24316-4bec-4832-b660-4cba6dc2477b" - client_secret: "._Qu3EvYY-OW_D57uy79qwEo.32qD6.l0z" - scope: - - UserAuthenticationMethod.ReadWrite.All -actions: - - name: reset_password - description: Change password of a user in Azure - parameters: - - name: userId - description: - example: "user@company.com" - required: true - schema: - type: string - - name: passwordId - description: - example: "28c10230-6103-485e-b985-444c60001490" - 
required: true - schema: - type: string - - name: newPassword - description: - example: "*****" - required: false - schema: - type: string - returns: - example: '{"data": "this is a test", "this_is_a_number": 1, "this_is_a_list": [{"item": [{"hello": "there", "how_is_this": {"sub_in_sub": [{"another": "list"}]}}]}, {"item": "2"}], "subobject": {"data": "subobject"}}' - schema: - type: string - - name: get_password_methods - description: Get available password methods for your user - returns: - example: '{"data": "this is a test", "this_is_a_number": 1, "this_is_a_list": [{"item": [{"hello": "there", "how_is_this": {"sub_in_sub": [{"another": "list"}]}}]}, {"item": "2"}], "subobject": {"data": "subobject"}}' - schema: - type: string -large_image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAK4AAACuCAIAAAAgbqG5AAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JQAAgIMAAPn/AACA6QAAdTAAAOpgAAA6mAAAF2+SX8VGAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH5QgSEisELoK6KgAAPYtJREFUeNrtvXmcVMW5Pv5W1Tmn19l69g1mYGYYHFBIIiCKbIoiaiLiksTtRnOj5maP5uZmNzE3xiS/773GLYuJSzRGJYmKCgqiCSi4gLKNw8AMMMwwa8/S6zmn6v39Ud3NTG/TPdPDkjvPx080cLpO1VvPeeut933rLYKIMIlJANCT3YFJnCqYpMIkQpikwiRCmKTCJEKYpMIkQpikwiRCmKTCJEKYpMIkQpikwiRCmKTCJEKYpMIkQpikwiRCUE52ByYcCBAJuRFCAICk9isY+cNUfnVag/zrRSYjc598/hCGTbWUBYTJMr6WT1P8i1AhMq9Rk4QA/XqwX9eHDGNA1/v1oMc0fabpM80A5yYKLpADAgADwihRCLUyZlcUu6I4FSVXs+RoWpaq5mparmaJahkRR2XPaYTTmwqh+Rg2GQJxQNe7AoEjPk+7z9sVCHgMw2fyADdNREoIBSAECESWitBvUf4PAgIgICIIAIGoEGJlil1hTlUtslrL7I5Ku7PIas3RNBp+bWw3TkecrlQQI0XvNc2jPm/jQP9hj6cnGOjTg4hACFAglBBGCB02TVKFJJXKiEkViBxRIApA2axLsxRYrFOczvqc3HK7w6EokZYRkZ6elDjNqBAla49pNA4M7O13t3qG+oJBgQgAKqUKpTBsysc/wtD7whQxhTCEAABKiMtiqXJmnZGbV5+T41RU+aA4DZXEaUOFiL0GAIYQHw0M7OzrbRrsHzAMgahSqpCQ8HHih0TCPUFEE9EQghKSo6p12blzXPkzcnJUycVhfT71cRpQARGBAAECAN2BwI6+nh29vcf8PhNRo5QRQgg5AdOfCJIWiMgRdSEUQkps9rn5+XNdBYVWK4QNkFOfEKc0FRAQAAgQBDji9W7tOrbL3Tdg6CqlKqGEEJFy5+U8RM0HDls+Rn0gFVBCENFAYQiRo2qz81wLi0oqHQ4ybCwnW6iJRXRqUkHa81JwhzyeTceO7unvD3JuoZRRmooOiOhwgYgAQtp9iA
JQIACAQgglhBAiHa4CABEFookIADRscsp/CABNefWRr+ZCBIWwMNaQk7u0tLzK6YQQIU5R/XAqUiFic7X5vBs72ne5e3UuLIzR0RaCiK5GALmEA6BGmUapXVFyNS1Ps+RomkNRbYzZFKYQKvcXAMBDPBB+k/s595rGgK679WC/rvtMUxdCFxyAhIySYS9K3hmBGORco3S2K395aVmF3XHK7jJOLSpE7KxBQ3+tvf3t7k4/51bGaNK1IDQxYcOeEqJSWmCxVtjtpXZ7kdVWbLPlaRaF0nTFL9t068FOv78r4O/w+dp8vp5gwBAiZKtSSkZTFbLzAc5tjC0oLF5eVpajaqegRXkKUUEgUkI44tvdXRva23qDQdtoJJDfli4EF4IQUmi1VtodNdk507OyXRZNoyz2JykamCTBPOmC9wX1A0ODzYMDR3ze7kAAERmlGqVyCEm6KgnhslhWlFUsKCxihIhTST2cElSIGG5Hfb7nD7fuHehXKVUpTSTZ0GKMqHMOBIqstpqs7LPy8iudjsjOHkYLGWBMXAqHtR/153Hb8ZrGYY/3A3dv89BgV8APCBpjLOnCQQkxhDCEOCMn9/IpVeV2O8Z740nByaeC/DIE4uvHOja0t/m5aWNKImlKEphCBIVwKkpNdvYn8gtrs3OO+/vCkQhILF8ch+gjEcvhasNrmvsHB97t7W4eHPSYpoVSJbFtK38oh7mirGJpSSk9NdTDyaRCxIDqCQaebW3Z3e+2MsYSrAhSgvKTclksH88vmFdQWGKzh5qKF4tKDo5Ihz3PERGQANEFZySk8FPp/3BOHPP7t/d0vdfb0xcMSsWWiBByKQxwPis3b01VdYHFetIdlCeNChHFuLOv97lDLQO6blOUJIKTJCi22RYWFs8rKHSqKqQTB5KaYNDQf/TBu10BPyPko8H+RxYunZmTxxEZIXe899abne25muXA0OCdDXP+ve4M+ecpjmV4Nzymsb27e2tXZ2fAn2SlC6kH08zRtCunVs9x5Z/cxeLkpK5IwSHgi0eOvNZxlBJiU5S48pLK02uahVbruUXFCwuL7YoCwzw2qX5IiECIIcRLRw83DridiuoxDa9phpsi+wcHtvd05WiWIUMXkJJnCcOr23E/NCAAOBV1WWnZgsKird2dW7o6uwOBuNpOEsimKF7T/GNz0wWl5asqKwkQKZwTPykngQpSfF7TfKqleWdfr50pEM/2jnw0VsYuKC1bVlqWrWpw3OswFmERIFmKKl0LAkYszxbG7IriUBSfafTrOgCMalQQgIhPIkQIkIQARLQrygWl5fMKCl/vaN/S1ek3zbhqT8bBgZBX2o8cC/g+XV3jUJSTYjqcaCrIQXYF/I827z/s9TgUNZEyMIUwOJ+V51pVMUVa2pIEEa+fCM9Euh3g4aDz8FA1R4EIgGAI0RsMgAxqJmhe/k27z7vx2NErp0yzKwpKQoTtFennQMQsVfvklKqzC4pebDu8290nXRGx6gEAHIr6QV+fO7j3xpraIqvtxLPhhKa5yuG1eoYeaNzb5vPa4y0K0sXrN02HonxmWs3n6+rL7XYMOYMJCc8lIYQRgjB2SwcBjvq9of8CKLHaBaCc+1HngKMAgKdbm2/4x2sXb3zxgY/2DOhBRggQIt3bkYEAACKW2e2fr6v/zLQah6L4TZPGWwAEol1R2nzeBxr3tnqGaDoRlozgxFFB8qBxoP/hpsYBQ7cyFjtUSggC+EzzTJfrqw2zFxQWyc3BcItAkuCQd+iZ1gOmQBKdoTgKhj/qC9kKAADZqna8maTtIaJCaLvf99v9+/Kttg/6er/yzj8Xr//7vXt2dvn9Mq4xnBDSRQ2ACwqLvtow+0yXy2eaIh7hBKKVsQFDf7ipsXGg/wSz4QRRQfJgT7/7kf0fBTnXaHwe6JwTgDVTq2+prXdpFoE43DCUP+gLBn666/0Vr754zZuvfn/nOwAABEYXGSEAkKWqOZrGBZLwGyN/z1MWunzuwOCA1z
SGDAMAXJqlxTP0X+9vO3/93/7z/bdbhgYpGWHPSuNGILo0yy219WumVlOAIOdx2aBRFuT8kf0f7el3n0g2nAgqSB7sdrv/2NzEEeNuruSiUGC13lZ/xuKSUgTAkVMld/BHvJ6Vr6377o7tXQF/kdX6q707b9qyScYdUhEZCy0rcTBiUpKuD1J1LSouffuSK79/1ieqndkC0cJYvsXSGfD9cs8HSzb8/fa33/zQ3Rv3hwiwuKT0tvozCq1WuVjEikullCP+sblpt/vEsWHCqSB5sG+g/7EDTQKRJeCB1zRn5eV9eeasameWkH6bqIcIAIBCaXfQn6dpUlh5FuufWvbf8M9NA7ounTajdGakLSgNfvmrET8dVcUAIECR1fad2R+7b955JqJMXVEJK7Bafab5yIHGRa/87Z7dO2Dk5kiOSyBWO7O+PHPWrLw8bwI2SEE9dqBp34laKSaWChE78bED+01E6Y6NkikB8JnmkuLSm2vrs1Q1keUsp63UZi+w2IJCAICBIsh5ocX67KGDa95Y3xPws6RsQACFUIVQEZ7q93q7AEAmn+3o61Eokd5PRkc/OROxUV5pP+w1DalsBKDHMLymkadZAHBGTi7E45Wc2ixVvbm2fklxqc80Y6mPiAqlJuJjB/afGCtyAqkgxXrM7/9j8/6AacauC/LbCgpxeeXUNVXVUn8msd7ljysdTgQ0hZjqyCq0Wj2mWWS1vdnZsfqN9S2eoURsIGGzv9qZJafQzpTHDjT9unHX9p7OO957653eLhtTBIKFshnZuTBaBFlOVatn6ImDTQ5FJUCGDONzNfV/W7ZyZfnUvmDw7PyiT1VWQ4IRRQa7pqr68sqpQSFit65ypQiY5h+b9x/z+ymZWNfwRFFBRgR8pvn4gf1uPajF7BcIgAAwhbiqqvrCsvKIvZ1c+gBQ5ciS+8lsVbt/3vlWxjym4bJYtvd0Xbl5/b4BNyPERBFvqAQAPl1dywiVeZEBYX7zvbcufm3dfft2KYSolHpMo9LhvLRiKoy2pZQveKp1f4ffZ6FMFzzfYrmldubykvK1Sy7629KLfzx3HkhfSIIW5J8LxAvLyq+qqjaFEPHYoDHm1oOPH9jvM02SwNDJCCaEChheEZ882HzY67Em4IFAvLZ6+nlFJWJkZCg5Smw2jmhhbO+AuzY75/Fzl2cpapDzPM3SNNi/auNLb3d3KoTyBDvV84tLvz37Y+5gYNAwBIJTURVKnKpqCNEdCNgU5f+dfW6+DA4lHSAjpCcQeOzARzamAAGvaa6qmDojO9cUQiCuLJ+ysLAERuMTCS8W5xWVXFs9XSDGZYOVscNez5MHm2WvJogNE0MFRABY13Z4p7sv1o8kB8OFuKZ62vzCotTdapEFwhQoF4I9/X0XlVfeOqPBz02B6FTU7qD/mjdf3dDeJh1QsaJHgO+d+fEnF124vLQ8T7PYmGJnql1RKhzOm2vqX1p2ycryKaN2SfLsz63NB4cGbYyZQjhV5XM1M0H6wQjhYb9CSnNAiECcX1h0TfU0nmClsCvKTnffurbDkKYfJXVkfvmRctzR2/Pogf0apbGtE4CgEFdXTTu3qDgt96oMFW46dvSyTS9lKarHNG+cPoMR8tzhg5FUNoVSnXMAuG/+os9W18b1HWM4gbYvGOgOBuRWsNzmsDCWSuKhbNOtB5eu//tBz5BdUfr14Jop055YdEG63mJ52CqyUlBCtnR1/qX1oCWB3HQhbpxeOze/YCLc0hmOQUhTqCvgX3u4NZG55DPNyyunpssDCE9qsdWer1k9pmFXlMcPNnEUCqE+0yyzO3ymEeRcF5wS8o/O9qumTpdcJNHtEBlAclmsLos18uci7N5O3g2ByAhZ13boo8H+bFXjQmiU/ntdA6SpumVIJRKHlLrh3KJin2k+f+RQXK88JWTt4dZyh6PIahtPAk5cZHiBkFustYdaB3RdjaG29B+cX1xyYVn5GLJ+5dMFFkuOppmICGBlDBGyVe2rZ5
z51sorfjTnbI7i4rIpm1Zc/tCCxXKXGPcdjBASjjILRBkwpCOPVsaFtBJ8pvm/jbsYoSqlXtM8v7js/OJSTCc8JrnYHfAHOIcwh+Qe4cKy8vOLS2L9DQigUjqg62sPtWJSU2ZsyKRWkF/5pmPtu/vdjhhSS31wZl7e6qnVAGM6DUAIAORZLFmqxlEwYH7Or59e9+1ZH6tyZgHAF+oazi8uq8vO1SgVKUT9yWg7xjhABEKGTKPEam8kbrceFIC31zVAWFuM3kD4yW09nTf+c9MttWd8s+Gs478lBABWT61268FdbneUbhCINkXZ3e/edKx9eWl5ZpeJjGkFBKSEtPm8G9qPWhnDGB4EOS+x2a6pmi4NujGMgABwRI2yfIuFC6SEEAJfm3lmlTNL2u2UwKxclxJOFYw1vkzOTdMUQiSLaCIKIUzTNDmPEzglBACKrba/L7t43bJVy0sqFhWVLikphxRWFtkHRGSEPHGw6bJNLx/xee7e9d66tkORxBZ5jooRck3V9BKbLTZOgYhWxja0H23zeWk803jMyBgVZLjlhSOH/NyM8vPLKbQwdm319BxNk0GmiGh45NxSJI1gNFQ7sxklPtP0GuZHg/1yhmg45DNczyMi55xzLtcjhTFFUSilQMI5JhB69fF4NyGUUkVRlPAhnEgLUeNdVFy6dulFTy26UCZWjUoE2TcE+O7O7f/+9humEDIZ5/NvvbG7v48eZwMRiDmadm31dAtjfORaIJchPzdfOHJouCTHj8wsEBHrd29/f5zdIyFB01wztXp6VnZEpyWx1UdN+CyzO3y6fl5J+R0Ncy4qq4Rhi/TxxoVARMYYY6HTEK3HOnY3Nh7Zv3/Hvn1Nra3dnZ2+/gEzEEDDAACiqorVas/NKSwurquqmjtzZmVtbcOM+urS0kgLnHNCCAkfreGIjNACqxVSgNz+9AYDX31ny9OtzTmqhRAwEX2meW5hSa6mDX9Y0mJ6VvYl5ZXPHmqxKwpGLRNM2dvfv6Wrc1FxSaaWiQxsJiObq//Zu3vQMJSRKkGGHM90uW6prY+ab0pI02D/nw7uf7+ve9Aw8jTLvIKia6qmT8/KSZTwKX+1ob2teWjg5pp6uf2LeowLQYEQSgDAEwxue2f7hs2bX39t48HGxt7uLhApj5cQV1HR9Pr6pRcsX7Fkyfyz5zktFgBAgQKQhVOiR7XkEUBuc/YNuG/asmlnX2+upgkALoSPm1+aMfuuufNsYS9crPnyu/2NH/b1ReV+EgATMVtVv3LGrDzNkpHdRMao8Nyhls3HOuzxepylql+a2VBgsUZ6LJ+4b9+un+5+3x0MRhYUjiLfYv3ijFnfmjVXjbcPjELUByGEIACEUgDYu3//bx9/7OXnX2jaswdNUz5ACaFh50FkQYimHQnlGohhtgJRlLqGhpWXX/b56284o7YWpNYBoKPlyEeU3/r2I7e+/Uan35+tahyF1PN3zTn7C3UNIrx/iTp4I4ffEwzct2/PULxvzGeaS0pKr5xafUpQQXbisNfz68a9mCAL4dPTpp9TeNyLIP/j7l3vf3/n9jzNolFqIpKwY9gQoi8YuLaq5pGFS22KksBHBFGGoZA1UCjlQmz4x5v3P/DAqy+9pHu8sg8kXPgiKp9xNNmQ46mUQkhaaE7HhZdc8sXbb1+x6HxGaeS9yVu6/6Pd335/GwWQagyk8UTZU+dfsLi4LCi4hTJDiO/t3F6blXNz7cxI3r0c5lvdnU8dPBA3KZwQ8h/1Z0xxOMfPhsyYja8f64jNwpA8mJXnOqewOBJylPvpjR1t/73r/XyLFQD6dV0WqegLBnXOZamKp1sPfP3dreF5j5mjsGMgNMFCEEIopZu3vb1i9RWXLFu+7i/PGF4fY4wyJgCk3TfKxiEWiChE6LcAlDHGmOH1rfvLM5csW75i9RWbt71NKSWECCFiPwP5/72m+aXt//zGu1s1SiUPvKbhN02VkEFDv3nr6x8N9lso6wr4b9iy8d49O/
/z/bff7OyIbCjk53FOYfGsPFciCb9+rCMjkzguKsg9YYtnaJe7NzbmxBGtjK2qqJRilX8o14J7du+QwlMpvXvu/J2XXrV5xSe/2XCWSqmPmwKxwGp97OBHG9qPSM9gsslCpJS293Rff/ttFyxesunvzxNExhgCcM4F5+PIhB3xJkkKBGCMEcRNf3/+gsVLrr/9tvaebkqpwOi8dimN3+/f9+u9H+RpFhk06QsGbppef920up5gMEfTjni9X92+ZXtP1+rN659pPVhstelC3L7tzY+Gp6sgAsCqikorYzzmFVbGdrl7WzxDkWoeJ4cKUidt7mjXuYglbIDzhUXF5XZHxNsjx/ZuT9d7fT1Zqjqo67fNaPjaGWeW2x1n5rnunjv/uSUXldkcAc4ZIYYQf25phsT7dfmVM0qfWLt27tlnP/HgQ2gYjDEE5JliQCwQOecIyBhDw3jiwYfmnn32E2vXMkqlQ2K4BADgc7X1F1dW9eu6iRjk/K458+6fv+j/O/vcZSXl3QF/nqZt7+266LUXP+jrybdY5L6x1TO0d8ANESOGEIFYbncsLCoOxLgZKCE6F5s7OiLTMWaMnQrS93nY69kz0G8ZqRIIgCFEodW6rLQMhpnE8onmoUEZMbIwtqCgGELnFcEQ4ryi0gcXnM8IMYWwMeW9vu6g4HHDslwISumg1/vZ22+7/qqreg4dZoxJH8BEBXFHDB6kp4Ex1nPo8PVXXfXZ228b9HqlsRIRggyW/mbB4ulZ2QTg9wuX/Nfsj5lCWBl75NylH3MV+jiX6ZY2RSFABg29wGp7ftnKK6ZUD3djSwEuKy0rtFoNIYZPuIyl7RlwH/Z60k3+zhwVAABga1dnrEdMnnM9t7A4W9WGB/7lT0wU0mIIcHNPvzvUD0JkmtOykvKP5xf6OWeEDBp6XI8TF4JRuudA89KVFz/54EMKIYTSWC/QREMyj1CqEPLkgw8tXXnxngPNbBgbpJKvdDh/d86Svy9beXVVDZd1HwEKLVbpJCXhLYNbD84rKN5wwaXLSsqjsiUkq7JV7dzCYkMIEqMYgpxv7eqENONhURgjFaQZ2On373L3aSMzFqVKKLbaFhYVw8hdsvyvPM0ik1Qdinpf464P3L3y/0Zm0soYAAjEMptDjTHOOeeM0vX//MeyZct2bNnKFMUUYrhmTgWEEJYU0h5MpSkhhCkEU5QdW7YuW7Zs/T//wSjlnEfmCQHnFxafW1giJ1gh5JBn6LJNL/39SKtTVWWKbV8weNP0GS8uu2RaVjaP5zKSnVlYVFxstUUpBkTUKN3l7uuUSW8nmgqIALCzr3fA0JWREUipEhYWFUsfw/BOyxEuLCwuttp0zjVKuwP+q9/Y8NLRw3JHwAh5pf3I292dDkXxcvPj+YUKocMbMTlnjD33yitXXH551+EjjDFumqmbBSEGUBpxJyeC3BQojFHGRucEIjdNxljX4SNXXH75c6+8whgzw2yQJ2IjO4JtPZ2rNr30RmdHlqrKz8bP+V1zz374nMVOVU0U0yLhBJaFRdGKAQEUSgcMfWdfL4xjjRi7X8EU4pd7dh0L+FRynAoy3OBU1W82nJmlqnETcighd3347l0fvFtotXFEub4sLSlbXFx21Od9qqXZYxoKIRxxw4WXfSK/MOKQkOvCsy+//JlrrjaGPJQxERZ3KiSglArOQwcU8/Jqa2sqZsyorqqaUlqWl5XlsFo9fn93v7ulre1wS2tbU1Nzy8Hg4JAcFGUs7o4xCrJLapbzyaf/smblStnh4QN/taPtys2vUCAWxgiQIVPPUS2/nr9o9ZTqyJHLhHwDIABDhvGLPR96DGN4oIcAGChKrPZvNMxWUqgMERdjiUHIUe0d6D/m96kxKiHI+eL8gixVjXs4XP7Jt2bNfben+4W21mKb3cYUE8XLR4+8dPQwIDhU1cpYZ8D/7VkfG8EDzhljr/zjzRuuv84Y8lBGU+cBDasBa3bW8lWrVl966dkLFjRUT0viuj
c439Xc/P727X969pm3X9sY8PlCZEq6EgnOKaPGkOeG669z/vWvFy86X3Ybwp/EGTl59Tl5e/vddkJ7gv6GXNdvz1nyifxCnkJ2Jwmdx1U/nl+wof2oY1hgAgFUQo/5fXsH+s/Mc40tKjEWrSDp+cSB5m09XVGeZgRQCPnSzFny7HPc7kiK9AWDX9r+j78cOmBlzM6UiLc1wHm/HrxtRsN98xZJ4pOwPtjZuO+SS1Z1tLSkrg9k9Ehwbs3Ouv7mm//jttvOrK2LdCPJvEZsBSHErv1N9z344OO/+73u9TLG+GiuKtm90urql15aN6d+phCChot2UUJ29fd96vWXW4YGL6+s/s05i4usNlOeq09Z8kd9vvv27TZjFl+fac4vKLpues3YPI9pUyGipu7d/UFUpg0hJGCas/Ncn6+rT94bmV0oEB872PTrxl2NA/0R90i1M/vztWd8sX6WEuaBpM6Q37fkoove/8c/GWM8ZR4wSk3Oz7nwgv/55a/Onj0bwi5k+YknH6b0W8jHCCHv79lz+1e+vG3jJhbPoRQF2cmPLTpv8/r1WTZ7REFKj/LzR1o3HTt6z8cWyBh0WqUBpEx+29S4y91njYlYOhTljllnxV2aRxdXulSQ1H6np/tPB5ujslgpIX5ufq5mxhxX/qg6KpJD5jWNHX09B4eGTCFK7faFRSU5qhYJEUkDmxBywxe/+PgDDzBF4eHY0igDC59o/sId3/zlj+5yWK0yxDxqvCDOkMPxbh3Fd374w1/c9WNGKR8tnCG7ev3ttz92//3D18qQB1Hmt6ZfYEUKdmdf7yPNH9lYdPBPF+Kz02rOLigcwxrBfvjDH6YnGEIIwMaO9jafVwsHVyBsuRRabasqpmiUjpopRsLVCCyMTXFkneXKn5tfUJudI92rkfVCcE4pffTZZ+66887hm7TROgkUiED82a/v+9l/fUcuNCyVvUCCrkorgSBetGy5mpO98ZX1sv9JfoVyUdu+vXpWw5yGBs45PZ5rSeB4BDT93gBkqdoud5/HNIZrFEqILoTG2Fmu/DFUB03vE4msDq2eITXKnUAIF1iTle2I2UMmHFQ4JBGphCLLpEVmTAjBGGvtaL/jjjsgvbLbVCB+7fvf+9YX/0MIQShlY7Wrj7dJKaWUm+Z3vvb1//jWnbJy56jiAoA77rijtb2dMSaGOSJTPwIUKzS5ENRkZXMxQqkgokppq2doyDDGcHImTQEhAkCH39cXDEaFz+XJuLNc+WMYmyy0LP+JzdD60U9+0t16SFGUFP1IUugrrlx974/uQhm0zFAuKAnHu3/6gx+eufCcyIeeCEIIRVG6Ww/96O6fZKQDw3GWK5/ETIFCSF8w2OH3AYx2E04M0tYKALBvoD8qn5gAmEIUWq0VdgdkqHSxjDK8/vZbj/32dwCQuqnIOXcWFf7iv/+bhQmaZDgiHIaOILnxRCnlnGfZbD/6wQ8AAEdjp+z2Y7/93etvvzU8QjEeyBFV2B2FVqs50vMozaN9A/0w0VpBfl5HPJ7YzhlCVNod8lR8Rr5BuQr+/Of3ChlvTI3jUml//ZvfnF1bl/yrFUKgEJTSKJczAZJc/VBKAfHyFRctvugiaYIkeVjam8Iw7vn5z2FMdcRiIdeILFWttDtiQxIQnqB0dWEaLiZpKPQFgz3BQJShIHO2arJzxj9OCelIWLf59Vf++leSukqg1OS8qLzs5htuhHBmW1zIvT5H/LBx3569e/e0tHh9vmJX/tmzZ5/9iU9k2+0RZ0CctxBicq4wtuYz176xfn0qioEArP/r39Ztfn3VkqXDXZDjRE12zvt9vVEToVLaEwz0BYMuS3o5j+lQAZEQ0h0I9OnBqG2MTEKZnpUNmVgdMKysHnroYQCgKTsSKCEc4LKrrp5SXBxx88VCTvO2Dz744Y/veu2V9abXK/9cmlrTZs36/g++f+Oaq5KU0pQsuWTx0m9mOYNDHjJa5QM5hAcfemjVkqUUyPiTz2THpmdlR3
l7ZTyiTw92BwIuiyWtaqBp0FM2ecTniRq1zGUtsFijMrjHDBSCULqnqenlF14AgDTWVyEA4NrVVyR9RFBK17780uLzz3/lubXc54usEZQxSmnL7t03XX31fX94RGaqJRFFSWlpRW0tpMB+OYRXXnhxT1MToWRURZIicjWtwGI1Y1ZkRDji80CahEtHUxECAEe9XkJGmCTSUKiw263xUtHHAPmF/f5PT3CfjzGWoiUsFX5lXe2s2bMhweogefD+3j033fRvwcFBRVFgpOUohKCKAgjf+/Z/NbW2JAw6EAKIdk2bWVsHqShCRMYY9/l+/6cnYHwJJqH3A8gjoxV2e2ygkhA4KlVdOho6ba3QHQxQIDHTg6V2e0YGKe0sjx7c8MKLaTUoxTFjek1Jbh4XnCYI9QLAT+/5+VBXl6IopmnGts9NU1GUgc7Otc+/kKgDJJxjXVFYCKmtibKdDS+86NGDqVvBozZYardH7xUQKZDuYAAmSCvIt7n1oMcwoqQsSw0WWW3jHFuoNSEAYNv27Xt27KCQQkHGkaior4ME8yd9Tfuam5//+98AwExsf8ifv7nt7STSlB3LsqU6aoFIAfbs2LFt+/bIMMePIqsttggmJcRjGG49CGn55VJ8TkqnX9d9Jmcxng2N0mKbDTJ3GdKGN94AAJLy6iC7CAAVFZUQLucWdwhb3tluDAzKDWHy9obcboCEOlaONGDoqXePMBYZ2vgROshrs2kxliMjxGdyWbQ8dfWT3q5myDACfGQ0Mpxdk6dZICNndAgBgNdffQ3SdJLIh4vzE7o7pVDeeu89SI2yjpwc+bP4gqMUAI729EDK4pYPyaGN/5uJZAfGzRYLcFNWmk0d6VFhQNfNmGELxFzNMubkmaimKKUtHe0tTU0AabtOAcBmsQCAmQAA4O44BqNRVs7ThYsWQQI6yk1agPOPWlogdSogAsDBpqaD7e008c1YaUGhNFezxDZlIg7oKWssAEidClI6/XowNrlZIOZpWkYWBrmC7m5s7OnoGIOhAACKqgKAkgAAwP1+2e+EEmHMNM3iKVOuvnINJP18e7o6W/buhXSoQIH0dnTs+agRMmQuEIA8WacgJg26Xw9COuonvYQ2j2nGckcA5mgahL+VcY0MEQCONh8AAKooIrXUhPBPEQB+dc89Lzy31jD0OD1BYArbumULJHZfyvutAeCn99xTmdhPJUOmG996y+fuH9W/NLyLclBtzc2wdNn4D+1IgedomohRXhTAk470IHUqSLn6TDPKqQAAAsGhhG5yGu8JTlljd98+SD+aIufjw3fe/fCdd1N8OO7buRDfvvsnn7v2WrmtjS8NQgDg6SefhHCAKtVOAkYGOIYkmpjWgAA4FDWqUIB0LcgrDibE8QySCnGcCmBLGpJJHXL1+aj1IIzVRSGzCpI8ED/2SEgoL0ZRfvnA/V//wq1JYhBSVWzcuuXlF16EtJyhYeOnqbUV0g8XJUIc4SMSIL4J0goAgAABzmO7rxCSKSrIJby7s+u42NKESP94jMxo5ZyX1kx/+MEHL7vgwiQ8kBkrOuc/+PGPQdfTSsCPDKq7szMy2PHDxlhsliwBCHCelp5OR0chmiii2g7VOMpQqA0AEMDXP5Cp1pJDnpBBITjAmptuevetty+74EKZApnoJ5xzIOQH//3TLa+sZ+nyIAxf/0AGT/QplMapBE7AxPSKCKQxhQhgxqk6CxlMEwIAQDQDgcwJKiGkHcA5nzZ71tp1Lz7zhz+U5udLezAuFRDRNE1FUX7z+OM/+8EPyTi2AGYgkMGD3jR+jiThaW7A0rMVREyRPRlQzkhGRqRBWSdr4k7ChsxDzq052V+94447v/yVvKys5PVT5N8qivLQo4/edsstsirk2I6QAAAaRkbidhKMEBojLgKQ7pHyk3Pl6MnC8ONyyz55+a9+9rOz6mcKIWTyXKIDPPLOFkOI79z9k5997/sUAMlE1dw+iUiPCjLtYjgIgEjnDq5RQQCIqk
LkEETmQCmlhJicV0ybdvfP77n+yjXy3FWSer5SGTBKW9uP/tutt25+4UWW2uHJJKNDAKKqmVtQgcsrN0eKCwFYmnonvSC1QuNMEKZT8D6F1xAltUqIaSFkHiJed9sXtm/bdsOVa1AIIQRLcHgeEUOqgtLfPPmnefPnj58HEShWa6a2DxCuEBs7ApamBZeOViBEITTKkSQdz2aGQq4AQADsuRnLkZRdlD6DgsqK//e/9332U5+SR2mTVFDgQhAARmljS8uXv/GNV//6V7nXSN2VlBz23JwMagUzfPRvBCEQFELTIlwaVCCy0npsVxD9GZKRLKZdWFwEkJlttzQOOOfnr1z5+4cfrqmslHvFJGmPAMAo1Q3j5//7Pz+/++6hnl5Gw/WdMtEhACgsLo4MdvxN+jmPjRHKHKcJS2gDsCsKQpwBZIoKcqGZUTUNMhLGlRe2cH7L17/28tq1kgeyokrsw3JFkNTZuHXLvCWLv/e1r3t6+xhlXPBMZZrIMdVVVcGYgm1xEUf4hCCgrDydOtLLYrIrCmK0NUIJeE0DMrE7khKfO3Pm+FuTTEJCfvXwQ7/95a+sqip9BvHfiyjthvaenhu/+MWLli3/YOtbIceDyJDCk70CEhng+OklReQ1jajNDwFABEmF1OmWHnGcihLbfQpEhsYzkMJECACU10wHAGGaMpt0jO0gIqUP/uGRW6+/IbllIISghCBjf3j66W/deWf34cNMHo/M1Ko3rFfC5ABQUVMTGez42iMAMKDrNObDEQDOidIKiAAQmyUhHc9uXc+IspOqe1Z9fUFpqRhHwIZRigA//sUvbr3+BtM0k1mInFNKe/r7V99w/eeuvbbnyBHGmEhahWPMIIQIwPzS0oYZ9ZCJyCQAIIBb12MdzzKfCCYuoS1H02IjHzJLIiObCEqIEKK6tKy6rk4KbwyNSFP/qps/992vfS2JIxnCMcbN27cvOH/R3x5/QlEUaVtM1JVthADAtLq6aWVlUhWNv01TiNh8IgBQCMlJ81hKelTIUlVrzLkoWfol3fzaRJDTsPTCC2BM5oJU7LVnnfXrX/wCIGQ6xn1SsuTZl1+6dNWqA7v3WCwWAKCEKOmApROSlf2QQ8tIwX0AcOtBn2lG1egTiFamZKlqeqJLdRiEAECuptmV6HtLZLGPTr8/IyOUWLF4MQAg5+kpBkJQCKD0J3f/pCg3j3OexI1IKb33/vuvumSVr6cHCAkGg/KSIDMdpGFPEIKcR4Y2fkhRd/r9etRhagCOaFeYPKyW+YS2SH6tU1X7dX34DMm6O10Bf0ZGKFfQ+fPmNcydu2fHjrSu5ZaupIuu+NTVqy5NkoMEYSukflr1U08+abHZxhBrRkRVUdp7er7x1a/6vd7Rj00SIhAb5s6dP28eZMhQAICugF8X3KGoUbeNOVU13Rz0tE9SF1qsh72eGNuedPh8kCFnAOfcqVlWXHbZnh070miQEME5MPafd9wJYf2f5GEAuGzlJWPupyx2dKCtTVFTkqEcyIrLLnVqliRHe9MSFAB0+HzR002IACy0WCHNFMP0UlcAoNzhiHItyIPcbT6fzHEa/wohB3nzZz/L7Hae8hohdw0LlixZMn9+pLJacvCYOhtpwDQBIBgICJ6CvUwI55zZ7Td/9jrIxAeD4TylNp9PjSmsjAjlDkdkylJEeqkrAFBpd0YNRNZ96QkG+tNMvE8oN0pRiIa6upWXXQbh6hmjdw8RAK677jqQQYQUxM1i6mykBQCgLKX4nxzCxZdd2lBXhwJJhlaHfl3vCQaUmLrOhECl3QlpfpbpRCal/9xqdWkWM8ZUMYQ4MDQImbAcZeAbAG699QsAkMpCLg/AKznZS887FzLi7EqllylDDuG2W2+FeOk/Y4AU8oGhQSNmIkwhXJql0GpNVw5pn6R2WSwFFmvUQW4Zn2wezFhOorxpY9WSpRdfccWoRW4gbIXN+8Qn6mpSKnhwIiGvrLnoik+tWrIUUqjrljqaBweii2
IRYghRYLG6LGmfW0yvW9JMrXQ6o/5cmgtHfN4hwxhPHfrhkOkwd955B1XV5KmnEJ77s848UwEwTfPUoYK0gqmqfuvOOyFDOT7SlzNkGEd83ihDQUJOULrhrvSoIAU8Myc3ytMp6750BwJtPi9kyLsg72tbuuCcGz5/C4xa/UpWO6itHf97UwWO+FfCUTAGADd8/palC84RGarCJMXb5vN2BwJRNzDIOMDMnFxI30GXZs8IAYBSm91lsZgxjiZE/KCvd/xDjcIPvvvdwqqpMpSQ6Bn5BVSUlkK4jvcJAAAILpKQgVJqmmZh1dQffOe7GRfLB329GDMFJqLLYim12SOTlTrS1goIkKWqVc6s6LoviIyS5qFBb4wfdMyQXuSq0rJ7770XktcPRwSAqooKAJAZCRMOxgDAarPRxOpKdvjee++tKisbtd5niggXxjabhwYZHaGbpaFQ5cwaW7nvtDOe5ZHNmTm57/f2RK0RGmVdAf/+wYE5rvwMHKUFAGlzId645qqNt98+SrlvQu788leynU4+0qieOBBCArru98ZfEyPlvmWxt/H7lCSkYPcPDnQF/NF18hBZeHUYg/zTpoJ8QX1ObraqRl0CgACA8E5P9xxXfsYNt/t/ce+eXR8muwQAcdtbb2X2pWOGvMnoY4vOu/8X92a2ZSnYd3q6Y6+V5IjZqlovDYX05Z+2yoqsEXXZufrISCsiaowdGBo86vNlMHVd+gyybPbf/+Y3pdXVnPNEOnk8/qLxOJqiZcoY57y0uvr3v/lNls0uUvN3pYLI1SAHhga1kbW9ZNn3uuzcsa0OMLZrxGQPznS5lJgYDCPEY5rv9XbL5zIyfggbDXPqZz7y6B9t+S55L0/sY2P3Io8D0V1lVHBuy3c98ugf59TPzJSJEBE9ALzX2+0xzagTaYioEHKWywVj3cGNpZdSE5yRk1tisxsYfdGdhdL3enuGDINkyHiUkDf1XLzo/Mcef0LNcgouaKaOb2cOlDHBhZrlfOzxJy5edD5PHhJLE7J0+ZBhvNfbY4m90BFFic0uDYWxJcWMkbACUaF0jis/7kV3fcHgOz3dkLn0BQl5+8+alSuf+ssztrxckYn4Xia7x5jg3JaX+9RfnlmzcqXJeQYdixAW5js93X3BYNwLHefk5yvjKPE0xr7K6Z/jys9RNTPmBkyV0q1dnb7M7SojUBjjnF958cV/ff75oimVnHOmKBk8aTRGEMIUhXNeNKXyr88/f+XFF3POlYzSNJIttrWrMzYUaQqRo2pzXPkwDqf7WKkAIBCLbbbZeS49RjGolHYG/KGrcTOdJyhXiovOW7Rp06a55y7kpqmMVmllQkEpVSjlpjn33IWbNm266LxFmV0XJKQYt3Z1dgb8sRc66kLMznMVW23juYFh7BKUr1xYVBx1NzmEFcOW7s5BQ8+4YgCZrSREw/Sa119+5TO33WoiYtJ01glCpFKHifiZ2259/eVXGqbXZLDGfwRSJQwa+pbuztigg7x/K3Tt8zjeMg4qEIIAUxzOhpy8qBvKpWLoDgQ2dbTDBCgGCEcosh2OPz3w4OPPPFMwdcrxE3AngA8kdCkZ57xg6pTHn3nmTw88mO1wZCrKEAUpwE0d7d2BgBpzi1+Q84acvCkOJ44vJDvOfiMALCkt1Vi0tSIQrYxt7eo86vPSMVWlGL3rlAIhXIjrVq/e8c471912K5ExTJCEmBhGECJvkuGcE1W97rZbd7zzznWrV3MhYEw3F44uYkRKyFGfd2tXpzVGAQtEjdElpaXy2XHJc1xiAYIA1c6s2Xn5gZj0YkZIgPN1bUekBDMuIwjX/hFClBUUPv7Ag6+9sXn5Jz+JhMiLWeQVD5l5NSE0dJ0QcM6RkOWf/ORrb2x+/IEHywoK5aGGiVqeCAGAdW1HApxH+RIoIQHOZ+flVzuzMEFl69SRGRYvLSm1jbyRGAAEok1Rdrv73uruJJk7LRojKCKvnBZCLJm/YP3ates2bbz0mqtVh5
1zLjinYS+k1CJpNX381hAAmQapOuyXXnP1uk0b169du2T+AlluIcnpq3FCmoFvdXfudvclkvDSktLMSDIjZzMIwHOHWjYf64i6olqGTbNU9UszGwos1gwWIIovOCFI+FKQvfv3//bxx15+/oWmPXswHMGihFDGUK6+w+72huEdC18FKvjxwlZEUeoaGlZeftnnr7/hjNpaAEAhMHM57EkE2xMM3Ldvz5BhROUwyr3lkpLSK6dWZ0SwGaOCWw/+z97dg/F67DfNM12uW2rrJ05qw8GFoGFCeILBbe9s37B58+uvbTzY2Njb3QUi5fFSkl9YNK2+fukFy1csWTL/7HlOiwXkrdYpJ9+OH7/b3/hhX7RKkN9Ytqp+5YxZeVp614UlQmYMOnko4B+dx/7SetAeo8ckf9dMrV5cUjqGu5LHAJRf7cjQcOuxjt2NjUf279+xb19Ta2t3Z6evf8AMBGRBOKKqitVqz80pLC6uq6qaO3NmZW3trPr6qmHqV25SSIICXpmFFNQbxzqePdSSSKRXV01bVFySKZFm0rYXiA837ds30B97bbY8Fv2FuvrpWdknhg0SGD4THXc5x9AjACHjLE63krcwQQidtxkafLipUV4vE61ouTkzJ/cLdTMzKMkMVmFFSshllVNtTIk6VCnvLdE5/3PLgQF5AnwCqzKOgPQ0SB+AQJRHIoUIlTklAASIrNB2/LJxRCGEPBAp84kjLZyYPktJDuj6n1sO6Dz6Th55JNLGlMsqp2ZWkhmjAgEiECvsjhVl5YGYBGXpETvm9z/deoAjEsi8C3L0oRKiMKYoyihbCUIopfKY9AnTXhHIPSFHfLr1wDG/P9aTK68kWVFWXmF3CMRxbiBHyCeDw5CCW1ZSNis3zz8ywQnC9wp96HavPdQCkMlshn8pIALA2kMtH7rdcU0Ev2nOys1bVlIGmasdH2o8wwMBIISsnlqVo2lGTI6hQHQoypudx15tP0rSOSL9fwRyPXq1/eibncccMTyQR9ByNG311KrM5oJIZJgK0pVUZLWtnlIVd6YFoo2xF9sOb+nqTOvA/L88pKm4pavzxbbDtph1IfLM6ilVReOLQCZC5jfHcoLn5hcsLy3zJSh2oVH6bOvBbT1dk2yQkDzY1tP1bOtBLZ7HghLi43x5adnc/IIJ2oJNiJ9E2oyrKqbMyXP5YowG6Q9hlD7dcnBb9yQbwjzo7nq65SCjcWonSy/CnDzXqoopMGEnQieGCuEQ+2em1UxxOGMjVfLqAErIUy0H/tl1TLLh/yAdMMyDf3Yd+3PLARqvnL+MOU1xOD8zrUYmf0zQrmaivKfy3JxdUa6fXpunWfQEbFApfaa15dX2o/Jv/0+xQQ6WEvJq+9FnWlsUSuPyQOc8T7NcP73Wrig4ASbC8XdN3FDlHqHEZrupptaqKEZMeTpJcAulzx859GxrizSF/o8sFpHBPtva8vyRQ5YE64IhhFVRbqqpLbHZRIYOnCXChCSVRI2ZErJvoP8P+z/iiCzeIXB5CPDMvLxPV9dkqeqJ9EyfFMgBDhnGUy3NH7rdsftGkKfxhWCE/FvtjJk5uSdAJhNOhcjId7vdjx5oklnzIh4b/KZZbLN9ZlqNTMSACY5onxRExtXiGXryYHOn32+LxwNKiCkEJeTG6XWz8vJOzLdxIqgAYTbs6Xc/2txkIqoJ2KBzrlK6qmLK4pLSyK9OQPdODCLDeeNYx7q2w4YQWjz/gVwXFEJurKlryD1BPIATRoWIIBoH+h89sD9gmomkIBADnJ/lcq2eWu3SLAjy1oHTmxCICAQIkD49uPZQywd9fVbG4u6iKSG64Fam3Di9tv6ErAsRnDgqQJgNrZ6hPzY3uXXdGo8NBIAQ4jfNXE27pGLK/MIiEhLlaUkHBAB5hQvAtu6ul9oO9+u6TVHi3vEj9415mnZTTV2VM+sEK8UTSgUIs6Er4H+0ef9hr8ceb6WE8GJpCDErz7WqYkq53Y7hmgGnCyGGd/ioz7
eu7fBud59KaaKzbNKPNMXhvLGmVrqWT/DieKKpAGE2eE3zqZbmnX29dhb/BouIerAytrCoeFlpWbaqQThmcyoTAsMZ6wAwaOibOtq3dnUGOE+kDORYfNyc48r/dHWN3FCceCPpJFABwp8LAq47cuS1jqOUEDXxtyIQ/ZwXWq3nFhUvLCwOX34ic09OLUoM75XPNLd2d27p6uwOBGwJLAMIG4kC8YLS8lWVlQRIpgrWpIuTQwUYtq3a2df73KGWgcQraERehhDFNts5hcXzCwqdqgojlfBJRFQ3PIaxvad7a3dnp9+vUpqI5RG1l6NpV06tnuPKP7lb6JNGBRimSHuCgWdbW3b3u62MsQRfDwkfHTeEcFksH88vmFdQWCJLkQFIDp1gToQYMCw+dMzv297T/V5vT18wKEmQhNwcMcD5rNy8NVXVBRbrSV/4TiYVJOS6KBBfP9axob3Nz00bS6gepNxNIYJCOBWlJjv7E/mFtdk5jvA9SRFOwMR8Xhh+y3AGeE1z/+DAu73dzYODHtO0UKokJkFIGXDTxpQVZRVLS0rl8E+6B+XkUwGGLRZHfb7nD7fuHehPolchLE2OqHMOBIqstpqs7LPy8iudDqeiDm9Wjm6cX1uidjymccTj/cDd2zw02BXwA4LGGCMEE8s0stKdkZN7+ZQquTOCU8OvekpQQUJ+GRzx7e6uDe1tvcFgEmtLQn5JuhCy1Huh1Vppd9Rk50zPynZZNI3GKXKAqQ2YJEgL0AXvC+oHhgabBweO+LzdgQAiMkplvknyrkr7N99iWVFWsaCwiJ0ayuD4kE8dKsCwj2/Q0F9rb3+7u9PPuXU0QpDwCX/pipD7kQKLtcJuL7Xbi6y2YpstT7Mo6R9lkW269WCn398V8Hf4fG0+X08wIG1+6SQgo9Er4kK1MbagsPiCsrJsVYuM9GSLfJgYTykqSER8i20+78aO9l3uXp0LC2M0qe6FCCcQEcBENIQAQI0yjVK7ouRqWp5mydE0h6LaGLMpTCGUEiKPJ3NEgWii8Jvcz7nXNAZ03a0H+3XdZ5q6ELrgAESlVCFk+IuSd0YgBjnXGJ2dl7+8tKzC7hjudTilcCpSAcLnluQG/ZDHs+nY0T39/UHOLZSyxBbZiIGFvzmZHyUQQ/8AylOTijwJQ4hM2RDyIBSivN+ZEqDh0zLyVFQouSblV3MhgkJYGGvIzV1WUj7VKe/qwIkqMDBunKJUkIh4bBCgzevd0nVsl7tvwNBVSlVC00qfl9KPUshy7JjaA6lA6i0DhSFEjqrNznMtLCqpdDjIqeoTGyGiU5kKEpGwHgB0BwI7+np29PYe8/tMRI1SRsiounpCEVksOKIuhEJIic0+Nz9/rqtAXtVyugRXTwMqSAy3swwhPhoY2NnX2zTYP2AYIQsuXPfkBNAisvogojRKKCE5qlqXnTvHlT8jJ0elNKrPpz5OGypIRNlcHtNoHBjY2+9u9Qz1BYNyvZCGPYQDxJCRS+5C/wq9WG5VAIAS4rJYqpxZZ+Tm1efkRLwaJ911OJYxnl5UiCBK1l7TPOrzNg70H/Z4eoKBPj2ICCRs+jFCYirUJx31yFkUiDxscspmXZqlwGKd4nTW5+SW2x3HfZ2n6u4gFZyuVJCIDUcJxAFd7woEjvg87T5vVyDgMQyfyQPcNBHlQQNCIHSEflhNBblnkdYdAiKCkPWLCbEyxa4wp6oWWa1ldkel3VlkteZoWmTKT5Go2DhxelMhAvmhx0akEKBfD/br+pBhDOh6vx70mKbPNH2mGeDcRMEFckAAYEAYJQqhVsbsimJXFKei5GqWHE3LUtVcTcvVLFEth04lnOYMiOBfhArDkWLoIcKe47KAUeY1U0GNUxP/glSIAg6b8dQjlpEIZOSH/3pzH4V/fSpMIkWctILpkzjVMEmFSYQwSYVJhDBJhUmEMEmFSYQwSYVJhDBJhUmEMEmFSYQwSYVJhDBJhUmEMEmFSYQwSYVJhPD/A41C2fBETK
ZdAAAAJXRFWHRkYXRlOmNyZWF0ZQAyMDIxLTA4LTE4VDE4OjQzOjA4KzAyOjAw46Q3zQAAACV0RVh0ZGF0ZTptb2RpZnkAMjAyMS0wOC0xOFQxODo0MzowNCswMjowMFVZ5QUAAAAASUVORK5CYII= diff --git a/oauth2-example/1.0.0/src/app.py b/oauth2-example/1.0.0/src/app.py deleted file mode 100644 index e36997e4..00000000 --- a/oauth2-example/1.0.0/src/app.py +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import asyncio -import time -import random -import json -import requests -import thehive4py - -from thehive4py.api import TheHiveApi -from thehive4py.query import * -import thehive4py.models - -from walkoff_app_sdk.app_base import AppBase - - -class Oauth2Example(AppBase): - __version__ = "1.0.0" - app_name = "oauth2-example" - - def __init__(self, redis, logger, console_logger=None): - """ - Each app should have this __init__ to set up Redis and logging. - :param redis: - :param logger: - :param console_logger: - """ - super().__init__(redis, logger, console_logger) - - def authenticate(self, access_token, refresh_token): - s = requests.Session() - s.headers = { - "Content-Type": "application/json", - "Authorization": "Bearer %s" % access_token - } - - return s - - # UserAuthenticationMethod.ReadWrite.All - def reset_password(self, access_token, refresh_token, userId, passwordId, newPassword=""): - graph_url = "https://graph.microsoft.com" - session = self.authenticate(access_token, refresh_token) - - url = "https://graph.microsoft.com/beta/users/%s/authentication/passwordMethods/%s/resetPassword" % (userId, passwordId) - response = session.post(url) - print(response.status_code) - return response.text - - # UserAuthenticationMethod.ReadWrite.All - def get_password_methods(self, access_token, refresh_token): - graph_url = "https://graph.microsoft.com" - session = self.authenticate(access_token, refresh_token) - - url = "https://graph.microsoft.com/beta/me/authentication/passwordMethods" - response = session.get(url) - print(response.status_code) - return response.text - 
-if __name__ == "__main__": - Oauth2Example.run() diff --git a/outlook-exchange/1.0.0/requirements.txt b/outlook-exchange/1.0.0/requirements.txt index 46de1688..c7a756d5 100644 --- a/outlook-exchange/1.0.0/requirements.txt +++ b/outlook-exchange/1.0.0/requirements.txt @@ -1,5 +1,5 @@ -cryptography==3.3.2 +cryptography==44.0.1 exchangelib==3.3.2 eml_parser==1.14.4 glom==20.11.0 -requests==2.25.1 +requests==2.32.4 diff --git a/outlook-exchange/1.0.0/src/app.py b/outlook-exchange/1.0.0/src/app.py index 43673a10..38d8b2cd 100644 --- a/outlook-exchange/1.0.0/src/app.py +++ b/outlook-exchange/1.0.0/src/app.py @@ -198,14 +198,13 @@ def send_email( account=account, subject=subject, body=body, - to_recipients=[ - Mailbox(email_address=address) for address in recipient.split(", ") - ], - cc_recipients=[ - Mailbox(email_address=address) for address in ccrecipient.split(", ") - ], + to_recipients=[] ) + for address in recipient.split(", "): + address = address.strip() + m.to_recipients.append(Mailbox(email_address=address)) + file_uids = str(attachments).split() if len(file_uids) > 0: for file_uid in file_uids: diff --git a/passivetotal/1.0.0/requirements.txt b/passivetotal/1.0.0/requirements.txt deleted file mode 100644 index fd7d3e06..00000000 --- a/passivetotal/1.0.0/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -requests==2.25.1 \ No newline at end of file diff --git a/email/1.2.0/Dockerfile b/postgress/1.0.0/Dockerfile similarity index 100% rename from email/1.2.0/Dockerfile rename to postgress/1.0.0/Dockerfile index bcc1273d..41f976bb 100644 --- a/email/1.2.0/Dockerfile +++ b/postgress/1.0.0/Dockerfile @@ -21,7 +21,7 @@ COPY src /app # Install any binary dependencies needed in our final image # RUN apk --no-cache add --update my_binary_dependency + # Finally, lets run our app! 
WORKDIR /app CMD python app.py --log-level DEBUG - diff --git a/postgress/1.0.0/README.md b/postgress/1.0.0/README.md new file mode 100644 index 00000000..96f475a4 --- /dev/null +++ b/postgress/1.0.0/README.md @@ -0,0 +1,3 @@ +# PostgreSQL Shuffle App +This app connects to PostgreSQL and executes queries. + diff --git a/postgress/1.0.0/api.yaml b/postgress/1.0.0/api.yaml new file mode 100644 index 00000000..26467487 --- /dev/null +++ b/postgress/1.0.0/api.yaml @@ -0,0 +1,55 @@ +app_version: 1.0.0 +name: postgress +description: postgress integration. Compatible with SQL databases. +contact_info: + name: "@d4rkw0lv3s" + url: https://github.com/D4rkw0lv3s + email: d4rkw0lv3s@outlook.pt +tags: + - postgress +categories: + - Intel + - Network +actions: + - name: run_query + description: Create a new database + parameters: + - name: host + description: mysql server ip or fqdn + example: "myserver.com or 127.0.0.1" + required: true + schema: + type: string + - name: port + description: mysql database + example: "my_database" + required: false + schema: + type: string + - name: dbname + description: mysql database + example: "my_database" + required: false + schema: + type: string + - name: user + description: mysql database + example: "my_database" + required: false + schema: + type: string + - name: password + description: mysql database + example: "my_database" + required: false + schema: + type: string + - name: query + description: mysql database + example: "my_database" + required: false + schema: + type: string + return: + schema: + type: string diff --git a/postgress/1.0.0/requirements.txt b/postgress/1.0.0/requirements.txt new file mode 100644 index 00000000..78f864b2 --- /dev/null +++ b/postgress/1.0.0/requirements.txt @@ -0,0 +1,2 @@ +psycopg2-binary +shuffle-sdk diff --git a/postgress/1.0.0/src/app.py b/postgress/1.0.0/src/app.py new file mode 100644 index 00000000..3c973e49 --- /dev/null +++ b/postgress/1.0.0/src/app.py @@ -0,0 +1,35 @@ +import psycopg2 +from 
psycopg2.extras import RealDictCursor +#from walkoff_app_sdk.app_base import AppBase +from shuffle_sdk import AppBase + + +class PostgreSQL(AppBase): + __version__ = "1.0.0" + app_name = "PostgreSQL" + + def __init__(self, redis, logger, console_logger=None): + super().__init__(redis, logger, console_logger) + + def connect(self, host, port, dbname, user, password): + conn = psycopg2.connect( + host=host, + port=port, + dbname=dbname, + user=user, + password=password, + cursor_factory=RealDictCursor + ) + return conn + + def run_query(self, host, port, dbname, user, password, query): + with self.connect(host, port, dbname, user, password) as conn: + with conn.cursor() as cur: + cur.execute(query) + try: + return {"result": cur.fetchall()} + except psycopg2.ProgrammingError: + return {"message": "Query executed successfully, no data returned."} + +if __name__ == "__main__": + PostgreSQL.run() diff --git a/recordedfuture/1.0.0/requirements.txt b/recordedfuture/1.0.0/requirements.txt deleted file mode 100644 index fd7d3e06..00000000 --- a/recordedfuture/1.0.0/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -requests==2.25.1 \ No newline at end of file diff --git a/http/1.1.0/Dockerfile b/redis/1.0.0/Dockerfile similarity index 100% rename from http/1.1.0/Dockerfile rename to redis/1.0.0/Dockerfile diff --git a/redis/1.0.0/api.yaml b/redis/1.0.0/api.yaml new file mode 100644 index 00000000..0ee97122 --- /dev/null +++ b/redis/1.0.0/api.yaml @@ -0,0 +1,110 @@ +app_version: 1.0.0 +name: Redis +description: Redis integration. 
+tags: + - redis +categories: + - Other +# contact_info: +# name: "@frikkylikeme" +# url: https://shuffler.io +# email: frikky@shuffler.io +authentication: + required: true + parameters: + - name: server + description: Redis server ip + example: "127.0.0.1" + required: true + schema: + type: string + - name: port + description: Redis port + example: "6379" + required: true + schema: + type: string + - name: password + description: redis password + example: "*****" + required: false + schema: + type: string + - name: database + description: redis database + example: "0" + required: false + options: + - 0 + - 1 + - 2 + - 3 + - 4 + - 5 + - 6 + - 7 + - 8 + - 9 + - 10 + - 11 + - 12 + - 13 + - 14 + - 15 + schema: + type: string + +actions: + - name: set_value + description: Set a key value pair + parameters: + - name: key + description: Key name + required: true + multiline: false + example: "my key" + schema: + type: string + - name: value + description: Value + required: true + multiline: true + example: 'my value' + schema: + type: string + - name: nx + description: Set value only if not exists + required: true + options: + - "false" + - "true" + multiline: false + example: "true" + schema: + type: bool + - name: ex + description: Expiration time in seconds + required: false + multiline: false + example: '60' + schema: + type: string + returns: + schema: + type: string + + - name: get_value + description: Get value of a key + parameters: + - name: key + description: Key name + required: true + multiline: false + example: "my key" + schema: + type: string + returns: + schema: + type: string +large_image: 
data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0idXRmLTgiPz48IS0tIFVwbG9hZGVkIHRvOiBTVkcgUmVwbywgd3d3LnN2Z3JlcG8uY29tLCBHZW5lcmF0b3I6IFNWRyBSZXBvIE1peGVyIFRvb2xzIC0tPgo8c3ZnIHdpZHRoPSI4MDBweCIgaGVpZ2h0PSI4MDBweCIgdmlld0JveD0iMCAtMTggMjU2IDI1NiIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiBwcmVzZXJ2ZUFzcGVjdFJhdGlvPSJ4TWluWU1pbiBtZWV0Ij48cGF0aCBkPSJNMjQ1Ljk3IDE2OC45NDNjLTEzLjY2MiA3LjEyMS04NC40MzQgMzYuMjItOTkuNTAxIDQ0LjA3NS0xNS4wNjcgNy44NTYtMjMuNDM3IDcuNzgtMzUuMzQgMi4wOS0xMS45MDItNS42OS04Ny4yMTYtMzYuMTEyLTEwMC43ODMtNDIuNTk3QzMuNTY2IDE2OS4yNzEgMCAxNjYuNTM1IDAgMTYzLjk1MXYtMjUuODc2czk4LjA1LTIxLjM0NSAxMTMuODc5LTI3LjAyNGMxNS44MjgtNS42NzkgMjEuMzItNS44ODQgMzQuNzktLjk1IDEzLjQ3MiA0LjkzNiA5NC4wMTggMTkuNDY4IDEwNy4zMzEgMjQuMzQ0bC0uMDA2IDI1LjUxYy4wMDIgMi41NTgtMy4wNyA1LjM2NC0xMC4wMjQgOC45ODgiIGZpbGw9IiM5MTI2MjYiLz48cGF0aCBkPSJNMjQ1Ljk2NSAxNDMuMjJjLTEzLjY2MSA3LjExOC04NC40MzEgMzYuMjE4LTk5LjQ5OCA0NC4wNzItMTUuMDY2IDcuODU3LTIzLjQzNiA3Ljc4LTM1LjMzOCAyLjA5LTExLjkwMy01LjY4Ni04Ny4yMTQtMzYuMTEzLTEwMC43OC00Mi41OTQtMTMuNTY2LTYuNDg1LTEzLjg1LTEwLjk0OC0uNTI0LTE2LjE2NiAxMy4zMjYtNS4yMiA4OC4yMjQtMzQuNjA1IDEwNC4wNTUtNDAuMjg0IDE1LjgyOC01LjY3NyAyMS4zMTktNS44ODQgMzQuNzg5LS45NDggMTMuNDcxIDQuOTM0IDgzLjgxOSAzMi45MzUgOTcuMTMgMzcuODEgMTMuMzE2IDQuODgxIDEzLjgyNyA4LjkuMTY2IDE2LjAyIiBmaWxsPSIjQzYzMDJCIi8+PHBhdGggZD0iTTI0NS45NyAxMjcuMDc0Yy0xMy42NjIgNy4xMjItODQuNDM0IDM2LjIyLTk5LjUwMSA0NC4wNzgtMTUuMDY3IDcuODUzLTIzLjQzNyA3Ljc3Ny0zNS4zNCAyLjA4Ny0xMS45MDMtNS42ODctODcuMjE2LTM2LjExMi0xMDAuNzgzLTQyLjU5N0MzLjU2NiAxMjcuNDAyIDAgMTI0LjY3IDAgMTIyLjA4NVY5Ni4yMDZzOTguMDUtMjEuMzQ0IDExMy44NzktMjcuMDIzYzE1LjgyOC01LjY3OSAyMS4zMi01Ljg4NSAzNC43OS0uOTVDMTYyLjE0MiA3My4xNjggMjQyLjY4OCA4Ny42OTcgMjU2IDkyLjU3NGwtLjAwNiAyNS41MTNjLjAwMiAyLjU1Ny0zLjA3IDUuMzYzLTEwLjAyNCA4Ljk4NyIgZmlsbD0iIzkxMjYyNiIvPjxwYXRoIGQ9Ik0yNDUuOTY1IDEwMS4zNTFjLTEzLjY2MSA3LjEyLTg0LjQzMSAzNi4yMTgtOTkuNDk4IDQ0LjA3NS0xNS4wNjYgNy44NTQtMjMuNDM2IDcuNzc3LTM1LjMzOCAyLjA4Ny0xMS45MDMtNS42ODYtODcuMjE0LTM2LjExMi0xMDAuNzgtNDIuNTk0LTEzLjU2Ni02LjQ4My0xMy44NS0xMC45ND
ctLjUyNC0xNi4xNjdDMjMuMTUxIDgzLjUzNSA5OC4wNSA1NC4xNDggMTEzLjg4IDQ4LjQ3YzE1LjgyOC01LjY3OCAyMS4zMTktNS44ODQgMzQuNzg5LS45NDkgMTMuNDcxIDQuOTM0IDgzLjgxOSAzMi45MzMgOTcuMTMgMzcuODEgMTMuMzE2IDQuODggMTMuODI3IDguOS4xNjYgMTYuMDIiIGZpbGw9IiNDNjMwMkIiLz48cGF0aCBkPSJNMjQ1Ljk3IDgzLjY1M2MtMTMuNjYyIDcuMTItODQuNDM0IDM2LjIyLTk5LjUwMSA0NC4wNzgtMTUuMDY3IDcuODU0LTIzLjQzNyA3Ljc3Ny0zNS4zNCAyLjA4Ny0xMS45MDMtNS42ODctODcuMjE2LTM2LjExMy0xMDAuNzgzLTQyLjU5NUMzLjU2NiA4My45OCAwIDgxLjI0NyAwIDc4LjY2NXYtMjUuODhzOTguMDUtMjEuMzQzIDExMy44NzktMjcuMDIxYzE1LjgyOC01LjY4IDIxLjMyLTUuODg0IDM0Ljc5LS45NUMxNjIuMTQyIDI5Ljc0OSAyNDIuNjg4IDQ0LjI3OCAyNTYgNDkuMTU1bC0uMDA2IDI1LjUxMmMuMDAyIDIuNTU1LTMuMDcgNS4zNjEtMTAuMDI0IDguOTg2IiBmaWxsPSIjOTEyNjI2Ii8+PHBhdGggZD0iTTI0NS45NjUgNTcuOTNjLTEzLjY2MSA3LjEyLTg0LjQzMSAzNi4yMi05OS40OTggNDQuMDc0LTE1LjA2NiA3Ljg1NC0yMy40MzYgNy43NzctMzUuMzM4IDIuMDlDOTkuMjI3IDk4LjQwNCAyMy45MTUgNjcuOTggMTAuMzUgNjEuNDk3LTMuMjE3IDU1LjAxNS0zLjUgNTAuNTUgOS44MjUgNDUuMzMxIDIzLjE1MSA0MC4xMTMgOTguMDUgMTAuNzMgMTEzLjg4IDUuMDVjMTUuODI4LTUuNjc5IDIxLjMxOS01Ljg4MyAzNC43ODktLjk0OCAxMy40NzEgNC45MzUgODMuODE5IDMyLjkzNCA5Ny4xMyAzNy44MTEgMTMuMzE2IDQuODc2IDEzLjgyNyA4Ljg5Ny4xNjYgMTYuMDE3IiBmaWxsPSIjQzYzMDJCIi8+PHBhdGggZD0iTTE1OS4yODMgMzIuNzU3bC0yMi4wMSAyLjI4NS00LjkyNyAxMS44NTYtNy45NTgtMTMuMjMtMjUuNDE1LTIuMjg0IDE4Ljk2NC02LjgzOS01LjY5LTEwLjQ5OCAxNy43NTUgNi45NDQgMTYuNzM4LTUuNDgtNC41MjQgMTAuODU1IDE3LjA2NyA2LjM5MU0xMzEuMDMyIDkwLjI3NUw4OS45NTUgNzMuMjM4bDU4Ljg2LTkuMDM1LTE3Ljc4MyAyNi4wNzJNNzQuMDgyIDM5LjM0N2MxNy4zNzUgMCAzMS40NiA1LjQ2IDMxLjQ2IDEyLjE5NCAwIDYuNzM2LTE0LjA4NSAxMi4xOTUtMzEuNDYgMTIuMTk1cy0zMS40Ni01LjQ2LTMxLjQ2LTEyLjE5NWMwLTYuNzM0IDE0LjA4NS0xMi4xOTQgMzEuNDYtMTIuMTk0IiBmaWxsPSIjRkZGIi8+PHBhdGggZD0iTTE4NS4yOTUgMzUuOTk4bDM0LjgzNiAxMy43NjYtMzQuODA2IDEzLjc1My0uMDMtMjcuNTIiIGZpbGw9IiM2MjFCMUMiLz48cGF0aCBkPSJNMTQ2Ljc1NSA1MS4yNDNsMzguNTQtMTUuMjQ1LjAzIDI3LjUxOS0zLjc3OSAxLjQ3OC0zNC43OTEtMTMuNzUyIiBmaWxsPSIjOUEyOTI4Ii8+PC9zdmc+ +# yamllint disable-line rule:line-length diff --git a/redis/1.0.0/app.zip b/redis/1.0.0/app.zip new file 
class REDIS(AppBase):
    """Shuffle app wrapping basic Redis GET/SET operations."""

    __version__ = "1.0.0"
    app_name = "Redis"

    def __init__(self, redis, logger, console_logger=None):
        """
        Each app should have this __init__ to set up Redis and logging.
        :param redis:
        :param logger:
        :param console_logger:
        """
        print("INIT")
        super().__init__(redis, logger, console_logger)

    @staticmethod
    def _to_bool(value):
        """Coerce api.yaml string booleans ("true"/"false") to real bools.

        Any non-empty string is truthy in Python, so forwarding the string
        "false" unconverted would silently behave as True.
        """
        if isinstance(value, bool):
            return value
        return str(value).strip().lower() in ("true", "1", "yes")

    def _client(self, server, port, password=None, database=0):
        # `redis` here resolves to the module at method scope -- the `redis`
        # parameter of __init__ does not shadow it outside __init__.
        if password is None:
            return redis.Redis(decode_responses=True, host=server, port=port, db=database)
        return redis.Redis(decode_responses=True, host=server, port=port, db=database, password=password)

    def set_value(self, server, port, key, value, nx, ex=None, password=None, database=0):
        """
        Sets a key-value pair in Redis.

        nx=True sets the key only if it does not already exist.
        ex is an optional TTL in seconds.
        """
        redis_client = self._client(server, port, password, database)

        # ex arrives as a string from api.yaml; redis-py expects an int (or
        # timedelta), so coerce numeric strings and treat "" as no TTL.
        if isinstance(ex, str):
            ex = int(ex) if ex.strip() else None

        # BUGFIX: nx used to be passed through as a raw string, making
        # nx="false" act like nx=True (non-empty string is truthy).
        result = redis_client.set(name=key, value=value, nx=self._to_bool(nx), ex=ex)
        print(result)
        if result:  # If result is True, the key was successfully set
            print(f"Success: Key {key} set with value '{value}'")
            return {"success": True}

        print(f"Failed: Key {key} already exists.")
        return {"success": False}

    def get_value(self, server, port, key, password=None, database=0):
        """
        Gets a value for a key in Redis.
        """
        redis_client = self._client(server, port, password, database)

        result = redis_client.get(name=key)
        if result:
            return {"success": True, "value": result}
        return {"success": False, "error": f"Key {key} does not exist", "value": None}
+ +# Install any binary dependencies needed in our final image +# RUN apk --no-cache add --update my_binary_dependency +RUN apk --no-cache add jq git curl + +ENV SHELL=/bin/bash + +### Install Tesseract +ENV CC /usr/bin/clang +ENV CXX /usr/bin/clang++ +ENV LANG=C.UTF-8 +ENV TESSDATA_PREFIX=/usr/local/share/tessdata + +# Dev tools +WORKDIR /tmp +RUN apk update +RUN apk upgrade +RUN apk add file openssl openssl-dev bash tini leptonica-dev openjpeg-dev tiff-dev libpng-dev zlib-dev libgcc mupdf-dev jbig2dec-dev +RUN apk add freetype-dev openblas-dev ffmpeg-dev linux-headers aspell-dev aspell-en # enchant-dev jasper-dev +RUN apk add --virtual .dev-deps git clang clang-dev g++ make automake autoconf libtool pkgconfig cmake ninja +RUN apk add --virtual .dev-testing-deps -X http://dl-3.alpinelinux.org/alpine/edge/testing autoconf-archive +RUN ln -s /usr/include/locale.h /usr/include/xlocale.h + +RUN apk add tesseract-ocr +RUN apk add poppler-utils + +# Install from main +RUN mkdir /usr/local/share/tessdata +RUN mkdir src +RUN cd src +RUN wget https://github.com/tesseract-ocr/tessdata_fast/raw/main/eng.traineddata -P /usr/local/share/tessdata +RUN git clone --depth 1 https://github.com/tesseract-ocr/tesseract.git + +#RUN curl -fsSL https://ollama.com/install.sh | sh +#RUN ollama pull llama3.2 +#RUN cd tesseract && ./autogen.sh && ./configure --build=x86_64-alpine-linux-musl --host=x86_64-alpine-linux-musl && make && make install && cd /tmp/src + +# Finally, lets run our app! 
+WORKDIR /app +CMD ["python", "app.py", "--log-level", "DEBUG"] diff --git a/shuffle-ai/1.1.0/Dockerfile_GPU b/shuffle-ai/1.1.0/Dockerfile_GPU new file mode 100644 index 00000000..ed2798c2 --- /dev/null +++ b/shuffle-ai/1.1.0/Dockerfile_GPU @@ -0,0 +1,108 @@ +FROM python:3.10.18-slim + +# Switch back to our base image and copy in all of our built packages and source code +COPY requirements.txt /requirements.txt +# Check if requirements.txt contains llama-cpp-python or not +RUN grep -q "^llama-cpp-python" /requirements.txt \ + || (echo "❌ requirements.txt missing llama-cpp-python" && exit 1) + +### Install Tesseract +ENV SHELL=/bin/bash +ENV CC /usr/bin/clang +ENV CXX /usr/bin/clang++ +ENV LANG=C.UTF-8 +ENV TESSDATA_PREFIX=/usr/local/share/tessdata + +# Install all build tools needed for our pip installs +RUN apt update +RUN apt install -y clang g++ make automake autoconf libtool cmake + +## Install the same packages with apt as with apk, but ensure they exist in apt +RUN apt install -y jq git curl +RUN apt install -y file openssl bash tini libpng-dev aspell-en +RUN apt install -y git clang g++ make automake autoconf libtool cmake +RUN apt install -y autoconf-archive wget + +# Install cuda toolkit +#RUN wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/x86_64/cuda-archive-keyring.gpg +#RUN dpkg -i cuda-archive-keyring.gpg +#RUN rm cuda-archive-keyring.gpg +#RUN apt update +#RUN apt install -y cuda +#RUN echo 'export PATH=/usr/local/cuda/bin:$PATH' >> ~/.bashrc +#RUN echo 'export LD_LIBRARY_PATH=/usr/local/cuda/lib64:$LD_LIBRARY_PATH' >> ~/.bashrc +#RUN source ~/.bashrc + +# Larger model +RUN mkdir -p /models + +# Fails. 6 bit, 8B model. 
+#RUN wget https://huggingface.co/RichardErkhov/meta-llama_-_Meta-Llama-3-8B-gguf/blob/main/Meta-Llama-3-8B.Q6_K.gguf?download=true -O /models/Meta-Llama-3-8B.Q6_K.gguf +#ENV MODEL_PATH="/models/Meta-Llama-3-8B.Q6_K.gguf" + +# Simple small Llama wrapper +RUN wget https://huggingface.co/unsloth/DeepSeek-R1-Distill-Llama-8B-GGUF/resolve/main/DeepSeek-R1-Distill-Llama-8B-Q2_K.gguf?download=true -O /models/DeepSeek-R1-Distill-Llama.gguf +# Larger one +#RUN wget https://huggingface.co/unsloth/DeepSeek-R1-Distill-Llama-8B-GGUF/resolve/main/DeepSeek-R1-Distill-Llama-8B-Q8_0.gguf?download=true -O /models/DeepSeek-R1-Distill-Llama.gguf +ENV MODEL_PATH="/models/DeepSeek-R1-Distill-Llama.gguf" + +# Failing? Bad magic bytes. +#RUN wget https://huggingface.co/QuantFactory/Llama-3.2-3B-GGUF/resolve/main/Llama-3.2-3B.Q2_K.gguf?download=true -O /models/Llama-3.2-3B.Q2_K.gguf + + +# Install all of our pip packages in a single directory that we can copy to our base image later +RUN mkdir /install +WORKDIR /install + +RUN python3 -m pip install -r /requirements.txt +RUN CMAKE_ARGS="-DLLAMA_CUBLAS=on" python3 -m pip install llama-cpp-python --upgrade --force-reinstall --no-cache-dir + + +# Install any binary dependencies needed in our final image + + +# Dev tools +WORKDIR /tmp +#RUN apk update +#RUN apk upgrade + + +RUN ln -s /usr/include/locale.h /usr/include/xlocale.h + +#RUN apk add tesseract-ocr +RUN apt install -y tesseract-ocr +#RUN apk add poppler-utils +RUN apt install -y poppler-utils +RUN apt clean && rm -rf /var/lib/apt/lists/* + +# Install from main +RUN mkdir /usr/local/share/tessdata +RUN wget https://github.com/tesseract-ocr/tessdata_fast/raw/main/eng.traineddata -P /usr/local/share/tessdata + +RUN mkdir src +RUN cd src + +RUN git clone --depth 1 https://github.com/tesseract-ocr/tesseract.git + +#RUN curl -fsSL https://ollama.com/install.sh | sh +# Install to /usr/local +#RUN wget https://ollama.com/install.sh -O /usr/local/bin/ollama-install +#RUN chmod +x 
/usr/local/bin/ollama-install +#RUN sh /usr/local/bin/ollama-install +# +#RUN ls -alh /usr/bin +#RUN ollama serve & sleep 2 && ollama pull nezahatkorkmaz/deepseek-v3 +#CMD ["sh", "-c", "ollama serve & sleep 2 && python app.py --log-level DEBUG"] + +#RUN wget https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-GGUF/resolve/main/mistral-7b-instruct-v0.1.Q4_K_M.gguf +RUN python3 -m pip install ctransformers --no-binary ctransformers + +# Finally, lets run our app! +ENV GIN_MODE=release +ENV SHUFFLE_APP_SDK_TIMEOUT=300 +#ENV LD_LIBRARY_PATH=/usr/local/lib/python3.10/site-packages/ctransformers/lib/basic/libctransformers.so +#RUN chmod 755 /usr/local/lib/python3.10/site-packages/ctransformers/lib/basic/libctransformers.so + +COPY src /app +WORKDIR /app +CMD ["python", "app.py", "--log-level", "DEBUG"] diff --git a/shuffle-ai/1.1.0/api.yaml b/shuffle-ai/1.1.0/api.yaml new file mode 100644 index 00000000..ab3f6243 --- /dev/null +++ b/shuffle-ai/1.1.0/api.yaml @@ -0,0 +1,167 @@ +--- +app_version: 1.1.0 +name: Shuffle AI +description: An EXPERIMENTAL AI tool app for Shuffle +tags: + - AI + - Shuffle + - LLM +categories: + - AI + - LLM + - Shuffle +contact_info: + name: "@frikkylikeme" + url: https://shuffler.io + email: support@shuffler.io +actions: + #- name: run_llm + # description: "Runs a local LLM, with a GPU or CPU (slow). Default model is set up in Dockerfile" + # parameters: + # - name: input + # description: "The input question to the model" + # required: true + # multiline: true + # example: "" + # schema: + # type: string + # - name: system_message + # description: "The system message use, if any" + # required: false + # multiline: false + # example: "" + # schema: + # type: string + + - name: shuffle_cloud_inference + description: Input ANY kind of data in the format you want, and the format you want it in. Default is a business-y email. Uses ShuffleGPT, which is based on OpenAI and our own model. 
+ parameters: + - name: apikey + description: Your https://shuffler.io apikey + required: true + multiline: false + example: "" + schema: + type: string + - name: text + description: The text you want to be converted (ANY format) + required: true + multiline: true + example: "Bad IPs are 1.2.3.4 and there's no good way to format this. JSON works too!" + schema: + type: string + - name: formatting + description: The format to use. + required: false + multiline: true + example: "Make it work as a ticket we can put in service now that is human readable for security analysts" + schema: + type: string + returns: + schema: + type: string + - name: generate_report + description: Input ANY kind of data in the format you want, and it will make an HTML report for you. This can be downloaded from the File location. + parameters: + - name: apikey + description: Your https://shuffler.io apikey + required: true + multiline: false + example: "" + schema: + type: string + - name: input_data + description: The text you want to be converted (ANY format) + required: true + multiline: true + example: "Bad IPs are 1.2.3.4 and there's no good way to format this. JSON works too!" 
+ schema: + type: string + - name: report_title + description: The report title to be used in the report + required: true + multiline: true + example: "Statistics for October" + schema: + type: string + - name: report_name + description: The name of the HTML file + required: false + multiline: true + example: "statistics.html" + schema: + type: string + returns: + schema: + type: string + - name: extract_text_from_pdf + description: Returns text from a pdf + parameters: + - name: file_id + description: The file to find text in + required: true + multiline: false + example: "file_" + schema: + type: string + returns: + schema: + type: string + - name: extract_text_from_image + description: Returns text from an image + parameters: + - name: file_id + description: The file to find text in + required: true + multiline: false + example: "file_" + schema: + type: string + returns: + schema: + type: string + - name: run_schemaless + description: Runs an automatically translated action + parameters: + - name: category + description: The category the action is in + required: true + multiline: false + schema: + type: string + - name: action + description: The action label to run + required: true + multiline: false + schema: + type: string + - name: app_name + description: The app to run the action in + required: false + multiline: false + schema: + type: string + - name: fields + description: The additional fields to add + required: false + multiline: false + schema: + type: string + returns: + schema: + type: string + - name: transcribe_audio + description: Returns text from audio + parameters: + - name: file_id + description: The file containing the audio + required: true + multiline: false + example: "file_" + schema: + type: string + returns: + schema: + type: string + +large_image: 
data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAK4AAACuCAYAAACvDDbuAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAAAAB3RJTUUH5AgXDjM6hEZGWwAAD+lJREFUeNrtXb/vJTcRH7/v3iVBCqRBiCAQAtHwq4AWRElHwX8AoqbmXwDRpiH/QyQkGoogUSAhKIKUAE1IdSRSREhQQk7c3XtD8X55vePxjNfe3bk3H+nu+96uPf54POtnj8fe8OQX30JwOIxhtzYBh6MGOsPF0z9p2iWwpd8LjX6W5vWUYaiqlBuvLT5b5TQDPlRwmMSAABBg+kCer+XuAeQf4tL9tAxJ/hIfZGSm8rhyEfjytfxr9FeSX+KjvVfipNVpWlaPNhsAEPCS7Ao8FYnRlbO4ksLnjiSQvIanv4FNjwJ5pXIlMq6MQpIqqPnQKQKbjuPDtZlG55o6UHXWtVncZZTbbNBVB1P5dJYguCbJJ1WjOG8PVOioSm5HPrVt1rwuyN+K+PSZnNV1M/MmEFubfFjjU9tmK9XBJ2cOk3DDdZiEG67DJOrGuA7HyvAe12ESAxa73KPrN1z8gUikCCdvcD5NXnpQpA8nNhh9m5Yn4ZMrV8dHV/8a/dRA0x419a3lI9GBtM2GcrGYFXRNUU5TyluTOpdXwqeUt6YOpby9DUTLZylOcRlzdBTf2yV3ZBFOmKSHQh5KpjSSSpqG4s6VkUubqw8W8knTSnWk0Y+2jF5tlmuDUloJn6T8gRVcEpJ+3srChHSNt8RJsq4p+S41LC13KTcu/RJt1pLPKY1Pzhwm4YbrMAk3XIdJTMe4aeCjJhBVk0YiQ1MWZHhLgmO5QNVWfKRlavlIIQnurQmcnaMjSbBxhtMwYUxODpLcl2tUhvPlNE6VkiuoFVLXKT6ZfBjxRIIzOSlgWpLSB8uZ0g3BjeVDlFGEos0mfKKL7CQrY2ES7pM2i/OX22w4/sWReEhEnUOTxx3a+FrawQGZh04/rWe6oJBKo5zT4zLjPHE9ZHym5YzToogzfQcmfLgOhuLF/Sjm2izVDyXnrKtcmmmdaKumf+RyCw5Xn7OmzQaJF0fiEZG6BjXpYUYaSVkaPrXeHe4eVaZEr3Prqrmmrbc2T8lrmOMjn5xJHeJLYkk+PfzNTxOflrwF0EeHbU0Zt2wsW+PTkncB7g5zmMSwzUfS4eDhPa7DJK5jXGorsnZxonbRIbeAoOUjkUvlp+qxFp9YNuWL0nBqsVCkqUsrHQnuX+Nx5/qcJDI0kWgtJh7ihYCN8aG+13DqOXlbWUfD+fN0AUEmp3RcUWlVEwCynb5ssYLnxHViJT6ULCykb8EnzUfpqBWfVAdcnt5tprGhIe10WnjHpB2FtMPWcpM66yXyOad4Lz4Srq34SHhwZfRos1w9Y/jkzGESvj3dYRLe4zpMwg3XYRJuuA6T4M/Hzfk/OGd9OP2HOE2f8wtBlCebJrkfp+Gc3AGmiSiuaVlpwkmajL4osPUm9FMqIzBOJolfjGuzEtdUwWl53Dm7Eh9pzIdps+FiYJyi1N+Rvs/6OLCQBul8Ip8R08ik3EwhLZz1Wv8XmU7ZZqX7OT2gUIB2oaRBm+2ovDm5nM+ulEeiD8yka8UnJ1PCP82r9YWW8iCU5XO8W/PhPmvllNKW7lEyszsgNKuzkspJFZFL15uPtIweq7A1xiKpz1J8tGXP+dE53/fJmcMk6hcgJO8XqokEKi5uYzTG29LqSev95JqyKsoOOxjNpKQBD7VFc5GBJRsi+NQHkkv6+7m/UxTufwLCCy+CbAruyOLDdwEf/uf6vbbNJukzlogZC6wMdhAcM7ohHPawe/GrcO+HPwe4u782G7sIAE9++0vYv/YKwO6usfCaka0etgwXAGB3D8JznwIYnlmbiW0M92FbQy0d+MmZ3X
o5JDDcvuXJ2ZYqtyUuTwuM6nSXctcufHCOZqkjPScXhbIcdeD0XUpfKyNNy8nlyhuozLkM8XxR6pjm7tc4Fdx620I7lWq10JCm0ZanWoBwm3FsBe1WznpadbTg4A9PI2xx7FUKHopQjg7TKqNnpbioIUcFUGUsy1CS8fFYBYdJuOE6TMIN12ESgyiKiwO1bQOJe1w+6p42Etmhwmi6kLZXfC2G9IUj2vulY2wIPrv4onRhIXcRqS0DiWxkhF0uIb37wG22LRCSuVCyekC2GSXj9CG3YyT+krWh+KPAhkTvgGDKqbqnWbBwY+2Pnm3Wy4aMRYc1MuPDvp0skwgAh8PaJGbh5k4kx0f/hce/ewnw/QenXQCTFJDfQy45PzFNn5NHsoPy/u6gzE+nObzz91P9Z+6kWAm2zg6bDMoq8OQxHN78Axze/htAaB1EbQhhdzyfgRqIGoCxoUIjhDuA3ZDpcR0W4C3nMInbNVw7v4oOAsehArVFPL0uOjMM+DlM+pk7t7/BDuwcJsM6gcM7WweOX05nFCHNi12ASRfLo3QaX9O0GWTylOTnZIMwf4YPPTlD4iMm7aZwAGOUf3Rf48wjHNzVOMkKFA8pp0RHZ1mjdihs5R61PWbsWlphgs/E5gptNvFfSLY8QPk7dVbh+UNg8qfnJsZ8Bo0hzF0Y2Nqvc0s+Vbs5YL5OLfPRcorT2hvjtuxyHWZhzHCX6AMcFtB2B0RvtKZqqe6OEYz1uA7HEbdruN7ZmsZtGq4brXnQhlsbLFkDrY9mC9giH41/dSlONfeEIBcgss7nXopInPdkYN95J3XD1bMgkJUNFOxsDNLgyiynhYyX5dnAhnLyhzmO4V7IO8+xyZEgx5UqvJ41rOUTdhBOr2w6KjZc+B1FBkLGVUoAABQEcmPu6rPPw73v/gh2n/wMANYEhAd4/NqvYf/Wn5pEyPW2IUrOzQWSHyHdkEJgN8D97/0Edp/7GgDu9fnDDvD9t+HRqy8BPvxQ9i6xEXUEuPcMDF//Puw+/aVqDewfvA77f/zx9M40e7jNeNw5CDu4++K34e4r36kWcXj3TYDfvwz8D79ml1clDPuxx9FhuUik0rblVihFWLX+7ZFEXE2ioLBNg9fUSRopVsOjJbioskZlDuyAvmflpOWsOUNu/cBQ8jW/1A0np11RG+GjwG36cQHqFWnBcG4Axgx37d/I1uXXcvCnx6BXoQXf3mOAzvVpooJzaOcWdKBH1fZ07dCsFZpNgmfZbaOJ2dxnpwkNFC3C9MBcGxo0OugxwV8LWKm5lg9sFQdszKGhLAla2dCuduuOZcypx+UXdk0OK5e/hXKNTc4cjiPGhtvTX1njI6Z2+vbuKtaKspLooXdkXs1u5yUR7/LdROMsraSSIfTa6pqWodE9Mvla6sCI8d7uUMEXIEzjdg3XYRr2osOePIbDR+9BGO7re78QAD/+AODwpK5sBDg6dGyGAtL1sYnLGDe3+2BNTNycYQf7B2/Aw5d/XB9HejjA4YN3jgHUNQ132MOTv/wG9v98A+CgFBCO/+FH/wJ89PBaSY1OULZzQyQL2skayVwg/7Dk3Ky2IlcEgEcfw/7dt+YJnRP1f9jDoz+/AvM0FU4c1u8mes59e+ZXDhXmPE+tForD+lH73Q6EluiozfaldnzWQUWQzdprPk87lg44nkTKN+DT/10S7lW4VYz8wWucOTAPtl5e4mgfjmu0/b3HdZiEG67DJNxwbxlGhwkAuZeXAJS3Qpfemq7dds1tS5dsbc6dAyQpS5uGe+lKrJLSGUqlCb2GcwUuCxBzt71T2/g7t9mQniofv0yjWOtMYdSLM6Sy0pd5iLdFSQtUyiJtRnjmGOdhqq5bo5WzUXAYzns2Lu2tjaqb0WaTHRBrR9cvEVG4VF3WkLsGnzXqohzjbk3dt4hG/jDDxy8BLL5y5miBZi1wa9vT14dJ0o2qft6/1GhQZ1SV9uJxd3cQ7j+XD7RJ40JK38/XAPKz4ly+OG
+KwOTDwn0uDSKEZ58/vgH+hmHLcA97uPvCN+G5H/wMoCaQ/KkAAtzdg/DCZ9cmsipsGS4ce5u7z38DYHhmbTL2YfjBH28DOM80s+MoxllVvfkwKudSbiL0dB0NTya2iGpNYmIzl+/EdexjQ8PEGE4FhdPHMAlbLhcsdWaPnfDEAxQJnbx53TEPJ51j3N7CrEfbSNt+arzXt57X2RBx94LsUGHOGRQtF7Fa8HFQQOabJmc5XQ8b8iAbh0mYNFzvdefD+nRhyPowqWitc2VbRyutGCF18+ilU2mEXWX51zFuKbqlZ/RLy0gixzagiS6sgL2hghuwAywarsMBxgzXO9u2sBzZWHwHRLwrQ5rWYQBIfuwCKnZJEpvEYSg9dRoncnejtdxFbBRLqFQzr5fSudH3nDmOaH26yHIwNcZ1NIZNmwWArYU1Fg8HDLB/7wH879VfAey2Rd0a9g/+2ubUyZUOdAz//umXjT136GPd2cDNnM9bC4Pd1gbOx3WsDh/jOkzCDddhEpcjmKiFhvGLQwDitJNrYTz05H7MS+N56hiq0mbYCfeIj2STb2s+cSJEOrguJ4fScaneOW7kOWZJm4VCmaPFg8wKgcSGuLpzR49Rerm8vIRaaECgvyB1Tbl9qOZoMiykHeVhVoZKwW9N+CSJuPwsH4YY12aTa5TxYyZPpsxSDG/Rhgp1lyxUnK/7UMFhEm64DpNIlnzTAdXcsJml8rdO1yt/K+R45EJUluS9zHaWITuQJb9rsVT+HvuKe+RvhdIIcE3ey4Rj+VDBYRJuuA6TcMN1mMT15SWMZ5h10Oc86+dr50s14QWch7rEh5PHef+psgsyqB0iI2e+hE+pDlpvvkQ/uVUMDfdSnTq12TA58injFUdOMPB5AeiALtHcUrstXrqSINnaoVjxyE5ra1ZipHMsTV2kMiQ8NDw7tdmqQ4WtzNEd9uBjXIdJuOE6TMLoy0sct46KHndNS6d2pW5tp+rW+Jw5rVl2qpP5Oqrcnr52w9RMgbfA8db5tAsp8DGuwyTaGW6DB7ppn9CCzxKnvKz9Kz7j/prUi0cwqQLQDBtvrp5uvMc/Wf00oFAT5FjscbcwMloCt1LPWvTUT41sH+M6TMIN12ESw3UPd8gPtrh7JeTyXvZGn0KD0jSlMms5Sfhw92vkUvXT5tPWt3WbSfjMsSFl3ujlJdy+4xkjnFze+PWrNWXWclqaT6t82vq2bjMJnzk2pMzrQwWHSbjhOkzCDdchxpZchpezwySQvHhiyVMLevPRctXwqeWmfcv5GaVTGKRy557YIHnhpETeoCl05grhbPlL89HK1vCp5darvZbgo+XEwYcKDpNww3WYxC6/U5PY5oun66MzPHH8L05PpqHKghn+TpjyictkZQLPh4u6yeknvXeWU+JD6TDHJ/cbn93Bi8nnDKdJm8EG2+zIZwBudlbjUOYOpj1frClPwyf3OZuXuaEx3lgWZixKxIfZ911rvJO65PRFVmZjbYY+VHDYhBuuwyTccB0mcdkB0cr5z70pW/pm7Bo+LesgqUsrPjVye9WXkqld8FiizRCi6LBWjmTRPGGG/JZ5ejvoa1ai1qwvlWarbeZDBYdJuOE6TKKP4W7xJdFb4+R8ZvH5P852gxhpwOZ9AAAAJXRFWHRkYXRlOmNyZWF0ZQAyMDIwLTA4LTIzVDE0OjUyOjAwKzAyOjAwetRgVgAAACV0RVh0ZGF0ZTptb2RpZnkAMjAyMC0wOC0yM1QxNDo1MTo1OCswMjowMJuxI+oAAAAASUVORK5CYII= diff --git a/shuffle-ai/1.1.0/requirements.txt b/shuffle-ai/1.1.0/requirements.txt new file mode 100644 index 00000000..bb5d1927 --- /dev/null +++ b/shuffle-ai/1.1.0/requirements.txt @@ -0,0 +1,5 @@ 
def load_llm_model(model):
    """Load a llama.cpp model from the path `model`.

    Falls back to ~/downloads/<basename> when the given path is missing.
    Returns a llama_cpp.Llama instance on success, or an error dict
    ({"success": False, "reason": ..., "details": ...}) on failure --
    callers distinguish the two with isinstance(..., dict).
    """
    print("Using model path '%s'" % model)

    if not os.path.exists(model):
        print("Could not find model at path %s" % model)
        model_name = model.split("/")[-1]

        # Fall back to $HOME/downloads/<model_name>
        fallback = os.path.join(os.path.expanduser("~"), "downloads", model_name)
        print("Checking fallback model path '%s'" % fallback)
        if os.path.exists(fallback):
            model = fallback
        else:
            return {
                "success": False,
                "reason": "Model not found at path %s" % model,
                "details": "Ensure the model path is correct"
            }

    gpu_layers = os.getenv("GPU_LAYERS")
    if not gpu_layers:
        # Refuse to run on CPU -- too slow to be useful.
        print("No GPU layers set.")
        return {
            "success": False,
            "reason": "GPU layers not set",
            "details": "Set GPU_LAYERS environment variable to the number of GPU layers to use (e.g. 8)."
        }

    print("GPU Layers: %s" % gpu_layers)
    try:
        gpu_layers = int(gpu_layers)
    except ValueError:
        # BUGFIX: a non-numeric GPU_LAYERS used to raise ValueError here and
        # surface only as a generic "Failed to load LLM model" at import time.
        return {
            "success": False,
            "reason": "GPU_LAYERS is not a number: %s" % gpu_layers,
            "details": "Set GPU_LAYERS environment variable to an integer (e.g. 8)."
        }

    # NOTE(review): non-positive values fall back to 8 GPU layers, mirroring
    # the original behavior -- confirm this is intended rather than CPU mode.
    if gpu_layers <= 0:
        gpu_layers = 8

    return llama_cpp.Llama(model_path=model, n_gpu_layers=gpu_layers)
GPU Layers: %s" % (input, system_message, os.getenv("GPU_LAYERS"))) + + # https://github.com/abetlen/llama-cpp-python + try: + print("LLM: ", llm) + + self.logger.info("[DEBUG] LLM: %s" % llm) + output = llm.create_chat_completion( + max_tokens=100, + messages = [ + { + "role": "system", + "content": system_message, + }, + { + "role": "user", + "content": input, + } + ] + ) + except Exception as e: + return { + "success": False, + "reason": f"Failed to run local LLM. Check logs in this execution for more info: {self.current_execution_id}", + "details": f"{e}" + } + + self.logger.info("[DEBUG] LLM output: %s" % output) + + new_message = "" + if "choices" in output and len(output["choices"]) > 0: + new_message = output["choices"][0]["message"]["content"] + + parsed_output = { + "success": True, + "model": output["model"], + "output": new_message, + } + + if "tokens" in output: + parsed_output["tokens"] = output["tokens"] + + if "usage" in output: + parsed_output["tokens"] = output["usage"] + + if not os.getenv("GPU_LAYERS"): + parsed_output["debug"] = "GPU_LAYERS not set. Running on CPU. Set GPU_LAYERS to the number of GPU layers to use (e.g. 8)." + + return parsed_output + + def security_assistant(self): + # Currently testing outside the Shuffle environment + # using assistants and local LLMs + + return "Not implemented" + + def shuffle_cloud_inference(self, apikey, text, formatting="auto"): + headers = { + "Authorization": "Bearer %s" % apikey, + } + + if not formatting: + formatting = "auto" + + output_formatting= "Format the following data to be a good email that can be sent to customers. Don't make it too business sounding." 
+ if formatting != "auto": + output_formatting = formatting + + ret = requests.post( + "https://shuffler.io/api/v1/conversation", + json={ + "query": text, + "formatting": output_formatting, + "output_format": "formatting" + }, + headers=headers, + ) + + if ret.status_code != 200: + print(ret.text) + return { + "success": False, + "reason": "Status code for auto-formatter is not 200" + } + + return ret.text + + def autoformat_text(self, apikey, text, formatting="auto"): + headers = { + "Authorization": "Bearer %s" % apikey, + } + + if not formatting: + formatting = "auto" + + output_formatting= "Format the following data to be a good email that can be sent to customers. Don't make it too business sounding." + if formatting != "auto": + output_formatting = formatting + + ret = requests.post( + "https://shuffler.io/api/v1/conversation", + json={ + "query": text, + "formatting": output_formatting, + "output_format": "formatting" + }, + headers=headers, + ) + + if ret.status_code != 200: + print(ret.text) + return { + "success": False, + "reason": "Status code for auto-formatter is not 200" + } + + return ret.text + + def generate_report(self, apikey, input_data, report_title, report_name="generated_report.html"): + headers = { + "Authorization": "Bearer %s" % apikey, + } + + if not report_name: + report_name = "generated_report.html" + + if "." in report_name and not ".html" in report_name: + report_name = report_name.split(".")[0] + + if not "html" in report_name: + report_name = report_name + ".html" + + report_name = report_name.replace(" ", "_", -1) + output_formatting= "Format the following text into an HTML report with relevant graphs and tables. Title of the report should be {report_title}." 
+ ret = requests.post( + "https://shuffler.io/api/v1/conversation", + json={ + "query": text, + "formatting": output_formatting, + "output_format": "formatting" + }, + headers=headers, + ) + + if ret.status_code != 200: + print(ret.text) + return { + "success": False, + "reason": "Status code for auto-formatter is not 200" + } + + # Make it into a shuffle file with self.set_files() + new_file = { + "name": report_name, + "data": ret.text, + } + + retdata = self.set_files([new_file]) + if retdata["success"]: + return retdata + + return { + "success": False, + "reason": "Failed to upload file" + } + + + def extract_text_from_pdf(self, file_id): + def extract_pdf_text(pdf_path): + with open(pdf_path, 'rb') as file: + pdf_reader = PyPDF2.PdfReader(file) + text = '' + for page in pdf_reader.pages: + text += page.extract_text() + + return text + + def extract_text_from_images(images): + text = '' + for image in images: + extracted_text = pytesseract.image_to_string(image, lang='eng') + text += extracted_text + return text + + def extract_text_from_pdf_with_images(pdf_path): + images = convert_from_path(pdf_path) + return extract_text_from_images(images) + + def export_text_to_json(image_text, extracted_text): + data = { + "success": True, + 'image_text': image_text, + 'extracted_text': extracted_text, + } + + #with open(output_path, 'w+') as file: + # json.dump(data, file, indent=4) + + return data + + pdf_data = self.get_file(file_id) + defaultdata = { + "success": False, + "file_id": file_id, + "filename": pdf_data["filename"], + "reason": "Something failed in reading and parsing the pdf. 
See error logs for more info", + } + + # Check type of pdf_data["data"] + if not isinstance(pdf_data["data"], bytes): + self.logger.info("Encoding data to bytes for the bytestream reader") + pdf_data["data"] = pdf_data["data"].encode() + + # Make a tempfile for the file data from self.get_file + # Make a tempfile with tempfile library + with tempfile.NamedTemporaryFile() as temp: + # Write the file data to the tempfile + # Get the path to the tempfile + temp.write(pdf_data["data"]) + pdf_path = temp.name + + # Extract text from the PDF + extracted_text_from_pdf = extract_pdf_text(pdf_path) + + # Extract text from the PDF using images + extracted_text_from_images = extract_text_from_pdf_with_images(pdf_path) + + # Combine the extracted text + + # Export combined text to JSON + #output_path = pdf_path.split(".")[0] + ".json" + exported_text = export_text_to_json(extracted_text_from_images, extracted_text_from_pdf) + exported_text["file_id"] = file_id + exported_text["filename"] = pdf_data["filename"] + return exported_text + + return defaultdata + + def extract_text_from_image(self, file_id): + # Check if it's a pdf + + pdf_data = self.get_file(file_id) + if "filename" not in pdf_data: + available_fields = [] + for key, value in pdf_data.items(): + available_fields.append(key) + + return { + "success": False, + "reason": "File not found", + "details": f"Available fields: {available_fields}", + } + + # If it is, use extract_text_from_pdf + # If it's not, use pytesseract + if pdf_data["filename"].endswith(".pdf"): + return self.extract_text_from_pdf(file_id) + + defaultdata = { + "success": False, + "file_id": file_id, + "filename": pdf_data["filename"], + "reason": "Something failed in reading and parsing the pdf. 
See error logs for more info", + } + + with tempfile.NamedTemporaryFile() as temp: + # Load temp as Image + # Write the file data to the tempfile + # Get the path to the tempfile + temp.write(pdf_data["data"]) + pdf_path = temp.name + + image = Image.open(temp.name) + image = image.resize((500,300)) + custom_config = r'-l eng --oem 3 --psm 6' + text = pytesseract.image_to_string(image,config=custom_config) + + data = { + "success": True, + 'extracted_text': text, + } + + return data + + return defaultdata + + def transcribe_audio(self, file_id): + return { + "success": False, + "reason": "Not implemented yet" + } + + def find_image_objects(self, file_id): + return { + "success": False, + "reason": "Not implemented yet" + } + + def gpt(self, input_text): + return { + "success": False, + "reason": "Not implemented yet" + } + + def run_agent(self, input_data, actions=None, apps=None): + prepared_format = { + "id": self.action["id"], + "params": { + "tool_name": self.action["app_name"], + "tool_id": self.action["app_id"], + "environment": self.action["environment"], + "input": { + "text": input_data, + } + }, + } + + if actions: + prepared_format["params"]["tool_name"] = actions + + if apps: + pass + + baseurl = f"{self.url}/api/v1/agent?execution_id={self.current_execution_id}&authorization={self.authorization}&action_id={self.action['id']}" + self.logger.info("[DEBUG] Running agent action with URL '%s'" % (baseurl)) + + headers = {} + request = requests.post( + baseurl, + json=prepared_format, + headers=headers, + ) + + # Random sleep timer to force delay + time.sleep(2) + # Gets into waiting state on backend + return json.dumps({ + "app_run": True, + "input_prompt": prepared_format, + "status": request.status_code, + "body": request.text, + }) + + def run_schemaless(self, category, action, app_name="", fields=""): + self.logger.info("[DEBUG] Running schemaless action with category '%s' and action label '%s'" % (category, action)) + + # Not necessary anymore + """ + 
action := shuffle.CategoryAction{ + Label: step.Name, + Category: step.Category, + AppName: step.AppName, + Fields: step.Fields, + + Environment: step.Environment, + + SkipWorkflow: true, + } + """ + + data = { + "label": action, + "category": category, + + "app_name": "", + "fields": [], + + "skip_workflow": True, + } + + if app_name: + data["app_name"] = app_name + + if fields: + if isinstance(fields, list): + data["fields"] = fields + + elif isinstance(fields, dict): + for key, value in fields.items(): + parsedvalue = str(value) + try: + if str(value).startswith("{") or str(value).startswith("["): + parsedvalue = json.dumps(value) + except: + pass + + data["fields"].append({ + "key": key, + "value": parsedvalue, + }) + + else: + fields = str(fields).strip() + # Valid format: + # {"field1": "value1", "field2": "value2"} + # field1=value1&field2=value2 + # field1:value1\nfield2:value2 + + cursplit = None + if "\\n" in fields and not fields.startswith("{") and not fields.startswith("["): + cursplit = "\\n" + elif ("=" in fields or ":" in fields) and not fields.startswith("{") and not fields.startswith("["): + cursplit = "&" + + if cursplit: + newfields = [] + for line in fields.split(cursplit): + splitkey = None + if "=" in line: + splitkey = "=" + elif ":" in line: + splitkey = ":" + + if splitkey: + parts = line.split(splitkey, 1) + newfields.append({ + "key": parts[0].strip(), + "value": splitkey.join(parts[1:]).strip(), + }) + + data["fields"] = newfields + else: + if not fields.startswith("{") and not fields.startswith("["): + fields = json.dumps({ + "data": fields, + }) + + try: + loadedfields = json.loads(fields) + for key, value in loadedfields.items(): + data["fields"].append({ + "key": key, + "value": value, + }) + + except Exception as e: + self.logger.info("[ERROR] Failed to load fields as JSON: %s" % e) + return json.dumps({ + "success": False, + "reason": "Ensure 'Fields' are valid JSON", + "details": "%s" % e, + }) + + #baseurl = 
"%s/api/v1/apps/categories/run" % self.base_url + baseurl = "%s/api/v1/apps/categories/run" % self.url + baseurl += "?execution_id=%s&authorization=%s" % (self.current_execution_id, self.authorization) + + self.logger.info("[DEBUG] Running schemaless action with URL '%s', category %s and action label %s" % (baseurl, category, action)) + + headers = {} + request = requests.post( + baseurl, + json=data, + headers=headers, + ) + + try: + if "parameters" in self.action: + response_headers = request.headers + for key, value in response_headers.items(): + if not str(key).lower().endswith("-url"): + continue + + self.action["parameters"].append({ + "name": key, + "value": value, + }) + + #self.logger.info("[DEBUG] Response header: %s: %s" % (key, value)) + except Exception as e: + self.logger.info("[ERROR] Failed to get response headers (category action url debug mapping): %s" % e) + + try: + data = request.json() + + #if "success" in data and "result" in data and "errors" in data: + # return data["result"] + + return data + except: + return request.text + +if __name__ == "__main__": + ShuffleAI.run() diff --git a/shuffle-ai/1.1.0/upload.sh b/shuffle-ai/1.1.0/upload.sh new file mode 100755 index 00000000..b449aa4b --- /dev/null +++ b/shuffle-ai/1.1.0/upload.sh @@ -0,0 +1,16 @@ +gcloud config set project shuffler + +gcloud beta run deploy shuffle-ai-1-0-0 \ + --project=shuffler \ + --region=europe-west4 \ + --source=./ \ + --max-instances=1 \ + --concurrency=64 \ + --gpu 1 --gpu-type=nvidia-l4 \ + --cpu 4 \ + --memory=16Gi \ + --no-cpu-throttling \ + --set-env-vars=MODEL_PATH=/models/DeepSeek-R1-Distill-Llama.gguf,GPU_LAYERS=64,SHUFFLE_APP_EXPOSED_PORT=8080,SHUFFLE_SWARM_CONFIG=run,SHUFFLE_LOGS_DISABLED=true,SHUFFLE_APP_SDK_TIMEOUT=300,LD_LIBRARY_PATH=/usr/local/lib:/usr/local/nvidia/lib64:$LD_LIBRARY_PATH \ + --source=./ \ + --service-account=shuffle-apps@shuffler.iam.gserviceaccount.com \ + --timeout=120s diff --git a/shuffle-subflow/1.0.0/api.yaml 
b/shuffle-subflow/1.0.0/api.yaml index 9d2642bb..4148c19a 100644 --- a/shuffle-subflow/1.0.0/api.yaml +++ b/shuffle-subflow/1.0.0/api.yaml @@ -58,4 +58,45 @@ actions: returns: schema: type: string + - name: run_userinput + description: Stops a workflow and notifies the right people + parameters: + - name: user_apikey + description: The apikey to connect back to the APIs + required: true + multiline: false + example: "apikey" + schema: + type: string + - name: sms + description: The numbers to send an sms to + required: false + multiline: false + example: "+474135212,+180241322" + schema: + type: string + - name: email + description: The emails to send an email to + required: false + multiline: false + example: "example@shuffler.io,test@test.com" + schema: + type: string + - name: subflow + description: The subflow IDs to start + required: false + multiline: false + example: "7944b41d-6200-4f28-8973-22ba52637bf0,4832b41d-6200-4f28-8973-22ba52637bf0" + schema: + type: string + - name: information + description: The information to send to the targets + required: false + multiline: true + example: "This is an argument using some liquid: {{ 1 + 2 }} " + schema: + type: string + returns: + schema: + type: string large_image: 
data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAIAAAD/gAIDAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAAAAB3RJTUUH4wgeDy4zYzmH5gAADkRJREFUeNrtXV1QG9cV3nt3V2AwkvgRrRE4nTFxMFKATGyLh1gONG8Rxn2qIQ8GOSYdfpy4rknATacPBRx7ak+d2K0dkDWTYvutMRB3nKllYZLaQD2B8GcPJBNwpIxBIPSDEbt39/ZhG+qglZDEjwT4e9Q9e38+3Xv2nHOPjgDGmHiGwADDPYG1hGdkBYFnZAWBZ2QFASrcEyAIgsAYcxyHEGIYhmVZnucJgoAQ0jQtkUgoiiJJEgAQ7mmGiSyEkMPhsFqtIyMjVqvVYvneYrFMTNgYhkHo/2RRFC2RSBSKJKVSqVSmpqRsSU9P37IlRS6XU1QYZr6qQ7pcrpGRke7urp6env7+fpvN5nQ6WBYBQEAICYLw3j6CZcPzPMYETVMymSwhIVGtfjErKys3V7NtW7pUKl21+YNVsLM8Hs/Q0JDJZOrouPPgwZDT6eR5niRJCGGwhwtjzPM8x3EQgrg4WUZGhlarzc/P37EjMzo6em2TNTU1ZTbfvn79emdnp9PpIAhiGbWPoOkIgpBKZbt37y4sLHz11bzExMS1R5bNZmtra7t69crg4ABCiKKoldPQGGOEEEmSKpW6qKhYp9MlJSWtDbLcbveNGzcMhqa+vq8xxqupiRFCAAC1+kW9Xv/667rNmzdHLlk8z//nP93nz583m80sy4TlhUUQBEKIpmmtdm9FReWuXbtIkow4sux2u9F4uampcXJykqYpggizWcSybHx8gl5/SK/XJyQkLEufy0NWX19fQ0N9e7sZACAYAZEA4Q2g1e6tqanNzs5eeodLJQshdOPGjfr6P42OjtI0HW5+RMCybFra1traWp2uYImaYUlkeTyexsbGc+f+MjPjXkbVsOzgOC42Nraq6sibbx7etGlTyP2ETpbL5T59+gOj0cDzOHKOni/wPA8hLCkpPX68Oi4uLrROQiTL6XTW1dU1N38CAIgEFzcQYIwxJoqL3zhx4vcyWShOUig7wu1219XVNTf/HcI1wxRBEAAACMGVK3+vq/uTy+UKoYegyfJ4PKdOfdDc/Ing+YabgeAXDOGVK82nT5/yeDxBPxuUNM/zjY0fG40GAMBaZEoAAITRePnSpYtCLChwBEdWa2vruXPneB6vodPnDQAAz/MfffTh9evXg3swcAXf19d3+PCbjx6NRbKVEDg4jlMqUy9d+jgnJyfARwLdWXb7VEND/ejod+uDKYIgSJJ89Gjs5MmGqampAB8JiCyMsdFobG83R6aNHjJomr5zp91gaApQeQVEVldXV1NT45rWU75AkqTB0NTZeS8Q4cXJcrvdFy6cn5ycjHwzPQQAAOx2+4ULF9xu96LCi6//xo3PzGYzTUfEpdlKgKbp9nZzS0vLopKLkGWz2QwGA8sya9eqCgQIIaPx8sTEuH+xRchqa2vr6/s6XDHPVQNFUQMD/a2trf7F/JE1OTl59eqVDZKThDG+du2azWbzI+OPLLPZPDg4sO63lQCKogYHB0wmkx8Zn2R5PJ6Wlk8RQuFexeqB47iWluuzs7O+BHySNTQ01NnZtUG2lQCKorq7uwcHB30J+CTLZDI5nY51aYj6AgDA6XSYTLd8CYiT5XK5OjruhHvyYQAAoKOjw+l0iraKkzUyMvLgwdC68ZkDB0mSDx8+HB4eFm0VJ6u7u8vpdG6oMyhAOIn37om7iiJkIYR6enqCjSKuG2CMe3t7WJb1bhIhy+Fw9Pf3b8AzKIAkyYGBgenpae8mEbKsVqvNZluXMYZAACG026csFotIk/dHIyMjG81oeBoAAIfD8c0333g3idicVqsVIS
SRSIIdBgvXmF5ji2aKBig5D57nMcZ+Ek9E+wykZ29wHGe1WgMiy2L5PliaCILgeS4uTuad2DkzMzM7++Snc8VSqVQiiVogyTCMyyVi4HAcR9P0z3++RSKRzM3NjY8/5jhuAWUY402bNsXGxhGEyHfAMHO+TCdfsFpFSFhIFkLIYrEEq7AQQhkZO+rq6lJSlDzPz6cdkyR55syfr127+nTwHmPi2LHjr7322tMvXIqiPv/88z/+8Q/eTKlU6oqKSo1GExMT43a7zWbz2bNnrFbL068glmX37/9VdfW7QpYpxgRBYGFD2e32+vq6L7/8IvBXFoRwbGxMyO5chKyJCVuAnQrgef7557d/8MGpl19+WRhJWKSQI7l5c+yC04ExoVAotm7dKqQeCx+SJOmdcsbzfGpq6pkzZ9VqtcfjmZmZUSgUxcXFiYmJ77zzttvtmt+wGOOYmNi0tDSOQwQBBF54nnc4HB9+eO7u3X8H+3K326dZll2ELIZhGIYJ6oQDACiKbGy89Ne/sjt37iovr3A4HGfPnhFuGIeGhnx5452dnZcu/Q0AgDGGED5+/Nj7m9Nq96rVarvdXl19/P79+zpdwdGjRzUaTWZm5t27X1LU/zYsTdNffNFRVvYmx3GpqWlHj/5WLpdfvHjxn//8rKenJ9iXFQBgbo5hGGZBftLCZbAsixAbRMcEAQB48ODB4OAAw7Acx5eXV8zNzd2+fWtoaIgkSZKkfB3q6enpnp7e+XHt9kkISa/OIUEQEokkKyvLarW2trbcv98VHR09PDz8tDCEcHT0u2+/HeE47oUXMsrLK4Qv4+7du6ElZCHEetulC8nieT4E250kSZIkhUSV//VLUTQt8b/58/Lybt78nPjRDvzNb8oW+FgURZlM/+rs7NRoNFVVR/T6Q6Ojo7du/au5+RObbXLBhoUQQiiBkKMo8sdPQMghJoQ4Qf39ZIjQ+lodQAitVutbb5WdPHnyq6++Qgjt2LGjquqI0fjJCy9keC9mpbGQeAjhqtnut2/frq2tEbYSy7IOh2PB0BzH/fKXr+XkvDQ2NvrGG8XJycn5+fnl5RUZGRmvv65b0bgIRZHenS8ki6bpea250pDJZFlZWQAQGBMQwvHx8a+/7v0pWfxzz/3i2LFjLpeLpumbN2+aTKZf//pAUlJSVFTQNnOQZNHeuQoLyZJIJBKJJOQbHUHhBPjyyc3N3bVr14+Toz799B9HjlT9dMbkZ5+1FRQUaDSaurr6t99+Jzo6WqFQ2Gw2k8m0cicAYxwVJfH2YRaSRVGUQhH6D1+ePHkyNjY2NTXJsj5vOgAgbDabxWJ5WulQFOXt6EMIx8cf/+53xyorq/bs2SOXyxFCnZ2dFy/+rbu7y9cZZFlksVgYhnny5EnIC4mPl3vvLJH8rNraGoOhKTTfUCKRyGRyjPH0tN23AsaCY/T00ACAuTmPqFPCcRxFUcnJP4uKiuI4bmpq0uVy+XnNkSQpl8cL/jDDzIUQEWAYpqSkpKHh1IJHRYZUKlOD7X1+wQzDjI8/Jn60430JOp0Oh2Pa+3HRhZEkiTH+4QerQC6E0L9BwHGccBEfciY1xkRKSqr3oyKjpqRsCdk8CXB+ISwjKA21RHVGUaRSqRTp1vujbdvSZTLZBrm19wbGWCaTbdu2zbtJhKyUlJSEhMQNG4PneT4+PiHQnSWXy9Vq9erbxxECjuNUKpVcLvduEiGLoqisrGwIN25YOTs7RzR7VlwRajSauLiNqLYwxlKpNDdXI9oqTlZ6enpGRhg81bCD47jt27enpz8v2ipOllQq1Wq14Z55GIAx1mr3ymQy0Vaf9kheXr5UurFOIsZYKpXl5eX7EvBJVmZm5u7duzdUMhtCaOfOnZmZKl8CPsmKjo7et69wQ13iQ0gWFu6PifEZhvbnFuTlvapSqTfI5kIIZWZm5uXl+ZHxR1ZiYtKBA0Ub5B4fAFBUVKRQKPzILOJw6nQFavWL635zIYRUKrVOV+BfbB
GyFIqk0tLSdZ+GS5LUwYMlycnJ/sUWD2XodAV7974qmty1PsCyrFar3bdv36KSi5O1efPm8vKK+PiEdWlzYYzj4xMqKioDKfYQUJBMo9Ho9fp16f1wHFdSUpqbmxuIcEBkQQj1+kNa7d51dhhZln3llT2HDh0KMLIaaPg1ISHhvfdq0tK2rpv9JaSQ1NTUBl75LohYdU5OTk1NbWxs7DoIovI8HxMT+957NS+99FLgTwUX2C8sLKyqOgIhXNPKXki3rKys3L9/f1APBkcWhPDw4bKSktK1zBWBMT54sKSs7K1gPd+gr4yio6OPH68uLn5jjR5GnucPHCiurn43hLytUEzzuLi4Eyd+TxDElSvNABBrxXkU0pkPHCh+//33Q6ubu5RiY67Tp08ZjZeFOl7hpmIR8DwPADh4sKS6+t2QKwwvqYzd7Ozsxx9f+uijD2dmZiI58sVxXExMbGVlZVnZW+EpYycAIdTW1lZfX/fo0VhkFl9hWVawp/bt2xfOAonz6O3tbWiov3OnPUIKtwsQSm++8sqe2toTgZcy8oNlK+o6NTVlMDQZDE12uz0StphQ1LW0tFSvP7Rc1amXs1wwx3FdXV0XLpxvbzd7/zxh1YAQIklKq9VWVlbu3q2JxHLB83C73a2trZcvGwYG+sNSiFqlUpeUlOp0umWvq79SJc4nJiba2lqvXr06ODggpO6tdIlzCMnMzMyioqKCggKFInnp3a4eWQImJydNplstLS1dXV3CbxiXvXi+cDO6c+fOwsL9+fn5K1QJfjXIEjA7Ozs4OGgy3ero6Hj48KHT6cAYL/FvGQAAUql0+/btWu3evLx8lUq1FAMqgsiah9PpHB4evnfvXm9v78BAv90+5XA4EOIC/MMPiiJlMll8fIJKpc7Ozs7NzU1PT/eVl7DmyZoHy7LT09NWq2Vk5Bur1WK1WsbGxuz26bk5BiEWIY4gCIoiKYqOipLEx8vT0tJSUlKVSmV6+raUFKVcLg+LdRIeshZA0D4sywp/UiQEY0mSFP6kiKbpCLF1I4KstYJIjxZEFJ6RFQSekRUEnpEVBP4LiQWypqHC6doAAAAldEVYdGRhdGU6Y3JlYXRlADIwMTktMDgtMzBUMTU6NDc6MjQtMDQ6MDCzXTa0AAAAJXRFWHRkYXRlOm1vZGlmeQAyMDE5LTA4LTMwVDE1OjQ2OjUxLTA0OjAwdT/DiAAAAABJRU5ErkJggg== diff --git a/shuffle-subflow/1.0.0/requirements.txt b/shuffle-subflow/1.0.0/requirements.txt index fd7d3e06..41daa615 100644 --- a/shuffle-subflow/1.0.0/requirements.txt +++ b/shuffle-subflow/1.0.0/requirements.txt @@ -1 +1,2 @@ -requests==2.25.1 \ No newline at end of file +requests==2.32.4 +shuffle-sdk diff --git a/shuffle-subflow/1.0.0/src/app.py b/shuffle-subflow/1.0.0/src/app.py index 6abdd21f..adcb13cd 100644 --- a/shuffle-subflow/1.0.0/src/app.py +++ b/shuffle-subflow/1.0.0/src/app.py @@ -1,11 +1,7 @@ -import asyncio -import time -import random import json import requests -import json -from walkoff_app_sdk.app_base import AppBase +from shuffle_sdk import AppBase class Subflow(AppBase): """ @@ -25,18 +21,132 @@ def __init__(self, redis, logger, 
console_logger=None): super().__init__(redis, logger, console_logger) # Should run user input - #def run_userinput(self, sms, email, subflow, argument): - # url = "%s/api/v1/workflows/%s/execute" % (self.url, workflow) + def run_userinput(self, user_apikey, sms="", email="", subflow="", information="", startnode="", backend_url="", source_node=""): + #url = "%s/api/v1/workflows/%s/execute" % (self.url, workflow) + + headers = { + "Authorization": "Bearer %s" % user_apikey, + "User-Agent": "Shuffle Userinput 1.1.0" + } + + result = { + "success": True, + "source": "userinput", + "reason": "Userinput data sent and workflow paused. Waiting for user input before continuing workflow.", + "information": information, + "click_info": { + "clicked": False, + "time": "", + "ip": "", + "user": "", + "note": "", + } + } - # if len(sms) > 0: + url = self.url + if len(self.base_url) > 0: + url = self.base_url - def run_subflow(self, user_apikey, workflow, argument, source_workflow="", source_execution="", source_node="", source_auth="", startnode="", backend_url=""): + if len(str(backend_url)) > 0: + url = backend_url + + print("Found backend url: %s" % url) + #if len(information): + # print("Should run arg: %s", information) + + if len(subflow): + #print("Should run subflow: %s", subflow) + + # Missing startnode (user input trigger) + #print("Subflows to run from userinput: ", subflows) + + subflows = subflow.split(",") + frontend_url = url + if ":5001" in frontend_url: + print("Should change port to 3001.") + if "appspot.com" in frontend_url: + frontend_url = "https://shuffler.io" + + for item in subflows: + # In case of URL being passed, and not just ID + if "/" in item: + item = item.split("/")[-1] + + # Subflow should be the subflow to run + # Workflow in the URL should be the source workflow + argument = json.dumps({ + "information": information, + "parent_workflow": self.full_execution["workflow"]["id"], + "frontend_continue": 
"%s/forms/%s?authorization=%s&reference_execution=%s&answer=true" % (frontend_url, self.full_execution["workflow"]["id"], self.full_execution["authorization"], self.full_execution["execution_id"]), + "frontend_abort": "%s/forms/%s?authorization=%s&reference_execution=%s&answer=false" % (frontend_url, self.full_execution["workflow"]["id"], self.full_execution["authorization"], self.full_execution["execution_id"]), + "api_continue": "%s/api/v1/workflows/%s/execute?authorization=%s&reference_execution=%s&answer=true" % (frontend_url, self.full_execution["workflow"]["id"], self.full_execution["authorization"], self.full_execution["execution_id"]), + "api_abort": "%s/api/v1/workflows/%s/execute?authorization=%s&reference_execution=%s&answer=false" % (frontend_url, self.full_execution["workflow"]["id"], self.full_execution["authorization"], self.full_execution["execution_id"]), + }) + + ret = self.run_subflow(user_apikey, item, argument, source_workflow=self.full_execution["workflow"]["id"], source_execution=self.full_execution["execution_id"], source_auth=self.full_execution["authorization"], startnode=startnode, backend_url=backend_url, source_node=source_node) + result["subflow"] = ret + result["subflow_url"] = "%s/workflows/%s" % (frontend_url, item) + + if len(email): + jsondata = { + "targets": [], + "body": information, + "subject": "User input required", + "type": "User input", + "start": startnode, + "workflow_id": self.full_execution["workflow"]["id"], + "reference_execution": self.full_execution["execution_id"], + "authorization": self.full_execution["authorization"], + } + + for item in email.split(","): + jsondata["targets"].append(item.strip()) + + print("Should run email with targets: %s", jsondata["targets"]) + + ret = requests.post("%s/api/v1/functions/sendmail" % url, json=jsondata, headers=headers) + if ret.status_code != 200: + print("Failed sending email. 
Data: %s" % ret.text) + result["email"] = False + else: + result["email"] = True + + if len(sms) > 0: + print("Should run SMS: %s", sms) + + jsondata = { + "numbers": [], + "body": information, + "type": "User input", + "start": startnode, + "workflow_id": self.full_execution["workflow"]["id"], + "reference_execution": self.full_execution["execution_id"], + "authorization": self.full_execution["authorization"], + } + + for item in sms.split(","): + jsondata["numbers"].append(item.strip()) + + print("Should send sms with targets: %s", jsondata["numbers"]) + + ret = requests.post("%s/api/v1/functions/sendsms" % url, json=jsondata, headers=headers) + if ret.status_code != 200: + print("Failed sending email. Data: %s" % ret.text) + result["sms"] = False + else: + result["sms"] = True + + + + return json.dumps(result) + + def run_subflow(self, user_apikey, workflow, argument, source_workflow="", source_execution="", source_node="", source_auth="", startnode="", backend_url="", auth_override=""): #print("STARTNODE: %s" % startnode) url = "%s/api/v1/workflows/%s/execute" % (self.url, workflow) + if len(self.base_url) > 0: + url = "%s/api/v1/workflows/%s/execute" % (self.base_url, workflow) - params = { - "User-Agent": "Subflow 1.0.0" - } + params = {} if len(str(source_workflow)) > 0: params["source_workflow"] = source_workflow else: @@ -62,15 +172,24 @@ def run_subflow(self, user_apikey, workflow, argument, source_workflow="", sourc else: print("No startnode") + if len(self.full_execution["execution_id"]) > 0 and self.full_execution["execution_id"] != source_execution: + params["source_execution"] = self.full_execution["execution_id"] + + if len(self.full_execution["authorization"]) > 0 and self.full_execution["authorization"] != source_auth: + params["source_auth"] = self.full_execution["authorization"] + if len(str(backend_url)) > 0: url = "%s/api/v1/workflows/%s/execute" % (backend_url, workflow) print("[INFO] Changed URL to %s for this execution" % url) - headers = { 
"Authorization": "Bearer %s" % user_apikey, + "User-Agent": "Shuffle Subflow 1.0.0" } + if len(auth_override) > 0: + headers["appauth"] = auth_override + if len(str(argument)) == 0: ret = requests.post(url, headers=headers, params=params) else: diff --git a/microsoft-identity-and-access/1.0.0/Dockerfile b/shuffle-subflow/1.1.0/Dockerfile similarity index 100% rename from microsoft-identity-and-access/1.0.0/Dockerfile rename to shuffle-subflow/1.1.0/Dockerfile diff --git a/shuffle-subflow/1.1.0/api.yaml b/shuffle-subflow/1.1.0/api.yaml new file mode 100644 index 00000000..0c64bda1 --- /dev/null +++ b/shuffle-subflow/1.1.0/api.yaml @@ -0,0 +1,102 @@ +app_version: 1.1.0 +name: Shuffle Subflow +description: The Shuffle Subflow app +tags: + - Trigger +categories: + - Trigger +contact_info: + name: "@frikkylikeme" + url: https://shuffler.io + email: frikky@shuffler.io +actions: + - name: run_subflow + description: Executes a subflow + parameters: + - name: user_apikey + description: The apikey to use + required: true + multiline: false + example: "REPEATING: Hello world" + schema: + type: string + - name: workflow + description: The Workflow to execute + required: true + multiline: false + example: "REPEATING: Hello world" + schema: + type: string + - name: execution_argument + description: The execution_argument + required: true + multiline: true + example: "REPEATING: Hello world" + schema: + type: string + - name: startnode + description: + required: false + multiline: false + example: "" + schema: + type: string + - name: source_workflow + description: + required: false + multiline: false + example: "" + schema: + type: string + - name: source_execution + description: + required: false + multiline: false + example: "" + schema: + type: string + returns: + schema: + type: string + - name: run_userinput + description: Stops a workflow and notifies the right people + parameters: + - name: user_apikey + description: The apikey to connect back to the APIs + required: true 
+ multiline: false + example: "apikey" + schema: + type: string + - name: sms + description: The numbers to send an sms to + required: false + multiline: false + example: "+474135212,+180241322" + schema: + type: string + - name: email + description: The emails to send an email to + required: false + multiline: false + example: "example@shuffler.io,test@test.com" + schema: + type: string + - name: subflow + description: The subflow IDs to start + required: false + multiline: false + example: "7944b41d-6200-4f28-8973-22ba52637bf0,4832b41d-6200-4f28-8973-22ba52637bf0" + schema: + type: string + - name: information + description: The information to send to the targets + required: false + multiline: true + example: "This is an argument using some liquid: {{ 1 + 2 }} " + schema: + type: string + returns: + schema: + type: string +large_image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAIAAAD/gAIDAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAAAAB3RJTUUH4wgeDy4zYzmH5gAADkRJREFUeNrtXV1QG9cV3nt3V2AwkvgRrRE4nTFxMFKATGyLh1gONG8Rxn2qIQ8GOSYdfpy4rknATacPBRx7ak+d2K0dkDWTYvutMRB3nKllYZLaQD2B8GcPJBNwpIxBIPSDEbt39/ZhG+qglZDEjwT4e9Q9e38+3Xv2nHOPjgDGmHiGwADDPYG1hGdkBYFnZAWBZ2QFASrcEyAIgsAYcxyHEGIYhmVZnucJgoAQ0jQtkUgoiiJJEgAQ7mmGiSyEkMPhsFqtIyMjVqvVYvneYrFMTNgYhkHo/2RRFC2RSBSKJKVSqVSmpqRsSU9P37IlRS6XU1QYZr6qQ7pcrpGRke7urp6env7+fpvN5nQ6WBYBQEAICYLw3j6CZcPzPMYETVMymSwhIVGtfjErKys3V7NtW7pUKl21+YNVsLM8Hs/Q0JDJZOrouPPgwZDT6eR5niRJCGGwhwtjzPM8x3EQgrg4WUZGhlarzc/P37EjMzo6em2TNTU1ZTbfvn79emdnp9PpIAhiGbWPoOkIgpBKZbt37y4sLHz11bzExMS1R5bNZmtra7t69crg4ABCiKKoldPQGGOEEEmSKpW6qKhYp9MlJSWtDbLcbveNGzcMhqa+vq8xxqupiRFCAAC1+kW9Xv/667rNmzdHLlk8z//nP93nz583m80sy4TlhUUQBEKIpmmtdm9FReWuXbtIkow4sux2u9F4uampcXJykqYpggizWcSybHx8gl5/SK/XJyQkLEufy0NWX19fQ0N9e7sZACAYAZEA4Q2g1e6tqanNzs5eeodLJQshdOPGjfr6P42OjtI0HW5+RMCybFra1traWp2uYImaYUlkeTyexsbGc+f+MjPjXkbVsOzgOC42Nraq6sibbx7etGlTyP2ETpbL5T59+gOj0cDzOHKOni/wPA8hLCkpPX68Oi4uLrROQiTL6XTW1dU1N38C
AIgEFzcQYIwxJoqL3zhx4vcyWShOUig7wu1219XVNTf/HcI1wxRBEAAACMGVK3+vq/uTy+UKoYegyfJ4PKdOfdDc/Ing+YabgeAXDOGVK82nT5/yeDxBPxuUNM/zjY0fG40GAMBaZEoAAITRePnSpYtCLChwBEdWa2vruXPneB6vodPnDQAAz/MfffTh9evXg3swcAXf19d3+PCbjx6NRbKVEDg4jlMqUy9d+jgnJyfARwLdWXb7VEND/ejod+uDKYIgSJJ89Gjs5MmGqampAB8JiCyMsdFobG83R6aNHjJomr5zp91gaApQeQVEVldXV1NT45rWU75AkqTB0NTZeS8Q4cXJcrvdFy6cn5ycjHwzPQQAAOx2+4ULF9xu96LCi6//xo3PzGYzTUfEpdlKgKbp9nZzS0vLopKLkGWz2QwGA8sya9eqCgQIIaPx8sTEuH+xRchqa2vr6/s6XDHPVQNFUQMD/a2trf7F/JE1OTl59eqVDZKThDG+du2azWbzI+OPLLPZPDg4sO63lQCKogYHB0wmkx8Zn2R5PJ6Wlk8RQuFexeqB47iWluuzs7O+BHySNTQ01NnZtUG2lQCKorq7uwcHB30J+CTLZDI5nY51aYj6AgDA6XSYTLd8CYiT5XK5OjruhHvyYQAAoKOjw+l0iraKkzUyMvLgwdC68ZkDB0mSDx8+HB4eFm0VJ6u7u8vpdG6oMyhAOIn37om7iiJkIYR6enqCjSKuG2CMe3t7WJb1bhIhy+Fw9Pf3b8AzKIAkyYGBgenpae8mEbKsVqvNZluXMYZAACG026csFotIk/dHIyMjG81oeBoAAIfD8c0333g3idicVqsVISSRSIIdBgvXmF5ji2aKBig5D57nMcZ+Ek9E+wykZ29wHGe1WgMiy2L5PliaCILgeS4uTuad2DkzMzM7++Snc8VSqVQiiVogyTCMyyVi4HAcR9P0z3++RSKRzM3NjY8/5jhuAWUY402bNsXGxhGEyHfAMHO+TCdfsFpFSFhIFkLIYrEEq7AQQhkZO+rq6lJSlDzPz6cdkyR55syfr127+nTwHmPi2LHjr7322tMvXIqiPv/88z/+8Q/eTKlU6oqKSo1GExMT43a7zWbz2bNnrFbL068glmX37/9VdfW7QpYpxgRBYGFD2e32+vq6L7/8IvBXFoRwbGxMyO5chKyJCVuAnQrgef7557d/8MGpl19+WRhJWKSQI7l5c+yC04ExoVAotm7dKqQeCx+SJOmdcsbzfGpq6pkzZ9VqtcfjmZmZUSgUxcXFiYmJ77zzttvtmt+wGOOYmNi0tDSOQwQBBF54nnc4HB9+eO7u3X8H+3K326dZll2ELIZhGIYJ6oQDACiKbGy89Ne/sjt37iovr3A4HGfPnhFuGIeGhnx5452dnZcu/Q0AgDGGED5+/Nj7m9Nq96rVarvdXl19/P79+zpdwdGjRzUaTWZm5t27X1LU/zYsTdNffNFRVvYmx3GpqWlHj/5WLpdfvHjxn//8rKenJ9iXFQBgbo5hGGZBftLCZbAsixAbRMcEAQB48ODB4OAAw7Acx5eXV8zNzd2+fWtoaIgkSZKkfB3q6enpnp7e+XHt9kkISa/OIUEQEokkKyvLarW2trbcv98VHR09PDz8tDCEcHT0u2+/HeE47oUXMsrLK4Qv4+7du6ElZCHEetulC8nieT4E250kSZIkhUSV//VLUTQt8b/58/Lybt78nPjRDvzNb8oW+FgURZlM/+rs7NRoNFVVR/T6Q6Ojo7du/au5+RObbXLBhoUQQiiBkKMo8sdPQMghJoQ4Qf39ZIjQ+lodQAitVutbb5WdPHnyq6++Qgjt2LGjquqI0fjJCy9keC9mpbGQeAjhqtnut2/frq2tEbYSy7IOh2PB0BzH/fKXr+XkvDQ2NvrGG8XJycn5+fnl5RUZGRmvv65b0bgIRZHenS8ki6bpea250pDJZFlZWQAQGBMQwvHx8a+/7v0pWfxzz/3i2LFjLpeL
pumbN2+aTKZf//pAUlJSVFTQNnOQZNHeuQoLyZJIJBKJJOQbHUHhBPjyyc3N3bVr14+Toz799B9HjlT9dMbkZ5+1FRQUaDSaurr6t99+Jzo6WqFQ2Gw2k8m0cicAYxwVJfH2YRaSRVGUQhH6D1+ePHkyNjY2NTXJsj5vOgAgbDabxWJ5WulQFOXt6EMIx8cf/+53xyorq/bs2SOXyxFCnZ2dFy/+rbu7y9cZZFlksVgYhnny5EnIC4mPl3vvLJH8rNraGoOhKTTfUCKRyGRyjPH0tN23AsaCY/T00ACAuTmPqFPCcRxFUcnJP4uKiuI4bmpq0uVy+XnNkSQpl8cL/jDDzIUQEWAYpqSkpKHh1IJHRYZUKlOD7X1+wQzDjI8/Jn60430JOp0Oh2Pa+3HRhZEkiTH+4QerQC6E0L9BwHGccBEfciY1xkRKSqr3oyKjpqRsCdk8CXB+ISwjKA21RHVGUaRSqRTp1vujbdvSZTLZBrm19wbGWCaTbdu2zbtJhKyUlJSEhMQNG4PneT4+PiHQnSWXy9Vq9erbxxECjuNUKpVcLvduEiGLoqisrGwIN25YOTs7RzR7VlwRajSauLiNqLYwxlKpNDdXI9oqTlZ6enpGRhg81bCD47jt27enpz8v2ipOllQq1Wq14Z55GIAx1mr3ymQy0Vaf9kheXr5UurFOIsZYKpXl5eX7EvBJVmZm5u7duzdUMhtCaOfOnZmZKl8CPsmKjo7et69wQ13iQ0gWFu6PifEZhvbnFuTlvapSqTfI5kIIZWZm5uXl+ZHxR1ZiYtKBA0Ub5B4fAFBUVKRQKPzILOJw6nQFavWL635zIYRUKrVOV+BfbBGyFIqk0tLSdZ+GS5LUwYMlycnJ/sUWD2XodAV7974qmty1PsCyrFar3bdv36KSi5O1efPm8vKK+PiEdWlzYYzj4xMqKioDKfYQUJBMo9Ho9fp16f1wHFdSUpqbmxuIcEBkQQj1+kNa7d51dhhZln3llT2HDh0KMLIaaPg1ISHhvfdq0tK2rpv9JaSQ1NTUBl75LohYdU5OTk1NbWxs7DoIovI8HxMT+957NS+99FLgTwUX2C8sLKyqOgIhXNPKXki3rKys3L9/f1APBkcWhPDw4bKSktK1zBWBMT54sKSs7K1gPd+gr4yio6OPH68uLn5jjR5GnucPHCiurn43hLytUEzzuLi4Eyd+TxDElSvNABBrxXkU0pkPHCh+//33Q6ubu5RiY67Tp08ZjZeFOl7hpmIR8DwPADh4sKS6+t2QKwwvqYzd7Ozsxx9f+uijD2dmZiI58sVxXExMbGVlZVnZW+EpYycAIdTW1lZfX/fo0VhkFl9hWVawp/bt2xfOAonz6O3tbWiov3OnPUIKtwsQSm++8sqe2toTgZcy8oNlK+o6NTVlMDQZDE12uz0StphQ1LW0tFSvP7Rc1amXs1wwx3FdXV0XLpxvbzd7/zxh1YAQIklKq9VWVlbu3q2JxHLB83C73a2trZcvGwYG+sNSiFqlUpeUlOp0umWvq79SJc4nJiba2lqvXr06ODggpO6tdIlzCMnMzMyioqKCggKFInnp3a4eWQImJydNplstLS1dXV3CbxiXvXi+cDO6c+fOwsL9+fn5K1QJfjXIEjA7Ozs4OGgy3ero6Hj48KHT6cAYL/FvGQAAUql0+/btWu3evLx8lUq1FAMqgsiah9PpHB4evnfvXm9v78BAv90+5XA4EOIC/MMPiiJlMll8fIJKpc7Ozs7NzU1PT/eVl7DmyZoHy7LT09NWq2Vk5Bur1WK1WsbGxuz26bk5BiEWIY4gCIoiKYqOipLEx8vT0tJSUlKVSmV6+raUFKVcLg+LdRIeshZA0D4sywp/UiQEY0mSFP6kiKbpCLF1I4KstYJIjxZEFJ6RFQSekRUEnpEVBP4LiQWypqHC6doAAAAldEVYdGRhdGU6Y3JlYXRlADIwMTktMDgtMzBUMTU6NDc6MjQtMDQ6MDCzXTa0AAAA
JXRFWHRkYXRlOm1vZGlmeQAyMDE5LTA4LTMwVDE1OjQ2OjUxLTA0OjAwdT/DiAAAAABJRU5ErkJggg== diff --git a/shuffle-subflow/1.1.0/requirements.txt b/shuffle-subflow/1.1.0/requirements.txt new file mode 100644 index 00000000..41daa615 --- /dev/null +++ b/shuffle-subflow/1.1.0/requirements.txt @@ -0,0 +1,2 @@ +requests==2.32.4 +shuffle-sdk diff --git a/testing/1.0.0/run b/shuffle-subflow/1.1.0/run similarity index 100% rename from testing/1.0.0/run rename to shuffle-subflow/1.1.0/run diff --git a/shuffle-subflow/1.1.0/src/app.py b/shuffle-subflow/1.1.0/src/app.py new file mode 100644 index 00000000..f51ace59 --- /dev/null +++ b/shuffle-subflow/1.1.0/src/app.py @@ -0,0 +1,242 @@ +import json +import requests + +from shuffle_sdk import AppBase + +class Subflow(AppBase): + """ + An example of a Walkoff App. + Inherit from the AppBase class to have Redis, logging, and console logging set up behind the scenes. + """ + __version__ = "1.1.0" + app_name = "subflow" # this needs to match "name" in api.yaml + + def __init__(self, redis, logger, console_logger=None): + """ + Each app should have this __init__ to set up Redis and logging. + :param redis: + :param logger: + :param console_logger: + """ + super().__init__(redis, logger, console_logger) + + # Should run user input + def run_userinput(self, user_apikey, sms="", email="", subflow="", information="", startnode="", backend_url="", source_node=""): + #url = "%s/api/v1/workflows/%s/execute" % (self.url, workflow) + + headers = { + "Authorization": "Bearer %s" % user_apikey, + "User-Agent": "Shuffle Userinput 1.1.0", + } + + + result = { + "success": True, + "source": "userinput", + "reason": "Userinput data sent and workflow paused. 
Waiting for user input before continuing workflow.", + "information": information, + "click_info": { + "clicked": False, + "time": "", + "ip": "", + "user": "", + "note": "", + }, + "links": { + "frontend_no_answer": "", + "api_continue": "", + "api_abort": "", + } + } + + url = self.url + if len(self.base_url) > 0: + url = self.base_url + + if len(str(backend_url)) > 0: + url = backend_url + + frontend_url = url + if ":5001" in frontend_url: + print("Should change port to 3001.") + if "appspot.com" in frontend_url: + frontend_url = "https://shuffler.io" + if "run.app" in frontend_url: + frontend_url = "https://shuffler.io" + if "ngrok" in frontend_url: + frontend_url = "" + if "shuffle-backend" in frontend_url: + frontend_url = "" + + api_continue_url = "%s/api/v1/workflows/%s/execute?authorization=%s&reference_execution=%s&answer=true&source_node=%s" % (frontend_url, self.full_execution["workflow"]["id"], self.full_execution["authorization"], self.full_execution["execution_id"], source_node) + api_abort_url = "%s/api/v1/workflows/%s/execute?authorization=%s&reference_execution=%s&answer=false&source_node=%s" % (frontend_url, self.full_execution["workflow"]["id"], self.full_execution["authorization"], self.full_execution["execution_id"], source_node) + + # Remove subdomain before .shuffler.io so that https://*.shuffler.io -> https://shuffler.io + if ".shuffler.io" in frontend_url: + frontend_url = "https://shuffler.io" + + explore_path = "%s/forms/%s?authorization=%s&reference_execution=%s&source_node=%s&backend_url=%s" % (frontend_url, self.full_execution["workflow"]["id"], self.full_execution["authorization"], self.full_execution["execution_id"], source_node, backend_url) + frontend_continue_url = "%s/forms/%s?authorization=%s&reference_execution=%s&answer=true&source_node=%s&backend_url=%s" % (frontend_url, self.full_execution["workflow"]["id"], self.full_execution["authorization"], self.full_execution["execution_id"], source_node, backend_url) + 
frontend_abort_url = "%s/forms/%s?authorization=%s&reference_execution=%s&answer=false&source_node=%s&backend_url=%s" % (frontend_url, self.full_execution["workflow"]["id"], self.full_execution["authorization"], self.full_execution["execution_id"], source_node, backend_url) + + result["links"]["frontend_no_answer"] = explore_path + result["links"]["frontend_continue"] = frontend_continue_url + result["links"]["frontend_abort"] = frontend_abort_url + result["links"]["api_continue"] = api_continue_url + result["links"]["api_abort"] = api_abort_url + + print("Found backend url: %s" % url) + if len(subflow) > 0: + + subflows = subflow.split(",") + for item in subflows: + # In case of URL being passed, and not just ID + if "/" in item: + item = item.split("/")[-1] + + # Subflow should be the subflow to run + # Workflow in the URL should be the source workflow + argument = json.dumps({ + "information": information, + "parent_workflow": self.full_execution["workflow"]["id"], + "frontend_continue": frontend_continue_url, + "frontend_abort": frontend_abort_url, + "api_continue": api_continue_url, + "api_abort": api_abort_url, + }) + + ret = self.run_subflow(user_apikey, item, argument, source_workflow=self.full_execution["workflow"]["id"], source_execution=self.full_execution["execution_id"], source_auth=self.full_execution["authorization"], startnode=startnode, backend_url=backend_url, source_node=source_node) + result["subflow"] = ret + result["subflow_url"] = "%s/workflows/%s" % (frontend_url, item) + + if len(email): + jsondata = { + "targets": [], + "body": information, + "subject": "User input required", + "type": "User input", + "start": startnode, + "workflow_id": self.full_execution["workflow"]["id"], + "reference_execution": self.full_execution["execution_id"], + "authorization": self.full_execution["authorization"], + } + + for item in email.split(","): + jsondata["targets"].append(item.strip()) + + print("Should run email with targets: %s", jsondata["targets"]) 
+ + ret = requests.post("%s/api/v1/functions/sendmail" % url, json=jsondata, headers=headers, verify=False, proxies=self.proxy_config) + if ret.status_code != 200: + print("Failed sending email. Data: %s" % ret.text) + result["email"] = False + else: + result["email"] = True + + if len(sms) > 0: + print("Should run SMS: %s", sms) + + jsondata = { + "numbers": [], + "body": information, + "type": "User input", + "start": startnode, + "workflow_id": self.full_execution["workflow"]["id"], + "reference_execution": self.full_execution["execution_id"], + "authorization": self.full_execution["authorization"], + } + + for item in sms.split(","): + jsondata["numbers"].append(item.strip()) + + print("Should send sms with targets: %s", jsondata["numbers"]) + + ret = requests.post("%s/api/v1/functions/sendsms" % url, json=jsondata, headers=headers, verify=False, proxies=self.proxy_config) + if ret.status_code != 200: + print("Failed sending email. Data: %s" % ret.text) + result["sms"] = False + else: + result["sms"] = True + + + + return json.dumps(result) + + def run_subflow(self, user_apikey, workflow, argument, source_workflow="", source_execution="", source_node="", source_auth="", startnode="", backend_url="", check_result="", auth_override=""): + #print("STARTNODE: %s" % startnode) + url = "%s/api/v1/workflows/%s/execute" % (self.url, workflow) + if len(self.base_url) > 0: + url = "%s/api/v1/workflows/%s/execute" % (self.base_url, workflow) + + params = {} + if len(str(source_workflow)) > 0: + params["source_workflow"] = source_workflow + else: + print("No source workflow") + + if len(str(source_auth)) > 0: + params["source_auth"] = source_auth + else: + print("No source auth") + + if len(str(source_node)) > 0: + params["source_node"] = source_node + else: + print("No source node") + + if len(str(source_execution)) > 0: + params["source_execution"] = source_execution + else: + print("No source execution") + + if len(str(startnode)) > 0: + params["start"] = startnode + 
else: + print("No startnode") + + if len(self.full_execution["execution_id"]) > 0 and self.full_execution["execution_id"] != source_execution: + params["source_execution"] = self.full_execution["execution_id"] + + if len(self.full_execution["authorization"]) > 0 and self.full_execution["authorization"] != source_auth: + params["source_auth"] = self.full_execution["authorization"] + + if len(str(backend_url)) > 0: + url = "%s/api/v1/workflows/%s/execute" % (backend_url, workflow) + print("[INFO] Changed URL to %s for this execution" % url) + + headers = { + "Authorization": "Bearer %s" % user_apikey, + "User-Agent": "Shuffle Subflow 1.1.0" + } + + if len(auth_override) > 0: + headers["appauth"] = auth_override + + if len(str(argument)) == 0: + ret = requests.post(url, headers=headers, params=params, verify=False, proxies=self.proxy_config) + else: + if not isinstance(argument, list) and not isinstance(argument, dict): + try: + argument = json.loads(argument) + except: + pass + + #print(f"ARG: {argument}") + try: + ret = requests.post(url, headers=headers, params=params, json=argument, verify=False, proxies=self.proxy_config) + print(f"Successfully sent argument of length {len(str(argument))} as JSON") + except: + try: + ret = requests.post(url, headers=headers, json=argument, params=params, verify=False, proxies=self.proxy_config) + print("Successfully sent as JSON (2)") + except: + ret = requests.post(url, headers=headers, data=argument, params=params, verify=False, proxies=self.proxy_config) + print("Successfully sent as data (3)") + + print("Status: %d" % ret.status_code) + print("RET: %s" % ret.text) + + return ret.text + +if __name__ == "__main__": + Subflow.run() diff --git a/shuffle-tools/1.0.0/Dockerfile b/shuffle-tools/1.0.0/Dockerfile deleted file mode 100644 index 5c1a8af4..00000000 --- a/shuffle-tools/1.0.0/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# Base our app image off of the WALKOFF App SDK image -FROM frikky/shuffle:app_sdk as base - -# We're 
going to stage away all of the bloat from the build tools so lets create a builder stage -FROM base as builder - -# Install all alpine build tools needed for our pip installs -RUN apk --no-cache add --update alpine-sdk libffi libffi-dev musl-dev openssl-dev git - -# Install all of our pip packages in a single directory that we can copy to our base image later -RUN mkdir /install -WORKDIR /install -COPY requirements.txt /requirements.txt -RUN pip install --no-cache-dir --prefix="/install" -r /requirements.txt - -# Switch back to our base image and copy in all of our built packages and source code -FROM base -COPY --from=builder /install /usr/local -COPY src /app - -# Install any binary dependencies needed in our final image -# RUN apk --no-cache add --update my_binary_dependency -RUN apk --no-cache add jq git curl - -# Finally, lets run our app! -WORKDIR /app -CMD ["python", "app.py", "--log-level", "DEBUG"] diff --git a/shuffle-tools/1.0.0/api.yaml b/shuffle-tools/1.0.0/api.yaml deleted file mode 100644 index 43fabb22..00000000 --- a/shuffle-tools/1.0.0/api.yaml +++ /dev/null @@ -1,814 +0,0 @@ ---- -app_version: 1.0.0 -name: Shuffle Tools -description: A tool app for Shuffle -tags: - - Testing - - Shuffle -categories: - - Testing - - Shuffle -contact_info: - name: "@frikkylikeme" - url: https://shuffler.io - email: frikky@shuffler.io -actions: - - name: repeat_back_to_me - description: Repeats the call parameter - parameters: - - name: call - description: The message to repeat - required: true - multiline: true - example: "REPEATING: Hello world" - schema: - type: string - returns: - schema: - type: string - - name: router - description: Reroutes information between different nodes - returns: - schema: - type: string - - name: get_cache_value - description: Get a value saved to your organization in Shuffle - parameters: - - name: key - description: The key to get - required: true - multiline: false - example: "timestamp" - schema: - type: string - returns: - 
schema: - type: string - - name: set_cache_value - description: Set a value to be saved to your organization in Shuffle. - parameters: - - name: key - description: The key to set the value for - required: true - multiline: false - example: "timestamp" - schema: - type: string - - name: value - description: The value to set - required: true - multiline: true - example: "1621959545" - schema: - type: string - returns: - schema: - type: string - - name: send_sms_shuffle - description: Send an SMS from Shuffle - parameters: - - name: apikey - description: Your https://shuffler.io organization apikey - multiline: false - example: "https://shuffler.io apikey" - required: true - schema: - type: string - - name: phone_numbers - description: The receivers of the SMS - multiline: false - example: "+4741323535,+8151023022" - required: true - schema: - type: string - - name: body - description: The SMS to add to the numbers - multiline: true - example: "This is an alert from Shuffle :)" - required: true - schema: - type: string - returns: - schema: - type: string - - name: send_email_shuffle - description: Send an email from Shuffle - parameters: - - name: apikey - description: Your https://shuffler.io organization apikey - multiline: false - example: "https://shuffler.io apikey" - required: true - schema: - type: string - - name: recipients - description: The recipients of the email - multiline: false - example: "test@example.com,frikky@shuffler.io" - required: true - schema: - type: string - - name: subject - description: The subject to use - multiline: false - example: "SOS this is an alert :o" - required: true - schema: - type: string - - name: body - description: The body to add to the email - multiline: true - example: "This is an email alert from Shuffler.io :)" - required: true - schema: - type: string - returns: - schema: - type: string - - name: filter_list - description: Takes a list and filters based on your data - skip_multicheck: true - parameters: - - name: 
input_list - description: The list to check - required: true - multiline: false - example: '[{"data": "1.2.3.4"}, {"data": "1.2.3.5"}]' - schema: - type: string - - name: field - description: The field to check - required: false - multiline: false - example: "data" - schema: - type: string - - name: check - description: Type of check - required: true - example: "equals" - options: - - equals - - 'larger than' - - 'less than' - - is empty - - contains - - contains any of - - starts with - - ends with - - field is unique - - files by extension - schema: - type: string - - name: value - description: The value to check with - required: false - multiline: false - example: "1.2.3.4" - schema: - type: string - - name: opposite - description: Whether to add or to NOT add - required: true - options: - - False - - True - multiline: false - example: "false" - schema: - type: string - returns: - schema: - type: string - #- name: multi_list_filter - # description: Takes a list and filters based on your data - # skip_multicheck: true - # parameters: - # - name: input_list - # description: The list to check - # required: true - # multiline: false - # example: '[{"data": "1.2.3.4"}, {"data": "1.2.3.5"}]' - # schema: - # type: string - # - name: field - # description: The field to check - # required: true - # multiline: false - # example: "data" - # schema: - # type: string - # - name: check - # description: Type of check - # required: true - # example: "equals,equals" - # schema: - # type: string - # - name: value - # description: The value to check with - # required: true - # multiline: false - # example: "1.2.3.4" - # schema: - # type: string - # returns: - # schema: - # type: string - - name: parse_ioc - description: Parse IOC's based on https://github.com/fhightower/ioc-finder - parameters: - - name: input_string - description: The string to check - required: true - multiline: true - example: "123ijq192.168.3.6kljqwiejs8 https://shuffler.io" - schema: - type: string - - name: 
input_type - description: The string to check - required: false - multiline: false - example: "md5s" - schema: - type: string - returns: - schema: - type: string - - name: parse_file_ioc - description: Parse IOC's based on https://github.com/fhightower/ioc-finder - parameters: - - name: file_ids - description: The shuffle file to check - required: true - multiline: false - schema: - type: string - - name: input_type - description: The string to check - required: false - multiline: false - example: "md5s" - schema: - type: string - returns: - schema: - type: string - - name: translate_value - description: Takes a list of values and translates it in your input data - parameters: - - name: input_data - description: The input data to use - required: true - multiline: true - example: Hello this is an md5 - schema: - type: string - - name: translate_from - description: The source items to look for - required: true - multiline: false - example: sha256,md5,sha1 - schema: - type: string - - name: translate_to - description: The destination data to change to - required: true - multiline: true - example: hash - schema: - type: string - - name: else_value - description: The value to set if it DOESNT match. Default to nothing. 
- required: false - multiline: false - example: - schema: - type: string - returns: - schema: - type: string - - name: map_value - description: Takes a mapping dictionary and translates the input data - parameters: - - name: input_data - description: The input data to use - required: true - multiline: true - example: $exec.field1 - schema: - type: string - - name: mapping - description: The mapping dictionary - required: true - multiline: true - example: | - { - "Low": 1, - "Medium": 2, - "High": 3, - } - schema: - type: string - returns: - schema: - type: string - - name: regex_replace - description: Replace all instances matching a regular expression - parameters: - - name: input_data - description: The input data to use - required: true - multiline: true - example: $exec.http_headers - schema: - type: string - - name: regex - description: Your regular expression - multiline: false - example: "(Content-\\w+): (.*)" - required: true - schema: - type: string - - name: replace_string - description: Replacement string (capture groups with \1 \2) - multiline: true - example: "Content header '\\1' = '\\2'" - required: false - schema: - type: string - - name: ignore_case - description: "Make regex case insensitive (Default: False)" - multiline: false - example: "False" - required: false - schema: - type: string - returns: - schema: - type: string - - name: parse_list - description: Parses a list and returns it as a json object - parameters: - - name: items - description: List of items - required: true - multiline: true - example: shuffler.io,test.com,test.no - schema: - type: string - - name: splitter - description: The splitter to use - required: false - multiline: false - example: "," - schema: - type: string - returns: - schema: - type: string - - name: execute_bash - description: Runs bash with the data inputted available (TBD) - parameters: - - name: code - description: The code to run - required: true - multiline: true - example: echo "Hello" - schema: - type: 
string - - name: shuffle_input - description: Alternative data to add - required: false - multiline: true - example: '{"data": "Hello world"}' - schema: - type: string - - name: get_file_value - description: This function is made for reading file(s), printing their data - parameters: - - name: filedata - description: The files - required: true - multiline: true - example: "REPEATING: Hello world" - schema: - type: file - returns: - schema: - type: string - - name: download_remote_file - description: Downloads a file from a URL - parameters: - - name: url - description: - required: true - multiline: false - example: "https://secure.eicar.org/eicar.com.txt" - schema: - type: string - returns: - schema: - type: string - - name: get_file_meta - description: Gets the file meta - parameters: - - name: file_id - description: - required: true - multiline: false - example: "" - schema: - type: string - returns: - schema: - type: string - - name: delete_file - description: Deletes a file based on ID - parameters: - - name: file_id - description: - required: true - multiline: false - example: "Some data to put in the file" - schema: - type: string - returns: - schema: - type: string - - name: extract_archive - description: Extract compressed files, return file ids - parameters: - - name: file_ids - description: - required: true - multiline: false - schema: - type: string - - name: fileformat - description: - required: true - multiline: false - options: - - zip - - rar - - 7zip - - tar - - tar.gz - schema: - type: string - - name: password - description: - required: false - multiline: false - schema: - type: string - returns: - schema: - type: string - - name: inflate_archive - description: Compress files in archive, return archive's file id - parameters: - - name: file_ids - description: - required: true - multiline: true - schema: - type: string - - name: fileformat - description: - required: true - multiline: false - options: - - zip - - 7zip - schema: - type: string - - 
name: name - description: - required: false - multiline: false - schema: - type: string - - name: password - description: - required: false - multiline: false - schema: - type: string - returns: - schema: - type: string - - name: xml_json_convertor - description: Converts xml to json and vice versa - parameters: - - name: convertto - required: true - multiline: false - options: - - json - - xml - schema: - type: string - - name: data - description: - required: true - multiline: false - example: 'xml data / json data' - schema: - type: string - returns: - schema: - type: string - - name: date_to_epoch - description: Converts a date field with a given format to an epoch time - parameters: - - name: input_data - description: The input data to use - required: true - multiline: true - example: 2010-11-04T04:15:22.123Z - schema: - type: dict - - name: date_field - description: The field containing the date to parse - required: true - multiline: false - example: currentDateTime - schema: - type: string - - name: date_format - # yamllint disable-line rule:line-length - description: The datetime format of the field to parse (strftime format). 
- required: true - multiline: false - example: '%Y-%m-%dT%H:%M:%s.%f%Z' - schema: - type: string - returns: - schema: - type: dict - - name: compare_relative_date - # yamllint disable-line rule:line-length - description: Compares an input date to a relative date and returns a True/False result - parameters: - - name: input_data - description: The input data to use - required: true - multiline: true - example: 2010-11-04T04:15:22.123Z - schema: - type: string - - name: date_format - description: The format of the input date field (strftime format) - required: true - multiline: false - example: '%Y-%m-%dT%H:%M:%S.%f%Z' - options: - - '%Y-%m-%dT%H:%M%z' - - '%Y-%m-%dT%H:%M:%SZ' - - '%Y-%m-%dT%H:%M:%S%Z' - - '%Y-%m-%dT%H:%M:%S%z' - - '%Y-%m-%dT%H:%M:%S.%f%z' - - '%Y-%m-%d' - - '%H:%M:%S' - - '%s' - schema: - type: string - - name: equality_test - description: How to compare the input date and offset date - required: true - multiline: false - example: '>' - options: - - '>' - - '<' - - '=' - - '!=' - - '>=' - - '<=' - schema: - type: string - - name: offset - description: Numeric offset from current time - required: true - multiline: false - example: 60 - schema: - type: string - - name: units - description: The units of the provided value - required: true - multiline: false - example: 'seconds' - options: - - seconds - - minutes - - hours - - days - schema: - type: string - - name: direction - description: Whether the comparison should be in the past or future - required: true - multiline: false - example: 'ago' - options: - - ago - - ahead - schema: - type: string - returns: - schema: - type: strings - - name: add_list_to_list - description: Adds items of second list (list_two) to the first one (list_one) - parameters: - - name: list_one - description: The first list - multiline: true - example: "{'key': 'value'}" - required: true - schema: - type: string - - name: list_two - description: The second list to use - multiline: true - required: true - example: "{'key2': 
'value2'}" - schema: - type: string - - name: merge_lists - description: Merges two lists of same type AND length. - parameters: - - name: list_one - description: The first list - multiline: true - example: "{'key': 'value'}" - required: true - schema: - type: string - - name: list_two - description: The second list to use - multiline: true - required: true - example: "{'key2': 'value2'}" - schema: - type: string - - name: set_field - description: If items in list 2 are strings, but first is JSON, sets the values to the specified key. Defaults to key "new_shuffle_key" - required: false - example: "json_key" - schema: - type: string - - name: sort_key_list_one - description: Sort by this key before using list one for merging - required: false - example: "json_key" - schema: - type: string - - name: sort_key_list_two - description: Sort by this key before using list two for merging - required: false - example: "json_key" - schema: - type: string - - name: diff_lists - description: Diffs two lists of strings or integers and finds what's missing - parameters: - - name: list_one - description: The first list - multiline: true - example: "{'key': 'value'}" - required: true - schema: - type: string - - name: list_two - description: The second list to use - multiline: true - required: true - example: "{'key2': 'value2'}" - schema: - type: string - - name: delete_json_keys - description: Deletes keys in a json object - parameters: - - name: json_object - description: The object to edit - multiline: true - example: "{'key': 'value', 'key2': 'value2', 'key3': 'value3'}" - required: true - schema: - type: string - - name: keys - description: The key(s) to remove - multiline: true - required: true - example: "key, key3" - schema: - type: string - - name: convert_json_to_tags - description: Creates key:value pairs and - parameters: - - name: json_object - description: The object to make into a key:value pair - multiline: true - example: "{'key': 'value', 'key2': 'value2', 
'key3': 'value3'}" - required: true - schema: - type: string - - name: split_value - description: The way to split the values. Defaults to comma. - multiline: false - required: false - example: "," - schema: - type: string - - name: include_key - description: Whether it should include the key or not - options: - - true - - false - schema: - type: string - - name: lowercase - description: Whether it should be lowercase or not - options: - - true - - false - schema: - type: string - - name: run_math_operation - description: Takes a math input and gives you the result - parameters: - - name: operation - description: The operation to perform - required: true - multiline: true - example: "5+10" - schema: - type: string - returns: - schema: - type: string - - name: escape_html - description: Performs HTML escaping on a field - parameters: - - name: input_data - description: The input data to use - required: true - multiline: true - example: $exec.field1 - schema: - type: string - - name: field_name - description: The field to HTML escape - required: true - multiline: true - example: my_unsafe_field - schema: - type: string - returns: - schema: - type: string - - name: base64_conversion - description: Encode or decode a Base64 string - parameters: - - name: string - description: string to process - multiline: true - example: "This is a string to be encoded" - required: true - schema: - type: string - - name: operation - description: Choose to encode or decode the string - example: "encode" - required: true - options: - - encode - - decode - schema: - type: string - - name: cidr_ip_match - description: Check if an IP is contained in a CIDR defined network - parameters: - - name: ip - description: IP to check - multiline: false - example: "1.1.1.1" - required: True - schema: - type: string - - name: networks - description: List of network in CIDR format - multiline: true - required: true - example: "['10.0.0.0/8', '192.168.10.0/24']" - schema: - type: string - returns: - 
schema: - type: string - -# yamllint disable-line rule:line-length -large_image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAK4AAACuCAYAAACvDDbuAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAAAAB3RJTUUH5AgXDjM6hEZGWwAAD+lJREFUeNrtXb/vJTcRH7/v3iVBCqRBiCAQAtHwq4AWRElHwX8AoqbmXwDRpiH/QyQkGoogUSAhKIKUAE1IdSRSREhQQk7c3XtD8X55vePxjNfe3bk3H+nu+96uPf54POtnj8fe8OQX30JwOIxhtzYBh6MGOsPF0z9p2iWwpd8LjX6W5vWUYaiqlBuvLT5b5TQDPlRwmMSAABBg+kCer+XuAeQf4tL9tAxJ/hIfZGSm8rhyEfjytfxr9FeSX+KjvVfipNVpWlaPNhsAEPCS7Ao8FYnRlbO4ksLnjiSQvIanv4FNjwJ5pXIlMq6MQpIqqPnQKQKbjuPDtZlG55o6UHXWtVncZZTbbNBVB1P5dJYguCbJJ1WjOG8PVOioSm5HPrVt1rwuyN+K+PSZnNV1M/MmEFubfFjjU9tmK9XBJ2cOk3DDdZiEG67DJOrGuA7HyvAe12ESAxa73KPrN1z8gUikCCdvcD5NXnpQpA8nNhh9m5Yn4ZMrV8dHV/8a/dRA0x419a3lI9GBtM2GcrGYFXRNUU5TyluTOpdXwqeUt6YOpby9DUTLZylOcRlzdBTf2yV3ZBFOmKSHQh5KpjSSSpqG4s6VkUubqw8W8knTSnWk0Y+2jF5tlmuDUloJn6T8gRVcEpJ+3srChHSNt8RJsq4p+S41LC13KTcu/RJt1pLPKY1Pzhwm4YbrMAk3XIdJTMe4aeCjJhBVk0YiQ1MWZHhLgmO5QNVWfKRlavlIIQnurQmcnaMjSbBxhtMwYUxODpLcl2tUhvPlNE6VkiuoFVLXKT6ZfBjxRIIzOSlgWpLSB8uZ0g3BjeVDlFGEos0mfKKL7CQrY2ES7pM2i/OX22w4/sWReEhEnUOTxx3a+FrawQGZh04/rWe6oJBKo5zT4zLjPHE9ZHym5YzToogzfQcmfLgOhuLF/Sjm2izVDyXnrKtcmmmdaKumf+RyCw5Xn7OmzQaJF0fiEZG6BjXpYUYaSVkaPrXeHe4eVaZEr3Prqrmmrbc2T8lrmOMjn5xJHeJLYkk+PfzNTxOflrwF0EeHbU0Zt2wsW+PTkncB7g5zmMSwzUfS4eDhPa7DJK5jXGorsnZxonbRIbeAoOUjkUvlp+qxFp9YNuWL0nBqsVCkqUsrHQnuX+Nx5/qcJDI0kWgtJh7ihYCN8aG+13DqOXlbWUfD+fN0AUEmp3RcUWlVEwCynb5ssYLnxHViJT6ULCykb8EnzUfpqBWfVAdcnt5tprGhIe10WnjHpB2FtMPWcpM66yXyOad4Lz4Srq34SHhwZfRos1w9Y/jkzGESvj3dYRLe4zpMwg3XYRJuuA6T4M/Hzfk/OGd9OP2HOE2f8wtBlCebJrkfp+Gc3AGmiSiuaVlpwkmajL4osPUm9FMqIzBOJolfjGuzEtdUwWl53Dm7Eh9pzIdps+FiYJyi1N+Rvs/6OLCQBul8Ip8R08ik3EwhLZz1Wv8XmU7ZZqX7OT2gUIB2oaRBm+2ovDm5nM+ulEeiD8yka8UnJ1PCP82r9YWW8iCU5XO8W/PhPmvllNKW7lEyszsgNKuzkspJFZFL15uPtIweq7A1xiKpz1J8tGXP+dE53/fJmcMk6hcgJO8XqokEKi5uYzTG29LqSev95JqyKsoOOxjNpKQBD7VFc5GBJRsi+NQHkkv6+7m/UxTufwLCCy+CbAruyOLDdwEf/uf6vbbNJukzlogZC6wMdhAcM7ohHPawe
/GrcO+HPwe4u782G7sIAE9++0vYv/YKwO6usfCaka0etgwXAGB3D8JznwIYnlmbiW0M92FbQy0d+MmZ3Xo5JDDcvuXJ2ZYqtyUuTwuM6nSXctcufHCOZqkjPScXhbIcdeD0XUpfKyNNy8nlyhuozLkM8XxR6pjm7tc4Fdx620I7lWq10JCm0ZanWoBwm3FsBe1WznpadbTg4A9PI2xx7FUKHopQjg7TKqNnpbioIUcFUGUsy1CS8fFYBYdJuOE6TMIN12ESgyiKiwO1bQOJe1w+6p42Etmhwmi6kLZXfC2G9IUj2vulY2wIPrv4onRhIXcRqS0DiWxkhF0uIb37wG22LRCSuVCyekC2GSXj9CG3YyT+krWh+KPAhkTvgGDKqbqnWbBwY+2Pnm3Wy4aMRYc1MuPDvp0skwgAh8PaJGbh5k4kx0f/hce/ewnw/QenXQCTFJDfQy45PzFNn5NHsoPy/u6gzE+nObzz91P9Z+6kWAm2zg6bDMoq8OQxHN78Axze/htAaB1EbQhhdzyfgRqIGoCxoUIjhDuA3ZDpcR0W4C3nMInbNVw7v4oOAsehArVFPL0uOjMM+DlM+pk7t7/BDuwcJsM6gcM7WweOX05nFCHNi12ASRfLo3QaX9O0GWTylOTnZIMwf4YPPTlD4iMm7aZwAGOUf3Rf48wjHNzVOMkKFA8pp0RHZ1mjdihs5R61PWbsWlphgs/E5gptNvFfSLY8QPk7dVbh+UNg8qfnJsZ8Bo0hzF0Y2Nqvc0s+Vbs5YL5OLfPRcorT2hvjtuxyHWZhzHCX6AMcFtB2B0RvtKZqqe6OEYz1uA7HEbdruN7ZmsZtGq4brXnQhlsbLFkDrY9mC9giH41/dSlONfeEIBcgss7nXopInPdkYN95J3XD1bMgkJUNFOxsDNLgyiynhYyX5dnAhnLyhzmO4V7IO8+xyZEgx5UqvJ41rOUTdhBOr2w6KjZc+B1FBkLGVUoAABQEcmPu6rPPw73v/gh2n/wMANYEhAd4/NqvYf/Wn5pEyPW2IUrOzQWSHyHdkEJgN8D97/0Edp/7GgDu9fnDDvD9t+HRqy8BPvxQ9i6xEXUEuPcMDF//Puw+/aVqDewfvA77f/zx9M40e7jNeNw5CDu4++K34e4r36kWcXj3TYDfvwz8D79ml1clDPuxx9FhuUik0rblVihFWLX+7ZFEXE2ioLBNg9fUSRopVsOjJbioskZlDuyAvmflpOWsOUNu/cBQ8jW/1A0np11RG+GjwG36cQHqFWnBcG4Axgx37d/I1uXXcvCnx6BXoQXf3mOAzvVpooJzaOcWdKBH1fZ07dCsFZpNgmfZbaOJ2dxnpwkNFC3C9MBcGxo0OugxwV8LWKm5lg9sFQdszKGhLAla2dCuduuOZcypx+UXdk0OK5e/hXKNTc4cjiPGhtvTX1njI6Z2+vbuKtaKspLooXdkXs1u5yUR7/LdROMsraSSIfTa6pqWodE9Mvla6sCI8d7uUMEXIEzjdg3XYRr2osOePIbDR+9BGO7re78QAD/+AODwpK5sBDg6dGyGAtL1sYnLGDe3+2BNTNycYQf7B2/Aw5d/XB9HejjA4YN3jgHUNQ132MOTv/wG9v98A+CgFBCO/+FH/wJ89PBaSY1OULZzQyQL2skayVwg/7Dk3Ky2IlcEgEcfw/7dt+YJnRP1f9jDoz+/AvM0FU4c1u8mes59e+ZXDhXmPE+tForD+lH73Q6EluiozfaldnzWQUWQzdprPk87lg44nkTKN+DT/10S7lW4VYz8wWucOTAPtl5e4mgfjmu0/b3HdZiEG67DJNxwbxlGhwkAuZeXAJS3Qpfemq7dds1tS5dsbc6dAyQpS5uGe+lKrJLSGUqlCb2GcwUuCxBzt71T2/g7t9mQniofv0yjWOtMYdSLM6Sy0pd5iLdFSQtUyiJtRnjmGOdhqq5bo5WzUXAYzns2Lu2tjaqb0WaTHRBrR9cvEVG4VF3WkLsGnzXqohzjbk3dt
4hG/jDDxy8BLL5y5miBZi1wa9vT14dJ0o2qft6/1GhQZ1SV9uJxd3cQ7j+XD7RJ40JK38/XAPKz4ly+OG+KwOTDwn0uDSKEZ58/vgH+hmHLcA97uPvCN+G5H/wMoCaQ/KkAAtzdg/DCZ9cmsipsGS4ce5u7z38DYHhmbTL2YfjBH28DOM80s+MoxllVvfkwKudSbiL0dB0NTya2iGpNYmIzl+/EdexjQ8PEGE4FhdPHMAlbLhcsdWaPnfDEAxQJnbx53TEPJ51j3N7CrEfbSNt+arzXt57X2RBx94LsUGHOGRQtF7Fa8HFQQOabJmc5XQ8b8iAbh0mYNFzvdefD+nRhyPowqWitc2VbRyutGCF18+ilU2mEXWX51zFuKbqlZ/RLy0gixzagiS6sgL2hghuwAywarsMBxgzXO9u2sBzZWHwHRLwrQ5rWYQBIfuwCKnZJEpvEYSg9dRoncnejtdxFbBRLqFQzr5fSudH3nDmOaH26yHIwNcZ1NIZNmwWArYU1Fg8HDLB/7wH879VfAey2Rd0a9g/+2ubUyZUOdAz//umXjT136GPd2cDNnM9bC4Pd1gbOx3WsDh/jOkzCDddhEpcjmKiFhvGLQwDitJNrYTz05H7MS+N56hiq0mbYCfeIj2STb2s+cSJEOrguJ4fScaneOW7kOWZJm4VCmaPFg8wKgcSGuLpzR49Rerm8vIRaaECgvyB1Tbl9qOZoMiykHeVhVoZKwW9N+CSJuPwsH4YY12aTa5TxYyZPpsxSDG/Rhgp1lyxUnK/7UMFhEm64DpNIlnzTAdXcsJml8rdO1yt/K+R45EJUluS9zHaWITuQJb9rsVT+HvuKe+RvhdIIcE3ey4Rj+VDBYRJuuA6TcMN1mMT15SWMZ5h10Oc86+dr50s14QWch7rEh5PHef+psgsyqB0iI2e+hE+pDlpvvkQ/uVUMDfdSnTq12TA58injFUdOMPB5AeiALtHcUrstXrqSINnaoVjxyE5ra1ZipHMsTV2kMiQ8NDw7tdmqQ4WtzNEd9uBjXIdJuOE6TMLoy0sct46KHndNS6d2pW5tp+rW+Jw5rVl2qpP5Oqrcnr52w9RMgbfA8db5tAsp8DGuwyTaGW6DB7ppn9CCzxKnvKz9Kz7j/prUi0cwqQLQDBtvrp5uvMc/Wf00oFAT5FjscbcwMloCt1LPWvTUT41sH+M6TMIN12ESw3UPd8gPtrh7JeTyXvZGn0KD0jSlMms5Sfhw92vkUvXT5tPWt3WbSfjMsSFl3ujlJdy+4xkjnFze+PWrNWXWclqaT6t82vq2bjMJnzk2pMzrQwWHSbjhOkzCDdchxpZchpezwySQvHhiyVMLevPRctXwqeWmfcv5GaVTGKRy557YIHnhpETeoCl05grhbPlL89HK1vCp5darvZbgo+XEwYcKDpNww3WYxC6/U5PY5oun66MzPHH8L05PpqHKghn+TpjyictkZQLPh4u6yeknvXeWU+JD6TDHJ/cbn93Bi8nnDKdJm8EG2+zIZwBudlbjUOYOpj1frClPwyf3OZuXuaEx3lgWZixKxIfZ911rvJO65PRFVmZjbYY+VHDYhBuuwyTccB0mcdkB0cr5z70pW/pm7Bo+LesgqUsrPjVye9WXkqld8FiizRCi6LBWjmTRPGGG/JZ5ejvoa1ai1qwvlWarbeZDBYdJuOE6TKKP4W7xJdFb4+R8ZvH5P852gxhpwOZ9AAAAJXRFWHRkYXRlOmNyZWF0ZQAyMDIwLTA4LTIzVDE0OjUyOjAwKzAyOjAwetRgVgAAACV0RVh0ZGF0ZTptb2RpZnkAMjAyMC0wOC0yM1QxNDo1MTo1OCswMjowMJuxI+oAAAAASUVORK5CYII= diff --git a/shuffle-tools/1.0.0/requirements.txt b/shuffle-tools/1.0.0/requirements.txt deleted file mode 100644 index 
1ac4003c..00000000 --- a/shuffle-tools/1.0.0/requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -ioc_finder==6.0.1 -py7zr==0.11.3 -rarfile==4.0 -pyminizip==0.2.4 -requests==2.25.1 -xmltodict==0.11.0 -json2xml==3.6.0 -ipaddress==1.0.23 diff --git a/shuffle-tools/1.0.0/src/app.py b/shuffle-tools/1.0.0/src/app.py deleted file mode 100644 index 9a09e415..00000000 --- a/shuffle-tools/1.0.0/src/app.py +++ /dev/null @@ -1,1305 +0,0 @@ -import asyncio -import datetime -import json -import markupsafe -import os -import re -import subprocess -import tempfile -import zipfile -import base64 -import ipaddress - -import py7zr -import pyminizip -import rarfile -import requests -import tarfile - -import xmltodict -from json2xml import json2xml - -from json2xml.utils import readfromstring - -from ioc_finder import find_iocs -from walkoff_app_sdk.app_base import AppBase - - - - -class Tools(AppBase): - """ - An example of a Walkoff App. - Inherit from the AppBase class to have Redis, logging, and console - logging set up behind the scenes. - - """ - - __version__ = "1.0.0" - app_name = ( - "Shuffle Tools" # this needs to match "name" in api.yaml for WALKOFF to work - ) - - def __init__(self, redis, logger, console_logger=None): - """ - Each app should have this __init__ to set up Redis and logging. 
- :param redis: - :param logger: - :param console_logger: - """ - super().__init__(redis, logger, console_logger) - - def router(self): - return "This action should be skipped" - - def base64_conversion(self, string, operation): - - if operation == "encode": - encoded_bytes = base64.b64encode(string.encode("utf-8")) - encoded_string = str(encoded_bytes, "utf-8") - return encoded_string - - elif operation == "decode": - decoded_bytes = base64.b64decode(string.encode("utf-8")) - decoded_string = str(decoded_bytes, "utf-8") - return decoded_string - - # This is an SMS function of Shuffle - def send_sms_shuffle(self, apikey, phone_numbers, body): - targets = [phone_numbers] - if ", " in phone_numbers: - targets = phone_numbers.split(", ") - elif "," in phone_numbers: - targets = phone_numbers.split(",") - - data = {"numbers": targets, "body": body} - - url = "https://shuffler.io/api/v1/functions/sendsms" - headers = {"Authorization": "Bearer %s" % apikey} - return requests.post(url, headers=headers, json=data).text - - # This is an email function of Shuffle - def send_email_shuffle(self, apikey, recipients, subject, body): - targets = [recipients] - if ", " in recipients: - targets = recipients.split(", ") - elif "," in recipients: - targets = recipients.split(",") - - data = {"targets": targets, "body": body, "subject": subject, "type": "alert"} - - url = "https://shuffler.io/api/v1/functions/sendmail" - headers = {"Authorization": "Bearer %s" % apikey} - return requests.post(url, headers=headers, json=data).text - - def repeat_back_to_me(self, call): - return call - - # https://github.com/fhightower/ioc-finder - def parse_file_ioc(self, file_ids, input_type="all"): - def parse(data): - try: - iocs = find_iocs(str(data)) - newarray = [] - for key, value in iocs.items(): - if input_type != "all": - if key not in input_type: - continue - if len(value) > 0: - for item in value: - if isinstance(value, dict): - for subkey, subvalue in value.items(): - if len(subvalue) > 0: 
- for subitem in subvalue: - data = { - "data": subitem, - "data_type": "%s_%s" - % (key[:-1], subkey), - } - if data not in newarray: - newarray.append(data) - else: - data = {"data": item, "data_type": key[:-1]} - if data not in newarray: - newarray.append(data) - for item in newarray: - if "ip" in item["data_type"]: - item["data_type"] = "ip" - return {"success": True, "items": newarray} - except Exception as excp: - return {"success": False, "message": "{}".format(excp)} - - if input_type == "": - input_type = "all" - else: - input_type = input_type.split(",") - - try: - file_ids = eval(file_ids) # nosec - except SyntaxError: - file_ids = file_ids - except NameError: - file_ids = file_ids - - return_value = None - if type(file_ids) == str: - return_value = parse(self.get_file(file_ids)["data"]) - elif type(file_ids) == list and type(file_ids[0]) == str: - return_value = [ - parse(self.get_file(file_id)["data"]) for file_id in file_ids - ] - elif ( - type(file_ids) == list - and type(file_ids[0]) == list - and type(file_ids[0][0]) == str - ): - return_value = [ - [parse(self.get_file(file_id2)["data"]) for file_id2 in file_id] - for file_id in file_ids - ] - else: - return "Invalid input" - return return_value - - # https://github.com/fhightower/ioc-finder - def parse_ioc(self, input_string, input_type="all"): - if input_type == "": - input_type = "all" - else: - input_type = input_type.split(",") - - iocs = find_iocs(input_string) - newarray = [] - for key, value in iocs.items(): - if input_type != "all": - if key not in input_type: - continue - - if len(value) > 0: - for item in value: - # If in here: attack techniques. 
Shouldn't be 3 levels so no - # recursion necessary - if isinstance(value, dict): - for subkey, subvalue in value.items(): - if len(subvalue) > 0: - for subitem in subvalue: - data = { - "data": subitem, - "data_type": "%s_%s" % (key[:-1], subkey), - } - if data not in newarray: - newarray.append(data) - else: - data = {"data": item, "data_type": key[:-1]} - if data not in newarray: - newarray.append(data) - - # Reformatting IP - for item in newarray: - if "ip" in item["data_type"]: - item["data_type"] = "ip" - try: - item["is_private_ip"] = ipaddress.ip_address(item["data"]).is_private - except: - print("Error parsing %s" % ip) - pass - - try: - newarray = json.dumps(newarray) - except json.decoder.JSONDecodeError as e: - return "Failed to parse IOC's: %s" % e - - return newarray - - def parse_list(self, items, splitter="\n"): - if splitter == "": - splitter = "\n" - - splititems = items.split(splitter) - - return str(splititems) - - def get_length(self, item): - if item.startswith("[") and item.endswith("]"): - try: - item = item.replace("'", '"', -1) - item = json.loads(item) - except json.decoder.JSONDecodeError as e: - print("Parse error: %s" % e) - pass - - return str(len(item)) - - def delete_json_keys(self, json_object, keys): - splitdata = [keys] - if ", " in keys: - splitdata = keys.split(", ") - elif "," in keys: - splitdata = keys.split(",") - - for key in splitdata: - key = key.strip() - try: - del json_object[key] - except: - print("Key %s doesn't exist" % key) - - return json_object - - def translate_value(self, input_data, translate_from, translate_to, else_value=""): - splitdata = [translate_from] - if ", " in translate_from: - splitdata = translate_from.split(", ") - elif "," in translate_from: - splitdata = translate_from.split(",") - - if isinstance(input_data, list) or isinstance(input_data, dict): - input_data = json.dumps(input_data) - - to_return = input_data - if isinstance(input_data, str): - found = False - for item in splitdata: - item = 
item.strip() - if item in input_data: - input_data = input_data.replace(item, translate_to) - found = True - - if not found and len(else_value) > 0: - input_data = else_value - - if input_data.lower() == "false": - return False - elif input_data.lower() == "true": - return True - - return input_data - - def map_value(self, input_data, mapping): - - mapping = json.loads(mapping) - print(f"Got mapping {json.dumps(mapping, indent=2)}") - - # Get value if input_data in map, otherwise return original input_data - output_data = mapping.get(input_data, input_data) - print(f"Mapping {input_data} to {output_data}") - - return output_data - - def regex_replace( - self, input_data, regex, replace_string="", ignore_case="False" - ): - - print("=" * 80) - print(f"Regex: {regex}") - print(f"replace_string: {replace_string}") - print("=" * 80) - - if ignore_case.lower().strip() == "true": - return re.sub(regex, replace_string, input_data, flags=re.IGNORECASE) - else: - return re.sub(regex, replace_string, input_data) - - def execute_python(self, code, shuffle_input): - print("Run with shuffle_data %s" % shuffle_input) - print("And python code %s" % code) - # Write the code to a file, then jdjd - exec(code) # nosec - - # 1. Take the data into a file - # 2. Subprocess execute file? 
- - # May be necessary - # compile() - - return "Some return: %s" % shuffle_input - - def execute_bash(self, code, shuffle_input): - process = subprocess.Popen( - code, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True, - shell=True, # nosec - ) - stdout = process.communicate() - item = "" - if len(stdout[0]) > 0: - print("Succesfully ran bash!") - item = stdout[0] - else: - print("FAILED to run bash!") - item = stdout[1] - - try: - ret = item.decode("utf-8") - return ret - except Exception: - return item - - return item - - def filter_list(self, input_list, field, check, value, opposite): - print(f"\nRunning function with list {input_list}") - - flip = False - if opposite.lower() == "true": - flip = True - - try: - input_list = eval(input_list) # nosec - except Exception: - try: - input_list = input_list.replace("'", '"', -1) - input_list = json.loads(input_list) - except Exception: - print("Error parsing string to array. Continuing anyway.") - - # Workaround D: - if not isinstance(input_list, list): - return { - "success": False, - "reason": "Error: input isnt a list. Remove # to use this action." 
- % type(input_list), - "valid": [], - "invalid": [], - } - - input_list = [input_list] - - print("\nRunning with check \"%s\" on list of length %d\n" % (check, len(input_list))) - found_items = [] - new_list = [] - failed_list = [] - for item in input_list: - try: - try: - item = json.loads(item) - except Exception: - pass - - # Support for nested dict key - tmp = item - if field and field.strip() != "": - for subfield in field.split("."): - tmp = tmp[subfield] - - if isinstance(tmp, dict) or isinstance(tmp, list): - try: - tmp = json.dumps(tmp) - except json.decoder.JSONDecodeError as e: - print("FAILED DECODING: %s" % e) - pass - - #print("PRE CHECKS FOR TMP: %") - - # EQUALS JUST FOR STR - if check == "equals": - # Mostly for bools - # value = tmp.lower() - - if str(tmp).lower() == str(value).lower(): - print("APPENDED BECAUSE %s %s %s" % (field, check, value)) - if not flip: - new_list.append(item) - else: - failed_list.append(item) - else: - if flip: - new_list.append(item) - else: - failed_list.append(item) - - # IS EMPTY FOR STR OR LISTS - elif check == "is empty": - if tmp == "[]": - tmp = [] - - if type(tmp) == list and len(tmp) == 0 and not flip: - new_list.append(item) - elif type(tmp) == list and len(tmp) > 0 and flip: - new_list.append(item) - elif type(tmp) == str and not tmp and not flip: - new_list.append(item) - elif type(tmp) == str and tmp and flip: - new_list.append(item) - else: - failed_list.append(item) - - # STARTS WITH = FOR STR OR [0] FOR LIST - elif check == "starts with": - if type(tmp) == list and tmp[0] == value and not flip: - new_list.append(item) - elif type(tmp) == list and tmp[0] != value and flip: - new_list.append(item) - elif type(tmp) == str and tmp.startswith(value) and not flip: - new_list.append(item) - elif type(tmp) == str and not tmp.startswith(value) and flip: - new_list.append(item) - else: - failed_list.append(item) - - # ENDS WITH = FOR STR OR [-1] FOR LIST - elif check == "ends with": - if type(tmp) == list and 
tmp[-1] == value and not flip: - new_list.append(item) - elif type(tmp) == list and tmp[-1] != value and flip: - new_list.append(item) - elif type(tmp) == str and tmp.endswith(value) and not flip: - new_list.append(item) - elif type(tmp) == str and not tmp.endswith(value) and flip: - new_list.append(item) - else: - failed_list.append(item) - - # CONTAINS FIND FOR LIST AND IN FOR STR - elif check == "contains": - if type(tmp) == list and value.lower() in tmp and not flip: - new_list.append(item) - elif type(tmp) == list and value.lower() not in tmp and flip: - new_list.append(item) - elif ( - type(tmp) == str - and tmp.lower().find(value.lower()) != -1 - and not flip - ): - new_list.append(item) - elif ( - type(tmp) == str - and tmp.lower().find(value.lower()) == -1 - and flip - ): - new_list.append(item) - else: - failed_list.append(item) - elif check == "contains any of": - print("Inside contains any of") - checklist = value.split(",") - print("Checklist and tmp: %s - %s" % (checklist, tmp)) - found = False - for subcheck in checklist: - subcheck = subcheck.strip().lower() - #ext.lower().strip() == value.lower().strip() - if type(tmp) == list and subcheck in tmp and not flip: - new_list.append(item) - found = True - break - elif type(tmp) == list and subcheck not in tmp and flip: - new_list.append(item) - found = True - break - elif (type(tmp) == str and tmp.lower().find(subcheck) != -1 and not flip): - new_list.append(item) - found = True - break - elif (type(tmp) == str and tmp.lower().find(subcheck) == -1 and flip): - new_list.append(item) - found = True - break - - if not found: - failed_list.append(item) - - # CONTAINS FIND FOR LIST AND IN FOR STR - elif check == "field is unique": - #print("FOUND: %s" - if tmp.lower() not in found_items and not flip: - new_list.append(item) - found_items.append(tmp.lower()) - elif tmp.lower() in found_items and flip: - new_list.append(item) - found_items.append(tmp.lower()) - else: - failed_list.append(item) - - #tmp = 
json.dumps(tmp) - - #for item in new_list: - #if type(tmp) == list and value.lower() in tmp and not flip: - # new_list.append(item) - # found = True - # break - #elif type(tmp) == list and value.lower() not in tmp and flip: - # new_list.append(item) - # found = True - # break - - # CONTAINS FIND FOR LIST AND IN FOR STR - elif check == "contains any of": - checklist = value.split(",") - tmp = tmp.lower() - print("CHECKLIST: %s. Value: %s" % (checklist, tmp)) - found = False - for value in checklist: - if value in tmp and not flip: - new_list.append(item) - found = True - break - elif value not in tmp and flip: - new_list.append(item) - found = True - break - - if not found: - failed_list.append(item) - - elif check == "larger than": - if int(tmp) > int(value) and not flip: - new_list.append(item) - elif int(tmp) > int(value) and flip: - new_list.append(item) - else: - failed_list.append(item) - elif check == "less than": - if int(tmp) < int(value) and not flip: - new_list.append(item) - elif int(tmp) < int(value) and flip: - new_list.append(item) - else: - failed_list.append(item) - - # SINGLE ITEM COULD BE A FILE OR A LIST OF FILES - elif check == "files by extension": - if type(tmp) == list: - file_list = [] - - for file_id in tmp: - filedata = self.get_file(file_id) - _, ext = os.path.splitext(filedata["filename"]) - if ( - ext.lower().strip() == value.lower().strip() - and not flip - ): - file_list.append(file_id) - elif ext.lower().strip() != value.lower().strip() and flip: - file_list.append(file_id) - # else: - # failed_list.append(file_id) - - tmp = item - if field and field.strip() != "": - for subfield in field.split(".")[:-1]: - tmp = tmp[subfield] - tmp[field.split(".")[-1]] = file_list - new_list.append(item) - else: - new_list = file_list - # else: - # failed_list = file_list - - elif type(tmp) == str: - filedata = self.get_file(tmp) - _, ext = os.path.splitext(filedata["filename"]) - if ext.lower().strip() == value.lower().strip() and not flip: - 
new_list.append(item) - elif ext.lower().strip() != value.lower().strip() and flip: - new_list.append((item, ext)) - else: - failed_list.append(item) - - except Exception as e: - # "Error: %s" % e - print("[WARNING] FAILED WITH EXCEPTION: %s" % e) - failed_list.append(item) - # return - - try: - return json.dumps( - { - "success": True, - "valid": new_list, - "invalid": failed_list, - } - ) - # new_list = json.dumps(new_list) - except json.decoder.JSONDecodeError as e: - return json.dumps( - { - "success": False, - "reason": "Failed parsing filter list output" + e, - } - ) - - return new_list - - #def multi_list_filter(self, input_list, field, check, value): - # input_list = input_list.replace("'", '"', -1) - # input_list = json.loads(input_list) - - # fieldsplit = field.split(",") - # if ", " in field: - # fieldsplit = field.split(", ") - - # valuesplit = value.split(",") - # if ", " in value: - # valuesplit = value.split(", ") - - # checksplit = check.split(",") - # if ", " in check: - # checksplit = check.split(", ") - - # new_list = [] - # for list_item in input_list: - # list_item = json.loads(list_item) - - # index = 0 - # for check in checksplit: - # if check == "equals": - # print( - # "Checking %s vs %s" - # % (list_item[fieldsplit[index]], valuesplit[index]) - # ) - # if list_item[fieldsplit[index]] == valuesplit[index]: - # new_list.append(list_item) - - # index += 1 - - # # "=", - # # "equals", - # # "!=", - # # "does not equal", - # # ">", - # # "larger than", - # # "<", - # # "less than", - # # ">=", - # # "<=", - # # "startswith", - # # "endswith", - # # "contains", - # # "re", - # # "matches regex", - - # try: - # new_list = json.dumps(new_list) - # except json.decoder.JSONDecodeError as e: - # return "Failed parsing filter list output" % e - - # return new_list - - # Gets the file's metadata, e.g. 
md5 - def get_file_meta(self, file_id): - headers = { - "Authorization": "Bearer %s" % self.authorization, - } - - ret = requests.get( - "%s/api/v1/files/%s?execution_id=%s" - % (self.url, file_id, self.current_execution_id), - headers=headers, - ) - print(f"RET: {ret}") - - return ret.text - - # Use data from AppBase to talk to backend - def delete_file(self, file_id): - headers = { - "Authorization": "Bearer %s" % self.authorization, - } - print("HEADERS: %s" % headers) - - ret = requests.delete( - "%s/api/v1/files/%s?execution_id=%s" - % (self.url, file_id, self.current_execution_id), - headers=headers, - ) - return ret.text - - def get_file_value(self, filedata): - if filedata is None: - return "File is empty?" - - print("INSIDE APP DATA: %s" % filedata) - return "%s" % filedata["data"].decode() - - def download_remote_file(self, url): - ret = requests.get(url, verify=False) # nosec - filename = url.split("/")[-1] - fileret = self.set_files( - [ - { - "filename": filename, - "data": ret.content, - } - ] - ) - - if len(fileret) > 0: - value = {"success": True, "file_id": fileret[0]} - else: - value = {"success": False, "reason": "No files downloaded"} - - return value - - def extract_archive(self, file_ids, fileformat="zip", password=None): - try: - return_data = {"success": False, "files": []} - - try: - file_ids = eval(file_ids) # nosec - except SyntaxError: - file_ids = file_ids - - print("IDS: %s" % file_ids) - items = file_ids if type(file_ids) == list else file_ids.split(",") - for file_id in items: - - item = self.get_file(file_id) - return_ids = None - - print("Working with fileformat %s" % fileformat) - with tempfile.TemporaryDirectory() as tmpdirname: - - # Get archive and save phisically - with open(os.path.join(tmpdirname, "archive"), "wb") as f: - f.write(item["data"]) - - # Grab files before, upload them later - to_be_uploaded = [] - - # Zipfile for zipped archive - if fileformat.strip().lower() == "zip": - try: - with zipfile.ZipFile( - 
os.path.join(tmpdirname, "archive") - ) as z_file: - if password: - z_file.setpassword(bytes(password.encode())) - for member in z_file.namelist(): - filename = os.path.basename(member) - if not filename: - continue - source = z_file.open(member) - to_be_uploaded.append( - {"filename": source.name, "data": source.read()} - ) - return_data["success"] = True - except (zipfile.BadZipFile, Exception): - return_data["files"].append( - { - "success": False, - "file_id": file_id, - "filename": item["filename"], - "message": "File is not a valid zip archive", - } - ) - - continue - - elif fileformat.strip().lower() == "rar": - try: - with rarfile.RarFile( - os.path.join(tmpdirname, "archive") - ) as z_file: - if password: - z_file.setpassword(password) - for member in z_file.namelist(): - filename = os.path.basename(member) - if not filename: - continue - source = z_file.open(member) - to_be_uploaded.append( - {"filename": source.name, "data": source.read()} - ) - return_data["success"] = True - except Exception: - return_data["files"].append( - { - "success": False, - "file_id": file_id, - "filename": item["filename"], - "message": "File is not a valid rar archive", - } - ) - continue - - elif fileformat.strip().lower() == "tar": - try: - with tarfile.open( - os.path.join(tmpdirname, "archive"), mode="r" - ) as z_file: - for member in z_file.getnames(): - member_files = z_file.extractfile(member) - to_be_uploaded.append( - { - "filename": member, - "data": member_files.read(), - } - ) - return_data["success"] = True - except Exception as e: - return_data["files"].append( - { - "success": False, - "file_id": file_id, - "filename": item["filename"], - "message": e, - } - ) - continue - elif fileformat.strip().lower() == "tar.gz": - try: - with tarfile.open( - os.path.join(tmpdirname, "archive"), mode="r:gz" - ) as z_file: - for member in z_file.getnames(): - member_files = z_file.extractfile(member) - to_be_uploaded.append( - { - "filename": member, - "data": 
member_files.read(), - } - ) - return_data["success"] = True - except Exception as e: - return_data["files"].append( - { - "success": False, - "file_id": file_id, - "filename": item["filename"], - "message": e, - } - ) - continue - - elif fileformat.strip().lower() == "7zip": - try: - with py7zr.SevenZipFile( - os.path.join(tmpdirname, "archive"), - mode="r", - password=password if password else None, - ) as z_file: - for filename, source in z_file.readall().items(): - # Removes paths - filename = filename.split("/")[-1] - to_be_uploaded.append( - { - "filename": item["filename"], - "data": source.read(), - } - ) - return_data["success"] = True - except Exception: - return_data["files"].append( - { - "success": False, - "file_id": file_id, - "filename": item["filename"], - "message": "File is not a valid 7zip archive", - } - ) - continue - else: - return "No such format: %s" % fileformat - - if len(to_be_uploaded) > 0: - return_ids = self.set_files(to_be_uploaded) - return_data["files"].append( - { - "success": True, - "file_id": file_id, - "filename": item["filename"], - "file_ids": return_ids, - } - ) - else: - return_data["files"].append( - { - "success": False, - "file_id": file_id, - "filename": item["filename"], - "message": "Archive is empty", - } - ) - - return return_data - - except Exception as excp: - return {"success": False, "message": "%s" % excp} - - def inflate_archive(self, file_ids, fileformat, name, password=None): - - try: - # TODO: will in future support multiple files instead of string ids? 
- file_ids = file_ids.split() - print("picking {}".format(file_ids)) - - # GET all items from shuffle - items = [self.get_file(file_id) for file_id in file_ids] - - if len(items) == 0: - return "No file to inflate" - - # Dump files on disk, because libs want path :( - with tempfile.TemporaryDirectory() as tmpdir: - paths = [] - print("Number 1") - for item in items: - with open(os.path.join(tmpdir, item["filename"]), "wb") as f: - f.write(item["data"]) - paths.append(os.path.join(tmpdir, item["filename"])) - - # Create archive temporary - print("{} items to inflate".format(len(items))) - with tempfile.NamedTemporaryFile() as archive: - - if fileformat == "zip": - archive_name = "archive.zip" if not name else name - pyminizip.compress_multiple( - paths, [], archive.name, password, 5 - ) - - elif fileformat == "7zip": - archive_name = "archive.7z" if not name else name - with py7zr.SevenZipFile( - archive.name, - "w", - password=password if len(password) > 0 else None, - ) as sz_archive: - for path in paths: - sz_archive.write(path) - - else: - return "Format {} not supported".format(fileformat) - - return_id = self.set_files( - [{"filename": archive_name, "data": open(archive.name, "rb")}] - ) - - if len(return_id) == 1: - # Returns the first file's ID - return {"success": True, "id": return_id[0]} - else: - return { - "success": False, - "message": "Upload archive returned {}".format(return_id), - } - - except Exception as excp: - return {"success": False, "message": excp} - - def add_list_to_list(self, list_one, list_two): - try: - list_one = json.loads(list_one) - except json.decoder.JSONDecodeError as e: - print("Failed to parse list1 as json: %s" % e) - return "List one is not a valid list: %s" % list_one - - try: - list_two = json.loads(list_two) - except json.decoder.JSONDecodeError as e: - print("Failed to parse list2 as json: %s" % e) - return "List two is not a valid list: %s" % list_two - - for item in list_two: - list_one.append(item) - - return list_one 
- - def diff_lists(self, list_one, list_two): - try: - list_one = json.loads(list_one) - except json.decoder.JSONDecodeError as e: - print("Failed to parse list1 as json: %s" % e) - - try: - list_two = json.loads(list_two) - except json.decoder.JSONDecodeError as e: - print("Failed to parse list2 as json: %s" % e) - - def diff(li1, li2): - return list(set(li1) - set(li2)) + list(set(li2) - set(li1)) - - return diff(list_one, list_two) - - def merge_lists(self, list_one, list_two, set_field="", sort_key_list_one="", sort_key_list_two=""): - try: - list_one = json.loads(list_one) - except json.decoder.JSONDecodeError as e: - print("Failed to parse list1 as json: %s" % e) - - try: - list_two = json.loads(list_two) - except json.decoder.JSONDecodeError as e: - print("Failed to parse list2 as json: %s" % e) - - if len(list_one) != len(list_two): - return {"success": False, "message": "Lists length must be the same. %d vs %d" % (len(list_one), len(list_two))} - - #result = json.loads(input_data) - print(list_one) - print(list_two) - print(set_field) - print("START: ") - - if len(sort_key_list_one) > 0: - print("Sort 1 %s by key: %s" % (list_one, sort_key_list_one)) - try: - list_one = sorted(list_one, key=lambda k: k.get(sort_key_list_one), reverse=True) - except: - print("Failed to sort list one") - pass - - if len(sort_key_list_two) > 0: - #print("Sort 2 %s by key: %s" % (list_two, sort_key_list_two)) - try: - list_two = sorted(list_two, key=lambda k: k.get(sort_key_list_two), reverse=True) - except: - print("Failed to sort list one") - pass - - for i in range(len(list_one)): - #print(list_two[i]) - if isinstance(list_two[i], dict): - for key, value in list_two[i].items(): - list_one[i][key] = value - elif isinstance(list_two[i], str) or isinstance(list_two[i], int) or isinstance(list_two[i], bool): - print("IN SETTER FOR %s" % list_two[i]) - if len(set_field) == 0: - return "Define a JSON key to set for List two (Set Field)" - - list_one[i][set_field] = list_two[i] - 
- return list_one - - def xml_json_convertor(self, convertto, data): - try: - if convertto == "json": - ans = xmltodict.parse(data) - json_data = json.dumps(ans) - return json_data - else: - ans = readfromstring(data) - return json2xml.Json2xml(ans, wrapper="all", pretty=True).to_xml() - except Exception as e: - return e - - def date_to_epoch(self, input_data, date_field, date_format): - - print( - "Executing with {} on {} with format {}".format( - input_data, date_field, date_format - ) - ) - - result = json.loads(input_data) - - # https://docs.python.org/3/library/datetime.html#strftime-strptime-behavior - epoch = datetime.datetime.strptime(result[date_field], date_format).strftime( - "%s" - ) - result["epoch"] = epoch - return result - - def compare_relative_date( - self, input_data, date_format, equality_test, offset, units, direction - ): - - if input_data == "None": - return False - - print("Converting input date.") - - if date_format != "%s": - input_dt = datetime.datetime.strptime(input_data, date_format) - else: - input_dt = datetime.datetime.utcfromtimestamp(float(input_data)) - - offset = int(offset) - if units == "seconds": - delta = datetime.timedelta(seconds=offset) - elif units == "minutes": - delta = datetime.timedelta(minutes=offset) - elif units == "hours": - delta = datetime.timedelta(hours=offset) - elif units == "days": - delta = datetime.timedelta(days=offset) - - utc_format = date_format - if utc_format.endswith("%z"): - utc_format = utc_format.replace("%z", "Z") - - if date_format != "%s": - formatted_dt = datetime.datetime.strptime( - datetime.datetime.utcnow().strftime(utc_format), date_format - ) - else: - formatted_dt = datetime.datetime.utcnow() - - print("Formatted time is: {}".format(formatted_dt)) - if direction == "ago": - comparison_dt = formatted_dt - delta - else: - comparison_dt = formatted_dt + delta - print("{} {} {} is {}".format(offset, units, direction, comparison_dt)) - - diff = (input_dt - comparison_dt).total_seconds() - 
print( - "Difference between {} and {} is {}".format(input_data, comparison_dt, diff) - ) - result = False - if equality_test == ">": - result = 0 > diff - if direction == "ahead": - result = not (result) - elif equality_test == "<": - result = 0 < diff - if direction == "ahead": - result = not (result) - elif equality_test == "=": - result = diff == 0 - elif equality_test == "!=": - result = diff != 0 - elif equality_test == ">=": - result = 0 >= diff - if direction == "ahead" and diff != 0: - result = not (result) - elif equality_test == "<=": - result = 0 <= diff - if direction == "ahead" and diff != 0: - result = not (result) - - print( - "At {}, is {} {} than {} {} {}? {}".format( - formatted_dt, - input_data, - equality_test, - offset, - units, - direction, - result, - ) - ) - - return result - - def run_math_operation(self, operation): - print("Operation: %s" % operation) - result = eval(operation) - return result - - def escape_html(self, input_data, field_name): - - mapping = json.loads(input_data) - print(f"Got mapping {json.dumps(mapping, indent=2)}") - - result = markupsafe.escape(mapping[field_name]) - print(f"Mapping {input_data} to {result}") - - mapping[field_name] = result - return mapping - - def get_cache_value(self, key): - org_id = self.full_execution["workflow"]["execution_org"]["id"] - url = "%s/api/v1/orgs/%s/get_cache" % (self.url, org_id) - data = { - "workflow_id": self.full_execution["workflow"]["id"], - "execution_id": self.current_execution_id, - "authorization": self.authorization, - "org_id": org_id, - "key": key, - } - - value = requests.post(url, json=data) - try: - allvalues = value.json() - print("VAL1: ", allvalues) - allvalues["key"] = key - print("VAL2: ", allvalues) - - try: - parsedvalue = json.loads(allvalues["value"]) - allvalues["value"] = parsedvalue - except: - print("Parsing of value as JSON failed") - pass - - return json.dumps(allvalues) - except: - print("Value couldn't be parsed, or json dump of value failed") - 
return value.text - - # FIXME: Add option for org only & sensitive data (not to be listed) - def set_cache_value(self, key, value): - org_id = self.full_execution["workflow"]["execution_org"]["id"] - url = "%s/api/v1/orgs/%s/set_cache" % (self.url, org_id) - data = { - "workflow_id": self.full_execution["workflow"]["id"], - "execution_id": self.current_execution_id, - "authorization": self.authorization, - "org_id": org_id, - "key": key, - "value": str(value), - } - - response = requests.post(url, json=data) - try: - allvalues = response.json() - allvalues["key"] = key - allvalues["value"] = str(value) - return json.dumps(allvalues) - except: - print("Value couldn't be parsed") - return response.text - - def convert_json_to_tags(self, json_object, split_value=", ", include_key=True, lowercase=True): - try: - json_object = json.loads(json_object) - except json.decoder.JSONDecodeError as e: - print("Failed to parse list2 as json: %s. Type: %s" % (e, type(json_object))) - - if isinstance(lowercase, str) and lowercase.lower() == "true": - lowercase = True - else: - lowercase = False - - if isinstance(include_key, str) or include_key.lower() == "true": - include_key = True - else: - include_key = False - - parsedstring = [] - for key, value in json_object.items(): - print("KV: %s:%s" % (key, value)) - if isinstance(value, str) or isinstance(value, int) or isinstance(value, bool): - if include_key == True: - parsedstring.append("%s:%s" % (key, value)) - else: - parsedstring.append("%s" % (value)) - else: - print("Can't handle type %s" % type(value)) - - fullstring = split_value.join(parsedstring) - if lowercase == True: - fullstring = fullstring.lower() - - return fullstring - - def cidr_ip_match(self, ip, networks): - print("Executing with\nIP: {},\nNetworks: {}".format(ip, networks)) - - try: - networks = json.loads(networks) - except json.decoder.JSONDecodeError as e: - print("Failed to parse networks list as json: {}. 
Type: {}".format( - e, type(networks) - )) - return "Networks is not a valid list: {}".format(networks) - - try: - ip_networks = list(map(ipaddress.ip_network, networks)) - ip_address = ipaddress.ip_address(ip) - except ValueError as e: - return "IP or some networks are not in valid format.\nError: {}".format(e) - - matched_networks = list(filter(lambda net: (ip_address in net), ip_networks)) - - result = {} - result['networks'] = list(map(str, matched_networks)) - result['is_contained'] = True if len(result['networks']) > 0 else False - - return json.dumps(result) - -if __name__ == "__main__": - Tools.run() diff --git a/shuffle-tools/1.1.0/Dockerfile b/shuffle-tools/1.1.0/Dockerfile deleted file mode 100644 index 5c1a8af4..00000000 --- a/shuffle-tools/1.1.0/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# Base our app image off of the WALKOFF App SDK image -FROM frikky/shuffle:app_sdk as base - -# We're going to stage away all of the bloat from the build tools so lets create a builder stage -FROM base as builder - -# Install all alpine build tools needed for our pip installs -RUN apk --no-cache add --update alpine-sdk libffi libffi-dev musl-dev openssl-dev git - -# Install all of our pip packages in a single directory that we can copy to our base image later -RUN mkdir /install -WORKDIR /install -COPY requirements.txt /requirements.txt -RUN pip install --no-cache-dir --prefix="/install" -r /requirements.txt - -# Switch back to our base image and copy in all of our built packages and source code -FROM base -COPY --from=builder /install /usr/local -COPY src /app - -# Install any binary dependencies needed in our final image -# RUN apk --no-cache add --update my_binary_dependency -RUN apk --no-cache add jq git curl - -# Finally, lets run our app! 
-WORKDIR /app -CMD ["python", "app.py", "--log-level", "DEBUG"] diff --git a/shuffle-tools/1.1.0/api.yaml b/shuffle-tools/1.1.0/api.yaml deleted file mode 100644 index 464c1d02..00000000 --- a/shuffle-tools/1.1.0/api.yaml +++ /dev/null @@ -1,950 +0,0 @@ ---- -app_version: 1.1.0 -name: Shuffle Tools -description: A tool app for Shuffle. Gives access to most missing features along with Liquid. -tags: - - Testing - - Shuffle -categories: - - Testing - - Shuffle -contact_info: - name: "@frikkylikeme" - url: https://shuffler.io - email: frikky@shuffler.io -actions: - - name: repeat_back_to_me - description: Repeats the call parameter - parameters: - - name: call - description: The message to repeat - required: true - multiline: true - example: "REPEATING: Hello world" - schema: - type: string - returns: - schema: - type: string - - name: router - description: Reroutes information between different nodes - returns: - schema: - type: string - - name: check_cache_contains - description: Checks Shuffle cache whether a user-provided key contains a value. Returns ALL the values previously appended. - parameters: - - name: key - description: The key to get - required: true - multiline: false - example: "alert_ids" - schema: - type: string - - name: value - description: The value to check for and append if applicable - required: true - multiline: false - example: "1208301599081" - schema: - type: string - - name: append - description: Whether to auto-append the value if it doesn't exist in the cache - required: true - options: - - true - - false - multiline: false - example: "timestamp" - schema: - type: string - - name: get_cache_value - description: Get a value saved to your organization in Shuffle - parameters: - - name: key - description: The key to get - required: true - multiline: false - example: "timestamp" - schema: - type: string - returns: - schema: - type: string - - name: set_cache_value - description: Set a value to be saved to your organization in Shuffle. 
- parameters: - - name: key - description: The key to set the value for - required: true - multiline: false - example: "timestamp" - schema: - type: string - - name: value - description: The value to set - required: true - multiline: true - example: "1621959545" - schema: - type: string - returns: - schema: - type: string - - name: send_sms_shuffle - description: Send an SMS from Shuffle - parameters: - - name: apikey - description: Your https://shuffler.io organization apikey - multiline: false - example: "https://shuffler.io apikey" - required: true - schema: - type: string - - name: phone_numbers - description: The receivers of the SMS - multiline: false - example: "+4741323535,+8151023022" - required: true - schema: - type: string - - name: body - description: The SMS to add to the numbers - multiline: true - example: "This is an alert from Shuffle :)" - required: true - schema: - type: string - returns: - schema: - type: string - - name: send_email_shuffle - description: Send an email from Shuffle - parameters: - - name: apikey - description: Your https://shuffler.io organization apikey - multiline: false - example: "https://shuffler.io apikey" - required: true - schema: - type: string - - name: recipients - description: The recipients of the email - multiline: false - example: "test@example.com,frikky@shuffler.io" - required: true - schema: - type: string - - name: subject - description: The subject to use - multiline: false - example: "SOS this is an alert :o" - required: true - schema: - type: string - - name: body - description: The body to add to the email - multiline: true - example: "This is an email alert from Shuffler.io :)" - required: true - schema: - type: string - - name: attachments - description: The ID of files in Shuffle to add as attachments - multiline: true - example: "file_id1,file_id2,file_id3" - required: false - schema: - type: string - returns: - schema: - type: string - - name: filter_list - description: Takes a list and filters based 
on your data - skip_multicheck: true - parameters: - - name: input_list - description: The list to check - required: true - multiline: false - example: '[{"data": "1.2.3.4"}, {"data": "1.2.3.5"}]' - schema: - type: string - - name: field - description: The field to check - required: false - multiline: false - example: "data" - schema: - type: string - - name: check - description: Type of check - required: true - example: "equals" - options: - - equals - - 'larger than' - - 'less than' - - is empty - - contains - - contains any of - - starts with - - ends with - - field is unique - - files by extension - schema: - type: string - - name: value - description: The value to check with - required: false - multiline: false - example: "1.2.3.4" - schema: - type: string - - name: opposite - description: Whether to add or to NOT add - required: true - options: - - False - - True - multiline: false - example: "false" - schema: - type: string - returns: - schema: - type: string - #- name: multi_list_filter - # description: Takes a list and filters based on your data - # skip_multicheck: true - # parameters: - # - name: input_list - # description: The list to check - # required: true - # multiline: false - # example: '[{"data": "1.2.3.4"}, {"data": "1.2.3.5"}]' - # schema: - # type: string - # - name: field - # description: The field to check - # required: true - # multiline: false - # example: "data" - # schema: - # type: string - # - name: check - # description: Type of check - # required: true - # example: "equals,equals" - # schema: - # type: string - # - name: value - # description: The value to check with - # required: true - # multiline: false - # example: "1.2.3.4" - # schema: - # type: string - # returns: - # schema: - # type: string - - name: parse_ioc - description: Parse IOC's based on https://github.com/fhightower/ioc-finder - parameters: - - name: input_string - description: The string to check - required: true - multiline: true - example: 
"123ijq192.168.3.6kljqwiejs8 https://shuffler.io" - schema: - type: string - - name: input_type - description: The string to check - required: false - multiline: false - example: "md5s" - schema: - type: string - returns: - schema: - type: string - - name: parse_file_ioc - description: Parse IOC's based on https://github.com/fhightower/ioc-finder - parameters: - - name: file_ids - description: The shuffle file to check - required: true - multiline: false - schema: - type: string - - name: input_type - description: The string to check - required: false - multiline: false - example: "md5s" - schema: - type: string - returns: - schema: - type: string - - name: translate_value - description: Takes a list of values and translates it in your input data - parameters: - - name: input_data - description: The input data to use - required: true - multiline: true - example: Hello this is an md5 - schema: - type: string - - name: translate_from - description: The source items to look for - required: true - multiline: false - example: sha256,md5,sha1 - schema: - type: string - - name: translate_to - description: The destination data to change to - required: true - multiline: true - example: hash - schema: - type: string - - name: else_value - description: The value to set if it DOESNT match. Default to nothing. - required: false - multiline: false - example: - schema: - type: string - returns: - schema: - type: string - - name: map_value - description: Takes a mapping dictionary and translates the input data. This is a search and replace for multiple fields. 
- parameters: - - name: input_data - description: The input data to use - required: true - multiline: true - example: $exec.field1 - schema: - type: string - - name: mapping - description: The mapping dictionary - required: true - multiline: true - example: | - { - "Low": 1, - "Medium": 2, - "High": 3, - } - schema: - type: string - returns: - schema: - type: string - - name: regex_capture_group - description: Returns objects matching the capture group(s) - parameters: - - name: input_data - description: The input data to use - required: true - multiline: true - example: This is some text a domain that is with.com - schema: - type: string - - name: regex - description: Your regular expression - multiline: false - example: "some text <[a-zA-Z0-9.]+> a domain" - required: true - schema: - type: string - returns: - schema: - type: string - - name: regex_replace - description: Replace all instances matching a regular expression - parameters: - - name: input_data - description: The input data to use - required: true - multiline: true - example: This is some text a domain that is with.com - schema: - type: string - - name: regex - description: Your regular expression - multiline: false - example: "some text <[a-zA-Z0-9.]+> a domain" - required: true - schema: - type: string - - name: replace_string - description: Replacement string (capture groups with \1 \2) - multiline: true - example: "some text a domain" - required: false - schema: - type: string - - name: ignore_case - description: "Make regex case insensitive (Default: False)" - multiline: false - options: - - false - - true - example: "False" - required: false - schema: - type: string - returns: - schema: - type: string - - name: parse_list - description: Parses a list and returns it as a json object - parameters: - - name: items - description: List of items - required: true - multiline: true - example: shuffler.io,test.com,test.no - schema: - type: string - - name: splitter - description: The splitter to use - 
required: false - multiline: false - example: "," - schema: - type: string - returns: - schema: - type: string - - name: execute_bash - description: Runs bash with the data input - parameters: - - name: code - description: The code to run - required: true - multiline: true - example: echo "Hello" - schema: - type: string - - name: shuffle_input - description: Alternative data to add - required: false - multiline: true - example: '{"data": "Hello world"}' - schema: - type: string - - name: execute_python - description: Runs python with the data input. Any prints will be returned. - parameters: - - name: code - description: The code to run. Can be a file ID from within Shuffle. - required: true - multiline: true - example: print("hello world") - schema: - type: string - - name: get_file_value - description: This function is made for reading file(s), printing their data - parameters: - - name: filedata - description: The files - required: true - multiline: false - example: "a2f89576-a9ec-479e-8c83-da69f468c90a" - schema: - type: string - returns: - schema: - type: string - - name: create_file - description: Returns uploaded file data - parameters: - - name: filename - description: - required: true - multiline: false - example: "test.csv" - schema: - type: string - - name: data - description: - required: true - multiline: true - example: "EventID,username\n4137,frikky" - schema: - type: string - - name: download_remote_file - description: Downloads a file from a URL - parameters: - - name: url - description: - required: true - multiline: false - example: "https://secure.eicar.org/eicar.com.txt" - schema: - type: string - returns: - schema: - type: string - - name: get_file_meta - description: Gets the file meta - parameters: - - name: file_id - description: - required: true - multiline: false - example: "" - schema: - type: string - returns: - schema: - type: string - - name: delete_file - description: Deletes a file based on ID - parameters: - - name: file_id - 
description: - required: true - multiline: false - example: "Some data to put in the file" - schema: - type: string - returns: - schema: - type: string - - name: extract_archive - description: Extract compressed files, return file ids - parameters: - - name: file_ids - description: - required: true - multiline: false - schema: - type: string - - name: fileformat - description: - required: true - multiline: false - options: - - zip - - rar - - 7zip - - tar - - tar.gz - schema: - type: string - - name: password - description: - required: false - multiline: false - schema: - type: string - returns: - schema: - type: string - - name: inflate_archive - description: Compress files in archive, return archive's file id - parameters: - - name: file_ids - description: - required: true - multiline: true - schema: - type: string - - name: fileformat - description: - required: true - multiline: false - options: - - zip - - 7zip - schema: - type: string - - name: name - description: - required: false - multiline: false - schema: - type: string - - name: password - description: - required: false - multiline: false - schema: - type: string - returns: - schema: - type: string - - name: xml_json_convertor - description: Converts xml to json and vice versa - parameters: - - name: convertto - required: true - multiline: false - options: - - json - - xml - schema: - type: string - - name: data - description: - required: true - multiline: false - example: 'xml data / json data' - schema: - type: string - returns: - schema: - type: string - - name: date_to_epoch - description: Converts a date field with a given format to an epoch time - parameters: - - name: input_data - description: The input data to use - required: true - multiline: true - example: 2010-11-04T04:15:22.123Z - schema: - type: dict - - name: date_field - description: The field containing the date to parse - required: true - multiline: false - example: currentDateTime - schema: - type: string - - name: date_format - # 
yamllint disable-line rule:line-length - description: The datetime format of the field to parse (strftime format). - required: true - multiline: false - example: '%Y-%m-%dT%H:%M:%s.%f%Z' - schema: - type: string - returns: - schema: - type: dict - - name: compare_relative_date - # yamllint disable-line rule:line-length - description: Compares an input date to a relative date and returns a True/False result - parameters: - - name: input_data - description: The input data to use - required: true - multiline: true - example: 2010-11-04T04:15:22.123Z - schema: - type: string - - name: date_format - description: The format of the input date field (strftime format) - required: true - multiline: false - example: '%Y-%m-%dT%H:%M:%S.%f%Z' - options: - - '%Y-%m-%dT%H:%M%z' - - '%Y-%m-%dT%H:%M:%SZ' - - '%Y-%m-%dT%H:%M:%S%Z' - - '%Y-%m-%dT%H:%M:%S%z' - - '%Y-%m-%dT%H:%M:%S.%f%z' - - '%Y-%m-%d' - - '%H:%M:%S' - - '%s' - schema: - type: string - - name: equality_test - description: How to compare the input date and offset date - required: true - multiline: false - example: '>' - options: - - '>' - - '<' - - '=' - - '!=' - - '>=' - - '<=' - schema: - type: string - - name: offset - description: Numeric offset from current time - required: true - multiline: false - example: 60 - schema: - type: string - - name: units - description: The units of the provided value - required: true - multiline: false - example: 'seconds' - options: - - seconds - - minutes - - hours - - days - schema: - type: string - - name: direction - description: Whether the comparison should be in the past or future - required: true - multiline: false - example: 'ago' - options: - - ago - - ahead - schema: - type: string - returns: - schema: - type: strings - - name: add_list_to_list - description: Adds items of second list (list_two) to the first one (list_one). Can also append a single item (dict) to a list. 
- parameters: - - name: list_one - description: The first list - multiline: true - example: "{'key': 'value'}" - required: true - schema: - type: string - - name: list_two - description: The second list to use - multiline: true - required: true - example: "{'key2': 'value2'}" - schema: - type: string - - name: merge_lists - description: Merges two lists of same type AND length. - parameters: - - name: list_one - description: The first list - multiline: true - example: "{'key': 'value'}" - required: true - schema: - type: string - - name: list_two - description: The second list to use - multiline: true - required: true - example: "{'key2': 'value2'}" - schema: - type: string - - name: set_field - description: If items in list 2 are strings, but first is JSON, sets the values to the specified key. Defaults to key "new_shuffle_key" - required: false - example: "json_key" - schema: - type: string - - name: sort_key_list_one - description: Sort by this key before using list one for merging - required: false - example: "json_key" - schema: - type: string - - name: sort_key_list_two - description: Sort by this key before using list two for merging - required: false - example: "json_key" - schema: - type: string - - name: diff_lists - description: Diffs two lists of strings or integers and finds what's missing - parameters: - - name: list_one - description: The first list - multiline: true - example: "{'key': 'value'}" - required: true - schema: - type: string - - name: list_two - description: The second list to use - multiline: true - required: true - example: "{'key2': 'value2'}" - schema: - type: string - - name: set_json_key - description: Adds a JSON key to an existing object - parameters: - - name: json_object - description: The object to edit - multiline: true - example: "recipients" - required: true - schema: - type: string - - name: key - description: The object to add - multiline: false - example: "recipients" - required: true - schema: - type: string - - name: 
value - description: The value to set it to in the JSON object - multiline: true - required: true - example: "frikky@shuffler.io" - schema: - type: string - - name: delete_json_keys - description: Deletes keys in a json object - parameters: - - name: json_object - description: The object to edit - multiline: true - example: "{'key': 'value', 'key2': 'value2', 'key3': 'value3'}" - required: true - schema: - type: string - - name: keys - description: The key(s) to remove - multiline: true - required: true - example: "key, key3" - schema: - type: string - - name: convert_json_to_tags - description: Creates key:value pairs and - parameters: - - name: json_object - description: The object to make into a key:value pair - multiline: true - example: "{'key': 'value', 'key2': 'value2', 'key3': 'value3'}" - required: true - schema: - type: string - - name: split_value - description: The way to split the values. Defaults to comma. - multiline: false - required: false - example: "," - schema: - type: string - - name: include_key - description: Whether it should include the key or not - options: - - true - - false - schema: - type: string - - name: lowercase - description: Whether it should be lowercase or not - options: - - true - - false - schema: - type: string - - name: run_math_operation - description: Takes a math input and gives you the result - parameters: - - name: operation - description: The operation to perform - required: true - multiline: true - example: "5+10" - schema: - type: string - returns: - schema: - type: string - - name: escape_html - description: Performs HTML escaping on a field - parameters: - - name: input_data - description: The input data to use - required: true - multiline: true - example: $exec.field1 - schema: - type: string - - name: field_name - description: The field to HTML escape - required: true - multiline: true - example: my_unsafe_field - schema: - type: string - returns: - schema: - type: string - - name: base64_conversion - 
description: Encode or decode a Base64 string - parameters: - - name: string - description: string to process - multiline: true - example: "This is a string to be encoded" - required: true - schema: - type: string - - name: operation - description: Choose to encode or decode the string - example: "encode" - required: true - options: - - encode - - decode - schema: - type: string - - name: get_timestamp - description: Gets a timestamp for right now. Default returns an epoch timestamp - parameters: - - name: time_format - description: The format to use - multiline: false - required: True - options: - - epoch - - unix - schema: - type: string - returns: - schema: - type: string - - name: get_hash_sum - description: Returns multiple formats of hashes based on the input value - parameters: - - name: value - description: The value to hash - multiline: false - example: "1.1.1.1" - required: True - schema: - type: string - returns: - schema: - type: string - - name: cidr_ip_match - description: Check if an IP is contained in a CIDR defined network - parameters: - - name: ip - description: IP to check - multiline: false - example: "1.1.1.1" - required: True - schema: - type: string - - name: networks - description: List of network in CIDR format - multiline: true - required: true - example: "['10.0.0.0/8', '192.168.10.0/24']" - schema: - type: string - returns: - schema: - type: string - -# yamllint disable-line rule:line-length -large_image: 
data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAK4AAACuCAYAAACvDDbuAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAAAAB3RJTUUH5AgXDjM6hEZGWwAAD+lJREFUeNrtXb/vJTcRH7/v3iVBCqRBiCAQAtHwq4AWRElHwX8AoqbmXwDRpiH/QyQkGoogUSAhKIKUAE1IdSRSREhQQk7c3XtD8X55vePxjNfe3bk3H+nu+96uPf54POtnj8fe8OQX30JwOIxhtzYBh6MGOsPF0z9p2iWwpd8LjX6W5vWUYaiqlBuvLT5b5TQDPlRwmMSAABBg+kCer+XuAeQf4tL9tAxJ/hIfZGSm8rhyEfjytfxr9FeSX+KjvVfipNVpWlaPNhsAEPCS7Ao8FYnRlbO4ksLnjiSQvIanv4FNjwJ5pXIlMq6MQpIqqPnQKQKbjuPDtZlG55o6UHXWtVncZZTbbNBVB1P5dJYguCbJJ1WjOG8PVOioSm5HPrVt1rwuyN+K+PSZnNV1M/MmEFubfFjjU9tmK9XBJ2cOk3DDdZiEG67DJOrGuA7HyvAe12ESAxa73KPrN1z8gUikCCdvcD5NXnpQpA8nNhh9m5Yn4ZMrV8dHV/8a/dRA0x419a3lI9GBtM2GcrGYFXRNUU5TyluTOpdXwqeUt6YOpby9DUTLZylOcRlzdBTf2yV3ZBFOmKSHQh5KpjSSSpqG4s6VkUubqw8W8knTSnWk0Y+2jF5tlmuDUloJn6T8gRVcEpJ+3srChHSNt8RJsq4p+S41LC13KTcu/RJt1pLPKY1Pzhwm4YbrMAk3XIdJTMe4aeCjJhBVk0YiQ1MWZHhLgmO5QNVWfKRlavlIIQnurQmcnaMjSbBxhtMwYUxODpLcl2tUhvPlNE6VkiuoFVLXKT6ZfBjxRIIzOSlgWpLSB8uZ0g3BjeVDlFGEos0mfKKL7CQrY2ES7pM2i/OX22w4/sWReEhEnUOTxx3a+FrawQGZh04/rWe6oJBKo5zT4zLjPHE9ZHym5YzToogzfQcmfLgOhuLF/Sjm2izVDyXnrKtcmmmdaKumf+RyCw5Xn7OmzQaJF0fiEZG6BjXpYUYaSVkaPrXeHe4eVaZEr3Prqrmmrbc2T8lrmOMjn5xJHeJLYkk+PfzNTxOflrwF0EeHbU0Zt2wsW+PTkncB7g5zmMSwzUfS4eDhPa7DJK5jXGorsnZxonbRIbeAoOUjkUvlp+qxFp9YNuWL0nBqsVCkqUsrHQnuX+Nx5/qcJDI0kWgtJh7ihYCN8aG+13DqOXlbWUfD+fN0AUEmp3RcUWlVEwCynb5ssYLnxHViJT6ULCykb8EnzUfpqBWfVAdcnt5tprGhIe10WnjHpB2FtMPWcpM66yXyOad4Lz4Srq34SHhwZfRos1w9Y/jkzGESvj3dYRLe4zpMwg3XYRJuuA6T4M/Hzfk/OGd9OP2HOE2f8wtBlCebJrkfp+Gc3AGmiSiuaVlpwkmajL4osPUm9FMqIzBOJolfjGuzEtdUwWl53Dm7Eh9pzIdps+FiYJyi1N+Rvs/6OLCQBul8Ip8R08ik3EwhLZz1Wv8XmU7ZZqX7OT2gUIB2oaRBm+2ovDm5nM+ulEeiD8yka8UnJ1PCP82r9YWW8iCU5XO8W/PhPmvllNKW7lEyszsgNKuzkspJFZFL15uPtIweq7A1xiKpz1J8tGXP+dE53/fJmcMk6hcgJO8XqokEKi5uYzTG29LqSev95JqyKsoOOxjNpKQBD7VFc5GBJRsi+NQHkkv6+7m/UxTufwLCCy+CbAruyOLDdwEf/uf6vbbNJukzlogZC6wMdhAcM7ohHPawe/GrcO+HPwe4u782G7sIAE9++0vYv/YKwO6usfCaka0etgwXAGB3D8JznwIYnlmbiW0M92FbQy0d+MmZ3X
o5JDDcvuXJ2ZYqtyUuTwuM6nSXctcufHCOZqkjPScXhbIcdeD0XUpfKyNNy8nlyhuozLkM8XxR6pjm7tc4Fdx620I7lWq10JCm0ZanWoBwm3FsBe1WznpadbTg4A9PI2xx7FUKHopQjg7TKqNnpbioIUcFUGUsy1CS8fFYBYdJuOE6TMIN12ESgyiKiwO1bQOJe1w+6p42Etmhwmi6kLZXfC2G9IUj2vulY2wIPrv4onRhIXcRqS0DiWxkhF0uIb37wG22LRCSuVCyekC2GSXj9CG3YyT+krWh+KPAhkTvgGDKqbqnWbBwY+2Pnm3Wy4aMRYc1MuPDvp0skwgAh8PaJGbh5k4kx0f/hce/ewnw/QenXQCTFJDfQy45PzFNn5NHsoPy/u6gzE+nObzz91P9Z+6kWAm2zg6bDMoq8OQxHN78Axze/htAaB1EbQhhdzyfgRqIGoCxoUIjhDuA3ZDpcR0W4C3nMInbNVw7v4oOAsehArVFPL0uOjMM+DlM+pk7t7/BDuwcJsM6gcM7WweOX05nFCHNi12ASRfLo3QaX9O0GWTylOTnZIMwf4YPPTlD4iMm7aZwAGOUf3Rf48wjHNzVOMkKFA8pp0RHZ1mjdihs5R61PWbsWlphgs/E5gptNvFfSLY8QPk7dVbh+UNg8qfnJsZ8Bo0hzF0Y2Nqvc0s+Vbs5YL5OLfPRcorT2hvjtuxyHWZhzHCX6AMcFtB2B0RvtKZqqe6OEYz1uA7HEbdruN7ZmsZtGq4brXnQhlsbLFkDrY9mC9giH41/dSlONfeEIBcgss7nXopInPdkYN95J3XD1bMgkJUNFOxsDNLgyiynhYyX5dnAhnLyhzmO4V7IO8+xyZEgx5UqvJ41rOUTdhBOr2w6KjZc+B1FBkLGVUoAABQEcmPu6rPPw73v/gh2n/wMANYEhAd4/NqvYf/Wn5pEyPW2IUrOzQWSHyHdkEJgN8D97/0Edp/7GgDu9fnDDvD9t+HRqy8BPvxQ9i6xEXUEuPcMDF//Puw+/aVqDewfvA77f/zx9M40e7jNeNw5CDu4++K34e4r36kWcXj3TYDfvwz8D79ml1clDPuxx9FhuUik0rblVihFWLX+7ZFEXE2ioLBNg9fUSRopVsOjJbioskZlDuyAvmflpOWsOUNu/cBQ8jW/1A0np11RG+GjwG36cQHqFWnBcG4Axgx37d/I1uXXcvCnx6BXoQXf3mOAzvVpooJzaOcWdKBH1fZ07dCsFZpNgmfZbaOJ2dxnpwkNFC3C9MBcGxo0OugxwV8LWKm5lg9sFQdszKGhLAla2dCuduuOZcypx+UXdk0OK5e/hXKNTc4cjiPGhtvTX1njI6Z2+vbuKtaKspLooXdkXs1u5yUR7/LdROMsraSSIfTa6pqWodE9Mvla6sCI8d7uUMEXIEzjdg3XYRr2osOePIbDR+9BGO7re78QAD/+AODwpK5sBDg6dGyGAtL1sYnLGDe3+2BNTNycYQf7B2/Aw5d/XB9HejjA4YN3jgHUNQ132MOTv/wG9v98A+CgFBCO/+FH/wJ89PBaSY1OULZzQyQL2skayVwg/7Dk3Ky2IlcEgEcfw/7dt+YJnRP1f9jDoz+/AvM0FU4c1u8mes59e+ZXDhXmPE+tForD+lH73Q6EluiozfaldnzWQUWQzdprPk87lg44nkTKN+DT/10S7lW4VYz8wWucOTAPtl5e4mgfjmu0/b3HdZiEG67DJNxwbxlGhwkAuZeXAJS3Qpfemq7dds1tS5dsbc6dAyQpS5uGe+lKrJLSGUqlCb2GcwUuCxBzt71T2/g7t9mQniofv0yjWOtMYdSLM6Sy0pd5iLdFSQtUyiJtRnjmGOdhqq5bo5WzUXAYzns2Lu2tjaqb0WaTHRBrR9cvEVG4VF3WkLsGnzXqohzjbk3dt4hG/jDDxy8BLL5y5miBZi1wa9vT14dJ0o2qft6/1GhQZ1SV9uJxd3cQ7j+XD7RJ40JK38/XAPKz4ly+OG
+KwOTDwn0uDSKEZ58/vgH+hmHLcA97uPvCN+G5H/wMoCaQ/KkAAtzdg/DCZ9cmsipsGS4ce5u7z38DYHhmbTL2YfjBH28DOM80s+MoxllVvfkwKudSbiL0dB0NTya2iGpNYmIzl+/EdexjQ8PEGE4FhdPHMAlbLhcsdWaPnfDEAxQJnbx53TEPJ51j3N7CrEfbSNt+arzXt57X2RBx94LsUGHOGRQtF7Fa8HFQQOabJmc5XQ8b8iAbh0mYNFzvdefD+nRhyPowqWitc2VbRyutGCF18+ilU2mEXWX51zFuKbqlZ/RLy0gixzagiS6sgL2hghuwAywarsMBxgzXO9u2sBzZWHwHRLwrQ5rWYQBIfuwCKnZJEpvEYSg9dRoncnejtdxFbBRLqFQzr5fSudH3nDmOaH26yHIwNcZ1NIZNmwWArYU1Fg8HDLB/7wH879VfAey2Rd0a9g/+2ubUyZUOdAz//umXjT136GPd2cDNnM9bC4Pd1gbOx3WsDh/jOkzCDddhEpcjmKiFhvGLQwDitJNrYTz05H7MS+N56hiq0mbYCfeIj2STb2s+cSJEOrguJ4fScaneOW7kOWZJm4VCmaPFg8wKgcSGuLpzR49Rerm8vIRaaECgvyB1Tbl9qOZoMiykHeVhVoZKwW9N+CSJuPwsH4YY12aTa5TxYyZPpsxSDG/Rhgp1lyxUnK/7UMFhEm64DpNIlnzTAdXcsJml8rdO1yt/K+R45EJUluS9zHaWITuQJb9rsVT+HvuKe+RvhdIIcE3ey4Rj+VDBYRJuuA6TcMN1mMT15SWMZ5h10Oc86+dr50s14QWch7rEh5PHef+psgsyqB0iI2e+hE+pDlpvvkQ/uVUMDfdSnTq12TA58injFUdOMPB5AeiALtHcUrstXrqSINnaoVjxyE5ra1ZipHMsTV2kMiQ8NDw7tdmqQ4WtzNEd9uBjXIdJuOE6TMLoy0sct46KHndNS6d2pW5tp+rW+Jw5rVl2qpP5Oqrcnr52w9RMgbfA8db5tAsp8DGuwyTaGW6DB7ppn9CCzxKnvKz9Kz7j/prUi0cwqQLQDBtvrp5uvMc/Wf00oFAT5FjscbcwMloCt1LPWvTUT41sH+M6TMIN12ESw3UPd8gPtrh7JeTyXvZGn0KD0jSlMms5Sfhw92vkUvXT5tPWt3WbSfjMsSFl3ujlJdy+4xkjnFze+PWrNWXWclqaT6t82vq2bjMJnzk2pMzrQwWHSbjhOkzCDdchxpZchpezwySQvHhiyVMLevPRctXwqeWmfcv5GaVTGKRy557YIHnhpETeoCl05grhbPlL89HK1vCp5darvZbgo+XEwYcKDpNww3WYxC6/U5PY5oun66MzPHH8L05PpqHKghn+TpjyictkZQLPh4u6yeknvXeWU+JD6TDHJ/cbn93Bi8nnDKdJm8EG2+zIZwBudlbjUOYOpj1frClPwyf3OZuXuaEx3lgWZixKxIfZ911rvJO65PRFVmZjbYY+VHDYhBuuwyTccB0mcdkB0cr5z70pW/pm7Bo+LesgqUsrPjVye9WXkqld8FiizRCi6LBWjmTRPGGG/JZ5ejvoa1ai1qwvlWarbeZDBYdJuOE6TKKP4W7xJdFb4+R8ZvH5P852gxhpwOZ9AAAAJXRFWHRkYXRlOmNyZWF0ZQAyMDIwLTA4LTIzVDE0OjUyOjAwKzAyOjAwetRgVgAAACV0RVh0ZGF0ZTptb2RpZnkAMjAyMC0wOC0yM1QxNDo1MTo1OCswMjowMJuxI+oAAAAASUVORK5CYII= diff --git a/shuffle-tools/1.1.0/docker-compose.yml b/shuffle-tools/1.1.0/docker-compose.yml deleted file mode 100644 index e48c6b2f..00000000 --- a/shuffle-tools/1.1.0/docker-compose.yml +++ /dev/null @@ 
-1,15 +0,0 @@ -version: '3.4' -services: - shuffle-tools: - build: - context: . - dockerfile: Dockerfile -# image: walkoff_registry:5000/walkoff_app_HelloWorld-v1-0 - deploy: - mode: replicated - replicas: 10 - restart_policy: - condition: none - restart: "no" - secrets: - - secret1 diff --git a/shuffle-tools/1.1.0/requirements.txt b/shuffle-tools/1.1.0/requirements.txt deleted file mode 100644 index 1ac4003c..00000000 --- a/shuffle-tools/1.1.0/requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -ioc_finder==6.0.1 -py7zr==0.11.3 -rarfile==4.0 -pyminizip==0.2.4 -requests==2.25.1 -xmltodict==0.11.0 -json2xml==3.6.0 -ipaddress==1.0.23 diff --git a/shuffle-tools/1.1.0/src/app.py b/shuffle-tools/1.1.0/src/app.py deleted file mode 100644 index 6da2f2aa..00000000 --- a/shuffle-tools/1.1.0/src/app.py +++ /dev/null @@ -1,1810 +0,0 @@ -import asyncio -import datetime -import json -import time -import markupsafe -import os -import re -import subprocess -import tempfile -import zipfile -import base64 -import ipaddress -import hashlib -from io import StringIO -from contextlib import redirect_stdout -from liquid import Liquid -import liquid - -import py7zr -import pyminizip -import rarfile -import requests -import tarfile - -import xmltodict -from json2xml import json2xml -from json2xml.utils import readfromstring - -from ioc_finder import find_iocs -from walkoff_app_sdk.app_base import AppBase - -import binascii -import struct - -class Tools(AppBase): - """ - An example of a Walkoff App. - Inherit from the AppBase class to have Redis, logging, and console - logging set up behind the scenes. - """ - - __version__ = "1.1.0" - app_name = ( - "Shuffle Tools" # this needs to match "name" in api.yaml for WALKOFF to work - ) - - def __init__(self, redis, logger, console_logger=None): - """ - Each app should have this __init__ to set up Redis and logging. 
- :param redis: - :param logger: - :param console_logger: - """ - super().__init__(redis, logger, console_logger) - - def router(self): - return "This action should be skipped" - - def base64_conversion(self, string, operation): - if operation == "encode": - encoded_bytes = base64.b64encode(string.encode("utf-8")) - encoded_string = str(encoded_bytes, "utf-8") - return encoded_string - - elif operation == "decode": - try: - decoded_bytes = base64.b64decode(string) - try: - decoded_bytes = str(decoded_bytes, "utf-8") - except: - pass - - return decoded_bytes - except Exception as e: - #return string.decode("utf-16") - - self.logger.info(f"[WARNING] Error in normal decoding: {e}") - return { - "success": False, - "reason": f"Error decoding the base64: {e}", - } - #newvar = binascii.a2b_base64(string) - #try: - # if str(newvar).startswith("b'") and str(newvar).endswith("'"): - # newvar = newvar[2:-1] - #except Exception as e: - # self.logger.info(f"Encoding issue in base64: {e}") - #return newvar - - #try: - # return newvar - #except: - # pass - - return { - "success": False, - "reason": "Error decoding the base64", - } - - return json.dumps({ - "success": False, - "reason": "No base64 to be converted", - }) - - def parse_list(self, input_list): - try: - input_list = json.loads(input_list) - if isinstance(input_list, list): - input_list = ",".join(input_list) - else: - return json.dumps(input_list) - except: - pass - - input_list = input_list.replace(", ", ",", -1) - return input_list - - # This is an SMS function of Shuffle - def send_sms_shuffle(self, apikey, phone_numbers, body): - phone_numbers = self.parse_list(phone_numbers) - - targets = [phone_numbers] - if ", " in phone_numbers: - targets = phone_numbers.split(", ") - elif "," in phone_numbers: - targets = phone_numbers.split(",") - - data = {"numbers": targets, "body": body} - - url = "https://shuffler.io/api/v1/functions/sendsms" - headers = {"Authorization": "Bearer %s" % apikey} - return 
requests.post(url, headers=headers, json=data).text - - # This is an email function of Shuffle - def send_email_shuffle(self, apikey, recipients, subject, body, attachments=""): - recipients = self.parse_list(recipients) - - - targets = [recipients] - if ", " in recipients: - targets = recipients.split(", ") - elif "," in recipients: - targets = recipients.split(",") - - data = { - "targets": targets, - "subject": subject, - "body": body, - "type": "alert", - } - - # Read the attachments - if attachments != None and len(attachments) > 0: - try: - attachments = parse_list(attachments, splitter=",") - files = [] - for item in attachments: - new_file = self.get_file(file_ids) - files.append(new_file) - - data["attachments"] = files - except Exception as e: - self.logger.info(f"Error in attachment parsing for email: {e}") - - - url = "https://shuffler.io/api/v1/functions/sendmail" - headers = {"Authorization": "Bearer %s" % apikey} - return requests.post(url, headers=headers, json=data).text - - def repeat_back_to_me(self, call): - return call - - # https://github.com/fhightower/ioc-finder - def parse_file_ioc(self, file_ids, input_type="all"): - def parse(data): - try: - iocs = find_iocs(str(data)) - newarray = [] - for key, value in iocs.items(): - if input_type != "all": - if key not in input_type: - continue - if len(value) > 0: - for item in value: - if isinstance(value, dict): - for subkey, subvalue in value.items(): - if len(subvalue) > 0: - for subitem in subvalue: - data = { - "data": subitem, - "data_type": "%s_%s" - % (key[:-1], subkey), - } - if data not in newarray: - newarray.append(data) - else: - data = {"data": item, "data_type": key[:-1]} - if data not in newarray: - newarray.append(data) - for item in newarray: - if "ip" in item["data_type"]: - item["data_type"] = "ip" - return {"success": True, "items": newarray} - except Exception as excp: - return {"success": False, "message": "{}".format(excp)} - - if input_type == "": - input_type = "all" - 
else: - input_type = input_type.split(",") - - try: - file_ids = eval(file_ids) # nosec - except SyntaxError: - file_ids = file_ids - except NameError: - file_ids = file_ids - - return_value = None - if type(file_ids) == str: - return_value = parse(self.get_file(file_ids)["data"]) - elif type(file_ids) == list and type(file_ids[0]) == str: - return_value = [ - parse(self.get_file(file_id)["data"]) for file_id in file_ids - ] - elif ( - type(file_ids) == list - and type(file_ids[0]) == list - and type(file_ids[0][0]) == str - ): - return_value = [ - [parse(self.get_file(file_id2)["data"]) for file_id2 in file_id] - for file_id in file_ids - ] - else: - return "Invalid input" - return return_value - - # https://github.com/fhightower/ioc-finder - def parse_ioc(self, input_string, input_type="all"): - input_string = str(input_string) - if input_type == "": - input_type = "all" - else: - input_type = input_type.split(",") - - iocs = find_iocs(input_string) - newarray = [] - for key, value in iocs.items(): - if input_type != "all": - if key not in input_type: - continue - - if len(value) > 0: - for item in value: - # If in here: attack techniques. 
Shouldn't be 3 levels so no - # recursion necessary - if isinstance(value, dict): - for subkey, subvalue in value.items(): - if len(subvalue) > 0: - for subitem in subvalue: - data = { - "data": subitem, - "data_type": "%s_%s" % (key[:-1], subkey), - } - if data not in newarray: - newarray.append(data) - else: - data = {"data": item, "data_type": key[:-1]} - if data not in newarray: - newarray.append(data) - - # Reformatting IP - for item in newarray: - if "ip" in item["data_type"]: - item["data_type"] = "ip" - try: - item["is_private_ip"] = ipaddress.ip_address(item["data"]).is_private - except: - self.logger.info("Error parsing %s" % item["data"]) - - try: - newarray = json.dumps(newarray) - except json.decoder.JSONDecodeError as e: - return "Failed to parse IOC's: %s" % e - - return newarray - - def parse_list(self, items, splitter="\n"): - if splitter == "": - splitter = "\n" - - splititems = items.split(splitter) - - return str(splititems) - - def get_length(self, item): - if item.startswith("[") and item.endswith("]"): - try: - item = item.replace("'", '"', -1) - item = json.loads(item) - except json.decoder.JSONDecodeError as e: - self.logger.info("Parse error: %s" % e) - - return str(len(item)) - - def set_json_key(self, json_object, key, value): - self.logger.info(f"OBJ: {json_object}\nKEY: {key}\nVAL: {value}") - if isinstance(json_object, str): - try: - json_object = json.loads(json_object) - except json.decoder.JSONDecodeError as e: - return { - "success": False, - "reason": "Item is not valid JSON" - } - - if isinstance(json_object, list): - if len(json_object) == 1: - json_object = json_object[0] - else: - return { - "success": False, - "reason": "Item is valid JSON, but can't handle lists. 
Use .#" - } - - if not isinstance(json_object, object): - return { - "success": False, - "reason": "Item is not valid JSON (2)" - } - - if isinstance(value, str): - try: - value = json.loads(value) - except json.decoder.JSONDecodeError as e: - pass - - # Handle JSON paths - if "." in key: - base_object = json.loads(json.dumps(json_object)) - #base_object.output.recipients.notificationEndpointIds = ... - - keys = key.split(".") - if len(keys) >= 1: - first_object = keys[0] - - # This is awful :) - buildstring = "base_object" - for subkey in keys: - buildstring += f"[\"{subkey}\"]" - - buildstring += f" = {value}" - self.logger.info("BUILD: %s" % buildstring) - - #output = - exec(buildstring) - json_object = base_object - #json_object[first_object] = base_object - else: - json_object[key] = value - - return json_object - - def delete_json_keys(self, json_object, keys): - keys = self.parse_list(keys) - - splitdata = [keys] - if ", " in keys: - splitdata = keys.split(", ") - elif "," in keys: - splitdata = keys.split(",") - - for key in splitdata: - key = key.strip() - try: - del json_object[key] - except: - self.logger.info(f"[ERROR] Key {key} doesn't exist") - - return json_object - - def translate_value(self, input_data, translate_from, translate_to, else_value=""): - splitdata = [translate_from] - if ", " in translate_from: - splitdata = translate_from.split(", ") - elif "," in translate_from: - splitdata = translate_from.split(",") - - if isinstance(input_data, list) or isinstance(input_data, dict): - input_data = json.dumps(input_data) - - to_return = input_data - if isinstance(input_data, str): - found = False - for item in splitdata: - item = item.strip() - if item in input_data: - input_data = input_data.replace(item, translate_to) - found = True - - if not found and len(else_value) > 0: - input_data = else_value - - if input_data.lower() == "false": - return False - elif input_data.lower() == "true": - return True - - return input_data - - def map_value(self, 
input_data, mapping, default_value=""): - if not isinstance(mapping, dict) and not isinstance(mapping, object): - try: - mapping = json.loads(mapping) - except json.decoder.JSONDecodeError as e: - return { - "success": False, - "reason": "Mapping is not valid JSON: %s" % e, - } - - for key, value in mapping.items(): - try: - input_data = input_data.replace(key, str(value), -1) - except: - self.logger.info(f"Failed mapping output data for key {key}") - - return input_data - - # Changed with 1.1.0 to run with different returns - def regex_capture_group(self, input_data, regex): - try: - returnvalues = { - "success": True, - } - - matches = re.findall(regex, input_data) - self.logger.info(f"{matches}") - for item in matches: - if isinstance(item, str): - name = "group_0" - try: - returnvalues[name].append(item) - except: - returnvalues[name] = [item] - - else: - for i in range(0, len(item)): - name = "group_%d" % i - try: - returnvalues[name].append(item[i]) - except: - returnvalues[name] = [item[i]] - - return returnvalues - except re.error as e: - return { - "success": False, - "reason": "Bad regex pattern: %s" % e, - } - - def regex_replace( - self, input_data, regex, replace_string="", ignore_case="False" - ): - - #self.logger.info("=" * 80) - #self.logger.info(f"Regex: {regex}") - #self.logger.info(f"replace_string: {replace_string}") - #self.logger.info("=" * 80) - - if ignore_case.lower().strip() == "true": - return re.sub(regex, replace_string, input_data, flags=re.IGNORECASE) - else: - return re.sub(regex, replace_string, input_data) - - def execute_python(self, code): - self.logger.info(f"Python code {len(code)} {code}. 
If uuid, we'll try to download and use the file.") - - if len(code) == 36 and "-" in code: - filedata = self.get_file(code) - if filedata["success"] == False: - return { - "success": False, - "message": f"Failed to get file for ID {code}", - } - - if ".py" not in filedata["filename"]: - return { - "success": False, - "message": f"Filename needs to contain .py", - } - - - # Write the code to a file - # 1. Take the data into a file - # 2. Subprocess execute file? - try: - f = StringIO() - with redirect_stdout(f): - exec(code) # nosec :( - - s = f.getvalue() - - #try: - # s = s.encode("utf-8") - #except Exception as e: - # self.logger.info(f"Failed utf-8 encoding response: {e}") - - try: - return { - "success": True, - "message": s.strip(), - } - except Exception as e: - return { - "success": True, - "message": s, - } - - except Exception as e: - return { - "success": False, - "message": f"exception: {e}", - } - - def execute_bash(self, code, shuffle_input): - process = subprocess.Popen( - code, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True, - shell=True, # nosec - ) - stdout = process.communicate() - item = "" - if len(stdout[0]) > 0: - self.logger.info("[DEBUG] Succesfully ran bash!") - item = stdout[0] - else: - self.logger.info(f"[ERROR] FAILED to run bash command {code}!") - item = stdout[1] - - try: - ret = item.decode("utf-8") - return ret - except Exception: - return item - - return item - - def filter_list(self, input_list, field, check, value, opposite): - self.logger.info(f"\nRunning function with list {input_list}") - - flip = False - if str(opposite).lower() == "true": - flip = True - - try: - #input_list = eval(input_list) # nosec - input_list = json.loads(input_list) - except Exception: - try: - input_list = input_list.replace("'", '"', -1) - input_list = json.loads(input_list) - except Exception: - self.logger.info("[WARNING] Error parsing string to array. 
Continuing anyway.") - - # Workaround D: - if not isinstance(input_list, list): - return { - "success": False, - "reason": "Error: input isnt a list. Remove # to use this action.", - "valid": [], - "invalid": [], - } - - input_list = [input_list] - - self.logger.info(f"\nRunning with check \"%s\" on list of length %d\n" % (check, len(input_list))) - found_items = [] - new_list = [] - failed_list = [] - for item in input_list: - try: - try: - item = json.loads(item) - except Exception: - pass - - # Support for nested dict key - tmp = item - if field and field.strip() != "": - for subfield in field.split("."): - tmp = tmp[subfield] - - if isinstance(tmp, dict) or isinstance(tmp, list): - try: - tmp = json.dumps(tmp) - except json.decoder.JSONDecodeError as e: - self.logger.info("FAILED DECODING: %s" % e) - pass - - #self.logger.info("PRE CHECKS FOR TMP: %") - - # EQUALS JUST FOR STR - if check == "equals": - # Mostly for bools - # value = tmp.lower() - - if str(tmp).lower() == str(value).lower(): - self.logger.info("APPENDED BECAUSE %s %s %s" % (field, check, value)) - if not flip: - new_list.append(item) - else: - failed_list.append(item) - else: - if flip: - new_list.append(item) - else: - failed_list.append(item) - - # IS EMPTY FOR STR OR LISTS - elif check == "is empty": - if tmp == "[]": - tmp = [] - - if type(tmp) == list and len(tmp) == 0 and not flip: - new_list.append(item) - elif type(tmp) == list and len(tmp) > 0 and flip: - new_list.append(item) - elif type(tmp) == str and not tmp and not flip: - new_list.append(item) - elif type(tmp) == str and tmp and flip: - new_list.append(item) - else: - failed_list.append(item) - - # STARTS WITH = FOR STR OR [0] FOR LIST - elif check == "starts with": - if type(tmp) == list and tmp[0] == value and not flip: - new_list.append(item) - elif type(tmp) == list and tmp[0] != value and flip: - new_list.append(item) - elif type(tmp) == str and tmp.startswith(value) and not flip: - new_list.append(item) - elif type(tmp) == 
str and not tmp.startswith(value) and flip: - new_list.append(item) - else: - failed_list.append(item) - - # ENDS WITH = FOR STR OR [-1] FOR LIST - elif check == "ends with": - if type(tmp) == list and tmp[-1] == value and not flip: - new_list.append(item) - elif type(tmp) == list and tmp[-1] != value and flip: - new_list.append(item) - elif type(tmp) == str and tmp.endswith(value) and not flip: - new_list.append(item) - elif type(tmp) == str and not tmp.endswith(value) and flip: - new_list.append(item) - else: - failed_list.append(item) - - # CONTAINS FIND FOR LIST AND IN FOR STR - elif check == "contains": - if type(tmp) == list and value.lower() in tmp and not flip: - new_list.append(item) - elif type(tmp) == list and value.lower() not in tmp and flip: - new_list.append(item) - elif ( - type(tmp) == str - and tmp.lower().find(value.lower()) != -1 - and not flip - ): - new_list.append(item) - elif ( - type(tmp) == str - and tmp.lower().find(value.lower()) == -1 - and flip - ): - new_list.append(item) - else: - failed_list.append(item) - elif check == "contains any of": - self.logger.info("Inside contains any of") - checklist = value.split(",") - self.logger.info("Checklist and tmp: %s - %s" % (checklist, tmp)) - found = False - for subcheck in checklist: - subcheck = subcheck.strip().lower() - #ext.lower().strip() == value.lower().strip() - if type(tmp) == list and subcheck in tmp and not flip: - new_list.append(item) - found = True - break - elif type(tmp) == list and subcheck in tmp and flip: - failed_list.append(item) - found = True - break - elif type(tmp) == list and subcheck not in tmp and not flip: - new_list.append(item) - found = True - break - elif type(tmp) == list and subcheck not in tmp and flip: - failed_list.append(item) - found = True - break - elif (type(tmp) == str and tmp.lower().find(subcheck) != -1 and not flip): - new_list.append(item) - found = True - break - elif (type(tmp) == str and tmp.lower().find(subcheck) != -1 and flip): - 
failed_list.append(item) - found = True - break - elif (type(tmp) == str and tmp.lower().find(subcheck) == -1 and not flip): - failed_list.append(item) - found = True - break - elif (type(tmp) == str and tmp.lower().find(subcheck) == -1 and flip): - new_list.append(item) - found = True - break - - if not found: - failed_list.append(item) - - # CONTAINS FIND FOR LIST AND IN FOR STR - elif check == "field is unique": - #self.logger.info("FOUND: %s" - if tmp.lower() not in found_items and not flip: - new_list.append(item) - found_items.append(tmp.lower()) - elif tmp.lower() in found_items and flip: - new_list.append(item) - found_items.append(tmp.lower()) - else: - failed_list.append(item) - - #tmp = json.dumps(tmp) - - #for item in new_list: - #if type(tmp) == list and value.lower() in tmp and not flip: - # new_list.append(item) - # found = True - # break - #elif type(tmp) == list and value.lower() not in tmp and flip: - # new_list.append(item) - # found = True - # break - - # CONTAINS FIND FOR LIST AND IN FOR STR - elif check == "contains any of": - value = self.parse_list(value) - checklist = value.split(",") - tmp = tmp.lower() - self.logger.info("CHECKLIST: %s. 
Value: %s" % (checklist, tmp)) - found = False - for value in checklist: - if value in tmp and not flip: - new_list.append(item) - found = True - break - elif value not in tmp and flip: - new_list.append(item) - found = True - break - - if not found: - failed_list.append(item) - - elif check == "larger than": - if int(tmp) > int(value) and not flip: - new_list.append(item) - elif int(tmp) > int(value) and flip: - new_list.append(item) - else: - failed_list.append(item) - elif check == "less than": - if int(tmp) < int(value) and not flip: - new_list.append(item) - elif int(tmp) < int(value) and flip: - new_list.append(item) - else: - failed_list.append(item) - - # SINGLE ITEM COULD BE A FILE OR A LIST OF FILES - elif check == "files by extension": - if type(tmp) == list: - file_list = [] - - for file_id in tmp: - filedata = self.get_file(file_id) - _, ext = os.path.splitext(filedata["filename"]) - if ( - ext.lower().strip() == value.lower().strip() - and not flip - ): - file_list.append(file_id) - elif ext.lower().strip() != value.lower().strip() and flip: - file_list.append(file_id) - # else: - # failed_list.append(file_id) - - tmp = item - if field and field.strip() != "": - for subfield in field.split(".")[:-1]: - tmp = tmp[subfield] - tmp[field.split(".")[-1]] = file_list - new_list.append(item) - else: - new_list = file_list - # else: - # failed_list = file_list - - elif type(tmp) == str: - filedata = self.get_file(tmp) - _, ext = os.path.splitext(filedata["filename"]) - if ext.lower().strip() == value.lower().strip() and not flip: - new_list.append(item) - elif ext.lower().strip() != value.lower().strip() and flip: - new_list.append((item, ext)) - else: - failed_list.append(item) - - except Exception as e: - # "Error: %s" % e - self.logger.info("[WARNING] FAILED WITH EXCEPTION: %s" % e) - failed_list.append(item) - # return - - try: - return json.dumps( - { - "success": True, - "valid": new_list, - "invalid": failed_list, - } - ) - # new_list = 
json.dumps(new_list) - except json.decoder.JSONDecodeError as e: - return json.dumps( - { - "success": False, - "reason": "Failed parsing filter list output" + e, - } - ) - - return new_list - - #def multi_list_filter(self, input_list, field, check, value): - # input_list = input_list.replace("'", '"', -1) - # input_list = json.loads(input_list) - - # fieldsplit = field.split(",") - # if ", " in field: - # fieldsplit = field.split(", ") - - # valuesplit = value.split(",") - # if ", " in value: - # valuesplit = value.split(", ") - - # checksplit = check.split(",") - # if ", " in check: - # checksplit = check.split(", ") - - # new_list = [] - # for list_item in input_list: - # list_item = json.loads(list_item) - - # index = 0 - # for check in checksplit: - # if check == "equals": - # self.logger.info( - # "Checking %s vs %s" - # % (list_item[fieldsplit[index]], valuesplit[index]) - # ) - # if list_item[fieldsplit[index]] == valuesplit[index]: - # new_list.append(list_item) - - # index += 1 - - # # "=", - # # "equals", - # # "!=", - # # "does not equal", - # # ">", - # # "larger than", - # # "<", - # # "less than", - # # ">=", - # # "<=", - # # "startswith", - # # "endswith", - # # "contains", - # # "re", - # # "matches regex", - - # try: - # new_list = json.dumps(new_list) - # except json.decoder.JSONDecodeError as e: - # return "Failed parsing filter list output" % e - - # return new_list - - # Gets the file's metadata, e.g. 
md5 - def get_file_meta(self, file_id): - headers = { - "Authorization": "Bearer %s" % self.authorization, - } - - ret = requests.get( - "%s/api/v1/files/%s?execution_id=%s" - % (self.url, file_id, self.current_execution_id), - headers=headers, - ) - self.logger.info(f"RET: {ret}") - - return ret.text - - # Use data from AppBase to talk to backend - def delete_file(self, file_id): - headers = { - "Authorization": "Bearer %s" % self.authorization, - } - self.logger.info("HEADERS: %s" % headers) - - ret = requests.delete( - "%s/api/v1/files/%s?execution_id=%s" - % (self.url, file_id, self.current_execution_id), - headers=headers, - ) - return ret.text - - def create_file(self, filename, data): - self.logger.info("Inside function") - - #try: - # if str(data).startswith("b'") and str(data).endswith("'"): - # data = data[2:-1] - #except Exception as e: - # self.logger.info(f"Exception: {e}") - - filedata = { - "filename": filename, - "data": data, - } - - fileret = self.set_files([filedata]) - value = {"success": True, "file_ids": fileret} - if len(fileret) == 1: - value = {"success": True, "file_ids": fileret[0]} - - return value - - # Input is WAS a file, hence it didn't get the files - def get_file_value(self, filedata): - filedata = self.get_file(filedata) - if filedata is None: - return "File is empty?" 
- - self.logger.info("INSIDE APP DATA: %s" % filedata) - try: - return filedata["data"].decode() - except: - try: - return filedata["data"].decode("utf-16") - except: - return { - "success": False, - "reason": "Got the file, but the encoding can't be printed", - } - - def download_remote_file(self, url): - ret = requests.get(url, verify=False) # nosec - filename = url.split("/")[-1] - fileret = self.set_files( - [ - { - "filename": filename, - "data": ret.content, - } - ] - ) - - if len(fileret) > 0: - value = {"success": True, "file_id": fileret[0]} - else: - value = {"success": False, "reason": "No files downloaded"} - - return value - - def extract_archive(self, file_ids, fileformat="zip", password=None): - try: - return_data = {"success": False, "files": []} - - try: - file_ids = eval(file_ids) # nosec - except SyntaxError: - file_ids = file_ids - - self.logger.info("IDS: %s" % file_ids) - items = file_ids if type(file_ids) == list else file_ids.split(",") - for file_id in items: - - item = self.get_file(file_id) - return_ids = None - - self.logger.info("Working with fileformat %s" % fileformat) - with tempfile.TemporaryDirectory() as tmpdirname: - - # Get archive and save phisically - with open(os.path.join(tmpdirname, "archive"), "wb") as f: - f.write(item["data"]) - - # Grab files before, upload them later - to_be_uploaded = [] - - # Zipfile for zipped archive - if fileformat.strip().lower() == "zip": - try: - with zipfile.ZipFile( - os.path.join(tmpdirname, "archive") - ) as z_file: - if password: - z_file.setpassword(bytes(password.encode())) - for member in z_file.namelist(): - filename = os.path.basename(member) - if not filename: - continue - source = z_file.open(member) - to_be_uploaded.append( - {"filename": source.name, "data": source.read()} - ) - return_data["success"] = True - except (zipfile.BadZipFile, Exception): - return_data["files"].append( - { - "success": False, - "file_id": file_id, - "filename": item["filename"], - "message": "File is 
not a valid zip archive", - } - ) - - continue - - elif fileformat.strip().lower() == "rar": - try: - with rarfile.RarFile( - os.path.join(tmpdirname, "archive") - ) as z_file: - if password: - z_file.setpassword(password) - for member in z_file.namelist(): - filename = os.path.basename(member) - if not filename: - continue - source = z_file.open(member) - to_be_uploaded.append( - {"filename": source.name, "data": source.read()} - ) - return_data["success"] = True - except Exception: - return_data["files"].append( - { - "success": False, - "file_id": file_id, - "filename": item["filename"], - "message": "File is not a valid rar archive", - } - ) - continue - - elif fileformat.strip().lower() == "tar": - try: - with tarfile.open( - os.path.join(tmpdirname, "archive"), mode="r" - ) as z_file: - for member in z_file.getnames(): - member_files = z_file.extractfile(member) - to_be_uploaded.append( - { - "filename": member, - "data": member_files.read(), - } - ) - return_data["success"] = True - except Exception as e: - return_data["files"].append( - { - "success": False, - "file_id": file_id, - "filename": item["filename"], - "message": e, - } - ) - continue - elif fileformat.strip().lower() == "tar.gz": - try: - with tarfile.open( - os.path.join(tmpdirname, "archive"), mode="r:gz" - ) as z_file: - for member in z_file.getnames(): - member_files = z_file.extractfile(member) - to_be_uploaded.append( - { - "filename": member, - "data": member_files.read(), - } - ) - return_data["success"] = True - except Exception as e: - return_data["files"].append( - { - "success": False, - "file_id": file_id, - "filename": item["filename"], - "message": e, - } - ) - continue - - elif fileformat.strip().lower() == "7zip": - try: - with py7zr.SevenZipFile( - os.path.join(tmpdirname, "archive"), - mode="r", - password=password if password else None, - ) as z_file: - for filename, source in z_file.readall().items(): - # Removes paths - filename = filename.split("/")[-1] - 
to_be_uploaded.append( - { - "filename": item["filename"], - "data": source.read(), - } - ) - return_data["success"] = True - except Exception: - return_data["files"].append( - { - "success": False, - "file_id": file_id, - "filename": item["filename"], - "message": "File is not a valid 7zip archive", - } - ) - continue - else: - return "No such format: %s" % fileformat - - if len(to_be_uploaded) > 0: - return_ids = self.set_files(to_be_uploaded) - return_data["files"].append( - { - "success": True, - "file_id": file_id, - "filename": item["filename"], - "file_ids": return_ids, - } - ) - else: - return_data["files"].append( - { - "success": False, - "file_id": file_id, - "filename": item["filename"], - "message": "Archive is empty", - } - ) - - return return_data - - except Exception as excp: - return {"success": False, "message": "%s" % excp} - - def inflate_archive(self, file_ids, fileformat, name, password=None): - - try: - # TODO: will in future support multiple files instead of string ids? 
- file_ids = file_ids.split() - self.logger.info("picking {}".format(file_ids)) - - # GET all items from shuffle - items = [self.get_file(file_id) for file_id in file_ids] - - if len(items) == 0: - return "No file to inflate" - - # Dump files on disk, because libs want path :( - with tempfile.TemporaryDirectory() as tmpdir: - paths = [] - self.logger.info("Number 1") - for item in items: - with open(os.path.join(tmpdir, item["filename"]), "wb") as f: - f.write(item["data"]) - paths.append(os.path.join(tmpdir, item["filename"])) - - # Create archive temporary - self.logger.info("{} items to inflate".format(len(items))) - with tempfile.NamedTemporaryFile() as archive: - - if fileformat == "zip": - archive_name = "archive.zip" if not name else name - pyminizip.compress_multiple( - paths, [], archive.name, password, 5 - ) - - elif fileformat == "7zip": - archive_name = "archive.7z" if not name else name - with py7zr.SevenZipFile( - archive.name, - "w", - password=password if len(password) > 0 else None, - ) as sz_archive: - for path in paths: - sz_archive.write(path) - - else: - return "Format {} not supported".format(fileformat) - - return_id = self.set_files( - [{"filename": archive_name, "data": open(archive.name, "rb")}] - ) - - if len(return_id) == 1: - # Returns the first file's ID - return {"success": True, "id": return_id[0]} - else: - return { - "success": False, - "message": "Upload archive returned {}".format(return_id), - } - - except Exception as excp: - return {"success": False, "message": excp} - - def add_list_to_list(self, list_one, list_two): - if isinstance(list_one, str): - if not list_one or list_one == " " or list_one == "None" or list_one == "null": - list_one = "[]" - - try: - list_one = json.loads(list_one) - except json.decoder.JSONDecodeError as e: - self.logger.info("Failed to parse list1 as json: %s" % e) - if list_one == None: - list_one = [] - else: - return { - "success": False, - "reason": f"List one is not a valid list: {list_one}" - } 
- - if isinstance(list_two, str): - if not list_two or list_two == " " or list_two == "None" or list_two == "null": - list_two = "[]" - try: - list_two = json.loads(list_two) - except json.decoder.JSONDecodeError as e: - self.logger.info("Failed to parse list2 as json: %s" % e) - if list_one == None: - list_one = [] - else: - return { - "success": False, - "reason": f"List two is not a valid list: {list_two}" - } - - if isinstance(list_one, dict): - list_one = [list_one] - if isinstance(list_two, dict): - list_two = [list_two] - - for item in list_two: - list_one.append(item) - - return list_one - - def diff_lists(self, list_one, list_two): - if isinstance(list_one, str): - try: - list_one = json.loads(list_one) - except json.decoder.JSONDecodeError as e: - self.logger.info("Failed to parse list1 as json: %s" % e) - - if isinstance(list_two, str): - try: - list_two = json.loads(list_two) - except json.decoder.JSONDecodeError as e: - self.logger.info("Failed to parse list2 as json: %s" % e) - - def diff(li1, li2): - return list(set(li1) - set(li2)) + list(set(li2) - set(li1)) - - return diff(list_one, list_two) - - def merge_lists(self, list_one, list_two, set_field="", sort_key_list_one="", sort_key_list_two=""): - if isinstance(list_one, str): - try: - list_one = json.loads(list_one) - except json.decoder.JSONDecodeError as e: - self.logger.info("Failed to parse list1 as json: %s" % e) - - if isinstance(list_two, str): - try: - list_two = json.loads(list_two) - except json.decoder.JSONDecodeError as e: - self.logger.info("Failed to parse list2 as json: %s" % e) - - if len(list_one) != len(list_two): - return {"success": False, "message": "Lists length must be the same. 
%d vs %d" % (len(list_one), len(list_two))} - - if len(sort_key_list_one) > 0: - self.logger.info("Sort 1 %s by key: %s" % (list_one, sort_key_list_one)) - try: - list_one = sorted(list_one, key=lambda k: k.get(sort_key_list_one), reverse=True) - except: - self.logger.info("Failed to sort list one") - pass - - if len(sort_key_list_two) > 0: - #self.logger.info("Sort 2 %s by key: %s" % (list_two, sort_key_list_two)) - try: - list_two = sorted(list_two, key=lambda k: k.get(sort_key_list_two), reverse=True) - except: - self.logger.info("Failed to sort list one") - pass - - # Loops for each item in sub array and merges items together - # List one is being overwritten - base_key = "shuffle_auto_merge" - try: - for i in range(len(list_one)): - #self.logger.info(list_two[i]) - if isinstance(list_two[i], dict): - for key, value in list_two[i].items(): - list_one[i][key] = value - elif isinstance(list_two[i], str) and list_two[i] == "": - continue - elif isinstance(list_two[i], str) or isinstance(list_two[i], int) or isinstance(list_two[i], bool): - self.logger.info("IN SETTER FOR %s" % list_two[i]) - if len(set_field) == 0: - self.logger.info("Define a JSON key to set for List two (Set Field)") - list_one[i][base_key] = list_two[i] - else: - list_one[i][set_field] = list_two[i] - except Exception as e: - return { - "success": False, - "reason": "An error occurred while merging the lists. PS: List one can NOT be a list of integers. 
If this persists, contact us at support@shuffler.io", - "exception": f"{e}", - } - - - return list_one - - def xml_json_convertor(self, convertto, data): - try: - if convertto == "json": - ans = xmltodict.parse(data) - json_data = json.dumps(ans) - return json_data - else: - ans = readfromstring(data) - return json2xml.Json2xml(ans, wrapper="all", pretty=True).to_xml() - except Exception as e: - return e - - def date_to_epoch(self, input_data, date_field, date_format): - - self.logger.info( - "Executing with {} on {} with format {}".format( - input_data, date_field, date_format - ) - ) - - result = json.loads(input_data) - #try: - #except json.decoder.JSONDecodeError as e: - # result = input_data - - # https://docs.python.org/3/library/datetime.html#strftime-strptime-behavior - epoch = datetime.datetime.strptime(result[date_field], date_format).strftime( - "%s" - ) - result["epoch"] = epoch - return result - - def compare_relative_date( - self, input_data, date_format, equality_test, offset, units, direction - ): - - if input_data == "None": - return False - - self.logger.info("Converting input date.") - - if date_format != "%s": - input_dt = datetime.datetime.strptime(input_data, date_format) - else: - input_dt = datetime.datetime.utcfromtimestamp(float(input_data)) - - offset = int(offset) - if units == "seconds": - delta = datetime.timedelta(seconds=offset) - elif units == "minutes": - delta = datetime.timedelta(minutes=offset) - elif units == "hours": - delta = datetime.timedelta(hours=offset) - elif units == "days": - delta = datetime.timedelta(days=offset) - - utc_format = date_format - if utc_format.endswith("%z"): - utc_format = utc_format.replace("%z", "Z") - - if date_format != "%s": - formatted_dt = datetime.datetime.strptime( - datetime.datetime.utcnow().strftime(utc_format), date_format - ) - else: - formatted_dt = datetime.datetime.utcnow() - - self.logger.info("Formatted time is: {}".format(formatted_dt)) - if direction == "ago": - comparison_dt = 
formatted_dt - delta - else: - comparison_dt = formatted_dt + delta - self.logger.info("{} {} {} is {}".format(offset, units, direction, comparison_dt)) - - diff = (input_dt - comparison_dt).total_seconds() - self.logger.info( - "Difference between {} and {} is {}".format(input_data, comparison_dt, diff) - ) - result = False - if equality_test == ">": - result = 0 > diff - if direction == "ahead": - result = not (result) - elif equality_test == "<": - result = 0 < diff - if direction == "ahead": - result = not (result) - elif equality_test == "=": - result = diff == 0 - elif equality_test == "!=": - result = diff != 0 - elif equality_test == ">=": - result = 0 >= diff - if direction == "ahead" and diff != 0: - result = not (result) - elif equality_test == "<=": - result = 0 <= diff - if direction == "ahead" and diff != 0: - result = not (result) - - self.logger.info( - "At {}, is {} {} than {} {} {}? {}".format( - formatted_dt, - input_data, - equality_test, - offset, - units, - direction, - result, - ) - ) - - return result - - def run_math_operation(self, operation): - self.logger.info("Operation: %s" % operation) - result = eval(operation) - return result - - def escape_html(self, input_data, field_name): - - mapping = json.loads(input_data) - self.logger.info(f"Got mapping {json.dumps(mapping, indent=2)}") - - result = markupsafe.escape(mapping[field_name]) - self.logger.info(f"Mapping {input_data} to {result}") - - mapping[field_name] = result - return mapping - - def check_cache_contains(self, key, value, append): - org_id = self.full_execution["workflow"]["execution_org"]["id"] - url = "%s/api/v1/orgs/%s/get_cache" % (self.url, org_id) - data = { - "workflow_id": self.full_execution["workflow"]["id"], - "execution_id": self.current_execution_id, - "authorization": self.authorization, - "org_id": org_id, - "key": key, - } - - if append.lower() == "true": - append = True - else: - append = False - - get_response = requests.post(url, json=data) - try: - 
allvalues = get_response.json() - try: - if allvalues["value"] == None or allvalues["value"] == "null": - allvalues["value"] = "[]" - except: - pass - - if allvalues["success"] == False: - if append == True: - new_value = [str(value)] - data["value"] = json.dumps(new_value) - - set_url = "%s/api/v1/orgs/%s/set_cache" % (self.url, org_id) - set_response = requests.post(set_url, json=data) - try: - allvalues = set_response.json() - #allvalues["key"] = key - #return allvalues - - return { - "success": True, - "found": False, - "key": key, - "value": new_value, - } - except Exception as e: - return { - "success": False, - "found": False, - "key": key, - "reason": "Failed to find key, and failed to append", - } - else: - return { - "success": True, - "found": False, - "key": key, - "reason": "Not appended, not found", - } - else: - if allvalues["value"] == None or allvalues["value"] == "null": - allvalues["value"] = "[]" - - try: - parsedvalue = json.loads(allvalues["value"]) - except json.decoder.JSONDecodeError as e: - parsedvalue = [] - - #return parsedvalue - - for item in parsedvalue: - #return "%s %s" % (item, value) - if item == value: - if not append: - return { - "success": True, - "found": True, - "reason": "Found and not appending!", - "key": key, - "value": json.loads(allvalues["value"]), - } - else: - return { - "success": True, - "found": True, - "reason": "Found, was appending, but item already exists", - "key": key, - "value": json.loads(allvalues["value"]), - } - - # Lol - break - - if not append: - return { - "success": True, - "found": False, - "reason": "Not found, not appending (2)!", - "key": key, - "value": json.loads(allvalues["value"]), - } - - #parsedvalue = json.loads(allvalues["value"]) - #if parsedvalue == None: - # parsedvalue = [] - - #return parsedvalue - new_value = parsedvalue - if new_value == None: - new_value = [value] - - new_value.append(value) - - #return new_value - - data["value"] = json.dumps(new_value) - #return allvalues - - 
set_url = "%s/api/v1/orgs/%s/set_cache" % (self.url, org_id) - response = requests.post(set_url, json=data) - exception = "" - try: - allvalues = response.json() - #return allvalues - - return { - "success": True, - "found": False, - "reason": "Appended as it didn't exist", - "key": key, - "value": new_value, - } - except Exception as e: - exception = e - pass - - return { - "success": False, - "found": True, - "reason": f"Failed to set append the value: {exception}. This should never happen", - "key": key - } - - self.logger.info("Handle all values!") - - #return allvalues - - except Exception as e: - return { - "success": False, - "key": key, - "reason": f"Failed to get cache: {e}", - "found": False, - } - - return value.text - - def get_cache_value(self, key): - org_id = self.full_execution["workflow"]["execution_org"]["id"] - url = "%s/api/v1/orgs/%s/get_cache" % (self.url, org_id) - data = { - "workflow_id": self.full_execution["workflow"]["id"], - "execution_id": self.current_execution_id, - "authorization": self.authorization, - "org_id": org_id, - "key": key, - } - - value = requests.post(url, json=data) - try: - allvalues = value.json() - self.logger.info("VAL1: ", allvalues) - allvalues["key"] = key - self.logger.info("VAL2: ", allvalues) - - if allvalues["success"] == True: - allvalues["found"] = True - else: - allvalues["success"] = True - allvalues["found"] = False - - try: - parsedvalue = json.loads(allvalues["value"]) - allvalues["value"] = parsedvalue - - except: - self.logger.info("Parsing of value as JSON failed") - pass - - return json.dumps(allvalues) - except: - self.logger.info("Value couldn't be parsed, or json dump of value failed") - return value.text - - # FIXME: Add option for org only & sensitive data (not to be listed) - def set_cache_value(self, key, value): - org_id = self.full_execution["workflow"]["execution_org"]["id"] - url = "%s/api/v1/orgs/%s/set_cache" % (self.url, org_id) - data = { - "workflow_id": 
self.full_execution["workflow"]["id"], - "execution_id": self.current_execution_id, - "authorization": self.authorization, - "org_id": org_id, - "key": key, - "value": str(value), - } - - response = requests.post(url, json=data) - try: - allvalues = response.json() - allvalues["key"] = key - #allvalues["value"] = json.loads(json.dumps(value)) - - if (value.startswith("{") and value.endswith("}")) or (value.startswith("[") and value.endswith("]")): - try: - allvalues["value"] = json.loads(value) - except json.decoder.JSONDecodeError as e: - self.logger.info("Failed inner value parsing: %s" % e) - allvalues["value"] = str(value) - else: - allvalues["value"] = str(value) - - return json.dumps(allvalues) - except: - self.logger.info("Value couldn't be parsed") - return response.text - - def convert_json_to_tags(self, json_object, split_value=", ", include_key=True, lowercase=True): - try: - json_object = json.loads(json_object) - except json.decoder.JSONDecodeError as e: - self.logger.info("Failed to parse list2 as json: %s. 
Type: %s" % (e, type(json_object))) - - if isinstance(lowercase, str) and lowercase.lower() == "true": - lowercase = True - else: - lowercase = False - - if isinstance(include_key, str) or include_key.lower() == "true": - include_key = True - else: - include_key = False - - parsedstring = [] - try: - for key, value in json_object.items(): - self.logger.info("KV: %s:%s" % (key, value)) - if isinstance(value, str) or isinstance(value, int) or isinstance(value, bool): - if include_key == True: - parsedstring.append("%s:%s" % (key, value)) - else: - parsedstring.append("%s" % (value)) - else: - self.logger.info("Can't handle type %s" % type(value)) - except AttributeError as e: - return { - "success": False, - "reason": "Json Object is not a dictionary", - } - - fullstring = split_value.join(parsedstring) - if lowercase == True: - fullstring = fullstring.lower() - - return fullstring - - def cidr_ip_match(self, ip, networks): - self.logger.info("Executing with\nIP: {},\nNetworks: {}".format(ip, networks)) - - try: - networks = json.loads(networks) - except json.decoder.JSONDecodeError as e: - self.logger.info("Failed to parse networks list as json: {}. 
Type: {}".format( - e, type(networks) - )) - return "Networks is not a valid list: {}".format(networks) - - try: - ip_networks = list(map(ipaddress.ip_network, networks)) - ip_address = ipaddress.ip_address(ip) - except ValueError as e: - return "IP or some networks are not in valid format.\nError: {}".format(e) - - matched_networks = list(filter(lambda net: (ip_address in net), ip_networks)) - - result = {} - result['networks'] = list(map(str, matched_networks)) - result['is_contained'] = True if len(result['networks']) > 0 else False - - return json.dumps(result) - - def get_timestamp(self, time_format): - timestamp = int(time.time()) - if time_format == "unix" or time_format == "epoch": - self.logger.info("Running default timestamp %s" % timestamp) - - return timestamp - - def get_hash_sum(self, value): - md5_value = "" - sha256_value = "" - - try: - md5_value = hashlib.md5(str(value).encode('utf-8')).hexdigest() - except Exception as e: - self.logger.info(f"Error in md5sum: {e}") - - try: - sha256_value = hashlib.sha256(str(value).encode('utf-8')).hexdigest() - except Exception as e: - self.logger.info(f"Error in sha256: {e}") - - parsedvalue = { - "success": True, - "original_value": value, - "md5": md5_value, - "sha256": sha256_value, - } - - return parsedvalue - -if __name__ == "__main__": - Tools.run() diff --git a/shuffle-tools/1.2.0/Dockerfile b/shuffle-tools/1.2.0/Dockerfile index 5c1a8af4..fac3aae2 100644 --- a/shuffle-tools/1.2.0/Dockerfile +++ b/shuffle-tools/1.2.0/Dockerfile @@ -5,13 +5,13 @@ FROM frikky/shuffle:app_sdk as base FROM base as builder # Install all alpine build tools needed for our pip installs -RUN apk --no-cache add --update alpine-sdk libffi libffi-dev musl-dev openssl-dev git +RUN apk --no-cache add --update alpine-sdk libffi libffi-dev musl-dev openssl-dev git zlib-dev python3-dev # Install all of our pip packages in a single directory that we can copy to our base image later RUN mkdir /install WORKDIR /install COPY 
requirements.txt /requirements.txt -RUN pip install --no-cache-dir --prefix="/install" -r /requirements.txt +RUN python3 -m pip install --no-cache-dir --upgrade --prefix="/install" -r /requirements.txt # Switch back to our base image and copy in all of our built packages and source code FROM base diff --git a/shuffle-tools/1.2.0/api.yaml b/shuffle-tools/1.2.0/api.yaml index 576b41ab..ab5337ee 100644 --- a/shuffle-tools/1.2.0/api.yaml +++ b/shuffle-tools/1.2.0/api.yaml @@ -1,7 +1,7 @@ --- app_version: 1.2.0 name: Shuffle Tools -description: A tool app for Shuffle. Gives access to most missing features along with Liquid. +description: A tool app for Shuffle. Gives access to most missing features along with Liquid. tags: - Testing - Shuffle @@ -25,34 +25,43 @@ actions: returns: schema: type: string - - name: check_cache_contains - description: Checks Shuffle cache whether a user-provided key contains a value. Returns ALL the values previously appended. + - name: execute_python + description: Runs python with the data input. Any prints will be returned. parameters: - - name: key - description: The key to get + - name: code + description: The code to run. Can be a file ID from within Shuffle. required: true - multiline: false - example: "alert_ids" + multiline: true + example: print("hello world") schema: type: string - - name: value - description: The value to check for and append if applicable + + - name: search_datastore_category + description: Checks whether keys within a list are already in datastore, and returns whether they existed or not. They will be automatically appended. + parameters: + - name: input_list + description: The list to check from. Don't use .# in this. 
required: true - multiline: false - example: "1208301599081" + multiline: true + example: '[{"data": "1.2.3.4"}, {"data": "1.2.3.5"}]' schema: type: string - - name: append - description: Whether to auto-append the value if it doesn't exist in the cache + - name: key + description: The key to use for deduplication. MUST be a part of each key in the input_list. required: true - options: - - true - - false multiline: false - example: "timestamp" + example: "ticketname+username" schema: type: string - - name: get_cache_value + - name: category + description: The category you want to upload to. This can be a new category, or an existing one. + required: true + multiline: false + example: "tickets" + schema: + type: string + + - name: get_datastore_value description: Get a value saved to your organization in Shuffle parameters: - name: key @@ -62,10 +71,17 @@ actions: example: "timestamp" schema: type: string + - name: category + description: The category to get the value from. Not required. + required: false + multiline: false + example: "tickets" + schema: + type: string returns: schema: type: string - - name: set_cache_value + - name: set_datastore_value description: Set a value to be saved to your organization in Shuffle. parameters: - name: key @@ -82,6 +98,34 @@ actions: example: "1621959545" schema: type: string + - name: category + description: The category to get the value from. Not required. + required: false + multiline: false + example: "tickets" + schema: + type: string + returns: + schema: + type: string + + - name: delete_datastore_value + description: Delete a value saved to your organization in Shuffle + parameters: + - name: key + description: The key to delete + required: true + multiline: false + example: "timestamp" + schema: + type: string + - name: category + description: The category to get the value from. Not required. 
+ required: false + multiline: false + example: "tickets" + schema: + type: string returns: schema: type: string @@ -112,68 +156,117 @@ actions: returns: schema: type: string - - name: send_email_shuffle - description: Send an email from Shuffle + + #- name: send_email_shuffle + # description: Send an email from Shuffle + # parameters: + # - name: apikey + # description: Your https://shuffler.io organization apikey + # multiline: false + # example: "https://shuffler.io apikey" + # required: true + # schema: + # type: string + # - name: recipients + # description: The recipients of the email + # multiline: false + # example: "test@example.com,frikky@shuffler.io" + # required: true + # schema: + # type: string + # - name: subject + # description: The subject to use + # multiline: false + # example: "SOS this is an alert :o" + # required: true + # schema: + # type: string + # - name: body + # description: The body to add to the email + # multiline: true + # example: "This is an email alert from Shuffler.io :)" + # required: true + # schema: + # type: string + # - name: attachments + # description: The ID of files in Shuffle to add as attachments + # multiline: false + # example: "file_id1,file_id2,file_id3" + # required: false + # schema: + # type: string + # returns: + # schema: + # type: string + - name: dedup_and_merge + description: Merges data from multiple apps within a set timeframe. Returns action as SKIPPED if the data is a duplicate. 
Returns with a list of all data if the data at the end parameters: - - name: apikey - description: Your https://shuffler.io organization apikey - multiline: false - example: "https://shuffler.io apikey" + - name: key + description: The key to use for deduplication required: true - schema: - type: string - - name: recipients - description: The recipients of the email multiline: false - example: "test@example.com,frikky@shuffler.io" - required: true + example: "ticketname+username" schema: type: string - - name: subject - description: The subject to use - multiline: false - example: "SOS this is an alert :o" + - name: value + description: The full value of the item required: true + multiline: true + example: "1208301599081" schema: type: string - - name: body - description: The body to add to the email - multiline: true - example: "This is an email alert from Shuffler.io :)" + - name: timeout + description: The timeout before returning required: true + options: + - 2 + - 3 + - 4 + - 5 + - 6 + - 7 + - 8 + - 9 + - 10 + - 15 + - 20 + - 25 + multiline: false + example: "1" schema: type: string - - name: attachments - description: The ID of files in Shuffle to add as attachments + - name: set_skipped + description: Whether to set the action SKIPPED or not IF it matches another workflow in the same timeframe + required: true + options: + - true + - false multiline: false - example: "file_id1,file_id2,file_id3" - required: false + example: "true" schema: type: string - returns: - schema: - type: string + - name: filter_list description: Takes a list and filters based on your data skip_multicheck: true parameters: - name: input_list - description: The list to check + description: The list to filter from. Don't use .# into this. 
required: true multiline: false example: '[{"data": "1.2.3.4"}, {"data": "1.2.3.5"}]' schema: type: string - name: field - description: The field to check - required: false + description: The field to check in the input list + required: true multiline: false example: "data" schema: type: string - name: check description: Type of check - required: true + required: false example: "equals" options: - equals @@ -191,15 +284,15 @@ actions: schema: type: string - name: value - description: The value to check with - required: false + description: The value to compare with + required: false multiline: false example: "1.2.3.4" schema: type: string - name: opposite description: Whether to add or to NOT add - required: true + required: false options: - False - True @@ -245,7 +338,7 @@ actions: # schema: # type: string - name: parse_ioc - description: Parse IOC's based on https://github.com/fhightower/ioc-finder + description: "Parse IOC's based on https://github.com/fhightower/ioc-finder. Specify input type to optimize speed: domains, urls, email_addresses, ipv4s, ipv4_cidrs, md5s, sha256s, sha1s, cves and more.." 
parameters: - name: input_string description: The string to check @@ -258,7 +351,9 @@ actions: description: The string to check required: false multiline: false - example: "md5s" + example: "domains,urls,email_addresses,ipv4s,ipv4_cidrs,ipv6s,md5s,sha256s,sha1s,cves" + value: "domains,urls,ipv4s,md5s,sha1s,email_addresses" + multiselect: true schema: type: string returns: @@ -442,6 +537,13 @@ actions: example: "EventID,username\n4137,frikky" schema: type: string + - name: category + description: The category the file belongs to + required: false + multiline: false + example: "yara-rules" + schema: + type: string - name: download_remote_file description: Downloads a file from a URL parameters: @@ -552,7 +654,7 @@ actions: schema: type: string - name: xml_json_convertor - description: Converts xml to json and vice versa + description: Converts xml or html to json and vice versa. parameters: - name: convertto required: true @@ -747,6 +849,41 @@ actions: example: "json_key" schema: type: string + - name: merge_json_objects + description: Merges two lists of same type AND length. + parameters: + - name: list_one + description: The first list + multiline: true + example: "{'key': 'value'}" + required: true + schema: + type: string + - name: list_two + description: The second list to use + multiline: true + required: true + example: "{'key2': 'value2'}" + schema: + type: string + - name: set_field + description: If items in list 2 are strings, but first is JSON, sets the values to the specified key. 
Defaults to key "new_shuffle_key" + required: false + example: "json_key" + schema: + type: string + - name: sort_key_list_one + description: Sort by this key before using list one for merging + required: false + example: "json_key" + schema: + type: string + - name: sort_key_list_two + description: Sort by this key before using list two for merging + required: false + example: "json_key" + schema: + type: string - name: diff_lists description: Diffs two lists of strings or integers and finds what's missing parameters: @@ -866,6 +1003,7 @@ actions: options: - encode - decode + - "to image" schema: type: string - name: get_timestamp @@ -896,6 +1034,19 @@ actions: returns: schema: type: string + - name: list_cidr_ips + description: Lists the IPs for a CIDR + parameters: + - name: cidr + description: IP CIDR to check + multiline: false + example: "1.1.1.0/24" + required: True + schema: + type: string + returns: + schema: + type: string - name: cidr_ip_match description: Check if an IP is contained in a CIDR defined network parameters: @@ -933,16 +1084,7 @@ actions: example: '{"data": "Hello world"}' schema: type: string - - name: execute_python - description: Runs python with the data input. Any prints will be returned. - parameters: - - name: code - description: The code to run. Can be a file ID from within Shuffle. - required: true - multiline: true - example: print("hello world") - schema: - type: string + - name: run_math_operation description: Takes a math input and gives you the result parameters: @@ -992,25 +1134,25 @@ actions: returns: schema: type: string - - name: get_standardized_data - description: 'Used to run standardized synonym translations of data to make it easier to use in automation. 
This can be done automatically in subflows as well' - parameters: - - name: json_input - description: The full JSON blob to automatically translate - required: true - multiline: true - example: '{"ref": "1234"}' - schema: - type: string - - name: input_type - description: The data type of the input - required: true - multiline: true - example: 'cases' - options: - - cases - schema: - type: string + #- name: get_standardized_data + # description: 'Used to run standardized synonym translations of data to make it easier to use in automation. This can be done automatically in subflows as well' + # parameters: + # - name: json_input + # description: The full JSON blob to automatically translate + # required: true + # multiline: true + # example: '{"ref": "1234"}' + # schema: + # type: string + # - name: input_type + # description: The data type of the input + # required: true + # multiline: true + # example: 'cases' + # options: + # - cases + # schema: + # type: string - name: generate_random_string description: 'Used to generate passwords and random strings' parameters: @@ -1031,6 +1173,281 @@ actions: - false schema: type: string + - name: merge_incoming_branches + description: 'Merges the data of incoming branches. 
Uses the input type to determine how to merge the data, and removes duplicates' + parameters: + - name: input_type + description: What type to use + required: false + multiline: false + example: 'list' + options: + - list + - dict + schema: + type: string + - name: run_ssh_command + description: 'Run a command on remote machine with SSH' + parameters: + - name: host + description: Host IP + required: true + multiline: false + example: '192.168.55.11' + schema: + type: string + - name: port + description: A port on which SSH service is running + required: false + multiline: false + example: 'Default is 22' + schema: + type: string + - name: user_name + description: User on remote system + required: true + multiline: false + example: 'root' + schema: + type: string + - name: private_key_file_id + description: Private key file ID + required: false + multiline: false + example: 'file_c5c87a50-4146-40e2-a698-78cf13bf65c0' + schema: + type: string + - name: password + description: Password for SSH user. Use either password or private key. + required: false + multiline: false + example: '***' + schema: + type: string + - name: command + description: Command you want to run + required: true + multiline: true + example: 'ls -la' + schema: + type: string + - name: if_else_routing + description: Routes based on if-else statements + parameters: + - name: conditions + description: The conditions to be met + required: true + multiline: true + example: "REPEATING: Hello world" + schema: + type: string + returns: + schema: + type: string + + - name: check_datastore_contains + description: We recommend "Search datastore category" instead. Checks Shuffle datastore whether a user-provided key contains a value. Returns ALL the values previously appended. 
+ parameters: + - name: key + description: The key to get + required: true + multiline: false + example: "alert_ids" + schema: + type: string + - name: value + description: The value to check for and append if applicable + required: true + multiline: false + example: "1208301599081" + schema: + type: string + - name: append + description: Whether to auto-append the value if it doesn't exist in the cache + required: true + options: + - true + - false + multiline: false + example: "timestamp" + schema: + type: string + - name: category + description: The category to get the value from. Not required. + required: false + multiline: false + example: "tickets" + schema: + type: string + - name: return_values + description: Whether to include the cache values in the response + required: false + options: + - true + - false + multiline: false + example: "false" + value: "true" + schema: + type: string + returns: + schema: + type: string + - name: get_cache_value + description: Get a value saved to your organization in Shuffle. Deprecated for "get_datastore_value" + parameters: + - name: key + description: The key to get + required: true + multiline: false + example: "timestamp" + schema: + type: string + - name: category + description: The category to get the value from. Not required. + required: false + multiline: false + example: "tickets" + schema: + type: string + returns: + schema: + type: string + - name: delete_cache_value + + description: Delete a value saved to your organization in Shuffle. Deprecated for "delete_datastore_value" + parameters: + - name: key + description: The key to delete + required: true + multiline: false + example: "timestamp" + schema: + type: string + - name: category + description: The category to get the value from. Not required. 
+ required: false + multiline: false + example: "tickets" + schema: + type: string + returns: + schema: + type: string + + - name: set_cache_value + description: Set a value to be saved to your organization in Shuffle. Deprecated for "set_datastore_value" + parameters: + - name: key + description: The key to set the value for + required: true + multiline: false + example: "timestamp" + schema: + type: string + - name: value + description: The value to set + required: true + multiline: true + example: "1621959545" + schema: + type: string + - name: category + description: The category to get the value from. Not required. + required: false + multiline: false + example: "tickets" + schema: + type: string + returns: + schema: + type: string + - name: check_cache_contains + description: Checks Shuffle cache whether a user-provided key contains a value. Returns ALL the values previously appended. Deprecated for "check datastore contains" + parameters: + - name: key + description: The key to get + required: true + multiline: false + example: "alert_ids" + schema: + type: string + - name: value + description: The value to check for and append if applicable + required: true + multiline: false + example: "1208301599081" + schema: + type: string + - name: append + description: Whether to auto-append the value if it doesn't exist in the cache + required: true + options: + - true + - false + multiline: false + example: "timestamp" + schema: + type: string + - name: category + description: The category to get the value from. Not required. 
+ required: false + multiline: false + example: "tickets" + schema: + type: string + - name: return_values + description: Whether to include the cache values in the response + required: false + options: + - true + - false + multiline: false + example: "false" + value: "true" + schema: + type: string + #- name: parse_ioc_new + # description: Parse IOC's based on https://github.com/fhightower/ioc-finder + # parameters: + # - name: input_string + # description: The string to check + # required: true + # multiline: true + # example: "123ijq192.168.3.6kljqwiejs8 https://shuffler.io" + # schema: + # type: string + # - name: input_type + # description: The string to check + # required: false + # multiline: false + # example: "md5s" + # schema: + # type: string + # returns: + # schema: + # type: string + # + - name: get_ioc + description: Get IOC's saved to your organization in Shuffle + parameters: + - name: ioc + description: The IOC to look for in Shuffle's datastore + required: true + multiline: true + example: "timestamp" + schema: + type: string + - name: data_type + description: The data type to get the IOC from. Discovered if not passed. 
+ required: false + multiline: false + example: "ip" + schema: + type: string + returns: + schema: + type: string large_image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAK4AAACuCAYAAACvDDbuAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAAAAB3RJTUUH5AgXDjM6hEZGWwAAD+lJREFUeNrtXb/vJTcRH7/v3iVBCqRBiCAQAtHwq4AWRElHwX8AoqbmXwDRpiH/QyQkGoogUSAhKIKUAE1IdSRSREhQQk7c3XtD8X55vePxjNfe3bk3H+nu+96uPf54POtnj8fe8OQX30JwOIxhtzYBh6MGOsPF0z9p2iWwpd8LjX6W5vWUYaiqlBuvLT5b5TQDPlRwmMSAABBg+kCer+XuAeQf4tL9tAxJ/hIfZGSm8rhyEfjytfxr9FeSX+KjvVfipNVpWlaPNhsAEPCS7Ao8FYnRlbO4ksLnjiSQvIanv4FNjwJ5pXIlMq6MQpIqqPnQKQKbjuPDtZlG55o6UHXWtVncZZTbbNBVB1P5dJYguCbJJ1WjOG8PVOioSm5HPrVt1rwuyN+K+PSZnNV1M/MmEFubfFjjU9tmK9XBJ2cOk3DDdZiEG67DJOrGuA7HyvAe12ESAxa73KPrN1z8gUikCCdvcD5NXnpQpA8nNhh9m5Yn4ZMrV8dHV/8a/dRA0x419a3lI9GBtM2GcrGYFXRNUU5TyluTOpdXwqeUt6YOpby9DUTLZylOcRlzdBTf2yV3ZBFOmKSHQh5KpjSSSpqG4s6VkUubqw8W8knTSnWk0Y+2jF5tlmuDUloJn6T8gRVcEpJ+3srChHSNt8RJsq4p+S41LC13KTcu/RJt1pLPKY1Pzhwm4YbrMAk3XIdJTMe4aeCjJhBVk0YiQ1MWZHhLgmO5QNVWfKRlavlIIQnurQmcnaMjSbBxhtMwYUxODpLcl2tUhvPlNE6VkiuoFVLXKT6ZfBjxRIIzOSlgWpLSB8uZ0g3BjeVDlFGEos0mfKKL7CQrY2ES7pM2i/OX22w4/sWReEhEnUOTxx3a+FrawQGZh04/rWe6oJBKo5zT4zLjPHE9ZHym5YzToogzfQcmfLgOhuLF/Sjm2izVDyXnrKtcmmmdaKumf+RyCw5Xn7OmzQaJF0fiEZG6BjXpYUYaSVkaPrXeHe4eVaZEr3Prqrmmrbc2T8lrmOMjn5xJHeJLYkk+PfzNTxOflrwF0EeHbU0Zt2wsW+PTkncB7g5zmMSwzUfS4eDhPa7DJK5jXGorsnZxonbRIbeAoOUjkUvlp+qxFp9YNuWL0nBqsVCkqUsrHQnuX+Nx5/qcJDI0kWgtJh7ihYCN8aG+13DqOXlbWUfD+fN0AUEmp3RcUWlVEwCynb5ssYLnxHViJT6ULCykb8EnzUfpqBWfVAdcnt5tprGhIe10WnjHpB2FtMPWcpM66yXyOad4Lz4Srq34SHhwZfRos1w9Y/jkzGESvj3dYRLe4zpMwg3XYRJuuA6T4M/Hzfk/OGd9OP2HOE2f8wtBlCebJrkfp+Gc3AGmiSiuaVlpwkmajL4osPUm9FMqIzBOJolfjGuzEtdUwWl53Dm7Eh9pzIdps+FiYJyi1N+Rvs/6OLCQBul8Ip8R08ik3EwhLZz1Wv8XmU7ZZqX7OT2gUIB2oaRBm+2ovDm5nM+ulEeiD8yka8UnJ1PCP82r9YWW8iCU5XO8W/PhPmvllNKW7lEyszsgNKuzkspJFZFL15uPtIweq7A1xiKpz1J8tGXP+dE53/fJmcMk6hcgJO8XqokEKi5uYzTG29LqSev95JqyKsoOOxjNpKQBD7VFc5GBJRsi+NQHkkv6+7m/UxTufwLCCy+
CbAruyOLDdwEf/uf6vbbNJukzlogZC6wMdhAcM7ohHPawe/GrcO+HPwe4u782G7sIAE9++0vYv/YKwO6usfCaka0etgwXAGB3D8JznwIYnlmbiW0M92FbQy0d+MmZ3Xo5JDDcvuXJ2ZYqtyUuTwuM6nSXctcufHCOZqkjPScXhbIcdeD0XUpfKyNNy8nlyhuozLkM8XxR6pjm7tc4Fdx620I7lWq10JCm0ZanWoBwm3FsBe1WznpadbTg4A9PI2xx7FUKHopQjg7TKqNnpbioIUcFUGUsy1CS8fFYBYdJuOE6TMIN12ESgyiKiwO1bQOJe1w+6p42Etmhwmi6kLZXfC2G9IUj2vulY2wIPrv4onRhIXcRqS0DiWxkhF0uIb37wG22LRCSuVCyekC2GSXj9CG3YyT+krWh+KPAhkTvgGDKqbqnWbBwY+2Pnm3Wy4aMRYc1MuPDvp0skwgAh8PaJGbh5k4kx0f/hce/ewnw/QenXQCTFJDfQy45PzFNn5NHsoPy/u6gzE+nObzz91P9Z+6kWAm2zg6bDMoq8OQxHN78Axze/htAaB1EbQhhdzyfgRqIGoCxoUIjhDuA3ZDpcR0W4C3nMInbNVw7v4oOAsehArVFPL0uOjMM+DlM+pk7t7/BDuwcJsM6gcM7WweOX05nFCHNi12ASRfLo3QaX9O0GWTylOTnZIMwf4YPPTlD4iMm7aZwAGOUf3Rf48wjHNzVOMkKFA8pp0RHZ1mjdihs5R61PWbsWlphgs/E5gptNvFfSLY8QPk7dVbh+UNg8qfnJsZ8Bo0hzF0Y2Nqvc0s+Vbs5YL5OLfPRcorT2hvjtuxyHWZhzHCX6AMcFtB2B0RvtKZqqe6OEYz1uA7HEbdruN7ZmsZtGq4brXnQhlsbLFkDrY9mC9giH41/dSlONfeEIBcgss7nXopInPdkYN95J3XD1bMgkJUNFOxsDNLgyiynhYyX5dnAhnLyhzmO4V7IO8+xyZEgx5UqvJ41rOUTdhBOr2w6KjZc+B1FBkLGVUoAABQEcmPu6rPPw73v/gh2n/wMANYEhAd4/NqvYf/Wn5pEyPW2IUrOzQWSHyHdkEJgN8D97/0Edp/7GgDu9fnDDvD9t+HRqy8BPvxQ9i6xEXUEuPcMDF//Puw+/aVqDewfvA77f/zx9M40e7jNeNw5CDu4++K34e4r36kWcXj3TYDfvwz8D79ml1clDPuxx9FhuUik0rblVihFWLX+7ZFEXE2ioLBNg9fUSRopVsOjJbioskZlDuyAvmflpOWsOUNu/cBQ8jW/1A0np11RG+GjwG36cQHqFWnBcG4Axgx37d/I1uXXcvCnx6BXoQXf3mOAzvVpooJzaOcWdKBH1fZ07dCsFZpNgmfZbaOJ2dxnpwkNFC3C9MBcGxo0OugxwV8LWKm5lg9sFQdszKGhLAla2dCuduuOZcypx+UXdk0OK5e/hXKNTc4cjiPGhtvTX1njI6Z2+vbuKtaKspLooXdkXs1u5yUR7/LdROMsraSSIfTa6pqWodE9Mvla6sCI8d7uUMEXIEzjdg3XYRr2osOePIbDR+9BGO7re78QAD/+AODwpK5sBDg6dGyGAtL1sYnLGDe3+2BNTNycYQf7B2/Aw5d/XB9HejjA4YN3jgHUNQ132MOTv/wG9v98A+CgFBCO/+FH/wJ89PBaSY1OULZzQyQL2skayVwg/7Dk3Ky2IlcEgEcfw/7dt+YJnRP1f9jDoz+/AvM0FU4c1u8mes59e+ZXDhXmPE+tForD+lH73Q6EluiozfaldnzWQUWQzdprPk87lg44nkTKN+DT/10S7lW4VYz8wWucOTAPtl5e4mgfjmu0/b3HdZiEG67DJNxwbxlGhwkAuZeXAJS3Qpfemq7dds1tS5dsbc6dAyQpS5uGe+lKrJLSGUqlCb2GcwUuCxBzt71T2/g7t9mQniofv0yjWOtMYdSLM6Sy0pd5iLdFSQtUyiJtRnjmGOdhqq5bo5WzUXAYzns
2Lu2tjaqb0WaTHRBrR9cvEVG4VF3WkLsGnzXqohzjbk3dt4hG/jDDxy8BLL5y5miBZi1wa9vT14dJ0o2qft6/1GhQZ1SV9uJxd3cQ7j+XD7RJ40JK38/XAPKz4ly+OG+KwOTDwn0uDSKEZ58/vgH+hmHLcA97uPvCN+G5H/wMoCaQ/KkAAtzdg/DCZ9cmsipsGS4ce5u7z38DYHhmbTL2YfjBH28DOM80s+MoxllVvfkwKudSbiL0dB0NTya2iGpNYmIzl+/EdexjQ8PEGE4FhdPHMAlbLhcsdWaPnfDEAxQJnbx53TEPJ51j3N7CrEfbSNt+arzXt57X2RBx94LsUGHOGRQtF7Fa8HFQQOabJmc5XQ8b8iAbh0mYNFzvdefD+nRhyPowqWitc2VbRyutGCF18+ilU2mEXWX51zFuKbqlZ/RLy0gixzagiS6sgL2hghuwAywarsMBxgzXO9u2sBzZWHwHRLwrQ5rWYQBIfuwCKnZJEpvEYSg9dRoncnejtdxFbBRLqFQzr5fSudH3nDmOaH26yHIwNcZ1NIZNmwWArYU1Fg8HDLB/7wH879VfAey2Rd0a9g/+2ubUyZUOdAz//umXjT136GPd2cDNnM9bC4Pd1gbOx3WsDh/jOkzCDddhEpcjmKiFhvGLQwDitJNrYTz05H7MS+N56hiq0mbYCfeIj2STb2s+cSJEOrguJ4fScaneOW7kOWZJm4VCmaPFg8wKgcSGuLpzR49Rerm8vIRaaECgvyB1Tbl9qOZoMiykHeVhVoZKwW9N+CSJuPwsH4YY12aTa5TxYyZPpsxSDG/Rhgp1lyxUnK/7UMFhEm64DpNIlnzTAdXcsJml8rdO1yt/K+R45EJUluS9zHaWITuQJb9rsVT+HvuKe+RvhdIIcE3ey4Rj+VDBYRJuuA6TcMN1mMT15SWMZ5h10Oc86+dr50s14QWch7rEh5PHef+psgsyqB0iI2e+hE+pDlpvvkQ/uVUMDfdSnTq12TA58injFUdOMPB5AeiALtHcUrstXrqSINnaoVjxyE5ra1ZipHMsTV2kMiQ8NDw7tdmqQ4WtzNEd9uBjXIdJuOE6TMLoy0sct46KHndNS6d2pW5tp+rW+Jw5rVl2qpP5Oqrcnr52w9RMgbfA8db5tAsp8DGuwyTaGW6DB7ppn9CCzxKnvKz9Kz7j/prUi0cwqQLQDBtvrp5uvMc/Wf00oFAT5FjscbcwMloCt1LPWvTUT41sH+M6TMIN12ESw3UPd8gPtrh7JeTyXvZGn0KD0jSlMms5Sfhw92vkUvXT5tPWt3WbSfjMsSFl3ujlJdy+4xkjnFze+PWrNWXWclqaT6t82vq2bjMJnzk2pMzrQwWHSbjhOkzCDdchxpZchpezwySQvHhiyVMLevPRctXwqeWmfcv5GaVTGKRy557YIHnhpETeoCl05grhbPlL89HK1vCp5darvZbgo+XEwYcKDpNww3WYxC6/U5PY5oun66MzPHH8L05PpqHKghn+TpjyictkZQLPh4u6yeknvXeWU+JD6TDHJ/cbn93Bi8nnDKdJm8EG2+zIZwBudlbjUOYOpj1frClPwyf3OZuXuaEx3lgWZixKxIfZ911rvJO65PRFVmZjbYY+VHDYhBuuwyTccB0mcdkB0cr5z70pW/pm7Bo+LesgqUsrPjVye9WXkqld8FiizRCi6LBWjmTRPGGG/JZ5ejvoa1ai1qwvlWarbeZDBYdJuOE6TKKP4W7xJdFb4+R8ZvH5P852gxhpwOZ9AAAAJXRFWHRkYXRlOmNyZWF0ZQAyMDIwLTA4LTIzVDE0OjUyOjAwKzAyOjAwetRgVgAAACV0RVh0ZGF0ZTptb2RpZnkAMjAyMC0wOC0yM1QxNDo1MTo1OCswMjowMJuxI+oAAAAASUVORK5CYII= # yamllint disable-line rule:line-length diff --git 
a/shuffle-tools/1.2.0/docker-compose.yml b/shuffle-tools/1.2.0/docker-compose.yml deleted file mode 100644 index e48c6b2f..00000000 --- a/shuffle-tools/1.2.0/docker-compose.yml +++ /dev/null @@ -1,15 +0,0 @@ -version: '3.4' -services: - shuffle-tools: - build: - context: . - dockerfile: Dockerfile -# image: walkoff_registry:5000/walkoff_app_HelloWorld-v1-0 - deploy: - mode: replicated - replicas: 10 - restart_policy: - condition: none - restart: "no" - secrets: - - secret1 diff --git a/shuffle-tools/1.2.0/requirements.txt b/shuffle-tools/1.2.0/requirements.txt index b9000087..4fe45011 100644 --- a/shuffle-tools/1.2.0/requirements.txt +++ b/shuffle-tools/1.2.0/requirements.txt @@ -1,9 +1,11 @@ -ioc_finder==7.2.1 -py7zr==0.11.3 -rarfile==4.0 -pyminizip==0.2.4 -requests==2.25.1 -xmltodict==0.11.0 -json2xml==3.6.0 +ioc_finder==7.3.0 +py7zr==0.22.0 +rarfile==4.2 +pyzipper==0.3.6 +requests==2.32.3 +xmltodict==0.14.2 +json2xml==5.0.5 ipaddress==1.0.23 -google.auth==1.23.0 +google.auth==2.37.0 +paramiko==3.5.0 +shuffle-sdk==0.0.31 diff --git a/shuffle-tools/1.2.0/run.sh b/shuffle-tools/1.2.0/run.sh old mode 100644 new mode 100755 index aaf4edf7..3480d2f8 --- a/shuffle-tools/1.2.0/run.sh +++ b/shuffle-tools/1.2.0/run.sh @@ -1,7 +1,7 @@ # Build testing -NAME=frikky/shuffle:shuffle-tools_1.1.0 +NAME=frikky/shuffle:shuffle-tools_1.2.0 docker rmi $NAME --force -docker build . -t frikky/shuffle:shuffle-tools_1.1.0 +docker build . 
-t frikky/shuffle:shuffle-tools_1.2.0 # Run testing #docker run -e SHUFFLE_SWARM_CONFIG=run -e SHUFFLE_APP_EXPOSED_PORT=33334 frikky/shuffle:shuffle-tools_1.1.0 diff --git a/shuffle-tools/1.2.0/src/app.py b/shuffle-tools/1.2.0/src/app.py index fc4e5c90..16323e4e 100644 --- a/shuffle-tools/1.2.0/src/app.py +++ b/shuffle-tools/1.2.0/src/app.py @@ -1,20 +1,22 @@ -import asyncio +import os +import sys +import builtins + +import hmac import datetime import json import time import markupsafe -import os import re import subprocess import tempfile import zipfile import base64 +import gzip import ipaddress import hashlib from io import StringIO from contextlib import redirect_stdout -from liquid import Liquid -import liquid import random import string @@ -28,14 +30,26 @@ from google.auth import jwt import py7zr -import pyminizip +import pyzipper import rarfile import requests import tarfile import binascii import struct -from walkoff_app_sdk.app_base import AppBase +import paramiko +import concurrent.futures +import multiprocessing + +# from walkoff_app_sdk.app_base import AppBase +from shuffle_sdk import AppBase + +# Override exit(), sys.exit, and os._exit +# sys.exit() can be caught, meaning we can have a custom handler for it +builtins.exit = sys.exit +os.exit = sys.exit +os._exit = sys.exit + class Tools(AppBase): __version__ = "1.2.0" @@ -50,6 +64,8 @@ def __init__(self, redis, logger, console_logger=None): :param logger: :param console_logger: """ + self.cache_update_buffer = [] + self.shared_cache = {} super().__init__(redis, logger, console_logger) def router(self): @@ -57,51 +73,100 @@ def router(self): def base64_conversion(self, string, operation): if operation == "encode": - encoded_bytes = base64.b64encode(string.encode("utf-8")) + # Try JSON decoding + try: + string = json.dumps(json.loads(string)) + except: + pass + + encoded_bytes = base64.b64encode(str(string).encode("utf-8")) encoded_string = str(encoded_bytes, "utf-8") return encoded_string + elif 
operation == "to image": + # Decode the base64 into an image and upload it as a file + decoded_bytes = base64.b64decode(string) + + # Make the bytes into unicode escaped bytes + # UnicodeDecodeError - 'utf-8' codec can't decode byte 0x89 in position 0: invalid start byte + try: + decoded_bytes = str(decoded_bytes, "utf-8") + except: + pass + + filename = "base64_image.png" + file = { + "filename": filename, + "data": decoded_bytes, + } + + fileret = self.set_files([file]) + value = {"success": True, "filename": filename, "file_id": fileret} + if len(fileret) == 1: + value = {"success": True, "filename": filename, "file_id": fileret[0]} + + return value + elif operation == "decode": + if "-" in string: + string = string.replace("-", "+", -1) + + if "_" in string: + string = string.replace("_", "/", -1) + + # Fix padding + if len(string) % 4 != 0: + string += "=" * (4 - len(string) % 4) + + # For loop this. It's stupid. + decoded_bytes = "" try: decoded_bytes = base64.b64decode(string) - try: - decoded_bytes = str(decoded_bytes, "utf-8") - except: - pass - - return decoded_bytes except Exception as e: - #return string.decode("utf-16") + return json.dumps( + { + "success": False, + "reason": "Invalid Base64 - %s" % e, + } + ) - self.logger.info(f"[WARNING] Error in normal decoding: {e}") - return { - "success": False, - "reason": f"Error decoding the base64: {e}", - } - #newvar = binascii.a2b_base64(string) - #try: - # if str(newvar).startswith("b'") and str(newvar).endswith("'"): - # newvar = newvar[2:-1] - #except Exception as e: - # self.logger.info(f"Encoding issue in base64: {e}") - #return newvar - - #try: - # return newvar - #except: - # pass + # if "incorrect padding" in str(e).lower(): + # try: + # decoded_bytes = base64.b64decode(string + "=") + # except Exception as e: + # if "incorrect padding" in str(e).lower(): + # try: + # decoded_bytes = base64.b64decode(string + "==") + # except Exception as e: + # if "incorrect padding" in str(e).lower(): + # try: + 
# decoded_bytes = base64.b64decode(string + "===") + # except Exception as e: + # if "incorrect padding" in str(e).lower(): + # return "Invalid Base64" - return { - "success": False, - "reason": "Error decoding the base64", - } + try: + decoded_bytes = str(decoded_bytes, "utf-8") + except: + pass + + # Check if json + try: + decoded_bytes = json.loads(decoded_bytes) + except: + pass + + return decoded_bytes - return json.dumps({ + return { "success": False, - "reason": "No base64 to be converted", - }) + "reason": "Invalid operation", + } def parse_list_internal(self, input_list): + if isinstance(input_list, list): + input_list = ",".join(input_list) + try: input_list = json.loads(input_list) if isinstance(input_list, list): @@ -128,13 +193,12 @@ def send_sms_shuffle(self, apikey, phone_numbers, body): url = "https://shuffler.io/api/v1/functions/sendsms" headers = {"Authorization": "Bearer %s" % apikey} - return requests.post(url, headers=headers, json=data).text + return requests.post(url, headers=headers, json=data, verify=False).text # This is an email function of Shuffle def send_email_shuffle(self, apikey, recipients, subject, body, attachments=""): recipients = self.parse_list_internal(recipients) - targets = [recipients] if ", " in recipients: targets = recipients.split(", ") @@ -142,10 +206,11 @@ def send_email_shuffle(self, apikey, recipients, subject, body, attachments=""): targets = recipients.split(",") data = { - "targets": targets, - "subject": subject, - "body": body, + "targets": targets, + "subject": subject, + "body": body, "type": "alert", + "email_app": True, } # Read the attachments @@ -156,19 +221,108 @@ def send_email_shuffle(self, apikey, recipients, subject, body, attachments=""): for item in attachments: new_file = self.get_file(file_ids) files.append(new_file) - + data["attachments"] = files except Exception as e: - self.logger.info(f"Error in attachment parsing for email: {e}") - + pass - url = 
"https://shuffler.io/api/v1/functions/sendmail" + url = "https://shuffler.io/functions/sendmail" headers = {"Authorization": "Bearer %s" % apikey} return requests.post(url, headers=headers, json=data).text def repeat_back_to_me(self, call): return call + def repeat_back_to_me2(self, body): + print("call:", body) + return body + + def dedup_and_merge(self, key, value, timeout, set_skipped=True): + timeout = int(timeout) + key = str(key) + + set_skipped = True + if str(set_skipped).lower() == "false": + set_skipped = False + else: + set_skipped = True + + cachekey = "dedup-%s" % (key) + response = { + "success": False, + "datastore_key": cachekey, + "info": "All keys from the last %d seconds with the key '%s' have been merged. The result was set to SKIPPED in all other actions." + % (timeout, key), + "timeout": timeout, + "original_value": value, + "all_values": [], + } + + found_cache = self.get_cache(cachekey) + + if found_cache["success"] == True and len(found_cache["value"]) > 0: + if "value" in found_cache: + if not str(found_cache["value"]).startswith("["): + found_cache["value"] = [found_cache["value"]] + else: + try: + found_cache["value"] = json.loads(found_cache["value"]) + except Exception as e: + self.logger.info("[ERROR] Failed parsing JSON: %s" % e) + else: + found_cache["value"] = [] + + found_cache["value"].append(value) + if "created" in found_cache: + if found_cache["created"] + timeout + 3 < time.time(): + set_skipped = False + response["success"] = True + response["all_values"] = found_cache["value"] + + self.delete_cache(cachekey) + + return json.dumps(response) + else: + self.logger.info( + "Dedup-key is already handled in another workflow with timeout %d" + % timeout + ) + + self.set_cache(cachekey, json.dumps(found_cache["value"])) + if set_skipped == True: + self.action_result["status"] = "SKIPPED" + self.action_result["result"] = json.dumps( + { + "status": False, + "reason": "Dedup-key is already handled in another workflow with timeout %d" 
+ % timeout, + } + ) + + self.send_result( + self.action_result, + {"Authorization": "Bearer %s" % self.authorization}, + "/api/v1/streams", + ) + + return found_cache + + parsedvalue = [value] + resp = self.set_cache(cachekey, json.dumps(parsedvalue)) + + self.logger.info( + "Sleeping for %d seconds while waiting for cache to fill up elsewhere" + % timeout + ) + time.sleep(timeout) + found_cache = self.get_cache(cachekey) + + response["success"] = True + response["all_values"] = found_cache["value"] + + self.delete_cache(cachekey) + return json.dumps(response) + # https://github.com/fhightower/ioc-finder def parse_file_ioc(self, file_ids, input_type="all"): def parse(data): @@ -187,7 +341,8 @@ def parse(data): for subitem in subvalue: data = { "data": subitem, - "data_type": "%s_%s" % (key[:-1], subkey), + "data_type": "%s_%s" + % (key[:-1], subkey), } if data not in newarray: newarray.append(data) @@ -234,69 +389,16 @@ def parse(data): return "Invalid input" return return_value - # https://github.com/fhightower/ioc-finder - def parse_ioc(self, input_string, input_type="all"): - #if len(input_string) > 2500000 and (input_type == "" or input_type == "all"): - # return { - # "success": False, - # "reason": "Data too large (%d). 
Please reduce it below 2.5 Megabytes to use this action or specify the input type" % len(input_string) - # } - - # https://github.com/fhightower/ioc-finder/blob/6ff92a73a60e9233bf09b530ccafae4b4415b08a/ioc_finder/ioc_finder.py#L433 - ioc_types = ["domains", "urls", "email_addresses", "ipv6s", "ipv4s", "ipv4_cidrs", "md5s", "sha256s", "sha1s", "cves"] - input_string = str(input_string) - if input_type == "": - input_type = "all" - else: - input_type = input_type.split(",") - for item in input_type: - item = item.strip() - - ioc_types = input_type - - iocs = find_iocs(input_string, included_ioc_types=ioc_types) - newarray = [] - for key, value in iocs.items(): - if input_type != "all": - if key not in input_type: - continue - - if len(value) > 0: - for item in value: - # If in here: attack techniques. Shouldn't be 3 levels so no - # recursion necessary - if isinstance(value, dict): - for subkey, subvalue in value.items(): - if len(subvalue) > 0: - for subitem in subvalue: - data = { - "data": subitem, - "data_type": "%s_%s" % (key[:-1], subkey), - } - if data not in newarray: - newarray.append(data) - else: - data = {"data": item, "data_type": key[:-1]} - if data not in newarray: - newarray.append(data) - - # Reformatting IP - for item in newarray: - if "ip" in item["data_type"]: - item["data_type"] = "ip" - try: - item["is_private_ip"] = ipaddress.ip_address(item["data"]).is_private - except: - self.logger.info("Error parsing %s" % item["data"]) - + def parse_list(self, items, splitter="\n"): + # Check if it's already a list first try: - newarray = json.dumps(newarray) - except json.decoder.JSONDecodeError as e: - return "Failed to parse IOC's: %s" % e + newlist = json.loads(items) + if isinstance(newlist, list): + return newlist - return newarray + except Exception as e: + self.logger.info("[WARNING] Parse error - fallback: %s" % e) - def parse_list(self, items, splitter="\n"): if splitter == "": splitter = "\n" @@ -315,15 +417,11 @@ def get_length(self, item): 
return str(len(item)) def set_json_key(self, json_object, key, value): - self.logger.info(f"OBJ: {json_object}\nKEY: {key}\nVAL: {value}") if isinstance(json_object, str): try: json_object = json.loads(json_object) except json.decoder.JSONDecodeError as e: - return { - "success": False, - "reason": "Item is not valid JSON" - } + return {"success": False, "reason": "Item is not valid JSON"} if isinstance(json_object, list): if len(json_object) == 1: @@ -331,16 +429,15 @@ def set_json_key(self, json_object, key, value): else: return { "success": False, - "reason": "Item is valid JSON, but can't handle lists. Use .#" + "reason": "Item is valid JSON, but can't handle lists. Use .#", } - #if not isinstance(json_object, object): + # if not isinstance(json_object, object): # return { # "success": False, # "reason": "Item is not valid JSON (2)" # } - if isinstance(value, str): try: value = json.loads(value) @@ -350,7 +447,7 @@ def set_json_key(self, json_object, key, value): # Handle JSON paths if "." in key: base_object = json.loads(json.dumps(json_object)) - #base_object.output.recipients.notificationEndpointIds = ... + # base_object.output.recipients.notificationEndpointIds = ... 
keys = key.split(".") if len(keys) >= 1: @@ -359,15 +456,14 @@ def set_json_key(self, json_object, key, value): # This is awful :) buildstring = "base_object" for subkey in keys: - buildstring += f"[\"{subkey}\"]" + buildstring += f'["{subkey}"]' buildstring += f" = {value}" - self.logger.info("BUILD: %s" % buildstring) - #output = + # output = exec(buildstring) json_object = base_object - #json_object[first_object] = base_object + # json_object[first_object] = base_object else: json_object[key] = value @@ -436,9 +532,9 @@ def replace_value_from_dictionary(self, input_data, mapping, default_value=""): except: self.logger.info(f"Failed mapping output data for key {key}") - return input_data + return input_data - # Changed with 1.1.0 to run with different returns + # Changed with 1.1.0 to run with different returns def regex_capture_group(self, input_data, regex): try: returnvalues = { @@ -446,12 +542,11 @@ def regex_capture_group(self, input_data, regex): } matches = re.findall(regex, input_data) - self.logger.info(f"{matches}") found = False for item in matches: if isinstance(item, str): - found = True - name = "group_0" + found = True + name = "group_0" try: returnvalues[name].append(item) except: @@ -459,7 +554,7 @@ def regex_capture_group(self, input_data, regex): else: for i in range(0, len(item)): - found = True + found = True name = "group_%d" % i try: returnvalues[name].append(item[i]) @@ -475,23 +570,13 @@ def regex_capture_group(self, input_data, regex): "reason": "Bad regex pattern: %s" % e, } - def regex_replace( - self, input_data, regex, replace_string="", ignore_case="False" - ): - - #self.logger.info("=" * 80) - #self.logger.info(f"Regex: {regex}") - #self.logger.info(f"replace_string: {replace_string}") - #self.logger.info("=" * 80) - + def regex_replace(self, input_data, regex, replace_string="", ignore_case="False"): if ignore_case.lower().strip() == "true": return re.sub(regex, replace_string, input_data, flags=re.IGNORECASE) else: return 
re.sub(regex, replace_string, input_data) def execute_python(self, code): - self.logger.info(f"Python code {len(code)} {code}. If uuid, we'll try to download and use the file.") - if len(code) == 36 and "-" in code: filedata = self.get_file(code) if filedata["success"] == False: @@ -506,37 +591,82 @@ def execute_python(self, code): "message": f"Filename needs to contain .py", } - # Write the code to a file # 1. Take the data into a file # 2. Subprocess execute file? try: f = StringIO() - with redirect_stdout(f): - exec(code) # nosec :( + + def custom_print(*args, **kwargs): + return print(*args, file=f, **kwargs) + + # with redirect_stdout(f): # just in case + # Add globals in it too + globals_copy = globals().copy() + globals_copy["print"] = custom_print + try: + globals_copy["singul"] = self.singul + globals_copy["shuffle"] = self.singul + except Exception as e: + self.logger.info(f"Failed to add singul to python globals: {e}") + + # Add self to globals_copy + for key, value in locals().copy().items(): + if key not in globals_copy: + globals_copy[key] = value + + globals_copy["self"] = self + + try: + exec(code, globals_copy) + except SystemExit as e: + # Same as a return + pass + except SyntaxError as e: + # Special handler for return usage. Makes return act as + # an exit() + if "'return' outside function" in str(e): + return { + "success": False, + "message": f"SyntaxError - Shuffle Recommendation: Instead of using 'return' without a function, use 'exit()' to return when not inside a function. 
Raw Syntax error: {e}", + } + else: + return { + "success": False, + "message": f"Syntax Error: {e}", + } + + # this doesn't work to capture top-level returns + # Reason: SyntaxError makes it crash BEFORE it reaches the return s = f.getvalue() + f.close() # why: https://www.youtube.com/watch?v=6SA6S9Ca5-U - #try: + # try: # s = s.encode("utf-8") - #except Exception as e: - # self.logger.info(f"Failed utf-8 encoding response: {e}") + # except Exception as e: try: return { "success": True, - "message": s.strip(), + "message": json.loads(s.strip()), } except Exception as e: - return { - "success": True, - "message": s, - } - + try: + return { + "success": True, + "message": s.strip(), + } + except Exception as e: + return { + "success": True, + "message": s, + } + except Exception as e: return { "success": False, - "message": f"exception: {e}", + "message": f"Exception: {e}", } def execute_bash(self, code, shuffle_input): @@ -550,7 +680,6 @@ def execute_bash(self, code, shuffle_input): stdout = process.communicate() item = "" if len(stdout[0]) > 0: - self.logger.info("[DEBUG] Succesfully ran bash!") item = stdout[0] else: self.logger.info(f"[ERROR] FAILED to run bash command {code}!") @@ -564,9 +693,65 @@ def execute_bash(self, code, shuffle_input): return item - def filter_list(self, input_list, field, check, value, opposite): - self.logger.info(f"\nRunning function with list {input_list}") + # Check if wildcardstring is in all_ips and support * as wildcard + def check_wildcard(self, wildcardstring, matching_string): + wildcardstring = str(wildcardstring.lower()) + if wildcardstring in str(matching_string).lower(): + return True + else: + wildcardstring = wildcardstring.replace(".", "\\.") + wildcardstring = wildcardstring.replace("*", ".*") + + if re.match(wildcardstring, str(matching_string).lower()): + return True + + return False + + def preload_cache(self, key): + org_id = self.full_execution["workflow"]["execution_org"]["id"] + url = 
f"{self.url}/api/v1/orgs/{org_id}/get_cache" + data = { + "workflow_id": self.full_execution["workflow"]["id"], + "execution_id": self.current_execution_id, + "authorization": self.authorization, + "org_id": org_id, + "key": key, + } + get_response = requests.post(url, json=data, verify=False) + response_data = get_response.json() + if "value" in response_data: + raw_value = response_data["value"] + if isinstance(raw_value, str): + try: + parsed = json.loads(raw_value) + except json.JSONDecodeError: + parsed = [raw_value] + else: + parsed = raw_value + + if not isinstance(parsed, list): + parsed = [parsed] + + response_data["value"] = parsed + return get_response.json() + + def update_cache(self, key): + org_id = self.full_execution["workflow"]["execution_org"]["id"] + url = f"{self.url}/api/v1/orgs/{org_id}/set_cache" + data = { + "workflow_id": self.full_execution["workflow"]["id"], + "execution_id": self.current_execution_id, + "authorization": self.authorization, + "org_id": org_id, + "key": key, + "value": json.dumps(self.shared_cache["value"]), + } + get_response = requests.post(url, json=data, verify=False) + self.cache_update_buffer = [] + return get_response.json() + + def filter_list(self, input_list, field, check, value, opposite): # Remove hashtags on the fly # E.g. #.fieldname or .#.fieldname @@ -574,22 +759,23 @@ def filter_list(self, input_list, field, check, value, opposite): if str(opposite).lower() == "true": flip = True - try: - #input_list = eval(input_list) # nosec + # input_list = eval(input_list) # nosec input_list = json.loads(input_list) except Exception: try: input_list = input_list.replace("'", '"', -1) input_list = json.loads(input_list) except Exception: - self.logger.info("[WARNING] Error parsing string to array. Continuing anyway.") + self.logger.info( + "[WARNING] Error parsing string to array. Continuing anyway." 
+ ) # Workaround D: if not isinstance(input_list, list): return { "success": False, - "reason": "Error: input isnt a list. Remove # to use this action.", + "reason": "Error: input isnt a list. Please use conditions instead if using JSON.", "valid": [], "invalid": [], } @@ -599,7 +785,6 @@ def filter_list(self, input_list, field, check, value, opposite): if str(value).lower() == "null" or str(value).lower() == "none": value = "none" - self.logger.info(f"\nRunning with check \"%s\" on list of length %d\n" % (check, len(input_list))) found_items = [] new_list = [] failed_list = [] @@ -620,37 +805,26 @@ def filter_list(self, input_list, field, check, value, opposite): try: tmp = json.dumps(tmp) except json.decoder.JSONDecodeError as e: - self.logger.info("FAILED DECODING: %s" % e) pass - #self.logger.info("PRE CHECKS FOR TMP: %") - # EQUALS JUST FOR STR if check == "equals": # Mostly for bools # value = tmp.lower() if str(tmp).lower() == str(value).lower(): - self.logger.info("APPENDED BECAUSE %s %s %s" % (field, check, value)) - if not flip: - new_list.append(item) - else: - failed_list.append(item) + new_list.append(item) else: - if flip: - new_list.append(item) - else: - failed_list.append(item) + failed_list.append(item) elif check == "equals any of": - self.logger.info("Inside equals any of") + value = self.parse_list_internal(value) checklist = value.split(",") - self.logger.info("Checklist and tmp: %s - %s" % (checklist, tmp)) found = False for subcheck in checklist: - subcheck = subcheck.strip() + subcheck = str(subcheck).strip() - #ext.lower().strip() == value.lower().strip() + # ext.lower().strip() == value.lower().strip() if type(tmp) == list and subcheck in tmp: new_list.append(item) found = True @@ -659,111 +833,70 @@ def filter_list(self, input_list, field, check, value, opposite): new_list.append(item) found = True break + elif type(tmp) == int and str(tmp) == subcheck: + new_list.append(item) + found = True + break else: - print("Nothing matching") + if 
str(tmp) == str(subcheck): + new_list.append(item) + found = True + break if not found: failed_list.append(item) # IS EMPTY FOR STR OR LISTS elif check == "is empty": - if tmp == "[]": + if str(tmp) == "[]": tmp = [] - if type(tmp) == list and len(tmp) == 0 and not flip: - new_list.append(item) - elif type(tmp) == list and len(tmp) > 0 and flip: - new_list.append(item) - elif type(tmp) == str and not tmp and not flip: + if str(tmp) == "{}": + tmp = [] + + if type(tmp) == list and len(tmp) == 0: new_list.append(item) - elif type(tmp) == str and tmp and flip: + elif type(tmp) == str and not tmp: new_list.append(item) else: failed_list.append(item) # STARTS WITH = FOR STR OR [0] FOR LIST elif check == "starts with": - if type(tmp) == list and tmp[0] == value and not flip: - new_list.append(item) - elif type(tmp) == list and tmp[0] != value and flip: + if type(tmp) == list and tmp[0] == value: new_list.append(item) - elif type(tmp) == str and tmp.startswith(value) and not flip: - new_list.append(item) - elif type(tmp) == str and not tmp.startswith(value) and flip: + elif type(tmp) == str and tmp.startswith(value): new_list.append(item) else: failed_list.append(item) # ENDS WITH = FOR STR OR [-1] FOR LIST elif check == "ends with": - if type(tmp) == list and tmp[-1] == value and not flip: - new_list.append(item) - elif type(tmp) == list and tmp[-1] != value and flip: + if type(tmp) == list and tmp[-1] == value: new_list.append(item) - elif type(tmp) == str and tmp.endswith(value) and not flip: - new_list.append(item) - elif type(tmp) == str and not tmp.endswith(value) and flip: + elif type(tmp) == str and tmp.endswith(value): new_list.append(item) else: failed_list.append(item) # CONTAINS FIND FOR LIST AND IN FOR STR elif check == "contains": - if type(tmp) == list and value.lower() in tmp and not flip: - new_list.append(item) - elif type(tmp) == list and value.lower() not in tmp and flip: - new_list.append(item) - elif ( - type(tmp) == str - and 
tmp.lower().find(value.lower()) != -1 - and not flip - ): - new_list.append(item) - elif ( - type(tmp) == str - and tmp.lower().find(value.lower()) == -1 - and flip + # if str(value).lower() in str(tmp).lower(): + if str(value).lower() in str(tmp).lower() or self.check_wildcard( + value, tmp ): new_list.append(item) else: failed_list.append(item) + elif check == "contains any of": - self.logger.info("Inside contains any of") + value = self.parse_list_internal(value) checklist = value.split(",") - self.logger.info("Checklist and tmp: %s - %s" % (checklist, tmp)) found = False - for subcheck in checklist: - subcheck = subcheck.strip().lower() - #ext.lower().strip() == value.lower().strip() - if type(tmp) == list and subcheck in tmp and not flip: - new_list.append(item) - found = True - break - elif type(tmp) == list and subcheck in tmp and flip: - failed_list.append(item) - found = True - break - elif type(tmp) == list and subcheck not in tmp and not flip: - new_list.append(item) - found = True - break - elif type(tmp) == list and subcheck not in tmp and flip: - failed_list.append(item) - found = True - break - elif (type(tmp) == str and tmp.lower().find(subcheck) != -1 and not flip): - new_list.append(item) - found = True - break - elif (type(tmp) == str and tmp.lower().find(subcheck) != -1 and flip): - failed_list.append(item) - found = True - break - elif (type(tmp) == str and tmp.lower().find(subcheck) == -1 and not flip): - failed_list.append(item) - found = True - break - elif (type(tmp) == str and tmp.lower().find(subcheck) == -1 and flip): + for checker in checklist: + if str(checker).lower() in str( + tmp + ).lower() or self.check_wildcard(checker, tmp): new_list.append(item) found = True break @@ -773,76 +906,92 @@ def filter_list(self, input_list, field, check, value, opposite): # CONTAINS FIND FOR LIST AND IN FOR STR elif check == "field is unique": - #self.logger.info("FOUND: %s" - if tmp.lower() not in found_items and not flip: - new_list.append(item) - 
found_items.append(tmp.lower()) - elif tmp.lower() in found_items and flip: + if tmp.lower() not in found_items: new_list.append(item) found_items.append(tmp.lower()) else: failed_list.append(item) - #tmp = json.dumps(tmp) + # CONTAINS FIND FOR LIST AND IN FOR STR + elif check == "larger than": + list_set = False + try: + if str(tmp).isdigit() and str(value).isdigit(): + if int(tmp) > int(value): + new_list.append(item) + list_set = True + except AttributeError as e: + pass - #for item in new_list: - #if type(tmp) == list and value.lower() in tmp and not flip: - # new_list.append(item) - # found = True - # break - #elif type(tmp) == list and value.lower() not in tmp and flip: - # new_list.append(item) - # found = True - # break + try: + value = len(json.loads(value)) + except Exception as e: + pass - # CONTAINS FIND FOR LIST AND IN FOR STR - elif check == "contains any of": - value = self.parse_list_internal(value) - checklist = value.split(",") - tmp = tmp - self.logger.info("CHECKLIST: %s. 
Value: %s" % (checklist, tmp)) - found = False - for value in checklist: - if value in tmp and not flip: - new_list.append(item) - found = True - break - elif value not in tmp and flip: + try: + # Check if it's a list in autocast and if so, check the length + if len(json.loads(tmp)) > int(value): new_list.append(item) - found = True - break - - if not found: - failed_list.append(item) + list_set = True + except Exception as e: + pass - elif check == "larger than": - if int(tmp) > int(value) and not flip: - new_list.append(item) - elif int(tmp) > int(value) and flip: - new_list.append(item) - else: + if not list_set: failed_list.append(item) elif check == "less than": - if int(tmp) < int(value) and not flip: - new_list.append(item) - elif int(tmp) < int(value) and flip: - new_list.append(item) - else: + # Old + # if int(tmp) < int(value): + # new_list.append(item) + # else: + # failed_list.append(item) + + list_set = False + try: + if str(tmp).isdigit() and str(value).isdigit(): + if int(tmp) < int(value): + new_list.append(item) + list_set = True + except AttributeError as e: + pass + + try: + value = len(json.loads(value)) + except Exception as e: + pass + + try: + # Check if it's a list in autocast and if so, check the length + if len(json.loads(tmp)) < int(value): + new_list.append(item) + list_set = True + except Exception as e: + pass + + if not list_set: failed_list.append(item) elif check == "in cache key": + if item == input_list[0]: + self.shared_cache = self.preload_cache(key=value) + ret = self.check_cache_contains(value, tmp, "true") + if ret["success"] == True and ret["found"] == True: new_list.append(item) else: failed_list.append(item) - #return { + if len(self.cache_update_buffer) > 400 or ( + item == input_list[-1] and len(self.cache_update_buffer) > 0 + ): + self.update_cache(value) + + # return { # "success": True, # "found": False, # "key": key, # "value": new_value, - #} + # } # SINGLE ITEM COULD BE A FILE OR A LIST OF FILES elif check == 
"files by extension": @@ -852,12 +1001,7 @@ def filter_list(self, input_list, field, check, value, opposite): for file_id in tmp: filedata = self.get_file(file_id) _, ext = os.path.splitext(filedata["filename"]) - if ( - ext.lower().strip() == value.lower().strip() - and not flip - ): - file_list.append(file_id) - elif ext.lower().strip() != value.lower().strip() and flip: + if ext.lower().strip() == value.lower().strip(): file_list.append(file_id) # else: # failed_list.append(file_id) @@ -876,32 +1020,28 @@ def filter_list(self, input_list, field, check, value, opposite): elif type(tmp) == str: filedata = self.get_file(tmp) _, ext = os.path.splitext(filedata["filename"]) - if ext.lower().strip() == value.lower().strip() and not flip: + if ext.lower().strip() == value.lower().strip(): new_list.append(item) - elif ext.lower().strip() != value.lower().strip() and flip: - new_list.append((item, ext)) else: failed_list.append(item) except Exception as e: - self.logger.info("[WARNING] FAILED WITH EXCEPTION: %s" % e) failed_list.append(item) # return - if check == "equals any of" and flip: + if flip: tmplist = new_list new_list = failed_list failed_list = tmplist - try: - return json.dumps( - { - "success": True, - "valid": new_list, - "invalid": failed_list, - } - ) + data = { + "success": True, + "valid": new_list, + "invalid": failed_list, + } + + return json.dumps(data) # new_list = json.dumps(new_list) except json.decoder.JSONDecodeError as e: return json.dumps( @@ -913,7 +1053,7 @@ def filter_list(self, input_list, field, check, value, opposite): return new_list - #def multi_list_filter(self, input_list, field, check, value): + # def multi_list_filter(self, input_list, field, check, value): # input_list = input_list.replace("'", '"', -1) # input_list = json.loads(input_list) @@ -978,8 +1118,8 @@ def get_file_meta(self, file_id): "%s/api/v1/files/%s?execution_id=%s" % (self.url, file_id, self.current_execution_id), headers=headers, + verify=False, ) - 
self.logger.info(f"RET: {ret}") return ret.text @@ -988,28 +1128,26 @@ def delete_file(self, file_id): headers = { "Authorization": "Bearer %s" % self.authorization, } - self.logger.info("HEADERS: %s" % headers) ret = requests.delete( "%s/api/v1/files/%s?execution_id=%s" % (self.url, file_id, self.current_execution_id), headers=headers, + verify=False, ) return ret.text - def create_file(self, filename, data): - self.logger.info("Inside function") - + def create_file(self, filename, data, category=""): try: if str(data).startswith("b'") and str(data).endswith("'"): data = data[2:-1] - if str(data).startswith("\"") and str(data).endswith("\""): + if str(data).startswith('"') and str(data).endswith('"'): data = data[2:-1] except Exception as e: self.logger.info(f"Exception: {e}") try: - #if not isinstance(data, str) and not isinstance(data, int) and not isinstance(float) and not isinstance(data, bool): + # if not isinstance(data, str) and not isinstance(data, int) and not isinstance(float) and not isinstance(data, bool): if isinstance(data, dict) or isinstance(data, list): data = json.dumps(data) except: @@ -1018,6 +1156,7 @@ def create_file(self, filename, data): filedata = { "filename": filename, "data": data, + "namespace": category, } fileret = self.set_files([filedata]) @@ -1025,31 +1164,47 @@ def create_file(self, filename, data): if len(fileret) == 1: value = {"success": True, "filename": filename, "file_id": fileret[0]} - return value + return value - # Input is WAS a file, hence it didn't get the files + # Input is WAS a file, hence it didn't get the files def list_file_category_ids(self, file_category): return self.get_file_category_ids(file_category) - # Input is WAS a file, hence it didn't get the files - def get_file_value(self, filedata): - filedata = self.get_file(filedata) + # Input is WAS a file, hence it didn't get the files + # Category doesn't matter as it uses file ID, which is unique anyway + def get_file_value(self, filedata, category=""): + 
filedata = self.get_file(filedata, category) if filedata is None: - return "File is empty?" + return { + "success": False, + "reason": "File not found", + } + + if "data" not in filedata: + return { + "success": False, + "reason": "File content not found. File might be empty or not exist", + } - self.logger.info("INSIDE APP DATA: %s" % filedata) try: return filedata["data"].decode() except: try: return filedata["data"].decode("utf-16") except: - return { - "success": False, - "reason": "Got the file, but the encoding can't be printed", - } + try: + return filedata["data"].decode("utf-8") + except: + try: + return filedata["data"].decode("latin-1") + except: + return { + "success": False, + "reason": "Got the file, but the encoding can't be printed", + "size": len(filedata["data"]), + } - def download_remote_file(self, url, custom_filename=""): + def download_remote_file(self, url, custom_filename="", category=""): ret = requests.get(url, verify=False) # nosec filename = url.split("/")[-1] if "?" 
in filename: @@ -1063,6 +1218,7 @@ def download_remote_file(self, url, custom_filename=""): { "filename": filename, "data": ret.content, + "namespace": category, } ] ) @@ -1074,7 +1230,6 @@ def download_remote_file(self, url, custom_filename=""): return value - def extract_archive(self, file_id, fileformat="zip", password=None): try: return_data = {"success": False, "files": []} @@ -1082,9 +1237,7 @@ def extract_archive(self, file_id, fileformat="zip", password=None): item = self.get_file(file_id) return_ids = None - self.logger.info("Working with fileformat %s" % fileformat) with tempfile.TemporaryDirectory() as tmpdirname: - # Get archive and save phisically with open(os.path.join(tmpdirname, "archive"), "wb") as f: f.write(item["data"]) @@ -1094,13 +1247,12 @@ def extract_archive(self, file_id, fileformat="zip", password=None): # Zipfile for zipped archive if fileformat.strip().lower() == "zip": try: - self.logger.info("Starting zip extraction") - with zipfile.ZipFile(os.path.join(tmpdirname, "archive")) as z_file: + with zipfile.ZipFile( + os.path.join(tmpdirname, "archive") + ) as z_file: if password: - self.logger.info("In zip extraction with password") z_file.setpassword(bytes(password.encode())) - self.logger.info("Past zip extraction") for member in z_file.namelist(): filename = os.path.basename(member) if not filename: @@ -1108,7 +1260,10 @@ def extract_archive(self, file_id, fileformat="zip", password=None): source = z_file.open(member) to_be_uploaded.append( - {"filename": source.name, "data": source.read()} + { + "filename": source.name.split("/")[-1], + "data": source.read(), + } ) return_data["success"] = True @@ -1133,9 +1288,13 @@ def extract_archive(self, file_id, fileformat="zip", password=None): filename = os.path.basename(member) if not filename: continue + source = z_file.open(member) to_be_uploaded.append( - {"filename": source.name, "data": source.read()} + { + "filename": source.name.split("/")[-1], + "data": source.read(), + } ) 
return_data["success"] = True @@ -1156,9 +1315,13 @@ def extract_archive(self, file_id, fileformat="zip", password=None): ) as z_file: for member in z_file.getnames(): member_files = z_file.extractfile(member) + + if not member_files: + continue + to_be_uploaded.append( { - "filename": member, + "filename": member.split("/")[-1], "data": member_files.read(), } ) @@ -1179,13 +1342,19 @@ def extract_archive(self, file_id, fileformat="zip", password=None): ) as z_file: for member in z_file.getnames(): member_files = z_file.extractfile(member) + + if not member_files: + continue + to_be_uploaded.append( { - "filename": member, + "filename": member.split("/")[-1], "data": member_files.read(), } ) + return_data["success"] = True + except Exception as e: return_data["files"].append( { @@ -1208,7 +1377,7 @@ def extract_archive(self, file_id, fileformat="zip", password=None): filename = filename.split("/")[-1] to_be_uploaded.append( { - "filename": item["filename"], + "filename": item["filename"].split("/")[-1], "data": source.read(), } ) @@ -1225,10 +1394,8 @@ def extract_archive(self, file_id, fileformat="zip", password=None): else: return "No such format: %s" % fileformat - self.logger.info("Breaking as this only handles one archive at a time.") if len(to_be_uploaded) > 0: return_ids = self.set_files(to_be_uploaded) - self.logger.info(f"Got return ids from files: {return_ids}") for i in range(len(return_ids)): return_data["archive_id"] = file_id @@ -1248,7 +1415,6 @@ def extract_archive(self, file_id, fileformat="zip", password=None): } ) else: - self.logger.info(f"No file ids to upload.") return_data["success"] = False return_data["files"].append( { @@ -1282,7 +1448,6 @@ def create_archive(self, file_ids, fileformat, name, password=None): "reason": "Make sure to send valid file ids. 
Example: file_13eea837-c56a-4d52-a067-e673c7186483,file_13eea837-c56a-4d52-a067-e673c7186484", } - self.logger.info("picking {}".format(file_ids)) # GET all items from shuffle items = [self.get_file(file_id) for file_id in file_ids] @@ -1292,22 +1457,31 @@ def create_archive(self, file_ids, fileformat, name, password=None): # Dump files on disk, because libs want path :( with tempfile.TemporaryDirectory() as tmpdir: paths = [] - self.logger.info("Number 1") for item in items: with open(os.path.join(tmpdir, item["filename"]), "wb") as f: f.write(item["data"]) paths.append(os.path.join(tmpdir, item["filename"])) # Create archive temporary - self.logger.info("{} items to inflate".format(len(items))) with tempfile.NamedTemporaryFile() as archive: - if fileformat == "zip": archive_name = "archive.zip" if not name else name - pyminizip.compress_multiple( - paths, [], archive.name, password, 5 + + pwd = ( + password + if isinstance(password, (bytes, bytearray)) + else password.encode() ) + with pyzipper.AESZipFile( + archive.name, "w", compression=pyzipper.ZIP_DEFLATED + ) as zf: + zf.setpassword(pwd) + zf.setencryption(pyzipper.WZ_AES, nbits=256) + + for path in paths: + zf.write(path, arcname=os.path.basename(path)) + elif fileformat == "7zip": archive_name = "archive.7z" if not name else name with py7zr.SevenZipFile( @@ -1338,36 +1512,44 @@ def create_archive(self, file_ids, fileformat, name, password=None): return {"success": False, "message": excp} def add_list_to_list(self, list_one, list_two): - if not isinstance(list_one, list) and not isinstance(list_one, dict): - if not list_one or list_one == " " or list_one == "None" or list_one == "null": + if not isinstance(list_one, list) and not isinstance(list_one, dict): + if ( + not list_one + or list_one == " " + or list_one == "None" + or list_one == "null" + ): list_one = "[]" try: list_one = json.loads(list_one) except json.decoder.JSONDecodeError as e: - self.logger.info("Failed to parse list1 as json: %s" % e) if 
list_one == None: list_one = [] else: return { "success": False, - "reason": f"List one is not a valid list: {list_one}" + "reason": f"List one is not a valid list: {list_one}", } if not isinstance(list_two, list) and not isinstance(list_two, dict): - if not list_two or list_two == " " or list_two == "None" or list_two == "null": + if ( + not list_two + or list_two == " " + or list_two == "None" + or list_two == "null" + ): list_two = "[]" try: list_two = json.loads(list_two) except json.decoder.JSONDecodeError as e: - self.logger.info("Failed to parse list2 as json: %s" % e) if list_one == None: list_one = [] else: return { "success": False, - "reason": f"List two is not a valid list: {list_two}" + "reason": f"List two is not a valid list: {list_two}", } if isinstance(list_one, dict): @@ -1385,21 +1567,13 @@ def diff_lists(self, list_one, list_two): try: list_one = json.loads(list_one) except json.decoder.JSONDecodeError as e: - self.logger.info("Failed to parse list1 as json: %s" % e) - return { - "success": False, - "reason": "list_one is not a valid list." - } + return {"success": False, "reason": "list_one is not a valid list."} if isinstance(list_two, str): try: list_two = json.loads(list_two) except json.decoder.JSONDecodeError as e: - self.logger.info("Failed to parse list2 as json: %s" % e) - return { - "success": False, - "reason": "list_two is not a valid list." 
- } + return {"success": False, "reason": "list_two is not a valid list."} def diff(li1, li2): try: @@ -1408,7 +1582,7 @@ def diff(li1, li2): # Bad json diffing - at least order doesn't matter :) not_found = [] for item in list_one: - #item = sorted(item.items()) + # item = sorted(item.items()) if item in list_two: pass else: @@ -1436,39 +1610,59 @@ def diff(li1, li2): "diff": newdiff, } - def merge_lists(self, list_one, list_two, set_field="", sort_key_list_one="", sort_key_list_two=""): + def merge_lists( + self, + list_one, + list_two, + set_field="", + sort_key_list_one="", + sort_key_list_two="", + ): if isinstance(list_one, str): try: list_one = json.loads(list_one) except json.decoder.JSONDecodeError as e: - self.logger.info("Failed to parse list1 as json: %s" % e) + pass if isinstance(list_two, str): try: list_two = json.loads(list_two) except json.decoder.JSONDecodeError as e: - self.logger.info("Failed to parse list2 as json: %s" % e) + pass if not isinstance(list_one, list) or not isinstance(list_two, list): - return {"success": False, "message": "Input lists need to be valid JSON lists."} + if isinstance(list_one, dict) and isinstance(list_two, dict): + for key, value in list_two.items(): + list_one[key] = value + + return list_one + + return { + "success": False, + "message": "Both input lists need to be valid JSON lists.", + } if len(list_one) != len(list_two): - return {"success": False, "message": "Lists length must be the same. %d vs %d" % (len(list_one), len(list_two))} + return { + "success": False, + "message": "Lists length must be the same. %d vs %d. Are you trying to add them to a single list? 
Use add_list_to_list" + % (len(list_one), len(list_two)), + } if len(sort_key_list_one) > 0: - self.logger.info("Sort 1 %s by key: %s" % (list_one, sort_key_list_one)) try: - list_one = sorted(list_one, key=lambda k: k.get(sort_key_list_one), reverse=True) + list_one = sorted( + list_one, key=lambda k: k.get(sort_key_list_one), reverse=True + ) except: - self.logger.info("Failed to sort list one") pass if len(sort_key_list_two) > 0: - #self.logger.info("Sort 2 %s by key: %s" % (list_two, sort_key_list_two)) try: - list_two = sorted(list_two, key=lambda k: k.get(sort_key_list_two), reverse=True) + list_two = sorted( + list_two, key=lambda k: k.get(sort_key_list_two), reverse=True + ) except: - self.logger.info("Failed to sort list one") pass # Loops for each item in sub array and merges items together @@ -1476,16 +1670,17 @@ def merge_lists(self, list_one, list_two, set_field="", sort_key_list_one="", so base_key = "shuffle_auto_merge" try: for i in range(len(list_one)): - #self.logger.info(list_two[i]) if isinstance(list_two[i], dict): for key, value in list_two[i].items(): list_one[i][key] = value elif isinstance(list_two[i], str) and list_two[i] == "": continue - elif isinstance(list_two[i], str) or isinstance(list_two[i], int) or isinstance(list_two[i], bool): - self.logger.info("IN SETTER FOR %s" % list_two[i]) + elif ( + isinstance(list_two[i], str) + or isinstance(list_two[i], int) + or isinstance(list_two[i], bool) + ): if len(set_field) == 0: - self.logger.info("Define a JSON key to set for List two (Set Field)") list_one[i][base_key] = list_two[i] else: set_field = set_field.replace(" ", "_", -1) @@ -1499,6 +1694,22 @@ def merge_lists(self, list_one, list_two, set_field="", sort_key_list_one="", so return list_one + def merge_json_objects( + self, + list_one, + list_two, + set_field="", + sort_key_list_one="", + sort_key_list_two="", + ): + return self.merge_lists( + list_one, + list_two, + set_field=set_field, + sort_key_list_one=sort_key_list_one, + 
sort_key_list_two=sort_key_list_two, + ) + def fix_json(self, json_data): try: deletekeys = [] @@ -1518,19 +1729,19 @@ def fix_json(self, json_data): else: json_data[key] = value - #elif isinstance(value, list): + # elif isinstance(value, list): # json_data[key] = value - #else: + # else: # json_data[key] = value # #for item in json_data[key]: # # if isinstance(item, dict): # # json_data[ - + for key in deletekeys: del json_data[key] except Exception as e: - print("[DEBUG] Problem in JSON (fix_json): %s" % e) + pass return json_data @@ -1543,7 +1754,7 @@ def xml_json_convertor(self, convertto, data): try: if convertto == "json": - data = data.replace(" encoding=\"utf-8\"", " ") + data = data.replace(' encoding="utf-8"', " ") ans = xmltodict.parse(data) ans = self.fix_json(ans) json_data = json.dumps(ans) @@ -1553,20 +1764,9 @@ def xml_json_convertor(self, convertto, data): ans = readfromstring(data) return json2xml.Json2xml(ans, wrapper="all", pretty=True).to_xml() except Exception as e: - return { - "success": False, - "input": data, - "reason": f"{e}" - } + return {"success": False, "input": data, "reason": f"{e}"} def date_to_epoch(self, input_data, date_field, date_format): - - self.logger.info( - "Executing with {} on {} with format {}".format( - input_data, date_field, date_format - ) - ) - if isinstance(input_data, str): result = json.loads(input_data) else: @@ -1582,11 +1782,9 @@ def date_to_epoch(self, input_data, date_field, date_format): def compare_relative_date( self, timestamp, date_format, equality_test, offset, units, direction ): - if timestamp== "None": + if timestamp == "None": return False - print("Converting input date.") - if date_format == "autodetect": input_dt = dateutil_parser(timestamp).replace(tzinfo=None) elif date_format != "%s": @@ -1608,7 +1806,7 @@ def compare_relative_date( if utc_format.endswith("%z"): utc_format = utc_format.replace("%z", "Z") - #if date_format != "%s" and date_format != "autodetect": + # if date_format != "%s" 
and date_format != "autodetect": if date_format == "autodetect": formatted_dt = datetime.datetime.utcnow() + delta elif date_format != "%s": @@ -1623,29 +1821,24 @@ def compare_relative_date( comparison_dt = formatted_dt elif direction == "ago": comparison_dt = formatted_dt - delta - #formatted_dt - delta - #comparison_dt = datetime.datetime.utcnow() + # formatted_dt - delta + # comparison_dt = datetime.datetime.utcnow() else: comparison_dt = formatted_dt + delta - #comparison_dt = datetime.datetime.utcnow() - - print("{} {} {} is {}. Delta: {}".format(offset, units, direction, comparison_dt, delta)) + # comparison_dt = datetime.datetime.utcnow() diff = int((input_dt - comparison_dt).total_seconds()) - print( - "\nDifference between {} and {} is {} seconds ({} days)\n".format(timestamp, comparison_dt, diff, int(diff/86400)) - ) if units == "seconds": diff = diff elif units == "minutes": - diff = int(diff/60) + diff = int(diff / 60) elif units == "hours": - diff = int(diff/3600) + diff = int(diff / 3600) elif units == "days": - diff = int(diff/86400) + diff = int(diff / 86400) elif units == "week": - diff = int(diff/604800) + diff = int(diff / 604800) result = False if equality_test == ">": @@ -1659,7 +1852,7 @@ def compare_relative_date( result = not (result) elif equality_test == "=": - result = diff == 0 + result = diff == 0 elif equality_test == "!=": result = diff != 0 @@ -1672,23 +1865,10 @@ def compare_relative_date( if direction == "ahead" and diff != 0: result = not (result) - print( - "At {}, is {} {} to {} {} {}? {}. 
Diff {}".format( - formatted_dt, - timestamp, - equality_test, - offset, - units, - direction, - result, - diff, - ) - ) - parsed_string = "%s %s %s %s" % (equality_test, offset, units, direction) newdiff = diff if newdiff < 0: - newdiff = newdiff*-1 + newdiff = newdiff * -1 return { "success": True, @@ -1696,13 +1876,11 @@ def compare_relative_date( "check": parsed_string, "result": result, "diff": { - "days": int(int(newdiff)/86400), + "days": int(int(newdiff) / 86400), }, } - def run_math_operation(self, operation): - self.logger.info("Operation: %s" % operation) result = eval(operation) return result @@ -1713,12 +1891,17 @@ def escape_html(self, input_data): else: mapping = input_data - self.logger.info(f"Got mapping {json.dumps(mapping, indent=2)}") - result = markupsafe.escape(mapping) return mapping - def check_cache_contains(self, key, value, append): + def check_datastore_contains( + self, key, value, append, category="", return_values="true" + ): + return check_cache_contains(self, key, value, append, category, return_values) + + def check_cache_contains( + self, key, value, append, category="", return_values="true" + ): org_id = self.full_execution["workflow"]["execution_org"]["id"] url = "%s/api/v1/orgs/%s/get_cache" % (self.url, org_id) data = { @@ -1730,12 +1913,36 @@ def check_cache_contains(self, key, value, append): "key": key, } + if category: + data["category"] = category + + directcall = False + allvalues = {} + try: + for item in self.local_storage: + if ( + item["execution_id"] == self.current_execution_id + and item["key"] == key + ): + # Max keeping the local cache properly for 5 seconds due to workflow continuations + elapsed_time = time.time() - item["time_set"] + if elapsed_time > 5: + break + + allvalues = item["data"] + + except Exception as e: + print( + "[ERROR] Failed cache contains for current execution id local storage: %s" + % e + ) + if isinstance(value, dict) or isinstance(value, list): try: value = json.dumps(value) except 
Exception as e: - self.logger.info(f"[WARNING] Error in JSON dumping (cache contains): {e}") - + pass + if not isinstance(value, str): value = str(value) @@ -1744,11 +1951,27 @@ def check_cache_contains(self, key, value, append): if str(append).lower() == "true": append = True else: - append = False + append = False + + include_values = str(return_values).lower() == "true" + + if "success" not in allvalues: + # get_response = requests.post(url, json=data, verify=False) + pass - get_response = requests.post(url, json=data) try: - allvalues = get_response.json() + if "success" not in allvalues: + # allvalues = get_response.json() + allvalues = self.shared_cache + + if "success" not in allvalues: + if category: + data["category"] = category + + get_response = requests.post(url, json=data, verify=False) + allvalues = get_response.json() + directcall = True + try: if allvalues["value"] == None or allvalues["value"] == "null": allvalues["value"] = "[]" @@ -1759,21 +1982,30 @@ def check_cache_contains(self, key, value, append): if append == True: new_value = [str(value)] data["value"] = json.dumps(new_value) + if category: + data["category"] = category set_url = "%s/api/v1/orgs/%s/set_cache" % (self.url, org_id) - set_response = requests.post(set_url, json=data) + set_response = requests.post(set_url, json=data, verify=False) try: allvalues = set_response.json() - #allvalues["key"] = key - #return allvalues + self.shared_cache = self.preload_cache(key=key) - return { + newvalue = data["value"] + try: + newvalue = json.loads(data["value"]) + except json.JSONDecodeError: + pass + + response = { "success": True, "found": False, "key": key, "search": value, - "value": new_value, } + if include_values: + response["value"] = newvalue + return response except Exception as e: return { "success": False, @@ -1794,79 +2026,119 @@ def check_cache_contains(self, key, value, append): if allvalues["value"] == None or allvalues["value"] == "null": allvalues["value"] = "[]" + if 
isinstance(allvalues["value"], str): + try: + allvalues["value"] = json.loads(allvalues["value"]) + except json.JSONDecodeError: + self.logger.info("[WARNING] Failed inner value cache parsing") + allvalues["value"] = [allvalues["value"]] + + if not isinstance(allvalues["value"], list): + allvalues["value"] = [allvalues["value"]] + try: - parsedvalue = json.loads(allvalues["value"]) + parsedvalue = json.loads(str(allvalues["value"])) except json.decoder.JSONDecodeError as e: - parsedvalue = [] - - #return parsedvalue - - for item in parsedvalue: - #return "%s %s" % (item, value) - if item == value: - if not append: - return { - "success": True, - "found": True, - "reason": "Found and not appending!", - "key": key, - "search": value, - "value": json.loads(allvalues["value"]), - } - else: - return { - "success": True, - "found": True, - "reason": "Found, was appending, but item already exists", - "key": key, - "search": value, - "value": json.loads(allvalues["value"]), - } - - # Lol - break + parsedvalue = allvalues["value"] + + try: + for item in parsedvalue: + # return "%s %s" % (item, value) + # self.logger.info(f"{item} == {value}") + if str(item) == str(value): + if not append: + try: + newdata = json.loads(json.dumps(data)) + newdata["time_set"] = time.time() + newdata["data"] = allvalues + self.local_storage.append(newdata) + except Exception as e: + print( + "[ERROR] Failed in local storage append: %s" % e + ) + + response = { + "success": True, + "found": True, + "reason": "Found and not appending!", + "key": key, + "search": value, + } + if include_values: + response["value"] = allvalues["value"] + return response + else: + response = { + "success": True, + "found": True, + "reason": "Found, was appending, but item already exists", + "key": key, + "search": value, + } + if include_values: + response["value"] = allvalues["value"] + return response + + # Lol + break + except Exception as e: + parsedvalue = [str(parsedvalue)] + append = True if not append: - 
return { + response = { "success": True, "found": False, "reason": "Not found, not appending (2)!", "key": key, "search": value, - "value": json.loads(allvalues["value"]), } + if include_values: + response["value"] = allvalues["value"] + return response - #parsedvalue = json.loads(allvalues["value"]) - #if parsedvalue == None: - # parsedvalue = [] - - #return parsedvalue - new_value = parsedvalue - if new_value == None: - new_value = [value] + # parsedvalue.append(value) - new_value.append(value) + # data["value"] = json.dumps(parsedvalue) - #return new_value + if value not in allvalues["value"] and isinstance( + allvalues["value"], list + ): + self.cache_update_buffer.append(value) + allvalues["value"].append(value) - data["value"] = json.dumps(new_value) - #return allvalues - - set_url = "%s/api/v1/orgs/%s/set_cache" % (self.url, org_id) - response = requests.post(set_url, json=data) exception = "" try: - allvalues = response.json() - #return allvalues + # FIXME: This is a hack, but it works + if directcall: + new_value = allvalues["value"] + if new_value == None: + new_value = [value] - return { + data["value"] = json.dumps(new_value) + if category: + data["category"] = category + + set_url = "%s/api/v1/orgs/%s/set_cache" % (self.url, org_id) + response = requests.post(set_url, json=data, verify=False) + allvalues = response.json() + + newvalue = data["value"] + try: + newvalue = json.loads(data["value"]) + except: + pass + + response = { "success": True, "found": False, "reason": "Appended as it didn't exist", "key": key, "search": value, - "value": new_value, } + if include_values: + response["value"] = newvalue + return response except Exception as e: exception = e pass @@ -1876,25 +2148,23 @@ def check_cache_contains(self, key, value, append): "found": True, "reason": f"Failed to set append the value: {exception}. 
This should never happen", "search": value, - "key": key + "key": key, } - - self.logger.info("Handle all values!") - #return allvalues + # return allvalues except Exception as e: + print("[ERROR] Failed check cache contains: %s" % e) return { "success": False, "key": key, - "reason": f"Failed to get cache: {e}", + "reason": f"Failed to handle cache contains. Is the original value a list?: {e}", "search": value, "found": False, } - return value.text + return value.text - ## Adds value to a subkey of the cache ## subkey = "hi", value = "test", overwrite=False ## {"subkey": "hi", "value": "test"} @@ -1903,8 +2173,7 @@ def check_cache_contains(self, key, value, append): ## subkey = "hi", value = "test3", overwrite=False ## {"subkey": "hi", "value": ["test2", "test3"]} - #def set_cache_value(self, key, value): - def change_cache_subkey(self, key, subkey, value, overwrite): + def change_cache_subkey(self, key, subkey, value, overwrite, category=""): org_id = self.full_execution["workflow"]["execution_org"]["id"] url = "%s/api/v1/orgs/%s/set_cache" % (self.url, org_id) @@ -1913,6 +2182,7 @@ def change_cache_subkey(self, key, subkey, value, overwrite): value = json.dumps(value) except Exception as e: self.logger.info(f"[WARNING] Error in JSON dumping (set cache): {e}") + elif not isinstance(value, str): value = str(value) @@ -1925,17 +2195,24 @@ def change_cache_subkey(self, key, subkey, value, overwrite): "value": value, } - response = requests.post(url, json=data) + if category: + data["category"] = category + + response = requests.post(url, json=data, verify=False) try: allvalues = response.json() allvalues["key"] = key - #allvalues["value"] = json.loads(json.dumps(value)) + # allvalues["value"] = json.loads(json.dumps(value)) - if (value.startswith("{") and value.endswith("}")) or (value.startswith("[") and value.endswith("]")): + if (value.startswith("{") and value.endswith("}")) or ( + value.startswith("[") and value.endswith("]") + ): try: allvalues["value"] = 
json.loads(value) except json.decoder.JSONDecodeError as e: - self.logger.info("[WARNING] Failed inner value cache parsing: %s" % e) + self.logger.info( + "[WARNING] Failed inner value cache parsing: %s" % e + ) allvalues["value"] = str(value) else: allvalues["value"] = str(value) @@ -1945,7 +2222,41 @@ def change_cache_subkey(self, key, subkey, value, overwrite): self.logger.info("Value couldn't be parsed") return response.text - def get_cache_value(self, key): + def delete_datastore_value(self, key, category=""): + return self.delete_cache(key, category=category) + + def delete_cache_value(self, key, category=""): + return self.delete_cache(key, category=category) + + def get_datastore_value(self, key, category=""): + return self.get_cache_value(key, category=category) + + def get_ioc(self, ioc, data_type=""): + if len(data_type) == 0: + ioc_types = [ + "domains", + "urls", + "email_addresses", + "ipv4s", + "ipv6s", + "ipv4_cidrs", + "md5s", + "sha256s", + "sha1s", + "cves", + ] + + iocs = find_iocs(str(ioc)) + for key, value in iocs.items(): + for item in value: + if item.lower() == ioc.lower(): + print("[DEBUG] Found IOC %s in type %s" % (ioc, key)) + data_type = key[:-1] + break + + if len(data_type) > 0: + break + org_id = self.full_execution["workflow"]["execution_org"]["id"] url = "%s/api/v1/orgs/%s/get_cache" % (self.url, org_id) data = { @@ -1953,28 +2264,63 @@ def get_cache_value(self, key): "execution_id": self.current_execution_id, "authorization": self.authorization, "org_id": org_id, - "key": key, + "key": str(key), + "category": "ioc_%s" % data_type.replace(" ", "_").lower(), + } + + value = requests.post(url, json=data, verify=False) + try: + allvalues = value.json() + allvalues["key"] = key + + if allvalues["success"] == True and len(allvalues["value"]) > 0: + allvalues["found"] = True + else: + allvalues["success"] = True + allvalues["found"] = False + + try: + parsedvalue = json.loads(allvalues["value"]) + allvalues["value"] = parsedvalue + + 
except: + pass + + return json.dumps(allvalues) + except: + self.logger.info("Value couldn't be parsed, or json dump of value failed") + return value.text + + def get_cache_value(self, key, category=""): + org_id = self.full_execution["workflow"]["execution_org"]["id"] + url = "%s/api/v1/orgs/%s/get_cache" % (self.url, org_id) + data = { + "workflow_id": self.full_execution["workflow"]["id"], + "execution_id": self.current_execution_id, + "authorization": self.authorization, + "org_id": org_id, + "key": str(key), } - value = requests.post(url, json=data) + if category: + data["category"] = category + + value = requests.post(url, json=data, verify=False) try: allvalues = value.json() - self.logger.info("VAL1: ", allvalues) allvalues["key"] = key - self.logger.info("VAL2: ", allvalues) - if allvalues["success"] == True: + if allvalues["success"] == True and len(allvalues["value"]) > 0: allvalues["found"] = True else: - allvalues["success"] = True - allvalues["found"] = False + allvalues["success"] = True + allvalues["found"] = False try: parsedvalue = json.loads(allvalues["value"]) allvalues["value"] = parsedvalue except: - self.logger.info("Parsing of value as JSON failed") pass return json.dumps(allvalues) @@ -1982,8 +2328,127 @@ def get_cache_value(self, key): self.logger.info("Value couldn't be parsed, or json dump of value failed") return value.text - # FIXME: Add option for org only & sensitive data (not to be listed) - def set_cache_value(self, key, value): + def set_datastore_value(self, key, value, category=""): + return set_cache_value(self, key, value, category=category) + + # Check if a specific key exists in a datastore category or not + # Otherwise appends it automatically + def search_datastore_category(self, input_list, key, category): + returnvalue = { + "success": False, + "category": category, + "new": [], + "exists": [], + } + + if len(key) == 0 or len(category) == 0: + returnvalue["reason"] = "Key and/or Category is empty" + return returnvalue + 
+ data = [] + if isinstance(input_list, dict): + input_list = [input_list] + + if not isinstance(input_list, list): + try: + input_list = json.loads(str(input_list)) + except Exception as e: + returnvalue["reason"] = ( + f"Input list is not a valid JSON list: {input_list}", + ) + returnvalue["details"] = str(e) + return returnvalue + + org_id = self.full_execution["workflow"]["execution_org"]["id"] + cnt = -1 + for item in input_list: + cnt += 1 + if not isinstance(item, dict): + try: + item = json.loads(str(item)) + except Exception as e: + self.logger.info( + "[ERROR][%s] Failed to parse item as JSON: %s" + % (self.current_execution_id, e) + ) + continue + + input_list[cnt] = item + if key not in item: + returnvalue["reason"] = ( + "Couldn't find key '%s' in every item. Make sure to use a key that exists in every entry." + % (key), + ) + return returnvalue + + data.append( + { + "workflow_id": self.full_execution["workflow"]["id"], + "execution_id": self.current_execution_id, + "authorization": self.authorization, + "org_id": org_id, + "key": str(item[key]), + "value": json.dumps(item), + "category": category, + } + ) + + url = f"{self.url}/api/v2/datastore?bulk=true&execution_id={self.current_execution_id}&authorization={self.authorization}" + response = requests.post(url, json=data, verify=False) + + if response.status_code != 200: + returnvalue["reason"] = "Failed to check datastore key exists" + returnvalue["details"] = response.text + returnvalue["status"] = response.status_code + return returnvalue + + data = "" + try: + data = response.json() + except json.decoder.JSONDecodeError as e: + return response.text + + if "keys_existed" not in data: + returnvalue["error"] = ( + "Invalid response from backend during bulk update of keys" + ) + returnvalue["details"] = data + + return returnvalue + + not_found_keys = [] + returnvalue["success"] = True + for datastore_item in input_list: + found = False + for existing_item in data["keys_existed"]: + if 
str(existing_item["key"]) != str(datastore_item[key]): + continue + + if existing_item["existed"] == True: + returnvalue["exists"].append(datastore_item) + else: + returnvalue["new"].append(datastore_item) + + found = True + break + + if not found: + print( + "[ERROR][%s] Key %s not found in datastore response, adding as new" + % (self.current_execution_id, datastore_item[key]) + ) + # returnvalue["new"].append(datastore_item) + not_found_keys.append(datastore_item[key]) + + if len(not_found_keys) > 0: + returnvalue["unhandled_keys"] = not_found_keys + returnvalue["reason"] = ( + "Something went wrong updating the unhandled_keys. Please contact support@shuffler.io if this persists." + ) + + return json.dumps(returnvalue, indent=4) + + def set_cache_value(self, key, value, category=""): org_id = self.full_execution["workflow"]["execution_org"]["id"] url = "%s/api/v1/orgs/%s/set_cache" % (self.url, org_id) @@ -1992,7 +2457,7 @@ def set_cache_value(self, key, value): value = json.dumps(value) except Exception as e: self.logger.info(f"[WARNING] Error in JSON dumping (set cache): {e}") - + if not isinstance(value, str): value = str(value) @@ -2001,36 +2466,58 @@ def set_cache_value(self, key, value): "execution_id": self.current_execution_id, "authorization": self.authorization, "org_id": org_id, - "key": key, + "key": str(key), "value": value, } - response = requests.post(url, json=data) + if category: + data["category"] = category + + response = requests.post(url, json=data, verify=False) try: allvalues = response.json() allvalues["key"] = key - #allvalues["value"] = json.loads(json.dumps(value)) + # allvalues["value"] = json.loads(json.dumps(value)) - if (value.startswith("{") and value.endswith("}")) or (value.startswith("[") and value.endswith("]")): + allvalues["existed"] = False + if "keys_existed" in allvalues: + for key_info in allvalues["keys_existed"]: + if key_info["key"] == key: + allvalues["existed"] = key_info["existed"] + break + + if 
(value.startswith("{") and value.endswith("}")) or ( + value.startswith("[") and value.endswith("]") + ): try: allvalues["value"] = json.loads(value) except json.decoder.JSONDecodeError as e: - self.logger.info("[WARNING] Failed inner value cache parsing: %s" % e) + self.logger.info( + "[WARNING] Failed inner value cache parsing: %s" % e + ) allvalues["value"] = str(value) else: allvalues["value"] = str(value) + if category: + allvalues["category"] = category + return json.dumps(allvalues) except: self.logger.info("Value couldn't be parsed") return response.text - def convert_json_to_tags(self, json_object, split_value=", ", include_key=True, lowercase=True): + def convert_json_to_tags( + self, json_object, split_value=", ", include_key=True, lowercase=True + ): if isinstance(json_object, str): try: json_object = json.loads(json_object) except json.decoder.JSONDecodeError as e: - self.logger.info("Failed to parse list2 as json: %s. Type: %s" % (e, type(json_object))) + self.logger.info( + "Failed to parse list2 as json: %s. 
Type: %s" + % (e, type(json_object)) + ) if isinstance(lowercase, str) and lowercase.lower() == "true": lowercase = True @@ -2045,8 +2532,11 @@ def convert_json_to_tags(self, json_object, split_value=", ", include_key=True, parsedstring = [] try: for key, value in json_object.items(): - self.logger.info("KV: %s:%s" % (key, value)) - if isinstance(value, str) or isinstance(value, int) or isinstance(value, bool): + if ( + isinstance(value, str) + or isinstance(value, int) + or isinstance(value, bool) + ): if include_key == True: parsedstring.append("%s:%s" % (key, value)) else: @@ -2065,23 +2555,37 @@ def convert_json_to_tags(self, json_object, split_value=", ", include_key=True, return fullstring - def cidr_ip_match(self, ip, networks): - self.logger.info("Executing with\nIP: {},\nNetworks: {}".format(ip, networks)) + def autofix_network(self, ip_with_cidr): + try: + # Parse the input as an IPv4 network object + network = ipaddress.IPv4Network(ip_with_cidr, strict=False) + # Return the corrected network address + return str(network) + except ValueError as e: + print(f"Error: {e}") + return None + def cidr_ip_match(self, ip, networks): if isinstance(networks, str): try: networks = json.loads(networks) except json.decoder.JSONDecodeError as e: - self.logger.info("Failed to parse networks list as json: {}. 
Type: {}".format( - e, type(networks) - )) return { "success": False, "reason": "Networks is not a valid list: {}".format(networks), } + new_networks = [] + for network in networks: + new_network = self.autofix_network(network) + if new_network: + new_networks.append(new_network) + + networks = new_networks + try: ip_networks = list(map(ipaddress.ip_network, networks)) + # ip_address = ipaddress.ip_address(ip, False) ip_address = ipaddress.ip_address(ip) except ValueError as e: return "IP or some networks are not in valid format.\nError: {}".format(e) @@ -2090,15 +2594,15 @@ def cidr_ip_match(self, ip, networks): result = {} result["ip"] = ip - result['networks'] = list(map(str, matched_networks)) - result['is_contained'] = True if len(result['networks']) > 0 else False + result["networks"] = list(map(str, matched_networks)) + result["is_contained"] = True if len(result["networks"]) > 0 else False return json.dumps(result) def get_timestamp(self, time_format): timestamp = int(time.time()) if time_format == "unix" or time_format == "epoch": - self.logger.info("Running default timestamp %s" % timestamp) + pass return timestamp @@ -2107,14 +2611,14 @@ def get_hash_sum(self, value): sha256_value = "" try: - md5_value = hashlib.md5(str(value).encode('utf-8')).hexdigest() + md5_value = hashlib.md5(str(value).encode("utf-8")).hexdigest() except Exception as e: - self.logger.info(f"Error in md5sum: {e}") + pass try: - sha256_value = hashlib.sha256(str(value).encode('utf-8')).hexdigest() + sha256_value = hashlib.sha256(str(value).encode("utf-8")).hexdigest() except Exception as e: - self.logger.info(f"Error in sha256: {e}") + pass parsedvalue = { "success": True, @@ -2123,25 +2627,27 @@ def get_hash_sum(self, value): "sha256": sha256_value, } - return parsedvalue + return parsedvalue def run_oauth_request(self, url, jwt): headers = { "Content-Type": "application/x-www-form-urlencoded", } - data = 
"grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer&assertion=%s" % jwt + data = ( + "grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer&assertion=%s" + % jwt + ) - return requests.post(url, data=data, headers=headers).text + return requests.post(url, data=data, headers=headers, verify=False).text # Based on https://google-auth.readthedocs.io/en/master/reference/google.auth.crypt.html def get_jwt_from_file(self, file_id, jwt_audience, scopes, complete_request=True): allscopes = scopes - if "," in scopes: allscopes = " ".join(scopes.split(",")) - + # Service account key path filedata = self.get_file(file_id) if filedata["success"] == False: @@ -2149,50 +2655,43 @@ def get_jwt_from_file(self, file_id, jwt_audience, scopes, complete_request=True "success": False, "message": f"Failed to get file for ID {file_id}", } - + data = json.loads(filedata["data"], strict=False) - #sa_keyfile = "" + # sa_keyfile = "" sa_keyfile = data["private_key"] sa_email = data["client_email"] - + # The audience to target audience = jwt_audience - + """Generates a signed JSON Web Token using a Google API Service Account or similar.""" - def get_jwt(sa_keyfile, - sa_email, - audience, - allscopes, - expiry_length=3600): - + + def get_jwt(sa_keyfile, sa_email, audience, allscopes, expiry_length=3600): now = int(time.time()) - + # build payload payload = { # expires after 'expiry_length' seconds. # iss must match 'issuer' in the security configuration in your # swagger spec (e.g. service account email). It can be any string. - 'iss': sa_email, + "iss": sa_email, # aud must be either your Endpoints service name, or match the value # specified as the 'x-google-audience' in the OpenAPI document. 
- 'scope': allscopes, - 'aud': audience, + "scope": allscopes, + "aud": audience, "exp": now + expiry_length, - 'iat': now, - + "iat": now, # sub and email should match the service account's email address - 'sub': sa_email, - 'email': sa_email, + "sub": sa_email, + "email": sa_email, } - + # sign with keyfile - #signer = crypt.RSASigner.from_service_account_file(sa_keyfile) + # signer = crypt.RSASigner.from_service_account_file(sa_keyfile) signer = crypt.RSASigner.from_string(sa_keyfile) jwt_token = jwt.encode(signer, payload) - # print(jwt_token.decode('utf-8')) return jwt_token - - + signed_jwt = get_jwt(sa_keyfile, sa_email, audience, allscopes) if str(complete_request).lower() == "true": @@ -2221,11 +2720,18 @@ def get_synonyms(self, input_type): "uuid", "teamid", "messageid", - ], - "title": ["title", "message", "subject", "name"], - "description": ["description", "status", "explanation", "story", "details", "snippet"], - "email": ["mail", "email", "sender", "receiver", "recipient"], - "data": [ + ], + "title": ["title", "message", "subject", "name"], + "description": [ + "description", + "status", + "explanation", + "story", + "details", + "snippet", + ], + "email": ["mail", "email", "sender", "receiver", "recipient"], + "data": [ "data", "ip", "domain", @@ -2237,9 +2743,9 @@ def get_synonyms(self, input_type): "value", "item", "rules", - ], - "tags": ["tags", "taxonomies", "labels", "labelids"], - "assignment": [ + ], + "tags": ["tags", "taxonomies", "labels", "labelids"], + "assignment": [ "assignment", "user", "assigned_to", @@ -2247,40 +2753,44 @@ def get_synonyms(self, input_type): "closed_by", "closing_user", "opened_by", - ], - "severity": [ + ], + "severity": [ "severity", "sev", "magnitude", "relevance", - ] + ], } - + return [] - + def find_key(self, inputkey, synonyms): inputkey = inputkey.lower().replace(" ", "").replace(".", "") for key, value in synonyms.items(): if inputkey in value: return key - + return inputkey - + def 
run_key_recursion(self, json_input, synonyms): - if isinstance(json_input, str) or isinstance(json_input, int) or isinstance(json_input, float): + if ( + isinstance(json_input, str) + or isinstance(json_input, int) + or isinstance(json_input, float) + ): return json_input, {} - + if isinstance(json_input, list): if len(json_input) != 1: return json_input, {} else: json_input = json_input[0] - - #new_list = [] - #for item in json_input: - #run_key_recursion(item, synonyms) - #new_dict[new_key], found_important = run_key_recursion(value, synonyms) - + + # new_list = [] + # for item in json_input: + # run_key_recursion(item, synonyms) + # new_dict[new_key], found_important = run_key_recursion(value, synonyms) + # Looks for exact key:value stuff in other format if len(json_input.keys()) == 2: newkey = "" @@ -2290,54 +2800,58 @@ def run_key_recursion(self, json_input, synonyms): newkey = value elif key == "value": newvalue = value - + if len(newkey) > 0 and len(newvalue) > 0: json_input[newkey] = newvalue try: del json_input["name"] except: pass - + try: del json_input["value"] except: pass - + try: del json_input["key"] except: pass - + important_fields = {} new_dict = {} for key, value in json_input.items(): new_key = self.find_key(key, synonyms) - + if isinstance(value, list): new_list = [] for subitem in value: - returndata, found_important = self.run_key_recursion(subitem, synonyms) - + returndata, found_important = self.run_key_recursion( + subitem, synonyms + ) + new_list.append(returndata) for subkey, subvalue in found_important.items(): - important_fields[subkey] = subvalue - + important_fields[subkey] = subvalue + new_dict[new_key] = new_list - + elif isinstance(value, dict): # FIXMe: Try to understand Key:Values as well by translating them # name/key: subject # value: This is a subject # will become: # subject: This is a subject - - new_dict[new_key], found_important = self.run_key_recursion(value, synonyms) - + + new_dict[new_key], found_important = 
self.run_key_recursion( + value, synonyms + ) + for subkey, subvalue in found_important.items(): important_fields[subkey] = subvalue else: new_dict[new_key] = value - + # Translated fields are added as important if key.lower().replace(" ", "").replace(".", "") != new_key: try: @@ -2347,32 +2861,33 @@ def run_key_recursion(self, json_input, synonyms): important_fields[new_key] = new_dict[new_key] except: important_fields[new_key] = new_dict[new_key] - - #break - + + # break + return new_dict, important_fields - + # Should translate the data to something more useful def get_standardized_data(self, json_input, input_type): if isinstance(json_input, str): json_input = json.loads(json_input, strict=False) - + input_synonyms = self.get_synonyms(input_type) - - parsed_data, important_fields = self.run_key_recursion(json_input, input_synonyms) - + parsed_data, important_fields = self.run_key_recursion( + json_input, input_synonyms + ) + # Try base64 decoding and such too? for key, value in important_fields.items(): try: important_fields[key] = important_fields[key][key] except: pass - + try: important_fields[key] = base64.b64decode(important_fields[key]) except: pass - + return { "success": True, "original": json_input, @@ -2380,7 +2895,7 @@ def get_standardized_data(self, json_input, input_type): "changed_fields": important_fields, } - def generate_random_string(length=16, special_characters=True): + def generate_random_string(self, length=16, special_characters=True): try: length = int(length) except: @@ -2394,14 +2909,480 @@ def generate_random_string(length=16, special_characters=True): if str(special_characters).lower() == "false": characters = string.ascii_letters + string.digits + string.punctuation - password = ''.join(random.choice(characters) for i in range(length)) + password = "".join(random.choice(characters) for i in range(length)) return { "success": True, "password": password, } + def run_ssh_command( + self, host, port, user_name, private_key_file_id, 
password, command + ): + ssh_client = paramiko.SSHClient() + ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + + if port: + port = int(port) + else: + port = 22 + + if private_key_file_id: + new_file = self.get_file(private_key_file_id) + + try: + key_data = new_file["data"].decode() + except Exception as e: + return {"success": "false", "message": str(e)} + + private_key_file = StringIO() + private_key_file.write(key_data) + private_key_file.seek(0) + private_key = paramiko.RSAKey.from_private_key(private_key_file) + + try: + ssh_client.connect( + hostname=host, username=user_name, port=port, pkey=private_key + ) + except Exception as e: + return {"success": "false", "message": str(e)} + else: + try: + ssh_client.connect( + hostname=host, username=user_name, port=port, password=str(password) + ) + except Exception as e: + return {"success": "false", "message": str(e)} + + try: + stdin, stdout, stderr = ssh_client.exec_command(str(command)) + try: + errorLog = stderr.read().decode(errors="ignore") + except Exception as e: + errorLog = f"Failed to read stderr {e}" + try: + output = stdout.read().decode(errors="ignore") + except Exception as e: + output = f"Failed to read stdout {e}" + + except Exception as e: + return {"success": "false", "message": str(e)} + + return {"success": "true", "output": output, "error_logs": errorLog} + + def cleanup_ioc_data(self, input_data): + # Remove unecessary parts like { and }, quotes etc + input_data = str(input_data) + input_data = input_data.replace("{", "") + input_data = input_data.replace("}", "") + input_data = input_data.replace('"', "") + input_data = input_data.replace("'", "") + + input_data = input_data.replace("\t", " ") + input_data = input_data.replace(" ", " ") + input_data = input_data.replace("\n\n", "\n") + + # Remove html tags + input_data = re.sub(r"<[^>]*>", "", input_data) + + return input_data + + def parse_ioc(self, input_string, input_type="all"): + ioc_types = [ + "domains", + "urls", + 
"email_addresses", + "ipv4s", + "ipv6s", + "ipv4_cidrs", + "md5s", + "sha256s", + "sha1s", + "cves", + ] + # ioc_types = ["ipv4s"] + + try: + input_string = self.cleanup_ioc_data(input_string) + except Exception as e: + self.logger.info("[ERROR] Failed to cleanup ioc data: %s" % e) + + # Remember overriding ioc types we care about + if input_type == "" or input_type == "all": + input_type = "all" + else: + input_type = input_type.split(",") + + new_input_types = [] + for i in range(len(input_type)): + item = input_type[i] + + item = item.strip() + if not item.endswith("s"): + item = "%ss" % item + + if item not in ioc_types: + continue + + new_input_types.append(item) + + ioc_types = new_input_types + if len(ioc_types) == 0: + input_type = "all" + + # Not used for anything after cleanup fixes + max_size = 7500000 + # if len(input_string) > max_size: + # input_string = input_string[:max_size] + + self.logger.info( + "[DEBUG] Parsing data of length %d with types %s. Max size: %d" + % (len(input_string), ioc_types, max_size) + ) + self.logger.info(f"STRING: {input_string}") + + # iocs = find_iocs(str(input_string), included_ioc_types=ioc_types) + iocs = find_iocs(str(input_string)) + # self.logger.info("[DEBUG] Found %d ioc types" % len(iocs)) + + newarray = [] + for key, value in iocs.items(): + if input_type != "all": + if key not in input_type: + print("Invalid key: %s" % key) + continue + + if len(value) == 0: + continue + + for item in value: + # If in here: attack techniques. 
Shouldn't be 3 levels so no + # recursion necessary + if isinstance(value, dict): + for subkey, subvalue in value.items(): + if len(subvalue) > 0: + for subitem in subvalue: + data = { + "data": subitem, + "data_type": "%s_%s" % (key[:-1], subkey), + } + if data not in newarray: + newarray.append(data) + else: + data = {"data": item, "data_type": key[:-1]} + if data not in newarray: + newarray.append(data) + + # Reformatting IP + for item in newarray: + if "cidr" in item["data_type"]: + pass + elif "ip" in item["data_type"]: + item["data_type"] = "ip" + try: + item["is_private_ip"] = ipaddress.ip_address( + item["data"] + ).is_private + except: + pass + + try: + newarray = json.dumps(newarray) + except json.decoder.JSONDecodeError as e: + return "Failed to parse IOC's: %s" % e + + return newarray + + def split_text(self, text): + # Split text into chunks of 10kb. Add each 10k to array + # In case e.g. 1.2.3.4 lands exactly on 20k boundary, it may be useful to overlap here. + # (just shitty code to reduce chance of issues) while still going fast + arr_one = [] + max_len = 5000 + current_string = "" + overlaps = 100 + + for i in range(0, len(text)): + current_string += text[i] + if len(current_string) > max_len: + # Appending just in case even with overlaps + if len(text) > i + overlaps: + current_string += text[i + 1 : i + overlaps] + else: + current_string += text[i + 1 :] + + arr_one.append(current_string) + current_string = "" + + if len(current_string) > 0: + arr_one.append(current_string) + + return arr_one + + def _format_result(self, result): + final_result = {} + + for res in result: + for key, val in res.items(): + if key in final_result: + if isinstance(val, list) and len(val) > 0: + for i in val: + final_result[key].append(i) + elif isinstance(val, dict): + if key in final_result: + if isinstance(val, dict): + for k, v in val.items(): + val[k].append(v) + else: + final_result[key] = val + + return final_result + + # See function for how it works~: 
parse_ioc_new(..) + def _with_concurency(self, array_of_strings, ioc_types): + results = [] + # start = time.perf_counter() + + # Workers dont matter..? + # What can we use instead? + + workers = 4 + with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor: + # Submit the find_iocs function for each string in the array + futures = [ + executor.submit( + find_iocs, + text=string, + included_ioc_types=ioc_types, + ) + for string in array_of_strings + ] + + # Wait for all tasks to complete + concurrent.futures.wait(futures) + + # Retrieve the results if needed + results = [future.result() for future in futures] + + return self._format_result(results) + + # FIXME: Make this good and actually faster than normal + # For now: Concurrency doesn't make it faster due to GIL in python. + # May need to offload this to an executable or something + def parse_ioc_new(self, input_string, input_type="all"): + if input_type == "": + input_type = "all" + + ioc_types = [ + "domains", + "urls", + "email_addresses", + "ipv4s", + "ipv4_cidrs", + "md5s", + "sha256s", + "sha1s", + "cves", + ] + + if input_type == "" or input_type == "all": + ioc_types = ioc_types + else: + input_type = input_type.split(",") + for item in input_type: + item = item.strip() + + ioc_types = input_type + + input_string = str(input_string) + + if len(input_string) > 10000: + iocs = self._with_concurency( + self.split_text(input_string), ioc_types=ioc_types + ) + else: + iocs = find_iocs(input_string, included_ioc_types=ioc_types) + + newarray = [] + for key, value in iocs.items(): + if input_type != "all": + if key not in input_type: + continue + + if len(value) == 0: + continue + + for item in value: + # If in here: attack techniques. 
Shouldn't be 3 levels so no + # recursion necessary + if isinstance(value, dict): + for subkey, subvalue in value.items(): + if len(subvalue) == 0: + continue + + for subitem in subvalue: + data = { + "data": subitem, + "data_type": "%s_%s" % (key[:-1], subkey), + } + + if data not in newarray: + newarray.append(data) + else: + data = {"data": item, "data_type": key[:-1]} + if data not in newarray: + newarray.append(data) + + # Reformatting IP + i = -1 + for item in newarray: + i += 1 + if "ip" not in item["data_type"]: + continue + + newarray[i]["data_type"] = "ip" + try: + newarray[i]["is_private_ip"] = ipaddress.ip_address( + item["data"] + ).is_private + except Exception as e: + pass + + try: + newarray = json.dumps(newarray) + except json.decoder.JSONDecodeError as e: + return "Failed to parse IOC's: %s" % e + + return newarray + + def merge_incoming_branches(self, input_type="list"): + wf = self.full_execution["workflow"] + if "branches" not in wf or not wf["branches"]: + return {"success": False, "reason": "No branches found"} + + if "results" not in self.full_execution or not self.full_execution["results"]: + return { + "success": False, + "reason": "No results for previous actions not found", + } + + if not input_type: + input_type = "list" + + branches = wf["branches"] + cur_action = self.action + # print("Found %d branches" % len(branches)) + + results = [] + for branch in branches: + if branch["destination_id"] != cur_action["id"]: + continue + + # Find result for the source + source_id = branch["source_id"] + + for res in self.full_execution["results"]: + if res["action"]["id"] != source_id: + continue + + try: + parsed = json.loads(res["result"]) + results.append(parsed) + except Exception as e: + results.append(res["result"]) + + break + + if input_type == "list": + newlist = [] + for item in results: + if not isinstance(item, list): + continue + + for subitem in item: + if subitem in newlist: + continue + + newlist.append(subitem) + # 
newlist.append(item) + + results = newlist + elif input_type == "dict": + new_dict = {} + for item in results: + if not isinstance(item, dict): + continue + + new_dict = self.merge_lists(new_dict, item) + + results = json.dumps(new_dict) + else: + return { + "success": False, + "reason": "No results from source branches with type %s" % input_type, + } + + return results + + def bodyparse_test(self, body): + return body + + def list_cidr_ips(self, cidr): + defaultreturn = {"success": False, "reason": "Invalid CIDR address"} + + if not cidr: + return defaultreturn + + if "/" not in cidr: + defaultreturn["reason"] = "CIDR address must contain / (e.g. /12)" + return defaultreturn + + try: + cidrnumber = int(cidr.split("/")[1]) + except ValueError as e: + defaultreturn["exception"] = str(e) + return defaultreturn + + if cidrnumber < 12: + defaultreturn["reason"] = "CIDR address too large. Please stay above /12" + return defaultreturn + + try: + net = ipaddress.ip_network(cidr) + except ValueError as e: + defaultreturn["exception"] = str(e) + return defaultreturn + + ips = [str(ip) for ip in net] + returnvalue = {"success": True, "amount": len(ips), "ips": ips} + + return returnvalue + + def switch(self, conditions): + # Check if conditions is a list or not + if not isinstance(conditions, list): + conditions = [conditions] + + # True by default + to_return = { + "success": True, + "run_else": True, + } + + if len(conditions) == 0: + conditions = [] + + for condition in conditions: + pass + + # Loop conditions + # Return them without a loop to make it EASY to understand + # Validation should be: + # Continuation based on .id.valid + # .valid -> true/false + # If no id exists, use name? 
+ return to_return if __name__ == "__main__": diff --git a/shuffle-tools/1.2.0/src/concurrency.py b/shuffle-tools/1.2.0/src/concurrency.py new file mode 100644 index 00000000..420d1686 --- /dev/null +++ b/shuffle-tools/1.2.0/src/concurrency.py @@ -0,0 +1,201 @@ +import time +import json +import ipaddress +import concurrent.futures +from functools import partial +from ioc_finder import find_iocs + +class Test(): + def split_text(self, text): + # Split text into chunks of 10kb. Add each 10k to array + # In case e.g. 1.2.3.4 lands exactly on 20k boundary, it may be useful to overlap here. + # (just shitty code to reduce chance of issues) while still going fast + + arr_one = [] + max_len = 2500 + current_string = "" + overlaps = 100 + + + for i in range(0, len(text)): + current_string += text[i] + if len(current_string) > max_len: + # Appending just in case even with overlaps + if len(text) > i+overlaps: + current_string += text[i+1:i+overlaps] + else: + current_string += text[i+1:] + + arr_one.append(current_string) + current_string = "" + + if len(current_string) > 0: + arr_one.append(current_string) + + #print("DATA:", arr_one) + print("Strings:", len(arr_one)) + #exit() + + return arr_one + + def _format_result(self, result): + final_result = {} + + for res in result: + for key, val in res.items(): + if key in final_result: + if isinstance(val, list) and len(val) > 0: + for i in val: + final_result[key].append(i) + elif isinstance(val, dict): + #print(key,":::",val) + if key in final_result: + if isinstance(val, dict): + for k,v in val.items(): + #print("k:",k,"v:",v) + val[k].append(v) + #print(val) + #final_result[key].append([i for i in val if len(val) > 0]) + else: + final_result[key] = val + + return final_result + + def worker_function(self, inputdata): + return find_iocs(inputdata["data"], included_ioc_types=inputdata["ioc_types"]) + + def _with_concurency(self, array_of_strings, ioc_types): + results = [] + #start = time.perf_counter() + + # Workers dont 
matter..? + # What can we use instead? + + results = [] + workers = 4 + with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor: + # Submit the find_iocs function for each string in the array + futures = [executor.submit( + find_iocs, + text=string, + included_ioc_types=ioc_types, + ) for string in array_of_strings] + + # Wait for all tasks to complete + concurrent.futures.wait(futures) + + # Retrieve the results if needed + results = [future.result() for future in futures] + + return self._format_result(results) + + def parse_ioc_new(self, input_string, input_type="all"): + if input_type == "": + input_type = "all" + + #ioc_types = ["domains", "urls", "email_addresses", "ipv6s", "ipv4s", "ipv4_cidrs", "md5s", "sha256s", "sha1s", "cves"] + ioc_types = ["domains", "urls", "email_addresses", "ipv4s", "ipv4_cidrs", "md5s", "sha256s", "sha1s", "cves"] + + # urls = 10.4 -> 9.1 + # emails = 10.4 -> 9.48 + # ipv6s = 10.4 -> 7.37 + # ipv4 cidrs = 10.4 -> 10.44 + + if input_type == "" or input_type == "all": + ioc_types = ioc_types + else: + input_type = input_type.split(",") + for item in input_type: + item = item.strip() + + ioc_types = input_type + + input_string = str(input_string) + if len(input_string) > 10000: + iocs = self._with_concurency(self.split_text(input_string), ioc_types=ioc_types) + else: + iocs = find_iocs(input_string, included_ioc_types=ioc_types) + + newarray = [] + for key, value in iocs.items(): + if input_type != "all": + if key not in input_type: + continue + + if len(value) == 0: + continue + + for item in value: + # If in here: attack techniques. 
Shouldn't be 3 levels so no + # recursion necessary + if isinstance(value, dict): + for subkey, subvalue in value.items(): + if len(subvalue) == 0: + continue + + for subitem in subvalue: + data = { + "data": subitem, + "data_type": "%s_%s" % (key[:-1], subkey), + } + + if data not in newarray: + newarray.append(data) + else: + data = {"data": item, "data_type": key[:-1]} + if data not in newarray: + newarray.append(data) + + # Reformatting IP + i = -1 + for item in newarray: + i += 1 + if "ip" not in item["data_type"]: + continue + + newarray[i]["data_type"] = "ip" + try: + newarray[i]["is_private_ip"] = ipaddress.ip_address(item["data"]).is_private + except Exception as e: + print("Error parsing %s: %s" % (item["data"], e)) + + try: + newarray = json.dumps(newarray) + except json.decoder.JSONDecodeError as e: + return "Failed to parse IOC's: %s" % e + + return newarray + +# Make it not run this for multithreads +if __name__ == "__main__": + + input_string = "" + with open("testdata.txt", "r") as f: + input_string = f.read() + + try: + json_data = json.loads(input_string) + # If array, loop + if isinstance(json_data, list): + cnt = 0 + start = time.perf_counter() + for item in json_data: + cnt += 1 + classdata = Test() + + ret = classdata.parse_ioc_new(item) + #print("OUTPUT1: ", ret) + + #if cnt == 5: + # break + + print("Total time taken:", time.perf_counter()-start) + else: + classdata = Test() + ret = classdata.parse_ioc_new(input_string) + print("OUTPUT2: ", ret) + except Exception as e: + classdata = Test() + ret = classdata.parse_ioc_new(json_data) + print("OUTPUT3: ", ret) + diff --git a/shuffle-tools/1.2.0/src/switch.py b/shuffle-tools/1.2.0/src/switch.py new file mode 100644 index 00000000..78ede505 --- /dev/null +++ b/shuffle-tools/1.2.0/src/switch.py @@ -0,0 +1,203 @@ +# self, sourcevalue, condition, destinationvalue +def validate_condition(sourcevalue, check, destinationvalue): + if check == "=" or check == "==" or check.lower() == "equals": + if 
str(sourcevalue).lower() == str(destinationvalue).lower(): + return True + elif check == "!=" or check.lower() == "does not equal": + if str(sourcevalue).lower() != str(destinationvalue).lower(): + return True + elif check.lower() == "startswith": + if str(sourcevalue).lower().startswith(str(destinationvalue).lower()): + return True + + + elif check.lower() == "endswith": + if str(sourcevalue).lower().endswith(str(destinationvalue).lower()): + return True + elif check.lower() == "contains": + if destinationvalue.lower() in sourcevalue.lower(): + return True + + elif check.lower() == "is empty" or check.lower() == "is_empty": + try: + if len(json.loads(sourcevalue)) == 0: + return True + except Exception as e: + print("[ERROR] Failed to check if empty as list: {e}") + + if len(str(sourcevalue)) == 0: + return True + + elif check.lower() == "contains_any_of": + newvalue = [destinationvalue.lower()] + if "," in destinationvalue: + newvalue = destinationvalue.split(",") + elif ", " in destinationvalue: + newvalue = destinationvalue.split(", ") + + for item in newvalue: + if not item: + continue + + if item.strip() in sourcevalue: + return True + + elif check.lower() == "larger than" or check.lower() == "bigger than" or check == ">" or check == ">=": + try: + if str(sourcevalue).isdigit() and str(destinationvalue).isdigit(): + if int(sourcevalue) > int(destinationvalue): + return True + + except AttributeError as e: + print("[WARNING] Condition larger than failed with values %s and %s: %s" % (sourcevalue, destinationvalue, e)) + + try: + destinationvalue = len(json.loads(destinationvalue)) + except Exception as e: + print("[WARNING] Failed to convert destination to list: {e}") + try: + # Check if it's a list in autocast and if so, check the length + if len(json.loads(sourcevalue)) > int(destinationvalue): + return True + except Exception as e: + print("[WARNING] Failed to check if larger than as list: {e}") + + + elif check.lower() == "smaller than" or check.lower() == 
"less than" or check == "<" or check == "<=": + print("In smaller than check: %s %s" % (sourcevalue, destinationvalue)) + + try: + if str(sourcevalue).isdigit() and str(destinationvalue).isdigit(): + if int(sourcevalue) < int(destinationvalue): + return True + + except AttributeError as e: + pass + + try: + destinationvalue = len(json.loads(destinationvalue)) + except Exception as e: + print("[WARNING] Failed to convert destination to list: {e}") + + try: + # Check if it's a list in autocast and if so, check the length + if len(json.loads(sourcevalue)) < int(destinationvalue): + return True + except Exception as e: + print("[WARNING] Failed to check if smaller than as list: {e}") + + elif check.lower() == "re" or check.lower() == "matches regex": + try: + found = re.search(str(destinationvalue), str(sourcevalue)) + except re.error as e: + return False + except Exception as e: + return False + + if found == None: + return False + + return True + else: + print("[DEBUG] Condition: can't handle %s yet. Setting to true" % check) + + return False + +def evaluate_conditions(condition_structure): + operator = condition_structure.get('operator') + + # Base case: Single condition + if 'source' in condition_structure: + source = condition_structure['source'] + condition = condition_structure['condition'] + destination = condition_structure['destination'] + + # self. 
+ return validate_condition(source, condition, destination) + + # Recursive case: Logical operator + elif operator == "AND": + return all(evaluate_conditions(sub_condition) for sub_condition in condition_structure['conditions']) + + elif operator == "OR": + return any(evaluate_conditions(sub_condition) for sub_condition in condition_structure['conditions']) + + elif operator == "NOT": + return not evaluate_conditions(condition_structure['conditions'][0]) + + else: + raise ValueError(f"Unknown operator: {operator}") + + +def switch(conditions): + to_return = { + "success": True, + "run_else": True, + } + + for condition in conditions: + if "id" not in condition: + print("Condition ID not found") + continue + + evaluated = False + try: + evaluated = evaluate_conditions(condition) + except Exception as e: + print(f"Failed to evaluate condition {condition['id']}: {e}") + + if evaluated == True: + to_return["run_else"] = False + + to_return[condition["id"]] = evaluated + + return to_return + +# Example usage + +condition_structure = { + "id": "lol", + "operator": "AND", + "conditions": [ + { # true + "source": "20", # age + "condition": ">", + "destination": 18 + }, + { # true + "operator": "OR", + "conditions": [ + { + "source": "active", # status + "condition": "==", + "destination": "active" + }, + { + "source": "1500", # balance + "condition": ">=", + "destination": 1000 + } + ] + }, + { + "operator": "NOT", + "conditions": [ + { + "source": "user", # user + "condition": "==", + "destination": "admin" + } + ] + } + ] +} + +newcondition = condition_structure.copy() +testconditions = [condition_structure] +newcondition['id'] = "lol2" +testconditions.append(newcondition) + +result = switch(testconditions) +print() +print() +print("Output: ", result) diff --git a/shuffle-tools/README.md b/shuffle-tools/README.md index b0271276..0a3518ac 100644 --- a/shuffle-tools/README.md +++ b/shuffle-tools/README.md @@ -5,74 +5,39 @@ Shuffle tools is a utility app that simplifies 
your understanding of what happen ## Actions The Shuffle-tools app comes with a multitude of different actions, here we will check a few out and give a brief description. -1. Repeat back to me - This action does exactly what it says, repeats back to you what you want it to. Why is this important? You need to test as you go whilst creating your workflow, what results does the first node give and are the results okay to use in the subsequent nodes? - -2. Router - Reroutes data between different nodes. - -3. Check cache contains - Checks Shuffle cache whether a key contains a value in a list - -4. Get cache value - Get a value savesd in your Shuffle organization - -5. Send SMS Shuffle - Sends an SMS from Shuffle, currently working on getting a few demo trials. - -6. Send E-mail Shuffle - Sends an Email from shuffle, currently working on getting a few demo trials. - -7. Filter list - Takes a list and filters based on the data - -8. Parse IOC - Parses Indicators of Compromise based on https://github.com/fhightower/ioc-finder - -9. Translate Value - Takes a list of values and translates it in your input data - -10. Map value - Takes a mapping dictionary and translates the input data. This is a search and replace for multiple fields. - -11. Regex Capture Group - Returns objects matching the capture group - -12. Regex replace - Replace all instances matching the regular expression - -13. Parse List - Parses a list and returns it as a JSON object - -14. Execute Bash - Runs bash with the data input - -15. Execute Python - Runs python with the data input. Any prints will be returned. - -16. Get file value - This function is made for reading files. Prints out their data. - -17. Download remote file - Downloads a file from a url - -18. Get file meta - Gets file metadata - -19. Delete file - Delete's file based on id - -20. Extract archive - Extracts compressed files and returns file ids - -21. Inflate archive - Compress files in an archive. Return file archive ids - -22. 
XML JSON converter - Converts XML to JSON and vice versa - -23. Date to epoch - converts a date field with a given format to an epoch time - -24. Compare relative date - Compares an input date and a relative date and returns a True/False response - -25. Add list to list - Can append single items to a list, can also add items of a list to another list - -26. Merge lists - Merge lists of the same type and length - -27. Diff Lists - Differentiates two lists of strings or integers and finds what's missing - -28. Set JSON Key - Adds a JSON key to an existing object - -29. Delete JSON Keys - Deletes keys in a JSON object - -30. Convert JSON tags - Creates Key:Value pairs and converts JSON to tags - -31. Run Math Operation - Runs an arithmetic operation - -32. Escape HTML - Performs HTML escaping on field - -33. Base 64 Conversion - Encodes or Decodes a base64 string - -34. Get time stamp - Gets a timestamp for right now. Default returns epoch time - -35. Get Hash sum - Returns multiple format of hashes based on the input value - -36. Cidr IP match - Check if an IP is contained in a CIDR defined network +- 1. Repeat back to me - This action does exactly what it says, repeats back to you what you want it to. Why is this important? You need to test as you go whilst creating your workflow, what results does the first node give and are the results okay to use in the subsequent nodes? +- 2. Router - Reroutes data between different nodes. +- 3. Check cache contains - Checks Shuffle cache whether a key contains a value in a list +- 4. Get cache value - Get a value savesd in your Shuffle organization +- 5. Send SMS Shuffle - Sends an SMS from Shuffle, currently working on getting a few demo trials. +- 6. Send E-mail Shuffle - Sends an Email from shuffle, currently working on getting a few demo trials. +- 7. Filter list - Takes a list and filters based on the data +- 8. Parse IOC - Parses Indicators of Compromise based on https://github.com/fhightower/ioc-finder +- 9. 
Translate Value - Takes a list of values and translates it in your input data +- 10. Map value - Takes a mapping dictionary and translates the input data. This is a search and replace for multiple fields. +- 11. Regex Capture Group - Returns objects matching the capture group +- 12. Regex replace - Replace all instances matching the regular expression +- 13. Parse List - Parses a list and returns it as a JSON object +- 14. Execute Bash - Runs bash with the data input +- 15. Execute Python - Runs python with the data input. Any prints will be returned. +- 16. Get file value - This function is made for reading files. Prints out their data. +- 17. Download remote file - Downloads a file from a url +- 18. Get file meta - Gets file metadata +- 19. Delete file - Delete's file based on id +- 20. Extract archive - Extracts compressed files and returns file ids +- 21. Inflate archive - Compress files in an archive. Return file archive ids +- 22. XML JSON converter - Converts XML to JSON and vice versa +- 23. Date to epoch - converts a date field with a given format to an epoch time +- 24. Compare relative date - Compares an input date and a relative date and returns a True/False response +- 25. Add list to list - Can append single items to a list, can also add items of a list to another list +- 26. Merge lists - Merge lists of the same type and length +- 27. Diff Lists - Differentiates two lists of strings or integers and finds what's missing +- 28. Set JSON Key - Adds a JSON key to an existing object +- 29. Delete JSON Keys - Deletes keys in a JSON object +- 30. Convert JSON tags - Creates Key:Value pairs and converts JSON to tags +- 31. Run Math Operation - Runs an arithmetic operation +- 32. Escape HTML - Performs HTML escaping on field +- 33. Base 64 Conversion - Encodes or Decodes a base64 string +- 34. Get time stamp - Gets a timestamp for right now. Default returns epoch time +- 35. Get Hash sum - Returns multiple format of hashes based on the input value +- 36. 
Cidr IP match - Check if an IP is contained in a CIDR defined network diff --git a/siemonster/1.0.0/requirements.txt b/siemonster/1.0.0/requirements.txt index fd7d3e06..480d0c4b 100644 --- a/siemonster/1.0.0/requirements.txt +++ b/siemonster/1.0.0/requirements.txt @@ -1 +1 @@ -requests==2.25.1 \ No newline at end of file +requests==2.32.4 \ No newline at end of file diff --git a/sigma/1.0.0/requirements.txt b/sigma/1.0.0/requirements.txt index b49a2451..755761ce 100644 --- a/sigma/1.0.0/requirements.txt +++ b/sigma/1.0.0/requirements.txt @@ -1,2 +1,2 @@ -requests==2.25.1 +requests==2.32.4 sigmatools==0.20 diff --git a/snort3/1.0.0/requirements.txt b/snort3/1.0.0/requirements.txt index 64fe70a3..090113b4 100644 --- a/snort3/1.0.0/requirements.txt +++ b/snort3/1.0.0/requirements.txt @@ -1,2 +1,2 @@ # No extra requirements needed -requests==2.25.1 \ No newline at end of file +requests==2.32.4 \ No newline at end of file diff --git a/sooty/1.0.0/requirements.txt b/sooty/1.0.0/requirements.txt index 897de537..0ade4fc4 100644 --- a/sooty/1.0.0/requirements.txt +++ b/sooty/1.0.0/requirements.txt @@ -1,2 +1,2 @@ -requests==2.25.1 +requests==2.32.4 ipwhois==1.2.0 \ No newline at end of file diff --git a/testing/1.0.0/requirements.txt b/testing/1.0.0/requirements.txt deleted file mode 100644 index fd7d3e06..00000000 --- a/testing/1.0.0/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -requests==2.25.1 \ No newline at end of file diff --git a/thehive/1.1.2/requirements.txt b/thehive/1.1.2/requirements.txt deleted file mode 100644 index 1d40c46a..00000000 --- a/thehive/1.1.2/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -requests==2.25.1 -thehive4py==1.8.1 -python-magic==0.4.18 diff --git a/thehive/1.1.3/docker-compose.yml b/thehive/1.1.3/docker-compose.yml deleted file mode 100644 index 47de05b2..00000000 --- a/thehive/1.1.3/docker-compose.yml +++ /dev/null @@ -1,14 +0,0 @@ -version: '3.4' -services: - thehive: - build: - context: . 
- dockerfile: Dockerfile - env_file: - - env.txt - restart: "no" - deploy: - mode: replicated - replicas: 10 - restart_policy: - condition: none diff --git a/thehive/1.1.3/env.txt b/thehive/1.1.3/env.txt deleted file mode 100644 index 1398a35f..00000000 --- a/thehive/1.1.3/env.txt +++ /dev/null @@ -1,4 +0,0 @@ -REDIS_URI=redis://redis -REDIS_ACTION_RESULT_CH=action-results -REDIS_ACTION_RESULTS_GROUP=action-results-group -APP_NAME=thehive diff --git a/thehive/1.1.3/requirements.txt b/thehive/1.1.3/requirements.txt deleted file mode 100644 index 1d40c46a..00000000 --- a/thehive/1.1.3/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -requests==2.25.1 -thehive4py==1.8.1 -python-magic==0.4.18 diff --git a/http/1.2.0/Dockerfile b/twilio/1.9.0/Dockerfile similarity index 100% rename from http/1.2.0/Dockerfile rename to twilio/1.9.0/Dockerfile diff --git a/twilio/1.9.0/api.yaml b/twilio/1.9.0/api.yaml new file mode 100644 index 00000000..af084706 --- /dev/null +++ b/twilio/1.9.0/api.yaml @@ -0,0 +1,107 @@ +walkoff_version: 1.9.0 +app_version: 1.9.0 +name: twilio +description: Send SMS from Shuffle through Twilio.com +tags: + - HTTP +categories: + - HTTP +contact_info: + name: "Entwicklungsleiter" + url: https://github.com/Entwicklungsleiter + email: "50797003+Entwicklungsleiter@users.noreply.github.com" +authentication: + required: true + parameters: + - name: url + description: Twilio API URL. 
+ example: "https://api.twilio.com/2010-04-01/Accounts/TWILIO_ACCOUNT_SID/Messages.json" + required: true + schema: + type: string + - name: username + description: Your Twilio account SID + multiline: false + required: true + example: "Username" + schema: + type: string + - name: password + description: Your Twilio account secret + multiline: false + required: true + example: "*****" + schema: + type: string + - name: headers + description: Headers to use + multiline: true + required: false + example: "Content-Type: application/x-www-form-urlencoded" + schema: + type: string +actions: + - name: Send_SMS + description: sends an SMS to Twilio API endpoint + parameters: + - name: url + description: Twilio API URL + multiline: false + example: "https://api.twilio.com/2010-04-01/Accounts/TWILIO_ACCOUNT_SID/Messages.json" + required: true + schema: + type: string + - name: headers + description: Headers to use + multiline: true + required: false + example: "Content-Type: application/x-www-form-urlencoded" + schema: + type: string + - name: username + description: Your Twilio account SID + multiline: false + required: true + example: "Username" + schema: + type: string + - name: password + description: Your Twilio account secret + multiline: false + required: true + example: "*****" + schema: + type: string + - name: timeout + description: Add a timeout (in seconds) for the request + multiline: false + required: false + example: "10" + schema: + type: bool + - name: body + description: The message to send. + multiline: true + example: "I did not have any sexual relationship with Miss Lewinsky!" + required: true + schema: + type: string + - name: From + description: The senders phone number, see Your Twilio account for accepted phone numbers. + multiline: false + example: "+1234567890" + required: true + schema: + type: string + - name: To + description: The message receiver phone number (or a comma separated list of phone numbers). 
+ multiline: false + example: "+9876543210,+1928374650" + required: true + schema: + type: string + returns: + schema: + type: string + example: "404 NOT FOUND" +large_image: data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADkAAAA5CAYAAACMGIOFAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyNpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuNS1jMDIxIDc5LjE1NDkxMSwgMjAxMy8xMC8yOS0xMTo0NzoxNiAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENDIChNYWNpbnRvc2gpIiB4bXBNTTpJbnN0YW5jZUlEPSJ4bXAuaWlkOjREOTUwQkUyQUQwNDExRTM4QUY0RDlBODA0OUZGNENBIiB4bXBNTTpEb2N1bWVudElEPSJ4bXAuZGlkOjREOTUwQkUzQUQwNDExRTM4QUY0RDlBODA0OUZGNENBIj4gPHhtcE1NOkRlcml2ZWRGcm9tIHN0UmVmOmluc3RhbmNlSUQ9InhtcC5paWQ6NEQ5NTBCRTBBRDA0MTFFMzhBRjREOUE4MDQ5RkY0Q0EiIHN0UmVmOmRvY3VtZW50SUQ9InhtcC5kaWQ6NEQ5NTBCRTFBRDA0MTFFMzhBRjREOUE4MDQ5RkY0Q0EiLz4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz4jmx8hAAAGI0lEQVR42uRbXUwdRRReLlh5aCrUEFNNrJZaNCAPRmwR+pMKNZEaq1KTIgZNsQ819KkhYnw01jTVh5rUBGmUiJgo8SdRHwpNCKmWloYHCqmgBeoDxhBLJX2oKNHv6HfJZrN7ZmbvD4ae5Mtkd+fMnO/OzDlnZvfmeBmQ34pLNqLYAZQD9wP3AkXAaiAXWASuA7PAFPADMAL03355/Kd025OTRmIVKBqBPcDdKTT1M/Al0AXCQ8tOEsTySOwwUJqBSTEGHCPhv7JKEuRErwF4A7jHy7xMA68D3SD7d8ZJguADKN4DtnvZl37gIIheclFKOBJsRjG8TAQ9OrNh2pHekUSjq1Cc5Pr7v0gXsB+jupAySRBcg+Jz4LGYxvwOjAOXgXngBpAPSLvFQAlwW8y2TwPPgOh8bJIk2As84tj5WeBToE88pOYs6MTEM9cAzwGVjn2dB2o1ojmGKfqtwwj+CXSKy0eH4ymEpRKGpCbgFocRfSJq6mokP3JYg18Dh9DJVBqTC8mSjgO7bdco+n/BmiS91/sWDUtqdgCNf5Ip7wJb9qFoZ0pokpdhS4eRJOPgMJ2DJuJI6lKZmo5T+Bs6Kk3EqT0UjKOJECdwwoLgKFC
VDYIi7KeK/Woidp8gj8hkoIEB1zSCNej412wGRfZXw/5NCUND6HRlsv2jIReVNfhw2Ajy1xP3vxVYC8wAp2xSMC6Rx4F1wFVgABgMCz2cuhcMa1Ry3fuSSb2f5IsoPjDY0xDmZNixhI/NITo9dE5zIXqFdCr1IXqDEkagNxHhjLoNtr4E3Q+DJC+iKNPCBJSejCD4PUcvSqTtan/AZqLxnaFPGdXKCKIStuo0vwG9B5fWJDe8ZYZAfyhiinYaCIpIZ0cC994y9Omx3c6gI6G00K4oKSOvJcdjCvqdEYG+MmKKhkkzRy85ivst9bYQQUc0xR9Yk0Y/yacMlY9F3N/q4CBX+XLgzby2lW2Odnl+XgkeOq3Xkm0lHq713KQwULrqhcXPs4reeuGXsIiLnynPZhyN/SWm3kxM+/6Nm0Ky3FCpV3l2ysHQa9wWiQzx2lZ6Yz4TKU9w06pteMeULOQS46CNHE1uhVD+IdeWej2GhGKMdkZJiZDcoFSYsDgdO8A4qMkXIaTk+iuD3kW2r6V7Yt+EUmWDkCxSKhhPs5nJVDOxXwiZoq8Be1FvMaAn18/yeXDqLrC96rBMydHOojxDDjhvmTxLvVfgydoYJgrpZM5rB00kegR676CsYO46R715hzWr1V0tJHMN+zOXncI8z3VcdxiyRs+ksEnR7MwVkosK0XyXnpjJ+EdyiARMeremOJKanYt53D5FHQmucSAnuWlzIJO5hmdH6VkXQ/Tkx20lCvxrEs/kGKPNkqxm53VxPLNKhY0WBAs51Q6GpGpi+JsSsEkoSLCHzwtCUkBp7wzbN4lm56yQnFQqbIrYAfilnbsMTZ7maPlFrvdY7F7aDT+y2LdJqTIpJLVzGpnGpYYdfb3lumnlWW5yDbZa6tWznygp9fQT+HEhOWLopFZ5tsvBORT4diEVIVM0rg21Bt0RIdlvqLRXeXano6tfF1Pvrpj2ifQn+I7+ilKpkkccUccTLjIXKG3lasRyKfH0dydXhF9y02zKIQ9H3B9wMHTBtws5F5ICajLgaJfn55Uk2WWo3MR3E2EnaucsDe1IxjyWJy31BongKIo9TQbdriWS/MpCO52Wt0vHI3YATRbTVnYTbYF7r3rmE3FptyliJ/Sup7/1Gk1+PeI/QX/b0OFunneGHUE8qoyoBPztwcyF19uU/aiMXqVy7lpnsHeJT7pP0LfQ8Dgn6LvodTN3gs4GnrdYn/Iuoirb70Jo3x3efwfSprdbjbDv4+RF8IVPt0XclA762GG2CfZZEOz3Aq8Qbr73k77DqRaLvqXDC2HOKM0E93ENFltUbwnzATfvNwPsZOV//eHb8a/c73gCRFfuF1mBqbtyv60LkG1mzpi/jORu0It22CrE/d5VTrd3LANBCfSZ/d7VF0d3cupOZ4ncNPvb6Uow1kgGRjXT36CPcjeR/W/QIwivzH8TKIT9/wuRECGvB6P+FzLJEJOx/4X8I8AAXH2Fd+613pMAAAAASUVORK5CYII= diff --git a/twilio/1.9.0/requirements.txt b/twilio/1.9.0/requirements.txt new file mode 100644 index 00000000..480d0c4b --- /dev/null +++ b/twilio/1.9.0/requirements.txt @@ -0,0 +1 @@ +requests==2.32.4 \ No newline at end of file diff --git a/twilio/1.9.0/src/app.py b/twilio/1.9.0/src/app.py new file mode 100755 index 00000000..d43533ff --- /dev/null +++ b/twilio/1.9.0/src/app.py @@ -0,0 +1,206 @@ +import json +import ast +import requests + +from walkoff_app_sdk.app_base import AppBase + 
+class TWILIO(AppBase): + __version__ = "1.9.0" + app_name = "twilio" + + def __init__(self, redis, logger, console_logger=None): + print("INIT") + """ + Each app should have this __init__ to set up Redis and logging. + :param redis: + :param logger: + :param console_logger: + """ + super().__init__(redis, logger, console_logger) + + def splitheaders(self, headers): + parsed_headers = {} + if headers: + split_headers = headers.split("\n") + self.logger.info(split_headers) + for header in split_headers: + if ": " in header: + splititem = ": " + elif ":" in header: + splititem = ":" + elif "= " in header: + splititem = "= " + elif "=" in header: + splititem = "=" + else: + self.logger.info("Skipping header %s as its invalid" % header) + continue + + splitheader = header.split(splititem) + if len(splitheader) == 2: + parsed_headers[splitheader[0]] = splitheader[1] + else: + self.logger.info("Skipping header %s with split %s cus only one item" % (header, splititem)) + continue + + return parsed_headers + + def checkbody(self, body): + # Indicates json + if isinstance(body, str): + if body.strip().startswith("{"): + body = json.dumps(ast.literal_eval(body)) + + + # Not sure if loading is necessary + # Seemed to work with plain string into data=body too, and not parsed json=body + #try: + # body = json.loads(body) + #except json.decoder.JSONDecodeError as e: + # return body + + return body + else: + return body + + if isinstance(body, dict) or isinstance(body, list): + try: + body = json.dumps(body) + except: + return body + + return body + + def fix_url(self, url): + # Random bugs seen by users + if "hhttp" in url: + url = url.replace("hhttp", "http") + + if "http:/" in url and not "http://" in url: + url = url.replace("http:/", "http://", -1) + if "https:/" in url and not "https://" in url: + url = url.replace("https:/", "https://", -1) + if "http:///" in url: + url = url.replace("http:///", "http://", -1) + if "https:///" in url: + url = url.replace("https:///", 
"https://", -1) + if not "http://" in url and not "http" in url: + url = f"http://{url}" + + return url + + def return_file(self, requestdata): + filedata = { + "filename": "response.txt", + "data": requestdata, + } + fileret = self.set_files([filedata]) + if len(fileret) == 1: + return {"success": True, "file_id": fileret[0]} + + return fileret + + def prepare_response(self, request): + try: + parsedheaders = {} + for key, value in request.headers.items(): + parsedheaders[key] = value + + cookies = {} + if request.cookies: + for key, value in request.cookies.items(): + cookies[key] = value + + + jsondata = request.text + try: + jsondata = json.loads(jsondata) + except: + pass + + return { + "success": True, + "status": request.status_code, + "url": request.url, + "headers": parsedheaders, + "body": jsondata, + "cookies":cookies, + } + except Exception as e: + print(f"[WARNING] Failed in request: {e}") + return { + "success": False, + "status": "XXX", + "error": request.text + } + + + def summarize_responses(self, one_response, summary): + summary["results"].append(one_response) + + # if ONE request fails, summary is marked as failed + if False == one_response["success"]: + summary["success"] = False + + # if one status code is not 200, use this failure status code for summary + if "200" != one_response["status"]: + summary["status"] = one_response["status"] + + return summary + + + def Send_SMS(self, url, headers="", username="", password="", body="", From="", To="", timeout=5): + url = self.fix_url(url) + + parsed_headers = self.splitheaders(headers) + parsed_headers["User-Agent"] = "Shuffle Automation" + body = self.checkbody(body) + + auth=None + if username or password: + # Shouldn't be used if authorization headers exist + if "Authorization" in parsed_headers: + #print("Found authorization - skipping username & pw") + pass + else: + auth = requests.auth.HTTPBasicAuth(username, password) + + if not timeout: + timeout = 5 + if timeout: + timeout = int(timeout) 
+ + summary = { + "success": True, + "status": "200", + "url": url, + "results": [] + } + + # send Twilio API request for every single receiver number + for receiver in To.split(","): + data = {'Body' : body, 'From' : From, 'To' : receiver.strip()} + request = requests.post(url, headers=parsed_headers, auth=auth, data=data, timeout=timeout) + response = self.prepare_response(request) + summary = self.summarize_responses(response, summary) + + return json.dumps(summary) + + +# Run the actual thing after we've checked params +def run(request): + print("Starting cloud!") + action = request.get_json() + print(action) + print(type(action)) + authorization_key = action.get("authorization") + current_execution_id = action.get("execution_id") + + if action and "name" in action and "app_name" in action: + TWILIO.run(action) + return f'Attempting to execute function {action["name"]} in app {action["app_name"]}' + else: + return f'Invalid action' + +if __name__ == "__main__": + TWILIO.run() diff --git a/twitter/1.0.0/requirements.txt b/twitter/1.0.0/requirements.txt deleted file mode 100644 index 2bb51887..00000000 --- a/twitter/1.0.0/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -requests==2.25.1 -twython==3.9.1 diff --git a/unsupported/DuoSecurity/1.0.0/requirements.txt b/unsupported/DuoSecurity/1.0.0/requirements.txt index fd7d3e06..480d0c4b 100644 --- a/unsupported/DuoSecurity/1.0.0/requirements.txt +++ b/unsupported/DuoSecurity/1.0.0/requirements.txt @@ -1 +1 @@ -requests==2.25.1 \ No newline at end of file +requests==2.32.4 \ No newline at end of file diff --git a/unsupported/ad-ldap/1.0.0/requirements.txt b/unsupported/ad-ldap/1.0.0/requirements.txt index 5238833e..53f3e6a1 100644 --- a/unsupported/ad-ldap/1.0.0/requirements.txt +++ b/unsupported/ad-ldap/1.0.0/requirements.txt @@ -1,2 +1,2 @@ ldap3==2.9.1 -requests==2.25.1 +requests==2.32.4 diff --git a/unsupported/ansible/1.0.0/requirements.txt b/unsupported/ansible/1.0.0/requirements.txt index 7b2bf77e..c67cded2 
100644 --- a/unsupported/ansible/1.0.0/requirements.txt +++ b/unsupported/ansible/1.0.0/requirements.txt @@ -1,2 +1,2 @@ -requests==2.25.1 -ansible==4.8.0 +requests==2.32.4 +ansible==8.5.0 diff --git a/unsupported/attack-predictor/1.0.0/requirements.txt b/unsupported/attack-predictor/1.0.0/requirements.txt index 2a46021e..5687cab1 100644 --- a/unsupported/attack-predictor/1.0.0/requirements.txt +++ b/unsupported/attack-predictor/1.0.0/requirements.txt @@ -1,7 +1,7 @@ colorama==0.4.4 -joblib==0.14.1 -nltk==3.4.5 -numpy==1.17.4 +joblib==1.2.0 +nltk==3.9.1 +numpy==1.22.0 pandas==0.25.3 #scikit-learn==0.22.2.post1 stix2==1.2.1 diff --git a/crowdstrike-falcon/1.0.0/Dockerfile b/unsupported/crowdstrike-falcon/1.0.0/Dockerfile similarity index 100% rename from crowdstrike-falcon/1.0.0/Dockerfile rename to unsupported/crowdstrike-falcon/1.0.0/Dockerfile diff --git a/crowdstrike-falcon/1.0.0/api.yaml b/unsupported/crowdstrike-falcon/1.0.0/api.yaml similarity index 100% rename from crowdstrike-falcon/1.0.0/api.yaml rename to unsupported/crowdstrike-falcon/1.0.0/api.yaml diff --git a/crowdstrike-falcon/1.0.0/requirements.txt b/unsupported/crowdstrike-falcon/1.0.0/requirements.txt similarity index 100% rename from crowdstrike-falcon/1.0.0/requirements.txt rename to unsupported/crowdstrike-falcon/1.0.0/requirements.txt diff --git a/crowdstrike-falcon/1.0.0/src/app.py b/unsupported/crowdstrike-falcon/1.0.0/src/app.py similarity index 100% rename from crowdstrike-falcon/1.0.0/src/app.py rename to unsupported/crowdstrike-falcon/1.0.0/src/app.py diff --git a/cylance/1.0.0/Dockerfile b/unsupported/cylance/1.0.0/Dockerfile similarity index 100% rename from cylance/1.0.0/Dockerfile rename to unsupported/cylance/1.0.0/Dockerfile diff --git a/cylance/1.0.0/api.yaml b/unsupported/cylance/1.0.0/api.yaml similarity index 100% rename from cylance/1.0.0/api.yaml rename to unsupported/cylance/1.0.0/api.yaml diff --git a/unsupported/cylance/1.0.0/requirements.txt 
b/unsupported/cylance/1.0.0/requirements.txt new file mode 100644 index 00000000..a76afb96 --- /dev/null +++ b/unsupported/cylance/1.0.0/requirements.txt @@ -0,0 +1,3 @@ +cryptography==44.0.1 +requests==2.32.4 +PyJWT==2.4.0 diff --git a/cylance/1.0.0/src/app.py b/unsupported/cylance/1.0.0/src/app.py similarity index 100% rename from cylance/1.0.0/src/app.py rename to unsupported/cylance/1.0.0/src/app.py diff --git a/unsupported/email-analyzer/1.0.0/requirements.txt b/unsupported/email-analyzer/1.0.0/requirements.txt index 4965a825..f3ecd49c 100644 --- a/unsupported/email-analyzer/1.0.0/requirements.txt +++ b/unsupported/email-analyzer/1.0.0/requirements.txt @@ -1,4 +1,4 @@ -requests==2.25.1 +requests==2.32.4 eml-parser==1.14.7 msg-parser==1.2.0 mail-parser==3.15.0 diff --git a/microsoft-security-and-compliance/1.0.0/Dockerfile b/unsupported/hoxhunt/1.0.0/Dockerfile similarity index 100% rename from microsoft-security-and-compliance/1.0.0/Dockerfile rename to unsupported/hoxhunt/1.0.0/Dockerfile diff --git a/hoxhunt/1.0.0/api.yaml b/unsupported/hoxhunt/1.0.0/api.yaml similarity index 100% rename from hoxhunt/1.0.0/api.yaml rename to unsupported/hoxhunt/1.0.0/api.yaml diff --git a/unsupported/hoxhunt/1.0.0/requirements.txt b/unsupported/hoxhunt/1.0.0/requirements.txt new file mode 100644 index 00000000..480d0c4b --- /dev/null +++ b/unsupported/hoxhunt/1.0.0/requirements.txt @@ -0,0 +1 @@ +requests==2.32.4 \ No newline at end of file diff --git a/hoxhunt/1.0.0/src/app.py b/unsupported/hoxhunt/1.0.0/src/app.py similarity index 100% rename from hoxhunt/1.0.0/src/app.py rename to unsupported/hoxhunt/1.0.0/src/app.py diff --git a/unsupported/lastline/1.0.0/requirements.txt b/unsupported/lastline/1.0.0/requirements.txt index fd7d3e06..480d0c4b 100644 --- a/unsupported/lastline/1.0.0/requirements.txt +++ b/unsupported/lastline/1.0.0/requirements.txt @@ -1 +1 @@ -requests==2.25.1 \ No newline at end of file +requests==2.32.4 \ No newline at end of file diff --git 
a/unsupported/microsoft-compliance/1.0.0/requirements.txt b/unsupported/microsoft-compliance/1.0.0/requirements.txt index 9d84d358..bd6f2345 100644 --- a/unsupported/microsoft-compliance/1.0.0/requirements.txt +++ b/unsupported/microsoft-compliance/1.0.0/requirements.txt @@ -1 +1 @@ -requests==2.25.1 +requests==2.32.4 diff --git a/microsoft-teams-system-access/1.0.0/Dockerfile b/unsupported/microsoft-identity-and-access/1.0.0/Dockerfile similarity index 100% rename from microsoft-teams-system-access/1.0.0/Dockerfile rename to unsupported/microsoft-identity-and-access/1.0.0/Dockerfile diff --git a/microsoft-identity-and-access/1.0.0/README.md b/unsupported/microsoft-identity-and-access/1.0.0/README.md similarity index 100% rename from microsoft-identity-and-access/1.0.0/README.md rename to unsupported/microsoft-identity-and-access/1.0.0/README.md diff --git a/microsoft-identity-and-access/1.0.0/api.yaml b/unsupported/microsoft-identity-and-access/1.0.0/api.yaml similarity index 88% rename from microsoft-identity-and-access/1.0.0/api.yaml rename to unsupported/microsoft-identity-and-access/1.0.0/api.yaml index 9a3e015d..b891edc1 100644 --- a/microsoft-identity-and-access/1.0.0/api.yaml +++ b/unsupported/microsoft-identity-and-access/1.0.0/api.yaml @@ -326,5 +326,111 @@ actions: required: true schema: type: string + - name: disable_user_account + description: Disable user account + parameters: + - name: user_email_or_id + description: User Email or Object ID + multiline: false + example: "Test.User@example.com" + required: true + schema: + type: string + - name: update_user_job_title + description: Updates user Job Title field + parameters: + - name: user_email_or_id + description: User Email or Object ID + multiline: false + example: "Test.user@example.com" + required: true + schema: + type: string + - name: user_job_title + description: Job Title to update for user + multiline: false + example: "DevOps Engineer" + required: true + schema: + type: string + - name: 
update_user_department + description: Updates user Department field + parameters: + - name: user_email_or_id + description: User Email or Object ID + multiline: false + example: "Test.user@example.com" + required: true + schema: + type: string + - name: user_department + description: Department to update for user + multiline: false + example: "Finance Department" + required: true + schema: + type: string + - name: update_user_employee_type + description: Updates user Employee Type field + parameters: + - name: user_email_or_id + description: User Email or Object ID + multiline: false + example: "Test.user@example.com" + required: true + schema: + type: string + - name: user_employee_type + description: Employee Type to update for user + multiline: false + example: "Contractor" + required: true + schema: + type: string + - name: update_user_leave_date + description: Updates user Leave Date field + parameters: + - name: user_email_or_id + description: User Email or Object ID + multiline: false + example: "Test.user@example.com" + required: true + schema: + type: string + - name: user_leave_date + description: User Leave Date + multiline: false + example: "2022-09-30T23:59:59Z" + required: true + schema: + type: string + - name: get_user_direct_groups + description: Retrieves Static Groups User is Member Of + parameters: + - name: user_email_or_id + description: User Email or Object ID + multiline: false + example: "Test.user@example.com" + required: true + schema: + type: string + - name: remove_user_from_group + description: Removes User from Specified Group + parameters: + - name: user_id + description: Object ID of User + multiline: false + example: eb6fa72b-f4f0-4ce0-94d2-dd16b4a22686 + required: true + schema: + type: string + - name: group_id + description: Object ID of Group + multiline: false + example: 2a712b67-91af-429f-9603-a5bfhgu7b151 + required: true + schema: + type: string + large_image: 
data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAK4AAACuCAYAAACvDDbuAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAACjQSURBVHhe7Z0JYBT19ce/uzvX7iYk3IegoAgIKGqpWpQj3GKxWBG1XlWUWqv/qrWeVTxarVcr1gsPRA6ltFVbLYoKAdRaLxRFVJAiCAICgRx7zLG7//cmEwlhE5LNzOxssp82GH4kOzO/+c6b936/93s/H/LYT2lKACIzEPBvxNDgvfD5Uta/5MnjYZZHz8Rb8RjejEexonIUkMobCJvxW//NYxdv7DkOsvAQNF2BoQchSbNQWtnX+tc8NpG3BHbyyndd0aawFJrRFynLO/CTbZClldC0H2N4wdbqxjzNJW9x7aJ0ewEKi2bBSO4VLZNMAqp+LHyBmfjrpqDVmqeZ5IVrB4uSMoTCvyBpjEMiYTXWIkltvtSP0a3DfdWBW57mkhduc0lR4FUYv5y+OwdGon7XSzd88Pumwh+dav5OnmaR78DmsqzqZAjCi+TDSlZLwyhyJQVuZ2JEwStWS54MyAu3OSypHABFKoWqdrRaDoyPulyUKmDoI0i8H1mteZpI3lXIlOWxXiiQF5KlbbxoGQ7cdK0NCX4uFm/tZLXmaSJ54WbCop1tyF+djbjWf58RhMbCv6PqA1DYfhZe3FFoteZpAnnhNpVUyo/CgruQTA5FImk1ZgCPPhjGBLQruAMfpESrNU8jyQu3KSxMBfB27Hr4U5ciYTQ/PjDoMwK+XyEauzo/0tA08sJtLCysjuVT4PPfRK95+/pN1QSI/ulYVnl6XryNJ99RjWVp+fGQlEUUjLUjFVuNNiLL2xHTxmJ04SdWS54GyAu3MSyP9oAYeIus48FmYOUEPjLiorgOamQMRrXbaLXmqYe8q3AgXi8rItHOcVS0TIoCPUPrjVDoWfOYeRokL9yG+NeWECT5MRiJEY6KtoYk+bh6YgjCBQ/lRxoaJi/c+pie8qOo3XXw+8+g6N9qdAHDHCY7B9HYVeYoRp605IVbH6Nj50LyXwddd1885jBZ4FZ0rppsteSpQz44qwsPSS2rGgVFegExtcBqdR++MyF5NyLaGRhZuKS6MU8NeeHWpXRPb4jBN6Bphzgy7NUUzIQccSPU5HCMCuVHGmqRdxVqs6KiI2Tlb/Sqzr5oGQ4IE4lDoAQWoHR3sdWah8gLt4bFyTAC4jPQjEHmigWvYOY06MdDVJ7AB6mQ1drqyQuX4eg9rN6KRHI8kknvuU8JOqdUahJi0elYuDA/0kDkhcvDXp2rLqXvrmpw6U220Q0BvsC16HjKNDOAbOXkg7NlkYlQxPmIxnMjL1aWq6CqP8bINsutllZJ6xbu8shxkIRFiGvtXZkZswMeaQjJmxAj8Y4o/NRqbXW0XleBl80IwpycEi3D5xrTDoYgPouXt3exWlsdrVO4pakCFLabQxH7vsU7cgUuMpJIDERR8UyzpkMrpPUJ9z/JIMT44zCMsa7mINiNWXgkMRGF0QdaY0JO6xIuD3sl1OvIyp7h6RGExsJFRnyBi1AVvay1DZO1puDMh6WVUyCJ86BpLasMkiRHYMR/hhFF/7JaWjytR7il5SdAURZRNN7Wamk58EiDLJUjHhuFkcUfWq0tmtbhKry260iEggsQ11qeaBkOMONaEV3jsyjd2d1qbdG0fOEuruyEYOhZxLRDcnIEodHQtal6H0iFc/Dynpb5gNaiZQt35gciQsJDSCYHmENILR0uUGIYI9BGuhcLVzeuCF+O0nJ9XB4iikfuBQL/B013/jotY96GTEG7gA8hivElf/Vh44kU9tDXTk704p9z+mxkiY6U+j2GKLe11I1TWqZwedira+wquro7oRnOjnGSLIqoF09sG8D5XSWcUORHmIQrUpulW5BmESdjuJH++Nt2HS98Z+AbNQVHHydZ1KAmpmF5eC
5u87W4103LFO7SqlMgCVxJ0dn8VbJrJe0CuLu3jGMKAxAa2Zs7tBT+tdPADV/FsYPnQJy6C5JchlhsPMYUv2+1tBhannCXVR4JUV4GTW3nZDBWQD13Q08Zvz1EMq1rJnxDFvjyL+N4aVfCmQ2leJhMkjegcvdYjO/8ldXaImhZwi2NdIciLEZM6/+90+kAncl/XXBkEMPaCs2ObtmFuPV/Ku7bpCLBQrMb/kxFXIm4Ng4lbXZarTlPyxlVWMTrxaTnyKd1VLRh+lpwZAjDbRAto9CH3H6ojF/3kOF34rT5raMljoWsPGkWOGkhtAzhrk5JKBAfgK6dWJ184gwSWcf7+ygYQYGYnbZRorswvZeEE9uQKXdCvNwnunEqitv9rqXs+tMyhLs7di0CvjPpBjnn+pCgTu0k4OJuzgxStKHI7on+Cjo6lSrD9XwDvqshxC+wWnKa3BYuVwdfHrsYAf8tUDVHs6M60KffeZhM995qcIDDQ378vLvENcSsFptRNRmifwaWRU6zWnKW3Bbu8qrhpKT7KPBwfKx2eDsBvYLOdhd/+jU9JHMc2DGiahhC4FG8WtHPaslJcle4pdTxojQPuu58SU4S7kXkIjR2nLY5dJJ8mNjBYTfUMDqjUHnODGhzlNwULifOyNLfoWnd3Eic6UD2fFixo57IPpxMwnVkhKEGztswtKNRRA/+f5NtrNacIveEa272LD8BzRjgVrbXoYofcs38rQv0IV9XdPrSzIScxGgkjftyMSEnt4S7aC1FR4X3IZE41c1sr870+nazozqLPABg/cVJEgk/CfgSdO75G7MwSg6ROyfL1VvC3a+A3z+V/Fqr0R0UV1S0lyAdj7eEcAVdAwTxZoyMnGy15AS5I9y3YpMh+e/Ixnox3eUEdHM0zM1DqmoQijgLy8uHWC2eJzeEy+vFAoGZiOmK1eIqu8koueeYAJWJlGNDufUS1TpBCD6H18oPt1o8jfeF+/qeQyEqc6HyejG372Y1G9Wkq0LarKaqE87dhN8qunYwwsoss+Sqx/G2cEsrOiCk/A0Jo7dbIwjp+EZLYX3MuRyIuqysTMCGDVebTnWFnJNQqD6B0lRW3m6NxbvC5Yozkvw4BWLHmEkiWYRHjuZvN1yx9+yS8AqJrBUS5eo+ieQUCNHfeblCjjeFy1VZDPVmJBOTzL2/sg310ssk3HIX3t+ryNq+V57dBxV6IgB/4BpEIj+zWjyH94RrFlqeeCGd2bVmiSGPsCaWxIo9zgqKre0TW3VeEZR9OCFHEB7B0orhVoun8J5wR0UmQPL/CVoW9hdrAF4adtN6FRFe+egQBin3a3pAPLMuRdVCFGPMxYryPlaLZ/CWcJfu+QEkaRbimveqg5OYvqxK4qWdzlV45ITyad0kBLIYiO5HXOsBMbgAb0W6WS2ewDvCfWNbZ0jKM4ipHbM5gtAQPIHmd9gcTuoo4HwSr2eqIfBIg0YBcso/E3/dFLRas443hMu7hSttn6Vo1rXEmabCtRNmD1AwpbPzE3f39JYxIES3xitdUT2qcwoO6nAP3R9PaCb7J8FPcTD0GJJ6CRLeLLRcTL208Kggidad0aEOos9cRdw903XvTqDrPnrZ/BLLq670wubY2RUuJ85063QNvY8mU2Tiobu0l0I6KxbRmHZCWieBjWIms2r8Kzr9Ud+v9g/78egRCuQMPtsxVCMAQbwNHSKjrZaskT3hmps9x86ib26FZnhv5SkJ5iA6q5ePDmFcPaJl5mzVMX2DCq0JAouR0h/ZrGHU+1HM35Y+042Pd3J7AbceKsNTBZR0vQCKNBulVUdbLVkhe1ZuWcVQiPJLUFXnl940FRLh4bIP/yTRHkGWLx08avUHEuxdX2tm0PZwPwWXHGAFMM9fLN9t4Op1cayuSiFJHy2RiG87TMG1PaW0VoQ1O+3zGJ7easAz7ySzyIi0FhFjJEaHt1itrpKdriitHIiQ/DKiqudq1nI0z4HRPwcFcWg9iyPLSIHT1s
Txz53G9zkFxfTfF0now9Ms8eEr3E1+wY3rVcz9Vke0Tq/zaofbyLJee4iUNoF8Dx1vwscxvFNBQVJ27tj++KlvFGkZKsVJGOMrt1pdI/2dcRJeoCfJ8xHznmhZYYML/Hjp6PpFu518gtNXxfB8LdEye8g0ch2wijrTwvy3xbsMjPgwiplb9hctw1Ub7yDr/TS5HekoFnx4ZoCCQ+kt4Bl4mEw1RkCMZmXpj7vRIVecSaaeQiIx0s2lN42CFDaGrOULg0LoJqcX7adVSZz2SQzvVtC51/0R0tR35D/0Cgfwg8K93bqe2iaSaDdwJNaAmeDxlKUk8EPJ2g8s2P+2tBd9+GFRAC+STxyz2rJOivpBFI9EQXElnv7DO1arK7hncVm0e2J/phs8OdvZXnVh9+Cn7UkU5B6wQNKxlHzTsSuj+CSSRrQ10K+mmxE2Rz4bYSwj9HX+Z+SC7EifiTaEhPskWV5PjTRomggxcCdWxC40A26XcEe4PGhdFruS7t7FrlQHbwJ+cld+3kXA7IFBhNI4mPxe4Fq2Z5B7sI1V6fDZc+9cTMHYJ2Td03FqBxE39uJpYavBC0RViQK2P2NJ9IdWi+O4I9zS6ERy5m8l0XpqGbSfIvore0j4S18FhWlEy+7qE1s0nPdpDGUuCoXTIc76JIpNXIO0DnyavzlYwhkdheqnyisYRhFC4lyz1KsLOC/c5VXHQhLm0CvFM/PcjERCvJki+bt7K2lLHvFw123/U3HFFyoqrLZM4E/e/9MPAP3CF3EKAsmf5mCwLny+Tw0IooRHMLxiednf1bQ+UAIL8Hp5e6vVMZwV7vJoD8jSfOiqp6ql8MDxQ/1k3NxTTltWiRcrXvFlHHdtVKG7807aHzqvD8lduPCzmHk+deFUhjnk3vTkkQaviLd6pGEIwtKjTifkOHdb+KlTxPlQtX6e6ViimL6eHqhgatf0Y6YskosoQHqKE7qdqBDeBDjUeYUnLNamn5nrTqKdS+LlBCDPkEz6kMAZ6NrxFidzGpwRLm9Fr0gzENeGmgu2PEInvw8LBgVxWkfx+x1xarNVTeG0j2P4O+8oYpMY+DjN+ih6eGZv1fCXb7S0Lu1JRQE8fIQCT5Ua54ItAf+V6Fw12WqxHWeEWxC9nv482zPDXmSteoo+/OuYoJl3kI4vokmM/DCCJbzey+Zeaa7hNugDbvlKrTev4ezOIqaTvx5wcw39gVBVhdzER/FmpSMJOfbeIh72Kq24BIJwIz11zrkhTYHu5ZFhHxYdE8LxXKq+DmzF3q9I4OSVUTMgssvS2g3PuP1yTRyLdu0/xstW/eqDJVzWw0MJ6ExMawuf9Jy5E5LN2Cuu0qoSyPLd5CJ4Y9iLVHlcoR8vDao/WeZVEsJEcg++ZifSAdHa+ZER+jDOkfgfD3nUgYPMO8jqjiii6/SKeHlK39A7QBBnozRVYLXagn3CfaOiPyTp2eqKM9nHT/f25PYBvHJ0CIfw1jZpWLBdx+RVMWx3cGKBs6/5f3axRU9hcj3DZEWk3nkDQxjIeRZeES+PNCR515/403bu+mOPcFdUdERIWghd6+SFxBnOtpp2kIi/HRlEuzRTuDyx8CAFOxesjiPmkGAdg853VSSJc1fH9kvoYbrJPvyDAtAekocuzKC4wUj+FG2L7zCn/m2g+cJdvDUM0d1Cyw0h0AN+fU8JD/RJP7HAhmr6/1RcR8GOZt/7xlV4mGzpngSuWaemrTHGm6A81V+Bkv3bsRcjQb0duAI7oxeTTpr9VDXv1pnDXkX30tP0Ey9kewXoFP7QWzZXDaRL8IpT1P2btXHcs1FD3GpzGj4NJ2wfJ5U/8a2GP/GOlHUEysfjpUYz+lI/eEm8uiZCEu7B0vIxVkvGZC5cs9By/AoE/JeY43bZhG5Oe7qSeQMVc2/ddBfFydiXfB7Hw5t1dwvK0bEcO5zfh1vXa2YSUDouPkjC1dQfgpfEG1fDCIdmmTFRM8hcuG9GfwI5cHs2Ci3vA92UQ8
iP5bVhvAo3nUg2q9V5tGbhusyv2JOwjz5tTQzL05SH4ku9uZeMc7twQo6H1BtXD4Ii/81MCciQzG7jkrJBkMSZiKrOzEdzJ/P7jx04/uLv6f/7QW19FR/+fWwIxxcF0l7MtzwbtiqGZXRj3csW3Qsf0unD7iTNcrCWbpiMBxhm9FUwui2JN10f1sD/xv2c7ov+byt8fw3jCOqYJzOtCNn0Pi3d3RNS+FWytH3NjCC7oI/qRJfAAjymTQC9FL85vMN+6TaKqD6tTOD98gS+iFUXPeapBB7uevSIoDlnnw5efXDmpzEzWcVx9TB0kx+h8/nlQXvvxRZ6cE54L4LN6aIoO6GPP77Qj8XHhMx+qws/wD9dFcW7tfuCT4m+DqeH//i2AQwuDKA3BXZc14HZpaewLprE29Tv79OD/zV9RjPe0fsjiSn4fA9hy7+uwpQpTZpmbdrtfHVPO4SVxUgYg+0Kxnimpx913KU9JJzfVTTzYtMlv/DRdPqDxfj6LgPdSdg/7iCkDcIYLo48hSzteocmFtKSTeES3JenUZ/MHaCkTYrfSUK87Is4Sqn/Oko+nEBW+JzOAk4sFsCjZ+nyNxg2kKzZf3yn484NKr4k42HbimNJSEJPXIMdBQ9iiq/R4m384RemJHSLP41E4iwkeGF18+GI95fdJXP4irdksgOWx3/IQpxFPu1mXudlVwc3hjTCZUvHwv3GBeEyHIj9uoeIP/ZW0qZscm76Ni2JIhJ2MVnWpnYPW2EelZnxjQbVams2klwFIz4FI4pepb81qqMaJ0BOT+sav5E+8my7RCumUuZwzZ/6yLaJllm+O4HTP45WWzg3RVsPnGDj5mnwiMmMTRoe3aKlVQBPIvakP9pmIFqG1+T94TAZj/ZTzGR8W9C1AsjBp7Ck6iir5YAcWIQ87NU5ci59d5NZP8oGuKzRvIFB/OIgybabyq4E5x2cTn7cdnu8mJzFoHf+tWtVvFbmTPl/tuQXklv3+BGKPTtg8sRVXO2KkDAfr+1oVDnTAwv3zaqRkKUHSLS2DHvxhf65j4LJHInZyLvkHpxN7oGba8O8TJzFtZor5jj3FJ/bRSS3RIIt+w6zeLXEACiF87Fo5wFXzDQs3CXlfSFKcxBTeeFA86Fzm0DBAwdhtvgbtQgGql+TXoNPKVuntZXcpbM/jaVddGkHHP9dR/FJv6BNV2jmbyeHoTB0F0pTDRrK+vXzwoZiyPJsqPbtUM6BA0/HWqMttsIbRfepp/pMq4X6+bNYEi+TC+UUPHR2I91T2yY4DMNPT8RUBKKXWS1pSX+neYfyjl3m0cmcYNsJ0ceMbh/AoAJnxNWGHK8p9OoyB8zzfA/3tlLfOJdNnEFu31Fpqu9kTFznjVP+hOXRM80YKw37q4jrQEmF99IvjIdhXw4CH53dBCe78DwSbijdGFAW4Q7O5ppLPjSvCHYSfoOeQvfW1kgwrgYgBB7GsqoBVss+7H9JXQ6bihSZat2wdYUmJ2H2dbgHeVB9oNN3qalk+Tni3uDd2J2EP31Qod+sCmQrutYeQXmume9dh713mU3y0qpx8PsfgKbbG/IT/IH8OncSftKOrFVwzgvwFWdTu3xsG4fJ64XH4m03GeaUnX40BHkWFpbtU0d577GWVB2JoPQ0NNWR9WL80DvdgfxK5mqHXlowyJecTVeBb7DksI/LiOZFOnAcHmnQjVPQRbm79khDtXCXRA9BofwcVLWr+fcchbvNrAGWj8++h98/PFvmNFxIJWW3q1BDIuGD4OeRhitqgjW/ufRGwlOI6f1tG0FIA4/QpVnfZzsaX4MDD36m8Klk83Q4cAq74D1tiZNwnbzQuCZQsDYdK6pK+K9+hNpMpyOORLJJWWVNhvNd0i3usxvnRiwbR923svnXLCqXs+fqq/lrFzy98V5FAkmnfSJVL0JAmodlFUf46bVKXnXj08kyRaOvzxycfqzBjP9csOz1Ude48b3Mlm4L6MC391ZwUH25nzah0lvujT
L7ylbVC7siCb0LROEPfiS+uxnJ5HQEZduy1NLBWuJ9cJ3W1AVdRLP8ps9Bt8fz0KX3oCf4H0cFMbWb/dPrdXl5ZwIbYi70txBIQRDfRkS/zo+SXnFse/lu6PolJN6djoXA9LGccsivFCfhKcjnBwXxq+4Sgh7QLovG5+KwAicxnd5BwIofhjC2nZA2Kd9O2P2792vV+WVRokR+jzQXOyomYGzRuuqHkZdNDCuYi6QxAbKyHn5nvHnOmr9unWoux3ESXroyo49ibt/E+5Vlc3iM76crsqVrPJgeWl79wDthcs6tGzyzVXd2aRQ/9IpShVTyJkTfmopJHSu5ed+rOzH8PnRjDPkQb0PgIkY2Q+fw5p4Efr9BS1vIwk44SBrbLoA3B4cxtasIt8qh17Vwbvi4/GY5u5OA/x4XNlc6uzXr/S69PaevV53bOJA7L6xshqqdgcS796Ck5PvYe//HcnhwAypjp0IQZpNpthrtg9dP/Gmjhoe+4XDNeXhG55F+Cl4gf68fWyGH48N0owqOeQp0Lbw0/690bbP7B9GVrtWpQ9WFVxSftzqG3U71Z0DgJT1rKfL7MUoKXq0tWib9+2R8cRmGKFPp9XMTJImCNnu7g2sB/JZcht9vUM1hskzZoibNh+CUj6J4+lu93uE2Hg0a117Af8jvu6qHaEbbjkeJFiwl28VE585vkGkUeH1wfBgTyaeV0t9JE54ceHyLhvEro7h2Xdxck9ecNx4n7Y+jPl/nVFlWSUhADCxGTB+JofIqq3UfGj4s17t9M3oqWd6HEYt3s3uCgksmTeoo4J7DZfQK+hvdByzQ578zcP1XKnbQ9/yqCqRSZk7uHYfJmEAiLaznfcmX8ElVAtesVbGCboCtm1eRQOaRf3kOva5riFIs+sMPIlgTtcc08TZRPyjw44G+Co5rE2gw+GKj8A5dI+9nwUORXL2LRc+LVH9UHDD7ajB9RmPd4Qhf3zYdN5DR2U2f4QiyFKNX1P1kbu7EEH+9exEe+LbxFNvyqoFQ5GcR1wfaPlFBHcCvu6ndRXMNGi/iYwtZF7YQVdRxL+808OAmDR/RjdhvxQP9DEtmcKEftx+mYHjbQNrPYviz/kHi/w1ZIB6CtCUqps+cT8L9WS3hco2O40m4n/LGfs2Bro0XGvyul2SujG57AEe2nDrsJvI/n6E3URWLLE1fhantJBLwdT1lHN/Gb+Y0sKtT86M8bMrv5zhd1wqKTe6nt9tbbK0bPnRmsD8lSFHqxMuwNTjvQEvVG38KXC5HCDyJRGIMDAdOne5rZ3Kph1FHHttGQA/FZ1ZbjFKnbVZTWEmBwH+p0zbVTC02dAaWgHnbfF76fkJRoN4f5zoN936tYTZFx2pzr4rO9a9HhTCFAqUayJvBjz6MmA9apvCY9IR2Am7rLe+z3Wo6+Chv7DLMapQf8zEPZE2pr/hsOR30CLLknWW/OdvGn1NOJvvbeBKf0UO3kfrdDMKa20fpCNABZfkrqMYvKMZaarU2SNNOg/Mawm3voc64CHFNsVptx8e9Ro87nxwbC/4mVTfqaSQSfdY5XQX8+mDJzNJP9yl8OH4orlobx4eV9ErN7FCmcP8xKISf8uZ5FpyfMezDqBmBN/mm04kdLPvM/R3O7SoeMLvuOzrY7RQ3PLmZHsJGvv73wexspuYbOmCmfdFYOAgLSq9jV9kFOLnTVqv1gDTt8sZ1jeCz1VeSqC6DopQ7FS5zYboUWdskffF/MxUtwzVwZ28zMOz9KH7xeRzfkAVhodaGO4H3yV16bAiPke/IG53svYlNo65rwp8tNq2XTcL0NbUbB5RhXERBWEOiZTfqdfJ3hpJL8siWDEXL8DHML/rD/DJbnUOSdPj9T9JNOr0pomUyP7UVsVHkOiyAqnYwnaFcgE6Ti8xM5fKbZIHT1djiK2Fxc7WWmTUlSRvbS2RxXzkmhPHkotTAweDYVVEsKWukxaWn6jAKVB/vJ2NoW6FeH70GHjG4idyCWeTLRv
jkG3uu2SakRKHpN6AqOBMT/E1ON8j02aT3X3AJovHhCEnvkoCtRo9DN3ULRRu3b9Bw9LsRPErCrDuExvf9YAqzHyLL+zpZ4NFcBXLfH2mQuknbbLgatViRjsEb7f2WHqj3fhjCSPJpGxItnxIPS40kN+QvdB2883pOiJZnZWV5K7TkmRgWfjAT0TKZC5cZ3WYNotJoBKRnIIo2Dzc4CF0177Jz+ZdxHPtexNw/jKPwuoxoG8AiEi/vJdGPfM0DCpiEU3eFPGvpQMvgOL9gHB3r3ePC+OPhctp9K2qzm4KmW9arGEGi/YBHK5p3F91DCCQhCe8jQgZvmPKy1ZoRzb/kEl8VNOFSchduQlAxH/xcgWfx1qsp/HxNHCM/iJolnOrWzmANcbC1gnxN3o6pI/dYAwJOVyWxgNvS/A7nUHDNZd6v4fmjguZi0oZuCH/Ex5UJjFoZxV3kynC1mpxBEg2IgXlAbDwnyVitGWPPs1rii2Nb8D4kkz8n32WHU0GbU7AfuzKaxKRVMUz+JGrOLNWlIyn4hp4Slg8O47zOgrlJyn5ipOtOV14g3ZirQM7vtK4i3qEHwlxWn0bwteFhNd4paByJ9iOyshmPfGQDWdaR8t2GuPwLDCkus1qbhf2XX1p1NEWL82DoA6pL6uQYJEaenTq/q4DfHCKjf3j/GT3W6xKK4u/aUD37ZvBP0P+5vvS3Jxbs96q/f5Nm7pDDZoLdjaPoM7ni4fgOQqMsx1exJK4mt+YlrkjTjBEW1/HT1QXl7VD1qzA0uIAe7LqPesY40wuLKzuhUJqFZGIcNGNviJ1LUBcXU++c303ENYdIZiHpup3FbkXpbsP0N9eQFeS6DuuHFOw3DcuzfRNXRdFN9OO3vSQzudtc1HkAeMr2xe90/JJEa9vsnluYQZj0EfTEuRgmf17tGNmHc13BZZyk4lvI970ampYjww5poO7uThb0Sor2f9FdrPZX61BJgR0Ll5ewDOO9FurAlcDv/FrF5d2lRudkcADGs18cOEattpxBEpPUb68ilrwEY8PfWq224uwzzAWhO0Wn0oXcBzVeaL5jcxWyroMLqi0m18pysuN4inXqZzG8W0kHtScKcQ9Z5EmFGdi0/RaceXC9STLNxVnhMmaSTsUJCIVmI6b2QYKjmtzFn0jhCrK+9x+u2L4shj7aXJc3dU0MZbnWTRyQi9Iucg+vwpK75+O22xy9AuefZ3bIRxS9A10fi4CwDAK9RnKYJAVHWx0qEPHCDt3c9innRMsBY1D+kgLyMVgSdFy0jPMWtzalu4shBO+isH0a4nquvQSrIT+29AdhjCi2123nXNdj3otiHedBuntXmocg0pe/lBz8i1AS/NpqdRx3xVPSdg+2ypdTeHwTRZyRXBvvZXiju+E2i5bhiTtO4cwp0XKSjC/1GPZUTnJTtIz7Vo8ThF+T7yHX4TS68E3mWF+OwCmSvyL/1gltccLPiHb0QORCAMsGJyTvofjlchjBKzChQ4X1L66RxeebgrbXKnqjUH4Wqv4DegV73tZ0FX349PiwYyWNOCe4ZGXUtZ3dM8LvT0EQtlAkeUFjk76dIIvmjoI2nrOOaydDkReaBR+8DFlCdhGcrMPVN+zxfSwEARDFt6Gqo7MpWib7vVTSZifKhAuQSk4nAXs6SWdsrTxbJ2gT8KFfId0SL7oLPBokic9A+3YMRhV9abVmDW883pyTOTT4eyT0MxBStsHnPavDhnYAWUQn4XHhPpwD6aXEfPZnleAe+JJXo0CYZpbs8gDeUYjPl8TQglfIwTuVnuy15lokD6HQ/eMVyE7TnjPJvKJbFm1Q3gbDmIytoYcw0OdOFZdG4D3TNkJ6H4Y+ApLwIkTeGc0b8ACYGwkXnhkhZMMhyx8joo03V7s0YWdzN/CecJnhBVtxgjgZgcA9FAzYXkknE/iuuXHnyjglLNuXKwqcbzAfkdhojCpIW0km23hTuIyPnnDp05uRTE1FMPvJ6VxtyBSVg/DCSq
7JldVrleUYvVpuQqU0FWOKdlmtnsO7wmUGD9bpNfUsVO0UKPLabE5WsGa5dJOTREi563gNWTZ0yw+LLJcjkZiKHwXvy3QRo1t4W7iMmaQTfh9xfRREeTmE7Pm9XLLJyeyRrWoKa22qMdYkAuS9h5QPqY+HY1joObPPPY73hVtDSXgzystPpe8eNAujuQ0ZpHf2JMjqOiMsVsqjmzXs5mJdbsLjs6L8IqKCZ/3ZdOSOcBmeE08Er0F1JZ2o274g53XP2KiZebN2ww8EF6hz9Y6EyJ+F/06ou85DiW+P1ZoT5JZwmRKfgZOCs8nvPR2S/I2bfi9nU8zZpuPZ7bqtQ61cjebyL+LOFUmuCz/wQbkMCUzFtn/eipLOVda/5Azumiy7eTN6MGRhlun/Gu4NM7alZ+WFQSFb0ht5vdo0Eu2C7byC12p0EvZnBfFLaPq55H59YLXmHLlncWszNLQJkvATev5mQpToPesObBnPWBXFizuMZlX23kLB2JmrY1jolmglwYAo/BuR8pG5LFomt4XLHO2PILH9Snp3XE+RsWtB2w4S77mfxnDHBjUjn5eH1iZ8FMWrZQmzoo7jSDLd7cADUPechbEdHVl56ya57SrUxlyUGZ0ISXwYmtYdSZccxmQKY9oKuKGXjBOLAg3uxcD6XhdNYs5W3axKw8Ge43egeny2Aob+O2wLPeK1qdtMaTnCrWFZ+eEQ5eeQSBxDfq9rbxReAtq/wI8Luoo4kXzfmi0BuHTSbvInPqpI4J/kWvB2WbwJoytp81yUIyh9joh2IUpC7+XC+GxjaXnCZUorOkCRZpDVnQLVxUo6JAvuUN6rIURfnKbIM25R+iJ3tnokwq0eF7m6mX85tOSFGBXaaLW2GNzqRvdZnZJQoV+DZOIOiqBz35dvCgpXkvE/jmjkeoxpV261tiharnBrWB45HSL7vWpnM4ulJcP+rChW0F29GSfKf6GGFnvBLd8SDQs9T6/LEoSCH5pjmC0VLsqhKBuga6fgNfmhlixapuVb3BpKq7pADDxMlzwJGm9p0oIIBFLk076HRPJnGBr8n9Xaomk9vl9JwTaUlZ1Hr9P7IYueWYLSbCQpQcKdg0j8lNYiWqb1WNwaeLz3rdh58Afuh653cG28127Yn1WkChiJ2xEMPojBPtdmDr1A64q2GR7LPCk4F4Y+DrL8BfmGuecL8ipoWdkKPXkm3rj7z61NtEzrs7i1eXVXDxQVzIaRHEnW12r0OLw1V0BaiVj8Qowu/MRqbXW0Potbm/Htv8H2iklkhWeSr+h2CnfTEUWevv07opUjW7NomdYtXGZSx0ro71xOEfnFCCtlpu/oNarzDaqQSkynQOxnLXVSoSm0blehNhy0/Sc2FD5xHjS1h2eCNhZtiPzZiHoJfOHFZiJ9nrxw92NZxRGQgjOR1IdCy7JGuCiHEFhDgeR5GF6w0mrNQ+RdhbqMaPM5dn43HgH/UxCF7KUA8k6MAd9LqCgfnRft/uQtbn18kBJRFfs/Eu90ch0KXS1Ex6uYU7gbfukeDPG7v6I5B8gLtyF4u6vOkVGQpGcQV7s4Lt7qJBleUnEphilz6O85OjviPHnhNoY3KvqjIDgPqn4MEg75vZwAJElroMYuJXflTas1Tz3khdtYODldEB+jHvsJNN3e5HQOwhRhERLR8+zapLmlkw/OGgtXTt9ddj69vm9G0MZKOoocp898AFHxzLxoG0/e4mbC29GzkPI/AV0vyNzvpa4PShVIJq+ErMxrjfkGzSEv3EwpjZxAwnsCqjYQiSaOmlUX5diEZOICDA0us1rzNIG8q5ApJeH/orLyJMji82Yid2MRAwkIgTcRN4blRZs5eeE2B84ZiO2+AH7/A5DlAyeni2KCArHHEI1ObIkrb90k7yrYwcLVEroediEE/x8R04qthej7IisxGMYtCAdn5P3Z5pMXrn34sCI6BIIwF5rei/zX6lauJhmUvkZcuxTDwq+hBRXlyCZ54drN63sORT
A4F0njBDK8PhLye0glz8FJwfXWT+SxgbxwneD18vYIyXeTuU0hEb8Rw9rssP4ljy0A/w8b4748BFusfwAAAABJRU5ErkJggg== diff --git a/unsupported/microsoft-identity-and-access/1.0.0/requirements.txt b/unsupported/microsoft-identity-and-access/1.0.0/requirements.txt new file mode 100644 index 00000000..480d0c4b --- /dev/null +++ b/unsupported/microsoft-identity-and-access/1.0.0/requirements.txt @@ -0,0 +1 @@ +requests==2.32.4 \ No newline at end of file diff --git a/microsoft-identity-and-access/1.0.0/src/app.py b/unsupported/microsoft-identity-and-access/1.0.0/src/app.py similarity index 78% rename from microsoft-identity-and-access/1.0.0/src/app.py rename to unsupported/microsoft-identity-and-access/1.0.0/src/app.py index 9eb5b21e..19f7729f 100644 --- a/microsoft-identity-and-access/1.0.0/src/app.py +++ b/unsupported/microsoft-identity-and-access/1.0.0/src/app.py @@ -304,17 +304,17 @@ def remove_administrative_unit_member(self, tenant_id, client_id, client_secret, return {"success": False, "reason": "Bad status code %d - expecting 200." 
% ret.status_code,"error_response":ret.text} - def list_risky_users(self, tenant_id, client_id, client_secret, amount=20, skip=0): + def list_risky_users(self, tenant_id, client_id, client_secret, amount=50, skip=0): graph_url = "https://graph.microsoft.com" session = self.authenticate(tenant_id, client_id, client_secret, graph_url) if amount == 0 or amount == "": - amount = 20 + amount = 50 if skip == 0 or skip == "": skip = 0 #graph_url = f"https://graph.microsoft.com/v1.0/identityProtection/riskyUsers?$top=%d&$skip=%d" % (int(amount), int(skip)) - graph_url = f"https://graph.microsoft.com/v1.0/identityProtection/riskyUsers" + graph_url = f"https://graph.microsoft.com/v1.0/identityProtection/riskyUsers?$top=%d" % (int(amount)) ret = session.get(graph_url) print(ret.status_code) print(ret.text) @@ -498,5 +498,149 @@ def reset_user_password(self, tenant_id, client_id, client_secret, user_email_or return {"success": False, "reason": "Bad status code %d - expecting 200." % ret.status_code, "error_response":ret.text} + def disable_user_account(self, tenant_id, client_id, client_secret, user_email_or_id): + graph_url = "https://graph.microsoft.com" + session = self.authenticate(tenant_id, client_id, client_secret, graph_url) + + graph_url = f"https://graph.microsoft.com/beta/users/{user_email_or_id}" + + headers = { + "Content-type": "application/json" + } + request_body = { + "accountEnabled": "False" + } + + ret = session.patch(graph_url, json=request_body,headers=headers) + print(ret.status_code) + print(ret.text) + if ret.status_code < 300: + data = ret.json() + return data + + return {"success": False, "reason": "Bad status code %d - expecting 200." 
% ret.status_code, "error_response":ret.text} + + def update_user_job_title(self, tenant_id, client_id, client_secret, user_email_or_id, user_job_title): + graph_url = "https://graph.microsoft.com" + session = self.authenticate(tenant_id, client_id, client_secret, graph_url) + + graph_url = f"https://graph.microsoft.com/beta/users/{user_email_or_id}" + + headers = { + "Content-type": "application/json" + } + request_body = { + "jobTitle": user_job_title + } + + ret = session.patch(graph_url, json=request_body,headers=headers) + print(ret.status_code) + print(ret.text) + if ret.status_code < 300: + data = ret.json() + return data + + return {"success": False, "reason": "Bad status code %d - expecting 200." % ret.status_code, "error_response":ret.text} + + def update_user_department(self, tenant_id, client_id, client_secret, user_email_or_id, user_department): + graph_url = "https://graph.microsoft.com" + session = self.authenticate(tenant_id, client_id, client_secret, graph_url) + + graph_url = f"https://graph.microsoft.com/beta/users/{user_email_or_id}" + + headers = { + "Content-type": "application/json" + } + request_body = { + "department": user_department + } + + ret = session.patch(graph_url, json=request_body,headers=headers) + print(ret.status_code) + print(ret.text) + if ret.status_code < 300: + data = ret.json() + return data + + return {"success": False, "reason": "Bad status code %d - expecting 200." 
% ret.status_code, "error_response":ret.text} + + def update_user_employee_type(self, tenant_id, client_id, client_secret, user_email_or_id, user_employee_type): + graph_url = "https://graph.microsoft.com" + session = self.authenticate(tenant_id, client_id, client_secret, graph_url) + + graph_url = f"https://graph.microsoft.com/beta/users/{user_email_or_id}" + + headers = { + "Content-type": "application/json" + } + request_body = { + "employeeType": user_employee_type + } + + ret = session.patch(graph_url, json=request_body,headers=headers) + print(ret.status_code) + print(ret.text) + if ret.status_code < 300: + data = ret.json() + return data + + return {"success": False, "reason": "Bad status code %d - expecting 200." % ret.status_code, "error_response":ret.text} + + def update_user_leave_date(self, tenant_id, client_id, client_secret, user_email_or_id, user_leave_date): + graph_url = "https://graph.microsoft.com" + session = self.authenticate(tenant_id, client_id, client_secret, graph_url) + + graph_url = f"https://graph.microsoft.com/beta/users/{user_email_or_id}" + + headers = { + "Content-type": "application/json" + } + request_body = { + "employeeLeaveDateTime": user_leave_date + } + + ret = session.patch(graph_url, json=request_body,headers=headers) + print(ret.status_code) + print(ret.text) + if ret.status_code < 300: + data = ret.json() + return data + + return {"success": False, "reason": "Bad status code %d - expecting 200." 
% ret.status_code, "error_response":ret.text} + + def get_user_direct_groups(self, tenant_id, client_id, client_secret, user_email_or_id): + graph_url = "https://graph.microsoft.com" + session = self.authenticate(tenant_id, client_id, client_secret, graph_url) + + graph_url = f"https://graph.microsoft.com/beta/users/{user_email_or_id}/memberOf?$filter=NOT(groupTypes/any(c:c eq 'DynamicMembership'))&$count=true" + + headers = { + "ConsistencyType": "eventual" + } + + ret = session.get(graph_url,headers=headers) + print(ret.status_code) + print(ret.text) + if ret.status_code < 300: + data = ret.json() + return data + + return {"success": False, "reason": "Bad status code %d - expecting 200." % ret.status_code, "error_response":ret.text} + + def remove_user_from_group(self, tenant_id, client_id, client_secret, user_id, group_id): + graph_url = "https://graph.microsoft.com" + session = self.authenticate(tenant_id, client_id, client_secret, graph_url) + + graph_url = f"https://graph.microsoft.com/beta/groups/{group_id}/members/{user_id}/$ref" + + ret = session.delete(graph_url) + print(ret.status_code) + print(ret.text) + if ret.status_code < 300: + data = ret.json() + return data + + return {"success": False, "reason": "Bad status code %d - expecting 200." 
% ret.status_code, "error_response":ret.text} + if __name__ == "__main__": MsIdentityAccess.run() diff --git a/microsoft-intune/1.0.0/Dockerfile b/unsupported/microsoft-intune/1.0.0/Dockerfile similarity index 100% rename from microsoft-intune/1.0.0/Dockerfile rename to unsupported/microsoft-intune/1.0.0/Dockerfile diff --git a/microsoft-intune/1.0.0/README.md b/unsupported/microsoft-intune/1.0.0/README.md similarity index 100% rename from microsoft-intune/1.0.0/README.md rename to unsupported/microsoft-intune/1.0.0/README.md diff --git a/microsoft-intune/1.0.0/api.yaml b/unsupported/microsoft-intune/1.0.0/api.yaml similarity index 100% rename from microsoft-intune/1.0.0/api.yaml rename to unsupported/microsoft-intune/1.0.0/api.yaml diff --git a/unsupported/microsoft-intune/1.0.0/requirements.txt b/unsupported/microsoft-intune/1.0.0/requirements.txt new file mode 100644 index 00000000..480d0c4b --- /dev/null +++ b/unsupported/microsoft-intune/1.0.0/requirements.txt @@ -0,0 +1 @@ +requests==2.32.4 \ No newline at end of file diff --git a/microsoft-intune/1.0.0/src/app.py b/unsupported/microsoft-intune/1.0.0/src/app.py similarity index 100% rename from microsoft-intune/1.0.0/src/app.py rename to unsupported/microsoft-intune/1.0.0/src/app.py diff --git a/microsoft-teams/1.0.0/Dockerfile b/unsupported/microsoft-security-and-compliance/1.0.0/Dockerfile similarity index 100% rename from microsoft-teams/1.0.0/Dockerfile rename to unsupported/microsoft-security-and-compliance/1.0.0/Dockerfile diff --git a/microsoft-security-and-compliance/1.0.0/README.md b/unsupported/microsoft-security-and-compliance/1.0.0/README.md similarity index 100% rename from microsoft-security-and-compliance/1.0.0/README.md rename to unsupported/microsoft-security-and-compliance/1.0.0/README.md diff --git a/microsoft-security-and-compliance/1.0.0/api.yaml b/unsupported/microsoft-security-and-compliance/1.0.0/api.yaml similarity index 100% rename from 
microsoft-security-and-compliance/1.0.0/api.yaml rename to unsupported/microsoft-security-and-compliance/1.0.0/api.yaml diff --git a/unsupported/microsoft-security-and-compliance/1.0.0/requirements.txt b/unsupported/microsoft-security-and-compliance/1.0.0/requirements.txt new file mode 100644 index 00000000..480d0c4b --- /dev/null +++ b/unsupported/microsoft-security-and-compliance/1.0.0/requirements.txt @@ -0,0 +1 @@ +requests==2.32.4 \ No newline at end of file diff --git a/microsoft-security-and-compliance/1.0.0/src/app.py b/unsupported/microsoft-security-and-compliance/1.0.0/src/app.py similarity index 100% rename from microsoft-security-and-compliance/1.0.0/src/app.py rename to unsupported/microsoft-security-and-compliance/1.0.0/src/app.py diff --git a/microsoft-security-oauth2/1.0.0/Dockerfile b/unsupported/microsoft-security-oauth2/1.0.0/Dockerfile similarity index 100% rename from microsoft-security-oauth2/1.0.0/Dockerfile rename to unsupported/microsoft-security-oauth2/1.0.0/Dockerfile diff --git a/microsoft-security-oauth2/1.0.0/api.yaml b/unsupported/microsoft-security-oauth2/1.0.0/api.yaml similarity index 100% rename from microsoft-security-oauth2/1.0.0/api.yaml rename to unsupported/microsoft-security-oauth2/1.0.0/api.yaml diff --git a/unsupported/microsoft-security-oauth2/1.0.0/requirements.txt b/unsupported/microsoft-security-oauth2/1.0.0/requirements.txt new file mode 100644 index 00000000..bd6f2345 --- /dev/null +++ b/unsupported/microsoft-security-oauth2/1.0.0/requirements.txt @@ -0,0 +1 @@ +requests==2.32.4 diff --git a/microsoft-security-oauth2/1.0.0/src/app.py b/unsupported/microsoft-security-oauth2/1.0.0/src/app.py similarity index 100% rename from microsoft-security-oauth2/1.0.0/src/app.py rename to unsupported/microsoft-security-oauth2/1.0.0/src/app.py diff --git a/passivetotal/1.0.0/Dockerfile b/unsupported/microsoft-teams-system-access/1.0.0/Dockerfile similarity index 100% rename from passivetotal/1.0.0/Dockerfile rename to 
unsupported/microsoft-teams-system-access/1.0.0/Dockerfile diff --git a/microsoft-teams-system-access/1.0.0/README.md b/unsupported/microsoft-teams-system-access/1.0.0/README.md similarity index 100% rename from microsoft-teams-system-access/1.0.0/README.md rename to unsupported/microsoft-teams-system-access/1.0.0/README.md diff --git a/microsoft-teams-system-access/1.0.0/api.yaml b/unsupported/microsoft-teams-system-access/1.0.0/api.yaml similarity index 100% rename from microsoft-teams-system-access/1.0.0/api.yaml rename to unsupported/microsoft-teams-system-access/1.0.0/api.yaml diff --git a/unsupported/microsoft-teams-system-access/1.0.0/requirements.txt b/unsupported/microsoft-teams-system-access/1.0.0/requirements.txt new file mode 100644 index 00000000..480d0c4b --- /dev/null +++ b/unsupported/microsoft-teams-system-access/1.0.0/requirements.txt @@ -0,0 +1 @@ +requests==2.32.4 \ No newline at end of file diff --git a/microsoft-teams-system-access/1.0.0/src/app.py b/unsupported/microsoft-teams-system-access/1.0.0/src/app.py similarity index 100% rename from microsoft-teams-system-access/1.0.0/src/app.py rename to unsupported/microsoft-teams-system-access/1.0.0/src/app.py diff --git a/recordedfuture/1.0.0/Dockerfile b/unsupported/microsoft-teams/1.0.0/Dockerfile similarity index 100% rename from recordedfuture/1.0.0/Dockerfile rename to unsupported/microsoft-teams/1.0.0/Dockerfile diff --git a/microsoft-teams/1.0.0/MicrosoftTeams-image.png b/unsupported/microsoft-teams/1.0.0/MicrosoftTeams-image.png similarity index 100% rename from microsoft-teams/1.0.0/MicrosoftTeams-image.png rename to unsupported/microsoft-teams/1.0.0/MicrosoftTeams-image.png diff --git a/microsoft-teams/1.0.0/README.md b/unsupported/microsoft-teams/1.0.0/README.md similarity index 100% rename from microsoft-teams/1.0.0/README.md rename to unsupported/microsoft-teams/1.0.0/README.md diff --git a/microsoft-teams/1.0.0/api.yaml b/unsupported/microsoft-teams/1.0.0/api.yaml similarity index 
100% rename from microsoft-teams/1.0.0/api.yaml rename to unsupported/microsoft-teams/1.0.0/api.yaml diff --git a/unsupported/microsoft-teams/1.0.0/requirements.txt b/unsupported/microsoft-teams/1.0.0/requirements.txt new file mode 100644 index 00000000..480d0c4b --- /dev/null +++ b/unsupported/microsoft-teams/1.0.0/requirements.txt @@ -0,0 +1 @@ +requests==2.32.4 \ No newline at end of file diff --git a/microsoft-teams/1.0.0/src/app.py b/unsupported/microsoft-teams/1.0.0/src/app.py similarity index 100% rename from microsoft-teams/1.0.0/src/app.py rename to unsupported/microsoft-teams/1.0.0/src/app.py diff --git a/microsoft-teams/1.0.0/src/teams.py b/unsupported/microsoft-teams/1.0.0/src/teams.py similarity index 100% rename from microsoft-teams/1.0.0/src/teams.py rename to unsupported/microsoft-teams/1.0.0/src/teams.py diff --git a/unsupported/misp/1.0.0/requirements.txt b/unsupported/misp/1.0.0/requirements.txt index fd7d3e06..480d0c4b 100644 --- a/unsupported/misp/1.0.0/requirements.txt +++ b/unsupported/misp/1.0.0/requirements.txt @@ -1 +1 @@ -requests==2.25.1 \ No newline at end of file +requests==2.32.4 \ No newline at end of file diff --git a/unsupported/nlp/1.0.0/requirements.txt b/unsupported/nlp/1.0.0/requirements.txt index ade97173..43e5970b 100644 --- a/unsupported/nlp/1.0.0/requirements.txt +++ b/unsupported/nlp/1.0.0/requirements.txt @@ -1,4 +1,4 @@ cyberspacy==1.1.1 tika==1.24 -requests==2.25.1 +requests==2.32.4 spacy==2.3.5 diff --git a/unsupported/office365mgmt/1.0.0/requirements.txt b/unsupported/office365mgmt/1.0.0/requirements.txt index bb17f80b..73f08b38 100644 --- a/unsupported/office365mgmt/1.0.0/requirements.txt +++ b/unsupported/office365mgmt/1.0.0/requirements.txt @@ -1,2 +1,2 @@ # No extra requirements needed -requests==2.25.1 +requests==2.32.4 diff --git a/servicenow/1.0.0/Dockerfile b/unsupported/passivetotal/1.0.0/Dockerfile similarity index 100% rename from servicenow/1.0.0/Dockerfile rename to 
unsupported/passivetotal/1.0.0/Dockerfile diff --git a/passivetotal/1.0.0/api.yaml b/unsupported/passivetotal/1.0.0/api.yaml similarity index 100% rename from passivetotal/1.0.0/api.yaml rename to unsupported/passivetotal/1.0.0/api.yaml diff --git a/unsupported/passivetotal/1.0.0/requirements.txt b/unsupported/passivetotal/1.0.0/requirements.txt new file mode 100644 index 00000000..480d0c4b --- /dev/null +++ b/unsupported/passivetotal/1.0.0/requirements.txt @@ -0,0 +1 @@ +requests==2.32.4 \ No newline at end of file diff --git a/passivetotal/1.0.0/src/app.py b/unsupported/passivetotal/1.0.0/src/app.py similarity index 100% rename from passivetotal/1.0.0/src/app.py rename to unsupported/passivetotal/1.0.0/src/app.py diff --git a/unsupported/python3-playground/1.0.0/api.yaml b/unsupported/python3-playground/1.0.0/api.yaml deleted file mode 100644 index 83797dd0..00000000 --- a/unsupported/python3-playground/1.0.0/api.yaml +++ /dev/null @@ -1,36 +0,0 @@ -app_version: 1.0.0 -name: Python3 playground -description: A test app made for you to personally change the code -contact_info: - name: "@frikkylikeme" - url: https://shuffler.io - email: frikky@shuffler.io -tags: - - Testing -categories: - - Testing -actions: - - name: run_python_script - description: Runs a python script defined by YOU - parameters: - - name: json_data - description: The JSON to handle - required: true - multiline: true - example: '{"data": "testing"}' - schema: - type: string - - name: function_to_execute - description: The selected python function to run - required: true - multiline: true - example: '1' - options: - - function_1 - - function_2 - schema: - type: string - returns: - schema: - type: string -large_image: 
data:image/jpg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBUODAsLDBkSEw8VHhsgHx4bHR0hJTApISMtJB0dKjkqLTEzNjY2ICg7Pzo0PjA1NjP/2wBDAQkJCQwLDBgODhgzIh0iMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzMzP/wAARCACuAK4DASIAAhEBAxEB/8QAHAAAAgIDAQEAAAAAAAAAAAAAAAEFBgIEBwMI/8QAQhAAAgECAgUICAUDAwMFAAAAAQIDABEEEgUTITFBBiIyM1FSYfAHFCNCcYGRoSRDYrHBNNHhVZKTU1RyFSVzgrL/xAAbAQACAwEBAQAAAAAAAAAAAAAAAwIEBQEGB//EAC8RAAICAgECBgAFAwUAAAAAAAABAgMEETESIQUTFDJBURUiM0KhBnGBI1JhsfD/2gAMAwEAAhEDEQA/AO3Es7iWUZcQuxIx73nbQCyuZY1zYlhz4+AHm1BDK4SUhsSeg/AUrMzmOM2xQ6T8CPNqABQqKUiOaJ+sbu0iqlNSW/C7xLfbempV1ZoebCvWrxalmQJrCPwe4Jxv58aAHtksZuYU6oD3qLvn1+X8Va2q8KG5rKJrMzdSe72UWbWasket2vn4WoAY9mxMXPaTrR3axCosZhVr4U7WlvtB82pjnswhsrL1xPvVrYjH4HCQCaaaOHBHejsAWPh9q6ls42lybJAdQktkiTq271BLPIJXW2JXoR8CPN6q+K5d6KiLKiTYtR1aqmUL8z/moib0h4xjmiwEWsG6SRzdfkBamxx7JcITLIrj8l/BdHMka5sQ3TTgKQCopjibNC3WPfatc2PL3TIYtHFhUc721bEn71lHy90ohynDYUoekiqy5vvU/SWEPV1nSCilBC7AYYdGQHefj9aGLPleQZZI+qXv1RMP6QjmCYzRt4eCRS7vrU1geWeiMYcs87wy39mZlyhPnupcqLI8oZG+uXDLDdhJrgt8Vu1XhQo1YbVEuJD7X9HnbWEUoxCjUSo+II65SCpHxFZrZsxh5oXrv1edtKHchlXJqQfwu8yX49lBUPZZeYkeyIj3qWaPV6wL+DG9ON+2mxVQrTDMjdSB7tAAS7PrmW2KXYsfaO396alkYvGM0z9YndoIcMI2YetkXR+AHm/CgAsxWE5Z161jubtoAQVVVokN4G2yP3fNhQVVkETkrh16D36RpBkZC8Ythl6xeJPm1MlAgkkGbDHoLxBoALBFMSnPE21pO7QVDoIWbLCvRl7fDz2UlKmMmHZhfzAd/ndQxURAy/0nuAb7+b0AZElyryLq2ToJ36WZg+uyEzHYYfDtobOHXX2MxPsSNw+NL2mtIBX1220ndbzagBj2YIQl1fpnuVq43G4XR2EPrM6phgLibeSewDj8KjtO8osNoaLJCNZiHvmiOy/iewVz9m0hp/HogD4iZicka9FB4dg8abCrq7vshFlyj2j3ZNaY5cYnFjU6OTURpsEzdNvluFQeF0dpbT+JMkMU2KcnbK5uB/8AY7KvOhOQWFwwWfSdsRLv1Q6C/HvftVyjjSKMJGioiiwVRYD5U53Rr7QQpUzs7zZznBejXEyANjsdHF2rAuY/U7PtU5h/R9oOOxkGInI355SAfkKtlFJlfZL5HRx64/BAryM5PqNmjUPxdj/NYtyJ5POD/wC3qvisjj+asFFQ8yf2T8uH0U7E+jnRUqkYebFQN25w4+hqv4/0eaTwoLYOWLFoNuXoN9Dcfeuo0UyORZH5Fyxq5fBw9H0noLEsgbEYOXe0bAqD8txq3aK5cxT6qPSqCBk2LLH0WP6hwq9YzA4XSGHMOLgjmjPB1v8ATsqgac5By4ZWxGii8sY2nDsecB+k8fhvpvmV29pLTEuuyrvB7RdUnWYLikKs7AZY1NwR2/zWYJRiyDO8nTXfkrlWhtOY3QeItEWaME5om4
dtr7jXSdGaQh0phRicA9yba/NvB7PjSLK3AsV2qf8Ac2gqqhhDFom2ma/RPZ57aZAkURuciJ0H79IFDESn9H74O+/m1M5Qi67qD1QG+ljRks7iVlKSL0Yu/RcqxlClpW6UXdpHWCULKR61+WRut4/ehc+sYRkDFfmHgRQA76w64rkKbBFuzfKlmye3yl2bfD3fl530zm1i663rX5dt1IZs5MdvW/fB3W82oAAuQFAdYr737lQnKLTaaIweoQ553F42vv8AE+AqSxmLhwGClxC7MOo9rc7WPYL1zST1vTmlVCrnmmOVEvcKOHyFTgt92Jtm0tLkWA0djuUOlCiM0krnNLK+5R2n+1dR0NoTCaFwupgW7nbJKek58fDwo0NoeDQ2AXDRC7nbJId7t2/2qSrs7HIK6lHu+R071jei9LHGV6V6hdOcpcDoJF1+eSdxdIU6R8T2Dxql4r0kaQkZvVsJhok4ZyXI/amwosn3SEzvrh2bOn3orj7+kDlATsxECjsEA/vQnpF0/Gec+FkA4GG37Gm+jtF+srOwXovXNtG+lL2oTSuCVI2NtbhyTl+Knf8AKug4bFw4zDxz4eRZYpFDK6G4IpM65Q9yHwsjP2s2L0jSvRelkytcpeSkOl0fEYZVixwGxvdk8G8fGqDgMZjNBaSayukiNlliY2zeB/iux1WOVnJxdJ4Y43DR3xkS7QPzF7PiOH0psZ/DK9lX7o8m/gNIQaSwiY6A3BHUdp8fPCtu+T2oGsZt8e/JXNOTuln0TjlYldVIbNfcpPvV0mNiWLQW156252fKoSWhlc+tDyhBqlfOr7TNfofOnlzexLFAm6XvViNXkbV/0v5l99/NqZCGMCT+m/LPG/m9RGDylLRFtYzbpe7SIL+xD5XUX13e+fndSVVEZWE3wp6xjvB82rXxs0cGj3eW/qkakxkb2IGwfvQBUOV2lBisUuDiGSOE3cDZmbt+QqZ5F6HGGwh0jMntZxaMH3U/z+1VTAYaXTGl4ona7Ty3kPG29q6qipHGqIAqqAABwFGxMFuXUZUUXFFxQOCg7qVxQT4GgDkvKR0xnLqaLFPlhGIjiZr9FLC/7mrdyp0DoeHkvipEwkEDYeLNFIigNmG4X433be2qxyk5N6axfKPSE+H0fNJDJJmR1tYjKB2+FR0/J3lXPAkMuCxssSdGN5LqPgCa0tKSi1LWjMW4uScd7/4K6xtXkzVM4nktp3DYeSebRkyRRqWdjawA+dQTNVuMoy4ZVcZR5QM22umeivHSSYTSGAdiY4WWRAfdzXBH1F65czV0X0Sn8Vpb/wCOL92pGX+kx+L2sR1CiikTbfesk1h0rdm+nelegDnXLDQ4wGkPW4kAw+JNyANivxHz3/WpnkppA4/A+qO4SXDe+D014VOaawA0nomfDbM5GaM9jDaP7fOud6FnXB6XgaW4jZsrgcL/AOaGxGuiW/s6OGDe2CFETfD3/G1MnJ7UpnVvyu5Su7OjygDFflqNx87aalhKzRgetHrFO4DwoHhcOutQauJelFuzVXuVkxXR6KGtHM2yPstt8/GrCS7OrygLiR1acDVO5Vvnx0WbZKFJde6eH2qM3qOzj4NjkThL4rFYth0FEa37TtP2A+tXO9QXJKDVaED8ZJWY/sP2qdojwcitId68MZjIcBg5cViHCQxLmYmvaqR6RsaY8Lg8EGIEjNI4HELsH3P2p1VfmTUSNs+iDkQWluXGlMdKwwsjYPD+6iWznxLfwKhTp/S/+qY3/nb+9R7NXmzVrxqhFaSMd2zk9tkg3KDTP+rY3/nb+9eR5Raa/wBWxv8Azt/eo9mryZql0R+g6pfZvy6d0tNE8UulMa8bizK0xII7DUWzb6ZavJmrutcI5vYmauh+iliJNMm+3Vxfu1c5JvXSvRZh3TBaXxbAiNikSk8SLk/uKp58unGmyziJu5F810nfb616w4x1cB2up3+FapasC3GvnUMqyuSknweldaa4Jy9F68MNIHgQ+F
q9q9hXPrgpfZntabQ+BtXM+UGDGE07io0GVWbWLbsbb+966XVM5Zw2x+FmA6cRU/I/5rlj0iElsnNF4g4rR8BZi0sqArL3PD7VuhWY6oNlmXpScGqC5LyM2iDCRaHOdY/d7P4+tTjZWQRyHLhx1b8TU09rZJGTBlcRynNiG6D8APN6pPKK/wD6uUfa6oFZu01dVVUQxxtngbpyH3apunkC6RsvOQJZG7Rek5D1AC08nly6Awg/Sf8A9GpOovk+19BYXwBB/wBxqTvTK/agHxFcw9IkpblBh04Jhh92NdOvXMvSTEyaYwU1ua+HK37SG/yKuYn6qKuZ+kylsa8i1MtWCq0kixoLu5CqBxJNgK1vgy9HrhMHitIYpcNg8PJPO20IgubePYPE1YU9HPKORAxiwyX915xcfQV0HQuiIOTujUwkCqcQwBxE1trtb9hwrdJYm5O3415vM8fVVjhXHejXo8N6opzZzA+jTlEdwwf/AD/4rA+jLlITuwP/AD/4rqNz21iWPbVJ/wBSWf7B/wCGQ+zm+D9FulHnA0hjMLhYL85omMjkeAsB8/tXQsLhcLovR8OjtHxavDQjZfaWJ3kniazLeNYlqzc/xq7Kj0a0i1j4UKntDJrAtspFqwLVhSkX0iX0a18Mw/VW5WjowWwpPaxrdvXtfD9+mhv6Mq73sdVflmt48Ef1P+wqz3qscsCMmDXjmY/tT73qDYs8+SoZcLOxsYVfnr3tlWIlQgkkF8Oegg3iq9yXjAjlmBOuVuZH3tlWIFlYyIuadunH3a7U9wQCBVkLwjLhl6xDvNVflDCfWIplAELLaMcatJu7CVkySL0Yu9UJygw4khGJBtLcZ4+4KjkR3Wzq5PbkvLm0W8XGOQ/exqbqq8m8Rqsc8O4TLs+I2j7Xq1VHFs66kdktMKqPpD0c+M0CmKiUs+EfOwG/IRZvpsPyq3UmVXUhgCCLEEXBFW65uEkxVkOuLR88k1vaAs3KTRinaDio/wB6vmlPRnh8TM8ujcYcIr7dTIhdV+BBuBWvo30bYvAaVwmMbSsDrBMshVYGBNjuvetJ5Nco8masayMuC7zn8RJ/5V5E2rLEN7d/jXiWr5tky/1Zf3PTVr8qGWrAtQBncLcC53nhXscKv/cRUuFNlq3Bf9E3KMeTWLeNYFq2jhF/7mKhdH5+jiI2+G2uvByZPSj/ACjvmwXyaZasbM5CrtJ4VvHRUnGVf9prZw2ASCTOSXfh2CmU+EZU7Epx0vl9jksmtLszZijEUSoOC2rO9Hzor2MIqEVFcIzG9vY6qPKiTWaSij4Rxj6k1bfja1UTHzeuaQllBJDucvw3D9qq5s+mCX2ditk5yfjEeEUWAmclom4C9TAzs5SMgYlesY7iK1cBAuHwSwA5kYc+XueFbRUSKIy2WNejL3qswj0xSIg2bWKJiPWreztu8768MVC2IjkiQD1sjn33W82r3yiMarMZA352/JQVzAQZ8gX87veHnsqbWwKVGzYbErLGedE1x8qu8Ey4iFJU6LrcVXNMYfLLrhGIhut2+Ne2g8bq3OFkNlc3Q9h7KyqZ+nvdcuGNa6o7RYaKxv4fWi9aooyovWN6L0AR2J2TyD9VeBNbWMibOZACQd9uFaJbtrxGdCdd0upctmpS1KK0MtWPSYAbydlYlqSyFHV13qbiqPUtrq4H6euxsz4GWKIyEqbb7CvbROxpf/EVhNpHXQmNYyGfZtNbWBw7QREuLM1iQeArew6KXmxljbcUu7Kdk5eU1Pk3KKxvRevTlAyorG9BYKCSQAN5PAUAaGmcX6to9wp9pJzF8O0/Sq3o7Cy4jGIsYF1523sr30njDjcUzg+zUZUHYKk9D4S0RZyYjv1nf8BWVCXqcjf7UOa6IkmgjEZ1IthRskB338PtTJTIpk/pfy7b7+NO5c63LkKfk9/z/FF8hMuUvn/Jt0a1RIly5DqT+F/MJ3+d1ByGIazZhPctvv5vWQIca1V1aJsMQ96kWCKJ2X
Mjboe75/mgDwxkHrMRTEAa4j2IG751WZIZMPO0TgiRNpt/FW4goRGx1jv0X7laGP0eMQNWttcm3Xn3h2VTzMXz4duUMrn0vvwZ6Nx4xceRz7Zel4jtreqpIXjk2MySIfhb4VPYHSKz2SWyTD5K1VsPO2/Ju7SX2Tsq/dHg36KXhRWqIHevCTBwybSlie6bV7UUu2mu1anFMlGUo8M0jouM/mP9BWS6MiHSdyPkK26KqLwvET30L+RnqLNa2YRYaGDq0UHt3mvTt8aVFXIVwgtRWhTbfdjopUXABJNhvPhUzg6hNLY/WXw2HIK++w4+ArLH6SLLqsOTlPSccfAVH4fDviZTFEQpXpE7gOysbJy3fLyKO++WWIV9K65iwWD9ZmzSAjDIfakb6soVFiVZdkA6ojea8sLCkUGsSPKg2NB3j2/eve+QCRhrEfop3K0sehUQUUJnJyewOs1imWwxQHsxwt5vQC4djGB6yesHC1NlZGETMWd9qy9ygKzkxK5R03y9+nkQYs0itLzcUOrUbiPH70DMJGeIXxZHPQ7gPNqCGQiJznlboy92lZmJhVssy7Wl3ZvP8UAJMoRhCc0R65j7vwpERmLIWPql7h+N/PhWQIkGeNciJ00tbPSzLbX5bwHYIbce2gDTx2BXEgM51coFoso6Q8ftUIyPFLqZ1Mc28rwPzqzn2dhJz2fqz3K8psIuI/DvYz2vrz2dl6pZeFXkrv2f2NrtcCNw2kpIubLeRNw7RUpDioZxeNwT2HYaiZNHSqW1AzLH0gx3/A1qLIrKGIaMjZzhY1mK3Nwu0l1x/wDf5H9NVnHZlnovUHHjMRELLIbdh21sLpOQdONT4i4qxX41jy920yDxbPjuSl6L1HjSY/6J/wB1I6VHCEj4tT/xbD17/wCGR9Nb9EhfbanUS+kpmFlRF8bXrVlnlce1lJHxsKr2eN0LtBOTJrFn89iWm0hBDcXzsOAqJxWLlxNw5ypvsDYV5RBpphDDGzsfkorcw2ijMxeVjljPOTg3hSFHNzvd+WJLdVXHdmlhsO+KbMAVgGxpez4VOQYWKCJI5NiLthbi3xr1QRpFrFjy4cbDDbjWZIjs0ozq/QXfkrXxsWvHjqC/z8ladjm9sbFzIHcAYsDmINxHm/GhcwctFtmPWqdw+FKzKwhLXmO1ZuCjs89tAvISiHI6dOTv1ZICXIEZYyThT1jHeD5tQQhjVZDbDDq24k+b0wysDKi5Yk6cXeoJCrrWXPC3Rit0aABQoQpG2bDnpyH3fOykwVoxHIcuGHRl4sfN6alWQvEMuHHTQ7zQSgjEji+GPRj4g+b0ANizsrSjLKvVoPfpZmEusC/irWMfC3bTbMpUSnNK3VsPdoyvrDGG/FAbZOBFACWyBhFz1brSfc7aQCarVZvwu/W8b1ktmzGHmBetHerHMmr1mX8N/wBPjegBsA4VZuYiH2R7/ZXlNAs0msljHrVrLFwI83r2ayZdbzlbqv00ZX1ojZr4oi6ycAPN6AIx9Dx3LwFta3TQHYnwrxOjZ1ddXiNZABz5GA5vnZUyt3dli5sqn2jH3qxDKyGRFywLsePtpE8Wm3vKCZNTmuGQowOKV7u0Qi92Sx20LgMcVIcxpL7i2vmqbJRYxI4LQE2ROIosyuqSHNM/VuPdpK8NxV+xEvOs+yHXReIa2aW8w6UQAFh58a2YdFQIpykzo3WOxuU+H3rfszSGNDlxI6UnA0KVkVjEMkadYvep9ePVX7IpEHOT5Z5rDEsIhGzCjdLxv2V6Hn5dbzCnVfr87KRZdXrCv4Xdq+N6ZumXWnPm6r9PnZTiIEvrNaVAxVtkfC3bTW6MzRAO7dap9yghhIIy18TbZJwt2UKC7MIjkdetYjp0AYgIIjGrXwp6Uh3g+bU2syqkpyRL1bj3qQKGMyKtsMOlHxJ82ptZVVpefC3VrxWgAYu0iySrlxA6tB73nbTBYOXQAznpx92ghg4R2vOercbloAZnaONss69N+9QB/9
k= diff --git a/unsupported/python3-playground/1.0.0/requirements.txt b/unsupported/python3-playground/1.0.0/requirements.txt deleted file mode 100644 index fd7d3e06..00000000 --- a/unsupported/python3-playground/1.0.0/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -requests==2.25.1 \ No newline at end of file diff --git a/unsupported/python3-playground/1.0.0/src/app.py b/unsupported/python3-playground/1.0.0/src/app.py deleted file mode 100644 index 356a6cfe..00000000 --- a/unsupported/python3-playground/1.0.0/src/app.py +++ /dev/null @@ -1,51 +0,0 @@ -import socket -import asyncio -import time -import random -import json - -from walkoff_app_sdk.app_base import AppBase - -class PythonPlayground(AppBase): - __version__ = "1.0.0" - app_name = "python_playground" # this needs to match "name" in api.yaml - - def __init__(self, redis, logger, console_logger=None): - """ - Each app should have this __init__ to set up Redis and logging. - :param redis: - :param logger: - :param console_logger: - """ - super().__init__(redis, logger, console_logger) - - def run_me_1(self, json_data): - return "Ran function 1" - - def run_me_2(self, json_data): - return "Ran function 2" - - def run_me_3(self, json_data): - return "Ran function 3" - - # Write your data inside this function - def run_python_script(self, json_data, function_to_execute): - # It comes in as a string, so needs to be set to JSON - if not isinstance(json_data, list) and not isinstance(json_data, object) and not isinstance(json_data, dict): - try: - json_data = json.loads(json_data) - except json.decoder.JSONDecodeError as e: - return "Couldn't decode json: %s" % e - - # These are functions - switcher = { - "function_1" : self.run_me_1, - "function_2" : self.run_me_2, - "function_3" : self.run_me_3, - } - - func = switcher.get(function_to_execute, lambda: "Invalid function") - return func(json_data) - -if __name__ == "__main__": - PythonPlayground.run() diff --git a/testing/1.0.0/Dockerfile 
b/unsupported/recordedfuture/1.0.0/Dockerfile similarity index 100% rename from testing/1.0.0/Dockerfile rename to unsupported/recordedfuture/1.0.0/Dockerfile diff --git a/recordedfuture/1.0.0/api.yaml b/unsupported/recordedfuture/1.0.0/api.yaml similarity index 100% rename from recordedfuture/1.0.0/api.yaml rename to unsupported/recordedfuture/1.0.0/api.yaml diff --git a/unsupported/recordedfuture/1.0.0/requirements.txt b/unsupported/recordedfuture/1.0.0/requirements.txt new file mode 100644 index 00000000..480d0c4b --- /dev/null +++ b/unsupported/recordedfuture/1.0.0/requirements.txt @@ -0,0 +1 @@ +requests==2.32.4 \ No newline at end of file diff --git a/recordedfuture/1.0.0/src/app.py b/unsupported/recordedfuture/1.0.0/src/app.py similarity index 100% rename from recordedfuture/1.0.0/src/app.py rename to unsupported/recordedfuture/1.0.0/src/app.py diff --git a/twitter/1.0.0/Dockerfile b/unsupported/servicenow/1.0.0/Dockerfile similarity index 100% rename from twitter/1.0.0/Dockerfile rename to unsupported/servicenow/1.0.0/Dockerfile diff --git a/servicenow/1.0.0/api.yaml b/unsupported/servicenow/1.0.0/api.yaml similarity index 100% rename from servicenow/1.0.0/api.yaml rename to unsupported/servicenow/1.0.0/api.yaml diff --git a/unsupported/servicenow/1.0.0/requirements.txt b/unsupported/servicenow/1.0.0/requirements.txt new file mode 100644 index 00000000..480d0c4b --- /dev/null +++ b/unsupported/servicenow/1.0.0/requirements.txt @@ -0,0 +1 @@ +requests==2.32.4 \ No newline at end of file diff --git a/servicenow/1.0.0/src/app.py b/unsupported/servicenow/1.0.0/src/app.py similarity index 100% rename from servicenow/1.0.0/src/app.py rename to unsupported/servicenow/1.0.0/src/app.py diff --git a/unsupported/shuffle-subflow/1.0.0/requirements.txt b/unsupported/shuffle-subflow/1.0.0/requirements.txt index fd7d3e06..480d0c4b 100644 --- a/unsupported/shuffle-subflow/1.0.0/requirements.txt +++ b/unsupported/shuffle-subflow/1.0.0/requirements.txt @@ -1 +1 @@ 
-requests==2.25.1 \ No newline at end of file +requests==2.32.4 \ No newline at end of file diff --git a/oauth2-example/1.0.0/Dockerfile b/unsupported/splunk/1.0.0/Dockerfile similarity index 100% rename from oauth2-example/1.0.0/Dockerfile rename to unsupported/splunk/1.0.0/Dockerfile diff --git a/splunk/1.0.0/api.yaml b/unsupported/splunk/1.0.0/api.yaml similarity index 100% rename from splunk/1.0.0/api.yaml rename to unsupported/splunk/1.0.0/api.yaml diff --git a/splunk/1.0.0/docker-compose.yml b/unsupported/splunk/1.0.0/docker-compose.yml similarity index 100% rename from splunk/1.0.0/docker-compose.yml rename to unsupported/splunk/1.0.0/docker-compose.yml diff --git a/splunk/1.0.0/env.txt b/unsupported/splunk/1.0.0/env.txt similarity index 100% rename from splunk/1.0.0/env.txt rename to unsupported/splunk/1.0.0/env.txt diff --git a/splunk/1.0.0/requirements.txt b/unsupported/splunk/1.0.0/requirements.txt similarity index 56% rename from splunk/1.0.0/requirements.txt rename to unsupported/splunk/1.0.0/requirements.txt index c5a5f6ea..bb76b456 100644 --- a/splunk/1.0.0/requirements.txt +++ b/unsupported/splunk/1.0.0/requirements.txt @@ -1,2 +1,2 @@ python-magic==0.4.18 -requests==2.25.1 \ No newline at end of file +requests==2.32.4 \ No newline at end of file diff --git a/splunk/1.0.0/src/app.py b/unsupported/splunk/1.0.0/src/app.py similarity index 100% rename from splunk/1.0.0/src/app.py rename to unsupported/splunk/1.0.0/src/app.py diff --git a/unsupported/python3-playground/1.0.0/Dockerfile b/unsupported/testing/1.0.0/Dockerfile similarity index 100% rename from unsupported/python3-playground/1.0.0/Dockerfile rename to unsupported/testing/1.0.0/Dockerfile diff --git a/testing/1.0.0/api.yaml b/unsupported/testing/1.0.0/api.yaml similarity index 100% rename from testing/1.0.0/api.yaml rename to unsupported/testing/1.0.0/api.yaml diff --git a/unsupported/testing/1.0.0/requirements.txt b/unsupported/testing/1.0.0/requirements.txt new file mode 100644 index 
00000000..480d0c4b --- /dev/null +++ b/unsupported/testing/1.0.0/requirements.txt @@ -0,0 +1 @@ +requests==2.32.4 \ No newline at end of file diff --git a/thehive/1.1.0/run b/unsupported/testing/1.0.0/run old mode 100644 new mode 100755 similarity index 67% rename from thehive/1.1.0/run rename to unsupported/testing/1.0.0/run index 6127bfb7..e73f748d --- a/thehive/1.1.0/run +++ b/unsupported/testing/1.0.0/run @@ -1,9 +1,9 @@ #!/bin/sh -docker stop frikky/shuffle:thehive_1.0.0 --force -docker rm frikky/shuffle:thehive_1.0.0 --force -docker rmi frikky/shuffle:thehive_1.0.0 --force +docker stop frikky/shuffle:testing_1.0.0 --force +docker rm frikky/shuffle:testing_1.0.0 --force +docker rmi frikky/shuffle:testing_1.0.0 --force -docker build . -t frikky/shuffle:thehive_1.0.0 +docker build . -t frikky/shuffle:testing_1.0.0 echo "RUNNING!\n\n" docker run \ @@ -12,6 +12,6 @@ docker run \ --env FUNCTION_APIKEY="asdasd" \ --env EXECUTIONID="2349bf96-51ad-68d2-5ca6-75ef8f7ee814" \ --env AUTHORIZATION="8e344a2e-db51-448f-804c-eb959a32c139" \ - frikky/shuffle:thehive_1.0.0 + frikky/shuffle:testing_1.0.0 -docker push frikky/shuffle:thehive_1.0.0 +docker push frikky/shuffle:testing_1.0.0 diff --git a/testing/1.0.0/src/app.py b/unsupported/testing/1.0.0/src/app.py similarity index 100% rename from testing/1.0.0/src/app.py rename to unsupported/testing/1.0.0/src/app.py diff --git a/testing/1.0.0/tmp.py b/unsupported/testing/1.0.0/tmp.py similarity index 100% rename from testing/1.0.0/tmp.py rename to unsupported/testing/1.0.0/tmp.py diff --git a/splunk/1.0.0/Dockerfile b/unsupported/thehive/1.0.0/Dockerfile similarity index 100% rename from splunk/1.0.0/Dockerfile rename to unsupported/thehive/1.0.0/Dockerfile diff --git a/thehive/1.0.0/api.yaml b/unsupported/thehive/1.0.0/api.yaml similarity index 100% rename from thehive/1.0.0/api.yaml rename to unsupported/thehive/1.0.0/api.yaml diff --git a/oauth2-example/1.0.0/docker-compose.yml b/unsupported/thehive/1.0.0/docker-compose.yml 
similarity index 100% rename from oauth2-example/1.0.0/docker-compose.yml rename to unsupported/thehive/1.0.0/docker-compose.yml diff --git a/oauth2-example/1.0.0/env.txt b/unsupported/thehive/1.0.0/env.txt similarity index 100% rename from oauth2-example/1.0.0/env.txt rename to unsupported/thehive/1.0.0/env.txt diff --git a/oauth2-example/1.0.0/requirements.txt b/unsupported/thehive/1.0.0/requirements.txt similarity index 69% rename from oauth2-example/1.0.0/requirements.txt rename to unsupported/thehive/1.0.0/requirements.txt index 1d40c46a..175d3de0 100644 --- a/oauth2-example/1.0.0/requirements.txt +++ b/unsupported/thehive/1.0.0/requirements.txt @@ -1,3 +1,3 @@ -requests==2.25.1 +requests==2.32.4 thehive4py==1.8.1 python-magic==0.4.18 diff --git a/oauth2-example/1.0.0/run b/unsupported/thehive/1.0.0/run similarity index 100% rename from oauth2-example/1.0.0/run rename to unsupported/thehive/1.0.0/run diff --git a/thehive/1.0.0/src/app.py b/unsupported/thehive/1.0.0/src/app.py similarity index 100% rename from thehive/1.0.0/src/app.py rename to unsupported/thehive/1.0.0/src/app.py diff --git a/thehive/1.0.0/Dockerfile b/unsupported/thehive/1.1.0/Dockerfile similarity index 100% rename from thehive/1.0.0/Dockerfile rename to unsupported/thehive/1.1.0/Dockerfile diff --git a/thehive/1.1.0/api.yaml b/unsupported/thehive/1.1.0/api.yaml similarity index 100% rename from thehive/1.1.0/api.yaml rename to unsupported/thehive/1.1.0/api.yaml diff --git a/thehive/1.0.0/docker-compose.yml b/unsupported/thehive/1.1.0/docker-compose.yml similarity index 100% rename from thehive/1.0.0/docker-compose.yml rename to unsupported/thehive/1.1.0/docker-compose.yml diff --git a/thehive/1.0.0/env.txt b/unsupported/thehive/1.1.0/env.txt similarity index 100% rename from thehive/1.0.0/env.txt rename to unsupported/thehive/1.1.0/env.txt diff --git a/thehive/1.1.1/requirements.txt b/unsupported/thehive/1.1.0/requirements.txt similarity index 69% rename from thehive/1.1.1/requirements.txt 
rename to unsupported/thehive/1.1.0/requirements.txt index 1d40c46a..175d3de0 100644 --- a/thehive/1.1.1/requirements.txt +++ b/unsupported/thehive/1.1.0/requirements.txt @@ -1,3 +1,3 @@ -requests==2.25.1 +requests==2.32.4 thehive4py==1.8.1 python-magic==0.4.18 diff --git a/thehive/1.0.0/run b/unsupported/thehive/1.1.0/run similarity index 100% rename from thehive/1.0.0/run rename to unsupported/thehive/1.1.0/run diff --git a/thehive/1.1.0/src/app.py b/unsupported/thehive/1.1.0/src/app.py similarity index 100% rename from thehive/1.1.0/src/app.py rename to unsupported/thehive/1.1.0/src/app.py diff --git a/thehive/1.1.0/Dockerfile b/unsupported/thehive/1.1.1/Dockerfile similarity index 100% rename from thehive/1.1.0/Dockerfile rename to unsupported/thehive/1.1.1/Dockerfile diff --git a/thehive/1.1.1/api.yaml b/unsupported/thehive/1.1.1/api.yaml similarity index 100% rename from thehive/1.1.1/api.yaml rename to unsupported/thehive/1.1.1/api.yaml diff --git a/thehive/1.1.0/docker-compose.yml b/unsupported/thehive/1.1.1/docker-compose.yml similarity index 100% rename from thehive/1.1.0/docker-compose.yml rename to unsupported/thehive/1.1.1/docker-compose.yml diff --git a/thehive/1.1.0/env.txt b/unsupported/thehive/1.1.1/env.txt similarity index 100% rename from thehive/1.1.0/env.txt rename to unsupported/thehive/1.1.1/env.txt diff --git a/thehive/1.0.0/requirements.txt b/unsupported/thehive/1.1.1/requirements.txt similarity index 69% rename from thehive/1.0.0/requirements.txt rename to unsupported/thehive/1.1.1/requirements.txt index 1d40c46a..175d3de0 100644 --- a/thehive/1.0.0/requirements.txt +++ b/unsupported/thehive/1.1.1/requirements.txt @@ -1,3 +1,3 @@ -requests==2.25.1 +requests==2.32.4 thehive4py==1.8.1 python-magic==0.4.18 diff --git a/thehive/1.1.1/run b/unsupported/thehive/1.1.1/run similarity index 100% rename from thehive/1.1.1/run rename to unsupported/thehive/1.1.1/run diff --git a/thehive/1.1.1/src/app.py b/unsupported/thehive/1.1.1/src/app.py 
similarity index 100% rename from thehive/1.1.1/src/app.py rename to unsupported/thehive/1.1.1/src/app.py diff --git a/thehive/1.1.1/Dockerfile b/unsupported/thehive/1.1.2/Dockerfile similarity index 100% rename from thehive/1.1.1/Dockerfile rename to unsupported/thehive/1.1.2/Dockerfile diff --git a/thehive/1.1.2/api.yaml b/unsupported/thehive/1.1.2/api.yaml similarity index 100% rename from thehive/1.1.2/api.yaml rename to unsupported/thehive/1.1.2/api.yaml diff --git a/thehive/1.1.1/docker-compose.yml b/unsupported/thehive/1.1.2/docker-compose.yml similarity index 100% rename from thehive/1.1.1/docker-compose.yml rename to unsupported/thehive/1.1.2/docker-compose.yml diff --git a/thehive/1.1.1/env.txt b/unsupported/thehive/1.1.2/env.txt similarity index 100% rename from thehive/1.1.1/env.txt rename to unsupported/thehive/1.1.2/env.txt diff --git a/thehive/1.1.0/requirements.txt b/unsupported/thehive/1.1.2/requirements.txt similarity index 69% rename from thehive/1.1.0/requirements.txt rename to unsupported/thehive/1.1.2/requirements.txt index 1d40c46a..175d3de0 100644 --- a/thehive/1.1.0/requirements.txt +++ b/unsupported/thehive/1.1.2/requirements.txt @@ -1,3 +1,3 @@ -requests==2.25.1 +requests==2.32.4 thehive4py==1.8.1 python-magic==0.4.18 diff --git a/thehive/1.1.2/run b/unsupported/thehive/1.1.2/run similarity index 100% rename from thehive/1.1.2/run rename to unsupported/thehive/1.1.2/run diff --git a/thehive/1.1.2/src/app.py b/unsupported/thehive/1.1.2/src/app.py similarity index 100% rename from thehive/1.1.2/src/app.py rename to unsupported/thehive/1.1.2/src/app.py diff --git a/thehive/1.1.2/Dockerfile b/unsupported/thehive/1.1.3/Dockerfile similarity index 100% rename from thehive/1.1.2/Dockerfile rename to unsupported/thehive/1.1.3/Dockerfile diff --git a/thehive/1.1.3/api.yaml b/unsupported/thehive/1.1.3/api.yaml similarity index 100% rename from thehive/1.1.3/api.yaml rename to unsupported/thehive/1.1.3/api.yaml diff --git 
a/thehive/1.1.2/docker-compose.yml b/unsupported/thehive/1.1.3/docker-compose.yml similarity index 100% rename from thehive/1.1.2/docker-compose.yml rename to unsupported/thehive/1.1.3/docker-compose.yml diff --git a/thehive/1.1.2/env.txt b/unsupported/thehive/1.1.3/env.txt similarity index 100% rename from thehive/1.1.2/env.txt rename to unsupported/thehive/1.1.3/env.txt diff --git a/unsupported/thehive/1.1.3/requirements.txt b/unsupported/thehive/1.1.3/requirements.txt new file mode 100644 index 00000000..175d3de0 --- /dev/null +++ b/unsupported/thehive/1.1.3/requirements.txt @@ -0,0 +1,3 @@ +requests==2.32.4 +thehive4py==1.8.1 +python-magic==0.4.18 diff --git a/thehive/1.1.3/run b/unsupported/thehive/1.1.3/run similarity index 100% rename from thehive/1.1.3/run rename to unsupported/thehive/1.1.3/run diff --git a/thehive/1.1.3/src/app.py b/unsupported/thehive/1.1.3/src/app.py similarity index 100% rename from thehive/1.1.3/src/app.py rename to unsupported/thehive/1.1.3/src/app.py diff --git a/thehive/README.md b/unsupported/thehive/README.md similarity index 100% rename from thehive/README.md rename to unsupported/thehive/README.md diff --git a/thehive/conf.png b/unsupported/thehive/conf.png similarity index 100% rename from thehive/conf.png rename to unsupported/thehive/conf.png diff --git a/vulndb/1.0.0/Dockerfile b/unsupported/twitter/1.0.0/Dockerfile similarity index 100% rename from vulndb/1.0.0/Dockerfile rename to unsupported/twitter/1.0.0/Dockerfile diff --git a/twitter/1.0.0/api.yaml b/unsupported/twitter/1.0.0/api.yaml similarity index 100% rename from twitter/1.0.0/api.yaml rename to unsupported/twitter/1.0.0/api.yaml diff --git a/unsupported/twitter/1.0.0/requirements.txt b/unsupported/twitter/1.0.0/requirements.txt new file mode 100644 index 00000000..5d086241 --- /dev/null +++ b/unsupported/twitter/1.0.0/requirements.txt @@ -0,0 +1,2 @@ +requests==2.32.4 +twython==3.9.1 diff --git a/twitter/1.0.0/src/app.py b/unsupported/twitter/1.0.0/src/app.py 
similarity index 100% rename from twitter/1.0.0/src/app.py rename to unsupported/twitter/1.0.0/src/app.py diff --git a/http/1.3.0/Dockerfile b/unsupported/vulndb/1.0.0/Dockerfile similarity index 98% rename from http/1.3.0/Dockerfile rename to unsupported/vulndb/1.0.0/Dockerfile index 9bbc5110..364e1531 100644 --- a/http/1.3.0/Dockerfile +++ b/unsupported/vulndb/1.0.0/Dockerfile @@ -17,7 +17,6 @@ RUN pip install --prefix="/install" -r /requirements.txt FROM base COPY --from=builder /install /usr/local COPY src /app -RUN apk add curl # Install any binary dependencies needed in our final image # RUN apk --no-cache add --update my_binary_dependency diff --git a/vulndb/1.0.0/api.yaml b/unsupported/vulndb/1.0.0/api.yaml similarity index 100% rename from vulndb/1.0.0/api.yaml rename to unsupported/vulndb/1.0.0/api.yaml diff --git a/vulndb/1.0.0/docs.md b/unsupported/vulndb/1.0.0/docs.md similarity index 100% rename from vulndb/1.0.0/docs.md rename to unsupported/vulndb/1.0.0/docs.md diff --git a/unsupported/vulndb/1.0.0/requirements.txt b/unsupported/vulndb/1.0.0/requirements.txt new file mode 100644 index 00000000..bd6f2345 --- /dev/null +++ b/unsupported/vulndb/1.0.0/requirements.txt @@ -0,0 +1 @@ +requests==2.32.4 diff --git a/vulndb/1.0.0/shield-vulndb.svg b/unsupported/vulndb/1.0.0/shield-vulndb.svg similarity index 100% rename from vulndb/1.0.0/shield-vulndb.svg rename to unsupported/vulndb/1.0.0/shield-vulndb.svg diff --git a/vulndb/1.0.0/src/app.py b/unsupported/vulndb/1.0.0/src/app.py similarity index 100% rename from vulndb/1.0.0/src/app.py rename to unsupported/vulndb/1.0.0/src/app.py diff --git a/velociraptor/1.0.0/requirements.txt b/velociraptor/1.0.0/requirements.txt index ad6e959c..7698bd2c 100644 --- a/velociraptor/1.0.0/requirements.txt +++ b/velociraptor/1.0.0/requirements.txt @@ -1 +1 @@ -pyvelociraptor==0.1.6 +pyvelociraptor==0.1.8 diff --git a/vulndb/1.0.0/requirements.txt b/vulndb/1.0.0/requirements.txt deleted file mode 100644 index 
9d84d358..00000000 --- a/vulndb/1.0.0/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -requests==2.25.1 diff --git a/yara/1.0.0/requirements.txt b/yara/1.0.0/requirements.txt index bb17f80b..73f08b38 100644 --- a/yara/1.0.0/requirements.txt +++ b/yara/1.0.0/requirements.txt @@ -1,2 +1,2 @@ # No extra requirements needed -requests==2.25.1 +requests==2.32.4 diff --git a/yara/1.0.0/upload.sh b/yara/1.0.0/upload.sh index f59fd283..e098fa32 100644 --- a/yara/1.0.0/upload.sh +++ b/yara/1.0.0/upload.sh @@ -26,6 +26,7 @@ gcloud run deploy yara \ --region=europe-west2 \ --max-instances=1 \ --set-env-vars=SHUFFLE_APP_EXPOSED_PORT=8080,SHUFFLE_SWARM_CONFIG=run,SHUFFLE_LOGS_DISABLED=true --source=./ \ + --service-account=shuffle-apps@shuffler.iam.gserviceaccount.com \ --timeout=1800s # With image