diff --git a/gitlab/testing/__init__.py b/gitlab/testing/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/gitlab/testing/docker/.env b/gitlab/testing/docker/.env new file mode 100644 index 000000000..f33c35752 --- /dev/null +++ b/gitlab/testing/docker/.env @@ -0,0 +1,4 @@ +GITLAB_IMAGE=gitlab/gitlab-ee +GITLAB_TAG=18.7.0-ee.0 +GITLAB_RUNNER_IMAGE=gitlab/gitlab-runner +GITLAB_RUNNER_TAG=96856197 diff --git a/gitlab/testing/docker/__init__.py b/gitlab/testing/docker/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/gitlab/testing/docker/create_license.rb b/gitlab/testing/docker/create_license.rb new file mode 100644 index 000000000..04ddb4533 --- /dev/null +++ b/gitlab/testing/docker/create_license.rb @@ -0,0 +1,51 @@ +# NOTE: As of 2022-06-01 the GitLab Enterprise Edition License has the following +# section: +# Notwithstanding the foregoing, you may copy and modify the Software for development +# and testing purposes, without requiring a subscription. +# +# https://gitlab.com/gitlab-org/gitlab/-/blob/29503bc97b96af8d4876dc23fc8996e3dab7d211/ee/LICENSE +# +# This code is strictly intended for use in the testing framework of python-gitlab + +# Code inspired by MIT licensed code at: https://github.com/CONIGUERO/gitlab-license.git + +require 'openssl' +require 'gitlab/license' + +# Generate a 2048 bit key pair. +license_encryption_key = OpenSSL::PKey::RSA.generate(2048) + +# Save the private key +File.open("/.license_encryption_key", "w") { |f| f.write(license_encryption_key.to_pem) } +# Save the public key +public_key = license_encryption_key.public_key +File.open("/.license_encryption_key.pub", "w") { |f| f.write(public_key.to_pem) } +File.open("/opt/gitlab/embedded/service/gitlab-rails/.license_encryption_key.pub", "w") { |f| f.write(public_key.to_pem) } + +Gitlab::License.encryption_key = license_encryption_key + +# Build a new license. +license = Gitlab::License.new + +license.licensee = { + "Name" => "python-gitlab-ci", + "Company" => "python-gitlab-ci", + "Email" => "python-gitlab-ci@example.com", +} + +# The date the license starts. +license.starts_at = Date.today +# Want to make sure we get at least 1 day of usage. Do two days after because if CI +# started at 23:59 we could be expired in one minute if we only did one next_day. +license.expires_at = Date.today.next_day.next_day + +# Use 'ultimate' plan so that we can test all features in the CI +license.restrictions = { + :plan => "ultimate", + :id => rand(1000..99999999) +} + +# Export the license, which encrypts and encodes it. 
+data = license.export + +File.open("/python-gitlab-ci.gitlab-license", 'w') { |file| file.write(data) } diff --git a/gitlab/testing/docker/docker-compose.yml b/gitlab/testing/docker/docker-compose.yml new file mode 100644 index 000000000..132c69976 --- /dev/null +++ b/gitlab/testing/docker/docker-compose.yml @@ -0,0 +1,52 @@ +version: '3.5' + +networks: + gitlab-network: + name: gitlab-network + +services: + gitlab: + image: '${GITLAB_IMAGE:-gitlab/gitlab-ee}:${GITLAB_TAG:-latest}' + container_name: 'gitlab-test' + hostname: 'gitlab.test' + privileged: true # Just in case https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/1350 + environment: + GITLAB_ROOT_PASSWORD: 5iveL!fe + GITLAB_OMNIBUS_CONFIG: | + external_url 'http://127.0.0.1:8080' + registry['enable'] = false + nginx['redirect_http_to_https'] = false + nginx['listen_port'] = 80 + nginx['listen_https'] = false + pages_external_url 'http://pages.gitlab.lxd' + gitlab_pages['enable'] = true + gitlab_pages['inplace_chroot'] = true + prometheus['enable'] = false + alertmanager['enable'] = false + node_exporter['enable'] = false + redis_exporter['enable'] = false + postgres_exporter['enable'] = false + pgbouncer_exporter['enable'] = false + gitlab_exporter['enable'] = false + letsencrypt['enable'] = false + gitlab_rails['initial_license_file'] = '/python-gitlab-ci.gitlab-license' + gitlab_rails['monitoring_whitelist'] = ['0.0.0.0/0'] + entrypoint: + - /bin/sh + - -c + - ruby /create_license.rb && /assets/init-container + volumes: + - ${PWD}/tests/functional/fixtures/create_license.rb:/create_license.rb + ports: + - '8080:80' + - '2222:22' + networks: + - gitlab-network + + gitlab-runner: + image: '${GITLAB_RUNNER_IMAGE:-gitlab/gitlab-runner}:${GITLAB_RUNNER_TAG:-latest}' + container_name: 'gitlab-runner-test' + depends_on: + - gitlab + networks: + - gitlab-network diff --git a/gitlab/testing/docker/docker.py b/gitlab/testing/docker/docker.py new file mode 100644 index 000000000..c9313c1b2 --- /dev/null +++ b/gitlab/testing/docker/docker.py @@ -0,0 +1,26 @@ +""" +pytest-docker fixture overrides. +See https://github.com/avast/pytest-docker#available-fixtures. +""" + +import pytest + + +@pytest.fixture(scope="session") +def docker_compose_project_name(): + """Set a consistent project name to enable optional reuse of containers.""" + return "pytest-python-gitlab" + + +@pytest.fixture(scope="session") +def docker_compose_file(docker_assets_dir): + return docker_assets_dir / "docker-compose.yml" + + +@pytest.fixture(scope="session") +def docker_cleanup(request): + """Conditionally keep containers around by overriding the cleanup command.""" + if request.config.getoption("--keep-containers"): + # Print version and exit. + return "-v" + return "down -v" diff --git a/gitlab/testing/docker/install_docker.sh b/gitlab/testing/docker/install_docker.sh new file mode 100644 index 000000000..648e4cdfa --- /dev/null +++ b/gitlab/testing/docker/install_docker.sh @@ -0,0 +1,11 @@ +apt-get update +apt-get install -y apt-transport-https ca-certificates curl gnupg2 software-properties-common +curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - +apt-get update +echo \ + "deb [arch=$(dpkg --print-architecture)] https://download.docker.com/linux/ubuntu \ + $(. 
/etc/os-release && echo "$VERSION_CODENAME") stable" | \ + tee /etc/apt/sources.list.d/docker.list > /dev/null +apt-get update +apt-get install -y docker-ce docker-compose +usermod -aG docker gitlab-runner \ No newline at end of file diff --git a/gitlab/testing/docker/set_token.rb b/gitlab/testing/docker/set_token.rb new file mode 100644 index 000000000..eec4e03ec --- /dev/null +++ b/gitlab/testing/docker/set_token.rb @@ -0,0 +1,9 @@ +# https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html#programmatically-creating-a-personal-access-token + +user = User.find_by_username('root') + +token = user.personal_access_tokens.first_or_create(scopes: ['api', 'sudo'], name: 'default', expires_at: 365.days.from_now); +token.set_token('glpat-python-gitlab-token_'); +token.save! + +puts token.token diff --git a/gitlab/testing/fixtures/__init__.py b/gitlab/testing/fixtures/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/gitlab/testing/fixtures/gitlab.py b/gitlab/testing/fixtures/gitlab.py new file mode 100644 index 000000000..27ce42e75 --- /dev/null +++ b/gitlab/testing/fixtures/gitlab.py @@ -0,0 +1,662 @@ +from __future__ import annotations + +import dataclasses +import datetime +import logging +import pathlib +import tempfile +import time +import uuid +from subprocess import check_output +from typing import Sequence, TYPE_CHECKING + +import pytest +import requests + +import gitlab +import gitlab.base +from gitlab.testing import helpers +from gitlab.testing.docker import * # noqa: F401,F403 + +SLEEP_TIME = 10 + + +@dataclasses.dataclass +class GitlabVersion: + major: int + minor: int + patch: str + revision: str + + def __post_init__(self): + self.major, self.minor = int(self.major), int(self.minor) + + +@pytest.fixture(scope="session") +def gitlab_version(gl) -> GitlabVersion: + version, revision = gl.version() + major, minor, patch = version.split(".") + return GitlabVersion(major=major, minor=minor, patch=patch, revision=revision) + + +@pytest.fixture(scope="session") +def docker_assets_dir() -> pathlib.Path: + import gitlab.testing + + return pathlib.Path(gitlab.testing.__file__).parent / "docker" + + +@pytest.fixture(scope="session") +def gitlab_service_name() -> str: + """The "service" name is the one defined in the `docker-compose.yml` file""" + return "gitlab" + + +@pytest.fixture(scope="session") +def gitlab_container_name() -> str: + """The "container" name is the one defined in the `docker-compose.yml` file + for the "gitlab" service""" + return "gitlab-test" + + +@pytest.fixture(scope="session") +def gitlab_docker_port(docker_services, gitlab_service_name: str) -> int: + port: int = docker_services.port_for(gitlab_service_name, container_port=80) + return port + + +@pytest.fixture(scope="session") +def gitlab_url(docker_ip: str, gitlab_docker_port: int) -> str: + return f"http://{docker_ip}:{gitlab_docker_port}" + + +def reset_gitlab(gl: gitlab.Gitlab) -> None: + """Delete resources (such as projects, groups, users) that shouldn't + exist.""" + if helpers.get_gitlab_plan(gl): + logging.info("GitLab EE detected") + # NOTE(jlvillal, timknight): By default in GitLab EE it will wait 7 days before + # deleting a group or project. 
+        # In GL 16.0 we need to call delete with `permanently_remove=True` for projects and sub groups
+        # (handled in helpers.py safe_delete)
+        settings = gl.settings.get()
+        modified_settings = False
+        if settings.deletion_adjourned_period != 1:
+            logging.info("Setting `deletion_adjourned_period` to 1 Day")
+            settings.deletion_adjourned_period = 1
+            modified_settings = True
+        if modified_settings:
+            settings.save()
+
+    for project in gl.projects.list():
+        for project_deploy_token in project.deploytokens.list():
+            logging.info(
+                f"Deleting deploy token: {project_deploy_token.username!r} in "
+                f"project: {project.path_with_namespace!r}"
+            )
+            helpers.safe_delete(project_deploy_token)
+        logging.info(f"Deleting project: {project.path_with_namespace!r}")
+        helpers.safe_delete(project)
+
+    for group in gl.groups.list():
+        # skip deletion of a descendant group to prevent scenarios where parent group
+        # gets deleted leaving a dangling descendant whose deletion will throw 404s.
+        if group.parent_id:
+            logging.info(
+                f"Skipping deletion of {group.full_path} as it is a descendant "
+                f"group and will be removed when the parent group is deleted"
+            )
+            continue
+
+        for group_deploy_token in group.deploytokens.list():
+            logging.info(
+                f"Deleting deploy token: {group_deploy_token.username!r} in "
+                f"group: {group.full_path!r}"
+            )
+            helpers.safe_delete(group_deploy_token)
+        logging.info(f"Deleting group: {group.full_path!r}")
+        helpers.safe_delete(group)
+    for topic in gl.topics.list():
+        logging.info(f"Deleting topic: {topic.name!r}")
+        helpers.safe_delete(topic)
+    for variable in gl.variables.list():
+        logging.info(f"Deleting variable: {variable.key!r}")
+        helpers.safe_delete(variable)
+    for user in gl.users.list():
+        if user.username not in ["root", "ghost"]:
+            logging.info(f"Deleting user: {user.username!r}")
+            helpers.safe_delete(user)
+
+
+def set_token(container: str, docker_assets_dir: pathlib.Path) -> str:
+    logging.info("Creating API token.")
+    set_token_rb = docker_assets_dir / "set_token.rb"
+
+    with open(set_token_rb, encoding="utf-8") as f:
+        set_token_command = f.read().strip()
+
+    rails_command = [
+        "docker",
+        "exec",
+        container,
+        "gitlab-rails",
+        "runner",
+        set_token_command,
+    ]
+    output = check_output(rails_command).decode().strip()
+    logging.info("Finished creating API token.")
+
+    return output
+
+
+def pytest_report_collectionfinish(
+    config: pytest.Config, start_path: pathlib.Path, items: Sequence[pytest.Item]
+):
+    return [
+        "",
+        "Starting GitLab container.",
+        "Waiting for GitLab to reconfigure.",
+        "This will take a few minutes.",
+    ]
+
+
+def pytest_addoption(parser):
+    parser.addoption(
+        "--keep-containers",
+        action="store_true",
+        help="Keep containers running after testing",
+    )
+
+
+@pytest.fixture(scope="session")
+def temp_dir() -> pathlib.Path:
+    return pathlib.Path(tempfile.gettempdir())
+
+
+@pytest.fixture(scope="session")
+def check_is_alive():
+    """
+    Return a healthcheck function fixture for the GitLab container spinup.
+    """
+
+    def _check(*, container: str, start_time: float, gitlab_url: str) -> bool:
+        setup_time = time.perf_counter() - start_time
+        minutes, seconds = int(setup_time / 60), int(setup_time % 60)
+        logging.info(
+            f"Checking if GitLab container is up. "
+            f"Have been checking for {minutes} minute(s), {seconds} seconds ..."
+        )
+        logs = ["docker", "logs", container]
+        if "gitlab Reconfigured!" not in check_output(logs).decode():
+            return False
+        logging.debug("GitLab has finished reconfiguring.")
+        for check in ("health", "readiness", "liveness"):
+            url = f"{gitlab_url}/-/{check}"
+            logging.debug(f"Checking {check!r} endpoint at: {url}")
+            try:
+                result = requests.get(url, timeout=1.0)
+            except requests.exceptions.Timeout:
+                logging.info(f"{check!r} check timed out")
+                return False
+            if result.status_code != 200:
+                logging.info(f"{check!r} check did not return 200: {result!r}")
+                return False
+            logging.debug(f"{check!r} check passed: {result!r}")
+        logging.debug(f"Sleeping for {SLEEP_TIME}")
+        time.sleep(SLEEP_TIME)
+        return True
+
+    return _check
+
+
+@pytest.fixture(scope="session")
+def gitlab_token(
+    check_is_alive,
+    gitlab_container_name: str,
+    gitlab_url: str,
+    docker_services,
+    docker_assets_dir: pathlib.Path,
+) -> str:
+    start_time = time.perf_counter()
+    logging.info("Waiting for GitLab container to become ready.")
+    docker_services.wait_until_responsive(
+        timeout=300,
+        pause=10,
+        check=lambda: check_is_alive(
+            container=gitlab_container_name,
+            start_time=start_time,
+            gitlab_url=gitlab_url,
+        ),
+    )
+    setup_time = time.perf_counter() - start_time
+    minutes, seconds = int(setup_time / 60), int(setup_time % 60)
+    logging.info(
+        f"GitLab container is now ready after {minutes} minute(s), {seconds} seconds"
+    )
+
+    return set_token(gitlab_container_name, docker_assets_dir=docker_assets_dir)
+
+
+@pytest.fixture(scope="session")
+def gitlab_config(gitlab_url: str, gitlab_token: str, temp_dir: pathlib.Path):
+    config_file = temp_dir / "python-gitlab.cfg"
+
+    config = f"""[global]
+default = local
+timeout = 60
+
+[local]
+url = {gitlab_url}
+private_token = {gitlab_token}
+api_version = 4"""
+
+    with open(config_file, "w", encoding="utf-8") as f:
+        f.write(config)
+
+    return config_file
+
+
+@pytest.fixture(scope="session")
+def gl(gitlab_url: str, gitlab_token: str) -> gitlab.Gitlab:
+    """Helper instance to make fixtures and asserts directly via the API."""
+
+    logging.info("Instantiating python-gitlab gitlab.Gitlab instance")
+    instance = gitlab.Gitlab(gitlab_url, private_token=gitlab_token)
+    instance.auth()
+
+    logging.info("Reset GitLab")
+    reset_gitlab(instance)
+
+    return instance
+
+
+@pytest.fixture(scope="session")
+def gitlab_plan(gl: gitlab.Gitlab) -> str | None:
+    return helpers.get_gitlab_plan(gl)
+
+
+@pytest.fixture(autouse=True)
+def gitlab_premium(gitlab_plan, request) -> None:
+    if gitlab_plan in ("premium", "ultimate"):
+        return
+
+    if request.node.get_closest_marker("gitlab_premium"):
+        pytest.skip("Test requires GitLab Premium plan")
+
+
+@pytest.fixture(autouse=True)
+def gitlab_ultimate(gitlab_plan, request) -> None:
+    if gitlab_plan == "ultimate":
+        return
+
+    if request.node.get_closest_marker("gitlab_ultimate"):
+        pytest.skip("Test requires GitLab Ultimate plan")
+
+
+@pytest.fixture(scope="session")
+def gitlab_runner(gl: gitlab.Gitlab):
+    container = "gitlab-runner-test"
+    runner_description = "python-gitlab-runner"
+    if TYPE_CHECKING:
+        assert gl.user is not None
+
+    runner = gl.user.runners.create(
+        {"runner_type": "instance_type", "run_untagged": True}
+    )
+    url = "http://gitlab"
+
+    docker_exec = ["docker", "exec", container, "gitlab-runner"]
+    register = [
+        "register",
+        "--non-interactive",
+        "--token",
+        runner.token,
+        "--description",
+        runner_description,
+        "--url",
+        url,
+        "--clone-url",
+        url,
+        "--executor",
+        "shell",
+    ]
+
+    yield check_output(docker_exec + register).decode()
+
+    gl.runners.delete(token=runner.token)
+ + +@pytest.fixture(scope="module") +def group(gl): + """Group fixture for group API resource tests.""" + _id = uuid.uuid4().hex + data = {"name": f"test-group-{_id}", "path": f"group-{_id}"} + group = gl.groups.create(data) + + yield group + + helpers.safe_delete(group) + + +@pytest.fixture(scope="module") +def project(gl): + """Project fixture for project API resource tests.""" + _id = uuid.uuid4().hex + name = f"test-project-{_id}" + + project = gl.projects.create(name=name) + + yield project + + helpers.safe_delete(project) + + +@pytest.fixture(scope="function") +def make_merge_request(project): + """Fixture factory used to create a merge_request. + + It will create a branch, add a commit to the branch, and then create a + merge request against project.default_branch. The MR will be returned. + + When finished any created merge requests and branches will be deleted. + + NOTE: No attempt is made to restore project.default_branch to its previous + state. So if the merge request is merged then its content will be in the + project.default_branch branch. + """ + + to_delete = [] + + def _make_merge_request(*, source_branch: str, create_pipeline: bool = False): + # Wait for processes to be done before we start... + # NOTE(jlvillal): Sometimes the CI would give a "500 Internal Server + # Error". Hoping that waiting until all other processes are done will + # help with that. + # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge) + time.sleep(30) + + project.refresh() # Gets us the current default branch + logging.info(f"Creating branch {source_branch}") + mr_branch = project.branches.create( + {"branch": source_branch, "ref": project.default_branch} + ) + # NOTE(jlvillal): Must create a commit in the new branch before we can + # create an MR that will work. 
+ project.files.create( + { + "file_path": f"README.{source_branch}", + "branch": source_branch, + "content": "Initial content", + "commit_message": "New commit in new branch", + } + ) + + if create_pipeline: + project.files.create( + { + "file_path": ".gitlab-ci.yml", + "branch": source_branch, + "content": """ +test: + rules: + - if: '$CI_PIPELINE_SOURCE == "merge_request_event"' + script: + - sleep 24h # We don't expect this to finish +""", + "commit_message": "Add a simple pipeline", + } + ) + mr = project.mergerequests.create( + { + "source_branch": source_branch, + "target_branch": project.default_branch, + "title": "Should remove source branch", + "remove_source_branch": True, + } + ) + + # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge) + time.sleep(5) + + mr_iid = mr.iid + for _ in range(60): + mr = project.mergerequests.get(mr_iid) + if ( + mr.detailed_merge_status == "checking" + or mr.detailed_merge_status == "unchecked" + ): + time.sleep(0.5) + else: + break + + assert mr.detailed_merge_status != "checking" + assert mr.detailed_merge_status != "unchecked" + + to_delete.extend([mr, mr_branch]) + return mr + + yield _make_merge_request + + for object in to_delete: + helpers.safe_delete(object) + + +@pytest.fixture(scope="function") +def merge_request(make_merge_request, project): + _id = uuid.uuid4().hex + return make_merge_request(source_branch=f"branch-{_id}") + + +@pytest.fixture(scope="function") +def merge_request_with_pipeline(make_merge_request, project): + _id = uuid.uuid4().hex + return make_merge_request(source_branch=f"branch-{_id}", create_pipeline=True) + + +@pytest.fixture(scope="module") +def project_file(project): + """File fixture for tests requiring a project with files and branches.""" + project_file = project.files.create( + { + "file_path": "README", + "branch": "main", + "content": "Initial content", + "commit_message": "Initial commit", + } + ) + + return project_file + + +@pytest.fixture(scope="function") +def release(project, project_file): + _id = uuid.uuid4().hex + name = f"we_have_a_slash/test-release-{_id}" + + project.refresh() # Gets us the current default branch + release = project.releases.create( + { + "name": name, + "tag_name": _id, + "description": "description", + "ref": project.default_branch, + } + ) + + return release + + +@pytest.fixture(scope="function") +def service(project): + """This is just a convenience fixture to make test cases slightly prettier. Project + services are not idempotent. A service cannot be retrieved until it is enabled. 
+ After it is enabled the first time, it can never be fully deleted, only disabled.""" + service = project.services.update("asana", {"api_key": "api_key"}) + + yield service + + try: + project.services.delete("asana") + except gitlab.exceptions.GitlabDeleteError as e: + print(f"Service already disabled: {e}") + + +@pytest.fixture(scope="module") +def user(gl): + """User fixture for user API resource tests.""" + _id = uuid.uuid4().hex + email = f"user{_id}@email.com" + username = f"user{_id}" + name = f"User {_id}" + password = "E4596f8be406Bc3a14a4ccdb1df80587" + + user = gl.users.create(email=email, username=username, name=name, password=password) + + yield user + + helpers.safe_delete(user) + + +@pytest.fixture(scope="module") +def issue(project): + """Issue fixture for issue API resource tests.""" + _id = uuid.uuid4().hex + data = {"title": f"Issue {_id}", "description": f"Issue {_id} description"} + + return project.issues.create(data) + + +@pytest.fixture(scope="module") +def milestone(project): + _id = uuid.uuid4().hex + data = {"title": f"milestone{_id}"} + + return project.milestones.create(data) + + +@pytest.fixture(scope="module") +def label(project): + """Label fixture for project label API resource tests.""" + _id = uuid.uuid4().hex + data = { + "name": f"prjlabel{_id}", + "description": f"prjlabel1 {_id} description", + "color": "#112233", + } + + return project.labels.create(data) + + +@pytest.fixture(scope="module") +def group_label(group): + """Label fixture for group label API resource tests.""" + _id = uuid.uuid4().hex + data = { + "name": f"grplabel{_id}", + "description": f"grplabel1 {_id} description", + "color": "#112233", + } + + return group.labels.create(data) + + +@pytest.fixture(scope="module") +def epic(group): + """Fixture for group epic API resource tests.""" + _id = uuid.uuid4().hex + return group.epics.create({"title": f"epic-{_id}", "description": f"Epic {_id}"}) + + +@pytest.fixture(scope="module") +def variable(project): + """Variable fixture for project variable API resource tests.""" + _id = uuid.uuid4().hex + data = {"key": f"var{_id}", "value": f"Variable {_id}"} + + return project.variables.create(data) + + +@pytest.fixture(scope="module") +def deploy_token(project): + """Deploy token fixture for project deploy token API resource tests.""" + _id = uuid.uuid4().hex + data = { + "name": f"token-{_id}", + "username": "root", + "expires_at": datetime.date.today().isoformat(), + "scopes": "read_registry", + } + + return project.deploytokens.create(data) + + +@pytest.fixture(scope="module") +def group_deploy_token(group): + """Deploy token fixture for group deploy token API resource tests.""" + _id = uuid.uuid4().hex + data = { + "name": f"group-token-{_id}", + "username": "root", + "expires_at": datetime.date.today().isoformat(), + "scopes": "read_registry", + } + + return group.deploytokens.create(data) + + +@pytest.fixture(scope="session") +def GPG_KEY(): + return """-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQENBFn5mzYBCADH6SDVPAp1zh/hxmTi0QplkOfExBACpuY6OhzNdIg+8/528b3g +Y5YFR6T/HLv/PmeHskUj21end1C0PNG2T9dTx+2Vlh9ISsSG1kyF9T5fvMR3bE0x +Dl6S489CXZrjPTS9SHk1kF+7dwjUxLJyxF9hPiSihFefDFu3NeOtG/u8vbC1mewQ +ZyAYue+mqtqcCIFFoBz7wHKMWjIVSJSyTkXExu4OzpVvy3l2EikbvavI3qNz84b+ +Mgkv/kiBlNoCy3CVuPk99RYKZ3lX1vVtqQ0OgNGQvb4DjcpyjmbKyibuZwhDjIOh +au6d1OyEbayTntd+dQ4j9EMSnEvm/0MJ4eXPABEBAAG0G0dpdGxhYlRlc3QxIDxm +YWtlQGZha2UudGxkPokBNwQTAQgAIQUCWfmbNgIbAwULCQgHAgYVCAkKCwIEFgID +AQIeAQIXgAAKCRBgxELHf8f3hF3yB/wNJlWPKY65UsB4Lo0hs1OxdxCDqXogSi0u 
+6crDEIiyOte62pNZKzWy8TJcGZvznRTZ7t8hXgKFLz3PRMcl+vAiRC6quIDUj+2V +eYfwaItd1lUfzvdCaC7Venf4TQ74f5vvNg/zoGwE6eRoSbjlLv9nqsxeA0rUBUQL +LYikWhVMP3TrlfgfduYvh6mfgh57BDLJ9kJVpyfxxx9YLKZbaas9sPa6LgBtR555 +JziUxHmbEv8XCsUU8uoFeP1pImbNBplqE3wzJwzOMSmmch7iZzrAwfN7N2j3Wj0H +B5kQddJ9dmB4BbU0IXGhWczvdpxboI2wdY8a1JypxOdePoph/43iuQENBFn5mzYB +CADnTPY0Zf3d9zLjBNgIb3yDl94uOcKCq0twNmyjMhHzGqw+UMe9BScy34GL94Al +xFRQoaL+7P8hGsnsNku29A/VDZivcI+uxTx4WQ7OLcn7V0bnHV4d76iky2ufbUt/ +GofthjDs1SonePO2N09sS4V4uK0d5N4BfCzzXgvg8etCLxNmC9BGt7AaKUUzKBO4 +2QvNNaC2C/8XEnOgNWYvR36ylAXAmo0sGFXUsBCTiq1fugS9pwtaS2JmaVpZZ3YT +pMZlS0+SjC5BZYFqSmKCsA58oBRzCxQz57nR4h5VEflgD+Hy0HdW0UHETwz83E6/ +U0LL6YyvhwFr6KPq5GxinSvfABEBAAGJAR8EGAEIAAkFAln5mzYCGwwACgkQYMRC +x3/H94SJgwgAlKQb10/xcL/epdDkR7vbiei7huGLBpRDb/L5fM8B5W77Qi8Xmuqj +cCu1j99ZCA5hs/vwVn8j8iLSBGMC5gxcuaar/wtmiaEvT9fO/h6q4opG7NcuiJ8H +wRj8ccJmRssNqDD913PLz7T40Ts62blhrEAlJozGVG/q7T3RAZcskOUHKeHfc2RI +YzGsC/I9d7k6uxAv1L9Nm5F2HaAQDzhkdd16nKkGaPGR35cT1JLInkfl5cdm7ldN +nxs4TLO3kZjUTgWKdhpgRNF5hwaz51ZjpebaRf/ZqRuNyX4lIRolDxzOn/+O1o8L +qG2ZdhHHmSK2LaQLFiSprUkikStNU9BqSQ== +=5OGa +-----END PGP PUBLIC KEY BLOCK-----""" + + +@pytest.fixture(scope="session") +def SSH_KEY(): + return ( + "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDZAjAX8vTiHD7Yi3/EzuVaDChtih" + "79HyJZ6H9dEqxFfmGA1YnncE0xujQ64TCebhkYJKzmTJCImSVkOu9C4hZgsw6eE76n" + "+Cg3VwEeDUFy+GXlEJWlHaEyc3HWioxgOALbUp3rOezNh+d8BDwwqvENGoePEBsz5l" + "a6WP5lTi/HJIjAl6Hu+zHgdj1XVExeH+S52EwpZf/ylTJub0Bl5gHwf/siVE48mLMI" + "sqrukXTZ6Zg+8EHAIvIQwJ1dKcXe8P5IoLT7VKrbkgAnolS0I8J+uH7KtErZJb5oZh" + "S4OEwsNpaXMAr+6/wWSpircV2/e7sFLlhlKBC4Iq1MpqlZ7G3p foo@bar" + ) + + +@pytest.fixture(scope="session") +def DEPLOY_KEY(): + return ( + "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFdRyjJQh+1niBpXqE2I8dzjG" + "MXFHlRjX9yk/UfOn075IdaockdU58sw2Ai1XIWFpZpfJkW7z+P47ZNSqm1gzeXI" + "rtKa9ZUp8A7SZe8vH4XVn7kh7bwWCUirqtn8El9XdqfkzOs/+FuViriUWoJVpA6" + "WZsDNaqINFKIA5fj/q8XQw+BcS92L09QJg9oVUuH0VVwNYbU2M2IRmSpybgC/gu" + "uWTrnCDMmLItksATifLvRZwgdI8dr+q6tbxbZknNcgEPrI2jT0hYN9ZcjNeWuyv" + "rke9IepE7SPBT41C+YtUX4dfDZDmczM1cE0YL/krdUCfuZHMa4ZS2YyNd6slufc" + "vn bar@foo" + ) diff --git a/gitlab/testing/fixtures/helpers.py b/gitlab/testing/fixtures/helpers.py new file mode 100644 index 000000000..9d313e540 --- /dev/null +++ b/gitlab/testing/fixtures/helpers.py @@ -0,0 +1,73 @@ +from __future__ import annotations + +import logging +import time +from typing import TYPE_CHECKING + +import pytest + +import gitlab +import gitlab.base +import gitlab.exceptions +import gitlab.v4.objects + +SLEEP_INTERVAL = 0.5 +TIMEOUT = 60 # seconds before timeout will occur +MAX_ITERATIONS = int(TIMEOUT / SLEEP_INTERVAL) + + +def get_gitlab_plan(gl: gitlab.Gitlab) -> str | None: + """Determine the license available on the GitLab instance""" + try: + license = gl.get_license() + except gitlab.exceptions.GitlabLicenseError: + # Without a license we assume only Free features are available + return None + + if TYPE_CHECKING: + assert isinstance(license["plan"], str) + return license["plan"] + + +def safe_delete(object: gitlab.base.RESTObject) -> None: + """Ensure the object specified can not be retrieved. 
If object still exists after + timeout period, fail the test""" + manager = object.manager + for index in range(MAX_ITERATIONS): + try: + object = manager.get(object.get_id()) # type: ignore[attr-defined] + except gitlab.exceptions.GitlabGetError: + return + # If object is already marked for deletion we have succeeded + if getattr(object, "marked_for_deletion_on", None) is not None: + # 'Group' and 'Project' objects have a 'marked_for_deletion_on' attribute + logging.info(f"{object!r} is marked for deletion.") + return + + if index: + logging.info(f"Attempt {index + 1} to delete {object!r}.") + try: + if isinstance(object, gitlab.v4.objects.User): + # You can't use this option if the selected user is the sole owner of any groups + # Use `hard_delete=True` or a 'Ghost User' may be created. + # https://docs.gitlab.com/ee/api/users.html#user-deletion + object.delete(hard_delete=True) + if index > 1: + # If User is the sole owner of any group it won't be deleted, + # which combined with parents group never immediately deleting in GL 16 + # we shouldn't cause test to fail if it still exists + return + elif isinstance(object, gitlab.v4.objects.Project): + # Starting in GitLab 18, projects can't be immediately deleted. + # So this will mark it for deletion. + object.delete() + else: + # We only attempt to delete parent groups to prevent dangling sub-groups + # However parent groups can only be deleted on a delay in GitLab 16 + # https://docs.gitlab.com/ee/api/groups.html#remove-group + object.delete() + except gitlab.exceptions.GitlabDeleteError: + logging.exception(f"Error attempting to delete: {object.pformat()}") + + time.sleep(SLEEP_INTERVAL) + pytest.fail(f"{object!r} was not deleted") diff --git a/gitlab/testing/fixtures/meta.py b/gitlab/testing/fixtures/meta.py new file mode 100644 index 000000000..004b00fee --- /dev/null +++ b/gitlab/testing/fixtures/meta.py @@ -0,0 +1,32 @@ +import pytest + +import gitlab + + +@pytest.fixture(autouse=True) +def mock_clean_config(monkeypatch: pytest.MonkeyPatch) -> None: + """Ensures user-defined environment variables do not interfere with tests.""" + monkeypatch.delenv("PYTHON_GITLAB_CFG", raising=False) + monkeypatch.delenv("GITLAB_PRIVATE_TOKEN", raising=False) + monkeypatch.delenv("GITLAB_URL", raising=False) + monkeypatch.delenv("CI_JOB_TOKEN", raising=False) + monkeypatch.delenv("CI_SERVER_URL", raising=False) + + +@pytest.fixture(autouse=True) +def default_files(monkeypatch: pytest.MonkeyPatch) -> None: + """Ensures user configuration files do not interfere with tests.""" + monkeypatch.setattr(gitlab.config, "_DEFAULT_FILES", []) + + +@pytest.fixture +def valid_gitlab_ci_yml() -> str: + return """--- +:test_job: + :script: echo 1 +""" + + +@pytest.fixture +def invalid_gitlab_ci_yml() -> str: + return "invalid" diff --git a/gitlab/testing/plugin.py b/gitlab/testing/plugin.py new file mode 100644 index 000000000..59a64f182 --- /dev/null +++ b/gitlab/testing/plugin.py @@ -0,0 +1,3 @@ +from .docker.docker import * # noqa +from .fixtures.gitlab import * # noqa +from .fixtures.meta import * # noqa diff --git a/pyproject.toml b/pyproject.toml index 45e8c36f8..26c418937 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,6 +23,7 @@ dependencies = [ classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", + "Framework :: Pytest", "Intended Audience :: System Administrators", "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)", "Natural Language :: English", @@ -43,11 +44,15 @@ dynamic = ["version"] 
[project.optional-dependencies] autocompletion = ["argcomplete>=1.10.0,<3"] yaml = ["PyYaml>=6.0.1"] -graphql = ["gql[httpx]>=3.5.0,<5"] +graphql = ["gql[httpx]>=3.5.0,<4"] +testing = ["attrs", "pytest", "pytest-docker"] [project.scripts] gitlab = "gitlab.cli:main" +[project.entry-points.pytest11] +gitlab = "gitlab.testing.plugin" + [project.urls] Homepage = "https://github.com/python-gitlab/python-gitlab" Changelog = "https://github.com/python-gitlab/python-gitlab/blob/main/CHANGELOG.md" @@ -78,6 +83,7 @@ skip_magic_trailing_comma = true module = [ "docs.*", "docs.ext.*", + "gitlab.testing.*", "tests.unit.*", ] ignore_errors = true @@ -108,6 +114,9 @@ exclude_commit_patterns = [ mode = "update" insertion_flag = "All versions below are listed in reverse chronological order." +[tool.pylint.main] +ignore-paths = ["gitlab/testing"] + [tool.pylint.messages_control] max-line-length = 88 jobs = 0 # Use auto-detected number of multiple processes to speed up Pylint. diff --git a/requirements-docker.txt b/requirements-docker.txt index 123a4438a..3062f9bcf 100644 --- a/requirements-docker.txt +++ b/requirements-docker.txt @@ -1,3 +1,2 @@ -r requirements.txt -r requirements-test.txt -pytest-docker==3.2.5 diff --git a/requirements-test.txt b/requirements-test.txt index df8e6f47a..e65a38224 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -6,6 +6,7 @@ pytest-console-scripts==1.4.1 pytest-cov==7.0.0 pytest-github-actions-annotate-failures==0.3.0 pytest==9.0.2 +pytest-docker==3.2.5 PyYaml==6.0.3 responses==0.25.8 respx==0.22.0 diff --git a/tests/unit/test_pytest_plugin.py b/tests/unit/test_pytest_plugin.py new file mode 100644 index 000000000..213d297a6 --- /dev/null +++ b/tests/unit/test_pytest_plugin.py @@ -0,0 +1,17 @@ +"""Tests for the pytest-gitlab plugin.""" + +from importlib.metadata import entry_points + +import pytest + + +@pytest.mark.skip(reason="pytest-gitlab plugin is disabled in tox") +def test_gitlab_plugin_is_loaded(request): + """'gitlab' is a loaded pytest plugin.""" + assert request.config.pluginmanager.hasplugin("gitlab") + + +def test_gitlab_plugin_is_registered(): + """'gitlab' is registered as a pytest11 entry point.""" + pytest11_entry_points = entry_points(group="pytest11") + assert any(ep.value == "gitlab.testing.plugin" for ep in pytest11_entry_points) diff --git a/tox.ini b/tox.ini index 0ba295692..b925bdf45 100644 --- a/tox.ini +++ b/tox.ini @@ -26,7 +26,7 @@ passenv = NO_COLOR PWD PY_COLORS -setenv = +setenv = DOCS_SOURCE = docs DOCS_BUILD = build/sphinx/html VIRTUAL_ENV={envdir} @@ -38,7 +38,7 @@ isolated_build = True deps = -r{toxinidir}/requirements.txt -r{toxinidir}/requirements-test.txt commands = - pytest tests/unit {posargs} + pytest -p no:gitlab tests/unit {posargs} [testenv:black] basepython = python3 @@ -89,7 +89,7 @@ commands = commands = {posargs} [flake8] -exclude = .git,.venv,.tox,dist,doc,*egg,build, +extend-exclude = .venv,.direnv,dist,doc,build, max-line-length = 88 # We ignore the following because we use black to handle code-formatting # E203: Whitespace before ':' @@ -102,12 +102,12 @@ per-file-ignores = gitlab/v4/objects/__init__.py:F401,F403 [testenv:docs] -description = Builds the docs site. Generated HTML files will be available in '{env:DOCS_BUILD}'. +description = Builds the docs site. Generated HTML files will be available in '{env:DOCS_BUILD}'. 
deps = -r{toxinidir}/requirements-docs.txt commands = sphinx-build -n -W --keep-going -b html {env:DOCS_SOURCE} {env:DOCS_BUILD} [testenv:docs-serve] -description = +description = Builds and serves the HTML docs site locally. \ Use this for verifying updates to docs. \ Changes to docs files will be automatically rebuilt and served. @@ -116,7 +116,7 @@ commands = sphinx-autobuild {env:DOCS_SOURCE} {env:DOCS_BUILD} --open-browser -- [testenv:cover] commands = - pytest --cov --cov-report term --cov-report html \ + pytest -p no:gitlab --cov --cov-report term --cov-report html \ --cov-report xml tests/unit {posargs} [coverage:run] @@ -133,16 +133,16 @@ exclude_lines = [testenv:cli_func_v4] deps = -r{toxinidir}/requirements-docker.txt commands = - pytest --script-launch-mode=subprocess --cov --cov-report xml tests/functional/cli {posargs} + pytest -p no:gitlab --script-launch-mode=subprocess --cov --cov-report xml tests/functional/cli {posargs} [testenv:api_func_v4] deps = -r{toxinidir}/requirements-docker.txt commands = - pytest --cov --cov-report xml tests/functional/api {posargs} + pytest -p no:gitlab --cov --cov-report xml tests/functional/api {posargs} [testenv:smoke] deps = -r{toxinidir}/requirements-test.txt -commands = pytest tests/smoke {posargs} +commands = pytest -p no:gitlab tests/smoke {posargs} [testenv:pre-commit] skip_install = true @@ -153,4 +153,4 @@ commands = pre-commit run --all-files --show-diff-on-failure skip_install = true deps = -r{toxinidir}/requirements.txt -r{toxinidir}/requirements-test.txt -commands = pytest tests/install +commands = pytest -p no:gitlab tests/install
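
With the `pytest11` entry point above in place, a project that installs `python-gitlab[testing]` gets these fixtures without importing any conftest. A minimal sketch of a downstream test, assuming a local Docker daemon is available; the fixture names `gl` and `project` come from `gitlab/testing/fixtures/gitlab.py`, while the file name and assertion are illustrative only:

```python
# test_plugin_smoke.py -- illustrative sketch; assumes `pip install python-gitlab[testing]`
# and Docker, since the pytest-docker fixtures spin up the GitLab container on demand.


def test_project_round_trip(gl, project):
    # `gl` is an authenticated gitlab.Gitlab client pointed at the dockerized instance;
    # `project` is a throwaway project cleaned up afterwards via helpers.safe_delete().
    fetched = gl.projects.get(project.id)
    assert fetched.path_with_namespace == project.path_with_namespace
```

Passing `--keep-containers` (registered by `pytest_addoption`) makes `docker_cleanup` skip the `down -v` teardown, which, combined with the fixed `docker_compose_project_name`, is intended to let the containers be reused between local runs instead of waiting several minutes for a fresh GitLab instance each time.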