id int64 0 458k | file_name stringlengths 4 119 | file_path stringlengths 14 227 | content stringlengths 24 9.96M | size int64 24 9.96M | language stringclasses 1 value | extension stringclasses 14 values | total_lines int64 1 219k | avg_line_length float64 2.52 4.63M | max_line_length int64 5 9.91M | alphanum_fraction float64 0 1 | repo_name stringlengths 7 101 | repo_stars int64 100 139k | repo_forks int64 0 26.4k | repo_open_issues int64 0 2.27k | repo_license stringclasses 12 values | repo_extraction_date stringclasses 433 values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
23,200 | test_topics.py | python-gitlab_python-gitlab/tests/unit/objects/test_topics.py | """
GitLab API:
https://docs.gitlab.com/ce/api/topics.html
"""
import pytest
import responses
from gitlab.v4.objects import Topic
name = "GitLab"
topic_title = "topic title"
new_name = "gitlab-test"
topic_content = {
"id": 1,
"name": name,
"title": topic_title,
"description": "GitLab is an open source end-to-end software development platform.",
"total_projects_count": 1000,
"avatar_url": "http://www.gravatar.com/avatar/a0d477b3ea21970ce6ffcbb817b0b435?s=80&d=identicon",
}
topics_url = "http://localhost/api/v4/topics"
topic_url = f"{topics_url}/1"
@pytest.fixture
def resp_list_topics():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.GET,
url=topics_url,
json=[topic_content],
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_get_topic():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.GET,
url=topic_url,
json=topic_content,
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_create_topic():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.POST,
url=topics_url,
json=topic_content,
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_update_topic():
updated_content = dict(topic_content)
updated_content["name"] = new_name
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.PUT,
url=topic_url,
json=updated_content,
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_delete_topic():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.DELETE,
url=topic_url,
status=204,
)
yield rsps
@pytest.fixture
def resp_merge_topics():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.POST,
url=f"{topics_url}/merge",
json=topic_content,
content_type="application/json",
status=200,
)
yield rsps
def test_list_topics(gl, resp_list_topics):
topics = gl.topics.list()
assert isinstance(topics, list)
assert isinstance(topics[0], Topic)
assert topics[0].name == name
def test_get_topic(gl, resp_get_topic):
topic = gl.topics.get(1)
assert isinstance(topic, Topic)
assert topic.name == name
def test_create_topic(gl, resp_create_topic):
topic = gl.topics.create({"name": name, "title": topic_title})
assert isinstance(topic, Topic)
assert topic.name == name
assert topic.title == topic_title
def test_update_topic(gl, resp_update_topic):
topic = gl.topics.get(1, lazy=True)
topic.name = new_name
topic.save()
assert topic.name == new_name
def test_delete_topic(gl, resp_delete_topic):
topic = gl.topics.get(1, lazy=True)
topic.delete()
def test_merge_topic(gl, resp_merge_topics):
topic = gl.topics.merge(123, 1)
assert topic["id"] == 1
| 3,299 | Python | .py | 111 | 22.954955 | 101 | 0.629114 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,201 | test_merge_request_pipelines.py | python-gitlab_python-gitlab/tests/unit/objects/test_merge_request_pipelines.py | """
GitLab API: https://docs.gitlab.com/ee/api/merge_requests.html#list-mr-pipelines
"""
import pytest
import responses
from gitlab.v4.objects import ProjectMergeRequestPipeline
pipeline_content = {
"id": 1,
"sha": "959e04d7c7a30600c894bd3c0cd0e1ce7f42c11d",
"ref": "main",
"status": "success",
}
@pytest.fixture()
def resp_list_merge_request_pipelines():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.GET,
url="http://localhost/api/v4/projects/1/merge_requests/1/pipelines",
json=[pipeline_content],
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture()
def resp_create_merge_request_pipeline():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.POST,
url="http://localhost/api/v4/projects/1/merge_requests/1/pipelines",
json=pipeline_content,
content_type="application/json",
status=201,
)
yield rsps
def test_list_merge_requests_pipelines(project, resp_list_merge_request_pipelines):
pipelines = project.mergerequests.get(1, lazy=True).pipelines.list()
assert len(pipelines) == 1
assert isinstance(pipelines[0], ProjectMergeRequestPipeline)
assert pipelines[0].sha == pipeline_content["sha"]
def test_create_merge_requests_pipelines(project, resp_create_merge_request_pipeline):
pipeline = project.mergerequests.get(1, lazy=True).pipelines.create()
assert isinstance(pipeline, ProjectMergeRequestPipeline)
assert pipeline.sha == pipeline_content["sha"]
| 1,645 | Python | .py | 43 | 31.883721 | 86 | 0.694532 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,202 | test_merge_trains.py | python-gitlab_python-gitlab/tests/unit/objects/test_merge_trains.py | """
GitLab API:
https://docs.gitlab.com/ee/api/merge_trains.html
"""
import pytest
import responses
from gitlab.v4.objects import ProjectMergeTrain
mr_content = {
"id": 110,
"merge_request": {
"id": 1,
"iid": 1,
"project_id": 3,
"title": "Test merge train",
"description": "",
"state": "merged",
"created_at": "2020-02-06T08:39:14.883Z",
"updated_at": "2020-02-06T08:40:57.038Z",
"web_url": "http://gitlab.example.com/root/merge-train-race-condition/-/merge_requests/1",
},
"user": {
"id": 1,
"name": "Administrator",
"username": "root",
"state": "active",
"avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://gitlab.example.com/root",
},
"pipeline": {
"id": 246,
"sha": "bcc17a8ffd51be1afe45605e714085df28b80b13",
"ref": "refs/merge-requests/1/train",
"status": "success",
"created_at": "2020-02-06T08:40:42.410Z",
"updated_at": "2020-02-06T08:40:46.912Z",
"web_url": "http://gitlab.example.com/root/merge-train-race-condition/pipelines/246",
},
"created_at": "2020-02-06T08:39:47.217Z",
"updated_at": "2020-02-06T08:40:57.720Z",
"target_branch": "feature-1580973432",
"status": "merged",
"merged_at": "2020-02-06T08:40:57.719Z",
"duration": 70,
}
@pytest.fixture
def resp_list_merge_trains():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.GET,
url="http://localhost/api/v4/projects/1/merge_trains",
json=[mr_content],
content_type="application/json",
status=200,
)
yield rsps
def test_list_project_merge_requests(project, resp_list_merge_trains):
merge_trains = project.merge_trains.list()
assert isinstance(merge_trains[0], ProjectMergeTrain)
assert merge_trains[0].id == mr_content["id"]
| 2,012 | Python | .py | 59 | 27.423729 | 106 | 0.608428 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,203 | test_members.py | python-gitlab_python-gitlab/tests/unit/objects/test_members.py | """
GitLab API: https://docs.gitlab.com/ee/api/members.html
"""
import pytest
import responses
from gitlab.const import AccessLevel
from gitlab.v4.objects import GroupBillableMember
billable_members_content = [
{
"id": 1,
"username": "raymond_smith",
"name": "Raymond Smith",
"state": "active",
"avatar_url": "https://www.gravatar.com/avatar/c2525a7f58ae3776070e44c106c48e15?s=80&d=identicon",
"web_url": "http://192.168.1.8:3000/root",
"last_activity_on": "2021-01-27",
"membership_type": "group_member",
"removable": True,
}
]
@pytest.fixture
def resp_create_group_member():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.POST,
url="http://localhost/api/v4/groups/1/members",
json={"id": 1, "username": "jane_doe", "access_level": 30},
content_type="application/json",
status=201,
)
yield rsps
@pytest.fixture
def resp_list_billable_group_members():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.GET,
url="http://localhost/api/v4/groups/1/billable_members",
json=billable_members_content,
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_delete_billable_group_member():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.DELETE,
url="http://localhost/api/v4/groups/1/billable_members/1",
status=204,
)
yield rsps
def test_create_group_member(group, resp_create_group_member):
member = group.members.create({"user_id": 1, "access_level": AccessLevel.DEVELOPER})
assert member.access_level == 30
def test_list_group_billable_members(group, resp_list_billable_group_members):
billable_members = group.billable_members.list()
assert isinstance(billable_members, list)
assert isinstance(billable_members[0], GroupBillableMember)
assert billable_members[0].removable is True
def test_delete_group_billable_member(group, resp_delete_billable_group_member):
group.billable_members.delete(1)
| 2,236 | Python | .py | 61 | 29.704918 | 106 | 0.660185 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,204 | test_group_access_tokens.py | python-gitlab_python-gitlab/tests/unit/objects/test_group_access_tokens.py | """
GitLab API: https://docs.gitlab.com/ee/api/group_access_tokens.html
"""
import pytest
import responses
from gitlab.v4.objects import GroupAccessToken
@pytest.fixture
def resp_list_group_access_token(token_content):
with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
rsps.add(
method=responses.GET,
url="http://localhost/api/v4/groups/1/access_tokens",
json=[token_content],
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_get_group_access_token(token_content):
with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
rsps.add(
method=responses.GET,
url="http://localhost/api/v4/groups/1/access_tokens/1",
json=token_content,
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_create_group_access_token(token_content):
with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
rsps.add(
method=responses.POST,
url="http://localhost/api/v4/groups/1/access_tokens",
json=token_content,
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_revoke_group_access_token():
content = [
{
"user_id": 141,
"scopes": ["api"],
"name": "token",
"expires_at": "2021-01-31",
"id": 42,
"active": True,
"created_at": "2021-01-20T22:11:48.151Z",
"revoked": False,
}
]
with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
rsps.add(
method=responses.DELETE,
url="http://localhost/api/v4/groups/1/access_tokens/42",
status=204,
)
rsps.add(
method=responses.GET,
url="http://localhost/api/v4/groups/1/access_tokens",
json=content,
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_rotate_group_access_token(token_content):
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.POST,
url="http://localhost/api/v4/groups/1/access_tokens/1/rotate",
json=token_content,
content_type="application/json",
status=200,
)
yield rsps
def test_list_group_access_tokens(gl, resp_list_group_access_token):
access_tokens = gl.groups.get(1, lazy=True).access_tokens.list()
assert len(access_tokens) == 1
assert access_tokens[0].revoked is False
assert access_tokens[0].name == "token"
def test_get_group_access_token(group, resp_get_group_access_token):
access_token = group.access_tokens.get(1)
assert isinstance(access_token, GroupAccessToken)
assert access_token.revoked is False
assert access_token.name == "token"
def test_create_group_access_token(gl, resp_create_group_access_token):
access_tokens = gl.groups.get(1, lazy=True).access_tokens.create(
{"name": "test", "scopes": ["api"]}
)
assert access_tokens.revoked is False
assert access_tokens.user_id == 141
assert access_tokens.expires_at == "2021-01-31"
def test_revoke_group_access_token(
gl, resp_list_group_access_token, resp_revoke_group_access_token
):
gl.groups.get(1, lazy=True).access_tokens.delete(42)
access_token = gl.groups.get(1, lazy=True).access_tokens.list()[0]
access_token.delete()
def test_rotate_group_access_token(group, resp_rotate_group_access_token):
access_token = group.access_tokens.get(1, lazy=True)
access_token.rotate()
assert isinstance(access_token, GroupAccessToken)
assert access_token.token == "s3cr3t"
| 3,914 | Python | .py | 106 | 29.066038 | 77 | 0.640951 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,205 | test_deployments.py | python-gitlab_python-gitlab/tests/unit/objects/test_deployments.py | """
GitLab API: https://docs.gitlab.com/ce/api/deployments.html
"""
import pytest
import responses
@pytest.fixture
def resp_deployment_get():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.GET,
url="http://localhost/api/v4/projects/1/deployments/42",
json=response_get_content,
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def deployment(project):
return project.deployments.get(42, lazy=True)
@pytest.fixture
def resp_deployment_create():
content = {"id": 42, "status": "success", "ref": "main"}
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.POST,
url="http://localhost/api/v4/projects/1/deployments",
json=content,
content_type="application/json",
status=200,
)
updated_content = dict(content)
updated_content["status"] = "failed"
rsps.add(
method=responses.PUT,
url="http://localhost/api/v4/projects/1/deployments/42",
json=updated_content,
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_deployment_approval():
content = {
"user": {
"id": 100,
"username": "security-user-1",
"name": "security user-1",
"state": "active",
"avatar_url": "https://www.gravatar.com/avatar/e130fcd3a1681f41a3de69d10841afa9?s=80&d=identicon",
"web_url": "http://localhost:3000/security-user-1",
},
"status": "approved",
"created_at": "2022-02-24T20:22:30.097Z",
"comment": "Looks good to me",
}
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.POST,
url="http://localhost/api/v4/projects/1/deployments/42/approval",
json=content,
content_type="application/json",
status=200,
)
yield rsps
def test_deployment_get(project, resp_deployment_get):
deployment = project.deployments.get(42)
assert deployment.id == 42
assert deployment.iid == 2
assert deployment.status == "success"
assert deployment.ref == "main"
def test_deployment_create(project, resp_deployment_create):
deployment = project.deployments.create(
{
"environment": "Test",
"sha": "1agf4gs",
"ref": "main",
"tag": False,
"status": "created",
}
)
assert deployment.id == 42
assert deployment.status == "success"
assert deployment.ref == "main"
deployment.status = "failed"
deployment.save()
assert deployment.status == "failed"
def test_deployment_approval(deployment, resp_deployment_approval) -> None:
result = deployment.approval(status="approved")
assert result["status"] == "approved"
assert result["comment"] == "Looks good to me"
response_get_content = {
"id": 42,
"iid": 2,
"ref": "main",
"sha": "a91957a858320c0e17f3a0eca7cfacbff50ea29a",
"created_at": "2016-08-11T11:32:35.444Z",
"updated_at": "2016-08-11T11:34:01.123Z",
"status": "success",
"user": {
"name": "Administrator",
"username": "root",
"id": 1,
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://localhost:3000/root",
},
"environment": {
"id": 9,
"name": "production",
"external_url": "https://about.gitlab.com",
},
"deployable": {
"id": 664,
"status": "success",
"stage": "deploy",
"name": "deploy",
"ref": "main",
"tag": False,
"coverage": None,
"created_at": "2016-08-11T11:32:24.456Z",
"started_at": None,
"finished_at": "2016-08-11T11:32:35.145Z",
"user": {
"id": 1,
"name": "Administrator",
"username": "root",
"state": "active",
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://gitlab.dev/root",
"created_at": "2015-12-21T13:14:24.077Z",
"bio": None,
"location": None,
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
"organization": "",
},
"commit": {
"id": "a91957a858320c0e17f3a0eca7cfacbff50ea29a",
"short_id": "a91957a8",
"title": "Merge branch 'rename-readme' into 'main'\r",
"author_name": "Administrator",
"author_email": "admin@example.com",
"created_at": "2016-08-11T13:28:26.000+02:00",
"message": "Merge branch 'rename-readme' into 'main'\r\n\r\nRename README\r\n\r\n\r\n\r\nSee merge request !2",
},
"pipeline": {
"created_at": "2016-08-11T07:43:52.143Z",
"id": 42,
"ref": "main",
"sha": "a91957a858320c0e17f3a0eca7cfacbff50ea29a",
"status": "success",
"updated_at": "2016-08-11T07:43:52.143Z",
"web_url": "http://gitlab.dev/root/project/pipelines/5",
},
"runner": None,
},
}
| 5,438 | Python | .py | 159 | 25.389937 | 123 | 0.561727 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,206 | test_ci_lint.py | python-gitlab_python-gitlab/tests/unit/objects/test_ci_lint.py | import pytest
import responses
from gitlab import exceptions
ci_lint_create_content = {"status": "valid", "errors": [], "warnings": []}
ci_lint_create_invalid_content = {
"status": "invalid",
"errors": ["invalid format"],
"warnings": [],
}
project_ci_lint_content = {
"valid": True,
"merged_yaml": "---\n:test_job:\n :script: echo 1\n",
"errors": [],
"warnings": [],
}
@pytest.fixture
def resp_create_ci_lint():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.POST,
url="http://localhost/api/v4/ci/lint",
json=ci_lint_create_content,
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_create_ci_lint_invalid():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.POST,
url="http://localhost/api/v4/ci/lint",
json=ci_lint_create_invalid_content,
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_get_project_ci_lint():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.GET,
url="http://localhost/api/v4/projects/1/ci/lint",
json=project_ci_lint_content,
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_create_project_ci_lint():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.POST,
url="http://localhost/api/v4/projects/1/ci/lint",
json=project_ci_lint_content,
content_type="application/json",
status=200,
)
yield rsps
def test_ci_lint_create(gl, resp_create_ci_lint, valid_gitlab_ci_yml):
lint_result = gl.ci_lint.create({"content": valid_gitlab_ci_yml})
assert lint_result.status == "valid"
def test_ci_lint_validate(gl, resp_create_ci_lint, valid_gitlab_ci_yml):
gl.ci_lint.validate({"content": valid_gitlab_ci_yml})
def test_ci_lint_validate_invalid_raises(
gl, resp_create_ci_lint_invalid, invalid_gitlab_ci_yml
):
with pytest.raises(exceptions.GitlabCiLintError, match="invalid format"):
gl.ci_lint.validate({"content": invalid_gitlab_ci_yml})
def test_project_ci_lint_get(project, resp_get_project_ci_lint):
lint_result = project.ci_lint.get()
assert lint_result.valid is True
def test_project_ci_lint_create(
project, resp_create_project_ci_lint, valid_gitlab_ci_yml
):
lint_result = project.ci_lint.create({"content": valid_gitlab_ci_yml})
assert lint_result.valid is True
| 2,699 | Python | .py | 77 | 28.207792 | 77 | 0.641154 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,207 | test_repositories.py | python-gitlab_python-gitlab/tests/unit/objects/test_repositories.py | """
GitLab API:
https://docs.gitlab.com/ee/api/repositories.html
https://docs.gitlab.com/ee/api/repository_files.html
"""
from urllib.parse import quote
import pytest
import responses
from requests.structures import CaseInsensitiveDict
from gitlab.v4.objects import ProjectFile
file_path = "app/models/key.rb"
ref = "main"
@pytest.fixture
def resp_head_repository_file():
header_response = {
"Cache-Control": "no-cache",
"Content-Length": "0",
"Content-Type": "application/json",
"Date": "Thu, 12 Sep 2024 14:27:49 GMT",
"Referrer-Policy": "strict-origin-when-cross-origin",
"Server": "nginx",
"Strict-Transport-Security": "max-age=63072000",
"Vary": "Origin",
"X-Content-Type-Options": "nosniff",
"X-Frame-Options": "SAMEORIGIN",
"X-Gitlab-Blob-Id": "79f7bbd25901e8334750839545a9bd021f0e4c83",
"X-Gitlab-Commit-Id": "d5a3ff139356ce33e37e73add446f16869741b50",
"X-Gitlab-Content-Sha256": "4c294617b60715c1d218e61164a3abd4808a4284cbc30e6728a01ad9aada4481",
"X-Gitlab-Encoding": "base64",
"X-Gitlab-Execute-Filemode": "false",
"X-Gitlab-File-Name": "key.rb",
"X-Gitlab-File-Path": file_path,
"X-Gitlab-Last-Commit-Id": "570e7b2abdd848b95f2f578043fc23bd6f6fd24d",
"X-Gitlab-Meta": '{"correlation_id":"01J7KFRPXBX65Y04HEH7MFX4GD","version":"1"}',
"X-Gitlab-Ref": ref,
"X-Gitlab-Size": "1476",
"X-Request-Id": "01J7KFRPXBX65Y04HEH7MFX4GD",
"X-Runtime": "0.083199",
"Connection": "keep-alive",
}
encoded_path = quote(file_path, safe="")
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.HEAD,
url=f"http://localhost/api/v4/projects/1/repository/files/{encoded_path}",
headers=header_response,
status=200,
)
yield rsps
def test_head_repository_file(project, resp_head_repository_file):
headers = project.files.head(file_path, ref=ref)
assert isinstance(headers, CaseInsensitiveDict)
assert headers["X-Gitlab-File-Path"] == file_path
@pytest.fixture
def resp_get_repository_file():
file_response = {
"file_name": "key.rb",
"file_path": file_path,
"size": 1476,
"encoding": "base64",
"content": "IyA9PSBTY2hlbWEgSW5mb3...",
"content_sha256": "4c294617b60715c1d218e61164a3abd4808a4284cbc30e6728a01ad9aada4481",
"ref": ref,
"blob_id": "79f7bbd25901e8334750839545a9bd021f0e4c83",
"commit_id": "d5a3ff139356ce33e37e73add446f16869741b50",
"last_commit_id": "570e7b2abdd848b95f2f578043fc23bd6f6fd24d",
}
encoded_path = quote(file_path, safe="")
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.GET,
url=f"http://localhost/api/v4/projects/1/repository/files/{encoded_path}",
json=file_response,
content_type="application/json",
status=200,
)
yield rsps
def test_get_repository_file(project, resp_get_repository_file):
file = project.files.get(file_path, ref=ref)
assert isinstance(file, ProjectFile)
assert file.file_path == file_path
| 3,251 | Python | .py | 81 | 32.975309 | 102 | 0.661807 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,208 | test_iterations.py | python-gitlab_python-gitlab/tests/unit/objects/test_iterations.py | """
GitLab API: https://docs.gitlab.com/ee/api/iterations.html
"""
import re
import pytest
import responses
iterations_content = [
{
"id": 53,
"iid": 13,
"group_id": 5,
"title": "Iteration II",
"description": "Ipsum Lorem ipsum",
"state": 2,
"created_at": "2020-01-27T05:07:12.573Z",
"updated_at": "2020-01-27T05:07:12.573Z",
"due_date": "2020-02-01",
"start_date": "2020-02-14",
"web_url": "http://gitlab.example.com/groups/my-group/-/iterations/13",
}
]
@pytest.fixture
def resp_iterations_list():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.GET,
url=re.compile(r"http://localhost/api/v4/(groups|projects)/1/iterations"),
json=iterations_content,
content_type="application/json",
status=200,
)
yield rsps
def test_list_group_iterations(group, resp_iterations_list):
iterations = group.iterations.list()
assert iterations[0].group_id == 5
def test_list_project_iterations(project, resp_iterations_list):
iterations = project.iterations.list()
assert iterations[0].group_id == 5
| 1,210 | Python | .py | 38 | 25.342105 | 86 | 0.623388 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,209 | test_deploy_tokens.py | python-gitlab_python-gitlab/tests/unit/objects/test_deploy_tokens.py | """
GitLab API: https://docs.gitlab.com/ce/api/deploy_tokens.html
"""
import pytest
import responses
from gitlab.v4.objects import ProjectDeployToken
create_content = {
"id": 1,
"name": "test_deploy_token",
"username": "custom-user",
"expires_at": "2022-01-01T00:00:00.000Z",
"token": "jMRvtPNxrn3crTAGukpZ",
"scopes": ["read_repository"],
}
@pytest.fixture
def resp_deploy_token_create():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.POST,
url="http://localhost/api/v4/projects/1/deploy_tokens",
json=create_content,
content_type="application/json",
status=200,
)
yield rsps
def test_deploy_tokens(gl, resp_deploy_token_create):
deploy_token = gl.projects.get(1, lazy=True).deploytokens.create(
{
"name": "test_deploy_token",
"expires_at": "2022-01-01T00:00:00.000Z",
"username": "custom-user",
"scopes": ["read_repository"],
}
)
assert isinstance(deploy_token, ProjectDeployToken)
assert deploy_token.id == 1
assert deploy_token.expires_at == "2022-01-01T00:00:00.000Z"
assert deploy_token.username == "custom-user"
assert deploy_token.scopes == ["read_repository"]
| 1,300 | Python | .py | 39 | 26.923077 | 69 | 0.638756 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,210 | test_hooks.py | python-gitlab_python-gitlab/tests/unit/objects/test_hooks.py | """
GitLab API: https://docs.gitlab.com/ce/api/system_hooks.html
GitLab API: https://docs.gitlab.com/ce/api/groups.html#hooks
GitLab API: https://docs.gitlab.com/ee/api/projects.html#hooks
"""
import re
import pytest
import responses
import gitlab
from gitlab.v4.objects import GroupHook, Hook, ProjectHook
hooks_content = [
{
"id": 1,
"url": "testurl",
"push_events": True,
"tag_push_events": True,
},
{
"id": 2,
"url": "testurl_second",
"push_events": False,
"tag_push_events": False,
},
]
hook_content = hooks_content[0]
@pytest.fixture
def resp_hooks_list():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.GET,
url=re.compile(r"http://localhost/api/v4/((groups|projects)/1/|)hooks"),
json=hooks_content,
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_hook_get():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.GET,
url=re.compile(r"http://localhost/api/v4/((groups|projects)/1/|)hooks/1"),
json=hook_content,
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_hook_create():
with responses.RequestsMock() as rsps:
rsps.add(
method=responses.POST,
url=re.compile(r"http://localhost/api/v4/((groups|projects)/1/|)hooks"),
json=hook_content,
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_hook_update():
with responses.RequestsMock() as rsps:
pattern = re.compile(r"http://localhost/api/v4/((groups|projects)/1/|)hooks/1")
rsps.add(
method=responses.GET,
url=pattern,
json=hook_content,
content_type="application/json",
status=200,
)
rsps.add(
method=responses.PUT,
url=pattern,
json=hook_content,
content_type="application/json",
status=200,
)
yield rsps
@pytest.fixture
def resp_hook_test():
with responses.RequestsMock() as rsps:
hook_pattern = re.compile(
r"http://localhost/api/v4/((groups|projects)/1/|)hooks/1"
)
test_pattern = re.compile(
r"http://localhost/api/v4/((groups|projects)/1/|)hooks/1/test/[a-z_]+"
)
rsps.add(
method=responses.GET,
url=hook_pattern,
json=hook_content,
content_type="application/json",
status=200,
)
rsps.add(
method=responses.POST,
url=test_pattern,
json={"message": "201 Created"},
content_type="application/json",
status=201,
)
yield rsps
@pytest.fixture
def resp_hook_test_error():
with responses.RequestsMock() as rsps:
hook_pattern = re.compile(
r"http://localhost/api/v4/((groups|projects)/1/|)hooks/1"
)
test_pattern = re.compile(
r"http://localhost/api/v4/((groups|projects)/1/|)hooks/1/test/[a-z_]+"
)
rsps.add(
method=responses.GET,
url=hook_pattern,
json=hook_content,
content_type="application/json",
status=200,
)
rsps.add(
method=responses.POST,
url=test_pattern,
json={"message": "<html>error</html>"},
content_type="application/json",
status=422,
)
yield rsps
@pytest.fixture
def resp_hook_delete():
with responses.RequestsMock() as rsps:
pattern = re.compile(r"http://localhost/api/v4/((groups|projects)/1/|)hooks/1")
rsps.add(
method=responses.GET,
url=pattern,
json=hook_content,
content_type="application/json",
status=200,
)
rsps.add(
method=responses.DELETE,
url=pattern,
status=204,
)
yield rsps
def test_list_system_hooks(gl, resp_hooks_list):
hooks = gl.hooks.list()
assert hooks[0].id == 1
assert hooks[0].url == "testurl"
assert hooks[1].id == 2
assert hooks[1].url == "testurl_second"
def test_get_system_hook(gl, resp_hook_get):
data = gl.hooks.get(1)
assert isinstance(data, Hook)
assert data.url == "testurl"
assert data.id == 1
def test_create_system_hook(gl, resp_hook_create):
hook = gl.hooks.create(hook_content)
assert hook.url == "testurl"
assert hook.push_events is True
assert hook.tag_push_events is True
# there is no update method for system hooks
def test_delete_system_hook(gl, resp_hook_delete):
hook = gl.hooks.get(1)
hook.delete()
gl.hooks.delete(1)
def test_list_group_hooks(group, resp_hooks_list):
hooks = group.hooks.list()
assert hooks[0].id == 1
assert hooks[0].url == "testurl"
assert hooks[1].id == 2
assert hooks[1].url == "testurl_second"
def test_get_group_hook(group, resp_hook_get):
data = group.hooks.get(1)
assert isinstance(data, GroupHook)
assert data.url == "testurl"
assert data.id == 1
def test_create_group_hook(group, resp_hook_create):
hook = group.hooks.create(hook_content)
assert hook.url == "testurl"
assert hook.push_events is True
assert hook.tag_push_events is True
def test_update_group_hook(group, resp_hook_update):
hook = group.hooks.get(1)
assert hook.id == 1
hook.url = "testurl_more"
hook.save()
def test_delete_group_hook(group, resp_hook_delete):
hook = group.hooks.get(1)
hook.delete()
group.hooks.delete(1)
def test_test_group_hook(group, resp_hook_test):
hook = group.hooks.get(1)
hook.test("push_events")
def test_test_error_group_hook(group, resp_hook_test_error):
hook = group.hooks.get(1)
with pytest.raises(gitlab.exceptions.GitlabHookTestError):
hook.test("push_events")
def test_list_project_hooks(project, resp_hooks_list):
hooks = project.hooks.list()
assert hooks[0].id == 1
assert hooks[0].url == "testurl"
assert hooks[1].id == 2
assert hooks[1].url == "testurl_second"
def test_get_project_hook(project, resp_hook_get):
data = project.hooks.get(1)
assert isinstance(data, ProjectHook)
assert data.url == "testurl"
assert data.id == 1
def test_create_project_hook(project, resp_hook_create):
hook = project.hooks.create(hook_content)
assert hook.url == "testurl"
assert hook.push_events is True
assert hook.tag_push_events is True
def test_update_project_hook(project, resp_hook_update):
hook = project.hooks.get(1)
assert hook.id == 1
hook.url = "testurl_more"
hook.save()
def test_delete_project_hook(project, resp_hook_delete):
hook = project.hooks.get(1)
hook.delete()
project.hooks.delete(1)
| 7,083 | Python | .py | 220 | 24.809091 | 87 | 0.613803 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,211 | test_registry_repositories.py | python-gitlab_python-gitlab/tests/unit/objects/test_registry_repositories.py | """
GitLab API: https://docs.gitlab.com/ee/api/container_registry.html
"""
import re
import pytest
import responses
from gitlab.v4.objects import ProjectRegistryRepository, RegistryRepository
# Canned API payload: two container-registry repositories of project 9,
# mirroring the response documented for the container_registry endpoints.
repositories_content = [
    {
        "id": 1,
        "name": "",
        "path": "group/project",
        "project_id": 9,
        "location": "gitlab.example.com:5000/group/project",
        "created_at": "2019-01-10T13:38:57.391Z",
        "cleanup_policy_started_at": "2020-01-10T15:40:57.391Z",
    },
    {
        "id": 2,
        "name": "releases",
        "path": "group/project/releases",
        "project_id": 9,
        "location": "gitlab.example.com:5000/group/project/releases",
        "created_at": "2019-01-10T13:39:08.229Z",
        "cleanup_policy_started_at": "2020-08-17T03:12:35.489Z",
    },
]
@pytest.fixture
def resp_list_registry_repositories():
    """Mock GET list for both group- and project-level repository listings."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            # One regex serves both /groups/1/... and /projects/1/... URLs.
            url=re.compile(
                r"http://localhost/api/v4/(groups|projects)/1/registry/repositories"
            ),
            json=repositories_content,
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture
def resp_get_registry_repository():
    """Mock GET of a single repository via the top-level endpoint."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/registry/repositories/1",
            json=repositories_content[0],
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture
def resp_delete_registry_repository():
    """Mock DELETE of a project repository (204, no body)."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.DELETE,
            url="http://localhost/api/v4/projects/1/registry/repositories/1",
            status=204,
        )
        yield rsps
def test_list_group_registry_repositories(group, resp_list_registry_repositories):
    """Group-level listing returns ProjectRegistryRepository objects."""
    first = group.registry_repositories.list()[0]
    assert isinstance(first, ProjectRegistryRepository)
    assert first.id == 1


def test_list_project_registry_repositories(project, resp_list_registry_repositories):
    """Project-level listing returns ProjectRegistryRepository objects."""
    first = project.repositories.list()[0]
    assert isinstance(first, ProjectRegistryRepository)
    assert first.id == 1


def test_delete_project_registry_repository(project, resp_delete_registry_repository):
    """Deleting a project repository issues a DELETE without error."""
    project.repositories.delete(1)


def test_get_registry_repository(gl, resp_get_registry_repository):
    """The top-level endpoint returns a RegistryRepository by id."""
    repository = gl.registry_repositories.get(1)
    assert isinstance(repository, RegistryRepository)
    assert repository.id == 1
| 2,680 | Python | .py | 74 | 29.081081 | 86 | 0.663833 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,212 | test_bulk_imports.py | python-gitlab_python-gitlab/tests/unit/objects/test_bulk_imports.py | """
GitLab API: https://docs.gitlab.com/ce/api/bulk_imports.html
"""
import pytest
import responses
from gitlab.v4.objects import BulkImport, BulkImportAllEntity, BulkImportEntity
# Canned payload for a whole migration (bulk import).
migration_content = {
    "id": 1,
    "status": "finished",
    "source_type": "gitlab",
    "created_at": "2021-06-18T09:45:55.358Z",
    "updated_at": "2021-06-18T09:46:27.003Z",
}
# Canned payload for a single migration entity belonging to bulk import 1.
entity_content = {
    "id": 1,
    "bulk_import_id": 1,
    "status": "finished",
    "source_full_path": "source_group",
    "destination_slug": "destination_slug",
    "destination_namespace": "destination_path",
    "parent_id": None,
    "namespace_id": 1,
    "project_id": None,
    "created_at": "2021-06-18T09:47:37.390Z",
    "updated_at": "2021-06-18T09:47:51.867Z",
    "failures": [],
}
@pytest.fixture
def resp_create_bulk_import():
    """Mock POST /bulk_imports (201) returning the migration payload."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.POST,
            url="http://localhost/api/v4/bulk_imports",
            json=migration_content,
            content_type="application/json",
            status=201,
        )
        yield rsps
@pytest.fixture
def resp_list_bulk_imports():
    """Mock GET /bulk_imports returning a one-element list."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/bulk_imports",
            json=[migration_content],
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture
def resp_get_bulk_import():
    """Mock GET /bulk_imports/1 returning a single migration."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/bulk_imports/1",
            json=migration_content,
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture
def resp_list_all_bulk_import_entities():
    """Mock GET /bulk_imports/entities (instance-wide entity listing)."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/bulk_imports/entities",
            json=[entity_content],
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture
def resp_list_bulk_import_entities():
    """Mock GET /bulk_imports/1/entities (entities of one migration)."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/bulk_imports/1/entities",
            json=[entity_content],
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture
def resp_get_bulk_import_entity():
    """Mock GET /bulk_imports/1/entities/1 (single entity)."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/bulk_imports/1/entities/1",
            json=entity_content,
            content_type="application/json",
            status=200,
        )
        yield rsps
def test_create_bulk_import(gl, resp_create_bulk_import):
    """Starting a migration returns a BulkImport reflecting server state."""
    request = {
        "configuration": {
            "url": gl.url,
            "access_token": "test-token",
        },
        "entities": [
            {
                "source_full_path": "source",
                "source_type": "group_entity",
                "destination_slug": "destination",
                "destination_namespace": "destination",
            }
        ],
    }
    migration = gl.bulk_imports.create(request)
    assert isinstance(migration, BulkImport)
    assert migration.status == "finished"


def test_list_bulk_imports(gl, resp_list_bulk_imports):
    """Listing migrations yields BulkImport objects."""
    first = gl.bulk_imports.list()[0]
    assert isinstance(first, BulkImport)
    assert first.status == "finished"


def test_get_bulk_import(gl, resp_get_bulk_import):
    """A single migration can be fetched by id."""
    migration = gl.bulk_imports.get(1)
    assert isinstance(migration, BulkImport)
    assert migration.status == "finished"


def test_list_all_bulk_import_entities(gl, resp_list_all_bulk_import_entities):
    """The instance-wide entity listing yields BulkImportAllEntity objects."""
    first = gl.bulk_import_entities.list()[0]
    assert isinstance(first, BulkImportAllEntity)
    assert first.bulk_import_id == 1


def test_list_bulk_import_entities(gl, migration, resp_list_bulk_import_entities):
    """Entities of one migration are BulkImportEntity objects."""
    first = migration.entities.list()[0]
    assert isinstance(first, BulkImportEntity)
    assert first.bulk_import_id == 1


def test_get_bulk_import_entity(gl, migration, resp_get_bulk_import_entity):
    """A single entity of a migration can be fetched by id."""
    entity = migration.entities.get(1)
    assert isinstance(entity, BulkImportEntity)
    assert entity.bulk_import_id == 1
| 4,437 | Python | .py | 132 | 26.530303 | 82 | 0.63885 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,213 | test_job_artifacts.py | python-gitlab_python-gitlab/tests/unit/objects/test_job_artifacts.py | """
GitLab API: https://docs.gitlab.com/ee/api/job_artifacts.html
"""
import pytest
import responses
# Ref and job name used to build the artifact-download URL below.
ref_name = "main"
job = "build"
@pytest.fixture
def resp_artifacts_by_ref_name(binary_content):
    """Mock GET of a job's artifact archive addressed by ref name and job."""
    url = f"http://localhost/api/v4/projects/1/jobs/artifacts/{ref_name}/download?job={job}"
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url=url,
            body=binary_content,
            content_type="application/octet-stream",
            status=200,
        )
        yield rsps
@pytest.fixture
def resp_project_artifacts_delete():
    """Mock DELETE of all project artifacts (204, no body)."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.DELETE,
            url="http://localhost/api/v4/projects/1/artifacts",
            status=204,
        )
        yield rsps
def test_project_artifacts_delete(gl, resp_project_artifacts_delete):
    """Deleting all artifacts issues a DELETE on the project endpoint."""
    gl.projects.get(1, lazy=True).artifacts.delete()


def test_project_artifacts_download_by_ref_name(
    gl, binary_content, resp_artifacts_by_ref_name
):
    """Artifacts addressed by ref/job download as the raw archive bytes."""
    project = gl.projects.get(1, lazy=True)
    downloaded = project.artifacts.download(ref_name=ref_name, job=job)
    assert downloaded == binary_content
| 1,226 | Python | .py | 37 | 26.972973 | 92 | 0.673175 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,214 | test_snippets.py | python-gitlab_python-gitlab/tests/unit/objects/test_snippets.py | """
GitLab API: https://docs.gitlab.com/ce/api/project_snippets.html
https://docs.gitlab.com/ee/api/snippets.html (todo)
"""
import pytest
import responses
# Snippet attributes shared by the fixture payload and the assertions below.
title = "Example Snippet Title"
visibility = "private"
new_title = "new-title"
@pytest.fixture
def resp_snippet():
    """Mock the list/get/create/update endpoints for project snippets.

    Not all registered responses are used by every test, hence
    assert_all_requests_are_fired=False.
    """
    content = {
        "title": title,
        "description": "More verbose snippet description",
        "file_name": "example.txt",
        "content": "source code with multiple lines",
        "visibility": visibility,
    }
    with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/projects/1/snippets",
            json=[content],
            content_type="application/json",
            status=200,
        )
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/projects/1/snippets/1",
            json=content,
            content_type="application/json",
            status=200,
        )
        rsps.add(
            method=responses.POST,
            url="http://localhost/api/v4/projects/1/snippets",
            json=content,
            content_type="application/json",
            status=200,
        )
        updated_content = dict(content)
        updated_content["title"] = new_title
        updated_content["visibility"] = visibility
        # NOTE(review): the PUT is registered on the collection path (no /1);
        # presumably because `content` carries no "id", so save() targets the
        # manager path — confirm against SaveMixin before changing this URL.
        rsps.add(
            method=responses.PUT,
            url="http://localhost/api/v4/projects/1/snippets",
            json=updated_content,
            content_type="application/json",
            status=200,
        )
        yield rsps
def test_list_project_snippets(project, resp_snippet):
    """Listing returns the single mocked snippet."""
    snippets = project.snippets.list()
    assert len(snippets) == 1
    first = snippets[0]
    assert (first.title, first.visibility) == (title, visibility)


def test_get_project_snippet(project, resp_snippet):
    """Getting a snippet by id returns the mocked payload."""
    snippet = project.snippets.get(1)
    assert (snippet.title, snippet.visibility) == (title, visibility)


def test_create_update_project_snippets(project, resp_snippet):
    """A created snippet can be renamed and saved back."""
    payload = {
        "title": title,
        "file_name": title,
        "content": title,
        "visibility": visibility,
    }
    snippet = project.snippets.create(payload)
    assert (snippet.title, snippet.visibility) == (title, visibility)
    snippet.title = new_title
    snippet.save()
    assert (snippet.title, snippet.visibility) == (new_title, visibility)
| 2,498 | Python | .py | 75 | 25.12 | 77 | 0.616438 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,215 | test_resource_state_events.py | python-gitlab_python-gitlab/tests/unit/objects/test_resource_state_events.py | """
GitLab API: https://docs.gitlab.com/ee/api/resource_state_events.html
"""
import pytest
import responses
from gitlab.v4.objects import (
ProjectIssueResourceStateEvent,
ProjectMergeRequestResourceStateEvent,
)
# Minimal state-event payloads for issues and merge requests.
issue_event_content = {"id": 1, "resource_type": "Issue"}
mr_event_content = {"id": 1, "resource_type": "MergeRequest"}
@pytest.fixture()
def resp_list_project_issue_state_events():
    """Mock GET of the state-event list for issue 1 of project 1."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/projects/1/issues/1/resource_state_events",
            json=[issue_event_content],
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture()
def resp_get_project_issue_state_event():
    """Mock GET of a single issue state event."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/projects/1/issues/1/resource_state_events/1",
            json=issue_event_content,
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture()
def resp_list_merge_request_state_events():
    """Mock GET of the state-event list for merge request 1."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/projects/1/merge_requests/1/resource_state_events",
            json=[mr_event_content],
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture()
def resp_get_merge_request_state_event():
    """Mock GET of a single merge-request state event."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/projects/1/merge_requests/1/resource_state_events/1",
            json=mr_event_content,
            content_type="application/json",
            status=200,
        )
        yield rsps
def test_list_project_issue_state_events(
    project_issue, resp_list_project_issue_state_events
):
    """Issue state events list as ProjectIssueResourceStateEvent objects."""
    events = project_issue.resourcestateevents.list()
    assert isinstance(events, list)
    first = events[0]
    assert isinstance(first, ProjectIssueResourceStateEvent)
    assert first.resource_type == "Issue"


def test_get_project_issue_state_event(
    project_issue, resp_get_project_issue_state_event
):
    """A single issue state event can be fetched by id."""
    event = project_issue.resourcestateevents.get(1)
    assert isinstance(event, ProjectIssueResourceStateEvent)
    assert event.resource_type == "Issue"


def test_list_merge_request_state_events(
    project_merge_request, resp_list_merge_request_state_events
):
    """MR state events list as ProjectMergeRequestResourceStateEvent objects."""
    events = project_merge_request.resourcestateevents.list()
    assert isinstance(events, list)
    first = events[0]
    assert isinstance(first, ProjectMergeRequestResourceStateEvent)
    assert first.resource_type == "MergeRequest"


def test_get_merge_request_state_event(
    project_merge_request, resp_get_merge_request_state_event
):
    """A single MR state event can be fetched by id."""
    event = project_merge_request.resourcestateevents.get(1)
    assert isinstance(event, ProjectMergeRequestResourceStateEvent)
    assert event.resource_type == "MergeRequest"
| 3,195 | Python | .py | 83 | 31.939759 | 94 | 0.696862 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,216 | test_groups.py | python-gitlab_python-gitlab/tests/unit/objects/test_groups.py | """
GitLab API: https://docs.gitlab.com/ce/api/groups.html
"""
import re
import pytest
import responses
import gitlab
from gitlab.v4.objects import (
GroupDescendantGroup,
GroupLDAPGroupLink,
GroupSAMLGroupLink,
GroupSubgroup,
)
from gitlab.v4.objects.projects import GroupProject, SharedProject
# Minimal group payload used by the group get/create/transfer fixtures.
content = {"name": "name", "id": 1, "path": "path"}
# One LDAP group link, filter-based (no CN).
ldap_group_links_content = [
    {
        "cn": None,
        "group_access": 40,
        "provider": "ldapmain",
        "filter": "(memberOf=cn=some_group,ou=groups,ou=fake_ou,dc=sub_dc,dc=example,dc=tld)",
    }
]
# One SAML group link and the body sent when creating it.
saml_group_links_content = [{"name": "saml-group-1", "access_level": 10}]
create_saml_group_link_request_body = {
    "saml_group_name": "saml-group-1",
    "access_level": 10,
}
# One project as returned by the group projects / shared-projects listings.
projects_content = [
    {
        "id": 9,
        "description": "foo",
        "default_branch": "master",
        "name": "Html5 Boilerplate",
        "name_with_namespace": "Experimental / Html5 Boilerplate",
        "path": "html5-boilerplate",
        "path_with_namespace": "h5bp/html5-boilerplate",
        "namespace": {"id": 5, "name": "Experimental", "path": "h5bp", "kind": "group"},
    }
]
# One subgroup, served for both the subgroups and descendant_groups listings.
subgroup_descgroup_content = [
    {
        "id": 2,
        "name": "Bar Group",
        "path": "foo/bar",
        "description": "A subgroup of Foo Group",
        "visibility": "public",
        "share_with_group_lock": False,
        "require_two_factor_authentication": False,
        "two_factor_grace_period": 48,
        "project_creation_level": "developer",
        "auto_devops_enabled": None,
        "subgroup_creation_level": "owner",
        "emails_disabled": None,
        "mentions_disabled": None,
        "lfs_enabled": True,
        "default_branch_protection": 2,
        "avatar_url": "http://gitlab.example.com/uploads/group/avatar/1/bar.jpg",
        "web_url": "http://gitlab.example.com/groups/foo/bar",
        "request_access_enabled": False,
        "full_name": "Bar Group",
        "full_path": "foo/bar",
        "file_template_project_id": 1,
        "parent_id": 123,
        "created_at": "2020-01-15T12:36:29.590Z",
    },
]
# Group push-rule payload used by the push_rule CRUD fixtures.
push_rules_content = {
    "id": 2,
    "created_at": "2020-08-17T19:09:19.580Z",
    "commit_message_regex": "[a-zA-Z]",
    "commit_message_negative_regex": "[x+]",
    "branch_name_regex": "[a-z]",
    "deny_delete_tag": True,
    "member_check": True,
    "prevent_secrets": True,
    "author_email_regex": "^[A-Za-z0-9.]+@gitlab.com$",
    "file_name_regex": "(exe)$",
    "max_file_size": 100,
}
# Service-account payload for the group service-account creation fixture.
service_account_content = {
    "name": "gitlab-service-account",
    "username": "gitlab-service-account",
}
@pytest.fixture
def resp_groups():
    """Mock group GET (single + list) and POST; not all are fired per test."""
    with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/groups/1",
            json=content,
            content_type="application/json",
            status=200,
        )
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/groups",
            json=[content],
            content_type="application/json",
            status=200,
        )
        rsps.add(
            method=responses.POST,
            url="http://localhost/api/v4/groups",
            json=content,
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture
def resp_list_group_projects():
    """Mock the group projects listing; the regex also matches /shared."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url=re.compile(r"http://localhost/api/v4/groups/1/projects(/shared)?"),
            json=projects_content,
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture
def resp_list_subgroups_descendant_groups():
    """Mock both the subgroups and descendant_groups listings with one regex."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url=re.compile(
                r"http://localhost/api/v4/groups/1/(subgroups|descendant_groups)"
            ),
            json=subgroup_descgroup_content,
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture
def resp_create_import(accepted_content):
    """Mock POST /groups/import (202 Accepted)."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.POST,
            url="http://localhost/api/v4/groups/import",
            json=accepted_content,
            content_type="application/json",
            status=202,
        )
        yield rsps
@pytest.fixture
def resp_transfer_group():
    """Mock group transfer; the matcher pins the expected request body."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.POST,
            url="http://localhost/api/v4/groups/1/transfer",
            json=content,
            content_type="application/json",
            status=200,
            match=[
                responses.matchers.json_params_matcher({"group_id": "test-namespace"})
            ],
        )
        yield rsps
@pytest.fixture
def resp_list_push_rules_group():
    """Mock GET of the group's push rule (singleton resource)."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/groups/1/push_rule",
            json=push_rules_content,
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture
def resp_create_push_rules_group():
    """Mock POST of a group push rule (201)."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.POST,
            url="http://localhost/api/v4/groups/1/push_rule",
            json=push_rules_content,
            content_type="application/json",
            status=201,
        )
        yield rsps
@pytest.fixture
def resp_update_push_rules_group():
    """Mock GET-then-PUT of the group push rule (save() fetches first)."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/groups/1/push_rule",
            json=push_rules_content,
            content_type="application/json",
            status=200,
        )
        rsps.add(
            method=responses.PUT,
            url="http://localhost/api/v4/groups/1/push_rule",
            json=push_rules_content,
            content_type="application/json",
            status=201,
        )
        yield rsps
@pytest.fixture
def resp_delete_push_rules_group():
    """Mock GET-then-DELETE of the group push rule."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/groups/1/push_rule",
            json=push_rules_content,
            content_type="application/json",
            status=200,
        )
        rsps.add(
            method=responses.DELETE,
            url="http://localhost/api/v4/groups/1/push_rule",
            status=204,
        )
        yield rsps
@pytest.fixture
def resp_list_ldap_group_links():
    """Mock GET of the group's LDAP group links."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/groups/1/ldap_group_links",
            json=ldap_group_links_content,
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture
def resp_list_saml_group_links():
    """Mock GET of the group's SAML group links."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/groups/1/saml_group_links",
            json=saml_group_links_content,
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture
def resp_get_saml_group_link():
    """Mock GET of a single SAML group link addressed by its name."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.GET,
            url="http://localhost/api/v4/groups/1/saml_group_links/saml-group-1",
            json=saml_group_links_content[0],
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture
def resp_create_saml_group_link():
    """Mock POST of a SAML group link with the exact expected body."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.POST,
            url="http://localhost/api/v4/groups/1/saml_group_links",
            match=[
                responses.matchers.json_params_matcher(
                    create_saml_group_link_request_body
                )
            ],
            json=saml_group_links_content[0],
            content_type="application/json",
            status=200,
        )
        yield rsps
@pytest.fixture
def resp_delete_saml_group_link():
    """Mock create-then-delete of a SAML group link (the test creates first)."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.POST,
            url="http://localhost/api/v4/groups/1/saml_group_links",
            match=[
                responses.matchers.json_params_matcher(
                    create_saml_group_link_request_body
                )
            ],
            json=saml_group_links_content[0],
            content_type="application/json",
            status=200,
        )
        rsps.add(
            method=responses.DELETE,
            url="http://localhost/api/v4/groups/1/saml_group_links/saml-group-1",
            status=204,
        )
        yield rsps
@pytest.fixture
def resp_restore_group(created_content):
    """Mock POST /groups/1/restore (201)."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.POST,
            url="http://localhost/api/v4/groups/1/restore",
            json=created_content,
            content_type="application/json",
            status=201,
        )
        yield rsps
@pytest.fixture
def resp_create_group_service_account():
    """Mock POST of a group service account."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            method=responses.POST,
            url="http://localhost/api/v4/groups/1/service_accounts",
            json=service_account_content,
            content_type="application/json",
            status=200,
        )
        yield rsps
def test_get_group(gl, resp_groups):
    """GET /groups/:id returns a populated Group."""
    group = gl.groups.get(1)
    assert isinstance(group, gitlab.v4.objects.Group)
    assert (group.id, group.name, group.path) == (1, "name", "path")


def test_create_group(gl, resp_groups):
    """POST /groups echoes name and path back on the new Group."""
    attrs = {"name": "name", "path": "path"}
    group = gl.groups.create(attrs)
    assert isinstance(group, gitlab.v4.objects.Group)
    assert group.name == attrs["name"]
    assert group.path == attrs["path"]


def test_create_group_export(group, resp_export):
    """Requesting a group export is accepted by the API."""
    export = group.exports.create()
    assert export.message == "202 Accepted"


def test_list_group_projects(group, resp_list_group_projects):
    """Group projects are returned as GroupProject objects."""
    first = group.projects.list()[0]
    assert isinstance(first, GroupProject)
    assert first.path == projects_content[0]["path"]


def test_list_group_shared_projects(group, resp_list_group_projects):
    """Shared projects are returned as SharedProject objects."""
    first = group.shared_projects.list()[0]
    assert isinstance(first, SharedProject)
    assert first.path == projects_content[0]["path"]


def test_list_group_subgroups(group, resp_list_subgroups_descendant_groups):
    """Subgroups are returned as GroupSubgroup objects."""
    first = group.subgroups.list()[0]
    assert isinstance(first, GroupSubgroup)
    assert first.path == subgroup_descgroup_content[0]["path"]


def test_list_group_descendant_groups(group, resp_list_subgroups_descendant_groups):
    """Descendant groups are returned as GroupDescendantGroup objects."""
    first = group.descendant_groups.list()[0]
    assert isinstance(first, GroupDescendantGroup)
    assert first.path == subgroup_descgroup_content[0]["path"]


def test_list_ldap_group_links(group, resp_list_ldap_group_links):
    """LDAP links are returned as GroupLDAPGroupLink objects."""
    first = group.ldap_group_links.list()[0]
    assert isinstance(first, GroupLDAPGroupLink)
    assert first.provider == ldap_group_links_content[0]["provider"]
@pytest.mark.skip("GitLab API endpoint not implemented")
def test_refresh_group_export_status(group, resp_export):
    """Refreshing an export would report its status once the API exists."""
    export = group.exports.create()
    export.refresh()
    assert export.export_status == "finished"
def test_download_group_export(group, resp_export, binary_content):
    """A created export downloads as raw bytes."""
    export = group.exports.create()
    download = export.download()
    assert isinstance(download, bytes)
    assert download == binary_content
def test_import_group(gl, resp_create_import):
    """import_group() returns the raw accepted-response payload."""
    group_import = gl.groups.import_group("file", "api-group", "API Group")
    assert group_import["message"] == "202 Accepted"
@pytest.mark.skip("GitLab API endpoint not implemented")
def test_refresh_group_import_status(group, resp_groups):
    """Refreshing an import would report its status once the API exists."""
    group_import = group.imports.get()
    group_import.refresh()
    assert group_import.import_status == "finished"
def test_transfer_group(gl, resp_transfer_group):
    """transfer() posts the target namespace (body pinned by the fixture)."""
    group = gl.groups.get(1, lazy=True)
    group.transfer("test-namespace")
def test_list_group_push_rules(group, resp_list_push_rules_group):
    """The push-rule singleton is fetched with get()."""
    pr = group.pushrules.get()
    assert pr
    assert pr.deny_delete_tag
def test_create_group_push_rule(group, resp_create_push_rules_group):
    """A push rule can be created on the group."""
    group.pushrules.create({"deny_delete_tag": True})
def test_update_group_push_rule(
    group,
    resp_update_push_rules_group,
):
    """A fetched push rule can be modified and saved."""
    pr = group.pushrules.get()
    pr.deny_delete_tag = False
    pr.save()
def test_delete_group_push_rule(group, resp_delete_push_rules_group):
    """A fetched push rule can be deleted."""
    pr = group.pushrules.get()
    pr.delete()
def test_list_saml_group_links(group, resp_list_saml_group_links):
    """SAML links list as GroupSAMLGroupLink objects."""
    saml_group_links = group.saml_group_links.list()
    assert isinstance(saml_group_links[0], GroupSAMLGroupLink)
    assert saml_group_links[0].name == saml_group_links_content[0]["name"]
    assert (
        saml_group_links[0].access_level == saml_group_links_content[0]["access_level"]
    )
def test_get_saml_group_link(group, resp_get_saml_group_link):
    """A single SAML link is addressed by its name."""
    saml_group_link = group.saml_group_links.get("saml-group-1")
    assert isinstance(saml_group_link, GroupSAMLGroupLink)
    assert saml_group_link.name == saml_group_links_content[0]["name"]
    assert saml_group_link.access_level == saml_group_links_content[0]["access_level"]
def test_create_saml_group_link(group, resp_create_saml_group_link):
    """Creating a SAML link returns the server-side representation."""
    saml_group_link = group.saml_group_links.create(create_saml_group_link_request_body)
    assert isinstance(saml_group_link, GroupSAMLGroupLink)
    assert (
        saml_group_link.name == create_saml_group_link_request_body["saml_group_name"]
    )
    assert (
        saml_group_link.access_level
        == create_saml_group_link_request_body["access_level"]
    )
def test_delete_saml_group_link(group, resp_delete_saml_group_link):
    """A created SAML link can be deleted via the object."""
    saml_group_link = group.saml_group_links.create(create_saml_group_link_request_body)
    saml_group_link.delete()
def test_group_restore(group, resp_restore_group):
    """restore() posts to the group's restore endpoint."""
    group.restore()
def test_create_group_service_account(group, resp_create_group_service_account):
    """Creating a service account echoes name and username back."""
    service_account = group.service_accounts.create(
        {"name": "gitlab-service-account", "username": "gitlab-service-account"}
    )
    assert service_account.name == "gitlab-service-account"
    assert service_account.username == "gitlab-service-account"
| 14,876 | Python | .py | 411 | 28.481752 | 94 | 0.632805 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,217 | test_meta_mixins.py | python-gitlab_python-gitlab/tests/unit/mixins/test_meta_mixins.py | from gitlab.mixins import (
CreateMixin,
CRUDMixin,
DeleteMixin,
GetMixin,
ListMixin,
NoUpdateMixin,
RetrieveMixin,
UpdateMixin,
)
def test_retrieve_mixin():
    """RetrieveMixin provides read operations but no write operations."""

    class M(RetrieveMixin):
        pass

    obj = M()
    for present in ("list", "get"):
        assert hasattr(obj, present)
    for absent in ("create", "update", "delete"):
        assert not hasattr(obj, absent)
    assert isinstance(obj, ListMixin)
    assert isinstance(obj, GetMixin)


def test_crud_mixin():
    """CRUDMixin provides the full read/write operation set."""

    class M(CRUDMixin):
        pass

    obj = M()
    for present in ("get", "list", "create", "update", "delete"):
        assert hasattr(obj, present)
    for mixin in (ListMixin, GetMixin, CreateMixin, UpdateMixin, DeleteMixin):
        assert isinstance(obj, mixin)


def test_no_update_mixin():
    """NoUpdateMixin provides everything except update()."""

    class M(NoUpdateMixin):
        pass

    obj = M()
    for present in ("get", "list", "create", "delete"):
        assert hasattr(obj, present)
    assert not hasattr(obj, "update")
    for mixin in (ListMixin, GetMixin, CreateMixin, DeleteMixin):
        assert isinstance(obj, mixin)
    assert not isinstance(obj, UpdateMixin)
23,218 | test_object_mixins_attributes.py | python-gitlab_python-gitlab/tests/unit/mixins/test_object_mixins_attributes.py | from gitlab.mixins import (
AccessRequestMixin,
SetMixin,
SubscribableMixin,
TimeTrackingMixin,
TodoMixin,
UserAgentDetailMixin,
)
def test_access_request_mixin():
    """AccessRequestMixin adds approve()."""

    class TestClass(AccessRequestMixin):
        pass

    assert hasattr(TestClass(), "approve")


def test_subscribable_mixin():
    """SubscribableMixin adds subscribe()/unsubscribe()."""

    class TestClass(SubscribableMixin):
        pass

    obj = TestClass()
    for name in ("subscribe", "unsubscribe"):
        assert hasattr(obj, name)


def test_todo_mixin():
    """TodoMixin adds todo()."""

    class TestClass(TodoMixin):
        pass

    assert hasattr(TestClass(), "todo")


def test_time_tracking_mixin():
    """TimeTrackingMixin adds the full time-tracking API."""

    class TestClass(TimeTrackingMixin):
        pass

    obj = TestClass()
    for name in (
        "time_stats",
        "time_estimate",
        "reset_time_estimate",
        "add_spent_time",
        "reset_spent_time",
    ):
        assert hasattr(obj, name)


def test_set_mixin():
    """SetMixin adds set()."""

    class TestClass(SetMixin):
        pass

    assert hasattr(TestClass(), "set")


def test_user_agent_detail_mixin():
    """UserAgentDetailMixin adds user_agent_detail()."""

    class TestClass(UserAgentDetailMixin):
        pass

    assert hasattr(TestClass(), "user_agent_detail")
23,219 | test_mixin_methods.py | python-gitlab_python-gitlab/tests/unit/mixins/test_mixin_methods.py | from unittest.mock import mock_open, patch
import pytest
import requests
import responses
from gitlab import base, GitlabUploadError
from gitlab import types as gl_types
from gitlab.mixins import (
CreateMixin,
DeleteMixin,
GetMixin,
GetWithoutIdMixin,
ListMixin,
RefreshMixin,
SaveMixin,
SetMixin,
UpdateMethod,
UpdateMixin,
UploadMixin,
)
class FakeObject(base.RESTObject):
    # Minimal RESTObject used as the managed type in the mixin tests below.
    pass
class FakeManager(base.RESTManager):
    # Minimal manager bound to FakeObject; its requests target the /tests path.
    _path = "/tests"
    _obj_cls = FakeObject
@responses.activate
def test_get_mixin(gl):
    """get() performs one GET and returns a non-lazy, populated object."""

    class Manager(GetMixin, FakeManager):
        pass

    endpoint = "http://localhost/api/v4/tests/42"
    responses.add(
        method=responses.GET,
        url=endpoint,
        json={"id": 42, "foo": "bar"},
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )
    result = Manager(gl).get(42)
    assert isinstance(result, FakeObject)
    assert result.id == 42
    assert result.foo == "bar"
    assert result._lazy is False
    assert responses.assert_call_count(endpoint, 1) is True
def test_get_mixin_lazy(gl):
    """A lazy get() returns an id-only object without hitting the network."""
    class M(GetMixin, FakeManager):
        pass
    url = "http://localhost/api/v4/tests/42"
    mgr = M(gl)
    with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
        rsps.add(
            method=responses.GET,
            url=url,
            json={"id": 42, "foo": "bar"},
            status=200,
            match=[responses.matchers.query_param_matcher({})],
        )
        obj = mgr.get(42, lazy=True)
        assert isinstance(obj, FakeObject)
        assert not hasattr(obj, "foo")
        assert obj.id == 42
        assert obj._lazy is True
        # a `lazy` get does not make a network request
        assert not rsps.calls
def test_get_mixin_lazy_missing_attribute(gl):
    """Accessing a missing attribute on a lazy object raises a helpful error."""
    class FakeGetManager(GetMixin, FakeManager):
        pass
    manager = FakeGetManager(gl)
    obj = manager.get(1, lazy=True)
    assert obj.id == 1
    with pytest.raises(AttributeError) as exc:
        obj.missing_attribute
    # undo `textwrap.fill()`
    message = str(exc.value).replace("\n", " ")
    assert "'FakeObject' object has no attribute 'missing_attribute'" in message
    assert (
        "note that <class 'tests.unit.mixins.test_mixin_methods.FakeObject'> was "
        "created as a `lazy` object and was not initialized with any data."
    ) in message
@responses.activate
def test_head_mixin(gl):
    """head() returns the response headers as a case-insensitive mapping."""
    class M(GetMixin, FakeManager):
        pass
    url = "http://localhost/api/v4/tests/42"
    responses.add(
        method=responses.HEAD,
        url=url,
        headers={"X-GitLab-Header": "test"},
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )
    manager = M(gl)
    result = manager.head(42)
    assert isinstance(result, requests.structures.CaseInsensitiveDict)
    assert result["x-gitlab-header"] == "test"
@responses.activate
def test_refresh_mixin(gl):
    """refresh() re-fetches the object in place and returns None."""
    class TestClass(RefreshMixin, FakeObject):
        pass
    url = "http://localhost/api/v4/tests/42"
    responses.add(
        method=responses.GET,
        url=url,
        json={"id": 42, "foo": "bar"},
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )
    mgr = FakeManager(gl)
    obj = TestClass(mgr, {"id": 42})
    res = obj.refresh()
    assert res is None
    assert obj.foo == "bar"
    assert obj.id == 42
    assert responses.assert_call_count(url, 1) is True
@responses.activate
def test_get_without_id_mixin(gl):
    """get() on an id-less manager fetches the collection path directly."""
    class M(GetWithoutIdMixin, FakeManager):
        pass
    url = "http://localhost/api/v4/tests"
    responses.add(
        method=responses.GET,
        url=url,
        json={"foo": "bar"},
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )
    mgr = M(gl)
    obj = mgr.get()
    assert isinstance(obj, FakeObject)
    assert obj.foo == "bar"
    assert not hasattr(obj, "id")
    assert responses.assert_call_count(url, 1) is True
@responses.activate
def test_list_mixin(gl):
    """list() supports both iterator (RESTObjectList) and materialized modes."""
    class M(ListMixin, FakeManager):
        pass
    url = "http://localhost/api/v4/tests"
    headers = {
        "X-Page": "1",
        "X-Next-Page": "2",
        "X-Per-Page": "1",
        "X-Total-Pages": "2",
        "X-Total": "2",
        # NOTE(review): this Link header is malformed (no '>;' before rel) —
        # presumably deliberate so the client never follows a "next" page
        # against the single registered mock; confirm before "fixing" it.
        "Link": ("<http://localhost/api/v4/tests" ' rel="next"'),
    }
    responses.add(
        method=responses.GET,
        headers=headers,
        url=url,
        json=[{"id": 42, "foo": "bar"}, {"id": 43, "foo": "baz"}],
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )
    # test RESTObjectList
    mgr = M(gl)
    obj_list = mgr.list(iterator=True)
    assert isinstance(obj_list, base.RESTObjectList)
    assert obj_list.current_page == 1
    assert obj_list.prev_page is None
    assert obj_list.next_page == 2
    assert obj_list.per_page == 1
    assert obj_list.total == 2
    assert obj_list.total_pages == 2
    assert len(obj_list) == 2
    for obj in obj_list:
        assert isinstance(obj, FakeObject)
        assert obj.id in (42, 43)
    # test list()
    obj_list = mgr.list(get_all=True)
    assert isinstance(obj_list, list)
    assert obj_list[0].id == 42
    assert obj_list[1].id == 43
    assert isinstance(obj_list[0], FakeObject)
    assert len(obj_list) == 2
    assert responses.assert_call_count(url, 2) is True
@responses.activate
def test_list_mixin_with_attributes(gl):
    """ArrayAttribute-typed filters are serialized as repeated `key[]` params."""
    class M(ListMixin, FakeManager):
        _types = {"my_array": gl_types.ArrayAttribute}
    url = "http://localhost/api/v4/tests"
    responses.add(
        method=responses.GET,
        headers={},
        url=url,
        json=[],
        status=200,
        match=[responses.matchers.query_param_matcher({"my_array[]": ["1", "2", "3"]})],
    )
    mgr = M(gl)
    mgr.list(iterator=True, my_array=[1, 2, 3])
@responses.activate
def test_list_other_url(gl):
    """list(path=...) overrides the manager's default URL."""

    class M(ListMixin, FakeManager):
        pass

    url = "http://localhost/api/v4/others"
    responses.add(
        method=responses.GET,
        url=url,
        json=[{"id": 42, "foo": "bar"}],
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )

    mgr = M(gl)
    obj_list = mgr.list(path="/others", iterator=True)
    assert isinstance(obj_list, base.RESTObjectList)
    obj = obj_list.next()
    assert obj.id == 42
    assert obj.foo == "bar"
    # The mocked endpoint returned a single object, so the list is exhausted.
    with pytest.raises(StopIteration):
        obj_list.next()
def test_create_mixin_missing_attrs(gl):
    """validate_attrs() accepts a payload containing every required key and
    raises AttributeError naming the missing required key otherwise."""

    class M(CreateMixin, FakeManager):
        _create_attrs = gl_types.RequiredOptional(
            required=("foo",), optional=("bar", "baz")
        )

    manager = M(gl)
    # All required attributes present: no error is raised.
    manager._create_attrs.validate_attrs(data={"foo": "bar", "baz": "blah"})
    # Required "foo" missing: the error message must mention it.
    with pytest.raises(AttributeError) as error:
        manager._create_attrs.validate_attrs(data={"baz": "blah"})
    assert "foo" in str(error.value)
@responses.activate
def test_create_mixin(gl):
    """create() POSTs to the manager path and wraps the response in _obj_cls."""

    class M(CreateMixin, FakeManager):
        _create_attrs = gl_types.RequiredOptional(
            required=("foo",), optional=("bar", "baz")
        )
        _update_attrs = gl_types.RequiredOptional(required=("foo",), optional=("bam",))

    url = "http://localhost/api/v4/tests"
    responses.add(
        method=responses.POST,
        url=url,
        json={"id": 42, "foo": "bar"},
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )

    mgr = M(gl)
    obj = mgr.create({"foo": "bar"})
    assert isinstance(obj, FakeObject)
    assert obj.id == 42
    assert obj.foo == "bar"
    assert responses.assert_call_count(url, 1) is True
@responses.activate
def test_create_mixin_custom_path(gl):
    """create(path=...) overrides the manager's default URL."""

    class M(CreateMixin, FakeManager):
        _create_attrs = gl_types.RequiredOptional(
            required=("foo",), optional=("bar", "baz")
        )
        _update_attrs = gl_types.RequiredOptional(required=("foo",), optional=("bam",))

    url = "http://localhost/api/v4/others"
    responses.add(
        method=responses.POST,
        url=url,
        json={"id": 42, "foo": "bar"},
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )

    mgr = M(gl)
    obj = mgr.create({"foo": "bar"}, path="/others")
    assert isinstance(obj, FakeObject)
    assert obj.id == 42
    assert obj.foo == "bar"
    assert responses.assert_call_count(url, 1) is True
@responses.activate
def test_create_mixin_with_attributes(gl):
    """_types converters apply to the POST body: an ArrayAttribute is sent as
    a JSON list."""

    class M(CreateMixin, FakeManager):
        _types = {"my_array": gl_types.ArrayAttribute}

    url = "http://localhost/api/v4/tests"
    responses.add(
        method=responses.POST,
        headers={},
        url=url,
        json={},
        status=200,
        match=[responses.matchers.json_params_matcher({"my_array": [1, 2, 3]})],
    )

    mgr = M(gl)
    mgr.create({"my_array": [1, 2, 3]})
def test_update_mixin_missing_attrs(gl):
    """validate_attrs() on _update_attrs passes when every required key is
    present and raises AttributeError naming the missing key otherwise."""

    class M(UpdateMixin, FakeManager):
        _update_attrs = gl_types.RequiredOptional(
            required=("foo",), optional=("bar", "baz")
        )

    manager = M(gl)
    # Complete payload validates cleanly.
    manager._update_attrs.validate_attrs(data={"foo": "bar", "baz": "blah"})
    # Payload lacking required "foo" must fail and name the key.
    with pytest.raises(AttributeError) as error:
        manager._update_attrs.validate_attrs(data={"baz": "blah"})
    assert "foo" in str(error.value)
@responses.activate
def test_update_mixin(gl):
    """update(id, ...) PUTs to the object URL and returns the raw server dict."""

    class M(UpdateMixin, FakeManager):
        _create_attrs = gl_types.RequiredOptional(
            required=("foo",), optional=("bar", "baz")
        )
        _update_attrs = gl_types.RequiredOptional(required=("foo",), optional=("bam",))

    url = "http://localhost/api/v4/tests/42"
    responses.add(
        method=responses.PUT,
        url=url,
        json={"id": 42, "foo": "baz"},
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )

    mgr = M(gl)
    server_data = mgr.update(42, {"foo": "baz"})
    # update() returns the decoded payload, not a RESTObject.
    assert isinstance(server_data, dict)
    assert server_data["id"] == 42
    assert server_data["foo"] == "baz"
    assert responses.assert_call_count(url, 1) is True
@responses.activate
def test_update_mixin_uses_post(gl):
    """Setting _update_method = UpdateMethod.POST makes update() POST instead
    of PUT."""

    class M(UpdateMixin, FakeManager):
        _update_method = UpdateMethod.POST

    url = "http://localhost/api/v4/tests/1"
    responses.add(
        method=responses.POST,
        url=url,
        json={},
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )

    mgr = M(gl)
    mgr.update(1, {})
    assert responses.assert_call_count(url, 1) is True
@responses.activate
def test_update_mixin_no_id(gl):
    """update() without an id PUTs to the collection URL instead of an
    object URL."""

    class M(UpdateMixin, FakeManager):
        _create_attrs = gl_types.RequiredOptional(
            required=("foo",), optional=("bar", "baz")
        )
        _update_attrs = gl_types.RequiredOptional(required=("foo",), optional=("bam",))

    url = "http://localhost/api/v4/tests"
    responses.add(
        method=responses.PUT,
        url=url,
        json={"foo": "baz"},
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )

    mgr = M(gl)
    server_data = mgr.update(new_data={"foo": "baz"})
    assert isinstance(server_data, dict)
    assert server_data["foo"] == "baz"
    assert responses.assert_call_count(url, 1) is True
@responses.activate
def test_delete_mixin(gl):
    """delete(id) issues a single DELETE against the object URL."""

    class M(DeleteMixin, FakeManager):
        pass

    endpoint = "http://localhost/api/v4/tests/42"
    responses.add(
        method=responses.DELETE,
        url=endpoint,
        json="",
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )

    manager = M(gl)
    manager.delete(42)
    assert responses.assert_call_count(endpoint, 1) is True
@responses.activate
def test_save_mixin(gl):
    """save() PUTs the modified attributes and merges the server response back
    into _attrs, clearing _updated_attrs."""

    class M(UpdateMixin, FakeManager):
        pass

    class TestClass(SaveMixin, base.RESTObject):
        pass

    url = "http://localhost/api/v4/tests/42"
    responses.add(
        method=responses.PUT,
        url=url,
        json={"id": 42, "foo": "baz"},
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )

    mgr = M(gl)
    obj = TestClass(mgr, {"id": 42, "foo": "bar"})
    obj.foo = "baz"
    obj.save()
    assert obj._attrs["foo"] == "baz"
    assert obj._updated_attrs == {}
    assert responses.assert_call_count(url, 1) is True
@responses.activate
def test_save_mixin_without_new_data(gl):
    """save() is a no-op (no HTTP call) when no attribute was modified."""

    class M(UpdateMixin, FakeManager):
        pass

    class TestClass(SaveMixin, base.RESTObject):
        pass

    url = "http://localhost/api/v4/tests/1"
    # Registered but expected to stay uncalled (call count asserted as 0).
    responses.add(method=responses.PUT, url=url)

    mgr = M(gl)
    obj = TestClass(mgr, {"id": 1, "foo": "bar"})
    obj.save()
    assert obj._attrs["foo"] == "bar"
    assert responses.assert_call_count(url, 0) is True
@responses.activate
def test_set_mixin(gl):
    """set() PUTs a key/value pair and returns it wrapped in _obj_cls."""

    class M(SetMixin, FakeManager):
        pass

    url = "http://localhost/api/v4/tests/foo"
    responses.add(
        method=responses.PUT,
        url=url,
        json={"key": "foo", "value": "bar"},
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )

    mgr = M(gl)
    obj = mgr.set("foo", "bar")
    assert isinstance(obj, FakeObject)
    assert obj.key == "foo"
    assert obj.value == "bar"
    assert responses.assert_call_count(url, 1) is True
@responses.activate
def test_upload_mixin_with_filepath_and_filedata(gl):
    """upload() rejects passing both file contents and a file path."""

    class TestClass(UploadMixin, FakeObject):
        _upload_path = "/tests/{id}/uploads"

    url = "http://localhost/api/v4/tests/42/uploads"
    # Registered but never reached: the argument check fails first.
    responses.add(
        method=responses.POST,
        url=url,
        json={"id": 42, "file_name": "test.txt", "file_content": "testing contents"},
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )

    mgr = FakeManager(gl)
    obj = TestClass(mgr, {"id": 42})
    with pytest.raises(
        GitlabUploadError, match="File contents and file path specified"
    ):
        obj.upload("test.txt", "testing contents", "/home/test.txt")
@responses.activate
def test_upload_mixin_without_filepath_nor_filedata(gl):
    """upload() requires either file contents or a file path."""

    class TestClass(UploadMixin, FakeObject):
        _upload_path = "/tests/{id}/uploads"

    url = "http://localhost/api/v4/tests/42/uploads"
    # Registered but never reached: the argument check fails first.
    responses.add(
        method=responses.POST,
        url=url,
        json={"id": 42, "file_name": "test.txt", "file_content": "testing contents"},
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )

    mgr = FakeManager(gl)
    obj = TestClass(mgr, {"id": 42})
    with pytest.raises(GitlabUploadError, match="No file contents or path specified"):
        obj.upload("test.txt")
@responses.activate
def test_upload_mixin_with_filedata(gl):
    """upload() with raw contents POSTs to the interpolated _upload_path and
    returns the server's dict."""

    class TestClass(UploadMixin, FakeObject):
        _upload_path = "/tests/{id}/uploads"

    url = "http://localhost/api/v4/tests/42/uploads"
    responses.add(
        method=responses.POST,
        url=url,
        json={"id": 42, "file_name": "test.txt", "file_content": "testing contents"},
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )

    mgr = FakeManager(gl)
    obj = TestClass(mgr, {"id": 42})
    res_only_data = obj.upload("test.txt", "testing contents")
    # {id} in _upload_path is filled from the object's id.
    assert obj._get_upload_path() == "/tests/42/uploads"
    assert isinstance(res_only_data, dict)
    assert res_only_data["file_name"] == "test.txt"
    assert res_only_data["file_content"] == "testing contents"
    assert responses.assert_call_count(url, 1) is True
@responses.activate
def test_upload_mixin_with_filepath(gl):
    """upload() with a path reads the file from disk (mocked open) before
    POSTing to the interpolated _upload_path."""

    class TestClass(UploadMixin, FakeObject):
        _upload_path = "/tests/{id}/uploads"

    url = "http://localhost/api/v4/tests/42/uploads"
    responses.add(
        method=responses.POST,
        url=url,
        json={"id": 42, "file_name": "test.txt", "file_content": "testing contents"},
        status=200,
        match=[responses.matchers.query_param_matcher({})],
    )

    mgr = FakeManager(gl)
    obj = TestClass(mgr, {"id": 42})
    with patch("builtins.open", mock_open(read_data="raw\nfile\ndata")):
        res_only_path = obj.upload("test.txt", None, "/filepath")
    assert obj._get_upload_path() == "/tests/42/uploads"
    assert isinstance(res_only_path, dict)
    assert res_only_path["file_name"] == "test.txt"
    assert res_only_path["file_content"] == "testing contents"
    assert responses.assert_call_count(url, 1) is True
| 16,409 | Python | .py | 488 | 27.358607 | 88 | 0.628486 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,220 | test_requests_backend.py | python-gitlab_python-gitlab/tests/unit/_backends/test_requests_backend.py | import pytest
from requests_toolbelt.multipart.encoder import MultipartEncoder # type: ignore
from gitlab._backends import requests_backend
class TestSendData:
def test_senddata_json(self) -> None:
result = requests_backend.SendData(
json={"a": 1}, content_type="application/json"
)
assert result.data is None
def test_senddata_data(self) -> None:
result = requests_backend.SendData(
data={"b": 2}, content_type="application/octet-stream"
)
assert result.json is None
def test_senddata_json_and_data(self) -> None:
with pytest.raises(ValueError, match=r"json={'a': 1} data={'b': 2}"):
requests_backend.SendData(
json={"a": 1}, data={"b": 2}, content_type="application/json"
)
class TestRequestsBackend:
@pytest.mark.parametrize(
"test_data,expected",
[
(False, "0"),
(True, "1"),
("12", "12"),
(12, "12"),
(12.0, "12.0"),
(complex(-2, 7), "(-2+7j)"),
],
)
def test_prepare_send_data_non_strings(self, test_data, expected) -> None:
assert isinstance(expected, str)
files = {"file": ("file.tar.gz", "12345", "application/octet-stream")}
post_data = {"test_data": test_data}
result = requests_backend.RequestsBackend.prepare_send_data(
files=files, post_data=post_data, raw=False
)
assert result.json is None
assert result.content_type.startswith("multipart/form-data")
assert isinstance(result.data, MultipartEncoder)
assert isinstance(result.data.fields["test_data"], str)
assert result.data.fields["test_data"] == expected
| 1,765 | Python | .py | 43 | 32.139535 | 80 | 0.6021 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,221 | test_rest_object.py | python-gitlab_python-gitlab/tests/unit/base/test_rest_object.py | import pickle
import pytest
import gitlab
from gitlab import base
from tests.unit import helpers
from tests.unit.helpers import FakeManager # noqa: F401, needed for _create_managers
def test_instantiate(gl, fake_manager):
    """A new RESTObject copies attrs, starts with no updates, and links back
    to its manager and Gitlab instance."""
    attrs = {"foo": "bar"}
    obj = helpers.FakeObject(fake_manager, attrs.copy())

    assert attrs == obj._attrs
    assert {} == obj._updated_attrs
    assert obj._create_managers() is None
    assert fake_manager == obj.manager
    assert gl == obj.manager.gitlab
    assert str(obj) == f"{type(obj)} => {attrs}"
def test_instantiate_non_dict(gl, fake_manager):
    """A non-dict attrs payload raises GitlabParsingError."""
    with pytest.raises(gitlab.exceptions.GitlabParsingError):
        helpers.FakeObject(fake_manager, ["a", "list", "fails"])
def test_missing_attribute_does_not_raise_custom(gl, fake_manager):
    """Ensure a missing attribute does not raise our custom error message
    if the RESTObject was not created from a list"""
    obj = helpers.FakeObject(manager=fake_manager, attrs={"foo": "bar"})
    with pytest.raises(AttributeError) as excinfo:
        obj.missing_attribute
    exc_str = str(excinfo.value)
    assert "missing_attribute" in exc_str
    assert "was created via a list()" not in exc_str
    assert base._URL_ATTRIBUTE_ERROR not in exc_str
def test_missing_attribute_from_list_raises_custom(gl, fake_manager):
    """Ensure a missing attribute raises our custom error message if the
    RESTObject was created from a list"""
    obj = helpers.FakeObject(
        manager=fake_manager, attrs={"foo": "bar"}, created_from_list=True
    )
    with pytest.raises(AttributeError) as excinfo:
        obj.missing_attribute
    exc_str = str(excinfo.value)
    assert "missing_attribute" in exc_str
    assert "was created via a list()" in exc_str
    assert base._URL_ATTRIBUTE_ERROR in exc_str
def test_picklability(fake_manager):
    """A RESTObject survives a pickle round-trip, keeping its _module, and
    the unpickled copy is itself picklable."""
    obj = helpers.FakeObject(fake_manager, {"foo": "bar"})
    original_obj_module = obj._module
    pickled = pickle.dumps(obj)
    unpickled = pickle.loads(pickled)
    assert isinstance(unpickled, helpers.FakeObject)
    assert hasattr(unpickled, "_module")
    assert unpickled._module == original_obj_module
    pickle.dumps(unpickled)
def test_attrs(fake_manager):
    """Server attrs are readable; local writes land in _updated_attrs and
    leave _attrs untouched."""
    obj = helpers.FakeObject(fake_manager, {"foo": "bar"})

    assert "bar" == obj.foo
    with pytest.raises(AttributeError):
        getattr(obj, "bar")

    obj.bar = "baz"
    assert "baz" == obj.bar
    assert {"foo": "bar"} == obj._attrs
    assert {"bar": "baz"} == obj._updated_attrs
def test_get_id(fake_manager):
    """get_id() mirrors the ``id`` attribute, including None."""
    fake = helpers.FakeObject(fake_manager, {"foo": "bar"})

    fake.id = 42
    assert fake.get_id() == 42

    fake.id = None
    assert fake.get_id() is None
def test_encoded_id(fake_manager):
    """encoded_id URL-encodes string ids (e.g. "/" -> %2F) but passes ints
    and None through, and never double-encodes."""
    obj = helpers.FakeObject(fake_manager, {"foo": "bar"})
    obj.id = 42
    assert 42 == obj.encoded_id

    obj.id = None
    assert obj.encoded_id is None

    obj.id = "plain"
    assert "plain" == obj.encoded_id

    obj.id = "a/path"
    assert "a%2Fpath" == obj.encoded_id

    # If you assign it again it does not double URL-encode
    obj.id = obj.encoded_id
    assert "a%2Fpath" == obj.encoded_id
def test_custom_id_attr(fake_manager):
    """get_id() honors a class-level custom _id_attr (OtherFakeObject)."""
    obj = helpers.OtherFakeObject(fake_manager, {"foo": "bar"})
    assert "bar" == obj.get_id()
def test_update_attrs(fake_manager):
    """_update_attrs() replaces _attrs wholesale and clears pending updates."""
    obj = helpers.FakeObject(fake_manager, {"foo": "bar"})
    obj.bar = "baz"
    obj._update_attrs({"foo": "foo", "bar": "bar"})
    assert {"foo": "foo", "bar": "bar"} == obj._attrs
    assert {} == obj._updated_attrs
def test_update_attrs_deleted(fake_manager):
    """Keys absent from the _update_attrs() payload are dropped from _attrs."""
    obj = helpers.FakeObject(fake_manager, {"foo": "foo", "bar": "bar"})
    obj.bar = "baz"
    obj._update_attrs({"foo": "foo"})
    assert {"foo": "foo"} == obj._attrs
    assert {} == obj._updated_attrs
def test_dir_unique(fake_manager):
    """dir() must not report duplicates even when an attr shadows a member."""
    obj = helpers.FakeObject(fake_manager, {"manager": "foo"})
    assert len(dir(obj)) == len(set(dir(obj)))
def test_create_managers(gl, fake_manager):
    """Manager-typed class annotations are instantiated as child managers
    linked to the parent object and the Gitlab instance."""

    class ObjectWithManager(helpers.FakeObject):
        fakes: "FakeManager"

    obj = ObjectWithManager(fake_manager, {"foo": "bar"})
    obj.id = 42
    assert isinstance(obj.fakes, helpers.FakeManager)
    assert obj.fakes.gitlab == gl
    assert obj.fakes._parent == obj
def test_equality(fake_manager):
    """Objects with the same id compare equal and hash equal, regardless of
    their other attributes."""
    obj1 = helpers.FakeObject(fake_manager, {"id": "foo"})
    obj2 = helpers.FakeObject(fake_manager, {"id": "foo", "other_attr": "bar"})
    assert obj1 == obj2
    assert len(set((obj1, obj2))) == 1
def test_equality_custom_id(fake_manager):
    """Equality follows a class's custom _id_attr (OtherFakeObject)."""
    obj1 = helpers.OtherFakeObject(fake_manager, {"foo": "bar"})
    obj2 = helpers.OtherFakeObject(fake_manager, {"foo": "bar", "other_attr": "baz"})
    assert obj1 == obj2
def test_equality_no_id(fake_manager):
    """Without ids, objects with different attrs do not compare equal.

    Deliberately spelled ``not obj1 == obj2`` to exercise __eq__ itself."""
    obj1 = helpers.FakeObject(fake_manager, {"attr1": "foo"})
    obj2 = helpers.FakeObject(fake_manager, {"attr1": "bar"})
    assert not obj1 == obj2
def test_inequality(fake_manager):
    """Objects with different ids compare unequal via __ne__."""
    obj1 = helpers.FakeObject(fake_manager, {"id": "foo"})
    obj2 = helpers.FakeObject(fake_manager, {"id": "bar"})
    assert obj1 != obj2
def test_inequality_no_id(fake_manager):
    """Without ids, differing objects are unequal and hash distinctly."""
    obj1 = helpers.FakeObject(fake_manager, {"attr1": "foo"})
    obj2 = helpers.FakeObject(fake_manager, {"attr1": "bar"})
    assert obj1 != obj2
    assert len(set((obj1, obj2))) == 2
def test_equality_with_other_objects(fake_manager):
    """Comparing against a non-RESTObject (None) is False, not an error."""
    obj1 = helpers.FakeObject(fake_manager, {"id": "foo"})
    obj2 = None
    assert not obj1 == obj2
def test_dunder_str(fake_manager):
    """str() shows the concrete class and the attrs dict."""
    fake_object = helpers.FakeObject(fake_manager, {"attr1": "foo"})
    assert str(fake_object) == (
        "<class 'tests.unit.helpers.FakeObject'> => {'attr1': 'foo'}"
    )
@pytest.mark.parametrize(
    "id_attr,repr_attr, attrs, expected_repr",
    [
        ("id", None, {"id": 1}, "<ReprObject id:1>"),
        (
            "id",
            "name",
            {"id": 1, "name": "fake"},
            "<ReprObject id:1 name:fake>",
        ),
        ("name", "name", {"name": "fake"}, "<ReprObject name:fake>"),
        ("id", "name", {"id": 1}, "<ReprObject id:1>"),
        (None, None, {}, "<ReprObject>"),
        (None, "name", {"name": "fake"}, "<ReprObject name:fake>"),
        (None, "name", {}, "<ReprObject>"),
    ],
    ids=[
        "GetMixin with id",
        "GetMixin with id and _repr_attr",
        "GetMixin with _repr_attr matching _id_attr",
        "GetMixin with _repr_attr without _repr_attr value defined",
        "GetWithoutIDMixin",
        "GetWithoutIDMixin with _repr_attr",
        "GetWithoutIDMixin with _repr_attr without _repr_attr value defined",
    ],
)
def test_dunder_repr(fake_manager, id_attr, repr_attr, attrs, expected_repr):
    """repr() combines _id_attr and _repr_attr, skipping missing values and
    avoiding duplication when they name the same attribute."""

    class ReprObject(helpers.FakeObject):
        _id_attr = id_attr
        _repr_attr = repr_attr

    fake_object = ReprObject(fake_manager, attrs)
    assert repr(fake_object) == expected_repr
def test_pformat(fake_manager):
    """pformat() returns a pretty-printed, line-wrapped string of the attrs."""
    fake_object = helpers.FakeObject(
        fake_manager, {"attr1": "foo" * 10, "ham": "eggs" * 15}
    )
    assert fake_object.pformat() == (
        "<class 'tests.unit.helpers.FakeObject'> => "
        "\n{'attr1': 'foofoofoofoofoofoofoofoofoofoo',\n"
        " 'ham': 'eggseggseggseggseggseggseggseggseggseggseggseggseggseggseggs'}"
    )
def test_pprint(capfd, fake_manager):
    """pprint() writes the pretty form to stdout (not stderr) and returns
    None."""
    fake_object = helpers.FakeObject(
        fake_manager, {"attr1": "foo" * 10, "ham": "eggs" * 15}
    )
    result = fake_object.pprint()
    assert result is None
    stdout, stderr = capfd.readouterr()
    assert stdout == (
        "<class 'tests.unit.helpers.FakeObject'> => "
        "\n{'attr1': 'foofoofoofoofoofoofoofoofoofoo',\n"
        " 'ham': 'eggseggseggseggseggseggseggseggseggseggseggseggseggseggseggs'}\n"
    )
    assert stderr == ""
def test_repr(fake_manager):
    """repr() omits the id part when _id_attr is None.

    NOTE(review): this mutates the shared FakeObject._id_attr class attribute
    and does not restore it -- test-ordering hazard; verify isolation."""
    attrs = {"attr1": "foo"}
    obj = helpers.FakeObject(fake_manager, attrs)
    assert repr(obj) == "<FakeObject id:None>"

    helpers.FakeObject._id_attr = None
    assert repr(obj) == "<FakeObject>"
def test_attributes_get(fake_object):
    """attributes exposes the server-provided attrs as a dict."""
    assert fake_object.attr1 == "foo"
    result = fake_object.attributes
    assert result == {"attr1": "foo", "alist": [1, 2, 3]}
def test_attributes_shows_updates(fake_object):
    """attributes reflects both updated and newly added attributes."""
    # Updated attribute value is reflected in `attributes`
    fake_object.attr1 = "hello"
    assert fake_object.attributes == {"attr1": "hello", "alist": [1, 2, 3]}
    assert fake_object.attr1 == "hello"
    # New attribute is in `attributes`
    fake_object.new_attrib = "spam"
    assert fake_object.attributes == {
        "attr1": "hello",
        "new_attrib": "spam",
        "alist": [1, 2, 3],
    }
def test_attributes_is_copy(fake_object):
    """attributes returns a deep copy: mutating it leaves the object intact."""
    # Modifying the dictionary does not cause modifications to the object
    result = fake_object.attributes
    result["alist"].append(10)
    assert result == {"attr1": "foo", "alist": [1, 2, 3, 10]}
    assert fake_object.attributes == {"attr1": "foo", "alist": [1, 2, 3]}
def test_attributes_has_parent_attrs(fake_object_with_parent):
    """attributes also includes the parent's attributes (here test_id)."""
    assert fake_object_with_parent.attr1 == "foo"
    result = fake_object_with_parent.attributes
    assert result == {"attr1": "foo", "alist": [1, 2, 3], "test_id": "42"}
def test_to_json(fake_object):
    """to_json() serializes the object's attributes to a JSON string."""
    assert fake_object.attr1 == "foo"
    result = fake_object.to_json()
    assert result == '{"attr1": "foo", "alist": [1, 2, 3]}'
def test_asdict(fake_object):
    """asdict() returns the object's attributes as a plain dict."""
    assert fake_object.attr1 == "foo"
    result = fake_object.asdict()
    assert result == {"attr1": "foo", "alist": [1, 2, 3]}
def test_asdict_no_parent_attrs(fake_object_with_parent):
    """asdict() omits parent attrs by default; with_parent_attrs=True adds
    them."""
    assert fake_object_with_parent.attr1 == "foo"
    result = fake_object_with_parent.asdict()
    assert result == {"attr1": "foo", "alist": [1, 2, 3]}
    assert "test_id" not in fake_object_with_parent.asdict()
    assert "test_id" not in fake_object_with_parent.asdict(with_parent_attrs=False)
    assert "test_id" in fake_object_with_parent.asdict(with_parent_attrs=True)
def test_asdict_modify_dict_does_not_change_object(fake_object):
    """asdict() returns a deep copy: edits to it never reach the object."""
    result = fake_object.asdict()
    # Demonstrate modifying the dictionary does not modify the object
    result["attr1"] = "testing"
    result["alist"].append(4)
    assert result == {"attr1": "testing", "alist": [1, 2, 3, 4]}
    assert fake_object.attr1 == "foo"
    assert fake_object.alist == [1, 2, 3]
def test_asdict_modify_dict_does_not_change_object2(fake_object):
    """Deep-copy also holds for lists stored via updated attributes."""
    # Modify attribute and then ensure modifying a list in the returned dict won't
    # modify the list in the object.
    fake_object.attr1 = [9, 7, 8]
    assert fake_object.asdict() == {
        "attr1": [9, 7, 8],
        "alist": [1, 2, 3],
    }
    result = fake_object.asdict()
    result["attr1"].append(1)
    assert fake_object.asdict() == {
        "attr1": [9, 7, 8],
        "alist": [1, 2, 3],
    }
def test_asdict_modify_object(fake_object):
    """asdict() reflects attribute updates made on the object."""
    # asdict() returns the updated value
    fake_object.attr1 = "spam"
    assert fake_object.asdict() == {"attr1": "spam", "alist": [1, 2, 3]}
| 11,027 | Python | .py | 261 | 36.858238 | 85 | 0.652097 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,222 | test_rest_manager.py | python-gitlab_python-gitlab/tests/unit/base/test_rest_manager.py | from gitlab import base
from tests.unit import helpers
def test_computed_path_simple(gl):
    """_computed_path is just _path when the manager has no parent."""

    class MGR(base.RESTManager):
        _path = "/tests"
        _obj_cls = object

    mgr = MGR(gl)
    assert mgr._computed_path == "/tests"
def test_computed_path_with_parent(gl, fake_manager):
    """_computed_path interpolates _from_parent_attrs values into _path."""

    class MGR(base.RESTManager):
        _path = "/tests/{test_id}/cases"
        _obj_cls = object
        _from_parent_attrs = {"test_id": "id"}

    mgr = MGR(gl, parent=helpers.FakeParent(manager=fake_manager, attrs={}))
    assert mgr._computed_path == "/tests/42/cases"
def test_path_property(gl):
    """The public ``path`` property exposes the computed path."""

    class MGR(base.RESTManager):
        _path = "/tests"
        _obj_cls = object

    mgr = MGR(gl)
    assert mgr.path == "/tests"
23,223 | test_imports.py | python-gitlab_python-gitlab/tests/unit/meta/test_imports.py | """
Ensure objects defined in gitlab.v4.objects are imported in
`gitlab/v4/objects/__init__.py`
"""
import pkgutil
from typing import Set
import gitlab.exceptions
import gitlab.v4.objects
def test_all_exceptions_imports_are_exported() -> None:
    """__all__ in gitlab.exceptions must list exactly the public *Error
    names, sorted alphabetically."""
    assert gitlab.exceptions.__all__ == sorted(
        [
            name
            for name in dir(gitlab.exceptions)
            if name.endswith("Error") and not name.startswith("_")
        ]
    )
def test_all_v4_objects_are_imported() -> None:
    """Every module under gitlab/v4/objects/ must be star-imported from
    gitlab/v4/objects/__init__.py; the failure message lists what's missing."""
    assert len(gitlab.v4.objects.__path__) == 1

    # Collect the "from .<module> import ..." lines already in __init__.py.
    init_files: Set[str] = set()
    with open(gitlab.v4.objects.__file__, "r", encoding="utf-8") as in_file:
        for line in in_file.readlines():
            if line.startswith("from ."):
                init_files.add(line.rstrip())

    # Expected star-import line for each module present in the package dir.
    object_files = set()
    for module in pkgutil.iter_modules(gitlab.v4.objects.__path__):
        object_files.add(f"from .{module.name} import *")

    missing_in_init = object_files - init_files
    error_message = (
        f"\nThe file {gitlab.v4.objects.__file__!r} is missing the following imports:"
    )
    for missing in sorted(missing_in_init):
        error_message += f"\n {missing}"
    assert not missing_in_init, error_message
| 1,243 | Python | .py | 33 | 31.484848 | 86 | 0.642202 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,224 | test_ensure_type_hints.py | python-gitlab_python-gitlab/tests/unit/meta/test_ensure_type_hints.py | """
Ensure type-hints are setup correctly and detect if missing functions.
Original notes by John L. Villalovos
"""
import dataclasses
import functools
import inspect
from typing import Optional, Type
import pytest
import gitlab.mixins
import gitlab.v4.objects
@functools.total_ordering
@dataclasses.dataclass(frozen=True)
class ClassInfo:
    """Sortable, hashable (name, type) record for a discovered manager class.

    Ordering/equality compare (defining module, class name) so parametrized
    test IDs are stable across runs."""

    name: str
    type: Type  # type: ignore[type-arg]

    def __lt__(self, other: object) -> bool:
        if not isinstance(other, ClassInfo):
            return NotImplemented
        return (self.type.__module__, self.name) < (other.type.__module__, other.name)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, ClassInfo):
            return NotImplemented
        return (self.type.__module__, self.name) == (other.type.__module__, other.name)
def pytest_generate_tests(metafunc: pytest.Metafunc) -> None:
    """Find all of the classes in gitlab.v4.objects and pass them to our test
    function"""
    class_info_set = set()
    for _, module_value in inspect.getmembers(gitlab.v4.objects):
        if not inspect.ismodule(module_value):
            # We only care about the modules
            continue
        # Iterate through all the classes in our module
        for class_name, class_value in inspect.getmembers(module_value):
            if not inspect.isclass(class_value):
                continue
            module_name = class_value.__module__
            # Ignore imported classes from gitlab.base
            if module_name == "gitlab.base":
                continue
            if not class_name.endswith("Manager"):
                continue
            class_info_set.add(ClassInfo(name=class_name, type=class_value))

    # Sorted so test ordering (and IDs) is deterministic.
    metafunc.parametrize("class_info", sorted(class_info_set))
GET_ID_METHOD_TEMPLATE = """
def get(
self, id: Union[str, int], lazy: bool = False, **kwargs: Any
) -> {obj_cls.__name__}:
return cast({obj_cls.__name__}, super().get(id=id, lazy=lazy, **kwargs))
You may also need to add the following imports:
from typing import Any, cast, Union"
"""
GET_WITHOUT_ID_METHOD_TEMPLATE = """
def get(self, **kwargs: Any) -> {obj_cls.__name__}:
return cast({obj_cls.__name__}, super().get(**kwargs))
You may also need to add the following imports:
from typing import Any, cast"
"""
class TestTypeHints:
    """Verify manager classes override get() with a concrete return type."""

    def test_check_get_function_type_hints(self, class_info: ClassInfo) -> None:
        """Ensure classes derived from GetMixin have defined a 'get()' method with
        correct type-hints.
        """
        self.get_check_helper(
            base_type=gitlab.mixins.GetMixin,
            class_info=class_info,
            method_template=GET_ID_METHOD_TEMPLATE,
            optional_return=False,
        )

    def test_check_get_without_id_function_type_hints(
        self, class_info: ClassInfo
    ) -> None:
        """Ensure classes derived from GetMixin have defined a 'get()' method with
        correct type-hints.
        """
        self.get_check_helper(
            base_type=gitlab.mixins.GetWithoutIdMixin,
            class_info=class_info,
            method_template=GET_WITHOUT_ID_METHOD_TEMPLATE,
            optional_return=False,
        )

    def get_check_helper(
        self,
        *,
        base_type: Type,  # type: ignore[type-arg]
        class_info: ClassInfo,
        method_template: str,
        optional_return: bool,
    ) -> None:
        """Shared check: the manager's get() return annotation must equal its
        _obj_cls (optionally wrapped in Optional)."""
        if not class_info.name.endswith("Manager"):
            return
        mro = class_info.type.mro()
        # The class needs to be derived from GetMixin or we ignore it
        if base_type not in mro:
            return

        obj_cls = class_info.type._obj_cls
        signature = inspect.signature(class_info.type.get)
        filename = inspect.getfile(class_info.type)

        # Failure message includes a copy-pasteable override template.
        fail_message = (
            f"class definition for {class_info.name!r} in file {filename!r} "
            f"must have defined a 'get' method with a return annotation of "
            f"{obj_cls} but found {signature.return_annotation}\n"
            f"Recommend adding the following method:\n"
        )
        fail_message += method_template.format(obj_cls=obj_cls)
        check_type = obj_cls
        if optional_return:
            check_type = Optional[obj_cls]
        assert check_type == signature.return_annotation, fail_message
| 4,353 | Python | .py | 109 | 32.155963 | 87 | 0.63908 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,225 | test_mro.py | python-gitlab_python-gitlab/tests/unit/meta/test_mro.py | """
Ensure objects defined in gitlab.v4.objects have REST* as last item in class
definition
Original notes by John L. Villalovos
An example of an incorrect definition:
class ProjectPipeline(RESTObject, RefreshMixin, ObjectDeleteMixin):
^^^^^^^^^^ This should be at the end.
Correct way would be:
class ProjectPipeline(RefreshMixin, ObjectDeleteMixin, RESTObject):
Correctly at the end ^^^^^^^^^^
Why this is an issue:
When we do type-checking for gitlab/mixins.py we make RESTObject or
RESTManager the base class for the mixins
Here is how our classes look when type-checking:
class RESTObject:
def __init__(self, manager: "RESTManager", attrs: Dict[str, Any]) -> None:
...
class Mixin(RESTObject):
...
# Wrong ordering here
class Wrongv4Object(RESTObject, RefreshMixin):
...
If we actually ran this in Python we would get the following error:
class Wrongv4Object(RESTObject, Mixin):
TypeError: Cannot create a consistent method resolution
order (MRO) for bases RESTObject, Mixin
When we are type-checking it fails to understand the class Wrongv4Object
and thus we can't type check it correctly.
Almost all classes in gitlab/v4/objects/*py were already correct before this
check was added.
"""
import inspect
import pytest
import gitlab.v4.objects
def test_show_issue() -> None:
    """Demonstrate the TypeError raised by an inconsistent MRO."""

    class RESTObject:
        def __init__(self, manager: str, attrs: int) -> None: ...

    class Mixin(RESTObject): ...

    with pytest.raises(TypeError) as exc_info:
        # Listing the base class before the mixin makes the MRO unresolvable.
        class Wrongv4Object(RESTObject, Mixin):  # type: ignore
            ...

    # CPython reports: "Cannot create a consistent method resolution
    # order (MRO) for bases RESTObject, Mixin" -- check for the "MRO" marker.
    assert "MRO" in exc_info.exconly()

    # The correct ordering (mixin first, base last) raises nothing.
    class Correctv4Object(Mixin, RESTObject): ...
def test_mros() -> None:
    """Ensure objects defined in gitlab.v4.objects have REST* as last item in
    class definition.

    We do this as we need to ensure the MRO (Method Resolution Order) is
    correct.
    """
    failed_messages = []
    for _, module_value in inspect.getmembers(gitlab.v4.objects):
        if not inspect.ismodule(module_value):
            # We only care about the modules
            continue
        # Iterate through all the classes in our module
        for class_name, class_value in inspect.getmembers(module_value):
            if not inspect.isclass(class_value):
                continue
            # Ignore imported classes from gitlab.base
            if class_value.__module__ == "gitlab.base":
                continue
            mro = class_value.mro()

            # We only check classes which have a 'gitlab.base' class in their
            # MRO; remember the (last such) base class name for the message.
            base_classname = None
            for obj in mro:
                if obj.__module__ == "gitlab.base":
                    base_classname = obj.__name__
            if base_classname is not None:
                filename = inspect.getfile(class_value)
                # NOTE(jlvillal): The very last item 'mro[-1]' is always going
                # to be 'object'. That is why we are checking 'mro[-2]'.
                if mro[-2].__module__ != "gitlab.base":
                    failed_messages.append(
                        f"class definition for {class_name!r} in file {filename!r} "
                        f"must have {base_classname!r} as the last class in the "
                        f"class definition"
                    )
    failed_msg = "\n".join(failed_messages)
    assert not failed_messages, failed_msg
| 3,996 | Python | .py | 89 | 35.089888 | 88 | 0.622549 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,226 | test_dists.py | python-gitlab_python-gitlab/tests/smoke/test_dists.py | import subprocess
import sys
import tarfile
import zipfile
from pathlib import Path
import pytest
from gitlab._version import __title__, __version__
DOCS_DIR = "docs"
TEST_DIR = "tests"
DIST_NORMALIZED_TITLE = f"{__title__.replace('-', '_')}-{__version__}"
SDIST_FILE = f"{DIST_NORMALIZED_TITLE}.tar.gz"
WHEEL_FILE = f"{DIST_NORMALIZED_TITLE}-py{sys.version_info.major}-none-any.whl"
PY_TYPED = "gitlab/py.typed"
@pytest.fixture(scope="session")
def build(tmp_path_factory: pytest.TempPathFactory):
    """Build the sdist and wheel into a temp directory, once per session."""
    temp_dir = tmp_path_factory.mktemp("build")
    subprocess.run([sys.executable, "-m", "build", "--outdir", temp_dir], check=True)
    return temp_dir
def test_sdist_includes_correct_files(build: Path) -> None:
    """The sdist must ship docs, tests, README and the py.typed marker."""
    sdist = tarfile.open(build / SDIST_FILE, "r:gz")
    docs_dir = sdist.getmember(f"{DIST_NORMALIZED_TITLE}/{DOCS_DIR}")
    test_dir = sdist.getmember(f"{DIST_NORMALIZED_TITLE}/{TEST_DIR}")
    readme = sdist.getmember(f"{DIST_NORMALIZED_TITLE}/README.rst")
    py_typed = sdist.getmember(f"{DIST_NORMALIZED_TITLE}/{PY_TYPED}")

    assert docs_dir.isdir()
    assert test_dir.isdir()
    assert py_typed.isfile()
    assert readme.isfile()
def test_wheel_includes_correct_files(build: Path) -> None:
    """Check the wheel ships the py.typed marker (PEP 561).

    Fix: close the ZipFile via a context manager instead of leaking the handle.
    """
    with zipfile.ZipFile(build / WHEEL_FILE) as wheel:
        assert PY_TYPED in wheel.namelist()
def test_wheel_excludes_docs_and_tests(build: Path) -> None:
    """Check docs and tests are not packaged into the wheel.

    Fix: close the ZipFile via a context manager instead of leaking the handle.
    """
    with zipfile.ZipFile(build / WHEEL_FILE) as wheel:
        assert not any(
            member.startswith((DOCS_DIR, TEST_DIR)) for member in wheel.namelist()
        )
| 1,513 | Python | .py | 34 | 41.235294 | 86 | 0.717599 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,227 | conftest.py | python-gitlab_python-gitlab/tests/functional/conftest.py | import dataclasses
import datetime
import logging
import pathlib
import tempfile
import time
import uuid
from subprocess import check_output
from typing import Optional
import pytest
import requests
import gitlab
import gitlab.base
from tests.functional import helpers
from tests.functional.fixtures.docker import * # noqa
SLEEP_TIME = 10
@dataclasses.dataclass
class GitlabVersion:
    """Parsed GitLab server version; major/minor are coerced to int."""

    major: int
    minor: int
    patch: str
    revision: str

    def __post_init__(self):
        # The components arrive as strings from str.split(); normalize the
        # numeric parts so callers can compare versions arithmetically.
        self.major = int(self.major)
        self.minor = int(self.minor)
@pytest.fixture(scope="session")
def gitlab_version(gl) -> GitlabVersion:
    """Return the running server's version parsed into a GitlabVersion."""
    version_string, revision = gl.version()
    major, minor, patch = version_string.split(".")
    return GitlabVersion(major, minor, patch, revision)
@pytest.fixture(scope="session")
def fixture_dir(test_dir: pathlib.Path) -> pathlib.Path:
    """Directory holding the functional-test fixture files."""
    return test_dir.joinpath("functional", "fixtures")
@pytest.fixture(scope="session")
def gitlab_service_name() -> str:
    """Name of the GitLab service as declared in `docker-compose.yml`."""
    return "gitlab"


@pytest.fixture(scope="session")
def gitlab_container_name() -> str:
    """Name of the container backing the "gitlab" service in
    `docker-compose.yml`."""
    return "gitlab-test"
@pytest.fixture(scope="session")
def gitlab_docker_port(docker_services, gitlab_service_name: str) -> int:
    """Host port mapped to the GitLab container's internal port 80."""
    mapped_port: int = docker_services.port_for(gitlab_service_name, container_port=80)
    return mapped_port


@pytest.fixture(scope="session")
def gitlab_url(docker_ip: str, gitlab_docker_port: int) -> str:
    """Base HTTP URL of the dockerized GitLab instance."""
    return f"http://{docker_ip}:{gitlab_docker_port}"
def reset_gitlab(gl: gitlab.Gitlab) -> None:
    """Delete resources (such as projects, groups, users) that shouldn't
    exist.

    Runs once before the session so leftovers from earlier runs cannot
    interfere with the current one. Deletion order matters: deploy tokens are
    nested under their project/group and are removed first.
    """
    if helpers.get_gitlab_plan(gl):
        logging.info("GitLab EE detected")
        # NOTE(jlvillal, timknight): By default in GitLab EE it will wait 7 days before
        # deleting a group or project.
        # In GL 16.0 we need to call delete with `permanently_remove=True` for projects and sub groups
        # (handled in helpers.py safe_delete)
        settings = gl.settings.get()
        modified_settings = False
        if settings.deletion_adjourned_period != 1:
            logging.info("Setting `deletion_adjourned_period` to 1 Day")
            settings.deletion_adjourned_period = 1
            modified_settings = True
        if modified_settings:
            settings.save()
    for project in gl.projects.list():
        for deploy_token in project.deploytokens.list():
            logging.info(
                f"Deleting deploy token: {deploy_token.username!r} in "
                f"project: {project.path_with_namespace!r}"
            )
            helpers.safe_delete(deploy_token)
        logging.info(f"Deleting project: {project.path_with_namespace!r}")
        helpers.safe_delete(project)
    for group in gl.groups.list():
        # skip deletion of a descendant group to prevent scenarios where parent group
        # gets deleted leaving a dangling descendant whose deletion will throw 404s.
        if group.parent_id:
            logging.info(
                f"Skipping deletion of {group.full_path} as it is a descendant "
                f"group and will be removed when the parent group is deleted"
            )
            continue
        for deploy_token in group.deploytokens.list():
            logging.info(
                f"Deleting deploy token: {deploy_token.username!r} in "
                f"group: {group.path_with_namespace!r}"
            )
            helpers.safe_delete(deploy_token)
        logging.info(f"Deleting group: {group.full_path!r}")
        helpers.safe_delete(group)
    for topic in gl.topics.list():
        logging.info(f"Deleting topic: {topic.name!r}")
        helpers.safe_delete(topic)
    for variable in gl.variables.list():
        logging.info(f"Deleting variable: {variable.key!r}")
        helpers.safe_delete(variable)
    for user in gl.users.list():
        # The built-in accounts must survive; everything else goes.
        if user.username not in ["root", "ghost"]:
            logging.info(f"Deleting user: {user.username!r}")
            helpers.safe_delete(user)
def set_token(container: str, fixture_dir: pathlib.Path) -> str:
    """Create a personal access token inside the GitLab container.

    Reads the Ruby snippet from ``set_token.rb`` and runs it through
    ``gitlab-rails runner`` in the given container; the command's stripped
    output is the token.
    """
    logging.info("Creating API token.")
    script_path = fixture_dir / "set_token.rb"
    with open(script_path, "r", encoding="utf-8") as script_file:
        ruby_snippet = script_file.read().strip()
    command = [
        "docker",
        "exec",
        container,
        "gitlab-rails",
        "runner",
        ruby_snippet,
    ]
    token = check_output(command).decode().strip()
    logging.info("Finished creating API token.")
    return token
def pytest_report_collectionfinish(config, startdir, items):
    """Print a heads-up after collection that the GitLab container is starting."""
    notice = [
        "Starting GitLab container.",
        "Waiting for GitLab to reconfigure.",
        "This will take a few minutes.",
    ]
    # Leading empty string yields a blank separator line in pytest's output.
    return [""] + notice
def pytest_addoption(parser):
    """Register the ``--keep-containers`` CLI flag for the functional suite."""
    parser.addoption(
        "--keep-containers", action="store_true",
        help="Keep containers running after testing",
    )
@pytest.fixture(scope="session")
def temp_dir() -> pathlib.Path:
    """System temporary directory as a :class:`pathlib.Path`."""
    tmp = tempfile.gettempdir()
    return pathlib.Path(tmp)
@pytest.fixture(scope="session")
def check_is_alive():
    """
    Return a healthcheck function fixture for the GitLab container spinup.
    """
    def _check(
        *,
        container: str,
        start_time: float,
        gitlab_url: str,
    ) -> bool:
        # Elapsed wait time, used only for progress logging.
        setup_time = time.perf_counter() - start_time
        minutes, seconds = int(setup_time / 60), int(setup_time % 60)
        logging.info(
            f"Checking if GitLab container is up. "
            f"Have been checking for {minutes} minute(s), {seconds} seconds ..."
        )
        logs = ["docker", "logs", container]
        # Omnibus logs this exact line once its reconfigure phase completes.
        if "gitlab Reconfigured!" not in check_output(logs).decode():
            return False
        logging.debug("GitLab has finished reconfiguring.")
        # Probe GitLab's built-in health endpoints; all three must return 200.
        for check in ("health", "readiness", "liveness"):
            url = f"{gitlab_url}/-/{check}"
            logging.debug(f"Checking {check!r} endpoint at: {url}")
            try:
                result = requests.get(url, timeout=1.0)
            except requests.exceptions.Timeout:
                logging.info(f"{check!r} check timed out")
                return False
            if result.status_code != 200:
                logging.info(f"{check!r} check did not return 200: {result!r}")
                return False
            logging.debug(f"{check!r} check passed: {result!r}")
        # Extra settle time before declaring the instance ready.
        logging.debug(f"Sleeping for {SLEEP_TIME}")
        time.sleep(SLEEP_TIME)
        return True
    return _check
@pytest.fixture(scope="session")
def gitlab_token(
    check_is_alive,
    gitlab_container_name: str,
    gitlab_url: str,
    docker_services,
    fixture_dir: pathlib.Path,
) -> str:
    """Block until the GitLab container answers its healthchecks, then create
    and return an API token for the session."""
    start_time = time.perf_counter()
    logging.info("Waiting for GitLab container to become ready.")
    # Poll every 10s, for at most 5 minutes, using the session healthcheck.
    docker_services.wait_until_responsive(
        timeout=300,
        pause=10,
        check=lambda: check_is_alive(
            container=gitlab_container_name,
            start_time=start_time,
            gitlab_url=gitlab_url,
        ),
    )
    setup_time = time.perf_counter() - start_time
    minutes, seconds = int(setup_time / 60), int(setup_time % 60)
    logging.info(
        f"GitLab container is now ready after {minutes} minute(s), {seconds} seconds"
    )
    return set_token(gitlab_container_name, fixture_dir=fixture_dir)
@pytest.fixture(scope="session")
def gitlab_config(gitlab_url: str, gitlab_token: str, temp_dir: pathlib.Path):
    """Write a python-gitlab CLI config file pointing at the test instance and
    return its path."""
    config_file = temp_dir / "python-gitlab.cfg"
    config = f"""[global]
default = local
timeout = 60
[local]
url = {gitlab_url}
private_token = {gitlab_token}
api_version = 4"""
    with open(config_file, "w", encoding="utf-8") as f:
        f.write(config)
    return config_file
@pytest.fixture(scope="session")
def gl(gitlab_url: str, gitlab_token: str) -> gitlab.Gitlab:
    """Session-wide API client; resets the instance before tests run."""
    logging.info("Instantiating python-gitlab gitlab.Gitlab instance")
    client = gitlab.Gitlab(gitlab_url, private_token=gitlab_token)
    logging.info("Reset GitLab")
    reset_gitlab(client)
    return client
@pytest.fixture(scope="session")
def gitlab_plan(gl: gitlab.Gitlab) -> Optional[str]:
    """License plan of the test instance, or None for Free/CE."""
    plan = helpers.get_gitlab_plan(gl)
    return plan
@pytest.fixture(autouse=True)
def gitlab_premium(gitlab_plan, request) -> None:
    """Skip tests marked ``gitlab_premium`` unless the instance is licensed
    for Premium or higher."""
    if gitlab_plan in ("premium", "ultimate"):
        return
    # BUG FIX: this fixture previously looked up the "gitlab_ultimate" marker
    # (its skip message and the sibling gitlab_ultimate fixture show the
    # intent), so premium-only tests were never skipped on Free instances.
    if request.node.get_closest_marker("gitlab_premium"):
        pytest.skip("Test requires GitLab Premium plan")
@pytest.fixture(autouse=True)
def gitlab_ultimate(gitlab_plan, request) -> None:
    """Skip tests marked ``gitlab_ultimate`` unless licensed for Ultimate."""
    if gitlab_plan != "ultimate" and request.node.get_closest_marker(
        "gitlab_ultimate"
    ):
        pytest.skip("Test requires GitLab Ultimate plan")
@pytest.fixture(scope="session")
def gitlab_runner(gl):
    """Register a shell-executor runner in the runner container for the
    session; the runner is unregistered on teardown."""
    container = "gitlab-runner-test"
    runner_name = "python-gitlab-runner"
    token = "registration-token"
    url = "http://gitlab"
    # All commands run through `gitlab-runner` inside the runner container.
    docker_exec = ["docker", "exec", container, "gitlab-runner"]
    register = [
        "register",
        "--run-untagged",
        "--non-interactive",
        "--registration-token",
        token,
        "--name",
        runner_name,
        "--url",
        url,
        "--clone-url",
        url,
        "--executor",
        "shell",
    ]
    unregister = ["unregister", "--name", runner_name]
    # Yield the registration output; teardown always unregisters the runner.
    yield check_output(docker_exec + register).decode()
    check_output(docker_exec + unregister).decode()
@pytest.fixture(scope="module")
def group(gl):
    """Group fixture for group API resource tests."""
    suffix = uuid.uuid4().hex
    group = gl.groups.create(
        {"name": f"test-group-{suffix}", "path": f"group-{suffix}"}
    )
    yield group
    helpers.safe_delete(group)
@pytest.fixture(scope="module")
def project(gl):
    """Project fixture for project API resource tests."""
    suffix = uuid.uuid4().hex
    project = gl.projects.create(name=f"test-project-{suffix}")
    yield project
    helpers.safe_delete(project)
@pytest.fixture(scope="function")
def make_merge_request(project):
    """Fixture factory used to create a merge_request.
    It will create a branch, add a commit to the branch, and then create a
    merge request against project.default_branch. The MR will be returned.
    When finished any created merge requests and branches will be deleted.
    NOTE: No attempt is made to restore project.default_branch to its previous
    state. So if the merge request is merged then its content will be in the
    project.default_branch branch.
    """
    # Objects created by the factory; deleted in teardown below.
    to_delete = []
    def _make_merge_request(*, source_branch: str, create_pipeline: bool = False):
        # Wait for processes to be done before we start...
        # NOTE(jlvillal): Sometimes the CI would give a "500 Internal Server
        # Error". Hoping that waiting until all other processes are done will
        # help with that.
        # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge)
        time.sleep(30)
        project.refresh()  # Gets us the current default branch
        logging.info(f"Creating branch {source_branch}")
        mr_branch = project.branches.create(
            {"branch": source_branch, "ref": project.default_branch}
        )
        # NOTE(jlvillal): Must create a commit in the new branch before we can
        # create an MR that will work.
        project.files.create(
            {
                "file_path": f"README.{source_branch}",
                "branch": source_branch,
                "content": "Initial content",
                "commit_message": "New commit in new branch",
            }
        )
        if create_pipeline:
            # A never-finishing MR pipeline, so tests can observe "running".
            project.files.create(
                {
                    "file_path": ".gitlab-ci.yml",
                    "branch": source_branch,
                    "content": """
test:
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
  script:
    - sleep 24h # We don't expect this to finish
""",
                    "commit_message": "Add a simple pipeline",
                }
            )
        mr = project.mergerequests.create(
            {
                "source_branch": source_branch,
                "target_branch": project.default_branch,
                "title": "Should remove source branch",
                "remove_source_branch": True,
            }
        )
        # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge)
        time.sleep(5)
        mr_iid = mr.iid
        # Poll (max ~30s) until GitLab finishes computing the merge status.
        for _ in range(60):
            mr = project.mergerequests.get(mr_iid)
            if (
                mr.detailed_merge_status == "checking"
                or mr.detailed_merge_status == "unchecked"
            ):
                time.sleep(0.5)
            else:
                break
        assert mr.detailed_merge_status != "checking"
        assert mr.detailed_merge_status != "unchecked"
        to_delete.extend([mr, mr_branch])
        return mr
    yield _make_merge_request
    for object in to_delete:
        helpers.safe_delete(object)
@pytest.fixture(scope="function")
def merge_request(make_merge_request, project):
    """A merge request on a freshly created branch."""
    branch = f"branch-{uuid.uuid4().hex}"
    return make_merge_request(source_branch=branch)


@pytest.fixture(scope="function")
def merge_request_with_pipeline(make_merge_request, project):
    """A merge request whose source branch also adds a CI pipeline."""
    branch = f"branch-{uuid.uuid4().hex}"
    return make_merge_request(source_branch=branch, create_pipeline=True)
@pytest.fixture(scope="module")
def project_file(project):
    """File fixture for tests requiring a project with files and branches."""
    payload = {
        "file_path": "README",
        "branch": "main",
        "content": "Initial content",
        "commit_message": "Initial commit",
    }
    return project.files.create(payload)
@pytest.fixture(scope="function")
def release(project, project_file):
    """Release fixture; its name contains a slash to exercise path encoding."""
    tag = uuid.uuid4().hex
    project.refresh()  # pick up the current default branch
    return project.releases.create(
        {
            "name": f"we_have_a_slash/test-release-{tag}",
            "tag_name": tag,
            "description": "description",
            "ref": project.default_branch,
        }
    )
@pytest.fixture(scope="function")
def service(project):
    """This is just a convenience fixture to make test cases slightly prettier. Project
    services are not idempotent. A service cannot be retrieved until it is enabled.
    After it is enabled the first time, it can never be fully deleted, only disabled."""
    yield project.services.update("asana", {"api_key": "api_key"})
    # Teardown: disabling may fail if the service was already disabled.
    try:
        project.services.delete("asana")
    except gitlab.exceptions.GitlabDeleteError as e:
        print(f"Service already disabled: {e}")
@pytest.fixture(scope="module")
def user(gl):
    """User fixture for user API resource tests."""
    suffix = uuid.uuid4().hex
    user = gl.users.create(
        email=f"user{suffix}@email.com",
        username=f"user{suffix}",
        name=f"User {suffix}",
        password="E4596f8be406Bc3a14a4ccdb1df80587",
    )
    yield user
    helpers.safe_delete(user)
@pytest.fixture(scope="module")
def issue(project):
    """Issue fixture for issue API resource tests."""
    suffix = uuid.uuid4().hex
    return project.issues.create(
        {"title": f"Issue {suffix}", "description": f"Issue {suffix} description"}
    )


@pytest.fixture(scope="module")
def milestone(project):
    """Milestone fixture for milestone API resource tests."""
    return project.milestones.create({"title": f"milestone{uuid.uuid4().hex}"})
@pytest.fixture(scope="module")
def label(project):
    """Label fixture for project label API resource tests."""
    suffix = uuid.uuid4().hex
    return project.labels.create(
        {
            "name": f"prjlabel{suffix}",
            "description": f"prjlabel1 {suffix} description",
            "color": "#112233",
        }
    )


@pytest.fixture(scope="module")
def group_label(group):
    """Label fixture for group label API resource tests."""
    suffix = uuid.uuid4().hex
    return group.labels.create(
        {
            "name": f"grplabel{suffix}",
            "description": f"grplabel1 {suffix} description",
            "color": "#112233",
        }
    )
@pytest.fixture(scope="module")
def epic(group):
    """Fixture for group epic API resource tests."""
    suffix = uuid.uuid4().hex
    return group.epics.create(
        {"title": f"epic-{suffix}", "description": f"Epic {suffix}"}
    )


@pytest.fixture(scope="module")
def variable(project):
    """Variable fixture for project variable API resource tests."""
    suffix = uuid.uuid4().hex
    return project.variables.create(
        {"key": f"var{suffix}", "value": f"Variable {suffix}"}
    )
@pytest.fixture(scope="module")
def deploy_token(project):
    """Deploy token fixture for project deploy token API resource tests."""
    suffix = uuid.uuid4().hex
    return project.deploytokens.create(
        {
            "name": f"token-{suffix}",
            "username": "root",
            "expires_at": datetime.date.today().isoformat(),
            "scopes": "read_registry",
        }
    )


@pytest.fixture(scope="module")
def group_deploy_token(group):
    """Deploy token fixture for group deploy token API resource tests."""
    suffix = uuid.uuid4().hex
    return group.deploytokens.create(
        {
            "name": f"group-token-{suffix}",
            "username": "root",
            "expires_at": datetime.date.today().isoformat(),
            "scopes": "read_registry",
        }
    )
@pytest.fixture(scope="session")
def GPG_KEY():
    """Static GPG public key used as test data for GPG key API tests."""
    return """-----BEGIN PGP PUBLIC KEY BLOCK-----
mQENBFn5mzYBCADH6SDVPAp1zh/hxmTi0QplkOfExBACpuY6OhzNdIg+8/528b3g
Y5YFR6T/HLv/PmeHskUj21end1C0PNG2T9dTx+2Vlh9ISsSG1kyF9T5fvMR3bE0x
Dl6S489CXZrjPTS9SHk1kF+7dwjUxLJyxF9hPiSihFefDFu3NeOtG/u8vbC1mewQ
ZyAYue+mqtqcCIFFoBz7wHKMWjIVSJSyTkXExu4OzpVvy3l2EikbvavI3qNz84b+
Mgkv/kiBlNoCy3CVuPk99RYKZ3lX1vVtqQ0OgNGQvb4DjcpyjmbKyibuZwhDjIOh
au6d1OyEbayTntd+dQ4j9EMSnEvm/0MJ4eXPABEBAAG0G0dpdGxhYlRlc3QxIDxm
YWtlQGZha2UudGxkPokBNwQTAQgAIQUCWfmbNgIbAwULCQgHAgYVCAkKCwIEFgID
AQIeAQIXgAAKCRBgxELHf8f3hF3yB/wNJlWPKY65UsB4Lo0hs1OxdxCDqXogSi0u
6crDEIiyOte62pNZKzWy8TJcGZvznRTZ7t8hXgKFLz3PRMcl+vAiRC6quIDUj+2V
eYfwaItd1lUfzvdCaC7Venf4TQ74f5vvNg/zoGwE6eRoSbjlLv9nqsxeA0rUBUQL
LYikWhVMP3TrlfgfduYvh6mfgh57BDLJ9kJVpyfxxx9YLKZbaas9sPa6LgBtR555
JziUxHmbEv8XCsUU8uoFeP1pImbNBplqE3wzJwzOMSmmch7iZzrAwfN7N2j3Wj0H
B5kQddJ9dmB4BbU0IXGhWczvdpxboI2wdY8a1JypxOdePoph/43iuQENBFn5mzYB
CADnTPY0Zf3d9zLjBNgIb3yDl94uOcKCq0twNmyjMhHzGqw+UMe9BScy34GL94Al
xFRQoaL+7P8hGsnsNku29A/VDZivcI+uxTx4WQ7OLcn7V0bnHV4d76iky2ufbUt/
GofthjDs1SonePO2N09sS4V4uK0d5N4BfCzzXgvg8etCLxNmC9BGt7AaKUUzKBO4
2QvNNaC2C/8XEnOgNWYvR36ylAXAmo0sGFXUsBCTiq1fugS9pwtaS2JmaVpZZ3YT
pMZlS0+SjC5BZYFqSmKCsA58oBRzCxQz57nR4h5VEflgD+Hy0HdW0UHETwz83E6/
U0LL6YyvhwFr6KPq5GxinSvfABEBAAGJAR8EGAEIAAkFAln5mzYCGwwACgkQYMRC
x3/H94SJgwgAlKQb10/xcL/epdDkR7vbiei7huGLBpRDb/L5fM8B5W77Qi8Xmuqj
cCu1j99ZCA5hs/vwVn8j8iLSBGMC5gxcuaar/wtmiaEvT9fO/h6q4opG7NcuiJ8H
wRj8ccJmRssNqDD913PLz7T40Ts62blhrEAlJozGVG/q7T3RAZcskOUHKeHfc2RI
YzGsC/I9d7k6uxAv1L9Nm5F2HaAQDzhkdd16nKkGaPGR35cT1JLInkfl5cdm7ldN
nxs4TLO3kZjUTgWKdhpgRNF5hwaz51ZjpebaRf/ZqRuNyX4lIRolDxzOn/+O1o8L
qG2ZdhHHmSK2LaQLFiSprUkikStNU9BqSQ==
=5OGa
-----END PGP PUBLIC KEY BLOCK-----"""
@pytest.fixture(scope="session")
def SSH_KEY():
    """Static RSA public key used as test data for SSH key API tests."""
    return (
        "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDZAjAX8vTiHD7Yi3/EzuVaDChtih"
        "79HyJZ6H9dEqxFfmGA1YnncE0xujQ64TCebhkYJKzmTJCImSVkOu9C4hZgsw6eE76n"
        "+Cg3VwEeDUFy+GXlEJWlHaEyc3HWioxgOALbUp3rOezNh+d8BDwwqvENGoePEBsz5l"
        "a6WP5lTi/HJIjAl6Hu+zHgdj1XVExeH+S52EwpZf/ylTJub0Bl5gHwf/siVE48mLMI"
        "sqrukXTZ6Zg+8EHAIvIQwJ1dKcXe8P5IoLT7VKrbkgAnolS0I8J+uH7KtErZJb5oZh"
        "S4OEwsNpaXMAr+6/wWSpircV2/e7sFLlhlKBC4Iq1MpqlZ7G3p foo@bar"
    )


@pytest.fixture(scope="session")
def DEPLOY_KEY():
    """Static RSA public key used as test data for deploy key API tests."""
    return (
        "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFdRyjJQh+1niBpXqE2I8dzjG"
        "MXFHlRjX9yk/UfOn075IdaockdU58sw2Ai1XIWFpZpfJkW7z+P47ZNSqm1gzeXI"
        "rtKa9ZUp8A7SZe8vH4XVn7kh7bwWCUirqtn8El9XdqfkzOs/+FuViriUWoJVpA6"
        "WZsDNaqINFKIA5fj/q8XQw+BcS92L09QJg9oVUuH0VVwNYbU2M2IRmSpybgC/gu"
        "uWTrnCDMmLItksATifLvRZwgdI8dr+q6tbxbZknNcgEPrI2jT0hYN9ZcjNeWuyv"
        "rke9IepE7SPBT41C+YtUX4dfDZDmczM1cE0YL/krdUCfuZHMa4ZS2YyNd6slufc"
        "vn bar@foo"
    )
| 20,667 | Python | .py | 521 | 32.934741 | 115 | 0.674414 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,228 | helpers.py | python-gitlab_python-gitlab/tests/functional/helpers.py | import logging
import time
from typing import Optional, TYPE_CHECKING
import pytest
import gitlab
import gitlab.base
import gitlab.exceptions
SLEEP_INTERVAL = 0.5
TIMEOUT = 60 # seconds before timeout will occur
MAX_ITERATIONS = int(TIMEOUT / SLEEP_INTERVAL)
def get_gitlab_plan(gl: gitlab.Gitlab) -> Optional[str]:
    """Return the license plan of the instance, or None when unlicensed."""
    try:
        license_info = gl.get_license()
    except gitlab.exceptions.GitlabLicenseError:
        # No license installed: only Free features are available.
        return None
    plan = license_info["plan"]
    if TYPE_CHECKING:
        assert isinstance(plan, str)
    return plan
def safe_delete(object: gitlab.base.RESTObject) -> None:
    """Ensure the object specified can not be retrieved. If object still exists after
    timeout period, fail the test.

    Retries the delete/lookup cycle every SLEEP_INTERVAL seconds for up to
    MAX_ITERATIONS attempts. Users are hard-deleted, projects permanently
    removed; groups that linger are tolerated because GitLab 16 only removes
    them on a delay. (Fix: removed two dead `pass` statements.)
    """
    manager = object.manager
    for index in range(MAX_ITERATIONS):
        try:
            object = manager.get(object.get_id())  # type: ignore[attr-defined]
        except gitlab.exceptions.GitlabGetError:
            # Object is gone: deletion succeeded.
            return
        if index:
            logging.info(f"Attempt {index + 1} to delete {object!r}.")
        try:
            if isinstance(object, gitlab.v4.objects.User):
                # You can't use this option if the selected user is the sole owner of any groups
                # Use `hard_delete=True` or a 'Ghost User' may be created.
                # https://docs.gitlab.com/ee/api/users.html#user-deletion
                object.delete(hard_delete=True)
                if index > 1:
                    # If User is the sole owner of any group it won't be deleted,
                    # which combined with parents group never immediately deleting in GL 16
                    # we shouldn't cause test to fail if it still exists
                    return
            elif isinstance(object, gitlab.v4.objects.Project):
                # Immediately delete rather than waiting for at least 1day
                # https://docs.gitlab.com/ee/api/projects.html#delete-project
                object.delete(permanently_remove=True)
            else:
                # We only attempt to delete parent groups to prevent dangling sub-groups
                # However parent groups can only be deleted on a delay in Gl 16
                # https://docs.gitlab.com/ee/api/groups.html#remove-group
                object.delete()
        except gitlab.exceptions.GitlabDeleteError:
            logging.info(f"{object!r} already deleted or scheduled for deletion.")
            if isinstance(object, gitlab.v4.objects.Group):
                # Parent groups can never be immediately deleted in GL 16,
                # so don't cause test to fail if it still exists
                return
        time.sleep(SLEEP_INTERVAL)
    pytest.fail(f"{object!r} was not deleted")
| 2,885 | Python | .py | 61 | 36.885246 | 96 | 0.637171 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,229 | test_releases.py | python-gitlab_python-gitlab/tests/functional/api/test_releases.py | release_name = "Demo Release"
release_tag_name = "v1.2.3"
release_description = "release notes go here"
link_data = {"url": "https://example.com", "name": "link_name"}
def test_create_project_release(project, project_file):
    """A named release can be created, listed and fetched by tag."""
    project.refresh()  # pick up the current default branch
    release = project.releases.create(
        {
            "name": release_name,
            "tag_name": release_tag_name,
            "description": release_description,
            "ref": project.default_branch,
        }
    )
    assert project.releases.get(release_tag_name)
    assert release in project.releases.list()
    assert release.name == release_name
    assert release.tag_name == release_tag_name
    assert release.description == release_description
def test_create_project_release_no_name(project, project_file):
    """Releases may be created without an explicit name."""
    tag = "v2.3.4"
    project.refresh()  # pick up the current default branch
    release = project.releases.create(
        {
            "tag_name": tag,
            "description": release_description,
            "ref": project.default_branch,
        }
    )
    assert project.releases.get(tag)
    assert release in project.releases.list()
    assert release.tag_name == tag
    assert release.description == release_description
def test_update_save_project_release(project, release):
    """Saving an edited description persists it server-side."""
    new_description = f"{release.description} updated"
    release.description = new_description
    release.save()
    refreshed = project.releases.get(release.tag_name)
    assert refreshed.description == new_description
def test_delete_project_release(project, release):
    """Releases can be deleted by tag name."""
    project.releases.delete(release.tag_name)


def test_create_project_release_links(project, release):
    """Asset links attached to a release are returned with it."""
    release.links.create(link_data)
    refreshed = project.releases.get(release.tag_name)
    first_link = refreshed.assets["links"][0]
    assert first_link["url"] == link_data["url"]
    assert first_link["name"] == link_data["name"]
| 2,016 | Python | .py | 46 | 37.695652 | 66 | 0.699591 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,230 | test_issues.py | python-gitlab_python-gitlab/tests/functional/api/test_issues.py | import gitlab
def test_create_issue(project):
    """Issues can be created, filtered (iids/state), and queried for
    participants, closers and related MRs."""
    issue = project.issues.create({"title": "my issue 1"})
    issue2 = project.issues.create({"title": "my issue 2"})
    issues = project.issues.list()
    issue_iids = [issue.iid for issue in issues]
    assert {issue, issue2} <= set(issues)
    # Test 'iids' as a list
    filtered_issues = project.issues.list(iids=issue_iids)
    assert {issue, issue2} == set(filtered_issues)
    issue2.state_event = "close"
    issue2.save()
    assert issue in project.issues.list(state="opened")
    assert issue2 in project.issues.list(state="closed")
    participants = issue.participants()
    assert participants
    assert isinstance(participants, list)
    # Fix: use isinstance() instead of the `type(x) == list` anti-pattern.
    assert isinstance(issue.closed_by(), list)
    assert isinstance(issue.related_merge_requests(), list)
def test_issue_notes(issue):
    """Notes and award emoji can be added to and removed from an issue."""
    note = issue.notes.create({"body": "This is an issue note"})
    assert note in issue.notes.list()
    emoji = note.awardemojis.create({"name": "tractor"})
    assert emoji in note.awardemojis.list()
    # Clean up in reverse creation order.
    emoji.delete()
    note.delete()
def test_issue_labels(project, issue):
    """Label filters (list, string, "Any", "None") behave as documented."""
    project.labels.create({"name": "label2", "color": "#aabbcc"})
    issue.labels = ["label2"]
    issue.save()
    for label_filter in (["label2"], "label2", "Any"):
        assert issue in project.issues.list(labels=label_filter)
    assert issue not in project.issues.list(labels="None")
def test_issue_links(project, issue):
    """Issue links can be created, listed and deleted."""
    linked_issue = project.issues.create({"title": "Linked issue"})
    source_issue, target_issue = issue.links.create(
        {"target_project_id": project.id, "target_issue_iid": linked_issue.iid}
    )
    assert source_issue == issue
    assert target_issue == linked_issue
    links = issue.links.list()
    assert links
    issue.links.delete(links[0].issue_link_id)
def test_issue_label_events(issue):
    """Label events are listable and fetchable as typed objects."""
    events = issue.resourcelabelevents.list()
    assert isinstance(events, list)
    first_event = issue.resourcelabelevents.get(events[0].id)
    assert isinstance(first_event, gitlab.v4.objects.ProjectIssueResourceLabelEvent)


def test_issue_weight_events(issue):
    """Changing the weight produces a resource weight event."""
    issue.weight = 13
    issue.save()
    events = issue.resource_weight_events.list()
    assert isinstance(events, list)
    first_event = issue.resource_weight_events.get(events[0].id)
    assert isinstance(first_event, gitlab.v4.objects.ProjectIssueResourceWeightEvent)
def test_issue_milestones(project, milestone):
    """Issues carry milestones and produce milestone events."""
    issue = project.issues.create(
        {"title": "my issue 1", "milestone_id": milestone.id}
    )
    assert milestone.issues().next().title == "my issue 1"
    events = issue.resourcemilestoneevents.list()
    assert isinstance(events, list)
    event = issue.resourcemilestoneevents.get(events[0].id)
    assert isinstance(
        event, gitlab.v4.objects.ProjectIssueResourceMilestoneEvent
    )
    assert issue in project.issues.list(milestone=milestone.title)
def test_issue_discussions(issue):
    """Discussion notes support create, update and delete."""
    discussion = issue.discussions.create({"body": "Discussion body"})
    assert discussion in issue.discussions.list()
    note = discussion.notes.create({"body": "first note"})
    fetched_note = discussion.notes.get(note.id)
    fetched_note.body = "updated body"
    fetched_note.save()
    discussion = issue.discussions.get(discussion.id)
    assert discussion.attributes["notes"][-1]["body"] == "updated body"
    fetched_note.delete()
| 3,538 | Python | .py | 78 | 40.269231 | 79 | 0.716496 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,231 | test_repository.py | python-gitlab_python-gitlab/tests/functional/api/test_repository.py | import base64
import os
import sys
import tarfile
import time
import zipfile
from io import BytesIO
import pytest
import gitlab
def test_repository_files(project):
    """Exercise the full project-file lifecycle: create, update, delete,
    re-create, plus the decode/head/blame/raw accessors."""
    project.files.create(
        {
            "file_path": "README.md",
            "branch": "main",
            "content": "Initial content",
            "commit_message": "Initial commit",
        }
    )
    readme = project.files.get(file_path="README.md", ref="main")
    readme.content = base64.b64encode(b"Improved README").decode()
    # Brief pause so the update lands in a distinct commit on the server.
    time.sleep(2)
    readme.save(branch="main", commit_message="new commit")
    readme.delete(commit_message="Removing README", branch="main")
    project.files.create(
        {
            "file_path": "README.rst",
            "branch": "main",
            "content": "Initial content",
            "commit_message": "New commit",
        }
    )
    readme = project.files.get(file_path="README.rst", ref="main")
    # The first decode() is the ProjectFile method, the second one is the bytes
    # object method
    assert readme.decode().decode() == "Initial content"
    headers = project.files.head("README.rst", ref="main")
    assert headers["X-Gitlab-File-Path"] == "README.rst"
    blame = project.files.blame(file_path="README.rst", ref="main")
    assert blame
    raw_file = project.files.raw(file_path="README.rst", ref="main")
    assert os.fsdecode(raw_file) == "Initial content"
    # `ref` is optional; it defaults to the project's default branch.
    raw_file = project.files.raw(file_path="README.rst")
    assert os.fsdecode(raw_file) == "Initial content"
def test_repository_tree(project):
    """The repository tree, raw blobs and snapshots are retrievable."""
    tree = project.repository_tree()
    assert tree
    first_entry = tree[0]
    assert first_entry["name"] == "README.rst"
    blob = project.repository_raw_blob(first_entry["id"])
    assert blob.decode() == "Initial content"
    assert isinstance(project.snapshot(), bytes)


def test_repository_archive(project):
    """Archives are bytes and identical for the default ref and 'main'."""
    default_archive = project.repository_archive()
    assert isinstance(default_archive, bytes)
    assert default_archive == project.repository_archive("main")
# NOTE(jlvillal): Support for using tarfile.is_tarfile() on a file or file-like object
# was added in Python 3.9
@pytest.mark.skipif(sys.version_info < (3, 9), reason="requires python3.9 or higher")
@pytest.mark.parametrize(
    "format,assertion",
    [
        ("tbz", tarfile.is_tarfile),
        ("tbz2", tarfile.is_tarfile),
        ("tb2", tarfile.is_tarfile),
        ("bz2", tarfile.is_tarfile),
        ("tar", tarfile.is_tarfile),
        ("tar.gz", tarfile.is_tarfile),
        ("tar.bz2", tarfile.is_tarfile),
        ("zip", zipfile.is_zipfile),
    ],
)
def test_repository_archive_formats(project, format, assertion):
    """Each supported archive format downloads as a valid tar/zip payload."""
    archive = project.repository_archive(format=format)
    assert assertion(BytesIO(archive))
def test_create_commit(project):
    """A commit created via the API shows a diff, refs and merge requests."""
    commit = project.commits.create(
        {
            "branch": "main",
            "commit_message": "blah blah blah",
            "actions": [
                {"action": "create", "file_path": "blah", "content": "blah"}
            ],
        }
    )
    assert "@@" in project.commits.list()[0].diff()[0]["diff"]
    assert isinstance(commit.refs(), list)
    assert isinstance(commit.merge_requests(), list)
def test_list_all_commits(project):
    """Commits on non-default branches only appear with the `all` flag."""
    payload = {
        "branch": "new-branch",
        "start_branch": "main",
        "commit_message": "New commit on new branch",
        "actions": [
            {"action": "create", "file_path": "new-file", "content": "new content"}
        ],
    }
    commit = project.commits.create(payload)
    default_branch_commits = project.commits.list(all=True)
    assert commit not in default_branch_commits
    # Listing commits on other branches requires `all` parameter passed to the API
    every_commit = project.commits.list(get_all=True, all=True)
    assert commit in every_commit
    assert len(every_commit) > len(default_branch_commits)
def test_create_commit_status(project):
    """A status can be attached to the latest commit."""
    commit = project.commits.list()[0]
    status = commit.statuses.create({"state": "success", "sha": commit.id})
    assert status in commit.statuses.list()


def test_commit_signature(project):
    """Unsigned commits report a 404 for their signature."""
    commit = project.commits.list()[0]
    with pytest.raises(gitlab.GitlabGetError) as e:
        commit.signature()
    assert "404 Signature Not Found" in str(e.value)


def test_commit_comment(project):
    """Comments can be added to a commit."""
    commit = project.commits.list()[0]
    commit.comments.create({"note": "This is a commit comment"})
    assert len(commit.comments.list()) == 1
def test_commit_discussion(project):
    """Commit discussion notes support create, update and delete."""
    commit = project.commits.list()[0]
    discussion = commit.discussions.create({"body": "Discussion body"})
    assert discussion in commit.discussions.list()
    note = discussion.notes.create({"body": "first note"})
    fetched_note = discussion.notes.get(note.id)
    fetched_note.body = "updated body"
    fetched_note.save()
    discussion = commit.discussions.get(discussion.id)
    fetched_note.delete()
def test_revert_commit(project):
    """Reverting the same commit twice raises GitlabRevertError."""
    commit = project.commits.list()[0]
    reverted = commit.revert(branch="main")
    expected_message = (
        f'Revert "{commit.message}"\n\nThis reverts commit {commit.id}'
    )
    assert reverted["message"] == expected_message
    with pytest.raises(gitlab.GitlabRevertError):
        # Two revert attempts should raise GitlabRevertError
        commit.revert(branch="main")


def test_repository_merge_base(project):
    """merge-base needs at least two refs and returns one of them."""
    refs = [commit.id for commit in project.commits.list(all=True)]
    assert project.repository_merge_base(refs)["id"] in refs
    with pytest.raises(gitlab.GitlabGetError, match="Provide at least 2 refs"):
        project.repository_merge_base(refs[0])
| 5,638 | Python | .py | 139 | 34.755396 | 86 | 0.673271 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,232 | test_graphql.py | python-gitlab_python-gitlab/tests/functional/api/test_graphql.py | import logging
import pytest
import gitlab
@pytest.fixture
def gl_gql(gitlab_url: str, gitlab_token: str) -> gitlab.GraphQL:
    """Authenticated GraphQL client against the test instance."""
    logging.info("Instantiating gitlab.GraphQL instance")
    return gitlab.GraphQL(gitlab_url, token=gitlab_token)


def test_query_returns_valid_response(gl_gql: gitlab.GraphQL):
    """A trivial currentUser query returns the expected payload."""
    response = gl_gql.execute("query {currentUser {active}}")
    assert response["currentUser"]["active"] is True
| 469 | Python | .py | 12 | 35.416667 | 65 | 0.759465 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
def test_project_boards(project):
    """Exercise create/get/delete for project issue boards.

    The def header was corrupted by extraction metadata; restored here.
    """
    assert not project.boards.list()
    board = project.boards.create({"name": "testboard"})
    board = project.boards.get(board.id)
    project.boards.delete(board.id)
def test_group_boards(group):
    """Exercise create/get/delete for group issue boards."""
    assert not group.boards.list()
    created = group.boards.create({"name": "testboard"})
    fetched = group.boards.get(created.id)
    group.boards.delete(fetched.id)
| 404 | Python | .py | 10 | 35.6 | 56 | 0.71134 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,234 | test_epics.py | python-gitlab_python-gitlab/tests/functional/api/test_epics.py | import pytest
pytestmark = pytest.mark.gitlab_premium
def test_epics(group):
    """Create and update an epic, then verify the changes via a fresh GET."""
    epic = group.epics.create({"title": "Test epic"})
    epic.title = "Fixed title"
    epic.labels = ["label1", "label2"]
    epic.save()
    epic = group.epics.get(epic.iid)
    assert epic.title == "Fixed title"
    assert epic.labels == ["label1", "label2"]
    assert group.epics.list()
@pytest.mark.xfail(reason="404 on issue.id")
def test_epic_issues(epic, issue):
    """Associate an issue with an epic and remove the association."""
    assert not epic.issues.list()
    epic_issue = epic.issues.create({"issue_id": issue.id})
    assert epic.issues.list()
    epic_issue.delete()
def test_epic_notes(epic):
    """Create a note on an epic and verify it is listed."""
    assert not epic.notes.list()
    epic.notes.create({"body": "Test note"})
    assert epic.notes.list()
| 743 | Python | .py | 21 | 31 | 59 | 0.682138 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,235 | test_import_export.py | python-gitlab_python-gitlab/tests/functional/api/test_import_export.py | import time
import pytest
import gitlab
# https://github.com/python-gitlab/python-gitlab/pull/2790#pullrequestreview-1873617123
def test_group_import_export(gl, group, temp_dir):
    """Export a group, download the archive, and import it back."""
    export = group.exports.create()
    assert export.message == "202 Accepted"
    # We cannot check for export_status with group export API
    time.sleep(10)
    import_archive = temp_dir / "gitlab-group-export.tgz"
    import_path = "imported_group"
    import_name = "Imported Group"
    with open(import_archive, "wb") as f:
        export.download(streamed=True, action=f.write)
    with open(import_archive, "rb") as f:
        output = gl.groups.import_group(f, import_path, import_name)
    assert output["message"] == "202 Accepted"
    # We cannot check for returned ID with group import API
    time.sleep(10)
    group_import = gl.groups.get(import_path)
    assert group_import.path == import_path
    assert group_import.name == import_name
# https://github.com/python-gitlab/python-gitlab/pull/2790#pullrequestreview-1873617123
@pytest.mark.xfail(reason="test_project_import_export to be worked on in a follow up")
def test_project_import_export(gl, project, temp_dir):
    """Export a project, wait for completion, then re-import the archive.

    Polls both the export and the import status, raising after ~15 seconds.
    """
    export = project.exports.create()
    assert export.message == "202 Accepted"
    export = project.exports.get()
    assert isinstance(export, gitlab.v4.objects.ProjectExport)
    count = 0
    while export.export_status != "finished":
        time.sleep(1)
        export.refresh()
        count += 1
        if count == 15:
            raise Exception("Project export taking too much time")
    with open(temp_dir / "gitlab-export.tgz", "wb") as f:
        export.download(streamed=True, action=f.write)  # type: ignore[arg-type]
    # Use a context manager so the archive handle is closed after upload
    # (the previous version leaked the file object opened inline).
    with open(temp_dir / "gitlab-export.tgz", "rb") as f:
        output = gl.projects.import_project(
            f, "imported_project", name="Imported Project"
        )
    project_import = gl.projects.get(output["id"], lazy=True).imports.get()
    assert project_import.path == "imported_project"
    assert project_import.name == "Imported Project"
    count = 0
    while project_import.import_status != "finished":
        time.sleep(1)
        project_import.refresh()
        count += 1
        if count == 15:
            raise Exception("Project import taking too much time")
# https://github.com/python-gitlab/python-gitlab/pull/2790#pullrequestreview-1873617123
@pytest.mark.xfail(reason="test_project_remote_import to be worked on in a follow up")
def test_project_remote_import(gl):
    """Remote import from a non-https URL is rejected with a 400."""
    with pytest.raises(gitlab.exceptions.GitlabImportError) as err_info:
        gl.projects.remote_import(
            "ftp://whatever.com/url", "remote-project", "remote-project", "root"
        )
    assert err_info.value.response_code == 400
    assert (
        "File url is blocked: Only allowed schemes are https"
        in err_info.value.error_message
    )
# https://github.com/python-gitlab/python-gitlab/pull/2790#pullrequestreview-1873617123
@pytest.mark.xfail(
    reason="test_project_remote_import_s3 to be worked on in a follow up"
)
def test_project_remote_import_s3(gl):
    """Remote S3 import with bogus credentials fails with a 400."""
    gl.features.set("import_project_from_remote_file_s3", True)
    with pytest.raises(gitlab.exceptions.GitlabImportError) as err_info:
        gl.projects.remote_import_s3(
            "remote-project",
            "aws-region",
            "aws-bucket-name",
            "aws-file-key",
            "aws-access-key-id",
            "secret-access-key",
            "remote-project",
            "root",
        )
    assert err_info.value.response_code == 400
    assert (
        "Failed to open 'aws-file-key' in 'aws-bucket-name'"
        in err_info.value.error_message
    )
| 3,683 | Python | .py | 87 | 35.747126 | 87 | 0.678232 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,236 | test_project_job_token_scope.py | python-gitlab_python-gitlab/tests/functional/api/test_project_job_token_scope.py | # https://docs.gitlab.com/ee/ci/jobs/ci_job_token.html#allow-any-project-to-access-your-project
def test_enable_limit_access_to_this_project(gl, project):
    """Enabling the job token scope limit is reflected in inbound_enabled."""
    scope = project.job_token_scope.get()
    scope.enabled = True
    scope.save()
    scope.refresh()
    assert scope.inbound_enabled
def test_disable_limit_access_to_this_project(gl, project):
    """Disabling the job token scope limit is reflected in inbound_enabled."""
    scope = project.job_token_scope.get()
    scope.enabled = False
    scope.save()
    scope.refresh()
    assert not scope.inbound_enabled
def test_add_project_to_job_token_scope_allowlist(gl, project):
    """Adding a project to the allowlist returns the expected source/target ids."""
    project_to_add = gl.projects.create({"name": "Ci_Cd_token_add_proj"})
    scope = project.job_token_scope.get()
    resp = scope.allowlist.create({"target_project_id": project_to_add.id})
    assert resp.source_project_id == project.id
    assert resp.target_project_id == project_to_add.id
    project_to_add.delete()
def test_projects_job_token_scope_allowlist_contains_added_project_name(gl, project):
    """A project added to the allowlist can be found by name in the listing."""
    scope = project.job_token_scope.get()
    project_name = "Ci_Cd_token_named_proj"
    project_to_add = gl.projects.create({"name": project_name})
    scope.allowlist.create({"target_project_id": project_to_add.id})
    scope.refresh()
    assert any(allowed.name == project_name for allowed in scope.allowlist.list())
    project_to_add.delete()
def test_remove_project_by_id_from_projects_job_token_scope_allowlist(gl, project):
    """A project deleted from the allowlist by id no longer appears in it."""
    scope = project.job_token_scope.get()
    project_to_add = gl.projects.create({"name": "Ci_Cd_token_remove_proj"})
    scope.allowlist.create({"target_project_id": project_to_add.id})
    scope.refresh()
    scope.allowlist.delete(project_to_add.id)
    scope.refresh()
    assert not any(
        allowed.id == project_to_add.id for allowed in scope.allowlist.list()
    )
    project_to_add.delete()
def test_add_group_to_job_token_scope_allowlist(gl, project):
    """Adding a group to the groups allowlist returns the expected ids."""
    group_to_add = gl.groups.create(
        {"name": "add_group", "path": "allowlisted-add-test"}
    )
    scope = project.job_token_scope.get()
    resp = scope.groups_allowlist.create({"target_group_id": group_to_add.id})
    assert resp.source_project_id == project.id
    assert resp.target_group_id == group_to_add.id
    group_to_add.delete()
def test_projects_job_token_scope_groups_allowlist_contains_added_group_name(
    gl, project
):
    """A group added to the groups allowlist can be found by name."""
    scope = project.job_token_scope.get()
    group_name = "list_group"
    group_to_add = gl.groups.create(
        {"name": group_name, "path": "allowlisted-add-and-list-test"}
    )
    scope.groups_allowlist.create({"target_group_id": group_to_add.id})
    scope.refresh()
    assert any(allowed.name == group_name for allowed in scope.groups_allowlist.list())
    group_to_add.delete()
def test_remove_group_by_id_from_projects_job_token_scope_groups_allowlist(gl, project):
    """A group deleted from the groups allowlist no longer appears in it."""
    scope = project.job_token_scope.get()
    group_to_add = gl.groups.create(
        {"name": "delete_group", "path": "allowlisted-delete-test"}
    )
    scope.groups_allowlist.create({"target_group_id": group_to_add.id})
    scope.refresh()
    scope.groups_allowlist.delete(group_to_add.id)
    scope.refresh()
    # NOTE(review): unlike the project variant above, absence is checked by
    # name here rather than by id — consider aligning for consistency.
    assert not any(
        allowed.name == group_to_add.name for allowed in scope.groups_allowlist.list()
    )
    group_to_add.delete()
| 3,295 | Python | .py | 73 | 39.821918 | 95 | 0.703366 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,237 | test_keys.py | python-gitlab_python-gitlab/tests/functional/api/test_keys.py | """
GitLab API:
https://docs.gitlab.com/ce/api/keys.html
"""
import base64
import hashlib
def key_fingerprint(key: str) -> str:
    """Return the OpenSSH-style SHA256 fingerprint for a public key string.

    The key is expected in "type base64-blob [comment]" form; the fingerprint
    is the unpadded base64 of the SHA-256 digest of the decoded blob.
    """
    encoded_blob = key.split()[1]
    raw_blob = base64.b64decode(encoded_blob.encode("ascii"))
    sha = hashlib.sha256(raw_blob).digest()
    b64_digest = base64.b64encode(sha).rstrip(b"=").decode("utf-8")
    return f"SHA256:{b64_digest}"
def test_keys_ssh(gl, user, SSH_KEY):
    """An SSH key can be fetched by id and by fingerprint (admin only)."""
    key = user.keys.create({"title": "foo@bar", "key": SSH_KEY})
    # Get key by ID (admin only).
    key_by_id = gl.keys.get(key.id)
    assert key_by_id.title == key.title
    assert key_by_id.key == key.key
    fingerprint = key_fingerprint(SSH_KEY)
    # Get key by fingerprint (admin only).
    key_by_fingerprint = gl.keys.get(fingerprint=fingerprint)
    assert key_by_fingerprint.title == key.title
    assert key_by_fingerprint.key == key.key
    key.delete()
def test_keys_deploy(gl, project, DEPLOY_KEY):
    """A deploy key can be fetched by fingerprint and lists its projects."""
    key = project.keys.create({"title": "foo@bar", "key": DEPLOY_KEY})
    fingerprint = key_fingerprint(DEPLOY_KEY)
    key_by_fingerprint = gl.keys.get(fingerprint=fingerprint)
    assert key_by_fingerprint.title == key.title
    assert key_by_fingerprint.key == key.key
    assert len(key_by_fingerprint.deploy_keys_projects) == 1
    key.delete()
| 1,256 | Python | .py | 31 | 36.290323 | 76 | 0.692498 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
def test_current_user_email(gl):
    """Create, list and delete an email for the authenticated user.

    The def header was corrupted by extraction metadata; restored here.
    """
    gl.auth()
    mail = gl.user.emails.create({"email": "current@user.com"})
    assert mail in gl.user.emails.list()
    mail.delete()
def test_current_user_gpg_keys(gl, GPG_KEY):
    """Create, list, get and delete a GPG key for the authenticated user."""
    gl.auth()
    gkey = gl.user.gpgkeys.create({"key": GPG_KEY})
    assert gkey in gl.user.gpgkeys.list()
    # Seems broken on the gitlab side
    gkey = gl.user.gpgkeys.get(gkey.id)
    gkey.delete()
def test_current_user_ssh_keys(gl, SSH_KEY):
    """Create, list and delete an SSH key for the authenticated user."""
    gl.auth()
    key = gl.user.keys.create({"title": "testkey", "key": SSH_KEY})
    assert key in gl.user.keys.list()
    key.delete()
def test_current_user_status(gl):
    """Setting the user status persists the message and emoji."""
    gl.auth()
    message = "Test"
    emoji = "thumbsup"
    status = gl.user.status.get()
    status.message = message
    status.emoji = emoji
    status.save()
    new_status = gl.user.status.get()
    assert new_status.message == message
    assert new_status.emoji == emoji
| 927 | Python | .py | 28 | 28.25 | 67 | 0.660654 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,239 | test_lazy_objects.py | python-gitlab_python-gitlab/tests/functional/api/test_lazy_objects.py | import time
import pytest
import gitlab
@pytest.fixture
def lazy_project(gl, project):
    """Return a lazy Project object fetched by its path with namespace."""
    assert "/" in project.path_with_namespace
    return gl.projects.get(project.path_with_namespace, lazy=True)
def test_lazy_id(project, lazy_project):
    """A lazy object's id is the URL-encoded path, not the numeric id."""
    assert isinstance(lazy_project.id, str)
    assert isinstance(lazy_project.id, gitlab.utils.EncodedId)
    assert lazy_project.id == gitlab.utils.EncodedId(project.path_with_namespace)
def test_refresh_after_lazy_get_with_path(project, lazy_project):
    """refresh() populates the numeric id on a path-based lazy object."""
    lazy_project.refresh()
    assert lazy_project.id == project.id
def test_save_after_lazy_get_with_path(project, lazy_project):
    """save() on a lazy object updates attributes and resolves the id."""
    lazy_project.description = "A new description"
    lazy_project.save()
    assert lazy_project.id == project.id
    assert lazy_project.description == "A new description"
def test_delete_after_lazy_get_with_path(gl, group):
    """delete() works on a lazy object obtained via path."""
    project = gl.projects.create({"name": "lazy_project", "namespace_id": group.id})
    # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge)
    time.sleep(5)
    lazy_project = gl.projects.get(project.path_with_namespace, lazy=True)
    lazy_project.delete()
def test_list_children_after_lazy_get_with_path(gl, lazy_project):
    """Child managers (e.g. merge requests) are usable on a lazy object."""
    lazy_project.mergerequests.list()
| 1,290 | Python | .py | 27 | 43.740741 | 111 | 0.751801 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,240 | test_push_rules.py | python-gitlab_python-gitlab/tests/functional/api/test_push_rules.py | import pytest
import gitlab
@pytest.mark.gitlab_premium
def test_project_push_rules(project):
    """Push rules CRUD; getting non-existent rules raises a parsing error."""
    with pytest.raises(gitlab.GitlabParsingError):
        # when no rules are defined the API call returns back `None` which
        # causes a gitlab.GitlabParsingError in RESTObject.__init__()
        project.pushrules.get()
    push_rules = project.pushrules.create({"deny_delete_tag": True})
    assert push_rules.deny_delete_tag
    push_rules.deny_delete_tag = False
    push_rules.save()
    push_rules = project.pushrules.get()
    assert push_rules
    assert not push_rules.deny_delete_tag
    push_rules.delete()
    with pytest.raises(gitlab.GitlabParsingError):
        project.pushrules.get()
| 710 | Python | .py | 18 | 34 | 74 | 0.72807 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,241 | test_merge_requests.py | python-gitlab_python-gitlab/tests/functional/api/test_merge_requests.py | import datetime
import time
import pytest
import gitlab
import gitlab.v4.objects
def test_merge_requests(project):
    """Create a branch with a new file and open a merge request from it."""
    project.files.create(
        {
            "file_path": "README.rst",
            "branch": "main",
            "content": "Initial content",
            "commit_message": "Initial commit",
        }
    )
    source_branch = "branch-merge-request-api"
    project.branches.create({"branch": source_branch, "ref": "main"})
    project.files.create(
        {
            "file_path": "README2.rst",
            "branch": source_branch,
            "content": "Initial content",
            "commit_message": "New commit in new branch",
        }
    )
    project.mergerequests.create(
        {"source_branch": source_branch, "target_branch": "main", "title": "MR readme2"}
    )
def test_merge_requests_get(project, merge_request):
    """An MR can be fetched by its iid given as int or str."""
    mr = project.mergerequests.get(merge_request.iid)
    assert mr.iid == merge_request.iid
    mr = project.mergerequests.get(str(merge_request.iid))
    assert mr.iid == merge_request.iid
@pytest.mark.gitlab_premium
def test_merge_requests_list_approver_ids(project):
    """Listing with approver_ids does not error (regression check)."""
    # show https://github.com/python-gitlab/python-gitlab/issues/1698 is now
    # fixed
    project.mergerequests.list(
        all=True,
        state="opened",
        author_id=423,
        approver_ids=[423],
    )
def test_merge_requests_get_lazy(project, merge_request):
    """A lazy MR keeps the iid it was constructed with."""
    mr = project.mergerequests.get(merge_request.iid, lazy=True)
    assert mr.iid == merge_request.iid
def test_merge_request_discussion(project):
    """Discussion notes on an MR can be created, updated and deleted."""
    mr = project.mergerequests.list()[0]
    discussion = mr.discussions.create({"body": "Discussion body"})
    assert discussion in mr.discussions.list()
    note = discussion.notes.create({"body": "first note"})
    note_from_get = discussion.notes.get(note.id)
    note_from_get.body = "updated body"
    note_from_get.save()
    discussion = mr.discussions.get(discussion.id)
    assert discussion.attributes["notes"][-1]["body"] == "updated body"
    note_from_get.delete()
def test_merge_request_labels(project):
    """Changing MR labels produces resource label events."""
    mr = project.mergerequests.list()[0]
    mr.labels = ["label2"]
    mr.save()
    events = mr.resourcelabelevents.list()
    assert events
    event = mr.resourcelabelevents.get(events[0].id)
    assert event
def test_merge_request_milestone_events(project, milestone):
    """Assigning a milestone produces resource milestone events."""
    mr = project.mergerequests.list()[0]
    mr.milestone_id = milestone.id
    mr.save()
    milestones = mr.resourcemilestoneevents.list()
    assert milestones
    milestone = mr.resourcemilestoneevents.get(milestones[0].id)
    assert milestone
def test_merge_request_basic(project):
    """Smoke-test commits(), changes() and participants() on an MR."""
    mr = project.mergerequests.list()[0]
    # basic testing: only make sure that the methods exist
    mr.commits()
    mr.changes()
    participants = mr.participants()
    assert participants
    assert isinstance(participants, list)
def test_merge_request_rebase(project):
    """rebase() on an MR returns a truthy result."""
    mr = project.mergerequests.list()[0]
    assert mr.rebase()
@pytest.mark.gitlab_premium
@pytest.mark.xfail(reason="project /approvers endpoint is gone")
def test_project_approvals(project):
    """Exercise project- and MR-level approval settings and approvers."""
    mr = project.mergerequests.list()[0]
    approval = project.approvals.get()
    reset_value = approval.reset_approvals_on_push
    approval.reset_approvals_on_push = not reset_value
    approval.save()
    approval = project.approvals.get()
    assert reset_value != approval.reset_approvals_on_push
    project.approvals.set_approvers([1], [])
    approval = project.approvals.get()
    assert approval.approvers[0]["user"]["id"] == 1
    approval = mr.approvals.get()
    approval.approvals_required = 2
    approval.save()
    approval = mr.approvals.get()
    assert approval.approvals_required == 2
    approval.approvals_required = 3
    approval.save()
    approval = mr.approvals.get()
    assert approval.approvals_required == 3
    mr.approvals.set_approvers(1, [1], [])
    approval = mr.approvals.get()
    assert approval.approvers[0]["user"]["id"] == 1
@pytest.mark.gitlab_premium
def test_project_merge_request_approval_rules(group, project):
    """Create, list, update and delete a project approval rule."""
    approval_rules = project.approvalrules.list(get_all=True)
    assert not approval_rules
    project.approvalrules.create(
        {"name": "approval-rule", "approvals_required": 2, "group_ids": [group.id]}
    )
    approval_rules = project.approvalrules.list(get_all=True)
    assert len(approval_rules) == 1
    assert approval_rules[0].approvals_required == 2
    approval_rules[0].save()
    approval_rules = project.approvalrules.list(get_all=True)
    assert len(approval_rules) == 1
    assert approval_rules[0].approvals_required == 2
    approval_rules[0].delete()
def test_merge_request_reset_approvals(gitlab_url, project):
    """A bot user (project access token) can reset approvals on an MR."""
    today = datetime.date.today()
    future_date = today + datetime.timedelta(days=4)
    bot = project.access_tokens.create(
        {"name": "bot", "scopes": ["api"], "expires_at": future_date.isoformat()}
    )
    bot_gitlab = gitlab.Gitlab(gitlab_url, private_token=bot.token)
    bot_project = bot_gitlab.projects.get(project.id, lazy=True)
    # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge)
    time.sleep(5)
    mr = bot_project.mergerequests.list()[0]  # type: ignore[index]
    assert mr.reset_approvals()
def test_cancel_merge_when_pipeline_succeeds(project, merge_request_with_pipeline):
    """Merge-when-pipeline-succeeds can be armed and then cancelled."""
    # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge)
    time.sleep(5)
    # Set to merge when the pipeline succeeds, which should never happen
    merge_request_with_pipeline.merge(merge_when_pipeline_succeeds=True)
    # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge)
    time.sleep(5)
    mr = project.mergerequests.get(merge_request_with_pipeline.iid)
    assert mr.merged_at is None
    assert mr.merge_when_pipeline_succeeds is True
    cancel = mr.cancel_merge_when_pipeline_succeeds()
    assert cancel == {"status": "success"}
def test_merge_request_merge(project, merge_request):
    """Merging an MR succeeds once; a second merge raises GitlabMRClosedError."""
    merge_request.merge()
    # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge)
    time.sleep(5)
    mr = project.mergerequests.get(merge_request.iid)
    assert mr.merged_at is not None
    assert mr.merge_when_pipeline_succeeds is False
    with pytest.raises(gitlab.GitlabMRClosedError):
        # Two merge attempts should raise GitlabMRClosedError
        mr.merge()
def test_merge_request_should_remove_source_branch(project, merge_request) -> None:
    """Test to ensure
    https://github.com/python-gitlab/python-gitlab/issues/1120 is fixed.
    Bug reported that they could not use 'should_remove_source_branch' in
    mr.merge() call"""
    merge_request.merge(should_remove_source_branch=True)
    # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge)
    time.sleep(5)
    # Wait until it is merged
    mr = None
    mr_iid = merge_request.iid
    for _ in range(60):
        mr = project.mergerequests.get(mr_iid)
        if mr.merged_at is not None:
            break
        time.sleep(0.5)
    assert mr is not None
    assert mr.merged_at is not None
    time.sleep(0.5)
    # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge)
    time.sleep(5)
    # Ensure we can NOT get the MR branch
    with pytest.raises(gitlab.exceptions.GitlabGetError):
        result = project.branches.get(merge_request.source_branch)
        # Help to debug in case the expected exception doesn't happen.
        import pprint
        print("mr:", pprint.pformat(mr))
        print("mr.merged_at:", pprint.pformat(mr.merged_at))
        print("result:", pprint.pformat(result))
def test_merge_request_large_commit_message(project, merge_request) -> None:
    """Test to ensure https://github.com/python-gitlab/python-gitlab/issues/1452
    is fixed.
    Bug reported that very long 'merge_commit_message' in mr.merge() would
    cause an error: 414 Request too large
    """
    merge_commit_message = "large_message\r\n" * 1_000
    assert len(merge_commit_message) > 10_000
    merge_request.merge(
        merge_commit_message=merge_commit_message, should_remove_source_branch=False
    )
    # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge)
    time.sleep(5)
    # Wait until it is merged
    mr = None
    mr_iid = merge_request.iid
    for _ in range(60):
        mr = project.mergerequests.get(mr_iid)
        if mr.merged_at is not None:
            break
        time.sleep(0.5)
    assert mr is not None
    assert mr.merged_at is not None
    time.sleep(0.5)
    # Ensure we can get the MR branch
    project.branches.get(merge_request.source_branch)
def test_merge_request_merge_ref(merge_request) -> None:
    """merge_ref() returns a payload containing a commit_id."""
    response = merge_request.merge_ref()
    assert response and "commit_id" in response
def test_merge_request_merge_ref_should_fail(project, merge_request) -> None:
    """merge_ref() fails for an MR with conflicts."""
    # Create conflict
    project.files.create(
        {
            "file_path": f"README.{merge_request.source_branch}",
            "branch": project.default_branch,
            "content": "Different initial content",
            "commit_message": "Another commit in main branch",
        }
    )
    # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge)
    time.sleep(5)
    # Check for non-existing merge_ref for MR with conflicts
    with pytest.raises(gitlab.exceptions.GitlabGetError):
        response = merge_request.merge_ref()
        # NOTE(review): unreachable if merge_ref() raises as expected; only
        # runs (and fails the raises block) if no exception occurred.
        assert "commit_id" not in response
| 9,797 | Python | .py | 231 | 36.554113 | 111 | 0.696587 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,242 | test_projects.py | python-gitlab_python-gitlab/tests/functional/api/test_projects.py | import time
import uuid
import pytest
import gitlab
from gitlab.const import AccessLevel
from gitlab.v4.objects.projects import ProjectStorage
def test_projects_head(gl):
    """HEAD on the projects collection exposes the x-total header."""
    headers = gl.projects.head()
    assert headers["x-total"]
def test_project_head(gl, project):
    """HEAD on a single project returns JSON content-type headers."""
    headers = gl.projects.head(project.id)
    assert headers["content-type"] == "application/json"
def test_create_project(gl, user):
    """Create projects as admin and via sudo, and check list/owned filters."""
    # Moved from group tests chunk in legacy tests, TODO cleanup
    admin_project = gl.projects.create({"name": "admin_project"})
    assert isinstance(admin_project, gitlab.v4.objects.Project)
    assert admin_project in gl.projects.list(search="admin_project")
    sudo_project = gl.projects.create({"name": "sudo_project"}, sudo=user.id)
    created = gl.projects.list()
    created_gen = gl.projects.list(iterator=True)
    owned = gl.projects.list(owned=True)
    assert admin_project in created and sudo_project in created
    assert admin_project in owned and sudo_project not in owned
    assert len(created) == len(list(created_gen))
    admin_project.delete()
    sudo_project.delete()
def test_project_members(user, project):
    """Add a developer member, verify the access level, then remove them."""
    member = project.members.create(
        {"user_id": user.id, "access_level": AccessLevel.DEVELOPER}
    )
    assert member in project.members.list()
    assert member.access_level == 30
    member.delete()
def test_project_badges(project):
    """Create, update, get and delete a project badge."""
    badge_image = "http://example.com"
    badge_link = "http://example/img.svg"
    badge = project.badges.create({"link_url": badge_link, "image_url": badge_image})
    assert badge in project.badges.list()
    badge.image_url = "http://another.example.com"
    badge.save()
    badge = project.badges.get(badge.id)
    assert badge.image_url == "http://another.example.com"
    badge.delete()
@pytest.mark.skip(reason="Commented out in legacy test")
def test_project_boards(project):
    """Exercise board lists: reposition and delete the last list."""
    boards = project.boards.list()
    assert boards
    board = boards[0]
    lists = board.lists.list()
    last_list = lists[-1]
    last_list.position = 0
    last_list.save()
    last_list.delete()
def test_project_custom_attributes(gl, project):
    """Set, update, filter-by and delete a project custom attribute."""
    attrs = project.customattributes.list()
    assert not attrs
    attr = project.customattributes.set("key", "value1")
    assert attr.key == "key"
    assert attr.value == "value1"
    assert attr in project.customattributes.list()
    assert project in gl.projects.list(custom_attributes={"key": "value1"})
    attr = project.customattributes.set("key", "value2")
    attr = project.customattributes.get("key")
    assert attr.value == "value2"
    assert attr in project.customattributes.list()
    attr.delete()
def test_project_environments(project):
    """Create, update, stop and delete a project environment."""
    environment = project.environments.create(
        {"name": "env1", "external_url": "http://fake.env/whatever"}
    )
    environments = project.environments.list()
    assert environment in environments
    environment = environments[0]
    environment.external_url = "http://new.env/whatever"
    environment.save()
    environment = project.environments.list()[0]
    assert environment.external_url == "http://new.env/whatever"
    environment.stop()
    environment.delete()
def test_project_events(project):
    """Project events listing returns a list."""
    events = project.events.list()
    assert isinstance(events, list)
def test_project_file_uploads(project):
    """Upload a file to a project and validate the returned metadata."""
    filename = "test.txt"
    file_contents = "testing contents"
    uploaded_file = project.upload(filename, file_contents)
    alt, url = uploaded_file["alt"], uploaded_file["url"]
    assert alt == filename
    assert url.startswith("/uploads/")
    # The upload URL always ends with the original file name; the previous
    # assertion compared against a corrupted placeholder string.
    assert url.endswith(f"/{filename}")
    assert uploaded_file["markdown"] == f"[{alt}]({url})"
def test_project_forks(gl, project, user):
    """Fork a project into a user namespace and verify the fork relation."""
    fork = project.forks.create({"namespace": user.username})
    fork_project = gl.projects.get(fork.id)
    assert fork_project.forked_from_project["id"] == project.id
    forks = project.forks.list()
    assert fork.id in [fork_project.id for fork_project in forks]
def test_project_hooks(project):
    """Create, update, get and delete a project webhook."""
    hook = project.hooks.create({"url": "http://hook.url"})
    assert hook in project.hooks.list()
    hook.note_events = True
    hook.save()
    hook = project.hooks.get(hook.id)
    assert hook.note_events is True
    hook.delete()
def test_project_housekeeping(project):
    """Trigger project housekeeping (no observable result to assert)."""
    project.housekeeping()
def test_project_labels(project):
    """Label CRUD, renaming with special characters, and (un)subscribe."""
    label = project.labels.create({"name": "label", "color": "#778899"})
    labels = project.labels.list()
    assert label in labels
    label = project.labels.get("label")
    assert label == labels[0]
    label.new_name = "Label:that requires:encoding"
    label.save()
    assert label.name == "Label:that requires:encoding"
    label = project.labels.get("Label:that requires:encoding")
    assert label.name == "Label:that requires:encoding"
    label.subscribe()
    assert label.subscribed is True
    label.unsubscribe()
    assert label.subscribed is False
    label.delete()
def test_project_label_promotion(gl, group):
    """
    Label promotion requires the project to be a child of a group (not in a user namespace)
    """
    _id = uuid.uuid4().hex
    data = {
        "name": f"test-project-{_id}",
        "namespace_id": group.id,
    }
    project = gl.projects.create(data)
    label_name = "promoteme"
    promoted_label = project.labels.create({"name": label_name, "color": "#112233"})
    promoted_label.promote()
    assert any(label.name == label_name for label in group.labels.list())
    group.labels.delete(label_name)
def test_project_milestones(project):
    """Create, update and close a milestone; closed milestone has no items."""
    milestone = project.milestones.create({"title": "milestone1"})
    assert milestone in project.milestones.list()
    milestone.due_date = "2020-01-01T00:00:00Z"
    milestone.save()
    milestone.state_event = "close"
    milestone.save()
    milestone = project.milestones.get(milestone.id)
    assert milestone.state == "closed"
    assert not milestone.issues()
    assert not milestone.merge_requests()
def test_project_milestone_promotion(gl, group):
    """
    Milestone promotion requires the project to be a child of a group (not in a user namespace)
    """
    _id = uuid.uuid4().hex
    data = {
        "name": f"test-project-{_id}",
        "namespace_id": group.id,
    }
    project = gl.projects.create(data)
    milestone_title = "promoteme"
    promoted_milestone = project.milestones.create({"title": milestone_title})
    promoted_milestone.promote()
    assert any(
        milestone.title == milestone_title for milestone in group.milestones.list()
    )
def test_project_pages(project):
    """Toggle the unique-domain Pages setting and delete the Pages site."""
    pages = project.pages.get()
    assert pages.is_unique_domain_enabled is True
    project.pages.update(new_data={"pages_unique_domain_enabled": False})
    pages.refresh()
    assert pages.is_unique_domain_enabled is False
    project.pages.delete()
def test_project_pages_domains(gl, project):
    """Create, list (project and global), get and delete a Pages domain."""
    domain = project.pagesdomains.create({"domain": "foo.domain.com"})
    assert domain in project.pagesdomains.list()
    assert domain in gl.pagesdomains.list()
    domain = project.pagesdomains.get("foo.domain.com")
    assert domain.domain == "foo.domain.com"
    domain.delete()
def test_project_protected_branches(project, gitlab_version):
    """Protected branch CRUD; update only on GitLab >= 15.6."""
    # Updating a protected branch is possible from Gitlab 15.6
    # https://docs.gitlab.com/ee/api/protected_branches.html#update-a-protected-branch
    can_update_prot_branch = gitlab_version.major > 15 or (
        gitlab_version.major == 15 and gitlab_version.minor >= 6
    )
    p_b = project.protectedbranches.create(
        {
            "name": "*-stable",
            "allow_force_push": False,
        }
    )
    assert p_b.name == "*-stable"
    assert not p_b.allow_force_push
    assert p_b in project.protectedbranches.list()
    if can_update_prot_branch:
        p_b.allow_force_push = True
        p_b.save()
    # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge)
    time.sleep(5)
    p_b = project.protectedbranches.get("*-stable")
    if can_update_prot_branch:
        assert p_b.allow_force_push
    p_b.delete()
def test_project_remote_mirrors(project):
    """Create, enable, list and delete a remote push mirror."""
    mirror_url = "https://gitlab.example.com/root/mirror.git"
    mirror = project.remote_mirrors.create({"url": mirror_url})
    assert mirror.url == mirror_url
    mirror.enabled = True
    mirror.save()
    mirror = project.remote_mirrors.list()[0]
    assert isinstance(mirror, gitlab.v4.objects.ProjectRemoteMirror)
    assert mirror.url == mirror_url
    assert mirror.enabled is True
    mirror.delete()
def test_project_services(project):
    """Update, get and delete a project integration (service)."""
    # Use 'update' to create a service as we don't have a 'create' method and
    # to add one is somewhat complicated so it hasn't been done yet.
    project.services.update("asana", api_key="foo")
    service = project.services.get("asana")
    assert service.active is True
    service.api_key = "whatever"
    service.save()
    service = project.services.get("asana")
    assert service.active is True
    service.delete()
def test_project_stars(project):
    """Starring and unstarring updates star_count."""
    project.star()
    assert project.star_count == 1
    project.unstar()
    assert project.star_count == 0
def test_project_storage(project):
    """The storage endpoint reports the repository storage shard."""
    storage = project.storage.get()
    assert isinstance(storage, ProjectStorage)
    assert storage.repository_storage == "default"
def test_project_tags(project, project_file):
    """Create, list and delete a tag."""
    tag = project.tags.create({"tag_name": "v1.0", "ref": "main"})
    assert tag in project.tags.list()
    tag.delete()
def test_project_triggers(project):
    """Create, list and delete a pipeline trigger."""
    trigger = project.triggers.create({"description": "trigger1"})
    assert trigger in project.triggers.list()
    trigger.delete()
def test_project_wiki(project):
    """Create, get, update and delete a wiki page."""
    content = "Wiki page content"
    wiki = project.wikis.create({"title": "wikipage", "content": content})
    assert wiki in project.wikis.list()
    wiki = project.wikis.get(wiki.slug)
    assert wiki.content == content
    # update and delete seem broken
    wiki.content = "new content"
    wiki.save()
    wiki.delete()
def test_project_groups_list(gl, group):
    """Test listing groups of a project.

    Creates a subgroup of the top group and a project inside it, then checks
    that both ancestor groups are returned by the project's groups manager.
    """
    # Create a subgroup of our top-group, we will place our new project inside
    # this group.
    group2 = gl.groups.create(
        {"name": "group2_proj", "path": "group2_proj", "parent_id": group.id}
    )
    data = {
        "name": "test-project-tpsg",
        "namespace_id": group2.id,
    }
    project = gl.projects.create(data)
    groups = project.groups.list()
    # Set comprehension instead of set([...]) (ruff C403).
    group_ids = {x.id for x in groups}
    assert {group.id, group2.id} == group_ids
def test_project_transfer(gl, project, group):
    """Transfer a project into a group, then back to the user namespace."""
    assert project.namespace["path"] != group.full_path

    project.transfer(group.id)
    project = gl.projects.get(project.id)
    assert project.namespace["path"] == group.full_path

    gl.auth()
    username = gl.user.username
    project.transfer(username)
    project = gl.projects.get(project.id)
    assert project.namespace["path"] == username
| 11,088 | Python | .py | 276 | 34.90942 | 115 | 0.698044 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,243 | test_packages.py | python-gitlab_python-gitlab/tests/functional/api/test_packages.py | """
GitLab API:
https://docs.gitlab.com/ce/api/packages.html
https://docs.gitlab.com/ee/user/packages/generic_packages
"""
from collections.abc import Iterator
import pytest
from gitlab import Gitlab
from gitlab.v4.objects import GenericPackage, Project, ProjectPackageProtectionRule
package_name = "hello-world"
package_version = "v1.0.0"
file_name = "hello.tar.gz"
file_name2 = "hello2.tar.gz"
file_content = "package content"
@pytest.fixture(scope="module", autouse=True)
def protected_package_feature(gl: Gitlab):
    """Enable the protected-packages feature flag for every test in this module."""
    gl.features.set(name="packages_protected_packages", value=True)
def test_list_project_packages(project):
    """Listing a project's packages returns a list."""
    project_packages = project.packages.list()
    assert isinstance(project_packages, list)
def test_list_group_packages(group):
    """Listing a group's packages returns a list."""
    group_packages = group.packages.list()
    assert isinstance(group_packages, list)
def test_upload_generic_package(tmp_path, project):
    """Upload a generic package from a filesystem path."""
    archive = tmp_path / file_name
    archive.write_text(file_content)

    uploaded = project.generic_packages.upload(
        package_name=package_name,
        package_version=package_version,
        file_name=file_name,
        path=archive,
    )

    assert isinstance(uploaded, GenericPackage)
    assert uploaded.message == "201 Created"
def test_upload_generic_package_as_bytes(tmp_path, project):
    """Upload a generic package from an in-memory bytes payload."""
    archive = tmp_path / file_name
    archive.write_text(file_content)
    payload = archive.read_bytes()

    uploaded = project.generic_packages.upload(
        package_name=package_name,
        package_version=package_version,
        file_name=file_name,
        data=payload,
    )

    assert isinstance(uploaded, GenericPackage)
    assert uploaded.message == "201 Created"
def test_upload_generic_package_as_file(tmp_path, project):
    """Upload a generic package from an open binary file object.

    The file handle is held in a context manager so it is closed even if
    the upload raises (the original opened it and never closed it).
    """
    path = tmp_path / file_name
    path.write_text(file_content)

    # Close the handle deterministically instead of leaking it.
    with path.open(mode="rb") as data:
        package = project.generic_packages.upload(
            package_name=package_name,
            package_version=package_version,
            file_name=file_name,
            data=data,
        )

    assert isinstance(package, GenericPackage)
    assert package.message == "201 Created"
def test_upload_generic_package_select(tmp_path, project):
    """Upload with select='package_file' to get file metadata back."""
    archive = tmp_path / file_name2
    archive.write_text(file_content)

    package_file = project.generic_packages.upload(
        package_name=package_name,
        package_version=package_version,
        file_name=file_name2,
        path=archive,
        select="package_file",
    )

    assert isinstance(package_file, GenericPackage)
    assert package_file.file_name == file_name2
    assert package_file.size == archive.stat().st_size
def test_download_generic_package(project):
    """Download a package file in one shot and verify its content."""
    downloaded = project.generic_packages.download(
        package_name=package_name,
        package_version=package_version,
        file_name=file_name,
    )

    assert isinstance(downloaded, bytes)
    assert downloaded.decode("utf-8") == file_content
def test_stream_generic_package(project):
    """Stream a package download chunk by chunk and reassemble it."""
    chunks = project.generic_packages.download(
        package_name=package_name,
        package_version=package_version,
        file_name=file_name,
        iterator=True,
    )
    assert isinstance(chunks, Iterator)

    # Reassemble with a single join instead of repeated +=.
    package = b"".join(chunks)
    assert isinstance(package, bytes)
    assert package.decode("utf-8") == file_content
def test_download_generic_package_to_file(tmp_path, project):
    """Stream a download directly into a file via the action callback."""
    target = tmp_path / file_name

    with open(target, "wb") as out:
        project.generic_packages.download(
            package_name=package_name,
            package_version=package_version,
            file_name=file_name,
            streamed=True,
            action=out.write,
        )

    assert target.read_text() == file_content
def test_stream_generic_package_to_file(tmp_path, project):
    """Iterate over download chunks and write them to a file manually."""
    target = tmp_path / file_name
    chunk_iterator = project.generic_packages.download(
        package_name=package_name,
        package_version=package_version,
        file_name=file_name,
        iterator=True,
    )

    with open(target, "wb") as out:
        for piece in chunk_iterator:
            out.write(piece)

    assert target.read_text() == file_content
def test_list_project_protected_packages(project: Project):
    """Listing package protection rules returns a list."""
    protection_rules = project.package_protection_rules.list()
    assert isinstance(protection_rules, list)
@pytest.mark.skip(reason="Not released yet")
def test_create_project_protected_packages(project: Project):
    """Create, update and delete a package protection rule."""
    rule = project.package_protection_rules.create(
        {
            "package_name_pattern": "v*",
            "package_type": "npm",
            "minimum_access_level_for_push": "maintainer",
        }
    )
    assert isinstance(rule, ProjectPackageProtectionRule)
    assert rule.package_type == "npm"

    rule.minimum_access_level_for_push = "owner"
    rule.save()

    rule.delete()
| 4,879 | Python | .py | 132 | 30.833333 | 83 | 0.696467 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,244 | test_statistics.py | python-gitlab_python-gitlab/tests/functional/api/test_statistics.py | """
GitLab API: https://docs.gitlab.com/ee/api/statistics.html
"""
def test_get_statistics(gl):
    """Instance statistics counters are returned as digit strings."""
    stats = gl.statistics.get()
    for counter in ("snippets", "users", "groups", "projects"):
        assert getattr(stats, counter).isdigit()
| 295 | Python | .py | 9 | 29.222222 | 58 | 0.749117 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,245 | test_variables.py | python-gitlab_python-gitlab/tests/functional/api/test_variables.py | """
GitLab API:
https://docs.gitlab.com/ee/api/instance_level_ci_variables.html
https://docs.gitlab.com/ee/api/project_level_variables.html
https://docs.gitlab.com/ee/api/group_level_variables.html
"""
def test_instance_variables(gl):
    """CRUD round-trip for an instance-level CI variable."""
    created = gl.variables.create({"key": "key1", "value": "value1"})
    assert created.value == "value1"
    assert created in gl.variables.list()

    created.value = "new_value1"
    created.save()

    fetched = gl.variables.get(created.key)
    assert fetched.value == "new_value1"
    fetched.delete()
def test_group_variables(group):
    """CRUD round-trip for a group-level CI variable."""
    created = group.variables.create({"key": "key1", "value": "value1"})
    assert created.value == "value1"
    assert created in group.variables.list()

    created.value = "new_value1"
    created.save()

    fetched = group.variables.get(created.key)
    assert fetched.value == "new_value1"
    fetched.delete()
def test_project_variables(project):
    """CRUD round-trip for a project-level CI variable."""
    created = project.variables.create({"key": "key1", "value": "value1"})
    assert created.value == "value1"
    assert created in project.variables.list()

    created.value = "new_value1"
    created.save()

    fetched = project.variables.get(created.key)
    assert fetched.value == "new_value1"
    fetched.delete()
| 1,289 | Python | .py | 33 | 34.787879 | 75 | 0.710611 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,246 | test_wikis.py | python-gitlab_python-gitlab/tests/functional/api/test_wikis.py | """
GitLab API:
https://docs.gitlab.com/ee/api/wikis.html
"""
def test_project_wikis(project):
    """Create, update, then delete a project wiki page."""
    wiki_page = project.wikis.create(
        {"title": "title/subtitle", "content": "test content"}
    )

    wiki_page.title = "subtitle"
    wiki_page.content = "update content"
    wiki_page.save()

    wiki_page.delete()
def test_project_wiki_file_upload(project):
    """Upload an attachment to a project wiki page and verify its metadata."""
    page = project.wikis.create(
        {"title": "title/subtitle", "content": "test page content"}
    )
    filename = "test.txt"
    file_contents = "testing contents"

    uploaded_file = page.upload(filename, file_contents)

    link = uploaded_file["link"]
    file_name = uploaded_file["file_name"]
    file_path = uploaded_file["file_path"]
    assert file_name == filename
    assert file_path.startswith("uploads/")
    # The path must end with the uploaded file's name; the original assertion
    # compared against a garbled literal instead of the filename variable.
    assert file_path.endswith(f"/{filename}")
    assert link["url"] == file_path
    assert link["markdown"] == f"[{file_name}]({file_path})"
def test_group_wikis(group):
    """Create, update, then delete a group wiki page."""
    wiki_page = group.wikis.create(
        {"title": "title/subtitle", "content": "test content"}
    )

    wiki_page.title = "subtitle"
    wiki_page.content = "update content"
    wiki_page.save()

    wiki_page.delete()
def test_group_wiki_file_upload(group):
    """Upload an attachment to a group wiki page and verify its metadata."""
    page = group.wikis.create(
        {"title": "title/subtitle", "content": "test page content"}
    )
    filename = "test.txt"
    file_contents = "testing contents"

    uploaded_file = page.upload(filename, file_contents)

    link = uploaded_file["link"]
    file_name = uploaded_file["file_name"]
    file_path = uploaded_file["file_path"]
    assert file_name == filename
    assert file_path.startswith("uploads/")
    # The path must end with the uploaded file's name; the original assertion
    # compared against a garbled literal instead of the filename variable.
    assert file_path.endswith(f"/{filename}")
    assert link["url"] == file_path
    assert link["markdown"] == f"[{file_name}]({file_path})"
| 1,728 | Python | .py | 46 | 32.73913 | 87 | 0.665666 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,247 | test_users.py | python-gitlab_python-gitlab/tests/functional/api/test_users.py | """
GitLab API:
https://docs.gitlab.com/ee/api/users.html
https://docs.gitlab.com/ee/api/users.html#delete-authentication-identity-from-user
"""
import datetime
import time
import requests
def test_create_user(gl, fixture_dir):
    """Create a user with an avatar and verify the stored attributes."""
    avatar_path = fixture_dir / "avatar.png"

    # Hold the avatar handle in a context manager so it is closed after the
    # upload (the original opened it inline and never closed it).
    with open(avatar_path, "rb") as avatar_file:
        user = gl.users.create(
            {
                "email": "foo@bar.com",
                "username": "foo",
                "name": "foo",
                "password": "E4596f8be406Bc3a14a4ccdb1df80587$3",
                "avatar": avatar_file,
            }
        )

    created_user = gl.users.list(username="foo")[0]
    assert created_user.username == user.username
    assert created_user.email == user.email

    # The test host serves avatars on localhost:8080 rather than gitlab.test.
    avatar_url = user.avatar_url.replace("gitlab.test", "localhost:8080")
    uploaded_avatar = requests.get(avatar_url).content
    assert uploaded_avatar == avatar_path.read_bytes()
def test_block_user(gl, user):
    """Block/unblock a user; repeated calls report the already-set state."""
    assert user.block() is True
    assert user in gl.users.list(blocked=True)

    # Blocking an already-blocked user returns None.
    assert user.block() is None

    assert user.unblock() is True
    assert user in gl.users.list(blocked=False)

    # Unblocking an already-active user returns False.
    assert user.unblock() is False
def test_ban_user(gl, user):
    """Ban then unban a user, checking the reported state each time."""
    user.ban()
    assert gl.users.get(user.id).state == "banned"

    user.unban()
    assert gl.users.get(user.id).state == "active"
def test_delete_user(gl):
    """Deleting a freshly created user succeeds (2xx from the API)."""
    attrs = {
        "email": "delete-user@test.com",
        "username": "delete-user",
        "name": "delete-user",
        "password": "E4596f8be406Bc3a14a4ccdb1df80587#15",
    }
    doomed_user = gl.users.create(attrs)

    # We don't need to validate Gitlab's behaviour by checking if user is
    # present after a delay etc, just that python-gitlab acted correctly to
    # produce a 2xx from Gitlab.
    doomed_user.delete()
def test_user_projects_list(gl, user):
    """A fresh user has an empty (but list-typed) project list."""
    user_projects = user.projects.list()
    assert isinstance(user_projects, list)
    assert not user_projects
def test_user_events_list(gl, user):
    """A fresh user has an empty (but list-typed) event list."""
    user_events = user.events.list()
    assert isinstance(user_events, list)
    assert not user_events
def test_user_bio(gl, user):
    """Setting and saving the user bio does not raise."""
    user.bio = "This is the user bio"
    user.save()
def test_list_multiple_users(gl, user):
    """Search returns every user matching a shared username prefix."""
    second_user = gl.users.create(
        {
            "email": f"{user.email}.2",
            "username": f"{user.username}_2",
            "name": "Foo Bar",
            "password": "E4596f8be406Bc3a14a4ccdb1df80587#!",
        }
    )

    assert gl.users.list(search=second_user.username)[0].id == second_user.id

    # Searching the shorter prefix matches both accounts.
    found = list(gl.users.list(search=user.username))
    assert set(found) == {user, second_user}

    assert not gl.users.list(search="asdf")
def test_user_gpg_keys(gl, user, GPG_KEY):
    """Create and delete a GPG key for a user."""
    gpg_key = user.gpgkeys.create({"key": GPG_KEY})
    assert gpg_key in user.gpgkeys.list()
    gpg_key.delete()
def test_user_ssh_keys(gl, user, SSH_KEY):
    """Create, fetch and delete an SSH key for a user."""
    created = user.keys.create({"title": "testkey", "key": SSH_KEY})
    assert created in user.keys.list()

    fetched = user.keys.get(created.id)
    assert fetched.key == created.key

    created.delete()
def test_user_email(gl, user):
    """Add and remove a secondary email address."""
    secondary = user.emails.create({"email": "foo2@bar.com"})
    assert secondary in user.emails.list()
    secondary.delete()
def test_user_custom_attributes(gl, user):
    """Set, filter by, update and delete a user custom attribute."""
    user.customattributes.list()

    first = user.customattributes.set("key", "value1")
    assert user in gl.users.list(custom_attributes={"key": "value1"})
    assert first.key == "key"
    assert first.value == "value1"
    assert first in user.customattributes.list()

    # Re-setting the same key updates it in place.
    user.customattributes.set("key", "value2")
    updated = user.customattributes.get("key")
    assert updated.value == "value2"
    assert updated in user.customattributes.list()

    updated.delete()
def test_user_impersonation_tokens(gl, user):
    """Create and delete an impersonation token for a user."""
    expiry = datetime.date.today() + datetime.timedelta(days=4)
    token = user.impersonationtokens.create(
        {
            "name": "user_impersonation_token",
            "scopes": ["api", "read_user"],
            "expires_at": expiry.isoformat(),
        }
    )

    # Pause to let GL catch up (happens on hosted too, sometimes takes a while
    # for the server to be ready).
    time.sleep(30)
    assert token in user.impersonationtokens.list(state="active")

    token.delete()
def test_user_identities(gl, user):
    """Attach an external identity to a user, then remove it."""
    provider = "test_provider"

    user.provider = provider
    user.extern_uid = "1"
    user.save()
    assert provider in [identity["provider"] for identity in user.identities]

    user.identityproviders.delete(provider)
| 5,007 | Python | .py | 136 | 30.860294 | 111 | 0.663068 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,248 | test_branches.py | python-gitlab_python-gitlab/tests/functional/api/test_branches.py | """
GitLab API:
https://docs.gitlab.com/ee/api/branches.html
"""
def test_branch_name_with_period(project):
    """Branch names containing a period '.' can be created and fetched."""
    branch_name = "my.branch.name"

    created = project.branches.create({"branch": branch_name, "ref": "main"})
    assert created.name == branch_name

    fetched = project.branches.get(branch_name)
    assert fetched.name == created.name

    created.delete()
| 494 | Python | .py | 13 | 34.230769 | 76 | 0.708595 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,249 | test_services.py | python-gitlab_python-gitlab/tests/functional/api/test_services.py | """
GitLab API:
https://docs.gitlab.com/ee/api/integrations.html
"""
import gitlab
def test_get_service_lazy(project):
    """A lazy get returns a ProjectService shell object."""
    lazy_service = project.services.get("jira", lazy=True)
    assert isinstance(lazy_service, gitlab.v4.objects.ProjectService)
def test_update_service(project):
    """Updating a service activates it and returns its attribute dict."""
    updated = project.services.update(
        "emails-on-push", {"recipients": "email@example.com"}
    )
    assert updated["active"]
def test_list_services(project, service):
    """Listing services yields active ProjectService objects."""
    first = project.services.list()[0]
    assert isinstance(first, gitlab.v4.objects.ProjectService)
    assert first.active
def test_get_service(project, service):
    """Fetching a service by slug returns an active ProjectService."""
    fetched = project.services.get(service["slug"])
    assert isinstance(fetched, gitlab.v4.objects.ProjectService)
    assert fetched.active
def test_delete_service(project, service):
    """A service fetched by slug can be deleted."""
    project.services.get(service["slug"]).delete()
| 944 | Python | .py | 24 | 35.333333 | 71 | 0.751101 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,250 | test_topics.py | python-gitlab_python-gitlab/tests/functional/api/test_topics.py | """
GitLab API:
https://docs.gitlab.com/ce/api/topics.html
"""
def test_topics(gl, gitlab_version):
    """Create, update, merge and delete topics (title required since GL 15)."""
    requires_title = gitlab_version.major >= 15

    assert not gl.topics.list()

    attrs = {"name": "my-topic", "description": "My Topic"}
    if requires_title:
        attrs["title"] = "my topic title"
    topic = gl.topics.create(attrs)
    assert topic.name == "my-topic"
    if requires_title:
        assert topic.title == "my topic title"
    assert gl.topics.list()

    topic.description = "My Updated Topic"
    topic.save()
    assert gl.topics.get(topic.id).description == topic.description

    attrs = {"name": "my-second-topic", "description": "My Second Topic"}
    if requires_title:
        attrs["title"] = "my second topic title"
    topic2 = gl.topics.create(attrs)

    # Merging folds the first topic into the second.
    merged = gl.topics.merge(topic.id, topic2.id)
    assert merged["id"] == topic2.id

    topic2.delete()
| 971 | Python | .py | 25 | 33.84 | 79 | 0.667377 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,251 | test_deploy_tokens.py | python-gitlab_python-gitlab/tests/functional/api/test_deploy_tokens.py | import datetime
def test_project_deploy_tokens(gl, project):
    """Create, inspect and delete a project deploy token."""
    today = datetime.date.today().isoformat()
    token = project.deploytokens.create(
        {
            "name": "foo",
            "username": "bar",
            "expires_at": today,
            "scopes": ["read_registry"],
        }
    )
    assert token in project.deploytokens.list()
    # Project tokens are included in the instance-wide listing.
    assert set(project.deploytokens.list()) <= set(gl.deploytokens.list())

    token = project.deploytokens.get(token.id)
    assert token.name == "foo"
    assert token.username == "bar"
    assert token.expires_at == f"{today}T00:00:00.000Z"
    assert token.scopes == ["read_registry"]

    token.delete()
def test_group_deploy_tokens(gl, group):
    """Create, inspect and delete a group deploy token."""
    token = group.deploytokens.create(
        {
            "name": "foo",
            "scopes": ["read_registry"],
        }
    )
    assert token in group.deploytokens.list()
    # Group tokens are included in the instance-wide listing.
    assert set(group.deploytokens.list()) <= set(gl.deploytokens.list())

    token = group.deploytokens.get(token.id)
    assert token.name == "foo"
    assert token.scopes == ["read_registry"]

    token.delete()
| 1,216 | Python | .py | 32 | 31.09375 | 74 | 0.642553 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,252 | test_bulk_imports.py | python-gitlab_python-gitlab/tests/functional/api/test_bulk_imports.py | import time
import pytest
import gitlab
@pytest.fixture
def bulk_import_enabled(gl: gitlab.Gitlab):
    """Temporarily enable the instance-wide bulk-import setting.

    Yields the settings object, then restores the previous value on teardown.
    """
    settings = gl.settings.get()
    # Remember the original value so teardown can restore it.
    bulk_import_default = settings.bulk_import_enabled
    settings.bulk_import_enabled = True
    settings.save()
    # todo: why so fussy with feature flag timing?
    time.sleep(5)
    get_settings = gl.settings.get()
    assert get_settings.bulk_import_enabled is True
    yield settings
    settings.bulk_import_enabled = bulk_import_default
    settings.save()
# https://github.com/python-gitlab/python-gitlab/pull/2790#pullrequestreview-1873617123
@pytest.mark.xfail(reason="Bulk Imports to be worked on in a follow up")
def test_bulk_imports(gl, group, bulk_import_enabled):
    """End-to-end group migration via the bulk-import (direct transfer) API."""
    destination = f"{group.full_path}-import"
    # Source instance credentials: this migrates from the instance to itself.
    configuration = {
        "url": gl.url,
        "access_token": gl.private_token,
    }
    migration_entity = {
        "source_full_path": group.full_path,
        "source_type": "group_entity",
        "destination_slug": destination,
        "destination_namespace": destination,
    }
    created_migration = gl.bulk_imports.create(
        {
            "configuration": configuration,
            "entities": [migration_entity],
        }
    )
    assert created_migration.source_type == "gitlab"
    assert created_migration.status == "created"
    # The migration is retrievable, refreshable, and listed instance-wide.
    migration = gl.bulk_imports.get(created_migration.id)
    assert migration == created_migration
    migration.refresh()
    assert migration == created_migration
    migrations = gl.bulk_imports.list()
    assert migration in migrations
    # Entities are visible both via the migration and instance-wide.
    all_entities = gl.bulk_import_entities.list()
    entities = migration.entities.list()
    assert isinstance(entities, list)
    assert entities[0] in all_entities
    entity = migration.entities.get(entities[0].id)
    assert entity == entities[0]
    entity.refresh()
    assert entity.created_at == entities[0].created_at
| 1,908 | Python | .py | 52 | 31.057692 | 87 | 0.701468 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,253 | test_snippets.py | python-gitlab_python-gitlab/tests/functional/api/test_snippets.py | import pytest
import gitlab
def test_snippets(gl):
    """Personal snippet CRUD plus the various listing endpoints."""
    assert not gl.snippets.list(get_all=True)

    created = gl.snippets.create(
        {
            "title": "snippet1",
            "files": [{"file_path": "snippet1.py", "content": "import gitlab"}],
        }
    )

    snippet = gl.snippets.get(created.id)
    snippet.title = "updated_title"
    snippet.save()

    snippet = gl.snippets.get(created.id)
    assert snippet.title == "updated_title"
    assert snippet.content().decode() == "import gitlab"

    all_snippets = gl.snippets.list_all(get_all=True)
    public_snippets = gl.snippets.public(get_all=True)
    list_public_snippets = gl.snippets.list_public(get_all=True)
    assert isinstance(all_snippets, list)
    assert isinstance(list_public_snippets, list)
    # Both public-listing entry points must agree.
    assert public_snippets == list_public_snippets

    snippet.delete()
def test_project_snippets(project):
    """Create a private snippet on a project with snippets enabled."""
    project.snippets_enabled = True
    project.save()

    attrs = {
        "title": "snip1",
        "files": [{"file_path": "foo.py", "content": "initial content"}],
        "visibility": gitlab.const.VISIBILITY_PRIVATE,
    }
    snippet = project.snippets.create(attrs)
    assert snippet.title == "snip1"
@pytest.mark.xfail(reason="Returning 404 UserAgentDetail not found in GL 16")
def test_project_snippet_user_agent_detail(project):
    """The user-agent detail of a snippet exposes the recorded agent."""
    first_snippet = project.snippets.list()[0]
    detail = first_snippet.user_agent_detail()
    assert detail["user_agent"]
def test_project_snippet_discussion(project):
    """Create a discussion on a snippet, edit a note, then remove it."""
    snippet = project.snippets.list()[0]

    discussion = snippet.discussions.create({"body": "Discussion body"})
    assert discussion in snippet.discussions.list()

    note = discussion.notes.create({"body": "first note"})
    fetched_note = discussion.notes.get(note.id)
    fetched_note.body = "updated body"
    fetched_note.save()

    # The edited note is the most recent one on the discussion.
    discussion = snippet.discussions.get(discussion.id)
    assert discussion.attributes["notes"][-1]["body"] == "updated body"

    fetched_note.delete()
def test_project_snippet_file(project):
    """Rename a snippet file, verify content and name, then delete it."""
    snippet = project.snippets.list()[0]
    snippet.file_name = "bar.py"
    snippet.save()

    snippet = project.snippets.get(snippet.id)
    assert snippet.file_name == "bar.py"
    assert snippet.content().decode() == "initial content"
    assert snippet in project.snippets.list()

    snippet.delete()
| 2,435 | Python | .py | 61 | 34.098361 | 80 | 0.684412 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,254 | test_deploy_keys.py | python-gitlab_python-gitlab/tests/functional/api/test_deploy_keys.py | def test_project_deploy_keys(gl, project, DEPLOY_KEY):
deploy_key = project.keys.create({"title": "foo@bar", "key": DEPLOY_KEY})
assert deploy_key in project.keys.list()
project2 = gl.projects.create({"name": "deploy-key-project"})
project2.keys.enable(deploy_key.id)
assert deploy_key in project2.keys.list()
project2.keys.delete(deploy_key.id)
project2.delete()
| 395 | Python | .py | 8 | 44.5 | 77 | 0.708333 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,255 | test_gitlab.py | python-gitlab_python-gitlab/tests/functional/api/test_gitlab.py | import pytest
import requests
import gitlab
@pytest.fixture(
    scope="session",
    params=[{"get_all": True}, {"all": True}],
    ids=["get_all=True", "all=True"],
)
def get_all_kwargs(request):
    """A tiny parametrized fixture to inject both `get_all=True` and
    `all=True` (the deprecated spelling) so tests prove the two kwargs
    behave the same way for pagination."""
    return request.param
def test_auth_from_config(gl, gitlab_config, temp_dir):
    """Token authentication via a config file yields a CurrentUser."""
    instance = gitlab.Gitlab.from_config(config_files=[gitlab_config])
    instance.auth()
    assert isinstance(instance.user, gitlab.v4.objects.CurrentUser)
def test_no_custom_session(gl, temp_dir):
    """Without passing a session, a fresh internal one is created."""
    unrelated_session = requests.Session()
    instance = gitlab.Gitlab.from_config(
        config_files=[temp_dir / "python-gitlab.cfg"]
    )
    assert instance.session != unrelated_session
def test_custom_session(gl, temp_dir):
    """A session passed explicitly is the one the client uses."""
    custom_session = requests.Session()
    instance = gitlab.Gitlab.from_config(
        config_files=[temp_dir / "python-gitlab.cfg"], session=custom_session
    )
    assert instance.session == custom_session
def test_broadcast_messages(gl, get_all_kwargs):
    """Create, recolor, list, fetch and delete a broadcast message."""
    message = gl.broadcastmessages.create({"message": "this is the message"})
    message.color = "#444444"
    message.save()
    message_id = message.id

    listed = gl.broadcastmessages.list(**get_all_kwargs)[0]
    assert listed.color == "#444444"

    fetched = gl.broadcastmessages.get(message_id)
    assert fetched.color == "#444444"
    fetched.delete()
def test_markdown(gl):
    """Rendering markdown returns HTML containing the input text."""
    rendered = gl.markdown("foo")
    assert "foo" in rendered
def test_markdown_in_project(gl, project):
    """Markdown rendering also works with a project context."""
    rendered = gl.markdown("foo", project=project.path_with_namespace)
    assert "foo" in rendered
def test_sidekiq_queue_metrics(gl):
    """Sidekiq queue metrics include the default queue."""
    metrics = gl.sidekiq.queue_metrics()
    assert isinstance(metrics, dict)
    assert "default" in metrics["queues"]
def test_sidekiq_process_metrics(gl):
    """Sidekiq process metrics carry a hostname per process."""
    metrics = gl.sidekiq.process_metrics()
    assert isinstance(metrics, dict)
    assert "hostname" in metrics["processes"][0]
def test_sidekiq_job_stats(gl):
    """Sidekiq job stats expose a processed-jobs counter."""
    stats = gl.sidekiq.job_stats()
    assert isinstance(stats, dict)
    assert "processed" in stats["jobs"]
def test_sidekiq_compound_metrics(gl):
    """Compound metrics aggregate jobs, processes and queues."""
    metrics = gl.sidekiq.compound_metrics()
    assert isinstance(metrics, dict)
    for section in ("jobs", "processes", "queues"):
        assert section in metrics
@pytest.mark.gitlab_premium
def test_geo_nodes(gl):
    """Geo node list and status endpoints return lists.

    Very basic checks only, because the test instance has a single node.
    """
    assert isinstance(gl.geonodes.list(), list)
    assert isinstance(gl.geonodes.status(), list)
@pytest.mark.gitlab_premium
def test_gitlab_license(gl):
    """The current license is readable; setting a bogus key fails."""
    current_license = gl.get_license()
    assert "user_limit" in current_license

    with pytest.raises(gitlab.GitlabLicenseError, match="The license key is invalid."):
        gl.set_license("dummy key")
def test_gitlab_settings(gl):
    """Saving an application setting persists across a re-fetch."""
    settings = gl.settings.get()
    settings.default_projects_limit = 42
    settings.save()

    assert gl.settings.get().default_projects_limit == 42
def test_template_dockerfile(gl):
    """Dockerfile templates can be listed and fetched by name."""
    assert gl.dockerfiles.list()
    assert gl.dockerfiles.get("Node").content is not None
def test_template_gitignore(gl, get_all_kwargs):
    """Gitignore templates can be listed and fetched by name."""
    assert gl.gitignores.list(**get_all_kwargs)
    assert gl.gitignores.get("Node").content is not None
def test_template_gitlabciyml(gl, get_all_kwargs):
    """CI YAML templates can be listed and fetched by name."""
    assert gl.gitlabciymls.list(**get_all_kwargs)
    assert gl.gitlabciymls.get("Nodejs").content is not None
def test_template_license(gl):
    """License templates interpolate the fullname placeholder."""
    assert gl.licenses.list()
    rendered = gl.licenses.get(
        "bsd-2-clause", project="mytestproject", fullname="mytestfullname"
    )
    assert "mytestfullname" in rendered.content
def test_hooks(gl):
    """Create and delete a system hook."""
    new_hook = gl.hooks.create({"url": "http://whatever.com"})
    assert new_hook in gl.hooks.list()
    new_hook.delete()
def test_namespaces(gl, get_all_kwargs):
    """Namespaces can be listed, searched, fetched and existence-checked."""
    gl.auth()
    username = gl.user.username

    assert gl.namespaces.list(**get_all_kwargs)

    matches = gl.namespaces.list(search=username, **get_all_kwargs)
    assert matches[0].kind == "user"

    assert gl.namespaces.get(username).kind == "user"
    assert gl.namespaces.exists(username).exists
def test_notification_settings(gl):
    """Saving the notification level persists across a re-fetch."""
    settings = gl.notificationsettings.get()
    settings.level = gitlab.const.NotificationLevel.WATCH
    settings.save()

    assert gl.notificationsettings.get().level == gitlab.const.NotificationLevel.WATCH
def test_search(gl):
    """Searching users for 'Administrator' finds the root user (id 1)."""
    hits = gl.search(scope=gitlab.const.SearchScope.USERS, search="Administrator")
    assert hits[0]["id"] == 1
def test_user_activities(gl):
    """The user-activities endpoint accepts a `from` filter and returns a list."""
    activity_list = gl.user_activities.list(query_parameters={"from": "2019-01-01"})
    assert isinstance(activity_list, list)
def test_events(gl):
    """Instance-wide events are returned as a list."""
    event_list = gl.events.list()
    assert isinstance(event_list, list)
@pytest.mark.skip
def test_features(gl):
    """Set, list and delete a feature flag."""
    feature = gl.features.set("foo", 30)
    assert feature.name == "foo"
    assert feature in gl.features.list()
    feature.delete()
def test_pagination(gl, project):
    """per_page=1 pages are single-item and disjoint."""
    extra_project = gl.projects.create({"name": "project-page-2"})

    page_one = gl.projects.list(per_page=1, page=1)
    page_two = gl.projects.list(per_page=1, page=2)
    assert len(page_one) == 1
    assert len(page_two) == 1
    assert page_one[0].id != page_two[0].id

    extra_project.delete()
def test_rate_limits(gl):
    """Authenticated API throttling raises a 'Retry later' error once exceeded."""
    settings = gl.settings.get()
    settings.throttle_authenticated_api_enabled = True
    settings.throttle_authenticated_api_requests_per_period = 1
    settings.throttle_authenticated_api_period_in_seconds = 3
    settings.save()

    projects = []
    for i in range(0, 20):
        projects.append(gl.projects.create({"name": f"{str(i)}ok"}))

    # With obey_rate_limit=False the client surfaces the 429 instead of
    # sleeping and retrying.
    with pytest.raises(gitlab.GitlabCreateError) as e:
        for i in range(20, 40):
            projects.append(
                gl.projects.create(
                    {"name": f"{str(i)}shouldfail"}, obey_rate_limit=False
                )
            )
    assert "Retry later" in str(e.value)

    # Disable throttling again before cleanup so the deletes don't trip it.
    settings.throttle_authenticated_api_enabled = False
    settings.save()

    # Plain loop for the side effect: a list comprehension used only for its
    # side effects builds and discards a throwaway list.
    for created_project in projects:
        created_project.delete()
def test_list_default_warning(gl):
    """A default list() on a >20-item collection warns exactly once,
    attributing the warning to this test file."""
    with pytest.warns(UserWarning, match="python-gitlab.readthedocs.io") as record:
        gl.gitlabciymls.list()

    assert len(record) == 1
    emitted = record[0]
    assert emitted.filename == __file__
    assert __file__ in str(emitted.message)
def test_list_page_nowarning(gl, recwarn):
    """An explicit `page=X` suppresses the pagination warning."""
    gl.gitlabciymls.list(page=1)
    assert len(recwarn) == 0
def test_list_all_false_nowarning(gl, recwarn):
    """An explicit `all=False` suppresses the pagination warning."""
    gl.gitlabciymls.list(all=False)
    assert len(recwarn) == 0
def test_list_all_true_nowarning(gl, get_all_kwargs, recwarn):
    """`get_all=True` (or `all=True`) suppresses the docs-link warning."""
    items = gl.gitlabciymls.list(**get_all_kwargs)

    for caught in recwarn:
        if issubclass(caught.category, UserWarning):
            # Other UserWarnings are tolerated as long as they are not ours
            # (ours carries a link to the docs).
            assert "python-gitlab.readthedocs.io" not in str(caught.message)

    assert len(items) > 20
def test_list_iterator_true_nowarning(gl, recwarn):
    """`iterator=True` suppresses the warning entirely."""
    lazy_items = gl.gitlabciymls.list(iterator=True)
    assert len(recwarn) == 0
    assert len(list(lazy_items)) > 20
| 7,787 | Python | .py | 197 | 34.340102 | 87 | 0.698734 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,256 | test_registry.py | python-gitlab_python-gitlab/tests/functional/api/test_registry.py | import pytest
from gitlab import Gitlab
from gitlab.v4.objects import Project, ProjectRegistryProtectionRule
@pytest.fixture(scope="module", autouse=True)
def protected_registry_feature(gl: Gitlab):
    """Enable the protected-containers feature flag for every test in this module."""
    gl.features.set(name="container_registry_protected_containers", value=True)
@pytest.mark.skip(reason="Not released yet")
def test_project_protected_registry(project: Project):
    """List, create and update a registry protection rule."""
    assert isinstance(project.registry_protection_rules.list(), list)

    rule = project.registry_protection_rules.create(
        {
            "repository_path_pattern": "test/image",
            "minimum_access_level_for_push": "maintainer",
        }
    )
    assert isinstance(rule, ProjectRegistryProtectionRule)
    assert rule.repository_path_pattern == "test/image"

    rule.minimum_access_level_for_push = "owner"
    rule.save()
    assert rule.minimum_access_level_for_push == "owner"
23,257 | test_groups.py | python-gitlab_python-gitlab/tests/functional/api/test_groups.py | import pytest
import gitlab
def test_groups(gl):
    """End-to-end exercise of group CRUD, membership, sharing and
    user-membership listing against a live GitLab instance."""
    # TODO: This one still needs lots of work
    user = gl.users.create(
        {
            "email": "user@test.com",
            "username": "user",
            "name": "user",
            "password": "E4596f8be406Bc3a14a4ccdb1df80587#!1",
        }
    )
    user2 = gl.users.create(
        {
            "email": "user2@test.com",
            "username": "user2",
            "name": "user2",
            "password": "E4596f8be406Bc3a14a4ccdb1df80587#!#2",
        }
    )
    group1 = gl.groups.create(
        {"name": "gitlab-test-group1", "path": "gitlab-test-group1"}
    )
    group2 = gl.groups.create(
        {"name": "gitlab-test-group2", "path": "gitlab-test-group2"}
    )
    p_id = gl.groups.list(search="gitlab-test-group2")[0].id
    # group3 is a subgroup of group2; group4 stays top-level.
    group3 = gl.groups.create(
        {"name": "gitlab-test-group3", "path": "gitlab-test-group3", "parent_id": p_id}
    )
    group4 = gl.groups.create(
        {"name": "gitlab-test-group4", "path": "gitlab-test-group4"}
    )
    assert {group1, group2, group3, group4} <= set(gl.groups.list())
    assert gl.groups.list(search="gitlab-test-group1")[0].id == group1.id
    assert group3.parent_id == p_id
    assert group2.subgroups.list()[0].id == group3.id
    assert group2.descendant_groups.list()[0].id == group3.id

    # `skip_groups` filters the given IDs out of the listing.
    filtered_groups = gl.groups.list(skip_groups=[group3.id, group4.id])
    assert group3 not in filtered_groups
    assert group4 not in filtered_groups
    filtered_groups = gl.groups.list(skip_groups=[group3.id])
    assert group3 not in filtered_groups
    assert group4 in filtered_groups

    group1.members.create(
        {"access_level": gitlab.const.AccessLevel.OWNER, "user_id": user.id}
    )
    group1.members.create(
        {"access_level": gitlab.const.AccessLevel.GUEST, "user_id": user2.id}
    )
    group2.members.create(
        {"access_level": gitlab.const.AccessLevel.OWNER, "user_id": user2.id}
    )
    group4.share(group1.id, gitlab.const.AccessLevel.DEVELOPER)
    group4.share(group2.id, gitlab.const.AccessLevel.MAINTAINER)
    # Reload group4 to have updated shared_with_groups
    group4 = gl.groups.get(group4.id)
    assert len(group4.shared_with_groups) == 2
    group4.unshare(group1.id)
    # Reload group4 to have updated shared_with_groups
    group4 = gl.groups.get(group4.id)
    assert len(group4.shared_with_groups) == 1

    # User memberships (admin only)
    memberships1 = user.memberships.list()
    assert len(memberships1) == 1
    memberships2 = user2.memberships.list()
    assert len(memberships2) == 2
    membership = memberships1[0]
    assert membership.source_type == "Namespace"
    assert membership.access_level == gitlab.const.AccessLevel.OWNER
    project_memberships = user.memberships.list(type="Project")
    assert len(project_memberships) == 0
    group_memberships = user.memberships.list(type="Namespace")
    assert len(group_memberships) == 1
    with pytest.raises(gitlab.GitlabListError) as e:
        membership = user.memberships.list(type="Invalid")
    assert "type does not have a valid value" in str(e.value)
    # Non-admin user cannot sudo the memberships endpoint.
    with pytest.raises(gitlab.GitlabListError) as e:
        user.memberships.list(sudo=user.name)
    assert "403 Forbidden" in str(e.value)

    # Administrator belongs to the groups
    assert len(group1.members.list()) == 3
    assert len(group2.members.list()) == 2

    # Test `user_ids` array
    result = group1.members.list(user_ids=[user.id, 99999])
    assert len(result) == 1
    assert result[0].id == user.id

    group1.members.delete(user.id)
    assert group1.members_all.list()
    member = group1.members.get(user2.id)
    member.access_level = gitlab.const.AccessLevel.OWNER
    member.save()
    # Re-fetch to confirm the access level change persisted server-side.
    member = group1.members.get(user2.id)
    assert member.access_level == gitlab.const.AccessLevel.OWNER
    gl.auth()
    group2.members.delete(gl.user.id)
def test_group_labels(group):
    """Group labels support CRUD, including names that need URL-encoding."""
    encoded_name = "Label:that requires:encoding"

    group.labels.create({"name": "foo", "description": "bar", "color": "#112233"})
    fetched = group.labels.get("foo")
    assert fetched.description == "bar"

    fetched.description = "baz"
    fetched.save()
    fetched = group.labels.get("foo")
    assert fetched.description == "baz"
    assert fetched in group.labels.list()

    # Renaming to a name containing `:` exercises path encoding.
    fetched.new_name = encoded_name
    fetched.save()
    assert fetched.name == encoded_name
    fetched = group.labels.get(encoded_name)
    assert fetched.name == encoded_name
    fetched.delete()
@pytest.mark.gitlab_premium
@pytest.mark.xfail(reason="/ldap/groups endpoint not documented")
def test_ldap_groups(gl):
    # Listing LDAP groups should return a list (premium-only endpoint).
    assert isinstance(gl.ldapgroups.list(), list)
@pytest.mark.gitlab_premium
def test_group_ldap_links(group):
    """LDAP group links can be created by CN or by filter, listed,
    and deleted both via the object and via the manager."""
    ldap_cn = "common-name"
    ldap_provider = "ldap-provider"
    ldap_cn_link = group.ldap_group_links.create(
        {"provider": ldap_provider, "group_access": 30, "cn": ldap_cn}
    )
    ldap_filter_link = group.ldap_group_links.create(
        {"provider": ldap_provider, "group_access": 30, "filter": "(cn=Common Name)"}
    )
    ldap_links = group.ldap_group_links.list()
    assert ldap_cn_link.cn == ldap_links[0].cn
    assert ldap_filter_link.filter == ldap_links[1].filter
    with pytest.raises(gitlab.GitlabCreateError):
        # todo - can we configure dummy LDAP in the container?
        group.ldap_sync()
    ldap_filter_link.delete()
    # Manager-level delete identifies the link by provider + cn.
    group.ldap_group_links.delete(provider=ldap_provider, cn=ldap_cn)
    with pytest.raises(gitlab.GitlabListError, match="No linked LDAP groups found"):
        group.ldap_group_links.list()
def test_group_notification_settings(group):
    """Notification level changes on a group persist across fetches."""
    settings = group.notificationsettings.get()
    settings.level = "disabled"
    settings.save()

    refreshed = group.notificationsettings.get()
    assert refreshed.level == "disabled"
def test_group_badges(group):
    """Group badges can be created, updated and deleted."""
    badge = group.badges.create(
        {"link_url": "http://example/img.svg", "image_url": "http://example.com"}
    )
    assert badge in group.badges.list()

    badge.image_url = "http://another.example.com"
    badge.save()

    refreshed = group.badges.get(badge.id)
    assert refreshed.image_url == "http://another.example.com"
    refreshed.delete()
def test_group_milestones(group):
    """Milestones can be created, closed, and queried for related items."""
    milestone = group.milestones.create({"title": "groupmilestone1"})
    assert milestone in group.milestones.list()

    milestone.due_date = "2020-01-01T00:00:00Z"
    milestone.save()
    milestone.state_event = "close"
    milestone.save()

    closed = group.milestones.get(milestone.id)
    assert closed.state == "closed"
    # A fresh milestone has no associated issues or merge requests.
    assert not closed.issues()
    assert not closed.merge_requests()
def test_group_custom_attributes(gl, group):
    """Custom attributes can be set, overwritten, listed and deleted."""
    assert not group.customattributes.list()

    attr = group.customattributes.set("key", "value1")
    # Groups are filterable by custom attribute key/value pairs.
    assert group in gl.groups.list(custom_attributes={"key": "value1"})
    assert attr.key == "key"
    assert attr.value == "value1"
    assert attr in group.customattributes.list()

    group.customattributes.set("key", "value2")
    attr = group.customattributes.get("key")
    assert attr.value == "value2"
    assert attr in group.customattributes.list()
    attr.delete()
def test_group_subgroups_projects(gl, user):
    """Subgroups and their projects report the expected parent namespaces."""
    # TODO: fixture factories
    group1 = gl.groups.list(search="group1")[0]
    group2 = gl.groups.list(search="group2")[0]
    group3 = gl.groups.create(
        {"name": "subgroup1", "path": "subgroup1", "parent_id": group1.id}
    )
    group4 = gl.groups.create(
        {"name": "subgroup2", "path": "subgroup2", "parent_id": group2.id}
    )
    gr1_project = gl.projects.create({"name": "gr1_project", "namespace_id": group1.id})
    # gr2_project lives in the subgroup, so its namespace's parent is group1.
    gr2_project = gl.projects.create({"name": "gr2_project", "namespace_id": group3.id})
    assert group3.parent_id == group1.id
    assert group4.parent_id == group2.id
    assert gr1_project.namespace["id"] == group1.id
    assert gr2_project.namespace["parent_id"] == group1.id
    # Clean up so the shared instance stays reusable for other tests.
    gr1_project.delete()
    gr2_project.delete()
    group3.delete()
    group4.delete()
@pytest.mark.gitlab_premium
def test_group_wiki(group):
    """Group wiki pages support create/get/update/delete."""
    page = group.wikis.create(
        {"title": "groupwikipage", "content": "Group Wiki page content"}
    )
    assert page in group.wikis.list()

    page = group.wikis.get(page.slug)
    assert page.content == "Group Wiki page content"

    page.content = "new content"
    page.save()
    page.delete()
@pytest.mark.gitlab_premium
def test_group_hooks(group):
    """Group hooks can be created, toggled and removed."""
    hook = group.hooks.create({"url": "http://hook.url"})
    assert hook in group.hooks.list()

    hook.note_events = True
    hook.save()

    refreshed = group.hooks.get(hook.id)
    assert refreshed.note_events is True
    refreshed.delete()
def test_group_transfer(gl, group):
    """A group can be transferred under a parent and back to top level."""
    new_group = gl.groups.create(
        {"name": "transfer-test-group", "path": "transfer-test-group"}
    )
    new_group = gl.groups.get(new_group.id)
    assert new_group.parent_id != group.id

    # Move under `group`.
    new_group.transfer(group.id)
    assert gl.groups.get(new_group.id).parent_id == group.id

    # Without an argument, transfer() moves back to the top-level namespace.
    new_group.transfer()
    moved = gl.groups.get(new_group.id)
    assert moved.path == moved.full_path
@pytest.mark.gitlab_premium
@pytest.mark.xfail(reason="need to setup an identity provider or it's mock")
def test_group_saml_group_links(group):
    # Requires a configured SAML identity provider, hence the xfail.
    group.saml_group_links.create(
        {"saml_group_name": "saml-group-1", "access_level": 10}
    )
@pytest.mark.gitlab_premium
def test_group_service_account(group):
    """A service account can be created within a group."""
    account = group.service_accounts.create(
        {"name": "gitlab-service-account", "username": "gitlab-service-account"}
    )
    assert account.name == "gitlab-service-account"
    assert account.username == "gitlab-service-account"
| 9,934 | Python | .py | 239 | 35.979079 | 88 | 0.685699 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,258 | conftest.py | python-gitlab_python-gitlab/tests/functional/cli/conftest.py | import pytest
import responses
from gitlab.const import DEFAULT_URL
@pytest.fixture
def gitlab_cli(script_runner, gitlab_config):
    """Wrapper fixture to help make test cases less verbose."""

    def _gitlab_cli(subcommands):
        """Run the `gitlab` CLI with the test config plus `subcommands`."""
        # IDs and other non-string arguments are coerced to str for argv.
        args = ["gitlab", "--config-file", gitlab_config]
        args.extend(str(subcommand) for subcommand in subcommands)
        return script_runner.run(args)

    return _gitlab_cli
@pytest.fixture
def resp_get_project():
    """Mocked GET /projects/1 payload, ready to splat into `responses.add`."""
    return dict(
        method=responses.GET,
        url=f"{DEFAULT_URL}/api/v4/projects/1",
        json={"name": "name", "path": "test-path", "id": 1},
        content_type="application/json",
        status=200,
    )
@pytest.fixture
def resp_current_user():
    """Mocked GET /user payload, ready to splat into `responses.add`."""
    return dict(
        method=responses.GET,
        url=f"{DEFAULT_URL}/api/v4/user",
        json={"username": "name", "id": 1},
        content_type="application/json",
        status=200,
    )
@pytest.fixture
def resp_delete_registry_tags_in_bulk():
    """Mocked DELETE response for bulk registry tag deletion (202 Accepted)."""
    return dict(
        method=responses.DELETE,
        url=f"{DEFAULT_URL}/api/v4/projects/1/registry/repositories/1/tags",
        status=202,
    )
| 1,408 | Python | .py | 42 | 26.97619 | 79 | 0.623799 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,259 | test_cli_variables.py | python-gitlab_python-gitlab/tests/functional/cli/test_cli_variables.py | import copy
import pytest
import responses
from gitlab.const import DEFAULT_URL
def test_list_instance_variables(gitlab_cli, gl):
    """`gitlab variable list` exits successfully."""
    assert gitlab_cli(["variable", "list"]).success
def test_list_group_variables(gitlab_cli, group):
    """`gitlab group-variable list` exits successfully."""
    assert gitlab_cli(["group-variable", "list", "--group-id", group.id]).success
def test_list_project_variables(gitlab_cli, project):
    """`gitlab project-variable list` works with a numeric project ID."""
    assert gitlab_cli(["project-variable", "list", "--project-id", project.id]).success
def test_list_project_variables_with_path(gitlab_cli, project):
    """`gitlab project-variable list` also accepts a namespaced path."""
    ret = gitlab_cli(
        ["project-variable", "list", "--project-id", project.path_with_namespace]
    )
    assert ret.success
@pytest.mark.script_launch_mode("inprocess")
@responses.activate
def test_list_project_variables_with_path_url_check(script_runner, resp_get_project):
    """Namespaced project paths must be URL-encoded in the request URL."""
    mock_variables = copy.deepcopy(resp_get_project)
    mock_variables["url"] = (
        f"{DEFAULT_URL}/api/v4/projects/project%2Fwith%2Fa%2Fnamespace/variables"
    )
    mock_variables["json"] = []
    responses.add(**mock_variables)

    ret = script_runner.run(
        [
            "gitlab",
            "project-variable",
            "list",
            "--project-id",
            "project/with/a/namespace",
        ]
    )
    assert ret.success
| 1,416 | Python | .py | 39 | 30.666667 | 85 | 0.681618 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,260 | test_cli.py | python-gitlab_python-gitlab/tests/functional/cli/test_cli.py | """
Some test cases are run in-process to intercept requests to gitlab.com
and example servers.
"""
import copy
import json
import pytest
import responses
import yaml
from gitlab import __version__, config
from gitlab.const import DEFAULT_URL
PRIVATE_TOKEN = "glpat-abc123"
CI_JOB_TOKEN = "ci-job-token"
CI_SERVER_URL = "https://gitlab.example.com"
def test_main_entrypoint(script_runner, gitlab_config):
    """`python -m gitlab` without an action exits with argparse's code 2."""
    ret = script_runner.run(["python", "-m", "gitlab", "--config-file", gitlab_config])
    assert ret.returncode == 2
def test_version(script_runner):
    """`gitlab --version` prints exactly the package version."""
    result = script_runner.run(["gitlab", "--version"])
    assert result.stdout.strip() == __version__
def test_config_error_with_help_prints_help(script_runner):
    """`--help` wins over an invalid config file and exits 0."""
    ret = script_runner.run(["gitlab", "-c", "invalid-file", "--help"])
    assert ret.returncode == 0
    assert ret.stdout.startswith("usage:")
def test_resource_help_prints_actions_vertically(script_runner):
    """Resource help lists each action on its own line."""
    ret = script_runner.run(["gitlab", "project", "--help"])
    assert ret.returncode == 0
    for expected_line in (
        " list List the GitLab resources\n",
        " get Get a GitLab resource\n",
    ):
        assert expected_line in ret.stdout
def test_resource_help_prints_actions_vertically_only_one_action(script_runner):
    """Resource help also prints correctly when there is a single action."""
    ret = script_runner.run(["gitlab", "event", "--help"])
    # Bug fix: the first assertion previously tested a bare non-empty string
    # literal (always truthy) instead of checking the CLI output.
    assert " {list} Action to execute on the GitLab resource.\n" in ret.stdout
    assert " list List the GitLab resources\n" in ret.stdout
    assert ret.returncode == 0
@pytest.mark.script_launch_mode("inprocess")
@responses.activate
def test_defaults_to_gitlab_com(script_runner, resp_get_project, monkeypatch):
    """With no config present, the CLI targets https://gitlab.com."""
    responses.add(**resp_get_project)
    # Ignore any config files present on the host.
    monkeypatch.setattr(config, "_DEFAULT_FILES", [])

    ret = script_runner.run(["gitlab", "project", "get", "--id", "1"])
    assert ret.success
    assert "id: 1" in ret.stdout
@pytest.mark.script_launch_mode("inprocess")
@responses.activate
def test_uses_ci_server_url(monkeypatch, script_runner, resp_get_project):
    """CI_SERVER_URL is picked up as the GitLab instance URL."""
    monkeypatch.setenv("CI_SERVER_URL", CI_SERVER_URL)
    monkeypatch.setattr(config, "_DEFAULT_FILES", [])

    mock_in_ci = copy.deepcopy(resp_get_project)
    mock_in_ci["url"] = f"{CI_SERVER_URL}/api/v4/projects/1"
    responses.add(**mock_in_ci)

    assert script_runner.run(["gitlab", "project", "get", "--id", "1"]).success
@pytest.mark.script_launch_mode("inprocess")
@responses.activate
def test_uses_ci_job_token(monkeypatch, script_runner, resp_get_project):
    """CI_JOB_TOKEN is sent via the JOB-TOKEN request header."""
    monkeypatch.setenv("CI_JOB_TOKEN", CI_JOB_TOKEN)
    monkeypatch.setattr(config, "_DEFAULT_FILES", [])

    mock_in_ci = copy.deepcopy(resp_get_project)
    # The mock only matches when the JOB-TOKEN header is present.
    mock_in_ci["match"] = [
        responses.matchers.header_matcher({"JOB-TOKEN": CI_JOB_TOKEN})
    ]
    responses.add(**mock_in_ci)

    assert script_runner.run(["gitlab", "project", "get", "--id", "1"]).success
@pytest.mark.script_launch_mode("inprocess")
@responses.activate
def test_does_not_auth_on_skip_login(
    monkeypatch, script_runner, resp_get_project, resp_current_user
):
    """`--skip-login` must not hit the /user auth endpoint."""
    monkeypatch.setenv("GITLAB_PRIVATE_TOKEN", PRIVATE_TOKEN)
    monkeypatch.setattr(config, "_DEFAULT_FILES", [])
    user_mock = responses.add(**resp_current_user)
    project_mock = responses.add(**resp_get_project)

    ret = script_runner.run(["gitlab", "--skip-login", "project", "get", "--id", "1"])
    assert ret.success
    # Only the project endpoint is called; auth is skipped entirely.
    assert user_mock.call_count == 0
    assert project_mock.call_count == 1
@pytest.mark.script_launch_mode("inprocess")
@responses.activate
def test_private_token_overrides_job_token(
    monkeypatch, script_runner, resp_get_project
):
    """When both tokens are set, PRIVATE-TOKEN must win over JOB-TOKEN."""
    monkeypatch.setenv("GITLAB_PRIVATE_TOKEN", PRIVATE_TOKEN)
    monkeypatch.setenv("CI_JOB_TOKEN", CI_JOB_TOKEN)
    # Mocks only match requests carrying the PRIVATE-TOKEN header, so the
    # test fails if the CLI falls back to the job token.
    resp_get_project_with_token = copy.deepcopy(resp_get_project)
    resp_get_project_with_token.update(
        match=[responses.matchers.header_matcher({"PRIVATE-TOKEN": PRIVATE_TOKEN})],
    )
    # CLI first calls .auth() when private token is present
    resp_auth_with_token = copy.deepcopy(resp_get_project_with_token)
    resp_auth_with_token.update(url=f"{DEFAULT_URL}/api/v4/user")
    resp_auth_with_token["json"].update(username="user", web_url=f"{DEFAULT_URL}/user")
    responses.add(**resp_get_project_with_token)
    responses.add(**resp_auth_with_token)
    ret = script_runner.run(["gitlab", "project", "get", "--id", "1"])
    assert ret.success
def test_env_config_missing_file_raises(script_runner, monkeypatch):
    """A missing file in PYTHON_GITLAB_CFG is reported as an error."""
    monkeypatch.setenv("PYTHON_GITLAB_CFG", "non-existent")
    ret = script_runner.run(["gitlab", "project", "list"])
    assert not ret.success
    assert ret.stderr.startswith("Cannot read config from PYTHON_GITLAB_CFG")
def test_arg_config_missing_file_raises(script_runner):
    """A missing file passed via --config-file is reported as an error."""
    ret = script_runner.run(
        ["gitlab", "--config-file", "non-existent", "project", "list"]
    )
    assert not ret.success
    assert ret.stderr.startswith("Cannot read config from file")
def test_invalid_config(script_runner):
    """An unknown --gitlab section fails without producing output."""
    ret = script_runner.run(["gitlab", "--gitlab", "invalid"])
    assert not ret.success
    assert not ret.stdout
def test_invalid_config_prints_help(script_runner):
    """`--help` succeeds even with an unknown --gitlab section."""
    ret = script_runner.run(["gitlab", "--gitlab", "invalid", "--help"])
    assert ret.success
    assert ret.stdout
def test_invalid_api_version(script_runner, monkeypatch, fixture_dir):
    """An unsupported api_version in the config aborts the CLI."""
    monkeypatch.setenv("PYTHON_GITLAB_CFG", str(fixture_dir / "invalid_version.cfg"))
    ret = script_runner.run(["gitlab", "--gitlab", "test", "project", "list"])
    assert not ret.success
    assert ret.stderr.startswith("Unsupported API version:")
def test_invalid_auth_config(script_runner, monkeypatch, fixture_dir):
    """Bad credentials in the config surface the server's 401."""
    monkeypatch.setenv("PYTHON_GITLAB_CFG", str(fixture_dir / "invalid_auth.cfg"))
    ret = script_runner.run(["gitlab", "--gitlab", "test", "project", "list"])
    assert not ret.success
    assert "401" in ret.stderr
format_matrix = [
("json", json.loads),
("yaml", yaml.safe_load),
]
@pytest.mark.parametrize("format,loader", format_matrix)
def test_cli_display(gitlab_cli, project, format, loader):
    """Output in each supported format round-trips through its parser."""
    ret = gitlab_cli(["-o", format, "project", "get", "--id", project.id])
    assert ret.success

    parsed = loader(ret.stdout.strip())
    assert parsed["id"] == project.id
@pytest.mark.parametrize("format,loader", format_matrix)
def test_cli_fields_in_list(gitlab_cli, project_file, format, loader):
    """`--fields` restricts the attributes shown for listed resources."""
    cmd = [
        "-o",
        format,
        "--fields",
        "default_branch",
        "project",
        "list",
    ]
    ret = gitlab_cli(cmd)
    assert ret.success

    content = loader(ret.stdout.strip())
    # Bug fix: the previous `assert [... for ...]` only tested that the list
    # comprehension was non-empty, which passes whenever any project is
    # listed. `all()` actually checks the requested field on every item.
    assert all("default_branch" in item for item in content)
def test_cli_display_without_fields_warns(gitlab_cli, project):
    """Resources without default display fields print a warning instead."""
    ret = gitlab_cli(["project-ci-lint", "get", "--project-id", project.id])
    assert ret.success
    assert "No default fields to show" in ret.stdout
    assert "merged_yaml" not in ret.stdout
def test_cli_does_not_print_token(gitlab_cli, gitlab_token):
    """Debug output masks the private token on both streams."""
    ret = gitlab_cli(["--debug", "current-user", "get"])
    assert ret.success
    for stream in (ret.stdout, ret.stderr):
        assert gitlab_token not in stream
    assert "[MASKED]" in ret.stderr
| 7,330 | Python | .py | 162 | 40.895062 | 87 | 0.696863 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,261 | test_cli_projects.py | python-gitlab_python-gitlab/tests/functional/cli/test_cli_projects.py | import subprocess
import time
import pytest
import responses
@pytest.mark.script_launch_mode("inprocess")
@responses.activate
def test_project_registry_delete_in_bulk(
    script_runner, resp_delete_registry_tags_in_bulk
):
    """Registry tags can be deleted in bulk via the CLI."""
    responses.add(**resp_delete_registry_tags_in_bulk)
    cmd = [
        "gitlab",
        "project-registry-tag",
        "delete-in-bulk",
        "--project-id",
        "1",
        "--repository-id",
        "1",
        "--name-regex-delete",
        "^.*dev.*$",
        # TODO: remove `name` after deleting without ID is possible
        # See #849 and #1631
        "--name",
        ".*",
    ]
    # Bug fix: was `ret = ret = script_runner.run(cmd)` (duplicated assignment).
    ret = script_runner.run(cmd)
    assert ret.success
@pytest.fixture
def project_export(project):
    """Create a project export and wait (up to ~30s) until it finishes.

    Raises:
        TimeoutError: if the export does not reach `finished` in time.
    """
    export = project.exports.create()
    export.refresh()
    # Poll the export status; replaces the manual counter/`while` loop with a
    # bounded for/else and a specific exception type instead of bare Exception.
    for _ in range(60):
        if export.export_status == "finished":
            return export
        time.sleep(0.5)
        export.refresh()
    raise TimeoutError("Project export taking too much time")
def test_project_export_download_custom_action(gitlab_config, project_export):
    """Tests custom action on ProjectManager"""
    result = subprocess.run(
        [
            "gitlab",
            "--config-file",
            gitlab_config,
            "project-export",
            "download",
            "--project-id",
            str(project_export.id),
        ],
        capture_output=True,
        check=True,
    )
    assert result.returncode == 0
def test_project_languages_custom_action(gitlab_cli, project, project_file):
    """Tests custom action on Project/RESTObject"""
    assert gitlab_cli(["project", "languages", "--id", project.id]).success
| 1,692 | Python | .py | 57 | 23.491228 | 78 | 0.626001 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,262 | test_cli_resource_access_tokens.py | python-gitlab_python-gitlab/tests/functional/cli/test_cli_resource_access_tokens.py | import datetime
def test_list_project_access_tokens(gitlab_cli, project):
    """`project-access-token list` exits successfully."""
    ret = gitlab_cli(["project-access-token", "list", "--project-id", project.id])
    assert ret.success
def test_create_project_access_token_with_scopes(gitlab_cli, project):
    """A project access token can be created with multiple scopes."""
    ret = gitlab_cli(
        [
            "project-access-token",
            "create",
            "--project-id",
            project.id,
            "--name",
            "test-token",
            "--scopes",
            "api,read_repository",
            "--expires-at",
            datetime.date.today().isoformat(),
        ]
    )
    assert ret.success
def test_list_group_access_tokens(gitlab_cli, group):
    """`group-access-token list` exits successfully."""
    ret = gitlab_cli(["group-access-token", "list", "--group-id", group.id])
    assert ret.success
def test_create_group_access_token_with_scopes(gitlab_cli, group):
    """A group access token can be created with multiple scopes."""
    ret = gitlab_cli(
        [
            "group-access-token",
            "create",
            "--group-id",
            group.id,
            "--name",
            "test-token",
            "--scopes",
            "api,read_repository",
            "--expires-at",
            datetime.date.today().isoformat(),
        ]
    )
    assert ret.success
| 1,144 | Python | .py | 39 | 22.487179 | 70 | 0.587374 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,263 | test_cli_repository.py | python-gitlab_python-gitlab/tests/functional/cli/test_cli_repository.py | import json
import time
def test_project_create_file(gitlab_cli, project):
    """A repository file can be created through the CLI."""
    ret = gitlab_cli(
        [
            "project-file",
            "create",
            "--project-id",
            project.id,
            "--file-path",
            "README",
            "--branch",
            "main",
            "--content",
            "CONTENT",
            "--commit-message",
            "Initial commit",
        ]
    )
    assert ret.success
def test_list_all_commits(gitlab_cli, project):
    """Commits on non-default branches only appear with `--all true`."""
    data = {
        "branch": "new-branch",
        "start_branch": "main",
        "commit_message": "chore: test commit on new branch",
        "actions": [
            {
                "action": "create",
                "file_path": "test-cli-repo.md",
                "content": "new content",
            }
        ],
    }
    commit = project.commits.create(data)
    cmd = ["project-commit", "list", "--project-id", project.id, "--get-all"]
    ret = gitlab_cli(cmd)
    # The new commit is on `new-branch`, so a default listing omits it.
    assert commit.id not in ret.stdout
    # Listing commits on other branches requires `all` parameter passed to the API
    cmd = [
        "project-commit",
        "list",
        "--project-id",
        project.id,
        "--get-all",
        "--all",
        "true",
    ]
    ret_all = gitlab_cli(cmd)
    assert commit.id in ret_all.stdout
    assert len(ret_all.stdout) > len(ret.stdout)
def test_list_merge_request_commits(gitlab_cli, merge_request, project):
    """`project-merge-request commits` prints the MR's commits."""
    ret = gitlab_cli(
        [
            "project-merge-request",
            "commits",
            "--project-id",
            project.id,
            "--iid",
            merge_request.iid,
        ]
    )
    assert ret.success
    assert ret.stdout
def test_commit_merge_requests(gitlab_cli, project, merge_request):
    """This tests the `project-commit merge-requests` command and also tests
    that we can print the result using the `json` formatter"""
    # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge)
    time.sleep(30)
    merge_result = merge_request.merge(should_remove_source_branch=True)
    # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge)
    time.sleep(5)
    # Wait until it is merged
    # Poll up to ~30s (60 * 0.5s) for `merged_at` to be populated.
    mr = None
    mr_iid = merge_request.iid
    for _ in range(60):
        mr = project.mergerequests.get(mr_iid)
        if mr.merged_at is not None:
            break
        time.sleep(0.5)
    assert mr is not None
    assert mr.merged_at is not None
    # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge)
    time.sleep(5)
    commit_sha = merge_result["sha"]
    cmd = [
        "-o",
        "json",
        "project-commit",
        "merge-requests",
        "--project-id",
        project.id,
        "--id",
        commit_sha,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
    # The JSON formatter must emit a parseable list with exactly our MR.
    json_list = json.loads(ret.stdout)
    assert isinstance(json_list, list)
    assert len(json_list) == 1
    mr_dict = json_list[0]
    assert mr_dict["id"] == mr.id
    assert mr_dict["iid"] == mr.iid
def test_revert_commit(gitlab_cli, project):
    """The latest commit can be reverted onto main via the CLI."""
    latest = project.commits.list()[0]
    ret = gitlab_cli(
        [
            "project-commit",
            "revert",
            "--project-id",
            project.id,
            "--id",
            latest.id,
            "--branch",
            "main",
        ]
    )
    assert ret.success
def test_get_commit_signature_not_found(gitlab_cli, project):
    """Requesting a signature for an unsigned commit fails with 404."""
    latest = project.commits.list()[0]
    ret = gitlab_cli(
        ["project-commit", "signature", "--project-id", project.id, "--id", latest.id]
    )
    assert not ret.success
    assert "404 Signature Not Found" in ret.stderr
| 3,791 | Python | .py | 124 | 23.548387 | 111 | 0.591209 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,264 | test_cli_artifacts.py | python-gitlab_python-gitlab/tests/functional/cli/test_cli_artifacts.py | import subprocess
import textwrap
import time
from io import BytesIO
from zipfile import is_zipfile
import pytest
content = textwrap.dedent(
"""\
test-artifact:
script: echo "test" > artifact.txt
artifacts:
untracked: true
"""
)
data = {
"file_path": ".gitlab-ci.yml",
"branch": "main",
"content": content,
"commit_message": "Initial commit",
}
@pytest.fixture(scope="module")
def job_with_artifacts(gitlab_runner, project):
    """Commit a CI config and wait for the resulting job to succeed.

    Returns the first successful job. Raises TimeoutError instead of
    hanging forever if the runner never picks up or finishes the job
    (the original `while not jobs` loop had no upper bound).
    """
    project.files.create(data)
    for _ in range(600):  # up to ~5 minutes
        jobs = project.jobs.list(scope="success")
        if jobs:
            return project.jobs.get(jobs[0].id)
        time.sleep(0.5)
    raise TimeoutError("CI job with artifacts never reached `success`")
def test_cli_job_artifacts(capsysbinary, gitlab_config, job_with_artifacts):
    """Job artifacts downloaded via the CLI form a valid zip archive."""
    cmd = [
        "gitlab",
        "--config-file",
        gitlab_config,
        "project-job",
        "artifacts",
        "--id",
        str(job_with_artifacts.id),
        "--project-id",
        str(job_with_artifacts.pipeline["project_id"]),
    ]
    # Binary capture must be disabled so subprocess output is untouched.
    with capsysbinary.disabled():
        raw = subprocess.check_output(cmd)
    assert isinstance(raw, bytes)
    assert is_zipfile(BytesIO(raw))
def test_cli_project_artifact_download(gitlab_config, job_with_artifacts):
    """`project-artifact download` emits the artifacts archive as a zip."""
    proc = subprocess.run(
        [
            "gitlab",
            "--config-file",
            gitlab_config,
            "project-artifact",
            "download",
            "--project-id",
            str(job_with_artifacts.pipeline["project_id"]),
            "--ref-name",
            job_with_artifacts.ref,
            "--job",
            job_with_artifacts.name,
        ],
        capture_output=True,
        check=True,
    )
    assert isinstance(proc.stdout, bytes)
    assert is_zipfile(BytesIO(proc.stdout))
def test_cli_project_artifact_raw(gitlab_config, job_with_artifacts):
    """`project-artifact raw` prints a single artifact file's bytes."""
    proc = subprocess.run(
        [
            "gitlab",
            "--config-file",
            gitlab_config,
            "project-artifact",
            "raw",
            "--project-id",
            str(job_with_artifacts.pipeline["project_id"]),
            "--ref-name",
            job_with_artifacts.ref,
            "--job",
            job_with_artifacts.name,
            "--artifact-path",
            "artifact.txt",
        ],
        capture_output=True,
        check=True,
    )
    assert isinstance(proc.stdout, bytes)
    # Content written by the CI job in `content` above.
    assert proc.stdout == b"test\n"
| 2,386 | Python | .py | 82 | 22.768293 | 76 | 0.628771 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,265 | test_cli_v4.py | python-gitlab_python-gitlab/tests/functional/cli/test_cli_v4.py | import datetime
import os
import time
branch = "BRANCH-cli-v4"
def test_create_project(gitlab_cli):
    """A project can be created by name and is echoed back."""
    name = "test-project1"
    ret = gitlab_cli(["project", "create", "--name", name])
    assert ret.success
    assert name in ret.stdout
def test_update_project(gitlab_cli, project):
    """Updating the description is reflected in the command output."""
    new_description = "My New Description"
    ret = gitlab_cli(
        ["project", "update", "--id", project.id, "--description", new_description]
    )
    assert ret.success
    assert new_description in ret.stdout
def test_validate_project_ci_lint(gitlab_cli, project, valid_gitlab_ci_yml):
    """Valid CI YAML passes `project-ci-lint validate`."""
    ret = gitlab_cli(
        [
            "project-ci-lint",
            "validate",
            "--project-id",
            project.id,
            "--content",
            valid_gitlab_ci_yml,
        ]
    )
    assert ret.success
def test_validate_project_ci_lint_invalid_exits_non_zero(
    gitlab_cli, project, invalid_gitlab_ci_yml
):
    """Invalid CI YAML makes the CLI exit non-zero with a lint error."""
    ret = gitlab_cli(
        [
            "project-ci-lint",
            "validate",
            "--project-id",
            project.id,
            "--content",
            invalid_gitlab_ci_yml,
        ]
    )
    assert not ret.success
    assert "CI YAML Lint failed (Invalid configuration format)" in ret.stderr
def test_create_group(gitlab_cli):
    """A group can be created; name and path are echoed back."""
    name, path = "test-group1", "group1"
    ret = gitlab_cli(["group", "create", "--name", name, "--path", path])
    assert ret.success
    assert name in ret.stdout
    assert path in ret.stdout
def test_update_group(gitlab_cli, gl, group):
    """Group description updates persist server-side."""
    new_description = "My New Description"
    ret = gitlab_cli(
        ["group", "update", "--id", group.id, "--description", new_description]
    )
    assert ret.success
    # Re-fetch through the API to confirm the change was stored.
    assert gl.groups.get(group.id).description == new_description
def test_create_user(gitlab_cli, gl):
    """A user created via the CLI is retrievable through the API."""
    email = "fake@email.com"
    username = "user1"
    name = "User One"
    password = "E4596f8be406Bc3a14a4ccdb1df80587"
    ret = gitlab_cli(
        [
            "user",
            "create",
            "--email",
            email,
            "--username",
            username,
            "--name",
            name,
            "--password",
            password,
        ]
    )
    assert ret.success

    user = gl.users.list(username=username)[0]
    assert user.email == email
    assert user.username == username
    assert user.name == name
def test_get_user_by_id(gitlab_cli, user):
    """A user can be fetched by numeric ID."""
    ret = gitlab_cli(["user", "get", "--id", user.id])
    assert ret.success
    assert str(user.id) in ret.stdout
def test_list_users_verbose_output(gitlab_cli):
    """Verbose listing includes extra attributes such as avatar-url."""
    ret = gitlab_cli(["-v", "user", "list"])
    assert ret.success
    assert "avatar-url" in ret.stdout
def test_cli_args_not_in_output(gitlab_cli):
    """CLI arguments such as --config-file must not leak into the output."""
    ret = gitlab_cli(["-v", "user", "list"])
    # Consistency fix: also assert the command succeeded, like every sibling
    # test; otherwise a failing run (empty stdout) would pass vacuously.
    assert ret.success
    assert "config-file" not in ret.stdout
def test_add_member_to_project(gitlab_cli, project, user):
    """A user can be added to a project with a given access level."""
    ret = gitlab_cli(
        [
            "project-member",
            "create",
            "--project-id",
            project.id,
            "--user-id",
            user.id,
            # 40 == maintainer
            "--access-level",
            "40",
        ]
    )
    assert ret.success
def test_list_user_memberships(gitlab_cli, user):
    """Memberships for a user can be listed (admin only)."""
    assert gitlab_cli(["user-membership", "list", "--user-id", user.id]).success
def test_create_project_issue(gitlab_cli, project):
    """An issue can be created and its title is echoed back."""
    title = "my issue"
    ret = gitlab_cli(
        [
            "project-issue",
            "create",
            "--project-id",
            project.id,
            "--title",
            title,
            "--description",
            "my issue description",
        ]
    )
    assert ret.success
    assert title in ret.stdout
def test_create_issue_note(gitlab_cli, issue):
    """A note can be added to an existing issue."""
    ret = gitlab_cli(
        [
            "project-issue-note",
            "create",
            "--project-id",
            issue.project_id,
            "--issue-iid",
            issue.iid,
            "--body",
            "body",
        ]
    )
    assert ret.success
def test_create_branch(gitlab_cli, project):
    """The module-level test branch can be created from main."""
    ret = gitlab_cli(
        [
            "project-branch",
            "create",
            "--project-id",
            project.id,
            "--branch",
            branch,
            "--ref",
            "main",
        ]
    )
    assert ret.success
def test_create_merge_request(gitlab_cli, project):
    """An MR can be opened from the test branch onto main."""
    ret = gitlab_cli(
        [
            "project-merge-request",
            "create",
            "--project-id",
            project.id,
            "--source-branch",
            branch,
            "--target-branch",
            "main",
            "--title",
            "Update README",
        ]
    )
    assert ret.success
def test_accept_request_merge(gitlab_cli, project):
    """An open MR with at least one commit can be merged via the CLI."""
    # MR needs at least 1 commit before we can merge
    mr = project.mergerequests.list()[0]
    file_data = {
        "branch": mr.source_branch,
        "file_path": "test-cli-v4.md",
        "content": "Content",
        "commit_message": "chore: test-cli-v4 change",
    }
    project.files.create(file_data)
    # Pause to let GL catch up (happens on hosted too, sometimes takes a while for server to be ready to merge)
    time.sleep(30)
    approve_cmd = [
        "project-merge-request",
        "merge",
        "--project-id",
        project.id,
        "--iid",
        mr.iid,
    ]
    ret = gitlab_cli(approve_cmd)
    assert ret.success
def test_create_project_label(gitlab_cli, project):
    """A project label can be created with description and color."""
    ret = gitlab_cli(
        [
            "-v",
            "project-label",
            "create",
            "--project-id",
            project.id,
            "--name",
            "prjlabel1",
            "--description",
            "prjlabel1 description",
            "--color",
            "#112233",
        ]
    )
    assert ret.success
def test_list_project_labels(gitlab_cli, project):
cmd = ["-v", "project-label", "list", "--project-id", project.id]
ret = gitlab_cli(cmd)
assert ret.success
def test_update_project_label(gitlab_cli, label):
new_label = "prjlabel2"
new_description = "prjlabel2 description"
new_color = "#332211"
cmd = [
"-v",
"project-label",
"update",
"--project-id",
label.project_id,
"--name",
label.name,
"--new-name",
new_label,
"--description",
new_description,
"--color",
new_color,
]
ret = gitlab_cli(cmd)
assert ret.success
def test_delete_project_label(gitlab_cli, label):
# TODO: due to update above, we'd need a function-scope label fixture
label_name = "prjlabel2"
cmd = [
"-v",
"project-label",
"delete",
"--project-id",
label.project_id,
"--name",
label_name,
]
ret = gitlab_cli(cmd)
assert ret.success
def test_create_group_label(gitlab_cli, group):
    # Create a label at the group level.
    name = "grouplabel1"
    description = "grouplabel1 description"
    color = "#112233"
    cmd = [
        "-v",
        "group-label",
        "create",
        "--group-id",
        group.id,
        "--name",
        name,
        "--description",
        description,
        "--color",
        color,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
def test_list_group_labels(gitlab_cli, group):
    # Listing the group's labels must succeed.
    cmd = ["-v", "group-label", "list", "--group-id", group.id]
    ret = gitlab_cli(cmd)
    assert ret.success
def test_update_group_label(gitlab_cli, group_label):
    # Rename the group label fixture and change description/color.
    new_label = "grouplabel2"
    new_description = "grouplabel2 description"
    new_color = "#332211"
    cmd = [
        "-v",
        "group-label",
        "update",
        "--group-id",
        group_label.group_id,
        "--name",
        group_label.name,
        "--new-name",
        new_label,
        "--description",
        new_description,
        "--color",
        new_color,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
def test_delete_group_label(gitlab_cli, group_label):
    # Deletes by the hard-coded name set by the update test above.
    # TODO: due to update above, we'd need a function-scope label fixture
    new_label = "grouplabel2"
    cmd = [
        "-v",
        "group-label",
        "delete",
        "--group-id",
        group_label.group_id,
        "--name",
        new_label,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
def test_create_project_variable(gitlab_cli, project):
    # Create a CI/CD variable on the project.
    key = "junk"
    value = "car"
    cmd = [
        "-v",
        "project-variable",
        "create",
        "--project-id",
        project.id,
        "--key",
        key,
        "--value",
        value,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
def test_get_project_variable(gitlab_cli, variable):
    # Fetch a single variable by key.
    cmd = [
        "-v",
        "project-variable",
        "get",
        "--project-id",
        variable.project_id,
        "--key",
        variable.key,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
def test_update_project_variable(gitlab_cli, variable):
    # Update the variable's value in place.
    new_value = "bus"
    cmd = [
        "-v",
        "project-variable",
        "update",
        "--project-id",
        variable.project_id,
        "--key",
        variable.key,
        "--value",
        new_value,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
def test_list_project_variables(gitlab_cli, project):
    # Listing the project's variables must succeed.
    cmd = ["-v", "project-variable", "list", "--project-id", project.id]
    ret = gitlab_cli(cmd)
    assert ret.success
def test_delete_project_variable(gitlab_cli, variable):
    # Delete the variable by key.
    cmd = [
        "-v",
        "project-variable",
        "delete",
        "--project-id",
        variable.project_id,
        "--key",
        variable.key,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
def test_delete_branch(gitlab_cli, project):
    # Delete the branch created earlier (`branch` is a module-level constant).
    cmd = ["project-branch", "delete", "--project-id", project.id, "--name", branch]
    ret = gitlab_cli(cmd)
    assert ret.success
def test_project_upload_file(gitlab_cli, project):
    # Upload this very test file as a project attachment.
    cmd = [
        "project",
        "upload",
        "--id",
        project.id,
        "--filename",
        __file__,
        "--filepath",
        os.path.realpath(__file__),
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
def test_get_application_settings(gitlab_cli):
    # Reading instance-wide application settings must succeed.
    cmd = ["application-settings", "get"]
    ret = gitlab_cli(cmd)
    assert ret.success
def test_update_application_settings(gitlab_cli):
    # Boolean flags are passed as lowercase strings on the CLI.
    cmd = ["application-settings", "update", "--signup-enabled", "false"]
    ret = gitlab_cli(cmd)
    assert ret.success
def test_create_project_with_values_from_file(gitlab_cli, fixture_dir, tmpdir):
    # Values prefixed with "@" are read from the named file by the CLI.
    name = "gitlab-project-from-file"
    description = "Multiline\n\nData\n"
    from_file = tmpdir.join(name)
    from_file.write(description)
    from_file_path = f"@{str(from_file)}"
    avatar_file = fixture_dir / "avatar.png"
    assert avatar_file.exists()
    avatar_file_path = f"@{avatar_file}"
    cmd = [
        "-v",
        "project",
        "create",
        "--name",
        name,
        "--description",
        from_file_path,
        "--avatar",
        avatar_file_path,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
    assert description in ret.stdout
def test_create_project_with_values_at_prefixed(gitlab_cli, tmpdir):
    # A literal value starting with "@" must be escapable with a second "@".
    name = "gitlab-project-at-prefixed"
    description = "@at-prefixed"
    at_prefixed = f"@{description}"
    cmd = [
        "-v",
        "project",
        "create",
        "--name",
        name,
        "--description",
        at_prefixed,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
    assert description in ret.stdout
def test_create_project_deploy_token(gitlab_cli, project):
    # Create a project deploy token and check all fields are echoed back.
    name = "project-token"
    username = "root"
    expires_at = datetime.date.today().isoformat()
    scopes = "read_registry"
    cmd = [
        "-v",
        "project-deploy-token",
        "create",
        "--project-id",
        project.id,
        "--name",
        name,
        "--username",
        username,
        "--expires-at",
        expires_at,
        "--scopes",
        scopes,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
    assert name in ret.stdout
    assert username in ret.stdout
    assert expires_at in ret.stdout
    assert scopes in ret.stdout
def test_list_all_deploy_tokens(gitlab_cli, deploy_token):
    # Instance-wide token listing must include the fixture token's fields.
    cmd = ["-v", "deploy-token", "list"]
    ret = gitlab_cli(cmd)
    assert ret.success
    assert deploy_token.name in ret.stdout
    assert str(deploy_token.id) in ret.stdout
    assert deploy_token.username in ret.stdout
    assert deploy_token.expires_at in ret.stdout
    assert deploy_token.scopes[0] in ret.stdout
def test_list_project_deploy_tokens(gitlab_cli, deploy_token):
    # Project-scoped token listing must include the fixture token's fields.
    cmd = [
        "-v",
        "project-deploy-token",
        "list",
        "--project-id",
        deploy_token.project_id,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
    assert deploy_token.name in ret.stdout
    assert str(deploy_token.id) in ret.stdout
    assert deploy_token.username in ret.stdout
    assert deploy_token.expires_at in ret.stdout
    assert deploy_token.scopes[0] in ret.stdout
def test_delete_project_deploy_token(gitlab_cli, deploy_token):
    # Delete the token by id; success is asserted via exit status only.
    cmd = [
        "-v",
        "project-deploy-token",
        "delete",
        "--project-id",
        deploy_token.project_id,
        "--id",
        deploy_token.id,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
    # TODO assert not in list
def test_create_group_deploy_token(gitlab_cli, group):
    # Create a group deploy token and check all fields are echoed back.
    name = "group-token"
    username = "root"
    expires_at = datetime.date.today().isoformat()
    scopes = "read_registry"
    cmd = [
        "-v",
        "group-deploy-token",
        "create",
        "--group-id",
        group.id,
        "--name",
        name,
        "--username",
        username,
        "--expires-at",
        expires_at,
        "--scopes",
        scopes,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
    assert name in ret.stdout
    assert username in ret.stdout
    assert expires_at in ret.stdout
    assert scopes in ret.stdout
def test_list_group_deploy_tokens(gitlab_cli, group_deploy_token):
    # Group-scoped token listing must include the fixture token's fields.
    cmd = [
        "-v",
        "group-deploy-token",
        "list",
        "--group-id",
        group_deploy_token.group_id,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
    assert group_deploy_token.name in ret.stdout
    assert str(group_deploy_token.id) in ret.stdout
    assert group_deploy_token.username in ret.stdout
    assert group_deploy_token.expires_at in ret.stdout
    assert group_deploy_token.scopes[0] in ret.stdout
def test_delete_group_deploy_token(gitlab_cli, group_deploy_token):
    # Delete the token by id; success is asserted via exit status only.
    cmd = [
        "-v",
        "group-deploy-token",
        "delete",
        "--group-id",
        group_deploy_token.group_id,
        "--id",
        group_deploy_token.id,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
    # TODO assert not in list
def test_project_member_all(gitlab_cli, project):
    # Listing all members (including inherited) of the project must succeed.
    cmd = [
        "project-member-all",
        "list",
        "--project-id",
        project.id,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
def test_group_member_all(gitlab_cli, group):
    # Listing all members (including inherited) of the group must succeed.
    cmd = [
        "group-member-all",
        "list",
        "--group-id",
        group.id,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
# Deleting the project and group. Add your tests above here.
def test_delete_project(gitlab_cli, project):
    cmd = ["project", "delete", "--id", project.id]
    ret = gitlab_cli(cmd)
    assert ret.success
def test_delete_group(gitlab_cli, group):
    cmd = ["group", "delete", "--id", group.id]
    ret = gitlab_cli(cmd)
    assert ret.success
# Don't add tests below here as the group and project have been deleted
| 15,703 | Python | .py | 575 | 20.652174 | 111 | 0.587004 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,266 | test_cli_packages.py | python-gitlab_python-gitlab/tests/functional/cli/test_cli_packages.py | package_name = "hello-world"
package_version = "v1.0.0"
file_name = "hello.tar.gz"
file_content = "package content"
def test_list_project_packages(gitlab_cli, project):
    """Listing a project's packages through the CLI should succeed."""
    ret = gitlab_cli(["project-package", "list", "--project-id", project.id])
    assert ret.success
def test_list_group_packages(gitlab_cli, group):
    """Listing a group's packages through the CLI should succeed."""
    ret = gitlab_cli(["group-package", "list", "--group-id", group.id])
    assert ret.success
def test_upload_generic_package(tmp_path, gitlab_cli, project):
    # Write a throwaway file to disk and push it to the project's generic
    # package registry; "-v" makes the CLI print the HTTP status line.
    path = tmp_path / file_name
    path.write_text(file_content)
    cmd = [
        "-v",
        "generic-package",
        "upload",
        "--project-id",
        project.id,
        "--package-name",
        package_name,
        "--path",
        path,
        "--package-version",
        package_version,
        "--file-name",
        file_name,
    ]
    ret = gitlab_cli(cmd)
    assert "201 Created" in ret.stdout
def test_download_generic_package(gitlab_cli, project):
    # Download the file uploaded by test_upload_generic_package and verify
    # its content round-trips unchanged through stdout.
    cmd = [
        "generic-package",
        "download",
        "--project-id",
        project.id,
        "--package-name",
        package_name,
        "--package-version",
        package_version,
        "--file-name",
        file_name,
    ]
    ret = gitlab_cli(cmd)
    assert ret.stdout == file_content
| 1,320 | Python | .py | 47 | 21.531915 | 65 | 0.588889 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,267 | test_cli_users.py | python-gitlab_python-gitlab/tests/functional/cli/test_cli_users.py | import datetime
def test_create_user_impersonation_token_with_scopes(gitlab_cli, user):
    """Create an impersonation token carrying two scopes via the CLI."""
    expires_at = datetime.date.today().isoformat()
    cmd = [
        "user-impersonation-token",
        "create",
        "--user-id",
        user.id,
        "--name",
        "test-token",
        "--scopes",
        "api,read_user",
        "--expires-at",
        expires_at,
    ]
    ret = gitlab_cli(cmd)
    assert ret.success
def test_list_user_projects(gitlab_cli, user):
    """Listing a user's projects through the CLI should succeed."""
    ret = gitlab_cli(["user-project", "list", "--user-id", user.id])
    assert ret.success
| 558 | Python | .py | 20 | 21.2 | 71 | 0.578947 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,268 | docker.py | python-gitlab_python-gitlab/tests/functional/fixtures/docker.py | """
pytest-docker fixture overrides.
See https://github.com/avast/pytest-docker#available-fixtures.
"""
import pytest
@pytest.fixture(scope="session")
def docker_compose_project_name():
    """Fix the compose project name so containers can optionally be reused."""
    return "pytest-python-gitlab"
@pytest.fixture(scope="session")
def docker_compose_file(fixture_dir):
    """Point pytest-docker at the compose file shipped with the fixtures."""
    compose_path = fixture_dir / "docker-compose.yml"
    return compose_path
@pytest.fixture(scope="session")
def docker_cleanup(request):
    """Conditionally keep containers around by overriding the cleanup command."""
    keep = request.config.getoption("--keep-containers")
    # "-v" just prints the docker-compose version (an effective no-op),
    # whereas "down -v" tears down containers and volumes.
    return "-v" if keep else "down -v"
| 697 | Python | .py | 19 | 33.210526 | 81 | 0.740686 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,269 | test_install.py | python-gitlab_python-gitlab/tests/install/test_install.py | import pytest
def test_install() -> None:
    """The sync-only package must not pull in aiohttp as a dependency."""
    with pytest.raises(ImportError):
        import aiohttp  # type: ignore # noqa
| 127 | Python | .py | 4 | 27.25 | 45 | 0.694215 | python-gitlab/python-gitlab | 2,230 | 648 | 106 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,270 | setup.py | koalalorenzo_python-digitalocean/setup.py | #!/usr/bin/env python
import os
try:
    from setuptools import setup
except ImportError:
    # Bootstrap setuptools on very old environments via ez_setup.
    from ez_setup import use_setuptools
    use_setuptools()
    from setuptools import setup
# Short fallback description, used when DESCRIPTION.rst is absent
# (e.g. installing straight from a bare source checkout).
long_description = """This library provides easy access to Digital Ocean APIs to deploy droplets, images and more."""
if os.path.isfile("DESCRIPTION.rst"):
    with open('DESCRIPTION.rst') as file:
        long_description = file.read()
setup(
    name='python-digitalocean',
    version='1.17.0',
    description='digitalocean.com API to manage Droplets and Images',
    author='Lorenzo Setale ( http://who.is.lorenzo.setale.me/? )',
    author_email='lorenzo@setale.me',
    url='https://github.com/koalalorenzo/python-digitalocean',
    packages=['digitalocean'],
    install_requires=['requests', 'jsonpickle'],
    test_suite='digitalocean.tests',
    license='LGPL v3',
    long_description=long_description
)
| 903 | Python | .py | 25 | 32.12 | 117 | 0.726857 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,271 | conf.py | koalalorenzo_python-digitalocean/docs/conf.py | # -*- coding: utf-8 -*-
#
# Python Digitalocean documentation build configuration file, created by
# sphinx-quickstart on Wed Jan 25 13:52:17 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Python Digitalocean'
copyright = u'2020, Lorenzo Setale'
author = u'Lorenzo Setale'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# Keep in sync with the package version declared in setup.py (1.17.0).
version = u'1.17.0'
# The full version, including alpha/beta/rc tags.
release = u'1.17.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'PythonDigitaloceandoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'PythonDigitalocean.tex', u'Python Digitalocean Documentation',
u'Lorenzo Setale', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pythondigitalocean', u'Python Digitalocean Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'PythonDigitalocean', u'Python Digitalocean Documentation',
author, 'PythonDigitalocean', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
| 5,590 | Python | .py | 141 | 37.617021 | 80 | 0.701332 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,272 | Image.py | koalalorenzo_python-digitalocean/digitalocean/Image.py | # -*- coding: utf-8 -*-
from .baseapi import BaseAPI, POST, DELETE, PUT, NotFoundError
class Image(BaseAPI):
    """
    An object representing a DigitalOcean Image.
    Attributes accepted at creation time:
    Args:
        name (str): The name to be given to an image.
        url (str): A URL from which the virtual machine image may be retrieved.
        region (str): The slug of the region where the image will be available.
        distribution (str, optional): The name of the image's distribution.
        description (str, optional): Free-form text field to describe an image.
        tags (obj:`list` of `str`, optional): List of tag names to apply to \
            the image.
    Attributes returned by API:
        * id (int): A unique number to identify and reference a image.
        * name (str): The display name given to an image.
        * type (str): The kind of image. This will be either "snapshot",
          "backup", or "custom".
        * distribution (str): The name of the image's distribution.
        * slug (str): A uniquely identifying string that is associated with each \
          of the DigitalOcean-provided public images.
        * public (bool): Indicates whether the image is public or not.
        * regions (obj:`list` of `str`): A list of the slugs of the regions where \
          the image is available for use.
        * created_at (str): A time value given in ISO8601 combined date and time \
          format that represents when the image was created.
        * min_disk_size (int): The minimum disk size in GB required for a Droplet \
          to use this image.
        * size_gigabytes (int): The size of the image in gigabytes.
        * description (str): Free-form text field to describing an image.
        * tags (obj:`list` of `str`): List of tag names to applied to the image.
        * status (str): Indicates the state of a custom image. This may be "NEW", \
          "available", "pending", or "deleted".
        * error_message (str): Information about errors that may occur when \
          importing a custom image.
    """
    def __init__(self, *args, **kwargs):
        # Defaults for every attribute the API may return; values passed as
        # kwargs are presumably applied by BaseAPI.__init__ — see baseapi.
        self.id = None
        self.name = None
        self.distribution = None
        self.slug = None
        self.min_disk_size = None
        self.public = None
        self.regions = []
        self.created_at = None
        self.size_gigabytes = None
        self.description = None
        self.status = None
        self.tags = []
        self.error_message = None
        self.url = None
        self.region = None
        super(Image, self).__init__(*args, **kwargs)
    @classmethod
    def get_object(cls, api_token, image_id_or_slug):
        """
        Class method that will return an Image object by ID or slug.
        This method is used to validate the type of the image. If it is a
        number, it will be considered as an Image ID, instead if it is a
        string, it will considered as slug.
        """
        if cls._is_string(image_id_or_slug):
            image = cls(token=api_token, slug=image_id_or_slug)
            image.load(use_slug=True)
        else:
            image = cls(token=api_token, id=image_id_or_slug)
            image.load()
        return image
    @staticmethod
    def _is_string(value):
        """
        Checks if the value provided is a string (True) or not integer
        (False) or something else (None).
        """
        # type(2 ** 64) is ``long`` on Python 2 and ``int`` on Python 3,
        # so both integer types are treated as IDs.
        if type(value) in [type(u''), type('')]:
            return True
        elif type(value) in [int, type(2 ** 64)]:
            return False
        else:
            return None
    def create(self):
        """
        Creates a new custom DigitalOcean Image from the Linux virtual machine
        image located at the provided `url`.
        """
        params = {'name': self.name,
                  'region': self.region,
                  'url': self.url,
                  'distribution': self.distribution,
                  'description': self.description,
                  'tags': self.tags}
        data = self.get_data('images', type=POST, params=params)
        if data:
            # Mirror every field of the API response onto this object.
            for attr in data['image'].keys():
                setattr(self, attr, data['image'][attr])
        return self
    def load(self, use_slug=False):
        """
        Load slug.
        Loads by id, or by slug if id is not present or use slug is True.
        """
        identifier = None
        if use_slug or not self.id:
            identifier = self.slug
        else:
            identifier = self.id
        if not identifier:
            raise NotFoundError("One of self.id or self.slug must be set.")
        data = self.get_data("images/%s" % identifier)
        image_dict = data['image']
        # Setting the attribute values
        for attr in image_dict.keys():
            setattr(self, attr, image_dict[attr])
        return self
    def destroy(self):
        """
        Destroy the image
        """
        return self.get_data("images/%s/" % self.id, type=DELETE)
    def transfer(self, new_region_slug):
        """
        Transfer the image

        Args:
            new_region_slug (str): slug of the region to transfer to.
        """
        return self.get_data(
            "images/%s/actions/" % self.id,
            type=POST,
            params={"type": "transfer", "region": new_region_slug}
        )
    def rename(self, new_name):
        """
        Rename an image

        Args:
            new_name (str): the new display name for the image.
        """
        return self.get_data(
            "images/%s" % self.id,
            type=PUT,
            params={"name": new_name}
        )
    def __str__(self):
        return "<Image: %s %s %s>" % (self.id, self.distribution, self.name)
| 5,686 | Python | .py | 140 | 30.542857 | 83 | 0.573135 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,273 | Domain.py | koalalorenzo_python-digitalocean/digitalocean/Domain.py | # -*- coding: utf-8 -*-
from .Record import Record
from .baseapi import BaseAPI, GET, POST, DELETE, PUT
class Domain(BaseAPI):
    """
    An object representing a DigitalOcean Domain.

    Attributes accepted at creation time:
        name (str): The domain name (e.g. ``example.com``).
        ip_address (str, optional): An IP address used to create an initial
            A record for the domain.

    Attributes returned by API:
        * name (str): The name of the domain.
        * ttl (int): The time to live of the zone, in seconds.
        * zone_file (str): The complete zone file of the domain.
    """
    def __init__(self, *args, **kwargs):
        self.name = None
        self.ttl = None
        self.zone_file = None
        self.ip_address = None

        super(Domain, self).__init__(*args, **kwargs)

    @classmethod
    def get_object(cls, api_token, domain_name):
        """
        Class method that will return a Domain object by its name.

        (The v2 API identifies domains by name, not by a numeric ID.)
        """
        domain = cls(token=api_token, name=domain_name)
        domain.load()
        return domain

    def load(self):
        """Fetch the domain from the API and populate this object."""
        # URL https://api.digitalocean.com/v2/domains
        domains = self.get_data("domains/%s" % self.name)
        domain = domains['domain']
        for attr in domain.keys():
            setattr(self, attr, domain[attr])

    def destroy(self):
        """
        Destroy the domain by name
        """
        # URL https://api.digitalocean.com/v2/domains/[NAME]
        return self.get_data("domains/%s" % self.name, type=DELETE)

    def create_new_domain_record(self, *args, **kwargs):
        """
        Create new domain record.
        https://developers.digitalocean.com/#create-a-new-domain-record

        Args:
            type: The record type (A, MX, CNAME, etc).
            name: The host name, alias, or service being defined by the record
            data: Variable data depending on record type.

        Optional Args:
            priority: The priority of the host
            port: The port that the service is accessible on
            weight: The weight of records with the same priority
            ttl: This value is the time to live for the record, in seconds.
            flags: An unsigned integer between 0-255 used for CAA records.
            tag: The parameter tag for CAA records. Valid values are "issue",
                 "issuewild", or "iodef".
        """
        data = {
            "type": kwargs.get("type", None),
            "name": kwargs.get("name", None),
            "data": kwargs.get("data", None)
        }

        # Optional Args.  ``is not None`` is used where 0 is a meaningful
        # value (priority, weight, flags); the remaining fields keep their
        # original truthiness checks.
        if kwargs.get("priority", None) is not None:
            data['priority'] = kwargs.get("priority", None)

        if kwargs.get("port", None):
            data['port'] = kwargs.get("port", None)

        if kwargs.get("weight", None) is not None:
            data['weight'] = kwargs.get("weight", None)

        if kwargs.get("ttl", None):
            data['ttl'] = kwargs.get("ttl", 1800)

        if kwargs.get("flags", None) is not None:
            data['flags'] = kwargs.get("flags", None)

        if kwargs.get("tag", None):
            data['tag'] = kwargs.get("tag", "issue")

        # A ttl set on the Domain object itself overrides any per-record ttl.
        if self.ttl:
            data['ttl'] = self.ttl

        return self.get_data(
            "domains/%s/records" % self.name,
            type=POST,
            params=data
        )

    def update_domain_record(self, *args, **kwargs):
        """
        Update an existing domain record.

        Args:
            id: The id of the record to update
            domain: The name of the domain the record belongs to
            type: The record type (A, MX, CNAME, etc).
            name: The host name, alias, or service being defined by the record
            data: Variable data depending on record type.
            port: The port that the service is accessible on
            weight: The weight of records with the same priority
        """
        data = {
            'id': kwargs.get("id", None),
            'domain': kwargs.get("domain", None)
        }
        # Only forward the fields the caller actually supplied.
        if kwargs.get("data", None):
            data['data'] = kwargs.get("data", None)
        if kwargs.get("type", None):
            data['type'] = kwargs.get("type", None)
        if kwargs.get("name", None):
            data['name'] = kwargs.get("name", None)
        if kwargs.get("port", None):
            data['port'] = kwargs.get("port", None)
        if kwargs.get("weight", None):
            data['weight'] = kwargs.get("weight", None)

        return self.get_data(
            "domains/%s/records/%s" % (data['domain'], data['id']),
            type=PUT,
            params=data
        )

    def delete_domain_record(self, *args, **kwargs):
        """
        Delete a single record of this domain.

        Args:
            id: The id of the record to delete
        """
        data = {
            'id': kwargs.get("id", None)
        }

        return self.get_data(
            "domains/%s/records/%s" % (self.name, data['id']),
            type=DELETE
        )

    def create(self):
        """
        Create new domain
        """
        # URL https://api.digitalocean.com/v2/domains
        data = {
            "name": self.name,
            "ip_address": self.ip_address,
        }

        domain = self.get_data("domains", type=POST, params=data)
        return domain

    def get_records(self, params=None):
        """
        Returns a list of Record objects
        """
        if params is None:
            params = {}

        # URL https://api.digitalocean.com/v2/domains/[NAME]/records/
        records = []
        data = self.get_data("domains/%s/records/" % self.name, type=GET, params=params)

        for record_data in data['domain_records']:
            record = Record(domain_name=self.name, **record_data)
            record.token = self.token
            records.append(record)

        return records

    def __str__(self):
        return "%s" % (self.name)
| 5,348 | Python | .py | 136 | 28.352941 | 88 | 0.537345 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,274 | Metadata.py | koalalorenzo_python-digitalocean/digitalocean/Metadata.py | # -*- coding: utf-8 -*-
import requests
try:
from urlparse import urljoin
except ImportError:
from urllib.parse import urljoin
from .baseapi import BaseAPI
class Metadata(BaseAPI):
    """
    Metadata API: Provide useful information about the current Droplet.

    This API is only reachable from inside a running Droplet: it queries
    the fixed link-local address and requires no authentication.

    See: https://developers.digitalocean.com/metadata/#introduction
    """
    droplet_id = None
    end_point = "http://169.254.169.254/metadata/v1"

    def __init__(self, *args, **kwargs):
        super(Metadata, self).__init__(*args, **kwargs)
        # Re-assert the fixed end point after BaseAPI.__init__, which may
        # have set its own default.
        self.end_point = "http://169.254.169.254/metadata/v1"

    def get_data(self, url, headers=None, params=None, render_json=True):
        """
        Customized version of get_data to directly get the data without
        using the authentication method.

        Args:
            url (str): path, joined onto the metadata end point.
            headers (dict, optional): extra HTTP headers to send.
            params (dict, optional): query-string parameters.
            render_json (bool): return parsed JSON when True, otherwise
                the raw response body.
        """
        # Fresh dicts per call — avoids the mutable-default-argument trap
        # of the previous ``headers=dict(), params=dict()`` signature.
        if headers is None:
            headers = {}
        if params is None:
            params = {}

        url = urljoin(self.end_point, url)
        response = requests.get(url, headers=headers, params=params,
                                timeout=self.get_timeout())

        if render_json:
            return response.json()
        return response.content

    def load(self):
        """Fetch the full metadata document and mirror it onto this object."""
        metadata = self.get_data("v1.json")
        for attr in metadata.keys():
            setattr(self, attr, metadata[attr])
        return self

    def __str__(self):
        return "<Metadata: %s>" % (self.droplet_id)
| 1,328 | Python | .py | 35 | 30 | 77 | 0.622465 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,275 | Certificate.py | koalalorenzo_python-digitalocean/digitalocean/Certificate.py | # -*- coding: utf-8 -*-
from .baseapi import BaseAPI, POST, DELETE
class Certificate(BaseAPI):
    """
    An object representing an SSL Certificate stored on DigitalOcean.
    Attributes accepted at creation time:
    Args:
        name (str): A name for the Certificate
        private_key (str, optional): The contents of a PEM-formatted
            private-key corresponding to the SSL certificate. Only used
            when uploading a custom certificate.
        leaf_certificate (str, optional): The contents of a PEM-formatted
            public SSL certificate. Only used when uploading a custom
            certificate.
        certificate_chain (str, optional): The full PEM-formatted trust chain
            between the certificate authority's certificate and your domain's
            SSL certificate. Only used when uploading a custom certificate.
        dns_names (:obj:`str`): A list of fully qualified domain names (FQDNs)
            for which the certificate will be issued by Let's Encrypt
        type (str): Specifies the type of certificate to be created. The value
            should be "custom" for a user-uploaded certificate or
            "lets_encrypt" for one automatically generated with Let's Encrypt.
    Attributes returned by API:
        name (str): The name of the Certificate
        id (str): A unique identifier for the Certificate
        not_after (str): A string that represents the Certificate's expiration
            date.
        sha1_fingerprint (str): A unique identifier for the Certificate
            generated from its SHA-1 fingerprint
        created_at (str): A string that represents when the Certificate was
            created
        dns_names (:obj:`str`): A list of fully qualified domain names (FQDNs)
            for which a Let's Encrypt generated certificate is issued.
        type (str): Specifies the type of certificate. The value will be
            "custom" for a user-uploaded certificate or "lets_encrypt" for one
            automatically generated with Let's Encrypt.
        state (str): Represents the current state of the certificate. It may be
            "pending", "verified", or "errored".
    """
    def __init__(self, *args, **kwargs):
        # Note: id defaults to an empty string (not None), unlike the
        # other attributes.
        self.id = ""
        self.name = None
        self.private_key = None
        self.leaf_certificate = None
        self.certificate_chain = None
        self.not_after = None
        self.sha1_fingerprint = None
        self.created_at = None
        self.dns_names = []
        self.type = None
        self.state = None
        super(Certificate, self).__init__(*args, **kwargs)
    @classmethod
    def get_object(cls, api_token, cert_id):
        """
        Class method that will return a Certificate object by its ID.
        """
        certificate = cls(token=api_token, id=cert_id)
        certificate.load()
        return certificate
    def load(self):
        """
        Load the Certificate object from DigitalOcean.
        Requires self.id to be set.
        """
        data = self.get_data("certificates/%s" % self.id)
        certificate = data["certificate"]
        # Mirror every response field onto this object.
        for attr in certificate.keys():
            setattr(self, attr, certificate[attr])
        return self
    def create(self):
        """
        Create the Certificate
        """
        params = {
            "name": self.name,
            "type": self.type,
            "dns_names": self.dns_names,
            "private_key": self.private_key,
            "leaf_certificate": self.leaf_certificate,
            "certificate_chain": self.certificate_chain
        }
        data = self.get_data("certificates", type=POST, params=params)
        if data:
            # Mirror the metadata fields of the response back onto this
            # object (the private key is never returned by the API).
            self.id = data['certificate']['id']
            self.not_after = data['certificate']['not_after']
            self.sha1_fingerprint = data['certificate']['sha1_fingerprint']
            self.created_at = data['certificate']['created_at']
            self.type = data['certificate']['type']
            self.dns_names = data['certificate']['dns_names']
            self.state = data['certificate']['state']
        return self
    def destroy(self):
        """
        Delete the Certificate
        """
        return self.get_data("certificates/%s" % self.id, type=DELETE)
    def __str__(self):
        return "<Certificate: %s %s>" % (self.id, self.name)
| 4,385 | Python | .py | 99 | 34.474747 | 79 | 0.618178 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,276 | Snapshot.py | koalalorenzo_python-digitalocean/digitalocean/Snapshot.py | # -*- coding: utf-8 -*-
from .baseapi import BaseAPI, POST, DELETE, PUT
class Snapshot(BaseAPI):
    """
    Represents a DigitalOcean snapshot (taken from a Droplet or a volume).
    """

    def __init__(self, *args, **kwargs):
        # API-provided attributes; populated by load() or by the Manager.
        self.id = None
        self.name = None
        self.created_at = None
        self.regions = []
        self.resource_id = None
        self.resource_type = None
        self.min_disk_size = None
        self.size_gigabytes = None
        super(Snapshot, self).__init__(*args, **kwargs)

    @classmethod
    def get_object(cls, api_token, snapshot_id):
        """
        Fetch a Snapshot by its ID and return the populated object.
        """
        snapshot = cls(token=api_token, id=snapshot_id)
        return snapshot.load()

    def load(self):
        """
        Fetch this snapshot's data and copy every field onto the instance.
        """
        data = self.get_data("snapshots/%s" % self.id)
        for attr, value in data['snapshot'].items():
            setattr(self, attr, value)
        return self

    def destroy(self):
        """
        Destroy the snapshot.
        """
        # The trailing slash is preserved exactly as the API path was
        # originally written.
        return self.get_data("snapshots/%s/" % self.id, type=DELETE)

    def __str__(self):
        return "<Snapshot: %s %s>" % (self.id, self.name)
| 1,201 | Python | .py | 35 | 26.085714 | 68 | 0.575627 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,277 | Record.py | koalalorenzo_python-digitalocean/digitalocean/Record.py | # -*- coding: utf-8 -*-
from .baseapi import BaseAPI, POST, DELETE, PUT
class Record(BaseAPI):
    """
    An object representing a DigitalOcean Domain Record.

    Args:
        type (str): The type of the DNS record (e.g. A, CNAME, TXT).
        name (str): The host name, alias, or service being defined by the
            record.
        data (int): Variable data depending on record type.
        priority (int): The priority for SRV and MX records.
        port (int): The port for SRV records.
        ttl (int): The time to live for the record, in seconds.
        weight (int): The weight for SRV records.
        flags (int): An unsigned integer between 0-255 used for CAA records.
        tags (string): The parameter tag for CAA records. Valid values are
            "issue", "wildissue", or "iodef"
    """

    def __init__(self, domain_name=None, *args, **kwargs):
        self.domain = domain_name or ""
        self.id = None
        self.type = None
        self.name = None
        self.data = None
        self.priority = None
        self.port = None
        self.ttl = None
        self.weight = None
        self.flags = None
        self.tags = None
        super(Record, self).__init__(*args, **kwargs)

    @classmethod
    def get_object(cls, api_token, domain, record_id):
        """
        Fetch a Record by its domain name and record ID.
        """
        record = cls(token=api_token, domain=domain, id=record_id)
        record.load()
        return record

    def __payload(self):
        # Single source of truth for the record fields sent by both
        # create() and save().
        return {
            "type": self.type,
            "data": self.data,
            "name": self.name,
            "priority": self.priority,
            "port": self.port,
            "ttl": self.ttl,
            "weight": self.weight,
            "flags": self.flags,
            "tags": self.tags
        }

    def create(self):
        """
        Create a new record for the domain from the current instance
        attributes (see the class docstring for the accepted fields).
        """
        data = self.get_data(
            "domains/%s/records" % (self.domain),
            type=POST,
            params=self.__payload(),
        )
        if data:
            self.id = data['domain_record']['id']

    def destroy(self):
        """
        Delete this record from its domain.
        """
        return self.get_data(
            "domains/%s/records/%s" % (self.domain, self.id),
            type=DELETE,
        )

    def save(self):
        """
        Push the current instance attributes to the existing record.
        """
        return self.get_data(
            "domains/%s/records/%s" % (self.domain, self.id),
            type=PUT,
            params=self.__payload(),
        )

    def load(self):
        """
        Fetch the record and copy every field of the payload onto the
        instance.
        """
        url = "domains/%s/records/%s" % (self.domain, self.id)
        record = self.get_data(url)
        if record:
            for attr, value in record[u'domain_record'].items():
                setattr(self, attr, value)

    def __str__(self):
        return "<Record: %s %s>" % (self.id, self.domain)
| 4,016 | Python | .py | 111 | 25.891892 | 80 | 0.537018 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,278 | Manager.py | koalalorenzo_python-digitalocean/digitalocean/Manager.py | # -*- coding: utf-8 -*-
try:
from urlparse import urlparse, parse_qs
except ImportError:
from urllib.parse import urlparse, parse_qs # noqa
from .baseapi import BaseAPI
from .Account import Account
from .Action import Action
from .Balance import Balance
from .Certificate import Certificate
from .Domain import Domain
from .Droplet import Droplet
from .FloatingIP import FloatingIP
from .Firewall import Firewall, InboundRule, OutboundRule
from .Image import Image
from .LoadBalancer import LoadBalancer
from .LoadBalancer import StickySessions, HealthCheck, ForwardingRule
from .Region import Region
from .SSHKey import SSHKey
from .Size import Size
from .Snapshot import Snapshot
from .Tag import Tag
from .Volume import Volume
from .VPC import VPC
from .Project import Project
from .CDNEndpoint import CDNEndpoint
class Manager(BaseAPI):
    """
    High-level entry point for the DigitalOcean v2 API.

    Wraps the per-resource classes (Droplet, Image, Domain, ...) with
    convenience getters that list or fetch resources belonging to the
    account of the configured API token.
    """
    # NOTE(review): most getters pass ``self.tokens`` to the resource
    # classes while a few pass ``self.token`` -- presumably BaseAPI exposes
    # both and they are interchangeable; confirm against baseapi.py.

    def __init__(self, *args, **kwargs):
        super(Manager, self).__init__(*args, **kwargs)

    def get_account(self):
        """
        Returns an Account object.
        """
        return Account.get_object(api_token=self.tokens)

    def get_balance(self):
        """
        Returns a Balance object.
        """
        return Balance.get_object(api_token=self.token)

    def get_all_regions(self):
        """
        This function returns a list of Region objects.
        """
        data = self.get_data("regions/")
        regions = list()
        for jsoned in data['regions']:
            region = Region(**jsoned)
            region.token = self.tokens
            regions.append(region)
        return regions

    def get_all_droplets(self, params=None, tag_name=None):
        """
        This function returns a list of Droplet objects.

        Args:
            params (dict, optional): Extra query parameters for the API call.
            tag_name (str, optional): Restrict results to Droplets carrying
                this tag.
        """
        if params is None:
            params = dict()

        if tag_name:
            params["tag_name"] = tag_name

        data = self.get_data("droplets/", params=params)
        droplets = list()
        for jsoned in data['droplets']:
            droplet = Droplet(**jsoned)
            droplet.token = self.tokens

            # Flatten the nested networks payload into convenience
            # attributes on the Droplet.
            for net in droplet.networks['v4']:
                if net['type'] == 'private':
                    droplet.private_ip_address = net['ip_address']
                if net['type'] == 'public':
                    droplet.ip_address = net['ip_address']
            if droplet.networks['v6']:
                droplet.ip_v6_address = droplet.networks['v6'][0]['ip_address']

            # Mirror feature flags as plain booleans.
            if "backups" in droplet.features:
                droplet.backups = True
            else:
                droplet.backups = False
            if "ipv6" in droplet.features:
                droplet.ipv6 = True
            else:
                droplet.ipv6 = False
            if "private_networking" in droplet.features:
                droplet.private_networking = True
            else:
                droplet.private_networking = False

            droplets.append(droplet)
        return droplets

    def get_droplet(self, droplet_id):
        """
        Return a Droplet by its ID.
        """
        return Droplet.get_object(api_token=self.tokens, droplet_id=droplet_id)

    def get_all_sizes(self):
        """
        This function returns a list of Size objects.
        """
        data = self.get_data("sizes/")
        sizes = list()
        for jsoned in data['sizes']:
            size = Size(**jsoned)
            size.token = self.tokens
            sizes.append(size)
        return sizes

    def get_images(self, private=False, type=None):
        """
        This function returns a list of Image objects.

        Args:
            private (bool, optional): Restrict results to the account's
                private images.
            type (str, optional): Restrict results to a given image type
                (e.g. "distribution" or "application").
        """
        params = {}
        if private:
            params['private'] = 'true'
        if type:
            params['type'] = type
        data = self.get_data("images/", params=params)
        images = list()
        for jsoned in data['images']:
            image = Image(**jsoned)
            image.token = self.tokens
            images.append(image)
        return images

    def get_all_images(self):
        """
        This function returns a list of Image objects containing all
        available DigitalOcean images, both public and private.
        """
        images = self.get_images()
        return images

    def get_image(self, image_id_or_slug):
        """
        Return an Image by its ID/Slug.
        """
        return Image.get_object(
            api_token=self.tokens,
            image_id_or_slug=image_id_or_slug,
        )

    def get_my_images(self):
        """
        This function returns a list of Image objects representing
        private DigitalOcean images (e.g. snapshots and backups).
        """
        images = self.get_images(private=True)
        return images

    def get_global_images(self):
        """
        This function returns a list of Image objects representing
        public DigitalOcean images (e.g. base distribution images
        and 'One-Click' applications).
        """
        data = self.get_images()
        images = list()
        for i in data:
            # Keep only the public subset of all visible images.
            if i.public:
                i.token = self.tokens
                images.append(i)
        return images

    def get_distro_images(self):
        """
        This function returns a list of Image objects representing
        public base distribution images.
        """
        images = self.get_images(type='distribution')
        return images

    def get_app_images(self):
        """
        This function returns a list of Image objects representing
        public DigitalOcean 'One-Click' application images.
        """
        images = self.get_images(type='application')
        return images

    def get_all_domains(self):
        """
        This function returns a list of Domain objects.
        """
        data = self.get_data("domains/")
        domains = list()
        for jsoned in data['domains']:
            domain = Domain(**jsoned)
            domain.token = self.tokens
            domains.append(domain)
        return domains

    def get_domain(self, domain_name):
        """
        Return a Domain by its domain_name.
        """
        return Domain.get_object(api_token=self.tokens, domain_name=domain_name)

    def get_all_sshkeys(self):
        """
        This function returns a list of SSHKey objects.
        """
        data = self.get_data("account/keys/")
        ssh_keys = list()
        for jsoned in data['ssh_keys']:
            ssh_key = SSHKey(**jsoned)
            ssh_key.token = self.tokens
            ssh_keys.append(ssh_key)
        return ssh_keys

    def get_ssh_key(self, ssh_key_id):
        """
        Return a SSHKey object by its ID.
        """
        return SSHKey.get_object(api_token=self.tokens, ssh_key_id=ssh_key_id)

    def get_all_tags(self):
        """
        This method returns a list of all tags.
        """
        data = self.get_data("tags")
        return [
            Tag(token=self.token, **tag) for tag in data['tags']
        ]

    def get_action(self, action_id):
        """
        Return an Action object by a specific ID.
        """
        return Action.get_object(api_token=self.tokens, action_id=action_id)

    def get_all_floating_ips(self):
        """
        This function returns a list of FloatingIP objects.
        """
        data = self.get_data("floating_ips")
        floating_ips = list()
        for jsoned in data['floating_ips']:
            floating_ip = FloatingIP(**jsoned)
            floating_ip.token = self.tokens
            floating_ips.append(floating_ip)
        return floating_ips

    def get_floating_ip(self, ip):
        """
        Returns a FloatingIP object by its IP address.
        """
        return FloatingIP.get_object(api_token=self.tokens, ip=ip)

    def get_all_load_balancers(self):
        """
        Returns a list of Load Balancer objects.
        """
        data = self.get_data("load_balancers")

        load_balancers = list()
        for jsoned in data['load_balancers']:
            load_balancer = LoadBalancer(**jsoned)
            load_balancer.token = self.tokens
            # Rehydrate the nested payloads into their helper objects.
            load_balancer.health_check = HealthCheck(**jsoned['health_check'])
            load_balancer.sticky_sessions = StickySessions(**jsoned['sticky_sessions'])

            forwarding_rules = list()
            for rule in jsoned['forwarding_rules']:
                forwarding_rules.append(ForwardingRule(**rule))
            load_balancer.forwarding_rules = forwarding_rules

            load_balancers.append(load_balancer)

        return load_balancers

    def get_load_balancer(self, id):
        """
        Returns a Load Balancer object by its ID.

        Args:
            id (str): Load Balancer ID
        """
        return LoadBalancer.get_object(api_token=self.tokens, id=id)

    def get_certificate(self, id):
        """
        Returns a Certificate object by its ID.

        Args:
            id (str): Certificate ID
        """
        return Certificate.get_object(api_token=self.tokens, cert_id=id)

    def get_all_certificates(self):
        """
        This function returns a list of Certificate objects.
        """
        data = self.get_data("certificates")
        certificates = list()
        for jsoned in data['certificates']:
            cert = Certificate(**jsoned)
            cert.token = self.tokens
            certificates.append(cert)

        return certificates

    def get_snapshot(self, snapshot_id):
        """
        Return a Snapshot by its ID.
        """
        return Snapshot.get_object(
            api_token=self.tokens, snapshot_id=snapshot_id
        )

    def get_all_snapshots(self):
        """
        This method returns a list of all Snapshots.
        """
        data = self.get_data("snapshots/")
        return [
            Snapshot(token=self.tokens, **snapshot)
            for snapshot in data['snapshots']
        ]

    def get_droplet_snapshots(self):
        """
        This method returns a list of all Snapshots based on Droplets.
        """
        data = self.get_data("snapshots?resource_type=droplet")
        return [
            Snapshot(token=self.tokens, **snapshot)
            for snapshot in data['snapshots']
        ]

    def get_volume_snapshots(self):
        """
        This method returns a list of all Snapshots based on volumes.
        """
        data = self.get_data("snapshots?resource_type=volume")
        return [
            Snapshot(token=self.tokens, **snapshot)
            for snapshot in data['snapshots']
        ]

    def get_all_volumes(self, region=None, name=None):
        """
        This function returns a list of Volume objects.

        Args:
            region (str, optional): Restrict results to volumes \
                available in a specific region. e.g. nyc1
            name (str, optional): List volumes on your account that \
                match a specified name. e.g. example-volume
        """
        url = "volumes"
        parameters = []

        if region:
            parameters.append("region={}".format(region))
        if name:
            parameters.append("name={}".format(name))

        if len(parameters) > 0:
            url += "?" + "&".join(parameters)

        data = self.get_data(url)
        volumes = list()
        for jsoned in data['volumes']:
            volume = Volume(**jsoned)
            volume.token = self.tokens
            volumes.append(volume)
        return volumes

    def get_volume(self, volume_id):
        """
        Returns a Volume object by its ID.
        """
        return Volume.get_object(api_token=self.tokens, volume_id=volume_id)

    def get_all_firewalls(self):
        """
        This function returns a list of Firewall objects.
        """
        data = self.get_data("firewalls")
        firewalls = list()
        for jsoned in data['firewalls']:
            firewall = Firewall(**jsoned)
            firewall.token = self.tokens

            # Rehydrate the rule payloads into their helper objects.
            in_rules = list()
            for rule in jsoned['inbound_rules']:
                in_rules.append(InboundRule(**rule))
            firewall.inbound_rules = in_rules

            out_rules = list()
            for rule in jsoned['outbound_rules']:
                out_rules.append(OutboundRule(**rule))
            firewall.outbound_rules = out_rules

            firewalls.append(firewall)
        return firewalls

    def get_firewall(self, firewall_id):
        """
        Return a Firewall by its ID.
        """
        return Firewall.get_object(
            api_token=self.tokens,
            firewall_id=firewall_id,
        )

    def get_vpc(self, id):
        """
        Returns a VPC object by its ID.

        Args:
            id (str): The VPC's ID
        """
        return VPC.get_object(api_token=self.token, vpc_id=id)

    def get_all_vpcs(self):
        """
        This function returns a list of VPC objects.
        """
        data = self.get_data("vpcs")
        vpcs = list()
        for jsoned in data['vpcs']:
            vpc = VPC(**jsoned)
            vpc.token = self.token
            vpcs.append(vpc)
        return vpcs

    def get_all_projects(self):
        """
        Returns all the projects of the account.
        """
        data = self.get_data("projects")
        projects = list()
        for jsoned in data['projects']:
            project = Project(**jsoned)
            project.token = self.token
            projects.append(project)
        return projects

    def get_project(self, project_id):
        """
        Return a Project by its ID.
        """
        return Project.get_object(
            api_token=self.token,
            project_id=project_id,
        )

    def get_default_project(self):
        """
        Return the default project of the account.
        """
        return Project.get_object(
            api_token=self.token,
            project_id="default",
        )

    def get_all_cdn_endpoints(self):
        """
        Returns all the CDN endpoints of the account.
        """
        data = self.get_data("cdn/endpoints")
        cdn_endpoints = list()
        for jsoned in data['endpoints']:
            cdn_endpoint = CDNEndpoint(**jsoned)
            cdn_endpoint.token = self.token
            cdn_endpoints.append(cdn_endpoint)
        return cdn_endpoints

    def get_cdn_endpoint(self, cdn_endpoint_id):
        """
        Return a CDNEndpoint by its ID.
        """
        return CDNEndpoint.get_object(
            api_token=self.token,
            cdn_endpoint_id=cdn_endpoint_id,
        )

    def __str__(self):
        return "<Manager>"
| 14,777 | Python | .py | 424 | 24.837264 | 87 | 0.565193 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,279 | Region.py | koalalorenzo_python-digitalocean/digitalocean/Region.py | # -*- coding: utf-8 -*-
from .baseapi import BaseAPI
class Region(BaseAPI):
    """
    Represents a DigitalOcean region (slug, name, available sizes and
    features). Attributes are filled in from API payloads by the Manager.
    """

    def __init__(self, *args, **kwargs):
        self.name = None
        self.slug = None
        self.sizes = []
        self.available = None
        self.features = []
        super(Region, self).__init__(*args, **kwargs)

    def __str__(self):
        return "<Region: %s %s>" % (self.slug, self.name)
| 386 | Python | .py | 12 | 25.583333 | 57 | 0.549865 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,280 | Tag.py | koalalorenzo_python-digitalocean/digitalocean/Tag.py | from .baseapi import BaseAPI
from .Droplet import Droplet
from .Snapshot import Snapshot
class Tag(BaseAPI):
    """
    Represents a DigitalOcean Tag and the operations to attach it to /
    detach it from Droplets and volume snapshots.
    """

    def __init__(self, *args, **kwargs):
        # name is the tag identifier used in every API path below.
        self.name = ""
        self.resources = {}
        super(Tag, self).__init__(*args, **kwargs)

    @classmethod
    def get_object(cls, api_token, tag_name):
        """
        Return a Tag object fetched by its name.
        """
        tag = cls(token=api_token, name=tag_name)
        tag.load()
        return tag

    def load(self):
        """
        Fetch data about tag
        """
        tags = self.get_data("tags/%s" % self.name)
        tag = tags['tag']

        for attr in tag.keys():
            setattr(self, attr, tag[attr])

        return self

    def create(self, **kwargs):
        """
        Create the tag.

        Any keyword arguments are first copied onto the instance, then
        the tag is created under self.name.
        """
        for attr in kwargs.keys():
            setattr(self, attr, kwargs[attr])

        params = {"name": self.name}

        output = self.get_data("tags", type="POST", params=params)
        if output:
            self.name = output['tag']['name']
            self.resources = output['tag']['resources']

    def delete(self):
        """Delete the tag."""
        return self.get_data("tags/%s" % self.name, type="DELETE")

    def __get_resources(self, resources, method):
        """ Method used to talk directly to the API (TAGs' Resources) """
        tagged = self.get_data(
            'tags/%s/resources' % self.name, params={
                "resources": resources
            },
            type=method,
        )

        return tagged

    def __add_resources(self, resources):
        """
        Add the resources to this tag.

        Attributes accepted at creation time:
            resources: array - See API.
        """
        return self.__get_resources(resources, method='POST')

    def __remove_resources(self, resources):
        """
        Remove resources from this tag.

        Attributes accepted at creation time:
            resources: array - See API.
        """
        return self.__get_resources(resources, method='DELETE')

    def __build_resources_field(self, resources_to_tag, object_class, resource_type):
        """
        Private method to build the `resources` field used to tag/untag
        DO resources. Returns an array of objects containing two fields:
        resource_id and resource_type.

        It checks the type of objects in the 1st argument and build the
        right structure for the API. It accepts array of strings, array
        of ints and array of the object type defined by object_class arg.

        The 3rd argument specify the resource type as defined by DO API
        (like droplet, image, volume or volume_snapshot).

        See: https://developers.digitalocean.com/documentation/v2/#tag-a-resource
        """
        resources_field = []
        # Non-list input is passed through unchanged.
        if not isinstance(resources_to_tag, list): return resources_to_tag
        for resource_to_tag in resources_to_tag:
            res = {}

            # Python 2 compatibility: `unicode` only exists on py2; on
            # py3 referencing it raises NameError and the branch is a
            # no-op (py3 strings are handled by the str check below).
            try:
                if isinstance(resource_to_tag, unicode):
                    res = {"resource_id": resource_to_tag}
            except NameError:
                pass

            if isinstance(resource_to_tag, str) or isinstance(resource_to_tag, int):
                res = {"resource_id": str(resource_to_tag)}
            elif isinstance(resource_to_tag, object_class):
                # Accept full resource objects and extract their id.
                res = {"resource_id": str(resource_to_tag.id)}

            if len(res) > 0:
                res["resource_type"] = resource_type
                resources_field.append(res)

        return resources_field

    def add_droplets(self, droplet):
        """
        Add the Tag to a Droplet.

        Attributes accepted at creation time:
            droplet: array of string or array of int, or array of Droplets.
        """
        droplets = droplet
        if not isinstance(droplets, list):
            droplets = [droplet]

        # Extracting data from the Droplet object
        resources = self.__build_resources_field(droplets, Droplet, "droplet")
        if len(resources) > 0:
            return self.__add_resources(resources)

        return False

    def remove_droplets(self, droplet):
        """
        Remove the Tag from the Droplet.

        Attributes accepted at creation time:
            droplet: array of string or array of int, or array of Droplets.
        """
        droplets = droplet
        if not isinstance(droplets, list):
            droplets = [droplet]

        # Build resources field from the Droplet objects
        resources = self.__build_resources_field(droplets, Droplet, "droplet")
        if len(resources) > 0:
            return self.__remove_resources(resources)

        return False

    def add_snapshots(self, snapshots):
        """
        Add the Tag to the Snapshot.

        Attributes accepted at creation time:
            snapshots: array of string or array of int or array of Snapshot.
        """
        if not isinstance(snapshots, list):
            snapshots = [snapshots]

        resources = self.__build_resources_field(snapshots, Snapshot, "volume_snapshot")
        if len(resources) > 0:
            return self.__add_resources(resources)

        return False

    def remove_snapshots(self, snapshots):
        """
        Remove the Tag from the Snapshot.

        Attributes accepted at creation time:
            snapshots: array of string or array of int or array of Snapshot.
        """
        if not isinstance(snapshots, list):
            snapshots = [snapshots]

        resources = self.__build_resources_field(snapshots, Snapshot, "volume_snapshot")
        if len(resources) > 0:
            return self.__remove_resources(resources)

        return False

    def __str__(self):
        return "<Tag: %s>" % self.name
| 5,846 | Python | .py | 141 | 30.496454 | 88 | 0.58394 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,281 | Action.py | koalalorenzo_python-digitalocean/digitalocean/Action.py | # -*- coding: utf-8 -*-
from time import sleep
from .baseapi import BaseAPI
class Action(BaseAPI):
    """
    Represents a DigitalOcean Action: an asynchronous event triggered on
    a resource (e.g. a Droplet reboot), polled until completion.
    """

    def __init__(self, *args, **kwargs):
        # Attributes returned by the API.
        self.id = None
        self.status = None
        self.type = None
        self.started_at = None
        self.completed_at = None
        self.resource_id = None
        self.resource_type = None
        self.region = None
        self.region_slug = None
        # Custom, not provided by the json object.
        self.droplet_id = None
        super(Action, self).__init__(*args, **kwargs)

    @classmethod
    def get_object(cls, api_token, action_id):
        """
        Fetch an Action by its ID and return the populated object.
        """
        action = cls(token=api_token, id=action_id)
        action.load_directly()
        return action

    def load_directly(self):
        """
        Refresh this action from the generic /actions endpoint.
        """
        data = self.get_data("actions/%s" % self.id)
        if data:
            for attr, value in data[u'action'].items():
                setattr(self, attr, value)

    def load(self):
        """
        Refresh this action, using the droplet-scoped endpoint when a
        droplet_id is known, the generic one otherwise.
        """
        if not self.droplet_id:
            self.load_directly()
            return
        data = self.get_data(
            "droplets/%s/actions/%s" % (self.droplet_id, self.id)
        )
        if data:
            for attr, value in data[u'action'].items():
                setattr(self, attr, value)

    def wait(self, update_every_seconds=1, repeat=20):
        """
        Wait until the action is marked as completed or with an error.
        It will return True in case of success, otherwise False.

        Optional Args:
            update_every_seconds - int : number of seconds to wait before
                checking if the action is completed.
            repeat - int : maximum number of polls before giving up.
        """
        attempts = 0
        while self.status == u'in-progress':
            sleep(update_every_seconds)
            self.load()
            attempts += 1
            if attempts > repeat:
                break

        return self.status == u'completed'

    def __str__(self):
        return "<Action: %s %s %s>" % (self.id, self.type, self.status)
| 2,271 | Python | .py | 65 | 24.353846 | 77 | 0.538952 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,282 | LoadBalancer.py | koalalorenzo_python-digitalocean/digitalocean/LoadBalancer.py | # -*- coding: utf-8 -*-
from .baseapi import BaseAPI, GET, POST, PUT, DELETE
class StickySessions(object):
    """
    An object holding information on a LoadBalancer's sticky sessions settings.

    Args:
        type (str): The type of sticky sessions used. Can be "cookies" or
            "none"
        cookie_name (str, optional): The name used for the client cookie when
            using cookies for sticky sessions. Defaults to "DO-LB" when
            type is "cookies" and no name is given.
        cookie_ttl_seconds (int, optional): The number of seconds until the
            cookie expires. Defaults to 300 when type is "cookies" and no
            value is given.
    """
    def __init__(self, type='none', cookie_name='', cookie_ttl_seconds=None,
                 **kwargs):
        self.type = type
        if type == 'cookies':
            # Apply the documented defaults only when the caller did not
            # supply explicit values. The previous implementation set the
            # defaults and then unconditionally overwrote them with the
            # (empty) parameters, making the defaults dead code.
            self.cookie_name = cookie_name or 'DO-LB'
            self.cookie_ttl_seconds = (300 if cookie_ttl_seconds is None
                                       else cookie_ttl_seconds)
        else:
            self.cookie_name = cookie_name
            self.cookie_ttl_seconds = cookie_ttl_seconds
class ForwardingRule(object):
    """
    An object holding information about a LoadBalancer forwarding rule setting.

    Args:
        entry_protocol (str): The protocol used for traffic to a LoadBalancer.
            The possible values are: "http", "https", or "tcp"
        entry_port (int): The port the LoadBalancer instance will listen on
        target_protocol (str): The protocol used for traffic from a
            LoadBalancer to the backend Droplets. The possible values are:
            "http", "https", or "tcp"
        target_port (int): The port on the backend Droplets on which the
            LoadBalancer will send traffic
        certificate_id (str, optional): The ID of the TLS certificate used for
            SSL termination if enabled
        tls_passthrough (bool, optional): A boolean indicating if SSL encrypted
            traffic will be passed through to the backend Droplets
    """
    def __init__(self, entry_protocol=None, entry_port=None,
                 target_protocol=None, target_port=None, certificate_id="",
                 tls_passthrough=False):
        # Store every constructor argument as a same-named attribute;
        # LoadBalancer serializes rules via rule.__dict__.
        self.__dict__.update(
            entry_protocol=entry_protocol,
            entry_port=entry_port,
            target_protocol=target_protocol,
            target_port=target_port,
            certificate_id=certificate_id,
            tls_passthrough=tls_passthrough,
        )
class HealthCheck(object):
    """
    An object holding information about a LoadBalancer health check settings.

    Args:
        protocol (str): The protocol used for health checks. The possible
            values are "http" or "tcp".
        port (int): The port on the backend Droplets for health checks
        path (str): The path to send a health check request to
        check_interval_seconds (int): The number of seconds between two
            consecutive health checks
        response_timeout_seconds (int): The number of seconds the Load Balancer
            instance will wait for a response until marking a check as failed
        healthy_threshold (int): The number of times a health check must fail
            for a backend Droplet to be removed from the pool
        unhealthy_threshold (int): The number of times a health check must pass
            for a backend Droplet to be re-added to the pool
    """
    def __init__(self, protocol='http', port=80, path='/',
                 check_interval_seconds=10, response_timeout_seconds=5,
                 healthy_threshold=5, unhealthy_threshold=3):
        # Store every constructor argument as a same-named attribute;
        # LoadBalancer serializes the check via health_check.__dict__.
        self.__dict__.update(
            protocol=protocol,
            port=port,
            path=path,
            check_interval_seconds=check_interval_seconds,
            response_timeout_seconds=response_timeout_seconds,
            healthy_threshold=healthy_threshold,
            unhealthy_threshold=unhealthy_threshold,
        )
class LoadBalancer(BaseAPI):
"""
An object representing an DigitalOcean Load Balancer.
Attributes accepted at creation time:
Args:
name (str): The Load Balancer's name
region (str): The slug identifier for a DigitalOcean region
size (str): The size of the load balancer. The available sizes \
are "lb-small", "lb-medium", or "lb-large". Once you have \
created a load balancer, you can't change its size
algorithm (str, optional): The load balancing algorithm to be \
used. Currently, it must be either "round_robin" or \
"least_connections"
forwarding_rules (obj:`list`): A list of `ForwrdingRules` objects
health_check (obj, optional): A `HealthCheck` object
sticky_sessions (obj, optional): A `StickySessions` object
redirect_http_to_https (bool, optional): A boolean indicating \
whether HTTP requests to the Load Balancer should be \
redirected to HTTPS
enable_proxy_protocol (bool, optional): A boolean value indicating \
whether PROXY Protocol is in use
enable_backend_keepalive (bool, optional): A boolean value \
indicating whether HTTP keepalive connections are maintained \
to target Droplets.
droplet_ids (obj:`list` of `int`): A list of IDs representing \
Droplets to be added to the Load Balancer (mutually \
exclusive with 'tag')
tag (str): A string representing a DigitalOcean Droplet tag \
(mutually exclusive with 'droplet_ids')
vpc_uuid (str): ID of a VPC in which the Load Balancer will be created
Attributes returned by API:
* name (str): The Load Balancer's name
* id (str): An unique identifier for a LoadBalancer
* ip (str): Public IP address for a LoadBalancer
* region (str): The slug identifier for a DigitalOcean region
* size (str): The size of the load balancer
* algorithm (str, optional): The load balancing algorithm to be \
used. Currently, it must be either "round_robin" or \
"least_connections"
* forwarding_rules (obj:`list`): A list of `ForwrdingRules` objects
* health_check (obj, optional): A `HealthCheck` object
* sticky_sessions (obj, optional): A `StickySessions` object
* redirect_http_to_https (bool, optional): A boolean indicating \
whether HTTP requests to the Load Balancer should be \
redirected to HTTPS
* enable_proxy_protocol (bool, optional): A boolean value indicating \
whether PROXY Protocol is in use
* enable_backend_keepalive (bool, optional): A boolean value \
indicating whether HTTP keepalive connections are maintained \
to target Droplets.
* droplet_ids (obj:`list` of `int`): A list of IDs representing \
Droplets to be added to the Load Balancer
* tag (str): A string representing a DigitalOcean Droplet tag
* status (string): An indication the current state of the LoadBalancer
* created_at (str): The date and time when the LoadBalancer was created
* vpc_uuid (str): ID of a VPC which the Load Balancer is assigned to
"""
def __init__(self, *args, **kwargs):
self.id = None
self.name = None
self.region = None
self.size = None
self.algorithm = None
self.forwarding_rules = []
self.health_check = None
self.sticky_sessions = None
self.redirect_http_to_https = False
self.enable_proxy_protocol = False
self.enable_backend_keepalive = False
self.droplet_ids = []
self.tag = None
self.status = None
self.created_at = None
self.vpc_uuid = None
super(LoadBalancer, self).__init__(*args, **kwargs)
@classmethod
def get_object(cls, api_token, id):
"""
Class method that will return a LoadBalancer object by its ID.
Args:
api_token (str): DigitalOcean API token
id (str): Load Balancer ID
"""
load_balancer = cls(token=api_token, id=id)
load_balancer.load()
return load_balancer
def load(self):
"""
Loads updated attributes for a LoadBalancer object.
Requires self.id to be set.
"""
data = self.get_data('load_balancers/%s' % self.id, type=GET)
load_balancer = data['load_balancer']
# Setting the attribute values
for attr in load_balancer.keys():
if attr == 'health_check':
health_check = HealthCheck(**load_balancer['health_check'])
setattr(self, attr, health_check)
elif attr == 'sticky_sessions':
sticky_ses = StickySessions(**load_balancer['sticky_sessions'])
setattr(self, attr, sticky_ses)
elif attr == 'forwarding_rules':
rules = list()
for rule in load_balancer['forwarding_rules']:
rules.append(ForwardingRule(**rule))
setattr(self, attr, rules)
else:
setattr(self, attr, load_balancer[attr])
return self
    def create(self, *args, **kwargs):
        """
        Creates a new LoadBalancer.

        Note: Every argument and parameter given to this method will be
        assigned to the object.

        Args:
            name (str): The Load Balancer's name
            region (str): The slug identifier for a DigitalOcean region
            size (str): The size of the load balancer. The available sizes
                are "lb-small", "lb-medium", or "lb-large". Once you have
                created a load balancer, you can't change its size
            algorithm (str, optional): The load balancing algorithm to be
                used. Currently, it must be either "round_robin" or
                "least_connections"
            forwarding_rules (obj:`list`): A list of `ForwardingRule` objects
            health_check (obj, optional): A `HealthCheck` object
            sticky_sessions (obj, optional): A `StickySessions` object
            redirect_http_to_https (bool, optional): A boolean indicating
                whether HTTP requests to the Load Balancer should be
                redirected to HTTPS
            enable_proxy_protocol (bool, optional): A boolean value indicating
                whether PROXY Protocol is in use
            enable_backend_keepalive (bool, optional): A boolean value
                indicating whether HTTP keepalive connections are maintained
                to target Droplets.
            droplet_ids (obj:`list` of `int`): A list of IDs representing
                Droplets to be added to the Load Balancer (mutually
                exclusive with 'tag')
            tag (str): A string representing a DigitalOcean Droplet tag
                (mutually exclusive with 'droplet_ids')
            vpc_uuid (str): ID of a VPC in which the Load Balancer will be
                created
        """
        # Serialize the rule objects into plain dicts for the API payload.
        rules_dict = [rule.__dict__ for rule in self.forwarding_rules]

        params = {'name': self.name, 'region': self.region,
                  'size': self.size,
                  'forwarding_rules': rules_dict,
                  'redirect_http_to_https': self.redirect_http_to_https,
                  'enable_proxy_protocol': self.enable_proxy_protocol,
                  'enable_backend_keepalive': self.enable_backend_keepalive,
                  'vpc_uuid': self.vpc_uuid}

        # 'droplet_ids' and 'tag' are mutually exclusive per the API.
        if self.droplet_ids and self.tag:
            raise ValueError('droplet_ids and tag are mutually exclusive args')
        elif self.tag:
            params['tag'] = self.tag
        else:
            params['droplet_ids'] = self.droplet_ids

        # Optional sub-objects are only sent when set.
        if self.algorithm:
            params['algorithm'] = self.algorithm
        if self.health_check:
            params['health_check'] = self.health_check.__dict__
        if self.sticky_sessions:
            params['sticky_sessions'] = self.sticky_sessions.__dict__

        data = self.get_data('load_balancers', type=POST, params=params)

        if data:
            # Copy the server-assigned fields back onto the instance.
            self.id = data['load_balancer']['id']
            self.ip = data['load_balancer']['ip']
            self.algorithm = data['load_balancer']['algorithm']
            self.size = data['load_balancer']['size']
            self.health_check = HealthCheck(
                **data['load_balancer']['health_check'])
            self.sticky_sessions = StickySessions(
                **data['load_balancer']['sticky_sessions'])
            self.droplet_ids = data['load_balancer']['droplet_ids']
            self.status = data['load_balancer']['status']
            self.created_at = data['load_balancer']['created_at']
            self.redirect_http_to_https = data['load_balancer']['redirect_http_to_https']
            self.enable_proxy_protocol = data['load_balancer']['enable_proxy_protocol']
            self.enable_backend_keepalive = data['load_balancer']['enable_backend_keepalive']
            self.vpc_uuid = data['load_balancer']['vpc_uuid']

        return self
def save(self):
"""
Save the LoadBalancer
"""
forwarding_rules = [rule.__dict__ for rule in self.forwarding_rules]
data = {
'name': self.name,
'region': self.region['slug'],
'forwarding_rules': forwarding_rules,
'redirect_http_to_https': self.redirect_http_to_https,
'enable_proxy_protocol': self.enable_proxy_protocol,
'enable_backend_keepalive': self.enable_backend_keepalive,
'vpc_uuid': self.vpc_uuid
}
if self.tag:
data['tag'] = self.tag
else:
data['droplet_ids'] = self.droplet_ids
if self.algorithm:
data["algorithm"] = self.algorithm
if self.health_check:
data['health_check'] = self.health_check.__dict__
if self.sticky_sessions:
data['sticky_sessions'] = self.sticky_sessions.__dict__
return self.get_data("load_balancers/%s" % self.id,
type=PUT,
params=data)
def destroy(self):
"""
Destroy the LoadBalancer
"""
return self.get_data('load_balancers/%s' % self.id, type=DELETE)
def add_droplets(self, droplet_ids):
"""
Assign a LoadBalancer to a Droplet.
Args:
droplet_ids (obj:`list` of `int`): A list of Droplet IDs
"""
return self.get_data(
"load_balancers/%s/droplets" % self.id,
type=POST,
params={"droplet_ids": droplet_ids}
)
def remove_droplets(self, droplet_ids):
"""
Unassign a LoadBalancer.
Args:
droplet_ids (obj:`list` of `int`): A list of Droplet IDs
"""
return self.get_data(
"load_balancers/%s/droplets" % self.id,
type=DELETE,
params={"droplet_ids": droplet_ids}
)
def add_forwarding_rules(self, forwarding_rules):
"""
Adds new forwarding rules to a LoadBalancer.
Args:
forwarding_rules (obj:`list`): A list of `ForwrdingRules` objects
"""
rules_dict = [rule.__dict__ for rule in forwarding_rules]
return self.get_data(
"load_balancers/%s/forwarding_rules" % self.id,
type=POST,
params={"forwarding_rules": rules_dict}
)
def remove_forwarding_rules(self, forwarding_rules):
"""
Removes existing forwarding rules from a LoadBalancer.
Args:
forwarding_rules (obj:`list`): A list of `ForwrdingRules` objects
"""
rules_dict = [rule.__dict__ for rule in forwarding_rules]
return self.get_data(
"load_balancers/%s/forwarding_rules" % self.id,
type=DELETE,
params={"forwarding_rules": rules_dict}
)
def __str__(self):
return "%s" % (self.id)
| 15,837 | Python | .py | 333 | 36.681682 | 93 | 0.610015 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,283 | Balance.py | koalalorenzo_python-digitalocean/digitalocean/Balance.py | # -*- coding: utf-8 -*-
from .baseapi import BaseAPI
class Balance(BaseAPI):
    """The account's balance information, as reported by the API."""

    def __init__(self, *args, **kwargs):
        # All fields are populated by load() from the balance endpoint.
        self.month_to_date_balance = None
        self.account_balance = None
        self.month_to_date_usage = None
        self.generated_at = None
        super(Balance, self).__init__(*args, **kwargs)

    @classmethod
    def get_object(cls, api_token):
        """
        Class method that will return a fully-loaded Balance object.
        """
        balance = cls(token=api_token)
        balance.load()
        return balance

    def load(self):
        # URL https://api.digitalocean.com/customers/my/balance
        data = self.get_data("customers/my/balance")
        for attr, value in data.items():
            setattr(self, attr, value)

    def __str__(self):
        return "<Balance: %s>" % (self.account_balance)
| 847 | Python | .py | 24 | 27.5 | 63 | 0.595588 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,284 | FloatingIP.py | koalalorenzo_python-digitalocean/digitalocean/FloatingIP.py | # -*- coding: utf-8 -*-
from .baseapi import BaseAPI, GET, POST, DELETE
class FloatingIP(BaseAPI):
    """A DigitalOcean Floating IP address."""

    def __init__(self, *args, **kwargs):
        self.ip = None
        self.droplet = []
        self.region = []
        super(FloatingIP, self).__init__(*args, **kwargs)

    @classmethod
    def get_object(cls, api_token, ip):
        """
        Return a FloatingIP object loaded by its IP address.

        Args:
            api_token: str - token
            ip: str - floating ip address
        """
        instance = cls(token=api_token, ip=ip)
        instance.load()
        return instance

    def load(self):
        """
        Fetch this FloatingIP's attributes from DigitalOcean.

        Requires self.ip to be set.
        """
        response = self.get_data('floating_ips/%s' % self.ip, type=GET)
        # Copy every returned attribute onto this object.
        for attr, value in response['floating_ip'].items():
            setattr(self, attr, value)
        return self

    def create(self, *args, **kwargs):
        """
        Creates a FloatingIP and assigns it to a Droplet.

        Note: Every argument and parameter given to this method will be
        assigned to the object.

        Args:
            droplet_id: int - droplet id
        """
        response = self.get_data('floating_ips/',
                                 type=POST,
                                 params={'droplet_id': self.droplet_id})
        if response:
            created = response['floating_ip']
            self.ip = created['ip']
            self.region = created['region']
        return self

    def reserve(self, *args, **kwargs):
        """
        Creates a FloatingIP in a region without assigning
        it to a specific Droplet.

        Note: Every argument and parameter given to this method will be
        assigned to the object.

        Args:
            region_slug: str - region's slug (e.g. 'nyc3')
        """
        response = self.get_data('floating_ips/',
                                 type=POST,
                                 params={'region': self.region_slug})
        if response:
            reserved = response['floating_ip']
            self.ip = reserved['ip']
            self.region = reserved['region']
        return self

    def destroy(self):
        """
        Destroy the FloatingIP
        """
        return self.get_data('floating_ips/%s/' % self.ip, type=DELETE)

    def assign(self, droplet_id):
        """
        Assign a FloatingIP to a Droplet.

        Args:
            droplet_id: int - droplet id
        """
        action = {"type": "assign", "droplet_id": droplet_id}
        return self.get_data("floating_ips/%s/actions/" % self.ip,
                             type=POST,
                             params=action)

    def unassign(self):
        """
        Unassign a FloatingIP.
        """
        return self.get_data("floating_ips/%s/actions/" % self.ip,
                             type=POST,
                             params={"type": "unassign"})

    def __str__(self):
        return "%s" % (self.ip)
| 3,095 | Python | .py | 88 | 23.943182 | 75 | 0.514247 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,285 | SSHKey.py | koalalorenzo_python-digitalocean/digitalocean/SSHKey.py | # -*- coding: utf-8 -*-
from .baseapi import BaseAPI, GET, POST, DELETE, PUT
class SSHKey(BaseAPI):
    """A public SSH key stored in the DigitalOcean account."""

    def __init__(self, *args, **kwargs):
        self.id = ""
        self.name = None
        self.public_key = None
        self.fingerprint = None
        super(SSHKey, self).__init__(*args, **kwargs)

    @classmethod
    def get_object(cls, api_token, ssh_key_id):
        """
        Class method that will return a SSHKey object by ID.
        """
        key = cls(token=api_token, id=ssh_key_id)
        key.load()
        return key

    def load(self):
        """
        Load the SSHKey object from DigitalOcean.

        Requires either self.id or self.fingerprint to be set.
        """
        # Prefer the numeric id; fall back to the fingerprint.
        if self.id:
            identifier = self.id
        elif self.fingerprint is not None:
            identifier = self.fingerprint
        else:
            identifier = None
        response = self.get_data("account/keys/%s" % identifier, type=GET)
        key_json = response['ssh_key']
        for attr, value in key_json.items():
            setattr(self, attr, value)
        self.id = key_json['id']

    def load_by_pub_key(self, public_key):
        """
        This method will load a SSHKey object from DigitalOcean
        from a public_key. This method will avoid problems like
        uploading the same public_key twice.
        """
        response = self.get_data("account/keys/")
        for key_json in response['ssh_keys']:
            if key_json.get('public_key', "") == public_key:
                self.id = key_json['id']
                self.load()
                return self
        return None

    def create(self):
        """
        Create the SSH Key
        """
        payload = {
            "name": self.name,
            "public_key": self.public_key,
        }
        response = self.get_data("account/keys/", type=POST, params=payload)
        if response:
            self.id = response['ssh_key']['id']

    def edit(self):
        """
        Edit the SSH Key
        """
        payload = {
            "name": self.name,
            "public_key": self.public_key,
        }
        response = self.get_data("account/keys/%s" % self.id,
                                 type=PUT,
                                 params=payload)
        if response:
            self.id = response['ssh_key']['id']

    def destroy(self):
        """
        Destroy the SSH Key
        """
        return self.get_data("account/keys/%s" % self.id, type=DELETE)

    def __str__(self):
        return "<SSHKey: %s %s>" % (self.id, self.name)
| 2,609 | Python | .py | 79 | 23.126582 | 77 | 0.523297 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,286 | __init__.py | koalalorenzo_python-digitalocean/digitalocean/__init__.py | # -*- coding: utf-8 -*-
"""digitalocean API to manage droplets"""
__version__ = "1.16.0"
__author__ = "Lorenzo Setale ( http://who.is.lorenzo.setale.me/? )"
__author_email__ = "lorenzo@setale.me"
__license__ = "LGPL v3"
__copyright__ = "Copyright (c) 2012-2020 Lorenzo Setale"
from .Manager import Manager
from .Droplet import Droplet, DropletError, BadKernelObject, BadSSHKeyFormat
from .Region import Region
from .Size import Size
from .Image import Image
from .Action import Action
from .Account import Account
from .Balance import Balance
from .Domain import Domain
from .Record import Record
from .SSHKey import SSHKey
from .Kernel import Kernel
from .FloatingIP import FloatingIP
from .Volume import Volume
from .baseapi import Error, EndPointError, TokenError, DataReadError, NotFoundError
from .Tag import Tag
from .LoadBalancer import LoadBalancer
from .LoadBalancer import StickySessions, ForwardingRule, HealthCheck
from .Certificate import Certificate
from .Snapshot import Snapshot
from .Project import Project
from .Firewall import Firewall, InboundRule, OutboundRule, Destinations, Sources
from .VPC import VPC
from .CDNEndpoint import CDNEndpoint
| 1,165 | Python | .py | 31 | 36.516129 | 83 | 0.80742 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,287 | VPC.py | koalalorenzo_python-digitalocean/digitalocean/VPC.py | # -*- coding: utf-8 -*-
from .baseapi import BaseAPI, PATCH, POST, DELETE
class VPC(BaseAPI):
    """
    An object representing a DigitalOcean VPC.

    Attributes accepted at creation time:
        Args:
            name (str): A name for the VPC
            region (str): The slug for the region where the VPC will be created
            description(str): A free-form text field for describing the VPC
            ip_range (str): The requested range of IP addresses for the VPC in \
            CIDR notation

    Attributes returned by API:
        * id (str): A unique identifier for the VPC
        * name (str): The name of the VPC
        * region (str): The slug for the region where the VPC is located
        * description(str): A free-form text field for describing the VPC
        * ip_range (str): The requested range of IP addresses for the VPC in \
        CIDR notation
        * urn (str): The uniform resource name (URN) for the VPC
        * created_at (str): A string that represents when the VPC was created
        * default (bool): A boolean representing whether or not the VPC is the \
        user's default VPC for the region
    """

    def __init__(self, *args, **kwargs):
        self.id = ""
        self.name = None
        self.region = None
        self.description = None
        self.ip_range = None
        self.urn = None
        self.created_at = None
        self.default = False
        super(VPC, self).__init__(*args, **kwargs)

    @classmethod
    def get_object(cls, api_token, vpc_id):
        """
        Class method that will return a VPC object by its ID.
        """
        vpc = cls(token=api_token, id=vpc_id)
        vpc.load()
        return vpc

    def load(self):
        """
        Load the VPC object from DigitalOcean.

        Requires self.id to be set.
        """
        data = self.get_data("vpcs/%s" % self.id)
        vpc = data["vpc"]
        for attr in vpc.keys():
            setattr(self, attr, vpc[attr])
        return self

    def create(self):
        """
        Create the VPC and copy the attributes returned by the API
        (id, urn, created_at, default, ...) onto this object.
        """
        params = {
            "name": self.name,
            "region": self.region,
            "description": self.description,
            "ip_range": self.ip_range
        }
        data = self.get_data("vpcs", type=POST, params=params)
        if data:
            vpc = data['vpc']
            for attr in ('id', 'name', 'region', 'description', 'ip_range',
                         'urn', 'created_at', 'default'):
                setattr(self, attr, vpc[attr])
        return self

    # NOTE: this method was previously defined twice with identical bodies;
    # the duplicate definition (which silently shadowed the first) was removed.
    def rename(self, new_name):
        """
        Rename a VPC

        Args:
            new_name (str): The new name for the VPC
        """
        data = self.get_data("vpcs/%s" % self.id,
                             type=PATCH,
                             params={"name": new_name})
        vpc = data["vpc"]
        for attr in vpc.keys():
            setattr(self, attr, vpc[attr])
        return self

    def destroy(self):
        """
        Delete the VPC
        """
        return self.get_data("vpcs/%s" % self.id, type=DELETE)

    def __str__(self):
        return "<VPC: %s %s>" % (self.id, self.name)
| 3,820 | Python | .py | 106 | 25.603774 | 80 | 0.527944 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,288 | Account.py | koalalorenzo_python-digitalocean/digitalocean/Account.py | # -*- coding: utf-8 -*-
from .baseapi import BaseAPI
class Account(BaseAPI):
    """The DigitalOcean account associated with the API token."""

    def __init__(self, *args, **kwargs):
        # All fields are populated by load() from the /v2/account endpoint.
        self.droplet_limit = None
        self.floating_ip_limit = None
        self.email = None
        self.uuid = None
        self.email_verified = None
        self.status = None
        self.status_message = None
        super(Account, self).__init__(*args, **kwargs)

    @classmethod
    def get_object(cls, api_token):
        """
        Class method that will return an Account object.
        """
        account = cls(token=api_token)
        account.load()
        return account

    def load(self):
        # URL https://api.digitalocean.com/v2/account
        data = self.get_data("account/")
        for attr, value in data['account'].items():
            setattr(self, attr, value)

    def __str__(self):
        return "%s" % (self.email)
| 904 | Python | .py | 28 | 24.321429 | 60 | 0.573072 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,289 | Volume.py | koalalorenzo_python-digitalocean/digitalocean/Volume.py | # -*- coding: utf-8 -*-
from .baseapi import BaseAPI, POST, DELETE
from .Snapshot import Snapshot
class Volume(BaseAPI):
    """A DigitalOcean Block Storage volume."""

    def __init__(self, *args, **kwargs):
        self.id = None
        self.name = None
        self.droplet_ids = []
        self.region = None
        self.description = None
        self.size_gigabytes = None
        self.created_at = None
        self.snapshot_id = None
        self.filesystem_type = None
        self.filesystem_label = None
        self.tags = None
        super(Volume, self).__init__(*args, **kwargs)

    @classmethod
    def get_object(cls, api_token, volume_id):
        """
        Class method that will return an Volume object by ID.
        """
        volume = cls(token=api_token, id=volume_id)
        return volume.load()

    def load(self):
        """Refresh this volume's attributes from the API."""
        response = self.get_data("volumes/%s" % self.id)
        for attr, value in response['volume'].items():
            setattr(self, attr, value)
        return self

    def create(self, *args, **kwargs):
        """
        Creates a Block Storage volume

        Note: Every argument and parameter given to this method will be
        assigned to the object.

        Args:
            name: string - a name for the volume
            region: string - slug identifier for the region
            size_gigabytes: int - size of the Block Storage volume in GiB
            filesystem_type: string, optional - name of the filesystem type the
                volume will be formatted with ('ext4' or 'xfs')
            filesystem_label: string, optional - the label to be applied to the
                filesystem, only used in conjunction with filesystem_type

        Optional Args:
            description: string - text field to describe a volume
            tags: List[string], optional - the tags to be applied to the volume
        """
        payload = {
            'name': self.name,
            'region': self.region,
            'size_gigabytes': self.size_gigabytes,
            'description': self.description,
            'filesystem_type': self.filesystem_type,
            'filesystem_label': self.filesystem_label,
            'tags': self.tags,
        }
        response = self.get_data('volumes/', type=POST, params=payload)
        if response:
            self.id = response['volume']['id']
            self.created_at = response['volume']['created_at']
        return self

    def create_from_snapshot(self, *args, **kwargs):
        """
        Creates a Block Storage volume from a snapshot.

        Note: Every argument and parameter given to this method will be
        assigned to the object.

        Args:
            name: string - a name for the volume
            snapshot_id: string - unique identifier for the volume snapshot
            size_gigabytes: int - size of the Block Storage volume in GiB
            filesystem_type: string, optional - name of the filesystem type the
                volume will be formatted with ('ext4' or 'xfs')
            filesystem_label: string, optional - the label to be applied to the
                filesystem, only used in conjunction with filesystem_type

        Optional Args:
            description: string - text field to describe a volume
            tags: List[string], optional - the tags to be applied to the volume
        """
        payload = {
            'name': self.name,
            'snapshot_id': self.snapshot_id,
            'region': self.region,
            'size_gigabytes': self.size_gigabytes,
            'description': self.description,
            'filesystem_type': self.filesystem_type,
            'filesystem_label': self.filesystem_label,
            'tags': self.tags,
        }
        response = self.get_data('volumes/', type=POST, params=payload)
        if response:
            self.id = response['volume']['id']
            self.created_at = response['volume']['created_at']
        return self

    def destroy(self):
        """
        Destroy a volume
        """
        return self.get_data("volumes/%s/" % self.id, type=DELETE)

    def attach(self, droplet_id, region):
        """
        Attach a Volume to a Droplet.

        Args:
            droplet_id: int - droplet id
            region: string - slug identifier for the region
        """
        action = {"type": "attach",
                  "droplet_id": droplet_id,
                  "region": region}
        return self.get_data("volumes/%s/actions/" % self.id,
                             type=POST,
                             params=action)

    def detach(self, droplet_id, region):
        """
        Detach a Volume from a Droplet.

        Args:
            droplet_id: int - droplet id
            region: string - slug identifier for the region
        """
        action = {"type": "detach",
                  "droplet_id": droplet_id,
                  "region": region}
        return self.get_data("volumes/%s/actions/" % self.id,
                             type=POST,
                             params=action)

    def resize(self, size_gigabytes, region):
        """
        Resize a Volume.

        Args:
            size_gigabytes: int - size of the Block Storage volume in GiB
            region: string - slug identifier for the region
        """
        action = {"type": "resize",
                  "size_gigabytes": size_gigabytes,
                  "region": region}
        return self.get_data("volumes/%s/actions/" % self.id,
                             type=POST,
                             params=action)

    def snapshot(self, name):
        """
        Create a snapshot of the volume.

        Args:
            name: string - a human-readable name for the snapshot
        """
        return self.get_data("volumes/%s/snapshots/" % self.id,
                             type=POST,
                             params={"name": name})

    def get_snapshots(self):
        """
        Retrieve the list of snapshots that have been created from a volume.
        """
        response = self.get_data("volumes/%s/snapshots/" % self.id)
        snapshots = []
        for snapshot_json in response[u'snapshots']:
            snapshot = Snapshot(**snapshot_json)
            # Propagate our credentials so the snapshot can make API calls.
            snapshot.token = self.tokens
            snapshots.append(snapshot)
        return snapshots

    def __str__(self):
        return "<Volume: %s %s %s>" % (self.id, self.name, self.size_gigabytes)
| 6,778 | Python | .py | 167 | 27.161677 | 79 | 0.519988 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,290 | Size.py | koalalorenzo_python-digitalocean/digitalocean/Size.py | # -*- coding: utf-8 -*-
from .baseapi import BaseAPI
class Size(BaseAPI):
    """A Droplet size: slug, resources and pricing information."""

    def __init__(self, *args, **kwargs):
        # Scalar attributes default to None; the region list starts empty.
        for attr in ('slug', 'memory', 'vcpus', 'disk', 'transfer',
                     'price_monthly', 'price_hourly'):
            setattr(self, attr, None)
        self.regions = []
        super(Size, self).__init__(*args, **kwargs)

    def __str__(self):
        return "%s" % (self.slug)
| 453 | Python | .py | 15 | 23.066667 | 51 | 0.552995 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,291 | Kernel.py | koalalorenzo_python-digitalocean/digitalocean/Kernel.py | # -*- coding: utf-8 -*-
from .baseapi import BaseAPI
class Kernel(BaseAPI):
    """A kernel, identified by name, id and version."""

    def __init__(self, *args, **kwargs):
        # All attributes default to the empty string.
        for attr in ('name', 'id', 'version'):
            setattr(self, attr, "")
        super(Kernel, self).__init__(*args, **kwargs)

    def __str__(self):
        return "<Kernel: %s %s>" % (self.name, self.version)
| 328 | Python | .py | 10 | 26.7 | 60 | 0.539683 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,292 | Project.py | koalalorenzo_python-digitalocean/digitalocean/Project.py | from .baseapi import BaseAPI, GET, POST, DELETE, PUT
class Project(BaseAPI):
    """A DigitalOcean Project, used to group account resources.

    Attributes:
        name (str): Name of the project (required at creation)
        purpose (str): Purpose of the project (required at creation)
        description (str): Description of the project (optional)
        environment (str): One of 'Development', 'Staging', 'Production'
        is_default (bool): Whether this is the account's default project
        resources (list of str): Resource URNs assigned to the project
    """

    def __init__(self, *args, **kwargs):
        self.name = None
        self.description = None
        self.purpose = None
        self.environment = None
        self.id = None
        self.is_default = None
        self.owner_uuid = None
        self.owner_id = None
        self.created_at = None
        self.updated_at = None
        self.resources = None
        super(Project, self).__init__(*args, **kwargs)

    @classmethod
    def get_object(cls, api_token, project_id):
        """Class method that will return a Project object by ID.

        Args:
            api_token (str): token
            project_id (str): project id
        """
        project = cls(token=api_token, id=project_id)
        project.load()
        return project

    def load(self):
        """Load this project's attributes from the API."""
        # URL https://api.digitalocean.com/v2/projects
        project = self.get_data("projects/%s" % self.id)
        project = project['project']
        for attr in project.keys():
            setattr(self, attr, project[attr])

    def set_as_default_project(self):
        """Mark this project as the account's default project.

        Returns the raw API response.
        """
        data = {
            "name": self.name,
            "description": self.description,
            "purpose": self.purpose,
            "environment": self.environment,
            "is_default": True
        }
        project = self.get_data("projects/%s" % self.id, type=PUT, params=data)
        return project

    def create_project(self):
        """Create the project from this object's attributes.

        Required attributes: name, purpose.
        Optional attributes: description, environment
        ('Development', 'Staging' or 'Production').
        """
        data = {
            "name": self.name,
            "purpose": self.purpose
        }
        # Optional fields are only sent when set.
        if self.description:
            data['description'] = self.description
        if self.environment:
            data['environment'] = self.environment
        data = self.get_data("projects", type=POST, params=data)
        if data:
            # Copy every attribute the API returns onto this object
            # (id, owner_uuid, owner_id, name, description, purpose,
            # environment, is_default, created_at, updated_at).
            for attr in data['project'].keys():
                setattr(self, attr, data['project'][attr])

    def delete_project(self):
        """Delete this project. Returns the raw API response."""
        return self.get_data("projects/%s" % self.id, type=DELETE, params=dict())

    def update_project(self, **kwargs):
        """Update this project; unspecified fields keep their current values.

        Keyword Args:
            name (str)
            description (str)
            purpose (str): one of the options offered by DigitalOcean:
                'Just trying out DigitalOcean',
                'Class project / Educational purposes',
                'Website or blog', 'Web Application', 'Service or API',
                'Mobile Application',
                'Machine learning / AI / Data processing',
                'IoT', 'Operational / Developer tooling', 'Other'
            environment (str): 'Development', 'Staging' or 'Production'
            is_default (bool)
        """
        data = {
            'name': kwargs.get("name", self.name),
            'description': kwargs.get("description", self.description),
            'purpose': kwargs.get("purpose", self.purpose),
            'environment': kwargs.get("environment", self.environment),
            'is_default': kwargs.get("is_default", self.is_default),
        }
        update_response = self.get_data("projects/%s" % self.id, type=PUT, params=data)
        for attr in update_response['project'].keys():
            setattr(self, attr, update_response['project'][attr])

    def get_all_resources(self):
        """Fetch the resource URNs assigned to this project.

        Sets self.resources and returns it.
        """
        project_resources_response = self.get_data("projects/%s/resources" % self.id)
        self.resources = [res['urn']
                          for res in project_resources_response['resources']]
        return self.resources

    def load_resources(self):
        """Refresh self.resources from the API.

        Same behavior as get_all_resources() but returns nothing;
        kept for backward compatibility.
        """
        self.get_all_resources()

    def assign_resource(self, resources):
        """Assign resources to this project.

        Args:
            resources (list of str): resource URNs to assign
        """
        data = {
            'resources': resources
        }
        return self.get_data("projects/%s/resources" % self.id, type=POST, params=data)

    def __str__(self):
        return "<Project: " + self.name + "> " + self.id
| 5,027 | Python | .py | 125 | 30.072 | 87 | 0.578128 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,293 | baseapi.py | koalalorenzo_python-digitalocean/digitalocean/baseapi.py | # -*- coding: utf-8 -*-
import os
import json
import logging
import requests
from . import __name__, __version__
try:
import urlparse
except ImportError:
from urllib import parse as urlparse
# HTTP verbs used throughout this module to select the request method
# in BaseAPI.get_data() / BaseAPI.__perform_request().
GET = 'GET'
POST = 'POST'
DELETE = 'DELETE'
PUT = 'PUT'
PATCH = 'PATCH'

# Environment variable read by BaseAPI.get_timeout(): when set to a float,
# it becomes the per-request timeout (in seconds) for DigitalOcean API calls.
REQUEST_TIMEOUT_ENV_VAR = 'PYTHON_DIGITALOCEAN_REQUEST_TIMEOUT_SEC'
class Error(Exception):
    """Base exception class for this module"""


class TokenError(Error):
    """Raised when a request is attempted without a valid API token."""


class DataReadError(Error):
    """Raised when the API responds with an error message for a request."""


class JSONReadError(Error):
    """Raised when a response body cannot be decoded as JSON."""


class NotFoundError(Error):
    """Raised when the API responds with HTTP 404."""


class EndPointError(Error):
    """Raised when the configured end point is not a valid URL."""


class ServerError(Error):
    """Raised when the server responds with a 5xx status code and no body"""
class BaseAPI(object):
    """
    Base class for all DigitalOcean API resources.

    Holds the API token(s), the end point URL and the shared request
    plumbing: HTTP method dispatch, automatic pagination, rate-limit
    bookkeeping and round-robin token rotation on HTTP 429.
    """
    # Class-level defaults; instances override these in __init__ / the
    # token setter below.
    tokens = []       # list of API tokens, rotated on rate-limit (429)
    _last_used = 0    # index into self.tokens of the token currently in use
    end_point = "https://api.digitalocean.com/v2/"

    def __init__(self, *args, **kwargs):
        """Initialise from environment variables, then apply any kwargs.

        Every keyword argument is set as an attribute on the instance,
        so subclasses can be constructed like cls(token=..., id=...).

        Raises:
            EndPointError: if the configured end point is not a valid URL.
        """
        self.token = os.getenv("DIGITALOCEAN_ACCESS_TOKEN", "")
        self.end_point = os.getenv("DIGITALOCEAN_END_POINT", "https://api.digitalocean.com/v2/")
        self._log = logging.getLogger(__name__)
        self._session = requests.Session()
        # Any keyword argument becomes an attribute (may override the
        # env-derived token/end_point set above).
        for attr in kwargs.keys():
            setattr(self, attr, kwargs[attr])
        parsed_url = urlparse.urlparse(self.end_point)
        if not parsed_url.scheme or not parsed_url.netloc:
            raise EndPointError("Provided end point is not a valid URL. Please use a valid URL")
        if not parsed_url.path:
            self.end_point += '/'

    def __getstate__(self):
        """Support pickling by dropping the unpicklable logger."""
        state = self.__dict__.copy()
        # The logger is not pickleable due to using thread.lock
        del state['_log']
        return state

    def __setstate__(self, state):
        """Restore from pickle and recreate the logger dropped above."""
        self.__dict__ = state
        self._log = logging.getLogger(__name__)

    def __perform_request(self, url, type=GET, params=None):
        """
        This method will perform the real request,
        in this way we can customize only the "output" of the API call by
        using self.__call_api method.
        This method will return the request object.

        Raises:
            TokenError: if no API token is configured.
        """
        if params is None:
            params = {}
        if not self.token:
            raise TokenError("No token provided. Please use a valid token")
        url = urlparse.urljoin(self.end_point, url)
        # lookup table to find out the appropriate requests method,
        # headers and payload type (json or query parameters)
        identity = lambda x: x
        json_dumps = lambda x: json.dumps(x)
        # NOTE(review): GET/PUT/DELETE reuse self._session while PATCH/POST
        # call the requests module directly — looks unintentional; confirm
        # before unifying, as session reuse also shares cookies/connections.
        lookup = {
            GET: (self._session.get, {'Content-type': 'application/json'}, 'params', identity),
            PATCH: (requests.patch, {'Content-type': 'application/json'},
                    'data', json_dumps),
            POST: (requests.post, {'Content-type': 'application/json'}, 'data',
                   json_dumps),
            PUT: (self._session.put, {'Content-type': 'application/json'}, 'data',
                  json_dumps),
            DELETE: (self._session.delete,
                     {'content-type': 'application/json'},
                     'data', json_dumps),
        }
        requests_method, headers, payload, transform = lookup[type]
        agent = "{0}/{1} {2}/{3}".format('python-digitalocean',
                                         __version__,
                                         requests.__name__,
                                         requests.__version__)
        headers.update({'User-Agent': agent})
        kwargs = {'headers': headers, payload: transform(params)}
        # Some requests do not require a body
        if params is False:
            del kwargs['data']
        timeout = self.get_timeout()
        if timeout:
            kwargs['timeout'] = timeout
        # remove token from log
        headers_str = str(headers)
        for i, token in enumerate(self.tokens):
            headers_str = headers_str.replace(token.strip(), 'TOKEN%s' % i)
        self._log.debug('%s %s %s:%s %s %s' %
                        (type, url, payload, params, headers_str, timeout))
        # Rotate through the configured tokens on HTTP 429 (rate limited);
        # give up (and return the 429 response) once every token was tried.
        first_tried_token = self._last_used
        while True:
            headers.update({'Authorization': 'Bearer ' + self.token})
            req = requests_method(url, **kwargs)
            if req.status_code == 429:
                self._last_used = (self._last_used + 1) % len(self.tokens)
                if self._last_used == first_tried_token:
                    # all tokens tried
                    break
                continue
            break
        return req

    def __deal_with_pagination(self, url, method, params, data):
        """
        Perform multiple calls in order to have a full list of elements
        when the API are "paginated". (content list is divided in more
        than one page)
        """
        all_data = data
        # Follow the "next" links until the last page is reached.
        while data.get("links", {}).get("pages", {}).get("next"):
            url, query = data["links"]["pages"]["next"].split("?", 1)
            # Merge the query parameters
            for key, value in urlparse.parse_qs(query).items():
                params[key] = value
            data = self.__perform_request(url, method, params).json()
            # Merge the dictionaries: list values are concatenated across
            # pages, everything else is overwritten by the latest page.
            for key, value in data.items():
                if isinstance(value, list) and key in all_data:
                    all_data[key] += value
                else:
                    all_data[key] = value
        return all_data

    def __init_ratelimit(self, headers):
        """Record the rate-limit headers of the last response."""
        # Add the account requests/hour limit
        self.ratelimit_limit = headers.get('Ratelimit-Limit', None)
        # Add the account requests remaining
        self.ratelimit_remaining = headers.get('Ratelimit-Remaining', None)
        # Add the account requests limit reset time
        self.ratelimit_reset = headers.get('Ratelimit-Reset', None)

    @property
    def token(self):
        """The API token currently selected from self.tokens (or "")."""
        # use all the tokens round-robin style, change on reaching Ratelimit
        if self.tokens:
            return self.tokens[self._last_used]
        return ""

    @token.setter
    def token(self, token):
        # Accept either a single token string or a list of tokens; the
        # rotation index is reset either way.
        self._last_used = 0
        if isinstance(token, list):
            self.tokens = token
        else:
            # for backward compatibility
            self.tokens = [token]

    def get_timeout(self):
        """
        Checks if any timeout for the requests to DigitalOcean is required.
        To set a timeout, use the REQUEST_TIMEOUT_ENV_VAR environment
        variable.

        Returns the timeout in seconds as a float, or None.
        """
        timeout_str = os.environ.get(REQUEST_TIMEOUT_ENV_VAR)
        if timeout_str:
            try:
                return float(timeout_str)
            # NOTE(review): bare except also swallows SystemExit and
            # KeyboardInterrupt; "except ValueError" would be safer.
            except:
                self._log.error('Failed parsing the request read timeout of '
                                '"%s". Please use a valid float number!' %
                                timeout_str)
        return None

    def get_data(self, url, type=GET, params=None):
        """
        This method is a basic implementation of __call_api that checks
        errors too. In case of success the method will return True or the
        content of the response to the request.

        Pagination is automatically detected and handled accordingly

        Raises:
            NotFoundError: on HTTP 404.
            JSONReadError: when the response body is not valid JSON.
            DataReadError: when the API reports an error for the request.
        """
        if params is None:
            params = dict()
        # If per_page is not set, make sure it has a sane default
        if type is GET:
            params.setdefault("per_page", 200)
        req = self.__perform_request(url, type, params)
        # 204 No Content signals success with an empty body.
        if req.status_code == 204:
            return True
        if req.status_code == 404:
            raise NotFoundError()
        if len(req.content) == 0:
            # Raise an error if the request failed and there is no response content
            req.raise_for_status()
        try:
            data = req.json()
        except ValueError as e:
            raise JSONReadError(
                'Read failed from DigitalOcean: %s' % str(e)
            )
        if not req.ok:
            # NOTE(review): [1] picks the "message" field; this assumes the
            # error body contains both "id" and "message" keys — confirm.
            msg = [data[m] for m in ("id", "message") if m in data][1]
            raise DataReadError(msg)
        # init request limits
        self.__init_ratelimit(req.headers)
        # If there are more elements available (total) than the elements per
        # page, try to deal with pagination. Note: Breaking the logic on
        # multiple pages,
        pages = data.get("links", {}).get("pages", {})
        if pages.get("next") and "page" not in params:
            return self.__deal_with_pagination(url, type, params, data)
        else:
            return data

    def __str__(self):
        return "<%s>" % self.__class__.__name__

    def __unicode__(self):
        # Python 2 compatibility shim.
        return u"%s" % self.__str__()

    def __repr__(self):
        return str(self)
| 8,831 | Python | .py | 219 | 29.812785 | 96 | 0.568089 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,294 | Droplet.py | koalalorenzo_python-digitalocean/digitalocean/Droplet.py | # -*- coding: utf-8 -*-
import re
from .Action import Action
from .Image import Image
from .Kernel import Kernel
from .baseapi import BaseAPI, Error, GET, POST, DELETE
from .SSHKey import SSHKey
from .Volume import Volume
class DropletError(Error):
    """Base exception class for this module"""


class BadKernelObject(DropletError):
    """Raised when an object that is not a valid Kernel is supplied."""


class BadSSHKeyFormat(DropletError):
    """Raised when an SSH key is supplied in an unsupported format."""
class Droplet(BaseAPI):
"""Droplet management
Attributes accepted at creation time:
Args:
name (str): name
size_slug (str): droplet size
image (str): image name to use to create droplet
region (str): region
ssh_keys (:obj:`str`, optional): list of ssh keys
backups (bool): True if backups enabled
ipv6 (bool): True if ipv6 enabled
private_networking (bool): True if private networking enabled
user_data (str): arbitrary data to pass to droplet
volumes (:obj:`str`, optional): list of blockstorage volumes
monitoring (bool): True if installing the DigitalOcean monitoring agent
vpc_uuid (str, optional): ID of a VPC in which the Droplet will be created
Attributes returned by API:
* id (int): droplet id
* memory (str): memory size
* vcpus (int): number of vcpus
* disk (int): disk size in GB
* locked (bool): True if locked
* created_at (str): creation date in format u'2014-11-06T10:42:09Z'
* status (str): status, e.g. 'new', 'active', etc
* networks (dict): details of connected networks
* kernel (dict): details of kernel
* backup_ids (:obj:`int`, optional): list of ids of backups of this droplet
* snapshot_ids (:obj:`int`, optional): list of ids of snapshots of this droplet
* action_ids (:obj:`int`, optional): list of ids of actions
* features (:obj:`str`, optional): list of enabled features. e.g.
[u'private_networking', u'virtio']
* image (dict): details of image used to create this droplet
* ip_address (str): public ip addresses
* private_ip_address (str): private ip address
* ip_v6_address (:obj:`str`, optional): list of ipv6 addresses assigned
* end_point (str): url of api endpoint used
* volume_ids (:obj:`str`, optional): list of blockstorage volumes
* vpc_uuid (str, optional): ID of the VPC that the Droplet is assigned to
"""
    def __init__(self, *args, **kwargs):
        """Initialise every known attribute to an empty default, then let
        BaseAPI.__init__ overwrite them with any values passed in kwargs."""
        # Defining default values
        self.id = None
        self.name = None
        self.memory = None
        self.vcpus = None
        self.disk = None
        self.region = []
        self.image = None
        self.size_slug = None
        self.locked = None
        self.created_at = None
        self.status = None
        self.networks = []
        self.kernel = None
        self.backup_ids = []
        self.snapshot_ids = []
        self.action_ids = []
        self.features = []
        self.ip_address = None
        self.private_ip_address = None
        self.ip_v6_address = None
        # Creation-time options (see the class docstring for meanings).
        self.ssh_keys = []
        self.backups = None
        self.ipv6 = None
        self.private_networking = None
        self.user_data = None
        self.volumes = []
        self.tags = []
        self.monitoring = None
        self.vpc_uuid = None
        # This will load also the values passed
        super(Droplet, self).__init__(*args, **kwargs)
@classmethod
def get_object(cls, api_token, droplet_id):
"""Class method that will return a Droplet object by ID.
Args:
api_token (str): token
droplet_id (int): droplet id
"""
droplet = cls(token=api_token, id=droplet_id)
droplet.load()
return droplet
@classmethod
def create_multiple(*args, **kwargs):
api = BaseAPI(token=kwargs.get("token"))
data = {
"names": kwargs.get("names"),
"size": kwargs.get("size_slug") or kwargs.get("size"),
"image": kwargs.get("image"),
"region": kwargs.get("region"),
"backups": bool(kwargs.get("backups")),
"ipv6": bool(kwargs.get("ipv6")),
"private_networking": bool(kwargs.get("private_networking")),
"tags": kwargs.get("tags"),
"monitoring": bool(kwargs.get("monitoring")),
"vpc_uuid": kwargs.get("vpc_uuid"),
}
if kwargs.get("ssh_keys"):
data["ssh_keys"] = Droplet.__get_ssh_keys_id_or_fingerprint(
kwargs["ssh_keys"], kwargs.get("token"),
kwargs["names"][0])
if kwargs.get("user_data"):
data["user_data"] = kwargs["user_data"]
droplets = []
data = api.get_data("droplets/", type=POST, params=data)
if data:
action_ids = [data["links"]["actions"][0]["id"]]
for droplet_json in data["droplets"]:
droplet_json["token"] = kwargs["token"]
droplet = Droplet(**droplet_json)
droplet.action_ids = action_ids
droplets.append(droplet)
return droplets
def __check_actions_in_data(self, data):
# reloading actions if actions is provided.
if u"actions" in data:
self.action_ids = []
for action in data[u'actions']:
self.action_ids.append(action[u'id'])
def get_data(self, *args, **kwargs):
"""
Customized version of get_data to perform __check_actions_in_data
"""
data = super(Droplet, self).get_data(*args, **kwargs)
if "type" in kwargs:
if kwargs["type"] == POST:
self.__check_actions_in_data(data)
return data
def load(self):
"""
Fetch data about droplet - use this instead of get_data()
"""
droplets = self.get_data("droplets/%s" % self.id)
droplet = droplets['droplet']
for attr in droplet.keys():
setattr(self, attr, droplet[attr])
for net in self.networks['v4']:
if net['type'] == 'private':
self.private_ip_address = net['ip_address']
if net['type'] == 'public':
self.ip_address = net['ip_address']
if self.networks['v6']:
self.ip_v6_address = self.networks['v6'][0]['ip_address']
if "backups" in self.features:
self.backups = True
else:
self.backups = False
if "ipv6" in self.features:
self.ipv6 = True
else:
self.ipv6 = False
if "private_networking" in self.features:
self.private_networking = True
else:
self.private_networking = False
if "tags" in droplets:
self.tags = droplets["tags"]
return self
    def _perform_action(self, params, return_dict=True):
        """
        Perform a droplet action.

        Args:
            params (dict): parameters of the action

        Optional Args:
            return_dict (bool): Return a dict when True (default),
                otherwise return an Action.

        Returns dict or Action
        """
        # POSTing through the overridden get_data() also refreshes
        # self.action_ids as a side effect.
        action = self.get_data(
            "droplets/%s/actions/" % self.id,
            type=POST,
            params=params
        )
        if return_dict:
            return action
        else:
            # Unwrap the payload and copy its fields onto an Action object.
            action = action[u'action']
            return_action = Action(token=self.tokens)
            # Loading attributes
            for attr in action.keys():
                setattr(return_action, attr, action[attr])
            return return_action
def power_on(self, return_dict=True):
"""
Boot up the droplet
Optional Args:
return_dict (bool): Return a dict when True (default),
otherwise return an Action.
Returns dict or Action
"""
return self._perform_action({'type': 'power_on'}, return_dict)
def shutdown(self, return_dict=True):
"""
shutdown the droplet
Optional Args:
return_dict (bool): Return a dict when True (default),
otherwise return an Action.
Returns dict or Action
"""
return self._perform_action({'type': 'shutdown'}, return_dict)
def reboot(self, return_dict=True):
"""
restart the droplet
Optional Args:
return_dict (bool): Return a dict when True (default),
otherwise return an Action.
Returns dict or Action
"""
return self._perform_action({'type': 'reboot'}, return_dict)
def power_cycle(self, return_dict=True):
"""
restart the droplet
Optional Args:
return_dict (bool): Return a dict when True (default),
otherwise return an Action.
Returns dict or Action
"""
return self._perform_action({'type': 'power_cycle'}, return_dict)
def power_off(self, return_dict=True):
"""
restart the droplet
Optional Args:
return_dict (bool): Return a dict when True (default),
otherwise return an Action.
Returns dict or Action
"""
return self._perform_action({'type': 'power_off'}, return_dict)
def reset_root_password(self, return_dict=True):
"""
reset the root password
Optional Args:
return_dict (bool): Return a dict when True (default),
otherwise return an Action.
Returns dict or Action
"""
return self._perform_action({'type': 'password_reset'}, return_dict)
def resize(self, new_size_slug, return_dict=True, disk=True):
"""Resize the droplet to a new size slug.
https://developers.digitalocean.com/documentation/v2/#resize-a-droplet
Args:
new_size_slug (str): name of new size
Optional Args:
return_dict (bool): Return a dict when True (default), \
otherwise return an Action.
disk (bool): If a permanent resize, with disk changes included.
Returns dict or Action
"""
options = {"type": "resize", "size": new_size_slug}
if disk: options["disk"] = "true"
return self._perform_action(options, return_dict)
def take_snapshot(self, snapshot_name, return_dict=True, power_off=False):
"""Take a snapshot!
Args:
snapshot_name (str): name of snapshot
Optional Args:
return_dict (bool): Return a dict when True (default),
otherwise return an Action.
power_off (bool): Before taking the snapshot the droplet will be
turned off with another API call. It will wait until the
droplet will be powered off.
Returns dict or Action
"""
if power_off is True and self.status != "off":
action = self.power_off(return_dict=False)
action.wait()
self.load()
return self._perform_action(
{"type": "snapshot", "name": snapshot_name},
return_dict
)
def restore(self, image_id, return_dict=True):
"""Restore the droplet to an image ( snapshot or backup )
Args:
image_id (int): id of image
Optional Args:
return_dict (bool): Return a dict when True (default),
otherwise return an Action.
Returns dict or Action
"""
return self._perform_action(
{"type": "restore", "image": image_id},
return_dict
)
def rebuild(self, image_id=None, return_dict=True):
"""Restore the droplet to an image ( snapshot or backup )
Args:
image_id (int): id of image
Optional Args:
return_dict (bool): Return a dict when True (default),
otherwise return an Action.
Returns dict or Action
"""
if not image_id:
image_id = self.image['id']
return self._perform_action(
{"type": "rebuild", "image": image_id},
return_dict
)
def enable_backups(self, return_dict=True):
"""
Enable automatic backups
Optional Args:
return_dict (bool): Return a dict when True (default),
otherwise return an Action.
Returns dict or Action
"""
return self._perform_action({'type': 'enable_backups'}, return_dict)
def disable_backups(self, return_dict=True):
"""
Disable automatic backups
Optional Args:
return_dict (bool): Return a dict when True (default),
otherwise return an Action.
Returns dict or Action
"""
return self._perform_action({'type': 'disable_backups'}, return_dict)
def destroy(self):
"""
Destroy the droplet
Returns dict
"""
return self.get_data("droplets/%s" % self.id, type=DELETE)
def rename(self, name, return_dict=True):
"""Rename the droplet
Args:
name (str): new name
Optional Args:
return_dict (bool): Return a dict when True (default),
otherwise return an Action.
Returns dict or Action
"""
return self._perform_action(
{'type': 'rename', 'name': name},
return_dict
)
def enable_private_networking(self, return_dict=True):
"""
Enable private networking on an existing Droplet where available.
Optional Args:
return_dict (bool): Return a dict when True (default),
otherwise return an Action.
Returns dict or Action
"""
return self._perform_action(
{'type': 'enable_private_networking'},
return_dict
)
def enable_ipv6(self, return_dict=True):
"""
Enable IPv6 on an existing Droplet where available.
Optional Args:
return_dict (bool): Return a dict when True (default),
otherwise return an Action.
Returns dict or Action
"""
return self._perform_action({'type': 'enable_ipv6'}, return_dict)
def change_kernel(self, kernel, return_dict=True):
"""Change the kernel to a new one
Args:
kernel : instance of digitalocean.Kernel.Kernel
Optional Args:
return_dict (bool): Return a dict when True (default),
otherwise return an Action.
Returns dict or Action
"""
if type(kernel) != Kernel:
raise BadKernelObject("Use Kernel object")
return self._perform_action(
{'type': 'change_kernel', 'kernel': kernel.id},
return_dict
)
    @staticmethod
    def __get_ssh_keys_id_or_fingerprint(ssh_keys, token, name):
        """
        Check and return a list of SSH key IDs or fingerprints according
        to DigitalOcean's API. This method is used to check and create a
        droplet with the correct SSH keys.
        """
        ssh_keys_id = list()
        for ssh_key in ssh_keys:
            # int covers plain ids; type(2 ** 64) is `long` on Python 2
            # and still `int` on Python 3.
            if type(ssh_key) in [int, type(2 ** 64)]:
                ssh_keys_id.append(int(ssh_key))
            elif type(ssh_key) == SSHKey:
                ssh_keys_id.append(ssh_key.id)
            elif type(ssh_key) in [type(u''), type('')]:
                # ssh_key could either be a fingerprint or a public key
                #
                # type(u'') and type('') is the same in python 3 but
                # different in 2. See:
                # https://github.com/koalalorenzo/python-digitalocean/issues/80
                regexp_of_fingerprint = '([0-9a-fA-F]{2}:){15}[0-9a-fA-F]'
                match = re.match(regexp_of_fingerprint, ssh_key)
                # NOTE(review): the regex consumes only the first hex digit
                # of the 16th pair, so for a well-formed 47-char MD5
                # fingerprint match.end() == 46 == len - 1. Confirm that
                # equality was intended before tightening the pattern.
                if match is not None and match.end() == len(ssh_key) - 1:
                    ssh_keys_id.append(ssh_key)
                else:
                    # Not a fingerprint: treat the string as a public key.
                    # Reuse an already-uploaded key, or upload it now.
                    key = SSHKey()
                    key.token = token
                    results = key.load_by_pub_key(ssh_key)
                    if results is None:
                        key.public_key = ssh_key
                        key.name = "SSH Key %s" % name
                        key.create()
                    else:
                        key = results
                    ssh_keys_id.append(key.id)
            else:
                raise BadSSHKeyFormat(
                    "Droplet.ssh_keys should be a list of IDs, public keys"
                    + " or fingerprints."
                )
        return ssh_keys_id
def create(self, *args, **kwargs):
"""
Create the droplet with object properties.
Note: Every argument and parameter given to this method will be
assigned to the object.
"""
for attr in kwargs.keys():
setattr(self, attr, kwargs[attr])
# Provide backwards compatibility
if not self.size_slug and self.size:
self.size_slug = self.size
ssh_keys_id = Droplet.__get_ssh_keys_id_or_fingerprint(self.ssh_keys,
self.token,
self.name)
data = {
"name": self.name,
"size": self.size_slug,
"image": self.image,
"region": self.region,
"ssh_keys": ssh_keys_id,
"backups": bool(self.backups),
"ipv6": bool(self.ipv6),
"private_networking": bool(self.private_networking),
"volumes": self.volumes,
"tags": self.tags,
"monitoring": bool(self.monitoring),
"vpc_uuid": self.vpc_uuid,
}
if self.user_data:
data["user_data"] = self.user_data
data = self.get_data("droplets/", type=POST, params=data)
if data:
self.id = data['droplet']['id']
action_id = data['links']['actions'][0]['id']
self.action_ids = []
self.action_ids.append(action_id)
def get_events(self):
"""
A helper function for backwards compatibility.
Calls get_actions()
"""
return self.get_actions()
def get_actions(self):
"""
Returns a list of Action objects
This actions can be used to check the droplet's status
"""
answer = self.get_data("droplets/%s/actions/" % self.id, type=GET)
actions = []
for action_dict in answer['actions']:
action = Action(**action_dict)
action.token = self.tokens
action.droplet_id = self.id
action.load()
actions.append(action)
return actions
def get_action(self, action_id):
"""Returns a specific Action by its ID.
Args:
action_id (int): id of action
"""
return Action.get_object(
api_token=self.tokens,
action_id=action_id
)
def get_snapshots(self):
"""
This method will return the snapshots/images connected to that
specific droplet.
"""
snapshots = list()
for id in self.snapshot_ids:
snapshot = Image()
snapshot.id = id
snapshot.token = self.tokens
snapshots.append(snapshot)
return snapshots
    def get_kernel_available(self):
        """
        Get a list of kernels available

        Follows the API's pagination links, returning Kernel objects for
        every page.
        """
        kernels = list()
        data = self.get_data("droplets/%s/kernels/" % self.id)
        while True:
            for jsond in data[u'kernels']:
                kernel = Kernel(**jsond)
                kernel.token = self.tokens
                kernels.append(kernel)
            try:
                # Follow the "next" pagination link until it disappears.
                url = data[u'links'][u'pages'].get(u'next')
                if not url:
                    break
                data = self.get_data(url)
            except KeyError: # No links.
                break
        return kernels
def update_volumes_data(self):
"""
Trigger volume objects list refresh.
When called on a droplet instance, it will take
all volumes ids(gathered in initial droplet details
collection) and will create list of object of Volume
types. Each volume is a separate api call.
"""
self.volumes = list()
for volume_id in self.volume_ids:
volume = Volume().get_object(self.tokens, volume_id)
self.volumes.append(volume)
def __str__(self):
return "<Droplet: %s %s>" % (self.id, self.name)
# ==== digitalocean/Firewall.py ====
# -*- coding: utf-8 -*-
from .baseapi import BaseAPI, POST, DELETE, PUT
import jsonpickle
class _targets(object):
    """
    Internal base shared by `Sources` and `Destinations`.
    Not for direct use by end users.
    """
    def __init__(self, **kwargs):
        self.addresses = []
        self.droplet_ids = []
        self.load_balancer_uids = []
        self.tags = []
        for attr, value in kwargs.items():
            setattr(self, attr, value)
class Sources(_targets):
    """
    Holds an InboundRule's traffic sources.

    Args:
        addresses (obj:`list`): IPv4/IPv6 addresses and CIDRs for which
            the Firewall will allow traffic.
        droplet_ids (obj:`list`): IDs of Droplets for which the Firewall
            will allow traffic.
        load_balancer_uids (obj:`list`): IDs of Load Balancers for which
            the Firewall will allow traffic.
        tags (obj:`list`): Names of Tags whose Droplet groups the Firewall
            will allow traffic for.
    """
class Destinations(_targets):
    """
    Holds an OutboundRule's traffic destinations.

    Args:
        addresses (obj:`list`): IPv4/IPv6 addresses and CIDRs to which
            the Firewall will allow traffic.
        droplet_ids (obj:`list`): IDs of Droplets to which the Firewall
            will allow traffic.
        load_balancer_uids (obj:`list`): IDs of Load Balancers to which
            the Firewall will allow traffic.
        tags (obj:`list`): Names of Tags whose Droplet groups the Firewall
            will allow traffic to.
    """
class InboundRule(object):
    """
    An object holding information about a Firewall's inbound rule.

    Args:
        protocol (str): The type of traffic to be allowed. This may be one
            of "tcp", "udp", or "icmp".
        ports (str): The ports on which traffic will be allowed specified as a
            string containing a single port, a range (e.g. "8000-9000"), or
            "all" to open all ports for a protocol.
        sources (obj): A `Sources` object, or a dict of keyword arguments
            used to build one. An empty value leaves the attribute unset,
            matching prior behavior.
    """
    def __init__(self, protocol="", ports="", sources=""):
        self.protocol = protocol
        self.ports = ports
        if isinstance(sources, Sources):
            self.sources = sources
        elif sources:
            # Bug fix: the old code looped over ``sources`` and rebuilt the
            # identical Sources(**sources) object once per key; build it
            # once. Empty input still leaves ``self.sources`` unset.
            self.sources = Sources(**sources)
class OutboundRule(object):
    """
    An object holding information about a Firewall's outbound rule.

    Args:
        protocol (str): The type of traffic to be allowed. This may be one
            of "tcp", "udp", or "icmp".
        ports (str): The ports on which traffic will be allowed specified as a
            string containing a single port, a range (e.g. "8000-9000"), or
            "all" to open all ports for a protocol.
        destinations (obj): A `Destinations` object, or a dict of keyword
            arguments used to build one. An empty value leaves the
            attribute unset, matching prior behavior.
    """
    def __init__(self, protocol="", ports="", destinations=""):
        self.protocol = protocol
        self.ports = ports
        if isinstance(destinations, Destinations):
            self.destinations = destinations
        elif destinations:
            # Bug fix: the old code looped over ``destinations`` and
            # rebuilt the identical Destinations(**destinations) object
            # once per key; build it once.
            self.destinations = Destinations(**destinations)
class Firewall(BaseAPI):
"""
An object representing an DigitalOcean Firewall.
Attributes accepted at creation time:
Args:
name (str): The Firewall's name.
droplet_ids (obj:`list` of `int`): A list of Droplet IDs to be assigned
to the Firewall.
tags (obj:`list` of `str`): A list Tag names to be assigned to the
Firewall.
inbound_rules (obj:`list`): A list of `InboundRules` objects
outbound_rules (obj:`list`): A list of `OutboundRules` objects
Attributes returned by API:
id (str): A UUID to identify and reference a Firewall.
status (str): A status string indicating the current state of the
Firewall. This can be "waiting", "succeeded", or "failed".
created_at (str): The time at which the Firewall was created.
name (str): The Firewall's name.
pending_changes (obj:`list`): Details exactly which Droplets are having
their security policies updated.
droplet_ids (obj:`list` of `int`): A list of Droplet IDs to be assigned
to the Firewall.
tags (obj:`list` of `str`): A list Tag names to be assigned to the
Firewall.
inbound_rules (obj:`list`): A list of `InboundRules` objects
outbound_rules (obj:`list`): A list of `OutboundRules` objects
"""
def __init__(self, *args, **kwargs):
self.id = None
self.status = None
self.created_at = None
self.pending_changes = []
self.name = None
self.inbound_rules = []
self.outbound_rules = []
self.droplet_ids = None
self.tags = None
super(Firewall, self).__init__(*args, **kwargs)
@classmethod
def get_object(cls, api_token, firewall_id):
"""
Class method that will return a Firewall object by ID.
"""
firewall = cls(token=api_token, id=firewall_id)
firewall.load()
return firewall
def _set_firewall_attributes(self, data):
self.id = data['firewall']['id']
self.name = data['firewall']['name']
self.status = data['firewall']['status']
self.created_at = data['firewall']['created_at']
self.pending_changes = data['firewall']['pending_changes']
self.droplet_ids = data['firewall']['droplet_ids']
self.tags = data['firewall']['tags']
in_rules = list()
for rule in data['firewall']['inbound_rules']:
in_rules.append(InboundRule(**rule))
self.inbound_rules = in_rules
out_rules = list()
for rule in data['firewall']['outbound_rules']:
out_rules.append(OutboundRule(**rule))
self.outbound_rules = out_rules
def load(self):
data = self.get_data("firewalls/%s" % self.id)
if data:
self._set_firewall_attributes(data)
return self
    def create(self, *args, **kwargs):
        """Create this Firewall from its current attributes.

        The rule objects are round-tripped through jsonpickle so that the
        nested InboundRule/OutboundRule instances become plain dicts the
        JSON payload can carry.
        """
        inbound = jsonpickle.encode(self.inbound_rules, unpicklable=False)
        outbound = jsonpickle.encode(self.outbound_rules, unpicklable=False)
        params = {'name': self.name,
                  'droplet_ids': self.droplet_ids,
                  'inbound_rules': jsonpickle.decode(inbound),
                  'outbound_rules': jsonpickle.decode(outbound),
                  'tags': self.tags}
        data = self.get_data('firewalls/', type=POST, params=params)
        if data:
            # Adopt the server-assigned id/status/timestamps.
            self._set_firewall_attributes(data)
        return self
def add_inbound(self, inbound_rule):
"""
Add inbound rule to this Firewall.
"""
if type(inbound_rule) is not list:
inbound_rule = [inbound_rule]
inbound = jsonpickle.encode(inbound_rule, unpicklable=False)
return self.get_data(
"firewalls/%s/rules" % self.id,
type=POST,
params={"inbound_rules": jsonpickle.decode(inbound)}
)
def add_outbound(self, outbound_rule):
"""
Add outbound rule to this Firewall.
"""
if type(outbound_rule) is not list:
outbound_rule = [outbound_rule]
outbound = jsonpickle.encode(outbound_rule, unpicklable=False)
return self.get_data(
"firewalls/%s/rules" % self.id,
type=POST,
params={"outbound_rules": jsonpickle.decode(outbound)}
)
def remove_inbound(self, inbound_rule):
"""
Remove inbound rule from this Firewall.
"""
if type(inbound_rule) is not list:
inbound_rule = [inbound_rule]
inbound = jsonpickle.encode(inbound_rule, unpicklable=False)
return self.get_data(
"firewalls/%s/rules" % self.id,
type=DELETE,
params={"inbound_rules": jsonpickle.decode(inbound)}
)
def remove_outbound(self, outbound_rule):
"""
Remove outbound rule from this Firewall.
"""
if type(outbound_rule) is not list:
outbound_rule = [outbound_rule]
outbound = jsonpickle.encode(outbound_rule, unpicklable=False)
return self.get_data(
"firewalls/%s/rules" % self.id,
type=DELETE,
params={"outbound_rules": jsonpickle.decode(outbound)}
)
def add_droplets(self, droplet_ids):
"""
Add droplets to this Firewall.
"""
return self.get_data(
"firewalls/%s/droplets" % self.id,
type=POST,
params={"droplet_ids": droplet_ids}
)
def remove_droplets(self, droplet_ids):
"""
Remove droplets from this Firewall.
"""
return self.get_data(
"firewalls/%s/droplets" % self.id,
type=DELETE,
params={"droplet_ids": droplet_ids}
)
def add_tags(self, tags):
"""
Add tags to this Firewall.
"""
return self.get_data(
"firewalls/%s/tags" % self.id,
type=POST,
params={"tags": tags}
)
def remove_tags(self, tags):
"""
Remove tags from this Firewall.
"""
return self.get_data(
"firewalls/%s/tags" % self.id,
type=DELETE,
params={"tags": tags}
)
# TODO: Other Firewall calls (Add/Remove rules, Create / Delete etc)
def destroy(self):
"""
Destroy the Firewall
"""
return self.get_data("firewalls/%s/" % self.id, type=DELETE)
def __str__(self):
return "<Firewall: %s %s>" % (self.id, self.name)
# ==== digitalocean/CDNEndpoint.py ====
# -*- coding: utf-8 -*-
from .baseapi import BaseAPI, POST, DELETE, PUT
class CDNEndpoint(BaseAPI):
    """
    An object representing a DigitalOcean CDN Endpoint.

    Args:
        origin (str): The fully qualified domain name (FQDN) for the
           origin server which provides the content for the CDN.
           This is currently restricted to a Space.
        ttl (int): The amount of time the content is cached by the
           CDN's edge servers in seconds. TTL must be one of
           60, 600, 3600, 86400, or 604800.
           Defaults to 3600 (one hour) when excluded.
        certificate_id (str): The ID of a DigitalOcean managed TLS
           certificate used for SSL when a custom subdomain is provided.
        custom_domain (str): The fully qualified domain name (FQDN) of the
           custom subdomain used with the CDN endpoint.
    """
    def __init__(self, *args, **kwargs):
        # API-assigned fields plus the creation-time options above.
        self.id = None
        self.origin = None
        self.endpoint = None
        self.created_at = None
        self.certificate_id = None
        self.custom_domain = None
        self.ttl = None

        super(CDNEndpoint, self).__init__(*args, **kwargs)

    @classmethod
    def get_object(cls, api_token, cdn_endpoint_id):
        """Class method that will return a CDN Endpoint object by ID.

        Args:
            api_token (str): token
            cdn_endpoint_id (int): CDN Endpoint id
        """
        cdn_endpoint = cls(token=api_token, id=cdn_endpoint_id)
        cdn_endpoint.load()
        return cdn_endpoint

    def load(self):
        """
        Fetch data about CDN Endpoints - use this instead of get_data()
        """
        cdn_endpoints = self.get_data("cdn/endpoints/%s" % self.id)
        cdn_endpoint = cdn_endpoints['endpoint']
        for attr in cdn_endpoint.keys():
            setattr(self, attr, cdn_endpoint[attr])
        return self

    def create(self, **kwargs):
        """
        Create the CDN Endpoint.
        """
        for attr in kwargs.keys():
            setattr(self, attr, kwargs[attr])
        params = {
            'origin': self.origin,
            'ttl': self.ttl or 3600,
            # Bug fix: this key was misspelled 'certificated_id', so the
            # certificate was never sent to the API.
            'certificate_id': self.certificate_id,
            'custom_domain': self.custom_domain
        }
        output = self.get_data("cdn/endpoints", type=POST, params=params)
        if output:
            self.id = output['endpoint']['id']
            self.created_at = output['endpoint']['created_at']
            self.endpoint = output['endpoint']['endpoint']

    def delete(self):
        """
        Delete the CDN Endpoint.
        """
        return self.get_data("cdn/endpoints/%s" % self.id, type=DELETE, params=False)

    def save(self):
        """
        Save (update) the existing CDN Endpoint's mutable fields.
        """
        data = {
            'ttl': self.ttl,
            'certificate_id': self.certificate_id,
            'custom_domain': self.custom_domain,
        }
        return self.get_data(
            "cdn/endpoints/%s" % self.id,
            type=PUT,
            params=data
        )

    def __str__(self):
        return "<CDNEndpoints: %s %s>" % (self.id, self.origin)
# ==== digitalocean/tests/test_manager.py ====
import json
import unittest
import responses
import digitalocean
from .BaseTest import BaseTest
class TestManager(BaseTest):
    def setUp(self):
        """Create a Manager and an Image fixture before each test."""
        super(TestManager, self).setUp()
        self.manager = digitalocean.Manager(token=self.token)
        self.image = digitalocean.Image(
            id=449676856, slug='testslug', token=self.token
        )
    @responses.activate
    def test_get_account(self):
        """Account details are fetched and mapped onto an Account object."""
        data = self.load_from_file('account/account.json')
        url = self.base_url + 'account/'
        responses.add(responses.GET, url,
                      body=data,
                      status=200,
                      content_type='application/json')
        acct = self.manager.get_account()
        self.assert_get_url_equal(responses.calls[0].request.url, url)
        self.assertEqual(acct.token, self.token)
        self.assertEqual(acct.email, 'web@digitalocean.com')
        self.assertEqual(acct.droplet_limit, 25)
        self.assertEqual(acct.email_verified, True)
        self.assertEqual(acct.status, "active")
@responses.activate
def test_get_balance(self):
data = self.load_from_file('balance/balance.json')
url = self.base_url + 'customers/my/balance'
responses.add(responses.GET, url,
body=data,
status=200,
content_type='application/json')
balance = self.manager.get_balance()
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(balance.token, balance.token)
self.assertEqual(balance.month_to_date_balance, '23.44')
self.assertEqual(balance.account_balance, '12.23')
self.assertEqual(balance.month_to_date_usage, '11.21')
self.assertEqual(balance.generated_at, '2019-07-09T15:01:12Z')
    @responses.activate
    def test_auth_fail(self):
        """A 401 response surfaces the API's error message as an exception."""
        data = self.load_from_file('errors/unauthorized.json')
        url = self.base_url + 'regions/'
        responses.add(responses.GET, url,
                      body=data,
                      status=401,
                      content_type='application/json')
        bad_token = digitalocean.Manager(token='thisisnotagoodtoken')
        with self.assertRaises(Exception) as error:
            bad_token.get_all_regions()
        exception = error.exception
        self.assertEqual(str(exception), 'Unable to authenticate you.')
    @responses.activate
    def test_droplets(self):
        """get_all_droplets() maps the fixture payload onto Droplet objects."""
        data = self.load_from_file('droplets/all.json')
        url = self.base_url + 'droplets/'
        responses.add(responses.GET, url,
                      body=data,
                      status=200,
                      content_type='application/json')
        droplets = self.manager.get_all_droplets()
        droplet = droplets[0]
        self.assertEqual(droplet.token, self.token)
        self.assertEqual(droplet.id, 3164444)
        self.assertEqual(droplet.name, "example.com")
        self.assertEqual(droplet.memory, 512)
        self.assertEqual(droplet.vcpus, 1)
        self.assertEqual(droplet.disk, 20)
        self.assertEqual(droplet.backups, True)
        self.assertEqual(droplet.ipv6, True)
        self.assertEqual(droplet.private_networking, False)
        self.assertEqual(droplet.region['slug'], "nyc3")
        self.assertEqual(droplet.status, "active")
        self.assertEqual(droplet.image['slug'], "ubuntu-14-04-x64")
        self.assertEqual(droplet.size_slug, '512mb')
        self.assertEqual(droplet.created_at, "2014-11-14T16:29:21Z")
        self.assertEqual(droplet.ip_address, "104.236.32.182")
        self.assertEqual(droplet.ip_v6_address,
                         "2604:A880:0800:0010:0000:0000:02DD:4001")
        self.assertEqual(droplet.kernel['id'], 2233)
        self.assertEqual(droplet.backup_ids, [7938002])
        self.assertEqual(droplet.features, ["backups",
                                            "ipv6",
                                            "virtio"])
    @responses.activate
    def test_get_droplets_by_tag(self):
        """Filtering by tag returns only droplets carrying that tag."""
        data = self.load_from_file('droplets/bytag.json')
        url = self.base_url + "droplets"
        responses.add(responses.GET,
                      url + "/",
                      body=data,
                      status=200,
                      content_type='application/json')
        # The next pages don"t use trailing slashes. Return an empty result
        # to prevent an infinite loop
        responses.add(responses.GET,
                      url,
                      body="{}",
                      status=200,
                      content_type="application/json")
        manager = digitalocean.Manager(token=self.token)
        droplets = manager.get_all_droplets(tag_name="awesome")
        droplet = droplets[0]
        self.assertEqual(droplet.token, self.token)
        self.assertEqual(droplet.id, 3164444)
        self.assertEqual(droplet.name, "example.com")
        self.assertEqual(droplet.memory, 512)
        self.assertEqual(droplet.vcpus, 1)
        self.assertEqual(droplet.disk, 20)
        self.assertEqual(droplet.backups, True)
        self.assertEqual(droplet.ipv6, True)
        self.assertEqual(droplet.private_networking, False)
        self.assertEqual(droplet.region['slug'], "nyc3")
        self.assertEqual(droplet.status, "active")
        self.assertEqual(droplet.image['slug'], "ubuntu-14-04-x64")
        self.assertEqual(droplet.size_slug, '512mb')
        self.assertEqual(droplet.created_at, "2014-11-14T16:29:21Z")
        self.assertEqual(droplet.ip_address, "104.236.32.182")
        self.assertEqual(droplet.ip_v6_address,
                         "2604:A880:0800:0010:0000:0000:02DD:4001")
        self.assertEqual(droplet.kernel['id'], 2233)
        self.assertEqual(droplet.backup_ids, [7938002])
        self.assertEqual(droplet.features, ["backups",
                                            "ipv6",
                                            "virtio"])
        self.assertEqual(droplet.tags, ["awesome"])
    @responses.activate
    def test_get_all_regions(self):
        """Region listing is mapped onto Region objects with all fields."""
        data = self.load_from_file('regions/all.json')
        url = self.base_url + 'regions/'
        responses.add(responses.GET, url,
                      body=data,
                      status=200,
                      content_type='application/json')
        all_regions = self.manager.get_all_regions()
        self.assertEqual(len(all_regions), 3)
        region = all_regions[0]
        self.assertEqual(region.token, self.token)
        self.assertEqual(region.name, 'New York')
        self.assertEqual(region.slug, 'nyc1')
        self.assertEqual(region.sizes, ["1gb", "512mb"])
        self.assertEqual(region.features, ['virtio',
                                           'private_networking',
                                           'backups',
                                           'ipv6'])
    @responses.activate
    def test_get_all_sizes(self):
        """Size listing is mapped onto Size objects with all fields."""
        data = self.load_from_file('sizes/all.json')
        url = self.base_url + 'sizes/'
        responses.add(responses.GET, url,
                      body=data,
                      status=200,
                      content_type='application/json')
        all_sizes = self.manager.get_all_sizes()
        self.assertEqual(len(all_sizes), 2)
        size = all_sizes[0]
        self.assertEqual(size.token, self.token)
        self.assertEqual(size.slug, '512mb')
        self.assertEqual(size.memory, 512)
        self.assertEqual(size.disk, 20)
        self.assertEqual(size.price_hourly, 0.00744)
        self.assertEqual(size.price_monthly, 5.0)
        self.assertEqual(size.transfer, 1)
        self.assertEqual(size.regions, ["nyc1", "ams1", "sfo1"])
    @responses.activate
    def test_get_image(self):
        """Test get image by id."""
        data = self.load_from_file('images/single.json')
        url = "{}images/{}".format(self.base_url, self.image.id)
        responses.add(responses.GET,
                      url,
                      body=data,
                      status=200,
                      content_type='application/json')
        self.image.load()
        self.assert_get_url_equal(responses.calls[0].request.url, url)
        self.assertEqual(self.image.id, 449676856)
        self.assertEqual(self.image.slug, 'testslug')
        self.assertEqual(self.image.name, 'My Snapshot')
    @responses.activate
    def test_get_image_by_slug(self):
        """Test get image by slug."""
        data = self.load_from_file('images/single.json')
        url = "{}images/{}".format(self.base_url, self.image.slug)
        responses.add(responses.GET,
                      url,
                      body=data,
                      status=200,
                      content_type='application/json')
        self.image.load(use_slug=True)
        self.assert_get_url_equal(responses.calls[0].request.url, url)
        self.assertEqual(self.image.id, 449676856)
        self.assertEqual(self.image.slug, 'testslug')
        self.assertEqual(self.image.name, 'My Snapshot')
    @responses.activate
    def test_get_all_images(self):
        """get_all_images() returns every image in the fixture."""
        data = self.load_from_file('images/all.json')
        url = self.base_url + 'images/'
        responses.add(responses.GET, url,
                      body=data,
                      status=200,
                      content_type='application/json')
        all_images = self.manager.get_all_images()
        self.assertEqual(len(all_images), 3)
        image = all_images[0]
        self.assertEqual(image.token, self.token)
        self.assertEqual(image.id, 119192817)
        self.assertEqual(image.name, '14.04 x64')
        self.assertTrue(image.public)
        self.assertEqual(image.slug, "ubuntu-14-04-x64")
        self.assertEqual(image.distribution, 'Ubuntu')
        self.assertEqual(image.regions, ['nyc1'])
        self.assertEqual(image.created_at, "2014-07-29T14:35:40Z")
@responses.activate
def test_get_global_images(self):
data = self.load_from_file('images/all.json')
url = self.base_url + 'images/'
responses.add(responses.GET, url,
body=data,
status=200,
content_type='application/json')
global_images = self.manager.get_global_images()
self.assertEqual(len(global_images), 2)
image = global_images[0]
self.assertEqual(image.token, self.token)
self.assertEqual(image.id, 119192817)
self.assertEqual(image.name, '14.04 x64')
self.assertTrue(image.public)
self.assertEqual(image.slug, "ubuntu-14-04-x64")
self.assertEqual(image.distribution, 'Ubuntu')
self.assertEqual(image.regions, ['nyc1'])
self.assertEqual(image.created_at, "2014-07-29T14:35:40Z")
@responses.activate
def test_get_my_images(self):
data = self.load_from_file('images/private.json')
url = self.base_url + 'images/'
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
my_images = self.manager.get_my_images()
self.assertEqual(len(my_images), 1)
image = my_images[0]
self.assertEqual(image.token, self.token)
self.assertEqual(image.id, 449676856)
self.assertEqual(image.name, 'My Snapshot')
self.assertFalse(image.public)
self.assertEqual(image.slug, "")
self.assertEqual(image.distribution, 'Ubuntu')
self.assertEqual(image.regions, ['nyc1', 'nyc3'])
self.assertEqual(image.created_at, "2014-08-18T16:35:40Z")
self.assert_url_query_equal(
responses.calls[0].request.url,
'https://api.digitalocean.com/v2/images/?private=true&per_page=200'
)
@responses.activate
def test_get_distro_images(self):
data = self.load_from_file('images/distro.json')
url = self.base_url + 'images/'
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
distro_images = self.manager.get_distro_images()
self.assertEqual(len(distro_images), 2)
image = distro_images[0]
self.assertEqual(image.token, self.token)
self.assertEqual(image.id, 119192817)
self.assertEqual(image.name, '14.04 x64')
self.assertTrue(image.public)
self.assertEqual(image.slug, "ubuntu-14-04-x64")
self.assertEqual(image.distribution, 'Ubuntu')
self.assert_url_query_equal(responses.calls[0].request.url,
'https://api.digitalocean.com/v2/images/?type=distribution&per_page=200')
@responses.activate
def test_get_app_images(self):
data = self.load_from_file('images/app.json')
url = self.base_url + 'images/'
responses.add(responses.GET, url,
body=data,
status=200,
content_type='application/json')
app_images = self.manager.get_app_images()
self.assertEqual(len(app_images), 2)
image = app_images[0]
self.assertEqual(image.token, self.token)
self.assertEqual(image.id, 11146864)
self.assertEqual(image.name, 'MEAN on 14.04')
self.assertTrue(image.public)
self.assertEqual(image.slug, "mean")
self.assertEqual(image.distribution, 'Ubuntu')
self.assert_url_query_equal(responses.calls[0].request.url,
'https://api.digitalocean.com/v2/images/?type=application&per_page=200')
@responses.activate
def test_get_all_sshkeys(self):
data = self.load_from_file('keys/all.json')
url = self.base_url + 'account/keys/'
responses.add(responses.GET, url,
body=data,
status=200,
content_type='application/json')
ssh_keys = self.manager.get_all_sshkeys()
self.assertEqual(len(ssh_keys), 1)
# Test the few things we can assume about a random ssh key.
key = ssh_keys[0]
self.assertEqual(key.token, self.token)
self.assertEqual(key.name, "Example Key")
self.assertEqual(key.id, 1)
self.assertEqual(key.public_key,
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAQQDGk5V68BJ4P3Ereh779Vi/Ft2qs/rbXrcjKLGo6zsyeyFUE0svJUpRDEJvFSf8RlezKx1/1ulJu9+kZsxRiUKn example")
self.assertEqual(key.fingerprint,
"f5:d1:78:ed:28:72:5f:e1:ac:94:fd:1f:e0:a3:48:6d")
@responses.activate
def test_post_new_ssh_key(self):
data = self.load_from_file('keys/newly_posted.json')
url = self.base_url + 'account/keys/'
responses.add(responses.POST, url,
body=data,
status=200,
content_type='application/json')
params = {'public_key': 'AAAAkey', 'name': 'new_key'}
ssh_key = self.manager.get_data(url='account/keys/',
type='POST',
params=params)
key = ssh_key['ssh_key']
self.assertEqual(key['id'], 1234)
self.assertEqual(key['fingerprint'], 'ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff:ff')
self.assertEqual(key['public_key'], 'AAAAkey')
self.assertEqual(key['name'], 'new_key')
@responses.activate
def test_get_all_domains(self):
data = self.load_from_file('domains/all.json')
url = self.base_url + 'domains/'
responses.add(responses.GET, url,
body=data,
status=200,
content_type='application/json')
domains = self.manager.get_all_domains()
self.assertEqual(len(domains), 1)
# Test the few things we can assume about a random domain.
domain = domains[0]
self.assertEqual(domain.token, self.token)
self.assertEqual(domain.name, "example.com")
self.assertEqual(domain.zone_file, "Example zone file text...")
self.assertEqual(domain.ttl, 1800)
@responses.activate
def test_get_all_floating_ips(self):
data = self.load_from_file('floatingip/list.json')
url = self.base_url + "floating_ips"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
fips = self.manager.get_all_floating_ips()
self.assertEqual(fips[0].ip, "45.55.96.47")
self.assertEqual(fips[0].region['slug'], 'nyc3')
@responses.activate
def test_get_all_load_balancers(self):
data = self.load_from_file('loadbalancer/all.json')
url = self.base_url + "load_balancers"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
lbs = self.manager.get_all_load_balancers()
resp_rules = lbs[0].forwarding_rules[0]
self.assertEqual(lbs[0].id, '4de7ac8b-495b-4884-9a69-1050c6793cd6')
self.assertEqual(lbs[0].algorithm, 'round_robin')
self.assertEqual(lbs[0].ip, '104.131.186.241')
self.assertEqual(lbs[0].name, 'example-lb-01')
self.assertEqual(len(lbs[0].forwarding_rules), 2)
self.assertEqual(resp_rules.entry_protocol, 'http')
self.assertEqual(resp_rules.entry_port, 80)
self.assertEqual(resp_rules.target_protocol, 'http')
self.assertEqual(resp_rules.target_port, 80)
self.assertEqual(resp_rules.tls_passthrough, False)
self.assertEqual(lbs[0].health_check.protocol, 'http')
self.assertEqual(lbs[0].health_check.port, 80)
self.assertEqual(lbs[0].sticky_sessions.type, 'none')
self.assertEqual(lbs[0].tag, '')
self.assertEqual(lbs[0].droplet_ids, [3164444, 3164445])
@responses.activate
def test_get_all_certificates(self):
data = self.load_from_file('certificate/list.json')
url = self.base_url + "certificates"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
certs = self.manager.get_all_certificates()
self.assertEqual(certs[0].id, '892071a0-bb95-49bc-8021-3afd67a210bf')
self.assertEqual(certs[0].name, 'web-cert-01')
self.assertEqual(certs[0].sha1_fingerprint,
'dfcc9f57d86bf58e321c2c6c31c7a971be244ac7')
self.assertEqual(certs[0].not_after, '2017-02-22T00:23:00Z')
self.assertEqual(certs[0].created_at, '2017-02-08T16:02:37Z')
self.assertEqual(certs[0].type, 'custom')
self.assertEqual(certs[0].state, 'verified')
self.assertEqual(certs[1].id, 'ba9b9c18-6c59-46c2-99df-70da170a42ba')
self.assertEqual(certs[1].name, 'web-cert-02')
self.assertEqual(certs[1].sha1_fingerprint,
'479c82b5c63cb6d3e6fac4624d58a33b267e166c')
self.assertEqual(certs[1].not_after, '2018-06-07T17:44:12Z')
self.assertEqual(certs[1].created_at, '2018-03-09T18:44:11Z')
self.assertEqual(certs[1].type, 'lets_encrypt')
self.assertEqual(certs[1].state, 'pending')
@responses.activate
def test_get_all_volumes(self):
data = self.load_from_file('volumes/all.json')
url = self.base_url + "volumes"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
volumes = self.manager.get_all_volumes()
self.assertEqual(volumes[0].id, "506f78a4-e098-11e5-ad9f-000f53306ae1")
self.assertEqual(volumes[0].region['slug'], 'nyc1')
self.assertEqual(volumes[0].filesystem_type, "ext4")
self.assertEqual(len(volumes), 2)
@responses.activate
def test_get_per_region_volumes(self):
data = json.loads(self.load_from_file('volumes/all.json'))
data["volumes"] = [
volume for volume in data["volumes"]
if volume["region"]["slug"] == "nyc1"]
url = self.base_url + "volumes?region=nyc1&per_page=200"
responses.add(responses.GET, url,
match_querystring=True,
body=json.dumps(data),
status=200,
content_type='application/json')
volumes = self.manager.get_all_volumes("nyc1")
self.assertEqual(volumes[0].id, "506f78a4-e098-11e5-ad9f-000f53306ae1")
self.assertEqual(volumes[0].region['slug'], 'nyc1')
self.assertEqual(len(volumes), 1)
@responses.activate
def test_get_named_volumes(self):
data = json.loads(self.load_from_file('volumes/all.json'))
data["volumes"] = [
volume for volume in data["volumes"]
if volume["name"] == "another-example"]
url = self.base_url + "volumes?name=another-example&per_page=200"
responses.add(responses.GET, url,
match_querystring=True,
body=json.dumps(data),
status=200,
content_type='application/json')
volumes = self.manager.get_all_volumes(name="another-example")
self.assertEqual(volumes[0].id, "2d2967ff-491d-11e6-860c-000f53315870")
self.assertEqual(volumes[0].name, 'another-example')
self.assertEqual(len(volumes), 1)
@responses.activate
def test_get_all_tags(self):
data = self.load_from_file('tags/all.json')
url = self.base_url + 'tags'
responses.add(responses.GET, url,
body=data,
status=200,
content_type='application/json')
all_tags = self.manager.get_all_tags()
self.assertEqual(len(all_tags), 1)
self.assertEqual(all_tags[0].name, 'test')
self.assertEqual(all_tags[0].resources['droplets']['count'], 0)
@responses.activate
def test_get_all_snapshots(self):
data = self.load_from_file('snapshots/all.json')
url = self.base_url + 'snapshots/'
responses.add(responses.GET, url,
body=data,
status=200,
content_type='application/json')
all_snapshots = self.manager.get_all_snapshots()
self.assertEqual(len(all_snapshots), 1)
self.assertEqual(all_snapshots[0].name, 'test')
self.assertEqual(all_snapshots[0].id, 6372321)
self.assertEqual(all_snapshots[0].size_gigabytes, 1.42)
self.assertEqual(all_snapshots[0].resource_type, 'droplet')
self.assertEqual(len(all_snapshots[0].regions), 11)
@responses.activate
def test_get_droplet_snapshots(self):
data = self.load_from_file('snapshots/droplets.json')
url = self.base_url + 'snapshots?resource_type=droplet&per_page=200'
responses.add(responses.GET, url,
match_querystring=True,
body=data,
status=200,
content_type='application/json')
droplet_snapshots = self.manager.get_droplet_snapshots()
self.assertEqual(len(droplet_snapshots), 1)
self.assertEqual(droplet_snapshots[0].name, 'droplet-test')
self.assertEqual(droplet_snapshots[0].id, 19602538)
self.assertEqual(droplet_snapshots[0].min_disk_size, 20)
self.assertEqual(droplet_snapshots[0].size_gigabytes, 0.31)
self.assertEqual(droplet_snapshots[0].resource_type, 'droplet')
self.assertEqual(len(droplet_snapshots[0].regions), 12)
@responses.activate
def test_get_volume_snapshots(self):
data = self.load_from_file('snapshots/volumes.json')
url = self.base_url + 'snapshots?resource_type=volume&per_page=200'
responses.add(responses.GET, url,
match_querystring=True,
body=data,
status=200,
content_type='application/json')
volume_snapshots = self.manager.get_volume_snapshots()
self.assertEqual(len(volume_snapshots), 1)
self.assertEqual(volume_snapshots[0].name, 'volume-test')
self.assertEqual(
volume_snapshots[0].id, '4f60fc64-85d1-11e6-a004-000f53315871'
)
self.assertEqual(volume_snapshots[0].min_disk_size, 10)
self.assertEqual(volume_snapshots[0].size_gigabytes, 0)
self.assertEqual(volume_snapshots[0].resource_type, 'volume')
self.assertEqual(len(volume_snapshots[0].regions), 1)
@responses.activate
def test_get_all_projects(self):
data = self.load_from_file('projects/all_projects_list.json')
url = self.base_url + 'projects'
responses.add(responses.GET, url,
body=data,
status=200,
content_type='application/json')
all_projects = self.manager.get_all_projects()
self.assertEqual(len(all_projects), 1)
self.assertEqual(all_projects[0].id, "4e1bfbc3-dc3e-41f2-a18f-1b4d7ba71679")
self.assertEqual(all_projects[0].owner_uuid, "99525febec065ca37b2ffe4f852fd2b2581895e7")
self.assertEqual(all_projects[0].owner_id, 2)
self.assertEqual(all_projects[0].name, "my-web-api")
self.assertEqual(all_projects[0].description, "My website API")
self.assertEqual(all_projects[0].purpose, "Service or API")
self.assertEqual(all_projects[0].environment, "Production")
self.assertEqual(all_projects[0].is_default, False)
self.assertEqual(all_projects[0].created_at, "2018-09-27T20:10:35Z")
self.assertEqual(all_projects[0].updated_at, "2018-09-27T20:10:35Z")
@responses.activate
def test_get_default_project(self):
data = self.load_from_file('projects/default_project.json')
url = self.base_url + 'projects' + "/default"
responses.add(responses.GET, url,
body=data,
status=200,
content_type='application/json')
default_project = self.manager.get_default_project()
self.assertEqual(default_project.id, "4e1bfbc3-dc3e-41f2-a18f-1b4d7ba71679")
self.assertEqual(default_project.owner_uuid, "99525febec065ca37b2ffe4f852fd2b2581895e7")
self.assertEqual(default_project.owner_id, 2)
self.assertEqual(default_project.name, "my-web-api")
self.assertEqual(default_project.description, "My website API")
self.assertEqual(default_project.purpose, "Service or API")
self.assertEqual(default_project.environment, "Production")
self.assertEqual(default_project.is_default, True)
self.assertEqual(default_project.created_at, "2018-09-27T20:10:35Z")
self.assertEqual(default_project.updated_at, "2018-09-27T20:10:35Z")
@responses.activate
def test_get_firewalls(self):
data = self.load_from_file('firewalls/all.json')
url = self.base_url + "firewalls"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
firewalls = self.manager.get_all_firewalls()
f = firewalls[0]
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(f.id, "12345")
self.assertEqual(f.name, "firewall")
self.assertEqual(f.status, "succeeded")
self.assertEqual(f.inbound_rules[0].ports, "80")
self.assertEqual(f.inbound_rules[0].protocol, "tcp")
self.assertEqual(f.inbound_rules[0].sources.load_balancer_uids,
["12345"])
self.assertEqual(f.inbound_rules[0].sources.addresses, [])
self.assertEqual(f.inbound_rules[0].sources.tags, [])
self.assertEqual(f.outbound_rules[0].ports, "80")
self.assertEqual(f.outbound_rules[0].protocol, "tcp")
self.assertEqual(
f.outbound_rules[0].destinations.load_balancer_uids, [])
self.assertEqual(f.outbound_rules[0].destinations.addresses,
["0.0.0.0/0", "::/0"])
self.assertEqual(f.outbound_rules[0].destinations.tags, [])
self.assertEqual(f.created_at, "2017-05-23T21:24:00Z")
self.assertEqual(f.droplet_ids, [12345])
self.assertEqual(f.tags, [])
self.assertEqual(f.pending_changes, [])
@responses.activate
def test_get_vpc(self):
data = self.load_from_file('vpcs/single.json')
vpc_id = "5a4981aa-9653-4bd1-bef5-d6bff52042e4"
url = self.base_url + 'vpcs/' + vpc_id
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
vpc = self.manager.get_vpc(vpc_id)
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(vpc.id, vpc_id)
self.assertEqual(vpc.name, 'my-new-vpc')
self.assertEqual(vpc.region, 'nyc1')
self.assertEqual(vpc.ip_range, '10.10.10.0/24')
self.assertEqual(vpc.description, '')
self.assertEqual(vpc.urn, 'do:vpc:5a4981aa-9653-4bd1-bef5-d6bff52042e4')
self.assertEqual(vpc.created_at, '2020-03-13T18:48:45Z')
self.assertEqual(vpc.default, False)
@responses.activate
def test_get_all_vpcs(self):
data = self.load_from_file('vpcs/list.json')
url = self.base_url + "vpcs"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
vpcs = self.manager.get_all_vpcs()
self.assertEqual(vpcs[0].id, '5a4981aa-9653-4bd1-bef5-d6bff52042e4')
self.assertEqual(vpcs[0].name, 'my-new-vpc')
self.assertEqual(vpcs[0].created_at, '2020-03-13T19:20:47Z')
self.assertEqual(vpcs[0].region, 'nyc1')
self.assertEqual(vpcs[0].description, '')
self.assertEqual(vpcs[0].urn,
'do:vpc:5a4981aa-9653-4bd1-bef5-d6bff52042e4')
self.assertEqual(vpcs[0].ip_range, '10.10.10.0/24')
self.assertEqual(vpcs[0].default, False)
self.assertEqual(vpcs[1].id, 'e0fe0f4d-596a-465e-a902-571ce57b79fa')
self.assertEqual(vpcs[1].name, 'default-nyc1')
self.assertEqual(vpcs[1].description, '')
self.assertEqual(vpcs[1].urn,
'do:vpc:e0fe0f4d-596a-465e-a902-571ce57b79fa')
self.assertEqual(vpcs[1].ip_range, '10.102.0.0/20')
self.assertEqual(vpcs[1].created_at, '2020-03-13T19:29:20Z')
self.assertEqual(vpcs[1].region, 'nyc1')
self.assertEqual(vpcs[1].default, True)
# Allow running this test module directly (e.g. ``python <module>.py``).
if __name__ == '__main__':
    unittest.main()
| 31,186 | Python | .py | 644 | 36.399068 | 147 | 0.604485 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,298 | test_domain.py | koalalorenzo_python-digitalocean/digitalocean/tests/test_domain.py | import json
import unittest
import responses
import digitalocean
from .BaseTest import BaseTest
class TestDomain(BaseTest):
    """Tests for digitalocean.Domain against a mocked HTTP API."""

    def setUp(self):
        super(TestDomain, self).setUp()
        self.domain = digitalocean.Domain(name='example.com', token=self.token)

    @responses.activate
    def test_load(self):
        """Domain.load() pulls attributes from GET domains/<name>."""
        endpoint = self.base_url + "domains/example.com"
        responses.add(
            responses.GET, endpoint,
            body=self.load_from_file('domains/single.json'),
            status=200, content_type='application/json')

        domain = digitalocean.Domain(name='example.com', token=self.token)
        domain.load()

        self.assert_get_url_equal(responses.calls[0].request.url, endpoint)
        self.assertEqual(domain.name, "example.com")
        self.assertEqual(domain.ttl, 1800)

    @responses.activate
    def test_destroy(self):
        """destroy() issues DELETE against the domain resource."""
        endpoint = self.base_url + "domains/example.com"
        responses.add(
            responses.DELETE, endpoint,
            status=204, content_type='application/json')

        self.domain.destroy()

        self.assertEqual(responses.calls[0].request.url, endpoint)

    @responses.activate
    def test_create_new_domain_record(self):
        """Creating a CNAME record POSTs the expected JSON payload."""
        endpoint = self.base_url + "domains/example.com/records"
        responses.add(
            responses.POST, endpoint,
            body=self.load_from_file('domains/create_record.json'),
            status=201, content_type='application/json')

        response = self.domain.create_new_domain_record(
            type="CNAME", name="www", data="@")

        self.assert_url_query_equal(responses.calls[0].request.url, endpoint)
        self.assertEqual(json.loads(responses.calls[0].request.body),
                         {"type": "CNAME", "data": "@", "name": "www"})
        record = response['domain_record']
        self.assertEqual(record['type'], "CNAME")
        self.assertEqual(record['name'], "www")
        self.assertEqual(record['data'], "@")
        self.assertEqual(record['ttl'], 600)

    @responses.activate
    def test_update_new_domain_record(self):
        """Updating a record PUTs the full payload to records/<id>."""
        body = self.load_from_file('domains/update_record.json')
        record_id = str(json.loads(body)['domain_record']['id'])
        endpoint = self.base_url + "domains/example.com/records/" + record_id
        responses.add(
            responses.PUT, endpoint,
            body=body,
            status=201, content_type='application/json')

        response = self.domain.update_domain_record(
            domain="example.com", id=record_id, type="CNAME", name="www", data="@")

        self.assert_url_query_equal(responses.calls[0].request.url, endpoint)
        self.assertEqual(
            json.loads(responses.calls[0].request.body),
            {"type": "CNAME", "id": record_id, "domain": "example.com",
             "data": "@", "name": "www"})
        record = response['domain_record']
        self.assertEqual(record['type'], "CNAME")
        self.assertEqual(record['name'], "www")
        self.assertEqual(record['data'], "@")
        self.assertEqual(record['ttl'], 600)

    @responses.activate
    def test_delete_domain_record(self):
        """Deleting a record issues DELETE against records/<id>."""
        record_id = "1234"
        endpoint = self.base_url + "domains/example.com/records/" + record_id
        responses.add(
            responses.DELETE, endpoint,
            status=204, content_type='application/json')

        self.domain.delete_domain_record(id=record_id)

        self.assertEqual(responses.calls[0].request.url, endpoint)

    @responses.activate
    def test_create_new_srv_record_zero_priority(self):
        """SRV records may carry priority and weight of zero."""
        endpoint = self.base_url + "domains/example.com/records"
        responses.add(
            responses.POST, endpoint,
            body=self.load_from_file('domains/create_srv_record.json'),
            status=201, content_type='application/json')

        response = self.domain.create_new_domain_record(
            type="SRV", name="service", data="service", priority=0, weight=0)

        self.assert_url_query_equal(responses.calls[0].request.url, endpoint)
        record = response['domain_record']
        self.assertEqual(record['type'], "SRV")
        self.assertEqual(record['name'], "service")
        self.assertEqual(record['data'], "service")
        self.assertEqual(record['priority'], 0)
        self.assertEqual(record['weight'], 0)

    @responses.activate
    def test_create_new_caa_record_zero_flags(self):
        """CAA records may carry a flags value of zero."""
        endpoint = self.base_url + "domains/example.com/records"
        responses.add(
            responses.POST, endpoint,
            body=self.load_from_file('domains/create_caa_record.json'),
            status=201, content_type='application/json')

        response = self.domain.create_new_domain_record(
            type="CAA", name="@", data="letsencrypt.org.",
            ttl=1800, flags=0, tag="issue")

        self.assert_url_query_equal(responses.calls[0].request.url, endpoint)
        record = response['domain_record']
        self.assertEqual(record['type'], "CAA")
        self.assertEqual(record['name'], "@")
        self.assertEqual(record['data'], "letsencrypt.org.")
        self.assertEqual(record['ttl'], 1800)
        self.assertEqual(record['flags'], 0)
        self.assertEqual(record['tag'], "issue")

    @responses.activate
    def test_create(self):
        """Domain.create() POSTs name and ip_address to /domains."""
        endpoint = self.base_url + "domains"
        responses.add(
            responses.POST, endpoint,
            body=self.load_from_file('domains/create.json'),
            status=201, content_type='application/json')

        created = digitalocean.Domain(name="example.com",
                                      ip_address="1.1.1.1",
                                      token=self.token).create()

        self.assert_url_query_equal(responses.calls[0].request.url, endpoint)
        self.assertEqual(json.loads(responses.calls[0].request.body),
                         {'ip_address': '1.1.1.1', 'name': 'example.com'})
        self.assertEqual(created['domain']['name'], "example.com")
        self.assertEqual(created['domain']['ttl'], 1800)

    @responses.activate
    def test_get_records(self):
        """get_records() returns one Record per entry in the listing."""
        endpoint = self.base_url + "domains/example.com/records/"
        responses.add(
            responses.GET, endpoint,
            body=self.load_from_file('domains/records.json'),
            status=200, content_type='application/json')

        records = self.domain.get_records()

        self.assert_get_url_equal(responses.calls[0].request.url, endpoint)
        self.assertEqual(len(records), 6)
        self.assertEqual(records[0].type, "A")
        self.assertEqual(records[0].name, "@")
        self.assertEqual(records[4].type, "CNAME")
        self.assertEqual(records[4].name, "example")
        self.assertEqual(records[4].ttl, 600)
        self.assertEqual(records[5].data, "letsencrypt.org.")
# Allow running this test module directly (e.g. ``python test_domain.py``).
if __name__ == '__main__':
    unittest.main()
| 7,801 | Python | .py | 160 | 36.36875 | 113 | 0.583684 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,299 | test_volume.py | koalalorenzo_python-digitalocean/digitalocean/tests/test_volume.py | import unittest
import responses
import digitalocean
from .BaseTest import BaseTest
class TestVolume(BaseTest):
    """Tests for digitalocean.Volume against a mocked HTTP API."""

    def setUp(self):
        super(TestVolume, self).setUp()
        self.volume = digitalocean.Volume(
            id='506f78a4-e098-11e5-ad9f-000f53306ae1', token=self.token)

    @responses.activate
    def test_load(self):
        """Volume.load() pulls attributes from GET volumes/<id>."""
        endpoint = (self.base_url +
                    "volumes/506f78a4-e098-11e5-ad9f-000f53306ae1")
        responses.add(
            responses.GET, endpoint,
            body=self.load_from_file('volumes/single.json'),
            status=200, content_type='application/json')

        self.volume.load()

        self.assert_get_url_equal(responses.calls[0].request.url, endpoint)
        self.assertEqual(self.volume.id,
                         "506f78a4-e098-11e5-ad9f-000f53306ae1")
        self.assertEqual(self.volume.size_gigabytes, 100)

    @responses.activate
    def test_create(self):
        """Volume.create() POSTs to volumes/ and fills in attributes."""
        endpoint = self.base_url + "volumes/"
        responses.add(
            responses.POST, endpoint,
            body=self.load_from_file('volumes/single.json'),
            status=201, content_type='application/json')

        volume = digitalocean.Volume(droplet_id=12345,
                                     region='nyc1',
                                     size_gigabytes=100,
                                     filesystem_type='ext4',
                                     filesystem_label='label',
                                     token=self.token).create()

        self.assertEqual(responses.calls[0].request.url, endpoint)
        self.assertEqual(volume.id, "506f78a4-e098-11e5-ad9f-000f53306ae1")
        self.assertEqual(volume.size_gigabytes, 100)
        self.assertEqual(volume.filesystem_type, "ext4")

    @responses.activate
    def test_create_with_tags(self):
        """Tags passed at creation are reflected on the returned volume."""
        endpoint = self.base_url + "volumes/"
        responses.add(
            responses.POST, endpoint,
            body=self.load_from_file('volumes/single_with_tags.json'),
            status=201, content_type='application/json')

        volume = digitalocean.Volume(droplet_id=12345,
                                     region='nyc1',
                                     size_gigabytes=100,
                                     filesystem_type='ext4',
                                     filesystem_label='label',
                                     tags=['tag1', 'tag2'],
                                     token=self.token).create()

        self.assertEqual(volume.tags, ['tag1', 'tag2'])
        self.assertEqual(responses.calls[0].request.url, endpoint)
        self.assertEqual(volume.id, "506f78a4-e098-11e5-ad9f-000f53306ae1")
        self.assertEqual(volume.size_gigabytes, 100)
        self.assertEqual(volume.filesystem_type, "ext4")

    @responses.activate
    def test_create_from_snapshot(self):
        """A snapshot_id may be supplied when creating a volume."""
        endpoint = self.base_url + "volumes/"
        responses.add(
            responses.POST, endpoint,
            body=self.load_from_file('volumes/single.json'),
            status=201, content_type='application/json')

        volume = digitalocean.Volume(droplet_id=12345,
                                     snapshot_id='234234qwer',
                                     region='nyc1',
                                     size_gigabytes=100,
                                     filesystem_type='ext4',
                                     filesystem_label='label',
                                     token=self.token).create()

        self.assertEqual(responses.calls[0].request.url, endpoint)
        self.assertEqual(volume.id, "506f78a4-e098-11e5-ad9f-000f53306ae1")
        self.assertEqual(volume.size_gigabytes, 100)
        self.assertEqual(volume.filesystem_type, "ext4")

    @responses.activate
    def test_destroy(self):
        """destroy() issues DELETE against the volume resource."""
        endpoint = (self.base_url +
                    "volumes/506f78a4-e098-11e5-ad9f-000f53306ae1/")
        responses.add(
            responses.DELETE, endpoint,
            status=204, content_type='application/json')

        self.volume.destroy()

        self.assertEqual(responses.calls[0].request.url, endpoint)

    @responses.activate
    def test_attach(self):
        """attach() POSTs an attach_volume action."""
        endpoint = self.base_url + "volumes/" + self.volume.id + "/actions/"
        responses.add(
            responses.POST, endpoint,
            body=self.load_from_file('volumes/attach.json'),
            status=201, content_type='application/json')

        result = self.volume.attach(droplet_id=12345, region='nyc1')

        self.assertEqual(responses.calls[0].request.url, endpoint)
        self.assertEqual(result['action']['type'], 'attach_volume')
        self.assertEqual(result['action']['status'], 'completed')
        self.assertEqual(result['action']['id'], 72531856)

    @responses.activate
    def test_detach(self):
        """detach() POSTs a detach_volume action."""
        endpoint = self.base_url + "volumes/" + self.volume.id + "/actions/"
        responses.add(
            responses.POST, endpoint,
            body=self.load_from_file('volumes/detach.json'),
            status=201, content_type='application/json')

        result = self.volume.detach(droplet_id=12345, region='nyc1')

        self.assertEqual(responses.calls[0].request.url, endpoint)
        self.assertEqual(result['action']['type'], 'detach_volume')
        self.assertEqual(result['action']['status'], 'in-progress')
        self.assertEqual(result['action']['id'], 68212773)

    @responses.activate
    def test_resize(self):
        """resize() POSTs a resize_volume action with the new size."""
        endpoint = self.base_url + "volumes/" + self.volume.id + "/actions/"
        responses.add(
            responses.POST, endpoint,
            body=self.load_from_file('volumes/resize.json'),
            status=201, content_type='application/json')

        result = self.volume.resize(region='nyc1', size_gigabytes=1000)

        self.assertEqual(responses.calls[0].request.url, endpoint)
        self.assertEqual(result['action']['type'], 'resize_volume')
        self.assertEqual(result['action']['status'], 'in-progress')
        self.assertEqual(result['action']['id'], 72531856)

    @responses.activate
    def test_snapshot(self):
        """snapshot() POSTs to volumes/<id>/snapshots/."""
        endpoint = self.base_url + "volumes/" + self.volume.id + "/snapshots/"
        responses.add(
            responses.POST, endpoint,
            body=self.load_from_file('volumes/snapshot.json'),
            status=201, content_type='application/json')

        result = self.volume.snapshot(name='big-data-snapshot1475261774')

        self.assertEqual(responses.calls[0].request.url, endpoint)
        snapshot = result['snapshot']
        self.assertEqual(snapshot['resource_type'], 'volume')
        self.assertEqual(snapshot['min_disk_size'], 10)
        self.assertEqual(snapshot['size_gigabytes'], 20.2)
        self.assertEqual(snapshot['id'], '8fa70202-873f-11e6-8b68-000f533176b1')

    @responses.activate
    def test_get_snapshots(self):
        """get_snapshots() lists every snapshot of the volume."""
        endpoint = self.base_url + "volumes/" + self.volume.id + "/snapshots/"
        responses.add(
            responses.GET, endpoint,
            body=self.load_from_file('volumes/snapshots.json'),
            status=201, content_type='application/json')

        snapshots = self.volume.get_snapshots()

        self.assert_get_url_equal(responses.calls[0].request.url, endpoint)
        self.assertEqual(len(snapshots), 2)
        self.assertEqual(snapshots[0].id,
                         '8eb4d51a-873f-11e6-96bf-000f53315a41')
        self.assertEqual(snapshots[0].name, 'big-data-snapshot1475261752')
        self.assertEqual(snapshots[0].size_gigabytes, 20.2)
        self.assertEqual(snapshots[1].id,
                         '8eb4d51a-873f-11e6-96bf-000f53315a42')
        self.assertEqual(snapshots[1].name, 'big-data-snapshot1475261752-2')
        self.assertEqual(snapshots[1].size_gigabytes, 40.4)
unittest.main()
| 8,895 | Python | .py | 186 | 32.650538 | 87 | 0.546693 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |