id int64 0 458k | file_name stringlengths 4 119 | file_path stringlengths 14 227 | content stringlengths 24 9.96M | size int64 24 9.96M | language stringclasses 1 value | extension stringclasses 14 values | total_lines int64 1 219k | avg_line_length float64 2.52 4.63M | max_line_length int64 5 9.91M | alphanum_fraction float64 0 1 | repo_name stringlengths 7 101 | repo_stars int64 100 139k | repo_forks int64 0 26.4k | repo_open_issues int64 0 2.27k | repo_license stringclasses 12 values | repo_extraction_date stringclasses 433 values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
23,300 | test_vpc.py | koalalorenzo_python-digitalocean/digitalocean/tests/test_vpc.py | import json
import unittest
import responses
import digitalocean
from .BaseTest import BaseTest
class TestVPC(BaseTest):
def setUp(self):
super(TestVPC, self).setUp()
self.vpc_id = '5a4981aa-9653-4bd1-bef5-d6bff52042e4'
self.vpc = digitalocean.VPC(id=self.vpc_id, token=self.token)
@responses.activate
def test_load(self):
data = self.load_from_file('vpcs/single.json')
url = self.base_url + 'vpcs/' + self.vpc_id
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
self.vpc.load()
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(self.vpc.id, self.vpc_id)
self.assertEqual(self.vpc.name, 'my-new-vpc')
self.assertEqual(self.vpc.region, 'nyc1')
self.assertEqual(self.vpc.ip_range, '10.10.10.0/24')
self.assertEqual(self.vpc.description, '')
self.assertEqual(self.vpc.urn,
'do:vpc:5a4981aa-9653-4bd1-bef5-d6bff52042e4')
self.assertEqual(self.vpc.created_at, '2020-03-13T18:48:45Z')
self.assertEqual(self.vpc.default, False)
@responses.activate
def test_create(self):
data = self.load_from_file('vpcs/single.json')
url = self.base_url + 'vpcs'
responses.add(responses.POST,
url,
body=data,
status=201,
content_type='application/json')
vpc = digitalocean.VPC(name='my-new-vpc',
region='nyc1',
ip_range='10.10.10.0/24',
token=self.token).create()
self.assertEqual(responses.calls[0].request.url, url)
self.assertEqual(vpc.id, '5a4981aa-9653-4bd1-bef5-d6bff52042e4')
self.assertEqual(vpc.name, 'my-new-vpc')
self.assertEqual(vpc.ip_range, '10.10.10.0/24')
self.assertEqual(vpc.description, '')
self.assertEqual(vpc.urn, 'do:vpc:5a4981aa-9653-4bd1-bef5-d6bff52042e4')
self.assertEqual(vpc.created_at, '2020-03-13T18:48:45Z')
@responses.activate
def test_rename(self):
data = self.load_from_file('vpcs/single.json')
url = self.base_url + 'vpcs/' + self.vpc_id
responses.add(responses.PATCH,
url,
body=data,
status=200,
content_type='application/json')
self.vpc.rename('my-new-vpc')
self.assertEqual(responses.calls[0].request.url, url)
self.assertEqual(self.vpc.id, '5a4981aa-9653-4bd1-bef5-d6bff52042e4')
self.assertEqual(self.vpc.name, 'my-new-vpc')
self.assertEqual(self.vpc.created_at, '2020-03-13T18:48:45Z')
@responses.activate
def test_destroy(self):
url = self.base_url + 'vpcs/' + self.vpc_id
responses.add(responses.DELETE,
url,
status=204,
content_type='application/json')
self.vpc.destroy()
self.assertEqual(responses.calls[0].request.url, url)
if __name__ == '__main__':
unittest.main()
| 3,267 | Python | .py | 75 | 31.826667 | 80 | 0.58512 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,301 | test_certificate.py | koalalorenzo_python-digitalocean/digitalocean/tests/test_certificate.py | import json
import unittest
import responses
import digitalocean
from .BaseTest import BaseTest
class TestCertificate(BaseTest):
def setUp(self):
super(TestCertificate, self).setUp()
self.cert_id = '892071a0-bb95-49bc-8021-3afd67a210bf'
self.cert = digitalocean.Certificate(id=self.cert_id, token=self.token)
@responses.activate
def test_load(self):
data = self.load_from_file('certificate/custom.json')
url = self.base_url + 'certificates/' + self.cert_id
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
self.cert.load()
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(self.cert.id, self.cert_id)
self.assertEqual(self.cert.name, 'web-cert-01')
self.assertEqual(self.cert.sha1_fingerprint,
'dfcc9f57d86bf58e321c2c6c31c7a971be244ac7')
self.assertEqual(self.cert.not_after, '2017-02-22T00:23:00Z')
self.assertEqual(self.cert.created_at, '2017-02-08T16:02:37Z')
@responses.activate
def test_create_custom(self):
data = self.load_from_file('certificate/custom.json')
url = self.base_url + 'certificates'
responses.add(responses.POST,
url,
body=data,
status=201,
content_type='application/json')
cert = digitalocean.Certificate(name='web-cert-01',
private_key="a-b-c",
leaf_certificate="e-f-g",
certificate_chain="a-b-c\ne-f-g",
type="custom",
token=self.token).create()
self.assertEqual(responses.calls[0].request.url, url)
self.assertEqual(cert.id, '892071a0-bb95-49bc-8021-3afd67a210bf')
self.assertEqual(cert.name, 'web-cert-01')
self.assertEqual(cert.sha1_fingerprint,
'dfcc9f57d86bf58e321c2c6c31c7a971be244ac7')
self.assertEqual(cert.not_after, '2017-02-22T00:23:00Z')
self.assertEqual(cert.created_at, '2017-02-08T16:02:37Z')
self.assertEqual(cert.type, 'custom')
@responses.activate
def test_create_lets_encrypt(self):
data = self.load_from_file('certificate/lets_encrpyt.json')
url = self.base_url + 'certificates'
responses.add(responses.POST,
url,
body=data,
status=201,
content_type='application/json')
cert = digitalocean.Certificate(name='web-cert-02',
dns_names=["www.example.com",
"example.com"],
type="lets_encrpyt",
token=self.token).create()
self.assertEqual(responses.calls[0].request.url, url)
self.assertEqual(cert.id, 'ba9b9c18-6c59-46c2-99df-70da170a42ba')
self.assertEqual(cert.name, 'web-cert-02')
self.assertEqual(cert.sha1_fingerprint,
'479c82b5c63cb6d3e6fac4624d58a33b267e166c')
self.assertEqual(cert.not_after, '2018-06-07T17:44:12Z')
self.assertEqual(cert.created_at, '2018-03-09T18:44:11Z')
self.assertEqual(cert.type, 'lets_encrypt')
self.assertEqual(cert.state, 'pending')
@responses.activate
def test_destroy(self):
url = self.base_url + 'certificates/' + self.cert_id
responses.add(responses.DELETE,
url,
status=204,
content_type='application/json')
self.cert.destroy()
self.assertEqual(responses.calls[0].request.url, url)
if __name__ == '__main__':
unittest.main()
| 3,991 | Python | .py | 84 | 33.202381 | 79 | 0.571024 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,302 | test_firewall.py | koalalorenzo_python-digitalocean/digitalocean/tests/test_firewall.py | import unittest
import responses
import digitalocean
import json
from .BaseTest import BaseTest
class TestFirewall(BaseTest):
@responses.activate
def setUp(self):
super(TestFirewall, self).setUp()
data = self.load_from_file('firewalls/single.json')
url = self.base_url + "firewalls/12345"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
self.firewall = digitalocean.Firewall(id=12345, token=self.token).load()
@responses.activate
def test_load(self):
data = self.load_from_file('firewalls/single.json')
url = self.base_url + "firewalls/12345"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
firewall = digitalocean.Firewall(id=12345, token=self.token)
f = firewall.load()
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(f.id, 12345)
self.assertEqual(f.name, "firewall")
self.assertEqual(f.status, "succeeded")
self.assertEqual(f.inbound_rules[0].ports, "80")
self.assertEqual(f.inbound_rules[0].protocol, "tcp")
self.assertEqual(f.inbound_rules[0].sources.load_balancer_uids,
["12345"])
self.assertEqual(f.inbound_rules[0].sources.addresses, [])
self.assertEqual(f.inbound_rules[0].sources.tags, [])
self.assertEqual(f.outbound_rules[0].ports, "80")
self.assertEqual(f.outbound_rules[0].protocol, "tcp")
self.assertEqual(
f.outbound_rules[0].destinations.load_balancer_uids, [])
self.assertEqual(f.outbound_rules[0].destinations.addresses,
["0.0.0.0/0", "::/0"])
self.assertEqual(f.outbound_rules[0].destinations.tags, [])
self.assertEqual(f.created_at, "2017-05-23T21:24:00Z")
self.assertEqual(f.droplet_ids, [12345])
self.assertEqual(f.tags, [])
self.assertEqual(f.pending_changes, [])
@responses.activate
def test_add_droplets(self):
data = self.load_from_file('firewalls/droplets.json')
url = self.base_url + "firewalls/12345/droplets"
responses.add(responses.POST, url,
body=data,
status=204,
content_type='application/json')
droplet_id = json.loads(data)["droplet_ids"][0]
self.firewall.add_droplets([droplet_id])
self.assertEqual(responses.calls[0].request.url, url)
@responses.activate
def test_remove_droplets(self):
data = self.load_from_file('firewalls/droplets.json')
url = self.base_url + "firewalls/12345/droplets"
responses.add(responses.DELETE,
url,
body=data,
status=204,
content_type='application/json')
droplet_id = json.loads(data)["droplet_ids"][0]
self.firewall.remove_droplets([droplet_id])
self.assertEqual(responses.calls[0].request.url, url)
@responses.activate
def test_add_tags(self):
data = self.load_from_file('firewalls/tags.json')
url = self.base_url + "firewalls/12345/tags"
responses.add(responses.POST, url,
body=data,
status=204,
content_type='application/json')
tag = json.loads(data)["tags"][0]
self.firewall.add_tags([tag])
self.assertEqual(responses.calls[0].request.url, url)
@responses.activate
def test_remove_tags(self):
data = self.load_from_file('firewalls/tags.json')
url = self.base_url + "firewalls/12345/tags"
responses.add(responses.DELETE, url,
body=data,
status=204,
content_type='application/json')
tag = json.loads(data)["tags"][0]
self.firewall.remove_tags([tag])
self.assertEqual(responses.calls[0].request.url, url)
@responses.activate
def test_add_inbound(self):
data = self.load_from_file('firewalls/rules.json')
url = self.base_url + "firewalls/12345/rules"
responses.add(responses.POST, url,
body=data,
status=204,
content_type='application/json')
rule = json.loads(data)["rules"][0]
self.firewall.add_inbound([rule])
self.assertEqual(responses.calls[0].request.url, url)
@responses.activate
def test_add_outbound(self):
data = self.load_from_file('firewalls/rules.json')
url = self.base_url + "firewalls/12345/rules"
responses.add(responses.POST, url,
body=data,
status=204,
content_type='application/json')
rule = json.loads(data)["rules"][0]
self.firewall.add_outbound([rule])
self.assertEqual(responses.calls[0].request.url, url)
@responses.activate
def test_remove_inbound(self):
data = self.load_from_file('firewalls/rules.json')
url = self.base_url + "firewalls/12345/rules"
responses.add(responses.DELETE, url,
body=data,
status=204,
content_type='application/json')
rule = json.loads(data)["rules"][0]
self.firewall.remove_inbound([rule])
self.assertEqual(responses.calls[0].request.url, url)
@responses.activate
def test_remove_outbound(self):
data = self.load_from_file('firewalls/rules.json')
url = self.base_url + "firewalls/12345/rules"
responses.add(responses.DELETE, url,
body=data,
status=204,
content_type='application/json')
rule = json.loads(data)["rules"][0]
self.firewall.remove_outbound([rule])
self.assertEqual(responses.calls[0].request.url, url)
if __name__ == '__main__':
unittest.main()
| 6,221 | Python | .py | 140 | 32.521429 | 80 | 0.589034 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,303 | test_action.py | koalalorenzo_python-digitalocean/digitalocean/tests/test_action.py | import unittest
import responses
import json
import digitalocean
from .BaseTest import BaseTest
class TestAction(BaseTest):
def setUp(self):
super(TestAction, self).setUp()
self.action = digitalocean.Action(id=39388122, token=self.token)
@responses.activate
def test_load_directly(self):
data = self.load_from_file('actions/ipv6_completed.json')
url = self.base_url + "actions/39388122"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
self.action.load_directly()
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(self.action.status, "completed")
self.assertEqual(self.action.id, 39388122)
self.assertEqual(self.action.region_slug, 'nyc3')
@responses.activate
def test_load_without_droplet_id(self):
data = self.load_from_file('actions/ipv6_completed.json')
url = self.base_url + "actions/39388122"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
self.action.load()
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(self.action.status, "completed")
self.assertEqual(self.action.id, 39388122)
self.assertEqual(self.action.region_slug, 'nyc3')
if __name__ == '__main__':
unittest.main()
| 1,586 | Python | .py | 39 | 30.487179 | 72 | 0.621005 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,304 | test_floatingip.py | koalalorenzo_python-digitalocean/digitalocean/tests/test_floatingip.py | import unittest
import responses
import digitalocean
from .BaseTest import BaseTest
class TestFloatingIP(BaseTest):
def setUp(self):
super(TestFloatingIP, self).setUp()
self.fip = digitalocean.FloatingIP(ip='45.55.96.47', token=self.token)
@responses.activate
def test_load(self):
data = self.load_from_file('floatingip/single.json')
url = self.base_url + "floating_ips/45.55.96.47"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
self.fip.load()
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(self.fip.ip, "45.55.96.47")
self.assertEqual(self.fip.region['slug'], 'nyc3')
@responses.activate
def test_create(self):
data = self.load_from_file('floatingip/single.json')
url = self.base_url + "floating_ips/"
responses.add(responses.POST,
url,
body=data,
status=201,
content_type='application/json')
fip = digitalocean.FloatingIP(droplet_id=12345,
token=self.token).create()
self.assertEqual(responses.calls[0].request.url,
self.base_url + "floating_ips/")
self.assertEqual(fip.ip, "45.55.96.47")
self.assertEqual(fip.region['slug'], 'nyc3')
@responses.activate
def test_reserve(self):
data = self.load_from_file('floatingip/single.json')
url = self.base_url + "floating_ips/"
responses.add(responses.POST,
url,
body=data,
status=201,
content_type='application/json')
fip = digitalocean.FloatingIP(region_slug='nyc3',
token=self.token).reserve()
self.assertEqual(responses.calls[0].request.url,
self.base_url + "floating_ips/")
self.assertEqual(fip.ip, "45.55.96.47")
self.assertEqual(fip.region['slug'], 'nyc3')
@responses.activate
def test_destroy(self):
url = self.base_url + "floating_ips/45.55.96.47/"
responses.add(responses.DELETE,
url,
status=204,
content_type='application/json')
self.fip.destroy()
self.assertEqual(responses.calls[0].request.url,
self.base_url + "floating_ips/45.55.96.47/")
@responses.activate
def test_assign(self):
data = self.load_from_file('floatingip/assign.json')
responses.add(responses.POST,
"{}floating_ips/{}/actions/".format(
self.base_url, self.fip.ip),
body=data,
status=201,
content_type='application/json')
res = self.fip.assign(droplet_id=12345)
self.assertEqual(responses.calls[0].request.url,
self.base_url + "floating_ips/45.55.96.47/actions/")
self.assertEqual(res['action']['type'], 'assign_ip')
self.assertEqual(res['action']['status'], 'in-progress')
self.assertEqual(res['action']['id'], 68212728)
@responses.activate
def test_unassign(self):
data = self.load_from_file('floatingip/unassign.json')
responses.add(responses.POST,
"{}floating_ips/{}/actions/".format(
self.base_url, self.fip.ip),
body=data,
status=201,
content_type='application/json')
res = self.fip.unassign()
self.assertEqual(responses.calls[0].request.url,
self.base_url + "floating_ips/45.55.96.47/actions/")
self.assertEqual(res['action']['type'], 'unassign_ip')
self.assertEqual(res['action']['status'], 'in-progress')
self.assertEqual(res['action']['id'], 68212773)
if __name__ == '__main__':
unittest.main()
| 4,172 | Python | .py | 93 | 31.311828 | 78 | 0.552703 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,305 | test_image.py | koalalorenzo_python-digitalocean/digitalocean/tests/test_image.py | import unittest
import responses
import digitalocean
from .BaseTest import BaseTest
class TestImage(BaseTest):
def setUp(self):
super(TestImage, self).setUp()
self.image = digitalocean.Image(
id=449676856, token=self.token
)
self.image_with_slug = digitalocean.Image(
slug='testslug', token=self.token
)
@responses.activate
def test_load(self):
data = self.load_from_file('images/single.json')
url = "{}images/{}".format(self.base_url, self.image.id)
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
self.image.load()
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(self.image.id, 449676856)
self.assertEqual(self.image.name, 'My Snapshot')
self.assertEqual(self.image.distribution, 'Ubuntu')
self.assertEqual(self.image.public, False)
self.assertEqual(self.image.created_at, "2014-08-18T16:35:40Z")
self.assertEqual(self.image.size_gigabytes, 2.34)
self.assertEqual(self.image.min_disk_size, 20)
@responses.activate
def test_load_by_slug(self):
"""Test loading image by slug."""
data = self.load_from_file('images/slug.json')
url = "{}images/{}".format(self.base_url, self.image_with_slug.slug)
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
self.image_with_slug.load(use_slug=True)
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(self.image_with_slug.id, None)
self.assertEqual(self.image_with_slug.slug, 'testslug')
self.assertEqual(self.image_with_slug.name, 'My Slug Snapshot')
self.assertEqual(self.image_with_slug.distribution, 'Ubuntu')
self.assertEqual(self.image_with_slug.public, False)
self.assertEqual(
self.image_with_slug.created_at,
"2014-08-18T16:35:40Z"
)
self.assertEqual(self.image_with_slug.size_gigabytes, 2.34)
self.assertEqual(self.image_with_slug.min_disk_size, 30)
@responses.activate
def test_create(self):
data = self.load_from_file('images/create.json')
url = self.base_url + "images"
responses.add(responses.POST,
url,
body=data,
status=202,
content_type='application/json')
image = digitalocean.Image(name='ubuntu-18.04-minimal',
url='https://www.example.com/cloud.img',
distribution='Ubuntu',
region='nyc3',
description='Cloud-optimized image',
tags=['base-image', 'prod'],
token=self.token)
image.create()
self.assertEqual(image.id, 38413969)
self.assertEqual(image.name, 'ubuntu-18.04-minimal')
self.assertEqual(image.distribution, 'Ubuntu')
self.assertEqual(image.type, 'custom')
self.assertEqual(image.status, 'NEW')
self.assertEqual(image.description, 'Cloud-optimized image')
self.assertEqual(image.tags, ['base-image', 'prod'])
self.assertEqual(image.created_at, '2018-09-20T19:28:00Z')
@responses.activate
def test_destroy(self):
responses.add(responses.DELETE,
'{}images/{}/'.format(self.base_url, self.image.id),
status=204,
content_type='application/json')
self.image.destroy()
self.assertEqual(responses.calls[0].request.url,
self.base_url + 'images/449676856/')
@responses.activate
def test_transfer(self):
data = self.load_from_file('images/transfer.json')
responses.add(responses.POST,
'{}images/{}/actions/'.format(
self.base_url, self.image.id),
body=data,
status=201,
content_type='application/json')
res = self.image.transfer(new_region_slug='lon1')
self.assertEqual(responses.calls[0].request.url,
self.base_url + 'images/449676856/actions/')
self.assertEqual(res['action']['type'], 'transfer')
self.assertEqual(res['action']['status'], 'in-progress')
self.assertEqual(res['action']['id'], 68212728)
@responses.activate
def test_rename(self):
data = self.load_from_file('images/rename.json')
responses.add(responses.PUT,
'{}images/{}'.format(self.base_url, self.image.id),
body=data,
status=200,
content_type='application/json')
res = self.image.rename(new_name='Descriptive name')
self.assertEqual(responses.calls[0].request.url,
self.base_url + 'images/449676856')
self.assertEqual(res['image']['name'], 'Descriptive name')
def test_is_string(self):
self.assertEqual(self.image._is_string("String"), True)
self.assertEqual(self.image._is_string("1234"), True)
self.assertEqual(self.image._is_string(123), False)
self.assertEqual(self.image._is_string(None), None)
self.assertEqual(self.image._is_string(True), None)
self.assertEqual(self.image._is_string(False), None)
if __name__ == '__main__':
unittest.main()
| 5,794 | Python | .py | 124 | 33.798387 | 76 | 0.580113 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,306 | test_droplet.py | koalalorenzo_python-digitalocean/digitalocean/tests/test_droplet.py | import json
import unittest
import responses
import digitalocean
from .BaseTest import BaseTest
class TestDroplet(BaseTest):
@responses.activate
def setUp(self):
super(TestDroplet, self).setUp()
self.actions_url = self.base_url + "droplets/12345/actions/"
data = self.load_from_file('droplets/single.json')
url = self.base_url + "droplets/12345"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
self.droplet = digitalocean.Droplet(id='12345', token=self.token).load()
@responses.activate
def test_load(self):
data = self.load_from_file('droplets/single.json')
url = self.base_url + "droplets/12345"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
droplet = digitalocean.Droplet(id='12345', token=self.token)
d = droplet.load()
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(d.id, 12345)
self.assertEqual(d.name, "example.com")
self.assertEqual(d.memory, 512)
self.assertEqual(d.vcpus, 1)
self.assertEqual(d.disk, 20)
self.assertEqual(d.backups, False)
self.assertEqual(d.ipv6, True)
self.assertEqual(d.private_networking, False)
self.assertEqual(d.region['slug'], "nyc3")
self.assertEqual(d.status, "active")
self.assertEqual(d.image['slug'], "ubuntu-14-04-x64")
self.assertEqual(d.size_slug, '512mb')
self.assertEqual(d.created_at, "2014-11-14T16:36:31Z")
self.assertEqual(d.ip_address, "104.131.186.241")
self.assertEqual(d.ip_v6_address,
"2604:A880:0800:0010:0000:0000:031D:2001")
self.assertEqual(d.kernel['id'], 2233)
self.assertEqual(d.features, ["ipv6", "virtio"])
self.assertEqual(d.tags, [])
self.assertEqual(d.vpc_uuid, "08187eaa-90eb-40d6-a8f0-0222b28ded72")
@responses.activate
def test_power_off(self):
data = self.load_from_file('droplet_actions/power_off.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.power_off()
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "power_off"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "power_off")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_power_off_action(self):
data = self.load_from_file('droplet_actions/power_off.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.power_off(False)
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "power_off"})
self.assertEqual(response.id, 54321)
self.assertEqual(response.status, "in-progress")
self.assertEqual(response.type, "power_off")
self.assertEqual(response.resource_id, 12345)
self.assertEqual(response.resource_type, "droplet")
@responses.activate
def test_power_on(self):
data = self.load_from_file('droplet_actions/power_on.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.power_on()
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "power_on"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "power_on")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_power_on_action(self):
data = self.load_from_file('droplet_actions/power_on.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.power_on(return_dict=False)
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "power_on"})
self.assertEqual(response.id, 54321)
self.assertEqual(response.status, "in-progress")
self.assertEqual(response.type, "power_on")
self.assertEqual(response.resource_id, 12345)
self.assertEqual(response.resource_type, "droplet")
@responses.activate
def test_shutdown(self):
data = self.load_from_file('droplet_actions/shutdown.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.shutdown()
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "shutdown"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "shutdown")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_shutdown_action(self):
data = self.load_from_file('droplet_actions/shutdown.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.shutdown(return_dict=False)
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "shutdown"})
self.assertEqual(response.id, 54321)
self.assertEqual(response.status, "in-progress")
self.assertEqual(response.type, "shutdown")
self.assertEqual(response.resource_id, 12345)
self.assertEqual(response.resource_type, "droplet")
@responses.activate
def test_reboot(self):
data = self.load_from_file('droplet_actions/reboot.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.reboot()
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "reboot"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "reboot")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_reboot_action(self):
data = self.load_from_file('droplet_actions/reboot.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.reboot(return_dict=False)
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "reboot"})
self.assertEqual(response.id, 54321)
self.assertEqual(response.status, "in-progress")
self.assertEqual(response.type, "reboot")
self.assertEqual(response.resource_id, 12345)
self.assertEqual(response.resource_type, "droplet")
@responses.activate
def test_power_cycle(self):
data = self.load_from_file('droplet_actions/power_cycle.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.power_cycle()
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "power_cycle"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "power_cycle")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_power_cycle_action(self):
data = self.load_from_file('droplet_actions/power_cycle.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.power_cycle(return_dict=False)
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "power_cycle"})
self.assertEqual(response.id, 54321)
self.assertEqual(response.status, "in-progress")
self.assertEqual(response.type, "power_cycle")
self.assertEqual(response.resource_id, 12345)
self.assertEqual(response.resource_type, "droplet")
@responses.activate
def test_reset_root_password(self):
data = self.load_from_file('droplet_actions/password_reset.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.reset_root_password()
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "password_reset"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "password_reset")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_reset_root_password_action(self):
data = self.load_from_file('droplet_actions/password_reset.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.reset_root_password(return_dict=False)
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "password_reset"})
self.assertEqual(response.id, 54321)
self.assertEqual(response.status, "in-progress")
self.assertEqual(response.type, "password_reset")
self.assertEqual(response.resource_id, 12345)
self.assertEqual(response.resource_type, "droplet")
@responses.activate
def test_take_snapshot(self):
data = self.load_from_file('droplet_actions/snapshot.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.take_snapshot("New Snapshot")
self.assert_url_query_equal(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "snapshot", "name": "New Snapshot"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "snapshot")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_take_snapshot_action(self):
data = self.load_from_file('droplet_actions/snapshot.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.take_snapshot("New Snapshot", return_dict=False)
self.assert_url_query_equal(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "snapshot", "name": "New Snapshot"})
self.assertEqual(response.id, 54321)
self.assertEqual(response.status, "in-progress")
self.assertEqual(response.type, "snapshot")
self.assertEqual(response.resource_id, 12345)
self.assertEqual(response.resource_type, "droplet")
@responses.activate
def test_resize(self):
data = self.load_from_file('droplet_actions/resize.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.resize("64gb")
self.assert_url_query_equal(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "resize", "size": "64gb", "disk": "true"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "resize")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_resize_action(self):
data = self.load_from_file('droplet_actions/resize.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.resize("64gb", False)
self.assert_url_query_equal(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "resize", "size": "64gb", "disk": "true"})
self.assertEqual(response.id, 54321)
self.assertEqual(response.status, "in-progress")
self.assertEqual(response.type, "resize")
self.assertEqual(response.resource_id, 12345)
self.assertEqual(response.resource_type, "droplet")
@responses.activate
def test_restore(self):
data = self.load_from_file('droplet_actions/restore.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.restore(image_id=78945)
self.assert_url_query_equal(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"image": 78945, "type": "restore"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "restore")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_restore_action(self):
data = self.load_from_file('droplet_actions/restore.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.restore(image_id=78945, return_dict=False)
self.assert_url_query_equal(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"image": 78945, "type": "restore"})
self.assertEqual(response.id, 54321)
self.assertEqual(response.status, "in-progress")
self.assertEqual(response.type, "restore")
self.assertEqual(response.resource_id, 12345)
self.assertEqual(response.resource_type, "droplet")
@responses.activate
def test_rebuild_passing_image(self):
"""
Test rebuilding an droplet from a provided image id.
"""
data = self.load_from_file('droplet_actions/rebuild.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.rebuild(image_id=78945)
self.assert_url_query_equal(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"image": 78945, "type": "rebuild"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "rebuild")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_rebuild_passing_image_action(self):
"""
Test rebuilding an droplet from a provided image id.
"""
data = self.load_from_file('droplet_actions/rebuild.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.rebuild(image_id=78945, return_dict=False)
self.assert_url_query_equal(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"image": 78945, "type": "rebuild"})
self.assertEqual(response.id, 54321)
self.assertEqual(response.status, "in-progress")
self.assertEqual(response.type, "rebuild")
self.assertEqual(response.resource_id, 12345)
self.assertEqual(response.resource_type, "droplet")
@responses.activate
def test_rebuild_not_passing_image(self):
"""
Test rebuilding an droplet from its original parent image id.
"""
data = self.load_from_file('droplet_actions/rebuild.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.rebuild()
self.assert_url_query_equal(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"image": 6918990, "type": "rebuild"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "rebuild")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_rebuild_not_passing_image_action(self):
"""
Test rebuilding an droplet from its original parent image id.
"""
data = self.load_from_file('droplet_actions/rebuild.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.rebuild(return_dict=False)
self.assert_url_query_equal(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"image": 6918990, "type": "rebuild"})
self.assertEqual(response.id, 54321)
self.assertEqual(response.status, "in-progress")
self.assertEqual(response.type, "rebuild")
self.assertEqual(response.resource_id, 12345)
self.assertEqual(response.resource_type, "droplet")
@responses.activate
def test_enable_backups(self):
data = self.load_from_file('droplet_actions/enable_backups.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.enable_backups()
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "enable_backups"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "enable_backups")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_disable_backups(self):
data = self.load_from_file('droplet_actions/disable_backups.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.disable_backups()
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "disable_backups"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "disable_backups")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_disable_backups_action(self):
data = self.load_from_file('droplet_actions/disable_backups.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.disable_backups(return_dict=False)
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "disable_backups"})
self.assertEqual(response.id, 54321)
self.assertEqual(response.status, "in-progress")
self.assertEqual(response.type, "disable_backups")
self.assertEqual(response.resource_id, 12345)
self.assertEqual(response.resource_type, "droplet")
@responses.activate
def test_destroy(self):
url = self.base_url + "droplets/12345"
responses.add(responses.DELETE,
url,
status=204,
content_type='application/json')
self.droplet.destroy()
self.assertEqual(responses.calls[0].request.url,
self.base_url + "droplets/12345")
@responses.activate
def test_rename(self):
data = self.load_from_file('droplet_actions/rename.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.rename(name="New Name")
self.assert_url_query_equal(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "rename", "name": "New Name"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "rename")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_rename_action(self):
data = self.load_from_file('droplet_actions/rename.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.rename(name="New Name", return_dict=False)
self.assert_url_query_equal(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "rename", "name": "New Name"})
self.assertEqual(response.id, 54321)
self.assertEqual(response.status, "in-progress")
self.assertEqual(response.type, "rename")
self.assertEqual(response.resource_id, 12345)
self.assertEqual(response.resource_type, "droplet")
@responses.activate
def test_enable_private_networking(self):
data = self.load_from_file('droplet_actions/enable_private_networking.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.enable_private_networking()
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "enable_private_networking"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "enable_private_networking")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_enable_private_networking_action(self):
data = self.load_from_file('droplet_actions/enable_private_networking.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.enable_private_networking(return_dict=False)
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "enable_private_networking"})
self.assertEqual(response.id, 54321)
self.assertEqual(response.status, "in-progress")
self.assertEqual(response.type, "enable_private_networking")
self.assertEqual(response.resource_id, 12345)
self.assertEqual(response.resource_type, "droplet")
@responses.activate
def test_enable_ipv6(self):
data = self.load_from_file('droplet_actions/enable_ipv6.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.enable_ipv6()
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "enable_ipv6"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "enable_ipv6")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_enable_ipv6_action(self):
data = self.load_from_file('droplet_actions/enable_ipv6.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.enable_ipv6(return_dict=False)
self.assertEqual(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{"type": "enable_ipv6"})
self.assertEqual(response.id, 54321)
self.assertEqual(response.status, "in-progress")
self.assertEqual(response.type, "enable_ipv6")
self.assertEqual(response.resource_id, 12345)
self.assertEqual(response.resource_type, "droplet")
def test_change_kernel_exception(self):
with self.assertRaises(Exception) as error:
self.droplet.change_kernel(kernel=123)
exception = error.exception
self.assertEqual(str(exception), 'Use Kernel object')
@responses.activate
def test_change_kernel(self):
data = self.load_from_file('droplet_actions/change_kernel.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.change_kernel(digitalocean.Kernel(id=123))
self.assert_url_query_equal(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{u"kernel": 123, u"type": u"change_kernel"})
self.assertEqual(response['action']['id'], 54321)
self.assertEqual(response['action']['status'], "in-progress")
self.assertEqual(response['action']['type'], "change_kernel")
self.assertEqual(response['action']['resource_id'], 12345)
self.assertEqual(response['action']['resource_type'], "droplet")
@responses.activate
def test_change_kernel_action(self):
data = self.load_from_file('droplet_actions/change_kernel.json')
responses.add(responses.POST, self.actions_url,
body=data,
status=201,
content_type='application/json')
response = self.droplet.change_kernel(digitalocean.Kernel(id=123),
return_dict=False)
self.assert_url_query_equal(responses.calls[0].request.url,
self.actions_url)
self.assertEqual(json.loads(responses.calls[0].request.body),
{u"kernel": 123, u"type": u"change_kernel"})
self.assertEqual(response.id, 54321)
self.assertEqual(response.status, "in-progress")
self.assertEqual(response.type, "change_kernel")
self.assertEqual(response.resource_id, 12345)
self.assertEqual(response.resource_type, "droplet")
@responses.activate
def test_create_no_keys(self):
data = self.load_from_file('droplet_actions/create.json')
url = self.base_url + "droplets/"
responses.add(responses.POST,
url,
body=data,
status=202,
content_type='application/json')
droplet = digitalocean.Droplet(name="example.com",
size_slug="512mb",
image="ubuntu-14-04-x64",
region="nyc3",
backups=True,
ipv6=True,
private_networking=False,
monitoring=True,
user_data="Some user data.",
token=self.token,
tags=["web"],
vpc_uuid="08187eaa-90eb-40d6-a8f0-0222b28ded72")
droplet.create()
self.assert_url_query_equal(responses.calls[0].request.url, url)
self.maxDiff = None
self.assertEqual(
json.loads(responses.calls[0].request.body),
{u"name": u"example.com", u"region": u"nyc3",
u"user_data": u"Some user data.", u"ipv6": True,
u"private_networking": False, u"monitoring": True,
u"backups": True, u"image": u"ubuntu-14-04-x64",
u"size": u"512mb", u"ssh_keys": [],
u"volumes": [], u"tags": ["web"],
u"vpc_uuid": "08187eaa-90eb-40d6-a8f0-0222b28ded72"})
self.assertEqual(droplet.id, 3164494)
self.assertEqual(droplet.action_ids, [36805096])
@responses.activate
def test_create_multiple_no_keys(self):
data = self.load_from_file('droplet_actions/create_multiple.json')
url = self.base_url + "droplets/"
responses.add(responses.POST,
url,
body=data,
status=202,
content_type='application/json')
droplets = digitalocean.Droplet.create_multiple(names=["example.com",
"example2.com"],
size_slug="512mb",
image="ubuntu-14-04-x64",
region="nyc3",
backups=True,
ipv6=True,
private_networking=False,
monitoring=True,
user_data="Some user data.",
token=self.token,
tags=["web"],
vpc_uuid="08187eaa-90eb-40d6-a8f0-0222b28ded72")
self.assert_url_query_equal(responses.calls[0].request.url, url)
self.assertEqual(len(droplets), 2)
self.assertEqual(droplets[0].id, 3164494)
self.assertEqual(droplets[1].id, 3164495)
self.assertEqual(droplets[0].action_ids, [36805096])
self.assertEqual(droplets[1].action_ids, [36805096])
self.maxDiff = None
self.assertEqual(
json.loads(responses.calls[0].request.body),
{u"names": [u"example.com", u"example2.com"], u"region": u"nyc3",
u"user_data": u"Some user data.", u"ipv6": True,
u"private_networking": False, u"monitoring": True,
u"backups": True, u"image": u"ubuntu-14-04-x64",
u"size": u"512mb", u"tags": ["web"],
u"vpc_uuid": "08187eaa-90eb-40d6-a8f0-0222b28ded72"})
@responses.activate
def test_get_actions(self):
data = self.load_from_file('actions/multi.json')
create = self.load_from_file('actions/create_completed.json')
ipv6 = self.load_from_file('actions/ipv6_completed.json')
responses.add(responses.GET, self.actions_url,
body=data,
status=200,
content_type='application/json')
responses.add(responses.GET, self.actions_url + "39388122",
body=create,
status=200,
content_type='application/json')
responses.add(responses.GET, self.actions_url + "39290099",
body=ipv6,
status=200,
content_type='application/json')
actions = self.droplet.get_actions()
self.assertEqual(len(actions), 2)
self.assertEqual(len(responses.calls), 3)
self.assert_get_url_equal(responses.calls[0].request.url,
self.actions_url)
self.assert_get_url_equal(responses.calls[1].request.url,
self.actions_url + "39388122")
self.assert_get_url_equal(responses.calls[2].request.url,
self.actions_url + "39290099")
self.assertEqual(actions[0].id, 39290099)
self.assertEqual(actions[0].type, "create")
self.assertEqual(actions[0].status, "completed")
self.assertEqual(actions[1].id, 39388122)
self.assertEqual(actions[1].type, "enable_ipv6")
self.assertEqual(actions[1].status, "completed")
@responses.activate
def test_get_action(self):
data = self.load_from_file('actions/create_completed.json')
url = self.base_url + "actions/39388122"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
action = self.droplet.get_action(39388122)
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(action.id, 39290099)
self.assertEqual(action.type, "create")
self.assertEqual(action.status, "completed")
def test_get_snapshots(self):
snapshots = self.droplet.get_snapshots()
self.assertEqual(len(snapshots), 1)
self.assertEqual(snapshots[0].id, 7938206)
@responses.activate
def test_get_kernel_available_no_pages(self):
data = self.load_from_file('kernels/list.json')
url = self.base_url + "droplets/12345/kernels/"
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
kernels = self.droplet.get_kernel_available()
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(len(kernels), 2)
self.assertEqual(kernels[0].id, 61833229)
self.assertEqual(kernels[0].name,
"Ubuntu 14.04 x32 vmlinuz-3.13.0-24-generic")
@responses.activate
def test_get_kernel_available_with_pages(self):
one = self.load_from_file('kernels/page_one.json')
two = self.load_from_file('kernels/page_two.json')
url_0 = self.base_url + "droplets/12345/kernels/"
responses.add(responses.GET,
url_0,
body=one,
status=200,
content_type='application/json')
url_1 = self.base_url + "droplets/12345/kernels?page=2&per_page=200"
responses.add(responses.GET,
url_1,
body=two,
status=200,
content_type='application/json',
match_querystring=True)
kernels = self.droplet.get_kernel_available()
self.assert_get_url_equal(responses.calls[0].request.url, url_0)
self.assert_url_query_equal(responses.calls[1].request.url, url_1)
self.assertEqual(len(kernels), 3)
self.assertEqual(kernels[0].id, 61833229)
self.assertEqual(kernels[0].name,
"Ubuntu 14.04 x32 vmlinuz-3.13.0-24-generic")
self.assertEqual(kernels[2].id, 231)
self.assertEqual(kernels[2].name,
"Ubuntu 14.04 x64 vmlinuz-3.13.0-32-generic")
@responses.activate
def test_update_volumes_data(self):
droplet_response = self.load_from_file('droplets/single.json')
volume_response = self.load_from_file('volumes/single.json')
url_droplet =self.base_url + "droplets/12345"
url_volume = self.base_url + "volumes/506f78a4-e098-11e5-ad9f-000f53306ae1"
responses.add(responses.GET,
url_droplet,
body=droplet_response,
status=200,
content_type='application/json')
responses.add(responses.GET,
url_volume,
body=volume_response,
status=200,
content_type='application/json')
droplet = digitalocean.Droplet(id='12345', token=self.token)
d = droplet.load()
d.update_volumes_data()
self.assert_get_url_equal(responses.calls[0].request.url, url_droplet)
self.assert_get_url_equal(responses.calls[1].request.url, url_volume)
self.assertEqual(len(d.volumes), 1)
self.assertEqual(d.volumes[0].id, '506f78a4-e098-11e5-ad9f-000f53306ae1')
self.assertEqual(d.volumes[0].name, 'example')
self.assertEqual(d.volumes[0].region['slug'], 'nyc1')
# Allow running this test module directly (e.g. ``python test_droplet.py``).
if __name__ == '__main__':
    unittest.main()
| 43,208 | Python | .py | 830 | 37.785542 | 104 | 0.584704 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,307 | BaseTest.py | koalalorenzo_python-digitalocean/digitalocean/tests/BaseTest.py | import os
import unittest
DEFAULT_PER_PAGE = 200
class BaseTest(unittest.TestCase):
    """Shared fixtures and URL-comparison helpers for the API test suite."""

    def setUp(self):
        # Fake credentials; every HTTP request is intercepted by `responses`.
        self.base_url = "https://api.digitalocean.com/v2/"
        self.token = "afaketokenthatwillworksincewemockthings"

    def load_from_file(self, json_file):
        """Return the contents of a canned fixture under ``tests/data/``."""
        cwd = os.path.dirname(__file__)
        with open(os.path.join(cwd, 'data/%s' % json_file), 'r') as f:
            return f.read()

    def split_url(self, url):
        """Split *url* into its base and a sorted list of query pairs."""
        pieces = url.split('?')
        if len(pieces) == 1:
            return url, []
        return pieces[0], sorted(pieces[1].split('&'))

    def assert_url_query_equal(self, url1, url2):
        """Assert two URLs are equal, ignoring query-parameter order.

        The key=value pairs after the ? in a URL can occur in any order
        (especially since dicts in python 3 are not deterministic across
        runs), so both query strings are sorted before comparison.
        """
        base1, query1 = self.split_url(url1)
        base2, query2 = self.split_url(url2)
        self.assertEqual(base1, base2)
        self.assertEqual(query1, query2)

    def assert_get_url_equal(self, url1, url2):
        """Like assert_url_query_equal, with the default per_page appended."""
        joiner = "&" if "?" in url2 else "?"
        expected = "%s%sper_page=%d" % (url2, joiner, DEFAULT_PER_PAGE)
        return self.assert_url_query_equal(url1, expected)
| 1,362 | Python | .py | 35 | 30.542857 | 79 | 0.600914 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,308 | test_snapshot.py | koalalorenzo_python-digitalocean/digitalocean/tests/test_snapshot.py | import unittest
import responses
import digitalocean
from .BaseTest import BaseTest
class TestSnapshot(BaseTest):
    """Tests for the Snapshot resource."""

    def setUp(self):
        super(TestSnapshot, self).setUp()
        self.snapshot = digitalocean.Snapshot(
            id="fbe805e8-866b-11e6-96bf-000f53315a41", token=self.token)

    @responses.activate
    def test_load(self):
        """load() GETs /snapshots/<id> and populates the attributes."""
        fixture = self.load_from_file('snapshots/single.json')
        url = "{}snapshots/{}".format(self.base_url, self.snapshot.id)
        responses.add(responses.GET, url, body=fixture,
                      status=200, content_type='application/json')

        self.snapshot.load()

        self.assert_get_url_equal(responses.calls[0].request.url, url)
        self.assertEqual(self.snapshot.id,
                         "fbe805e8-866b-11e6-96bf-000f53315a41")
        self.assertEqual(self.snapshot.name, 'big-data-snapshot1475170902')
        self.assertEqual(self.snapshot.created_at, "2016-09-29T17:41:42Z")
        self.assertEqual(self.snapshot.size_gigabytes, 1.42)
        self.assertEqual(self.snapshot.min_disk_size, 20)

    @responses.activate
    def test_destroy(self):
        """destroy() issues DELETE /snapshots/<id>/."""
        responses.add(
            responses.DELETE,
            '{}snapshots/{}/'.format(self.base_url, self.snapshot.id),
            status=204, content_type='application/json')

        self.snapshot.destroy()

        self.assertEqual(
            responses.calls[0].request.url,
            self.base_url + 'snapshots/fbe805e8-866b-11e6-96bf-000f53315a41/')
# Allow running this test module directly (e.g. ``python test_snapshot.py``).
if __name__ == '__main__':
    unittest.main()
| 1,605 | Python | .py | 35 | 35.285714 | 106 | 0.633504 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,309 | test_project.py | koalalorenzo_python-digitalocean/digitalocean/tests/test_project.py | import unittest
import responses
import digitalocean
from .BaseTest import BaseTest
class TestProject(BaseTest):
    """Tests for the Project resource and its resource-assignment API."""

    def setUp(self):
        super(TestProject, self).setUp()

    @responses.activate
    def test_load(self):
        """load() GETs /projects/<id> and fills in every attribute."""
        self.project = digitalocean.Project(
            id='4e1bfbc3-dc3e-41f2-a18f-1b4d7ba71679',
            token=self.token)
        fixture = self.load_from_file('projects/retrieve.json')
        url = self.base_url + "projects/4e1bfbc3-dc3e-41f2-a18f-1b4d7ba71679"
        responses.add(responses.GET, url, body=fixture,
                      status=200, content_type='application/json')

        self.project.load()

        self.assert_get_url_equal(responses.calls[0].request.url, url)
        project = self.project
        self.assertEqual(project.id, '4e1bfbc3-dc3e-41f2-a18f-1b4d7ba71679')
        self.assertEqual(project.owner_uuid,
                         "99525febec065ca37b2ffe4f852fd2b2581895e7")
        self.assertEqual(project.owner_id, 2)
        self.assertEqual(project.name, "my-web-api")
        self.assertEqual(project.description, "My website API")
        self.assertEqual(project.purpose, "Service or API")
        self.assertEqual(project.environment, "Production")
        self.assertEqual(project.is_default, False)
        self.assertEqual(project.updated_at, "2018-09-27T20:10:35Z")
        self.assertEqual(project.created_at, "2018-09-27T20:10:35Z")

    @responses.activate
    def test_create_new_project(self):
        """create_project() POSTs /projects and stores the response fields."""
        fixture = self.load_from_file('projects/create.json')
        url = self.base_url + "projects"
        responses.add(responses.POST, url, body=fixture,
                      status=201, content_type='application/json')

        project = digitalocean.Project(token=self.token, name="my-web-api",
                                       purpose="Service or API",
                                       description="My website API",
                                       environment="Production")
        project.create_project()

        self.assertEqual(responses.calls[0].request.url, url)
        self.assertEqual(project.id, '4e1bfbc3-dc3e-41f2-a18f-1b4d7ba71679')
        self.assertEqual(project.owner_uuid,
                         '99525febec065ca37b2ffe4f852fd2b2581895e7')
        self.assertEqual(project.is_default, False)
        self.assertEqual(project.name, "my-web-api")
        self.assertEqual(project.description, "My website API")
        self.assertEqual(project.purpose, "Service or API")
        self.assertEqual(project.environment, "Production")
        self.assertEqual(project.updated_at, "2018-09-27T15:52:48Z")
        self.assertEqual(project.created_at, "2018-09-27T15:52:48Z")

    @responses.activate
    def test_update_project(self):
        """update_project() PUTs the new fields and refreshes the object."""
        fixture = self.load_from_file('projects/update.json')
        project = digitalocean.Project(
            token=self.token, id="4e1bfbc3-dc3e-41f2-a18f-1b4d7ba71679")
        url = self.base_url + "projects/" + project.id
        responses.add(responses.PUT, url, body=fixture,
                      status=200, content_type='application/json')

        project.update_project(name="my-web-api",
                               description="My website API",
                               purpose="Service or API",
                               environment="Staging",
                               is_default=False)

        self.assertEqual(responses.calls[0].request.url, url)
        self.assertEqual(project.is_default, False)
        self.assertEqual(project.name, "my-web-api")
        self.assertEqual(project.description, "My website API")
        self.assertEqual(project.purpose, "Service or API")
        self.assertEqual(project.environment, "Staging")

    @responses.activate
    def test_get_all_resources(self):
        """get_all_resources() GETs the project's resources as URN strings."""
        fixture = self.load_from_file('projects/project_resources.json')
        project = digitalocean.Project(
            token=self.token, id="4e1bfbc3-dc3e-41f2-a18f-1b4d7ba71679")
        url = self.base_url + 'projects/' + project.id + "/resources"
        responses.add(responses.GET, url, body=fixture,
                      status=200, content_type='application/json')

        resources = project.get_all_resources()

        self.assertEqual(len(resources), 1)
        self.assertEqual(resources[0], "do:droplet:1")

    @responses.activate
    def test_delete(self):
        """delete_project() issues DELETE /projects/<id>."""
        url = self.base_url + "projects/4e1bfbc3-dc3e-41f2-a18f-1b4d7ba71679"
        responses.add(responses.DELETE, url, status=204,
                      content_type='application/json')

        doomed = digitalocean.Project(
            token=self.token, id="4e1bfbc3-dc3e-41f2-a18f-1b4d7ba71679")
        doomed.delete_project()

        self.assertEqual(responses.calls[0].request.url, url)

    @responses.activate
    def test_update_default_project(self):
        """The magic id "default" addresses the account's default project."""
        fixture = self.load_from_file('projects/update.json')
        project = digitalocean.Project(token=self.token, id="default")
        url = self.base_url + "projects/" + project.id
        responses.add(responses.PUT, url, body=fixture,
                      status=200, content_type='application/json')

        project.update_project(name="my-web-api",
                               description="My website API",
                               purpose="Service or API",
                               environment="Staging",
                               is_default=False)

        self.assertEqual(responses.calls[0].request.url, url)
        self.assertEqual(project.is_default, False)
        self.assertEqual(project.name, "my-web-api")
        self.assertEqual(project.description, "My website API")
        self.assertEqual(project.purpose, "Service or API")
        self.assertEqual(project.environment, "Staging")

    @responses.activate
    def test_assign_resource(self):
        """assign_resource() POSTs the URN list and returns the result body."""
        fixture = self.load_from_file('projects/assign_resources.json')
        project = digitalocean.Project(
            token=self.token, id="4e1bfbc3-dc3e-41f2-a18f-1b4d7ba71679")
        url = self.base_url + 'projects/' + project.id + "/resources"
        responses.add(responses.POST, url, body=fixture,
                      status=200, content_type='application/json')

        payload = {
            "resources": ["do:droplet:1", "do:floatingip:192.168.99.100"]
        }
        result = project.assign_resource(payload)

        self.assertEqual(len(result['resources']), 2)
        self.assertEqual(result['resources'][0]['urn'], "do:droplet:1")
        self.assertEqual(result['resources'][1]['urn'],
                         "do:floatingip:192.168.99.100")

    @responses.activate
    def test_list_default_project_resources(self):
        """Resources of the default project can be listed via id="default"."""
        fixture = self.load_from_file('projects/project_resources.json')
        project = digitalocean.Project(token=self.token, id="default")
        url = self.base_url + 'projects/' + project.id + "/resources"
        responses.add(responses.GET, url, body=fixture,
                      status=200, content_type='application/json')

        resources = project.get_all_resources()

        self.assertEqual(len(resources), 1)
        self.assertEqual(resources[0], "do:droplet:1")

    @responses.activate
    def test_assign_resource_to_default_project(self):
        """Resources can be assigned to the default project via id="default"."""
        fixture = self.load_from_file('projects/assign_resources.json')
        project = digitalocean.Project(token=self.token, id="default")
        url = self.base_url + 'projects/' + project.id + "/resources"
        responses.add(responses.POST, url, body=fixture,
                      status=200, content_type='application/json')

        payload = {
            "resources": ["do:droplet:1", "do:floatingip:192.168.99.100"]
        }
        result = project.assign_resource(payload)

        self.assertEqual(len(result['resources']), 2)
        self.assertEqual(result['resources'][0]['urn'], "do:droplet:1")
        self.assertEqual(result['resources'][1]['urn'],
                         "do:floatingip:192.168.99.100")
# Allow running this test module directly (e.g. ``python test_project.py``).
if __name__ == '__main__':
    unittest.main()
| 9,116 | Python | .py | 178 | 36.994382 | 95 | 0.588506 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,310 | test_load_balancer.py | koalalorenzo_python-digitalocean/digitalocean/tests/test_load_balancer.py | import json
import unittest
import responses
import digitalocean
from .BaseTest import BaseTest
class TestLoadBalancer(BaseTest):
    def setUp(self):
        # Known fake identifiers reused throughout the load-balancer tests;
        # all HTTP traffic is intercepted by the `responses` library.
        super(TestLoadBalancer, self).setUp()
        self.lb_id = '4de7ac8b-495b-4884-9a69-1050c6793cd6'
        self.vpc_uuid = "c33931f2-a26a-4e61-b85c-4e95a2ec431b"
        self.lb = digitalocean.LoadBalancer(id=self.lb_id, token=self.token)
@responses.activate
def test_load(self):
data = self.load_from_file('loadbalancer/single.json')
url = self.base_url + 'load_balancers/' + self.lb_id
responses.add(responses.GET,
url,
body=data,
status=200,
content_type='application/json')
self.lb.load()
rules = self.lb.forwarding_rules
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(self.lb.id, self.lb_id)
self.assertEqual(self.lb.region['slug'], 'nyc3')
self.assertEqual(self.lb.size, 'lb-small')
self.assertEqual(self.lb.algorithm, 'round_robin')
self.assertEqual(self.lb.ip, '104.131.186.241')
self.assertEqual(self.lb.name, 'example-lb-01')
self.assertEqual(len(rules), 2)
self.assertEqual(rules[0].entry_protocol, 'http')
self.assertEqual(rules[0].entry_port, 80)
self.assertEqual(rules[0].target_protocol, 'http')
self.assertEqual(rules[0].target_port, 80)
self.assertEqual(rules[0].tls_passthrough, False)
self.assertEqual(self.lb.health_check.protocol, 'http')
self.assertEqual(self.lb.health_check.port, 80)
self.assertEqual(self.lb.sticky_sessions.type, 'none')
self.assertEqual(self.lb.droplet_ids, [3164444, 3164445])
self.assertEqual(self.lb.redirect_http_to_https, False)
self.assertEqual(self.lb.enable_proxy_protocol, False)
self.assertEqual(self.lb.enable_backend_keepalive, False)
self.assertEqual(self.lb.vpc_uuid, self.vpc_uuid)
@responses.activate
def test_create_ids(self):
data = self.load_from_file('loadbalancer/single.json')
url = self.base_url + "load_balancers"
responses.add(responses.POST,
url,
body=data,
status=201,
content_type='application/json')
rule1 = digitalocean.ForwardingRule(entry_port=80,
entry_protocol='http',
target_port=80,
target_protocol='http')
rule2 = digitalocean.ForwardingRule(entry_port=443,
entry_protocol='https',
target_port=443,
target_protocol='https',
tls_passthrough=True)
check = digitalocean.HealthCheck()
sticky = digitalocean.StickySessions(type='none')
lb = digitalocean.LoadBalancer(name='example-lb-01', region='nyc3',
algorithm='round_robin',
size='lb-small',
forwarding_rules=[rule1, rule2],
health_check=check,
sticky_sessions=sticky,
redirect_http_to_https=False,
droplet_ids=[3164444, 3164445],
vpc_uuid=self.vpc_uuid,
token=self.token).create()
resp_rules = lb.forwarding_rules
self.assert_url_query_equal(responses.calls[0].request.url, url)
self.assertEqual(lb.id, self.lb_id)
self.assertEqual(lb.algorithm, 'round_robin')
self.assertEqual(lb.ip, '104.131.186.241')
self.assertEqual(lb.name, 'example-lb-01')
self.assertEqual(lb.size, 'lb-small')
self.assertEqual(len(resp_rules), 2)
self.assertEqual(resp_rules[0].entry_protocol, 'http')
self.assertEqual(resp_rules[0].entry_port, 80)
self.assertEqual(resp_rules[0].target_protocol, 'http')
self.assertEqual(resp_rules[0].target_port, 80)
self.assertEqual(resp_rules[0].tls_passthrough, False)
self.assertEqual(lb.health_check.protocol, 'http')
self.assertEqual(lb.health_check.port, 80)
self.assertEqual(lb.sticky_sessions.type, 'none')
self.assertEqual(lb.droplet_ids, [3164444, 3164445])
self.assertEqual(lb.redirect_http_to_https, False)
self.assertEqual(lb.enable_proxy_protocol, False)
self.assertEqual(lb.enable_backend_keepalive, False)
self.assertEqual(lb.vpc_uuid, self.vpc_uuid)
@responses.activate
def test_create_tag(self):
data = self.load_from_file('loadbalancer/single_tag.json')
url = self.base_url + "load_balancers"
responses.add(responses.POST,
url,
body=data,
status=201,
content_type='application/json')
rule1 = digitalocean.ForwardingRule(entry_port=80,
entry_protocol='http',
target_port=80,
target_protocol='http')
rule2 = digitalocean.ForwardingRule(entry_port=443,
entry_protocol='https',
target_port=443,
target_protocol='https',
tls_passthrough=True)
check = digitalocean.HealthCheck()
sticky = digitalocean.StickySessions(type='none')
lb = digitalocean.LoadBalancer(name='example-lb-01', region='nyc3',
algorithm='round_robin',
size='lb-small',
forwarding_rules=[rule1, rule2],
health_check=check,
sticky_sessions=sticky,
redirect_http_to_https=False,
tag='web:prod',
vpc_uuid=self.vpc_uuid,
token=self.token).create()
resp_rules = lb.forwarding_rules
self.assertEqual(responses.calls[0].request.url,
self.base_url + 'load_balancers')
self.assertEqual(lb.id, '4de7ac8b-495b-4884-9a69-1050c6793cd6')
self.assertEqual(lb.algorithm, 'round_robin')
self.assertEqual(lb.ip, '104.131.186.248')
self.assertEqual(lb.name, 'example-lb-01')
self.assertEqual(lb.size, 'lb-small')
self.assertEqual(len(resp_rules), 2)
self.assertEqual(resp_rules[0].entry_protocol, 'http')
self.assertEqual(resp_rules[0].entry_port, 80)
self.assertEqual(resp_rules[0].target_protocol, 'http')
self.assertEqual(resp_rules[0].target_port, 80)
self.assertEqual(resp_rules[0].tls_passthrough, False)
self.assertEqual(lb.health_check.protocol, 'http')
self.assertEqual(lb.health_check.port, 80)
self.assertEqual(lb.sticky_sessions.type, 'none')
self.assertEqual(lb.tag, 'web:prod')
self.assertEqual(lb.droplet_ids, [3164444, 3164445])
self.assertEqual(lb.redirect_http_to_https, False)
self.assertEqual(lb.enable_proxy_protocol, False)
self.assertEqual(lb.enable_backend_keepalive, False)
self.assertEqual(lb.vpc_uuid, self.vpc_uuid)
@responses.activate
def test_create_exception(self):
data = self.load_from_file('loadbalancer/single_tag.json')
url = self.base_url + "load_balancers/"
responses.add(responses.POST,
url,
body=data,
status=201,
content_type='application/json')
rule = digitalocean.ForwardingRule(entry_port=80,
entry_protocol='http',
target_port=80,
target_protocol='http')
check = digitalocean.HealthCheck()
sticky = digitalocean.StickySessions(type='none')
lb = digitalocean.LoadBalancer(name='example-lb-01', region='nyc3',
algorithm='round_robin',
size='lb-small',
forwarding_rules=[rule],
health_check=check,
sticky_sessions=sticky,
redirect_http_to_https=False,
tag='web:prod',
droplet_ids=[123456, 789456],
vpc_uuid=self.vpc_uuid,
token=self.token)
with self.assertRaises(ValueError) as context:
lb.create()
self.assertEqual('droplet_ids and tag are mutually exclusive args',
str(context.exception))
@responses.activate
def test_save(self):
data1 = self.load_from_file('loadbalancer/single.json')
url = '{0}load_balancers/{1}'.format(self.base_url, self.lb_id)
responses.add(responses.GET,
url,
body=data1,
status=200,
content_type='application/json')
self.lb.load()
rules = self.lb.forwarding_rules
self.assert_get_url_equal(responses.calls[0].request.url, url)
self.assertEqual(self.lb.id, self.lb_id)
self.assertEqual(self.lb.region['slug'], 'nyc3')
self.assertEqual(self.lb.algorithm, 'round_robin')
self.assertEqual(self.lb.ip, '104.131.186.241')
self.assertEqual(self.lb.name, 'example-lb-01')
self.assertEqual(len(rules), 2)
self.assertEqual(rules[0].entry_protocol, 'http')
self.assertEqual(rules[0].entry_port, 80)
self.assertEqual(rules[0].target_protocol, 'http')
self.assertEqual(rules[0].target_port, 80)
self.assertEqual(rules[0].tls_passthrough, False)
self.assertEqual(rules[1].entry_protocol, 'https')
self.assertEqual(rules[1].entry_port, 444)
self.assertEqual(rules[1].target_protocol, 'https')
self.assertEqual(rules[1].target_port, 443)
self.assertEqual(rules[1].tls_passthrough, True)
self.assertEqual(self.lb.health_check.protocol, 'http')
self.assertEqual(self.lb.health_check.port, 80)
self.assertEqual(self.lb.health_check.path, '/')
self.assertEqual(self.lb.health_check.check_interval_seconds, 10)
self.assertEqual(self.lb.health_check.response_timeout_seconds, 5)
self.assertEqual(self.lb.health_check.healthy_threshold, 5)
self.assertEqual(self.lb.health_check.unhealthy_threshold, 3)
self.assertEqual(self.lb.sticky_sessions.type, 'none')
self.assertEqual(self.lb.droplet_ids, [3164444, 3164445])
self.assertEqual(self.lb.tag, '')
self.assertEqual(self.lb.redirect_http_to_https, False)
self.assertEqual(self.lb.enable_proxy_protocol, False)
self.assertEqual(self.lb.enable_backend_keepalive, False)
self.assertEqual(self.lb.vpc_uuid, self.vpc_uuid)
data2 = self.load_from_file('loadbalancer/save.json')
url = '{0}load_balancers/{1}'.format(self.base_url, self.lb_id)
responses.add(responses.PUT,
url,
body=data2,
status=202,
content_type='application/json')
self.lb.algorithm = 'least_connections'
self.lb.sticky_sessions.type = 'cookies'
self.lb.sticky_sessions.cookie_name = 'DO_LB'
self.lb.sticky_sessions.cookie_ttl_seconds = 300
self.lb.droplet_ids = [34153248, 34153250]
self.lb.vpc_uuid = self.vpc_uuid
self.lb.redirect_http_to_https = True
self.lb.enable_proxy_protocol = True
self.lb.enable_backend_keepalive = True
res = self.lb.save()
lb = digitalocean.LoadBalancer(**res['load_balancer'])
lb.health_check = digitalocean.HealthCheck(**res['load_balancer']['health_check'])
lb.sticky_sessions = digitalocean.StickySessions(**res['load_balancer']['sticky_sessions'])
rules = list()
for rule in lb.forwarding_rules:
rules.append(digitalocean.ForwardingRule(**rule))
self.assertEqual(lb.id, self.lb_id)
self.assertEqual(lb.region['slug'], 'nyc3')
self.assertEqual(lb.algorithm, 'least_connections')
self.assertEqual(lb.ip, '104.131.186.241')
self.assertEqual(lb.name, 'example-lb-01')
self.assertEqual(len(rules), 2)
self.assertEqual(rules[0].entry_protocol, 'http')
self.assertEqual(rules[0].entry_port, 80)
self.assertEqual(rules[0].target_protocol, 'http')
self.assertEqual(rules[0].target_port, 80)
self.assertEqual(rules[0].tls_passthrough, False)
self.assertEqual(rules[1].entry_protocol, 'https')
self.assertEqual(rules[1].entry_port, 444)
self.assertEqual(rules[1].target_protocol, 'https')
self.assertEqual(rules[1].target_port, 443)
self.assertEqual(rules[1].tls_passthrough, True)
self.assertEqual(lb.health_check.protocol, 'http')
self.assertEqual(lb.health_check.port, 80)
self.assertEqual(lb.health_check.path, '/')
self.assertEqual(lb.health_check.check_interval_seconds, 10)
self.assertEqual(lb.health_check.response_timeout_seconds, 5)
self.assertEqual(lb.health_check.healthy_threshold, 5)
self.assertEqual(lb.health_check.unhealthy_threshold, 3)
self.assertEqual(lb.sticky_sessions.type, 'cookies')
self.assertEqual(lb.sticky_sessions.cookie_name, 'DO_LB')
self.assertEqual(lb.sticky_sessions.cookie_ttl_seconds, 300)
self.assertEqual(lb.droplet_ids, [34153248, 34153250])
self.assertEqual(lb.tag, '')
self.assertEqual(lb.redirect_http_to_https, True)
self.assertEqual(lb.enable_proxy_protocol, True)
self.assertEqual(lb.enable_backend_keepalive, True)
self.assertEqual(self.lb.vpc_uuid, self.vpc_uuid)
@responses.activate
def test_destroy(self):
url = '{0}load_balancers/{1}'.format(self.base_url, self.lb_id)
responses.add(responses.DELETE,
url,
status=204,
content_type='application/json')
self.lb.destroy()
self.assertEqual(responses.calls[0].request.url, url)
@responses.activate
def test_add_droplets(self):
url = '{0}load_balancers/{1}/droplets'.format(self.base_url,
self.lb_id)
responses.add(responses.POST,
url,
status=204,
content_type='application/json')
self.lb.add_droplets([12345, 78945])
body = '{"droplet_ids": [12345, 78945]}'
self.assertEqual(responses.calls[0].request.url, url)
self.assertEqual(responses.calls[0].request.body, body)
@responses.activate
def test_remove_droplets(self):
url = '{0}load_balancers/{1}/droplets'.format(self.base_url,
self.lb_id)
responses.add(responses.DELETE,
url,
status=204,
content_type='application/json')
self.lb.remove_droplets([12345, 78945])
body = '{"droplet_ids": [12345, 78945]}'
self.assertEqual(responses.calls[0].request.url, url)
self.assertEqual(responses.calls[0].request.body, body)
@responses.activate
def test_add_forwarding_rules(self):
url = '{0}load_balancers/{1}/forwarding_rules'.format(self.base_url,
self.lb_id)
responses.add(responses.POST,
url,
status=204,
content_type='application/json')
rule = digitalocean.ForwardingRule(entry_port=3306,
entry_protocol='tcp',
target_port=3306,
target_protocol='tcp')
self.lb.add_forwarding_rules([rule])
req_body = json.loads("""{
"forwarding_rules": [
{
"entry_protocol": "tcp",
"entry_port": 3306,
"target_protocol": "tcp",
"target_port": 3306,
"certificate_id": "",
"tls_passthrough": false
}
]
}""")
body = json.loads(responses.calls[0].request.body)
self.assertEqual(responses.calls[0].request.url, url)
self.assertEqual(sorted(body.items()), sorted(req_body.items()))
@responses.activate
def test_remove_forwarding_rules(self):
url = '{0}load_balancers/{1}/forwarding_rules'.format(self.base_url,
self.lb_id)
responses.add(responses.DELETE,
url,
status=204,
content_type='application/json')
rule = digitalocean.ForwardingRule(entry_port=3306,
entry_protocol='tcp',
target_port=3306,
target_protocol='tcp')
self.lb.remove_forwarding_rules([rule])
req_body = json.loads("""{
"forwarding_rules": [
{
"entry_protocol": "tcp",
"entry_port": 3306,
"target_protocol": "tcp",
"target_port": 3306,
"certificate_id": "",
"tls_passthrough": false
}
]
}""")
body = json.loads(responses.calls[0].request.body)
self.assertEqual(responses.calls[0].request.url, url)
self.assertEqual(sorted(body.items()), sorted(req_body.items()))
# Allow running this test module directly (``python <module>.py``) in
# addition to running it through a test runner.
if __name__ == '__main__':
    unittest.main()
| 18,560 | Python | .py | 365 | 35.534247 | 99 | 0.563461 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,311 | test_tag.py | koalalorenzo_python-digitalocean/digitalocean/tests/test_tag.py | import unittest
import responses
import digitalocean
import json
from .BaseTest import BaseTest
class TestTags(BaseTest):
    """Tests for the ``digitalocean.Tag`` resource.

    HTTP traffic is stubbed with ``responses``; all tests operate on a
    tag named 'awesome'.
    """

    def setUp(self):
        super(TestTags, self).setUp()

    def _make_tag(self):
        """Return a Tag named 'awesome' bound to the test token."""
        return digitalocean.Tag(name='awesome', token=self.token)

    @responses.activate
    def test_load(self):
        """GET /tags/<name> populates the tag's attributes."""
        url = self.base_url + "tags/awesome"
        responses.add(responses.GET,
                      url,
                      body=self.load_from_file('tags/single.json'),
                      status=200,
                      content_type='application/json')
        tag = self._make_tag()
        tag.load()
        self.assert_get_url_equal(responses.calls[0].request.url, url)
        self.assertEqual(tag.name, "awesome")

    @responses.activate
    def test_create(self):
        """POST /tags creates the tag."""
        url = self.base_url + "tags"
        responses.add(responses.POST,
                      url,
                      body=self.load_from_file('tags/single.json'),
                      status=201,
                      content_type='application/json')
        tag = self._make_tag()
        tag.create()
        self.assertEqual(responses.calls[0].request.url, url)
        self.assertEqual(tag.name, "awesome")

    @responses.activate
    def test_delete(self):
        """DELETE /tags/<name> removes the tag."""
        url = self.base_url + "tags/awesome"
        responses.add(responses.DELETE,
                      url,
                      status=204,
                      content_type='application/json')
        tag = self._make_tag()
        tag.delete()
        self.assertEqual(responses.calls[0].request.url, url)
        self.assertEqual(tag.name, "awesome")

    @responses.activate
    def test_add_droplets(self):
        """Tagging a droplet POSTs to the tag's resources endpoint."""
        url = self.base_url + "tags/awesome/resources"
        responses.add(responses.POST,
                      url,
                      status=204,
                      content_type='application/json')
        self._make_tag().add_droplets(["9569411"])
        self.assertEqual(responses.calls[0].request.url, url)

    @responses.activate
    def test_remove_droplets(self):
        """Untagging a droplet sends DELETE to the resources endpoint."""
        url = self.base_url + "tags/awesome/resources"
        responses.add(responses.DELETE,
                      url,
                      status=204,
                      content_type='application/json')
        self._make_tag().remove_droplets(["9569411"])
        self.assertEqual(responses.calls[0].request.url, url)

    @responses.activate
    def test_add_volume_snapshots(self):
        """Tagging a snapshot POSTs to the tag's resources endpoint."""
        url = self.base_url + "tags/awesome/resources"
        responses.add(responses.POST,
                      url,
                      status=204,
                      content_type='application/json')
        self._make_tag().add_snapshots(["9569411"])
        self.assertEqual(responses.calls[0].request.url, url)

    @responses.activate
    def test_remove_volume_snapshots(self):
        """Untagging a snapshot sends DELETE to the resources endpoint."""
        url = self.base_url + "tags/awesome/resources"
        responses.add(responses.DELETE,
                      url,
                      status=204,
                      content_type='application/json')
        self._make_tag().remove_snapshots(["9569411"])
        self.assertEqual(responses.calls[0].request.url, url)
# Allow running this test module directly (``python <module>.py``) in
# addition to running it through a test runner.
if __name__ == '__main__':
    unittest.main()
| 4,000 | Python | .py | 94 | 29.765957 | 72 | 0.572462 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,312 | test_baseapi.py | koalalorenzo_python-digitalocean/digitalocean/tests/test_baseapi.py | import os
from digitalocean.baseapi import BaseAPI
try:
import mock
except ImportError:
from unittest import mock
import random
import responses
import requests
import digitalocean
from .BaseTest import BaseTest
class TestBaseAPI(BaseTest):
    """Tests for shared ``BaseAPI`` behaviour (user agent, session,
    endpoint override, and HTTP error handling)."""

    def setUp(self):
        """Build a Manager and the expected User-Agent header value."""
        super(TestBaseAPI, self).setUp()
        self.manager = digitalocean.Manager(token=self.token)
        # Mirrors the format BaseAPI uses:
        # "python-digitalocean/<version> requests/<version>".
        self.user_agent = "{0}/{1} {2}/{3}".format('python-digitalocean',
                                                   digitalocean.__version__,
                                                   requests.__name__,
                                                   requests.__version__)

    @responses.activate
    def test_user_agent(self):
        """Outgoing requests carry the library's User-Agent header."""
        data = self.load_from_file('account/account.json')
        url = self.base_url + 'account/'
        responses.add(responses.GET, url,
                      body=data,
                      status=200,
                      content_type='application/json')
        self.manager.get_account()
        self.assertEqual(responses.calls[0].request.headers['User-Agent'],
                         self.user_agent)

    @responses.activate
    def test_customize_session(self):
        """The underlying requests session can be customized (e.g. proxies)
        without breaking API calls."""
        data = self.load_from_file('account/account.json')
        url = self.base_url + 'account/'
        responses.add(responses.GET, url,
                      body=data,
                      status=200,
                      content_type='application/json')
        # Only verifies the call still succeeds with a proxy configured;
        # responses intercepts before any real proxying happens.
        self.manager._session.proxies['https'] = 'https://127.0.0.1:3128'
        self.manager.get_account()

    def test_custom_endpoint(self):
        """DIGITALOCEAN_END_POINT overrides the default API endpoint."""
        custom_endpoint = 'http://example.com/'
        with mock.patch.dict(os.environ,
                             {'DIGITALOCEAN_END_POINT': custom_endpoint},
                             clear=True):
            base_api = digitalocean.baseapi.BaseAPI()
            self.assertEqual(base_api.end_point, custom_endpoint)

    def test_invalid_custom_endpoint(self):
        """A malformed DIGITALOCEAN_END_POINT raises EndPointError."""
        custom_endpoint = 'not a valid endpoint'
        with mock.patch.dict(os.environ,
                             {'DIGITALOCEAN_END_POINT': custom_endpoint},
                             clear=True):
            self.assertRaises(digitalocean.EndPointError, digitalocean.baseapi.BaseAPI)

    def test_get_data_error_response_no_body(self):
        """4xx/5xx responses with an empty body raise requests.HTTPError.

        Patches the private ``__perform_request`` via its name-mangled
        attribute ``_BaseAPI__perform_request``.
        """
        with mock.patch.object(self.manager, '_BaseAPI__perform_request') as mock_4xx_response:
            mock_4xx_response.return_value = requests.Response()
            mock_4xx_response.return_value._content = b''
            mock_4xx_response.return_value.status_code = random.randint(400, 499) # random 4xx status code
            self.assertRaises(requests.HTTPError, self.manager.get_data, 'test')
        with mock.patch.object(self.manager, '_BaseAPI__perform_request') as mock_5xx_response:
            mock_5xx_response.return_value = requests.Response()
            mock_5xx_response.return_value._content = b''
            mock_5xx_response.return_value.status_code = random.randint(500, 599) # random 5xx status code
            self.assertRaises(requests.HTTPError, self.manager.get_data, 'test')
| 3,176 | Python | .py | 64 | 36.3125 | 106 | 0.601107 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,313 | test_cdn_endpoint.py | koalalorenzo_python-digitalocean/digitalocean/tests/test_cdn_endpoint.py | import json
import unittest
import responses
import digitalocean
from .BaseTest import BaseTest
class TestCDNRecord(BaseTest):
    """Tests for the ``digitalocean.CDNEndpoint`` resource."""

    # Endpoint id used by every fixture under 'cdn_endpoints/'.
    ENDPOINT_ID = '19f06b6a-3ace-4315-b086-499a0e521b76'

    def setUp(self):
        super(TestCDNRecord, self).setUp()
        self.endpoint_url = self.base_url + "cdn/endpoints/" + self.ENDPOINT_ID

    @responses.activate
    def test_load(self):
        """GET /cdn/endpoints/<id> populates every attribute."""
        responses.add(responses.GET,
                      self.endpoint_url,
                      body=self.load_from_file('cdn_endpoints/single.json'),
                      status=200,
                      content_type='application/json')
        endpoint = digitalocean.CDNEndpoint(id=self.ENDPOINT_ID,
                                            token=self.token).load()
        self.assertEqual(endpoint.id, '19f06b6a-3ace-4315-b086-499a0e521b76')
        self.assertEqual(endpoint.origin,
                         'static-images.nyc3.digitaloceanspaces.com')
        self.assertEqual(endpoint.endpoint,
                         'static-images.nyc3.cdn.digitaloceanspaces.com')
        self.assertEqual(endpoint.created_at, '2018-07-19T15:04:16Z')
        self.assertEqual(endpoint.ttl, 3600)

    @responses.activate
    def test_create(self):
        """POST /cdn/endpoints creates the endpoint."""
        create_url = self.base_url + "cdn/endpoints"
        responses.add(responses.POST,
                      create_url,
                      body=self.load_from_file('cdn_endpoints/single.json'),
                      status=201,
                      content_type='application/json')
        endpoint = digitalocean.CDNEndpoint(
            origin='static-images.nyc3.digitaloceanspaces.com',
            token=self.token)
        endpoint.create()
        self.assertEqual(responses.calls[0].request.url, create_url)
        self.assertEqual(endpoint.origin,
                         "static-images.nyc3.digitaloceanspaces.com")

    @responses.activate
    def test_delete(self):
        """DELETE /cdn/endpoints/<id> removes the endpoint."""
        responses.add(responses.DELETE,
                      self.endpoint_url,
                      status=204,
                      content_type='application/json')
        endpoint = digitalocean.CDNEndpoint(id=self.ENDPOINT_ID,
                                            token=self.token)
        endpoint.delete()
        self.assertEqual(responses.calls[0].request.url, self.endpoint_url)
        self.assertEqual(endpoint.id, '19f06b6a-3ace-4315-b086-499a0e521b76')

    @responses.activate
    def test_save(self):
        """PUT /cdn/endpoints/<id> persists an updated TTL."""
        responses.add(responses.GET,
                      self.endpoint_url,
                      body=self.load_from_file('cdn_endpoints/single.json'),
                      status=200,
                      content_type='application/json')
        endpoint = digitalocean.CDNEndpoint(id=self.ENDPOINT_ID,
                                            token=self.token).load()
        self.assertEqual(endpoint.ttl, 3600)
        endpoint.ttl = 60
        responses.add(responses.PUT,
                      self.endpoint_url,
                      body=self.load_from_file('cdn_endpoints/update.json'),
                      status=200,
                      content_type='application/json')
        endpoint.save()
        self.assertEqual(endpoint.ttl, 60)
# Allow running this test module directly (``python <module>.py``) in
# addition to running it through a test runner.
if __name__ == '__main__':
    unittest.main()
| 3,377 | Python | .py | 72 | 34.805556 | 117 | 0.612295 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,314 | lets_encrpyt.json | koalalorenzo_python-digitalocean/digitalocean/tests/data/certificate/lets_encrpyt.json | {
"certificate": {
"id": "ba9b9c18-6c59-46c2-99df-70da170a42ba",
"name": "web-cert-02",
"not_after": "2018-06-07T17:44:12Z",
"sha1_fingerprint": "479c82b5c63cb6d3e6fac4624d58a33b267e166c",
"created_at": "2018-03-09T18:44:11Z",
"dns_names": ["www.example.com","example.com"],
"state": "pending",
"type": "lets_encrypt"
}
} | 391 | Python | .py | 12 | 25.833333 | 71 | 0.578947 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,315 | lets_encrpyt.json | koalalorenzo_python-digitalocean/digitalocean/tests/data/certificate/lets_encrpyt.json | {
"certificate": {
"id": "ba9b9c18-6c59-46c2-99df-70da170a42ba",
"name": "web-cert-02",
"not_after": "2018-06-07T17:44:12Z",
"sha1_fingerprint": "479c82b5c63cb6d3e6fac4624d58a33b267e166c",
"created_at": "2018-03-09T18:44:11Z",
"dns_names": ["www.example.com","example.com"],
"state": "pending",
"type": "lets_encrypt"
}
} | 391 | Python | .pyt | 12 | 25.833333 | 71 | 0.578947 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,316 | lets_encrpyt.json | koalalorenzo_python-digitalocean/digitalocean/tests/data/certificate/lets_encrpyt.json | {
"certificate": {
"id": "ba9b9c18-6c59-46c2-99df-70da170a42ba",
"name": "web-cert-02",
"not_after": "2018-06-07T17:44:12Z",
"sha1_fingerprint": "479c82b5c63cb6d3e6fac4624d58a33b267e166c",
"created_at": "2018-03-09T18:44:11Z",
"dns_names": ["www.example.com","example.com"],
"state": "pending",
"type": "lets_encrypt"
}
} | 391 | Python | .rpy | 12 | 25.833333 | 71 | 0.578947 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,317 | detach.json | koalalorenzo_python-digitalocean/digitalocean/tests/data/volumes/detach.json | {
"action": {
"id": 68212773,
"status": "in-progress",
"type": "detach_volume",
"started_at": "2015-10-15T17:46:15Z",
"completed_at": null,
"resource_id": null,
"resource_type": "backend",
"region": {
"name": "New York 1",
"slug": "nyc1",
"sizes": [
"512mb",
"1gb",
"2gb",
"4gb",
"8gb",
"16gb",
"32gb",
"48gb",
"64gb"
],
"features": [
"private_networking",
"backups",
"ipv6",
"metadata"
],
"available": true
},
"region_slug": "nyc1"
}
} | 626 | Python | .tac | 34 | 11.852941 | 41 | 0.431703 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,318 | attach.json | koalalorenzo_python-digitalocean/digitalocean/tests/data/volumes/attach.json | {
"action": {
"id": 72531856,
"status": "completed",
"type": "attach_volume",
"started_at": "2015-11-12T17:51:03Z",
"completed_at": "2015-11-12T17:51:14Z",
"resource_id": null,
"resource_type": "volume",
"region": {
"name": "New York 1",
"slug": "nyc1",
"sizes": [
"1gb",
"2gb",
"4gb",
"8gb",
"32gb",
"64gb",
"512mb",
"48gb",
"16gb"
],
"features": [
"private_networking",
"backups",
"ipv6",
"metadata"
],
"available": true
},
"region_slug": "nyc1"
}
} | 641 | Python | .tac | 34 | 12.294118 | 43 | 0.4375 | koalalorenzo/python-digitalocean | 1,252 | 301 | 11 | LGPL-3.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,319 | setup.py | zenodo_zenodo/setup.py | #
# This file is part of Zenodo.
# Copyright (C) 2015-2019 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo - Research. Shared."""
import os
from setuptools import find_packages, setup
# Long description shown on PyPI: README followed by the changelog.
# Use context managers so the file handles are closed deterministically
# (the previous open(...).read() pattern leaked the handles and emits
# ResourceWarning under warnings-as-errors).
with open('README.rst') as readme_file:
    readme = readme_file.read()
with open('CHANGES.rst') as history_file:
    history = history_file.read()
# Requirements needed only when running the test suite.
tests_require = [
    'check-manifest>=0.35',
    'coverage>=4.4.1',
    'isort>=4.3.4',
    'pydocstyle>=2.0.0',
    'pytest-cache>=1.0',
    'pytest-cov>=2.5.1',
    'pytest-flask>=0.10.0,<1.0.0',
    'pytest-mock>=1.6.0',
    'pytest-pep8>=1.0.6',
    'pytest>=3.7.0',
    'selenium>=3.5.0,<3.6.0',
    'docutils>=0.11,<0.17.1',
]

# Optional feature sets, installable as ``pip install zenodo[<name>]``.
extras_require = {
    'docs': [
        'Sphinx>=1.5,<1.6',
    ],
    'tests': tests_require,
}

# ``all`` bundles every optional feature set declared above.
extras_require['all'] = [
    req for reqs in extras_require.values() for req in reqs
]

# Deliberately excluded from ``all``: XRootD support is opt-in only.
extras_require['xrootd'] = [
    'invenio-xrootd>=1.0.0a6',
    'xrootdpyfs>=0.1.6,<0.2.0',
]
# Requirements needed at build time (before install_requires resolves).
setup_requires = [
    'Babel>=2.6.0',
    'pytest-runner>=2.7.0',
]

# Runtime dependencies. Mostly Invenio framework modules plus supporting
# libraries; version pins carry compatibility constraints.
install_requires = [
    'arrow>=0.13.0',
    'bleach>=3.1.0',
    'CairoSVG>=1.0.22,<2.0.0',
    'citeproc-py-styles>=0.1.3',
    'citeproc-py>=0.5.1',
    'datacite>=1.0.1',
    'dcxml>=0.1.1',
    'dojson>=1.3.2',
    'Flask-Admin>=1.5.3',
    'Flask-BabelEx>=0.9.4',
    'Flask-Caching>=1.6.0',
    'Flask-Debugtoolbar>=0.10.1',
    'Flask>=1.0.2',
    'ftfy>=4.4.3,<5',
    'httpretty>=0.9.6',
    'idutils>=1.1.5',
    'invenio-access>=1.1.0',
    'invenio-accounts>=1.1.1',
    'invenio-admin>=1.0.1,<1.1.0',
    'invenio-app>=1.2.2',
    'invenio-assets>=1.0.0,<1.1.0',
    'invenio-banners>=1.0.0a0',
    'invenio-base>=1.2.0',
    'invenio-cache>=1.0.0',
    'invenio-celery>=1.0.1',
    'invenio-communities>=1.0.0a29',
    'invenio-config>=1.0.1',
    'invenio-csl-rest>=1.0.0a1',
    'invenio-db[postgresql,versioning]>=1.0.4',
    'invenio-deposit>=1.0.0a11',
    'invenio-files-rest>=1.0.0a23.post3',
    'invenio-formatter>=1.0.1',
    'invenio-github>=1.0.0a28',
    'invenio-i18n>=1.0.0,<1.1.0',
    'invenio-iiif>=1.0.0a5',
    'invenio-indexer>=1.1.4',
    'invenio-jsonschemas>=1.0.0',
    'invenio-logging>=1.1.1',
    'invenio-mail>=1.0.2',
    'invenio-marc21>=1.0.0a8',
    'invenio-oaiserver>=1.1.1.post1',
    'invenio-oauth2server>=1.0.3',
    'invenio-oauthclient[github]>=1.1.2',
    'invenio-openaire>=1.0.0a16',
    'invenio-opendefinition>=1.0.0a12',
    'invenio-pidrelations==1.0.0a4', # next versions require upgrades
    'invenio-pidstore>=1.1.1',
    'invenio-previewer>=1.0.0a11',
    'invenio-queues>=1.0.0a2',
    'invenio-records-files>=1.0.0a11.post1',
    'invenio-records-rest>=1.6.6',
    'invenio-records-ui>=1.0.1',
    'invenio-records>=1.3.0',
    'invenio-rest>=1.1.3',
    'invenio-search[elasticsearch7]>=1.2.2',
    'invenio-search-ui>=1.0.1,<1.1.0',
    'invenio-sipstore>=1.0.0a7',
    'invenio-stats==1.0.0a14.post3',
    'invenio-theme>=1.0.0,<1.1.0',
    'invenio-userprofiles>=1.0.1',
    'invenio-webhooks>=1.0.0a4',
    'joblib>=0.14.1',
    'jsonref>=0.1',
    'jsonresolver>=0.2.1',
    'mock>=2.0.0',
    'numpy>=1.16.6',
    'Pillow>=6.2.2',
    'pycountry>=18.12.8',
    'pykerberos>=1.2.1',
    'python-slugify>=3.0.1',
    'raven>=6.10.0',
    'requests-kerberos>=0.12.0',
    'sickle>=0.6.4',
    'scikit-learn>=0.20.4',
    'scipy>=1.2.3',
    'uwsgi>=2.0.18',
    'uwsgitop>=0.11',
    'wsgi-statsd>=0.3.1',
    'zenodo-accessrequests>=1.0.0a6',
]
packages = find_packages()

# Read the version string from zenodo/version.py without importing the
# package (importing would pull in all runtime dependencies).
version_ns = {}
with open(os.path.join('zenodo', 'version.py'), 'rt') as version_file:
    exec(version_file.read(), version_ns)
version = version_ns['__version__']
setup(
name='zenodo',
version=version,
description=__doc__,
long_description=readme + '\n\n' + history,
keywords='zenodo research data repository',
license='GPLv2',
author='CERN',
author_email='info@zenodo.org',
url='https://github.com/zenodo/zenodo',
packages=packages,
zip_safe=False,
include_package_data=True,
platforms='any',
entry_points={
'console_scripts': [
'zenodo = invenio_app.cli:cli',
],
'flask.commands': [
'audit = zenodo.modules.auditor.cli:audit',
'github = zenodo.modules.github.cli:github',
'stats = zenodo.modules.stats.cli:stats',
'utils = zenodo.modules.utils.cli:utils',
],
'invenio_admin.views': [(
'zenodo_update_datacite ='
'zenodo.modules.records.admin:updatedatacite_adminview'
)
],
'invenio_base.apps': [
'zenodo_auditor = zenodo.modules.auditor.ext:ZenodoAuditor',
'zenodo_communities = '
'zenodo.modules.communities.ext:ZenodoCommunities',
'zenodo_fixtures = zenodo.modules.fixtures.ext:ZenodoFixtures',
'zenodo_sitemap = zenodo.modules.sitemap.ext:ZenodoSitemap',
'zenodo_support = zenodo.modules.support.ext:ZenodoSupport',
'zenodo_records = zenodo.modules.records.ext:ZenodoRecords',
'zenodo_deposit = zenodo.modules.deposit.ext:ZenodoDeposit',
'zenodo_jsonschemas = '
'zenodo.modules.jsonschemas.ext:ZenodoJSONSchemas',
'zenodo_openaire = zenodo.modules.openaire.ext:ZenodoOpenAIRE',
'zenodo_exporter = zenodo.modules.exporter.ext:InvenioExporter',
'zenodo_frontpage = zenodo.modules.frontpage.ext:ZenodoFrontpage',
'zenodo_stats = zenodo.modules.stats.ext:ZenodoStats',
'zenodo_theme = zenodo.modules.theme.ext:ZenodoTheme',
'zenodo_tokens = zenodo.modules.tokens.ext:ResourceAccessTokens',
'zenodo_spam = zenodo.modules.spam.ext:ZenodoSpam',
'zenodo_metrics = zenodo.modules.metrics.ext:ZenodoMetrics',
],
'invenio_base.api_apps': [
'zenodo_communities = '
'zenodo.modules.communities.ext:ZenodoCommunities',
'zenodo_deposit = zenodo.modules.deposit.ext:ZenodoDeposit',
'zenodo_openaire = zenodo.modules.openaire.ext:ZenodoOpenAIRE',
'zenodo_records = zenodo.modules.records.ext:ZenodoRecords',
'zenodo_exporter = zenodo.modules.exporter.ext:InvenioExporter',
'zenodo_tokens = zenodo.modules.tokens.ext:ResourceAccessTokens',
'zenodo_spam = zenodo.modules.spam.ext:ZenodoSpam',
'zenodo_metrics = zenodo.modules.metrics.ext:ZenodoMetrics',
],
'invenio_base.blueprints': [
'zenodo_communities = zenodo.modules.communities.views:blueprint',
'zenodo_deposit = zenodo.modules.deposit.views:blueprint',
'zenodo_frontpage = zenodo.modules.frontpage.views:blueprint',
'zenodo_openaire = zenodo.modules.openaire.views:blueprint',
'zenodo_support = zenodo.modules.support.views:blueprint',
'zenodo_redirector = zenodo.modules.redirector.views:blueprint',
'zenodo_search_ui = zenodo.modules.search_ui.views:blueprint',
'zenodo_theme = zenodo.modules.theme.views:blueprint',
'zenodo_spam = zenodo.modules.spam.views:blueprint',
'zenodo_sitemap = zenodo.modules.sitemap.views:blueprint',
],
'invenio_base.api_blueprints': [
'zenodo_rest = zenodo.modules.rest.views:blueprint',
'zenodo_deposit = zenodo.modules.deposit.views_rest:blueprint',
'zenodo_metrics = zenodo.modules.metrics.views:blueprint',
],
'invenio_base.api_converters': [
'file_key = zenodo.modules.deposit.utils:FileKeyConverter',
],
'invenio_i18n.translations': [
'messages = zenodo',
],
'invenio_celery.tasks': [
'zenodo_auditor = zenodo.modules.auditor.tasks',
'zenodo_records = zenodo.modules.records.tasks',
'zenodo_utils = zenodo.modules.utils.tasks',
'zenodo_sipstore = zenodo.modules.sipstore.tasks',
'zenodo_sitemap = zenodo.modules.sitemap.tasks',
'zenodo_exporter = zenodo.modules.exporter.tasks',
'zenodo_stats = zenodo.modules.stats.tasks',
'zenodo_communities = zenodo.modules.communities.tasks',
'zenodo_metrics = zenodo.modules.metrics.tasks',
],
'invenio_config.module': [
'zenodo = zenodo.config',
],
'invenio_pidstore.minters': [
'zenodo_record_minter '
'= zenodo.modules.records.minters:zenodo_record_minter',
'zenodo_deposit_minter '
'= zenodo.modules.deposit.minters:zenodo_deposit_minter',
],
'invenio_pidstore.fetchers': [
'zenodo_record_fetcher '
'= zenodo.modules.records.fetchers:zenodo_record_fetcher',
'zenodo_deposit_fetcher '
'= zenodo.modules.deposit.fetchers:zenodo_deposit_fetcher',
'zenodo_doi_fetcher '
'= zenodo.modules.records.fetchers:zenodo_doi_fetcher',
],
'invenio_assets.bundles': [
'zenodo_deposit_js = zenodo.modules.deposit.bundles:js_deposit',
'zenodo_theme_css = zenodo.modules.theme.bundles:css',
'zenodo_theme_js = zenodo.modules.theme.bundles:js',
'zenodo_search_js = zenodo.modules.theme.bundles:search_js',
],
'invenio_jsonschemas.schemas': [
'zenodo_records = zenodo.modules.records.jsonschemas',
'zenodo_deposit = zenodo.modules.deposit.jsonschemas',
'zenodo_sipstore = zenodo.modules.sipstore.jsonschemas',
],
'invenio_search.mappings': [
'records = zenodo.modules.records.mappings',
'deposits = zenodo.modules.deposit.mappings',
],
'invenio_oauth2server.scopes': [
('deposit_extra_formats = '
'zenodo.modules.deposit.scopes:extra_formats_scope'),
('tokens_generate = '
'zenodo.modules.tokens.scopes:tokens_generate_scope'),
],
'dojson.contrib.to_marc21': [
'zenodo = zenodo.modules.records.serializers.to_marc21.rules',
],
"invenio_db.alembic": ["zenodo_spam = zenodo.modules.spam:alembic"],
"invenio_db.models": ["zenodo_spam = zenodo.modules.spam.models"],
},
extras_require=extras_require,
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_require,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Development Status :: 3 - Alpha',
],
)
| 11,949 | Python | .py | 303 | 32.108911 | 78 | 0.625828 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,320 | conf.py | zenodo_zenodo/docs/conf.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015, 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo Sphinx documentation config."""
from __future__ import print_function
import os
import sphinx.environment
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Do not warn on external images.
suppress_warnings = ['image.nonlocal_uri']
# Ignore certain warn as error messages
nitpick_ignore = [
('py:class', 'ZenodoFileObject'),
('py:class', 'ZenodoFilesIterator'),
('py:class', 'ZenodoRecord')
]
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Zenodo'
copyright = u'2015, CERN'
author = u'CERN'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# Get the version string. Cannot be done with import!
g = {}
with open(os.path.join('..', 'zenodo', 'version.py'), 'rt') as fp:
exec(fp.read(), g)
version = g['__version__']
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
html_theme = 'alabaster'
html_theme_options = {
'logo': 'zenodo-black-200.png',
'description': 'Zenodo, a CERN service, is an open dependable home for the'
' long-tail of science, enabling researchers to share and '
'preserve any research outputs in any size, any format and '
'from any science.',
'github_user': 'zenodo',
'github_repo': 'zenodo',
'github_button': False,
'github_banner': True,
'show_powered_by': False,
'extra_nav_links': {
'zenodo@GitHub': 'https://github.com/zenodo/zenodo',
'zenodo@PyPI': 'https://pypi.python.org/pypi/zenodo/',
}
}
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html',
'searchbox.html',
'donate.html',
]
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'zenodo_namedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
# Latex figure (float) alignment
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'zenodo.tex', u'zenodo Documentation',
u'CERN', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'zenodo', u'zenodo Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'zenodo', u'Zenodo Documentation',
author, 'zenodo', 'Zenodo - Research. Shared.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
| 11,033 | Python | .py | 263 | 39.688213 | 79 | 0.711666 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,321 | conftest.py | zenodo_zenodo/tests/unit/conftest.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015, 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Pytest configuration."""
from __future__ import absolute_import, print_function, unicode_literals
import json
import os
import shutil
import sys
import tempfile
from copy import deepcopy
from datetime import date, datetime, timedelta
from uuid import UUID, uuid4
import pytest
from celery import Task
from celery.messaging import establish_connection
from click.testing import CliRunner
from flask import current_app as flask_current_app
from flask import url_for
from flask.cli import ScriptInfo
from flask_celeryext import create_celery_app
from flask_security import login_user
from fs.opener import opener
from helpers import bearer_auth
from invenio_access.models import ActionUsers
from invenio_accounts.testutils import create_test_user
from invenio_admin.permissions import action_admin_access
from invenio_app.config import set_rate_limit
from invenio_communities.models import Community
from invenio_db import db as db_
from invenio_deposit.permissions import \
action_admin_access as deposit_admin_access
from invenio_deposit.scopes import write_scope
from invenio_files_rest.models import Bucket, Location, ObjectVersion
from invenio_github.models import Repository
from invenio_indexer.api import RecordIndexer
from invenio_oaiserver.models import OAISet
from invenio_oauth2server.models import Client, Token
from invenio_oauthclient.models import RemoteAccount, UserIdentity
from invenio_pidstore.models import PersistentIdentifier, PIDStatus
from invenio_pidstore.resolver import Resolver
from invenio_queues.proxies import current_queues
from invenio_records.api import Record
from invenio_records.models import RecordMetadata
from invenio_records_files.api import RecordsBuckets
from invenio_search import current_search, current_search_client
from invenio_sipstore import current_sipstore
from pkg_resources import resource_stream
from six import BytesIO, b
from sqlalchemy_utils.functions import create_database, database_exists
from zenodo.config import APP_DEFAULT_SECURE_HEADERS
from zenodo.factory import create_app
from zenodo.modules.deposit.api import ZenodoDeposit as Deposit
from zenodo.modules.deposit.minters import zenodo_deposit_minter
from zenodo.modules.deposit.scopes import extra_formats_scope
from zenodo.modules.fixtures.records import loadsipmetadatatypes
from zenodo.modules.github.cli import github
from zenodo.modules.records.api import ZenodoRecord
from zenodo.modules.records.models import AccessRight
from zenodo.modules.records.serializers.bibtex import Bibtex
from zenodo.modules.tokens.scopes import tokens_generate_scope
def wrap_rate_limit():
    """Return the rate limit for the app; effectively unlimited by default.

    The real limit from ``set_rate_limit`` is only applied when the
    ``USE_FLASK_LIMITER`` flag is set, so unrelated tests are not throttled.
    """
    limiter_enabled = flask_current_app.config.get('USE_FLASK_LIMITER')
    return set_rate_limit() if limiter_enabled else "1000 per second"
@pytest.fixture
def use_flask_limiter(app):
    """Activate the Flask rate limiter with tight test limits."""
    limiter_config = {
        'USE_FLASK_LIMITER': True,
        'RATELIMIT_GUEST_USER': '2 per second',
        'RATELIMIT_AUTHENTICATED_USER': '4 per second',
        'RATELIMIT_PER_ENDPOINT': {
            'zenodo_frontpage.index': '10 per second',
            'security.login': '10 per second'
        },
    }
    flask_current_app.config.update(limiter_config)
    yield
    # Switch the limiter back off for subsequent tests.
    flask_current_app.config['USE_FLASK_LIMITER'] = False
@pytest.yield_fixture(scope='session')
def instance_path():
    """Session-wide temporary instance directory, removed on teardown."""
    tmp_dir = tempfile.mkdtemp()
    yield tmp_dir
    shutil.rmtree(tmp_dir)
@pytest.fixture(scope='module')
def script_dir(request):
    """Return the directory of the currently running test script."""
    # ``request.fspath`` is the test module's path object; joining '..'
    # yields its containing directory.
    test_module_path = request.fspath
    return test_module_path.join('..')
@pytest.fixture(scope='session')
def env_config(instance_path):
    """Populate process environment with instance paths for the app."""
    prebuilt_static = os.path.join(sys.prefix, 'var/instance/static')
    os.environ.update(
        INVENIO_INSTANCE_PATH=os.environ.get(
            'INSTANCE_PATH', instance_path),
        # To avoid rebuilding the assets during test time we provide our
        # prebuilt assets folder.
        INVENIO_STATIC_FOLDER=prebuilt_static,
    )
    return os.environ
@pytest.yield_fixture(scope='session')
def tmp_db_path():
    """Yield a SQLite URI to a temporary database file; delete it after."""
    db_file = tempfile.mkstemp(prefix='zenodo_test_', suffix='.db')[1]
    uri = 'sqlite:///' + db_file
    yield uri
    os.remove(db_file)
@pytest.yield_fixture(scope='session')
def spam_domains_forbidden_list_file():
    """Yield the path of a temporary forbidden spam-domains file."""
    domains_file = tempfile.NamedTemporaryFile(mode="wb")
    for domain in (b"testing.com\n", b"evildomain.org\n", b"some.other.ch\n"):
        domains_file.write(domain)
    domains_file.flush()
    yield domains_file.name
    # Closing the NamedTemporaryFile also deletes it from disk.
    domains_file.close()
@pytest.yield_fixture(scope='session')
def spam_domains_safelist_file():
    """Yield the path of a temporary safelisted-domains file."""
    safelist_file = tempfile.NamedTemporaryFile(mode="wb")
    for domain in (b"safedomain.org\n", b"safe.domain.org\n"):
        safelist_file.write(domain)
    safelist_file.flush()
    yield safelist_file.name
    # Closing the NamedTemporaryFile also deletes it from disk.
    safelist_file.close()
@pytest.fixture(scope='session')
def default_config(tmp_db_path, spam_domains_forbidden_list_file,
                   spam_domains_safelist_file):
    """Default configuration.

    Builds the application config dict used by the session-scoped ``app``
    fixture: eager Celery, suppressed mail, test database URI and the
    temporary spam-domain files.
    """
    # Two fake OpenAIRE "providers" with overlapping communities, used by
    # the OpenAIRE-related tests.
    ZENODO_OPENAIRE_COMMUNITIES = {
        'foo': {
            'name': 'Foo Optimization Organization',
            'communities': ['c1', 'c2', ],
            'types': {
                'software': [
                    {'id': 'foo:t1', 'name': 'Foo sft type one'},
                    {'id': 'foo:t2', 'name': 'Foo sft type two'},
                ],
                'other': [
                    {'id': 'foo:t4', 'name': 'Foo other type four'},
                    {'id': 'foo:t5', 'name': 'Foo other type five'},
                ]
            }
        },
        'bar': {
            'name': 'Bar Association Resources',
            'communities': ['c3', 'c1'],
            'types': {
                'software': [
                    {'id': 'bar:t3', 'name': 'Bar sft type three'},
                ],
                'other': [
                    {'id': 'bar:t6', 'name': 'Bar other type six'},
                ]
            }
        }
    }
    # Disable HTTPS
    APP_DEFAULT_SECURE_HEADERS['force_https'] = False
    APP_DEFAULT_SECURE_HEADERS['session_cookie_secure'] = False
    return dict(
        RATELIMIT_APPLICATION=wrap_rate_limit,
        CFG_SITE_NAME="testserver",
        DEBUG_TB_ENABLED=False,
        APP_DEFAULT_SECURE_HEADERS=APP_DEFAULT_SECURE_HEADERS,
        # Run Celery tasks synchronously so tests see their effects.
        CELERY_TASK_ALWAYS_EAGER=True,
        CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
        COMMUNITIES_MAIL_ENABLED=False,
        MAIL_SUPPRESS_SEND=True,
        LOGIN_DISABLED=False,
        DEPOSIT_DATACITE_MINTING_ENABLED=False,
        ZENODO_COMMUNITIES_AUTO_ENABLED=False,
        ZENODO_COMMUNITIES_AUTO_REQUEST=['zenodo', ],
        ZENODO_COMMUNITIES_NOTIFY_DISABLED=['zenodo', 'c2'],
        ZENODO_COMMUNITIES_ADD_IF_GRANTS=['grants_comm', ],
        ZENODO_COMMUNITIES_REQUEST_IF_GRANTS=['ecfunded', ],
        ZENODO_OPENAIRE_COMMUNITIES=ZENODO_OPENAIRE_COMMUNITIES,
        ZENODO_SITEMAP_MAX_URL_COUNT=20,
        SIPSTORE_ARCHIVER_WRITING_ENABLED=False,
        OAUTHLIB_INSECURE_TRANSPORT=True,
        # Environment variable wins over the temporary SQLite database.
        SQLALCHEMY_DATABASE_URI=os.environ.get(
            'SQLALCHEMY_DATABASE_URI', tmp_db_path),
        TESTING=True,
        THEME_SITEURL='http://localhost',
        WTF_CSRF_ENABLED=False,
        ZENODO_EXTRA_FORMATS_MIMETYPE_WHITELIST={
            'application/foo+xml': 'Test 1',
            'application/bar+xml': 'Test 2',
        },
        ZENODO_CUSTOM_METADATA_VOCABULARIES={
            'dwc': {
                '@context': 'http://rs.tdwg.org/dwc/terms/',
                'attributes': {
                    'family': {'type': 'keyword', },
                    'genus': {'type': 'keyword', },
                    'behavior': {'type': 'text', }
                }
            },
            'obo': {
                '@context': 'http://purl.obolibrary.org/obo/',
                'attributes': {
                    'RO_0002453': {'type': 'relationship', 'label': 'hostOf'},
                }
            },
        },
        SEARCH_INDEX_PREFIX='zenodo-test-',
        ZENODO_SPAM_DOMAINS_FORBIDDEN_PATH=spam_domains_forbidden_list_file,
        ZENODO_SPAM_DOMAINS_SAFELIST_PATH=spam_domains_safelist_file
    )
@pytest.yield_fixture(scope='session')
def app(env_config, default_config):
    """Flask application fixture.

    Creates the Zenodo application once per test session and binds a Celery
    app to it, yielding inside an application context.
    """
    app = create_app(**default_config)
    # FIXME: Needs fixing flask_celeryext,
    # which once creates the first celery app, the flask_app that is set
    # is never released from the global state, even if you create a new
    # celery application. We need to unset the "flask_app" manually.
    from celery import current_app as cca
    cca = cca._get_current_object()
    delattr(cca, "flask_app")
    celery_app = create_celery_app(app)
    # FIXME: When https://github.com/inveniosoftware/flask-celeryext/issues/35
    # is closed and Flask-CeleryExt is released, this can be removed.
    class _TestAppContextTask(Task):
        abstract = True
        def __call__(self, *args, **kwargs):
            # Reuse an active app context if there is one, otherwise push
            # the Celery app's own Flask app context around the task call.
            if flask_current_app:
                return Task.__call__(self, *args, **kwargs)
            with self.app.flask_app.app_context():
                return Task.__call__(self, *args, **kwargs)
    celery_app.Task = _TestAppContextTask
    celery_app.set_current()
    with app.app_context():
        yield app
@pytest.fixture()
def api(app):
    """Flask application fixture for the REST API app."""
    # The API application is mounted under ``/api`` on the UI app's WSGI
    # dispatcher middleware.
    mounted_apps = app.wsgi_app.mounts
    return mounted_apps['/api']
@pytest.yield_fixture(scope='session')
def indexer_queue(app):
    """Bulk indexer celery queue, declared for the session and deleted after."""
    with establish_connection() as conn:
        bound_queue = app.config['INDEXER_MQ_QUEUE'](conn)
        yield bound_queue.declare()
        bound_queue.delete()
@pytest.yield_fixture()
def event_queues(app):
    """Delete and declare test queues."""
    # Start from a clean slate; the try/finally guarantees the queues are
    # removed even when the test body raises.
    current_queues.delete()
    try:
        current_queues.declare()
        yield current_queues.queues
    finally:
        current_queues.delete()
@pytest.yield_fixture
def communities_autoadd_enabled(app):
    """Temporarily enable auto-adding and auto-requesting of communities."""
    config_key = 'ZENODO_COMMUNITIES_AUTO_ENABLED'
    previous = app.config[config_key]
    app.config[config_key] = True
    yield app.config[config_key]
    # Restore whatever value the application had before.
    app.config[config_key] = previous
@pytest.yield_fixture
def communities_mail_enabled(app):
    """Temporarily enable community e-mail notifications."""
    # (The previous docstring was copy-pasted from the auto-add fixture;
    # this one toggles COMMUNITIES_MAIL_ENABLED, not auto-adding.)
    orig = app.config['COMMUNITIES_MAIL_ENABLED']
    app.config['COMMUNITIES_MAIL_ENABLED'] = True
    yield
    app.config['COMMUNITIES_MAIL_ENABLED'] = orig
@pytest.yield_fixture
def app_client(app):
    """Flask test client for UI app."""
    client = app.test_client()
    with client:
        yield client
@pytest.yield_fixture
def api_client(api):
    """Flask test client for API app."""
    client = api.test_client()
    with client:
        yield client
@pytest.fixture
def script_info(app):
    """ScriptInfo for running click CLI commands against the test app."""
    def _app_factory(info):
        # Always hand back the already-created session application.
        return app
    return ScriptInfo(create_app=_app_factory)
@pytest.yield_fixture
def db(app):
    """Create the database schema for a test and drop it afterwards."""
    db_url = str(db_.engine.url)
    if not database_exists(db_url):
        create_database(db_url)
    db_.create_all()
    yield db_
    # Clean up the session before dropping all tables.
    db_.session.remove()
    db_.drop_all()
@pytest.yield_fixture
def locations(db):
    """File system locations.

    Creates two files-rest locations: ``testloc`` (the default) and
    ``archive``, each backed by its own temporary directory. Yields a
    mapping of location name to directory path.
    """
    tmppath = tempfile.mkdtemp()
    arch_tmppath = tempfile.mkdtemp()
    loc = Location(
        name='testloc',
        uri=tmppath,
        default=True
    )
    arch_loc = Location(
        name='archive',
        uri=arch_tmppath,
        default=False
    )
    db.session.add(loc)
    db.session.add(arch_loc)
    db.session.commit()
    yield {'testloc': tmppath, 'archive': arch_tmppath}
    shutil.rmtree(tmppath)
    shutil.rmtree(arch_tmppath)
    # Delete the cached property value since the tmp archive changes
    current_sipstore.__dict__.pop('archive_location', None)
@pytest.fixture
def archive_fs(locations):
    """Open a filesystem object rooted at the archive location."""
    return opener.opendir(
        locations['archive'], writeable=False, create_dir=True)
@pytest.yield_fixture
def es(app):
    """Provide elasticsearch access.

    Wipes all indices and templates, recreates them from the registered
    mappings/templates, and cleans everything up again on teardown.
    """
    list(current_search.delete(ignore=[400, 404]))
    current_search_client.indices.delete(index='*')
    current_search_client.indices.delete_template('*')
    list(current_search.create())
    list(current_search.put_templates())
    current_search_client.indices.refresh()
    try:
        yield current_search_client
    finally:
        current_search_client.indices.delete(index='*')
        current_search_client.indices.delete_template('*')
@pytest.fixture
def users(app, db):
    """Create users.

    Returns a list of ``{'email': ..., 'id': ...}`` dicts in the order:
    two regular users, an admin, a non-validated user, and two users with
    e-mail domains from the forbidden spam-domains list.
    """
    user1 = create_test_user(
        email='info@zenodo.org', password='tester', confirmed_at=datetime.now()
    )
    user2 = create_test_user(
        email='test@zenodo.org', password='tester2',
        confirmed_at=datetime.now()
    )
    user_admin = create_test_user(
        email='admin@zenodo.org', password='admin', confirmed_at=datetime.now()
    )
    # No confirmed_at: this user's e-mail address is not validated.
    non_validated_user = create_test_user(
        email='nonvalidated@zenodo.org', password='tester')
    user_with_blacklisted_domain = create_test_user(
        email='validated@evildomain.org',
        password='tester',
        confirmed_at=datetime.now()
    )
    # Confirmed long enough ago to fall outside recent-validation windows.
    longtime_validated_user_with_blacklisted_domain = create_test_user(
        email='longvalidated@evildomain.org',
        password='tester',
        confirmed_at=datetime.now() - timedelta(days=40)
    )
    user_with_blacklisted_domain_and_ext_id = create_test_user(
        email='external@evildomain.org',
        password='tester'
    )
    # Attach an external (GitHub) identity to the last user.
    ud = UserIdentity(
        id='1',
        method='github',
        id_user=user_with_blacklisted_domain_and_ext_id.id
    )
    db.session.add(ud)
    with db.session.begin_nested():
        # set admin permissions
        db.session.add(ActionUsers(action=action_admin_access.value,
                                   user=user_admin))
        db.session.add(ActionUsers(action=deposit_admin_access.value,
                                   user=user_admin))
    db.session.commit()
    # NOTE(review): ``user_with_blacklisted_domain_and_ext_id`` is not part
    # of the returned list — presumably tests reach it via its identity;
    # confirm before relying on list positions beyond index 5.
    return [
        {'email': user1.email, 'id': user1.id},
        {'email': user2.email, 'id': user2.id},
        {'email': user_admin.email, 'id': user_admin.id},
        {'email': non_validated_user.email, 'id': non_validated_user.id},
        {
            'email': user_with_blacklisted_domain.email,
            'id': user_with_blacklisted_domain.id
        },
        {
            'email': longtime_validated_user_with_blacklisted_domain.email,
            'id': longtime_validated_user_with_blacklisted_domain.id
        },
    ]
@pytest.fixture
def communities(db, users):
    """Create communities owned by the test users."""
    owners = [
        ('c1', users[1]['id']),
        ('c2', users[1]['id']),
        ('c3', users[0]['id']),
        ('c4', users[0]['id']),
        ('c5', users[1]['id']),
        ('zenodo', users[2]['id']),
        ('ecfunded', users[2]['id']),
        ('grants_comm', users[2]['id']),
    ]
    comm_data = [{'id': cid, 'user_id': uid} for cid, uid in owners]
    for entry in comm_data:
        Community.create(entry['id'], user_id=entry['user_id'])
    db.session.commit()
    return comm_data
@pytest.fixture
def oaisets(db, communities):
    """Create custom OAISet objects.

    Those should be custom OAISet objects which are not community based.
    """
    oaisets_data = [
        {'spec': 'extra', 'search_pattern': 'title:extra'},
        # Looks like a community-based OAISet but carries a search_pattern.
        {'spec': 'user-extra', 'search_pattern': 'title:foobar'},
    ]
    for oai_data in oaisets_data:
        oaiset = OAISet(**oai_data)
        db.session.add(oaiset)
        db.session.commit()
        # Record the generated primary key on the returned data.
        oai_data['id'] = oaiset.id
    return oaisets_data
@pytest.fixture
def oauth2_client(app, db, users):
    """Create an OAuth2 client owned by the first user; return its id."""
    with db.session.begin_nested():
        # create resource_owner -> client_1
        oauth_client = Client(
            client_id='client_test_u1c1',
            client_secret='client_test_u1c1',
            name='client_test_u1c1',
            description='',
            is_confidential=False,
            user_id=users[0]['id'],
            _redirect_uris='',
            _default_scopes='',
        )
        db.session.add(oauth_client)
    db.session.commit()
    return oauth_client.client_id
@pytest.fixture
def write_token(app, db, oauth2_client, users):
    """Create a token with the deposit write scope."""
    with db.session.begin_nested():
        token = Token(
            client_id=oauth2_client,
            user_id=users[0]['id'],
            access_token='dev_access_2',
            refresh_token='dev_refresh_2',
            expires=datetime.utcnow() + timedelta(hours=10),
            is_personal=False,
            is_internal=True,
            _scopes=write_scope.id,
        )
        db.session.add(token)
    db.session.commit()
    bearer_header = ('Authorization', 'Bearer {0}'.format(token.access_token))
    return dict(token=token, auth_header=[bearer_header])
@pytest.fixture
def extra_token(app, db, oauth2_client, users):
    """Create a token with the extra-formats and write scopes.

    Uses token values distinct from the ``write_token`` fixture: both
    fixtures previously used ``dev_access_2``, which collides with the
    unique constraint on ``Token.access_token`` when a test requests both.
    """
    with db.session.begin_nested():
        token_ = Token(
            client_id=oauth2_client,
            user_id=users[0]['id'],
            access_token='dev_access_extra',
            refresh_token='dev_refresh_extra',
            expires=datetime.utcnow() + timedelta(hours=10),
            is_personal=False,
            is_internal=True,
            _scopes=' '.join([extra_formats_scope.id, write_scope.id])
        )
        db.session.add(token_)
    db.session.commit()
    return dict(
        token=token_,
        auth_header=[
            ('Authorization', 'Bearer {0}'.format(token_.access_token)),
        ]
    )
@pytest.fixture
def rat_generate_token(app, db, oauth2_client, users):
    """Create a token carrying the resource-access-token generate scope."""
    token = Token(
        client_id=oauth2_client,
        user_id=users[0]['id'],
        access_token='rat_token',
        expires=datetime.utcnow() + timedelta(hours=10),
        is_personal=False,
        is_internal=True,
        _scopes=tokens_generate_scope.id,
    )
    with db.session.begin_nested():
        db.session.add(token)
    db.session.commit()
    return token
@pytest.fixture
def minimal_record():
    """Minimal record metadata dict."""
    today = datetime.utcnow().date().isoformat()
    return {
        "$schema": "http://zenodo.org/schemas/records/record-v1.0.0.json",
        "recid": 123,
        "doi": "10.5072/zenodo.123",
        "resource_type": {"type": "software"},
        "publication_date": today,
        "title": "Test",
        "creators": [{"name": "Test"}],
        "description": "My description",
        "access_right": "open",
    }
@pytest.fixture
def minimal_deposit():
    """Minimal deposit payload."""
    creators = [
        {'name': 'Doe, John', 'affiliation': 'Atlantis'},
        {'name': 'Smith, Jane', 'affiliation': 'Atlantis'},
    ]
    metadata = {
        'upload_type': 'presentation',
        'title': 'Test title',
        'creators': creators,
        'description': 'Test Description',
        'publication_date': '2013-05-08',
        'access_right': 'open',
        'license': 'CC-BY-4.0',
    }
    return {'metadata': metadata}
@pytest.fixture
def minimal_record_model(db, minimal_record, sip_metadata_types, recid_pid):
    """Minimal record backed by a database model."""
    model = RecordMetadata(id=str(recid_pid.object_uuid))
    # Simulate a record created yesterday and updated one day later.
    created = datetime.utcnow() - timedelta(days=1)
    model.created = created
    model.updated = created + timedelta(days=1)
    model.version_id = 0
    record = ZenodoRecord(minimal_record, model=model)
    db.session.commit()
    return record
@pytest.fixture
def recid_pid(db):
    """Persisted recid PID for the minimal record."""
    pid = PersistentIdentifier.create(
        pid_type='recid',
        pid_value='123',
        status='R',
        object_type='rec',
        object_uuid=uuid4(),
    )
    db.session.commit()
    return pid
@pytest.fixture
def oaiid_pid():
    """PID for OAI id."""
    # Constructed only — never added to a database session.
    return PersistentIdentifier(
        pid_type='oai',
        pid_value='oai:zenodo.org:123',
        status='R',
        object_type='rec',
        object_uuid=uuid4(),
    )
@pytest.fixture
def bucket(db, locations):
    """A committed, empty files bucket in the default location."""
    b1 = Bucket.create()
    db.session.commit()
    return b1
@pytest.fixture
def test_object(db, bucket):
    """Create and commit a small ``ObjectVersion`` ('test.txt') in *bucket*."""
    # A bytes literal replaces the py2-era ``six.b()`` helper; for this
    # pure-ASCII constant the two are byte-identical on Python 2 and 3.
    data_bytes = b'test object'
    obj = ObjectVersion.create(
        bucket, 'test.txt', stream=BytesIO(data_bytes),
        size=len(data_bytes)
    )
    db.session.commit()
    return obj
@pytest.fixture
def depid_pid(db):
    """Registered deposit PID (``depid`` value ``321``) for a fresh UUID."""
    pid = PersistentIdentifier.create(
        pid_type='depid', pid_value='321', status='R', object_type='rec',
        object_uuid=uuid4())
    db.session.commit()
    return pid
@pytest.fixture
def full_record():
    """Full record fixture.

    A record dictionary populating essentially every metadata field of the
    ``record-v1.0.0`` schema, for serializer/schema round-trip tests.
    """
    record = dict(
        recid=12345,
        doi='10.1234/foo.bar',
        resource_type={'type': 'publication', 'subtype': 'book'},
        publication_date=date(2014, 2, 27).isoformat(),
        title='Test title',
        # Creators cover the four gnd/orcid presence combinations.
        creators=[
            {'name': 'Doe, John', 'affiliation': 'CERN',
             'gnd': '170118215', 'orcid': '0000-0002-1694-233X',
             'familyname': 'Doe', 'givennames': 'John',
             },
            {'name': 'Doe, Jane', 'affiliation': 'CERN',
             'gnd': '', 'orcid': '0000-0002-1825-0097',
             'familyname': 'Doe', 'givennames': 'Jane',
             },
            {'name': 'Smith, John', 'affiliation': 'CERN',
             'gnd': '', 'orcid': '',
             'familyname': 'Smith', 'givennames': 'John',
             },
            {'name': 'Nowak, Jack', 'affiliation': 'CERN',
             'gnd': '170118215', 'orcid': '',
             'familyname': 'Nowak', 'givennames': 'Jack',
             },
        ],
        description='Test Description',
        keywords=['kw1', 'kw2', 'kw3'],
        subjects=[
            {'term': 'Astronomy',
             'identifier': 'http://id.loc.gov/authorities/subjects/sh85009003',
             'scheme': 'url',
             },
        ],
        notes='notes',
        language='eng',
        version='1.2.5',
        access_right='open',
        # embargo_date
        # access_conditions
        license={
            'id': 'CC-BY-4.0',
            'url': 'https://creativecommons.org/licenses/by/4.0/',
            'title': 'Creative Commons Attribution 4.0',
        },
        communities=['zenodo'],
        grants=[
            {'title': 'Grant Title', 'code': '1234', 'identifiers': {},
             'internal_id': '10.1234/foo::1234',
             'funder': {'name': 'EC', 'doi': '10.1234/foo'}},
            {'title': 'Title Grant', 'code': '4321', 'identifiers': {},
             'internal_id': '10.1234/foo::4321',
             'funder': {'name': 'EC', 'doi': '10.1234/foo'}},
        ],
        # Related/alternate identifiers mix schemes, relations and
        # optional resource_type sub-objects.
        related_identifiers=[
            {'identifier': '10.1234/foo.bar',
             'scheme': 'doi', 'relation': 'cites',
             'resource_type': {
                 'type': 'dataset'}},
            {'identifier': '1234.4325', 'scheme':
             'arxiv', 'relation': 'isIdenticalTo'},
            {'identifier': '1234.4321', 'scheme':
             'arxiv', 'relation': 'cites',
             'resource_type': {
                 'type': 'dataset'}},
            {'identifier': '1234.4328', 'scheme':
             'arxiv', 'relation': 'references',
             'resource_type': {
                 'type': 'dataset'}},
            {'identifier': '10.1234/zenodo.4321', 'scheme':
             'doi', 'relation': 'isPartOf',
             'resource_type': {
                 'type': 'software'}},
            {'identifier': '10.1234/zenodo.1234', 'scheme':
             'doi', 'relation': 'hasPart',
             'resource_type': {
                 'type': 'publication',
                 'subtype': 'section'
             }},
        ],
        alternate_identifiers=[
            {'identifier': 'urn:lsid:ubio.org:namebank:11815',
             'scheme': 'lsid', },
            {'identifier': '2011ApJS..192...18K',
             'scheme': 'ads', },
            {'identifier': '0317-8471',
             'scheme': 'issn', },
            {'identifier': '10.1234/alternate.doi',
             'scheme': 'doi',
             'resource_type': {
                 'type': 'publication',
                 'subtype': 'section'
             }},
        ],
        contributors=[
            {'affiliation': 'CERN', 'name': 'Smith, Other', 'type': 'Other',
             'gnd': '', 'orcid': '0000-0002-1825-0097'},
            {'affiliation': '', 'name': 'Hansen, Viggo', 'type': 'Other',
             'gnd': '', 'orcid': ''},
            {'affiliation': 'CERN', 'name': 'Kowalski, Manager',
             'type': 'DataManager'},
        ],
        references=[
            {'raw_reference': 'Doe, John et al (2012). Some title. Zenodo. '
             '10.5281/zenodo.12'},
            {'raw_reference': 'Smith, Jane et al (2012). Some title. Zenodo. '
             '10.5281/zenodo.34'},
        ],
        journal={
            'issue': '2',
            'pages': '20',
            'volume': '20',
            'title': 'Bam',
            'year': '2014',
        },
        meeting={
            'title': 'The 13th Biennial HITRAN Conference',
            'place': 'Harvard-Smithsonian Center for Astrophysics',
            'dates': '23-25 June, 2014',
            'acronym': 'HITRAN13',
            'session': 'VI',
            'session_part': '1',
            'url': 'http://hitran.org/conferences/hitran-13-2014/'
        },
        imprint={
            'place': 'Staszkowka',
            'publisher': 'Jol',
            'isbn': '978-0201633610'
        },
        part_of={
            'title': 'Bum',
            'pages': '1-2',
        },
        thesis={
            'university': 'I guess important',
            'supervisors': [
                {'name': 'Smith, Professor'},
            ],
        },
        # Dates exercise start-only, end-only and start+end combinations.
        dates=[
            {'type': 'Valid', 'start': '2019-01-01', 'description': 'Bongo'},
            {'type': 'Collected', 'end': '2019-01-01'},
            {'type': 'Withdrawn', 'start': '2019-01-01', 'end': '2019-01-01'},
            {'type': 'Collected', 'start': '2019-01-01', 'end': '2019-02-01'},
        ],
        owners=[1, ],
        method='microscopic supersampling',
        locations=[{"lat": 2.35, "lon": 1.534, "place": "my place"},
                   {'place': 'New York'}],
        # Internal/system fields (underscore-prefixed) below.
        _oai={
            'id': 'oai:zenodo.org:1',
            'sets': ['user-zenodo', 'user-ecfunded'],
            'updated': '2016-01-01T12:00:00Z'
        },
        _deposit={
            'id': '1',
            'created_by': 1,
            'owners': [1, ],
            'pid': {
                'revision_id': 1,
                'type': 'recid',
                'value': '12345',
            },
            'status': 'published'
        },
        _buckets={
            'deposit': '11111111-1111-1111-1111-111111111111',
            'record': '22222222-2222-2222-2222-222222222222',
        },
        _files=[
            {
                'bucket': '22222222-2222-2222-2222-222222222222',
                'version_id': '11111111-1111-1111-1111-111111111111',
                'file_id': '22222222-3333-4444-5555-666666666666',
                'checksum': 'md5:11111111111111111111111111111111',
                'key': 'test',
                'size': 4,
                'type': 'txt',
            }
        ],
    )
    record['$schema'] = 'http://zenodo.org/schemas/records/record-v1.0.0.json'
    return record
@pytest.fixture
def custom_metadata():
    """Custom metadata dictionary.

    Keys are vocabulary-prefixed terms (Darwin Core ``dwc:``, OBO relation
    ``obo:RO_0002453``); every value is a list.
    """
    return {
        'dwc:family': ['Felidae'],
        'dwc:genus': ['Felis'],
        'dwc:behavior': ['Plays with yarn, sleeps in cardboard box.'],
        'obo:RO_0002453': [
            {
                'subject': ['Cat', 'Felis catus'],
                'object': ['Ctenocephalides felis', 'Cat flea'],
            },
        ],
    }
@pytest.fixture
def record_with_bucket(db, full_record, bucket, sip_metadata_types):
    """Full record linked to a real bucket.

    Returns a ``(pid, record)`` tuple; the record's bucket references are
    rewritten to the real bucket's id before committing.
    """
    record = ZenodoRecord.create(full_record)
    record['_buckets']['record'] = str(bucket.id)
    record['_files'][0]['bucket'] = str(bucket.id)
    record.commit()
    RecordsBuckets.create(bucket=bucket, record=record.model)
    pid = PersistentIdentifier.create(
        pid_type='recid', pid_value=12345, object_type='rec',
        object_uuid=record.id, status='R')
    db.session.commit()
    return pid, record
@pytest.fixture
def record_with_files_creation(db, record_with_bucket):
    """Full record with an attached 'Test.pdf' file.

    Returns ``(pid, record, record_url)``.
    """
    pid, record = record_with_bucket
    filename = 'Test.pdf'
    record.files[filename] = BytesIO(b'v1')
    record.files[filename]['type'] = 'pdf'
    record.commit()
    db.session.commit()
    record_url = url_for('invenio_records_ui.recid', pid_value=pid.pid_value)
    return pid, record, record_url
@pytest.fixture
def record_with_image_creation(db, record_with_bucket):
    """Full record with an attached PNG image (theme's ``eu.png``).

    Returns ``(pid, record, record_url)``.
    """
    pid, record = record_with_bucket
    filename = 'Test.png'
    record.files[filename] = resource_stream(
        'zenodo.modules.theme', 'static/img/eu.png')
    record.files[filename]['type'] = 'png'
    record.commit()
    db.session.commit()
    record_url = url_for('invenio_records_ui.recid', pid_value=pid.pid_value)
    return pid, record, record_url
@pytest.fixture
def published_record(db, es, record_with_bucket):
    """Published record, indexed and searchable in Elasticsearch."""
    _, record = record_with_bucket
    indexer = RecordIndexer()
    indexer.index_by_id(str(record.id))
    current_search.flush_and_refresh(index='records')
    return record
@pytest.fixture
def closed_access_record(db, es, record_with_files_creation):
    """Full record (with files) switched to closed access and re-indexed."""
    _, record, record_url = record_with_files_creation
    record['access_right'] = AccessRight.CLOSED
    record.commit()
    db.session.commit()
    indexer = RecordIndexer()
    indexer.index(record)
    current_search.flush_and_refresh(index='records')
    return record
@pytest.fixture
def bibtex_records(app, db, full_record):
    """Records for BibTeX serializer tests.

    Returns ``(good, bad, empty, raw_good)``: a serializer over the full
    record, one over a record missing required fields, one over an empty
    dict, plus the underlying good record.
    """
    test_bad_record = dict(recid='12345')
    r_good = ZenodoRecord.create(
        full_record, UUID("24029cb9-f0f8-4b72-94a7-bdf746f9d075"))
    r_bad = ZenodoRecord.create(
        test_bad_record, UUID("0281c22c-266a-499b-8446-e12eff2f79b8"))
    db.session.commit()
    record_good = Bibtex(r_good)
    record_bad = Bibtex(r_bad)
    record_empty = Bibtex({})
    return (record_good, record_bad, record_empty, r_good)
@pytest.fixture
def funder_record(db):
    """European Commission funder record with a registered ``frdoi`` PID."""
    funder = Record.create(dict(
        doi='10.13039/501100000780',
        name='European Commission',
        acronyms=['EC'],
    ))
    PersistentIdentifier.create(
        pid_type='frdoi', pid_value=funder['doi'], object_type='rec',
        object_uuid=funder.id, status='R')
    db.session.commit()
    return funder
@pytest.fixture
def grant_records(db, es, funder_record):
    """Two EC grant records (FP7/FP6), PID-registered and ES-indexed."""
    grants = [
        Record.create({
            '$schema': 'https://zenodo.org/schemas/grants/grant-v1.0.0.json',
            'internal_id': '10.13039/501100000780::282896',
            'funder': {'$ref': 'https://dx.doi.org/10.13039/501100000780'},
            'identifiers': {
                'eurepo': 'info:eu-repo/grantAgreement/EC/FP7/282896',
            },
            'code': '282896',
            'title': 'Open Access Research Infrastructure in Europe',
            'acronym': 'OpenAIREplus',
            'program': 'FP7',
        }),
        Record.create({
            '$schema': 'https://zenodo.org/schemas/grants/grant-v1.0.0.json',
            'internal_id': '10.13039/501100000780::027819',
            'funder': {'$ref': 'https://dx.doi.org/10.13039/501100000780'},
            'identifiers': {
                'eurepo': 'info:eu-repo/grantAgreement/EC/FP6/027819',
            },
            'code': '027819',
            'title': 'Integrating cognition, emotion and autonomy',
            'acronym': 'ICEA',
            'program': 'FP6',
        }),
    ]
    # Register PIDs first and commit once, then index.
    for g in grants:
        PersistentIdentifier.create(
            pid_type='grant', pid_value=g['internal_id'], object_type='rec',
            object_uuid=g.id, status='R')
    db.session.commit()
    for g in grants:
        RecordIndexer().index_by_id(g.id)
    current_search.flush_and_refresh(index='grants')
    return grants
@pytest.fixture
def license_record(db, es, sip_metadata_types):
    """Create two license records (CC-BY-4.0 and CC0-1.0).

    Both are PID-registered and indexed, but only the second one
    (CC0-1.0) is returned.
    """
    licenses = [
        Record.create({
            "$schema":
                "https://zenodo.org/schemas/licenses/license-v1.0.0.json",
            "domain_content": True,
            "domain_data": True,
            "domain_software": True,
            "family": "",
            "id": "CC-BY-4.0",
            "maintainer": "Creative Commons",
            "od_conformance": "approved",
            "osd_conformance": "not reviewed",
            "status": "active",
            "title": "Creative Commons Attribution International 4.0",
            "url": "https://creativecommons.org/licenses/by/4.0/"
        }),
        Record.create({
            "$schema":
                "https://zenodo.org/schemas/licenses/license-v1.0.0.json",
            "domain_content": True,
            "domain_data": True,
            "domain_software": True,
            "family": "",
            "id": "CC0-1.0",
            "maintainer": "Creative Commons",
            "od_conformance": "approved",
            "osd_conformance": "not reviewed",
            "status": "active",
            "title": "CC0 1.0",
            "url": "https://creativecommons.org/publicdomain/zero/1.0/"
        })
    ]
    for license in licenses:
        PersistentIdentifier.create(
            pid_type='od_lic', pid_value=license['id'], object_type='rec',
            object_uuid=license.id, status='R')
    db.session.commit()
    for license in licenses:
        RecordIndexer().index_by_id(license.id)
    current_search.flush_and_refresh(index='licenses')
    # Returns CC0-1.0 only; CC-BY-4.0 exists as a side effect.
    return licenses[1]
@pytest.fixture
def deposit_metadata():
    """Raw metadata of deposit."""
    return {
        'title': 'Test title',
        'creators': [
            {'name': 'Doe, John', 'affiliation': 'Atlantis'},
            {'name': 'Smith, Jane', 'affiliation': 'Atlantis'},
        ],
        'description': 'Test Description',
        'resource_type': {'type': 'publication'},
        'publication_date': '2013-05-08',
        'access_right': 'open',
    }
@pytest.fixture
def sip_metadata_types(db):
    """Register the SIP metadata types (record JSON + BagIt).

    Loads the types as a side effect; returns nothing.
    """
    loadsipmetadatatypes([
        {
            'title': 'Test Zenodo Record JSON v1.0.0',
            'name': 'json',
            'format': 'json',
            'schema': 'https://zenodo.org/schemas/records/record-v1.0.0.json'
        },
        {
            "title": "Test BagIt Archiver metadata",
            "name": "bagit",
            "format": "json",
            "schema": "https://zenodo.org/schemas/sipstore/bagit-v1.0.0.json"
        }
    ])
@pytest.fixture
def deposit(app, es, users, locations, deposit_metadata, sip_metadata_types):
    """New deposit created as the first test user, indexed in ES."""
    with app.test_request_context():
        datastore = app.extensions['security'].datastore
        login_user(datastore.get_user(users[0]['email']))
        id_ = uuid4()
        zenodo_deposit_minter(id_, deposit_metadata)
        deposit = Deposit.create(deposit_metadata, id_=id_)
        # NOTE(review): ``db_`` is presumably the module-level invenio_db
        # session alias (vs. the ``db`` fixture) — confirm against imports.
        db_.session.commit()
    current_search.flush_and_refresh(index='deposits')
    return deposit
@pytest.fixture
def deposit_file(deposit, db):
    """Attach a 'test.txt' file to the deposit and return its files."""
    deposit.files['test.txt'] = BytesIO(b'test')
    db.session.commit()
    return deposit.files
@pytest.fixture
def deposit_url(api):
    """URL of the deposit REST API list endpoint."""
    with api.test_request_context():
        return url_for('invenio_deposit_rest.depid_list')
@pytest.fixture
def json_headers():
    """JSON headers."""
    content_type = ('Content-Type', 'application/json')
    accept = ('Accept', 'application/json')
    return [content_type, accept]
@pytest.fixture
def json_auth_headers(json_headers, write_token):
    """JSON + authentication headers (with a valid oauth2 token).

    It uses the token associated with the first user.
    """
    return bearer_auth(json_headers, write_token)
@pytest.fixture
def auth_headers(write_token):
    """Authentication headers only (with a valid oauth2 token).

    It uses the token associated with the first user.
    """
    return bearer_auth([], write_token)
@pytest.fixture
def extra_auth_headers(extra_token):
    """Authentication headers built from the extra oauth2 token."""
    return bearer_auth([], extra_token)
@pytest.fixture
def json_extra_auth_headers(json_headers, extra_token):
    """JSON + authentication headers built from the extra oauth2 token."""
    return bearer_auth(json_headers, extra_token)
@pytest.fixture
def get_json():
    """Function for extracting json from response."""
    def inner(response, code=None):
        """Decode JSON from *response*, optionally asserting status *code*."""
        body = response.get_data(as_text=True)
        assert code is None or response.status_code == code, body
        return json.loads(body)
    return inner
@pytest.fixture
def legacyjson_v1():
    """Legacy JSON serializer with ``replace_refs`` disabled.

    The flag lives on a module-level serializer object, so it is restored
    in a ``finally`` block — previously a failing test would leave
    ``replace_refs=False`` for every subsequent test.  Also uses the plain
    ``pytest.fixture`` decorator (``yield_fixture`` is deprecated; plain
    fixtures with ``yield`` are already used elsewhere in this file).
    """
    from zenodo.modules.records.serializers import legacyjson_v1 as serializer
    serializer.replace_refs = False
    try:
        yield serializer
    finally:
        serializer.replace_refs = True
@pytest.fixture
def resolver():
    """Record resolver mapping ``recid`` PIDs to ``ZenodoRecord``s."""
    return Resolver(
        pid_type='recid', object_type='rec', getter=ZenodoRecord.get_record)
@pytest.fixture
def audit_records(minimal_record, db):
    """Audit test records.

    Creates four records (recid 1..4), each with ``_oai`` metadata and both
    a ``recid`` and an ``oai`` PID.  Returns ``{recid: record_uuid_str}``.
    """
    records = {}
    for i in (1, 2, 3, 4):
        record = RecordMetadata()
        record.json = deepcopy(minimal_record)
        record.json['recid'] = i
        record.json['_oai'] = {
            'id': 'oai:{}'.format(i),
            'sets': [],
            'updated': datetime.utcnow().date().isoformat(),
        }
        db.session.add(record)
        db.session.commit()
        records[i] = str(ZenodoRecord(data=record.json, model=record).id)
        recid = PersistentIdentifier(pid_type='recid', pid_value=str(i),
                                     status='R', object_type='rec',
                                     object_uuid=record.id)
        oai_id = PersistentIdentifier(
            pid_type='oai', pid_value=record.json['_oai']['id'], status='R',
            object_type='rec', object_uuid=record.id)
        db.session.add(recid)
        db.session.add(oai_id)
        db.session.commit()
    return records
@pytest.fixture
def oaiset_update_records(minimal_record, db, es):
    """Fixture with records for query-based OAISet updating tests.

    Returns the UUIDs of three indexed records: one already correct, one
    needing the 'extra' set removed, one needing it added.
    """
    rec_ok = {
        'title': 'extra',
        '_oai': {
            'id': '12345',
            'sets': ['extra', 'user-foobar'],
            'updated': datetime(1970, 1, 1).isoformat(),
        }
    }
    # Record which needs removal of 'extra' from oai sets
    rec_remove = deepcopy(rec_ok)
    rec_remove['title'] = 'other'
    # Record which needs addition of 'extra' to oai sets
    rec_add = deepcopy(rec_ok)
    rec_add['_oai']['sets'] = ['user-foobar', ]
    records = [rec_ok, rec_remove, rec_add, ]
    rec_uuids = []
    for record_meta in records:
        rec = RecordMetadata()
        rec.json = deepcopy(record_meta)
        db.session.add(rec)
        db.session.commit()
        RecordIndexer().index_by_id(rec.id)
        rec_uuids.append(rec.id)
    current_search.flush_and_refresh('records')
    return rec_uuids
#
# GitHub-specific conftest
#
@pytest.fixture
def cli_run(app):
    """Fixture for CLI runner function.

    Yields a function accepting a single parameter (CLI command as string)
    that invokes the ``github`` command group bound to the test app.
    """
    runner = CliRunner()
    script_info = ScriptInfo(create_app=lambda info: app)

    def run(command):
        """Run the command from the CLI."""
        command_args = command.split()
        return runner.invoke(github, command_args, obj=script_info)

    yield run
@pytest.fixture
def g_users_data():
    """Data for users objects."""
    emails = ('u1@foo.bar', 'u2@foo.bar')
    return [{'email': email, 'password': '123456'} for email in emails]
@pytest.fixture
def g_users(app, db, g_users_data):
    """Create the users for CLI/API tests; annotate data dicts with ids."""
    datastore = app.extensions['security'].datastore
    for ud in g_users_data:
        user = datastore.create_user(**ud)
        db.session.commit()
        # Record the generated primary key back onto the data dict.
        ud['id'] = user.id
    return g_users_data
@pytest.fixture
def g_remoteaccounts_data(g_users):
    """Data for RemoteAccount objects.

    One account for the first user, with three GitHub repos keyed by
    (string) GitHub repository id in ``extra_data``.
    """
    return [
        {
            'user_id': g_users[0]['id'],
            'extra_data': {
                'repos': {
                    '8000': {
                        'full_name': 'foo/bar',
                    },
                    '8002': {
                        'full_name': 'bacon/eggs',
                    },
                    '8003': {
                        'full_name': 'other/repo',
                    },
                }
            },
        }
    ]
@pytest.fixture
def g_remoteaccounts(app, db, g_remoteaccounts_data):
    """Persist RemoteAccount objects; annotate data dicts with ids."""
    for rad in g_remoteaccounts_data:
        # 'changeme' is the OAuth client id placeholder.
        ra = RemoteAccount.create(rad['user_id'], 'changeme',
                                  rad['extra_data'])
        db.session.add(ra)
        db.session.commit()
        rad['id'] = ra.id
    return g_remoteaccounts_data
@pytest.fixture
def g_repositories_data(g_users):
    """Data for repositories (second one has no owning user)."""
    return [
        {'name': 'foo/bar', 'github_id': 8000, 'user_id': g_users[0]['id']},
        {'name': 'baz/spam', 'github_id': 8001},
        {'name': 'bacon/eggs', 'github_id': 8002, 'user_id': g_users[0]['id']},
    ]
@pytest.fixture
def g_repositories(app, db, g_repositories_data):
    """Persist GitHub Repository objects; annotate data dicts with ids."""
    for rd in g_repositories_data:
        repository = Repository(**rd)
        # We don't call GitHub hence the hook is not important, yet it should
        # be not null
        repository.hook = 12345
        db.session.add(repository)
        db.session.commit()
        rd['id'] = repository.id
    return g_repositories_data
@pytest.fixture
def g_tester_id(app, db):
    """Create a single test user and return its id (for model tests)."""
    datastore = app.extensions['security'].datastore
    tester = datastore.create_user(
        email='info@inveniosoftware.org', password='tester',
    )
    db.session.commit()
    return tester.id
@pytest.fixture
def sample_identifiers():
    """Sample of various identifiers.

    Maps scheme name to a ``(identifier, resolved_url)`` tuple; an empty
    URL means the scheme has no resolvable landing page.
    """
    return {
        'ads': ('ads:2011ApJS..192...18K',
                'https://ui.adsabs.harvard.edu/#abs/2011ApJS..192...18K'),
        'ark': ('ark:/13030/tqb3kh97gh8w', ''),
        'arxiv': ('hep-th/1601.07616',
                  'https://arxiv.org/abs/arXiv:1601.07616'),
        'bioproject': ('PRJNA224116',
                       'https://www.ebi.ac.uk/ena/data/view/PRJNA224116'),
        'biosample': ('SAMN08289383',
                      'https://www.ebi.ac.uk/ena/data/view/SAMN08289383'),
        'doi': ('10.1002/example',
                'https://doi.org/10.1002/example'),
        'ean13': ('4006381333931', ''),
        'ean8': ('73513537', ''),
        'ensembl': ('ENSMUST00000017290',
                    'https://www.ensembl.org/id/ENSMUST00000017290'),
        'genome': ('GCF_000001405.38',
                   'https://www.ncbi.nlm.nih.gov/assembly/GCF_000001405.38'),
        'gnd': ('4079154-3',
                'https://d-nb.info/gnd/4079154-3'),
        'hal': ('mem_13102590',
                'https://hal.archives-ouvertes.fr/mem_13102590'),
        'handle': ('10013/epic.10033',
                   'https://hdl.handle.net/10013/epic.10033'),
        'isbn': ('0-9752298-0-X', ''),
        'isni': ('1422-4586-3573-0476', ''),
        'issn': ('1188-1534', ''),
        'istc': ('0A9 2002 12B4A105 7', ''),
        'lsid': ('urn:lsid:ubio.org:namebank:11815', ''),
        'orcid': ('0000-0002-1694-233X',
                  'https://orcid.org/0000-0002-1694-233X'),
        'pmcid': ('PMC2631623',
                  'https://www.ncbi.nlm.nih.gov/pmc/PMC2631623'),
        'pmid': ('pmid:12082125',
                 'https://pubmed.ncbi.nlm.nih.gov/12082125'),
        'purl': ('http://purl.oclc.org/foo/bar',
                 'http://purl.oclc.org/foo/bar'),
        'refseq': ('NZ_JXSL01000036.1',
                   'https://www.ncbi.nlm.nih.gov/entrez/viewer.fcgi'
                   '?val=NZ_JXSL01000036.1'),
        'sra': ('SRR6437777',
                'https://www.ebi.ac.uk/ena/data/view/SRR6437777'),
        'uniprot': ('Q9GYV0',
                    'https://purl.uniprot.org/uniprot/Q9GYV0'),
        'url': ('http://www.heatflow.und.edu/index2.html',
                'http://www.heatflow.und.edu/index2.html'),
        'urn': ('urn:nbn:de:101:1-201102033592',
                'https://nbn-resolving.org/urn:nbn:de:101:1-201102033592'),
        'swh': ('swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2',
                'https://archive.softwareheritage.org/'
                'swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2'),
        'ascl': ('ascl:1908.011', 'https://ascl.net/1908.011'),
    }
@pytest.fixture
def mock_datacite_minting(mocker, app):
    """DOI registration enabled and DataCite calls mocked.

    The original config value is restored in a ``finally`` block — without
    it an exception in the test body would leak
    ``DEPOSIT_DATACITE_MINTING_ENABLED=True`` into subsequent tests.
    """
    orig = app.config['DEPOSIT_DATACITE_MINTING_ENABLED']
    app.config['DEPOSIT_DATACITE_MINTING_ENABLED'] = True
    datacite_mock = mocker.patch(
        'invenio_pidstore.providers.datacite.DataCiteMDSClient')
    try:
        yield datacite_mock
    finally:
        app.config['DEPOSIT_DATACITE_MINTING_ENABLED'] = orig
@pytest.fixture
def minimal_record_for_badge(db, record_with_bucket):
    """Record carrying a 'reana.yaml' file, for reana badge tests."""
    pid, record = record_with_bucket
    filename = "reana.yaml"
    record.files[filename] = BytesIO(b"v1")
    record.files[filename]["type"] = "yaml"
    record.commit()
    db.session.commit()
    return record
| 47,787 | Python | .py | 1,297 | 28.7633 | 79 | 0.596354 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,322 | test_dependencies.py | zenodo_zenodo/tests/unit/test_dependencies.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016-2018 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Basics tests to ensure DB and Elasticsearch is running."""
from invenio_search import current_search, current_search_client
def test_es_running(app):
    """Verify the Elasticsearch cluster answers a ping."""
    assert current_search_client.ping()
def test_es_state(app, es):
    """Test generated mappings, templates and aliases on ElasticSearch.

    Compares the full alias map and the registered index templates against
    exact expected values, accounting for the configured index prefix and
    the current index suffix.
    """
    prefix = app.config['SEARCH_INDEX_PREFIX']
    suffix = current_search._current_suffix
    # Every versioned index must expose both a generic and a versioned alias.
    assert current_search_client.indices.get_alias() == {
        prefix + 'grants-grant-v1.0.0' + suffix: {
            'aliases': {
                prefix + 'grants': {},
                prefix + 'grants-grant-v1.0.0': {}
            }
        },
        prefix + 'records-record-v1.0.0' + suffix: {
            'aliases': {
                prefix + 'records': {},
                prefix + 'records-record-v1.0.0': {}
            }
        },
        prefix + 'deposits-records-record-v1.0.0' + suffix: {
            'aliases': {
                prefix + 'deposits-records-record-v1.0.0': {},
                prefix + 'deposits': {},
                prefix + 'deposits-records': {}
            }
        },
        prefix + 'licenses-license-v1.0.0' + suffix: {
            'aliases': {
                prefix + 'licenses-license-v1.0.0': {},
                prefix + 'licenses': {}
            }
        },
        prefix + 'funders-funder-v1.0.0' + suffix: {
            'aliases': {
                prefix + 'funders': {},
                prefix + 'funders-funder-v1.0.0': {}
            }
        },
    }
    # Reduce each template to its (patterns, aliases) pair for comparison.
    templates = {
        k: (set(v['index_patterns']), set(v['aliases'].keys()))
        for k, v in current_search_client.indices.get_template().items()
    }
    assert templates == {
        prefix + 'record-view-v1.0.0': (
            {prefix + 'events-stats-record-view-*'},
            {prefix + 'events-stats-record-view'},
        ),
        prefix + 'file-download-v1.0.0': (
            {prefix + 'events-stats-file-download-*'},
            {prefix + 'events-stats-file-download'},
        ),
        prefix + 'aggr-record-view-v1.0.0': (
            {prefix + 'stats-record-view-*'},
            {prefix + 'stats-record-view'},
        ),
        prefix + 'aggr-record-download-v1.0.0': (
            {prefix + 'stats-file-download-*'},
            {prefix + 'stats-file-download'},
        ),
    }
| 3,448 | Python | .py | 87 | 30.218391 | 76 | 0.558604 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,323 | helpers.py | zenodo_zenodo/tests/unit/helpers.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Helpers."""
from __future__ import absolute_import, print_function, unicode_literals
from contextlib import contextmanager
from copy import deepcopy
from flask import current_app
from zenodo.modules.deposit.api import ZenodoDeposit as Deposit
def bearer_auth(headers, token):
    """Create authentication headers (with a valid oauth2 token).

    Returns a new list; *headers* itself is never mutated.
    """
    auth_header = (
        'Authorization', 'Bearer {0}'.format(token['token'].access_token)
    )
    return deepcopy(headers) + [auth_header]
def login_user_via_session(client, user=None, email=None):
    """Login a user via the session.

    Pass either *user* or *email*; with only *email* the user is looked up
    in the security datastore.
    """
    if not user:
        user = current_app.extensions['security'].datastore.find_user(
            email=email)
    with client.session_transaction() as sess:
        sess['user_id'] = user.get_id()
def publish_and_expunge(db, deposit):
    """Publish the deposit and expunge the session.

    Use this if you want to be safe that session is synced with the DB after
    the deposit publishing.  Returns a freshly re-fetched deposit instance.
    """
    deposit.publish()
    dep_uuid = deposit.id
    db.session.commit()
    # Drop all cached instances so the reload below hits the database.
    db.session.expunge_all()
    deposit = Deposit.get_record(dep_uuid)
    return deposit
@contextmanager
def recaptcha_enabled(app):
    """Temporarily enable recaptcha.

    The original configuration is restored in a ``finally`` block, so an
    exception inside the ``with`` body can no longer leave the test keys
    in the application config.
    """
    orig_public_key = app.config.get('RECAPTCHA_PUBLIC_KEY')
    orig_private_key = app.config.get('RECAPTCHA_PRIVATE_KEY')
    app.config['RECAPTCHA_PUBLIC_KEY'] = 'test-key'
    app.config['RECAPTCHA_PRIVATE_KEY'] = 'test-key'
    try:
        yield
    finally:
        app.config['RECAPTCHA_PUBLIC_KEY'] = orig_public_key
        app.config['RECAPTCHA_PRIVATE_KEY'] = orig_private_key
| 2,586 | Python | .py | 64 | 37 | 76 | 0.735247 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,324 | test_celery.py | zenodo_zenodo/tests/unit/default/test_celery.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo module test cases."""
from __future__ import absolute_import, print_function
def test_celery():
    """Test celery application.

    Loading the default modules must register tasks from the installed
    Invenio packages on the Celery app.
    """
    from zenodo.celery import celery
    celery.loader.import_default_modules()
    assert 'invenio_accounts.tasks.send_security_email' in celery.tasks
    assert 'invenio_mail.tasks.send_email' in celery.tasks
| 1,323 | Python | .py | 31 | 40.903226 | 76 | 0.767081 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,325 | test_cli.py | zenodo_zenodo/tests/unit/default/test_cli.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo module test cases."""
from __future__ import absolute_import, print_function
from click.testing import CliRunner
def test_basic_cli():
    """Invoke the Zenodo CLI entry point without arguments."""
    from zenodo.cli import cli
    runner = CliRunner()
    res = runner.invoke(cli)
    assert res.exit_code == 0
| 1,265 | Python | .py | 32 | 37.6875 | 76 | 0.758564 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,326 | test_wsgi.py | zenodo_zenodo/tests/unit/default/test_wsgi.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo module test cases."""
from __future__ import absolute_import, print_function
def test_wsgi():
    """Ensure the WSGI application object imports and is truthy."""
    from zenodo.wsgi import application as wsgi_app
    assert wsgi_app
| 1,171 | Python | .py | 29 | 38.827586 | 76 | 0.766257 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,327 | test_zenodo.py | zenodo_zenodo/tests/unit/default/test_zenodo.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo module test cases."""
from __future__ import absolute_import, print_function
def test_version():
    """Ensure the package exposes a truthy version string."""
    import zenodo
    assert zenodo.__version__
| 1,167 | Python | .py | 29 | 38.689655 | 76 | 0.759259 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,328 | conftest.py | zenodo_zenodo/tests/unit/exporter/conftest.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2018 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Exporter writers tests."""
from __future__ import absolute_import, print_function
import pytest
from flask import current_app
from invenio_files_rest.models import Bucket
from zenodo.modules.exporter import BucketWriter, BZip2ResultStream, \
ResultStream
@pytest.fixture()
def exporter_bucket(db, locations):
    """Create the bucket the exporter writes into (UUID from app config)."""
    return Bucket.create(id=current_app.config['EXPORTER_BUCKET_UUID'])
@pytest.fixture()
def writer(exporter_bucket):
    """A ``BucketWriter`` targeting key ``test.json`` in the exporter bucket."""
    bucket_writer = BucketWriter(bucket_id=exporter_bucket.id, key='test.json')
    return bucket_writer
@pytest.fixture()
def searchobj():
    """Search object."""
    class Hit(dict):
        """Dict-like hit exposing ``meta.id`` and the raw source as ``_d_``."""

        def __init__(self, *args, **kwargs):
            dict.__init__(self, *args, **kwargs)
            source = args[0]
            # ``meta`` mimics elasticsearch-dsl's hit metadata object.
            self.meta = type('Meta', (object,), {'id': source['id']})()
            self._d_ = source

    class Search(object):
        """Minimal stand-in for an elasticsearch-dsl ``Search``."""

        def scan(self):
            docs = ({'id': 1, 'title': 'test 1'}, {'id': 2, 'title': 'test 2'})
            return (Hit(doc) for doc in docs)

    return Search()
@pytest.fixture()
def serializerobj():
    """Serializer stub: exports a hit's title as UTF-8 bytes."""
    class Serializer(object):
        def serialize_exporter(self, pid, record):
            title = record['_source']['title']
            return title.encode('utf8')

    return Serializer()
@pytest.fixture()
def fetcher():
    """PID fetcher stub that simply echoes the record id."""
    def _fetch(id_, data):
        return id_

    return _fetch
@pytest.fixture()
def resultstream(searchobj, serializerobj, fetcher):
    """Plain (uncompressed) result stream over the stub search."""
    stream = ResultStream(searchobj, fetcher, serializerobj)
    return stream
@pytest.fixture()
def bzip2resultstream(searchobj, serializerobj, fetcher):
    """BZip2-compressed result stream over the stub search."""
    stream = BZip2ResultStream(searchobj, fetcher, serializerobj)
    return stream
| 2,805 | Python | .py | 77 | 31.987013 | 76 | 0.693757 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,329 | test_exporter_writers.py | zenodo_zenodo/tests/unit/exporter/test_exporter_writers.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2018 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Exporter writers tests."""
from __future__ import absolute_import, print_function
import pytest
from invenio_files_rest.models import ObjectVersion
from six import BytesIO
from zenodo.modules.exporter import filename_factory
def test_filename_factory():
    """Test filename factory."""
    # With no name/format, the generated callable raises KeyError when used.
    unconfigured = filename_factory()
    with pytest.raises(KeyError):
        unconfigured()

    generated = filename_factory(name='records', format='json')()
    assert generated.startswith('records-')
    assert generated.endswith('.json')
def test_bucket_writer(writer):
    """Opening, writing and closing the writer persists an object version."""
    writer.open()
    # Freshly opened: no file has been stored yet.
    assert writer.obj.file_id is None
    payload = BytesIO(b'this is a test')
    writer.write(payload)
    writer.close()
    stored = ObjectVersion.get(writer.bucket_id, writer.key)
    assert stored.file_id is not None
| 1,712 | Python | .py | 42 | 38.5 | 78 | 0.760385 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,330 | test_exporter_streams.py | zenodo_zenodo/tests/unit/exporter/test_exporter_streams.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2018 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Exporter stream tests."""
from __future__ import absolute_import, print_function
import bz2
import pytest
from zenodo.modules.exporter import BZip2ResultStream, ResultStream
@pytest.fixture()
def searchobj():
    """Search object stub (module-local copy of the conftest fixture)."""
    class Hit(dict):
        """Dict-like hit exposing ``meta.id`` and the raw source as ``_d_``."""

        def __init__(self, *args, **kwargs):
            dict.__init__(self, *args, **kwargs)
            source = args[0]
            self.meta = type('Meta', (object,), {'id': source['id']})()
            self._d_ = source

    class Search(object):
        """Minimal stand-in for an elasticsearch-dsl ``Search``."""

        def scan(self):
            docs = ({'id': 1, 'title': 'test 1'}, {'id': 2, 'title': 'test 2'})
            return (Hit(doc) for doc in docs)

    return Search()
@pytest.fixture()
def serializerobj():
    """Serializer stub: exports a hit's title as UTF-8 bytes."""
    class Serializer(object):
        def serialize_exporter(self, pid, record):
            title = record['_source']['title']
            return title.encode('utf8')

    return Serializer()
@pytest.fixture()
def fetcher():
    """PID fetcher stub that simply echoes the record id."""
    def _fetch(id_, data):
        return id_

    return _fetch
@pytest.fixture()
def resultstream(searchobj, serializerobj, fetcher):
    """Plain (uncompressed) result stream over the stub search."""
    stream = ResultStream(searchobj, fetcher, serializerobj)
    return stream
@pytest.fixture()
def bzip2resultstream(searchobj, serializerobj, fetcher):
    """BZip2-compressed result stream over the stub search."""
    stream = BZip2ResultStream(searchobj, fetcher, serializerobj)
    return stream
def test_resultstream(resultstream):
    """The stream yields each serialized hit once, then empty bytes forever."""
    for expected in (b'test 1', b'test 2', b'', b''):
        assert resultstream.read() == expected
def test_resultstream(bzip2resultstream):
"""Test result stream serializer."""
c = bz2.BZ2Compressor()
c.compress(b'test 1')
c.compress(b'test 2')
data = c.flush()
assert bzip2resultstream.read() == data
assert bzip2resultstream.read() == b''
| 2,886 | Python | .py | 80 | 31.4375 | 76 | 0.684154 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,331 | test_exporter_api.py | zenodo_zenodo/tests/unit/exporter/test_exporter_api.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2018 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Exporter stream tests."""
from __future__ import absolute_import, print_function
from invenio_files_rest.models import ObjectVersion
from invenio_indexer.api import RecordIndexer
from invenio_search import current_search
from zenodo.modules.exporter.tasks import export_job
def test_exporter(app, db, es, exporter_bucket, record_with_files_creation):
    """Test record exporter.

    Index a record into Elasticsearch, run the ``records`` export job and
    check exactly one object (the export dump) lands in the exporter bucket.
    """
    pid, record, record_url = record_with_files_creation
    # Make the record visible to the export job's search query.
    RecordIndexer().index_by_id(record.id)
    current_search.flush_and_refresh('records')
    with app.app_context():
        # Bucket starts empty; the job writes a single export file.
        assert ObjectVersion.get_by_bucket(exporter_bucket).count() == 0
        export_job(job_id='records')
        assert ObjectVersion.get_by_bucket(exporter_bucket).count() == 1
| 1,719 | Python | .py | 38 | 42.894737 | 76 | 0.764038 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,332 | test_deposit_views.py | zenodo_zenodo/tests/unit/deposit/test_deposit_views.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test Zenodo deposit REST API."""
from __future__ import absolute_import, print_function
from copy import deepcopy
from flask import url_for
from helpers import publish_and_expunge
from invenio_accounts.testutils import login_user_via_session
from invenio_pidrelations.contrib.versioning import PIDVersioning
from invenio_pidstore.models import PersistentIdentifier as PID
from invenio_pidstore.models import PIDStatus
from invenio_records.api import Record
from six import BytesIO, b
from zenodo.modules.deposit.api import ZenodoDeposit
from zenodo.modules.deposit.utils import delete_record
from zenodo.modules.records.resolvers import record_resolver
def test_deposit_ui_login(app, app_client, deposit, deposit_file, users):
    """Test login on deposit views.

    Checks access control for anonymous users, a non-owner and the owner:
    302 means "redirected to login", 403 "authenticated but forbidden".
    """
    with app.test_request_context():
        record_url = url_for(
            'invenio_records_ui.recid', pid_value=deposit['_deposit']['id'])
        delete_url = url_for(
            'zenodo_deposit.delete', pid_value=deposit['_deposit']['id'])
        deposit_url = url_for(
            'invenio_deposit_ui.depid', pid_value=deposit['_deposit']['id'])
        new_url = url_for('invenio_deposit_ui.new')
        index_url = url_for('invenio_deposit_ui.index')
    # Unauthenticated users: everything redirects to login.
    assert app_client.get(index_url).status_code == 302
    assert app_client.get(new_url).status_code == 302
    assert app_client.get(deposit_url).status_code == 302
    assert app_client.get(delete_url).status_code == 302
    deposit.publish()
    # Published record page is public; POST (edit) still needs login.
    assert app_client.get(record_url).status_code == 200
    assert app_client.post(record_url).status_code == 302
    # Login user NOT owner of deposit
    login_user_via_session(app_client, email=users[1]['email'])
    # Can list deposits, create new, and view record.
    assert app_client.get(index_url).status_code == 200
    assert app_client.get(new_url).status_code == 200
    assert app_client.get(record_url).status_code == 200
    # - cannot view deposit or put record in edit mode
    assert app_client.get(deposit_url).status_code == 403
    assert app_client.post(record_url).status_code == 403
    assert app_client.get(delete_url).status_code == 403
    # Login owner of deposit
    login_user_via_session(app_client, email=users[0]['email'])
    # - can view deposit or put record in edit mode
    res = app_client.post(record_url)
    assert res.status_code == 302
    # Redirect goes to the edit page, not to the login page.
    assert 'login' not in res.location
    assert app_client.get(deposit_url).status_code == 200
    assert app_client.get(delete_url).status_code == 403
    login_user_via_session(app_client, email=users[2]['email'])
    # NOTE(review): users[2] presumably has elevated (admin) rights; the
    # delete-page assertion below is disabled -- confirm expected status
    # before re-enabling.
    # assert app_client.get(delete_url).status_code == 200
def test_tombstone(app, app_client, deposit, deposit_file, users):
    """Test tombstone for edit pages.

    After the recid and depid PIDs are deleted, deposit-related pages must
    answer 410 Gone for authenticated users (anonymous users are first
    redirected to login where authentication is required).
    """
    with app.test_request_context():
        record_url = url_for(
            'invenio_records_ui.recid', pid_value=deposit['_deposit']['id'])
        deposit_url = url_for(
            'invenio_deposit_ui.depid', pid_value=deposit['_deposit']['id'])
        delete_url = url_for(
            'zenodo_deposit.delete', pid_value=deposit['_deposit']['id'])
    deposit.publish()
    recid, record = deposit.fetch_published()
    # Delete both the record PID and the deposit PID to create a tombstone.
    recid.delete()
    deposit.pid.delete()
    # Anonymous: login-protected endpoints still redirect (302) first.
    assert app_client.post(record_url).status_code == 302
    assert app_client.get(delete_url).status_code == 302
    assert app_client.get(deposit_url).status_code == 410
    login_user_via_session(app_client, email=users[0]['email'])
    # Owner sees 410 Gone everywhere.
    assert app_client.post(record_url).status_code == 410
    assert app_client.get(deposit_url).status_code == 410
    assert app_client.get(delete_url).status_code == 410
    login_user_via_session(app_client, email=users[2]['email'])
    assert app_client.get(delete_url).status_code == 410
def test_record_delete(mocker, app, db, users, deposit, deposit_file):
    """Delete the record with a single version.

    All PIDs (doi, conceptdoi, recid, conceptrecid, depid) must end up
    DELETED, both DOIs must be deleted in DataCite (mocked), and the
    tombstone record must carry removal metadata.
    """
    dc_mock = mocker.patch(
        'invenio_pidstore.providers.datacite.DataCiteMDSClient')
    deposit = publish_and_expunge(db, deposit)
    recid, record = deposit.fetch_published()
    # Stash a copy of record metadata for later
    rec = deepcopy(record)
    record_uuid = str(record.id)
    assert dc_mock().metadata_delete.call_count == 0
    # users[0] is not an Admin but it doesn't matter in this case.
    delete_record(record.id, 'spam', users[0]['id'])
    # Make sure all PIDs are deleted
    # TODO: oai PID is left registered
    # assert PID.get('oai', rec['_oai']['id']) == PIDStatus.DELETED
    assert PID.get('doi', rec['doi']).status == PIDStatus.DELETED
    assert PID.get('doi', rec['conceptdoi']).status == PIDStatus.DELETED
    assert PID.get('recid', rec['recid']).status == PIDStatus.DELETED
    assert PID.get('recid', rec['conceptrecid']).status == PIDStatus.DELETED
    assert PID.get('depid', rec['_deposit']['id']).status == PIDStatus.DELETED
    # Both the version DOI and the concept DOI were deleted in DataCite.
    assert dc_mock().metadata_delete.call_count == 2
    dc_mock().metadata_delete.assert_any_call('10.5072/zenodo.1')
    dc_mock().metadata_delete.assert_any_call('10.5072/zenodo.2')
    # The tombstone record keeps who removed it and why.
    record = Record.get_record(record_uuid)
    assert record['removed_by'] == users[0]['id']
    assert record['removal_reason'] == 'Spam record, removed by Zenodo staff.'
def test_record_delete_v1(mocker, app, db, users, deposit, deposit_file):
    """Delete the first version of a record with multiple versions.

    Deleting v1 must delete its own PIDs, redirect the concept recid to
    v2 and leave all v2 PIDs (and the concept DOI) untouched.
    """
    dc_mock = mocker.patch(
        'invenio_pidstore.providers.datacite.DataCiteMDSClient')
    deposit_v1 = publish_and_expunge(db, deposit)
    recid_v1, record_v1 = deposit.fetch_published()
    recid_v1_value = recid_v1.pid_value
    deposit_v1.newversion()
    recid_v1, record_v1 = record_resolver.resolve(recid_v1_value)
    # Stash a copy of v1 for later
    rec1 = deepcopy(record_v1)
    rec1_id = str(record_v1.id)
    pv = PIDVersioning(child=recid_v1)
    depid_v2 = pv.draft_child_deposit
    deposit_v2 = ZenodoDeposit.get_record(depid_v2.get_assigned_object())
    deposit_v2.files['file.txt'] = BytesIO(b('file1'))
    deposit_v2 = publish_and_expunge(db, deposit_v2)
    recid_v2, record_v2 = deposit_v2.fetch_published()
    # Stash a copy of v2 for later
    rec2 = deepcopy(record_v2)
    assert dc_mock().metadata_delete.call_count == 0
    # Remove the first version
    delete_record(rec1_id, 'spam', users[0]['id'])
    # Make sure all v1 PIDs are deleted
    assert PID.get('doi', rec1['doi']).status == PIDStatus.DELETED
    assert PID.get('doi', rec1['conceptdoi']).status == PIDStatus.REGISTERED
    assert PID.get('recid', rec1['recid']).status == PIDStatus.DELETED
    # Make sure conceptrecid is redirecting to v2 (as before)
    crecid = PID.get('recid', rec1['conceptrecid'])
    assert crecid.get_redirect() == PID.get('recid', rec2['recid'])
    assert crecid.status == PIDStatus.REDIRECTED
    assert PID.get('depid', rec1['_deposit']['id']).status == PIDStatus.DELETED
    # Make sure the v2 PIDs are kept intact
    assert PID.get('oai', rec2['_oai']['id']).status == PIDStatus.REGISTERED
    assert PID.get('doi', rec2['doi']).status == PIDStatus.REGISTERED
    assert PID.get('recid', rec2['recid']).status == PIDStatus.REGISTERED
    assert PID.get('depid', rec2['_deposit']['id']).status == \
        PIDStatus.REGISTERED
    # Only the v1 DOI should be deleted in DataCite.
    # BUG FIX: ``Mock`` has no ``has_any_call`` method -- the previous
    # ``assert dc_mock().doi_post.has_any_call(...)`` lines always passed,
    # because attribute access on a Mock returns a (truthy) child mock.
    # Inspect the recorded calls instead; the DOI is the first positional
    # argument of ``doi_post(doi, url)`` -- TODO confirm against the
    # DataCite provider's call signature.
    assert dc_mock().doi_post.call_count == 2
    posted_dois = {call[0][0] for call in dc_mock().doi_post.call_args_list}
    assert '10.5072/zenodo.3' in posted_dois
    assert '10.5072/zenodo.1' in posted_dois
    assert dc_mock().metadata_delete.call_count == 1
    dc_mock().metadata_delete.assert_any_call('10.5072/zenodo.2')
    # The tombstone record keeps who removed it and why.
    record = Record.get_record(rec1_id)
    assert record['removed_by'] == users[0]['id']
    assert record['removal_reason'] == 'Spam record, removed by Zenodo staff.'
def test_record_delete_v2(mocker, app, db, users, deposit, deposit_file):
    """Delete a record (only last version) with multiple versions.

    Deleting v2 must delete its own PIDs, redirect the concept recid
    back to v1 and leave all v1 PIDs (and the concept DOI) untouched.
    """
    dc_mock = mocker.patch(
        'invenio_pidstore.providers.datacite.DataCiteMDSClient')
    deposit_v1 = publish_and_expunge(db, deposit)
    recid_v1, record_v1 = deposit.fetch_published()
    recid_v1_value = recid_v1.pid_value
    deposit_v1.newversion()
    recid_v1, record_v1 = record_resolver.resolve(recid_v1_value)
    # Stash a copy of v1 for later
    rec1 = deepcopy(record_v1)
    pv = PIDVersioning(child=recid_v1)
    depid_v2 = pv.draft_child_deposit
    deposit_v2 = ZenodoDeposit.get_record(depid_v2.get_assigned_object())
    deposit_v2.files['file.txt'] = BytesIO(b('file1'))
    deposit_v2 = publish_and_expunge(db, deposit_v2)
    recid_v2, record_v2 = deposit_v2.fetch_published()
    # Stash a copy of v2 for later
    rec2 = deepcopy(record_v2)
    rec2_id = str(record_v2.id)
    assert dc_mock().metadata_delete.call_count == 0
    # Remove the second (latest) version.
    # (The original comment said "first version", but ``rec2_id`` is v2.)
    delete_record(rec2_id, 'spam', users[0]['id'])
    # Make sure all v2 PIDs are deleted
    assert PID.get('doi', rec2['doi']).status == PIDStatus.DELETED
    assert PID.get('recid', rec2['recid']).status == PIDStatus.DELETED
    assert PID.get('depid', rec2['_deposit']['id']).status == PIDStatus.DELETED
    # Concept DOI should be left registered
    assert PID.get('doi', rec2['conceptdoi']).status == PIDStatus.REGISTERED
    # Make sure conceptrecid is redirecting to v1
    crecid = PID.get('recid', rec2['conceptrecid'])
    assert crecid.status == PIDStatus.REDIRECTED
    assert crecid.get_redirect() == PID.get('recid', rec1['recid'])
    # Make sure the v1 PIDs are kept intact
    assert PID.get('oai', rec1['_oai']['id']).status == PIDStatus.REGISTERED
    assert PID.get('doi', rec1['doi']).status == PIDStatus.REGISTERED
    assert PID.get('recid', rec1['recid']).status == PIDStatus.REGISTERED
    assert PID.get('depid', rec1['_deposit']['id']).status == \
        PIDStatus.REGISTERED
    # Only the v2 DOI should be deleted in DataCite.
    # BUG FIX: ``Mock`` has no ``has_any_call`` method -- the previous
    # ``assert dc_mock().doi_post.has_any_call(...)`` lines always passed,
    # because attribute access on a Mock returns a (truthy) child mock.
    # Inspect the recorded calls instead; the DOI is the first positional
    # argument of ``doi_post(doi, url)`` -- TODO confirm against the
    # DataCite provider's call signature.
    assert dc_mock().doi_post.call_count == 2
    posted_dois = {call[0][0] for call in dc_mock().doi_post.call_args_list}
    assert '10.5072/zenodo.2' in posted_dois
    assert '10.5072/zenodo.1' in posted_dois
    assert dc_mock().metadata_delete.call_count == 1
    dc_mock().metadata_delete.assert_any_call('10.5072/zenodo.3')
    # The tombstone record keeps who removed it and why.
    record = Record.get_record(rec2_id)
    assert record['removed_by'] == users[0]['id']
    assert record['removal_reason'] == 'Spam record, removed by Zenodo staff.'
def test_record_delete_legacy(mocker, app, db, users, deposit, deposit_file):
    """Delete the non-versioned record.

    Simulates a legacy (pre-versioning) record by dropping ``conceptdoi``;
    deletion must then remove all remaining PIDs and delete only the
    single version DOI in DataCite (mocked).
    """
    dc_mock = mocker.patch(
        'invenio_pidstore.providers.datacite.DataCiteMDSClient')
    deposit = publish_and_expunge(db, deposit)
    recid, record = deposit.fetch_published()
    # 'Simulate' a non-versioned record by removing 'conceptdoi' key
    del deposit['conceptdoi']
    del record['conceptdoi']
    deposit.commit()
    record.commit()
    db.session.commit()
    # Stash a copy of record metadata for later
    rec = deepcopy(record)
    record_uuid = str(record.id)
    assert dc_mock().metadata_delete.call_count == 0
    # users[0] is not an Admin but it doesn't matter in this case.
    delete_record(record.id, 'spam', users[0]['id'])
    # Make sure all PIDs are deleted
    # TODO: oai PID is left registered
    # assert PID.get('oai', rec['_oai']['id']) == PIDStatus.DELETED
    assert PID.get('doi', rec['doi']).status == PIDStatus.DELETED
    assert PID.get('recid', rec['recid']).status == PIDStatus.DELETED
    assert PID.get('recid', rec['conceptrecid']).status == PIDStatus.DELETED
    assert PID.get('depid', rec['_deposit']['id']).status == PIDStatus.DELETED
    # Only one DOI exists for a legacy record, so only one DataCite delete.
    assert dc_mock().metadata_delete.call_count == 1
    dc_mock().metadata_delete.assert_any_call('10.5072/zenodo.2')
    # The tombstone record keeps who removed it and why.
    record = Record.get_record(record_uuid)
    assert record['removed_by'] == users[0]['id']
    assert record['removal_reason'] == 'Spam record, removed by Zenodo staff.'
| 12,789 | Python | .py | 249 | 46.409639 | 79 | 0.698814 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,333 | test_sips.py | zenodo_zenodo/tests/unit/deposit/test_sips.py | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Test Zenodo deposit workflow."""
from __future__ import absolute_import, print_function
import json
from flask_security import login_user
from invenio_sipstore.models import SIP, RecordSIP, SIPFile, SIPMetadata
from six import BytesIO
def test_basic_workflow(app, db, users, deposit):
    """Test simple deposit publishing workflow.

    First publication archives metadata and files as a SIP; republishing
    after an edit archives metadata again but no files.
    """
    with app.test_request_context(environ_base={'REMOTE_ADDR': '127.0.0.1'}):
        datastore = app.extensions['security'].datastore
        login_user(datastore.get_user(users[0]['email']))
        deposit.files['one.txt'] = BytesIO(b'Test')
        deposit.files['two.txt'] = BytesIO(b'Test2')
        deposit = deposit.publish()
        # Should create one SIP, one RecordSIP and two SIPFiles
        assert SIP.query.count() == 1
        assert SIPMetadata.query.count() == 2  # BagIt + JSON metadata
        assert RecordSIP.query.count() == 1
        assert SIPFile.query.count() == 2
        sip = SIP.query.one()
        # SIP records the acting user and the request's client address.
        assert sip.user_id == users[0]['id']
        assert sip.agent['email'] == users[0]['email']
        assert sip.agent['ip_address'] == '127.0.0.1'
        assert len(sip.sip_files) == 2
        assert sip.sip_files[0].sip_id == sip.id
        assert sip.sip_files[1].sip_id == sip.id
        # Publishing the second time should create a new SIP and new
        # RecordSIP but no new SIPFiles. This is under the assumption that
        # users cannot upload new files to the already published deposit.
        deposit = deposit.edit()
        deposit['title'] = 'New Title'
        deposit = deposit.publish()
        assert SIP.query.count() == 2
        assert RecordSIP.query.count() == 2
        assert SIPMetadata.query.count() == 4  # BagIt + JSON per publishing
        assert SIPFile.query.count() == 2
        # Fetch the last RecordSIP and make sure, that
        # the corresponding SIP doesn't have any files
        recsip = RecordSIP.query.order_by(RecordSIP.created.desc()).first()
        assert not recsip.sip.sip_files
def test_programmatic_publish(app, db, deposit, deposit_file):
    """Test publishing outside of a request context.

    Might never happen, but at least shouldn't crash the system: the SIP
    is created without user information, with only the agent '$schema'.
    """
    deposit = deposit.publish()
    pid, record = deposit.fetch_published()
    sip = SIP.query.one()
    # No request, no logged-in user to attribute the SIP to.
    assert not sip.user_id
    # The JSON metadata archived is exactly the published record dump.
    assert sip.sip_metadata[0].content == json.dumps(record.dumps())
    assert sip.sip_metadata[0].type.format == 'json'
    assert sip.sip_metadata[0].type.name == 'json'
    assert sip.sip_metadata[0].type.schema == \
        'https://zenodo.org/schemas/records/record-v1.0.0.json'
    assert len(sip.record_sips) == 1
    assert sip.record_sips[0].pid_id == pid.id
    assert len(sip.agent) == 1  # Just the '$schema' key in agent info
    assert sip.agent['$schema'] == \
        'https://zenodo.org/schemas/sipstore/agent-webclient-v1.0.0.json'
def test_anonymous_request(app, db, deposit):
    """An anonymous publish records the client IP but no user identity."""
    with app.test_request_context(environ_base={'REMOTE_ADDR': '127.0.0.1'}):
        for key, content in (('one.txt', b'Test'), ('two.txt', b'Test2')):
            deposit.files[key] = BytesIO(content)
        deposit.publish()
    sip = SIP.query.one()
    # Anonymous: no user id and no email, only the request's IP address.
    assert sip.user_id is None
    assert 'email' not in sip.agent
    assert sip.agent['ip_address'] == '127.0.0.1'
    # Both uploaded files were archived and linked back to this SIP.
    assert len(sip.sip_files) == 2
    assert sip.sip_files[0].sip_id == sip.id
    assert sip.sip_files[1].sip_id == sip.id
| 4,254 | Python | .py | 89 | 41.921348 | 77 | 0.673332 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,334 | test_deposit_utils.py | zenodo_zenodo/tests/unit/deposit/test_deposit_utils.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test Zenodo deposit utils."""
from __future__ import absolute_import, print_function
from zenodo.modules.deposit.utils import suggest_language
def test_suggest_language():
    """Test language suggestions."""
    # An exact alpha-2 code yields a single match.
    matches = suggest_language('pl')
    assert [m.alpha_3 for m in matches] == ['pol']

    # 'Northern Sami' doesn't contain the substring 'sme' but must rank
    # first, since 'sme' is its ISO 639-2 code; name matches follow.
    matches = suggest_language('sme')
    assert len(matches) > 1
    assert matches[0].alpha_3 == 'sme'
    assert 'sme' not in matches[0].name.lower()
    assert 'sme' in matches[1].name.lower()

    # Name matching is case-insensitive.
    for query in ('POLISH', 'polish'):
        assert suggest_language(query)[0].alpha_3 == 'pol'
| 1,781 | Python | .py | 43 | 38.767442 | 76 | 0.721548 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,335 | test_minters.py | zenodo_zenodo/tests/unit/deposit/test_minters.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Unit tests for deposit/record minters."""
from __future__ import absolute_import, print_function
from uuid import uuid4
import pytest
from invenio_pidstore.errors import PIDDoesNotExistError
from invenio_pidstore.models import PersistentIdentifier, PIDStatus
from zenodo.modules.deposit.minters import zenodo_deposit_minter
from zenodo.modules.records.minters import zenodo_record_minter
def test_double_minting_depid_recid(db):
    """Test using same integer for dep/rec ids.

    The deposit minter reserves a recid with the same value as the depid;
    the record minter later registers that recid and adds DOI/OAI ids.
    """
    dep_uuid = uuid4()
    data = dict()
    pid = zenodo_deposit_minter(dep_uuid, data)
    # Assert values added to data. Depid and recid have IDs starting from
    # '2' since the conceptrecid is minted first
    assert data['_deposit']['id'] == '2'
    assert data['conceptrecid'] == '1'
    assert data['recid'] == 2
    assert 'doi' not in data
    # Assert pid values
    assert pid.pid_type == 'depid'
    assert pid.pid_value == '2'
    assert pid.status == PIDStatus.REGISTERED
    assert pid.object_uuid == dep_uuid
    # Assert reservation of recid.
    assert PersistentIdentifier.get('recid', pid.pid_value).status \
        == PIDStatus.RESERVED
    db.session.commit()
    # Assert registration of recid.
    rec_uuid = uuid4()
    pid = zenodo_record_minter(rec_uuid, data)
    assert pid.pid_type == 'recid'
    assert pid.pid_value == '2'
    assert pid.status == PIDStatus.REGISTERED
    assert pid.object_uuid == rec_uuid
    # Record minting also derives the DOI and OAI identifiers.
    assert data['doi'] == '10.5072/zenodo.2'
    assert data['_oai']['id'] == 'oai:zenodo.org:2'
@pytest.mark.parametrize('doi_in, doi_out', [
    # NOTE(review): external-DOI cases are disabled -- confirm intended
    # behavior before re-enabling.
    # ('10.1234/foo', '10.1234/foo'),
    # ('10.5072/foo', '10.5072/foo'),
    (None, '10.5072/zenodo.1'),
])
def test_doi_minting(db, doi_in, doi_out):
    """Test DOI minting during deposit/record minting.

    When no DOI is supplied, a Zenodo DOI is derived and reserved,
    attached to the record object.
    """
    dep_uuid, rec_uuid = uuid4(), uuid4()
    data = dict(doi=doi_in)
    zenodo_deposit_minter(dep_uuid, data)
    zenodo_record_minter(rec_uuid, data)
    db.session.commit()
    pid = PersistentIdentifier.get('doi', doi_out)
    assert pid.object_uuid == rec_uuid
    assert pid.status == PIDStatus.RESERVED
@pytest.mark.parametrize('doi', [
    '1234/foo',
    'a',
])
def test_invalid_doi(db, doi):
    """A malformed user-supplied DOI must not result in a DOI PID."""
    metadata = dict(doi=doi)
    zenodo_deposit_minter(uuid4(), metadata)
    # Only conceptrecid, recid and depid were minted -- no DOI PID.
    assert PersistentIdentifier.query.count() == 3
def test_unpublished_deposit_and_pid_deletion(deposit):
    """Deleting an unpublished deposit removes recid, soft-deletes depid."""
    # Before deletion: recid is reserved and has no object attached.
    recid = PersistentIdentifier.get('recid', str(deposit['recid']))
    assert recid and recid.status == PIDStatus.RESERVED
    assert not recid.has_object()
    # The depid is registered and points at the deposit object.
    depid = PersistentIdentifier.get('depid', str(deposit['_deposit']['id']))
    assert depid and depid.status == PIDStatus.REGISTERED
    assert depid.has_object()

    deposit.delete()

    # The reserved recid is removed entirely...
    with pytest.raises(PIDDoesNotExistError):
        PersistentIdentifier.get('recid', str(deposit['recid']))
    # ...while the depid remains, marked as deleted.
    depid = PersistentIdentifier.get('depid', str(deposit['_deposit']['id']))
    assert depid and depid.status == PIDStatus.DELETED
def test_published_external_doi(db, deposit, deposit_file):
    """Test published external DOI.

    Publishing with a non-Zenodo (external) DOI reserves it locally;
    re-publishing with a different external DOI removes the old
    reservation and reserves the new one.
    """
    ext_doi1 = '10.1234/foo'
    ext_doi2 = '10.1234/bar'
    deposit['doi'] = ext_doi1
    deposit.publish()
    db.session.commit()
    # Published record with external DOI must have:
    # 1) a registered recid with object
    recid = PersistentIdentifier.get('recid', str(deposit['recid']))
    assert recid and recid.status == PIDStatus.REGISTERED \
        and recid.has_object()
    # 2) a reserved external doi with object
    doi = PersistentIdentifier.get('doi', ext_doi1)
    assert doi and doi.status == PIDStatus.RESERVED \
        and doi.has_object()
    # Now change external DOI.
    deposit = deposit.edit()
    deposit['doi'] = ext_doi2
    deposit.publish()
    db.session.commit()
    # Ensure DOI 1 has been removed.
    pytest.raises(
        PIDDoesNotExistError, PersistentIdentifier.get, 'doi', ext_doi1)
    # Ensure DOI 2 has been reserved.
    doi = PersistentIdentifier.get('doi', ext_doi2)
    assert doi and doi.status == PIDStatus.RESERVED \
        and doi.has_object()
23,336 | test_tasks.py | zenodo_zenodo/tests/unit/deposit/test_tasks.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test Zenodo deposit tasks."""
from __future__ import absolute_import, print_function, unicode_literals
from copy import deepcopy
from uuid import uuid4
import datacite
import pytest
from flask_security import login_user
from invenio_pidrelations.contrib.versioning import PIDVersioning
from invenio_pidstore.models import PersistentIdentifier, PIDStatus
from invenio_records.api import Record
from invenio_search import current_search
from invenio_search.api import RecordsSearch
from zenodo.modules.deposit.api import ZenodoDeposit
from zenodo.modules.deposit.minters import zenodo_deposit_minter
from zenodo.modules.deposit.tasks import cleanup_indexed_deposits, \
datacite_register
from zenodo.modules.records.api import ZenodoRecord
from zenodo.modules.records.minters import zenodo_record_minter
def test_datacite_register(mocker, app, db, es, minimal_record):
    """Test DataCite DOI registration for versioned records.

    Registering a version's DOI must also (re-)register the concept DOI,
    with matching ``IsVersionOf``/``HasVersion`` related identifiers in
    the respective DataCite metadata payloads.
    """
    dc_mock = mocker.patch(
        'invenio_pidstore.providers.datacite.DataCiteMDSClient')
    # XML fragments expected in the version DOI metadata.
    doi_tags = [
        '<identifier identifierType="DOI">{doi}</identifier>',
        ('<relatedIdentifier relatedIdentifierType="DOI" '
         'relationType="IsVersionOf">{conceptdoi}</relatedIdentifier>'),
    ]
    # XML fragments expected in the concept DOI metadata; one HasVersion
    # entry is appended per registered version below.
    conceptdoi_tags = [
        '<identifier identifierType="DOI">{conceptdoi}</identifier>',
    ]
    has_part_tag = ('<relatedIdentifier relatedIdentifierType="DOI" '
                    'relationType="HasVersion">{doi}</relatedIdentifier>')

    # Assert calls and content
    def assert_datacite_calls_and_content(record, doi_tags, conceptdoi_tags):
        """Datacite client calls assertion helper."""
        # Both the version DOI and the concept DOI metadata were posted.
        assert dc_mock().metadata_post.call_count == 2
        _, doi_args, _ = dc_mock().metadata_post.mock_calls[0]
        _, conceptdoi_args, _ = dc_mock().metadata_post.mock_calls[1]
        assert all([t.format(**record) in doi_args[0] for t in doi_tags])
        assert all([t.format(**record) in conceptdoi_args[0]
                    for t in conceptdoi_tags])
        # BUG FIX: this was previously a bare comparison
        # ("dc_mock().doi_post.call_count == 2") and never asserted.
        assert dc_mock().doi_post.call_count == 2
        dc_mock().doi_post.assert_any_call(
            record['doi'],
            'https://zenodo.org/record/{}'.format(record['recid']))
        dc_mock().doi_post.assert_any_call(
            record['conceptdoi'],
            'https://zenodo.org/record/{}'.format(record['conceptrecid']))

    # Create conceptrecid for the records
    conceptrecid = PersistentIdentifier.create(
        'recid', '100', status=PIDStatus.RESERVED)

    def create_versioned_record(recid_value, conceptrecid):
        """Utility function for creating versioned records."""
        recid = PersistentIdentifier.create(
            'recid', recid_value, status=PIDStatus.RESERVED)
        pv = PIDVersioning(parent=conceptrecid)
        pv.insert_draft_child(recid)
        record_metadata = deepcopy(minimal_record)
        # Remove the DOI so the minter assigns a fresh local one.
        del record_metadata['doi']
        record_metadata['conceptrecid'] = conceptrecid.pid_value
        record_metadata['recid'] = int(recid.pid_value)
        record = ZenodoRecord.create(record_metadata)
        zenodo_record_minter(record.id, record)
        record.commit()
        return recid, record

    # Create and register the first version.
    recid1, r1 = create_versioned_record('101', conceptrecid)
    db.session.commit()
    datacite_register(recid1.pid_value, str(r1.id))
    conceptdoi_tags.append(has_part_tag.format(**r1))
    assert_datacite_calls_and_content(r1, doi_tags, conceptdoi_tags)

    # Create a new version; the concept DOI metadata now lists both.
    recid2, r2 = create_versioned_record('102', conceptrecid)
    db.session.commit()
    dc_mock().reset_mock()
    datacite_register(recid2.pid_value, str(r2.id))
    conceptdoi_tags.append(has_part_tag.format(**r2))
    assert_datacite_calls_and_content(r2, doi_tags, conceptdoi_tags)
def test_datacite_register_fail(mocker, app, db, es, minimal_record):
    """DataCite registration task retries when the API is unavailable."""
    # Simulate an unavailable DataCite API: every metadata POST raises.
    mds_client = mocker.patch(
        'invenio_pidstore.providers.datacite.DataCiteMDSClient')
    mds_client().metadata_post.side_effect = datacite.errors.HttpError()
    # Create and mint a record with a reserved recid.
    rec = Record.create(minimal_record)
    rec_uuid = rec.id
    rec_pid = PersistentIdentifier.create(
        'recid', rec['recid'], status=PIDStatus.RESERVED)
    zenodo_record_minter(rec_uuid, rec)
    rec.commit()
    db.session.commit()
    with pytest.raises(datacite.errors.HttpError):
        datacite_register.apply((rec_pid.pid_value, str(rec_uuid)))
    # The task must have attempted ("max_retries" + 1) calls in total:
    # the initial attempt plus the configured number of retries.
    attempts = len(mds_client().metadata_post.mock_calls)
    assert attempts == datacite_register.max_retries + 1
def test_cleanup_indexed_deposits(app, db, es, locations, users,
                                  deposit_metadata, sip_metadata_types):
    """Test removal of indexed deposits with no database counterpart.

    A deposit that was indexed in Elasticsearch but whose database
    transaction never committed is a "ghost" document; the cleanup task
    must delete it while leaving properly committed deposits intact.
    """
    second_deposit_metadata = deepcopy(deposit_metadata)
    # First deposit: created, indexed AND committed to the database.
    with app.test_request_context():
        datastore = app.extensions['security'].datastore
        login_user(datastore.get_user(users[0]['email']))
        id_ = uuid4()
        depid = zenodo_deposit_minter(id_, deposit_metadata)
        ZenodoDeposit.create(deposit_metadata, id_=id_)
        depid_value = depid.pid_value
        depid_type = depid.pid_type
        db.session.commit()
    # Second deposit: created and indexed, but never committed.
    with app.test_request_context():
        datastore = app.extensions['security'].datastore
        login_user(datastore.get_user(users[0]['email']))
        second_id_ = uuid4()
        second_depid = zenodo_deposit_minter(
            second_id_, second_deposit_metadata)
        ZenodoDeposit.create(second_deposit_metadata, id_=second_id_)
    # Emulate a database "failure", which would wipe any models in the session
    db.session.remove()
    current_search.flush_and_refresh(index='deposits')
    # Deposit has been indexed in ES, but not commited in DB
    assert PersistentIdentifier.query.filter(
        PersistentIdentifier.pid_type == second_depid.pid_type,
        PersistentIdentifier.pid_value == second_depid.pid_value).count() == 0
    assert (RecordsSearch(index='deposits').get_record(second_id_).execute()[0]
            ._deposit.id == second_depid.pid_value)
    # Deposit has been indexed in ES and is also commited in DB
    assert PersistentIdentifier.query.filter(
        PersistentIdentifier.pid_type == depid_type,
        PersistentIdentifier.pid_value == depid_value).count() == 1
    cleanup_indexed_deposits.apply()
    current_search.flush_and_refresh(index='deposits')
    # After cleanup: the ghost document is gone, the committed one stays.
    assert len(
        RecordsSearch(index='deposits').get_record(second_id_).execute()) == 0
    assert (RecordsSearch(index='deposits').get_record(id_).execute()[0]
            ._deposit.id == depid_value)
| 7,648 | Python | .py | 159 | 42.025157 | 79 | 0.707389 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,337 | test_deposit_index.py | zenodo_zenodo/tests/unit/deposit/test_deposit_index.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Unit tests Zenodo Deposit indexing."""
from __future__ import absolute_import, print_function
from helpers import publish_and_expunge
from invenio_deposit.api import Deposit
from invenio_indexer.api import RecordIndexer
from invenio_pidrelations.contrib.versioning import PIDVersioning
from invenio_pidstore.models import PersistentIdentifier, PIDStatus
from invenio_records.api import Record
from invenio_search import current_search
from invenio_search.api import RecordsSearch
from six import BytesIO, b
from zenodo.modules.deposit.api import ZenodoDeposit
from zenodo.modules.deposit.resolvers import deposit_resolver
from zenodo.modules.records.resolvers import record_resolver
def test_deposit_index(db, es):
    """Deposit search documents pick up metadata from published records."""
    deposit_index_name = 'deposits-records-record-v1.0.0'
    # A published record carrying the title ...
    published_record = Record.create({
        'title': 'One',
        '_deposit': {
            'status': 'published',
            'pid': {'type': 'recid', 'value': '1'},
        },
    })
    PersistentIdentifier.create(pid_type='recid', pid_value='1',
                                status=PIDStatus.REGISTERED,
                                object_uuid=published_record.id,
                                object_type='rec')
    # ... and a deposit referencing it via the same recid, without a title.
    Deposit.create({
        '_deposit': {
            'status': 'published',
            'pid': {'type': 'recid', 'value': '1'},
        },
    })
    db.session.commit()
    current_search.flush_and_refresh(deposit_index_name)
    hits = RecordsSearch(index=deposit_index_name).execute()['hits']['hits']
    # The indexed deposit document must carry the record's 'title'.
    assert hits[0]['_source']['title'] == 'One'
def test_versioning_indexing(db, es, deposit, deposit_file):
    """Test the indexing of 'version' relations.

    Walks through publish v1 -> create v2 draft -> publish v2 and checks
    the 'relations.version' payload indexed for each deposit and record
    at every stage.
    """
    deposit_index_name = 'deposits-records-record-v1.0.0'
    records_index_name = 'records-record-v1.0.0'
    # Stage 1: publish the first version and index its record.
    deposit_v1 = publish_and_expunge(db, deposit)
    depid_v1_value = deposit_v1['_deposit']['id']
    recid_v1, record_v1 = deposit_v1.fetch_published()
    recid_v1_value = recid_v1.pid_value
    RecordIndexer().index_by_id(str(record_v1.id))
    RecordIndexer().process_bulk_queue()
    current_search.flush_and_refresh(index=deposit_index_name)
    current_search.flush_and_refresh(index=records_index_name)
    s_dep = RecordsSearch(index=deposit_index_name).execute()['hits']['hits']
    s_rec = RecordsSearch(index=records_index_name).execute()['hits']['hits']
    assert len(s_dep) == 1
    assert len(s_rec) == 1
    assert 'relations' in s_dep[0]['_source']
    assert 'relations' in s_rec[0]['_source']
    # With a single published version, deposit and record documents carry
    # identical version relations.
    expected = {
        "version": [
            {
                "draft_child_deposit": None,
                "index": 0,
                "is_last": True,
                "last_child": {
                    "pid_type": "recid",
                    "pid_value": "2"
                },
                "count": 1,
                "parent": {
                    "pid_type": "recid",
                    "pid_value": "1"
                },
            }
        ]
    }
    assert s_dep[0]['_source']['relations'] == expected
    assert s_rec[0]['_source']['relations'] == expected
    # Stage 2: open a new (unpublished) version draft with one file.
    deposit_v1.newversion()
    pv = PIDVersioning(child=recid_v1)
    depid_v2 = pv.draft_child_deposit
    deposit_v2 = ZenodoDeposit.get_record(depid_v2.object_uuid)
    deposit_v2.files['file.txt'] = BytesIO(b('file1'))
    depid_v1, deposit_v1 = deposit_resolver.resolve(depid_v1_value)
    RecordIndexer().process_bulk_queue()
    current_search.flush_and_refresh(index=deposit_index_name)
    current_search.flush_and_refresh(index=records_index_name)
    s_dep = RecordsSearch(index=deposit_index_name).execute()['hits']['hits']
    s_rec = RecordsSearch(index=records_index_name).execute()['hits']['hits']
    assert len(s_dep) == 2  # Two deposits should be indexed
    assert len(s_rec) == 1  # One, since record does not exist yet
    s_dep1 = RecordsSearch(index=deposit_index_name).get_record(
        deposit_v1.id).execute()[0].to_dict()
    s_dep2 = RecordsSearch(index=deposit_index_name).get_record(
        deposit_v2.id).execute()[0].to_dict()
    # v1 is no longer last; both deposits point at the v2 draft deposit.
    expected_d1 = {
        "version": [
            {
                "draft_child_deposit": {
                    "pid_type": "depid",
                    "pid_value": "3"
                },
                "index": 0,
                "is_last": False,
                "last_child": {
                    "pid_type": "recid",
                    "pid_value": "2"
                },
                "parent": {
                    "pid_type": "recid",
                    "pid_value": "1"
                },
                "count": 2  # For deposit, draft children are also counted
            }
        ]
    }
    expected_d2 = {
        "version": [
            {
                "draft_child_deposit": {
                    "pid_type": "depid",
                    "pid_value": "3"
                },
                "index": 1,
                "is_last": True,
                "last_child": {
                    "pid_type": "recid",
                    "pid_value": "2"
                },
                "count": 2,  # For deposit, draft children are also counted
                "parent": {
                    "pid_type": "recid",
                    "pid_value": "1"
                },
            }
        ]
    }
    assert s_dep1['relations'] == expected_d1
    assert s_dep2['relations'] == expected_d2
    # Stage 3: publish the second version and re-index everything.
    deposit_v2 = publish_and_expunge(db, deposit_v2)
    recid_v2, record_v2 = deposit_v2.fetch_published()
    recid_v1, record_v1 = record_resolver.resolve(recid_v1_value)
    depid_v1, deposit_v1 = deposit_resolver.resolve(depid_v1_value)
    RecordIndexer().index_by_id(str(record_v2.id))
    RecordIndexer().process_bulk_queue()
    current_search.flush_and_refresh(index=deposit_index_name)
    current_search.flush_and_refresh(index=records_index_name)
    s_dep = RecordsSearch(index=deposit_index_name).execute()['hits']['hits']
    s_rec = RecordsSearch(index=records_index_name).execute()['hits']['hits']
    assert len(s_dep) == 2
    assert len(s_rec) == 2
    s_dep1 = RecordsSearch(index=deposit_index_name).get_record(
        deposit_v1.id).execute()[0].to_dict()
    s_dep2 = RecordsSearch(index=deposit_index_name).get_record(
        deposit_v2.id).execute()[0].to_dict()
    s_rec1 = RecordsSearch(index=records_index_name).get_record(
        record_v1.id).execute()[0].to_dict()
    s_rec2 = RecordsSearch(index=records_index_name).get_record(
        record_v2.id).execute()[0].to_dict()
    # No draft deposit remains; last child is now the v2 record ("3").
    expected_d1 = {
        "version": [
            {
                "draft_child_deposit": None,
                "index": 0,
                "is_last": False,
                "last_child": {
                    "pid_type": "recid",
                    "pid_value": "3"
                },
                "parent": {
                    "pid_type": "recid",
                    "pid_value": "1"
                },
                "count": 2
            }
        ]
    }
    expected_d2 = {
        "version": [
            {
                "draft_child_deposit": None,
                "index": 1,
                "is_last": True,
                "last_child": {
                    "pid_type": "recid",
                    "pid_value": "3"
                },
                "count": 2,
                "parent": {
                    "pid_type": "recid",
                    "pid_value": "1"
                },
            }
        ]
    }
    assert s_dep1['relations'] == expected_d1
    assert s_dep2['relations'] == expected_d2
    # Record documents mirror the deposit relations after publication.
    expected_r1 = {
        "version": [
            {
                "draft_child_deposit": None,
                "index": 0,
                "is_last": False,
                "last_child": {
                    "pid_type": "recid",
                    "pid_value": "3"
                },
                "parent": {
                    "pid_type": "recid",
                    "pid_value": "1"
                },
                "count": 2
            }
        ]
    }
    expected_r2 = {
        "version": [
            {
                "draft_child_deposit": None,
                "index": 1,
                "is_last": True,
                "last_child": {
                    "pid_type": "recid",
                    "pid_value": "3"
                },
                "count": 2,
                "parent": {
                    "pid_type": "recid",
                    "pid_value": "1"
                },
            }
        ]
    }
    assert s_rec1['relations'] == expected_r1
    assert s_rec2['relations'] == expected_r2
| 9,597 | Python | .py | 260 | 26.342308 | 77 | 0.534307 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,338 | test_api_files.py | zenodo_zenodo/tests/unit/deposit/test_api_files.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test validation in Zenodo deposit REST API."""
from __future__ import absolute_import, print_function, unicode_literals
import json
from datetime import datetime
import jwt
from flask import url_for
from invenio_search import current_search
from six import BytesIO
from zenodo.modules.deposit.resolvers import deposit_resolver
def get_data(**kwargs):
    """Return a minimal valid deposit payload.

    Any keyword arguments override the corresponding ``metadata`` keys.
    """
    metadata = {
        'upload_type': 'presentation',
        'title': 'Test title',
        'creators': [
            {'name': 'Doe, John', 'affiliation': 'Atlantis'},
        ],
        'description': 'Test Description',
        'publication_date': '2013-05-08',
        'access_right': 'open',
    }
    metadata.update(kwargs)
    return {'metadata': metadata}
def test_missing_files(api_client, json_auth_headers, deposit_url, locations,
                       es, get_json, license_record):
    """A deposit without any uploaded files cannot be published."""
    # Create a new, file-less deposit.
    res = api_client.post(
        deposit_url, data=json.dumps(get_data()), headers=json_auth_headers)
    links = get_json(res, code=201)['links']
    current_search.flush_and_refresh(index='deposits')
    # Publishing must fail with exactly one error (the missing file).
    res = api_client.post(links['publish'], headers=json_auth_headers)
    errors = get_json(res, code=400)['errors']
    assert len(errors) == 1
def test_multipart_onging(api, api_client, db, deposit, deposit_file, get_json,
                          json_auth_headers, deposit_url, license_record):
    """Publishing is blocked while a multipart upload is in progress.

    NOTE(review): "onging" in the test name looks like a typo for
    "ongoing"; kept as-is to preserve the public test identifier.
    """
    # Allow tiny multipart chunks for the test upload.
    api.config['FILES_REST_MULTIPART_CHUNKSIZE_MIN'] = 2
    headers = json_auth_headers
    deposit_url = '{0}/{1}'.format(deposit_url, deposit['_deposit']['id'])
    # Fetch the deposit's action/bucket links.
    res = api_client.get(deposit_url, headers=headers)
    links = get_json(res, code=200)['links']
    # Start a multipart upload inside the deposit's bucket.
    multipart_url = '{0}/bigfile?uploads&size=1000&partSize=500'.format(
        links['bucket'])
    res = api_client.post(multipart_url, headers=headers)
    multipart_links = get_json(res, code=200)['links']
    # Publishing must be rejected while the multipart upload is open.
    res = api_client.post(links['publish'], headers=headers)
    assert len(get_json(res, code=400)['errors']) == 1
    # Abort the multipart upload ...
    res = api_client.delete(multipart_links['self'], headers=headers)
    assert res.status_code == 204
    # ... after which publishing succeeds.
    assert api_client.post(
        links['publish'], headers=headers).status_code == 202
def test_file_ops(api_client, deposit, json_auth_headers, auth_headers,
                  deposit_url, get_json):
    """Test deposit file operations: upload, list, sort, rename, delete."""
    client = api_client
    headers = json_auth_headers
    auth = auth_headers
    # Create empty deposit
    res = client.post(deposit_url, data=json.dumps({}), headers=headers)
    links = get_json(res, code=201)['links']
    current_search.flush_and_refresh(index='deposits')
    # Upload same file twice - first ok, second not
    for code in [201, 400]:
        f = dict(file=(BytesIO(b'test'), 'test1.txt'), name='test1.txt')
        res = client.post(links['files'], data=f, headers=auth)
        # BUG FIX: this was previously a bare comparison
        # ("res.status_code == code") and never asserted.
        assert res.status_code == code
    # Upload another file
    client.post(
        links['files'],
        data=dict(file=(BytesIO(b'test'), 'test2.txt'), name='test2.txt'),
        headers=auth
    )
    # List files
    data = get_json(client.get(links['files'], headers=headers), code=200)
    assert len(data) == 2
    file_id = data[0]['id']
    file_url = '{0}/{1}'.format(links['files'], file_id)
    # Get file
    assert client.get(file_url, headers=headers).status_code == 200
    # File does not exists
    assert client.get(
        '{0}/invalid'.format(links['files']), headers=headers
    ).status_code == 404
    data = get_json(client.get(links['files'], headers=headers), code=200)
    # Sorting payload must reference files by 'id', not 'filename'.
    invalid_files_list = [dict(filename=x['filename']) for x in data]
    ok_files_list = list(reversed([dict(id=x['id']) for x in data]))
    # Sort - invalid
    assert client.put(
        links['files'], data=json.dumps(invalid_files_list), headers=headers
    ).status_code == 400
    # Sort - valid
    assert client.put(
        links['files'], data=json.dumps(ok_files_list), headers=headers
    ).status_code == 200
    # Delete
    assert client.delete(file_url, headers=headers).status_code == 204
    assert client.get(file_url, headers=headers).status_code == 404
    data = get_json(client.get(links['files'], headers=headers), code=200)
    assert len(data) == 1
    file_id = data[0]['id']
    file_url = '{0}/{1}'.format(links['files'], file_id)
    # Rename
    assert client.put(
        file_url, data=json.dumps(dict(filename='rename.txt')), headers=headers
    ).status_code == 200
    # Bad renaming: wrong key, and path traversal in the filename.
    for data in [dict(name='test.txt'), dict(filename='../../etc/passwd')]:
        assert client.put(
            file_url, data=json.dumps(data), headers=headers
        ).status_code == 400
    data = get_json(client.get(file_url, headers=headers), code=200)
    assert data['filename'] == 'rename.txt'
def test_deposit_deletion(api_client, deposit, json_auth_headers, deposit_url,
                          get_json, license_record, auth_headers):
    """Test file accessibility after deposit deletion.

    REST file resources answer 410 Gone after deletion, while raw bucket
    downloads answer 404.
    """
    client = api_client
    headers = json_auth_headers
    auth = auth_headers
    # Create
    res = client.post(
        deposit_url, data=json.dumps(get_data()), headers=headers)
    links = get_json(res, code=201)['links']
    current_search.flush_and_refresh(index='deposits')
    # Upload file
    res = client.post(
        links['files'],
        data=dict(file=(BytesIO(b'test'), 'test.txt'), name='test.txt'),
        headers=auth
    )
    assert res.status_code == 201
    # Get deposit links
    res = client.get(links['self'], headers=headers)
    data = get_json(res, code=200)
    # 'self' is the REST file resource; 'download' is the raw bucket URL.
    file_link = data['files'][0]['links']['self']
    download_link = data['files'][0]['links']['download']
    # Get file
    res = client.get(file_link, headers=headers)
    assert res.status_code == 200
    res = client.get(download_link, headers=auth)
    assert res.status_code == 200
    # Get file - unauthenticated
    res = client.get(file_link)
    assert res.status_code == 401  # Any request requires auth.
    res = client.get(download_link)
    assert res.status_code == 404
    #
    # Delete upload
    #
    res = client.delete(links['self'], headers=auth)
    assert res.status_code == 204
    # Try to get deposit.
    res = client.get(links['self'], headers=auth)
    assert res.status_code == 410
    # Try to get file
    res = client.get(file_link, headers=headers)
    assert res.status_code == 410
    res = client.get(download_link, headers=auth)
    assert res.status_code == 404
    # Try to get file - unauthenticated
    res = client.get(file_link)
    assert res.status_code == 410
    res = client.get(download_link)
    assert res.status_code == 404
def test_rat_deposit_files_access(
        app, db, api_client, deposit, deposit_file, deposit_url,
        json_auth_headers, license_record, rat_generate_token):
    """Test deposit files access via RATs.

    A RAT (resource access token, a JWT signed with a personal access
    token) scoped to 'read' on one deposit grants file downloads only —
    not deposit reads, edits or publishing.
    """
    client = api_client
    depid = deposit['_deposit']['id']
    # Make the token's user the owner of the deposit.
    deposit['owners'] = [rat_generate_token.user_id]
    deposit['_deposit']['owners'] = [rat_generate_token.user_id]
    deposit.commit()
    db.session.commit()
    # JWT signed with the user's access token; 'kid' header identifies
    # which token to verify against, 'sub' scopes it to this deposit.
    rat_token = jwt.encode(
        payload={
            'iat': datetime.utcnow(),
            'sub': {
                'deposit_id': depid,
                'access': 'read',
            },
        },
        key=rat_generate_token.access_token,
        algorithm='HS256',
        headers={'kid': str(rat_generate_token.id)},
    )
    deposit_url += '/' + str(depid)
    file_url = '/files/{}/test.txt'.format(deposit['_buckets']['deposit'])
    publish_url = deposit_url + '/actions/publish'
    # Without a token the file is hidden; with the RAT it is readable.
    res = client.get(file_url)
    assert res.status_code == 404
    res = client.get(file_url, query_string={'token': rat_token})
    assert res.status_code == 200
    # Try other forbidden operations using the RAT
    res = client.get(deposit_url, query_string={'token': rat_token})
    assert res.status_code == 401
    data = json.dumps(get_data())
    res = client.put(deposit_url, data=data, query_string={'token': rat_token})
    assert res.status_code == 401
    res = client.put(file_url, data=data, query_string={'token': rat_token})
    assert res.status_code == 404
    res = client.post(publish_url, query_string={'token': rat_token})
    assert res.status_code == 401
    # Change record owner
    depid, deposit = deposit_resolver.resolve(depid)
    deposit['owners'] = [123]
    deposit['_deposit']['owners'] = [123]
    deposit.commit()
    db.session.commit()
    # The RAT no longer matches the owner, so access is revoked.
    res = client.get(file_url)
    assert res.status_code == 404
    res = client.get(file_url, query_string={'token': rat_token})
    assert res.status_code == 404
| 10,027 | Python | .py | 251 | 34.051793 | 79 | 0.654733 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,339 | test_api_metadata.py | zenodo_zenodo/tests/unit/deposit/test_api_metadata.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016, 2019 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test validation in Zenodo deposit REST API."""
from __future__ import absolute_import, print_function
import json
from datetime import datetime, timedelta
from flask import url_for
from invenio_pidstore.models import PersistentIdentifier
from invenio_search import current_search
from six import BytesIO
def test_invalid_create(api_client, es, json_auth_headers, deposit_url,
                        get_json):
    """Malformed deposit payloads are rejected and create nothing."""
    # Payloads with an unknown top-level key or a misspelled 'metadata'.
    bad_payloads = (
        {'unknownkey': 'data', 'metadata': {}},
        {'metadat': {}},
    )
    for payload in bad_payloads:
        res = api_client.post(
            deposit_url, data=json.dumps(payload), headers=json_auth_headers)
        assert res.status_code == 400, payload
    # None of the failed requests created a deposit.
    listing = get_json(
        api_client.get(deposit_url, headers=json_auth_headers), code=200)
    assert len(listing) == 0
def test_input_output(api_client, es, json_auth_headers, deposit_url, get_json,
                      license_record, grant_records, locations, communities):
    """Rough validation of input against output data.

    Posts a deposit exercising (nearly) every metadata field and checks
    the GET serialization round-trips the same values.
    """
    client = api_client
    headers = json_auth_headers
    test_data = dict(
        metadata=dict(
            access_right='embargoed',
            communities=[{'identifier': 'c1'}],
            conference_acronym='Some acronym',
            conference_dates='Some dates',
            conference_place='Some place',
            conference_title='Some title',
            conference_url='http://someurl.com',
            conference_session='VI',
            conference_session_part='1',
            creators=[
                dict(name="Doe, John", affiliation="Atlantis",
                     orcid="0000-0002-1825-0097", gnd="170118215"),
                dict(name="Smith, Jane", affiliation="Atlantis")
            ],
            description="Some description",
            doi="10.1234/foo.bar",
            # Embargo must lie in the future for 'embargoed' access right.
            embargo_date=(
                datetime.utcnow().date() + timedelta(days=1)).isoformat(),
            grants=[dict(id="282896"), ],
            imprint_isbn="Some isbn",
            imprint_place="Some place",
            imprint_publisher="Some publisher",
            journal_issue="Some issue",
            journal_pages="Some pages",
            journal_title="Some journal name",
            journal_volume="Some volume",
            keywords=["Keyword 1", "keyword 2"],
            subjects=[
                dict(scheme="gnd", identifier="gnd:1234567899",
                     term="Astronaut"),
                dict(scheme="gnd", identifier="gnd:1234567898", term="Amish"),
            ],
            license="CC0-1.0",
            notes="Some notes",
            partof_pages="SOme part of",
            partof_title="Some part of title",
            prereserve_doi=True,
            publication_date="2013-09-12",
            publication_type="book",
            references=[
                "Reference 1",
                "Reference 2",
            ],
            related_identifiers=[
                dict(identifier='10.1234/foo.bar2', relation='isCitedBy',
                     scheme='doi'),
                dict(identifier='10.1234/foo.bar3', relation='cites',
                     scheme='doi',
                     resource_type='dataset'),
                dict(
                    identifier='2011ApJS..192...18K',
                    relation='isAlternateIdentifier',
                    scheme='ads',
                    resource_type='publication-article'),
            ],
            thesis_supervisors=[
                dict(name="Doe Sr., John", affiliation="Atlantis"),
                dict(name="Smith Sr., Jane", affiliation="Atlantis",
                     orcid="0000-0002-1825-0097",
                     gnd="170118215")
            ],
            thesis_university="Some thesis_university",
            contributors=[
                dict(name="Doe Sr., Jochen", affiliation="Atlantis",
                     type="Other"),
                dict(name="Smith Sr., Marco", affiliation="Atlantis",
                     orcid="0000-0002-1825-0097",
                     gnd="170118215",
                     type="DataCurator")
            ],
            title="Test title",
            upload_type="publication",
        )
    )
    # Create
    res = client.post(deposit_url, data=json.dumps(test_data), headers=headers)
    links = get_json(res, code=201)['links']
    current_search.flush_and_refresh(index='deposits')
    # Get serialization.
    data = get_json(client.get(links['self'], headers=headers), code=200)
    # - fix known differences.
    # DOI and recid have 2 as control number, since Concept DOI/recid are
    # registered first
    test_data['metadata'].update({
        'prereserve_doi': {'doi': '10.5072/zenodo.2', 'recid': 2}
    })
    assert data['metadata'] == test_data['metadata']
def test_unicode(api_client, es, locations, json_auth_headers, deposit_url,
                 get_json, license_record, grant_records, auth_headers,
                 communities):
    """Test deposit creation and publishing with Unicode metadata.

    Exercises non-ASCII values (many scripts) through create, file
    upload, publish, and final record retrieval.
    """
    client = api_client
    headers = json_auth_headers
    test_data = dict(
        metadata=dict(
            access_right='open',
            access_conditions='Αυτή είναι μια δοκιμή',
            communities=[{'identifier': 'c1'}],
            conference_acronym='Αυτή είναι μια δοκιμή',
            conference_dates='هذا هو اختبار',
            conference_place='Սա փորձություն',
            conference_title='Гэта тэст',
            conference_url='http://someurl.com',
            conference_session='5',
            conference_session_part='a',
            creators=[
                dict(name="Doe, John", affiliation="Това е тест"),
                dict(name="Smith, Jane", affiliation="Tio ĉi estas testo")
            ],
            description="这是一个测试",
            doi="10.1234/foo.bar",
            embargo_date="2010-12-09",
            grants=[dict(id="282896"), ],
            imprint_isbn="Some isbn",
            imprint_place="這是一個測試",
            imprint_publisher="ეს არის გამოცდა",
            journal_issue="આ એક કસોટી છે",
            journal_pages="זהו מבחן",
            journal_title="यह एक परीक्षण है",
            journal_volume="Þetta er prófun",
            keywords=["これはテストです", "ಇದು ಪರೀಕ್ಷೆ"],
            subjects=[
                dict(scheme="gnd", identifier="1234567899", term="これはです"),
                dict(scheme="gnd", identifier="1234567898", term="ಇ"),
            ],
            license="CC0-1.0",
            notes="이것은 테스트입니다",
            partof_pages="ນີ້ແມ່ນການທົດສອບ",
            partof_title="ही चाचणी आहे",
            prereserve_doi=True,
            publication_date="2013-09-12",
            publication_type="book",
            related_identifiers=[
                dict(
                    identifier='2011ApJS..192...18K',
                    relation='isAlternativeIdentifier'),
                dict(identifier='10.1234/foo.bar2', relation='isCitedBy'),
                dict(identifier='10.1234/foo.bar3', relation='cites'),
            ],
            thesis_supervisors=[
                dict(name="Doe Sr., این یک تست است", affiliation="Atlantis"),
                dict(name="Это Sr., Jane", affiliation="Atlantis")
            ],
            thesis_university="இந்த ஒரு சோதனை",
            contributors=[
                dict(name="Doe Sr., ن یک تست", affiliation="Atlantis",
                     type="Other"),
                dict(name="SmЭтith Sr., Marco", affiliation="Atlantis",
                     type="DataCurator")
            ],
            title="Đây là một thử nghiệm",
            upload_type="publication",
        )
    )
    # Create
    res = client.post(deposit_url, data=json.dumps(test_data), headers=headers)
    links = get_json(res, code=201)['links']
    current_search.flush_and_refresh(index='deposits')
    # Upload file
    assert client.post(
        links['files'],
        data=dict(file=(BytesIO(b'test'), 'test.txt'), name='test.txt'),
        headers=auth_headers,
    ).status_code == 201
    # Publish deposition
    response = client.post(links['publish'], headers=auth_headers)
    record_id = get_json(response, code=202)['record_id']
    # Get record.
    current_search.flush_and_refresh(index='records')
    response = client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id))
    # BUG FIX: the final GET was previously performed but its result was
    # never checked — the test silently ignored retrieval failures.
    assert response.status_code == 200
def test_validation(api_client, es, json_auth_headers, deposit_url, get_json,
                    license_record, grant_records, auth_headers):
    """Test that invalid metadata is rejected with per-field errors.

    Posts a deposit whose metadata is invalid in several independent
    ways and checks the API answers HTTP 400 and reports a validation
    error for every offending field.
    """
    client = api_client
    headers = json_auth_headers
    test_data = dict(metadata=dict(
        access_right='notvalid',
        conference_url='not_a_url',
        doi='not a doi',
        publication_date='not a date',
        title='',
        upload_type='notvalid',
        communities=[{'identifier': 'non-existent-community-id'}],
        grants=[{'id': 'non-existent-grant-id'}],
    ))
    data = get_json(
        client.post(deposit_url, data=json.dumps(test_data), headers=headers),
        code=400)
    field_errors = {e['field'] for e in data['errors']}
    # Set literal instead of set([...]); the subset check asserts every
    # expected field is reported while tolerating extra errors.
    expected_field_errors = {
        'metadata.access_right',
        'metadata.conference_url',
        'metadata.doi',
        'metadata.publication_date',
        'metadata.title',
        'metadata.upload_type',
        'metadata.grants',
        'metadata.communities',
    }
    assert expected_field_errors <= field_errors
def test_existing_doi(db, api_client, json_auth_headers, deposit_url,
                      minimal_deposit, get_json):
    """Test deposit creation with existing non-local DOI."""
    existing_doi = '10.1234/foo.bar'
    # Register the DOI up front so the deposit below collides with it.
    PersistentIdentifier.create(pid_type='doi', pid_value=existing_doi)
    db.session.commit()
    # A new deposit claiming the already-taken DOI must be rejected.
    minimal_deposit['metadata']['doi'] = existing_doi
    response = api_client.post(
        deposit_url,
        data=json.dumps(minimal_deposit),
        headers=json_auth_headers,
    )
    assert response.status_code == 400
| 11,455 | Python | .py | 269 | 30.962825 | 79 | 0.584105 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,340 | test_api_quickstart.py | zenodo_zenodo/tests/unit/deposit/test_api_quickstart.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test Zenodo deposit Quickstart."""
from __future__ import absolute_import, print_function, unicode_literals
import json
from flask import url_for
from invenio_search import current_search
from six import BytesIO
def test_zenodo_quickstart_workflow(api, db, es, locations, write_token,
                                    json_auth_headers, license_record):
    """Test zenodo quickstart workflow.

    Walks the documented "quickstart" flow end to end against the REST
    API: list deposits (anonymous vs. authenticated), create an empty
    deposit, upload one file, set the minimal metadata, publish, and
    finally fetch the resulting public record and its DOI.
    """
    with api.test_request_context():
        with api.test_client() as client:
            # Try get deposits as anonymous user — must be unauthorized.
            res = client.get(url_for('invenio_deposit_rest.depid_list'))
            assert res.status_code == 401
            # Try get deposits as logged-in user — empty list initially.
            res = client.get(
                url_for('invenio_deposit_rest.depid_list'),
                headers=json_auth_headers
            )
            assert res.status_code == 200
            data = json.loads(res.get_data(as_text=True))
            assert data == []
            # Create a new deposit from an empty JSON payload.
            res = client.post(
                url_for('invenio_deposit_rest.depid_list'),
                headers=json_auth_headers,
                data=json.dumps({})
            )
            assert res.status_code == 201
            data = json.loads(res.get_data(as_text=True))
            deposit_id = data['id']
            # A fresh deposit has no files, no title, no DOI yet.
            assert data['files'] == []
            assert data['title'] == ''
            assert 'created' in data
            assert 'modified' in data
            assert 'id' in data
            assert 'metadata' in data
            assert 'doi' not in data
            assert data['state'] == 'unsubmitted'
            assert data['owner'] == write_token['token'].user_id
            current_search.flush_and_refresh(index='deposits')
            # Upload a file (7-byte payload b'1, 2, 3').
            files = {'file': (BytesIO(b'1, 2, 3'), "myfirstfile.csv"),
                     'name': 'myfirstfile.csv'}
            res = client.post(
                data['links']['files'],
                headers=json_auth_headers,
                data=files,
                content_type='multipart/form-data',
            )
            assert res.status_code == 201
            data = json.loads(res.get_data(as_text=True))
            # checksum of the uploaded payload (presumably MD5 — filesize
            # matches len(b'1, 2, 3') == 7; TODO confirm algorithm)
            assert data['checksum'] == '66ce05ea43c73b8e33c74c12d0371bc9'
            assert data['filename'] == 'myfirstfile.csv'
            assert data['filesize'] == 7
            assert data['id']
            # Modify deposit with the minimal metadata required to publish.
            deposit = {
                "metadata": {
                    "title": "My first upload",
                    "upload_type": "poster",
                    "description": "This is my first upload",
                    "creators": [
                        {
                            "name": "Doe, John",
                            "affiliation": "Zenodo"
                        }
                    ]
                }
            }
            res = client.put(
                url_for(
                    'invenio_deposit_rest.depid_item', pid_value=deposit_id),
                headers=json_auth_headers,
                data=json.dumps(deposit)
            )
            assert res.status_code == 200
            # Publish deposit — returns the new record's id.
            res = client.post(
                url_for('invenio_deposit_rest.depid_actions',
                        pid_value=deposit_id, action='publish'),
                headers=json_auth_headers,
            )
            assert res.status_code == 202
            recid = json.loads(res.get_data(as_text=True))['record_id']
            # Check that record exists and is publicly readable.
            current_search.flush_and_refresh(index='records')
            res = client.get(url_for(
                'invenio_records_rest.recid_item', pid_value=recid))
            assert res.status_code == 200
            data = json.loads(res.get_data(as_text=True))
            # Assert that a DOI has been assigned.
            assert data['doi'] == '10.5072/zenodo.{0}'.format(recid)
| 4,942 | Python | .py | 116 | 30.887931 | 77 | 0.561721 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,341 | test_extra_buckets_permissions.py | zenodo_zenodo/tests/unit/deposit/test_extra_buckets_permissions.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2019 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test Zenodo deposit REST API."""
from __future__ import absolute_import, print_function, unicode_literals
import pytest
from helpers import login_user_via_session
from zenodo.modules.deposit.resolvers import deposit_resolver
@pytest.mark.parametrize('user_email,status,use_scope', [
    (None, 403, False),                  # anonymous user
    ('info@zenodo.org', 403, False),     # owner without scope headers
    ('info@zenodo.org', 200, True),      # owner with scope headers
    ('test@zenodo.org', 403, False),     # not the owner
    ('admin@zenodo.org', 200, False),    # admin user
])
def test_extra_formats_permissions(
        api, api_client, db, users, deposit, extra_auth_headers,
        user_email, status, use_scope):
    """Check who may probe the extra-formats endpoint of a deposit."""
    user_headers = extra_auth_headers if use_scope else []
    if user_email:
        login_user_via_session(api_client, email=user_email)
    formats_url = '/deposit/depositions/{0}/formats'.format(deposit['recid'])
    resp = api_client.options(formats_url, headers=user_headers)
    assert resp.status_code == status
@pytest.mark.parametrize('user_email,status', [
    # anonymous user
    (None, 404),
    # owner (extra-formats bucket is hidden even from the owner)
    ('info@zenodo.org', 404),
    # not owner
    ('test@zenodo.org', 404),
    # admin user
    ('admin@zenodo.org', 200),
])
def test_extra_formats_buckets_permissions(
        api, api_client, minimal_deposit, deposit_url, db, es, users,
        locations, json_extra_auth_headers, extra_auth_headers, license_record,
        user_email, status
):
    """Test Files-REST permissions for the extra formats bucket and files."""
    # Create deposit
    response = api_client.post(
        deposit_url, json=minimal_deposit, headers=json_extra_auth_headers)
    data = response.json
    # Get identifier and links
    depid = data['record_id']
    links = data['links']
    # Upload one regular file (status not asserted here — TODO confirm
    # whether this response should be checked)
    response = api_client.put(
        links['bucket'] + '/test1.txt',
        data='ctx',
        headers=extra_auth_headers,
    )
    # Add extra_formats bucket with a file
    response = api_client.put(
        '/deposit/depositions/{0}/formats'.format(depid),
        data='foo file',
        headers=[('Content-Type', 'application/foo+xml')] +
        extra_auth_headers
    )
    dep_uuid, deposit = deposit_resolver.resolve(depid)
    if user_email:
        # Login as user
        login_user_via_session(api_client, email=user_email)
    # Direct Files-REST access to the extra-formats bucket before publish.
    response = api_client.get(
        '/files/' + str(deposit.extra_formats.bucket.id)
    )
    assert response.status_code == status
    response = api_client.put(
        '/files/' + str(deposit.extra_formats.bucket.id) +
        '/application/foo+xml',
        data='ctx'
    )
    assert response.status_code == status
    # Publish deposition
    response = api_client.post(links['publish'], headers=extra_auth_headers)
    if user_email:
        # Login as user (publishing may have reset the session)
        login_user_via_session(api_client, email=user_email)
    # Same checks must hold after publishing.
    response = api_client.get(
        '/files/' + str(deposit.extra_formats.bucket.id)
    )
    assert response.status_code == status
    response = api_client.put(
        '/files/' + str(deposit.extra_formats.bucket.id) +
        '/application/foo+xml',
        data='ctx'
    )
    assert response.status_code == status
| 4,275 | Python | .py | 119 | 30.680672 | 79 | 0.674559 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,342 | test_api_editflow.py | zenodo_zenodo/tests/unit/deposit/test_api_editflow.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test Zenodo deposit REST API."""
from __future__ import absolute_import, print_function, unicode_literals
import json
from flask import url_for
from invenio_communities.models import Community
from invenio_pidstore.models import PersistentIdentifier, PIDStatus
from invenio_search import current_search
from invenio_sipstore.models import RecordSIP
from six import BytesIO
from zenodo.modules.deposit.tasks import datacite_register
def test_edit_flow(mocker, api, api_client, db, es, locations,
                   json_auth_headers, deposit_url, get_json, auth_headers,
                   json_headers, license_record, communities, resolver,):
    """Test simple flow using REST API.

    End-to-end walk: create a deposit with files, publish it (checking
    DOI registration and SIP archiving), edit and re-publish it
    (checking access-right and community changes take effect), then
    discard an edit and finally create and publish a new version.
    """
    # Stash the configuration and enable SIP writing to disk
    orig = api.config['SIPSTORE_ARCHIVER_WRITING_ENABLED']
    api.config['SIPSTORE_ARCHIVER_WRITING_ENABLED'] = True
    # Mock out DataCite and the SIP archiving task so no external
    # services are touched; calls are asserted on the mocks below.
    datacite_mock = mocker.patch(
        'invenio_pidstore.providers.datacite.DataCiteMDSClient')
    archive_task_mock = mocker.patch(
        'zenodo.modules.deposit.receivers.archive_sip')
    headers = json_auth_headers
    client = api_client
    test_data = dict(
        metadata=dict(
            upload_type='presentation',
            title='Test title',
            creators=[
                dict(name='Doe, John', affiliation='Atlantis'),
                dict(name='Smith, Jane', affiliation='Atlantis')
            ],
            description='Test Description',
            publication_date='2013-05-08',
            access_right='open',
            license='CC0-1.0',
            communities=[{'identifier': 'c1'}, {'identifier': 'c3'}],
        )
    )
    # Create deposit
    response = client.post(
        deposit_url, data=json.dumps(test_data), headers=headers)
    data = get_json(response, code=201)
    # Get identifier and links
    current_search.flush_and_refresh(index='deposits')
    links = data['links']
    # Upload 3 files
    for i in range(3):
        f = 'test{0}.txt'.format(i)
        response = client.post(
            links['files'],
            data=dict(file=(BytesIO(b'ctx'), f), name=f),
            headers=auth_headers,
        )
        assert response.status_code == 201, i
    # Update metadata
    newdata = dict(metadata=data['metadata'])
    newdata['metadata']['title'] = 'Updated title'
    resdata = get_json(client.put(
        links['self'], data=json.dumps(newdata), headers=headers
    ), code=200)
    # No SIP may be archived before the deposit is published.
    assert not archive_task_mock.delay.called
    # Publish deposition
    response = client.post(links['publish'], headers=auth_headers)
    data = get_json(response, code=202)
    record_id = data['record_id']
    assert PersistentIdentifier.query.filter_by(pid_type='depid').count() == 1
    # There should be two 'recid' PIDs - Concept PID and version PID
    assert PersistentIdentifier.query.filter_by(pid_type='recid').count() == 2
    recid_pid = PersistentIdentifier.get('recid', str(record_id))
    doi_pid = PersistentIdentifier.get(
        pid_type='doi', pid_value='10.5072/zenodo.1')
    assert doi_pid.status == PIDStatus.RESERVED
    # This task (datacite_register) would normally be executed asynchronously
    datacite_register(recid_pid.pid_value, recid_pid.object_uuid)
    assert doi_pid.status == PIDStatus.REGISTERED
    # Make sure it was registered properly in datacite
    # It should be called twice - for concept DOI and version DOI
    assert datacite_mock().metadata_post.call_count == 2
    # Concept DOI call
    datacite_mock().doi_post.assert_any_call(
        '10.5072/zenodo.1', 'https://zenodo.org/record/1')
    # Record DOI call
    datacite_mock().doi_post.assert_any_call(
        '10.5072/zenodo.2', 'https://zenodo.org/record/2')
    # Does the record exist?
    current_search.flush_and_refresh(index='records')
    # Was SIP writing task executed?
    sip = RecordSIP.query.filter_by(pid_id=recid_pid.id).one().sip
    archive_task_mock.delay.assert_called_with(str(sip.id))
    preedit_data = get_json(client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id),
        headers=json_headers,
    ), code=200)
    expected_doi = '10.5072/zenodo.{0}'.format(record_id)
    assert preedit_data['doi'] == expected_doi
    # - community c3 got auto-accepted (owned by deposit user)
    assert preedit_data['metadata']['communities'] == [{'identifier': 'c3'}]
    # Are files downloadable by everyone (open)?
    assert len(preedit_data['files']) == 3
    download_url = preedit_data['files'][0]['links']['download']
    assert client.get(download_url).status_code == 200
    # Edit record - can now be done immediately after.
    response = client.post(links['edit'], headers=auth_headers)
    assert response.status_code == 201
    # Edit - 2nd time is invalid.
    response = client.post(links['edit'], headers=auth_headers)
    assert response.status_code == 403  # FIXME 400
    # Get data
    data = get_json(client.get(links['self'], headers=auth_headers), code=200)
    # Not allowed to delete an already-published deposit
    assert client.delete(
        links['self'], headers=auth_headers).status_code == 403
    # Update metadata: close access, reorder creators, drop community c3
    data = dict(metadata=data['metadata'])
    data['metadata'].update(dict(
        title='New title',
        access_right='closed',
        creators=[
            dict(name="Smith, Jane", affiliation="Atlantis"),
            dict(name="Doe, John", affiliation="Atlantis"),
        ],
        communities=[
            {'identifier': 'c1'}
        ]
    ))
    resdata = get_json(client.put(
        links['self'], data=json.dumps(data), headers=headers
    ), code=200)
    assert resdata['title'] == 'New title'
    assert resdata['metadata']['title'] == 'New title'
    # Try to change DOI — rejected after publishing
    data['metadata']['doi'] = '10.1234/foo'
    data = get_json(client.put(
        links['self'], data=json.dumps(data), headers=headers
    ), code=400)
    # Approve community
    c = Community.get('c1')
    _, record = resolver.resolve(str(record_id))
    c.accept_record(record)
    record.commit()
    db.session.commit()
    # Get record to confirm if both communities should be visible now
    assert get_json(client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id),
        headers=json_headers,
    ), code=200)['metadata']['communities'] == [
        {'identifier': 'c1'},
        {'identifier': 'c3'},
    ]
    # Publish the edits
    response = client.post(links['publish'], headers=auth_headers)
    data = get_json(response, code=202)
    current_search.flush_and_refresh(index='records')
    # - is record still accessible?
    postedit_data = get_json(client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id),
        headers=json_headers,
    ), code=200)
    # - sanity checks
    assert postedit_data['doi'] == expected_doi
    assert postedit_data['record_id'] == record_id
    # - files should no longer be downloadable (closed access)
    # - download_url worked before edit, so make sure it doesn't work now.
    assert 'files' not in postedit_data
    assert client.get(download_url).status_code == 404
    # - c3 was removed, so only c1 should be visible now
    assert postedit_data['metadata']['communities'] == [
        {'identifier': 'c1'},
    ]
    # Was the second SIP sent for archiving?
    sip2 = RecordSIP.query.filter_by(pid_id=recid_pid.id).order_by(
        RecordSIP.created.desc()).first().sip
    archive_task_mock.delay.assert_called_with(str(sip2.id))
    # Get newversion url before editing the record
    data = get_json(client.get(links['self'], headers=auth_headers), code=200)
    new_version_url = data['links']['newversion']
    assert new_version_url ==\
        'http://localhost/deposit/depositions/2/actions/newversion'
    # Edit
    data = get_json(client.post(links['edit'], headers=auth_headers), code=201)
    # Update with a title that will be thrown away by the discard below
    data = dict(metadata=data['metadata'])
    data['metadata'].update(dict(title='Will be discarded'))
    resdata = get_json(client.put(
        links['self'], data=json.dumps(data), headers=headers
    ), code=200)
    # Discard
    data = get_json(
        client.post(links['discard'], headers=auth_headers),
        code=201)
    # Get and assert metadata — discard restored the published title
    data = get_json(client.get(links['self'], headers=auth_headers), code=200)
    assert data['title'] == postedit_data['title']
    # New Version
    data = get_json(
        client.post(new_version_url, headers=auth_headers),
        code=201)
    links = data['links']
    # Check if UI new version link is correct
    assert links['latest_draft_html'] ==\
        'http://localhost/deposit/3'
    # Get latest version
    data = get_json(
        client.get(links['latest_draft'], headers=auth_headers),
        code=200)
    links = data['links']
    # Update new version
    data = dict(metadata=data['metadata'])
    data['metadata'].update(dict(title='This is the new version'))
    resdata = get_json(client.put(
        links['self'], data=json.dumps(data), headers=headers
    ), code=200)
    links = resdata['links']
    # Add a file to the new deposit
    res = get_json(client.put(
        links['bucket'] + '/newfile.txt',
        input_stream=BytesIO(b'newfile'),
        headers=auth_headers,
    ), code=200)
    # Publish the new record
    response = client.post(links['publish'], headers=auth_headers)
    data = get_json(response, code=202)
    links = data['links']
    # Get the new record
    data = get_json(
        client.get(
            links['record'],
            headers=auth_headers
        ),
        code=200
    )
    # See that the title is updated accordingly
    assert data['metadata']['title'] == 'This is the new version'
    # Change the config back
    api.config['SIPSTORE_ARCHIVER_WRITING_ENABLED'] = orig
def create_deposit(client, headers, auth_headers, deposit_url, get_json,
                   data):
    """Create a deposit via the API.

    Posts a minimal software deposit (with *data* merged into its
    metadata), uploads one small file to it, and returns the deposit's
    links together with the deposit JSON.
    """
    payload = {
        'metadata': {
            'upload_type': 'software',
            'title': 'Test title',
            'creators': [
                {'name': 'Doe, John', 'affiliation': 'Atlantis'},
            ],
            'description': 'Test',
        }
    }
    payload['metadata'].update(data)
    # Create the deposit and make it visible to search.
    response = client.post(
        deposit_url, data=json.dumps(payload), headers=headers)
    deposit_json = get_json(response, code=201)
    current_search.flush_and_refresh(index='deposits')
    links = deposit_json['links']
    # Attach one file so the deposit can later be published.
    response = client.post(
        links['files'],
        data=dict(file=(BytesIO(b'ctx'), 'test.txt'), name='test.txt'),
        headers=auth_headers,
    )
    assert response.status_code == 201
    return links, deposit_json
def test_edit_doi(api_client, db, es, locations, json_auth_headers,
                  deposit_url, get_json, auth_headers, json_headers,
                  license_record, communities, resolver):
    """Test editing of external DOI.

    A deposit created with an external (non-Zenodo) DOI may change that
    DOI to another free external DOI — before and after publishing — but
    may never take an already-registered DOI, and once external it
    cannot switch back to a Zenodo-minted DOI.
    """
    headers = json_auth_headers
    client = api_client
    data = dict(doi='10.1234/foo')
    links, data = create_deposit(
        client, headers, auth_headers, deposit_url, get_json, data)
    assert data['doi'] == '10.1234/foo'
    record_url = url_for(
        'invenio_records_rest.recid_item', pid_value=data['record_id'])
    # Create a persistent identifier that is already registered
    PersistentIdentifier.create('doi', '10.1234/exists',
                                status=PIDStatus.REGISTERED)
    db.session.commit()
    # DOI exists — must be rejected
    data['metadata']['doi'] = '10.1234/exists'
    res = client.put(links['self'], data=json.dumps(data), headers=headers)
    assert res.status_code == 400
    # Update metadata with a free external DOI — accepted
    data['metadata']['doi'] = '10.1234/bar'
    res = client.put(links['self'], data=json.dumps(data), headers=headers)
    data = get_json(res, code=200)
    assert data['doi'] == '10.1234/bar'
    # Publish
    res = client.post(links['publish'], headers=auth_headers)
    data = get_json(res, code=202)
    assert data['doi'] == '10.1234/bar'
    assert PersistentIdentifier.query.filter_by(pid_type='depid').count() == 1
    assert PersistentIdentifier.query.filter_by(pid_type='recid').count() == 2
    assert PersistentIdentifier.query.filter_by(pid_type='doi').count() == 2
    doi_exists = PersistentIdentifier.get(pid_type='doi',
                                          pid_value='10.1234/exists')
    doi_external = PersistentIdentifier.get(pid_type='doi',
                                            pid_value='10.1234/bar')
    assert doi_exists.status == PIDStatus.REGISTERED
    # User-provided DOIs are not registered
    assert doi_external.status == PIDStatus.RESERVED
    # Get record
    res = client.get(record_url, headers=json_headers)
    data = get_json(res, code=200)
    assert data['doi'] == '10.1234/bar'
    # Edit
    res = client.post(links['edit'], headers=auth_headers)
    data = get_json(res, code=201)
    assert data['doi'] == '10.1234/bar'
    # Update - cannot get a zenodo doi now.
    data['metadata']['doi'] = ''
    res = client.put(links['self'], data=json.dumps(data), headers=headers)
    assert res.status_code == 400
    # Update api accepts data with no doi field (keeps the current DOI)
    del data['metadata']['doi']
    res = client.put(links['self'], data=json.dumps(data), headers=headers)
    assert res.status_code == 200
    data = get_json(res, code=200)
    assert data['doi'] == '10.1234/bar'
    assert data['metadata']['doi'] == '10.1234/bar'
    # Update to a different free external DOI — still allowed
    data['metadata']['doi'] = '10.4321/foo'
    res = client.put(links['self'], data=json.dumps(data), headers=headers)
    data = get_json(res, code=200)
    assert data['doi'] == '10.4321/foo'
    # Publish deposition
    res = client.post(links['publish'], headers=auth_headers)
    data = get_json(res, code=202)
    assert data['doi'] == '10.4321/foo'
    # Get record
    res = client.get(record_url, headers=json_headers)
    data = get_json(res, code=200)
    assert data['doi'] == '10.4321/foo'
    # Make sure the PIDs are correct
    assert PersistentIdentifier.query.filter_by(pid_type='depid').count() == 1
    assert PersistentIdentifier.query.filter_by(pid_type='recid').count() == 2
    assert PersistentIdentifier.query.filter_by(pid_type='doi').count() == 2
    doi_exists = PersistentIdentifier.get(pid_type='doi',
                                          pid_value='10.1234/exists')
    # external DOI should be updated
    doi_external = PersistentIdentifier.get(pid_type='doi',
                                            pid_value='10.4321/foo')
    assert doi_exists.status == PIDStatus.REGISTERED
    assert doi_external.status == PIDStatus.RESERVED
def test_noedit_doi(api_client, db, es, locations, json_auth_headers,
                    deposit_url, get_json, auth_headers, json_headers,
                    license_record, communities, resolver):
    """Test that a pre-reserved DOI cannot be changed after publishing.

    Before publishing, the DOI field may freely change between an
    external value, the pre-reserved DOI, and an empty string. After
    publishing with the pre-reserved DOI, edits must keep exactly that
    DOI.
    """
    headers = json_auth_headers
    client = api_client
    links, data = create_deposit(
        client, headers, auth_headers, deposit_url, get_json, {})
    # Update metadata with an external DOI (still allowed pre-publish).
    data['metadata']['doi'] = '10.1234/bar'
    res = client.put(links['self'], data=json.dumps(data), headers=headers)
    data = get_json(res, code=200)
    # Update with pre-reserved DOI.
    prereserved = data['metadata']['prereserve_doi']['doi']
    data['metadata']['doi'] = prereserved
    res = client.put(links['self'], data=json.dumps(data), headers=headers)
    data = get_json(res, code=200)
    # Update with empty string.
    data['metadata']['doi'] = ''
    res = client.put(links['self'], data=json.dumps(data), headers=headers)
    data = get_json(res, code=200)
    # Publish — the pre-reserved DOI gets minted.
    res = client.post(links['publish'], headers=auth_headers)
    data = get_json(res, code=202)
    assert data['doi'] == prereserved
    # Edit
    res = client.post(links['edit'], headers=auth_headers)
    data = get_json(res, code=201)
    # Update with invalid DOIs — each must now be rejected.
    for d in ['10.4321/foo', '']:
        data['metadata']['doi'] = d
        res = client.put(links['self'], data=json.dumps(data), headers=headers)
        # Bug fix: the original line compared the status code without
        # `assert`, so a wrong response went unnoticed.
        assert res.status_code == 400
    # Update with only valid DOI.
    data['metadata']['doi'] = prereserved
    res = client.put(links['self'], data=json.dumps(data), headers=headers)
    data = get_json(res, code=200)
    # Publish deposition
    res = client.post(links['publish'], headers=auth_headers)
    data = get_json(res, code=202)
    # Check if PIDs have been created (depid, recid, doi)
    PersistentIdentifier.query.filter_by(pid_type='depid').one()
    assert PersistentIdentifier.query.filter_by(pid_type='recid').count() == 2
    assert PersistentIdentifier.query.filter_by(pid_type='doi').count() == 2
    doi_pid = PersistentIdentifier.get('doi', data['doi'])
    assert doi_pid.status == PIDStatus.RESERVED
def test_publish_empty(api_client, db, es, locations, json_auth_headers,
                       deposit_url, get_json, auth_headers, json_headers,
                       license_record, communities, resolver):
    """Test if it is possible to circumvent metadata validation."""
    client = api_client
    headers = json_auth_headers
    # Create a deposit with completely empty metadata.
    response = client.post(deposit_url, data='{}', headers=headers)
    deposit_json = get_json(response, code=201)
    current_search.flush_and_refresh(index='deposits')
    links = deposit_json['links']
    # A file alone is not enough to publish.
    upload = client.post(
        links['files'],
        data=dict(file=(BytesIO(b'ctx'), 'test.txt'), name='test.txt'),
        headers=auth_headers,
    )
    assert upload.status_code == 201
    # Publishing without metadata must fail validation.
    response = client.post(links['publish'], headers=auth_headers)
    get_json(response, code=400)
def test_delete_draft(api, api_client, db, es, locations, json_auth_headers,
                      auth_headers, deposit_url, get_json, license_record):
    """Test deleting of Deposit draft using REST API.

    Deleting an unpublished draft must remove its reserved 'recid' PIDs,
    mark the 'depid' as deleted, and never create any 'doi' PID.
    """
    # NOTE(review): stale comment said "Setting var this way doesn't
    # work" — the aliases below are used throughout; TODO confirm what
    # it referred to and drop it if obsolete.
    headers = json_auth_headers
    client = api_client
    links, data = create_deposit(
        client, headers, auth_headers, deposit_url, get_json, {})
    # Two 'recid' PIDs - Concept PID and Version PID
    assert PersistentIdentifier.query.filter_by(pid_type='recid').count() == 2
    recid = PersistentIdentifier.get('recid', str(data['record_id']))
    depid = PersistentIdentifier.query.filter_by(pid_type='depid').one()
    assert recid.status == PIDStatus.RESERVED
    assert depid.status == PIDStatus.REGISTERED
    # Get deposition
    current_search.flush_and_refresh(index='deposits')
    response = client.get(links['self'], headers=auth_headers)
    assert response.status_code == 200
    # Delete deposition
    current_search.flush_and_refresh(index='deposits')
    response = client.delete(links['self'], headers=auth_headers)
    assert response.status_code == 204
    # 'recid' PID should be removed, while 'depid' should have status deleted.
    # No 'doi' PIDs should be created without publishing
    assert PersistentIdentifier.query.filter_by(pid_type='recid').count() == 0
    depid = PersistentIdentifier.query.filter_by(pid_type='depid').one()
    assert PersistentIdentifier.query.filter_by(pid_type='doi').count() == 0
    assert depid.status == PIDStatus.DELETED
| 20,297 | Python | .py | 465 | 37.017204 | 79 | 0.657732 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,343 | test_deposit_api.py | zenodo_zenodo/tests/unit/deposit/test_deposit_api.py | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Test Zenodo Deposit API."""
from __future__ import absolute_import, print_function
import json
from copy import deepcopy
import pytest
from flask import url_for
from helpers import login_user_via_session, publish_and_expunge
from invenio_pidrelations.contrib.versioning import PIDVersioning
from zenodo.modules.records.resolvers import record_resolver
def test_basic_deposit_edit(app, db, communities, deposit, deposit_file):
    """Test simple deposit publishing."""
    deposit = publish_and_expunge(db, deposit)
    _, record = deposit.fetch_published()
    oai_before = deepcopy(record['_oai'])
    # Tamper with protected fields in the editable draft; publishing
    # must restore them.
    deposit = deposit.edit()
    deposit['_files'][0]['bucket'] = record['_buckets']['deposit']
    deposit['_oai'] = {}
    deposit = publish_and_expunge(db, deposit)
    _, record = deposit.fetch_published()
    # The published record kept its original OAI info and record bucket.
    assert record['_oai'] == oai_before
    assert record['_files'][0]['bucket'] == record['_buckets']['record']
def test_deposit_versioning_draft_child_unlinking_bug(
        app, db, communities, deposit, deposit_file):
    """
    Bug with draft_child_deposit unlinking.

    Bug where a draft_child_deposit was unlinked from a new version draft,
    when another version of a record was edited and published.
    """
    deposit_v1 = publish_and_expunge(db, deposit)
    recid_v1, record_v1 = deposit.fetch_published()
    recid_v1_value = recid_v1.pid_value
    # Initiate a new version draft
    deposit_v1.newversion()
    recid_v1, record_v1 = record_resolver.resolve(recid_v1_value)
    pv = PIDVersioning(child=recid_v1)
    assert pv.draft_child_deposit
    assert pv.draft_child
    # NOTE(review): edit() is called twice and the first result is
    # discarded — presumably this double call triggers the original
    # bug; TODO confirm before simplifying.
    deposit_v1.edit()
    deposit_v1 = deposit_v1.edit()
    deposit_v1 = publish_and_expunge(db, deposit_v1)
    recid_v1, record_v1 = record_resolver.resolve(recid_v1_value)
    pv = PIDVersioning(child=recid_v1)
    # Make sure the draft child deposit was not unlinked due to publishing of
    # the edited draft
    assert pv.draft_child_deposit
    assert pv.draft_child
def test_deposit_with_custom_field(
    json_auth_headers, api, api_client, db, es, locations, users,
    license_record, minimal_deposit, deposit_url,
):
    """Test deposit with custom field publishing.

    Exercises validation of the ``metadata.custom`` vocabulary (unknown
    terms, wrong value types, missing/None values) and then publishes a
    deposit with valid custom metadata.
    """
    auth_headers = json_auth_headers
    # Test unknown custom metadata term
    minimal_deposit['metadata']['custom'] = {'dwc:foobar': 'Felidae'}
    response = api_client.post(
        deposit_url, json=minimal_deposit, headers=auth_headers)
    assert response.json['errors'] == [{
        'field': 'metadata.custom',
        'message':
            'Zenodo does not support "dwc:foobar" as a custom metadata term.'}]
    # Test wrong value type (number where a keyword is expected)
    minimal_deposit['metadata']['custom'] = {
        'dwc:family': [12131]
    }
    response = api_client.post(
        deposit_url, json=minimal_deposit, headers=auth_headers)
    assert response.json['errors'] == [{
        'field': 'metadata.custom',
        'message': 'Invalid type for term "dwc:family", should be "keyword".'}]
    # Test data not provided in an array
    minimal_deposit['metadata']['custom'] = {
        'dwc:family': 'Fox'
    }
    response = api_client.post(
        deposit_url, json=minimal_deposit, headers=auth_headers)
    assert response.json['errors'] == [{
        'field': 'metadata.custom',
        'message': 'Term "dwc:family" should be of type array.'}]
    # Test None instead of an array
    minimal_deposit['metadata']['custom'] = {
        'dwc:genus': None,
        'dwc:family': ['Felidae'],
    }
    response = api_client.post(
        deposit_url, json=minimal_deposit, headers=auth_headers)
    assert response.json['errors'] == [{
        'field': 'metadata.custom',
        'message': 'Term "dwc:genus" should be of type array.'}]
    # Test empty values array
    minimal_deposit['metadata']['custom'] = {
        'dwc:family': [],
        'dwc:behavior': ['Plays with yarn, sleeps in cardboard box.'],
    }
    response = api_client.post(
        deposit_url, json=minimal_deposit, headers=auth_headers)
    assert response.json['errors'] == [{
        'field': 'metadata.custom',
        'message': 'No values were provided for term "dwc:family".'}]
    # Test None mixed into an otherwise valid values array
    minimal_deposit['metadata']['custom'] = {
        'dwc:family': ['Felidae'],
        'dwc:behavior': ['foobar', None],
    }
    response = api_client.post(
        deposit_url, json=minimal_deposit, headers=auth_headers)
    assert response.json['errors'] == [{
        'field': 'metadata.custom',
        'message': 'Invalid type for term "dwc:behavior", should be "text".'}]
    # Test an array containing only None
    minimal_deposit['metadata']['custom'] = {
        'dwc:family': ['Felidae'],
        'dwc:behavior': [None],
    }
    response = api_client.post(
        deposit_url, json=minimal_deposit, headers=auth_headers)
    assert response.json['errors'] == [{
        'field': 'metadata.custom',
        'message': 'Invalid type for term "dwc:behavior", should be "text".'}]
    # Finally: fully valid custom metadata must round-trip unchanged
    expected_custom_data = {
        'dwc:family': ['Felidae'],
        'dwc:genus': ['Nighty', 'Reddish'],
        'dwc:behavior': ['Plays with yarn, sleeps in cardboard box.'],
    }
    minimal_deposit['metadata']['custom'] = expected_custom_data
    response = api_client.post(
        deposit_url, json=minimal_deposit, headers=auth_headers)
    assert response.json['metadata']['custom'] == expected_custom_data
    links = response.json['links']
    response = api_client.put(
        links['bucket'] + '/test',
        data='foo file',
        headers=auth_headers,
    )
    assert response.status_code == 200
    # Publish the record
    response = api_client.post(links['publish'], headers=auth_headers)
    # Get published record
    response = api_client.get(response.json['links']['record'])
    assert response.json['metadata']['custom'] == expected_custom_data
@pytest.mark.parametrize('user_info,status', [
    # anonymous user
    (None, 401),
    # validated user
    (dict(email='info@zenodo.org', password='tester'), 201),
    # non validated user
    (dict(email='nonvalidated@zenodo.org', password='tester'), 403),
    # validated but with blacklisted domain
    (dict(email='validated@evildomain.org', password='tester'), 403),
    # validated for a long time with blacklisted domain
    (dict(email='longvalidated@evildomain.org', password='tester'), 403),
    # non validated with blacklisted domain and external ids
    (dict(email='external@evildomain.org', password='tester'), 403),
])
def test_deposit_create_permissions(
        api, api_client, db, es, users, minimal_deposit, license_record,
        deposit_url, locations, user_info, status):
    """Test deposit creation permissions for different kinds of users.

    Parametrized over (user_info, status): each case logs in the given
    user (or stays anonymous when ``user_info`` is None) and checks that
    creating a deposit yields the expected HTTP status code.
    """
    if user_info:
        login_user_via_session(api_client, email=user_info['email'])
    # Attempt to create a deposit as the (possibly anonymous) user.
    response = api_client.post(
        deposit_url, json=minimal_deposit)
    assert response.status_code == status
| 7,751 | Python | .py | 182 | 36.983516 | 79 | 0.673708 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,344 | test_api_extra_buckets.py | zenodo_zenodo/tests/unit/deposit/test_api_extra_buckets.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2019 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test Zenodo deposit REST API."""
from __future__ import absolute_import, print_function, unicode_literals
from invenio_files_rest.models import Bucket
from zenodo.modules.deposit.resolvers import deposit_resolver
from zenodo.modules.records.resolvers import record_resolver
# Endpoint templates for the extra-formats sub-resource; `{0}` is filled in
# with a deposit id (depid) or a record id (recid) respectively.
extra_formats_urls = {
    'deposit': '/deposit/depositions/{0}/formats',
    'record': '/records/{0}/formats'
}
# Reusable header sets: 'foo'/'bar' set the Content-Type of the extra format
# being uploaded, the '*-accept' variants request that serialization back via
# content negotiation, and 'invalid-format' is a MIMEType outside the
# accepted whitelist (used to test rejection).
extra_formats_headers = {
    'foo': [('Content-Type', 'application/foo+xml')],
    'bar': [('Content-Type', 'application/bar+xml')],
    'foo-accept': [('Accept', 'application/foo+xml')],
    'bar-accept': [('Accept', 'application/bar+xml')],
    'invalid-format': [('Content-Type', 'application/invalid-format+xml')],
}
def use_extra_formats_functions(
        extra_auth_headers, api_client, get_json, recid=None, depid=None):
    """Test all available actions on extra formats for deposit and record.

    Exercises PUT/OPTIONS/GET/DELETE against the extra-formats endpoints of
    the deposit (``depid``) and, when given, read-only checks against the
    published record (``recid``).

    After this function the extra formats bucket will contain a single file:
    'application/foo+xml' with content 'foo file updated content'.

    :param extra_auth_headers: authorization headers for the test client.
    :param api_client: Flask test client for the API application.
    :param get_json: helper asserting the status code and decoding JSON.
    :param recid: optional id of a published record to verify against.
    :param depid: optional id of the deposit to manipulate.
    """
    if depid:
        # Add extra_formats bucket with a file
        response = api_client.put(
            extra_formats_urls['deposit'].format(depid),
            data='foo file',
            headers=extra_formats_headers['foo'] + extra_auth_headers
        )
        data = get_json(response, code=200)
        assert data['message'] == 'Extra format "application/foo+xml" updated.'
        # Add a second file ('bar') to the extra_formats bucket
        response = api_client.put(
            extra_formats_urls['deposit'].format(depid),
            data='bar file content',
            headers=extra_formats_headers['bar'] + extra_auth_headers
        )
        data = get_json(response, code=200)
        assert data['message'] == 'Extra format "application/bar+xml" updated.'
        # Get the list of the extra_formats files attached to this deposit
        response = api_client.options(
            extra_formats_urls['deposit'].format(depid),
            headers=extra_auth_headers)
        data = get_json(response, code=200)
        assert {f['key'] for f in data} == \
            {'application/foo+xml', 'application/bar+xml'}
    if recid:
        # The published record exposes the same two extra-format files
        response = api_client.options(
            extra_formats_urls['record'].format(recid))
        data = get_json(response, code=200)
        assert {f['key'] for f in data} == \
            {'application/foo+xml', 'application/bar+xml'}
        # Content negotiation via Accept header returns each file's content
        response = api_client.get(
            extra_formats_urls['record'].format(recid),
            headers=extra_formats_headers['foo-accept']
        )
        assert response.get_data(as_text=True) == 'foo file'
        response = api_client.get(
            extra_formats_urls['record'].format(recid),
            headers=extra_formats_headers['bar-accept']
        )
        assert response.get_data(as_text=True) == 'bar file content'
    # Delete a file from the extra_formats bucket
    response = api_client.delete(
        extra_formats_urls['deposit'].format(depid),
        headers=extra_formats_headers['bar'] + extra_auth_headers
    )
    data = get_json(response, code=200)
    assert data['message'] == 'Extra format "application/bar+xml" deleted.'
    # Get the list of the extra_formats files attached to this deposit
    # (only the 'foo' file remains after the delete above)
    response = api_client.options(
        extra_formats_urls['deposit'].format(depid),
        headers=extra_auth_headers)
    data = get_json(response, code=200)
    assert data[0]['key'] == 'application/foo+xml'
    assert len(data) == 1
    # Update the extra_formats file
    response = api_client.put(
        extra_formats_urls['deposit'].format(depid),
        data='foo file updated content',
        headers=extra_formats_headers['foo'] + extra_auth_headers
    )
    data = get_json(response, code=200)
    assert data['message'] == 'Extra format "application/foo+xml" updated.'
    # Check if the file is updated
    response = api_client.get(
        extra_formats_urls['deposit'].format(depid),
        headers=extra_formats_headers['foo-accept'] + extra_auth_headers
    )
    assert response.get_data(as_text=True) == 'foo file updated content'
    # Try to add a non-whitelisted extra format mimetype (must be rejected)
    response = api_client.put(
        extra_formats_urls['deposit'].format(depid),
        data='A file that should not be accepted',
        headers=extra_formats_headers['invalid-format'] +
        extra_auth_headers
    )
    assert response.status_code == 400
    assert response.json['message'] == \
        '"application/invalid-format+xml" is not an acceptable MIMEType.'
def test_extra_formats_buckets(
        api, api_client, db, es, locations, json_extra_auth_headers,
        deposit_url, get_json, extra_auth_headers, json_headers,
        license_record, communities, resolver, minimal_deposit):
    """Test extra-formats buckets over the full deposit/record lifecycle.

    Covers: lazy creation of the extra_formats bucket, publishing,
    versioning (the new version inherits a snapshot of the extra formats)
    and independence of the per-version extra-formats buckets.
    """
    headers = json_extra_auth_headers
    client = api_client
    test_data = minimal_deposit
    # Create deposit
    response = client.post(
        deposit_url, json=test_data, headers=headers)
    data = get_json(response, code=201)
    # Get identifier and links
    depid = data['record_id']
    links = data['links']
    # Upload 1 file
    response = client.put(
        links['bucket'] + '/test1.txt',
        data='ctx',
        headers=extra_auth_headers,
    )
    assert response.status_code == 200
    # List the extra-format files of the deposit (none yet)
    response = api_client.options(
        extra_formats_urls['deposit'].format(depid), headers=headers)
    data = get_json(response, code=200)
    # Only the deposit's files bucket exists; the extra_formats bucket is
    # created lazily on the first extra-format upload
    buckets = Bucket.query.all()
    assert len(buckets) == 1
    # There are no extra_formats files
    assert data == []
    use_extra_formats_functions(
        extra_auth_headers, api_client, get_json, depid=depid)
    # Now both the files bucket and the extra_formats bucket exist
    buckets = Bucket.query.all()
    assert len(buckets) == 2
    deposit = deposit_resolver.resolve(depid)[1]
    assert deposit['_buckets']['extra_formats'] == \
        str(deposit.extra_formats.bucket.id)
    # Publish deposition
    response = client.post(links['publish'], headers=extra_auth_headers)
    data = get_json(response, code=202)
    first_version_recid = data['record_id']
    # Get the list of the extra_formats files attached to this deposit
    response = api_client.options(
        extra_formats_urls['record'].format(first_version_recid))
    data = get_json(response, code=200)
    assert data[0]['key'] == 'application/foo+xml'
    assert len(data) == 1
    # Test actions and clear extra_formats bucket
    use_extra_formats_functions(extra_auth_headers, api_client, get_json,
                                depid=depid, recid=first_version_recid)
    # Get newversion url
    data = get_json(
        client.get(links['self'], headers=extra_auth_headers), code=200
    )
    new_version_url = data['links']['newversion']
    # New Version
    data = get_json(
        client.post(new_version_url, headers=extra_auth_headers), code=201)
    links = data['links']
    # Get the list of the extra_formats files attached to the new deposit
    # Should be the same with the previous version
    response = api_client.options(
        extra_formats_urls['deposit'].format(depid),
        headers=extra_auth_headers
    )
    data = get_json(response, code=200)
    assert data[0]['key'] == 'application/foo+xml'
    assert len(data) == 1
    # Get latest version
    data = get_json(
        client.get(links['latest_draft'], headers=extra_auth_headers),
        code=200)
    links = data['links']
    depid = data['record_id']
    # Add a file to the new deposit (a new version needs at least one change)
    get_json(client.put(
        links['bucket'] + '/newfile.txt',
        data='newfile',
        headers=extra_auth_headers,
    ), code=200)
    # Publish the new record
    response = client.post(links['publish'], headers=extra_auth_headers)
    data = get_json(response, code=202)
    links = data['links']
    recid = data['record_id']
    # Get the list of the extra_formats files attached to the new record
    response = api_client.options(extra_formats_urls['record'].format(recid))
    data = get_json(response, code=200)
    assert data[0]['key'] == 'application/foo+xml'
    assert len(data) == 1
    # Add file to extra_formats bucket
    response = api_client.put(
        extra_formats_urls['deposit'].format(recid),
        data='bar file content',
        headers=extra_formats_headers['bar'] + extra_auth_headers
    )
    data = get_json(response, code=200)
    assert data['message'] == 'Extra format "application/bar+xml" updated.'
    # Get the list of the extra_formats files attached to the new record
    response = api_client.options(extra_formats_urls['record'].format(recid))
    data = get_json(response, code=200)
    assert {f['key'] for f in data} == \
        {'application/foo+xml', 'application/bar+xml'}
    # Get the list of the extra_formats files attached to the previous record
    # Make sure that the snapshots are independent: adding 'bar' to the new
    # version must not have touched the first version's bucket
    response = api_client.options(
        extra_formats_urls['record'].format(first_version_recid))
    data = get_json(response, code=200)
    first_record = record_resolver.resolve(first_version_recid)[1]
    new_record = record_resolver.resolve(recid)[1]
    assert first_record.extra_formats.bucket.id != \
        new_record.extra_formats.bucket.id
    assert data[0]['key'] == 'application/foo+xml'
    assert len(data) == 1
    # Test actions and clear extra_formats bucket of deposit
    use_extra_formats_functions(
        extra_auth_headers, api_client, get_json, depid=depid, recid=recid)
def test_delete_deposit_with_extra_formats_bucket(
        api, api_client, db, es, locations, json_extra_auth_headers,
        deposit_url, get_json, extra_auth_headers, license_record,
        communities, resolver, minimal_deposit):
    """Test deleting a deposit with extra formats."""
    # Create a fresh deposit from the minimal metadata fixture.
    res = api_client.post(
        deposit_url, json=minimal_deposit, headers=json_extra_auth_headers)
    body = get_json(res, code=201)
    depid = body['record_id']
    links = body['links']
    # Attach one extra-format file, which creates the extra_formats bucket.
    res = api_client.put(
        extra_formats_urls['deposit'].format(depid),
        data='foo file',
        headers=extra_formats_headers['foo'] + extra_auth_headers
    )
    body = get_json(res, code=200)
    assert body['message'] == 'Extra format "application/foo+xml" updated.'
    # Delete the whole deposit.
    res = api_client.delete(links['self'], headers=extra_auth_headers)
    assert res.status_code == 204
    # Both the files bucket and the extra_formats bucket must be cleaned up.
    assert len(Bucket.query.all()) == 0
def test_add_extra_formats_bucket_to_published_record(
        api, api_client, db, es, locations, json_extra_auth_headers,
        deposit_url, get_json, extra_auth_headers, license_record,
        communities, resolver, minimal_deposit):
    """Test adding extra formats to an already published record."""
    # Create a deposit and upload one regular file.
    res = api_client.post(
        deposit_url, json=minimal_deposit, headers=json_extra_auth_headers)
    body = get_json(res, code=201)
    depid = body['record_id']
    links = body['links']
    res = api_client.put(
        links['bucket'] + '/test1.txt',
        data='ctx',
        headers=extra_auth_headers,
    )
    assert res.status_code == 200
    # Publish it; the published record gets its own recid.
    res = api_client.post(links['publish'], headers=extra_auth_headers)
    body = get_json(res, code=202)
    recid = body['record_id']
    # Only now attach an extra format, i.e. after publishing.
    res = api_client.put(
        extra_formats_urls['deposit'].format(recid),
        data='foo file',
        headers=extra_formats_headers['foo'] + extra_auth_headers
    )
    body = get_json(res, code=200)
    assert body['message'] == 'Extra format "application/foo+xml" updated.'
    # The published record now lists exactly that one extra-format file.
    res = api_client.options(extra_formats_urls['record'].format(recid))
    body = get_json(res, code=200)
    assert len(body) == 1
    assert body[0]['key'] == 'application/foo+xml'
    # Exercise the full set of extra-format operations.
    use_extra_formats_functions(
        extra_auth_headers, api_client, get_json, depid=depid, recid=recid)
| 13,594 | Python | .py | 309 | 36.954693 | 79 | 0.660613 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,345 | test_communities_merging.py | zenodo_zenodo/tests/unit/deposit/test_communities_merging.py | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Test Zenodo deposit workflow."""
from __future__ import absolute_import, print_function
import pytest
from helpers import publish_and_expunge
from invenio_communities.models import Community, InclusionRequest
from zenodo.modules.deposit.errors import MissingCommunityError
def test_basic_community_workflow(app, db, communities, deposit, deposit_file):
    """Test simple (without concurrent events) deposit publishing workflow.

    Covers: requesting a community, acceptance, rejection, and removal of a
    previously accepted community via the deposit's 'communities' key.
    """
    deposit = publish_and_expunge(db, deposit)
    assert InclusionRequest.query.count() == 0
    pid, record = deposit.fetch_published()
    assert not record.get('communities', [])
    # Open record for edit, request a community and publish
    deposit = deposit.edit()
    deposit['communities'] = ['c1', ]
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    # Should contain just an InclusionRequest: the record itself is not a
    # member of 'c1' until the community owner accepts it
    assert not record.get('communities', [])
    assert not record['_oai'].get('sets', [])
    assert InclusionRequest.query.count() == 1
    ir = InclusionRequest.query.one()
    assert ir.id_community == 'c1'
    assert ir.id_record == record.id
    # Accept a record to the community 'c1'
    c1 = Community.get('c1')
    c1.accept_record(record)
    record.commit()
    db.session.commit()
    assert InclusionRequest.query.count() == 0
    assert record['communities'] == ['c1', ]
    assert record['_oai']['sets'] == ['user-c1', ]
    # Open for edit and request another community
    deposit = deposit.edit()
    assert deposit['communities'] == ['c1', ]
    deposit['communities'] = ['c1', 'c2', ]  # New request for community 'c2'
    deposit = publish_and_expunge(db, deposit)
    # NOTE(review): this reassignment after publish looks redundant (the same
    # value was set before publishing) -- confirm it can be dropped
    deposit['communities'] = ['c1', 'c2', ]
    pid, record = deposit.fetch_published()
    assert record['communities'] == ['c1', ]
    assert record['_oai']['sets'] == ['user-c1', ]
    assert InclusionRequest.query.count() == 1
    ir = InclusionRequest.query.one()
    assert ir.id_community == 'c2'
    assert ir.id_record == record.id
    # Reject the request for community 'c2'
    c2 = Community.get('c2')
    c2.reject_record(record)
    db.session.commit()
    deposit = deposit.edit()
    # The deposit should not contain obsolete inclusion requests
    assert deposit['communities'] == ['c1', ]
    assert InclusionRequest.query.count() == 0
    pid, record = deposit.fetch_published()
    assert record['communities'] == ['c1', ]
    assert record['_oai']['sets'] == ['user-c1', ]
    # Request for removal from a previously accepted community 'c1'
    deposit['communities'] = []
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert not deposit.get('communities', [])
    assert not record.get('communities', [])
    assert not record['_oai'].get('sets', [])
    assert InclusionRequest.query.count() == 0
def test_accept_while_edit(app, db, communities, deposit, deposit_file):
    """Test deposit publishing with concurrent events.

    Accept a record, while deposit in open edit and then published.
    """
    deposit['communities'] = ['c1', 'c2']
    deposit = publish_and_expunge(db, deposit)
    assert InclusionRequest.query.count() == 2
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', 'c2']
    assert not record.get('communities', [])
    assert not record['_oai'].get('sets', [])
    # Open for edit
    deposit = deposit.edit()
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', 'c2']
    assert not record.get('communities', [])
    assert not record['_oai'].get('sets', [])
    assert InclusionRequest.query.count() == 2
    # Accept a record meanwhile (i.e. while the deposit is open for editing)
    c1 = Community.get('c1')
    c1.accept_record(record)
    record.commit()
    db.session.commit()
    # Publish and make sure nothing is missing: 'c1' membership is kept,
    # and the pending request for 'c2' survives
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', 'c2']
    assert record['communities'] == ['c1', ]
    assert record['_oai']['sets'] == ['user-c1', ]
    assert InclusionRequest.query.count() == 1
    ir = InclusionRequest.query.one()
    assert ir.id_community == 'c2'
    assert ir.id_record == record.id
def test_reject_while_edit(app, db, communities, deposit, deposit_file):
    """Test deposit publishing with concurrent events.

    Reject a record, while deposit in open edit and published.
    """
    # Request for community 'c1'
    deposit['communities'] = ['c1', ]
    deposit = publish_and_expunge(db, deposit)
    assert deposit['communities'] == ['c1', ]
    pid, record = deposit.fetch_published()
    assert not record.get('communities', [])
    assert InclusionRequest.query.count() == 1
    ir = InclusionRequest.query.one()
    assert ir.id_community == 'c1'
    assert ir.id_record == record.id
    # Open deposit in edit mode and request another community 'c2'
    deposit = deposit.edit()
    deposit['communities'] = ['c1', 'c2']
    # Reject the request for community 'c1' (concurrently with the edit)
    c1 = Community.get('c1')
    c1.reject_record(record)
    db.session.commit()
    # Publish the deposit
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    # NOTE: 'c1' is requested again! The deposit still listed 'c1', so
    # publishing re-creates the inclusion request that was just rejected.
    assert InclusionRequest.query.count() == 2
    ir1 = InclusionRequest.query.filter_by(id_community='c1').one()
    ir2 = InclusionRequest.query.filter_by(id_community='c2').one()
    assert ir1.id_record == record.id
    assert ir2.id_record == record.id
    assert deposit['communities'] == ['c1', 'c2']
    assert not record.get('communities', [])
def test_record_modified_while_edit(app, db, communities, deposit,
                                    deposit_file):
    """Test deposit publishing with concurrent events.

    Modify a record, while deposit in open edit and then published.
    """
    deposit['communities'] = ['c1', ]
    deposit = publish_and_expunge(db, deposit)
    assert InclusionRequest.query.count() == 1
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', ]
    assert not record.get('communities', [])
    # Open for edit
    deposit = deposit.edit()
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', ]
    assert not record.get('communities', [])
    assert InclusionRequest.query.count() == 1
    # Meanwhile, a record is modified directly (not through the deposit)
    record['title'] = 'Other title'
    record.commit()
    db.session.commit()
    # Publish and make sure nothing is missing: the pending inclusion
    # request must survive the concurrent record modification
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', ]
    assert not record.get('communities', [])
    assert InclusionRequest.query.count() == 1
    ir = InclusionRequest.query.one()
    assert ir.id_community == 'c1'
    assert ir.id_record == record.id
def test_remove_obsolete_irs(app, db, communities, deposit, deposit_file):
    """Test removal of obsolete IRs in-between deposit edits."""
    # First publish with a pending request for community 'c1'.
    deposit['communities'] = ['c1', ]
    deposit = publish_and_expunge(db, deposit)
    _, rec = deposit.fetch_published()
    assert deposit['communities'] == ['c1', ]
    assert not rec.get('communities', [])
    assert InclusionRequest.query.count() == 1
    # Re-open the deposit, withdraw the community request, publish again.
    deposit = deposit.edit()
    deposit['communities'] = []
    deposit = publish_and_expunge(db, deposit)
    _, rec = deposit.fetch_published()
    # The now-obsolete inclusion request must have been dropped.
    assert not deposit.get('communities', [])
    assert not rec.get('communities', [])
    assert InclusionRequest.query.count() == 0
def test_remove_community_by_key_del(app, db, communities, deposit,
                                     deposit_file):
    """Test removal of communities by key deletion.

    Communities can be removed by not providing or deleting the communities
    from the key deposit. Moreover, the redundant 'empty' keys should not be
    automatically added to deposit nor record.
    """
    # If 'communities' key was not in deposit metadata,
    # it shouldn't be automatically added
    assert 'communities' not in deposit
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert 'communities' not in deposit
    assert 'communities' not in record
    assert not record['_oai'].get('sets', [])
    # Request for 'c1' and 'c2'
    deposit = deposit.edit()
    deposit['communities'] = ['c1', 'c2', ]
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    # No reason to have 'communities' in record since nothing was accepted
    assert 'communities' not in record
    assert not record['_oai'].get('sets', [])
    # Accept 'c1'
    # NOTE(review): unlike the other tests in this module, there is no
    # db.session.commit() after accept_record here -- confirm record.commit()
    # plus the later publish is sufficient
    c1 = Community.get('c1')
    c1.accept_record(record)
    record.commit()
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', 'c2', ]
    assert InclusionRequest.query.count() == 1
    assert record['communities'] == ['c1', ]
    assert set(record['_oai']['sets']) == set(['user-c1'])
    # Remove the key from deposit and publish
    deposit = deposit.edit()
    del deposit['communities']
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    # Deleting the key removes membership and the pending request alike
    assert 'communities' not in deposit
    assert 'communities' not in record
    assert InclusionRequest.query.count() == 0
    assert not record['_oai'].get('sets', [])
def test_autoaccept_owned_communities(app, db, users, communities, deposit,
                                      deposit_file):
    """Automatically accept records requested by community owners."""
    # 'c3' is owned by the user, but not 'c1'
    deposit['communities'] = ['c1', 'c3', ]
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', 'c3', ]
    # 'c3' was auto-accepted (owner request); 'c1' stays a pending request
    assert record['communities'] == ['c3', ]
    assert record['_oai']['sets'] == ['user-c3']
    assert InclusionRequest.query.count() == 1
    ir = InclusionRequest.query.one()
    assert ir.id_community == 'c1'
    assert ir.id_record == record.id
    # Edit the deposit, and add more communities
    # 'c4' should be added automatically, but not 'c2'
    deposit = deposit.edit()
    deposit['communities'] = ['c1', 'c2', 'c3', 'c4', ]
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['c1', 'c2', 'c3', 'c4', ]
    assert record['communities'] == ['c3', 'c4', ]
    assert set(record['_oai']['sets']) == set(['user-c3', 'user-c4'])
    assert InclusionRequest.query.count() == 2
    ir1 = InclusionRequest.query.filter_by(id_community='c1').one()
    ir2 = InclusionRequest.query.filter_by(id_community='c2').one()
    assert ir1.id_record == record.id
    assert ir2.id_record == record.id
def test_fixed_communities(app, db, users, communities, deposit, deposit_file,
                           communities_autoadd_enabled):
    """Test automatic adding and requesting to fixed communities."""
    deposit['grants'] = [{'title': 'SomeGrant'}, ]
    # 'c3' is owned by one of the deposit owner
    assert Community.get('c3').id_user in deposit['_deposit']['owners']
    deposit['communities'] = ['c3', ]
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    # 'c3' (owned) and 'grants_comm' (auto-added) are accepted directly
    assert record['communities'] == ['c3', 'grants_comm']
    assert deposit['communities'] == ['c3', 'ecfunded', 'grants_comm',
                                      'zenodo']
    # Pending requests for the two fixed, non-auto-accepted communities.
    # BUGFIX: this was a bare expression (missing 'assert'), so the check
    # never actually ran.
    assert InclusionRequest.query.count() == 2
    ir1 = InclusionRequest.query.filter_by(id_community='zenodo').one()
    assert ir1.id_record == record.id
    ir2 = InclusionRequest.query.filter_by(id_community='ecfunded').one()
    assert ir2.id_record == record.id
def test_fixed_autoadd_redundant(app, db, users, communities, deposit,
                                 deposit_file, communities_autoadd_enabled):
    """Test automatic adding and requesting to fixed communities."""
    deposit['grants'] = [{'title': 'SomeGrant'}, ]
    # 'c3' is owned by one of the deposit owner
    assert Community.get('c3').id_user in deposit['_deposit']['owners']
    # Requesting for 'grants_comm', which would be added automatically
    # shouldn't cause problems
    deposit['communities'] = ['c3', 'grants_comm', 'zenodo']
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert record['communities'] == ['c3', 'grants_comm']
    assert deposit['communities'] == ['c3', 'ecfunded', 'grants_comm',
                                      'zenodo']
    # BUGFIX: this was a bare expression (missing 'assert'), so the check
    # never actually ran.
    assert InclusionRequest.query.count() == 2
    ir1 = InclusionRequest.query.filter_by(id_community='zenodo').one()
    assert ir1.id_record == record.id
    ir2 = InclusionRequest.query.filter_by(id_community='ecfunded').one()
    assert ir2.id_record == record.id
def test_fixed_communities_edit(app, db, users, communities, deposit,
                                deposit_file, communities_autoadd_enabled):
    """Test automatic adding and requesting to fixed communities."""
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    # 'zenodo' is auto-requested for every deposit when autoadd is enabled
    assert deposit['communities'] == ['zenodo', ]
    assert 'communities' not in record
    ir = InclusionRequest.query.one()
    assert ir.id_community == 'zenodo'
    assert ir.id_record == record.id
    # Open for edit
    deposit = deposit.edit()
    # Make sure 'zenodo' community is requested
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['zenodo', ]
    assert not record.get('communities', [])
    assert InclusionRequest.query.count() == 1
    # Accept the 'zenodo' request while the deposit is open for editing
    comm = Community.get('zenodo')
    comm.accept_record(record)
    record.commit()
    db.session.commit()
    # Publish and make sure nothing is missing
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['zenodo']
    assert record['communities'] == ['zenodo', ]
    assert record['_oai']['sets'] == ['user-zenodo', ]
    assert InclusionRequest.query.count() == 0
def test_fixed_communities_grants(app, db, users, communities, deposit,
                                  deposit_file, communities_autoadd_enabled):
    """Test automatic adding and requesting to fixed communities.

    Add to grants_comm also after later addition of grant information.
    """
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['zenodo', ]
    assert 'communities' not in record
    ir = InclusionRequest.query.one()
    assert ir.id_community == 'zenodo'
    assert ir.id_record == record.id
    # Adding a grant on edit triggers the grants-related fixed communities
    deposit = deposit.edit()
    deposit['grants'] = [{'title': 'SomeGrant'}, ]
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['ecfunded', 'grants_comm', 'zenodo', ]
    assert record['communities'] == ['grants_comm', ]
    # BUGFIX: the three count checks below were bare expressions (missing
    # 'assert'), so they never actually ran.
    assert InclusionRequest.query.count() == 2
    ir1 = InclusionRequest.query.filter_by(id_community='zenodo').one()
    assert ir1.id_record == record.id
    ir2 = InclusionRequest.query.filter_by(id_community='ecfunded').one()
    assert ir2.id_record == record.id
    # Remove 'grants' without auto requested community being accepted.
    # We should not remove the inclusion request as we don't know if user
    # requested it manually or whether it was an automatic request
    deposit = deposit.edit()
    deposit['grants'] = []
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['ecfunded', 'grants_comm', 'zenodo', ]
    assert record['communities'] == ['grants_comm', ]
    assert InclusionRequest.query.count() == 2
    ir1 = InclusionRequest.query.filter_by(id_community='zenodo').one()
    assert ir1.id_record == record.id
    ir2 = InclusionRequest.query.filter_by(id_community='ecfunded').one()
    assert ir2.id_record == record.id
    # However, if user explicitly removed auto-requested community, and grants
    # have been removed too, the IR should be removed.
    deposit = deposit.edit()
    deposit['grants'] = []
    # Removed 'ecfunded' and 'grants_comm' from deposit
    deposit['communities'] = ['zenodo', ]
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['zenodo', ]
    assert 'communities' not in record
    assert InclusionRequest.query.count() == 1
    ir1 = InclusionRequest.query.filter_by(id_community='zenodo').one()
    assert ir1.id_record == record.id
def test_fixed_autoadd_edit(app, db, users, communities, deposit,
                            deposit_file, communities_autoadd_enabled):
    """Test automatic adding and requesting to fixed communities.

    Add to grants_comm also after later addition of grant information.
    """
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    assert deposit['communities'] == ['zenodo', ]
    assert 'communities' not in record
    ir = InclusionRequest.query.one()
    assert ir.id_community == 'zenodo'
    assert ir.id_record == record.id
    deposit = deposit.edit()
    deposit['grants'] = [{'title': 'SomeGrant'}, ]
    # Requesting for 'grants_comm' and 'ecfunded' manually even though it will
    # be added due to specifying 'grants' shouldn't cause problems
    deposit['communities'] = ['ecfunded', 'grants_comm', 'zenodo']
    deposit = publish_and_expunge(db, deposit)
    pid, record = deposit.fetch_published()
    # BUGFIX: this was a bare expression (missing 'assert'), so the check
    # never actually ran.
    assert InclusionRequest.query.count() == 2
    ir1 = InclusionRequest.query.filter_by(id_community='zenodo').one()
    assert ir1.id_record == record.id
    ir2 = InclusionRequest.query.filter_by(id_community='ecfunded').one()
    assert ir2.id_record == record.id
    assert deposit['communities'] == ['ecfunded', 'grants_comm', 'zenodo', ]
    assert record['communities'] == ['grants_comm', ]
def test_nonexisting_communities(app, db, users, communities, deposit,
                                 deposit_file):
    """Test adding nonexisting community."""
    # Publishing with an unknown community name must raise a validation error.
    deposit['communities'] = ['nonexisting', ]
    with pytest.raises(MissingCommunityError) as err:
        publish_and_expunge(db, deposit)
    expected = {
        'message': 'Provided community does not exist: nonexisting',
        'field': 'metadata.communities',
    }
    assert err.value.errors[0].res == expected
| 19,293 | Python | .py | 413 | 41.082324 | 79 | 0.672248 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,346 | test_api_simpleflow.py | zenodo_zenodo/tests/unit/deposit/test_api_simpleflow.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test Zenodo deposit REST API."""
from __future__ import absolute_import, print_function, unicode_literals
import json
import pytest
from flask import url_for
from helpers import login_user_via_session
from invenio_pidstore.models import PersistentIdentifier
from invenio_search import current_search
from invenio_sipstore.models import RecordSIP
from six import BytesIO
def get_json(response, code=None):
    """Decode the JSON body of a test-client *response*.

    :param response: A Flask/Werkzeug test response object.
    :param code: Optional expected HTTP status code; when given, the
        status is asserted before decoding so mismatches surface early.
    :returns: The decoded JSON payload.
    """
    if code is not None:
        # Include the response body in the failure message so a status
        # mismatch shows *why* the server rejected the request.
        assert response.status_code == code, response.get_data(as_text=True)
    return json.loads(response.get_data(as_text=True))
def make_file_fixture(filename, text=None):
    """Build a ``(stream, filename)`` tuple usable as an upload fixture.

    The stream yields *text* when provided, otherwise the UTF-8 encoded
    file name itself.
    """
    payload = text if text else filename.encode('utf8')
    return (BytesIO(payload), filename)
def test_simple_rest_flow(mocker, api, api_client, db, es,
                          locations, users, write_token, license_record,
                          grant_records, funder_record):
    """Test simple flow using REST API.

    End-to-end: create a deposit with grant/license suggestions, upload
    files, publish, and verify the published record is read-only.
    """
    # Avoid talking to DataCite during the test.
    mocker.patch('invenio_pidstore.providers.datacite.DataCiteMDSClient')
    # Stash the configuration and enable writing
    orig = api.config['SIPSTORE_ARCHIVER_WRITING_ENABLED']
    api.config['SIPSTORE_ARCHIVER_WRITING_ENABLED'] = True
    # Intercept the SIP archiving task so we can assert it was scheduled.
    archive_task_mock = mocker.patch(
        'zenodo.modules.deposit.receivers.archive_sip')
    # Setting var this way doesn't work
    client = api_client
    test_data = dict(
        metadata=dict(
            upload_type='presentation',
            title='Test title',
            creators=[
                dict(name='Doe, John', affiliation='Atlantis'),
                dict(name='Smith, Jane', affiliation='Atlantis')
            ],
            description='Test Description',
            publication_date='2013-05-08',
            access_right='open',
        )
    )
    # Fetch grant suggestion
    funder = funder_record['doi']
    res = client.get("/grants/_suggest?text=open&funder={}".format(funder))
    grant_id = res.json['text'][0]['options'][0]['_source']['legacy_id']
    test_data['metadata']['grants'] = [{"id": grant_id}]
    # Fetch license suggestion
    res = client.get('/licenses/_suggest?text=CC0-1.0')
    test_data['metadata']['license'] = \
        res.json['text'][0]['options'][0]['_source']['id']
    # Prepare headers
    auth = write_token['auth_header']
    headers = [
        ('Content-Type', 'application/json'),
        ('Accept', 'application/json')
    ]
    auth_headers = headers + auth
    # Get deposit URL
    with api.test_request_context():
        deposit_url = url_for('invenio_deposit_rest.depid_list')
    # Try to create deposit as anonymous user (failing)
    response = client.post(
        deposit_url, data=json.dumps(test_data), headers=headers)
    assert response.status_code == 401
    # Create deposit
    response = client.post(
        deposit_url, data=json.dumps(test_data), headers=auth_headers)
    data = get_json(response, code=201)
    deposit_id = data['id']
    links = data['links']
    # Get deposition
    current_search.flush_and_refresh(index='deposits')
    response = client.get(links['self'], headers=auth)
    assert response.status_code == 200
    # Upload 3 files
    for i in range(3):
        response = client.post(
            links['files'],
            data={
                'file': make_file_fixture('test{0}.txt'.format(i)),
                'name': 'test-{0}.txt'.format(i),
            },
            headers=auth,
        )
        assert response.status_code == 201, i
    # Archiving must not happen before publishing.
    assert not archive_task_mock.delay.called
    # Publish deposition
    # Enable datacite minting
    response = client.post(links['publish'], headers=auth_headers)
    record_id = get_json(response, code=202)['record_id']
    recid_pid = PersistentIdentifier.get('recid', str(record_id))
    # Pass doi to record
    test_data['metadata']['doi'] = get_json(response, code=202)['doi']
    # Check that same id is being used for both deposit and record.
    assert deposit_id == record_id
    # Does the record exist?
    current_search.flush_and_refresh(index='records')
    response = client.get(
        url_for('invenio_records_rest.recid_item', pid_value=record_id))
    # Was SIP writing task executed?
    sip = RecordSIP.query.filter_by(pid_id=recid_pid.id).one().sip
    archive_task_mock.delay.assert_called_with(str(sip.id))
    # Second request will return forbidden since it's already published
    response = client.post(links['publish'], headers=auth_headers)
    assert response.status_code == 403  # FIXME should be 400
    # Not allowed to edit drafts
    response = client.put(
        links['self'], data=json.dumps(test_data), headers=auth_headers)
    assert response.status_code == 403
    # Not allowed to delete
    response = client.delete(
        links['self'], headers=auth)
    assert response.status_code == 403
    # Not allowed to sort files
    response = client.get(links['files'], headers=auth_headers)
    data = get_json(response, code=200)
    files_list = list(map(lambda x: {'id': x['id']}, data))
    files_list.reverse()
    response = client.put(
        links['files'], data=json.dumps(files_list), headers=auth)
    assert response.status_code == 403
    # Not allowed to add files
    i = 5
    response = client.post(
        links['files'],
        data={
            'file': make_file_fixture('test{0}.txt'.format(i)),
            'name': 'test-{0}.txt'.format(i),
        },
        headers=auth,
    )
    assert response.status_code == 403
    # Not allowed to delete file
    file_url = '{0}/{1}'.format(links['files'], files_list[0]['id'])
    response = client.delete(
        file_url, headers=auth)
    assert response.status_code == 403
    # Not allowed to rename file
    response = client.put(
        file_url,
        data=json.dumps(dict(filename='another_test.pdf')),
        headers=auth_headers,
    )
    assert response.status_code == 403
    # Change the config back
    api.config['SIPSTORE_ARCHIVER_WRITING_ENABLED'] = orig
def test_simple_delete(api_client, db, es, locations, json_auth_headers,
                       auth_headers, deposit_url):
    """Test that deleting a deposit removes it from the listing."""
    # Create an empty deposit and remember its links.
    res = api_client.post(
        deposit_url, data=json.dumps({}), headers=json_auth_headers)
    links = get_json(res, code=201)['links']
    current_search.flush_and_refresh(index='deposits')
    # The new deposit shows up in the listing.
    listing = get_json(
        api_client.get(deposit_url, headers=json_auth_headers), code=200)
    assert len(listing) == 1
    # Deleting the deposit succeeds...
    res = api_client.delete(links['self'], headers=auth_headers)
    assert res.status_code == 204
    current_search.flush_and_refresh(index='deposits')
    # ...and the listing becomes empty again.
    listing = get_json(
        api_client.get(deposit_url, headers=json_auth_headers), code=200)
    assert len(listing) == 0
    # A second delete reports the deposit as gone.
    res = api_client.delete(links['self'], headers=auth_headers)
    assert res.status_code == 410
@pytest.mark.parametrize('user_info,status', [
    # anonymous user
    (None, 401),
    # owner
    (dict(email='info@zenodo.org', password='tester'), 200),
    # not owner
    (dict(email='test@zenodo.org', password='tester2'), 403),
    # admin user
    (dict(email='admin@zenodo.org', password='admin'), 200),
])
def test_read_deposit_users(api, api_client, db, users, deposit, json_headers,
                            user_info, status):
    """Check GET access to a single deposit for different users."""
    pid_value = deposit['_deposit']['id']
    with api.test_request_context():
        with api.test_client() as client:
            if user_info:
                # Authenticate via the session cookie.
                login_user_via_session(client, email=user_info['email'])
            item_url = url_for('invenio_deposit_rest.depid_item',
                               pid_value=pid_value)
            res = client.get(item_url, headers=json_headers)
            assert res.status_code == status
@pytest.mark.parametrize('user_info,status,count_deposit', [
    # anonymous user
    (None, 401, 0),
    # owner
    (dict(email='info@zenodo.org', password='tester'), 200, 1),
    # not owner
    (dict(email='test@zenodo.org', password='tester2'), 200, 0),
    # admin user
    (dict(email='admin@zenodo.org', password='admin'), 200, 1),
])
def test_read_deposits_users(api, api_client, db, users, deposit, json_headers,
                             user_info, status, count_deposit):
    """Check the deposit listing for different users."""
    with api.test_request_context():
        with api.test_client() as client:
            if user_info:
                # Authenticate via the session cookie.
                login_user_via_session(client, email=user_info['email'])
            listing_url = url_for('invenio_deposit_rest.depid_list')
            res = client.get(listing_url, headers=json_headers)
            assert res.status_code == status
            if user_info:
                # Logged-in users only see deposits they may read.
                payload = json.loads(res.data.decode('utf-8'))
                assert len(payload) == count_deposit
@pytest.mark.parametrize('user_info,status', [
    # anonymous user
    (None, 401),
    # owner
    (dict(email='info@zenodo.org', password='tester'), 200),
    # not owner
    (dict(email='test@zenodo.org', password='tester2'), 403),
    # admin user
    (dict(email='admin@zenodo.org', password='admin'), 200),
])
def test_update_deposits_users(api, api_client, db, users, deposit,
                               json_headers, user_info, status):
    """Check PUT access to a deposit for different users."""
    pid_value = deposit['_deposit']['id']
    with api.test_request_context():
        with api.test_client() as client:
            if user_info:
                # Authenticate via the session cookie.
                login_user_via_session(client, email=user_info['email'])
            item_url = url_for('invenio_deposit_rest.depid_item',
                               pid_value=pid_value)
            res = client.put(item_url, data=json.dumps({}),
                             headers=json_headers)
            assert res.status_code == status
@pytest.mark.parametrize('user_info,status', [
    # anonymous user
    (None, 401),
    # owner
    (dict(email='info@zenodo.org', password='tester'), 204),
    # not owner
    (dict(email='test@zenodo.org', password='tester2'), 403),
    # admin user
    (dict(email='admin@zenodo.org', password='admin'), 204),
])
def test_delete_deposits_users(api, api_client, db, es, users, deposit,
                               json_headers, user_info, status):
    """Check DELETE access to a deposit for different users."""
    pid_value = deposit['_deposit']['id']
    with api.test_request_context():
        with api.test_client() as client:
            if user_info:
                # Authenticate via the session cookie.
                login_user_via_session(client, email=user_info['email'])
            item_url = url_for('invenio_deposit_rest.depid_item',
                               pid_value=pid_value)
            res = client.delete(item_url, headers=json_headers)
            assert res.status_code == status
def test_versioning_rest_flow(mocker, api, api_client, db, es, locations,
                              users, write_token, license_record):
    """Test record versioning through the REST API.

    Publishes a first version, then creates and publishes a new version,
    checking SIP archiving along the way.
    """
    # Avoid talking to DataCite during the test.
    mocker.patch('invenio_pidstore.providers.datacite.DataCiteMDSClient')
    # Stash the configuration and enable SIP writing to disk
    orig = api.config['SIPSTORE_ARCHIVER_WRITING_ENABLED']
    api.config['SIPSTORE_ARCHIVER_WRITING_ENABLED'] = True
    # Intercept the SIP archiving task so we can assert it was scheduled.
    archive_task_mock = mocker.patch(
        'zenodo.modules.deposit.receivers.archive_sip')
    client = api_client
    test_data = dict(
        metadata=dict(
            upload_type='presentation',
            title='Test title',
            creators=[
                dict(name='Doe, John', affiliation='Atlantis'),
                dict(name='Smith, Jane', affiliation='Atlantis')
            ],
            description='Test Description',
            publication_date='2013-05-08',
            access_right='open'
        )
    )
    # Prepare headers
    auth = write_token['auth_header']
    headers = [
        ('Content-Type', 'application/json'),
        ('Accept', 'application/json')
    ]
    auth_headers = headers + auth
    # Get deposit URL
    with api.test_request_context():
        deposit_url = url_for('invenio_deposit_rest.depid_list')
    # Create deposit
    response = client.post(
        deposit_url, data=json.dumps(test_data), headers=auth_headers)
    data = get_json(response, code=201)
    links = data['links']
    # Get deposition
    current_search.flush_and_refresh(index='deposits')
    response = client.get(links['self'], headers=auth)
    data = get_json(response, code=200)
    links = data['links']
    # Upload a file
    response = client.post(
        links['files'],
        data={
            'file': make_file_fixture('test-1.txt'),
            'name': 'test-1.txt',
        },
        headers=auth,
    )
    assert response.status_code == 201
    # Cannot create new version for unpublished record
    response = client.post(links['newversion'], headers=auth_headers)
    assert response.status_code == 403
    # Archiving must not happen before publishing.
    assert not archive_task_mock.delay.called
    # Publish deposition
    response = client.post(links['publish'], headers=auth_headers)
    assert response.status_code == 202
    data = get_json(response, code=202)
    record_id = data['record_id']
    recid_pid = PersistentIdentifier.get('recid', str(record_id))
    # Was SIP writing task executed?
    sip = RecordSIP.query.filter_by(pid_id=recid_pid.id).one().sip
    archive_task_mock.delay.assert_called_with(str(sip.id))
    # New version possible for published deposit
    response = client.post(links['newversion'], headers=auth_headers)
    assert response.status_code == 201
    # Calling again new version is a no-op
    response = client.post(links['newversion'], headers=auth_headers)
    links = get_json(response, code=201)['links']
    assert 'latest_draft' in links
    # Get the new version deposit
    current_search.flush_and_refresh(index='deposits')
    response = client.get(links['latest_draft'], headers=auth)
    data = get_json(response, code=200)
    links = data['links']
    # Deleting files allowed for new version
    response = client.get(links['files'], headers=auth_headers)
    data = get_json(response, code=200)
    files_list = list(map(lambda x: {'id': x['id']}, data))
    file_url = '{0}/{1}'.format(links['files'], files_list[0]['id'])
    response = client.delete(file_url, headers=auth)
    assert response.status_code == 204
    # Adding files allowed for new version
    response = client.post(
        links['files'],
        data={
            'file': make_file_fixture('test-2.txt'),
            'name': 'test-2.txt',
        },
        headers=auth,
    )
    assert response.status_code == 201
    # Publish new version
    response = client.post(links['publish'], headers=auth_headers)
    assert response.status_code == 202
    data = get_json(response, code=202)
    record_id = data['record_id']
    recid_pid = PersistentIdentifier.get('recid', str(record_id))
    # Was SIP writing task executed?
    sip = RecordSIP.query.filter_by(pid_id=recid_pid.id).one().sip
    archive_task_mock.delay.assert_called_with(str(sip.id))
    # Change the config back
    api.config['SIPSTORE_ARCHIVER_WRITING_ENABLED'] = orig
| 16,171 | Python | .py | 401 | 32.9601 | 79 | 0.636555 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,347 | test_errors.py | zenodo_zenodo/tests/unit/deposit/test_errors.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Unit tests for deposit."""
from __future__ import absolute_import, print_function, unicode_literals
import json
from flask import Flask
from zenodo.modules.deposit.errors import MarshmallowErrors
from zenodo.modules.deposit.loaders import legacyjson_v1_translator
def m(**kwargs):
    """Wrap the keyword arguments in a ``{'metadata': ...}`` dictionary."""
    return {'metadata': kwargs}
def assert_err(data, field):
    """Assert that loading *data* raises a marshmallow error on *field*.

    :param data: Legacy-JSON payload to run through the translator.
    :param field: Dotted path of the field expected among the reported
        errors; when falsy, only the raising itself is checked.
    :raises AssertionError: If no ``MarshmallowErrors`` is raised, or the
        expected field is not among the reported errors.
    """
    # Keep only the raising call inside ``try`` and use ``else`` for the
    # did-not-raise failure path (the original raised inside ``try``).
    try:
        legacyjson_v1_translator(data)
    except MarshmallowErrors as e:
        body = json.loads(e.get_body())
        if field:
            reported = [err['field'] for err in body['errors']]
            if field not in reported:
                # Name the fields that *were* reported to ease debugging.
                raise AssertionError(
                    'Field {0} not found in {1}.'.format(field, reported))
    else:
        raise AssertionError('Did not raise MarshmallowErrors.')
def test_level1_unknown_key():
    """Test that an unknown top-level key is reported."""
    assert_err({'unknownkey': 'invalid'}, 'unknownkey')
def test_level2_key():
    """Test that an invalid metadata value is reported with its path."""
    assert_err(m(publication_date='invalid'),
               'metadata.publication_date')
def test_level3_list():
    """Test errors reported for keys nested inside list items."""
    # An empty creator list violates the minimum-length constraint.
    assert_err(m(creators=[]), 'metadata.creators')
    # A creator entry without a name is rejected.
    assert_err(m(creators=[{'affiliation': 'CERN'}]),
               'metadata.creators.0.name')
    # An invalid contributor type is rejected; an app context is needed
    # for the contributor-type configuration lookup.
    app = Flask(__name__)
    app.config['DEPOSIT_CONTRIBUTOR_DATACITE2MARC'] = {}
    with app.app_context():
        assert_err(
            m(contributors=[{'name': 'a', 'affiliation': 'b',
                             'type': 'invalid'}]),
            'metadata.contributors.0.type')
    # Unknown keys inside a list item are reported with their index.
    assert_err(m(creators=[{'unknownkey': 'CERN'}]),
               'metadata.creators.0.unknownkey')
def test_upload_type():
    """Test validation of upload, publication and image types."""
    # Unknown top-level upload type.
    assert_err(m(upload_type='invalid'), 'metadata.upload_type')
    # Unknown subtype for publications.
    assert_err(m(upload_type='publication', subtype='invalid'),
               'metadata.publication_type')
    # Unknown subtype for images.
    assert_err(m(upload_type='image', subtype='invalid'),
               'metadata.image_type')
def test_related_identifiers():
    """Test validation of related identifiers."""
    bad_identifiers = [
        # Empty identifier.
        dict(identifier='', relation='cites'),
        # Identifier whose scheme cannot be detected.
        dict(identifier='abc', relation='cites'),
        # Identifier that does not match the given scheme.
        dict(identifier='10.1234/foo', scheme='orcid', relation='cites'),
    ]
    for rel_id in bad_identifiers:
        assert_err(m(related_identifiers=[rel_id]),
                   'metadata.related_identifiers.0.identifier')
    # Unknown relation type.
    assert_err(m(related_identifiers=[dict(relation='invalid')]),
               'metadata.related_identifiers.0.relation')
| 3,889 | Python | .py | 117 | 27.290598 | 78 | 0.650307 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,348 | test_api_buckets.py | zenodo_zenodo/tests/unit/deposit/test_api_buckets.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test Zenodo deposit REST API."""
from __future__ import absolute_import, print_function
import json
from invenio_search import current_search
from six import BytesIO
def test_bucket_create_delete(api_client, deposit, json_auth_headers,
                              deposit_url, get_json, license_record,
                              auth_headers, minimal_deposit):
    """Test bucket creation/deletion of bucket with each deposit.

    Every new deposit gets its own bucket; deleting the deposit removes
    the bucket and its objects.
    """
    client = api_client
    headers = json_auth_headers
    auth = auth_headers
    # Create deposit
    res = client.post(
        deposit_url, data=json.dumps(minimal_deposit), headers=headers)
    links = get_json(res, code=201)['links']
    current_search.flush_and_refresh(index='deposits')
    # Assert bucket was created and accessible
    assert 'bucket' in links
    res = client.get(links['bucket'], headers=auth)
    assert res.status_code == 200
    # Without authentication the bucket is not accessible (404).
    res = client.get(links['bucket'])
    assert res.status_code == 404
    # Upload object via files-rest.
    object_url = links['bucket'] + '/viafilesrest'
    res = client.put(
        object_url,
        input_stream=BytesIO(b'viafilesrest'),
        headers=auth,
    )
    assert res.status_code == 200
    # Get object via files-rest
    res = client.get(object_url, headers=auth)
    assert res.status_code == 200
    # List files in deposit.
    res = client.get(links['self'], headers=headers)
    data = get_json(res, code=200)
    assert len(data['files']) == 1
    # Get file via deposit.
    res = client.get(data['files'][0]['links']['self'], headers=headers)
    data = get_json(res, code=200)
    # Delete deposit
    res = client.delete(links['self'], headers=auth)
    assert res.status_code == 204
    # Assert bucket no longer exists
    res = client.get(links['bucket'], headers=auth)
    assert res.status_code == 404
    res = client.get(object_url, headers=auth)
    assert res.status_code == 404
def test_bucket_create_publish(api_client, deposit, json_auth_headers,
                               deposit_url, get_json, license_record,
                               auth_headers, minimal_deposit):
    """Test bucket features on deposit publish.

    Publishing locks the deposit bucket and snapshots it into a separate
    record bucket.
    """
    client = api_client
    headers = json_auth_headers
    auth = auth_headers
    # Create deposit
    res = client.post(
        deposit_url, data=json.dumps(minimal_deposit), headers=headers)
    links = get_json(res, code=201)['links']
    current_search.flush_and_refresh(index='deposits')
    # Upload file
    res = client.put(
        links['bucket'] + '/test.txt',
        input_stream=BytesIO(b'testfile'),
        headers=auth,
    )
    assert res.status_code == 200
    # Publish deposit
    res = client.post(links['publish'], headers=auth)
    data = get_json(res, code=202)
    # Bucket should be locked.
    res = client.put(
        links['bucket'] + '/newfile.txt',
        input_stream=BytesIO(b'testfile'),
        headers=auth,
    )
    assert res.status_code == 403
    # Get deposit.
    res = client.get(links['self'], headers=auth)
    assert res.status_code == 200
    # Get record.
    res = client.get(data['links']['record'])
    data = get_json(res, code=200)
    # Assert record and deposit bucket is not identical.
    assert data['links']['bucket'] != links['bucket']
    # Get record bucket.
    res = client.get(data['links']['bucket'])
    assert res.status_code == 200
    # Get file in bucket.
    res = client.get(data['links']['bucket'] + '/test.txt')
    assert res.status_code == 200
    # Record bucket is also locked.
    # NOTE(review): writing here yields 404 rather than 403 — presumably
    # the record bucket is not exposed for writing at all; confirm.
    res = client.put(
        data['links']['bucket'] + '/newfile.txt',
        input_stream=BytesIO(b'testfile'),
        headers=auth,
    )
    assert res.status_code == 404
    # Delete deposit not allowed
    res = client.delete(links['self'], headers=auth)
    assert res.status_code == 403
def test_bucket_new_version(api_client, deposit, json_auth_headers,
                            deposit_url, get_json, license_record,
                            auth_headers, minimal_deposit, indexer_queue):
    """Test bucket features on record new version.

    Each version gets its own deposit and record buckets; files are
    carried over, and publishing an unchanged bucket is rejected (400).
    """
    client = api_client
    headers = json_auth_headers
    auth = auth_headers
    # Create deposit
    res = client.post(
        deposit_url, data=json.dumps(minimal_deposit), headers=headers)
    links = get_json(res, code=201)['links']
    current_search.flush_and_refresh(index='deposits')
    # Upload file
    res = client.put(
        links['bucket'] + '/test.txt',
        input_stream=BytesIO(b'testfile'),
        headers=auth,
    )
    assert res.status_code == 200
    # Publish deposit
    res = client.post(links['publish'], headers=auth)
    data = get_json(res, code=202)
    # Get record
    res = client.get(data['links']['record'])
    data = get_json(res, code=200)
    rec_v1_bucket = data['links']['bucket']
    # Get deposit
    res = client.get(links['self'], headers=auth)
    links = get_json(res, code=200)['links']
    dep_v1_bucket = links['bucket']
    # Create new version
    res = client.post(links['newversion'], headers=auth)
    data = get_json(res, code=201)
    # Get new version deposit
    res = client.get(data['links']['latest_draft'], headers=auth)
    data = get_json(res, code=200)
    dep_v2_publish = data['links']['publish']
    dep_v2_bucket = data['links']['bucket']
    # Assert that all the buckets are different
    assert len(set([rec_v1_bucket, dep_v1_bucket, dep_v2_bucket])) == 3
    # Get file from old version deposit bucket
    res = client.get(dep_v1_bucket + '/test.txt', headers=auth)
    dep_v1_file_data = res.get_data(as_text=True)
    # Get file from old version record bucket
    res = client.get(rec_v1_bucket + '/test.txt')
    rec_v1_file_data = res.get_data(as_text=True)
    # Get file from new version deposit bucket
    res = client.get(dep_v2_bucket + '/test.txt', headers=auth)
    dep_v2_file_data = res.get_data(as_text=True)
    # Assert that the file is the same in the new version
    assert rec_v1_file_data == dep_v1_file_data == dep_v2_file_data
    # New version deposit bucket is unlocked.
    res = client.put(
        dep_v2_bucket + '/newfile.txt',
        input_stream=BytesIO(b'testfile2'),
        headers=auth,
    )
    assert res.status_code == 200
    # Deleting files in new version deposit bucket is allowed
    res = client.delete(dep_v2_bucket + '/newfile.txt', headers=auth)
    assert res.status_code == 204
    # Try to publish the new version
    # Should fail (400), since the bucket contents is the same
    res = client.post(dep_v2_publish, headers=auth)
    data = get_json(res, code=400)
    # Add another file, so that the bucket has a different content
    res = client.put(
        dep_v2_bucket + '/newfile2.txt',
        input_stream=BytesIO(b'testfile3'),
        headers=auth,
    )
    assert res.status_code == 200
    # Publish new version deposit
    res = client.post(dep_v2_publish, headers=auth)
    data = get_json(res, code=202)
    # Get record
    res = client.get(data['links']['record'])
    data = get_json(res, code=200)
    rec_v2_bucket = data['links']['bucket']
    # Assert that all the buckets are different
    assert len(set(
        [rec_v1_bucket, rec_v2_bucket, dep_v1_bucket, dep_v2_bucket])) == 4
    # Create another new version
    res = client.post(links['newversion'], headers=auth)
    data = get_json(res, code=201)
    # Get new version deposit
    res = client.get(data['links']['latest_draft'], headers=auth)
    data = get_json(res, code=200)
    dep_v3_bucket = data['links']['bucket']
    dep_v3_publish = data['links']['publish']
    # Try to publish the new version without changes (should fail as before)
    res = client.post(dep_v3_publish, headers=auth)
    data = get_json(res, code=400)
    # Deleting the file from v2 should be possible, but publishing should
    # also fail since the contents will be the same as the very first version.
    res = client.delete(dep_v3_bucket + '/newfile2.txt', headers=auth)
    assert res.status_code == 204
    res = client.post(dep_v3_publish, headers=auth)
    data = get_json(res, code=400)
def test_non_zenodo_doi(api_client, deposit, json_auth_headers,
                        deposit_url, get_json, license_record,
                        auth_headers, minimal_deposit, indexer_queue):
    """Test non-Zenodo DOI bucket operations.

    Records with an external DOI allow file edits via the edit/publish
    cycle, with record files only updated on publish.
    """
    client = api_client
    headers = json_auth_headers
    auth = auth_headers
    # Create non-Zenodo DOI deposit
    minimal_deposit['metadata']['doi'] = '10.1234/nonzenodo'
    res = client.post(
        deposit_url, data=json.dumps(minimal_deposit), headers=headers)
    links = get_json(res, code=201)['links']
    deposit_bucket = links['bucket']
    deposit_edit = links['edit']
    deposit_publish = links['publish']
    # Upload files
    res = client.put(
        deposit_bucket + '/test1.txt',
        input_stream=BytesIO(b'testfile1'), headers=auth)
    assert res.status_code == 200
    res = client.put(
        deposit_bucket + '/test2.txt',
        input_stream=BytesIO(b'testfile2'), headers=auth)
    assert res.status_code == 200
    # Publish deposit
    res = client.post(deposit_publish, headers=auth)
    links = get_json(res, code=202)['links']
    record_url = links['record']
    # Deposit bucket shouldn't be editable
    res = client.put(
        deposit_bucket + '/test3.txt',
        input_stream=BytesIO(b'testfile3'), headers=auth)
    assert res.status_code == 403
    # Get record
    res = client.get(record_url)
    record_bucket = get_json(res, code=200)['links']['bucket']
    # Record bucket shouldn't be editable either
    # NOTE(review): 404 (not 403) — presumably the record bucket is not
    # exposed for writing at all; confirm.
    res = client.put(
        record_bucket + '/test3.txt',
        input_stream=BytesIO(b'testfile3'), headers=auth)
    assert res.status_code == 404
    # Keep the record files around for later comparison
    res = client.get(record_bucket, headers=auth)
    record_files_initial = {
        (f['key'], f['checksum']) for f in get_json(res, code=200)['contents']}
    # Edit the deposit
    res = client.post(deposit_edit, headers=auth)
    assert res.status_code == 201
    # Deposit bucket now should be editable to add files...
    res = client.put(
        deposit_bucket + '/test3.txt',
        input_stream=BytesIO(b'testfile3'), headers=auth)
    assert res.status_code == 200
    # ...remove files...
    res = client.delete(deposit_bucket + '/test1.txt', headers=auth)
    assert res.status_code == 204
    # ...and edit files.
    res = client.put(
        deposit_bucket + '/test2.txt',
        input_stream=BytesIO(b'testfile3_modifed'), headers=auth)
    assert res.status_code == 200
    # While editing the deposit, record files should be the same
    res = client.get(record_bucket, headers=auth)
    record_files = {
        (f['key'], f['checksum']) for f in get_json(res, code=200)['contents']}
    assert record_files == record_files_initial
    # Publish deposit with changed files
    res = client.post(deposit_publish, headers=auth)
    assert res.status_code == 202
    # Deposit bucket should be closed again
    res = client.put(
        deposit_bucket + '/test4.txt',
        input_stream=BytesIO(b'testfile4'), headers=auth)
    assert res.status_code == 403
    # Check that record files were updated
    res = client.get(deposit_bucket, headers=auth)
    deposit_files = {
        (f['key'], f['checksum']) for f in get_json(res, code=200)['contents']}
    res = client.get(record_bucket, headers=auth)
    record_files = {
        (f['key'], f['checksum']) for f in get_json(res, code=200)['contents']}
    assert deposit_files == record_files
    assert record_files != record_files_initial
| 12,661 | Python | .py | 307 | 35.218241 | 79 | 0.660397 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,349 | test_records.py | zenodo_zenodo/tests/unit/auditor/test_records.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test for Zenodo Auditor Record checks."""
from __future__ import absolute_import, print_function
import logging
import pytest
from invenio_records.models import RecordMetadata
from zenodo.modules.auditor.records import RecordAudit, RecordCheck
from zenodo.modules.records.api import ZenodoRecord
@pytest.fixture()
def record_audit():
    """Return a RecordAudit wired to a dedicated test logger."""
    return RecordAudit('testAudit', logging.getLogger('auditorTesting'), [])
def test_record_audit(record_audit, full_record, db, communities, users,
                      oaiid_pid):
    """Test that a fully populated record passes the audit cleanly."""
    # Add the "ecfunded" community since it's usually being added
    # automatically after processing a deposit if the record has an EC
    # grant. (Bug fix: ``list.append`` returns None — the old code
    # assigned that None to ``oaiid_pid.pid_value`` before immediately
    # overwriting it below.)
    full_record['communities'].append('ecfunded')
    # Mint the OAI identifier
    oaiid_pid.pid_value = full_record['_oai']['id']
    db.session.add(oaiid_pid)
    # Create and persist the record metadata model backing the record.
    record_model = RecordMetadata()
    record_model.json = full_record
    db.session.add(record_model)
    db.session.commit()
    record = ZenodoRecord(data=full_record, model=record_model)
    # A complete, consistent record should produce no audit issues.
    check = RecordCheck(record_audit, record)
    check.perform()
    assert check.issues == {}
    assert check.is_ok is True
    assert check.dump() == {
        'record': {
            'recid': record['recid'],
            'object_uuid': str(record.id),
        },
        'issues': {},
    }
# Each case: (record communities, expected duplicated entries or None).
duplicate_community_params = (
    ([], None),
    (['a', 'b'], None),
    (['a', 'a', 'a', 'b'], ['a']),
    (['a', 'a', 'b', 'b'], ['a', 'b']),
)


@pytest.mark.parametrize(('record_communities', 'issue'),
                         duplicate_community_params)
def test_duplicate_communities(record_audit, minimal_record,
                               record_communities, issue):
    """Check detection of duplicated community identifiers."""
    minimal_record['communities'] = record_communities
    check = RecordCheck(record_audit, minimal_record)
    check._duplicate_communities()
    reported = check.issues.get('communities', {}).get('duplicates')
    assert bool(reported) == bool(issue)
    if reported and issue:
        assert len(reported) == len(issue)
        assert set(reported) == set(issue)
# Each case: (record communities, expected unresolvable entries or None).
unresolvable_communities_params = (
    ([], None),
    (['c1', 'c2', 'c3', 'c4', 'zenodo', 'ecfunded'], None),
    (['foo'], ['foo']),
    (['c1', 'c2', 'foo'], ['foo']),
    (['foo', 'bar'], ['foo', 'bar']),
)


@pytest.mark.parametrize(('record_communities', 'issue'),
                         unresolvable_communities_params)
def test_unresolvable_communities(record_audit, minimal_record, communities,
                                  record_communities, issue):
    """Check detection of community identifiers that do not exist."""
    minimal_record['communities'] = record_communities
    check = RecordCheck(record_audit, minimal_record)
    check._unresolvable_communities()
    reported = check.issues.get('communities', {}).get('unresolvable')
    assert bool(reported) == bool(issue)
    if reported and issue:
        assert len(reported) == len(issue)
        assert set(reported) == set(issue)
# Each case: (record owners, expected duplicated owner ids or None).
duplicate_owners_params = (
    ([1], None),
    ([1, 2, 3], None),
    ([1, 1, 1, 2], [1]),
    ([1, 1, 2, 2], [1, 2]),
)


@pytest.mark.parametrize(('record_owners', 'issue'), duplicate_owners_params)
def test_duplicate_owners(record_audit, minimal_record, record_owners, issue):
    """Check detection of duplicated record owners."""
    minimal_record['owners'] = record_owners
    check = RecordCheck(record_audit, minimal_record)
    check._duplicate_owners()
    reported = check.issues.get('owners', {}).get('duplicates')
    assert bool(reported) == bool(issue)
    if reported and issue:
        assert len(reported) == len(issue)
        assert set(reported) == set(issue)
# Each case: (record owners, expected unresolvable owner ids or None).
unresolvable_owners_params = (
    ([1], None),
    ([1, 2, 3, 4, 5, 6, 7], None),
    ([8], [8]),
    ([1, 2, 3, 4, 5, 6, 7, 8], [8]),
)


@pytest.mark.parametrize(('record_owners', 'issue'),
                         unresolvable_owners_params)
def test_unresolvable_owners(record_audit, minimal_record, users,
                             record_owners, issue):
    """Check detection of owner ids that match no existing user."""
    minimal_record['owners'] = record_owners
    check = RecordCheck(record_audit, minimal_record)
    check._unresolvable_owners()
    reported = check.issues.get('owners', {}).get('unresolvable')
    assert bool(reported) == bool(issue)
    if reported and issue:
        assert len(reported) == len(issue)
        assert set(reported) == set(issue)
# Pairs of (record grants, expected duplicated grant '$ref' values).
duplicate_grants_params = (
    ([], None),
    ([{'$ref': '1'}, {'$ref': '2'}], None),
    ([{'$ref': '1'}, {'$ref': '1'}], ['1']),
    ([{'$ref': '1'}, {'$ref': '1'}, {'$ref': '2'}], ['1']),
    ([{'$ref': '1'}, {'$ref': '1'}, {'$ref': '2'}, {'$ref': '2'}], ['1', '2']),
)
@pytest.mark.parametrize(('record_grants', 'issue'), duplicate_grants_params)
def test_duplicate_grants(record_audit, minimal_record, record_grants, issue):
    """Check reporting of duplicated grant references."""
    minimal_record.update({'grants': record_grants})
    audit = RecordCheck(record_audit, minimal_record)
    audit._duplicate_grants()
    found = audit.issues.get('grants', {}).get('duplicates')
    assert bool(found) == bool(issue)
    if found and issue:
        assert (len(found), set(found)) == (len(issue), set(issue))
# Pairs of (record files, expected duplicated file entries). Per the data,
# entries sharing a 'key' or sharing a 'version_id' are reported.
duplicate_files_params = [
    ([{'key': 'a', 'version_id': 1}], None),
    ([{'key': 'a', 'version_id': 1},
      {'key': 'b', 'version_id': 2},
      {'key': 'c', 'version_id': 3}],
     None),
    ([{'key': 'a', 'version_id': 1},
      {'key': 'a', 'version_id': 2},
      {'key': 'a', 'version_id': 3},
      {'key': 'b', 'version_id': 4}],
     [{'key': 'a', 'version_id': 1},
      {'key': 'a', 'version_id': 2},
      {'key': 'a', 'version_id': 3}]),
    ([{'key': 'a', 'version_id': 1},
      {'key': 'b', 'version_id': 1},
      {'key': 'c', 'version_id': 1},
      {'key': 'd', 'version_id': 2}],
     [{'key': 'a', 'version_id': 1},
      {'key': 'b', 'version_id': 1},
      {'key': 'c', 'version_id': 1}]),
]
@pytest.mark.parametrize(('record_files', 'issue'), duplicate_files_params)
def test_duplicate_files(record_audit, minimal_record, record_files, issue):
    """Check reporting of duplicated file entries."""
    minimal_record.update({'_files': record_files})
    audit = RecordCheck(record_audit, minimal_record)
    audit._duplicate_files()
    found = audit.issues.get('files', {}).get('duplicates')
    assert bool(found) == bool(issue)
    if found and issue:
        assert found == issue
# Pairs of (record '_files' value, whether the 'missing files' issue
# should be raised). Both None and an empty list count as missing.
missing_files_params = [
    ([{'key': 'a'}], False),
    ([{'key': 'a'}, {'key': 'b'}], False),
    (None, True),
    ([], True),
]
@pytest.mark.parametrize(('record_files', 'issue'), missing_files_params)
def test_missing_files(record_audit, minimal_record, record_files, issue):
    """Check reporting of records without any files."""
    minimal_record.update({'_files': record_files})
    audit = RecordCheck(record_audit, minimal_record)
    audit._missing_files()
    found = audit.issues.get('files', {}).get('missing')
    assert bool(found) == bool(issue)
# Pairs of (record files, expected distinct bucket ids when the files
# reference more than one bucket).
multiple_buckets_params = [
    ([{'bucket': 'a'}], None),
    ([{'bucket': 'a'}, {'bucket': 'a'}, {'bucket': 'a'}], None),
    ([{'bucket': 'a'}, {'bucket': 'a'}, {'bucket': 'b'}], ['a', 'b']),
    ([{'bucket': 'a'}, {'bucket': 'b'}, {'bucket': 'c'}], ['a', 'b', 'c']),
]
@pytest.mark.parametrize(('record_files', 'issue'), multiple_buckets_params)
def test_multiple_buckets(record_audit, minimal_record, record_files, issue):
    """Check reporting of files spread across multiple buckets."""
    minimal_record.update({'_files': record_files})
    audit = RecordCheck(record_audit, minimal_record)
    audit._multiple_buckets()
    found = audit.issues.get('files', {}).get('multiple_buckets')
    assert bool(found) == bool(issue)
    if found and issue:
        assert (len(found), set(found)) == (len(issue), set(issue))
# Triples of (record bucket id, record files, files whose 'bucket' does
# not match the record's bucket).
bucket_mismatch_params = [
    ('a', [{'bucket': 'a'}], None),
    ('a', [{'key': 'f1', 'bucket': 'a'}, {'key': 'f2', 'bucket': 'a'}], None),
    ('a', [{'key': 'f1', 'bucket': 'b'}], [{'key': 'f1', 'bucket': 'b'}]),
    ('a', [{'key': 'f1', 'bucket': 'a'}, {'key': 'f2', 'bucket': 'b'}],
     [{'key': 'f2', 'bucket': 'b'}]),
]
@pytest.mark.parametrize(('record_bucket', 'record_files', 'issue'),
                         bucket_mismatch_params)
def test_bucket_mismatch(record_audit, minimal_record, record_bucket,
                         record_files, issue):
    """Check reporting of files stored in a foreign bucket."""
    minimal_record.update({'_buckets': {'record': record_bucket}})
    minimal_record.update({'_files': record_files})
    audit = RecordCheck(record_audit, minimal_record)
    audit._bucket_mismatch()
    found = audit.issues.get('files', {}).get('bucket_mismatch')
    assert bool(found) == bool(issue)
    if found and issue:
        assert found == issue
# Pairs of (record '_oai' value, dict of missing required OAI fields).
oai_required_params = [
    ({'id': 'oai:zenodo.org:1', 'updated': '2016-01-01T12:00:00Z'}, None),
    ({}, {'id': True, 'updated': True}),
    ({'id': 'oai:zenodo.org:1'}, {'updated': True}),
    ({'updated': '2016-01-01T12:00:00Z'}, {'id': True}),
]
@pytest.mark.parametrize(('record_oai', 'issue'), oai_required_params)
def test_oai_required(record_audit, minimal_record, record_oai, issue):
    """Check reporting of missing required '_oai' fields."""
    minimal_record.update({'_oai': record_oai})
    audit = RecordCheck(record_audit, minimal_record)
    audit._oai_required()
    found = audit.issues.get('oai', {}).get('missing')
    assert bool(found) == bool(issue)
    if found and issue:
        assert found == issue
# Pairs of (record '_oai' value, OAI identifier expected to be reported
# as not minted).
oai_non_minted_pid_params = [
    ({'id': 'oai:zenodo.org:123'}, None),
    ({'id': 'oai:zenodo.org:invalid'}, 'oai:zenodo.org:invalid'),
]
@pytest.mark.parametrize(('record_oai', 'issue'), oai_non_minted_pid_params)
def test_oai_non_minted_pid(record_audit, minimal_record, db, oaiid_pid,
                            record_oai, issue):
    """Check reporting of OAI identifiers without a minted PID."""
    # Persist the fixture PID so one of the OAI identifiers resolves.
    db.session.add(oaiid_pid)
    db.session.commit()
    minimal_record.update({'_oai': record_oai})
    audit = RecordCheck(record_audit, minimal_record)
    audit._oai_non_minted_pid()
    found = audit.issues.get('oai', {}).get('non_minted_pid')
    assert bool(found) == bool(issue)
    if found and issue:
        assert found == issue
# Pairs of (record '_oai' value, expected duplicated OAI set names).
oai_duplicate_sets_params = [
    ({}, None),
    ({'sets': ['a', 'b']}, None),
    ({'sets': ['a', 'a', 'a', 'b']}, ['a']),
    ({'sets': ['a', 'a', 'b', 'b']}, ['a', 'b']),
]
@pytest.mark.parametrize(('record_oai', 'issue'), oai_duplicate_sets_params)
def test_oai_duplicate_sets(record_audit, minimal_record, record_oai, issue):
    """Check reporting of duplicated OAI set names."""
    minimal_record.update({'_oai': record_oai})
    audit = RecordCheck(record_audit, minimal_record)
    audit._oai_duplicate_sets()
    found = audit.issues.get('oai', {}).get('duplicate_oai_sets')
    assert bool(found) == bool(issue)
    if found and issue:
        assert (len(found), set(found)) == (len(issue), set(issue))
# Triples of (record communities, record OAI sets, expected issue dict
# with 'missing_oai_sets' and/or 'redundant_oai_sets'). Community 'x'
# corresponds to OAI set 'user-x'.
oai_community_correspondence = [
    ([], [], None),
    (['a'], ['user-a'], None),
    (['a', 'b'], ['user-a', 'user-b'], None),
    (['a'], [], {'missing_oai_sets': ['user-a']}),
    (['a', 'b'], ['user-a'], {'missing_oai_sets': ['user-b'], }),
    ([], ['user-a'], {'redundant_oai_sets': ['user-a']}),
    (['a'], ['user-a', 'user-b'], {'redundant_oai_sets': ['user-b']}),
    (['a'], ['user-b'],
     {'redundant_oai_sets': ['user-b'], 'missing_oai_sets': ['user-a']}),
]
@pytest.mark.parametrize(('record_communities', 'record_oai', 'issue'),
                         oai_community_correspondence)
def test_oai_community_correspondence(record_audit, minimal_record, db,
                                      record_communities, record_oai, issue):
    """Check correspondence between record communities and OAI sets."""
    minimal_record.update({'communities': record_communities})
    minimal_record.update({'_oai': {'sets': record_oai}})
    audit = RecordCheck(record_audit, minimal_record)
    audit._oai_community_correspondence()
    found = audit.issues.get('oai', {})
    assert bool(found) == bool(issue)
    if found and issue:
        assert found == issue
def test_jsonschema(app, record_audit, minimal_record):
    """Check JSON Schema validation issues on the audited record."""
    # A schema-conforming record must yield no 'jsonschema' issue.
    valid_check = RecordCheck(record_audit, ZenodoRecord(minimal_record))
    valid_check.jsonschema()
    assert valid_check.issues.get('jsonschema') is None
    # A key outside the schema must be reported as an issue.
    minimal_record['invalid_key'] = 'should not be here'
    invalid_check = RecordCheck(record_audit, ZenodoRecord(minimal_record))
    invalid_check.jsonschema()
    assert invalid_check.issues.get('jsonschema')
| 13,436 | Python | .py | 303 | 38.79868 | 79 | 0.622307 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,350 | test_oai.py | zenodo_zenodo/tests/unit/auditor/test_oai.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test for Zenodo Auditor OAI-PMH checks."""
from __future__ import absolute_import, print_function
import logging
import pytest
from invenio_communities.models import Community
from invenio_indexer.api import RecordIndexer
from invenio_oaiserver.models import OAISet
from invenio_search import current_search
from mock import MagicMock
from zenodo.modules.auditor.oai import OAIAudit, OAICorrespondenceCheck, \
OAISetResultCheck
from zenodo.modules.records.resolvers import record_resolver
# Each entry: (communities to create, OAI sets to end up with,
# expected missing OAI sets, expected missing communities).
# NOTE: this tuple was previously misnamed 'oai_set_result_count_params',
# clashing with (and shadowed by) the result-count params defined below.
oai_set_correspondence_params = (
    ([], [], [], []),
    (['a', 'b'], ['user-a', 'user-b'], [], []),
    (['a'], [], ['a'], []),
    ([], ['user-a'], [], ['user-a']),
    (['a'], ['user-b'], ['a'], ['user-b']),
)


@pytest.mark.parametrize(
    ('oai_communities', 'oai_sets', 'missing_oai_set', 'missing_community'),
    oai_set_correspondence_params
)
def test_oai_set_correspondence(db, users, oai_communities, oai_sets,
                                missing_oai_set, missing_community):
    """Check reporting of OAI sets and communities missing their pair."""
    for c in oai_communities:
        with db.session.begin_nested():
            new_comm = Community.create(community_id=c, user_id=1)
        db.session.commit()
        # Delete the automatically created OAI Set if the test case does
        # not require it to exist.
        if new_comm.oaiset_spec not in oai_sets:
            with db.session.begin_nested():
                db.session.delete(new_comm.oaiset)
            db.session.commit()
    # Create any OAI sets required by the test case that do not exist yet.
    for s in oai_sets:
        if not OAISet.query.filter_by(spec=s).one_or_none():
            with db.session.begin_nested():
                db.session.add(OAISet(spec=s))
            db.session.commit()
    check = OAICorrespondenceCheck()
    check.perform()
    assert set(check.issues.get('missing_oai_set', [])) == set(missing_oai_set)
    assert set(check.issues.get('missing_community', [])) == \
        set(missing_community)
# Each entry: ((record ids present in DB, ES, '/oai2d'),
#              (ids expected reported missing from DB, ES, '/oai2d')).
oai_set_result_count_params = (
    (
        # (State for DB, ES, /oai2d) and...
        ([], [], []),
        # (Issues for DB, ES, /oai2d)
        ([], [], [])
    ),
    (([], [], [1]),
     ([1], [1], [])),
    (([], [1], []),
     ([1], [], [1])),
    (([], [1], [1]),
     ([1], [], [])),
    (([1], [], []),
     ([], [1], [1])),
    (([1], [], [1]),
     ([], [1], [])),
    (([1], [1], []),
     ([], [], [1])),
    (([1], [1], [1]),
     ([], [], [])),
    (([1], [2], [3]),
     ([2, 3], [1, 3], [1, 2])),
    (([1, 4], [2, 4], [3, 4]),
     ([2, 3], [1, 3], [1, 2])),
)
@pytest.mark.parametrize(('oai_sources', 'issues'),
                         oai_set_result_count_params)
def test_oai_set_result_count(mocker, audit_records, db, es, communities,
                              oai_sources, issues):
    """Check consistency of the 'c1' set between DB, ES and '/oai2d'.

    Each source is populated independently; the check must report, per
    source, the record ids missing relative to the other sources.
    """
    db_records, es_records, oai2d_records = oai_sources
    # Attach records to the 'user-c1' OAI set in the database only.
    for recid in db_records:
        _, record = record_resolver.resolve(recid)
        record['_oai']['sets'] = ['user-c1']
        record.commit()
    db.session.commit()
    # ...and, separately, in Elasticsearch only.
    indexer = RecordIndexer()
    for recid in es_records:
        _, record = record_resolver.resolve(recid)
        record['_oai']['sets'] = ['user-c1']
        indexer.index(record)
    current_search.flush_and_refresh(index='records')
    # '/oai2d' needs straight-forward cheating... There's no way to be sure
    # why the endpoint sometimes fails to report the correct results. It could
    # be a Resumption Token issue, or even an indexing issue on Elasticsearch.
    # Either way, we have to be able to replicate when running on production
    # this behavior and report it as an issue.
    oai2d_ids_mock = MagicMock()
    oai2d_ids_mock.return_value = set(oai2d_records)
    oai2d_ids_mock = mocker.patch(
        'zenodo.modules.auditor.oai.OAISetResultCheck'
        '._oai2d_endpoint_identifiers', new=oai2d_ids_mock)
    audit = OAIAudit('testAudit', logging.getLogger('auditorTesting'), [])
    check = OAISetResultCheck(audit, Community.get('c1'))
    check.perform()
    audit.clear_db_oai_set_cache()
    # Compare the reported missing ids for each source as sets.
    result_issues = check.issues.get('missing_ids', {})
    db_issues, es_issues, api_issues = issues
    assert set(result_issues.get('db', [])) == set(db_issues)
    assert set(result_issues.get('es', [])) == set(es_issues)
    assert set(result_issues.get('oai2d', [])) == set(api_issues)
| 5,194 | Python | .py | 128 | 34.976563 | 79 | 0.624777 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,351 | test_api.py | zenodo_zenodo/tests/unit/github/test_api.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test API for Zenodo and GitHub integration."""
from __future__ import absolute_import, print_function
import pytest
from invenio_sipstore.models import SIP
from mock import MagicMock, Mock
from six import BytesIO
from zenodo.modules.github.api import ZenodoGitHubRelease
# Each entry: (deposit defaults, GitHub contributors, GitHub repo owner,
# expected 'creators' metadata).
creators_params = (
    (dict(),
     [dict(name='Contributor', affiliation='X'), ],
     [dict(name='Owner', affiliation='Y'), ],
     [dict(name='Contributor', affiliation='X'), ]),
    (dict(creators=[]),  # List of creators provided as empty
     [dict(name='Contributor', affiliation='X'), ],
     [dict(name='Owner', affiliation='Y'), ],
     [dict(name='Owner', affiliation='Y'), ]),
    (dict(creators=None),
     [dict(name='Contributor', affiliation='X'), ],
     None,  # Failed to get GH owner
     [dict(name='Unknown', affiliation=''), ]),
)
@pytest.mark.parametrize('defaults,contribs,owner,output', creators_params)
def test_github_creators_metadata(mocker, defaults, contribs, owner, output):
    """Test 'creators' metadata fetching from GitHub."""
    mock_translator = mocker.patch(
        'zenodo.modules.github.api.legacyjson_v1_translator')
    mock_contributors = mocker.patch(
        'zenodo.modules.github.api.get_contributors')
    mock_citation = mocker.patch(
        'invenio_github.api.GitHubRelease.citation_metadata')
    mock_citation.return_value = {}
    mock_owner = mocker.patch('zenodo.modules.github.api.get_owner')
    mock_contributors.return_value = contribs
    mock_owner.return_value = owner

    release = MagicMock()
    release.event.user_id = 1
    release.event.payload['repository']['id'] = 1

    zgh = ZenodoGitHubRelease(release)
    zgh.defaults = defaults
    zgh.gh.api = None
    zgh.extra_metadata = {}
    # Accessing the property triggers the metadata assembly under test.
    zgh.metadata
    mock_translator.assert_called_with({'metadata': {'creators': output}})
def test_github_publish(mocker, db, users, locations,
                        deposit_metadata, sip_metadata_types,
                        mock_datacite_minting):
    """Test basic GitHub payload."""
    datacite_mock = mock_datacite_minting
    mocker.patch('zenodo.modules.github.api.ZenodoGitHubRelease.metadata')
    # Fake the HTTP response used to download the release archive.
    data = b'foobar'
    resp = Mock()
    resp.headers = {'Content-Length': len(data)}
    resp.raw = BytesIO(b'foobar')
    resp.status_code = 200
    gh3mock = MagicMock()
    gh3mock.api.session.get = Mock(return_value=resp)
    gh3mock.account.user.email = 'foo@baz.bar'
    release = MagicMock()
    release.event.user_id = 1
    release.event.payload['release']['author']['id'] = 1
    release.event.payload['foo']['bar']['baz'] = 1
    release.event.payload['repository']['id'] = 1
    zgh = ZenodoGitHubRelease(release)
    zgh.gh = gh3mock
    zgh.release = dict(author=dict(id=1))
    zgh.metadata = deposit_metadata
    zgh.files = (('foobar.txt', None), )
    # No earlier releases recorded for this repository.
    zgh.model.repository.releases.filter_by().count.return_value = 0
    zgh.publish()
    # datacite should be called twice - for regular DOI and Concept DOI
    assert datacite_mock().metadata_post.call_count == 2
    datacite_mock().doi_post.assert_any_call(
        '10.5072/zenodo.1', 'https://zenodo.org/record/1')
    datacite_mock().doi_post.assert_any_call(
        '10.5072/zenodo.2', 'https://zenodo.org/record/2')
    # The stored SIP must carry the GitHub-client agent information.
    expected_sip_agent = {
        'email': 'foo@baz.bar',
        '$schema': 'https://zenodo.org/schemas/sipstore/'
                   'agent-githubclient-v1.0.0.json',
        'user_id': 1,
        'github_id': 1,
    }
    gh_sip = SIP.query.one()
    assert gh_sip.agent == expected_sip_agent
| 4,518 | Python | .py | 107 | 37.485981 | 77 | 0.690384 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,352 | test_github_cli.py | zenodo_zenodo/tests/unit/github/test_github_cli.py | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test CLI for GitHub."""
from __future__ import absolute_import, print_function, unicode_literals
import pytest
from invenio_github.api import GitHubAPI
def test_hook_sync(mocker, app, cli_run, g_tester_id):
    """Test the 'sync' CLI command."""
    # Invoke the command with the user's email address.
    sync_mock = mocker.patch.object(GitHubAPI, 'sync')
    result = cli_run('sync info@inveniosoftware.org -E')
    assert result.exit_code == 0
    assert result.output == ''
    sync_mock.assert_called_once_with(hooks=False, async_hooks=False)

    # Invoke the command with the user's ID instead.
    sync_mock = mocker.patch.object(GitHubAPI, 'sync')
    result = cli_run('sync {0} -E'.format(g_tester_id))
    assert result.exit_code == 0
    assert result.output == ''
    sync_mock.assert_called_once_with(hooks=False, async_hooks=False)

    # Invoke the command with explicit hook flags.
    sync_mock = mocker.patch.object(GitHubAPI, 'sync')
    result = cli_run('sync info@inveniosoftware.org --hooks True'
                     ' --async-hooks=True -E')
    assert result.exit_code == 0
    assert result.output == ''
    sync_mock.assert_called_once_with(hooks=True, async_hooks=True)
def test_hook_create(mocker, app, cli_run, g_users, g_repositories):
    """Test the 'createhook' CLI command."""
    # Creating a hook on an already-enabled repository is a no-op.
    create_mock = mocker.patch.object(GitHubAPI, 'create_hook')
    result = cli_run('createhook u1@foo.bar foo/bar --yes-i-know -E')
    assert result.exit_code == 0
    assert result.output.startswith('Hook is already installed for')
    assert not create_mock.called

    # The repository can be referenced by its name...
    repo = g_repositories[1]  # baz/spam repository
    create_mock = mocker.patch.object(GitHubAPI, 'create_hook')
    result = cli_run('createhook u1@foo.bar baz/spam --yes-i-know -E')
    assert result.exit_code == 0
    assert result.output == ''
    create_mock.assert_called_once_with(repo['github_id'], repo['name'])

    # ...or by its GitHub ID.
    create_mock = mocker.patch.object(GitHubAPI, 'create_hook')
    result = cli_run('createhook u1@foo.bar {0} --yes-i-know -E'.format(
        repo['github_id']))
    assert result.output == ''
    assert result.exit_code == 0
    create_mock.assert_called_once_with(repo['github_id'], repo['name'])
def test_hook_remove(mocker, app, cli_run, g_users, g_repositories):
    """Test 'removehook' CLI for owned and orphaned repositories."""
    repo0 = g_repositories[0]  # foo/bar repository, owned by u1
    repo1 = g_repositories[1]  # baz/spam repository, orphaned
    # Remove hook from an 'enabled' repo without a user
    mock_obj = mocker.patch.object(GitHubAPI, 'remove_hook')
    ret = cli_run('removehook foo/bar --yes-i-know -E')
    assert ret.exit_code == 0
    assert ret.output == ''
    mock_obj.assert_called_once_with(repo0['github_id'], repo0['name'])
    # Remove hook from an 'enabled' repo with owner specified
    mock_obj = mocker.patch.object(GitHubAPI, 'remove_hook')
    ret = cli_run('removehook foo/bar -u u1@foo.bar --yes-i-know -E')
    assert ret.exit_code == 0
    assert ret.output == ''
    mock_obj.assert_called_once_with(repo0['github_id'], repo0['name'])
    # Remove hook from an 'enabled' repo with non-owner specified:
    # the hook is still removed, but a warning is printed.
    mock_obj = mocker.patch.object(GitHubAPI, 'remove_hook')
    ret = cli_run('removehook foo/bar -u u2@foo.bar --yes-i-know -E')
    assert ret.exit_code == 0
    assert ret.output == \
        'Warning: Specified user is not the owner of this repository.\n'
    mock_obj.assert_called_once_with(repo0['github_id'], repo0['name'])
    # Remove hook from an orphaned repo without specifying a user:
    # nothing is removed.
    mock_obj = mocker.patch.object(GitHubAPI, 'remove_hook')
    ret = cli_run('removehook baz/spam --yes-i-know -E')
    assert ret.exit_code == 0
    assert ret.output == \
        "Repository doesn't have an owner, please specify a user.\n"
    assert not mock_obj.called
    # Remove hook from an orphaned repo with user specified
    mock_obj = mocker.patch.object(GitHubAPI, 'remove_hook')
    ret = cli_run('removehook baz/spam -u u1@foo.bar --yes-i-know -E')
    assert ret.exit_code == 0
    assert ret.output == 'Warning: Repository is not owned by any user.\n'
    mock_obj.assert_called_once_with(repo1['github_id'], repo1['name'])
def test_repo_list(app, cli_run, g_users, g_repositories, g_remoteaccounts):
    """Test the 'list' CLI command."""
    # Only the repositories enabled by this user should be listed.
    result = cli_run('list u1@foo.bar -E')
    assert result.exit_code == 0
    assert result.output.startswith('User has 2 enabled repositories.')
    assert 'foo/bar:8000' in result.output
    assert 'bacon/eggs:8002' in result.output
    assert 'other/repo:8003' not in result.output
def test_repo_assign(mocker, app, cli_run, g_users, g_repositories):
    """Test the 'assign' CLI command."""
    remove_mock = mocker.patch.object(GitHubAPI, 'remove_hook')
    create_mock = mocker.patch.object(GitHubAPI, 'create_hook')
    result = cli_run('assign u2@foo.bar 8000 --yes-i-know -E')
    assert result.exit_code == 0
    # Reassignment removes the old hook and installs one for the new user.
    remove_mock.assert_called_once_with(8000, 'foo/bar')
    create_mock.assert_called_once_with(8000, 'foo/bar')
@pytest.mark.parametrize('u2', ['u2@foo.bar', '2'])
@pytest.mark.parametrize('r1', ['foo/bar', '8000'])
@pytest.mark.parametrize('r2', ['bacon/eggs', '8002'])
def test_repo_assign_many(mocker, r2, r1, u2, app, cli_run,
                          g_users, g_repositories):
    """Test 'assign' CLI with multiple repositories.

    The user and the repositories can each be referenced either by
    email/name or by numeric ID.
    """
    # Make sure the 'u2' parameter is correct
    rh_mock = mocker.patch.object(GitHubAPI, 'remove_hook')
    ch_mock = mocker.patch.object(GitHubAPI, 'create_hook')
    assert g_users[1]['email'] == 'u2@foo.bar'
    assert g_users[1]['id'] == 2
    cmd = 'assign {0} {1} {2} --yes-i-know -E'.format(u2, r1, r2)
    ret = cli_run(cmd)
    assert ret.exit_code == 0
    # BUG FIX: these comparisons were missing 'assert' and were no-op
    # expressions that never checked anything (flake8-bugbear B015).
    assert rh_mock.call_count == 2
    assert ch_mock.call_count == 2
    rh_mock.assert_any_call(8000, 'foo/bar')
    rh_mock.assert_any_call(8002, 'bacon/eggs')
    ch_mock.assert_any_call(8000, 'foo/bar')
    ch_mock.assert_any_call(8002, 'bacon/eggs')
| 6,722 | Python | .py | 140 | 43.521429 | 76 | 0.683087 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,353 | test_support_views.py | zenodo_zenodo/tests/unit/support/test_support_views.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2017 CERN.
#
# Zenodo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Unit tests support utils."""
from __future__ import absolute_import, print_function
from flask import url_for
from helpers import login_user_via_session, recaptcha_enabled
from six import BytesIO, b
from werkzeug import MultiDict
def test_send_support_email(app, db, es, users):
    """Test mail sending."""
    with app.extensions['mail'].record_messages() as outbox:
        with app.test_client() as client:
            # Anonymous users can load the form; with reCAPTCHA enabled
            # the challenge widget must be rendered.
            res = client.get(url_for('zenodo_support.support'))
            assert res.status_code == 200
            with recaptcha_enabled(app):
                res = client.get(
                    url_for('zenodo_support.support')
                )
                assert b('recaptcha') in res.data
                assert res.status_code == 200
            # An empty submission flags every required field (attachments
            # are optional and must not be flagged).
            res = client.post(
                url_for('zenodo_support.support'),
                data=dict()
            )
            assert res.status_code == 200
            assert b('field-name has-error') in res.data
            assert b('field-email has-error') in res.data
            assert b('field-subject has-error') in res.data
            assert b('field-description has-error') in res.data
            assert b('field-attachments has-error') not in res.data
            # A valid anonymous submission sends two emails: the support
            # request and an automated confirmation to the requester.
            form = MultiDict(dict(
                name='Aman',
                email='abcxyz@example.com',
                subject='hello',
                issue_category='tech-support',
                description='Please help us! Troubleshoot our problem.'
            ))
            res = client.post(
                url_for('zenodo_support.support'),
                data=form
            )
            assert b('has-error') not in res.data
            assert len(outbox) == 2
            sent_msg = outbox[0]
            assert sent_msg.sender == 'Aman <abcxyz@example.com>'
            assert sent_msg.subject == '[tech-support]: hello'
            assert sent_msg.reply_to == 'abcxyz@example.com'
            assert 'Aman <abcxyz@example.com>' in sent_msg.body
            sent_msg = outbox[1]
            assert sent_msg.sender == 'Zenodo <info@zenodo.org>'
            assert sent_msg.subject == 'Zenodo Support'
            assert sent_msg.body == (
                'Thank you for contacting Zenodo support.'
                '\n\nWe have received your message, and we will do our best '
                'to get back to you as soon as possible.\nThis is an '
                'automated confirmation of your request, please do not reply '
                'to this email.\n\nZenodo Support\n'
                'https://zenodo.org\n'
            )
            # Attachments must be forwarded on the support message.
            form = MultiDict(dict(
                name='Foo',
                email='example@mail.com',
                subject='Bar',
                issue_category='tech-support',
                description='Please help us! Troubleshoot our problem.'
            ))
            test_file = BytesIO(b('My other file contents'))
            test_file2 = BytesIO(b('Another My other file contents'))
            form.add('attachments', (test_file, 'file2.txt'))
            form.add('attachments', (test_file2, 'test3.txt'))
            res = client.post(
                url_for('zenodo_support.support'),
                data=form,
                content_type='multipart/form-data',
                follow_redirects=True
            )
            assert len(outbox) == 4
            sent_msg = outbox[2]
            file1 = sent_msg.attachments[0]
            assert file1.filename == 'file2.txt'
            assert file1.data == b('My other file contents')
            file2 = sent_msg.attachments[1]
            assert file2.filename == 'test3.txt'
            assert file2.data == b('Another My other file contents')
            # For authenticated users the email is taken from the account
            # and no reCAPTCHA is shown.
            login_user_via_session(client, email=users[1]['email'])
            with recaptcha_enabled(app):
                res = client.get(
                    url_for('zenodo_support.support')
                )
                assert b('test@zenodo.org') in res.data
                assert b('recaptcha') not in res.data
            # Submission without an 'email' field works for logged-in users;
            # the user id is appended to the 'From' line of the body.
            form = MultiDict(dict(
                name='Foo',
                subject='Bar',
                issue_category='tech-support',
                description='Please help us! Troubleshoot our problem.'
            ))
            res = client.post(
                url_for('zenodo_support.support'),
                data=form
            )
            assert len(outbox) == 6
            sent_msg = outbox[4]
            assert 'From: Foo <test@zenodo.org> (2)' in sent_msg.body
            # A single attachment from a logged-in user.
            test_file = BytesIO(b('My file contents'))
            form.add('attachments', (test_file, 'file1.txt'))
            res = client.post(
                url_for('zenodo_support.support'),
                data=form,
                content_type='multipart/form-data',
                follow_redirects=True
            )
            assert len(outbox) == 8
            sent_msg = outbox[6]
            file1 = sent_msg.attachments[0]
            assert file1.filename == 'file1.txt'
            assert file1.data == b('My file contents')
            # Multiple attachments from a logged-in user.
            form = MultiDict(dict(
                name='Foo',
                subject='Bar',
                issue_category='tech-support',
                description='Please help us! Troubleshoot our problem.'
            ))
            test_file = BytesIO(b('My other file contents'))
            test_file2 = BytesIO(b('Another My other file contents'))
            form.add('attachments', (test_file, 'file2.txt'))
            form.add('attachments', (test_file2, 'test3.txt'))
            res = client.post(
                url_for('zenodo_support.support'),
                data=form,
                content_type='multipart/form-data',
                follow_redirects=True
            )
            assert len(outbox) == 10
            sent_msg = outbox[8]
            file1 = sent_msg.attachments[0]
            assert file1.filename == 'file2.txt'
            assert file1.data == b('My other file contents')
            file2 = sent_msg.attachments[1]
            assert file2.filename == 'test3.txt'
            assert file2.data == b('Another My other file contents')
| 7,073 | Python | .py | 160 | 31.98125 | 78 | 0.561259 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,354 | test_search_ui_views.py | zenodo_zenodo/tests/unit/search_ui/test_search_ui_views.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo search ui views."""
def test_for_smoke(app, db, es):
    """Smoke-test the search UI page and the records REST endpoint."""
    with app.test_client() as client:
        assert client.get('/search').status_code == 200
        assert client.get('/api/records/').status_code == 200
| 1,253 | Python | .py | 31 | 38.032258 | 76 | 0.733388 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,355 | test_grant_utils.py | zenodo_zenodo/tests/unit/utils/test_grant_utils.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2018 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test grant utilities."""
from __future__ import absolute_import, print_function
import os
from zenodo.modules.utils.grants import OpenAIREGrantsDump
def test_openaire_grants_dump(tmpdir, script_dir):
    """Test OpenAIRE grants dump parsing and splitting."""
    prefix = '{0}/grants-'.format(tmpdir.mkdir('grants_db'))
    dump = OpenAIREGrantsDump(str(script_dir.join('grants-dump.gz')))
    # Split three grants across files of at most two grants each.
    produced = list(dump.split(prefix, grants_per_file=2))
    assert produced == [
        ('{}00.db'.format(prefix), 2),
        ('{}01.db'.format(prefix), 1),
    ]
    # Every announced output file must actually exist on disk.
    for filepath, _ in produced:
        assert os.path.exists(filepath)
| 1,680 | Python | .py | 38 | 41.789474 | 76 | 0.741443 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,356 | test_oai_update.py | zenodo_zenodo/tests/unit/utils/test_oai_update.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test query-based OAISet updating."""
from __future__ import absolute_import, print_function
from datetime import datetime
from invenio_records.api import Record
from zenodo.modules.utils.tasks import update_search_pattern_sets
def make_rec(comm, sets):
    """Build a bare-bones record dict carrying communities and OAI sets."""
    record = {'communities': comm}
    record['_oai'] = {'sets': sets}
    return record
def test_oaiset_update(app, db, oaisets, es, oaiset_update_records):
    """Test query-based OAI sets updating.

    The fixture supplies three record UUIDs whose '_oai.updated'
    timestamps start in 1970, so any record touched by the task is
    detectable by its timestamp jumping to the current year.
    """
    rec_ok, rec_rm, rec_add = [Record.get_record(uuid) for uuid
                               in oaiset_update_records]
    year_now = str(datetime.now().year)
    # Assume starting conditions
    # rec_ok and rec_rm start inside the query-based 'extra' set;
    # rec_add starts outside of it.
    assert set(rec_ok['_oai']['sets']) == set(['extra', 'user-foobar', ])
    assert rec_ok['_oai']['updated'].startswith('1970')
    assert set(rec_rm['_oai']['sets']) == set(['extra', 'user-foobar', ])
    assert rec_rm['_oai']['updated'].startswith('1970')
    assert set(rec_add['_oai']['sets']) == set(['user-foobar', ])
    assert rec_add['_oai']['updated'].startswith('1970')
    # Run the updating task
    update_search_pattern_sets.delay()
    rec_ok, rec_rm, rec_add = [Record.get_record(uuid) for uuid
                               in oaiset_update_records]
    # After update
    # rec_ok is unchanged, so its timestamp must remain untouched.
    assert set(rec_ok['_oai']['sets']) == set(['extra', 'user-foobar', ])
    assert rec_ok['_oai']['updated'].startswith('1970')
    # rec_rm was dropped from 'extra' and its timestamp bumped.
    assert set(rec_rm['_oai']['sets']) == set(['user-foobar', ])
    assert rec_rm['_oai']['updated'].startswith(year_now)
    # rec_add was added to 'extra' and its timestamp bumped.
    assert set(rec_add['_oai']['sets']) == set(['extra', 'user-foobar', ])
    assert rec_add['_oai']['updated'].startswith(year_now)
| 2,641 | Python | .py | 54 | 44.925926 | 76 | 0.686335 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,357 | test_oai_sync.py | zenodo_zenodo/tests/unit/utils/test_oai_sync.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test API for Zenodo and GitHub integration."""
from __future__ import absolute_import, print_function
from invenio_oaiserver.models import OAISet
from mock import MagicMock
from zenodo.modules.utils.tasks import comm_sets_match, get_synced_sets, \
requires_sync, update_oaisets_cache
def make_rec(comm, sets):
    """Build a bare-bones record dict carrying communities and OAI sets."""
    record = {'communities': comm}
    record['_oai'] = {'sets': sets}
    return record
def test_synced_communities(db, oaisets):
    """Test OAI sets syncing."""
    # Pairs of ((communities, existing sets), expected synced sets).
    cases = [
        ((['c1', 'c2'], ['user-c1', 'user-c2']),
         ['user-c1', 'user-c2']),
        ((['c1', 'c2'], []),
         ['user-c1', 'user-c2']),
        ((['c1', 'c2'], ['extra', 'user-c1', 'user-c2']),
         ['extra', 'user-c1', 'user-c2']),
        # 'user-extra' is not community-based despite its prefix: kept.
        ((['c1', 'c2'], ['user-c1', 'user-c2', 'user-extra']),
         ['user-c1', 'user-c2', 'user-extra']),
        (([], ['extra']), ['extra']),
        (([], []), []),
    ]
    for (communities, sets), expected in cases:
        assert get_synced_sets(make_rec(communities, sets)) == expected
    # A record without any community/OAI info yields no sets at all.
    assert get_synced_sets({}) == []
def test_sets_match(db, oaisets):
    """Test OAI sets and communities matching predicate."""
    # Matching cases. The custom 'extra' OAI set is ignored, as is the
    # 'user-extra' spec, which is NOT community-based despite its prefix.
    matching = [
        (['c1', 'c2'], ['extra', 'user-c1', 'user-c2', ]),
        (['c1', 'c2'], ['user-c1', 'user-c2', 'user-extra', ]),
        ([], ['extra']),
        ([], []),
    ]
    for communities, sets in matching:
        assert comm_sets_match(make_rec(communities, sets))
    # Mismatching cases: community membership and sets out of sync.
    mismatching = [
        (['c2'], ['extra']),
        (['c2'], ['user-c1']),
        (['c1'], []),
    ]
    for communities, sets in mismatching:
        assert not comm_sets_match(make_rec(communities, sets))
    # A record with communities but no '_oai.sets' key does not match.
    record = {
        'communities': ['c1'],
        '_oai': {
            'id': 'some_id_1234',
            'updated': 'timestamp'
        }
    }
    assert not comm_sets_match(record)
def test_syncing_required(db, oaisets):
    """Test OAI syncing requirement criterion.

    ``requires_sync`` should return True whenever a record's '_oai'
    metadata is missing, incomplete, or out of sync with the record's
    'communities' list.
    """
    # An empty record has no OAI info at all, hence requires syncing.
    assert requires_sync({})
    r = {
        'communities': ['c1', ],
        '_oai': {
            'id': 'some_id_1234',
            'updated': 'timestamp',
            'sets': ['user-c1', 'extra', ]
        }
    }
    assert not requires_sync(r)  # complete and in sync: no update needed
    r = {
        'communities': ['c1', ],
        '_oai': {
            # 'id' is missing
            'updated': 'timestamp',
            'sets': ['user-c1', 'extra', ]
        }
    }
    assert requires_sync(r)
    r = {
        'communities': ['c1', ],
        '_oai': {
            'id': '',  # 'id' empty
            'updated': 'timestamp',
            'sets': ['user-c1', 'extra', ]
        }
    }
    assert requires_sync(r)
    r = {
        'communities': ['c1', ],
        '_oai': {
            'id': 'some_id_1234',
            # update is missing
            'sets': ['user-c1', 'extra', ]
        }
    }
    assert requires_sync(r)
    r = {
        'communities': ['c1', ],
        '_oai': {
            'id': 'some_id_1234',
            'updated': 'timestamp',
            'sets': ['extra', 'user-c2', ]  # additional 'user-c2'
        }
    }
    assert requires_sync(r)  # extra community set present: update needed
    r = {
        'communities': ['c1', 'c2'],
        '_oai': {
            'id': 'some_id_1234',
            'updated': 'timestamp',
            'sets': ['extra', 'user-c1', ]  # 'user-c2' missing
        }
    }
    assert requires_sync(r)  # community set missing: update needed
    r = {
        'communities': ['c1', ],
        '_oai': {
            'id': 'some_id_1234',
            'updated': 'timestamp',
            # sets missing
        }
    }
    assert requires_sync(r)  # 'sets' key missing: update needed
    r = {
        'communities': ['c1', ],
        # _oai is missing completely
    }
    assert requires_sync(r)  # '_oai' missing: update needed
def test_sets_cache(mocker, db, oaisets):
    """Test caching for OAISets.

    ``update_oaisets_cache`` should fill the cache from a record's sets,
    and ``requires_sync`` should only query the database for specs that
    are not already cached.
    """
    cache = {}
    rec = {
        '_oai': {
            'sets': ['user-c1', 'extra', ],
        }
    }
    update_oaisets_cache(cache, rec)
    # Cached OAISet objects expose their DB-defined search patterns.
    assert cache['user-c1'].search_pattern is None
    assert cache['extra'].search_pattern == 'title:extra'  # see in conftest
    query_mock = mocker.patch.object(OAISet, 'query')
    # Mock the sqlalchemy query API
    q_result_mock = MagicMock()
    q_result_mock.count = 1
    q_result_mock.one().search_pattern = None
    query_mock.filter_by = MagicMock(return_value=q_result_mock)
    r = {
        'communities': ['c1', 'c2'],
        '_oai': {
            'id': 'some_id_1234',
            'updated': 'timestamp',
            'sets': ['user-c1', 'extra', 'user-c2']
        }
    }
    assert not requires_sync(r, cache=cache)  # Should not require sync
    # Should be only called once for the item not in cache
    # ('user-c1' and 'extra' are served from the cache built above).
    query_mock.filter_by.assert_called_once_with(spec='user-c2')
| 6,082 | Python | .py | 169 | 28.579882 | 79 | 0.558234 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,358 | conftest.py | zenodo_zenodo/tests/unit/metrics/conftest.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2022 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Pytest configuration."""
import pytest
from zenodo.modules.metrics.api import ZenodoMetric
@pytest.fixture
def use_metrics_config(app, api):
    """Install a known metrics config on both apps, restoring it on exit."""
    # Gauge definitions served under the 'openaire-nexus' key.
    gauges = [
        {
            'name': 'zenodo_unique_visitors_web_total',
            'help': 'Number of unique visitors in total on Zenodo '
                    'portal',
            'type': 'gauge',
            'value': ZenodoMetric.get_visitors
        },
        {
            'name': 'zenodo_researchers_total',
            'help': 'Number of researchers registered on Zenodo',
            'type': 'gauge',
            'value': ZenodoMetric.get_researchers
        },
        {
            'name': 'zenodo_files_total',
            'help': 'Number of files hosted on Zenodo',
            'type': 'gauge',
            'value': ZenodoMetric.get_files
        },
        {
            'name': 'zenodo_communities_total',
            'help': 'Number of Zenodo communities created',
            'type': 'gauge',
            'value': ZenodoMetric.get_communities
        },
    ]
    # Stash any pre-existing value under a private key before overriding.
    for application in (app, api):
        application.config['_ZENODO_METRICS_DATA'] = \
            application.config.pop('ZENODO_METRICS_DATA', None)
        application.config['ZENODO_METRICS_DATA'] = {
            'openaire-nexus': gauges,
        }
    yield
    # Restore the original value (possibly None) on teardown.
    for application in (app, api):
        application.config['ZENODO_METRICS_DATA'] = \
            application.config.pop('_ZENODO_METRICS_DATA')
| 2,608 | Python | .py | 65 | 30.276923 | 76 | 0.582808 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,359 | test_metrics_views.py | zenodo_zenodo/tests/unit/metrics/test_metrics_views.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2017-2023 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test Zenodo metrics views."""
from invenio_cache import current_cache
from zenodo.modules.metrics.utils import calculate_metrics
def test_metrics(api, api_client, db, es, use_metrics_config):
    """Metrics endpoint serves 503 until metrics are computed and cached.

    Fix: the original ``expected_data`` assignment ended with a stray
    trailing backslash, silently continuing the logical line into the
    following comment; replaced the backslash-continued string with
    standard parenthesized implicit concatenation.
    """
    # Clear any stored cache so the first request is a guaranteed miss.
    current_cache.delete("ZENODO_METRICS_CACHE::openaire-nexus")
    expected_data = (
        '# HELP zenodo_unique_visitors_web_total Number '
        'of unique visitors in total on Zenodo portal\n'
        '# TYPE zenodo_unique_visitors_web_total gauge\n'
        'zenodo_unique_visitors_web_total 0\n'
        '# HELP zenodo_researchers_total Number of '
        'researchers registered on Zenodo\n'
        '# TYPE zenodo_researchers_total gauge\n'
        'zenodo_researchers_total 0\n'
        '# HELP zenodo_files_total Number of files hosted '
        'on Zenodo\n'
        '# TYPE zenodo_files_total gauge\n'
        'zenodo_files_total 0\n'
        '# HELP zenodo_communities_total Number of Zenodo '
        'communities created\n'
        '# TYPE zenodo_communities_total gauge\n'
        'zenodo_communities_total 0\n'
    )
    # Initial request returns a 503, since metrics are not in cache.
    res = api_client.get("/metrics/openaire-nexus")
    assert res.status_code == 503
    assert res.headers["Retry-After"] == '1800'
    assert res.get_data() == (
        "Metrics not available. Try again after 30 minutes.")
    # Calculate and cache the metrics; the endpoint then serves them.
    calculate_metrics("openaire-nexus")
    res = api_client.get("/metrics/openaire-nexus")
    assert res.status_code == 200
    assert res.get_data() == expected_data
def test_metrics_invalid_key(api_client):
    """An unknown metrics key yields a 404 with an explanatory body."""
    response = api_client.get("/metrics/invalid-key")
    assert response.status_code == 404
    assert response.get_data() == 'Invalid key'
| 2,923 | Python | .py | 60 | 41.15 | 76 | 0.661059 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,360 | test_metrics_utils.py | zenodo_zenodo/tests/unit/metrics/test_metrics_utils.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2017-2023 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test Zenodo metrics utils."""
from zenodo.modules.metrics.utils import calculate_metrics, formatted_response
def test_calculate_metrics(api, db, es, cache, use_metrics_config):
    """Calculating metrics on an empty instance yields all-zero gauges."""
    expected_data = [
        {'name': name, 'help': help_text, 'type': 'gauge', 'value': 0}
        for name, help_text in [
            ('zenodo_unique_visitors_web_total',
             'Number of unique visitors in total on Zenodo portal'),
            ('zenodo_researchers_total',
             'Number of researchers registered on Zenodo'),
            ('zenodo_files_total',
             'Number of files hosted on Zenodo'),
            ('zenodo_communities_total',
             'Number of Zenodo communities created'),
        ]
    ]
    # The result is the same whether or not caching is requested.
    for use_cache in (False, True):
        assert calculate_metrics(
            'openaire-nexus', cache=use_cache) == expected_data
def test_formatted_response(api, use_metrics_config):
    """A metric dict renders as Prometheus HELP/TYPE/value lines."""
    metric = {
        'name': 'zenodo_unique_visitors_web_total',
        'help': 'Number of unique visitors in total on Zenodo portal',
        'type': 'gauge',
        'value': 0
    }
    expected = (
        '# HELP zenodo_unique_visitors_web_total Number '
        'of unique visitors in total on Zenodo portal\n'
        '# TYPE zenodo_unique_visitors_web_total gauge\n'
        'zenodo_unique_visitors_web_total 0\n'
    )
    assert formatted_response([metric]) == expected
| 2,849 | Python | .py | 71 | 32.84507 | 78 | 0.640535 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,361 | test_redirection.py | zenodo_zenodo/tests/unit/redirector/test_redirection.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo redirector tests."""
from __future__ import absolute_import, print_function
from flask import url_for
try:
from urllib.parse import parse_qs, urlparse
except ImportError:
from urlparse import parse_qs, urlparse
def compare_url(url, expected):
    """Return True if two URLs share the same path and query parameters.

    Scheme and host are deliberately ignored; the query string is
    compared as a parsed multi-dict, so parameter order is irrelevant.
    """
    parsed, reference = urlparse(url), urlparse(expected)
    same_query = parse_qs(parsed.query) == parse_qs(reference.query)
    return same_query and parsed.path == reference.path
def check_redirection(response, expected_url):
    """Assert that *response* is a 302 redirect pointing at *expected_url*."""
    assert response.status_code == 302
    redirected_to = response.headers['Location']
    assert compare_url(redirected_to, expected_url)
def test_redirection_community(app_client, db):
    """Legacy user collection URLs redirect to the community detail page."""
    expected = url_for('invenio_communities.detail', community_id=1,
                       _external=True)
    check_redirection(app_client.get('/collection/user-1'), expected)
def test_redirection_community_search(app_client, db):
    """Legacy community search URLs redirect, carrying the query along."""
    expected = url_for('invenio_communities.search', community_id=1,
                       _external=True)
    check_redirection(app_client.get('/search?cc=user-1'), expected)
    # The legacy 'p' query parameter is translated to 'q'.
    expected = url_for('invenio_communities.search', community_id=1,
                       q='test', _external=True)
    check_redirection(app_client.get('/search?cc=user-1&p=test'), expected)
def test_redirection_communities_provisional_user(app_client, db):
    """Provisional-user collections redirect to the community curate page."""
    expected = url_for('invenio_communities.curate', community_id=1,
                       _external=True)
    check_redirection(app_client.get('/search?cc=provisional-user-1'),
                      expected)
    # The legacy 'p' query parameter is translated to 'q'.
    expected = url_for('invenio_communities.curate', community_id=1,
                       q='test', _external=True)
    check_redirection(app_client.get('/search?cc=provisional-user-1&p=test'),
                      expected)
def test_redirection_communities_about(app_client, db):
    """Legacy community about URLs redirect to the new about page."""
    expected = url_for('invenio_communities.about', community_id=1,
                       _external=True)
    check_redirection(app_client.get('/communities/about/1/'), expected)
def test_redirection_collections_type(app_client, db):
    """Legacy collection URLs redirect to type/subtype search pages."""
    # A plain resource type.
    expected = url_for('invenio_search_ui.search', type='video',
                       _external=True)
    check_redirection(app_client.get('/collection/videos'), expected)
    # A resource type together with its subtype.
    expected = url_for('invenio_search_ui.search', type='publication',
                       subtype='deliverable', _external=True)
    check_redirection(app_client.get('/collection/deliverable'), expected)
def test_redirection_collections_search(app_client, db):
    """Legacy 'cc' search URLs redirect to type/subtype search pages."""
    # A plain resource type.
    expected = url_for('invenio_search_ui.search', type='video',
                       _external=True)
    check_redirection(app_client.get('/search?cc=videos'), expected)
    # A resource type together with its subtype.
    expected = url_for('invenio_search_ui.search', type='publication',
                       subtype='deliverable', _external=True)
    check_redirection(app_client.get('/search?cc=deliverable'), expected)
    # The legacy 'p' query parameter is translated to 'q'.
    expected = url_for('invenio_search_ui.search', type='publication',
                       subtype='deliverable', q='test', _external=True)
    check_redirection(app_client.get('/search?cc=deliverable&p=test'),
                      expected)
def test_redirection_search_behaviour(app_client, db):
    """The search endpoint serves the search page for plain and query URLs."""
    for params in ({}, {'page': 1, 'size': 20, 'q': 'Aa'}):
        res = app_client.get(url_for('invenio_search_ui.search', **params))
        assert res.status_code == 200
        # The page must embed the Angular search application tag.
        assert '<invenio-search' in res.get_data(as_text=True)
| 5,709 | Python | .py | 114 | 43.157895 | 78 | 0.692806 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,362 | test_openaire.py | zenodo_zenodo/tests/unit/openaire/test_openaire.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2017 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test for OpenAIRE extension."""
from __future__ import absolute_import, print_function
from zenodo.modules.openaire import current_openaire
def test_openaire_type(app):
"""Test OpenAIRE type."""
assert set(current_openaire.inverse_openaire_community_map.keys()) == \
set(['c1', 'c2', 'c3'])
assert set(current_openaire.inverse_openaire_community_map['c1']) == \
set(['foo', 'bar'])
assert set(current_openaire.inverse_openaire_community_map['c2']) == \
set(['foo'])
assert set(current_openaire.inverse_openaire_community_map['c3']) == \
set(['bar'])
assert set(current_openaire.openaire_communities.keys()) == \
set(['foo', 'bar'])
| 1,673 | Python | .py | 38 | 41.157895 | 76 | 0.72543 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,363 | test_openaire_helpers.py | zenodo_zenodo/tests/unit/openaire/test_openaire_helpers.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2017 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test for OpenAIRE helpers."""
from __future__ import absolute_import, print_function
from zenodo.modules.openaire.helpers import openaire_id, openaire_link, \
openaire_type
def test_openaire_type(app, minimal_record):
"""Test OpenAIRE type."""
r = minimal_record
# Default zenodo type is software.
assert openaire_type(r) == 'software'
# Other type
r['resource_type']['type'] = 'other'
assert openaire_type(r) == 'other'
# Datasets just map to datasets.
r['resource_type']['type'] = 'dataset'
assert openaire_type(r) == 'dataset'
# Open publications
r['resource_type']['type'] = 'publication'
assert openaire_type(r) == 'publication'
# Non-open publications
r['access_right'] = 'embargoed'
assert openaire_type(r) is None
# with grants
r['grants'] = [{'id': 'someid'}]
assert openaire_type(r) == 'publication'
# in ecfunded community
del r['grants']
r['communities'] = ['ecfunded']
assert openaire_type(r) == 'publication'
r['communities'] = ['zenodo']
assert openaire_type(r) is None
def test_openaire_id(app, minimal_record):
"""Test OpenAIRE ID."""
r = minimal_record
r['doi'] = u'10.5281/zenodo.123'
r['_oai'] = {'id': u'oai:zenodo.org:123'}
# Default zenodo type is software
assert openaire_id(r) == 'od______2659::47287d1800c112499a117ca17aa1909d'
# Other type
r['resource_type']['type'] = 'other'
assert openaire_id(r) == 'od______2659::47287d1800c112499a117ca17aa1909d'
# Dataset ID
r['resource_type']['type'] = 'dataset'
assert openaire_id(r) == 'od______2659::204007f516ddcf0a452c2f22d48695ca'
# Publication ID
r['resource_type']['type'] = 'publication'
assert openaire_id(r) == 'od______2659::47287d1800c112499a117ca17aa1909d'
def test_openaire_link(app, minimal_record):
"""Test OpenAIRE ID."""
r = minimal_record
r['doi'] = u'10.5281/zenodo.123'
r['_oai'] = {'id': u'oai:zenodo.org:123'}
# Default zenodo type is software
assert openaire_link(r) ==\
'https://explore.openaire.eu/search/software?' \
'pid=10.5281/zenodo.123'
# Other type
r['resource_type']['type'] = 'other'
assert openaire_link(r) == \
'https://explore.openaire.eu/search/other?' \
'pid=10.5281/zenodo.123'
# Dataset ID
r['resource_type']['type'] = 'dataset'
assert openaire_link(r) == \
'https://explore.openaire.eu/search/dataset' \
'?pid=10.5281/zenodo.123'
# Publication ID
r['resource_type']['type'] = 'publication'
assert openaire_link(r) == \
'https://explore.openaire.eu/search/publication' \
'?pid=10.5281/zenodo.123'
| 3,687 | Python | .py | 93 | 35.311828 | 77 | 0.676092 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,364 | test_communities_api.py | zenodo_zenodo/tests/unit/communities/test_communities_api.py | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Test Zenodo communities API."""
from __future__ import absolute_import, print_function
from helpers import publish_and_expunge
from invenio_communities.models import InclusionRequest
from invenio_pidrelations.contrib.versioning import PIDVersioning
from six import BytesIO, b
from zenodo.modules.communities.api import ZenodoCommunity
from zenodo.modules.deposit.api import ZenodoDeposit
from zenodo.modules.deposit.resolvers import deposit_resolver
from zenodo.modules.records.resolvers import record_resolver
def test_basic_api(app, db, communities, deposit, deposit_file):
    """Test basic workflow using Deposit and Communities API.

    Publishes two versions of a record, requests community inclusion
    through v2, accepts each community via a different version, and
    checks that acceptance and removal propagate across all versions.
    """
    # Publish v1 and remember its deposit/record PID values for later
    # re-resolution (the DB session is expunged between steps).
    deposit_v1 = publish_and_expunge(db, deposit)
    depid_v1_value = deposit_v1['_deposit']['id']
    recid_v1, record_v1 = deposit_v1.fetch_published()
    recid_v1_value = recid_v1.pid_value
    # Create a second version with a new file and publish it.
    deposit_v1.newversion()
    pv = PIDVersioning(child=recid_v1)
    depid_v2 = pv.draft_child_deposit
    deposit_v2 = ZenodoDeposit.get_record(depid_v2.object_uuid)
    deposit_v2.files['file.txt'] = BytesIO(b('file1'))
    deposit_v2 = publish_and_expunge(db, deposit_v2)
    deposit_v2 = deposit_v2.edit()
    # 1. Request for 'c1' and 'c2' through deposit v2
    deposit_v2['communities'] = ['c1', 'c2', ]
    deposit_v2 = publish_and_expunge(db, deposit_v2)
    recid_v2, record_v2 = deposit_v2.fetch_published()
    recid_v2_value = recid_v2.pid_value
    depid_v1, deposit_v1 = deposit_resolver.resolve(depid_v1_value)
    recid_v1, record_v1 = deposit_v1.fetch_published()
    # Until requests are accepted, neither record lists the communities.
    assert record_v1.get('communities', []) == []
    assert record_v2.get('communities', []) == []
    c1_api = ZenodoCommunity('c1')
    c2_api = ZenodoCommunity('c2')
    # Inclusion requests should be visible for both records
    assert c1_api.get_comm_irs(record_v1, pid=recid_v1).count() == 1
    assert c1_api.get_comm_irs(record_v2, pid=recid_v2).count() == 1
    assert c2_api.get_comm_irs(record_v1, pid=recid_v1).count() == 1
    assert c2_api.get_comm_irs(record_v2, pid=recid_v2).count() == 1
    # Accept to 'c1' through record_v2 (as originally requested),
    # and 'c2' through record_v1 (version)
    c1_api.accept_record(record_v2, pid=recid_v2)
    c2_api.accept_record(record_v1, pid=recid_v1)
    recid_v1, record_v1 = record_resolver.resolve(recid_v1_value)
    recid_v2, record_v2 = record_resolver.resolve(recid_v2_value)
    # Accepting individual record to a community should propagate the changes
    # to all versions
    assert record_v1['communities'] == record_v2['communities'] == \
        ['c1', 'c2', ]
    # Removing 'c1' from deposit_v1 should remove it from two published records
    depid_v1, deposit_v1 = deposit_resolver.resolve(depid_v1_value)
    deposit_v1 = deposit_v1.edit()
    deposit_v1['communities'] = []
    deposit_v1 = publish_and_expunge(db, deposit_v1)
    recid_v1, record_v1 = record_resolver.resolve(recid_v1_value)
    recid_v2, record_v2 = record_resolver.resolve(recid_v2_value)
    assert record_v1.get('communities', []) == []
    assert record_v2.get('communities', []) == []
def test_autoadd(mocker, app, db, users, communities, deposit, deposit_file,
                 communities_autoadd_enabled, communities_mail_enabled):
    """Test basic workflow using Deposit and Communities API.

    Exercises automatic community additions across two record versions:
    'c3' (owned by the uploader) and 'grants_comm' are auto-accepted, while
    'c1', 'c2', 'ecfunded' and 'zenodo' become pending inclusion requests.
    Also checks request e-mail notifications, acceptance propagation between
    versions, and removal propagation from an edited deposit.
    """
    email_mock = mocker.patch(
        'invenio_communities.receivers.send_community_request_email')
    # Publish v1 and create a draft deposit for v2.
    deposit_v1 = publish_and_expunge(db, deposit)
    depid_v1_value = deposit_v1['_deposit']['id']
    recid_v1, record_v1 = deposit_v1.fetch_published()
    recid_v1_value = recid_v1.pid_value
    deposit_v1 = deposit_v1.newversion()
    pv = PIDVersioning(child=recid_v1)
    depid_v2 = pv.draft_child_deposit
    depid_v2_value = depid_v2.pid_value
    deposit_v2 = ZenodoDeposit.get_record(depid_v2.get_assigned_object())
    deposit_v2.files['file.txt'] = BytesIO(b('file1'))
    deposit_v2 = publish_and_expunge(db, deposit_v2)
    deposit_v2 = deposit_v2.edit()
    # 1. Request for 'c1' and 'c3' (owned by user) through deposit v2
    deposit_v2['communities'] = ['c1', 'c2', 'c3', ]
    deposit_v2['grants'] = [{'title': 'SomeGrant'}, ]
    deposit_v2 = publish_and_expunge(db, deposit_v2)
    recid_v2, record_v2 = deposit_v2.fetch_published()
    assert record_v2['grants'] == [{'title': 'SomeGrant'}, ]
    recid_v2_value = recid_v2.pid_value
    depid_v1, deposit_v1 = deposit_resolver.resolve(depid_v1_value)
    recid_v1, record_v1 = deposit_v1.fetch_published()
    # Auto-added communities appear on both published record versions;
    # the deposits keep the full (requested + auto-added) list.
    assert record_v1.get('communities', []) == ['c3', 'grants_comm']
    assert record_v2.get('communities', []) == ['c3', 'grants_comm']
    assert deposit_v1.get('communities', []) == ['c1', 'c2', 'c3', 'ecfunded',
                                                 'grants_comm', 'zenodo']
    assert deposit_v2.get('communities', []) == ['c1', 'c2', 'c3', 'ecfunded',
                                                 'grants_comm', 'zenodo']
    # 'c3' and 'grants_comm' were automatically added (no mail sent),
    # while 'c1', 'c2', and 'ecfunded' and 'zenodo' are requested for,
    # however, for 'c2' and 'zenodo', notifications have been disabled
    assert email_mock.call_count == 2
    c1_api = ZenodoCommunity('c1')
    c2_api = ZenodoCommunity('c2')
    c3_api = ZenodoCommunity('c3')
    grants_comm_api = ZenodoCommunity('grants_comm')
    ecfunded_api = ZenodoCommunity('ecfunded')
    zenodo_api = ZenodoCommunity('zenodo')
    # Inclusion requests should be visible for both records
    assert c1_api.get_comm_irs(record_v1, pid=recid_v1).count() == 1
    assert c1_api.get_comm_irs(record_v2, pid=recid_v2).count() == 1
    assert c2_api.get_comm_irs(record_v1, pid=recid_v1).count() == 1
    assert c2_api.get_comm_irs(record_v2, pid=recid_v2).count() == 1
    # Auto-accepted communities never leave a pending inclusion request.
    assert c3_api.get_comm_irs(record_v1, pid=recid_v1).count() == 0
    assert c3_api.get_comm_irs(record_v2, pid=recid_v2).count() == 0
    assert grants_comm_api.get_comm_irs(
        record_v1, pid=recid_v1).count() == 0
    assert grants_comm_api.get_comm_irs(
        record_v2, pid=recid_v2).count() == 0
    assert ecfunded_api.get_comm_irs(
        record_v1, pid=recid_v1).count() == 1
    assert ecfunded_api.get_comm_irs(
        record_v2, pid=recid_v2).count() == 1
    assert zenodo_api.get_comm_irs(record_v1, pid=recid_v1).count() == 1
    assert zenodo_api.get_comm_irs(record_v2, pid=recid_v2).count() == 1
    # Accept to 'c1' through record_v2 (as originally requested),
    # and 'c2' through record_v1 (resolved through version)
    c1_api.accept_record(record_v2, pid=recid_v2)
    c2_api.accept_record(record_v1, pid=recid_v1)
    recid_v1, record_v1 = record_resolver.resolve(recid_v1_value)
    recid_v2, record_v2 = record_resolver.resolve(recid_v2_value)
    # Accepting individual record to a community should propagate the changes
    # to all versions
    assert record_v1.get('communities', []) == ['c1', 'c2', 'c3',
                                                'grants_comm']
    assert record_v2.get('communities', []) == ['c1', 'c2', 'c3',
                                                'grants_comm']
    assert deposit_v1.get('communities', []) == ['c1', 'c2', 'c3', 'ecfunded',
                                                 'grants_comm', 'zenodo']
    assert deposit_v2.get('communities', []) == ['c1', 'c2', 'c3', 'ecfunded',
                                                 'grants_comm', 'zenodo']
    # Removing 'c1'-'c3' from deposit_v1 should remove it from two published
    # records and other deposits as well
    depid_v1, deposit_v1 = deposit_resolver.resolve(depid_v1_value)
    deposit_v1 = deposit_v1.edit()
    deposit_v1['communities'] = []
    deposit_v1 = publish_and_expunge(db, deposit_v1)
    depid_v2, deposit_v2 = deposit_resolver.resolve(depid_v2_value)
    recid_v1, record_v1 = record_resolver.resolve(recid_v1_value)
    recid_v2, record_v2 = record_resolver.resolve(recid_v2_value)
    assert record_v1.get('communities', []) == ['grants_comm', ]
    assert record_v2.get('communities', []) == ['grants_comm', ]
    assert deposit_v1.get('communities', []) == ['ecfunded', 'grants_comm',
                                                 'zenodo']
    assert deposit_v2.get('communities', []) == ['ecfunded', 'grants_comm',
                                                 'zenodo']
def test_autoadd_explicit(
        app, db, users, communities, deposit, deposit_file,
        communities_autoadd_enabled):
    """Explicitly the autoadded communities."""
    # Request exactly the communities that auto-add would produce anyway.
    deposit['communities'] = ['ecfunded', 'grants_comm', 'zenodo']
    deposit['grants'] = [{'title': 'SomeGrant'}, ]
    published = publish_and_expunge(db, deposit)
    _, record = published.fetch_published()
    # Only the auto-accepted grants community lands on the published
    # record; the deposit keeps the full requested list.
    assert record.get('communities', []) == ['grants_comm', ]
    assert published.get('communities', []) == [
        'ecfunded', 'grants_comm', 'zenodo']
def test_autoadd_explicit_newversion(
        app, db, users, communities, deposit, deposit_file,
        communities_autoadd_enabled):
    """Explicitly the autoadded communities in a new version."""
    deposit_v1 = publish_and_expunge(db, deposit)
    recid_v1, record_v1 = deposit_v1.fetch_published()
    depid_v1_value = deposit_v1['_deposit']['id']
    recid_v1_value = recid_v1.pid_value
    # Create a draft for a new version and request the auto-addable
    # communities explicitly on it.
    deposit_v1 = deposit_v1.newversion()
    pv = PIDVersioning(child=recid_v1)
    depid_v2 = pv.draft_child_deposit
    depid_v2_value = depid_v2.pid_value
    deposit_v2 = ZenodoDeposit.get_record(depid_v2.get_assigned_object())
    deposit_v2['communities'] = ['ecfunded', 'grants_comm', 'zenodo']
    deposit_v2['grants'] = [{'title': 'SomeGrant'}, ]
    deposit_v2.files['file.txt'] = BytesIO(b('file1'))
    deposit_v2 = publish_and_expunge(db, deposit_v2)
    recid_v2, record_v2 = deposit_v2.fetch_published()
    depid_v1, deposit_v1 = deposit_resolver.resolve(depid_v1_value)
    depid_v2, deposit_v2 = deposit_resolver.resolve(depid_v2_value)
    recid_v1, record_v1 = record_resolver.resolve(recid_v1_value)
    # Only the auto-accepted 'grants_comm' shows up on both published
    # records; both deposits keep the full requested list.
    assert record_v1.get('communities', []) == ['grants_comm', ]
    assert deposit_v1.get('communities', []) == ['ecfunded', 'grants_comm',
                                                 'zenodo']
    assert record_v2.get('communities', []) == ['grants_comm', ]
    assert deposit_v2.get('communities', []) == ['ecfunded', 'grants_comm',
                                                 'zenodo']
def test_communities_newversion_addition(
        app, db, users, communities, deposit, deposit_file):
    """Make sure that new version of record synchronizes the communities."""
    deposit['communities'] = ['c1', 'c2']
    deposit_v1 = publish_and_expunge(db, deposit)
    recid_v1, record_v1 = deposit_v1.fetch_published()
    depid_v1_value = deposit_v1['_deposit']['id']
    recid_v1_value = recid_v1.pid_value
    # Accept both pending inclusion requests on v1.
    c1_api = ZenodoCommunity('c1')
    c2_api = ZenodoCommunity('c2')
    c1_api.accept_record(record_v1, pid=recid_v1)
    c2_api.accept_record(record_v1, pid=recid_v1)
    deposit_v1 = deposit_v1.newversion()
    pv = PIDVersioning(child=recid_v1)
    depid_v2 = pv.draft_child_deposit
    depid_v2_value = depid_v2.pid_value
    deposit_v2 = ZenodoDeposit.get_record(depid_v2.get_assigned_object())
    # Remove 'c2' and request for 'c5'. Make sure that communities from
    # previous record version are preserved/removed properly
    deposit_v2['communities'] = ['c1', 'c5']
    deposit_v2.files['file.txt'] = BytesIO(b('file1'))
    deposit_v2 = publish_and_expunge(db, deposit_v2)
    recid_v2, record_v2 = deposit_v2.fetch_published()
    depid_v1, deposit_v1 = deposit_resolver.resolve(depid_v1_value)
    depid_v2, deposit_v2 = deposit_resolver.resolve(depid_v2_value)
    recid_v1, record_v1 = record_resolver.resolve(recid_v1_value)
    # 'c1' stays accepted on both versions; 'c5' is only requested, so it
    # shows on the deposits but not on the published records.
    assert record_v1.get('communities', []) == ['c1', ]
    assert deposit_v1.get('communities', []) == ['c1', 'c5', ]
    assert record_v2.get('communities', []) == ['c1', ]
    assert deposit_v2.get('communities', []) == ['c1', 'c5', ]
def test_communities_newversion_while_ir_pending_bug(
        app, db, users, communities, deposit, deposit_file):
    """Make sure that pending IRs remain after a new version (bug)."""
    deposit['communities'] = ['c1', 'c2']
    deposit_v1 = publish_and_expunge(db, deposit)
    recid_v1, record_v1 = deposit_v1.fetch_published()
    depid_v1_value = deposit_v1['_deposit']['id']
    recid_v1_value = recid_v1.pid_value
    # Two inclusion requests are pending
    assert InclusionRequest.query.count() == 2
    # Accept one community
    c1_api = ZenodoCommunity('c1')
    c1_api.accept_record(record_v1, pid=recid_v1)
    # Publish a new version while the 'c2' inclusion request is still open.
    deposit_v1 = deposit_v1.newversion()
    pv = PIDVersioning(child=recid_v1)
    depid_v2 = pv.draft_child_deposit
    depid_v2_value = depid_v2.pid_value
    deposit_v2 = ZenodoDeposit.get_record(depid_v2.get_assigned_object())
    deposit_v2.files['file.txt'] = BytesIO(b('file1'))
    deposit_v2 = publish_and_expunge(db, deposit_v2)
    recid_v2, record_v2 = deposit_v2.fetch_published()
    depid_v1, deposit_v1 = deposit_resolver.resolve(depid_v1_value)
    depid_v2, deposit_v2 = deposit_resolver.resolve(depid_v2_value)
    recid_v1, record_v1 = record_resolver.resolve(recid_v1_value)
    # Make sure there is still IR to community 'c2' after newversion
    assert InclusionRequest.query.count() == 1
    assert InclusionRequest.query.one().id_community == 'c2'
    assert record_v1.get('communities', []) == ['c1', ]
    assert deposit_v1.get('communities', []) == ['c1', 'c2', ]
    assert record_v2.get('communities', []) == ['c1', ]
    assert deposit_v2.get('communities', []) == ['c1', 'c2', ]
def test_propagation_with_newversion_open(
        app, db, users, communities, deposit, deposit_file):
    """Adding old versions to a community should propagate to all drafts."""
    # deposit['communities'] = ['c1', 'c2']
    deposit_v1 = publish_and_expunge(db, deposit)
    deposit_v1 = deposit_v1.edit()
    recid_v1, record_v1 = deposit_v1.fetch_published()
    recid_v1_value = recid_v1.pid_value
    deposit_v1 = deposit_v1.newversion()
    pv = PIDVersioning(child=recid_v1)
    depid_v2 = pv.draft_child_deposit
    depid_v2_value = depid_v2.pid_value
    # New version in 'deposit_v2' has not been published yet
    deposit_v2 = ZenodoDeposit.get_record(depid_v2.get_assigned_object())
    # depid_v1_value = deposit_v1['_deposit']['id']
    # depid_v1, deposit_v1 = deposit_resolver.resolve(depid_v1_value)
    # Request communities on v1 while the v2 draft is still open.
    deposit_v1['communities'] = ['c1', 'c2', ]
    deposit_v1 = publish_and_expunge(db, deposit_v1)
    recid_v1, record_v1 = record_resolver.resolve(recid_v1_value)
    c1_api = ZenodoCommunity('c1')
    c1_api.accept_record(record_v1, pid=recid_v1)
    # The still-open v2 draft received the community changes as well.
    depid_v2, deposit_v2 = deposit_resolver.resolve(depid_v2_value)
    assert deposit_v2['communities'] == ['c1', 'c2']
    deposit_v2.files['file.txt'] = BytesIO(b('file1'))
    deposit_v2 = publish_and_expunge(db, deposit_v2)
    recid_v2, record_v2 = deposit_v2.fetch_published()
    # Only the accepted 'c1' shows up on the published v2 record.
    assert record_v2['communities'] == ['c1', ]
| 15,873 | Python | .py | 292 | 47.739726 | 79 | 0.662824 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,365 | test_communities_webhooks.py | zenodo_zenodo/tests/unit/communities/test_communities_webhooks.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2021 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test Zenodo communities webhooks."""
from __future__ import absolute_import, print_function
import json
import mock
import pytest
from helpers import login_user_via_session
from six import BytesIO
@pytest.fixture
def use_webhooks_config(app, api):
    """Temporarily install a community webhooks configuration.

    Sets ``ZENODO_COMMUNITIES_WEBHOOKS`` on both the UI (``app``) and API
    (``api``) applications, and restores the previous value of each on
    teardown.
    """
    # Capture each application's previous value independently. The original
    # implementation only saved ``app``'s value and restored it into both
    # configs, silently discarding whatever ``api`` had before.
    old_app_value = app.config.pop('ZENODO_COMMUNITIES_WEBHOOKS', None)
    old_api_value = api.config.pop('ZENODO_COMMUNITIES_WEBHOOKS', None)
    webhooks_config = {
        'c1': {
            'c1_recipient': {
                'url': 'https://example.org/webhooks/zenodo',
                'headers': {
                    'X-Custom': 'custom-header',
                },
                'params': {
                    'token': 'some-token'
                }
            },
        }
    }
    app.config['ZENODO_COMMUNITIES_WEBHOOKS'] = webhooks_config
    api.config['ZENODO_COMMUNITIES_WEBHOOKS'] = webhooks_config
    yield
    app.config['ZENODO_COMMUNITIES_WEBHOOKS'] = old_app_value
    api.config['ZENODO_COMMUNITIES_WEBHOOKS'] = old_api_value
def test_basic_webhooks(
        app, db, communities, deposit, deposit_file, mocker, es, deposit_url,
        get_json, json_auth_headers, license_record, users, app_client,
        api_client, use_webhooks_config):
    """Test community webhooks executions on inclusion request and approval.

    Creates a deposit requesting inclusion in community 'c1', publishes it
    (first webhook call), then accepts the record into the community as the
    community owner (second webhook call).
    """
    test_data = dict(
        metadata=dict(
            upload_type='presentation',
            title='Test title',
            creators=[
                dict(name='Doe, John', affiliation='Atlantis'),
                dict(name='Smith, Jane', affiliation='Atlantis')
            ],
            description='Test Description',
            publication_date='2013-05-08',
            access_right='open',
            license='CC0-1.0',
            communities=[{'identifier': 'c1'}],
        )
    )
    with mock.patch(
            'zenodo.modules.communities.tasks.requests.post') as requests_mock:
        res = api_client.post(
            deposit_url, data=json.dumps(test_data), headers=json_auth_headers)
        # Parse the creation response once (it was previously parsed twice).
        created = get_json(res, code=201)
        links = created['links']
        recid = created['id']
        deposit_bucket = links['bucket']
        deposit_publish = links['publish']
        # Upload files
        res = api_client.put(
            deposit_bucket + '/test1.txt',
            input_stream=BytesIO(b'testfile1'), headers=json_auth_headers)
        assert res.status_code == 200
        res = api_client.put(
            deposit_bucket + '/test2.txt',
            input_stream=BytesIO(b'testfile2'), headers=json_auth_headers)
        assert res.status_code == 200
        # Publish deposit; get_json asserts the 202 status code.
        res = api_client.post(deposit_publish, headers=json_auth_headers)
        get_json(res, code=202)
        # Publishing with a pending 'c1' request triggers one webhook call.
        calls = requests_mock.call_args_list
        assert len(calls) == 1
        # Accept the record into 'c1' as the community owner.
        login_user_via_session(app_client, email=users[1]['email'])
        res = app_client.post(
            '/communities/c1/curaterecord/',
            json={'action': 'accept', 'recid': recid}
        )
        assert res.status_code == 200
        # Acceptance triggers the second webhook call.
        assert len(calls) == 2
| 4,094 | Python | .py | 103 | 32.184466 | 79 | 0.63433 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,366 | test_bagit_archiver.py | zenodo_zenodo/tests/unit/sipstore/test_bagit_archiver.py | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2017 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Test Zenodo SIPStore."""
from __future__ import absolute_import, print_function, unicode_literals
import json
import arrow
from helpers import publish_and_expunge
from invenio_pidrelations.contrib.versioning import PIDVersioning
from invenio_pidstore.models import PersistentIdentifier
from invenio_sipstore.api import SIP
from invenio_sipstore.archivers import BagItArchiver
from invenio_sipstore.models import SIP as SIPModel
from six import BytesIO, b
from zenodo.modules.deposit.api import ZenodoDeposit
from zenodo.modules.sipstore.tasks import archive_sip
from zenodo.modules.sipstore.utils import generate_bag_path
def fetch_suff(sip, filename_suffix):
    """Return the first SIPFile of ``sip`` whose filepath ends with suffix.

    Raises ``StopIteration`` when no file matches.
    """
    matching = (sip_file for sip_file in sip.files
                if sip_file.filepath.endswith(filename_suffix))
    return next(matching)
def get_m_item(meta, path):
    """Return the metadata entry whose ``'filepath'`` equals ``path``.

    Raises ``StopIteration`` when no entry matches (same behavior as the
    plain ``next()`` over a generator).
    """
    for entry in meta:
        if entry['filepath'] == path:
            return entry
    raise StopIteration
def test_archiving(app, db, deposit, deposit_file, locations, archive_fs):
    """Test ZenodoSIP archiving.

    Builds three SIPs for one record lineage (initial publish, metadata
    edit, new version with changed files), then archives them in reverse
    order and verifies the BagIt layout and the fetch.txt deduplication.
    """
    # Stash the configuration and enable writing
    orig = app.config['SIPSTORE_ARCHIVER_WRITING_ENABLED']
    app.config['SIPSTORE_ARCHIVER_WRITING_ENABLED'] = True
    deposit.files['test2.txt'] = BytesIO(b'test-two')
    deposit_v1 = publish_and_expunge(db, deposit)
    recid_v1, record_v1 = deposit_v1.fetch_published()
    recid_v1_id = recid_v1.id
    # Record files after publishing: 'test.txt', 'test2.txt'
    sip1 = SIP(SIPModel.query.one())
    sip1_id = sip1.id
    # Edit the metadata
    deposit_v1 = deposit_v1.edit()
    deposit_v1['title'] = "New title"
    deposit_v1 = publish_and_expunge(db, deposit_v1)
    # Record files after publishing: 'test.txt', 'test2.txt'
    sip2_id = SIPModel.query.order_by(SIPModel.created.desc()).first().id
    # Create a new version
    deposit_v1.newversion()
    recid_v1 = PersistentIdentifier.query.get(recid_v1_id)
    pv = PIDVersioning(child=recid_v1)
    depid_v2 = pv.draft_child_deposit
    deposit_v2 = ZenodoDeposit.get_record(depid_v2.object_uuid)
    del deposit_v2.files['test.txt']
    deposit_v2.files['test3.txt'] = BytesIO(b('test-three'))
    deposit_v2 = publish_and_expunge(db, deposit_v2)
    # Record files after publishing: 'test2.txt', 'test3.txt'
    sip1 = SIP(SIPModel.query.get(sip1_id))
    sip2 = SIP(SIPModel.query.get(sip2_id))
    sip3 = SIP(SIPModel.query.order_by(SIPModel.created.desc()).first())
    # Because we are using secure_filename when writing SIPFiles we need to
    # genenarate the correct names: <SIPFile.id>-<secure_filename>
    s1_file1_fn = '{0}-test.txt'.format(fetch_suff(sip1, 'test.txt').file_id)
    s1_file1_fp = 'data/files/{0}'.format(s1_file1_fn)
    s1_file2_fn = '{0}-test2.txt'.format(fetch_suff(sip1, 'test2.txt').file_id)
    s1_file2_fp = 'data/files/{0}'.format(s1_file2_fn)
    s3_file2_fn = '{0}-test2.txt'.format(fetch_suff(sip3, 'test2.txt').file_id)
    s3_file2_fp = 'data/files/{0}'.format(s3_file2_fn)
    s3_file3_fn = '{0}-test3.txt'.format(fetch_suff(sip3, 'test3.txt').file_id)
    s3_file3_fp = 'data/files/{0}'.format(s3_file3_fn)
    # Extract the 'files' section of each SIP's BagIt metadata.
    sip1_bagmeta = json.loads(next(
        m.content for m in sip1.metadata if m.type.name == 'bagit'))['files']
    sip2_bagmeta = json.loads(next(
        m.content for m in sip2.metadata if m.type.name == 'bagit'))['files']
    sip3_bagmeta = json.loads(next(
        m.content for m in sip3.metadata if m.type.name == 'bagit'))['files']
    # Check if Bagit metadata contains the correct file-fetching information
    assert set([f['filepath'] for f in sip1_bagmeta]) == \
        set([s1_file1_fp,
             s1_file2_fp,
             'data/filenames.txt',
             'data/metadata/record-json.json', 'bag-info.txt',
             'manifest-md5.txt', 'bagit.txt', 'tagmanifest-md5.txt'])
    assert not BagItArchiver._is_fetched(
        get_m_item(sip1_bagmeta, s1_file1_fp))
    assert not BagItArchiver._is_fetched(
        get_m_item(sip1_bagmeta, s1_file2_fp))
    assert set([f['filepath'] for f in sip2_bagmeta]) == \
        set([s1_file1_fp,
             s1_file2_fp,
             'data/filenames.txt',
             'data/metadata/record-json.json', 'bag-info.txt',
             'manifest-md5.txt', 'bagit.txt', 'tagmanifest-md5.txt',
             'fetch.txt'])
    # Both files should be fetched since it's only metadata-edit submission
    assert BagItArchiver._is_fetched(
        get_m_item(sip2_bagmeta, s1_file1_fp))
    assert BagItArchiver._is_fetched(
        get_m_item(sip2_bagmeta, s1_file2_fp))
    assert set([f['filepath'] for f in sip3_bagmeta]) == \
        set([s3_file2_fp,
             s3_file3_fp,
             'data/filenames.txt',
             'data/metadata/record-json.json', 'bag-info.txt',
             'manifest-md5.txt', 'bagit.txt', 'tagmanifest-md5.txt',
             'fetch.txt'])
    # First file should be fetched from previous version and new file should
    # be archived in this bag.
    assert BagItArchiver._is_fetched(
        get_m_item(sip3_bagmeta, s3_file2_fp))
    assert not BagItArchiver._is_fetched(
        get_m_item(sip3_bagmeta, s3_file3_fp))
    archiver1 = BagItArchiver(sip1)
    archiver2 = BagItArchiver(sip2)
    archiver3 = BagItArchiver(sip3)
    # Each archiver subpath follows: '<recid>/r/<ISO-8601-SIP-timestamp>'
    sip1_ts = arrow.get(sip1.model.created).isoformat()
    sip2_ts = arrow.get(sip2.model.created).isoformat()
    sip3_ts = arrow.get(sip3.model.created).isoformat()
    assert archiver1.get_archive_subpath() == '2/r/{0}'.format(sip1_ts)
    assert archiver2.get_archive_subpath() == '2/r/{0}'.format(sip2_ts)
    assert archiver3.get_archive_subpath() == '3/r/{0}'.format(sip3_ts)
    # As a test, write the SIPs in reverse chronological order
    assert not sip1.archived
    assert not sip2.archived
    assert not sip3.archived
    archive_sip.delay(sip3.id)
    archive_sip.delay(sip2.id)
    archive_sip.delay(sip1.id)
    assert sip1.archived
    assert sip2.archived
    assert sip3.archived
    fs1 = archive_fs.opendir(archiver1.get_archive_subpath())
    assert set(fs1.listdir()) == set(['tagmanifest-md5.txt', 'bagit.txt',
                                      'manifest-md5.txt', 'bag-info.txt',
                                      'data'])
    assert set(fs1.listdir('data')) == set(['metadata', 'files',
                                            'filenames.txt'])
    assert fs1.listdir('data/metadata') == ['record-json.json', ]
    assert set(fs1.listdir('data/files')) == set([s1_file1_fn, s1_file2_fn])
    fs2 = archive_fs.opendir(archiver2.get_archive_subpath())
    assert set(fs2.listdir()) == set(['tagmanifest-md5.txt', 'bagit.txt',
                                      'manifest-md5.txt', 'bag-info.txt',
                                      'data', 'fetch.txt'])
    # Second SIP has written only the metadata,
    # because of that There should be no 'files/', but 'filesnames.txt' should
    # still be there because of the fetch.txt
    assert set(fs2.listdir('data')) == set(['metadata', 'filenames.txt'])
    assert fs2.listdir('data/metadata') == ['record-json.json', ]
    with fs2.open('fetch.txt') as fp:
        cnt = fp.read().splitlines()
    # Fetched files should correctly fetch the files from the first archive
    base_uri = archiver1.get_archive_base_uri()
    assert set(cnt) == set([
        '{base}/2/r/{s1ts}/{fn} 4 {fn}'.format(fn=s1_file1_fp, base=base_uri,
                                               s1ts=sip1_ts),
        '{base}/2/r/{s1ts}/{fn} 8 {fn}'.format(fn=s1_file2_fp, base=base_uri,
                                               s1ts=sip1_ts),
    ])
    fs3 = archive_fs.opendir(archiver3.get_archive_subpath())
    assert set(fs3.listdir()) == set(['tagmanifest-md5.txt', 'bagit.txt',
                                      'manifest-md5.txt', 'bag-info.txt',
                                      'data', 'fetch.txt'])
    # Third SIP should write only the extra 'test3.txt' file
    assert set(fs3.listdir('data')) == set(['metadata', 'files',
                                            'filenames.txt'])
    assert fs3.listdir('data/metadata') == ['record-json.json', ]
    assert fs3.listdir('data/files') == [s3_file3_fn, ]
    with fs3.open('fetch.txt') as fp:
        cnt = fp.read().splitlines()
    # Since 'file.txt' was removed in third SIP, we should only fetch the
    # 'test2.txt', also from the first archive, since that's where this
    # file resides physically.
    base_uri = archiver1.get_archive_base_uri()
    assert set(cnt) == set([
        '{base}/2/r/{s1ts}/{fn} 8 {fn}'.format(fn=s3_file2_fp, base=base_uri,
                                               s1ts=sip1_ts),
    ])
    # Restore the stashed configuration.
    app.config['SIPSTORE_ARCHIVER_WRITING_ENABLED'] = orig
def test_bag_path_generator():
    """Test the bag path generator function."""
    # (recid, sip-part) arguments mapped to the expected path segments.
    cases = [
        (('1', '1'), ['1', 'r', '1']),
        (('100', '1'), ['100', 'r', '1']),
        (('1000', '1'), ['100', '0', 'r', '1']),
        (('1234567890', '9999'), ['123', '456', '789', '0', 'r', '9999']),
    ]
    for args, expected in cases:
        assert generate_bag_path(*args) == expected
| 9,943 | Python | .py | 195 | 43.430769 | 79 | 0.645121 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,367 | test_tokens_api.py | zenodo_zenodo/tests/unit/tokens/test_tokens_api.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2020 CERN.
#
# Zenodo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Unit tests for resource access tokens API."""
from datetime import datetime, timedelta
import jwt
import pytest
from zenodo.modules.tokens.api import decode_rat
from zenodo.modules.tokens.errors import ExpiredTokenError, \
InvalidTokenError, InvalidTokenIDError, MissingTokenIDError
def _rat_gen(token, payload=None, headers=None):
    """Create a resource access token (JWT) signed with *token*'s secret.

    Defaults to a fresh ``iat`` claim and a ``kid`` header carrying the
    token's id; both can be overridden for negative test cases.
    """
    if payload is None:
        payload = {'iat': datetime.utcnow()}
    if headers is None:
        headers = {'kid': str(token.id)}
    return jwt.encode(
        payload=payload, key=token.access_token,
        algorithm='HS256', headers=headers)
def test_decoding(app, write_token, rat_generate_token):
    """Test decoding a resource access token."""
    write_token = write_token['token']
    # Missing 'kid' JWT header.
    with pytest.raises(MissingTokenIDError):
        decode_rat(_rat_gen(rat_generate_token, headers={}))
    # 'kid' header is not a valid token id.
    with pytest.raises(InvalidTokenIDError):
        decode_rat(_rat_gen(rat_generate_token, headers={'kid': 'invalid'}))
    # 'kid' references an unknown token id.
    with pytest.raises(InvalidTokenError):
        decode_rat(_rat_gen(rat_generate_token, headers={'kid': '99999'}))
    # A plain write token is not accepted as a RAT signer.
    with pytest.raises(InvalidTokenError):
        decode_rat(_rat_gen(write_token))
    # Not a JWT at all.
    with pytest.raises(InvalidTokenError):
        decode_rat('not_a.valid_jwt')
    with pytest.raises(ExpiredTokenError):
        decode_rat(_rat_gen(
            rat_generate_token,
            # generate token issued an hour ago
            payload={'iat': datetime.utcnow() - timedelta(hours=1)}
        ))
| 2,419 | Python | .py | 56 | 38.803571 | 77 | 0.725415 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,368 | test_sitemap_views.py | zenodo_zenodo/tests/unit/sitemap/test_sitemap_views.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2018 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test Sitemap views."""
from __future__ import absolute_import, print_function
from flask import current_app, render_template, url_for
from zenodo.modules.sitemap.tasks import update_sitemap_cache
def test_sitemap_templates(app):
    """Test Sitemap views."""
    # Minimal URL entry: only <loc> is required by the template.
    rendered = render_template(
        'zenodo_sitemap/sitemap.xml',
        urlset=[{'loc': 'https://zenodo.org/record/1'}])
    assert '<loc>https://zenodo.org/record/1</loc>' in rendered
    # Full URL entry: all optional sitemap fields are emitted as well.
    rendered = render_template(
        'zenodo_sitemap/sitemap.xml',
        urlset=[{
            'loc': 'https://zenodo.org/record/1',
            'lastmod': '2018-01-01',
            'changefreq': '1',
            'priority': '10',
        }])
    assert '<loc>https://zenodo.org/record/1</loc>' in rendered
    assert '<lastmod>2018-01-01</lastmod>' in rendered
    assert '<changefreq>1</changefreq>' in rendered
    assert '<priority>10</priority>' in rendered
    # The index template lists the three sitemap pages...
    sitemapindex = [
        {'loc': 'https://zenodo.org/sitemap{0}.xml'.format(idx)}
        for idx in (1, 2, 3)
    ]
    rendered = render_template('zenodo_sitemap/sitemapindex.xml',
                               urlset=sitemapindex, url_scheme='https')
    for idx in (1, 2, 3):
        assert ('<loc>https://zenodo.org/sitemap{0}.xml</loc>'.format(idx)
                in rendered)
    # ...and nothing beyond them (sanity checks).
    assert '<loc>https://zenodo.org/sitemap0.xml</loc>' not in rendered
    assert '<loc>https://zenodo.org/sitemap4.xml</loc>' not in rendered
    assert '<loc>https://zenodo.org/sitemap.xml</loc>' not in rendered
def test_sitemap_views(app, record_with_bucket, communities):
    """Test the sitemap views."""
    with app.test_request_context():
        with app.test_client() as client:
            index_url = url_for('zenodo_sitemap.sitemapindex')
            page_url = url_for('zenodo_sitemap.sitemappage', page=1)

            def check(url, status, content_type):
                # Assert both the status code and the Content-Type header.
                res = client.get(url)
                assert res.status_code == status
                assert res.content_type == content_type

            # Before the cache is generated both endpoints return 404.
            check(index_url, 404, 'text/html; charset=utf-8')
            check(page_url, 404, 'text/html; charset=utf-8')
            # Once the cache is built, both endpoints serve XML.
            update_sitemap_cache()
            check(index_url, 200, 'text/xml; charset=utf-8')
            check(page_url, 200, 'text/xml; charset=utf-8')
            # Clear the cache to clean up after test
            current_app.extensions['zenodo-sitemap'].clear_cache()
| 3,850 | Python | .py | 83 | 39.831325 | 78 | 0.662936 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,369 | test_sitemap_generator.py | zenodo_zenodo/tests/unit/sitemap/test_sitemap_generator.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2018 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test API for Zenodo and GitHub integration."""
from __future__ import absolute_import, print_function
import datetime
import re
from flask import current_app, render_template
from zenodo.modules.sitemap.generators import _sitemapdtformat
from zenodo.modules.sitemap.tasks import update_sitemap_cache
def test_sitemap_cache_update_simple(mocker, app):
    """Test Sitemap cache updating with fixed parameters."""
    urls = [{'loc': 'https://localhost/record/{0}'.format(i)}
            for i in range(5)]
    cache_mock = mocker.patch('zenodo.modules.sitemap.ext.current_cache')
    update_sitemap_cache(urls=urls, max_url_count=2)
    # Five URLs at two per page yield three sitemap pages (keys 1..3).
    pages = [urls[0:2], urls[2:4], urls[4:5]]
    for page_no, urlset in enumerate(pages, 1):
        expected = render_template('zenodo_sitemap/sitemap.xml',
                                   urlset=urlset)
        cache_mock.set.assert_any_call(
            'sitemap:{0}'.format(page_no), expected, timeout=-1)
    # The index (cache key 'sitemap:0') links the three generated pages.
    sitemapindex = [{'loc': 'https://localhost/sitemap{0}.xml'.format(i)}
                    for i in range(1, 4)]
    expected = render_template('zenodo_sitemap/sitemapindex.xml',
                               urlset=sitemapindex, url_scheme='https')
    cache_mock.set.assert_any_call('sitemap:0', expected, timeout=-1)
def test_sitemap_generators(app, record_with_bucket, communities):
    """Test Sitemap generators.

    Verifies that every generated URL carries a properly formatted UTC
    ``lastmod`` timestamp and that the set of locations matches the
    record and community fixtures exactly.
    """
    with app.test_request_context():
        sitemap = current_app.extensions['zenodo-sitemap']
        urls = list(sitemap._generate_all_urls())
        # Make sure the last modified are there and it's in proper UTC sitemap
        # format, but remove from the result for easier comparison of URL sets
        # make sure it's in the format 'YYYY-MM-DDTHH-MM-SST'
        # FIX: raw string literal — '\d' in a non-raw string is an invalid
        # escape sequence (SyntaxWarning/DeprecationWarning on Python 3.6+).
        sitemap_dt_re = re.compile(r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z$')
        assert all('lastmod' in url and sitemap_dt_re.match(url['lastmod'])
                   for url in urls)
        for url in urls:
            del url['lastmod']
        expected = [
            {'loc': 'https://localhost/record/12345'},
            {'loc': 'https://localhost/communities/c1/'},
            {'loc': 'https://localhost/communities/c1/search'},
            {'loc': 'https://localhost/communities/c1/about/'},
            {'loc': 'https://localhost/communities/c2/'},
            {'loc': 'https://localhost/communities/c2/search'},
            {'loc': 'https://localhost/communities/c2/about/'},
            {'loc': 'https://localhost/communities/c3/'},
            {'loc': 'https://localhost/communities/c3/search'},
            {'loc': 'https://localhost/communities/c3/about/'},
            {'loc': 'https://localhost/communities/c4/'},
            {'loc': 'https://localhost/communities/c4/search'},
            {'loc': 'https://localhost/communities/c4/about/'},
            {'loc': 'https://localhost/communities/c5/'},
            {'loc': 'https://localhost/communities/c5/search'},
            {'loc': 'https://localhost/communities/c5/about/'},
            {'loc': 'https://localhost/communities/zenodo/'},
            {'loc': 'https://localhost/communities/zenodo/search'},
            {'loc': 'https://localhost/communities/zenodo/about/'},
            {'loc': 'https://localhost/communities/ecfunded/'},
            {'loc': 'https://localhost/communities/ecfunded/search'},
            {'loc': 'https://localhost/communities/ecfunded/about/'},
            {'loc': 'https://localhost/communities/grants_comm/'},
            {'loc': 'https://localhost/communities/grants_comm/search'},
            {'loc': 'https://localhost/communities/grants_comm/about/'},
        ]
        assert urls == expected
def test_sitemap_date_generator():
    """Test the sitemap timestamp generation."""
    # Each naive datetime must serialize to 'YYYY-MM-DDTHH:MM:SSZ'.
    cases = [
        (datetime.datetime(2018, 1, 2, 3, 4, 5), '2018-01-02T03:04:05Z'),
        (datetime.datetime(2018, 11, 12, 13, 14, 15),
         '2018-11-12T13:14:15Z'),
    ]
    for dt, expected in cases:
        assert _sitemapdtformat(dt) == expected
| 5,161 | Python | .py | 97 | 45.484536 | 78 | 0.6489 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,370 | test_templates.py | zenodo_zenodo/tests/unit/theme/test_templates.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo template tests."""
from __future__ import absolute_import, print_function
import pytest
from helpers import login_user_via_session
def test_templates():
    """Placeholder for theme template tests (intentionally a no-op)."""
    # No template-specific assertions yet; importing the module already
    # exercises the module-level imports above.
@pytest.mark.parametrize('user_email,requests_num,status_code', [
    # anonymous user
    (None, 2, 429),
    # user
    ('test@zenodo.org', 4, 429),
])
def test_429_template(
        use_flask_limiter, app, app_client, db, users, user_email,
        requests_num, status_code, es):
    """Test flask limiter behaviour."""
    if user_email:
        # Authenticated users get a higher quota than anonymous ones.
        login_user_via_session(app_client, email=user_email)
    # Exhaust the allowed quota on the rate-limited search endpoint...
    for _ in range(requests_num):
        assert app_client.get('/search').status_code == 200
    # ...so the next request must be rejected with the parametrized status.
    res = app_client.get('/search')
    assert res.status_code == status_code
    # The front page is not subject to the same rate limit.
    res = app_client.get('/')
    assert res.status_code == 200
    if not user_email:
        # Anonymous users can still reach the login page.
        res = app_client.get('/login')
        assert res.status_code == 200
| 1,997 | Python | .py | 53 | 34.283019 | 76 | 0.718055 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,371 | test_frontpage_views.py | zenodo_zenodo/tests/unit/frontpage/test_frontpage_views.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo frontpage view."""
from __future__ import absolute_import, print_function
def test_frontpage(app, db, es):
    """The front page renders successfully and shows recent uploads."""
    with app.test_client() as client:
        response = client.get("/")
        assert response.status_code == 200
        body = response.get_data(as_text=True)
        assert 'Recent uploads' in body
def test_ping(app, db):
    """The health-check endpoint answers with a plain OK."""
    with app.test_client() as client:
        response = client.get("/ping")
        assert response.status_code == 200
        assert response.get_data(as_text=True) == "OK"
| 1,494 | Python | .py | 37 | 37.486486 | 76 | 0.722261 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,372 | test_schemas_schemaorg_jsonld.py | zenodo_zenodo/tests/unit/records/test_schemas_schemaorg_jsonld.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2017 CERN.
#
# Zenodo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Unit tests Zenodo JSON-LD serializer."""
from __future__ import absolute_import, print_function
from datetime import datetime
from flask_security import login_user
from zenodo.modules.records.serializers import schemaorg_jsonld_v1
from zenodo.modules.records.serializers.schemaorg import \
ZenodoSchemaOrgSerializer
from zenodo.modules.records.serializers.schemas import schemaorg
# Mapping of (Zenodo resource type, subtype) -> expected schema.org
# Marshmallow schema class.  A ``None`` subtype means the type alone
# determines the schema class.
SCHEMA_ORG_TYPES = [
    ('poster', None, schemaorg.CreativeWork),
    ('presentation', None, schemaorg.PresentationDigitalDocument),
    ('dataset', None, schemaorg.Dataset),
    ('image', 'figure', schemaorg.ImageObject),
    ('image', 'plot', schemaorg.ImageObject),
    ('image', 'drawing', schemaorg.ImageObject),
    ('image', 'diagram', schemaorg.ImageObject),
    ('image', 'photo', schemaorg.Photograph),
    ('image', 'other', schemaorg.ImageObject),
    ('video', None, schemaorg.MediaObject),
    ('software', None, schemaorg.SoftwareSourceCode),
    ('lesson', None, schemaorg.CreativeWork),
    ('workflow', None, schemaorg.CreativeWork),
    ('physicalobject', None, schemaorg.CreativeWork),
    ('other', None, schemaorg.CreativeWork),
    ('publication', 'book', schemaorg.Book),
    ('publication', 'section', schemaorg.ScholarlyArticle),
    ('publication', 'conferencepaper', schemaorg.ScholarlyArticle),
    ('publication', 'article', schemaorg.ScholarlyArticle),
    ('publication', 'patent', schemaorg.CreativeWork),
    ('publication', 'preprint', schemaorg.ScholarlyArticle),
    ('publication', 'report', schemaorg.ScholarlyArticle),
    ('publication', 'softwaredocumentation', schemaorg.CreativeWork),
    ('publication', 'thesis', schemaorg.ScholarlyArticle),
    ('publication', 'technicalnote', schemaorg.ScholarlyArticle),
    ('publication', 'workingpaper', schemaorg.ScholarlyArticle),
    ('publication', 'proposal', schemaorg.CreativeWork),
    ('publication', 'deliverable', schemaorg.CreativeWork),
    ('publication', 'milestone', schemaorg.CreativeWork),
    ('publication', 'other', schemaorg.CreativeWork),
]
def test_serializer(minimal_record_model, recid_pid):
    """Smoke-test the schema.org JSON-LD serializer.

    Detailed field mappings are covered by the schema-specific tests
    below; this only checks serialization succeeds end to end.
    """
    out = schemaorg_jsonld_v1.serialize(recid_pid, minimal_record_model)
    # FIX: the original test ignored the result entirely; at minimum the
    # serialized payload must be non-empty.
    assert out
def test_schema_class_resolver():
    """Test the Marshmallow schema class based on different resource types."""
    for rtype, rsubtype, expected_cls in SCHEMA_ORG_TYPES:
        resource_type = {'type': rtype}
        if rsubtype:
            resource_type['subtype'] = rsubtype
        record = {'metadata': {'resource_type': resource_type}}
        assert ZenodoSchemaOrgSerializer._get_schema_class(record) == \
            expected_cls
def test_person():
    """Test the schema.org Person schema."""
    def dump(person):
        # Dump must never produce Marshmallow errors for these inputs.
        data, err = schemaorg.Person().dump(person)
        assert not err
        return data

    person = {'name': 'Doe, John'}
    assert dump(person) == {'name': 'Doe, John', '@type': 'Person'}

    person['affiliation'] = 'CERN'
    assert dump(person) == {'name': 'Doe, John',
                            'affiliation': 'CERN',
                            '@type': 'Person'}

    # Add GND - it should become the identifier of the person
    person['gnd'] = '170118215'
    assert dump(person) == {'name': 'Doe, John',
                            'affiliation': 'CERN',
                            '@type': 'Person',
                            '@id': 'https://d-nb.info/gnd/170118215'}

    # Add ORCID - it should supersede GND as the identifier
    person['orcid'] = '0000-0002-1825-0097'
    with_orcid = {'name': 'Doe, John',
                  'affiliation': 'CERN',
                  '@type': 'Person',
                  '@id': 'https://orcid.org/0000-0002-1825-0097'}
    assert dump(person) == with_orcid

    # Remove GND - ORCID remains the identifier
    del person['gnd']
    assert dump(person) == with_orcid
def test_language():
    """Test the schema.org Language schema."""
    # An ISO-639-3 code is expanded with its English display name.
    data, err = schemaorg.Language().dump('pol')
    assert not err
    expected = {'alternateName': 'pol',
                '@type': 'Language',
                'name': 'Polish'}
    assert data == expected
def test_minimal_software_record(minimal_record_model):
    """Test the minimal record model dumping."""
    # A GitHub "isSupplementTo" URL is expected to map to the schema.org
    # ``codeRepository`` field (asserted below).
    minimal_record_model['related_identifiers'] = [
        {
            "identifier": "https://github.com/orgname/reponame/tree/v0.1.0",
            "relation": "isSupplementTo",
            "scheme": "url"
        }
    ]
    data, err = schemaorg.SoftwareSourceCode().dump(
        dict(metadata=minimal_record_model))
    assert not err
    expected = {
        u'@context': u'https://schema.org/',
        u'@id': 'https://doi.org/10.5072/zenodo.123',
        u'@type': u'SoftwareSourceCode',
        u'identifier': 'https://doi.org/10.5072/zenodo.123',
        u'url': 'http://localhost/record/123',
        u'description': u'My description',
        u'codeRepository': 'https://github.com/orgname/reponame/tree/v0.1.0',
        u'creator': [
            {
                u'@type': u'Person',
                u'name': u'Test'
            }
        ],
        # The minimal fixture carries no explicit publication date, so the
        # serializer is expected to default it to today's (UTC) date.
        u'datePublished': datetime.utcnow().date().isoformat(),
        u'name': u'Test'}
    assert data == expected
def test_full_record(record_with_files_creation):
    """Test the full record model dumping."""
    recid, record, _ = record_with_files_creation
    # full_record fixture is a "book"
    schema_cls = ZenodoSchemaOrgSerializer._get_schema_class(
        dict(metadata=record))
    assert schema_cls == schemaorg.Book
    # NOTE(review): dumped via ScholarlyArticle, yet the expected '@type'
    # below is 'Book' — presumably '@type' is derived from the record's
    # resource type rather than the schema class used; confirm.
    data, err = schemaorg.ScholarlyArticle().dump(dict(metadata=record))
    assert not err
    expected = {
        u'@context': u'https://schema.org/',
        u'@id': 'https://doi.org/10.1234/foo.bar',
        u'@type': u'Book',
        u'identifier': 'https://doi.org/10.1234/foo.bar',
        u'about': [
            {
                u'@id': u'http://id.loc.gov/authorities/subjects/sh85009003',
                u'@type': u'CreativeWork'
            }
        ],
        # 'cites'/'references'-style related identifiers end up in citation.
        u'citation': [
            {
                u'@id': 'https://doi.org/10.1234/foo.bar',
                u'@type': u'Dataset'
            },
            {
                u'@id': 'https://arxiv.org/abs/arXiv:1234.4321',
                u'@type': u'Dataset'
            },
            {
                '@id': 'https://arxiv.org/abs/arXiv:1234.4328',
                '@type': 'Dataset'
            }
        ],
        u'contributor': [
            {
                u'@id': 'https://orcid.org/0000-0002-1825-0097',
                u'@type': u'Person',
                u'affiliation': u'CERN',
                u'name': u'Smith, Other'
            },
            {
                u'@type': u'Person',
                u'affiliation': u'',
                u'name': u'Hansen, Viggo'
            },
            {
                u'@type': u'Person',
                u'affiliation': u'CERN',
                u'name': u'Kowalski, Manager'
            }
        ],
        # Creator '@id' comes from ORCID when present, else GND, else none.
        u'creator': [
            {
                u'@id': 'https://orcid.org/0000-0002-1694-233X',
                u'@type': u'Person',
                u'affiliation': u'CERN',
                u'name': u'Doe, John'
            },
            {
                u'@id': 'https://orcid.org/0000-0002-1825-0097',
                u'@type': u'Person',
                u'affiliation': u'CERN',
                u'name': u'Doe, Jane'
            },
            {
                u'@type': u'Person',
                u'affiliation': u'CERN',
                u'name': u'Smith, John'
            },
            {
                u'@id': 'https://d-nb.info/gnd/170118215',
                u'@type': u'Person',
                u'affiliation': u'CERN',
                u'name': u'Nowak, Jack'
            }
        ],
        u'datePublished': '2014-02-27',
        u'description': u'Test Description',
        u'headline': u'Test title',
        u'image':
            u'https://zenodo.org/static/img/logos/zenodo-gradient-round.svg',
        u'inLanguage': {
            u'@type': u'Language',
            u'alternateName': u'eng',
            u'name': u'English'
        },
        # Alternate identifiers are exposed as sameAs links.
        u'sameAs': [
            u'https://arxiv.org/abs/arXiv:1234.4325',
            u'https://ui.adsabs.harvard.edu/#abs/2011ApJS..192...18K',
            u'https://doi.org/10.1234/alternate.doi',
        ],
        u'isPartOf': [
            {
                u'@id': 'https://doi.org/10.1234/zenodo.4321',
                u'@type': u'SoftwareSourceCode'
            }
        ],
        u'hasPart': [
            {
                u'@id': 'https://doi.org/10.1234/zenodo.1234',
                u'@type': u'ScholarlyArticle'
            }
        ],
        u'keywords': [u'kw1', u'kw2', u'kw3'],
        u'license': u'https://creativecommons.org/licenses/by/4.0/',
        u'name': u'Test title',
        u'url': u'http://localhost/record/12345',
        u'version': u'1.2.5',
        # Open-ended ranges use the '..' convention (EDTF-style intervals).
        u'temporal': [
            '2019-01-01/..',
            '../2019-01-01',
            '2019-01-01',
            '2019-01-01/2019-02-01',
        ],
        u'spatial': [{
            u'@type': u'Place',
            u'geo': {
                u'@type': u'GeoCoordinates',
                u'latitude': 2.35,
                u'longitude': 1.534
            },
            u'name': u'my place'
        }, {
            '@type': 'Place', 'name': 'New York'
        }],
        u'workFeatured': {
            u'@type': u'Event',
            u'name': u'The 13th Biennial HITRAN Conference',
            u'alternateName': u'HITRAN13',
            u'location': u'Harvard-Smithsonian Center for Astrophysics',
            u'url': u'http://hitran.org/conferences/hitran-13-2014/',
        },
    }
    assert data == expected
def test_dataset(app, users, minimal_record_model, recid_pid):
    """Testing the dumping of files in Open Access datasets."""
    with app.test_request_context():
        # Logged-in user context; presumably required for building the file
        # download links below — TODO confirm.
        datastore = app.extensions['security'].datastore
        login_user(datastore.get_user(users[0]['email']))
        assert minimal_record_model['access_right'] == 'open'
        minimal_record_model['resource_type'] = dict(type='dataset')
        minimal_record_model['method'] = 'microscopic supersampling'
        minimal_record_model['_files'] = [
            {
                'bucket': '22222222-2222-2222-2222-222222222222',
                'version_id': '11111111-1111-1111-1111-111111111111',
                'file_id': '22222222-3333-4444-5555-666666666666',
                'checksum': 'md5:11111111111111111111111111111111',
                'key': 'test',
                'size': 4,
                'type': 'txt',
            },
            {
                'bucket': '22222222-2222-2222-2222-222222222222',
                'version_id': '11111111-1111-1111-1111-111111111112',
                'file_id': '22222222-3333-4444-5555-666666666667',
                'checksum': 'md5:11111111111111111111111111111112',
                'key': 'test2',
                'size': 1000000,
                'type': 'pdf',
            },
        ]
        data, err = schemaorg.Dataset().dump(
            dict(metadata=minimal_record_model))
        assert not err
        # Every attached file becomes a DataDownload entry.
        assert data['distribution'] == [
            {
                u'@type': u'DataDownload',
                u'contentUrl': u'https://localhost/api/files/'
                               u'22222222-2222-2222-2222-222222222222/test',
                u'encodingFormat': u'txt'
            },
            {
                u'@type': u'DataDownload',
                u'contentUrl': u'https://localhost/api/files/'
                               u'22222222-2222-2222-2222-222222222222/test2',
                u'encodingFormat': u'pdf'
            }
        ]
        assert data['measurementTechnique'] == 'microscopic supersampling'
        # Non-open access rights must suppress the file distribution list.
        for right in ['closed', 'embargoed', 'restricted']:
            minimal_record_model['access_right'] = right
            data, err = schemaorg.Dataset().dump(
                dict(metadata=minimal_record_model))
            assert not err
            assert 'distribution' not in data
def test_image(app, users, minimal_record_model, recid_pid):
    """Testing the dumping of thumbnails in Image objects."""
    with app.test_request_context():
        datastore = app.extensions['security'].datastore
        login_user(datastore.get_user(users[0]['email']))
        assert minimal_record_model['access_right'] == 'open'
        minimal_record_model['resource_type'] = dict(
            type='image', subtype='figure')
        # Two files: a txt and a png — the png is expected to be picked as
        # the image content/thumbnail below.
        minimal_record_model['_files'] = [
            {
                'bucket': '22222222-2222-2222-2222-222222222222',
                'version_id': '11111111-1111-1111-1111-111111111111',
                'file_id': '22222222-3333-4444-5555-666666666666',
                'checksum': 'md5:11111111111111111111111111111111',
                'key': 'test',
                'size': 4,
                'type': 'txt',
            },
            {
                'bucket': '22222222-2222-2222-2222-222222222222',
                'version_id': '11111111-1111-1111-1111-111111111112',
                'file_id': '22222222-3333-4444-5555-666666666667',
                'checksum': 'md5:11111111111111111111111111111112',
                'key': 'figure.png',
                'size': 1000000,
                'type': 'png',
            },
        ]
        data, err = schemaorg.ImageObject().dump(
            dict(metadata=minimal_record_model))
        assert not err
        assert data['contentUrl'] == (
            u'http://localhost/api/files'
            u'/22222222-2222-2222-2222-222222222222/figure.png'
        )
        # Thumbnail is served through the IIIF endpoint
        # (bucket:version:key, 250px wide).
        assert data['thumbnailUrl'] == (
            u'http://localhost/api/iiif/v2'
            u'/22222222-2222-2222-2222-222222222222'
            u':11111111-1111-1111-1111-111111111112:figure.png'
            u'/full/250,/0/default.png'
        )
| 15,128 | Python | .py | 373 | 30.179625 | 78 | 0.559185 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,373 | test_schemas.py | zenodo_zenodo/tests/unit/records/test_schemas.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo template tests."""
from __future__ import absolute_import, print_function
from datetime import datetime
import idutils
import pytest
from invenio_records.api import Record
from jsonschema.exceptions import ValidationError
def test_minimal_json(app, db, minimal_record):
    """Test minimal json."""
    # Record.create() runs JSONSchema validation; not raising means the
    # minimal fixture is schema-valid.
    Record.create(minimal_record)
def test_recid(app, minimal_record):
    """Test recid property."""
    # The schema requires ``recid`` to be a number; a string must fail.
    minimal_record['recid'] = '123'
    with pytest.raises(ValidationError):
        Record.create(minimal_record)
@pytest.mark.parametrize(('val', 'passing'), [
    ('publication', False),
    ({'type': 'publication', 'subtype': 'x'}, True),
    ({'type': 'publication', 'openaire_subtype': 'foo:t1'}, True),
    ({'type': 'publication', 'subtype': 'book',
      'openaire_subtype': 'foo:t1'}, True),
])
def test_resource_type(app, db, minimal_record, val, passing):
    """Test resource type."""
    minimal_record['resource_type'] = val
    if not passing:
        # Invalid resource types are rejected by JSONSchema validation.
        with pytest.raises(ValidationError):
            Record.create(minimal_record)
    else:
        Record.create(minimal_record)
def test_publication_date(app, db, minimal_record):
    """Test publication date."""
    # Today's date in ISO format must validate.
    today = datetime.utcnow().date()
    minimal_record['publication_date'] = today.isoformat()
    Record.create(minimal_record)
def test_contributors(app, db, minimal_record):
    """Test contributors."""
    def set_contributor(ctype):
        minimal_record['contributors'] = [
            {'name': 'test', 'affiliation': 'test', 'type': ctype}
        ]

    # A known contributor type validates.
    set_contributor('ContactPerson')
    Record.create(minimal_record)
    # An unknown contributor type is rejected by the schema.
    set_contributor('Invalid')
    with pytest.raises(ValidationError):
        Record.create(minimal_record)
def test_identifier_schemes(app, db, minimal_record):
    """Test supported identifier schemes."""
    minimal_record['related_identifiers'] = [
        {'scheme': scheme, 'relation': 'references', 'identifier': 'foobar'}
        for scheme, _ in idutils.PID_SCHEMES
    ]
    # JSONSchema validation should allow every scheme idutils supports.
    Record.create(minimal_record)
| 3,115 | Python | .py | 75 | 38 | 77 | 0.715608 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,374 | test_schemas_legacyjson.py | zenodo_zenodo/tests/unit/records/test_schemas_legacyjson.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Unit tests Zenodo serializer legacy."""
from __future__ import absolute_import, print_function
from datetime import timedelta
import arrow
def test_id(minimal_record_model, depid_pid, legacyjson_v1):
    """The legacy ``id`` equals the numeric deposit PID value."""
    result = legacyjson_v1.transform_record(depid_pid, minimal_record_model)
    assert result['id'] == int(depid_pid.pid_value)
def test_created_modified(minimal_record_model, depid_pid, legacyjson_v1):
    """Both timestamps are present and never in the future."""
    result = legacyjson_v1.transform_record(depid_pid, minimal_record_model)
    for field in ('created', 'modified'):
        assert arrow.get(result[field]) <= arrow.utcnow()
def test_doi(minimal_record_model, depid_pid, legacyjson_v1):
    """The DOI and its resolver URL are both exposed."""
    minimal_record_model['doi'] = '10.1234/foo'
    result = legacyjson_v1.transform_record(depid_pid, minimal_record_model)
    assert result['doi'] == '10.1234/foo'
    assert result['doi_url'] == 'https://doi.org/10.1234/foo'
def test_owners(minimal_record_model, depid_pid, legacyjson_v1):
    """The first listed owner becomes the legacy ``owner``."""
    minimal_record_model['owners'] = [1, 2, 3]
    result = legacyjson_v1.transform_record(depid_pid, minimal_record_model)
    assert result['owner'] == 1
def test_owners_deposit(minimal_record_model, depid_pid, legacyjson_v1):
    """Deposit owners take precedence for the legacy ``owner`` field."""
    minimal_record_model['_deposit'] = {'owners': [3, 2, 1]}
    result = legacyjson_v1.transform_record(depid_pid, minimal_record_model)
    assert result['owner'] == 3
def test_recid(minimal_record_model, depid_pid, legacyjson_v1):
    """Test recid."""
    # TODO: Record URL.
    transform = legacyjson_v1.transform_record
    result = transform(depid_pid, minimal_record_model)
    assert result['record_id'] == 123
    # Without a recid the legacy output must omit record_id entirely.
    del minimal_record_model['recid']
    result = transform(depid_pid, minimal_record_model)
    assert 'record_id' not in result
def test_title(minimal_record_model, depid_pid, legacyjson_v1):
    """The title appears both top-level and inside metadata."""
    minimal_record_model['title'] = 'TEST'
    result = legacyjson_v1.transform_record(depid_pid, minimal_record_model)
    assert result['title'] == 'TEST'
    assert result['metadata']['title'] == 'TEST'
def test_upload_type(minimal_record_model, depid_pid, legacyjson_v1):
    """Test upload/publication/image type."""
    def dump_metadata():
        return legacyjson_v1.transform_record(
            depid_pid, minimal_record_model)['metadata']

    # The default fixture is software: no sub-type keys at all.
    meta = dump_metadata()
    assert meta['upload_type'] == 'software'
    assert 'publication_type' not in meta
    assert 'image_type' not in meta
    # Publications expose publication_type only.
    minimal_record_model['resource_type'] = dict(
        type='publication', subtype='preprint')
    meta = dump_metadata()
    assert meta['upload_type'] == 'publication'
    assert meta['publication_type'] == 'preprint'
    assert 'image_type' not in meta
    # Images expose image_type only.
    minimal_record_model['resource_type'] = dict(
        type='image', subtype='photo')
    meta = dump_metadata()
    assert meta['upload_type'] == 'image'
    assert meta['image_type'] == 'photo'
    assert 'publication_type' not in meta
def test_publication_date(minimal_record_model, depid_pid, legacyjson_v1):
    """Test publication date."""
    # Both date fields round-trip and never end up in the future.
    for field in ('publication_date', 'embargo_date'):
        minimal_record_model[field] = \
            arrow.utcnow().date() - timedelta(days=1)
        meta = legacyjson_v1.transform_record(
            depid_pid, minimal_record_model)['metadata']
        assert arrow.get(meta[field]).date() <= arrow.utcnow().date()
def test_creators(minimal_record_model, depid_pid, legacyjson_v1):
    """Test creators."""
    people = [
        {'name': 'Doe, John', 'affiliation': '', 'orcid': '',
         'familyname': 'Doe', 'givennames': 'John'},
        {'name': 'Smith, John', 'affiliation': 'CERN', 'orcid': '1234',
         'familyname': 'Smith', 'givennames': 'John', 'gnd': '4321'},
    ]
    minimal_record_model['creators'] = people
    minimal_record_model['thesis'] = {'supervisors': people}
    meta = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)['metadata']
    # Empty values and the split name parts are stripped from the output.
    assert meta['creators'] == [
        {'name': 'Doe, John'},
        {'name': 'Smith, John', 'affiliation': 'CERN', 'orcid': '1234',
         'gnd': '4321', },
    ]
    assert meta['thesis_supervisors'] == meta['creators']
def test_contributors(minimal_record_model, depid_pid, legacyjson_v1):
    """Test contributors."""
    minimal_record_model['contributors'] = [
        {'name': 'Doe, John', 'affiliation': '', 'orcid': '',
         'familyname': 'Doe', 'givennames': 'John', 'type': 'DataCurator'},
        {'name': 'Smith, John', 'affiliation': 'CERN', 'orcid': '1234',
         'familyname': 'Smith', 'givennames': 'John', 'gnd': '4321',
         'type': 'Other'},
    ]
    meta = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)['metadata']
    # Empty values and split name parts are stripped; type is preserved.
    assert meta['contributors'] == [
        {'name': 'Doe, John', 'type': 'DataCurator'},
        {'name': 'Smith, John', 'affiliation': 'CERN', 'orcid': '1234',
         'gnd': '4321', 'type': 'Other'},
    ]
def test_direct_mappings(minimal_record_model, depid_pid, legacyjson_v1):
    """Test direct mappings."""
    # Each of these fields is copied through to the legacy metadata as-is.
    for field in ('title', 'description', 'notes', 'access_right',
                  'access_conditions'):
        minimal_record_model[field] = 'TEST'
        meta = legacyjson_v1.transform_record(
            depid_pid, minimal_record_model)['metadata']
        assert meta[field] == 'TEST'
def test_thesis_university(minimal_record_model, depid_pid, legacyjson_v1):
    """The nested thesis university is flattened to thesis_university."""
    minimal_record_model['thesis'] = {'university': 'TEST'}
    meta = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)['metadata']
    assert meta['thesis_university'] == 'TEST'
def test_prereserve(minimal_record_model, depid_pid, legacyjson_v1):
    """Test prereserve DOI."""
    minimal_record_model['_deposit_actions'] = {'prereserve_doi': True}
    meta = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)['metadata']
    # The pre-reserved DOI is derived from the recid.
    assert meta['prereserve_doi'] == {
        'recid': 123,
        'doi': '10.5072/zenodo.123',
    }
def test_keywords(minimal_record_model, depid_pid, legacyjson_v1):
    """Keywords pass through unchanged."""
    minimal_record_model['keywords'] = ['kw1', 'kw2']
    meta = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)['metadata']
    assert meta['keywords'] == ['kw1', 'kw2']
def test_references(minimal_record_model, depid_pid, legacyjson_v1):
    """Test references."""
    minimal_record_model['references'] = [
        {'raw_reference': 'ref1'},
        {'raw_reference': 'ref2'},
    ]
    meta = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)['metadata']
    # The legacy format flattens references to their raw strings.
    assert meta['references'] == ['ref1', 'ref2']
def test_communities(minimal_record_model, depid_pid, legacyjson_v1):
    """Test communities."""
    minimal_record_model['communities'] = ['zenodo']
    meta = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)['metadata']
    # Plain identifiers are wrapped into objects in the legacy format.
    assert meta['communities'] == [{'identifier': 'zenodo'}]
def test_journal(minimal_record_model, depid_pid, legacyjson_v1):
    """Test journal."""
    minimal_record_model['journal'] = {
        'title': 'Mathematical Combinations',
        'volume': '1',
        'issue': 'V',
        'pages': '141',
        'year': '2000',
    }
    meta = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)['metadata']
    # Title/volume/issue/pages are flattened into journal_* fields.
    for key in ('journal_title', 'journal_volume', 'journal_issue',
                'journal_pages'):
        assert meta[key]
    # The journal year is dropped from the legacy output.
    assert 'journal_year' not in meta
def test_conference(minimal_record_model, depid_pid, legacyjson_v1):
    """Test conferences."""
    minimal_record_model['meeting'] = {
        'title': '20th International Conference on Computing in High Energy '
                 'and Nuclear Physics',
        'acronym': 'CHEP\'13',
        'dates': '14-18 October 2013',
        'place': 'Amsterdam, The Netherlands',
        'url': 'http://www.chep2013.org/',
        'session': 'VI',
        'session_part': '1'
    }
    meta = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)['metadata']
    # All meeting fields are flattened into conference_* keys.
    for key in ('conference_title', 'conference_acronym', 'conference_dates',
                'conference_place', 'conference_url', 'conference_session',
                'conference_session_part'):
        assert meta[key]
def test_related_identifiers(minimal_record_model, depid_pid, legacyjson_v1):
    """Test related identifiers."""
    minimal_record_model['related_identifiers'] = [
        {'identifier': '10.1234/f', 'scheme': 'doi', 'relation': 'cites'},
    ]
    minimal_record_model['alternate_identifiers'] = [
        {'identifier': '10.1234/f', 'scheme': 'doi'},
    ]
    meta = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)['metadata']
    # Alternate identifiers are merged into related_identifiers with the
    # fixed relation 'isAlternateIdentifier'.
    assert meta['related_identifiers'] == [
        {'identifier': '10.1234/f', 'scheme': 'doi', 'relation': 'cites'},
        {'identifier': '10.1234/f', 'scheme': 'doi',
         'relation': 'isAlternateIdentifier'},
    ]
def test_grants(minimal_record_model, depid_pid, legacyjson_v1):
    """Test grants."""
    minimal_record_model.update(dict(grants=[
        dict(
            internal_id='10.13039/501100000780::282896',
            funder=dict(
                doi='10.13039/501100000780',
                name='European Commission',
                acronyms=['EC'],
            ),
            identifiers=dict(
                eurepo='info:eu-repo/grantAgreement/EC/FP7/282896',
            ),
            code='282896',
            title='Open Access Research Infrastructure in Europe',
            acronym='OpenAIREplus',
            program='FP7',
        ),
        dict(
            internal_id='10.13039/501100000780::643410',
            funder=dict(
                doi='10.13039/501100000780',
                name='European Commission',
                acronyms=['EC'],
            ),
            identifiers=dict(
                eurepo='info:eu-repo/grantAgreement/EC/H2020/643410',
            ),
            code='643410',
            title='Open Access Infrastructure for Research in Europe 2020',
            acronym='OpenAIRE2020',
            program='H2020',
        ),
    ]))
    obj = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)['metadata']
    # The FP7 grant is exposed via its bare code, while the H2020 grant
    # keeps the full internal id — presumably for backwards compatibility
    # with the legacy FP7-only grant API; confirm against the serializer.
    assert obj['grants'] == [
        dict(id='282896'),
        dict(id='10.13039/501100000780::643410'),
    ]
def test_license(minimal_record_model, depid_pid, legacyjson_v1):
    """Test license."""
    minimal_record_model['license'] = {'id': 'CC0-1.0'}
    meta = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)['metadata']
    # Only the license identifier survives in the legacy format.
    assert meta['license'] == 'CC0-1.0'
def test_subjects(minimal_record_model, depid_pid, legacyjson_v1):
    """Test subjects."""
    subject = dict(
        term="Astronomy",
        identifier="http://id.loc.gov/authorities/subjects/sh85009003",
        scheme="url"
    )
    # Pass a copy in so the comparison below is against an untouched dict.
    minimal_record_model['subjects'] = [dict(subject)]
    meta = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)['metadata']
    # Subjects pass through unchanged.
    assert meta['subjects'] == [subject]
def test_imprint(minimal_record_model, depid_pid, legacyjson_v1):
    """Test that imprint fields are flattened into ``imprint_*`` keys."""
    minimal_record_model['imprint'] = dict(
        place='Some place',
        publisher='Some publisher',
        isbn='978-3-16-148410-0',
    )
    metadata = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)['metadata']
    for key in ('imprint_publisher', 'imprint_place', 'imprint_isbn'):
        assert key in metadata
    # No imprint year was supplied, so none should be serialized.
    assert 'imprint_year' not in metadata
def test_partof(minimal_record_model, depid_pid, legacyjson_v1):
    """Test part-of fields serialized alongside imprint fields."""
    minimal_record_model['imprint'] = dict(
        place='Some place',
        publisher='Some publisher',
        isbn='Some isbn',
    )
    minimal_record_model['part_of'] = dict(
        pages="Some pages",
        title="Some title",
    )
    metadata = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)['metadata']
    present = ('imprint_publisher', 'imprint_place', 'imprint_isbn',
               'partof_pages', 'partof_title')
    for key in present:
        assert key in metadata
    # Neither a year for the imprint nor for the host publication was
    # supplied, so neither must appear in the output.
    for key in ('imprint_year', 'partof_year'):
        assert key not in metadata
def test_state(minimal_record_model, depid_pid, legacyjson_v1):
    """Test the legacy ``state`` field derived from the deposit status."""
    # Draft without a minted record PID -> 'unsubmitted'.
    minimal_record_model.update(dict(
        _deposit=dict(status='draft')
    ))
    obj = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)
    assert obj['state'] == 'unsubmitted'
    # Draft of an already published record (record PID present)
    # -> 'inprogress'.
    minimal_record_model.update(dict(
        _deposit=dict(
            status='draft',
            pid={u'revision_id': 0, u'type': u'recid', u'value': u'1'}
        )
    ))
    obj = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)
    assert obj['state'] == 'inprogress'
    # Published deposit -> 'done'.
    minimal_record_model.update(dict(
        _deposit=dict(status='published')
    ))
    obj = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)
    assert obj['state'] == 'done'
    # Missing status defaults to 'unsubmitted'...
    del minimal_record_model['_deposit']['status']
    obj = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)
    assert obj['state'] == 'unsubmitted'
    # ...unless a record PID exists, in which case it is 'inprogress'.
    minimal_record_model.update(dict(
        _deposit=dict(
            pid={u'revision_id': 0, u'type': u'recid', u'value': u'1'}
        )
    ))
    obj = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)
    assert obj['state'] == 'inprogress'
def test_submitted(minimal_record_model, depid_pid, legacyjson_v1):
    """Test the ``submitted`` flag of the legacy deposit serialization.

    A deposit counts as submitted once a record PID has been minted
    (i.e. it has been published at least once), regardless of its
    current status.
    """
    # A plain draft without a minted record PID is not submitted.
    minimal_record_model.update(dict(
        _deposit=dict(status='draft')
    ))
    obj = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)
    assert obj['submitted'] is False
    # Publishing (status + record PID) marks the deposit as submitted.
    minimal_record_model.update(dict(
        _deposit=dict(
            status='published',
            pid=dict(type='recid', value='1')
        )
    ))
    obj = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)
    assert obj['submitted'] is True
    # Reverting the status to draft must not clear the flag, since the
    # record PID still exists.  BUGFIX: the original line used ``==``
    # instead of ``=``, so it was a no-op comparison and the status was
    # never actually changed back to draft.
    minimal_record_model['_deposit']['status'] = 'draft'
    obj = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)
    assert obj['submitted'] is True
    # Even with no status at all, the presence of the PID keeps it True.
    del minimal_record_model['_deposit']['status']
    obj = legacyjson_v1.transform_record(
        depid_pid, minimal_record_model)
    assert obj['submitted'] is True
| 15,731 | Python | .py | 387 | 33.816537 | 77 | 0.638394 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,375 | test_schemas_json_load.py | zenodo_zenodo/tests/unit/records/test_schemas_json_load.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Unit tests Zenodo JSON deserializer."""
from __future__ import absolute_import, print_function
from datetime import datetime
import idutils
import pytest
from zenodo.modules.records.serializers.schemas.json import MetadataSchemaV1
@pytest.mark.parametrize('input_val, ctx', [
    ('10.1234/foo.bar', dict()),
    ('http://dx.doi.org/10.1234/foo.bar', dict()),
    ('https://doi.org/10.1234/foo.bar', dict()),
    (' doi:10.1234/foo.bar ', dict()),
    (' 10.1234/foo.bar ', dict()),
    ('10.5281/allow', dict(
        allowed_dois=['10.5281/allow'],
        managed_prefixes=['10.5281']
    )),
    ('10.5281/required', dict(
        required_doi='10.5281/required',
        allowed_dois=['10.5281/allow'],
        managed_prefixes=['10.5281'])),
])
def test_valid_doi(input_val, ctx):
    """Test that valid DOI inputs load and are normalized."""
    schema = MetadataSchemaV1(partial=['doi'], context=ctx)
    data, errors = schema.load(dict(doi=input_val))
    assert data['doi'] == idutils.normalize_doi(input_val.strip())
@pytest.mark.parametrize(('input_val', 'ctx'), [
    ('10.5072/test.prefix', dict()),
    ('not a doi', dict()),
    ('10.5281/banned_prefix', dict(managed_prefixes=['10.5281'])),
    ('10.5281/allow', dict(
        required_doi='10.5281/required',
        allowed_dois=['10.5281/allow'],
        managed_prefixes=['10.5281'])),
    ('10.5281/invalid', dict(
        allowed_dois=['10.5281/allow'],
        managed_prefixes=['10.5281'])),
    ('10.5281/invalid_prefix', dict(banned_prefixes=['10.5281', '10.5072'])),
    ('10.5072/invalid_prefix', dict(banned_prefixes=['10.5281', '10.5072'])),
])
def test_invalid_doi(input_val, ctx):
    """Test that invalid DOI inputs yield an error and no loaded value."""
    schema = MetadataSchemaV1(partial=['doi'], context=ctx)
    data, errors = schema.load(dict(doi=input_val))
    assert 'doi' in errors
    assert 'doi' not in data
@pytest.mark.parametrize(('val', 'expected'), [
    (dict(type='publication', subtype='preprint'), None),
    (dict(type='image', subtype='photo'), None),
    (dict(type='dataset'), None),
    (dict(type='dataset', title='Dataset'), dict(type='dataset')),
])
def test_valid_resource_type(val, expected):
    """Test resource type loading.

    ``expected is None`` means the input should load unchanged;
    otherwise the loaded value must equal ``expected`` (e.g. unknown
    keys such as ``title`` are stripped).
    """
    data, errors = MetadataSchemaV1(partial=['resource_type']).load(
        dict(resource_type=val))
    # BUGFIX: parenthesized the conditional expression.  Without the
    # parentheses this parsed as ``assert (data[...] == val) if expected
    # is None else expected`` -- i.e. when ``expected`` was set the
    # statement was just ``assert expected`` (a truthy dict) and the
    # equality was never checked.
    assert data['resource_type'] == (val if expected is None else expected)
@pytest.mark.parametrize('val', [
    dict(type='image', subtype='preprint'),
    dict(subtype='photo'),
    dict(type='invalid'),
    dict(title='Dataset'),
    dict(),
])
def test_invalid_resource_type(val):
    """Test that malformed resource types are rejected."""
    schema = MetadataSchemaV1(partial=['resource_type'])
    data, errors = schema.load(dict(resource_type=val))
    assert 'resource_type' in errors
@pytest.mark.parametrize(('val', 'expected'), [
    ('2016-01-02', '2016-01-02'),
    (' 2016-01-02 ', '2016-01-02'),
    ('0001-01-01', '0001-01-01'),
    (None, datetime.utcnow().date().isoformat()),
    ('2016', datetime.utcnow().date().isoformat()),
])
def test_valid_publication_date(val, expected):
    """Test publication date loading.

    ``expected is None`` would mean the loaded value equals the input;
    in the current cases an explicit expectation is always given.
    """
    data, errors = MetadataSchemaV1(partial=['publication_date']).load(
        dict(publication_date=val) if val is not None else dict())
    # BUGFIX: parenthesized the conditional expression.  Unparenthesized,
    # the statement parsed as ``assert (...) if expected is None else
    # expected``; since ``expected`` is a non-empty string in every case,
    # the assert was always vacuously true and checked nothing.
    assert data['publication_date'] == (
        val if expected is None else expected)
def test_language():
    """Test language deserialization."""
    schema = MetadataSchemaV1(partial=['language'])
    # A valid ISO 639 code loads cleanly.
    data, errors = schema.load(dict(language='eng'))
    assert data['language'] == 'eng'
    assert 'language' not in errors
    # A language name (not a code) is rejected.
    data, errors = schema.load(dict(language='English'))
    assert 'language' in errors
    # The field is optional -- omitting it is not an error.
    data, errors = schema.load(dict())
    assert 'language' not in errors
@pytest.mark.parametrize('val', [
    '2016-02-32',
    ' invalid',
])
def test_invalid_publication_date(val):
    """Test that malformed publication dates are rejected."""
    schema = MetadataSchemaV1(partial=['publication_date'])
    data, errors = schema.load(dict(publication_date=val))
    assert 'publication_date' in errors
    assert 'publication_date' not in data
@pytest.mark.parametrize(('val', 'expected'), [
    ('Test', 'Test',),
    (' Test ', 'Test'),
    ('', None),
    (' ', None),
])
def test_title(val, expected):
    """Test title loading; ``expected is None`` marks invalid input."""
    data, errors = MetadataSchemaV1(partial=['title']).load(
        dict(title=val))
    if expected is None:
        # Blank titles must be rejected outright.
        assert 'title' in errors
        assert 'title' not in data
    else:
        # Valid titles are loaded with surrounding whitespace stripped.
        assert data['title'] == expected
def test_dates():
    """Test validation of the ``dates`` interval list."""
    schema = MetadataSchemaV1(partial=['dates'])
    data, errors = schema.load({'dates': None})
    assert 'not be null' in errors['dates'][0]
    data, errors = schema.load({'dates': []})
    assert 'Shorter than minimum' in errors['dates'][0]
    data, errors = schema.load({'dates': [{}]})
    assert 'required field' in errors['dates'][0]['type'][0]
    # A type alone is not enough: either 'start' or 'end' must be given.
    data, errors = schema.load({'dates': [{'type': 'Valid'}]})
    assert 'at least one date' in errors['dates'][0]
    data, errors = schema.load({'dates': [{'type': 'Valid', 'start': None}]})
    assert 'not be null' in errors['dates'][0]['start'][0]
    data, errors = schema.load({'dates': [{'type': 'Valid', 'start': ''}]})
    assert 'Not a valid date' in errors['dates'][0]['start'][0]
    # "start" date after "end"
    data, errors = schema.load(
        {'dates': [{'type': 'Valid',
                    'start': '2019-02-01', 'end': '2019-01-01'}]})
    assert 'must be before "end"' in errors['dates'][0]
    # Single date value (i.e. start == end)
    data, errors = schema.load(
        {'dates': [{'type': 'Valid',
                    'start': '2019-01-01', 'end': '2019-01-01'}]})
    assert 'dates' not in errors
    # Open-ended intervals (only a start or only an end) are valid.
    data, errors = schema.load(
        {'dates': [{'type': 'Valid', 'start': '2019-01-01'}]})
    assert 'dates' not in errors
    data, errors = schema.load(
        {'dates': [{'type': 'Valid', 'end': '2019-01-01'}]})
    assert 'dates' not in errors
    # A free-text description is accepted alongside the interval.
    data, errors = schema.load(
        {'dates': [{'type': 'Valid',
                    'start': '2019-01-01', 'end': '2019-01-31',
                    'description': 'Some description'}]})
    assert 'dates' not in errors
@pytest.mark.parametrize(('val', 'expected'), [
    ([{'lat': '2.35', 'lon': '1.534', 'place': 'my place'}],
     {'lat': 2.35, 'lon': 1.534, 'place': 'my place'},),
    ([{'lat': 2.35, 'lon': 1.534, 'place': 'my place'}],
     {'lat': 2.35, 'lon': 1.534, 'place': 'my place'}),
    ({'lat': 2.35, 'place': 'my place'}, None),
    ({'lon': 1.534, 'place': 'my place'}, None),
    ({'lat': 2.35, 'lon': 1.534}, None),
    ({'lat': None, 'lon': 1.534, 'place': 'my place'}, None),
    ({'lat': 2.35, 'lon': 91, 'place': 'my place'}, None),
    ({'lat': -91, 'lon': 1.534, 'place': 'my place'}, None),
    ([{'lat': -90, 'lon': 90, 'place': 'my place'}],
     {'lat': -90, 'lon': 90, 'place': 'my place'}),
])
def test_geographical_metadata(val, expected):
    """Test geographical metadata.

    ``expected is None`` marks inputs that must fail validation
    (missing place, missing/null coordinates, out-of-range lat/lon).
    String coordinates are coerced to floats on load.
    """
    data, errors = MetadataSchemaV1(partial=['locations']).load(
        dict(locations=val))
    if expected is not None:
        assert data['locations'][0] == expected
    else:
        assert 'locations' in errors
        assert 'locations' not in data
| 8,049 | Python | .py | 196 | 36.071429 | 77 | 0.622236 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,376 | test_schemas_datacite.py | zenodo_zenodo/tests/unit/records/test_schemas_datacite.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016-2018 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo Dublin Core mapping test."""
from __future__ import absolute_import, print_function
import json
from datetime import datetime, timedelta
import pytest
from zenodo.modules.records.serializers import datacite_v31, datacite_v41
def today():
    """Return the current date in UTC."""
    utc_now = datetime.utcnow()
    return utc_now.date()
def test_minimal(db, minimal_record_model, recid_pid):
    """Test DataCite v3.1 serialization of a minimal record."""
    minimal_record_model['doi'] = '10.5072/foo'
    obj = datacite_v31.transform_record(recid_pid, minimal_record_model)
    # Full expected output: fields with no data are serialized as empty
    # lists; issued date and publication year default to today (UTC).
    assert obj == {
        'identifier': {'identifier': '10.5072/foo', 'identifierType': 'DOI'},
        'creators': [{'creatorName': 'Test', 'nameIdentifier': {}}],
        'titles': [{'title': 'Test'}],
        'publisher': 'Zenodo',
        'publicationYear': str(today().year),
        'dates': [{'dateType': 'Issued', 'date': today().isoformat()}],
        'subjects': [],
        'contributors': [],
        'resourceType': {
            'resourceType': None, 'resourceTypeGeneral': 'Software'},
        'alternateIdentifiers': [{
            'alternateIdentifier': 'http://localhost/record/123',
            'alternateIdentifierType': 'url',
        }],
        'relatedIdentifiers': [],
        'rightsList': [
            {'rights': 'Open Access',
             'rightsURI': 'info:eu-repo/semantics/openAccess'}],
        'descriptions': [
            {'description': 'My description', 'descriptionType': 'Abstract'}]
    }
def test_non_local_doi(db, minimal_record_model, recid_pid):
    """Test that an externally-minted DOI is serialized as a related id."""
    minimal_record_model['doi'] = '10.1234/foo'
    result = datacite_v31.transform_record(recid_pid, minimal_record_model)
    # The record landing-page URL becomes the primary identifier...
    assert result['identifier'] == {
        'identifier': 'http://localhost/record/123',
        'identifierType': 'URL',
    }
    # ...and the external DOI is linked via an IsIdenticalTo relation.
    assert result['relatedIdentifiers'] == [{
        'relatedIdentifier': '10.1234/foo',
        'relatedIdentifierType': 'DOI',
        'relationType': 'IsIdenticalTo',
    }]
def test_full(db, record_with_bucket, recid_pid):
    """Test DataCite serialization of a full record (v3.1 and v4.1).

    The same record is serialized twice; the v4.1 expectation is built
    by patching only the fields whose shape changed between schema
    versions (creators, contributors, fundingReferences, dates,
    geoLocations).
    """
    _, full_record_model = record_with_bucket
    full_record_model['doi'] = '10.5072/foo'
    obj = datacite_v31.transform_record(recid_pid, full_record_model)
    # Expected DataCite v3.1 output.
    expected = {
        "alternateIdentifiers": [
            {
                "alternateIdentifier": "urn:lsid:ubio.org:namebank:11815",
                "alternateIdentifierType": "lsid"
            },
            {
                "alternateIdentifier": "2011ApJS..192...18K",
                "alternateIdentifierType": "ads"
            },
            {
                'alternateIdentifier': '0317-8471',
                'alternateIdentifierType': 'issn',
            },
            {
                "alternateIdentifier": "10.1234/alternate.doi",
                "alternateIdentifierType": "doi"
            },
            {
                "alternateIdentifier": "http://localhost/record/12345",
                "alternateIdentifierType": "url"
            },
        ],
        "contributors": [
            {
                "affiliation": "CERN",
                "contributorName": "Smith, Other",
                "contributorType": "Other",
                "nameIdentifier": {
                    "nameIdentifier": "0000-0002-1825-0097",
                    "nameIdentifierScheme": "ORCID",
                    "schemeURI": "http://orcid.org/"
                }
            },
            {
                "affiliation": "",
                "contributorName": "Hansen, Viggo",
                "contributorType": "Other",
                "nameIdentifier": {}
            },
            {
                "affiliation": "CERN",
                "contributorName": "Kowalski, Manager",
                "contributorType": "DataManager",
                "nameIdentifier": {}
            }
        ],
        "creators": [
            {
                "affiliation": "CERN",
                "creatorName": "Doe, John",
                "nameIdentifier": {
                    "nameIdentifier": "0000-0002-1694-233X",
                    "nameIdentifierScheme": "ORCID",
                    "schemeURI": "http://orcid.org/"
                }
            },
            {
                "affiliation": "CERN",
                "creatorName": "Doe, Jane",
                "nameIdentifier": {
                    "nameIdentifier": "0000-0002-1825-0097",
                    "nameIdentifierScheme": "ORCID",
                    "schemeURI": "http://orcid.org/"
                }
            },
            {
                "affiliation": "CERN",
                "creatorName": "Smith, John",
                "nameIdentifier": {}
            },
            {
                "affiliation": "CERN",
                "creatorName": "Nowak, Jack",
                "nameIdentifier": {
                    "nameIdentifier": "170118215",
                    "nameIdentifierScheme": "GND"
                }
            }
        ],
        "dates": [
            {"date": "2014-02-27", "dateType": "Issued"},
            {"date": "2019-01-01/", "dateType": "Valid"},
            # NOTE: "Withdrawn" is not in the DataCite v3.1 dateType vocabulary
            # {"date": "2019-01-01", "dateType": "Withdrawn"},
            {"date": "/2019-01-01", "dateType": "Collected"},
            {"date": "2019-01-01/2019-02-01", "dateType": "Collected"},
        ],
        "descriptions": [
            {
                "description": "Test Description",
                "descriptionType": "Abstract"
            },
            {
                "description": "notes",
                "descriptionType": "Other"
            },
            {
                "description": (
                    "{\"references\": [\"Doe, John et al (2012). "
                    "Some title. Zenodo. 10.5281/zenodo.12\", \"Smith, "
                    "Jane et al (2012). Some title. Zenodo. "
                    "10.5281/zenodo.34\"]}"
                ),
                "descriptionType": "Other"
            },
            {'description': 'microscopic supersampling',
             'descriptionType': 'Methods'}
        ],
        "identifier": {"identifier": "10.5072/foo", "identifierType": "DOI"},
        "language": "en",
        "geoLocations": [{
            "geoLocationPlace": "my place",
            "geoLocationPoint": "2.35 1.534"
        }, {
            'geoLocationPlace': 'New York'
        }],
        "publicationYear": "2014",
        "publisher": "Zenodo",
        "relatedIdentifiers": [
            {
                "relationType": "Cites",
                "resourceTypeGeneral": "Dataset",
                "relatedIdentifier": "10.1234/foo.bar",
                "relatedIdentifierType": "DOI"
            },
            {
                "relationType": "IsIdenticalTo",
                "relatedIdentifier": "1234.4325",
                "relatedIdentifierType": "arXiv"
            },
            {
                "relationType": "Cites",
                "resourceTypeGeneral": "Dataset",
                "relatedIdentifier": "1234.4321",
                "relatedIdentifierType": "arXiv"
            },
            {
                "relationType": "References",
                "resourceTypeGeneral": "Dataset",
                "relatedIdentifier": "1234.4328",
                "relatedIdentifierType": "arXiv"
            },
            {
                "relationType": "IsPartOf",
                "relatedIdentifier": "10.1234/zenodo.4321",
                "relatedIdentifierType": "DOI",
                "resourceTypeGeneral": "Software"
            },
            {
                "relationType": "HasPart",
                "relatedIdentifier": "10.1234/zenodo.1234",
                "relatedIdentifierType": "DOI",
                "resourceTypeGeneral": "BookChapter"
            },
            {
                "relationType": "IsPartOf",
                "relatedIdentifier": "http://localhost/communities/zenodo",
                "relatedIdentifierType": "URL"
            }
        ],
        "resourceType": {
            "resourceType": None,
            "resourceTypeGeneral": "Book"
        },
        "rightsList": [
            {
                "rights": "Creative Commons Attribution 4.0",
                "rightsURI": "https://creativecommons.org/licenses/by/4.0/"
            },
            {
                "rights": "Open Access",
                "rightsURI": "info:eu-repo/semantics/openAccess"
            }
        ],
        "subjects": [
            {"subject": "kw1"},
            {"subject": "kw2"},
            {"subject": "kw3"},
            {
                "subject": "http://id.loc.gov/authorities/subjects/sh85009003",
                "subjectScheme": "url"
            }
        ],
        "titles": [{"title": "Test title"}],
        "version": "1.2.5"
    }
    assert obj == expected
    obj = datacite_v41.transform_record(recid_pid, full_record_model)
    # v4.1 splits names into given/family parts, supports multiple name
    # identifiers and affiliations, adds fundingReferences, allows the
    # "Withdrawn" dateType, and uses structured geoLocationPoint objects.
    expected['creators'] = [
        {
            'affiliations': ['CERN'],
            'creatorName': 'Doe, John',
            'familyName': 'Doe',
            'givenName': 'John',
            'nameIdentifiers': [
                {
                    'nameIdentifierScheme': 'ORCID',
                    'schemeURI': 'http://orcid.org/',
                    'nameIdentifier': '0000-0002-1694-233X'
                },
                {
                    'nameIdentifierScheme': 'GND',
                    'nameIdentifier': '170118215'
                }
            ],
        },
        {
            'affiliations': ['CERN'],
            'creatorName': 'Doe, Jane',
            'familyName': 'Doe',
            'givenName': 'Jane',
            'nameIdentifiers': [
                {
                    'nameIdentifierScheme': 'ORCID',
                    'schemeURI': 'http://orcid.org/',
                    'nameIdentifier': '0000-0002-1825-0097'
                }
            ],
        },
        {
            'affiliations': ['CERN'],
            'creatorName': 'Smith, John',
            'familyName': 'Smith',
            'givenName': 'John',
            'nameIdentifiers': [],
        },
        {
            'affiliations': ['CERN'],
            'creatorName': 'Nowak, Jack',
            'familyName': 'Nowak',
            'givenName': 'Jack',
            'nameIdentifiers': [
                {
                    'nameIdentifierScheme': 'GND',
                    'nameIdentifier': '170118215'
                }
            ],
        }
    ]
    expected['contributors'] = [
        {
            'affiliations': ['CERN'],
            'nameIdentifiers': [
                {
                    'nameIdentifierScheme': 'ORCID',
                    'schemeURI': 'http://orcid.org/',
                    'nameIdentifier': '0000-0002-1825-0097'
                }
            ],
            'contributorName': 'Smith, Other',
            'familyName': 'Smith',
            'givenName': 'Other',
            'contributorType': 'Other',
        },
        {
            'affiliations': [''],
            'nameIdentifiers': [],
            'contributorName': 'Hansen, Viggo',
            'familyName': 'Hansen',
            'givenName': 'Viggo',
            'contributorType': 'Other',
        },
        {
            'affiliations': ['CERN'],
            'nameIdentifiers': [],
            'contributorName': 'Kowalski, Manager',
            'familyName': 'Kowalski',
            'givenName': 'Manager',
            'contributorType': 'DataManager',
        },
        {
            'contributorName': 'Smith, Professor',
            'familyName': 'Smith',
            'givenName': 'Professor',
            'nameIdentifiers': [],
            'contributorType': 'Supervisor',
        }
    ]
    expected['fundingReferences'] = []
    expected["dates"] = [
        {"date": "2014-02-27", "dateType": "Issued"},
        {"date": "2019-01-01/", "dateType": "Valid",
         "dateInformation": "Bongo"},
        {"date": "/2019-01-01", "dateType": "Collected"},
        {"date": "2019-01-01", "dateType": "Withdrawn"},
        {"date": "2019-01-01/2019-02-01", "dateType": "Collected"},
    ]
    expected['geoLocations'] = [{
        "geoLocationPlace": "my place",
        "geoLocationPoint": {
            "pointLatitude": 2.35,
            "pointLongitude": 1.534
        }
    }, {
        'geoLocationPlace': 'New York'
    }]
    assert obj == expected
@pytest.mark.parametrize("serializer", [
    datacite_v31,
    datacite_v41,
])
def test_identifier(db, minimal_record_model, recid_pid, serializer):
    """Test that the record DOI maps to the DataCite identifier."""
    result = serializer.transform_record(recid_pid, minimal_record_model)
    expected = {
        'identifier': '10.5072/zenodo.123',
        'identifierType': 'DOI',
    }
    assert result['identifier'] == expected
def test_creators(db, minimal_record_model, recid_pid):
    """Test creators serialization (DataCite v3.1)."""
    minimal_record_model.update({
        'creators': [
            {'name': 'A', 'affiliation': 'AA', 'gnd': '1234'},
            {'name': 'B', 'affiliation': 'BA', 'orcid': '0000-0000-0000-0000',
             'gnd': '4321'},
        ]})
    obj = datacite_v31.transform_record(recid_pid, minimal_record_model)
    # v3.1 carries a single nameIdentifier per creator; when both ORCID
    # and GND are present the ORCID is used (second creator).
    assert obj['creators'] == [
        {'affiliation': 'AA', 'creatorName': 'A', 'nameIdentifier': {
            'nameIdentifier': '1234', 'nameIdentifierScheme': 'GND'}},
        {'affiliation': 'BA', 'creatorName': 'B', 'nameIdentifier': {
            'nameIdentifier': '0000-0000-0000-0000',
            'nameIdentifierScheme': 'ORCID',
            'schemeURI': 'http://orcid.org/'}}
    ]
def test_creators_v4(db, minimal_record_model, recid_pid):
    """Test creators serialization (DataCite v4.1)."""
    minimal_record_model.update({
        'creators': [
            {'name': 'A, B', 'affiliation': 'AA', 'gnd': '1234'},
            {
                'name': 'B',
                'affiliation': 'BA',
                'orcid': '0000-0000-0000-0000',
                'gnd': '4321'
            },
        ]})
    obj = datacite_v41.transform_record(recid_pid, minimal_record_model)
    # v4.1 splits "Family, Given" names and lists all name identifiers;
    # a name without a comma yields empty given/family names.
    assert obj['creators'] == [{
        'affiliations': ['AA'],
        'creatorName': 'A, B',
        'givenName': 'B',
        'familyName': 'A',
        'nameIdentifiers': [{
            'nameIdentifier': '1234',
            'nameIdentifierScheme': 'GND'
        }]},
        {
            'affiliations': ['BA'],
            'creatorName': 'B',
            'givenName': '',
            'familyName': '',
            'nameIdentifiers': [{
                'nameIdentifier': '0000-0000-0000-0000',
                'nameIdentifierScheme': 'ORCID',
                'schemeURI': 'http://orcid.org/'
            }, {
                'nameIdentifier': '4321',
                'nameIdentifierScheme': 'GND'
            }]
        }
    ]
@pytest.mark.parametrize("serializer", [
    datacite_v31,
    datacite_v41,
])
def test_embargo_date(db, minimal_record_model, recid_pid, serializer):
    """Test serialization of the embargo date."""
    embargo = (today() + timedelta(days=1)).isoformat()
    minimal_record_model['embargo_date'] = embargo
    minimal_record_model['access_right'] = 'embargoed'
    result = serializer.transform_record(recid_pid, minimal_record_model)
    # Embargoed records expose an Available date (embargo end) plus an
    # Accepted date (today).
    assert result['dates'] == [
        {'dateType': 'Available', 'date': embargo},
        {'dateType': 'Accepted', 'date': today().isoformat()},
    ]
@pytest.mark.parametrize("serializer", [
    datacite_v31,
    datacite_v41,
])
def test_subjects(db, minimal_record_model, recid_pid, serializer):
    """Test serialization of keywords and subjects."""
    minimal_record_model.update({
        'keywords': ['kw1'],
        'subjects': [{'term': 'test', 'identifier': 'id', 'scheme': 'loc'}],
    })
    obj = serializer.transform_record(recid_pid, minimal_record_model)
    # Keywords become plain subjects; structured subjects keep their
    # scheme and use the identifier (not the term) as the value.
    assert obj['subjects'] == [
        {'subject': 'kw1'},
        {'subject': 'id', 'subjectScheme': 'loc'},
    ]
def test_contributors(db, minimal_record_model, recid_pid):
    """Test contributors serialization (DataCite v3.1)."""
    minimal_record_model.update({
        'contributors': [{
            'name': 'A',
            'affiliation': 'AA',
            'gnd': '1234',
            'type': 'Researcher'
        }, ],
        'thesis_supervisors': [{
            'name': 'B',
            'affiliation': 'BA',
            'type': 'Supervisor'
        }, ],
        'grants': [{
            'funder': {
                'name': 'European Commission',
            },
            'identifiers': {
                'eurepo': 'info:eu-repo/grantAgreement/EC/FP7/244909'
            },
        }],
    })
    obj = datacite_v31.transform_record(recid_pid, minimal_record_model)
    # In v3.1 grants are expressed as Funder-type contributors carrying
    # the eu-repo grant agreement identifier.
    assert obj['contributors'] == [
        {
            'affiliation': 'AA',
            'contributorName': 'A',
            'contributorType': 'Researcher',
            'nameIdentifier': {
                'nameIdentifier': '1234',
                'nameIdentifierScheme': 'GND'}
        },
        {
            'affiliation': 'BA',
            'contributorName': 'B',
            'contributorType': 'Supervisor',
            'nameIdentifier': {},
        },
        {
            'contributorName': 'European Commission',
            'contributorType': 'Funder',
            'nameIdentifier': {
                'nameIdentifier': 'info:eu-repo/grantAgreement/EC/FP7/244909',
                'nameIdentifierScheme': 'info'}
        },
    ]
def test_contributors_v4(db, minimal_record_model, recid_pid):
    """Test contributors serialization (DataCite v4.1)."""
    minimal_record_model.update({
        'contributors': [{
            'name': 'A, B',
            'affiliation': 'AA',
            'gnd': '1234',
            'orcid': '0000-0000-0000-0000',
            'type': 'Researcher'
        }, ],
        'thesis': {
            'supervisors': [{
                'name': 'B',
                'affiliation': 'BA',
                'type': 'Supervisor'
            }]
        }
    })
    obj = datacite_v41.transform_record(recid_pid, minimal_record_model)
    # Thesis supervisors are merged into the contributors list.
    assert obj['contributors'] == [
        {
            'affiliations': ['AA'],
            'contributorName': 'A, B',
            'givenName': 'B',
            'familyName': 'A',
            'contributorType': 'Researcher',
            'nameIdentifiers': [
                {
                    'nameIdentifier': '0000-0000-0000-0000',
                    'nameIdentifierScheme': 'ORCID',
                    'schemeURI': 'http://orcid.org/'
                },
                {
                    'nameIdentifier': '1234',
                    'nameIdentifierScheme': 'GND'
                },
            ]
        },
        {
            'affiliations': ['BA'],
            'contributorName': 'B',
            'givenName': '',
            'familyName': '',
            'contributorType': 'Supervisor',
            'nameIdentifiers': [],
        },
    ]
    # Test without `thesis` field
    minimal_record_model.pop('thesis', None)
    obj = datacite_v41.transform_record(recid_pid, minimal_record_model)
    assert obj['contributors'] == [
        {
            'affiliations': ['AA'],
            'contributorName': 'A, B',
            'givenName': 'B',
            'familyName': 'A',
            'contributorType': 'Researcher',
            'nameIdentifiers': [
                {
                    'nameIdentifier': '0000-0000-0000-0000',
                    'nameIdentifierScheme': 'ORCID',
                    'schemeURI': 'http://orcid.org/'
                },
                {
                    'nameIdentifier': '1234',
                    'nameIdentifierScheme': 'GND'
                },
            ]
        },
    ]
@pytest.mark.parametrize("serializer", [
    datacite_v31,
    datacite_v41,
])
def test_language(db, minimal_record_model, recid_pid, serializer):
    """Test language serialization."""
    # No language on the record -> no language in the output.
    assert 'language' not in minimal_record_model
    result = serializer.transform_record(recid_pid, minimal_record_model)
    assert 'language' not in result
    # ISO 639-3 codes with a two-letter equivalent are converted, since
    # DataCite supports ISO 639-1 (2-letter) codes.
    minimal_record_model['language'] = 'eng'
    result = serializer.transform_record(recid_pid, minimal_record_model)
    assert result['language'] == 'en'
    # 'twa' has no ISO 639-1 mapping, so the field is dropped.
    minimal_record_model['language'] = 'twa'
    result = serializer.transform_record(recid_pid, minimal_record_model)
    assert 'language' not in result
    # Dirty data (a language name instead of a code) should never occur,
    # but is also dropped rather than serialized.
    minimal_record_model['language'] = 'Esperanto'
    result = serializer.transform_record(recid_pid, minimal_record_model)
    assert 'language' not in result
@pytest.mark.parametrize("serializer", [
    datacite_v31,
    datacite_v41,
])
def test_resource_type(db, minimal_record_model, recid_pid, serializer):
    """Test resource type serialization."""
    minimal_record_model['resource_type'] = {'type': 'poster'}
    obj = serializer.transform_record(recid_pid, minimal_record_model)
    assert obj['resourceType'] == {
        'resourceTypeGeneral': 'Text',
        'resourceType': 'Poster',
    }
    # If the record is not in 'c1', OpenAIRE subtype should not be serialized
    minimal_record_model['resource_type'] = {'type': 'software',
                                             'openaire_subtype': 'foo:t1'}
    obj = serializer.transform_record(recid_pid, minimal_record_model)
    assert obj['resourceType'] == {
        'resourceTypeGeneral': 'Software',
        'resourceType': None
    }
    # Add 'c1' to communities. 'foo:t1' should be serialized as a type
    minimal_record_model['communities'] = ['c1', ]
    obj = serializer.transform_record(recid_pid, minimal_record_model)
    assert obj['resourceType'] == {
        'resourceTypeGeneral': 'Software',
        'resourceType': 'openaire:foo:t1',
    }
@pytest.mark.parametrize("serializer", [
    datacite_v31,
    datacite_v41,
])
def test_alt_ids(db, minimal_record_model, recid_pid, serializer):
    """Test alternate identifiers serialization."""
    minimal_record_model['alternate_identifiers'] = [{
        'identifier': '10.1234/foo.bar',
        'scheme': 'doi'
    }]
    result = serializer.transform_record(recid_pid, minimal_record_model)
    # The record landing-page URL is always appended as an alternate id.
    assert result['alternateIdentifiers'] == [{
        'alternateIdentifier': '10.1234/foo.bar',
        'alternateIdentifierType': 'doi',
    }, {
        'alternateIdentifier': 'http://localhost/record/123',
        'alternateIdentifierType': 'url',
    }]
@pytest.mark.parametrize("serializer", [
    datacite_v31,
    datacite_v41,
])
def test_related_identifiers(db, minimal_record_model, recid_pid, serializer):
    """Test related identifiers serialization."""
    # (record scheme, expected DataCite relatedIdentifierType) pairs.
    tests = [
        ('handle', 'Handle'),
        ('arxiv', 'arXiv'),
        ('ads', 'bibcode'),
        ('doi', 'DOI'),
    ]
    for t, dc_t in tests:
        minimal_record_model.update({
            'related_identifiers': [{
                'identifier': '1234',
                'scheme': t,
                'relation': 'isCitedBy',
                'resource_type': {
                    'type': 'publication',
                    'subtype': 'section'
                }
            }, {
                'identifier': '1234',
                'scheme': 'invalid',
                'relation': 'isCitedBy',
            }],
        })
        obj = serializer.transform_record(recid_pid, minimal_record_model)
        # Entries with an unrecognized scheme are dropped: only the
        # first related identifier makes it into the output.
        expected_result = [{
            'relatedIdentifier': '1234',
            'relatedIdentifierType': dc_t,
            'relationType': 'IsCitedBy',
            'resourceTypeGeneral': 'BookChapter'
        }]
        assert obj['relatedIdentifiers'] == expected_result
@pytest.mark.parametrize("serializer", [
    datacite_v31,
    datacite_v41,
])
def test_communities_rel_ids(db, minimal_record_model, recid_pid, serializer):
    """Test communities in related identifiers."""
    for communities in (['zenodo'], ['c1', 'c2', 'c3']):
        minimal_record_model['communities'] = communities
        obj = serializer.transform_record(recid_pid, minimal_record_model)
        # Every community becomes an IsPartOf URL related identifier.
        for comm in communities:
            assert {
                'relatedIdentifier':
                    'http://localhost/communities/{}'.format(comm),
                'relatedIdentifierType': 'URL',
                'relationType': 'IsPartOf',
            } in obj['relatedIdentifiers']
@pytest.mark.parametrize("serializer", [
    datacite_v31,
    datacite_v41,
])
def test_rights(db, minimal_record_model, recid_pid, serializer):
    """Test rights (license) serialization."""
    minimal_record_model.update({
        'license': {
            'identifier': 'cc-by-sa',
            'title': 'Creative Commons Attribution Share-Alike',
            'source': 'opendefinition.org',
            'url': 'http://www.opendefinition.org/licenses/cc-by-sa'
        }
    })
    obj = serializer.transform_record(recid_pid, minimal_record_model)
    # An eu-repo openAccess entry is appended after the license
    # (presumably because the minimal record is open access -- confirm
    # against the fixture).
    assert obj['rightsList'] == [{
        'rights': 'Creative Commons Attribution Share-Alike',
        'rightsURI': 'http://www.opendefinition.org/licenses/cc-by-sa',
    }, {
        'rights': 'Open Access',
        'rightsURI': 'info:eu-repo/semantics/openAccess',
    }]
@pytest.mark.parametrize("serializer", [
    datacite_v31,
    datacite_v41,
])
def test_descriptions(db, minimal_record_model, recid_pid, serializer):
    """Test descriptions serialization and truncation."""
    minimal_record_model.update({
        'description': 'test',
        'notes': 'again',
        'references': [{'raw_reference': 'A'}],
    })
    obj = serializer.transform_record(recid_pid, minimal_record_model)
    # Description maps to Abstract; notes and references map to Other
    # (references are embedded as a JSON string).
    assert obj['descriptions'] == [{
        'description': 'test',
        'descriptionType': 'Abstract',
    }, {
        'description': 'again',
        'descriptionType': 'Other',
    }, {
        'description': json.dumps({'references': ['A']}),
        'descriptionType': 'Other',
    }]
    minimal_record_model.update({
        'description': (20000 * 'A') + 'BBB',
        'notes': (20000 * 'A') + 'BBB',
        'references': [{'raw_reference': (20000 * 'A') + 'BBB'}],
    })
    obj = serializer.transform_record(recid_pid, minimal_record_model)
    # Over-long descriptions are truncated to 20000 characters (the
    # trailing 'B's must be cut off).
    assert all(len(d['description']) == 20000 and 'B' not in d['description']
               for d in obj['descriptions'])
def test_funding_ref_v4(db, minimal_record_model, recid_pid):
    """Test funding references serialization (DataCite v4.1)."""
    minimal_record_model.update({
        'grants': [
            {'title': 'Grant Title',
             'code': '1234',
             'identifiers': {'eurepo': 'eurepo 1'},
             'internal_id': '10.1234/foo::1234',
             'funder': {'name': 'EC', 'doi': '10.1234/foo'}},
            {'title': 'Title Grant',
             'code': '4321',
             'identifiers': {'eurepo': 'eurepo 2'},
             'internal_id': '10.1234/foo::4321',
             'funder': {'name': 'EC', 'doi': '10.1234/foo'}},
        ]})
    obj = datacite_v41.transform_record(recid_pid, minimal_record_model)
    # The funder DOI is emitted as a Crossref Funder ID; the grant code
    # and eu-repo identifier map to the award number and URI.
    assert obj['fundingReferences'] == [
        {
            'funderName': 'EC',
            'funderIdentifier': {
                'funderIdentifier': '10.1234/foo',
                'funderIdentifierType': 'Crossref Funder ID',
            },
            'awardNumber': {
                'awardNumber': '1234',
                'awardURI': 'eurepo 1'
            },
            'awardTitle': 'Grant Title'
        },
        {
            'funderName': 'EC',
            'funderIdentifier': {
                'funderIdentifier': '10.1234/foo',
                'funderIdentifierType': 'Crossref Funder ID',
            },
            'awardNumber': {
                'awardNumber': '4321',
                'awardURI': 'eurepo 2'
            },
            'awardTitle': 'Title Grant'
        }
    ]
def test_titles(db, minimal_record_model, recid_pid):
    """Titles must be serialized without any change of case."""
    # NOTE: There used to be a bug which was modifying the case of the title
    for title in ('a lower-case title', 'Mixed-caSe titLE'):
        minimal_record_model['title'] = title
        result = datacite_v31.transform_record(recid_pid, minimal_record_model)
        assert [{'title': title}] == result['titles']
| 29,008 | Python | .py | 806 | 25.310174 | 79 | 0.516895 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,377 | test_bibtex_serializer.py | zenodo_zenodo/tests/unit/records/test_bibtex_serializer.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015, 2016 CERN.
#
# Zenodo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Unit tests BibTex formatter."""
from __future__ import absolute_import, print_function, unicode_literals
import pytest
from invenio_records.api import Record
from invenio_records.models import RecordMetadata
from zenodo.modules.records.serializers.bibtex import Bibtex, \
BibTeXSerializer, MissingRequiredFieldError
def test_serializer(bibtex_records):
    """Serialize a single record and a search result to the same BibTeX."""
    (record_good, record_bad, record_empty, test_record) = bibtex_records
    serializer = BibTeXSerializer()
    # Expected output for the "good" test record (an @book entry).
    bibtex = ("""@book{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {Test title},\n"""
              """  publisher    = {Jol},\n"""
              """  year         = 2014,\n"""
              """  volume       = 20,\n"""
              """  address      = {Staszkowka},\n"""
              """  month        = feb,\n"""
              """  note         = {notes},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == serializer.serialize(record=test_record, pid=1)
    # serialize_search consumes an Elasticsearch-style result envelope and
    # must produce the same entry for the embedded record.
    results = {
        "hits": {
            "hits": [{
                "_source": test_record
            }]
        }
    }
    assert bibtex == serializer.serialize_search(search_result=results,
                                                 pid_fetcher=None)
def test_get_entry_type(bibtex_records):
    """Entry type mirrors the resource type, with a 'default' fallback."""
    record_good, record_bad, record_empty, test_record = bibtex_records
    # Every stored record (except the known-bad one) must report the
    # resource type recorded in its metadata.
    for rec_id, in RecordMetadata.query.values(RecordMetadata.id):
        if rec_id == record_bad.record.id:
            continue
        loaded = Record.get_record(id_=rec_id)
        assert Bibtex(loaded)._get_entry_type() == \
            loaded['resource_type']['type']
    assert record_good._get_entry_type() == \
        test_record['resource_type']['type']
    assert record_bad._get_entry_type() == 'default'
def test_get_entry_subtype(bibtex_records):
    """Entry subtype mirrors the resource subtype, with a fallback."""
    record_good, record_bad, _, test_record = bibtex_records
    assert record_good._get_entry_subtype() == \
        test_record['resource_type']['subtype']
    assert record_bad._get_entry_subtype() == 'default'
def test_get_citation_key(bibtex_records):
    """Citation key generation and its failure mode on empty records."""
    record_good, record_bad, record_empty, _ = bibtex_records
    assert record_good._get_citation_key() == "doe_2014_12345"
    assert record_bad._get_citation_key() == "12345"
    # An empty record has nothing to build a key from.
    with pytest.raises(MissingRequiredFieldError):
        record_empty._get_citation_key()
def test_get_doi(bibtex_records):
    """DOI is read from the record; empty records yield ``None``."""
    record_good, _, record_empty, test_record = bibtex_records
    assert record_good._get_doi() == test_record['doi']
    assert record_empty._get_doi() is None
def test_get_author(bibtex_records):
    """All creator names are returned, in record order."""
    record_good, _, record_empty, test_record = bibtex_records
    expected = [creator['name'] for creator in test_record['creators']]
    assert record_good._get_author() == expected
    assert record_empty._get_author() == []
def test_get_title(bibtex_records):
    """Title is read from the record; empty records yield ''."""
    record_good, _, record_empty, test_record = bibtex_records
    assert record_good._get_title() == test_record['title']
    assert record_empty._get_title() == ""
def test_get_month(bibtex_records):
    """Month is a three-letter abbreviation; empty records yield ''."""
    record_good, _, record_empty, _ = bibtex_records
    assert record_good._get_month() == 'feb'
    assert record_empty._get_month() == ""
def test_get_year(bibtex_records):
    """Year is extracted from the record; empty records yield ''."""
    record_good, _, record_empty, _ = bibtex_records
    assert record_good._get_year() == '2014'
    assert record_empty._get_year() == ""
def test_get_note(bibtex_records):
    """Note comes from the record's notes; empty records yield ''."""
    record_good, _, record_empty, test_record = bibtex_records
    assert record_good._get_note() == test_record['notes']
    assert record_empty._get_note() == ""
def test_get_address(bibtex_records):
    """Address comes from the imprint place; empty records yield ''."""
    record_good, _, record_empty, test_record = bibtex_records
    assert test_record["imprint"]["place"] == record_good._get_address()
    # Bug fix: this previously asserted ``record_empty._get_note()``
    # (copy-paste from test_get_note), so the empty-record behaviour of
    # ``_get_address()`` was never actually exercised.
    assert "" == record_empty._get_address()
def test_get_booktitle(bibtex_records):
    """Book title comes from 'part_of'; empty records yield ''."""
    record_good, _, record_empty, test_record = bibtex_records
    assert record_good._get_booktitle() == test_record["part_of"]["title"]
    assert record_empty._get_booktitle() == ""
def test_get_journal(bibtex_records):
    """Journal title is read from the record; empty records yield ''."""
    record_good, _, record_empty, test_record = bibtex_records
    assert record_good._get_journal() == test_record['journal']['title']
    assert record_empty._get_journal() == ""
def test_get_number(bibtex_records):
    """Number maps to the journal issue; empty records yield ''."""
    record_good, _, record_empty, test_record = bibtex_records
    assert record_good._get_number() == test_record['journal']['issue']
    assert record_empty._get_number() == ""
def test_get_pages(bibtex_records):
    """Pages come from the journal metadata; empty records yield ''."""
    record_good, _, record_empty, test_record = bibtex_records
    assert record_good._get_pages() == test_record['journal']['pages']
    assert record_empty._get_pages() == ""
def test_get_publisher(app, bibtex_records):
    """Publisher falls back to the configured site name when absent."""
    record_good, _, record_empty, test_record = bibtex_records
    with app.app_context():
        sitename = app.config['THEME_SITENAME']
        assert record_good._get_publisher() == \
            test_record['imprint']['publisher']
        assert record_empty._get_publisher() == sitename
def test_get_school(bibtex_records):
    """School is the thesis university; empty records yield ''."""
    record_good, _, record_empty, test_record = bibtex_records
    assert record_good._get_school() == test_record['thesis']['university']
    assert record_empty._get_school() == ""
def test_get_url(bibtex_records):
    """URL is built from the record's DOI; empty records yield ''."""
    record_good, _, record_empty, test_record = bibtex_records
    expected = "https://doi.org/" + test_record['doi']
    assert record_good._get_url() == expected
    assert record_empty._get_url() == ""
def test_get_volume(bibtex_records):
    """Volume is the journal volume; empty records yield ''."""
    record_good, _, record_empty, test_record = bibtex_records
    assert record_good._get_volume() == test_record['journal']['volume']
    assert record_empty._get_volume() == ""
def test_clean_input(full_record):
    """LaTeX-special characters in the title are escaped by the formatter."""
    full_record['resource_type']['subtype'] = 'article'
    full_record['title'] = "Title & escaped chars % $ _ #"
    # Fix: backslashes are doubled so the expected string no longer relies
    # on invalid escape sequences (``\&``, ``\%``, ...), which raise
    # DeprecationWarning/SyntaxWarning on modern Python. The string value
    # is byte-identical to before.
    bibtex = ("""@article{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {Title \\& escaped chars \\% \\$ \\_ \\#},\n"""
              """  journal      = {Bam},\n"""
              """  year         = 2014,\n"""
              """  volume       = 20,\n"""
              """  number       = 2,\n"""
              """  pages        = 20,\n"""
              """  month        = feb,\n"""
              """  note         = {notes},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
def test_format_article(full_record):
    """An 'article' subtype formats as an @article entry."""
    full_record['resource_type']['subtype'] = 'article'
    bibtex = ("""@article{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {Test title},\n"""
              """  journal      = {Bam},\n"""
              """  year         = 2014,\n"""
              """  volume       = 20,\n"""
              """  number       = 2,\n"""
              """  pages        = 20,\n"""
              """  month        = feb,\n"""
              """  note         = {notes},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
def test_format_book(full_record):
    """A 'book' subtype formats as an @book entry."""
    full_record['resource_type']['subtype'] = 'book'
    bibtex = ("""@book{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {Test title},\n"""
              """  publisher    = {Jol},\n"""
              """  year         = 2014,\n"""
              """  volume       = 20,\n"""
              """  address      = {Staszkowka},\n"""
              """  month        = feb,\n"""
              """  note         = {notes},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
def test_format_booklet(full_record):
    """A 'book' without a publication date falls back to @booklet."""
    full_record['resource_type']['subtype'] = 'book'
    # Removing the publication date changes the entry type and citation key.
    del full_record['publication_date']
    bibtex = ("""@booklet{doe_12345,\n"""
              """  title        = {Test title},\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  address      = {Staszkowka},\n"""
              """  note         = {notes},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
def test_format_inbook(full_record):
    """A 'section' subtype formats as a generic @misc entry."""
    full_record['resource_type']['subtype'] = 'section'
    bibtex = ("""@misc{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {Test title},\n"""
              """  month        = feb,\n"""
              """  year         = 2014,\n"""
              """  note         = {notes},\n"""
              """  publisher    = {Jol},\n"""
              """  version      = {1.2.5},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
def test_format_inproceedings(full_record):
    """Conference papers format as @inproceedings; fields have fallbacks."""
    full_record['resource_type']['subtype'] = 'conferencepaper'
    bibtex = ("""@inproceedings{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {Test title},\n"""
              """  booktitle    = {Bum},\n"""
              """  year         = 2014,\n"""
              """  pages        = 20,\n"""
              """  publisher    = {Jol},\n"""
              """  address      = {Staszkowka},\n"""
              """  month        = feb,\n"""
              """  note         = {notes},\n"""
              """  venue        = """
              """{Harvard-Smithsonian Center for Astrophysics},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
    # Without journal metadata, pages are taken from 'part_of'.
    del full_record['journal']
    full_record['part_of']['pages'] = "30"
    bibtex = ("""@inproceedings{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {Test title},\n"""
              """  booktitle    = {Bum},\n"""
              """  year         = 2014,\n"""
              """  pages        = 30,\n"""
              """  publisher    = {Jol},\n"""
              """  address      = {Staszkowka},\n"""
              """  month        = feb,\n"""
              """  note         = {notes},\n"""
              """  venue        = """
              """{Harvard-Smithsonian Center for Astrophysics},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
    # Without an imprint, publisher comes from 'part_of' and the address
    # line is dropped.
    del full_record['imprint']
    full_record['part_of']['publisher'] = "hello"
    bibtex = ("""@inproceedings{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {Test title},\n"""
              """  booktitle    = {Bum},\n"""
              """  year         = 2014,\n"""
              """  pages        = 30,\n"""
              """  publisher    = {hello},\n"""
              """  month        = feb,\n"""
              """  note         = {notes},\n"""
              """  venue        = """
              """{Harvard-Smithsonian Center for Astrophysics},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
    # Without meeting information the venue field is omitted.
    del full_record['meeting']
    bibtex = ("""@inproceedings{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {Test title},\n"""
              """  booktitle    = {Bum},\n"""
              """  year         = 2014,\n"""
              """  pages        = 30,\n"""
              """  publisher    = {hello},\n"""
              """  month        = feb,\n"""
              """  note         = {notes},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
def test_format_proceedings(full_record):
    """A conference paper without 'part_of' formats as @proceedings."""
    full_record['resource_type']['subtype'] = 'conferencepaper'
    del full_record['part_of']
    bibtex = ("""@proceedings{doe_2014_12345,\n"""
              """  title        = {Test title},\n"""
              """  year         = 2014,\n"""
              """  publisher    = {Jol},\n"""
              """  address      = {Staszkowka},\n"""
              """  month        = feb,\n"""
              """  note         = {notes},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
def test_format_manual(full_record):
    """Technical notes format as @manual; exercises author-list growth."""
    full_record['resource_type']['subtype'] = 'technicalnote'
    bibtex = ("""@manual{doe_2014_12345,\n"""
              """  title        = {Test title},\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  address      = {Staszkowka},\n"""
              """  month        = feb,\n"""
              """  year         = 2014,\n"""
              """  note         = {notes},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
    # An extra creator is appended to the author list.
    full_record['creators'].append({'name': 'Bar, Fuu', 'affiliation': 'CERN',
                                    'orcid': '', 'familyname': 'Bar',
                                    'givennames': 'Fuu'})
    bibtex = ("""@manual{doe_2014_12345,\n"""
              """  title        = {Test title},\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack and\n"""
              """                  Bar, Fuu},\n"""
              """  address      = {Staszkowka},\n"""
              """  month        = feb,\n"""
              """  year         = 2014,\n"""
              """  note         = {notes},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
    # Fifty more creators still render, one per continuation line.
    authors = ""
    for i in range(0, 50):
        full_record['creators'].append({
            'name': 'Bar, Fuu{0}'.format(i), 'affiliation': 'CERN',
            'orcid': '', 'familyname': 'Bar',
            'givennames': 'Fuu{0}'.format(i)})
        authors += "                  Bar, Fuu{0}".format(i)
        if i != 49:
            authors += " and\n"
    bibtex = ("@manual{doe_2014_12345,\n"
              "  title        = {Test title},\n"
              "  author       = {Doe, John and\n"
              "                  Doe, Jane and\n"
              "                  Smith, John and\n"
              "                  Nowak, Jack and\n"
              "                  Bar, Fuu and\n" +
              authors + "},\n"
              "  address      = {Staszkowka},\n"
              "  month        = feb,\n"
              "  year         = 2014,\n"
              "  note         = {notes},\n"
              "  doi          = {10.1234/foo.bar},\n"
              "  url          = {https://doi.org/10.1234/foo.bar}\n"
              "}")
    assert bibtex == Bibtex(full_record).format()
    # A single creator collapses onto one author line.
    full_record['creators'] = full_record['creators'][:1]
    bibtex = ("""@manual{doe_2014_12345,\n"""
              """  title        = {Test title},\n"""
              """  author       = {Doe, John},\n"""
              """  address      = {Staszkowka},\n"""
              """  month        = feb,\n"""
              """  year         = 2014,\n"""
              """  note         = {notes},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
def test_format_thesis(full_record):
    """A 'thesis' subtype formats as @phdthesis; address is optional."""
    full_record['resource_type']['subtype'] = 'thesis'
    bibtex = ("""@phdthesis{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {Test title},\n"""
              """  school       = {I guess important},\n"""
              """  year         = 2014,\n"""
              """  address      = {Staszkowka},\n"""
              """  month        = feb,\n"""
              """  note         = {notes},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
    # Without an imprint the address line is omitted.
    del full_record['imprint']
    bibtex = ("""@phdthesis{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {Test title},\n"""
              """  school       = {I guess important},\n"""
              """  year         = 2014,\n"""
              """  month        = feb,\n"""
              """  note         = {notes},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
def test_format_unpublished(full_record):
    """Preprints and working papers format as @unpublished."""
    full_record['resource_type']['subtype'] = 'preprint'
    bibtex = ("""@unpublished{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {Test title},\n"""
              """  note         = {notes},\n"""
              """  month        = feb,\n"""
              """  year         = 2014,\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
    # 'workingpaper' must produce the exact same entry.
    full_record['resource_type']['subtype'] = 'workingpaper'
    assert bibtex == Bibtex(full_record).format()
def test_format_default_type(full_record):
    """Unknown resource types fall back to an @misc entry."""
    full_record['resource_type']['type'] = 'undefined_type'
    bibtex = ("""@misc{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {Test title},\n"""
              """  month        = feb,\n"""
              """  year         = 2014,\n"""
              """  note         = {notes},\n"""
              """  publisher    = {Jol},\n"""
              """  version      = {1.2.5},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
def test_format_publication_default(full_record):
    """An unknown publication subtype falls back to an @misc entry."""
    full_record['resource_type']['subtype'] = 'undefined_subtype'
    full_record['resource_type']['type'] = 'publication'
    bibtex = ("""@misc{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {Test title},\n"""
              """  month        = feb,\n"""
              """  year         = 2014,\n"""
              """  note         = {notes},\n"""
              """  publisher    = {Jol},\n"""
              """  version      = {1.2.5},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
def test_format_long_title(full_record):
    """Long titles are line-wrapped and protected with double braces."""
    full_record['title'] = (
        "This is a very long title with whitespaces at "
        "the end that used to bring us discomfort"
    )
    full_record['resource_type']['subtype'] = 'article'
    bibtex = ("""@article{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {{This is a very long title """
              """with whitespaces at the \n"""
              """                   end that used to """
              """bring us discomfort}},\n"""
              """  journal      = {Bam},\n"""
              """  year         = 2014,\n"""
              """  volume       = 20,\n"""
              """  number       = 2,\n"""
              """  pages        = 20,\n"""
              """  month        = feb,\n"""
              """  note         = {notes},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
def test_format_long_title_with_trailing_whitespace(full_record):
    """Trailing whitespace in a long title is stripped before formatting."""
    # The second literal below is deliberately a run of spaces only.
    full_record['title'] = (
        "This is a very long title with whitespaces"
        "                                                  "
    )
    full_record['resource_type']['subtype'] = 'article'
    bibtex = ("""@article{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {This is a very long title """
              """with whitespaces},\n"""
              """  journal      = {Bam},\n"""
              """  year         = 2014,\n"""
              """  volume       = 20,\n"""
              """  number       = 2,\n"""
              """  pages        = 20,\n"""
              """  month        = feb,\n"""
              """  note         = {notes},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
def test_format_software_type(full_record):
    """The 'software' resource type formats as an @software entry."""
    full_record['resource_type']['type'] = 'software'
    bibtex = ("""@software{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {Test title},\n"""
              """  month        = feb,\n"""
              """  year         = 2014,\n"""
              """  note         = {notes},\n"""
              """  publisher    = {Jol},\n"""
              """  version      = {1.2.5},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
def test_format_dataset_type(full_record):
    """The 'dataset' resource type formats as an @dataset entry."""
    full_record['resource_type']['type'] = 'dataset'
    bibtex = ("""@dataset{doe_2014_12345,\n"""
              """  author       = {Doe, John and\n"""
              """                  Doe, Jane and\n"""
              """                  Smith, John and\n"""
              """                  Nowak, Jack},\n"""
              """  title        = {Test title},\n"""
              """  month        = feb,\n"""
              """  year         = 2014,\n"""
              """  note         = {notes},\n"""
              """  publisher    = {Jol},\n"""
              """  version      = {1.2.5},\n"""
              """  doi          = {10.1234/foo.bar},\n"""
              """  url          = {https://doi.org/10.1234/foo.bar}\n"""
              """}""")
    assert bibtex == Bibtex(full_record).format()
| 28,645 | Python | .py | 600 | 37.19 | 78 | 0.426982 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,378 | test_schemas_dc.py | zenodo_zenodo/tests/unit/records/test_schemas_dc.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016-2018 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo Dublin Core mapping test."""
from __future__ import absolute_import, print_function, unicode_literals
from datetime import datetime, timedelta
from zenodo.modules.records.serializers import dc_v1
def test_minimal(app, db, minimal_record_model, recid_pid):
    """Serialize a minimal record to the full Dublin Core structure."""
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    today = datetime.utcnow().date().isoformat()
    expected = {
        'sources': [],
        'contributors': [],
        'identifiers': ['https://zenodo.org/record/123', '10.5072/zenodo.123'],
        'subjects': [],
        'languages': [''],
        'dates': [today],
        'titles': ['Test'],
        'creators': ['Test'],
        'rights': ['info:eu-repo/semantics/openAccess'],
        'publishers': [],
        'descriptions': ['My description'],
        'types': ['info:eu-repo/semantics/other', 'software'],
        'relations': [],
    }
    assert expected == obj
def test_identifiers(app, db, minimal_record_model, recid_pid):
    """DOI and OAI identifiers end up in dc:identifier."""
    minimal_record_model['doi'] = '10.1234/foo'
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert ['https://zenodo.org/record/123', '10.1234/foo'] == \
        obj['identifiers']
    # The OAI identifier is appended once present on the record.
    minimal_record_model['_oai'] = {'id': 'oai:zenodo.org:123'}
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert 'oai:zenodo.org:123' in obj['identifiers']
def test_creators(app, db, minimal_record_model, recid_pid):
    """A record without creators serializes to an empty list."""
    minimal_record_model['creators'] = []
    result = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert result['creators'] == []
def test_languages(app, db, minimal_record_model, recid_pid):
    """The record language is serialized into dc:language."""
    minimal_record_model['language'] = 'eng'
    result = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert result['languages'] == ['eng']
def test_relations(app, db, minimal_record_model, recid_pid):
    """Grants, alternate and related identifiers map to dc:relation."""
    minimal_record_model['grants'] = [{
        'identifiers': {
            'eurepo': 'info:eu-repo/grantAgreement/EC/FP7/244909'},
    }]
    minimal_record_model['alternate_identifiers'] = [
        {'identifier': '10.1234/foo.bar', 'scheme': 'doi'},
    ]
    minimal_record_model['related_identifiers'] = [
        {'identifier': '1234', 'scheme': 'pmid', 'relation': 'isCited'},
    ]
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    expected = [
        'info:eu-repo/grantAgreement/EC/FP7/244909',
        'info:eu-repo/semantics/altIdentifier/doi/10.1234/foo.bar',
        'pmid:1234',
    ]
    assert expected == obj['relations']
def test_rights(app, db, minimal_record_model, recid_pid):
    """The license URL is included in dc:rights."""
    license_url = 'http://creativecommons.org/licenses/by/4.0/'
    minimal_record_model['license'] = {'url': license_url}
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert license_url in obj['rights']
def test_embargo_date(app, db, minimal_record_model, recid_pid):
    """Embargoed records expose access rights and the embargo end date."""
    tomorrow = (datetime.utcnow().date() + timedelta(days=1)).isoformat()
    minimal_record_model['embargo_date'] = tomorrow
    minimal_record_model['access_right'] = 'embargoed'
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert ['info:eu-repo/semantics/embargoedAccess'] == obj['rights']
    assert 'info:eu-repo/date/embargoEnd/{0}'.format(tomorrow) in obj['dates']
def test_publishers(app, db, minimal_record_model, recid_pid):
    """The imprint publisher takes precedence over the part-of publisher."""
    minimal_record_model['part_of'] = {'publisher': 'Zenodo'}
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert ['Zenodo'] == obj['publishers']
    # Adding an imprint publisher overrides the part-of one.
    minimal_record_model['imprint'] = {'publisher': 'Invenio'}
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert ['Invenio'] == obj['publishers']
def test_contributors(app, db, minimal_record_model, recid_pid):
    """Contributor names serialize as plain strings."""
    minimal_record_model['contributors'] = [{'name': 'Smith, John'}]
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert ['Smith, John'] == obj['contributors']
def test_types(app, db, minimal_record_model, recid_pid):
    """Resource types map to eu-repo semantics plus internal type strings."""
    minimal_record_model['resource_type'] = {
        'type': 'publication', 'subtype': 'conferencepaper'}
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert obj['types'] == ['info:eu-repo/semantics/conferencePaper',
                            'publication-conferencepaper']
    # If the record is not in 'c1', OpenAIRE subtype should not be serialized
    minimal_record_model['resource_type'] = {
        'type': 'software', 'openaire_subtype': 'foo:t1'}
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert obj['types'] == ['info:eu-repo/semantics/other', 'software']
    # Add 'c1' to communities. 'foo:t1' should be serialized as a type
    minimal_record_model['communities'] = ['c1']
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert obj['types'] == ['info:eu-repo/semantics/other', 'software',
                            'openaire:foo:t1']
def test_community_relations(db, minimal_record_model, recid_pid):
    """Every community shows up as a url relation."""
    for communities in (['zenodo'], ['c1', 'c2', 'c3']):
        minimal_record_model['communities'] = communities
        relations = dc_v1.transform_record(
            recid_pid, minimal_record_model)['relations']
        for comm in communities:
            link = 'url:http://localhost/communities/{}'.format(comm)
            assert link in relations
def test_sources(app, db, minimal_record_model, recid_pid):
    """Journal metadata renders into a citation-style source string."""
    minimal_record_model['journal'] = {
        'title': 'CAP',
        'volume': '22',
        'issue': '1',
        'pages': '1-2',
        'year': '2002',
    }
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert ['CAP 22(1) 1-2 (2002)'] == obj['sources']
    # Partial journal metadata still yields a (shorter) source string.
    minimal_record_model['journal'] = {
        'title': 'CAP',
        'issue': '1',
    }
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert ['CAP 1'] == obj['sources']
def test_sources_meetings(app, db, minimal_record_model, recid_pid):
    """Meeting metadata renders into a source string."""
    minimal_record_model['meetings'] = {
        'acronym': 'CAP',
        'title': 'Communicating',
        'place': 'Cape Town',
        'dates': 'March, 2010',
        'session': 'I',
        'session_part': '1',
    }
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert ['CAP, Communicating, Cape Town, March, 2010'] == obj['sources']
def test_description(app, db, minimal_record_model, recid_pid):
    """HTML markup is sanitized out of the description."""
    minimal_record_model['description'] = (
        "<p><b>Foo Bar</b></p><em><p> Foo&Bar</p></em> '"
        "This' is <i><it><i>"
    )
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert ["Foo Bar Foo&Bar 'This' is <it>"] == obj['descriptions']
    # An empty description is dropped entirely.
    minimal_record_model['description'] = ''
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert [] == obj['descriptions']
def test_subjects(app, db, minimal_record_model, recid_pid):
    """Both subject terms and keywords land in dc:subject."""
    minimal_record_model['subjects'] = [{'term': 's1'}, {'term': 's2'}]
    minimal_record_model['keywords'] = ['k1', 'k2']
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert {'s1', 's2', 'k1', 'k2'} == set(obj['subjects'])
def test_subjects_without_terms(app, db, minimal_record_model, recid_pid):
    """Subjects lacking a 'term' key are skipped."""
    minimal_record_model['subjects'] = [{'identifier': 's1'}, {'term': 's2'}]
    minimal_record_model['keywords'] = ['k1', 'k2']
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert {'s2', 'k1', 'k2'} == set(obj['subjects'])
def test_dates(app, db, full_record, minimal_record_model, recid_pid):
    """Record date intervals are serialized after today's date."""
    minimal_record_model['dates'] = full_record['dates']
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    expected = [
        datetime.utcnow().date().isoformat(),
        '2019-01-01/',
        '/2019-01-01',
        '2019-01-01',
        '2019-01-01/2019-02-01',
    ]
    assert expected == obj['dates']
def test_method(app, db, full_record, minimal_record_model, recid_pid):
    """The methodology text appears among the descriptions."""
    minimal_record_model['method'] = full_record['method']
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert 'microscopic supersampling' in obj['descriptions']
def test_locations(app, db, minimal_record_model, recid_pid):
    """Locations are serialized into dc:coverage strings."""
    minimal_record_model['locations'] = [
        {'lat': 2.35, 'lon': 1.534, 'place': 'my place'},
    ]
    obj = dc_v1.transform_record(recid_pid, minimal_record_model)
    assert {'name=my place; east=1.534; north=2.35'} == set(obj['coverage'])
| 10,183 | Python | .py | 237 | 36.780591 | 79 | 0.632387 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,379 | test_records_minters.py | zenodo_zenodo/tests/unit/records/test_records_minters.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Record minters."""
from __future__ import absolute_import, print_function, unicode_literals
from zenodo.modules.records.minters import doi_generator
def test_doi_generator(app):
    """DOIs are built from the configured prefix and the record id."""
    prefix = app.config['PIDSTORE_DATACITE_DOI_PREFIX']
    # Check normal generation.
    assert doi_generator(1234) == '{prefix}/zenodo.1234'.format(prefix=prefix)
    # Check doi id for recid mapping
    assert doi_generator(7468) == '{prefix}/zenodo.7448'.format(prefix=prefix)
| 1,450 | Python | .py | 33 | 42.060606 | 76 | 0.758499 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,380 | test_records_relations_serialization.py | zenodo_zenodo/tests/unit/records/test_records_relations_serialization.py | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Test Zenodo communities API."""
from __future__ import absolute_import, print_function
from helpers import publish_and_expunge
from invenio_pidrelations.contrib.versioning import PIDVersioning
from invenio_pidrelations.serializers.utils import serialize_relations
from invenio_pidstore.models import PersistentIdentifier
from six import BytesIO, b
from zenodo.modules.deposit.api import ZenodoDeposit
from zenodo.modules.deposit.resolvers import deposit_resolver
from zenodo.modules.records.serializers.pidrelations import \
serialize_related_identifiers
def test_relations_serialization(app, db, deposit, deposit_file):
    """Serialize PID relations."""
    def expected_relations(index, is_last, last_child, count,
                           draft_child_deposit=None):
        """Build the expected ``serialize_relations`` output for one child."""
        return {
            "version": [
                {
                    "draft_child_deposit": draft_child_deposit,
                    "index": index,
                    "is_last": is_last,
                    "last_child": {
                        "pid_type": "recid",
                        "pid_value": last_child,
                    },
                    "parent": {
                        "pid_type": "recid",
                        "pid_value": "1",
                    },
                    "count": count,
                }
            ]
        }

    deposit_v1 = publish_and_expunge(db, deposit)
    depid_v1_value = deposit_v1['_deposit']['id']
    depid_v1, deposit_v1 = deposit_resolver.resolve(depid_v1_value)
    recid_v1, record_v1 = deposit_v1.fetch_published()
    # Only one published version, which is therefore the last child.
    assert serialize_relations(recid_v1) == expected_relations(
        0, True, "2", 1)

    deposit_v1.newversion()
    # Should contain "draft_child_deposit" information
    assert serialize_relations(recid_v1) == expected_relations(
        0, True, "2", 1,
        draft_child_deposit={"pid_type": "depid", "pid_value": "3"})

    # Publish the new version
    pv = PIDVersioning(child=recid_v1)
    depid_v2 = pv.draft_child_deposit
    deposit_v2 = ZenodoDeposit.get_record(depid_v2.get_assigned_object())
    deposit_v2.files['file.txt'] = BytesIO(b('file1'))
    deposit_v2 = publish_and_expunge(db, deposit_v2)
    recid_v2, record_v2 = deposit_v2.fetch_published()
    depid_v1, deposit_v1 = deposit_resolver.resolve(depid_v1_value)
    recid_v1, record_v1 = deposit_v1.fetch_published()
    # Should no longer contain "draft_child_deposit" info after publishing
    # and no longer be the last child
    assert serialize_relations(recid_v1) == expected_relations(
        0, False, "3", 2)
    # New version should be the last child now
    assert serialize_relations(recid_v2) == expected_relations(
        1, True, "3", 2)
def test_related_identifiers_serialization(app, db, deposit, deposit_file):
    """Serialize PID Relations to related identifiers."""
    def doi_rel(relation, doi):
        """Shorthand for a DOI related-identifier entry."""
        return {'scheme': 'doi', 'identifier': doi, 'relation': relation}

    deposit_v1 = publish_and_expunge(db, deposit)
    depid_v1_value = deposit_v1['_deposit']['id']
    recid_v1, record_v1 = deposit_v1.fetch_published()
    deposit_v1.newversion()
    pv = PIDVersioning(child=recid_v1)
    depid_v2 = pv.draft_child_deposit
    deposit_v2 = ZenodoDeposit.get_record(depid_v2.get_assigned_object())
    deposit_v2.files['file.txt'] = BytesIO(b('file1'))
    deposit_v2 = publish_and_expunge(db, deposit_v2)
    deposit_v2 = deposit_v2.edit()
    # 1. Request for 'c1' and 'c2' through deposit v2
    deposit_v2 = publish_and_expunge(db, deposit_v2)
    recid_v2, record_v2 = deposit_v2.fetch_published()
    depid_v1, deposit_v1 = deposit_resolver.resolve(depid_v1_value)
    recid_v1, record_v1 = deposit_v1.fetch_published()
    depid_v1, deposit_v1 = deposit_resolver.resolve(depid_v1_value)

    # Each version points only at the concept DOI; serialization of the
    # isPreviousVersionOf/isNewVersionOf relations is currently disabled.
    assert serialize_related_identifiers(recid_v1) == [
        doi_rel('isVersionOf', '10.5072/zenodo.1'),
    ]
    assert serialize_related_identifiers(recid_v2) == [
        doi_rel('isVersionOf', '10.5072/zenodo.1'),
    ]

    # The parent (concept) PID lists every published version.
    parent_pid = PersistentIdentifier.get('recid', '1')
    assert serialize_related_identifiers(parent_pid) == [
        doi_rel('hasVersion', '10.5072/zenodo.2'),
        doi_rel('hasVersion', '10.5072/zenodo.3'),
    ]
| 6,887 | Python | .py | 191 | 26.581152 | 75 | 0.56118 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,381 | test_schemas_openaire_json.py | zenodo_zenodo/tests/unit/records/test_schemas_openaire_json.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2017 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo CSL mapping test."""
from __future__ import absolute_import, print_function, unicode_literals
import pytest
from invenio_records.api import Record
from zenodo.modules.records.serializers import openaire_json_v1
@pytest.fixture()
def minimal_oai_record(minimal_record):
    """Minimal OAI record."""
    minimal_record['_oai'] = {
        'id': 'oai:zenodo.org:{0}'.format(minimal_record['recid']),
    }
    minimal_record['resource_type'] = {
        'type': 'publication',
        'subtype': 'article',
    }
    return minimal_record
@pytest.fixture()
def full_oai_record(full_record):
    """Full OAI record."""
    full_record['_oai'] = {
        'id': 'oai:zenodo.org:{0}'.format(full_record['recid']),
    }
    full_record['resource_type'] = {
        'type': 'publication',
        'subtype': 'article',
    }
    return full_record
def test_minimal(app, db, minimal_oai_record, recid_pid):
    """Test minimal record."""
    expected = {
        'originalId': 'oai:zenodo.org:123',
        'type': 'publication',
        'resourceType': '0001',
        'title': 'Test',
        'licenseCode': 'OPEN',
        'url': 'https://zenodo.org/record/123',
        'authors': ['Test'],
        'description': 'My description',
        'pids': [
            {'type': 'oai', 'value': 'oai:zenodo.org:123'},
            {'type': 'doi', 'value': '10.5072/zenodo.123'}
        ],
        'hostedById': 'opendoar____::2659',
        'collectedFromId': 'opendoar____::2659',
    }
    assert openaire_json_v1.transform_record(
        recid_pid, Record(minimal_oai_record)) == expected
def test_full(app, db, full_oai_record, recid_pid):
    """Test full record."""
    expected = {
        'authors': ['Doe, John', 'Doe, Jane', 'Smith, John', 'Nowak, Jack'],
        'collectedFromId': 'opendoar____::2659',
        'description': 'Test Description',
        'hostedById': 'opendoar____::2659',
        'language': 'eng',
        'licenseCode': 'OPEN',
        'originalId': 'oai:zenodo.org:12345',
        'pids': [{'type': 'oai', 'value': 'oai:zenodo.org:12345'},
                 {'type': 'doi', 'value': '10.1234/foo.bar'}],
        'publisher': 'Jol',
        'resourceType': '0001',
        'title': 'Test title',
        'type': 'publication',
        'url': 'https://zenodo.org/record/12345',
        'contexts': ['https://zenodo.org/communities/zenodo'],
        'version': '1.2.5'
    }
    assert openaire_json_v1.transform_record(
        recid_pid, Record(full_oai_record)) == expected
def test_resource_types(app, db, minimal_oai_record, recid_pid):
    """Test resource types."""
    minimal_oai_record['doi'] = '10.1234/foo'

    def transform():
        return openaire_json_v1.transform_record(
            recid_pid, Record(minimal_oai_record))

    minimal_oai_record['resource_type'] = {'type': 'dataset'}
    obj = transform()
    # Datasets use the DOI as their original id.
    assert obj['originalId'] == '10.1234/foo'
    assert obj['collectedFromId'] == 'opendoar____::2659'
    assert obj['hostedById'] == 'opendoar____::2659'
    assert obj['resourceType'] == '0021'
    assert obj['type'] == 'dataset'

    minimal_oai_record['resource_type'] = {'type': 'poster'}
    obj = transform()
    # Posters map onto the publication type with the OAI id.
    assert obj['originalId'] == 'oai:zenodo.org:123'
    assert obj['collectedFromId'] == 'opendoar____::2659'
    assert obj['hostedById'] == 'opendoar____::2659'
    assert obj['resourceType'] == '0004'
    assert obj['type'] == 'publication'
def test_grants(app, db, minimal_oai_record, recid_pid):
    """Test grants."""
    grant = {
        'acronym': 'WorkAble',
        'title': 'Making Capabilities/Design Work',
        'identifiers': {
            'eurepo': 'info:eu-repo/grantAgreement/EC/FP7/244909/'
        },
    }
    minimal_oai_record['grants'] = [grant]
    minimal_oai_record['resource_type'] = {'type': 'dataset'}
    obj = openaire_json_v1.transform_record(
        recid_pid, Record(minimal_oai_record))
    # Slashes inside the grant title are percent-encoded.
    assert obj['linksToProjects'] == [
        'info:eu-repo/grantAgreement/EC/FP7/244909/'
        '/Making Capabilities%2FDesign Work/WorkAble'
    ]

    # Non-ASCII titles pass through unchanged.
    grant['title'] = u'Üniçoδé Grànt Title'
    obj = openaire_json_v1.transform_record(
        recid_pid, Record(minimal_oai_record))
    assert obj['linksToProjects'] == [
        'info:eu-repo/grantAgreement/EC/FP7/244909/'
        u'/Üniçoδé Grànt Title/WorkAble'
    ]
def test_pids(app, db, minimal_oai_record, recid_pid):
    """Test PIDs."""
    def pids():
        return openaire_json_v1.transform_record(
            recid_pid, Record(minimal_oai_record))['pids']

    # Without an explicit DOI the recid-derived Zenodo DOI is used.
    assert pids() == [
        {'value': 'oai:zenodo.org:123', 'type': 'oai'},
        {'type': 'doi', 'value': '10.5072/zenodo.123'},
    ]

    minimal_oai_record['doi'] = '10.1234/foo'
    assert pids() == [
        {'value': 'oai:zenodo.org:123', 'type': 'oai'},
        {'value': '10.1234/foo', 'type': 'doi'},
    ]
def test_publisher(app, db, minimal_oai_record, recid_pid):
    """Test publisher."""
    # Updates accumulate on the record; each later key takes precedence
    # in the serializer, matching the original assertion order.
    cases = [
        ({'doi': '10.5281/12345'}, 'Zenodo'),
        ({'part_of': {'publisher': 'The Good Publisher'}},
         'The Good Publisher'),
        ({'imprint': {'publisher': 'The Bad Publisher'}},
         'The Bad Publisher'),
    ]
    for update, expected in cases:
        minimal_oai_record.update(update)
        obj = openaire_json_v1.transform_record(
            recid_pid, Record(minimal_oai_record))
        assert obj['publisher'] == expected
def test_license_code(app, db, minimal_oai_record, recid_pid):
    """Test license code."""
    def transform():
        return openaire_json_v1.transform_record(
            recid_pid, Record(minimal_oai_record))

    assert transform()['licenseCode'] == 'OPEN'

    minimal_oai_record['access_right'] = 'restricted'
    assert transform()['licenseCode'] == 'RESTRICTED'

    # Embargoed records also expose the embargo end date.
    minimal_oai_record['access_right'] = 'embargoed'
    minimal_oai_record['embargo_date'] = '2017-04-22'
    obj = transform()
    assert obj['licenseCode'] == 'EMBARGO'
    assert obj['embargoEndDate'] == '2017-04-22'
| 7,417 | Python | .py | 179 | 35.385475 | 76 | 0.634292 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,382 | test_schemas_legacyjson_load.py | zenodo_zenodo/tests/unit/records/test_schemas_legacyjson_load.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Unit tests Zenodo legacy JSON deserializer."""
from __future__ import absolute_import, print_function
from datetime import datetime, timedelta
import pytest
from flask import Flask
from marshmallow.exceptions import ValidationError
from zenodo.modules.deposit.api import ZenodoDeposit
from zenodo.modules.records.serializers.schemas import legacyjson
def d(**kwargs):
    """Return default deposit metadata, overridable via keyword arguments."""
    data = {
        'publication_date': datetime.utcnow().date().isoformat(),
        'title': 'Title',
        'description': 'Description',
        'upload_type': 'software',
    }
    data.update(kwargs)
    return data
@pytest.mark.parametrize('val, expected', [
    (' Test ', 'Test'),
    ('TEST', 'TEST'),
])
def test_title(val, expected):
    """Test that titles are whitespace-stripped on load."""
    result = legacyjson.LegacyMetadataSchemaV1(strict=True).load(d(title=val))
    assert result.data['title'] == expected
@pytest.mark.parametrize('val', [
    ' ',
    ' 12 ',
    None
])
def test_title_invalid(val):
    """Test invalid titles."""
    with pytest.raises(ValidationError):
        legacyjson.LegacyMetadataSchemaV1(strict=True).load(d(title=val))
def test_upload_type(app, communities):
    """Test upload type deserialization."""
    schema = legacyjson.LegacyMetadataSchemaV1(
        partial=['upload_type', 'publication_type', 'image_type',
                 'communities'],
        strict=True
    )

    def resource_type(**kwargs):
        return schema.load(d(**kwargs)).data['resource_type']

    assert resource_type(
        upload_type='publication',
        publication_type='book',
    ) == {'subtype': 'book', 'type': 'publication'}
    # A subtype key belonging to another type is simply ignored.
    assert resource_type(
        upload_type='image',
        publication_type='book',
        image_type='photo',
    ) == {'subtype': 'photo', 'type': 'image'}
    assert resource_type(
        upload_type='software',
        openaire_type='foo:t1',
        communities=[{'identifier': 'c1'}],
    ) == {'type': 'software', 'openaire_subtype': 'foo:t1'}
def test_upload_type_invalid(app, communities):
    """Test upload type deserialization failures."""
    schema = legacyjson.LegacyMetadataSchemaV1(strict=True)

    def assert_invalid(payload):
        with pytest.raises(ValidationError):
            schema.load(payload)

    obj = d()
    # Missing value
    obj.pop('upload_type')
    assert_invalid(obj)
    # Invalid value
    obj.update(dict(upload_type='invalid'))
    assert_invalid(obj)
    # Missing subtype
    obj.update(dict(upload_type='publication'))
    assert_invalid(obj)
    # Invalid subtype
    obj.update(dict(upload_type='image', image_type='invalid'))
    assert_invalid(obj)
    # Subtype provided for type without possibility of subtype is dropped.
    obj.update(dict(upload_type='dataset', image_type='figure'))
    assert schema.load(obj).data['resource_type'] == {'type': 'dataset'}
    obj.update(dict(upload_type='image', image_type='invalid'))
    assert_invalid(obj)
    # OpenAIRE subtype and community mismatch
    obj.update(dict(upload_type='software', openaire_type='foo:t1',
                    communities=[{'identifier': 'foobar'}]))
    assert_invalid(obj)
    # OpenAIRE subtype invalid format (no prefix)
    obj.update(dict(upload_type='software', openaire_type='invalid',
                    communities=[{'identifier': 'c1'}]))
    assert_invalid(obj)
    # OpenAIRE subtype not found (wrong prefix)
    obj.update(dict(upload_type='software', openaire_type='xxx:t1',
                    communities=[{'identifier': 'c1'}]))
    assert_invalid(obj)
    # OpenAIRE subtype not found (good prefix, wrong type)
    obj.update(dict(upload_type='software', openaire_type='foo:invalid',
                    communities=[{'identifier': 'c1'}]))
    assert_invalid(obj)
def test_related_alternate_identifiers():
    """Test related vs. alternate identifier splitting."""
    loaded = legacyjson.LegacyMetadataSchemaV1(strict=True).load(
        d(related_identifiers=[
            dict(identifier='10.1234/foo.bar2', relation='isCitedBy'),
            dict(identifier='10.1234/foo.bar3', relation='cites',
                 scheme='doi'),
            # Both relation spellings are exercised on purpose.
            dict(identifier='2011ApJS..192...18K',
                 relation='isAlternateIdentifier'),
            dict(identifier='2011ApJS..192...18K',
                 relation='isAlternativeIdentifier',
                 scheme='ads'),
        ]))
    # The DOI scheme is auto-detected when not given.
    assert loaded.data['related_identifiers'] == [
        dict(identifier='10.1234/foo.bar2', relation='isCitedBy',
             scheme='doi'),
        dict(identifier='10.1234/foo.bar3', relation='cites', scheme='doi'),
    ]
    # "Alternate" relations are split into alternate_identifiers.
    assert loaded.data['alternate_identifiers'] == [
        dict(identifier='2011ApJS..192...18K', scheme='ads'),
        dict(identifier='2011ApJS..192...18K', scheme='ads'),
    ]
def test_identifier_schemes(app, db, es, locations, license_record,
                            sample_identifiers):
    """Test supported identifier schemes."""
    related = [
        {'identifier': value, 'scheme': scheme, 'relation': 'references'}
        for scheme, (value, _) in sample_identifiers.items()
    ]
    data = legacyjson.LegacyMetadataSchemaV1(strict=True).load(
        d(related_identifiers=related)).data
    # The loaded metadata must pass deposit JSONSchema validation.
    ZenodoDeposit.create(data).validate()
@pytest.mark.parametrize('relation', [
    'IsCitedBy',
    'invalid',
    None
])
def test_related_identifiers_invalid_relations(relation):
    """Test invalid related-identifier relations."""
    with pytest.raises(ValidationError):
        legacyjson.LegacyMetadataSchemaV1(strict=True).load(
            d(related_identifiers=[
                dict(identifier='10.1234/foo.bar2', relation=relation),
            ]))
def test_related_identifiers_invalid():
    """Test invalid related-identifier payloads."""
    schema = legacyjson.LegacyMetadataSchemaV1(strict=True)
    invalid_payloads = [
        # Missing relation
        dict(identifier='10.1234/foo.bar2'),
        # Invalid scheme
        dict(identifier='10.1234/foo.bar2', scheme='isbn'),
        # Invalid scheme
        dict(identifier='10.1234/foo.bar2', scheme='invalid'),
        # Missing identifier
        dict(scheme='doi', relation='isCitedBy'),
    ]
    for payload in invalid_payloads:
        with pytest.raises(ValidationError):
            schema.load(d(related_identifiers=[payload]))
@pytest.mark.parametrize('input, output, scheme', [
    ('https://doi.org/10.1234/foo.bar2', '10.1234/foo.bar2', None),
])
def test_related_identifiers_normalization(input, output, scheme):
    """Test identifier normalization (e.g. DOI URL to bare DOI)."""
    loaded = legacyjson.LegacyMetadataSchemaV1(strict=True).load(
        d(related_identifiers=[
            dict(identifier=input, relation='isCitedBy', scheme=scheme)]))
    assert loaded.data['related_identifiers'][0]['identifier'] == output
def test_creators():
    """Test creators."""
    schema = legacyjson.LegacyMetadataSchemaV1(strict=True)
    # Full creator entries round-trip unchanged.
    assert schema.load(d(creators=[
        dict(name="Doe, John", affiliation="Atlantis",
             orcid="0000-0002-1825-0097", gnd="170118215"),
        dict(name="Smith, Jane", affiliation="Atlantis")
    ])).data['creators'] == [
        dict(name="Doe, John", affiliation="Atlantis",
             orcid="0000-0002-1825-0097", gnd="170118215"),
        dict(name="Smith, Jane", affiliation="Atlantis")
    ]
    # Blank affiliations are dropped entirely.
    assert schema.load(d(creators=[
        dict(name="Doe, John", affiliation=" "),
        dict(name="Smith, Jane", affiliation="")
    ])).data['creators'] == [
        dict(name="Doe, John"),
        dict(name="Smith, Jane")
    ]
    # Min length required
    with pytest.raises(ValidationError):
        legacyjson.LegacyMetadataSchemaV1(strict=True).load(d(creators=[]))
@pytest.mark.parametrize('creator', [
    dict(name="Doe, John", orcid="invalid"),
    dict(name="", affiliation="Atlantis"),
    dict(name="Doe, John", gnd="invalid"),
])
def test_creators_invalid(creator):
    """Test invalid creators."""
    with pytest.raises(ValidationError):
        legacyjson.LegacyMetadataSchemaV1(strict=True).load(
            d(creators=[creator]))
@pytest.mark.parametrize('date', [
    '2013-05-08',
    '1855-05-08',
    '0001-01-01',
])
def test_publication_date(date):
    """Test that valid ISO publication dates round-trip unchanged."""
    loaded = legacyjson.LegacyMetadataSchemaV1(strict=True).load(
        d(publication_date=date))
    assert loaded.data['publication_date'] == date
@pytest.mark.parametrize('date', [
    '2013-02-32',
    'invalid',
    None,
    {'a dict': ''}
])
def test_publication_date_invalid(date):
    """Test invalid publication dates."""
    with pytest.raises(ValidationError):
        legacyjson.LegacyMetadataSchemaV1(strict=True).load(
            d(publication_date=date))
@pytest.mark.parametrize('desc, expected', [
    ('My description', None),
    ('<b>HTML test</b>', None),
    ('<a href="http://localhost.dk" style="background: black;">HTML test</b>',
     '<a href="http://localhost.dk">HTML test</a>'),
    (' My description ', 'My description'),
    (' <a href="javascript:evil_function()">a link</a> ', '<a>a link</a>'),
    ('<p onclick="evil_function()">a paragraph</p>', '<p>a paragraph</p>'),
])
def test_description(desc, expected):
    """Test descriptions.

    Note, we only do a limited sanitization test because we use the bleach
    library to sanitize and it already has extensive tests.
    """
    loaded = legacyjson.LegacyMetadataSchemaV1(strict=True).load(
        d(description=desc))
    # "expected is None" means the input passes through unchanged.
    assert loaded.data['description'] == (expected or desc)
@pytest.mark.parametrize('desc', [
    ' ',
    '12',
    ' <script></script> ',
])
def test_description_invalid(desc):
    """Test invalid description."""
    with pytest.raises(ValidationError):
        legacyjson.LegacyMetadataSchemaV1(strict=True).load(
            d(description=desc))
@pytest.mark.parametrize('desc, expected', [
    ('My notes', 'My notes'),
    (' My notes ', 'My notes'),
])
def test_notes(desc, expected):
    """Test that notes are whitespace-stripped on load."""
    loaded = legacyjson.LegacyMetadataSchemaV1(strict=True).load(d(notes=desc))
    assert loaded.data['notes'] == expected
@pytest.mark.parametrize('desc', [
    None,
    124,
])
def test_notes_invalid(desc):
    """Test invalid notes."""
    with pytest.raises(ValidationError):
        legacyjson.LegacyMetadataSchemaV1(strict=True).load(d(notes=desc))
def test_keywords():
    """Test that keywords are stripped and blank entries removed."""
    loaded = legacyjson.LegacyMetadataSchemaV1(strict=True).load(
        d(keywords=['kw1', ' kw2 ', ' ']))
    assert loaded.data['keywords'] == ['kw1', 'kw2']
@pytest.mark.parametrize('keywords', [
    [None],
    [124, ],
    [{'mykw': ''}],
    {'adict': 'instead of list'},
])
def test_keywords_invalid(keywords):
    """Test invalid keywords."""
    with pytest.raises(ValidationError):
        legacyjson.LegacyMetadataSchemaV1(strict=True).load(
            d(keywords=keywords))
@pytest.mark.parametrize('val', [
    123,
    'OPEN',
    'invalid',
    ' open ',
])
def test_access_rights_invalid(val):
    """Test invalid access rights."""
    with pytest.raises(ValidationError):
        legacyjson.LegacyMetadataSchemaV1(strict=True).load(
            d(access_right=val))
@pytest.mark.parametrize('val, removedkeys', [
    ('open', ['embargo_date', 'access_conditions']),
    ('embargoed', ['access_conditions']),
    ('restricted', ['license', 'embargo_date', ]),
    ('closed', ['license', 'embargo_date', 'access_conditions']),
])
def test_access_rights(val, removedkeys):
    """Test access rights."""
    extras = dict(
        license='cc-by',
        embargo_date=(
            datetime.utcnow() + timedelta(days=2)).date().isoformat(),
        access_conditions='TEST'
    )
    result = legacyjson.LegacyMetadataSchemaV1(strict=True).load(
        d(access_right=val, **extras))
    assert result.data['access_right'] == val
    # Keys irrelevant for this access right must be dropped...
    for key in removedkeys:
        assert key not in result.data
    # ...while the relevant ones are kept.
    for key in set(extras) - set(removedkeys):
        assert key in result.data
@pytest.mark.parametrize('dt', [
    '2013-05-08',
    '2100-01-00',
])
def test_embargo_date_invalid(dt):
    """Test invalid embargo dates (past or malformed)."""
    with pytest.raises(ValidationError):
        legacyjson.LegacyMetadataSchemaV1(strict=True).load(
            d(access_right='embargoed', embargo_date=dt))
@pytest.mark.parametrize('desc, expected', [
    ('My description', None),
    ('<b>HTML test</b>', None),
    ('<a href="http://localhost.dk" style="background: black;">HTML test</b>',
     '<a href="http://localhost.dk">HTML test</a>'),
    (' My description ', 'My description'),
    (' <a href="javascript:evil_function()">a link</a> ', '<a>a link</a>'),
    ('<p onclick="evil_function()">a paragraph</p>', '<p>a paragraph</p>'),
])
def test_acess_conditions(desc, expected):
    """Test access conditions."""
    loaded = legacyjson.LegacyMetadataSchemaV1(strict=True).load(
        d(access_right='restricted', access_conditions=desc))
    # "expected is None" means the input passes through unchanged.
    assert loaded.data['access_conditions'] == (expected or desc)
@pytest.mark.parametrize('input_val', [
    '10.1234/foo.bar',
    'http://dx.doi.org/10.1234/foo.bar',
    'https://doi.org/10.1234/foo.bar',
    ' doi:10.1234/foo.bar ',
    ' 10.1234/foo.bar ',
])
def test_valid_doi(input_val):
    """Test that DOI variants are normalized to the bare DOI."""
    result = legacyjson.LegacyMetadataSchemaV1(
        partial=['doi'], strict=True).load(d(doi=input_val))
    assert result.data['doi'] == '10.1234/foo.bar'
def test_subjects():
    """Test subjects."""
    schema = legacyjson.LegacyMetadataSchemaV1(strict=True)
    # An explicit scheme is kept as-is.
    assert schema.load(d(subjects=[{
        "term": "Astronomy",
        "identifier": "http://id.loc.gov/authorities/subjects/sh85009003",
        "scheme": "url"
    }])).data['subjects'] == [{
        "term": "Astronomy",
        "identifier": "http://id.loc.gov/authorities/subjects/sh85009003",
        "scheme": "url"
    }]
    # A missing scheme is auto-detected from the identifier.
    assert schema.load(d(subjects=[{
        "term": "Astronomy",
        "identifier": "http://id.loc.gov/authorities/subjects/sh85009003",
    }])).data['subjects'] == [{
        "term": "Astronomy",
        "identifier": "http://id.loc.gov/authorities/subjects/sh85009003",
        "scheme": "url"
    }]
def test_contributors():
    """Test contributors."""
    # The schema reads the contributor-type vocabulary from app config.
    app = Flask(__name__)
    app.config.update(dict(DEPOSIT_CONTRIBUTOR_DATACITE2MARC=dict(
        Other='...',
        DataCurator='...',
    )))
    schema = legacyjson.LegacyMetadataSchemaV1(strict=True)
    with app.app_context():
        loaded = schema.load(d(contributors=[
            dict(name="Doe, John", affiliation="Atlantis",
                 orcid="0000-0002-1825-0097", gnd="170118215",
                 type="DataCurator"),
            dict(name="Smith, Jane", affiliation="Atlantis", type="Other")
        ]))
        assert loaded.data['contributors'] == [
            dict(name="Doe, John", affiliation="Atlantis",
                 orcid="0000-0002-1825-0097", gnd="170118215",
                 type="DataCurator"),
            dict(name="Smith, Jane", affiliation="Atlantis", type="Other")
        ]
@pytest.mark.parametrize('contributor', [
    dict(name="Doe, John", orcid="invalid", type='Other'),
    dict(name="", affiliation="Atlantis", type='Other'),
    dict(name="Doe, John", gnd="invalid", type='Other'),
    dict(name="Doe, John"),
])
def test_contributors_invalid(contributor):
    """Test invalid contributors."""
    app = Flask(__name__)
    app.config.update(dict(DEPOSIT_CONTRIBUTOR_DATACITE2MARC=dict(
        Other='...',
        DataCurator='...',
    )))
    with app.app_context():
        with pytest.raises(ValidationError):
            legacyjson.LegacyMetadataSchemaV1(strict=True).load(
                d(contributors=[contributor]))
def test_references():
    """Test that references are wrapped and blank entries dropped."""
    loaded = legacyjson.LegacyMetadataSchemaV1(strict=True).load(
        d(references=[
            "Reference 1",
            " ",
            "Reference 2",
        ]))
    assert loaded.data['references'] == [
        dict(raw_reference="Reference 1"),
        dict(raw_reference="Reference 2"),
    ]
def test_thesis():
    """Test thesis supervisors and university."""
    loaded = legacyjson.LegacyMetadataSchemaV1(strict=True).load(d(**{
        'thesis_supervisors': [
            dict(name="Doe, John", affiliation="Atlantis",
                 orcid="0000-0002-1825-0097", gnd="170118215"),
            dict(name="Smith, Jane", affiliation="Atlantis")
        ],
        'thesis_university': 'Important'
    }))
    # The flat legacy keys are folded into a single "thesis" object.
    assert loaded.data['thesis'] == {
        'supervisors': [
            dict(name="Doe, John", affiliation="Atlantis",
                 orcid="0000-0002-1825-0097", gnd="170118215"),
            dict(name="Smith, Jane", affiliation="Atlantis")
        ],
        'university': 'Important',
    }
def test_journal():
    """Test that journal_* keys are folded into a "journal" object."""
    loaded = legacyjson.LegacyMetadataSchemaV1(strict=True).load(d(
        journal_issue="Some issue",
        journal_pages="Some pages",
        journal_title="Some journal name",
        journal_volume="Some volume",
    ))
    assert loaded.data['journal'] == dict(
        issue="Some issue",
        pages="Some pages",
        title="Some journal name",
        volume="Some volume",
    )
def test_meetings():
    """Test that conference_* keys are folded into a "meeting" object."""
    loaded = legacyjson.LegacyMetadataSchemaV1(strict=True).load(d(
        conference_acronym='Some acronym',
        conference_dates='Some dates',
        conference_place='Some place',
        conference_title='Some title',
        conference_url='http://someurl.com',
        conference_session='VI',
        conference_session_part='1',
    ))
    assert loaded.data['meeting'] == dict(
        acronym='Some acronym',
        dates='Some dates',
        place='Some place',
        title='Some title',
        url='http://someurl.com',
        session='VI',
        session_part='1',
    )
def test_imprint():
    """Test that imprint_* keys are folded into an "imprint" object."""
    schema = legacyjson.LegacyMetadataSchemaV1(strict=True)
    assert schema.load(d(
        imprint_isbn="Some isbn",
        imprint_place="Some place",
        imprint_publisher="Some publisher",
    )).data['imprint'] == dict(
        isbn="Some isbn",
        place="Some place",
        publisher="Some publisher",
    )
    # Also works without an ISBN, alongside a publication date.
    assert schema.load(d(
        publication_date="2016-01-01",
        imprint_place="Some place",
        imprint_publisher="Some publisher",
    )).data['imprint'] == dict(
        place="Some place",
        publisher="Some publisher"
    )
def test_partof():
    """Test "part of" vs. imprint folding."""
    schema = legacyjson.LegacyMetadataSchemaV1(strict=True)
    assert schema.load(d(
        partof_pages="Some pages",
        partof_title="Some title",
    )).data['part_of'] == dict(
        pages="Some pages",
        title="Some title",
    )
    # partof_* and imprint_* keys end up in separate objects.
    result = schema.load(d(
        partof_pages="Some pages",
        partof_title="Some title",
        publication_date="2016-01-01",
        imprint_place="Some place",
        imprint_publisher="Some publisher",
        imprint_isbn="Some isbn",
    ))
    assert result.data['part_of'] == dict(
        pages="Some pages",
        title="Some title",
    )
    assert result.data['imprint'] == dict(
        place="Some place",
        publisher="Some publisher",
        isbn="Some isbn",
    )
def test_prereserve_doi():
    """Test that the prereserve_doi flag is dropped on load."""
    loaded = legacyjson.LegacyMetadataSchemaV1(strict=True).load(
        d(prereserve_doi=True))
    assert 'prereserve_doi' not in loaded.data
def test_license():
    """Test license."""
    loaded = legacyjson.LegacyMetadataSchemaV1(strict=True).load(
        d(access_right='open', license="cc-zero"))
    # The license identifier is stored as a JSON reference.
    assert loaded.data['license'] == {
        '$ref': 'https://dx.zenodo.org/licenses/cc-zero'
    }
def test_license_refresolver(app, db, license_record):
    """Test license with reference resolution."""
    schema = legacyjson.LegacyMetadataSchemaV1(
        strict=True, context=dict(replace_refs=True))
    assert schema.load(d(
        access_right='open', license='CC0-1.0'
    )).data['license'] == {
        '$ref': 'https://dx.zenodo.org/licenses/CC0-1.0'
    }
    # With ref resolution an unknown license identifier is rejected.
    with pytest.raises(ValidationError):
        schema.load(d(access_right='open', license='invalid'))
    # Without ref resolving, any identifier is accepted verbatim.
    schema = legacyjson.LegacyMetadataSchemaV1(strict=True)
    assert schema.load(d(
        access_right='open', license='invalid'
    )).data['license'] == {
        '$ref': 'https://dx.zenodo.org/licenses/invalid'
    }
def test_grants(grant_records):
    """Test grants."""
    loaded_grants = legacyjson.LegacyMetadataSchemaV1(strict=True).load(d(
        grants=[dict(id='282896'), dict(id='10.13039/501100000780::027819')],
    )).data['grants']
    # Bare ids are expanded to the fully-qualified EC funder form.
    for ref in (
        'https://dx.zenodo.org/grants/10.13039/501100000780::282896',
        'https://dx.zenodo.org/grants/10.13039/501100000780::027819',
    ):
        assert {'$ref': ref} in loaded_grants
def test_grants_refresolver(app, db, grant_records, license_record):
    """Test grants with reference resolution."""
    schema = legacyjson.LegacyMetadataSchemaV1(
        strict=True, context=dict(replace_refs=True))
    # The short and fully-qualified forms of the same grant collapse
    # into a single reference.
    assert schema.load(d(
        grants=[dict(id='282896'), dict(id='10.13039/501100000780::282896')],
        license='CC0-1.0'
    )).data['grants'] == [{
        '$ref': 'https://dx.zenodo.org/grants/10.13039/501100000780::282896'
    }]
    # Invalid grant raises
    with pytest.raises(ValidationError):
        schema.load(d(grants=[dict(id='invalid')], license='CC0-1.0'))
    # Without ref resolving
    schema = legacyjson.LegacyMetadataSchemaV1(strict=True)
    with pytest.raises(ValidationError):
        schema.load(d(grants=[dict(id='invalid')], license='CC0-1.0'))
def test_communities(communities):
    """Test communities."""
    loaded = legacyjson.LegacyMetadataSchemaV1(strict=True).load(d(
        communities=[
            dict(identifier='zenodo'), dict(identifier='ecfunded'),
        ],
    ))
    # Community dicts are reduced to a sorted list of identifiers.
    assert loaded.data['communities'] == ['ecfunded', 'zenodo']
@pytest.mark.parametrize('comms', [
    1234,
    [1234],
    'zenodo',
    {'dict': 'test'},
    ['zenodo'],
    [{'dict': 'test'}, {}],
])
def test_communities_invalid(comms):
    """Test invalid communities payloads."""
    with pytest.raises(ValidationError):
        legacyjson.LegacyMetadataSchemaV1(strict=True).load(
            d(communities=comms))
def test_legacyjson_to_record_translation(app, db, es, grant_records,
                                          license_record, locations,
                                          communities):
    """Test the translator legacy_zenodo and zenodo_legacy."""
    # Legacy-API payload exercising (nearly) every supported metadata field.
    test_data = dict(
        metadata=dict(
            access_right='embargoed',
            communities=[{'identifier': 'c1'}],
            conference_acronym='Some acronym',
            conference_dates='Some dates',
            conference_place='Some place',
            conference_title='Some title',
            conference_url='http://someurl.com',
            conference_session='VI',
            conference_session_part='1',
            creators=[
                dict(name="Doe, John", affiliation="Atlantis",
                     orcid="0000-0002-1825-0097", gnd="170118215"),
                dict(name="Smith, Jane", affiliation="Atlantis")
            ],
            description="Some description",
            doi="10.1234/foo.bar",
            # Embargo date set two days in the future.
            embargo_date=(
                datetime.utcnow().date() + timedelta(days=2)).isoformat(),
            grants=[dict(id="282896"), ],
            imprint_isbn="Some isbn",
            imprint_place="Some place",
            imprint_publisher="Some publisher",
            journal_issue="Some issue",
            journal_pages="Some pages",
            journal_title="Some journal name",
            journal_volume="Some volume",
            keywords=["Keyword 1", "keyword 2"],
            subjects=[
                dict(scheme="gnd", identifier="1234567899", term="Astronaut"),
                dict(scheme="gnd", identifier="1234567898", term="Amish"),
            ],
            license="CC0-1.0",
            notes="Some notes",
            partof_pages="SOme part of",
            partof_title="Some part of title",
            prereserve_doi=True,
            publication_date="2013-09-12",
            publication_type="book",
            references=[
                "Reference 1",
                "Reference 2",
            ],
            related_identifiers=[
                dict(identifier='10.1234/foo.bar2', relation='isCitedBy'),
                dict(identifier='10.1234/foo.bar3', relation='cites'),
                dict(
                    identifier='2011ApJS..192...18K',
                    relation='isAlternativeIdentifier'),
            ],
            thesis_supervisors=[
                dict(name="Doe Sr., John", affiliation="Atlantis"),
                dict(name="Smith Sr., Jane", affiliation="Atlantis",
                     orcid="http://orcid.org/0000-0002-1825-0097",
                     gnd="http://d-nb.info/gnd/170118215")
            ],
            thesis_university="Some thesis_university",
            contributors=[
                dict(name="Doe Sr., Jochen", affiliation="Atlantis",
                     type="Other"),
                dict(name="Smith Sr., Marco", affiliation="Atlantis",
                     orcid="http://orcid.org/0000-0002-1825-0097",
                     gnd="http://d-nb.info/gnd/170118215",
                     type="DataCurator")
            ],
            title="Test title",
            upload_type="publication",
        )
    )
    # Loading through the legacy schema and validating the resulting deposit
    # must not raise.
    ZenodoDeposit.create(
        legacyjson.LegacyRecordSchemaV1(strict=True).load(test_data).data
    ).validate()
# Control/invisible characters used as parametrized inputs for the
# invalid-unicode stripping test below.
invalid_unicode_chars_params = (
    # Zero-width space
    u'\u200b',
    # Line Tabulation
    u'\u000b',
    # Escape
    u'\u001b',
    # Cancel
    u'\u0018',
)
@pytest.mark.parametrize('unicode_char', invalid_unicode_chars_params)
def test_invalid_unicode_characters(app, db, es, grant_records, license_record,
                                    locations, unicode_char):
    """Test that invalid unicode characters are stripped on load."""
    description = u'Invalid character: [{}]'.format(unicode_char)
    loaded = legacyjson.LegacyMetadataSchemaV1(strict=True).load(
        d(description=description)).data
    # The offending character is removed; the surrounding text is intact.
    assert loaded['description'] == u'Invalid character: []'
| 27,303 | Python | .py | 753 | 29.075697 | 79 | 0.616938 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,383 | test_grant_linking.py | zenodo_zenodo/tests/unit/records/test_grant_linking.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Grant linking tests."""
from __future__ import absolute_import, print_function
from flask import current_app
from invenio_records.api import Record
def test_grant_linking(app, db, minimal_record, grant_records):
    """Test that a grant $ref is resolved into full grant metadata."""
    grant_ref = {
        '$ref': 'http://dx.zenodo.org/grants/10.13039/501100000780::282896'}
    minimal_record['grants'] = [grant_ref]
    created = Record.create(minimal_record)
    record = current_app.extensions['invenio-records'].replace_refs(created)
    # The resolved grant carries its funder information.
    assert record['grants'][0]['funder']['name'] == 'European Commission'
    record.validate()
| 1,533 | Python | .py | 35 | 41.628571 | 77 | 0.748828 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,384 | test_schemas_json.py | zenodo_zenodo/tests/unit/records/test_schemas_json.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo JSON schema tests."""
from invenio_indexer.api import RecordIndexer
from invenio_pidstore.models import PersistentIdentifier, PIDStatus
from invenio_records.api import Record
from mock import patch
from zenodo.modules.records.serializers import json_v1
def test_json_v1(app, db, minimal_record, recid_pid):
    """Test that json_v1 serialization embeds record statistics."""
    fake_stats = dict(
        version_views=312, views=213,
        version_downloads=54, downloads=28,
        version_volume=213000, volume=2000,
        version_unique_views=280, unique_views=27,
        version_unique_downloads=280, unique_download=27,
    )
    target = ('zenodo.modules.records.serializers.schemas.json'
              '.get_record_stats')
    with patch(target, return_value=fake_stats) as stats_mock:
        record = Record.create(minimal_record)
        db.session.commit()
        serialized = json_v1.transform_record(recid_pid, record)
    # The serializer fetched the stats and embedded them verbatim.
    assert stats_mock.called
    assert serialized['stats'] == fake_stats
def test_record_stats_serialization(app_client, db, minimal_record):
    """Test that search results do not trigger a live stats lookup."""
    record = Record.create(minimal_record)
    record['_stats'] = {}
    pid = PersistentIdentifier.create(
        'recid', '12345', object_type='rec', object_uuid=record.id,
        status=PIDStatus.REGISTERED)
    db.session.commit()
    db.session.refresh(pid)
    RecordIndexer().index_by_id(record.id)
    stats_target = ('zenodo.modules.records.serializers.schemas.json'
                    '.get_record_stats')
    with patch(stats_target) as stats_mock:
        res = app_client.get('/search')
        # Serving search results must not call get_record_stats.
        assert not stats_mock.called
| 2,561 | Python | .py | 61 | 37.393443 | 76 | 0.709639 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,385 | test_schemas_marcxml.py | zenodo_zenodo/tests/unit/records/test_schemas_marcxml.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo Marcxml mapping test."""
from __future__ import absolute_import, print_function
from datetime import datetime
from invenio_pidstore.models import PersistentIdentifier
from invenio_records import Record
from zenodo.modules.records.serializers import marcxml_v1
def test_full_record(app, db, full_record):
    """Test MARC21 serialization of full record."""
    # Add embargo date and OAI-PMH set information.
    full_record['embargo_date'] = '0900-12-31'
    full_record['_oai'] = {
        "id": "oai:zenodo.org:1",
        "sets": ["user-zenodo", "user-ecfunded"]
    }
    # Create record and PID.
    record = Record.create(full_record)
    pid = PersistentIdentifier.create(
        pid_type='recid',
        pid_value='12345',
        object_type='rec',
        object_uuid=record.id,
    )
    assert record.validate() is None
    # Expected MARC21 (JSON) representation of the record created above,
    # keyed by MARC field names as produced by the marcxml_v1 schema.
    expected = {
        u'control_number': u'12345',
        u'date_and_time_of_latest_transaction': (
            record.model.updated.strftime("%Y%m%d%H%M%S.0")),
        u'resource_type': {
            u'subtype': u'book',
            u'type': u'publication'
        },
        u'title_statement': {
            u'title': u'Test title'
        },
        u'publication_distribution_imprint': [
            {u'date_of_publication_distribution': u'2014-02-27'},
        ],
        u'main_entry_personal_name': {
            u'affiliation': u'CERN',
            u'personal_name': u'Doe, John',
            u'authority_record_control_number_or_standard_number': [
                u'(gnd)170118215', u'(orcid)0000-0002-1694-233X'
            ]
        },
        u'added_entry_personal_name': [
            {
                u'affiliation': u'CERN',
                u'personal_name': u'Doe, Jane',
                u'authority_record_control_number_or_standard_number': [
                    u'(orcid)0000-0002-1825-0097'
                ]
            },
            {
                u'affiliation': u'CERN',
                u'personal_name': u'Smith, John',
            },
            {
                u'affiliation': u'CERN',
                u'personal_name': u'Nowak, Jack',
                u'authority_record_control_number_or_standard_number': [
                    u'(gnd)170118215'
                ]
            },
            {
                u'affiliation': u'CERN',
                u'relator_code': [u'oth'],
                u'personal_name': u'Smith, Other',
                u'authority_record_control_number_or_standard_number': [
                    u'(orcid)0000-0002-1825-0097'
                ]
            },
            {
                u'personal_name': u'Hansen, Viggo',
                u'relator_code': [u'oth'],
            },
            {
                u'affiliation': u'CERN',
                u'relator_code': [u'dtm'],
                u'personal_name': u'Kowalski, Manager'
            },
            {
                u'relator_code': [u'ths'],
                u'personal_name': u'Smith, Professor'
            },
        ],
        u'summary': {
            u'summary': u'Test Description'
        },
        u'index_term_uncontrolled': [
            {u'uncontrolled_term': u'kw1'},
            {u'uncontrolled_term': u'kw2'},
            {u'uncontrolled_term': u'kw3'},
        ],
        u'subject_added_entry_topical_term': [
            {
                u'topical_term_or_geographic_name_entry_element': u'cc-by',
                u'source_of_heading_or_term': u'opendefinition.org',
                u'level_of_subject': u'Primary',
                u'thesaurus': u'Source specified in subfield $2',
            },
            {
                u'topical_term_or_geographic_name_entry_element': u'Astronomy',
                u'authority_record_control_number_or_standard_number': (
                    u'(url)http://id.loc.gov/authorities/subjects/sh85009003'),
                u'level_of_subject': u'Primary',
            },
        ],
        u'general_note': {
            u'general_note': u'notes'
        },
        u'information_relating_to_copyright_status': {
            u'copyright_status': u'open'
        },
        u'terms_governing_use_and_reproduction_note': {
            u'uniform_resource_identifier':
                u'https://creativecommons.org/licenses/by/4.0/',
            u'terms_governing_use_and_reproduction':
                u'Creative Commons Attribution 4.0'
        },
        u'communities': [
            u'zenodo',
        ],
        u'funding_information_note': [
            {u'grant_number': u'1234', u'text_of_note': u'Grant Title'},
            {u'grant_number': u'4321', u'text_of_note': u'Title Grant'}
        ],
        u'host_item_entry': [
            {
                u'main_entry_heading': u'10.1234/foo.bar',
                u'note': u'doi',
                u'relationship_information': u'cites',
            },
            {
                'main_entry_heading': u'1234.4325',
                'note': u'arxiv',
                'relationship_information': u'isIdenticalTo'
            },
            {
                u'main_entry_heading': u'1234.4321',
                u'note': u'arxiv',
                u'relationship_information': u'cites',
            },
            {
                'main_entry_heading': u'1234.4328',
                'note': u'arxiv',
                'relationship_information': u'references'
            },
            {
                'main_entry_heading': u'10.1234/zenodo.4321',
                'note': u'doi',
                'relationship_information': u'isPartOf'
            },
            {
                'main_entry_heading': u'10.1234/zenodo.1234',
                'note': u'doi',
                'relationship_information': u'hasPart'
            },
            {
                u'main_entry_heading': u'Staszkowka',
                u'edition': u'Jol',
                u'title': u'Bum',
                u'related_parts': u'1-2',
                u'international_standard_book_number': u'978-0201633610',
            },
        ],
        u'other_standard_identifier': [
            {
                u'standard_number_or_code': u'10.1234/foo.bar',
                u'source_of_number_or_code': u'doi',
            },
            {
                u'standard_number_or_code': (
                    u'urn:lsid:ubio.org:namebank:11815'),
                u'source_of_number_or_code': u'lsid',
                u'qualifying_information': u'alternateidentifier',
            },
            {
                u'standard_number_or_code': u'2011ApJS..192...18K',
                u'source_of_number_or_code': u'ads',
                u'qualifying_information': u'alternateidentifier',
            },
            {
                u'standard_number_or_code': u'0317-8471',
                u'source_of_number_or_code': u'issn',
                u'qualifying_information': u'alternateidentifier',
            },
            {
                u'standard_number_or_code': u'10.1234/alternate.doi',
                u'source_of_number_or_code': u'doi',
                u'qualifying_information': u'alternateidentifier',
            }
        ],
        u'references': [
            {
                u'raw_reference': u'Doe, John et al (2012). Some title. '
                                  'Zenodo. 10.5281/zenodo.12'
            }, {
                u'raw_reference': u'Smith, Jane et al (2012). Some title. '
                                  'Zenodo. 10.5281/zenodo.34'
            }
        ],
        u'added_entry_meeting_name': [{
            u'date_of_meeting': u'23-25 June, 2014',
            u'meeting_name_or_jurisdiction_name_as_entry_element':
                u'The 13th Biennial HITRAN Conference',
            u'number_of_part_section_meeting': u'VI',
            u'miscellaneous_information': u'HITRAN13',
            u'name_of_part_section_of_a_work': u'1',
            u'location_of_meeting':
                u'Harvard-Smithsonian Center for Astrophysics'
        }],
        u'conference_url': 'http://hitran.org/conferences/hitran-13-2014/',
        u'dissertation_note': {
            u'name_of_granting_institution': u'I guess important',
        },
        u'journal': {
            'issue': '2',
            'pages': '20',
            'volume': '20',
            'title': 'Bam',
            'year': '2014',
        },
        u'embargo_date': '0900-12-31',
        u'language_code': {
            'language_code_of_text_sound_track_or_separate_title': 'eng',
        },
        u'_oai': {
            u'sets': [u'user-zenodo', u'user-ecfunded'],
            u'id': u'oai:zenodo.org:1'
        },
        u'_files': [
            {
                'uri': 'https://zenodo.org/record/12345/files/test',
                'checksum': 'md5:11111111111111111111111111111111',
                'type': 'txt',
                'size': 4,
            },
        ],
        # Fixed MARC leader values emitted by the schema.
        'leader': {
            'base_address_of_data': '00000',
            'bibliographic_level': 'monograph_item',
            'character_coding_scheme': 'marc-8',
            'descriptive_cataloging_form': 'unknown',
            'encoding_level': 'unknown',
            'indicator_count': 2,
            'length_of_the_implementation_defined_portion': 0,
            'length_of_the_length_of_field_portion': 4,
            'length_of_the_starting_character_position_portion': 5,
            'multipart_resource_record_level':
                'not_specified_or_not_applicable',
            'record_length': '00000',
            'record_status': 'new',
            'subfield_code_count': 2,
            'type_of_control': 'no_specified_type',
            'type_of_record': 'language_material',
            'undefined': 0,
        },
    }
    # Dump MARC21 JSON structure and compare against expected JSON.
    preprocessed_record = marcxml_v1.preprocess_record(record=record, pid=pid)
    data = marcxml_v1.schema_class().dump(preprocessed_record).data
    assert expected == data
    # Assert that we can output MARCXML.
    assert marcxml_v1.serialize(record=record, pid=pid)
def test_minimal_record(app, db, minimal_record):
    """Test MARC21 serialization of a minimal record."""
    # Create record and pid.
    record = Record.create(minimal_record)
    record.model.updated = datetime.utcnow()
    pid = PersistentIdentifier.create(
        pid_type='recid',
        pid_value='123',
        object_type='rec',
        object_uuid=record.id)
    assert record.validate() is None
    # Expected MARC21 (JSON) representation of the minimal record.
    expected = {
        u'date_and_time_of_latest_transaction': (
            record.model.updated.strftime("%Y%m%d%H%M%S.0")),
        u'publication_distribution_imprint': [{
            'date_of_publication_distribution': record['publication_date']
        }],
        u'control_number': '123',
        u'other_standard_identifier': [
            {
                'source_of_number_or_code': u'doi',
                'standard_number_or_code': u'10.5072/zenodo.123'
            }
        ],
        u'information_relating_to_copyright_status': {
            'copyright_status': 'open'
        },
        u'summary': {
            'summary': 'My description'
        },
        u'main_entry_personal_name': {
            'personal_name': 'Test'
        },
        u'resource_type': {
            'type': 'software'
        },
        u'title_statement': {
            'title': 'Test'
        },
        # Fixed MARC leader values emitted by the schema.
        u'leader': {
            'base_address_of_data': '00000',
            'bibliographic_level': 'monograph_item',
            'character_coding_scheme': 'marc-8',
            'descriptive_cataloging_form': 'unknown',
            'encoding_level': 'unknown',
            'indicator_count': 2,
            'length_of_the_implementation_defined_portion': 0,
            'length_of_the_length_of_field_portion': 4,
            'length_of_the_starting_character_position_portion': 5,
            'multipart_resource_record_level':
                'not_specified_or_not_applicable',
            'record_length': '00000',
            'record_status': 'new',
            'subfield_code_count': 2,
            'type_of_control': 'no_specified_type',
            'type_of_record': 'computer_file',
            'undefined': 0,
        },
    }
    # Dump through the MARCXML schema and compare; also check XML output.
    data = marcxml_v1.schema_class().dump(marcxml_v1.preprocess_record(
        pid=pid,
        record=record)).data
    assert expected == data
    marcxml_v1.serialize(pid=pid, record=record)
def assert_array(a1, a2):
    """Recursively check that array ``a1`` matches array ``a2``.

    Dicts and nested sequences are compared positionally; a scalar item
    only needs to be present somewhere in ``a2``. Both sequences must
    have equal length (checked once per element of ``a1``).
    """
    for idx, item in enumerate(a1):
        if isinstance(item, dict):
            assert_dict(item, a2[idx])
        elif isinstance(item, (list, tuple)):
            assert_array(item, a2[idx])
        else:
            assert item in a2
        assert len(a1) == len(a2)
def assert_dict(a1, a2):
    """Recursively check that dict ``a1`` matches dict ``a2``.

    Every key of ``a1`` must exist in ``a2`` with an equivalent value
    (dicts and sequences compared recursively, scalars by equality),
    and both dicts must have the same number of keys.
    """
    for key, value in a1.items():
        assert key in a2
        other = a2[key]
        if isinstance(value, dict):
            assert_dict(value, other)
        elif isinstance(value, (list, tuple)):
            assert_array(value, other)
        else:
            assert other == value
    assert len(a2) == len(a1)
| 13,950 | Python | .py | 372 | 26.172043 | 79 | 0.526261 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,386 | test_schemas_dcat.py | zenodo_zenodo/tests/unit/records/test_schemas_dcat.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2019 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test the DCAT serializer."""
from __future__ import absolute_import, print_function
from zenodo.modules.records.serializers import dcat_v1
def test_dcat_serializer(db, es, record_with_bucket):
    """Test that key record metadata appears in the DCAT serialization."""
    pid, record = record_with_bucket
    output = dcat_v1.serialize(pid, record)
    # Core bibliographic fields must appear verbatim in the output.
    for field in ('title', 'description', 'doi'):
        assert record[field] in output
    # Every creator's name parts are serialized.
    for creator in record['creators']:
        assert creator['familyname'] in output
        assert creator['givennames'] in output
    # Every attached file is referenced by its key.
    for f in record['_files']:
        assert f['key'] in output
| 1,687 | Python | .py | 38 | 41.789474 | 76 | 0.754258 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,387 | test_schemas_geojson.py | zenodo_zenodo/tests/unit/records/test_schemas_geojson.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2019 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo GeoJSON mapping test."""
from zenodo.modules.records.serializers import geojson_v1
def test_full_record(db, record_with_bucket, recid_pid):
    """Test GeoJSON serialization of the full record's locations."""
    _, full_record_model = record_with_bucket
    result = geojson_v1.transform_record(recid_pid, full_record_model)
    # The record's single named point location becomes one GeoJSON Feature.
    point_feature = {
        u'type': u'Feature',
        u'geometry': {
            u'type': u'Point',
            u'coordinates': [1.534, 2.35],
        },
        u'properties': {
            u'name': u'my place',
        },
    }
    assert result == {
        u'type': u'FeatureCollection',
        u'features': [point_feature],
    }
| 1,634 | Python | .py | 43 | 33.232558 | 76 | 0.683281 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,388 | test_records_tasks.py | zenodo_zenodo/tests/unit/records/test_records_tasks.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2018 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test Zenodo records tasks."""
import uuid
from copy import deepcopy
from datetime import datetime
from invenio_cache import current_cache
from invenio_pidrelations.contrib.versioning import PIDVersioning
from invenio_pidstore.models import PersistentIdentifier, PIDStatus
from zenodo.modules.records.api import ZenodoRecord
from zenodo.modules.records.minters import zenodo_record_minter
from zenodo.modules.records.tasks import schedule_update_datacite_metadata
def test_datacite_update(mocker, db, minimal_record):
    """Test scheduling of DataCite metadata updates for registered DOIs."""
    dc_mock = mocker.patch(
        'invenio_pidstore.providers.datacite.DataCiteMDSClient'
    )
    # XML fragments expected in the metadata posted to DataCite
    # (formatted with the record's doi/conceptdoi values).
    doi_tags = [
        '<identifier identifierType="DOI">{doi}</identifier>',
        ('<relatedIdentifier relatedIdentifierType="DOI" '
         'relationType="IsVersionOf">{conceptdoi}</relatedIdentifier>'),
    ]
    # Assert calls and content
    def assert_datacite_calls_and_content(record, doi_tags):
        """Datacite client calls assertion helper."""
        assert dc_mock().metadata_post.call_count == 1
        _, doi_args, _ = dc_mock().metadata_post.mock_calls[0]
        assert all([t.format(**record) in doi_args[0] for t in doi_tags])
        assert dc_mock().doi_post.call_count == 1
        dc_mock().doi_post.assert_any_call(
            record['doi'],
            'https://zenodo.org/record/{}'.format(record['recid']))
    def assert_datacite_calls_with_missing_data():
        """Datacite client calls assertion helper."""
        assert dc_mock().metadata_post.call_count == 0
        assert dc_mock().doi_post.call_count == 0
    def create_versioned_record(recid_value, conceptrecid):
        """Utility function for creating versioned records."""
        recid = PersistentIdentifier.create(
            'recid', recid_value, status=PIDStatus.RESERVED)
        pv = PIDVersioning(parent=conceptrecid)
        pv.insert_draft_child(recid)
        record_metadata = deepcopy(minimal_record)
        # Remove the DOI
        del record_metadata['doi']
        record_metadata['conceptrecid'] = conceptrecid.pid_value
        record_metadata['recid'] = int(recid.pid_value)
        record = ZenodoRecord.create(record_metadata)
        zenodo_record_minter(record.id, record)
        record.commit()
        return recid, record
    # Create conceptrecid for the records
    conceptrecid = PersistentIdentifier.create(
        'recid', '100', status=PIDStatus.RESERVED)
    # Create a reserved recid
    recid1, r1 = create_versioned_record('352543', conceptrecid)
    # no registered local DOIs -> nothing is sent to DataCite
    schedule_update_datacite_metadata(1)
    assert_datacite_calls_with_missing_data()
    doi_pids = PersistentIdentifier.query.filter(
        PersistentIdentifier.pid_value == '10.5072/zenodo.352543')
    doi_pids[0].status = PIDStatus.REGISTERED
    db.session.commit()
    update_date = doi_pids[0].updated
    # no task_details on Redis cache -> still nothing is sent
    schedule_update_datacite_metadata(1)
    assert_datacite_calls_with_missing_data()
    new_update_date1 = doi_pids[0].updated
    assert update_date == new_update_date1
    task_details = dict(
        job_id=str(uuid.uuid4()),
        from_date=datetime(2015, 1, 1, 13, 33),
        until_date=datetime(2016, 1, 1, 13, 33),
        last_update=datetime.utcnow()
    )
    current_cache.set('update_datacite:task_details', task_details, timeout=-1)
    # no registered local DOIs updated inside the interval -> nothing is sent
    schedule_update_datacite_metadata(1)
    assert_datacite_calls_with_missing_data()
    new_update_date2 = doi_pids[0].updated
    assert update_date == new_update_date2
    task_details = dict(
        job_id=str(uuid.uuid4()),
        from_date=datetime(2015, 1, 1, 13, 33),
        until_date=datetime.utcnow(),
        last_update=datetime.utcnow()
    )
    current_cache.set('update_datacite:task_details', task_details, timeout=-1)
    # DOI now falls inside [from_date, until_date]: metadata is pushed and
    # the PID's updated timestamp moves forward.
    schedule_update_datacite_metadata(1)
    new_update_date3 = doi_pids[0].updated
    assert update_date < new_update_date3
    assert_datacite_calls_and_content(r1, doi_tags)
| 5,013 | Python | .py | 112 | 39.25 | 79 | 0.711708 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,389 | test_schemas_csl.py | zenodo_zenodo/tests/unit/records/test_schemas_csl.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo CSL mapping test."""
from __future__ import absolute_import, print_function
from datetime import datetime
from invenio_records.api import Record
from zenodo.modules.records.serializers import csl_v1
def test_minimal(db, minimal_record, recid_pid):
    """Test CSL serialization of a minimal record."""
    result = csl_v1.transform_record(recid_pid, Record(minimal_record))
    today = datetime.utcnow().date()
    expected = {
        'id': '123',
        'DOI': '10.5072/zenodo.123',
        'type': 'article',
        'title': 'Test',
        'abstract': 'My description',
        'author': [{'family': 'Test'}],
        # NOTE(review): assumes the fixture's publication date is today.
        'issued': {'date-parts': [[today.year, today.month, today.day]]},
    }
    assert result == expected
def test_full(db, full_record, recid_pid):
    """Test CSL serialization of the full record."""
    result = csl_v1.transform_record(recid_pid, Record(full_record))
    expected = {
        "id": "123",
        "type": "book",
        "title": "Test title",
        "abstract": "Test Description",
        "author": [
            {"given": "John", "family": "Doe"},
            {"given": "Jane", "family": "Doe"},
            {"given": "John", "family": "Smith"},
            {"given": "Jack", "family": "Nowak"},
        ],
        "issued": {"date-parts": [[2014, 2, 27]]},
        "language": "eng",
        "version": "1.2.5",
        "note": "notes",
        # Identifiers.
        "DOI": "10.1234/foo.bar",
        "ISBN": "978-0201633610",
        "ISSN": "0317-8471",
        # Journal / imprint information.
        "container_title": "Bam",
        "volume": "20",
        "issue": "2",
        "page": "20",
        "publisher": "Jol",
        "publisher_place": "Staszkowka",
        # Conference information.
        "event": "The 13th Biennial HITRAN Conference (HITRAN13)",
        "event-place": "Harvard-Smithsonian Center for Astrophysics",
    }
    assert result == expected
def test_type(db, minimal_record, recid_pid):
    """Test mapping of resource types to CSL types."""
    cases = [
        ({'type': 'publication', 'subtype': 'thesis'}, 'thesis'),
        ({'type': 'publication'}, 'article'),
        ({'type': 'image'}, 'graphic'),
    ]
    for resource_type, csl_type in cases:
        minimal_record.update({'resource_type': resource_type})
        result = csl_v1.transform_record(recid_pid, Record(minimal_record))
        assert result['type'] == csl_type
def test_author(db, minimal_record, recid_pid):
    """Test CSL author mapping."""
    # No creators serializes to an empty author list.
    minimal_record['creators'] = []
    result = csl_v1.transform_record(recid_pid, Record(minimal_record))
    assert result['author'] == []
    minimal_record['creators'] = [
        {'familyname': 'TestFamily1', 'givennames': 'TestGiven1'},
        {'familyname': 'TestFamily2', 'name': 'TestName2'},
        {'name': 'TestName3'},
    ]
    result = csl_v1.transform_record(recid_pid, Record(minimal_record))
    # 'name' is used as the family name when 'givennames' is missing.
    assert result['author'] == [
        {'family': 'TestFamily1', 'given': 'TestGiven1'},
        {'family': 'TestName2'},
        {'family': 'TestName3'},
    ]
def test_identifiers(db, minimal_record, recid_pid):
    """Test CSL identifier mapping (DOI, ISBN, ISSN, publisher)."""
    # A non-Zenodo DOI does not set a publisher.
    minimal_record['doi'] = '10.1234/foo'
    result = csl_v1.transform_record(recid_pid, Record(minimal_record))
    assert result['DOI'] == '10.1234/foo'
    assert 'publisher' not in result
    # A 10.5281 (Zenodo) DOI prefix yields the Zenodo publisher.
    minimal_record['doi'] = '10.5281/foo'
    result = csl_v1.transform_record(recid_pid, Record(minimal_record))
    assert result['DOI'] == '10.5281/foo'
    assert result['publisher'] == 'Zenodo'
    # ISBN is taken from the imprint.
    minimal_record['imprint'] = {'isbn': '978-1604598933'}
    result = csl_v1.transform_record(recid_pid, Record(minimal_record))
    assert result['ISBN'] == '978-1604598933'
    # ISSN is taken from an alternate identifier with the 'issn' scheme.
    minimal_record['alternate_identifiers'] = [{
        'identifier': 'ISSN 0264-2875',
        'scheme': 'issn',
    }]
    result = csl_v1.transform_record(recid_pid, Record(minimal_record))
    assert result['ISSN'] == 'ISSN 0264-2875'
def test_journal(db, minimal_record, recid_pid):
    """Test CSL mapping of journal metadata."""
    minimal_record['journal'] = {
        'volume': '42',
        'issue': '7',
        'title': 'Journal title',
        'pages': '10-20',
    }
    result = csl_v1.transform_record(recid_pid, Record(minimal_record))
    # Journal fields map onto the corresponding CSL fields.
    expected = {
        'container_title': 'Journal title',
        'volume': '42',
        'issue': '7',
        'page': '10-20',
    }
    for key, value in expected.items():
        assert result[key] == value
def test_part_of(db, minimal_record, recid_pid):
    """Test CSL mapping of 'part of' and imprint metadata."""
    minimal_record['part_of'] = {
        'title': 'Conference proceedings title',
        'pages': '10-20',
    }
    minimal_record['imprint'] = {
        'publisher': 'The Good Publisher',
        'place': 'Somewhere',
    }
    result = csl_v1.transform_record(recid_pid, Record(minimal_record))
    expected = {
        'container_title': 'Conference proceedings title',
        'page': '10-20',
        'publisher': 'The Good Publisher',
        'publisher_place': 'Somewhere',
    }
    for key, value in expected.items():
        assert result[key] == value
def test_other(db, minimal_record, recid_pid):
    """Test CSL mapping of language, notes and publisher."""
    minimal_record['language'] = 'en'
    minimal_record['notes'] = 'Test note'
    minimal_record['imprint'] = {'publisher': 'Zenodo'}
    result = csl_v1.transform_record(recid_pid, Record(minimal_record))
    assert result['language'] == 'en'
    assert result['note'] == 'Test note'
    assert result['publisher'] == 'Zenodo'
| 6,535 | Python | .py | 180 | 29.8 | 76 | 0.600348 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,390 | test_records_utils.py | zenodo_zenodo/tests/unit/records/test_records_utils.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2017 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test Zenodo records utils."""
from __future__ import absolute_import, print_function
from zenodo.modules.records.utils import build_record_custom_fields, \
is_valid_openaire_type
def test_openaire_type_validation(app):
"""Test validation of OpenAIRE subtypes."""
assert is_valid_openaire_type({}, [])
assert is_valid_openaire_type({'type': 'dataset'}, ['c1', 'b2'])
# valid case
assert is_valid_openaire_type(
{'openaire_subtype': 'foo:t4', 'type': 'other'}, ['c1'])
# another valid case
assert is_valid_openaire_type(
{'openaire_subtype': 'bar:t3', 'type': 'software'}, ['c3'])
# valid case (mixed communities, but subtype from other/foo)
assert is_valid_openaire_type(
{'openaire_subtype': 'foo:t4', 'type': 'other'}, ['c1', 'c3'])
# valid case (mixed communities, but subtype from software/bar)
assert is_valid_openaire_type(
{'openaire_subtype': 'bar:t3', 'type': 'software'}, ['c1', 'c3'])
# invalid OA subtype
assert not is_valid_openaire_type(
{'openaire_subtype': 'xxx', 'type': 'other'}, ['c1'])
# community missing
assert not is_valid_openaire_type(
{'openaire_subtype': 'foo:oth1', 'type': 'other'}, [])
# wrong community
assert not is_valid_openaire_type(
{'openaire_subtype': 'foo:oth1', 'type': 'other'}, ['c3'])
# wrong general type (software has a definition)
assert not is_valid_openaire_type(
{'openaire_subtype': 'foo:t4', 'type': 'software'}, ['c1'])
# wrong general type (dataset has no definition)
assert not is_valid_openaire_type(
{'openaire_subtype': 'foo:t4', 'type': 'dataset'}, ['c1'])
# non-existing prefix
assert not is_valid_openaire_type(
{'openaire_subtype': 'xxx:t1', 'type': 'software'}, ['c1'])
def test_build_record_custom_fields(app, full_record, custom_metadata):
"""Test building of the records' custom fields."""
full_record['custom'] = custom_metadata
expected = dict(
custom_keywords={
('dwc:family', ('Felidae',)),
('dwc:genus', ('Felis',)),
},
custom_text={
('dwc:behavior', ('Plays with yarn, sleeps in cardboard box.',)),
},
custom_relationships={
(
'obo:RO_0002453',
('Cat', 'Felis catus'),
('Ctenocephalides felis', 'Cat flea'),
)
}
)
result = build_record_custom_fields(full_record)
assert expected == {
'custom_keywords': {
(v['key'], tuple(v['value'])) for v in result['custom_keywords']},
'custom_text': {
(v['key'], tuple(v['value'])) for v in result['custom_text']},
'custom_relationships': {
(v['key'], tuple(v['subject']), tuple(v['object']))
for v in result['custom_relationships']},
}
| 3,854 | Python | .py | 90 | 36.933333 | 78 | 0.637646 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,391 | test_serializers_json.py | zenodo_zenodo/tests/unit/records/test_serializers_json.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2018 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Unit test for Zenodo json serializer."""
import json
import pytest
from flask import current_app, url_for
from helpers import login_user_via_session
@pytest.mark.parametrize('user_info, file_info_visible', [
# anonymous user
(None, False),
# owner
(dict(email='info@zenodo.org', password='tester'), True),
# not owner
(dict(email='test@zenodo.org', password='tester2'), False),
# admin user
(dict(email='admin@zenodo.org', password='admin'), True),
])
def test_closed_access_record_serializer(api, users, json_headers,
closed_access_record,
user_info, file_info_visible):
"""Test closed access record serialisation using records API."""
with api.test_request_context():
with api.test_client() as client:
if user_info:
# Login as user
login_user_via_session(client, email=user_info['email'])
res = client.get(
url_for('invenio_records_rest.recid_item',
pid_value=closed_access_record['recid']),
headers=json_headers
)
r = json.loads(res.data.decode('utf-8'))
assert (r['links'].get('bucket') is not None) == file_info_visible
assert (r.get('files') is not None) == file_info_visible
@pytest.mark.parametrize('user_info', [
# anonymous user
None,
# owner
dict(email='info@zenodo.org', password='tester'),
# not owner
dict(email='test@zenodo.org', password='tester2'),
# admin user
dict(email='admin@zenodo.org', password='admin'),
])
def test_closed_access_record_search_serializer(
api, users, json_headers, user_info, closed_access_record):
"""Test closed access record serialisation of the search result."""
with api.test_request_context():
with api.test_client() as client:
if user_info:
# Login as user
login_user_via_session(client, email=user_info['email'])
res = client.get(
url_for('invenio_records_rest.recid_list'),
headers=json_headers
)
r = json.loads(res.data.decode('utf-8'))
assert r[0]['links'].get('bucket', None) is None
assert len(r[0].get('files', [])) == 0
def test_record_thumbnails_serializer(api, record_with_image_creation):
"""Test closed access record serialisation using records API."""
pid, record, record_url = record_with_image_creation
cached_thumbnails = current_app.config['CACHED_THUMBNAILS']
with api.test_request_context():
with api.test_client() as client:
res = client.get(url_for(
'invenio_records_rest.recid_item', pid_value=pid.pid_value))
for thumbnail in cached_thumbnails:
assert res.json['links']['thumbs'][thumbnail] == \
'http://localhost/record/12345/thumb{}'.format(thumbnail)
assert res.json['links']['thumb250']
| 4,022 | Python | .py | 92 | 36.315217 | 78 | 0.648125 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,392 | test_api_views.py | zenodo_zenodo/tests/unit/records/test_api_views.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2019 CERN.
#
# Zenodo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Unit tests Zenodo JSON deserializer."""
from urlparse import parse_qs, urlparse
import pytest
from flask import url_for
from invenio_indexer.api import RecordIndexer
from invenio_search import current_search
@pytest.mark.parametrize(('val', 'status', 'error_message'), [
('-1.43,-1.53, 2.45,1.63', 200, None),
('1.23, -1.43 , 1.53 , 2.34', 200, None),
('2.45,1.63', 400,
'Invalid bounds: four comma-separated numbers required. '
'Example: 143.37158,-38.99357,146.90918,-37.35269'),
('2.45,\'1.63\',-1.43,-1.53', 400, 'Invalid number in bounds.'),
('2.45,\' \',-1.43,-1.53', 400, 'Invalid number in bounds.'),
('2.45,\'\',-1.43,-1.53', 400, 'Invalid number in bounds.'),
('2.45,,-1.43,-1.53', 400, 'Invalid number in bounds.'),
('2.45, ,-1.43,-1.53', 400, 'Invalid number in bounds.'),
('2.45;1.63,-1.43,-1.53', 400,
'Invalid bounds: four comma-separated numbers required. '
'Example: 143.37158,-38.99357,146.90918,-37.35269'),
('181,1.63,-181,-1.53', 400, 'Longitude must be between -180 and 180.'),
('2.45,91,-1.43,-91', 400, 'Latitude must be between -90 and 90.'),
('2.45,1.63,NaN,-1.53', 400,
'Invalid number: "NaN" is not a permitted value.'),
('2.45,1.63,Infinity,-1.53', 400,
'Longitude must be between -180 and 180.'),
('-1.43,1.63,2.45,-1.53', 400,
'Top-right latitude must be greater than bottom-left latitude.'),
])
def test_geographical_search_validation(
es, api, json_headers, record_with_bucket, val, status, error_message):
"""Test geographical search validation."""
pid, record = record_with_bucket
RecordIndexer().index(record)
with api.test_request_context():
with api.test_client() as client:
res = client.get(
url_for('invenio_records_rest.recid_list', bounds=val),
headers=json_headers
)
assert res.status_code == status
if error_message:
assert res.json['message'] == 'Validation error.'
assert len(res.json['errors']) == 1
assert res.json['errors'][0]['field'] == 'bounds'
assert res.json['errors'][0]['message'] == error_message
def test_geographical_search(es, api, json_headers, record_with_bucket):
"""Test geographical search."""
pid, record = record_with_bucket
record['locations'] = [
{'lat': 46.204391, 'lon': 6.143158, 'place': 'Geneva'},
{'place': 'New York'}
]
RecordIndexer().index(record)
current_search.flush_and_refresh(index='records')
with api.test_request_context():
with api.test_client() as client:
res = client.get(
url_for('invenio_records_rest.recid_list',
bounds='6.059634,46.167928,6.230161,46.244911'),
headers=json_headers
)
assert len(res.json) == 1
@pytest.mark.parametrize(('val', 'status', 'error_message'), [
('[dwc:family]:[Felidae]', 200, None),
('[dwc:foobar]:[Felidae]', 400, 'The "dwc:foobar" term is not supported.'),
('[dwc:family]:Felidae', 400, 'The parameter should have the '
'format: custom=[term]:[value].'),
('[dwc:family]', 400, 'The parameter should have the '
'format: custom=[term]:[value].'),
(':Felidae', 400, 'The parameter should have the '
'format: custom=[term]:[value].')
])
def test_custom_search_validation(
es, api, json_headers, val, status, error_message):
"""Test custom metadata search validation."""
with api.test_request_context():
with api.test_client() as client:
res = client.get(
url_for('invenio_records_rest.recid_list', custom=val),
headers=json_headers
)
assert res.status_code == status
if error_message:
assert res.json['message'] == 'Validation error.'
assert len(res.json['errors']) == 1
assert res.json['errors'][0]['field'] == 'custom'
assert res.json['errors'][0]['message'] == error_message
@pytest.mark.parametrize(('query', 'result'), [
('[dwc:family]:[Felidae]', 1),
('[dwc:family]:[foobar]', 0),
('[obo:RO_0002453]:[Cat:]', 1),
('[obo:RO_0002453]:[:"Cat flea"]', 1),
('[obo:RO_0002453]:[foobar:]', 0),
('[obo:RO_0002453]:[(foobar OR "Felis catus"):]', 1),
('[obo:RO_0002453]:["Felis catus":"Cat flea"]', 1),
('[obo:RO_0002453]:["Felis catus":foobar]', 0),
])
def test_custom_search(es, api, json_headers, record_with_bucket,
custom_metadata, query, result):
"""Test custom metadata search."""
pid, record = record_with_bucket
record['custom'] = custom_metadata
RecordIndexer().index(record)
current_search.flush_and_refresh(index='records')
with api.test_request_context():
with api.test_client() as client:
res = client.get(
url_for('invenio_records_rest.recid_list',
custom=query),
headers=json_headers)
assert len(res.json) == result
@pytest.mark.parametrize(('query', 'result'), [
("", None),
("?all_versions", "true"),
("?all_versions=true", "true"),
("?all_versions=True", "true"),
("?all_versions=1", "true"),
("?all_versions=1234", "1234"),
("?all_versions=test", "test"),
])
def test_apply_version_filters(es, api, query, result):
"""Test record version filters to search."""
with api.test_request_context(), api.test_client() as client:
res = client.get(url_for('invenio_records_rest.recid_list') + query)
url = urlparse(res.json["links"]["self"])
qs = parse_qs(url.query, keep_blank_values=True)
assert qs.get("all_versions", [None]) == [result]
| 6,739 | Python | .py | 149 | 38.436242 | 79 | 0.616788 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,393 | test_permissions.py | zenodo_zenodo/tests/unit/records/test_permissions.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test records permissions."""
from __future__ import absolute_import, print_function, unicode_literals
from datetime import datetime
import jwt
import pytest
from flask import url_for
from flask_principal import ActionNeed
from invenio_access.models import ActionUsers
from invenio_accounts.models import User
from zenodo.modules.records.models import AccessRight
@pytest.mark.parametrize('user,access_right,expected', [
(None, AccessRight.OPEN, 200),
(None, AccessRight.EMBARGOED, 404),
(None, AccessRight.CLOSED, 404),
('auth', AccessRight.OPEN, 200),
('auth', AccessRight.EMBARGOED, 404),
('auth', AccessRight.CLOSED, 404),
('owner', AccessRight.OPEN, 200),
('owner', AccessRight.EMBARGOED, 200),
('owner', AccessRight.CLOSED, 200),
('admin', AccessRight.OPEN, 200),
('admin', AccessRight.EMBARGOED, 200),
('admin', AccessRight.CLOSED, 200),
])
def test_file_permissions(app, db, record_with_files_creation,
user, access_right, expected):
"""Test file permissions."""
pid, record, record_url = record_with_files_creation
# Create test users
admin = User(email='admin@zenodo.org', password='123456')
owner = User(email='owner@zenodo.org', password='123456')
auth = User(email='auth@zenodo.org', password='123456')
db.session.add_all([admin, owner, auth])
db.session.add(
ActionUsers.allow(ActionNeed('admin-access'), user=admin)
)
db.session.commit()
# Create test record
record['access_right'] = access_right
record['owners'] = [owner.id]
record.commit()
db.session.commit()
file_url = url_for(
'invenio_records_ui.recid_files',
pid_value=pid.pid_value,
filename='Test.pdf',
)
with app.test_client() as client:
if user:
# Login as user
with client.session_transaction() as sess:
sess['user_id'] = User.query.filter_by(
email='{}@zenodo.org'.format(user)).one().id
sess['_fresh'] = True
res = client.get(file_url)
assert res.status_code == expected
def test_rat_token(app, db, rat_generate_token, closed_access_record):
"""Test access via RAT."""
record = closed_access_record
record['owners'] = [rat_generate_token.user_id]
record['_deposit']['owners'] = [rat_generate_token.user_id]
record.commit()
db.session.commit()
rat_token = jwt.encode(
payload={
'iat': datetime.utcnow(),
'sub': {
'deposit_id': record['_deposit']['id'],
'access': 'read',
},
},
key=rat_generate_token.access_token,
algorithm='HS256',
headers={'kid': str(rat_generate_token.id)},
)
with app.test_client() as client:
file_url = url_for(
'invenio_records_ui.recid_files',
pid_value=record['recid'],
filename='Test.pdf',
)
res = client.get(file_url)
assert res.status_code == 404
res = client.get(file_url, query_string={'token': rat_token})
assert res.status_code == 200
# Change record owner
record['owners'] = [123]
record['_deposit']['owners'] = [123]
record.commit()
db.session.commit()
with app.test_client() as client:
file_url = url_for(
'invenio_records_ui.recid_files',
pid_value=record['recid'],
filename='Test.pdf',
)
res = client.get(file_url)
assert res.status_code == 404
res = client.get(file_url, query_string={'token': rat_token})
assert res.status_code == 404
| 4,654 | Python | .py | 123 | 31.699187 | 76 | 0.651895 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,394 | test_views.py | zenodo_zenodo/tests/unit/records/test_views.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo records views."""
from __future__ import absolute_import, print_function
import json
import urlparse
from datetime import datetime, timedelta
import pytest
from flask import current_app, render_template, render_template_string, url_for
from helpers import login_user_via_session
from invenio_indexer.api import RecordIndexer
from invenio_pidstore.models import PersistentIdentifier, PIDStatus
from invenio_records.api import Record
from invenio_search import current_search
from mock import Mock, patch
from six.moves.urllib.parse import urlencode
from zenodo.modules.records.serializers.schemas.common import api_link_for
from zenodo.modules.records.views import get_reana_badge, zenodo_related_links
def test_is_valid_access_right(app):
"""Test template test."""
assert render_template_string("{{ 'open' is accessright }}") == "True"
assert render_template_string("{{ 'invalid' is accessright }}") == "False"
def test_is_embargoed(app):
"""Test template test."""
today = datetime.utcnow().date()
assert render_template_string(
"{{ dt is embargoed }}", dt=today) == "False"
assert render_template_string(
"{{ dt is embargoed }}", dt=today+timedelta(days=1)) == "True"
assert render_template_string(
"{{ dt is embargoed(accessright='open') }}",
dt=today+timedelta(days=1)) == "False"
assert render_template_string(
"{{ dt is embargoed(accessright='embargoed') }}",
dt=today+timedelta(days=1)) == "True"
assert render_template_string(
"{{ dt is embargoed(accessright='embargoed') }}",
dt=None) == "False"
def test_accessright_category(app):
"""Test template filter."""
assert render_template_string(
"{{ 'open'|accessright_category }}") == "success"
def test_accessright_title(app):
"""Test template filter."""
assert render_template_string(
"{{ 'open'|accessright_title }}") == "Open Access"
def test_objecttype(app):
"""Test template filter."""
assert render_template_string(
r"{% set t = upload_type|objecttype %}{{ t.title.en }}",
upload_type=dict(type="publication", subtype="book")) == "Book"
assert render_template_string(
r"{% set t = upload_type|objecttype %}{{ t.title.en }}",
upload_type=dict(type="publication")) == "Publication"
assert render_template_string(
r"{% set t = upload_type|objecttype %}{{ t }}",
upload_type="") == "None"
def test_local_doi(app):
"""Test template test."""
orig = app.config['ZENODO_LOCAL_DOI_PREFIXES']
app.config['ZENODO_LOCAL_DOI_PREFIXES'] = ['10.123', '10.5281']
assert render_template_string(
"{{ '10.123/foo' is local_doi }}") == "True"
assert render_template_string(
"{{ '10.1234/foo' is local_doi }}") == "False"
assert render_template_string(
"{{ '10.5281/foo' is local_doi }}") == "True"
app.config['ZENODO_LOCAL_DOI_PREFIXES'] = orig
def test_relation_title(app):
"""Test relation title."""
assert render_template_string(
"{{ 'isCitedBy'|relation_title }}") == "Cited by"
assert render_template_string(
"{{ 'nonExistingRelation'|relation_title }}") == "nonExistingRelation"
def test_relation_logo(app):
"""Test relation logo."""
no_relations = {}
assert zenodo_related_links(no_relations, []) == []
class MockCommunity(object):
id = 'zenodo'
github_relation = {
'communities': [
'zenodo',
],
'related_identifiers': [
{
'scheme': 'url',
'relation': 'isSupplementTo',
'identifier': 'https://github.com/'
'TaghiAliyev/BBiCat/tree/v1.0.4-alpha',
}
],
}
assert zenodo_related_links(github_relation, [MockCommunity]) == [
{
'image': 'img/github.png',
'link': 'https://github.com/TaghiAliyev/BBiCat/tree/v1.0.4-alpha',
'prefix': 'https://github.com',
'relation': 'isSupplementTo',
'scheme': 'url',
'text': 'Available in'
}
]
def test_pid_url(app, sample_identifiers):
"""Test pid_url."""
# All types of identifiers
for scheme, (_id, url) in sample_identifiers.items():
assert render_template_string(
"{{{{ '{_id}'|pid_url(scheme='{scheme}') }}}}"
.format(_id=_id, scheme=scheme)) == url
# Specific cases and parameters
assert render_template_string(
"{{ '10.123/foo'|pid_url }}") == "https://doi.org/10.123/foo"
assert render_template_string(
"{{ 'doi: 10.123/foo'|pid_url(scheme='doi') }}") \
== "https://doi.org/10.123/foo"
assert render_template_string(
"{{ 'asfasdf'|pid_url }}") == ""
assert render_template_string(
"{{ 'arXiv:1512.01558'|pid_url(scheme='arxiv', url_scheme='http') }}"
) == "http://arxiv.org/abs/arXiv:1512.01558"
assert render_template_string(
"{{ 'arXiv:1512.01558'|pid_url(scheme='arxiv') }}") \
== "https://arxiv.org/abs/arXiv:1512.01558"
assert render_template_string(
"{{ 'hdl.handle.net/1234/5678'|pid_url(scheme='handle') }}") \
== "https://hdl.handle.net/1234/5678"
def test_records_ui_export(app, db, full_record):
"""Test export pages."""
r = Record.create(full_record)
PersistentIdentifier.create(
'recid', '12345', object_type='rec', object_uuid=r.id,
status=PIDStatus.REGISTERED)
db.session.commit()
formats = app.config['ZENODO_RECORDS_EXPORTFORMATS']
with app.test_client() as client:
for f, val in formats.items():
res = client.get(url_for(
'invenio_records_ui.recid_export', pid_value='12345',
format=f))
assert res.status_code == 410 if val is None else 200
def test_citation_formatter_styles_get(api, api_client, db):
"""Test get CSL styles."""
with api.test_request_context():
style_url = url_for('invenio_csl_rest.styles')
res = api_client.get(style_url)
styles = json.loads(res.get_data(as_text=True))
assert res.status_code == 200
assert 'apa' in styles
assert 'American Psychological Association' in styles['apa']
def test_citation_formatter_citeproc_get(api, api_client, es, db, full_record,
users):
"""Test records REST citeproc get."""
r = Record.create(full_record)
pid = PersistentIdentifier.create(
'recid', '12345', object_type='rec', object_uuid=r.id,
status=PIDStatus.REGISTERED)
db.session.commit()
db.session.refresh(pid)
RecordIndexer().index_by_id(r.id)
current_search.flush_and_refresh(index='records')
login_user_via_session(api_client, email=users[2]['email'])
with api.test_request_context():
records_url = url_for('invenio_records_rest.recid_item',
pid_value=pid.pid_value)
res = api_client.get(records_url,
query_string={'style': 'apa'},
headers={'Accept': 'text/x-bibliography'})
assert res.status_code == 200
assert 'Doe, J.' in res.get_data(as_text=True)
assert 'Test title. In Bam (1.2.5, Vol. 20, Number 2, p. 20) ' + \
'[Computer software].' \
in res.get_data(as_text=True)
assert '(2014).' in res.get_data(as_text=True)
@pytest.mark.parametrize(('stats', 'expected_result'), [
(None, {
'version_views': '0', 'views': '0',
'version_downloads': '0', 'downloads': '0',
'version_volume': '0 Bytes', 'volume': '0 Bytes',
'version_unique_views': '0', 'unique_views': '0',
'version_unique_downloads': '0', 'unique_downloads': '0'
}),
({
'version_views': 31200, 'views': 2130,
'version_downloads': 54, 'downloads': 28,
'version_volume': 213000, 'volume': 2000,
'version_unique_views': 280, 'unique_views': 27,
'version_unique_downloads': 180, 'unique_downloads': 25
}, {
'version_views': '31,200', 'views': '2,130',
'version_downloads': '54', 'downloads': '28',
'version_volume': '213.0 kB', 'volume': '2.0 kB',
'version_unique_views': '280', 'unique_views': '27',
'version_unique_downloads': '180', 'unique_downloads': '25'
}),
({
'version_views': 31200, 'views': 2130,
'version_downloads': 54, 'downloads': 28,
'version_volume': None, 'volume': 2000
}, {
'version_views': '31,200', 'views': '2,130',
'version_downloads': '54', 'downloads': '28',
'version_volume': '0 Bytes', 'volume': '2.0 kB',
'version_unique_views': '0', 'unique_views': '0',
'version_unique_downloads': '0', 'unique_downloads': '0'
})
])
def test_stats(app, db, minimal_record, stats, expected_result):
"""Test record stats."""
record = Record.create(minimal_record)
with patch('zenodo.modules.records.views.get_record_stats',
return_value=stats) as m:
# local DOI
record['doi'] = '10.5072/foo'
template = render_template('zenodo_records/box/record_stats.html',
record=record)
assert expected_result['views'] in template
assert expected_result['downloads'] in template
assert expected_result['volume'] in template
assert expected_result['unique_views'] in template
assert expected_result['unique_downloads'] in template
assert 'All versions' in template
assert expected_result['version_views'] in template
assert expected_result['version_downloads'] in template
assert expected_result['version_volume'] in template
assert expected_result['version_unique_views'] in template
assert expected_result['version_unique_downloads'] in template
# not local DOI
record['doi'] = '.dsfsdf'
template = render_template('zenodo_records/box/record_stats.html',
record=record)
assert expected_result['views'] in template
assert expected_result['downloads'] in template
assert expected_result['volume'] in template
assert expected_result['unique_views'] in template
assert expected_result['unique_downloads'] in template
assert 'All versions' not in template
def test_record_thumbnail(app, record_with_image_creation):
"""Test cached record thumbnails."""
pid, record, record_url = record_with_image_creation
for cached_thumbnail in current_app.config['CACHED_THUMBNAILS']:
with app.test_client() as client:
res = client.get(url_for(
'invenio_records_ui.recid_thumbnail',
pid_value=pid.pid_value, thumbnail_size=cached_thumbnail))
assert res.status_code == 200
with app.test_client() as client:
res = client.get(url_for(
'invenio_records_ui.recid_thumbnail',
pid_value=pid.pid_value, thumbnail_size='nonvalid'))
assert res.status_code == 400
def test_record_thumbnail_without_images(app, record_with_files_creation):
"""Test cached thumbnails on record without images."""
pid, record, record_url = record_with_files_creation
thumbnail_config = current_app.config['CACHED_THUMBNAILS']
cached_thumbnail = list(thumbnail_config)[0]
with app.test_client() as client:
res = client.get(url_for(
'invenio_records_ui.recid_thumbnail',
pid_value=pid.pid_value, thumbnail_size=cached_thumbnail))
assert res.status_code == 404
def test_record_reana_badge(app, db, minimal_record_for_badge):
"""Test reana badge and its links"""
if not (app.config["ZENODO_REANA_BADGES_ENABLED"]):
assert not get_reana_badge(minimal_record_for_badge)
app.config["ZENODO_REANA_BADGES_ENABLED"] = True
badge = get_reana_badge(minimal_record_for_badge)
# TODO: Assert actual values
assert badge
assert badge["url"]
assert badge["img_url"]
file = minimal_record_for_badge.files["reana.yaml"]
file_url = api_link_for("object", **(file.dumps()))
badge_url = "{}?{}".format(
app.config["ZENODO_REANA_LAUNCH_URL_BASE"],
urlencode({
"url": file_url,
"name": minimal_record_for_badge["title"],
})
)
assert badge_url == badge["url"]
| 13,427 | Python | .py | 302 | 37.195364 | 79 | 0.634889 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,395 | test_models.py | zenodo_zenodo/tests/unit/records/test_models.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Zenodo template tests."""
from __future__ import absolute_import, print_function
from datetime import datetime, timedelta
from invenio_indexer.api import RecordIndexer
from invenio_records.api import Record
from invenio_search import current_search
from zenodo.modules.records.models import AccessRight, ObjectType
from zenodo.modules.records.tasks import update_expired_embargos
def _today_offset(val):
return (datetime.utcnow().date() + timedelta(days=val)).isoformat()
def test_update_embargoed_records(app, db, es):
"""Test update embargoed records."""
records = [
Record.create({
'title': 'yesterday',
'access_right': 'embargoed',
'embargo_date': _today_offset(-1)
}),
Record.create({
'title': 'today',
'access_right': 'embargoed',
'embargo_date': _today_offset(0)
}),
Record.create({
'title': 'tomorrow',
'access_right': 'embargoed',
'embargo_date': _today_offset(1)
}),
Record.create({
'title': 'already open',
'access_right': 'open',
'embargo_date': _today_offset(-1)
})
]
db.session.commit()
for r in records:
RecordIndexer().index(r)
current_search.flush_and_refresh('records-record-v1.0.0')
res = AccessRight.get_expired_embargos()
assert len(res) == 2
assert str(records[0].id) in res
assert str(records[1].id) in res
update_expired_embargos()
assert Record.get_record(records[0].id)['access_right'] == AccessRight.OPEN
assert Record.get_record(records[1].id)['access_right'] == AccessRight.OPEN
def test_access_right():
"""Test basic access right features."""
for val in ['open', 'embargoed', 'restricted', 'closed']:
assert getattr(AccessRight, val.upper()) == val
assert AccessRight.is_valid(val)
assert not AccessRight.is_valid('invalid')
assert AccessRight.as_title(AccessRight.OPEN) == 'Open Access'
assert AccessRight.as_category(AccessRight.EMBARGOED) == 'warning'
options = AccessRight.as_options()
assert isinstance(options, tuple)
assert options[0] == ('open', 'Open Access')
def test_access_right_embargo():
"""Test access right embargo."""
assert AccessRight.get(AccessRight.OPEN) == 'open'
assert AccessRight.get(AccessRight.EMBARGOED) == 'embargoed'
# Embargo just lifted today.
today = datetime.utcnow().date()
assert AccessRight.get(
AccessRight.EMBARGOED, embargo_date=today) == 'open'
# Future embargo date.
assert AccessRight.get(
AccessRight.EMBARGOED, embargo_date=today+timedelta(days=1)) \
== 'embargoed'
# Should work with strings as well
assert AccessRight.get(
AccessRight.EMBARGOED, embargo_date='1253-01-01') == AccessRight.OPEN
assert AccessRight.get(
AccessRight.EMBARGOED,
embargo_date=str(today+timedelta(days=1))) == AccessRight.EMBARGOED
def test_object_type():
    """Test object type."""
    required_keys = ('$schema', 'id', 'internal_id', 'title',
                     'title_plural', 'schema.org')

    def _check(obj):
        # Every object type entry carries the mandatory keys, with
        # English translations for the (plural) titles.
        for key in required_keys:
            assert key in obj
        assert 'en' in obj['title']
        assert 'en' in obj['title_plural']
        # Recurse into nested child types, if any.
        for child in obj.get('children', []):
            _check(child)

    for name in ('publication', 'poster', 'presentation', 'software',
                 'dataset', 'image', 'video'):
        _check(ObjectType.get(name))
    # Unknown identifiers resolve to None rather than raising.
    assert ObjectType.get('invalid') is None
| 4,629 | Python | .py | 115 | 34.347826 | 79 | 0.672833 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,396 | test_oaipmh.py | zenodo_zenodo/tests/unit/records/test_oaipmh.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2017 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""OAI-PMH test."""
from __future__ import absolute_import, print_function
def test_oaipmh_records_from(app, db, es, app_client):
    """Test selective harvesting from OAI-PMH."""
    # Both plain dates and full UTC timestamps are valid ``from`` values.
    url_template = '/oai2d?verb=ListRecords&metadataPrefix=oai_dc&from={}'
    for from_date in ('2017-12-22', '2017-12-22T00:00:00Z'):
        res = app_client.get(url_template.format(from_date))
        assert res.status_code == 200
| 1,366 | Python | .py | 32 | 40.0625 | 76 | 0.731579 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,397 | test_records_templates.py | zenodo_zenodo/tests/unit/records/test_records_templates.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2016 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Template test."""
from __future__ import absolute_import, print_function
from flask import url_for
from lxml import html
def test_header_links_exists(app, record_with_files_creation):
    """Validate that link tags to files exists in document header."""
    pid, record, record_url = record_with_files_creation
    file_url = url_for('invenio_records_ui.recid_files',
                       pid_value=pid.pid_value, filename='Test.pdf')
    with app.test_client() as client:
        res = client.get(record_url)
        assert res.status_code == 200
        tree = html.fromstring(res.data)
        # At least one <link rel="alternate"> must point at the file.
        alternates = tree.xpath('//link[@rel="alternate"]')
        assert any(link.get('href').endswith(file_url)
                   for link in alternates), \
            "<link> tags to files not found in record page."
def test_header_pdf_metahighwire_empty(app, db, record_with_bucket):
    """Check that the meta tag for highwire is not existing without PDF."""
    pid, record = record_with_bucket
    record_url = url_for('invenio_records_ui.recid', pid_value=pid.pid_value)
    with app.test_client() as client:
        res = client.get(record_url)
        assert res.status_code == 200
        # No PDF file attached, so no Highwire citation_pdf_url tag.
        tree = html.fromstring(res.data)
        assert not tree.xpath('//meta[@name="citation_pdf_url"]')
def test_header_pdf_exits_metahighwire(app, record_with_files_creation):
    """Check that the meta tag for highwire is exists when a PDF file."""
    pid, record, record_url = record_with_files_creation
    with app.test_client() as client:
        res = client.get(record_url)
        assert res.status_code == 200
        # Exactly one Highwire citation_pdf_url tag for the attached PDF.
        tree = html.fromstring(res.data)
        pdf_meta = tree.xpath('//meta[@name="citation_pdf_url"]')
        assert len(pdf_meta) == 1
| 2,697 | Python | .py | 57 | 42.157895 | 76 | 0.694709 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,398 | conftest.py | zenodo_zenodo/tests/unit/spam/conftest.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2022 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Pytest configuration."""
import pytest
from flask import current_app
@pytest.fixture
def use_safelist_config(app, api):
    """Activate the records safelist config on both UI and API apps.

    Temporarily lowers the safelist indexing threshold and enables
    safelist-filtered search, restoring each application's original
    values on teardown.
    """
    # NOTE: Not optimal for applying the config patch to both apps, but works
    keys = ('ZENODO_RECORDS_SAFELIST_INDEX_THRESHOLD',
            'ZENODO_RECORDS_SEARCH_SAFELIST')
    # Save the popped values *per app*: the previous implementation rebound
    # a single pair of variables on each loop iteration (so teardown
    # restored both apps to the last app's values) and popped one key from
    # ``current_app`` instead of the loop's ``_app``.
    saved = []
    for _app in (app, api):
        saved.append({key: _app.config.pop(key) for key in keys})
        _app.config['ZENODO_RECORDS_SAFELIST_INDEX_THRESHOLD'] = 2
        _app.config['ZENODO_RECORDS_SEARCH_SAFELIST'] = True
    yield
    for _app, old_values in zip((app, api), saved):
        _app.config.update(old_values)
| 1,819 | Python | .py | 43 | 38.418605 | 77 | 0.724548 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |
23,399 | test_spam_views.py | zenodo_zenodo/tests/unit/spam/test_spam_views.py | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2022 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Test spam views."""
from flask import url_for
from helpers import login_user_via_session
from invenio_search.proxies import current_search
from zenodo.modules.spam.models import SafelistEntry
def test_safelist_add_remove(
        app, api, db, use_safelist_config,
        users, published_record, json_headers,
):
    """Test that safelisting a user toggles their record's searchability."""
    with api.test_request_context():
        search_empty_query = url_for('invenio_records_rest.recid_list')
        search_with_query = url_for(
            'invenio_records_rest.recid_list', q='test')

    def _hits(api_client, url):
        # Number of search hits returned for a search URL.
        return len(api_client.get(url, headers=json_headers).json)

    def _submit_safelist_form(client, action):
        # POST the safelist add/remove form for user 1 and follow the
        # redirect back to the record page.
        return client.post(
            url_for('zenodo_spam.safelist_add_remove', user_id=1),
            data={
                'action': action,
                'next': url_for('invenio_records_ui.recid',
                                pid_value=published_record['recid']),
            },
            follow_redirects=True,
        )

    with api.test_client() as api_client:
        # Not safelisted: hidden from the empty query, but an explicit
        # query always returns the record.
        assert _hits(api_client, search_empty_query) == 0
        assert _hits(api_client, search_with_query) == 1

    # Add to safelist
    with app.test_client() as client:
        login_user_via_session(client, email=users[2]['email'])  # admin
        res = _submit_safelist_form(client, 'post')
        assert 'Added to safelist' in res.get_data()
        assert SafelistEntry.query.get(1) is not None

    with api.test_client() as api_client:
        current_search.flush_and_refresh(index='records')
        # Safelisted: the record now shows up in both searches.
        assert _hits(api_client, search_empty_query) == 1
        assert _hits(api_client, search_with_query) == 1

    # Remove from safelist
    with app.test_client() as client:
        login_user_via_session(client, email=users[2]['email'])  # admin
        res = _submit_safelist_form(client, 'delete')
        assert 'Removed from safelist' in res.get_data()
        assert SafelistEntry.query.get(1) is None

    with api.test_client() as api_client:
        current_search.flush_and_refresh(index='records')
        # Back to not safelisted: hidden from the empty query again.
        assert _hits(api_client, search_empty_query) == 0
        assert _hits(api_client, search_with_query) == 1
| 4,052 | Python | .py | 90 | 37.777778 | 79 | 0.666835 | zenodo/zenodo | 906 | 241 | 543 | GPL-2.0 | 9/5/2024, 5:13:26 PM (Europe/Amsterdam) |