| Column | Type | Lengths / Values |
|---|---|---|
| hexsha | string | lengths 40 to 40 |
| size | int64 | 3 to 1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | lengths 3 to 972 |
| max_stars_repo_name | string | lengths 6 to 130 |
| max_stars_repo_head_hexsha | string | lengths 40 to 78 |
| max_stars_repo_licenses | list | lengths 1 to 10 |
| max_stars_count | int64 | 1 to 191k ⌀ |
| max_stars_repo_stars_event_min_datetime | string | lengths 24 to 24 ⌀ |
| max_stars_repo_stars_event_max_datetime | string | lengths 24 to 24 ⌀ |
| max_issues_repo_path | string | lengths 3 to 972 |
| max_issues_repo_name | string | lengths 6 to 130 |
| max_issues_repo_head_hexsha | string | lengths 40 to 78 |
| max_issues_repo_licenses | list | lengths 1 to 10 |
| max_issues_count | int64 | 1 to 116k ⌀ |
| max_issues_repo_issues_event_min_datetime | string | lengths 24 to 24 ⌀ |
| max_issues_repo_issues_event_max_datetime | string | lengths 24 to 24 ⌀ |
| max_forks_repo_path | string | lengths 3 to 972 |
| max_forks_repo_name | string | lengths 6 to 130 |
| max_forks_repo_head_hexsha | string | lengths 40 to 78 |
| max_forks_repo_licenses | list | lengths 1 to 10 |
| max_forks_count | int64 | 1 to 105k ⌀ |
| max_forks_repo_forks_event_min_datetime | string | lengths 24 to 24 ⌀ |
| max_forks_repo_forks_event_max_datetime | string | lengths 24 to 24 ⌀ |
| content | string | lengths 3 to 1.03M |
| avg_line_length | float64 | 1.13 to 941k |
| max_line_length | int64 | 2 to 941k |
| alphanum_fraction | float64 | 0 to 1 |

hexsha: c2c69d4f5aa158318f2362e729326512c8cab2cd | size: 51,558 | ext: py | lang: Python
max_stars_repo: tests/unit/utils/test_network.py @ magenta-aps/salt (b6c78ecba697b1d7ba96ea95be300d12d8abf2d1) | licenses: ["Apache-2.0"] | max_stars_count: 1 | stars_event: 2021-09-06T00:14:04.000Z to 2021-09-06T00:14:04.000Z
max_issues_repo: tests/unit/utils/test_network.py @ magenta-aps/salt (b6c78ecba697b1d7ba96ea95be300d12d8abf2d1) | licenses: ["Apache-2.0"] | max_issues_count: 2 | issues_event: 2021-04-30T21:17:57.000Z to 2021-12-13T20:40:23.000Z
max_forks_repo: tests/unit/utils/test_network.py @ magenta-aps/salt (b6c78ecba697b1d7ba96ea95be300d12d8abf2d1) | licenses: ["Apache-2.0"] | max_forks_count: null
content:
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import logging
import socket
import textwrap
import pytest
import salt.exceptions
# Import salt libs
import salt.utils.network as network
from salt._compat import ipaddress
from tests.support.mock import MagicMock, create_autospec, mock_open, patch
# Import Salt Testing libs
from tests.support.unit import TestCase, skipIf
log = logging.getLogger(__name__)
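# Canned command output (ifconfig, sockstat, ss) used as fixtures by the parsing tests below.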
LINUX = """\
eth0 Link encap:Ethernet HWaddr e0:3f:49:85:6a:af
inet addr:10.10.10.56 Bcast:10.10.10.255 Mask:255.255.252.0
inet6 addr: fe80::e23f:49ff:fe85:6aaf/64 Scope:Link
UP BROADCAST RUNNING MULTICAST MTU:1500 Metric:1
RX packets:643363 errors:0 dropped:0 overruns:0 frame:0
TX packets:196539 errors:0 dropped:0 overruns:0 carrier:0
collisions:0 txqueuelen:1000
RX bytes:386388355 (368.4 MiB) TX bytes:25600939 (24.4 MiB)
lo Link encap:Local Loopback
inet addr:127.0.0.1 Mask:255.0.0.0
inet6 addr: ::1/128 Scope:Host
UP LOOPBACK RUNNING MTU:65536 Metric:1
RX packets:548901 errors:0 dropped:0 overruns:0 frame:0
TX packets:548901 errors:0 dropped:0 overruns:0 carrier:0
collisions:0 txqueuelen:0
RX bytes:613479895 (585.0 MiB) TX bytes:613479895 (585.0 MiB)
"""
FREEBSD = """
em0: flags=8843<UP,BROADCAST,RUNNING,SIMPLEX,MULTICAST> metric 0 mtu 1500
options=4219b<RXCSUM,TXCSUM,VLAN_MTU,VLAN_HWTAGGING,VLAN_HWCSUM,TSO4,WOL_MAGIC,VLAN_HWTSO>
ether 00:30:48:ff:ff:ff
inet 10.10.10.250 netmask 0xffffffe0 broadcast 10.10.10.255
inet 10.10.10.56 netmask 0xffffffc0 broadcast 10.10.10.63
media: Ethernet autoselect (1000baseT <full-duplex>)
status: active
em1: flags=8c02<BROADCAST,OACTIVE,SIMPLEX,MULTICAST> metric 0 mtu 1500
options=4219b<RXCSUM,TXCSUM,VLAN_MTU,VLAN_HWTAGGING,VLAN_HWCSUM,TSO4,WOL_MAGIC,VLAN_HWTSO>
ether 00:30:48:aa:aa:aa
media: Ethernet autoselect
status: no carrier
plip0: flags=8810<POINTOPOINT,SIMPLEX,MULTICAST> metric 0 mtu 1500
lo0: flags=8049<UP,LOOPBACK,RUNNING,MULTICAST> metric 0 mtu 16384
options=3<RXCSUM,TXCSUM>
inet6 fe80::1%lo0 prefixlen 64 scopeid 0x8
inet6 ::1 prefixlen 128
inet 127.0.0.1 netmask 0xff000000
nd6 options=3<PERFORMNUD,ACCEPT_RTADV>
tun0: flags=8051<UP,POINTOPOINT,RUNNING,MULTICAST> metric 0 mtu 1500
options=80000<LINKSTATE>
inet 10.12.0.1 --> 10.12.0.2 netmask 0xffffffff
Opened by PID 1964
"""
SOLARIS = """\
lo0: flags=2001000849<UP,LOOPBACK,RUNNING,MULTICAST,IPv4,VIRTUAL> mtu 8232 index 1
inet 127.0.0.1 netmask ff000000
net0: flags=100001100943<UP,BROADCAST,RUNNING,PROMISC,MULTICAST,ROUTER,IPv4,PHYSRUNNING> mtu 1500 index 2
inet 10.10.10.38 netmask ffffffe0 broadcast 10.10.10.63
ilbint0: flags=110001100843<UP,BROADCAST,RUNNING,MULTICAST,ROUTER,IPv4,VRRP,PHYSRUNNING> mtu 1500 index 3
inet 10.6.0.11 netmask ffffff00 broadcast 10.6.0.255
ilbext0: flags=110001100843<UP,BROADCAST,RUNNING,MULTICAST,ROUTER,IPv4,VRRP,PHYSRUNNING> mtu 1500 index 4
inet 10.10.11.11 netmask ffffffe0 broadcast 10.10.11.31
ilbext0:1: flags=110001100843<UP,BROADCAST,RUNNING,MULTICAST,ROUTER,IPv4,VRRP,PHYSRUNNING> mtu 1500 index 4
inet 10.10.11.12 netmask ffffffe0 broadcast 10.10.11.31
vpn0: flags=1000011008d1<UP,POINTOPOINT,RUNNING,NOARP,MULTICAST,ROUTER,IPv4,PHYSRUNNING> mtu 1480 index 5
inet tunnel src 10.10.11.12 tunnel dst 10.10.5.5
tunnel hop limit 64
inet 10.6.0.14 --> 10.6.0.15 netmask ff000000
lo0: flags=2002000849<UP,LOOPBACK,RUNNING,MULTICAST,IPv6,VIRTUAL> mtu 8252 index 1
inet6 ::1/128
net0: flags=120002004941<UP,RUNNING,PROMISC,MULTICAST,DHCP,IPv6,PHYSRUNNING> mtu 1500 index 2
inet6 fe80::221:9bff:fefd:2a22/10
ilbint0: flags=120002000840<RUNNING,MULTICAST,IPv6,PHYSRUNNING> mtu 1500 index 3
inet6 ::/0
ilbext0: flags=120002000840<RUNNING,MULTICAST,IPv6,PHYSRUNNING> mtu 1500 index 4
inet6 ::/0
vpn0: flags=120002200850<POINTOPOINT,RUNNING,MULTICAST,NONUD,IPv6,PHYSRUNNING> mtu 1480 index 5
inet tunnel src 10.10.11.12 tunnel dst 10.10.5.5
tunnel hop limit 64
inet6 ::/0 --> fe80::b2d6:7c10
"""
NETBSD = """\
vioif0: flags=0x8943<UP,BROADCAST,RUNNING,PROMISC,SIMPLEX,MULTICAST> mtu 1500
ec_capabilities=1<VLAN_MTU>
ec_enabled=0
address: 00:a0:98:e6:83:18
inet 192.168.1.80/24 broadcast 192.168.1.255 flags 0x0
inet6 fe80::2a0:98ff:fee6:8318%vioif0/64 flags 0x0 scopeid 0x1
lo0: flags=0x8049<UP,LOOPBACK,RUNNING,MULTICAST> mtu 33624
inet 127.0.0.1/8 flags 0x0
inet6 ::1/128 flags 0x20<NODAD>
inet6 fe80::1%lo0/64 flags 0x0 scopeid 0x2
"""
FREEBSD_SOCKSTAT = """\
USER COMMAND PID FD PROTO LOCAL ADDRESS FOREIGN ADDRESS
root python2.7 1294 41 tcp4 127.0.0.1:61115 127.0.0.1:4506
"""
FREEBSD_SOCKSTAT_WITH_FAT_PID = """\
USER COMMAND PID FD PROTO LOCAL ADDRESS FOREIGN ADDRESS
salt-master python2.781106 35 tcp4 127.0.0.1:61115 127.0.0.1:4506
"""
NETLINK_SS = """
State Recv-Q Send-Q Local Address:Port Peer Address:Port
ESTAB 0 0 127.0.0.1:56726 127.0.0.1:4505
ESTAB 0 0 ::ffff:1.2.3.4:5678 ::ffff:1.2.3.4:4505
"""
LINUX_NETLINK_SS_OUTPUT = """\
State Recv-Q Send-Q Local Address:Port Peer Address:Port
TIME-WAIT 0 0 [::1]:8009 [::1]:40368
LISTEN 0 128 127.0.0.1:5903 0.0.0.0:*
ESTAB 0 0 [::ffff:127.0.0.1]:4506 [::ffff:127.0.0.1]:32315
ESTAB 0 0 192.168.122.1:4506 192.168.122.177:24545
"""
IPV4_SUBNETS = {
True: ("10.10.0.0/24",),
False: ("10.10.0.0", "10.10.0.0/33", "FOO", 9, "0.9.800.1000/24"),
}
IPV6_SUBNETS = {
True: ("::1/128",),
False: ("::1", "::1/129", "FOO", 9, "aj01::feac/64"),
}
class NetworkTestCase(TestCase):
def test_sanitize_host(self):
ret = network.sanitize_host("10.1./2.$3")
self.assertEqual(ret, "10.1.2.3")
def test_host_to_ips(self):
"""
NOTE: When this test fails it's usually because the IP address has
changed. In these cases, we just need to update the IP address in the
assertion.
"""
def _side_effect(host, *args):
try:
return {
"github.com": [
(2, 1, 6, "", ("192.30.255.112", 0)),
(2, 1, 6, "", ("192.30.255.113", 0)),
],
"ipv6host.foo": [
(socket.AF_INET6, 1, 6, "", ("2001:a71::1", 0, 0, 0)),
],
}[host]
except KeyError:
raise socket.gaierror(-2, "Name or service not known")
getaddrinfo_mock = MagicMock(side_effect=_side_effect)
with patch.object(socket, "getaddrinfo", getaddrinfo_mock):
# Test host that can be resolved
ret = network.host_to_ips("github.com")
self.assertEqual(ret, ["192.30.255.112", "192.30.255.113"])
# Test ipv6
ret = network.host_to_ips("ipv6host.foo")
self.assertEqual(ret, ["2001:a71::1"])
# Test host that can't be resolved
ret = network.host_to_ips("someothersite.com")
self.assertEqual(ret, None)
def test_generate_minion_id(self):
self.assertTrue(network.generate_minion_id())
def test__generate_minion_id_with_unicode_in_etc_hosts(self):
"""
Test that unicode in /etc/hosts doesn't raise an error when
_generate_minion_id() helper is called to gather the hosts.
"""
content = textwrap.dedent(
"""\
# 以下为主机名解析
## ccc
127.0.0.1 localhost thisismyhostname # 本机
"""
)
fopen_mock = mock_open(read_data={"/etc/hosts": content})
with patch("salt.utils.files.fopen", fopen_mock):
assert "thisismyhostname" in network._generate_minion_id()
def test_is_ip(self):
self.assertTrue(network.is_ip("10.10.0.3"))
self.assertFalse(network.is_ip("0.9.800.1000"))
# Check 16-char-long unicode string
# https://github.com/saltstack/salt/issues/51258
self.assertFalse(network.is_ipv6("sixteen-char-str"))
def test_is_ipv4(self):
self.assertTrue(network.is_ipv4("10.10.0.3"))
self.assertFalse(network.is_ipv4("10.100.1"))
self.assertFalse(network.is_ipv4("2001:db8:0:1:1:1:1:1"))
# Check 16-char-long unicode string
# https://github.com/saltstack/salt/issues/51258
self.assertFalse(network.is_ipv4("sixteen-char-str"))
def test_is_ipv6(self):
self.assertTrue(network.is_ipv6("2001:db8:0:1:1:1:1:1"))
self.assertTrue(network.is_ipv6("0:0:0:0:0:0:0:1"))
self.assertTrue(network.is_ipv6("::1"))
self.assertTrue(network.is_ipv6("::"))
self.assertTrue(network.is_ipv6("2001:0db8:85a3:0000:0000:8a2e:0370:7334"))
self.assertTrue(network.is_ipv6("2001:0db8:85a3::8a2e:0370:7334"))
self.assertFalse(network.is_ipv6("2001:0db8:0370:7334"))
self.assertFalse(network.is_ipv6("2001:0db8:::0370:7334"))
self.assertFalse(network.is_ipv6("10.0.1.2"))
self.assertFalse(network.is_ipv6("2001.0db8.85a3.0000.0000.8a2e.0370.7334"))
# Check 16-char-long unicode string
# https://github.com/saltstack/salt/issues/51258
self.assertFalse(network.is_ipv6("sixteen-char-str"))
def test_ipv6(self):
self.assertTrue(network.ipv6("2001:db8:0:1:1:1:1:1"))
self.assertTrue(network.ipv6("0:0:0:0:0:0:0:1"))
self.assertTrue(network.ipv6("::1"))
self.assertTrue(network.ipv6("::"))
self.assertTrue(network.ipv6("2001:0db8:85a3:0000:0000:8a2e:0370:7334"))
self.assertTrue(network.ipv6("2001:0db8:85a3::8a2e:0370:7334"))
self.assertTrue(network.ipv6("2001:67c:2e8::/48"))
def test_parse_host_port(self):
_ip = ipaddress.ip_address
good_host_ports = {
"10.10.0.3": (_ip("10.10.0.3").compressed, None),
"10.10.0.3:1234": (_ip("10.10.0.3").compressed, 1234),
"2001:0db8:85a3::8a2e:0370:7334": (
_ip("2001:0db8:85a3::8a2e:0370:7334").compressed,
None,
),
"[2001:0db8:85a3::8a2e:0370:7334]:1234": (
_ip("2001:0db8:85a3::8a2e:0370:7334").compressed,
1234,
),
"2001:0db8:85a3::7334": (_ip("2001:0db8:85a3::7334").compressed, None),
"[2001:0db8:85a3::7334]:1234": (
_ip("2001:0db8:85a3::7334").compressed,
1234,
),
}
bad_host_ports = [
"10.10.0.3/24",
"10.10.0.3::1234",
"2001:0db8:0370:7334",
"2001:0db8:0370::7334]:1234",
"2001:0db8:0370:0:a:b:c:d:1234",
]
for host_port, assertion_value in good_host_ports.items():
host = port = None
host, port = network.parse_host_port(host_port)
self.assertEqual((host, port), assertion_value)
for host_port in bad_host_ports:
try:
self.assertRaises(ValueError, network.parse_host_port, host_port)
except AssertionError as _e_:
log.error(
'bad host_port value: "%s" failed to trigger ValueError exception',
host_port,
)
raise _e_
def test_dns_check(self):
hosts = [
{
"host": "10.10.0.3",
"port": "",
"mocked": [(2, 1, 6, "", ("10.10.0.3", 0))],
"ret": "10.10.0.3",
},
{
"host": "10.10.0.3",
"port": "1234",
"mocked": [(2, 1, 6, "", ("10.10.0.3", 0))],
"ret": "10.10.0.3",
},
{
"host": "2001:0db8:85a3::8a2e:0370:7334",
"port": "",
"mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))],
"ret": "[2001:db8:85a3::8a2e:370:7334]",
},
{
"host": "2001:0db8:85a3::8a2e:370:7334",
"port": "1234",
"mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))],
"ret": "[2001:db8:85a3::8a2e:370:7334]",
},
{
"host": "salt-master",
"port": "1234",
"mocked": [(2, 1, 6, "", ("127.0.0.1", 0))],
"ret": "127.0.0.1",
},
]
for host in hosts:
with patch.object(
socket,
"getaddrinfo",
create_autospec(socket.getaddrinfo, return_value=host["mocked"]),
):
with patch("socket.socket", create_autospec(socket.socket)):
ret = network.dns_check(host["host"], host["port"])
self.assertEqual(ret, host["ret"])
def test_dns_check_ipv6_filter(self):
# raise exception to skip everything after the getaddrinfo call
with patch.object(
socket,
"getaddrinfo",
create_autospec(socket.getaddrinfo, side_effect=Exception),
) as getaddrinfo:
for ipv6, param in [
(None, socket.AF_UNSPEC),
(True, socket.AF_INET6),
(False, socket.AF_INET),
]:
with self.assertRaises(Exception):
network.dns_check("foo", "1", ipv6=ipv6)
getaddrinfo.assert_called_with("foo", "1", param, socket.SOCK_STREAM)
def test_dns_check_errors(self):
with patch.object(
socket, "getaddrinfo", create_autospec(socket.getaddrinfo, return_value=[])
):
with self.assertRaisesRegex(
salt.exceptions.SaltSystemExit,
"DNS lookup or connection check of 'foo' failed",
):
network.dns_check("foo", "1")
with patch.object(
socket,
"getaddrinfo",
create_autospec(socket.getaddrinfo, side_effect=TypeError),
):
with self.assertRaisesRegex(
salt.exceptions.SaltSystemExit, "Invalid or unresolveable address"
):
network.dns_check("foo", "1")
def test_test_addrs(self):
# subset of real data from getaddrinfo against saltstack.com
addrinfo = [
(30, 2, 17, "", ("2600:9000:21eb:a800:8:1031:abc0:93a1", 0, 0, 0)),
(30, 1, 6, "", ("2600:9000:21eb:a800:8:1031:abc0:93a1", 0, 0, 0)),
(30, 2, 17, "", ("2600:9000:21eb:b400:8:1031:abc0:93a1", 0, 0, 0)),
(30, 1, 6, "", ("2600:9000:21eb:b400:8:1031:abc0:93a1", 0, 0, 0)),
(2, 1, 6, "", ("13.35.99.52", 0)),
(2, 2, 17, "", ("13.35.99.85", 0)),
(2, 1, 6, "", ("13.35.99.85", 0)),
(2, 2, 17, "", ("13.35.99.122", 0)),
]
with patch("socket.socket", create_autospec(socket.socket)) as s:
# we connect to the first address
addrs = network._test_addrs(addrinfo, 80)
self.assertTrue(len(addrs) == 1)
self.assertTrue(addrs[0] == addrinfo[0][4][0])
# the first lookup fails, succeeds on next check
s.side_effect = [socket.error, MagicMock()]
addrs = network._test_addrs(addrinfo, 80)
self.assertTrue(len(addrs) == 1)
self.assertTrue(addrs[0] == addrinfo[2][4][0])
# nothing can connect, but we've eliminated duplicates
s.side_effect = socket.error
addrs = network._test_addrs(addrinfo, 80)
self.assertTrue(len(addrs) == 5)
def test_is_subnet(self):
for subnet_data in (IPV4_SUBNETS, IPV6_SUBNETS):
for item in subnet_data[True]:
log.debug("Testing that %s is a valid subnet", item)
self.assertTrue(network.is_subnet(item))
for item in subnet_data[False]:
log.debug("Testing that %s is not a valid subnet", item)
self.assertFalse(network.is_subnet(item))
def test_is_ipv4_subnet(self):
for item in IPV4_SUBNETS[True]:
log.debug("Testing that %s is a valid subnet", item)
self.assertTrue(network.is_ipv4_subnet(item))
for item in IPV4_SUBNETS[False]:
log.debug("Testing that %s is not a valid subnet", item)
self.assertFalse(network.is_ipv4_subnet(item))
def test_is_ipv6_subnet(self):
for item in IPV6_SUBNETS[True]:
log.debug("Testing that %s is a valid subnet", item)
self.assertTrue(network.is_ipv6_subnet(item))
for item in IPV6_SUBNETS[False]:
log.debug("Testing that %s is not a valid subnet", item)
self.assertFalse(network.is_ipv6_subnet(item))
def test_cidr_to_ipv4_netmask(self):
self.assertEqual(network.cidr_to_ipv4_netmask(24), "255.255.255.0")
self.assertEqual(network.cidr_to_ipv4_netmask(21), "255.255.248.0")
self.assertEqual(network.cidr_to_ipv4_netmask(17), "255.255.128.0")
self.assertEqual(network.cidr_to_ipv4_netmask(9), "255.128.0.0")
self.assertEqual(network.cidr_to_ipv4_netmask(36), "")
self.assertEqual(network.cidr_to_ipv4_netmask("lol"), "")
def test_number_of_set_bits_to_ipv4_netmask(self):
set_bits_to_netmask = network._number_of_set_bits_to_ipv4_netmask(0xFFFFFF00)
self.assertEqual(set_bits_to_netmask, "255.255.255.0")
set_bits_to_netmask = network._number_of_set_bits_to_ipv4_netmask(0xFFFF6400)
def test_hex2ip(self):
self.assertEqual(network.hex2ip("0x4A7D2B63"), "74.125.43.99")
self.assertEqual(network.hex2ip("0x4A7D2B63", invert=True), "99.43.125.74")
self.assertEqual(
network.hex2ip("00000000000000000000FFFF7F000001"), "127.0.0.1"
)
self.assertEqual(
network.hex2ip("0000000000000000FFFF00000100007F", invert=True), "127.0.0.1"
)
self.assertEqual(
network.hex2ip("20010DB8000000000000000000000000"), "2001:db8::"
)
self.assertEqual(
network.hex2ip("B80D0120000000000000000000000000", invert=True),
"2001:db8::",
)
def test_interfaces_ifconfig_linux(self):
interfaces = network._interfaces_ifconfig(LINUX)
self.assertEqual(
interfaces,
{
"eth0": {
"hwaddr": "e0:3f:49:85:6a:af",
"inet": [
{
"address": "10.10.10.56",
"broadcast": "10.10.10.255",
"netmask": "255.255.252.0",
}
],
"inet6": [
{
"address": "fe80::e23f:49ff:fe85:6aaf",
"prefixlen": "64",
"scope": "link",
}
],
"up": True,
},
"lo": {
"inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}],
"inet6": [{"address": "::1", "prefixlen": "128", "scope": "host"}],
"up": True,
},
},
)
def test_interfaces_ifconfig_freebsd(self):
interfaces = network._interfaces_ifconfig(FREEBSD)
self.assertEqual(
interfaces,
{
"": {"up": False},
"em0": {
"hwaddr": "00:30:48:ff:ff:ff",
"inet": [
{
"address": "10.10.10.250",
"broadcast": "10.10.10.255",
"netmask": "255.255.255.224",
},
{
"address": "10.10.10.56",
"broadcast": "10.10.10.63",
"netmask": "255.255.255.192",
},
],
"up": True,
},
"em1": {"hwaddr": "00:30:48:aa:aa:aa", "up": False},
"lo0": {
"inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}],
"inet6": [
{"address": "fe80::1", "prefixlen": "64", "scope": "0x8"},
{"address": "::1", "prefixlen": "128", "scope": None},
],
"up": True,
},
"plip0": {"up": False},
"tun0": {
"inet": [{"address": "10.12.0.1", "netmask": "255.255.255.255"}],
"up": True,
},
},
)
def test_interfaces_ifconfig_solaris(self):
with patch("salt.utils.platform.is_sunos", lambda: True):
interfaces = network._interfaces_ifconfig(SOLARIS)
expected_interfaces = {
"ilbint0": {
"inet6": [],
"inet": [
{
"broadcast": "10.6.0.255",
"netmask": "255.255.255.0",
"address": "10.6.0.11",
}
],
"up": True,
},
"lo0": {
"inet6": [{"prefixlen": "128", "address": "::1"}],
"inet": [{"netmask": "255.0.0.0", "address": "127.0.0.1"}],
"up": True,
},
"ilbext0": {
"inet6": [],
"inet": [
{
"broadcast": "10.10.11.31",
"netmask": "255.255.255.224",
"address": "10.10.11.11",
},
{
"broadcast": "10.10.11.31",
"netmask": "255.255.255.224",
"address": "10.10.11.12",
},
],
"up": True,
},
"vpn0": {
"inet6": [],
"inet": [{"netmask": "255.0.0.0", "address": "10.6.0.14"}],
"up": True,
},
"net0": {
"inet6": [
{"prefixlen": "10", "address": "fe80::221:9bff:fefd:2a22"}
],
"inet": [
{
"broadcast": "10.10.10.63",
"netmask": "255.255.255.224",
"address": "10.10.10.38",
}
],
"up": True,
},
}
self.assertEqual(interfaces, expected_interfaces)
def test_interfaces_ifconfig_netbsd(self):
interfaces = network._netbsd_interfaces_ifconfig(NETBSD)
self.assertEqual(
interfaces,
{
"lo0": {
"inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}],
"inet6": [
{"address": "fe80::1", "prefixlen": "64", "scope": "lo0"}
],
"up": True,
},
"vioif0": {
"hwaddr": "00:a0:98:e6:83:18",
"inet": [
{
"address": "192.168.1.80",
"broadcast": "192.168.1.255",
"netmask": "255.255.255.0",
}
],
"inet6": [
{
"address": "fe80::2a0:98ff:fee6:8318",
"prefixlen": "64",
"scope": "vioif0",
}
],
"up": True,
},
},
)
def test_freebsd_remotes_on(self):
with patch("salt.utils.platform.is_sunos", lambda: False):
with patch("salt.utils.platform.is_freebsd", lambda: True):
with patch("subprocess.check_output", return_value=FREEBSD_SOCKSTAT):
remotes = network._freebsd_remotes_on("4506", "remote")
self.assertEqual(remotes, set(["127.0.0.1"]))
def test_freebsd_remotes_on_with_fat_pid(self):
with patch("salt.utils.platform.is_sunos", lambda: False):
with patch("salt.utils.platform.is_freebsd", lambda: True):
with patch(
"subprocess.check_output",
return_value=FREEBSD_SOCKSTAT_WITH_FAT_PID,
):
remotes = network._freebsd_remotes_on("4506", "remote")
self.assertEqual(remotes, set(["127.0.0.1"]))
def test_netlink_tool_remote_on_a(self):
with patch("salt.utils.platform.is_sunos", lambda: False):
with patch("salt.utils.platform.is_linux", lambda: True):
with patch(
"subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT
):
remotes = network._netlink_tool_remote_on("4506", "local")
self.assertEqual(
remotes, set(["192.168.122.177", "::ffff:127.0.0.1"])
)
def test_netlink_tool_remote_on_b(self):
with patch("subprocess.check_output", return_value=NETLINK_SS):
remotes = network._netlink_tool_remote_on("4505", "remote_port")
self.assertEqual(remotes, set(["127.0.0.1", "::ffff:1.2.3.4"]))
def test_generate_minion_id_distinct(self):
"""
Test if minion IDs are distinct in the pool.
:return:
"""
with patch("platform.node", MagicMock(return_value="nodename")), patch(
"socket.gethostname", MagicMock(return_value="hostname")
), patch(
"socket.getfqdn", MagicMock(return_value="hostname.domainname.blank")
), patch(
"socket.getaddrinfo",
MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]),
), patch(
"salt.utils.files.fopen", mock_open()
), patch(
"salt.utils.network.ip_addrs",
MagicMock(return_value=["1.2.3.4", "5.6.7.8"]),
):
self.assertEqual(
network._generate_minion_id(),
[
"hostname.domainname.blank",
"nodename",
"hostname",
"1.2.3.4",
"5.6.7.8",
],
)
def test_generate_minion_id_127_name(self):
"""
Test if minion IDs can be named 127.foo
:return:
"""
with patch("platform.node", MagicMock(return_value="127")), patch(
"socket.gethostname", MagicMock(return_value="127")
), patch(
"socket.getfqdn", MagicMock(return_value="127.domainname.blank")
), patch(
"socket.getaddrinfo",
MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]),
), patch(
"salt.utils.files.fopen", mock_open()
), patch(
"salt.utils.network.ip_addrs",
MagicMock(return_value=["1.2.3.4", "5.6.7.8"]),
):
self.assertEqual(
network._generate_minion_id(),
["127.domainname.blank", "127", "1.2.3.4", "5.6.7.8"],
)
def test_generate_minion_id_127_name_startswith(self):
"""
Test if minion IDs can be named starting from "127"
:return:
"""
with patch("platform.node", MagicMock(return_value="127890")), patch(
"socket.gethostname", MagicMock(return_value="127890")
), patch(
"socket.getfqdn", MagicMock(return_value="127890.domainname.blank")
), patch(
"socket.getaddrinfo",
MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]),
), patch(
"salt.utils.files.fopen", mock_open()
), patch(
"salt.utils.network.ip_addrs",
MagicMock(return_value=["1.2.3.4", "5.6.7.8"]),
):
self.assertEqual(
network._generate_minion_id(),
["127890.domainname.blank", "127890", "1.2.3.4", "5.6.7.8"],
)
def test_generate_minion_id_duplicate(self):
"""
Test if IP addresses in the minion IDs are distinct in the pool
:return:
"""
with patch("platform.node", MagicMock(return_value="hostname")), patch(
"socket.gethostname", MagicMock(return_value="hostname")
), patch("socket.getfqdn", MagicMock(return_value="hostname")), patch(
"socket.getaddrinfo",
MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]),
), patch(
"salt.utils.files.fopen", mock_open()
), patch(
"salt.utils.network.ip_addrs",
MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]),
):
self.assertEqual(network._generate_minion_id(), ["hostname", "1.2.3.4"])
def test_generate_minion_id_platform_used(self):
"""
Test if platform.node is used for the first occurrence.
The platform.node is most common hostname resolver before anything else.
:return:
"""
with patch(
"platform.node", MagicMock(return_value="very.long.and.complex.domain.name")
), patch("socket.gethostname", MagicMock(return_value="hostname")), patch(
"socket.getfqdn", MagicMock(return_value="")
), patch(
"socket.getaddrinfo",
MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]),
), patch(
"salt.utils.files.fopen", mock_open()
), patch(
"salt.utils.network.ip_addrs",
MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]),
):
self.assertEqual(
network.generate_minion_id(), "very.long.and.complex.domain.name"
)
def test_generate_minion_id_platform_localhost_filtered(self):
"""
Test if localhost is filtered from the first occurrence.
:return:
"""
with patch("platform.node", MagicMock(return_value="localhost")), patch(
"socket.gethostname", MagicMock(return_value="pick.me")
), patch(
"socket.getfqdn", MagicMock(return_value="hostname.domainname.blank")
), patch(
"socket.getaddrinfo",
MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]),
), patch(
"salt.utils.files.fopen", mock_open()
), patch(
"salt.utils.network.ip_addrs",
MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]),
):
self.assertEqual(network.generate_minion_id(), "hostname.domainname.blank")
def test_generate_minion_id_platform_localhost_filtered_all(self):
"""
Test if any of the localhost is filtered from everywhere.
:return:
"""
with patch("platform.node", MagicMock(return_value="localhost")), patch(
"socket.gethostname", MagicMock(return_value="ip6-loopback")
), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch(
"socket.getaddrinfo",
MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]),
), patch(
"salt.utils.files.fopen", mock_open()
), patch(
"salt.utils.network.ip_addrs",
MagicMock(
return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"]
),
):
self.assertEqual(network.generate_minion_id(), "1.2.3.4")
def test_generate_minion_id_platform_localhost_only(self):
"""
Test if there is no other choice but localhost.
:return:
"""
with patch("platform.node", MagicMock(return_value="localhost")), patch(
"socket.gethostname", MagicMock(return_value="ip6-loopback")
), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch(
"socket.getaddrinfo",
MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]),
), patch(
"salt.utils.files.fopen", mock_open()
), patch(
"salt.utils.network.ip_addrs",
MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]),
):
self.assertEqual(network.generate_minion_id(), "localhost")
def test_generate_minion_id_platform_fqdn(self):
"""
Test if fqdn is picked up.
:return:
"""
with patch("platform.node", MagicMock(return_value="localhost")), patch(
"socket.gethostname", MagicMock(return_value="ip6-loopback")
), patch("socket.getfqdn", MagicMock(return_value="pick.me")), patch(
"socket.getaddrinfo",
MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]),
), patch(
"salt.utils.files.fopen", mock_open()
), patch(
"salt.utils.network.ip_addrs",
MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]),
):
self.assertEqual(network.generate_minion_id(), "pick.me")
def test_generate_minion_id_platform_localhost_addrinfo(self):
"""
Test if addrinfo is picked up.
:return:
"""
with patch("platform.node", MagicMock(return_value="localhost")), patch(
"socket.gethostname", MagicMock(return_value="ip6-loopback")
), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch(
"socket.getaddrinfo",
MagicMock(return_value=[(2, 3, 0, "pick.me", ("127.0.1.1", 0))]),
), patch(
"salt.utils.files.fopen", mock_open()
), patch(
"salt.utils.network.ip_addrs",
MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]),
):
self.assertEqual(network.generate_minion_id(), "pick.me")
def test_generate_minion_id_platform_ip_addr_only(self):
"""
Test that the IP address is used as the Minion ID when no DNS name is available.
:return:
"""
with patch("platform.node", MagicMock(return_value="localhost")), patch(
"socket.gethostname", MagicMock(return_value="ip6-loopback")
), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch(
"socket.getaddrinfo",
MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]),
), patch(
"salt.utils.files.fopen", mock_open()
), patch(
"salt.utils.network.ip_addrs",
MagicMock(
return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"]
),
):
self.assertEqual(network.generate_minion_id(), "1.2.3.4")
def test_gen_mac(self):
with patch("random.randint", return_value=1) as random_mock:
self.assertEqual(random_mock.return_value, 1)
ret = network.gen_mac("00:16:3E")
expected_mac = "00:16:3E:01:01:01"
self.assertEqual(ret, expected_mac)
def test_mac_str_to_bytes(self):
self.assertRaises(ValueError, network.mac_str_to_bytes, "31337")
self.assertRaises(ValueError, network.mac_str_to_bytes, "0001020304056")
self.assertRaises(ValueError, network.mac_str_to_bytes, "00:01:02:03:04:056")
self.assertRaises(ValueError, network.mac_str_to_bytes, "a0:b0:c0:d0:e0:fg")
self.assertEqual(
b"\x10\x08\x06\x04\x02\x00", network.mac_str_to_bytes("100806040200")
)
self.assertEqual(
b"\xf8\xe7\xd6\xc5\xb4\xa3", network.mac_str_to_bytes("f8e7d6c5b4a3")
)
@skipIf(True, "SLOWTEST skip")
def test_generate_minion_id_with_long_hostname(self):
"""
Validate the fix for:
https://github.com/saltstack/salt/issues/51160
"""
long_name = "localhost-abcdefghijklmnopqrstuvwxyz-abcdefghijklmnopqrstuvwxyz"
with patch("socket.gethostname", MagicMock(return_value=long_name)):
# An exception is raised if unicode is passed to socket.getfqdn
minion_id = network.generate_minion_id()
assert minion_id != "", minion_id
def test_filter_by_networks_with_no_filter(self):
ips = ["10.0.123.200", "10.10.10.10"]
with pytest.raises(TypeError):
network.filter_by_networks(ips) # pylint: disable=no-value-for-parameter
def test_filter_by_networks_empty_filter(self):
ips = ["10.0.123.200", "10.10.10.10"]
assert network.filter_by_networks(ips, []) == []
def test_filter_by_networks_ips_list(self):
ips = [
"10.0.123.200",
"10.10.10.10",
"193.124.233.5",
"fe80::d210:cf3f:64e7:5423",
]
networks = ["10.0.0.0/8", "fe80::/64"]
assert network.filter_by_networks(ips, networks) == [
"10.0.123.200",
"10.10.10.10",
"fe80::d210:cf3f:64e7:5423",
]
def test_filter_by_networks_interfaces_dict(self):
interfaces = {
"wlan0": ["192.168.1.100", "217.5.140.67", "2001:db8::ff00:42:8329"],
"eth0": [
"2001:0DB8:0:CD30:123:4567:89AB:CDEF",
"192.168.1.101",
"10.0.123.201",
],
}
assert network.filter_by_networks(
interfaces, ["192.168.1.0/24", "2001:db8::/48"]
) == {
"wlan0": ["192.168.1.100", "2001:db8::ff00:42:8329"],
"eth0": ["2001:0DB8:0:CD30:123:4567:89AB:CDEF", "192.168.1.101"],
}
def test_filter_by_networks_catch_all(self):
ips = [
"10.0.123.200",
"10.10.10.10",
"193.124.233.5",
"fe80::d210:cf3f:64e7:5423",
]
assert ips == network.filter_by_networks(ips, ["0.0.0.0/0", "::/0"])
def test_ip_networks(self):
# We don't need to test with each platform's ifconfig/iproute2 output,
# since this test isn't testing getting the interfaces. We already have
# tests for that.
interface_data = network._interfaces_ifconfig(LINUX)
# Without loopback
ret = network.ip_networks(interface_data=interface_data)
assert ret == ["10.10.8.0/22"], ret
# Without loopback, specific interface
ret = network.ip_networks(interface="eth0", interface_data=interface_data)
assert ret == ["10.10.8.0/22"], ret
# Without loopback, multiple specific interfaces
ret = network.ip_networks(interface="eth0,lo", interface_data=interface_data)
assert ret == ["10.10.8.0/22"], ret
# Without loopback, specific interface (not present)
ret = network.ip_networks(interface="eth1", interface_data=interface_data)
assert ret == [], ret
# With loopback
ret = network.ip_networks(include_loopback=True, interface_data=interface_data)
assert ret == ["10.10.8.0/22", "127.0.0.0/8"], ret
# With loopback, specific interface
ret = network.ip_networks(
interface="eth0", include_loopback=True, interface_data=interface_data
)
assert ret == ["10.10.8.0/22"], ret
# With loopback, multiple specific interfaces
ret = network.ip_networks(
interface="eth0,lo", include_loopback=True, interface_data=interface_data
)
assert ret == ["10.10.8.0/22", "127.0.0.0/8"], ret
# With loopback, specific interface (not present)
ret = network.ip_networks(
interface="eth1", include_loopback=True, interface_data=interface_data
)
assert ret == [], ret
# Verbose, without loopback
ret = network.ip_networks(verbose=True, interface_data=interface_data)
assert ret == {
"10.10.8.0/22": {
"prefixlen": 22,
"netmask": "255.255.252.0",
"num_addresses": 1024,
"address": "10.10.8.0",
},
}, ret
# Verbose, without loopback, specific interface
ret = network.ip_networks(
interface="eth0", verbose=True, interface_data=interface_data
)
assert ret == {
"10.10.8.0/22": {
"prefixlen": 22,
"netmask": "255.255.252.0",
"num_addresses": 1024,
"address": "10.10.8.0",
},
}, ret
# Verbose, without loopback, multiple specific interfaces
ret = network.ip_networks(
interface="eth0,lo", verbose=True, interface_data=interface_data
)
assert ret == {
"10.10.8.0/22": {
"prefixlen": 22,
"netmask": "255.255.252.0",
"num_addresses": 1024,
"address": "10.10.8.0",
},
}, ret
# Verbose, without loopback, specific interface (not present)
ret = network.ip_networks(
interface="eth1", verbose=True, interface_data=interface_data
)
assert ret == {}, ret
# Verbose, with loopback
ret = network.ip_networks(
include_loopback=True, verbose=True, interface_data=interface_data
)
assert ret == {
"10.10.8.0/22": {
"prefixlen": 22,
"netmask": "255.255.252.0",
"num_addresses": 1024,
"address": "10.10.8.0",
},
"127.0.0.0/8": {
"prefixlen": 8,
"netmask": "255.0.0.0",
"num_addresses": 16777216,
"address": "127.0.0.0",
},
}, ret
# Verbose, with loopback, specific interface
ret = network.ip_networks(
interface="eth0",
include_loopback=True,
verbose=True,
interface_data=interface_data,
)
assert ret == {
"10.10.8.0/22": {
"prefixlen": 22,
"netmask": "255.255.252.0",
"num_addresses": 1024,
"address": "10.10.8.0",
},
}, ret
# Verbose, with loopback, multiple specific interfaces
ret = network.ip_networks(
interface="eth0,lo",
include_loopback=True,
verbose=True,
interface_data=interface_data,
)
assert ret == {
"10.10.8.0/22": {
"prefixlen": 22,
"netmask": "255.255.252.0",
"num_addresses": 1024,
"address": "10.10.8.0",
},
"127.0.0.0/8": {
"prefixlen": 8,
"netmask": "255.0.0.0",
"num_addresses": 16777216,
"address": "127.0.0.0",
},
}, ret
# Verbose, with loopback, specific interface (not present)
ret = network.ip_networks(
interface="eth1",
include_loopback=True,
verbose=True,
interface_data=interface_data,
)
assert ret == {}, ret
def test_ip_networks6(self):
# We don't need to test with each platform's ifconfig/iproute2 output,
# since this test isn't testing getting the interfaces. We already have
# tests for that.
interface_data = network._interfaces_ifconfig(LINUX)
# Without loopback
ret = network.ip_networks6(interface_data=interface_data)
assert ret == ["fe80::/64"], ret
# Without loopback, specific interface
ret = network.ip_networks6(interface="eth0", interface_data=interface_data)
assert ret == ["fe80::/64"], ret
# Without loopback, multiple specific interfaces
ret = network.ip_networks6(interface="eth0,lo", interface_data=interface_data)
assert ret == ["fe80::/64"], ret
# Without loopback, specific interface (not present)
ret = network.ip_networks6(interface="eth1", interface_data=interface_data)
assert ret == [], ret
# With loopback
ret = network.ip_networks6(include_loopback=True, interface_data=interface_data)
assert ret == ["::1/128", "fe80::/64"], ret
# With loopback, specific interface
ret = network.ip_networks6(
interface="eth0", include_loopback=True, interface_data=interface_data
)
assert ret == ["fe80::/64"], ret
# With loopback, multiple specific interfaces
ret = network.ip_networks6(
interface="eth0,lo", include_loopback=True, interface_data=interface_data
)
assert ret == ["::1/128", "fe80::/64"], ret
# With loopback, specific interface (not present)
ret = network.ip_networks6(
interface="eth1", include_loopback=True, interface_data=interface_data
)
assert ret == [], ret
# Verbose, without loopback
ret = network.ip_networks6(verbose=True, interface_data=interface_data)
assert ret == {
"fe80::/64": {
"prefixlen": 64,
"netmask": "ffff:ffff:ffff:ffff::",
"num_addresses": 18446744073709551616,
"address": "fe80::",
},
}, ret
# Verbose, without loopback, specific interface
ret = network.ip_networks6(
interface="eth0", verbose=True, interface_data=interface_data
)
assert ret == {
"fe80::/64": {
"prefixlen": 64,
"netmask": "ffff:ffff:ffff:ffff::",
"num_addresses": 18446744073709551616,
"address": "fe80::",
},
}, ret
# Verbose, without loopback, multiple specific interfaces
ret = network.ip_networks6(
interface="eth0,lo", verbose=True, interface_data=interface_data
)
assert ret == {
"fe80::/64": {
"prefixlen": 64,
"netmask": "ffff:ffff:ffff:ffff::",
"num_addresses": 18446744073709551616,
"address": "fe80::",
},
}, ret
# Verbose, without loopback, specific interface (not present)
ret = network.ip_networks6(
interface="eth1", verbose=True, interface_data=interface_data
)
assert ret == {}, ret
# Verbose, with loopback
ret = network.ip_networks6(
include_loopback=True, verbose=True, interface_data=interface_data
)
assert ret == {
"fe80::/64": {
"prefixlen": 64,
"netmask": "ffff:ffff:ffff:ffff::",
"num_addresses": 18446744073709551616,
"address": "fe80::",
},
"::1/128": {
"prefixlen": 128,
"netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff",
"num_addresses": 1,
"address": "::1",
},
}, ret
# Verbose, with loopback, specific interface
ret = network.ip_networks6(
interface="eth0",
include_loopback=True,
verbose=True,
interface_data=interface_data,
)
assert ret == {
"fe80::/64": {
"prefixlen": 64,
"netmask": "ffff:ffff:ffff:ffff::",
"num_addresses": 18446744073709551616,
"address": "fe80::",
},
}, ret
# Verbose, with loopback, multiple specific interfaces
ret = network.ip_networks6(
interface="eth0,lo",
include_loopback=True,
verbose=True,
interface_data=interface_data,
)
assert ret == {
"fe80::/64": {
"prefixlen": 64,
"netmask": "ffff:ffff:ffff:ffff::",
"num_addresses": 18446744073709551616,
"address": "fe80::",
},
"::1/128": {
"prefixlen": 128,
"netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff",
"num_addresses": 1,
"address": "::1",
},
}, ret
# Verbose, with loopback, specific interface (not present)
ret = network.ip_networks6(
interface="eth1",
include_loopback=True,
verbose=True,
interface_data=interface_data,
)
assert ret == {}, ret
def test_get_fqhostname_return(self):
"""
Test that the proper hostname is used when reverse DNS differs from the hostname
:return:
"""
with patch("socket.gethostname", MagicMock(return_value="hostname")), patch(
"socket.getfqdn",
MagicMock(return_value="very.long.and.complex.domain.name"),
), patch(
"socket.getaddrinfo",
MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]),
):
self.assertEqual(network.get_fqhostname(), "hostname")
def test_get_fqhostname_return_empty_hostname(self):
"""
Test if proper hostname is used when hostname returns empty string
"""
host = "hostname"
with patch("socket.gethostname", MagicMock(return_value=host)), patch(
"socket.getfqdn",
MagicMock(return_value="very.long.and.complex.domain.name"),
), patch(
"socket.getaddrinfo",
MagicMock(
return_value=[
(2, 3, 0, host, ("127.0.1.1", 0)),
(2, 3, 0, "", ("127.0.1.1", 0)),
]
),
):
self.assertEqual(network.get_fqhostname(), host)
avg_line_length: 40.628842 | max_line_length: 196 | alphanum_fraction: 0.523217

hexsha: d6be63b1d2cc4089aa47f506f68e8cace0cd20d9 | size: 258 | ext: py | lang: Python
max_stars_repo: jogo-velha-py-main/game.py @ DanielDDHM/my-projects-py (f6c3af7f6cd61c69234d25c956027e8c7e626470) | licenses: ["MIT"] | max_stars_count: null
max_issues_repo: jogo-velha-py-main/game.py @ DanielDDHM/my-projects-py (f6c3af7f6cd61c69234d25c956027e8c7e626470) | licenses: ["MIT"] | max_issues_count: null
max_forks_repo: jogo-velha-py-main/game.py @ DanielDDHM/my-projects-py (f6c3af7f6cd61c69234d25c956027e8c7e626470) | licenses: ["MIT"] | max_forks_count: null
content:
import pygame as pg
import sys
from pygame.locals import *
import time
#initialize global variables
XO = 'x'
winner = None
draw = False
width = 400
height = 400
white = (255, 255, 255)
line_color = (10,10,10)
#TicTacToe 3x3 board
TTT = [[None]*3,[None]*3,[None]*3]
avg_line_length: 18.428571 | max_line_length: 34 | alphanum_fraction: 0.697674

hexsha: 58d6dcf588df374dca401792eb78c9ed452f04ef | size: 2,047 | ext: py | lang: Python
max_stars_repo: app.py @ eliza-jane/blue-hills-climate-data (9cd94cf6e8048b71324bdff185cf0abf3af053b6) | licenses: ["MIT"] | max_stars_count: null
max_issues_repo: app.py @ eliza-jane/blue-hills-climate-data (9cd94cf6e8048b71324bdff185cf0abf3af053b6) | licenses: ["MIT"] | max_issues_count: null
max_forks_repo: app.py @ eliza-jane/blue-hills-climate-data (9cd94cf6e8048b71324bdff185cf0abf3af053b6) | licenses: ["MIT"] | max_forks_count: null
content:
import dash
import dash_core_components as dcc
import dash_html_components as html
import plotly.graph_objs as go
import pandas as pd
app = dash.Dash()
df = pd.read_csv('data/1919_2019_global-monthly.csv')
month_numbers = {
"January": "01",
"February": "02",
"March": "03",
"April": "04",
"May": "05",
"June": "06",
"July": "07",
"August": "08",
"September": "09",
"October": "10",
"November": "11",
"December": "12"
}
title_map = {
"EMXP": "Highest daily total of precipitation in the month",
"TAVG": "Avg. Monthly Temperature",
"DX90": "Days greater than or equal to 90 deg F",
"DX70": "Days greater than or equal to 70 deg F",
"EMSN": "Highest daily snowfall in the month (inches)",
"EMXT": "Extreme maximum temperature for month",
"SNOW": "Total Monthly Snowfall"
}
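# Split the dataframe into per-month subsets by matching the month number in the DATE column.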
month_dfs = {}
for month in month_numbers:
is_month = df['DATE'].str.contains("-{}".format(month_numbers[month]), regex=False)
month_dfs[month] = df[is_month]
app.layout = html.Div(children=[
html.Div(children=[
dcc.Dropdown(
id='month',
options=[{'label': i, 'value': month_dfs[i].to_json()} for i in month_dfs],
value=month_dfs["January"].to_json()
),
dcc.Dropdown(
id='stat',
options=[{'label': title_map[i], 'value': i} for i in title_map],
value='EMXP'
),
dcc.Graph(
id='stats-graph'
)
])
])
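# Rebuild the bar chart whenever the statistic or month dropdown selection changes.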
@app.callback(
dash.dependencies.Output('stats-graph', 'figure'),
[dash.dependencies.Input('stat', 'value'), dash.dependencies.Input('month', 'value')])
def select_stat(stat, month):
df = pd.read_json(month)
return {
'data': [go.Bar(
x = df['DATE'],
y = df[stat]
)],
'layout': go.Layout(
title='{}'.format(title_map[stat]),
showlegend=False,
margin=go.layout.Margin(l=20, r=10, t=40, b=30)
)
}
if __name__ == '__main__':
app.run_server(debug=True)
avg_line_length: 25.911392 | max_line_length: 90 | alphanum_fraction: 0.572545

hexsha: ea9bceb1fc71e9aee29714b165ad473534137f7d | size: 25,680 | ext: py | lang: Python
max_stars_repo: datalocker/views.py @ PSUEducationalEquity/datalocker (d27544407492404cac8302fb969b0a40b770ad06) | licenses: ["BSD-3-Clause"] | max_stars_count: 1 | stars_event: 2016-03-21T13:42:35.000Z to 2016-03-21T13:42:35.000Z
max_issues_repo: datalocker/views.py @ PSUEducationalEquity/datalocker (d27544407492404cac8302fb969b0a40b770ad06) | licenses: ["BSD-3-Clause"] | max_issues_count: null
max_forks_repo: datalocker/views.py @ PSUEducationalEquity/datalocker (d27544407492404cac8302fb969b0a40b770ad06) | licenses: ["BSD-3-Clause"] | max_forks_count: 1 | forks_event: 2015-08-28T13:11:04.000Z to 2015-08-28T13:11:04.000Z
content:
### Copyright 2015 The Pennsylvania State University. Office of the Vice Provost for Educational Equity. All Rights Reserved. ###
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import permission_required
from django.contrib.auth.forms import AuthenticationForm, PasswordChangeForm
from django.contrib.auth.models import User
from django.contrib.auth.views import (
login as auth_login,
logout as auth_logout,
password_change as auth_password_change,
password_change_done as auth_password_change_done
)
from django.contrib.humanize.templatetags.humanize import naturaltime
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django.http import (
HttpResponse,
HttpResponseBadRequest,
HttpResponseRedirect,
JsonResponse,
)
from django.shortcuts import render, get_object_or_404
from django.utils import timezone
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from smtplib import SMTPException
from .decorators import login_required, never_cache, prevent_url_guessing
from .models import (
Comment,
Locker,
Submission,
)
from .utils.notifications import get_from_address
from .utils.users import get_public_user_dict, UserColors
import logging
logger = logging.getLogger(__name__)
@login_required
@never_cache
@prevent_url_guessing
@require_http_methods(['POST'])
def comment_add(request, locker_id, submission_id):
"""Adds a comment or reply to the specified submission"""
submission = get_object_or_404(Submission, id=submission_id)
comment_text = request.POST.get('comment', '').strip()
parent_id = request.POST.get('parent', None)
try:
parent = Comment.objects.get(pk=parent_id)
except Comment.DoesNotExist:
parent = None
if comment_text:
comment = Comment(
submission=submission,
comment=comment_text,
user=request.user,
parent=parent
)
comment.save()
if request.is_ajax():
color_helper = UserColors(request)
comment_dict = comment.to_dict()
comment_dict['user']['color'] = color_helper.get(comment.user.username) # NOQA
return JsonResponse(comment_dict)
else:
messages.success(request,
u'<strong>Success!</strong> '
u'Your comment was added to the discussion.')
else:
error_msg = u'<strong>Oops!</strong> Your comment was blank.'
if request.is_ajax():
return HttpResponseBadRequest(error_msg)
else:
messages.error(request, error_msg)
return HttpResponseRedirect(reverse(
'datalocker:submission_view',
kwargs={'locker_id': locker_id, 'submission_id': submission_id}
))
@login_required
@never_cache
@prevent_url_guessing
@require_http_methods(['POST'])
def comment_modify(request, locker_id, submission_id):
"""Modifies the existing comment if it is still editable"""
comment = get_object_or_404(Comment, id=request.POST.get('id', ''))
if comment.is_editable and comment.user == request.user:
comment_text = request.POST.get('comment', '').strip()
comment.comment = comment_text
comment.save()
if request.is_ajax():
return JsonResponse({
'comment': comment_text,
'id': comment.id,
})
else:
messages.success(request,
u'<strong>Success!</strong> '
u'Your comment was added to the discussion.')
else:
error_msg = u"<strong>D'oh!</strong> This comment is no longer editable." # NOQA
if request.is_ajax():
return HttpResponseBadRequest(error_msg)
else:
messages.warning(request, error_msg)
return HttpResponseRedirect(reverse(
'datalocker:submission_view',
kwargs={
'locker_id': comment.submission.locker.id,
'submission_id': comment.submission.id,
}
))
@login_required
@never_cache
@prevent_url_guessing
@require_http_methods(['GET', 'HEAD'])
def comments_list(request, locker_id, submission_id):
"""Returns a list of comments for the specified submission"""
submission = get_object_or_404(Submission, pk=submission_id)
if submission.locker.discussion_enabled():
is_owner = submission.locker.is_owner(request.user)
is_user = submission.locker.is_user(request.user)
discussion_users = submission.locker.discussion_users_have_access()
if is_owner or (is_user and discussion_users) or request.user.is_superuser: # NOQA
if request.is_ajax():
color_helper = UserColors(request)
comments = []
comment_objs = submission.comments.order_by('parent', '-timestamp') # NOQA
for comment in comment_objs:
comment_dict = comment.to_dict()
comment_dict['user']['color'] = color_helper.get(comment.user.username) # NOQA
if comment.user != request.user:
comment_dict['editable'] = False
comments.append(comment_dict)
return JsonResponse({
'discussion': comments,
'editing_time_value': settings.COMMENT_EDIT_MAX,
'editing_time_units': 'minutes',
})
if request.is_ajax():
error_msg = u'The user does not have permission to view the discussion.' # NOQA
return HttpResponseBadRequest(error_msg)
else:
return HttpResponseRedirect(reverse(
'datalocker:submission_view',
kwargs={'locker_id': locker_id, 'submission_id': submission_id}
))
@csrf_exempt
@never_cache
def form_submission_view(request, **kwargs):
"""Handles submissions from outside forms to be saved in lockers"""
# redirect non-form submissions to the main page
if request.method != 'POST':
return HttpResponseRedirect(reverse('datalocker:index'))
safe_values = {
'identifier': request.POST.get('form-id', '').strip(),
'name': request.POST.get('name', 'New Locker').strip(),
'url': request.POST.get('url', '').strip(),
'owner_name': request.POST.get('owner', '').strip(),
'data': request.POST.get('data', '').strip(),
}
try:
safe_values['owner'] = User.objects.get(username=safe_values['owner_name']) # NOQA
except User.DoesNotExist:
safe_values['owner'] = None
Locker.objects.add_submission(safe_values, request=request)
return HttpResponse(status=201)
@login_required
@never_cache
@prevent_url_guessing
@require_http_methods(['POST'])
def locker_archive(request, locker_id):
"""Archive a locker so it receives no new submissions"""
locker = get_object_or_404(Locker, id=locker_id)
locker.archive_timestamp = timezone.now()
locker.save()
if request.is_ajax():
return JsonResponse({'locker_id': locker_id})
else:
return HttpResponseRedirect(reverse('datalocker:index'))
@login_required()
@never_cache
@require_http_methods(['GET', 'HEAD'])
def locker_list_view(request):
"""Return list of lockers for the current user
The list includes lockers for which the current user is an owner and
lockers for which the current user is included in the list of shared users.
"""
context = {}
context['owned'] = (Locker.objects
.active()
.has_access(request.user)
.include_latest()
.filter(owner=request.user))
context['shared'] = (Locker.objects
.active()
.has_access(request.user)
.include_latest()
.exclude(owner=request.user))
if request.user.is_superuser:
context['all'] = (Locker.objects.include_latest())
context['orphaned'] = (Locker.objects
.filter(owner=None)
.include_latest())
return render(request, 'datalocker/index.html', context)
@login_required
@never_cache
@prevent_url_guessing
@require_http_methods(['POST'])
def locker_unarchive(request, locker_id):
"""Unarchives a locker so it can receive new submissions"""
locker = get_object_or_404(Locker, id=locker_id)
locker.archive_timestamp = None
locker.save()
if request.is_ajax():
return JsonResponse({'locker_id': locker_id})
else:
return HttpResponseRedirect(reverse('datalocker:index'))
@login_required()
@never_cache
@prevent_url_guessing
@require_http_methods(['POST'])
def locker_user_add(request, locker_id):
"""Adds the specified user to the locker's list of shared users"""
if request.is_ajax():
locker = get_object_or_404(Locker, id=locker_id)
user = get_object_or_404(User, email=request.POST.get('email', ''))
if user not in locker.users.all():
locker.users.add(user)
from_addr = get_from_address('locker access granted')
if from_addr:
subject = u'Access to Locker: {}'.format(locker.name)
to_addr = user.email
url = request.build_absolute_uri(reverse(
'datalocker:submissions_list',
kwargs={'locker_id': locker.id}
))
message = (u'The following Data Locker of form submissions '
u'has been shared with you.\n\n'
u'Locker: {}\n\n'
u'You can view the submissions at:\n{}\n'
u''.format(locker.name, url))
try:
send_mail(subject, message, from_addr, [to_addr])
except SMTPException:
logger.exception(u'Locker shared with you email failed to send') # NOQA
return JsonResponse({'user': get_public_user_dict(user)})
else:
return HttpResponseRedirect(reverse('datalocker:index'))
@login_required()
@never_cache
@prevent_url_guessing
@require_http_methods(['POST'])
def locker_user_delete(request, locker_id):
"""Removes the specified user from the locker's list of users"""
if request.is_ajax():
locker = get_object_or_404(Locker, id=locker_id)
try:
user = get_object_or_404(User, id=request.POST.get('id', ''))
except ValueError:
error_msg = u'An invalid user was requested to be deleted.'
return HttpResponseBadRequest(error_msg)
else:
if user in locker.users.all():
locker.users.remove(user)
return JsonResponse({'user_id': user.id})
if error_msg:
error_msg = u'<strong>Oops</strong> {}'.format(error_msg)
messages.error(request, error_msg)
return HttpResponseRedirect(reverse('datalocker:index'))
@login_required()
@never_cache
@prevent_url_guessing
@require_http_methods(['GET', 'HEAD'])
def locker_users(request, locker_id):
if request.is_ajax():
locker = get_object_or_404(Locker, pk=locker_id)
users = [
get_public_user_dict(user)
for user in locker.users.all()
]
return JsonResponse({'users': users})
else:
return HttpResponseRedirect(reverse('datalocker:index'))
@never_cache
def login(request, template_name='registration/login.html',
redirect_field_name=REDIRECT_FIELD_NAME,
authentication_form=AuthenticationForm,
current_app=None, extra_context=None):
return auth_login(
request,
template_name,
redirect_field_name,
authentication_form,
current_app,
extra_context
)
@never_cache
def logout(request, next_page=None,
template_name='registration/logged_out.html',
redirect_field_name=REDIRECT_FIELD_NAME,
current_app=None, extra_context=None):
return auth_logout(
request,
next_page,
template_name,
redirect_field_name,
current_app,
extra_context
)
@login_required()
@never_cache
@prevent_url_guessing
@require_http_methods(['POST'])
def modify_locker(request, **kwargs):
"""Modifies locker name, ownership, and settings"""
locker = get_object_or_404(Locker, id=kwargs['locker_id'])
previous_owner = locker.owner
if not locker.owner:
previous_owner = request.user
new_locker_name = request.POST.get('locker-name', '')
new_owner_email = request.POST.get('locker-owner', '')
if new_locker_name != '':
locker.name = new_locker_name
if new_owner_email != '':
try:
new_owner = User.objects.get(email=new_owner_email)
except User.DoesNotExist:
logger.error(u'Attempted to reassign locker ({}) '
u'to non-existent user ({})'
u''.format(locker.name, new_owner_email))
messages.error(request,
u'<strong>Oops!</strong> The user ({}) you tried '
u'to make the owner of the <strong>{}</strong> '
u'locker does not exist. '
u'<strong>You still own the locker.</strong>'
u''.format(new_owner_email, locker.name))
else:
locker.owner = new_owner
from_addr = get_from_address(u'change locker owner')
if from_addr:
subject = u'Ownership of Locker: {}'.format(locker.name)
to_addr = new_owner_email
previous_name = u'{} {}'.format(previous_owner.first_name,
previous_owner.last_name)
url = request.build_absolute_uri(reverse(
'datalocker:submissions_list',
kwargs={'locker_id': locker.id}
))
message = (u'{} has changed the ownership of the following '
u'locker of form submissions to you.\n\n'
u'Locker: {}\n\n'
u'You can view the submissions at:\n{}\n'
u''.format(previous_name, locker.name, url))
try:
send_mail(subject, message, from_addr, [to_addr])
except SMTPException:
logger.exception(u'Locker ownership changed to you email failed to send') # NOQA
locker.save()
# update the locker settings
locker.shared_users_notification(
bool(request.POST.get('shared-users', False))
)
locker.workflow_enabled(
bool(request.POST.get('workflow-enable', False))
)
locker.workflow_users_can_edit(
bool(request.POST.get('workflow-users-can-edit', False))
)
locker.workflow_states(request.POST.get('workflow-states', ''))
locker.discussion_enabled(
bool(request.POST.get('discussion-enable', False))
)
locker.discussion_users_have_access(
bool(request.POST.get('discussion-users-have-access', False))
)
return HttpResponseRedirect(reverse('datalocker:index'))
@never_cache
def password_change(request,
template_name='registration/password_change_form.html',
post_change_redirect=None,
password_change_form=PasswordChangeForm,
current_app=None, extra_context=None):
return auth_password_change(
request,
template_name,
post_change_redirect,
password_change_form,
current_app,
extra_context
)
@never_cache
def password_change_done(request,
template_name='registration/password_change_done.html', # NOQA
current_app=None, extra_context=None):
return auth_password_change_done(
request,
template_name,
current_app,
extra_context
)
@permission_required('datalocker.add_manual_submission')
@login_required()
@require_http_methods(['POST'])
@never_cache
def submission_add(request, locker_id):
"""Manually add a submission to a locker
Arguments:
request {obj} -- Django HTTP Request object instance
locker_id {int} -- Unique identifier for the Locker to add the
submission to
"""
locker = get_object_or_404(Locker, id=locker_id)
json_data = request.POST.get('json', '').strip()
json_data = json_data.replace('\r', '')
json_data = json_data.replace('\n', '')
json_data = json_data.replace('<div>', '')
json_data = json_data.replace('</div>', '')
json_data = json_data.replace('<br />', '\\r\\n')
json_data = json_data.replace('<br>', '\\r\\n')
if json_data[-3:] == '",}':
json_data = json_data[:-3] + '"}'
Locker.objects.add_submission(
{'data': json_data},
request=request,
locker=locker
)
return HttpResponseRedirect(reverse(
'datalocker:submissions_list',
kwargs={'locker_id': locker_id}
))
@login_required()
@never_cache
@prevent_url_guessing
@require_http_methods(['POST'])
def submission_delete(request, locker_id, submission_id):
"""Marks a submission as deleted"""
submission = get_object_or_404(Submission, id=submission_id)
submission.deleted = timezone.now()
submission.save()
if request.is_ajax():
return JsonResponse({
'id': submission.id,
'timestamp': submission.timestamp,
'deleted': submission.deleted,
'purge_timestamp': submission.purge_date,
})
else:
return HttpResponseRedirect(reverse(
'datalocker:submissions_list',
kwargs={'locker_id': locker_id}
))
@login_required()
@never_cache
@prevent_url_guessing
@require_http_methods(['POST'])
def submission_undelete(request, locker_id, submission_id):
"""Removes the deleted timestamp from a submission"""
submission = get_object_or_404(Submission, id=submission_id)
submission.deleted = None
submission.save()
if request.is_ajax():
return JsonResponse({
'id': submission.id,
'timestamp': submission.timestamp,
})
else:
return HttpResponseRedirect(reverse(
'datalocker:submissions_list',
kwargs={'locker_id': locker_id}
))
@login_required()
@never_cache
@prevent_url_guessing
@require_http_methods(['GET', 'HEAD'])
def submission_view(request, locker_id, submission_id):
"""Displays an individual submission"""
submission = get_object_or_404(Submission, pk=submission_id)
newer = submission.newer()
newest = Submission.objects.newest(submission.locker)
if not newest:
newest = submission
oldest = Submission.objects.oldest(submission.locker)
if not oldest:
oldest = submission
older = submission.older()
discussion_enabled = submission.locker.discussion_enabled()
is_owner = submission.locker.owner == request.user
users_discussion = submission.locker.discussion_users_have_access()
users_workflow = submission.locker.workflow_users_can_edit()
workflow_enabled = submission.locker.workflow_enabled()
# generate a message to the user if the submission is deleted
if submission.deleted:
messages.warning(request,
u'<strong>Heads up!</strong> This submission has '
u'been deleted and <strong>will be permanently '
u'removed</strong> from the locker '
u'<strong>{}</strong>.'
u''.format(naturaltime(submission.purge_date)))
return render(request, 'datalocker/submission_view.html', {
'data': submission.data_dict(with_types=True),
'discussion_enabled': discussion_enabled,
'discussion_users_have_access': users_discussion or is_owner,
'newer': newer,
'newer_disabled': True if submission.id == newer.id else False,
'newest': newest,
'newest_disabled': True if submission.id == newest.id else False,
'older': older,
'older_disabled': True if submission.id == older.id else False,
'oldest': oldest,
'oldest_disabled': True if submission.id == oldest.id else False,
'sidebar_enabled': workflow_enabled or discussion_enabled,
'submission': submission,
'workflow_enabled': workflow_enabled,
'workflow_states': submission.locker.workflow_states(),
'workflow_state': submission.workflow_state,
'workflow_users_can_edit': users_workflow or is_owner,
})
@login_required()
@never_cache
@prevent_url_guessing
@require_http_methods(['GET', 'HEAD', 'POST'])
def submissions_list_view(request, locker_id):
"""Returns a list of submissions for the specified locker"""
locker = get_object_or_404(Locker, pk=locker_id)
if request.method == 'POST':
# Save the selected fields to include in the submissions list
locker.fields_selected(request.POST)
return HttpResponseRedirect(reverse(
'datalocker:submissions_list',
kwargs={'locker_id': locker_id}
))
is_owner = locker.owner == request.user
selected_fields = locker.fields_selected()
context = {
'allow_maintenance_mode': is_owner or request.user.is_superuser,
'column_headings': ['Submitted date', ] + selected_fields,
'data': [],
'fields_list': locker.fields_all(),
'linkable_indices': [],
'locker': locker,
'purge_days': settings.SUBMISSION_PURGE_DAYS,
'selected_fields': selected_fields,
}
##
# Build the data that is made available to the template
#
# context['data'] contains all the data and metadata for displaying
# the table of submissions.
#
# Format:
# [
# [<list of table cell data>],
# submission id,
# deleted (True/False),
# purged date
# ]
##
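    # Hypothetical example of a single entry (illustration only, not real data):
    #   [[datetime(2020, 1, 1, 12, 0), 'Jane Doe'], 42, False, datetime(2020, 3, 1)]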
for submission in locker.submissions.order_by('-timestamp'):
entry_data = [submission.timestamp, ]
submission_data = submission.data_dict()
for field in selected_fields:
try:
entry_data.append(submission_data[field])
except KeyError:
if field == 'Workflow state':
entry_data.append(submission.workflow_state)
context['data'].append([
entry_data,
submission.id,
True if submission.deleted else False,
submission.purge_date,
])
# determine which indices in the cell data list will be linked
try:
context['linkable_indices'].append(
context['column_headings'].index('Submitted date')
)
except ValueError:
pass
if not context['linkable_indices']:
context['linkable_indices'] = [0, ]
return render(request, 'datalocker/submissions_list.html', context)
@never_cache
@require_http_methods(['GET', 'HEAD'])
def testing_bad_request_view(request):
"""Displays a custom bad request (400) page"""
return render(request, '400.html', {})
@never_cache
@require_http_methods(['GET', 'HEAD'])
def testing_forbidden_view(request):
"""Displays a custom forbidden (403) page"""
return render(request, '403.html', {})
@never_cache
@require_http_methods(['GET', 'HEAD'])
def testing_not_found_view(request):
"""Displays a custom not found (404) page"""
return render(request, '404.html', {})
@never_cache
@require_http_methods(['GET', 'HEAD'])
def testing_server_error_view(request):
"""Displays a custom internal server error (500) page"""
return render(request, '500.html', {})
@login_required()
@never_cache
@require_http_methods(['GET', 'HEAD'])
def users_list(request, **kwargs):
"""Returns a list of all the user email addresses in the system
This is used to power the owner and shared user auto-complete which is
driven by TypeAhead.js.
"""
users_list = []
for user in User.objects.all():
users_list.append({
'email': user.email,
'first_name': user.first_name,
'last_name': user.last_name,
})
return JsonResponse({'users': users_list})
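# Illustrative response shape for users_list (hypothetical values), as consumed by
# the TypeAhead.js auto-complete widgets:
#   {"users": [{"email": "jdoe@example.com", "first_name": "Jane", "last_name": "Doe"}]}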
@login_required()
@never_cache
@prevent_url_guessing
@require_http_methods(['POST'])
def workflow_modify(request, locker_id, submission_id):
submission = get_object_or_404(Submission, pk=submission_id)
new_state = request.POST.get('workflow-state', '')
if new_state in submission.locker.workflow_states():
submission.workflow_state = new_state
submission.save()
if request.is_ajax():
return JsonResponse({'state': new_state})
else:
error_msg = u'<strong>Oops!</strong> Unknown workflow state specified.'
if request.is_ajax():
return HttpResponseBadRequest(error_msg, content_type='text/plain')
else:
messages.error(request, error_msg)
return HttpResponseRedirect(reverse(
'datalocker:submission_view',
kwargs={
'locker_id': submission.locker.id,
'submission_id': submission.id,
}
))
| 36.220028
| 129
| 0.632671
|
6542ab2e4f3062d8793fe957d141aab8126088e6
| 2,897
|
py
|
Python
|
skimage/io/tests/test_multi_image.py
|
jaidevd/scikit-image
|
62d6a3d7e95a228c729c9ff99b4f45336a210885
|
[
"BSD-3-Clause"
] | 1
|
2019-09-08T15:01:42.000Z
|
2019-09-08T15:01:42.000Z
|
skimage/io/tests/test_multi_image.py
|
jaidevd/scikit-image
|
62d6a3d7e95a228c729c9ff99b4f45336a210885
|
[
"BSD-3-Clause"
] | null | null | null |
skimage/io/tests/test_multi_image.py
|
jaidevd/scikit-image
|
62d6a3d7e95a228c729c9ff99b4f45336a210885
|
[
"BSD-3-Clause"
] | 1
|
2021-09-28T18:22:50.000Z
|
2021-09-28T18:22:50.000Z
|
import os
import numpy as np
from numpy.testing import assert_raises, assert_equal, assert_allclose
from skimage import data_dir
from skimage.io.collection import MultiImage, ImageCollection
import six
class TestMultiImage():
def setUp(self):
# This multipage TIF file was created with imagemagick:
# convert im1.tif im2.tif -adjoin multipage.tif
paths = [os.path.join(data_dir, 'multipage.tif'),
os.path.join(data_dir, 'no_time_for_that.gif')]
self.imgs = [MultiImage(paths[0]),
MultiImage(paths[0], conserve_memory=False),
MultiImage(paths[1]),
MultiImage(paths[1], conserve_memory=False),
ImageCollection(paths[0]),
ImageCollection(paths[1], conserve_memory=False),
ImageCollection('%s:%s' % (paths[0], paths[1]))]
def test_len(self):
assert len(self.imgs[0]) == len(self.imgs[1]) == 2
assert len(self.imgs[2]) == len(self.imgs[3]) == 24
assert len(self.imgs[4]) == 2
assert len(self.imgs[5]) == 24
assert len(self.imgs[6]) == 26
def test_slicing(self):
img = self.imgs[-1]
assert type(img[:]) is ImageCollection
assert len(img[:]) == 26
assert len(img[:1]) == 1
assert len(img[1:]) == 25
assert_allclose(img[0], img[:1][0])
assert_allclose(img[1], img[1:][0])
assert_allclose(img[-1], img[::-1][0])
assert_allclose(img[0], img[::-1][-1])
def test_getitem(self):
for img in self.imgs:
num = len(img)
for i in range(-num, num):
assert type(img[i]) is np.ndarray
assert_allclose(img[0], img[-num])
# assert_raises expects a callable, hence this thin wrapper function.
def return_img(n):
return img[n]
assert_raises(IndexError, return_img, num)
assert_raises(IndexError, return_img, -num - 1)
def test_files_property(self):
for img in self.imgs:
if isinstance(img, ImageCollection):
continue
assert isinstance(img.filename, six.string_types)
def set_filename(f):
img.filename = f
assert_raises(AttributeError, set_filename, 'newfile')
def test_conserve_memory_property(self):
for img in self.imgs:
assert isinstance(img.conserve_memory, bool)
def set_mem(val):
img.conserve_memory = val
assert_raises(AttributeError, set_mem, True)
def test_concatenate(self):
for img in self.imgs:
array = img.concatenate()
assert_equal(array.shape, (len(img),) + img[0].shape)
if __name__ == "__main__":
from numpy.testing import run_module_suite
run_module_suite()
| 33.298851
| 81
| 0.582672
|
5973513cf4c0bb8378b571b2a7dcaad4c362851d
| 14,783
|
py
|
Python
|
shadowsocks/shell.py
|
xiaobailong24-shadowsocks/SSR-manyuser
|
0b94af0b7d721cb14d1c1a16a2e8a54e559fb616
|
[
"Apache-2.0"
] | 6
|
2018-01-06T12:10:30.000Z
|
2020-09-01T16:53:42.000Z
|
shadowsocks/shell.py
|
xiaobailong24-shadowsocks/SSR-manyuser
|
0b94af0b7d721cb14d1c1a16a2e8a54e559fb616
|
[
"Apache-2.0"
] | null | null | null |
shadowsocks/shell.py
|
xiaobailong24-shadowsocks/SSR-manyuser
|
0b94af0b7d721cb14d1c1a16a2e8a54e559fb616
|
[
"Apache-2.0"
] | 6
|
2017-10-18T09:30:19.000Z
|
2021-08-04T01:19:28.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import json
import sys
import getopt
import logging
from shadowsocks.common import to_bytes, to_str, IPNetwork, PortRange
from shadowsocks import encrypt
VERBOSE_LEVEL = 5
verbose = 0
def check_python():
info = sys.version_info
if info[0] == 2 and not info[1] >= 6:
print('Python 2.6+ required')
sys.exit(1)
elif info[0] == 3 and not info[1] >= 3:
print('Python 3.3+ required')
sys.exit(1)
elif info[0] not in [2, 3]:
print('Python version not supported')
sys.exit(1)
def print_exception(e):
global verbose
logging.error(e)
if verbose > 0:
import traceback
traceback.print_exc()
def print_shadowsocks():
version_str = ''
try:
import pkg_resources
version_str = pkg_resources.get_distribution('shadowsocks').version
except Exception:
try:
from shadowsocks import version
version_str = version.version()
except Exception:
pass
print('ShadowsocksR %s' % version_str)
def log_shadowsocks_version():
version_str = ''
try:
import pkg_resources
version_str = pkg_resources.get_distribution('shadowsocks').version
except Exception:
try:
from shadowsocks import version
version_str = version.version()
except Exception:
pass
logging.info('ShadowsocksR %s' % version_str)
def find_config():
config_path = 'user-config.json'
if os.path.exists(config_path):
return config_path
config_path = os.path.join(os.path.dirname(__file__), '../', 'user-config.json')
if os.path.exists(config_path):
return config_path
config_path = 'config.json'
if os.path.exists(config_path):
return config_path
config_path = os.path.join(os.path.dirname(__file__), '../', 'config.json')
if os.path.exists(config_path):
return config_path
return None
def check_config(config, is_local):
if config.get('daemon', None) == 'stop':
# no need to specify configuration for daemon stop
return
if is_local and not config.get('password', None):
logging.error('password not specified')
print_help(is_local)
sys.exit(2)
if not is_local and not config.get('password', None) \
and not config.get('port_password', None):
logging.error('password or port_password not specified')
print_help(is_local)
sys.exit(2)
if 'local_port' in config:
config['local_port'] = int(config['local_port'])
if 'server_port' in config and type(config['server_port']) != list:
config['server_port'] = int(config['server_port'])
if config.get('local_address', '') in [b'0.0.0.0']:
logging.warn('warning: local set to listen on 0.0.0.0, it\'s not safe')
if config.get('server', '') in ['127.0.0.1', 'localhost']:
logging.warn('warning: server set to listen on %s:%s, are you sure?' %
(to_str(config['server']), config['server_port']))
if (config.get('method', '') or '').lower() == 'table':
logging.warn('warning: table is not safe; please use a safer cipher, '
'like AES-256-CFB')
if (config.get('method', '') or '').lower() == 'rc4':
logging.warn('warning: RC4 is not safe; please use a safer cipher, '
'like AES-256-CFB')
if config.get('timeout', 300) < 100:
logging.warn('warning: your timeout %d seems too short' %
int(config.get('timeout')))
if config.get('timeout', 300) > 600:
logging.warn('warning: your timeout %d seems too long' %
int(config.get('timeout')))
if config.get('password') in [b'mypassword']:
logging.error('DON\'T USE DEFAULT PASSWORD! Please change it in your '
'config.json!')
sys.exit(1)
if config.get('user', None) is not None:
if os.name != 'posix':
logging.error('user can be used only on Unix')
sys.exit(1)
encrypt.try_cipher(config['password'], config['method'])
def get_config(is_local):
global verbose
logging.basicConfig(level=logging.INFO,
format='%(levelname)-s: %(message)s')
if is_local:
shortopts = 'hd:s:b:p:k:l:m:O:o:G:g:c:t:vq'
longopts = ['help', 'fast-open', 'pid-file=', 'log-file=', 'user=',
'version']
else:
shortopts = 'hd:s:p:k:m:O:o:G:g:c:t:vq'
longopts = ['help', 'fast-open', 'pid-file=', 'log-file=', 'workers=',
'forbidden-ip=', 'user=', 'manager-address=', 'version']
try:
config_path = find_config()
optlist, args = getopt.getopt(sys.argv[1:], shortopts, longopts)
for key, value in optlist:
if key == '-c':
config_path = value
if config_path:
logging.info('loading config from %s' % config_path)
with open(config_path, 'rb') as f:
try:
config = parse_json_in_str(f.read().decode('utf8'))
except ValueError as e:
logging.error('found an error in config.json: %s',
                                  e)
sys.exit(1)
else:
config = {}
v_count = 0
for key, value in optlist:
if key == '-p':
config['server_port'] = int(value)
elif key == '-k':
config['password'] = to_bytes(value)
elif key == '-l':
config['local_port'] = int(value)
elif key == '-s':
config['server'] = to_str(value)
elif key == '-m':
config['method'] = to_str(value)
elif key == '-O':
config['protocol'] = to_str(value)
elif key == '-o':
config['obfs'] = to_str(value)
elif key == '-G':
config['protocol_param'] = to_str(value)
elif key == '-g':
config['obfs_param'] = to_str(value)
elif key == '-b':
config['local_address'] = to_str(value)
elif key == '-v':
v_count += 1
# '-vv' turns on more verbose mode
config['verbose'] = v_count
elif key == '-t':
config['timeout'] = int(value)
elif key == '--fast-open':
config['fast_open'] = True
elif key == '--workers':
config['workers'] = int(value)
elif key == '--manager-address':
config['manager_address'] = value
elif key == '--user':
config['user'] = to_str(value)
elif key == '--forbidden-ip':
config['forbidden_ip'] = to_str(value)
elif key in ('-h', '--help'):
if is_local:
print_local_help()
else:
print_server_help()
sys.exit(0)
elif key == '--version':
print_shadowsocks()
sys.exit(0)
elif key == '-d':
config['daemon'] = to_str(value)
elif key == '--pid-file':
config['pid-file'] = to_str(value)
elif key == '--log-file':
config['log-file'] = to_str(value)
elif key == '-q':
v_count -= 1
config['verbose'] = v_count
except getopt.GetoptError as e:
print(e, file=sys.stderr)
print_help(is_local)
sys.exit(2)
if not config:
logging.error('config not specified')
print_help(is_local)
sys.exit(2)
config['password'] = to_bytes(config.get('password', b''))
config['method'] = to_str(config.get('method', 'aes-256-cfb'))
config['protocol'] = to_str(config.get('protocol', 'origin'))
config['protocol_param'] = to_str(config.get('protocol_param', ''))
config['obfs'] = to_str(config.get('obfs', 'plain'))
config['obfs_param'] = to_str(config.get('obfs_param', ''))
config['port_password'] = config.get('port_password', None)
config['timeout'] = int(config.get('timeout', 300))
config['udp_timeout'] = int(config.get('udp_timeout', 120))
config['udp_cache'] = int(config.get('udp_cache', 64))
config['fast_open'] = config.get('fast_open', False)
config['workers'] = config.get('workers', 1)
config['pid-file'] = config.get('pid-file', '/var/run/shadowsocks.pid')
config['log-file'] = config.get('log-file', '/var/log/shadowsocks.log')
config['verbose'] = config.get('verbose', False)
config['connect_verbose_info'] = config.get('connect_verbose_info', 0)
config['local_address'] = to_str(config.get('local_address', '127.0.0.1'))
config['local_port'] = config.get('local_port', 1080)
if is_local:
if config.get('server', None) is None:
logging.error('server addr not specified')
print_local_help()
sys.exit(2)
else:
config['server'] = to_str(config['server'])
else:
config['server'] = to_str(config.get('server', '0.0.0.0'))
try:
config['forbidden_ip'] = \
IPNetwork(config.get('forbidden_ip', '127.0.0.0/8,::1/128'))
except Exception as e:
logging.error(e)
sys.exit(2)
try:
config['forbidden_port'] = PortRange(config.get('forbidden_port', ''))
except Exception as e:
logging.error(e)
sys.exit(2)
try:
config['ignore_bind'] = \
IPNetwork(config.get('ignore_bind', '127.0.0.0/8,::1/128'))
except Exception as e:
logging.error(e)
sys.exit(2)
config['server_port'] = config.get('server_port', 8388)
logging.getLogger('').handlers = []
logging.addLevelName(VERBOSE_LEVEL, 'VERBOSE')
if config['verbose'] >= 2:
level = VERBOSE_LEVEL
elif config['verbose'] == 1:
level = logging.DEBUG
elif config['verbose'] == -1:
level = logging.WARN
elif config['verbose'] <= -2:
level = logging.ERROR
else:
level = logging.INFO
verbose = config['verbose']
logging.basicConfig(level=level,
format='%(asctime)s %(levelname)-8s %(filename)s:%(lineno)s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
check_config(config, is_local)
return config
def print_help(is_local):
if is_local:
print_local_help()
else:
print_server_help()
def print_local_help():
print('''usage: sslocal [OPTION]...
A fast tunnel proxy that helps you bypass firewalls.
You can supply configurations via either config file or command line arguments.
Proxy options:
-c CONFIG path to config file
-s SERVER_ADDR server address
-p SERVER_PORT server port, default: 8388
-b LOCAL_ADDR local binding address, default: 127.0.0.1
-l LOCAL_PORT local port, default: 1080
-k PASSWORD password
-m METHOD encryption method, default: aes-256-cfb
-o OBFS obfsplugin, default: http_simple
-t TIMEOUT timeout in seconds, default: 300
--fast-open use TCP_FASTOPEN, requires Linux 3.7+
General options:
-h, --help show this help message and exit
-d start/stop/restart daemon mode
--pid-file PID_FILE pid file for daemon mode
--log-file LOG_FILE log file for daemon mode
--user USER username to run as
-v, -vv verbose mode
-q, -qq quiet mode, only show warnings/errors
--version show version information
Online help: <https://github.com/shadowsocks/shadowsocks>
''')
def print_server_help():
print('''usage: ssserver [OPTION]...
A fast tunnel proxy that helps you bypass firewalls.
You can supply configurations via either config file or command line arguments.
Proxy options:
-c CONFIG path to config file
-s SERVER_ADDR server address, default: 0.0.0.0
-p SERVER_PORT server port, default: 8388
-k PASSWORD password
-m METHOD encryption method, default: aes-256-cfb
-o OBFS obfsplugin, default: http_simple
-t TIMEOUT timeout in seconds, default: 300
--fast-open use TCP_FASTOPEN, requires Linux 3.7+
--workers WORKERS number of workers, available on Unix/Linux
  --forbidden-ip IPLIST  comma separated IP list forbidden to connect
--manager-address ADDR optional server manager UDP address, see wiki
General options:
-h, --help show this help message and exit
-d start/stop/restart daemon mode
--pid-file PID_FILE pid file for daemon mode
--log-file LOG_FILE log file for daemon mode
--user USER username to run as
-v, -vv verbose mode
-q, -qq quiet mode, only show warnings/errors
--version show version information
Online help: <https://github.com/shadowsocks/shadowsocks>
''')
def _decode_list(data):
rv = []
for item in data:
if hasattr(item, 'encode'):
item = item.encode('utf-8')
elif isinstance(item, list):
item = _decode_list(item)
elif isinstance(item, dict):
item = _decode_dict(item)
rv.append(item)
return rv
def _decode_dict(data):
rv = {}
for key, value in data.items():
if hasattr(value, 'encode'):
value = value.encode('utf-8')
elif isinstance(value, list):
value = _decode_list(value)
elif isinstance(value, dict):
value = _decode_dict(value)
rv[key] = value
return rv
def parse_json_in_str(data):
# parse json and convert everything from unicode to str
return json.loads(data, object_hook=_decode_dict)
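# Sketch of the conversion performed above (hypothetical input): on Python 3,
#   parse_json_in_str('{"server": "example.org"}')
# yields {'server': b'example.org'} -- _decode_dict encodes every string value
# to UTF-8 bytes while leaving the keys untouched.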
| 35.450839
| 97
| 0.574511
|
0fed9219bffd444bafa85622fd3377d76e50c5c1
| 3,285
|
py
|
Python
|
mesh_process_2d.py
|
TOPDyn/TOPDyn
|
27f5d2e025de99f14e755dd377e8177e24aa87b2
|
[
"MIT"
] | 9
|
2020-12-09T12:56:02.000Z
|
2021-07-07T14:48:24.000Z
|
mesh_process_2d.py
|
TOPDyn/TOPDyn
|
27f5d2e025de99f14e755dd377e8177e24aa87b2
|
[
"MIT"
] | null | null | null |
mesh_process_2d.py
|
TOPDyn/TOPDyn
|
27f5d2e025de99f14e755dd377e8177e24aa87b2
|
[
"MIT"
] | 3
|
2020-12-08T21:52:54.000Z
|
2020-12-10T22:32:38.000Z
|
import gmsh
import numpy as np
def import_mesh(path):
coord, connect = generate(path)
return coord, connect
def generate(path):
_initialize_gmsh(path)
_set_gmsh_options()
nodal_coordinates_matrix, connectivity_matrix = _get_matrices()
_finalize_gmsh()
return nodal_coordinates_matrix, connectivity_matrix
def _initialize_gmsh(path):
gmsh.initialize('', False)
gmsh.open(path)
def _set_gmsh_options():
gmsh.option.setNumber('Mesh.Optimize', 1)
gmsh.option.setNumber('Mesh.OptimizeNetgen', 0)
gmsh.option.setNumber('Mesh.HighOrderOptimize', 0)
gmsh.option.setNumber('Mesh.ElementOrder', 1)
gmsh.option.setNumber('Mesh.Algorithm', 6)
gmsh.option.setNumber('Mesh.RandomFactor', 1e-6)
gmsh.option.setNumber('Geometry.Tolerance', 1e-4)
def _finalize_gmsh():
gmsh.finalize()
def _get_matrices():
# Algorithm 2
for s in gmsh.model.getEntities(2):
gmsh.model.mesh.setTransfiniteSurface(s[1])
gmsh.model.mesh.setRecombine(s[0], s[1])
gmsh.model.mesh.generate(2)
gmsh.model.mesh.removeDuplicateNodes()
node_indexes, coords, _ = gmsh.model.mesh.getNodes(-1, -1, False)
(unique, counts) = np.unique(node_indexes, return_counts=True)
frequencies = np.asarray((unique, counts)).T
_, element_indexes, connectivity = gmsh.model.mesh.getElements()
map_nodes = dict(zip(node_indexes, np.arange(1, len(node_indexes)+1, 1)))
nodal_coordinates_matrix = _get_nodal_coordinates_matrix(node_indexes, coords, map_nodes)
nelx = len(nodal_coordinates_matrix[nodal_coordinates_matrix[:, 2] == nodal_coordinates_matrix[0, 2]]) - 1
nely = len(nodal_coordinates_matrix[nodal_coordinates_matrix[:, 1] == nodal_coordinates_matrix[0, 1]]) - 1
connectivity_matrix = _get_connectivity_matrix(element_indexes[1], connectivity[1], nelx, nely) #- (element_indexes[1][0] - 1)
return nodal_coordinates_matrix, connectivity_matrix
def _get_nodal_coordinates_matrix(indexes, coords, map_nodes):
nodal_coordinates_matrix = np.zeros((len(indexes), 3), dtype=float)
for i, (index, coord) in enumerate(zip(indexes, split_sequence(coords, 3))):
x = mm_to_m(coord[0])
y = mm_to_m(coord[1])
nodal_coordinates_matrix[i,:] = [map_nodes[index], x, y]
return nodal_coordinates_matrix
def _get_connectivity_matrix(total_el, connectivities, nelx, nely):
indexes = np.arange(0, len(total_el))
connectivity_matrix = np.zeros((len(total_el), 5), dtype=int)
connectivity_matrix[:, 0] = indexes + 1
connectivity_gmsh = split_sequence(connectivities, 4)
indexes_gmsh = indexes.reshape(nely, nelx, order='F')
indexes_fem = indexes.reshape(nely, nelx)
for row_gmsh, row_fem in zip(indexes_gmsh, indexes_fem):
for col_gmsh, col_fem in zip(row_gmsh, row_fem):
connectivity_matrix[col_fem, 1:] = connectivity_gmsh[col_gmsh]
return connectivity_matrix
def split_sequence(sequence, size):
subsequences = []
for start in range(0, len(sequence), size):
end = start + size
subsequence = sequence[start:end]
subsequences.append(subsequence)
return subsequences
def mm_to_m(m):
return float(m) / 1000
def m_to_mm(m):
return float(m) * 1000
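# Quick illustration of the helpers above (hypothetical values, not project data):
#   split_sequence([1, 2, 3, 4, 5, 6], 3)  ->  [[1, 2, 3], [4, 5, 6]]
#   mm_to_m(1500.0)                        ->  1.5
#   m_to_mm(1.5)                           ->  1500.0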
| 38.197674
| 131
| 0.708676
|
504dceac582d0a622b1cbdd26d31e27df1351c6d
| 6,621
|
py
|
Python
|
oscwdFileopenclose-1.py
|
shyed2001/Python_Programming
|
93ef958e3d8aa77f9191b550972235ce4fe4a6cb
|
[
"bzip2-1.0.6"
] | 2
|
2019-05-01T04:32:14.000Z
|
2019-05-04T11:28:18.000Z
|
oscwdFileopenclose-1.py
|
shyed2001/python-learning-basics
|
93ef958e3d8aa77f9191b550972235ce4fe4a6cb
|
[
"bzip2-1.0.6"
] | null | null | null |
oscwdFileopenclose-1.py
|
shyed2001/python-learning-basics
|
93ef958e3d8aa77f9191b550972235ce4fe4a6cb
|
[
"bzip2-1.0.6"
] | null | null | null |
print("""Must run the IDE or text editor as adminstrator
to run/edit/save this program/script as/if these files are in C drive
of the computer""")
import os
dirpath = os.getcwd()
print("current directory is : " + dirpath)
foldername = os.path.basename(dirpath)
print("Directory name is : " + foldername)
import os
dirpath = os.getcwd()
print("current directory is : " + dirpath)
foldername = os.path.basename(dirpath)
print("Directory name is : " + foldername)
scriptpath = os.path.realpath(__file__)
print("Script path is : " + scriptpath)
print(""" Reading file all at once """)
fl = open("C:/ProgramFiles/Python37/TEST111.txt", "r+")
print("print the name of the file by print(fl.name)")
print(fl.name)
print("print the mode the file is opened in by print(fl.mode) ")
print(fl.mode)
print(fl.read())
fl.close()
print(""" Reading file bit by bit """)
fl1 = open("C:/ProgramFiles/Python37/pagla.txt", "r+")
print("print the name of the file by print(fl1.name)")
print(fl1.name)
print("print the mode the file is opened in by print(fl1.mode) ")
print(fl1.mode)
print("to read bit by bit we can use 'print(fl1.read(0))' ")
print(fl1.read(0))
print(fl1.read(1))
print("""" fl1.seek(0) starts reading from zero pisition of the file""")
fl1.seek(0)
print(fl1.read(2))
print(fl1.read(3))
print(fl1.read(4))
print("""" fl1.seek(0) starts reading from zero pisition of the file""")
fl1.seek(0)
print(fl1.read(5))
print(fl1.read(7))
print(fl1.read(9))
fl1.close()
print(""" Reading file line by line """)
fl1 = open("C:/ProgramFiles/Python37/pagla.txt", "r+")
print("print the name of the file by print(fl1.name)")
print(fl1.name)
print("print the mode the file is opened in by print(fl1.mode) ")
print(fl1.mode)
print("""to read line by line we can use one or many
'print(fl1.readline())' for each desired line """)
print(fl1.readline())
print(fl1.readline())
print(fl1.readline(7))
print("""to read all the content as a list we can use
'print(fl1.readlines())' for all desired line """)
print(fl1.readlines())
fl1.close()
print(""" the above readlines code will show list items
starting from line 3, because lines 0, 1, and 2 were
already read after the file had been opened """)
print(""" also can open file by code block -
"with open("pagla.txt", "r+") as fl1:
pass"
that was a context manager. the program will automatically close the file
when outside the block and if there is any error, the code will exit
and thus close the file, so the file will not be corrupted
""")
with open("C:/ProgramFiles/Python37/pagla.txt", "r+") as fl1:
pass
print(""" also can read content of file by code block -
with open("C:/ProgramFiles/Python37/pagla.txt", "r+") as fl1:
fl1_content= fl1.read()
print(fl1_content)
""")
with open("C:/ProgramFiles/Python37/pagla.txt", "r+") as fl1:
fl1_content= fl1.read()
print(fl1_content)
print(""" also can read content of file in list form by code block -
with open("pagla.txt", "r+") as fl1:
fl1_contentlines= fl1.readlines()
print(fl1_contentlines)
""")
with open("C:/ProgramFiles/Python37/pagla.txt", "r+") as fl1:
fl1_contentlines= fl1.readlines()
print(fl1_contentlines)
print(""" also can read one line at a time form the file by code block,
(here shown for two lines) -
with open("pagla.txt", "r+") as fl1:
fl1_contentperline= fl1.readline()
print(fl1_contentperline)
fl1_contentperline= fl1.readline()
print(fl1_contentperline)
""")
print(""" put end='' after every print line statement to
stop the auto newline after each line is printed
""")
with open("C:/ProgramFiles/Python37/pagla.txt", "r+") as fl1:
fl1_contentperline= fl1.readline()
print(fl1_contentperline)
print(fl1_content)
print("""" print (fl1.mode) tell us the mode of the file""")
print(fl1.mode)
print("""" print (fl1.tell()) tell us where we are now in the file""")
fl1_contentperline= fl1.readline()
print(fl1_contentperline)
with open("C:/ProgramFiles/Python37/pagla.txt", "r+") as fl1:
fl1_contentperline= fl1.readline()
print(fl1_contentperline,end='')
fl1_contentperline= fl1.readline()
print(fl1_contentperline, end='')
print("""" fl1.seek(0) starts reading from zero pisition of the file""")
fl1.seek(0)
fl1_contentperline= fl1.readline()
print(fl1_contentperline, end='')
print(""" also can read content 75 BIT at once in this case of file by
code block -
with open("pagla.txt", "r+") as fl1:
fl1_content= fl1.read()
print(fl1_content)
if there are no characters left in the file as specified in the program script, the
program will print an empty string """)
with open("C:/ProgramFiles/Python37/pagla.txt", "r+") as fl1:
fl1_content= fl1.read(75)
print(fl1_content)
print("""" print (fl1.mode) tell us the mode of the file""")
print(fl1.mode)
print("""" print (fl1.tell()) tell us where we are now in the file""")
print (fl1.tell())
print("""" fl1.seek(0) starts reading from zero pisition of the file""")
fl1.seek(0)
fl1_content= fl1.read(75)
print(fl1_content)
fl1_content= fl1.read(75)
print(fl1_content)
print(""" also can read content by for loop in this case of file by
code block -
with open("pagla.txt", "r+") as fl1:
for line in fl11:
print (line, end='')
this will save us from running out of memory at once while trying to
read a very large file at once """)
with open("C:/ProgramFiles/Python37/pagla.txt", "r+") as fl11:
print(fl11.mode)
for line in fl11:
print (line, end='')
print(""" also can read content by 'with' and 'while' loop in this case of
file by code block -
with open("C:/ProgramFiles/Python37/pagla.txt", "r+") as fl11:
size_to_read = 10
f_content = fl11.read(size_to_read)
while len (f_content) > 0:
print (f_content, end='*')
f_content=fl11.read(size_to_read)
reading the whole file 10 characters at a time. """)
with open("C:/ProgramFiles/Python37/pagla.txt", "r+") as fl11:
print("""" print (fl11.mode) tell us the mode of the file""")
print(fl11.mode)
print("""" print (fl11.tell()) tell us where we are now in the file""")
print (fl11.tell())
size_to_read = 10
f_content = fl11.read(size_to_read)
while len (f_content) > 0:
print (f_content, end='*')
print (fl11.tell())
f_content=fl11.read(size_to_read)
print("""" /কি /কওরীন /আআই \আপওনীরা
\ঊহাট \ইস \টহিস
\ইহা \কি \? """)
| 34.128866
| 77
| 0.65987
|
da7a9073a49a334098b3a1ac5e905171474399f9
| 2,866
|
py
|
Python
|
ambari-common/src/main/python/resource_management/libraries/providers/xml_config.py
|
kennyballou/ambari
|
8985bcf11296d540a861a8634c17d6b9b1accd5a
|
[
"Apache-2.0"
] | null | null | null |
ambari-common/src/main/python/resource_management/libraries/providers/xml_config.py
|
kennyballou/ambari
|
8985bcf11296d540a861a8634c17d6b9b1accd5a
|
[
"Apache-2.0"
] | null | null | null |
ambari-common/src/main/python/resource_management/libraries/providers/xml_config.py
|
kennyballou/ambari
|
8985bcf11296d540a861a8634c17d6b9b1accd5a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ambari Agent
"""
import time
import os
import resource_management
from resource_management.core.resources import File
from resource_management.core.providers import Provider
from resource_management.core.source import InlineTemplate
from resource_management.libraries.functions.format import format
from resource_management.core.environment import Environment
from resource_management.core.logger import Logger
class XmlConfigProvider(Provider):
def action_create(self):
filename = self.resource.filename
xml_config_provider_config_dir = self.resource.conf_dir
# |e - for html-like escaping of <,>,',"
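    # e.g. a hypothetical property value of 1<2 would be written as 1&lt;2 in the generated XML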
config_content = InlineTemplate('''<!--{{time.asctime(time.localtime())}}-->
<configuration>
{% for key, value in configurations_dict|dictsort %}
<property>
<name>{{ key|e }}</name>
<value>{{ resource_management.core.source.InlineTemplate(value).get_content().strip() |e }}</value>
{%- if not configuration_attrs is none -%}
{%- for attrib_name, attrib_occurances in configuration_attrs.items() -%}
{%- for property_name, attrib_value in attrib_occurances.items() -%}
{% if property_name == key and attrib_name %}
<{{attrib_name|e}}>{{attrib_value|e}}</{{attrib_name|e}}>
{%- endif -%}
{%- endfor -%}
{%- endfor -%}
{%- endif %}
</property>
{% endfor %}
</configuration>''', extra_imports=[time, resource_management, resource_management.core, resource_management.core.source], configurations_dict=self.resource.configurations,
configuration_attrs=self.resource.configuration_attributes)
xml_config_dest_file_path = os.path.join(xml_config_provider_config_dir, filename)
Logger.info("Generating config: {0}".format(xml_config_dest_file_path))
with Environment.get_instance_copy() as env:
File (xml_config_dest_file_path,
content = config_content,
owner = self.resource.owner,
group = self.resource.group,
mode = self.resource.mode,
encoding = self.resource.encoding
)
| 40.942857
| 174
| 0.72889
|
25c9ffe039c200a717a48b5c8f7da0cc1ed5378c
| 2,699
|
py
|
Python
|
tests/test_app.py
|
abusyd1/SFIAProject1
|
96246383b993ffb1f9cd98d378180edf0db20d3d
|
[
"Unlicense"
] | null | null | null |
tests/test_app.py
|
abusyd1/SFIAProject1
|
96246383b993ffb1f9cd98d378180edf0db20d3d
|
[
"Unlicense"
] | null | null | null |
tests/test_app.py
|
abusyd1/SFIAProject1
|
96246383b993ffb1f9cd98d378180edf0db20d3d
|
[
"Unlicense"
] | 1
|
2021-06-02T06:59:05.000Z
|
2021-06-02T06:59:05.000Z
|
#!/usr/bin/python3
import unittest
from flask_testing import TestCase
from flask import url_for
from application import app, db
from application.models import Parent, Player
class TestBase(TestCase):
def create_app(self):
app.config.update(SQLALCHEMY_DATABASE_URI="sqlite:///")
app.config['SECRET_KEY'] = "testingtesting"
return app
def setUp(self):
db.drop_all()
db.create_all()
testteam1=Parent(name="testteam1", league="testleague")
testteam2=Parent(name="testteam2", league="testleague")
testteam3=Parent(name="testteam3", league="testleague")
db.session.add(testteam1)
db.session.add(testteam2)
db.session.add(testteam3)
db.session.commit()
db.session.add_all([
Player(name="testplayer1", age="15", position="Defender", team=testteam1),
Player(name="testplayer2", age="16", position="Midfielder", team=testteam2),
Player(name="testplayer3", age="17", position="Forward", team=testteam3)])
db.session.commit()
def tearDown(self):
db.drop_all()
class TestAccess(TestBase):
def test_access_home(self):
response = self.client.get(url_for('home'))
self.assertEqual(response.status_code, 200)
def test_access_parent(self):
response=self.client.get(url_for('parent', id=1))
self.assertEqual(response.status_code, 200)
def test_access_player(self):
response=self.client.get(url_for('player', id=1))
self.assertEqual(response.status_code, 200)
def test_find_parent(self):
response = self.client.get(url_for('home'))
self.assertIn(b'testteam1', response.data)
def test_find_player(self):
response=self.client.get(url_for('parent', id=1))
self.assertIn(b'testplayer1', response.data)
class TestAdding(TestBase):
def test_add_parent(self):
response = self.client.post(url_for('add'), data=dict(name="testaddteam"))
self.assertIn(b'testaddteam', response.data)
class TestUpdate(TestBase):
def test_update_player(self):
response = self.client.post(url_for('edit', parent_id=1, id=1), data=dict(name="hello"))
self.assertIn(b'hello', response.data)
class TestDelete(TestBase):
def test_delete_player(self):
response=self.client.get(url_for('delete', parent_id=1, id=1), data=dict(name="testplayer1"), follow_redirects=True)
self.assertEqual(response.status_code, 200)
class TestModels(TestBase):
def test_parent_model(self):
self.assertEqual(Parent.query.count(), 3)
def test_player_model(self):
self.assertEqual(Player.query.count(), 3)
| 33.320988
| 124
| 0.674324
|
0a8c4036d640ae0b0f4e77b1dd2a8bc007884225
| 229
|
py
|
Python
|
AIClient_Python/src/aiclient/Singleton.py
|
CSGames-Archive/AI-2015
|
dcb49b93de4d2bdfb5600ae6be47ce513cc65c47
|
[
"MIT"
] | 1
|
2015-12-18T01:56:34.000Z
|
2015-12-18T01:56:34.000Z
|
AIClient_Python/src/aiclient/Singleton.py
|
CSGames-Archive/AI-2015
|
dcb49b93de4d2bdfb5600ae6be47ce513cc65c47
|
[
"MIT"
] | null | null | null |
AIClient_Python/src/aiclient/Singleton.py
|
CSGames-Archive/AI-2015
|
dcb49b93de4d2bdfb5600ae6be47ce513cc65c47
|
[
"MIT"
] | 1
|
2019-04-16T00:28:02.000Z
|
2019-04-16T00:28:02.000Z
|
'''
Created on Dec 17, 2014
@author: scarriere
'''
def Singleton(klass):
'''
Call to get the singleton instance of a class
'''
if not klass._instance:
klass._instance = klass()
return klass._instance
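# Minimal usage sketch (hypothetical class): Singleton() assumes the class already
# defines an _instance attribute, typically initialised to None.
if __name__ == "__main__":
    class GameState:
        _instance = None

    first = Singleton(GameState)
    second = Singleton(GameState)
    assert first is second  # both calls return the one shared instance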
| 17.615385
| 49
| 0.633188
|
f59970b777a0a28fc51d8d173fe3752458a50cee
| 3,551
|
py
|
Python
|
dagda/analysis/static/util/utils.py
|
datamachines/dagda
|
2eac1e59b0e039efc73af640092ad5d60bbbc1c1
|
[
"Apache-2.0"
] | null | null | null |
dagda/analysis/static/util/utils.py
|
datamachines/dagda
|
2eac1e59b0e039efc73af640092ad5d60bbbc1c1
|
[
"Apache-2.0"
] | null | null | null |
dagda/analysis/static/util/utils.py
|
datamachines/dagda
|
2eac1e59b0e039efc73af640092ad5d60bbbc1c1
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed to Dagda under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Dagda licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import json
import os
import shutil
import tempfile
from tarfile import TarFile
from tarfile import ReadError
from api.internal.internal_server import InternalServer
from log.dagda_logger import DagdaLogger
# Prepare filesystem bundle
def extract_filesystem_bundle(docker_driver, container_id=None, image_name=None):
temporary_dir = tempfile.mkdtemp()
# Get and save filesystem bundle
if container_id is not None:
image = docker_driver.get_docker_client().export(container=container_id)
name = container_id
else:
image = docker_driver.get_docker_client().get_image(image=image_name)
name = image_name.replace('/', '_').replace(':', '_')
with open(temporary_dir + "/" + name + ".tar", "wb") as file:
for chunk in image:
file.write(chunk)
file.close()
# Untar filesystem bundle
tarfile = TarFile(temporary_dir + "/" + name + ".tar")
tarfile.extractall(temporary_dir)
os.remove(temporary_dir + "/" + name + ".tar")
if image_name is not None:
layers = _get_layers_from_manifest(temporary_dir)
_untar_layers(temporary_dir, layers)
# Return
return temporary_dir
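# Illustrative call sequence (hypothetical driver object and image name): the
# returned directory holds the untarred filesystem and should be handed to
# clean_up() once the analysis is done, e.g.
#   bundle_dir = extract_filesystem_bundle(docker_driver, image_name='debian:stretch')
#   ...run static analysis over the files under bundle_dir...
#   clean_up(bundle_dir)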
# Clean the temporary directory
def clean_up(temporary_dir):
shutil.rmtree(temporary_dir)
# -- Private methods
# Gets docker image layers from manifest
def _get_layers_from_manifest(dir):
layers = []
with open(dir + "/manifest.json", "r") as manifest_json:
json_info = json.loads(''.join(manifest_json.readlines()))
if len(json_info) == 1 and 'Layers' in json_info[0]:
for layer in json_info[0]['Layers']:
layers.append(layer)
return layers
# Untar docker image layers
def _untar_layers(dir, layers):
output = {}
# Untar layer filesystem bundle
for layer in layers:
tarfile = TarFile(dir + "/" + layer)
for member in tarfile.getmembers():
output[member.name] = member
for member_name in output:
try:
tarfile.extract(output[member_name], path=dir, set_attrs=False)
except (ValueError, ReadError) as ex:
if InternalServer.is_debug_logging_enabled():
message = "Unexpected exception of type {0} occurred while untaring the docker image: {1!r}" \
.format(type(ex).__name__, ex.get_message() if type(ex).__name__ == 'DagdaError' else ex.args)
DagdaLogger.get_logger().debug(message)
except PermissionError as ex:
message = "Unexpected error occurred while untaring the docker image: " + \
"Operation not permitted on {0!r}".format(member_name)
DagdaLogger.get_logger().warn(message)
# Clean up
for layer in layers:
clean_up(dir + "/" + layer[:-10])
| 36.608247
| 114
| 0.683469
|
3185435e0833e80153adb799f68f03c9ac026175
| 6,610
|
py
|
Python
|
archive.py
|
supernifty/deduplicating_archive
|
2dfa10a6cf8bb1b2a2b55d5af6ca89adcb8b9407
|
[
"MIT"
] | null | null | null |
archive.py
|
supernifty/deduplicating_archive
|
2dfa10a6cf8bb1b2a2b55d5af6ca89adcb8b9407
|
[
"MIT"
] | null | null | null |
archive.py
|
supernifty/deduplicating_archive
|
2dfa10a6cf8bb1b2a2b55d5af6ca89adcb8b9407
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
'''
Deduplicating archiver
Copies files to a target dir and generates symlinks in the original file positions
Duplicate files point to the same file
A sqlite database tracks all generated symlinks
Example usage: python archive.py --source ./test_source/ --target ./test_target/ --verbose --dry
'''
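# Resulting layout, sketched with hypothetical paths (the archived file name is the
# SHA-256 of its content, stored under a directory named after the first two hex chars):
#   ./test_source/report.pdf         -> symlink to ./test_target/ab/ab12...ef
#   ./test_source/copy_of_report.pdf -> symlink to the same ./test_target/ab/ab12...ef
#   ./test_target/db.sqlite          -> table "link" recording (source, target, added)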
import argparse
import datetime
import hashlib
import logging
import os
import parser
import shutil
import stat
import sys
import sqlite3
BLOCKSIZE=65536 # when reading files
MIN_SIZE=1024 # min size of file to archive
UPDATE_PERIOD=10 # how often to log progress
CHECK_BYTES=True # also look at head of file content when comparing
USE_COPY=False
def check_bytes(first_buf, target_file):
'''
additional check as well as hash
'''
with open(target_file, 'rb') as fh:
target_buf = fh.read(BLOCKSIZE)
return first_buf == target_buf
def archive(source_dir, target_dir, dry, min_size=MIN_SIZE, copy=USE_COPY):
# open db
conn = sqlite3.connect(os.path.join(target_dir, 'db.sqlite'))
c = conn.cursor()
c.execute('create table if not exists link (source text, target text, added integer)')
conn.commit()
# find all candidates for archival
considered = 0
added = 0
source_size = 0
saved_size = 0
dry_archive = set()
absolute_source = os.path.abspath(source_dir)
absolute_target = os.path.abspath(target_dir)
last_update = datetime.datetime.now()
logging.info('archiving %s to %s', absolute_source, absolute_target)
for root, dirnames, filenames in os.walk(absolute_source, followlinks=True):
for filename in filenames:
considered += 1
if datetime.datetime.now() - last_update > datetime.timedelta(seconds=UPDATE_PERIOD):
logging.info('added %i files, considered %i files, total size %i bytes, saved size %i bytes', added, considered, source_size, saved_size)
last_update = datetime.datetime.now()
source_file = os.path.join(root, filename)
if os.path.islink(source_file):
logging.debug('skipping %s: is a symlink', source_file)
continue
logging.debug('processing %s', source_file)
# find the hash of this file
file_size = os.stat(source_file).st_size
source_size += file_size
if file_size < min_size:
logging.debug('skipping %s: file size is %i, smaller than %i', source_file, file_size, min_size)
continue
try:
hasher = hashlib.sha256()
with open(source_file, 'rb') as fh:
first_buf = buf = fh.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = fh.read(BLOCKSIZE)
h = hasher.hexdigest()
target_file = os.path.join(absolute_target, h[:2], h)
if os.path.exists(target_file) and file_size == os.stat(target_file).st_size or dry and h in dry_archive: # we can symlink to the existing file
if dry:
logging.debug('would create symlink to existing file: %s -> %s', target_file, source_file)
else:
if CHECK_BYTES and not check_bytes(first_buf, target_file):
logging.error('skipping %s: check_bytes failed.', source_file)
continue
os.remove(source_file)
os.symlink(target_file, source_file)
c.execute('insert into link (source, target, added) values (?, ?, ?)', (source_file, target_file, datetime.datetime.now()))
conn.commit()
logging.debug('symlink to existing file: %s -> %s', source_file, target_file)
saved_size += file_size
else: # mv the file to the archive
if dry:
logging.debug('would move file to archive: %s -> %s', source_file, target_file)
dry_archive.add(h)
else:
if not os.path.exists(os.path.join(absolute_target, h[:2])):
os.makedirs(os.path.join(absolute_target, h[:2]))
if copy:
shutil.copy(source_file, target_file)
os.remove(source_file)
else:
shutil.move(source_file, target_file)
current = stat.S_IMODE(os.lstat(target_file).st_mode)
os.chmod(target_file, current & ~stat.S_IWUSR & ~stat.S_IWGRP & ~stat.S_IWOTH) # make the file read only
os.symlink(target_file, source_file) # link to it
c.execute('insert into link (source, target, added) values (?, ?, ?)', (source_file, target_file, datetime.datetime.now()))
conn.commit()
except IOError as ex:
logging.warn('skipping %s: exception: %s', source_file, ex)
continue
added += 1
logging.info('done archiving %s to %s: %i added out of %i files considered. total size considered %i bytes, saved %i bytes', absolute_source, absolute_target, added, considered, source_size, saved_size)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Archive files and deduplicate')
parser.add_argument('--source', nargs='+', required=True, help='source directory containing files to archive')
parser.add_argument('--target', required=True, help='target directory where files will be copied to')
parser.add_argument('--dry', action='store_true', help='just log what would be done')
parser.add_argument('--verbose', action='store_true', help='include more logging')
parser.add_argument('--min_size', type=int, default=1024, help='minimum file size to archive')
parser.add_argument('--copy', action='store_true', default=False, help='copy/rm files instead of moving')
args = parser.parse_args()
if args.verbose:
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG)
else:
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
logging.info('starting archiver with parameters %s...', sys.argv)
for source in args.source:
archive(source, args.target, args.dry, args.min_size, args.copy)
| 45.273973
| 206
| 0.604992
|
532f1de00026796999be2dfecd7a5bb0a4704b45
| 2,299
|
py
|
Python
|
fuji_server/client/fuji_test.py
|
FAIRsFAIR/fuji
|
71b771ec29b4a4405720b87a32631d05f4543a7b
|
[
"MIT"
] | null | null | null |
fuji_server/client/fuji_test.py
|
FAIRsFAIR/fuji
|
71b771ec29b4a4405720b87a32631d05f4543a7b
|
[
"MIT"
] | null | null | null |
fuji_server/client/fuji_test.py
|
FAIRsFAIR/fuji
|
71b771ec29b4a4405720b87a32631d05f4543a7b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import configparser as ConfigParser
import json
import os
from pathlib import Path
from fuji_server.controllers.fair_check import FAIRCheck
from fuji_server.helper.preprocessor import Preprocessor
identifier = 'https://doi.org/10.1594/PANGAEA.902845'
oai_pmh = 'http://ws.pangaea.de/oai/'
debug = True
def main():
config = ConfigParser.ConfigParser()
my_path = Path(__file__).parent.parent
ini_path = os.path.join(my_path,'config','server.ini')
config.read(ini_path)
YAML_DIR = config['SERVICE']['yaml_directory']
METRIC_YAML = config['SERVICE']['metrics_yaml']
METRIC_YML_PATH = os.path.join(my_path, YAML_DIR , METRIC_YAML)
SPDX_URL = config['EXTERNAL']['spdx_license_github']
DATACITE_API_REPO = config['EXTERNAL']['datacite_api_repo']
RE3DATA_API = config['EXTERNAL']['re3data_api']
METADATACATALOG_API = config['EXTERNAL']['metadata_catalog']
isDebug = config.getboolean('SERVICE', 'debug_mode')
preproc = Preprocessor()
preproc.retrieve_metrics_yaml(METRIC_YML_PATH)
print('Total metrics defined: {}'.format(preproc.get_total_metrics()))
isDebug = config.getboolean('SERVICE', 'debug_mode')
preproc.retrieve_licenses(SPDX_URL, isDebug)
preproc.retrieve_datacite_re3repos(RE3DATA_API, DATACITE_API_REPO, isDebug)
preproc.retrieve_metadata_standards(METADATACATALOG_API, isDebug)
print('Total SPDX licenses : {}'.format(preproc.get_total_licenses()))
print('Total re3repositories found from datacite api : {}'.format(len(preproc.getRE3repositories())))
print('Total subjects area of imported metadata standards : {}'.format(len(preproc.metadata_standards)))
ft = FAIRCheck(uid=identifier, oai=oai_pmh, test_debug=debug)
uid_result, pid_result = ft.check_unique_persistent()
core_metadata_result = ft.check_minimal_metatadata()
content_identifier_included_result = ft.check_content_identifier_included()
check_searchable_result = ft.check_searchable()
license_result = ft.check_license()
relatedresources_result = ft.check_relatedresources()
results = [uid_result, pid_result, core_metadata_result, content_identifier_included_result, license_result]
print(json.dumps(results, indent=4, sort_keys=True))
if __name__ == '__main__':
main()
| 44.211538
| 112
| 0.759896
|
dbe0d87eea3b99c3538408172b57144e2ac47916
| 948
|
py
|
Python
|
openstates/data/migrations/0009_auto_20200429_0913.py
|
washabstract/openstates-core
|
ea69564f1f56fe4a80181b0aa715731bbc47e3f5
|
[
"MIT"
] | 9
|
2020-04-04T00:19:07.000Z
|
2022-02-27T02:24:12.000Z
|
openstates/data/migrations/0009_auto_20200429_0913.py
|
washabstract/openstates-core
|
ea69564f1f56fe4a80181b0aa715731bbc47e3f5
|
[
"MIT"
] | 17
|
2020-03-31T18:19:59.000Z
|
2022-01-03T15:18:48.000Z
|
openstates/data/migrations/0009_auto_20200429_0913.py
|
washabstract/openstates-core
|
ea69564f1f56fe4a80181b0aa715731bbc47e3f5
|
[
"MIT"
] | 19
|
2020-04-10T21:32:21.000Z
|
2022-03-02T20:23:21.000Z
|
# Generated by Django 3.0.5 on 2020-04-29 09:13
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("data", "0008_person_current_state")]
operations = [
migrations.AlterField(
model_name="bill",
name="first_action_date",
field=models.CharField(default=None, max_length=25, null=True),
),
migrations.AlterField(
model_name="bill",
name="latest_action_date",
field=models.CharField(default=None, max_length=25, null=True),
),
migrations.AlterField(
model_name="bill",
name="latest_action_description",
field=models.TextField(default=""),
),
migrations.AlterField(
model_name="bill",
name="latest_passage_date",
field=models.CharField(default=None, max_length=25, null=True),
),
]
| 29.625
| 75
| 0.593882
|
6450f3672d01551e2844ece9657e39444965595a
| 1,091
|
py
|
Python
|
applications/COL380/uploads/UploadDB.TarFile.b12360829ff324e8.4465736b746f702e7461722e/run.py
|
BSatyaKishore/Portal4COL380
|
0095224cf6381dad620ecc6f101c7690d38593c4
|
[
"BSD-3-Clause"
] | null | null | null |
applications/COL380/uploads/UploadDB.TarFile.b12360829ff324e8.4465736b746f702e7461722e/run.py
|
BSatyaKishore/Portal4COL380
|
0095224cf6381dad620ecc6f101c7690d38593c4
|
[
"BSD-3-Clause"
] | null | null | null |
applications/COL380/uploads/UploadDB.TarFile.b12360829ff324e8.4465736b746f702e7461722e/run.py
|
BSatyaKishore/Portal4COL380
|
0095224cf6381dad620ecc6f101c7690d38593c4
|
[
"BSD-3-Clause"
] | null | null | null |
import os
#import requests
import sys, urllib2, urllib
comp_err_file = open("compile.e", 'r')
comp_err_str = comp_err_file.read()
comp_out_file = open("compile.o", 'r')
comp_out_str = comp_out_file.read()
fileName = str(sys.argv[1])
print 'something'
data = urllib.urlencode({'fileName':fileName,'compileO':comp_out_str, 'compileE':comp_err_str})
req = urllib.urlopen("http://10.201.136.172:8000/COL380/API/Compile/", data)
#response = urllib.urlopen(req)
if (len(comp_err_str) == 0):
# compilation success
os.system("/opt/pbs/default/bin/qsub -P cse -N Test1 -lselect=1:ncpus=21:mem=24gb -l walltime=00:15:00 run1.sh")
os.system("/opt/pbs/default/bin/qsub -P cse -N Test2 -lselect=1:ncpus=21:mem=24gb -l walltime=00:15:00 run2.sh")
os.system("/opt/pbs/default/bin/qsub -P cse -N Test3 -lselect=1:ncpus=21:mem=24gb -l walltime=00:15:00 run3.sh")
os.system("/opt/pbs/default/bin/qsub -P cse -N Test4 -lselect=1:ncpus=21:mem=24gb -l walltime=00:15:00 run4.sh")
os.system("/opt/pbs/default/bin/qsub -P cse -N Test5 -lselect=1:ncpus=21:mem=24gb -l walltime=00:15:00 run5.sh")
| 43.64
| 113
| 0.721357
|
8ff31435de1bdb19fe77ce8f5867a720af7517c3
| 8,601
|
py
|
Python
|
lib/dspace.py
|
hbeyer/pylib
|
b2fdaa9159946982a6c0f9a9bd6e58a0503bcc51
|
[
"MIT"
] | null | null | null |
lib/dspace.py
|
hbeyer/pylib
|
b2fdaa9159946982a6c0f9a9bd6e58a0503bcc51
|
[
"MIT"
] | null | null | null |
lib/dspace.py
|
hbeyer/pylib
|
b2fdaa9159946982a6c0f9a9bd6e58a0503bcc51
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from lib import pica
from lib import sru
from lib import xmlreader as xr
from lib import dataset as ds
from ext import dspace_rest_client as drc
import xml.etree.ElementTree as et
import urllib.request as ur
import re
import os
import json
import shutil
import glob
import logging
class Harvester():
def __init__(self, ppn_o, folder = "downloads", diglib = "inkunabeln"):
self.valid = False
self.ppn_o = ppn_o
if not re.match("^[\d]{9,10}$", self.ppn_o):
raise Exception(f"Ungültige PPN: {self.ppn_o}")
self.folder = folder
self.diglib = diglib
self.image_list = []
file = ur.urlopen(f"http://unapi.k10plus.de/?id=gvk:ppn:{self.ppn_o}&format=picaxml")
tree = et.parse(file)
node = tree.getroot()
self.rec_o = pica.Record(node)
self.sig = self.rec_o.copies[0].sm
self.digi = self.rec_o.digi
search = re.search("http://diglib.hab.de/([^/]+)/([^/]+)/start.htm", self.digi)
try:
self.diglib = search.group(1)
except:
raise Exception("Die normalisierte Signatur konnte nicht gefunden werden")
self.norm_sig = search.group(2)
self.url_struct = f"http://diglib.hab.de/{self.diglib}/{self.norm_sig}/facsimile.xml"
status_code = ur.urlopen(self.url_struct).getcode()
if status_code != 200:
raise Exception(f"Keine facsimile.xml unter {self.url_struct}")
self.folder_ma = self.get_folder_ma()
if self.folder_ma is None:
raise Exception("Der Ordner auf dem MA-Server konnte nicht gefunden werden")
self.path = f"{self.folder}/{self.norm_sig}"
self.rec_a = None
sig_sru = self.sig.replace("(", "").replace(")", "").replace(" ", "+")
req = sru.Request_HAB()
req.prepare("pica.sgb=" + sig_sru)
wr = xr.webReader(req.url, tag = "record", namespace = "http://docs.oasis-open.org/ns/search-ws/sruResponse")
for node in wr:
rc = pica.RecordInc(node)
if rc.bbg[0] == "A":
self.rec_a = rc
self.ppn_a = self.rec_a.ppn
if self.rec_a == None:
logging.warning(f"Keine A-Aufnahme über SRU gefunden")
self.rec_a = self.rec_o
self.extract_metadata()
try:
self.meta_list = self.meta.to_list()
except:
raise Exception("Die Metadaten konnten nicht geladen werden")
self.valid = True
def __str__(self):
ret = f"Harvester für {self.digi} \n \
PPN: {self.ppn_o} (Digitalisat), {self.ppn_a} (Vorlage) \n \
Normalisierte Signatur: {self.norm_sig} \n \
Pfad Master-Images: {self.folder_ma} \n \
Valide: {'ja' if self.valid == True else 'nein'}"
return(ret)
def to_folder(self, overwrite_images = False):
logging.info(f"Harvesten des Digitalisats mit der PPN {self.ppn_o}")
self.make_folder()
self.download_xml()
self.download_images(overwrite_images = overwrite_images)
self.save_metadata()
logging.info(f"Dateien geladen im Ordner {self.path}")
def make_folder(self):
if not os.path.exists(self.path):
os.mkdir(self.path)
def download_xml(self):
url_o = f"http://unapi.k10plus.de/?id=gvk:ppn:{self.ppn_o}&format=picaxml"
try:
ur.urlretrieve(url_o, self.path + "/o-aufnahme.xml")
except:
logging.warning("Laden der O-Aufnahme fehlgeschlagen")
try:
ur.urlretrieve(f"http://unapi.k10plus.de/?id=gvk:ppn:{self.ppn_a}&format=picaxml", f"{self.path}/a-aufnahme.xml")
except:
logging.info("Laden der A-Aufnahme fehlgeschlagen")
url_mets = f"http://oai.hab.de/?verb=GetRecord&metadataPrefix=mets&identifier=oai:diglib.hab.de:ppn_{self.ppn_o}"
try:
ur.urlretrieve(url_mets, self.path + "/mets.xml")
except:
logging.info("Laden der METS fehlgeschlagen")
try:
ur.urlretrieve(self.url_struct, self.path + "/facsimile.xml")
except:
logging.warning("Laden der facsimile.xml fehlgeschlagen")
url_transcr = f"http://diglib.hab.de/{self.diglib}/{self.norm_sig}/tei-struct.xml"
try:
ur.urlretrieve(url_transcr, self.path + "/tei-struct.xml")
except:
logging.info("Keine tei-struct.xml gefunden")
def download_images(self, overwrite_images):
with open(self.path + "/facsimile.xml", "r") as file:
tree = et.parse(file)
root = tree.getroot()
for gr in root:
im_name = gr.attrib["url"].split("/").pop()
self.image_list.append(im_name)
for im in self.image_list:
original = self.folder_ma + im.replace("jpg", "tif")
target = self.path + "/" + im.replace("jpg", "tif")
if not os.path.exists(target) or overwrite_images:
shutil.copyfile(original, target)
def extract_metadata(self):
self.meta = ds.DatasetDC()
self.meta.add_entry("dc.identifier", ds.Entry(self.rec_o.digi))
self.meta.add_entry("dc.format", ds.Entry("Online resource", "eng"))
self.meta.add_entry("dc.type", ds.Entry("Digitized book", "eng"))
self.meta.add_entry("dc.title", ds.Entry(self.rec_a.title))
self.meta.add_entry("dc.date", ds.Entry(self.rec_a.date))
for pers in self.rec_a.persons:
pers.makePersName()
if pers.role == "dc.creator":
self.meta.add_entry("dc.creator", ds.Entry(pers.persName, None, "GND", pers.gnd))
else:
self.meta.add_entry("dc.contributor", ds.Entry(pers.persName, None, "GND", pers.gnd))
for pub in self.rec_a.publishers:
pub.makePersName()
self.meta.add_entry("dc.publisher", ds.Entry(pub.persName, None, "GND", pub.gnd))
for lng in self.rec_a.lang:
self.meta.add_entry("dc.language", ds.Entry(lng))
for sub in self.rec_a.subjects:
self.meta.add_entry("dc.subject", ds.Entry(sub))
mat_type = self.get_mat_type()
self.meta.add_entry("dc.description", ds.Entry(mat_type + " aus dem Bestand der Herzog August Bibliothek Wolfenbüttel", "ger"))
self.meta.add_entry("dc.rights", ds.Entry("CC BY-SA 3.0"))
self.meta.add_entry("dc.rights.uri", ds.Entry("http://diglib.hab.de/copyright.html"))
self.meta.add_entry("dcterms.rightsHolder", ds.Entry("Herzog August Bibliothek Wolfenbüttel"))
self.meta.add_entry("dc.source", ds.Entry(f"Wolfenbüttel, Herzog August Bibliothek, {self.sig}"))
try:
self.meta.add_entry("dc.relation", ds.Entry(self.rec_a.gw))
except:
pass
try:
self.meta.add_entry("dc.relation", ds.Entry(self.rec_a.istc))
except:
pass
def save_metadata(self):
meta = self.meta.to_list()
with open(self.path + "/metadata.json", "w") as target:
json.dump(meta, target, indent=2, ensure_ascii=False)
def get_mat_type(self):
if self.diglib == "inkunabeln" or re.match("14\d\d|1500", self.rec_a.date):
return("Digitalisierte Inkunabel")
return("Digitalisierter Druck")
def get_folder_ma(self):
if self.diglib == "inkunabeln":
return(f"//MASERVER/Auftraege/Master/{self.diglib}/{self.norm_sig}/")
if self.diglib == "drucke":
proceed = True
druck_no = 1
while proceed:
path = f"//MASERVER/Auftraege/Master/drucke{str(druck_no).zfill(2)}/drucke/{self.norm_sig}/"
if os.path.exists(path):
return(path)
druck_no += 1
if druck_no > 15:
proceed = False
return(None)
class Uploader():
def __init__(self, user, password, rest_url, collection):
self.valid = False
self.client = drc.DSpaceRestClient(user, password, rest_url, False, True)
if self.client is None:
raise Exception(f"Fehler beim Erzeugen des REST-Clients für {rest_url}, user {user}")
self.collection_id = self.client.get_id_by_handle(collection)
if self.collection_id is None:
raise Exception(f"ID für die Collection {collection} konnte nicht gefunden werden")
else:
self.valid = True
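# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal, hedged example of driving the Harvester defined above; the PPN
# and the download folder are made-up placeholder values, and the calls need
# network access to the GVK/HAB services to succeed.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    try:
        harvester = Harvester("123456789", folder="downloads")
        print(harvester)
        harvester.to_folder(overwrite_images=False)
    except Exception as err:
        logging.error("Harvest fehlgeschlagen: %s", err)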
| 45.75
| 135
| 0.597372
|
32d103177dad4b6314963f921f0d87e022c87cd4
| 262
|
py
|
Python
|
ABC_B/ABC027_B.py
|
ryosuke0825/atcoder_python
|
185cdbe7db44ecca1aaf357858d16d31ce515ddb
|
[
"MIT"
] | null | null | null |
ABC_B/ABC027_B.py
|
ryosuke0825/atcoder_python
|
185cdbe7db44ecca1aaf357858d16d31ce515ddb
|
[
"MIT"
] | null | null | null |
ABC_B/ABC027_B.py
|
ryosuke0825/atcoder_python
|
185cdbe7db44ecca1aaf357858d16d31ce515ddb
|
[
"MIT"
] | null | null | null |
n = int(input())
al = list(map(int, input().split()))
if sum(al) % n != 0:
print(-1)
else:
ave = sum(al) // n
population = 0
ans = 0
for a in al:
population += (a-ave)
if population != 0:
ans += 1
print(ans)
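# Worked trace of the code above with a made-up input (not an official sample):
# n = 3, al = [1, 5, 3] -> sum = 9, ave = 3
# running deviation after each element: -2, 0, 0
# the deviation is non-zero exactly once, so the printed answer is 1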
| 16.375
| 36
| 0.461832
|
051113b7f3078df61ecb80a722bb23f2afe4f31f
| 973
|
py
|
Python
|
scripts/pyqtgraph-develop/pyqtgraph/widgets/HistogramLUTWidget.py
|
kuldeepaman/tf-pose
|
8050912c52a7b4f3c8a2656f267d47ba21d093f6
|
[
"Apache-2.0"
] | null | null | null |
scripts/pyqtgraph-develop/pyqtgraph/widgets/HistogramLUTWidget.py
|
kuldeepaman/tf-pose
|
8050912c52a7b4f3c8a2656f267d47ba21d093f6
|
[
"Apache-2.0"
] | null | null | null |
scripts/pyqtgraph-develop/pyqtgraph/widgets/HistogramLUTWidget.py
|
kuldeepaman/tf-pose
|
8050912c52a7b4f3c8a2656f267d47ba21d093f6
|
[
"Apache-2.0"
] | null | null | null |
"""
Widget displaying an image histogram along with gradient editor. Can be used to adjust the appearance of images.
This is a wrapper around HistogramLUTItem
"""
from ..Qt import QtGui, QtCore
from .GraphicsView import GraphicsView
from ..graphicsItems.HistogramLUTItem import HistogramLUTItem
__all__ = ['HistogramLUTWidget']
class HistogramLUTWidget(GraphicsView):
def __init__(self, parent=None, *args, **kargs):
background = kargs.get('background', 'default')
GraphicsView.__init__(self, parent, useOpenGL=False, background=background)
self.item = HistogramLUTItem(*args, **kargs)
self.setCentralItem(self.item)
self.setSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Expanding)
self.setMinimumWidth(95)
def sizeHint(self):
return QtCore.QSize(115, 200)
def __getattr__(self, attr):
return getattr(self.item, attr)
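# --- Usage sketch (illustrative, not part of pyqtgraph) ---
# Minimal example of pairing the widget with an ImageItem; the data and the
# variable names are placeholders chosen for this sketch, and only the
# histogram widget itself is shown (the image is not placed in a view).
if __name__ == '__main__':
    import numpy as np
    import pyqtgraph as pg
    app = QtGui.QApplication([])
    img_item = pg.ImageItem(np.random.normal(size=(200, 200)))
    hist = HistogramLUTWidget()
    hist.setImageItem(img_item)  # forwarded to HistogramLUTItem via __getattr__
    hist.show()
    app.exec_()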
| 28.617647
| 113
| 0.684481
|
d08ae8789898c4738d28c21f170c93f77147277f
| 3,347
|
py
|
Python
|
chainer/functions/activation/softmax.py
|
mingxiaoh/chainer-v3
|
815ff00f5eaf7944d6e8a75662ff64a2fe046a4d
|
[
"BSD-3-Clause"
] | 7
|
2017-05-08T07:02:40.000Z
|
2018-12-02T18:35:39.000Z
|
chainer/functions/activation/softmax.py
|
mingxiaoh/chainer-v3
|
815ff00f5eaf7944d6e8a75662ff64a2fe046a4d
|
[
"BSD-3-Clause"
] | null | null | null |
chainer/functions/activation/softmax.py
|
mingxiaoh/chainer-v3
|
815ff00f5eaf7944d6e8a75662ff64a2fe046a4d
|
[
"BSD-3-Clause"
] | 1
|
2021-05-27T16:52:11.000Z
|
2021-05-27T16:52:11.000Z
|
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
if cuda.cudnn_enabled:
cudnn = cuda.cudnn
libcudnn = cudnn.cudnn
_cudnn_version = libcudnn.getVersion()
_algorithm = libcudnn.CUDNN_SOFTMAX_ACCURATE
_mode = libcudnn.CUDNN_SOFTMAX_MODE_CHANNEL
class Softmax(function.Function):
"""Softmax activation function."""
def __init__(self, use_cudnn=True):
self.use_cudnn = use_cudnn
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 1)
x_type, = in_types
type_check.expect(
x_type.dtype.kind == 'f',
x_type.ndim > 1,
)
def forward(self, x):
xp = cuda.get_array_module(*x)
if (xp != numpy and cuda.cudnn_enabled and self.use_cudnn and
(_cudnn_version >= 3000 or x[0].dtype != numpy.float16)):
oz_dtype = 'd' if x[0].dtype == 'd' else 'f'
one = numpy.array(1, dtype=oz_dtype).ctypes
zero = numpy.array(0, dtype=oz_dtype).ctypes
handle = cudnn.get_handle()
x_cube = x[0].reshape(x[0].shape[:2] + (-1, 1))
desc = cudnn.create_tensor_descriptor(x_cube)
self.y = xp.empty_like(x[0])
libcudnn.softmaxForward(
handle, _algorithm, _mode, one.data, desc.value,
x_cube.data.ptr, zero.data, desc.value,
self.y.data.ptr)
else:
self.y = x[0] - x[0].max(axis=1, keepdims=True)
xp.exp(self.y, out=self.y)
self.y /= self.y.sum(axis=1, keepdims=True)
return self.y,
def backward(self, x, gy):
xp = cuda.get_array_module(*x)
if (xp != numpy and cuda.cudnn_enabled and self.use_cudnn and
(_cudnn_version >= 3000 or x[0].dtype != numpy.float16)):
oz_dtype = 'd' if x[0].dtype == 'd' else 'f'
one = numpy.array(1, dtype=oz_dtype).ctypes
zero = numpy.array(0, dtype=oz_dtype).ctypes
handle = cudnn.get_handle()
gx = xp.empty_like(x[0])
gx_cube = gx.reshape(gx.shape[:2] + (-1, 1))
desc = cudnn.create_tensor_descriptor(gx_cube)
libcudnn.softmaxBackward(
handle, _algorithm, _mode, one.data, desc.value,
self.y.data.ptr, desc.value, gy[0].data.ptr, zero.data,
desc.value, gx.data.ptr)
else:
gx = self.y * gy[0]
sumdx = gx.sum(axis=1, keepdims=True)
gx -= self.y * sumdx
return gx,
def softmax(x, use_cudnn=True):
"""Channelwise softmax function.
This function computes its softmax along the second axis. Let
:math:`x = (x_1, x_2, \\dots, x_d)^{\\top}` be the d dimensional index
array and :math:`f(x)` be the d dimensional input array. For each index
:math:`x` of the input array :math:`f(x)`, it computes the probability
:math:`p(x)` defined as
:math:`p(x) = {\\exp(f(x)) \\over \\sum_{x_2} \\exp(f(x))}`.
Args:
x (~chainer.Variable): Input variable.
use_cudnn (bool): If ``True`` and cuDNN is enabled, then this function
uses cuDNN as the core implementation.
Returns:
~chainer.Variable: Output variable.
"""
return Softmax(use_cudnn)(x)
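# --- Illustrative check (sketch only, not part of Chainer) ---
# A NumPy-only rewrite of the CPU branch of Softmax.forward above, showing
# that the softmax is taken along the second axis; the input values are
# arbitrary examples.
if __name__ == '__main__':
    x = numpy.array([[1.0, 2.0, 3.0],
                     [0.0, 0.0, 0.0]], dtype=numpy.float32)
    y = x - x.max(axis=1, keepdims=True)
    y = numpy.exp(y)
    y /= y.sum(axis=1, keepdims=True)
    print(y)              # probabilities per row
    print(y.sum(axis=1))  # each row sums to 1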
| 34.864583
| 78
| 0.58291
|
1bf105a6bb6e7bdc9946f964861a0a05d19553b1
| 383
|
py
|
Python
|
titan/react_pkg/reactapp/makefile_rules.py
|
mnieber/gen
|
65f8aa4fb671c4f90d5cbcb1a0e10290647a31d9
|
[
"MIT"
] | null | null | null |
titan/react_pkg/reactapp/makefile_rules.py
|
mnieber/gen
|
65f8aa4fb671c4f90d5cbcb1a0e10290647a31d9
|
[
"MIT"
] | null | null | null |
titan/react_pkg/reactapp/makefile_rules.py
|
mnieber/gen
|
65f8aa4fb671c4f90d5cbcb1a0e10290647a31d9
|
[
"MIT"
] | null | null | null |
from moonleap import chop0
from titan.tools_pkg.makefile import MakefileRule
def get_runserver():
return MakefileRule(
name="runserver",
text=chop0(
"""
runserver:
\tyarn start
"""
),
)
def get_install():
return MakefileRule(
name="install",
text=chop0(
"""
install:
\tyarn install
"""
),
)
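# --- Illustrative use (sketch only) ---
# Prints the rule bodies defined above; this assumes MakefileRule simply
# exposes the "name" and "text" values passed to its constructor.
if __name__ == "__main__":
    for rule in (get_runserver(), get_install()):
        print(f"# rule: {rule.name}")
        print(rule.text)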
| 14.185185
| 49
| 0.556136
|
4e104be8bf4cebfc1f1fecbf28e1d4ee6ac7f7cd
| 8,948
|
py
|
Python
|
pubsub/google/cloud/pubsub_v1/subscriber/client.py
|
y1ngyang/google-cloud-python
|
1acc8c22664229b6681ff91654932998e611e1c2
|
[
"Apache-2.0"
] | null | null | null |
pubsub/google/cloud/pubsub_v1/subscriber/client.py
|
y1ngyang/google-cloud-python
|
1acc8c22664229b6681ff91654932998e611e1c2
|
[
"Apache-2.0"
] | 1
|
2021-06-25T15:16:57.000Z
|
2021-06-25T15:16:57.000Z
|
pubsub/google/cloud/pubsub_v1/subscriber/client.py
|
y1ngyang/google-cloud-python
|
1acc8c22664229b6681ff91654932998e611e1c2
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017, Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import pkg_resources
import os
import grpc
from google.api_core import grpc_helpers
from google.cloud.pubsub_v1 import _gapic
from google.cloud.pubsub_v1 import types
from google.cloud.pubsub_v1.gapic import subscriber_client
from google.cloud.pubsub_v1.subscriber import futures
from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager
from google.cloud.pubsub_v1.subscriber.policy import thread
__version__ = pkg_resources.get_distribution('google-cloud-pubsub').version
@_gapic.add_methods(subscriber_client.SubscriberClient,
blacklist=('pull', 'streaming_pull'))
class Client(object):
"""A subscriber client for Google Cloud Pub/Sub.
This creates an object that is capable of subscribing to messages.
Generally, you can instantiate this client with no arguments, and you
get sensible defaults.
Args:
policy_class (class): A class that describes how to handle
subscriptions. You may subclass the
:class:`.pubsub_v1.subscriber.policy.base.BasePolicy`
class in order to define your own consumer. This is primarily
provided to allow use of different concurrency models; the default
is based on :class:`threading.Thread`.
kwargs (dict): Any additional arguments provided are sent as keyword
arguments to the underlying
:class:`~.gapic.pubsub.v1.subscriber_client.SubscriberClient`.
Generally, you should not need to set additional keyword
arguments.
"""
def __init__(self, policy_class=thread.Policy, **kwargs):
# Sanity check: Is our goal to use the emulator?
# If so, create a grpc insecure channel with the emulator host
# as the target.
if os.environ.get('PUBSUB_EMULATOR_HOST'):
kwargs['channel'] = grpc.insecure_channel(
target=os.environ.get('PUBSUB_EMULATOR_HOST'),
)
# Use a custom channel.
# We need this in order to set appropriate default message size and
# keepalive options.
if 'channel' not in kwargs:
kwargs['channel'] = grpc_helpers.create_channel(
credentials=kwargs.pop('credentials', None),
target=self.target,
scopes=subscriber_client.SubscriberClient._DEFAULT_SCOPES,
options={
'grpc.max_send_message_length': -1,
'grpc.max_receive_message_length': -1,
'grpc.keepalive_time_ms': 30000,
}.items(),
)
# Add the metrics headers, and instantiate the underlying GAPIC
# client.
self._api = subscriber_client.SubscriberClient(**kwargs)
# The subscription class is responsible for retrieving and dispatching
# messages.
self._policy_class = policy_class
@property
def target(self):
"""Return the target (where the API is).
Returns:
str: The location of the API.
"""
return subscriber_client.SubscriberClient.SERVICE_ADDRESS
@property
def api(self):
"""The underlying gapic API client."""
return self._api
def subscribe(self, subscription, callback=None, flow_control=()):
"""Return a representation of an individual subscription.
This method creates and returns a ``Consumer`` object (that is, a
:class:`~.pubsub_v1.subscriber._consumer.Consumer`
subclass) bound to the topic. It does `not` create the subscription
on the backend (or do any API call at all); it simply returns an
object capable of doing these things.
If the ``callback`` argument is provided, then the :meth:`open` method
is automatically called on the returned object. If ``callback`` is
not provided, the subscription is returned unopened.
.. note::
It only makes sense to provide ``callback`` here if you have
already created the subscription manually in the API.
Args:
subscription (str): The name of the subscription. The
subscription should have already been created (for example,
by using :meth:`create_subscription`).
callback (function): The callback function. This function receives
the :class:`~.pubsub_v1.types.PubsubMessage` as its only
argument.
flow_control (~.pubsub_v1.types.FlowControl): The flow control
settings. Use this to prevent situations where you are
inundated with too many messages at once.
Returns:
~.pubsub_v1.subscriber._consumer.Consumer: An instance
of the defined ``consumer_class`` on the client.
Raises:
TypeError: If ``callback`` is not callable.
"""
flow_control = types.FlowControl(*flow_control)
subscr = self._policy_class(self, subscription, flow_control)
if callable(callback):
subscr.open(callback)
elif callback is not None:
error = '{!r} is not callable, please check input'.format(callback)
raise TypeError(error)
return subscr
def subscribe_experimental(
self, subscription, callback, flow_control=(),
scheduler_=None):
"""Asynchronously start receiving messages on a given subscription.
This method starts a background thread to begin pulling messages from
a Pub/Sub subscription and scheduling them to be processed using the
provided ``callback``.
The ``callback`` will be called with an individual
:class:`google.cloud.pubsub_v1.subscriber.message.Message`. It is the
responsibility of the callback to either call ``ack()`` or ``nack()``
on the message when it finished processing. If an exception occurs in
the callback during processing, the exception is logged and the message
is ``nack()`` ed.
The ``flow_control`` argument can be used to control the rate of
message processing.
This method starts the receiver in the background and returns a
*Future* representing its execution. Waiting on the future (calling
``result()``) will block forever or until a non-recoverable error
is encountered (such as loss of network connectivity). Cancelling the
future will signal the process to shutdown gracefully and exit.
Example
.. code-block:: python
from google.cloud import pubsub_v1
subscriber_client = pubsub_v1.SubscriberClient()
# existing subscription
subscription = subscriber_client.subscription_path(
'my-project-id', 'my-subscription')
def callback(message):
print(message)
message.ack()
future = subscriber_client.subscribe_experimental(
subscription, callback)
try:
future.result()
except KeyboardInterrupt:
future.cancel()
Args:
subscription (str): The name of the subscription. The
subscription should have already been created (for example,
by using :meth:`create_subscription`).
callback (Callable[~.pubsub_v1.subscriber.message.Message]):
The callback function. This function receives the message as
its only argument and will be called from a different thread/
process depending on the scheduling strategy.
flow_control (~.pubsub_v1.types.FlowControl): The flow control
settings. Use this to prevent situations where you are
inundated with too many messages at once.
Returns:
google.cloud.pubsub_v1.futures.StreamingPullFuture: A Future object
that can be used to manage the background stream.
"""
flow_control = types.FlowControl(*flow_control)
manager = streaming_pull_manager.StreamingPullManager(
self, subscription, flow_control)
future = futures.StreamingPullFuture(manager)
manager.open(callback)
return future
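# --- Usage sketch (illustrative, not part of the library source) ---
# Hedged example of limiting outstanding messages with FlowControl; the
# project id, subscription name and callback are placeholders, and running
# this requires valid Google Cloud credentials.
if __name__ == '__main__':
    client = Client()
    path = client.subscription_path('my-project-id', 'my-subscription')
    flow = types.FlowControl(max_messages=10)

    def on_message(message):
        print(message.data)
        message.ack()

    future = client.subscribe_experimental(path, on_message, flow_control=flow)
    future.result()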
| 40.306306
| 79
| 0.65333
|
7d5008336701fe75524a30ee1d3c5bf559dab852
| 803
|
py
|
Python
|
week6/w6e4.py
|
melphick/pybasics
|
68508d10b7509943b629b3c627252de60b6a5744
|
[
"Apache-2.0"
] | null | null | null |
week6/w6e4.py
|
melphick/pybasics
|
68508d10b7509943b629b3c627252de60b6a5744
|
[
"Apache-2.0"
] | null | null | null |
week6/w6e4.py
|
melphick/pybasics
|
68508d10b7509943b629b3c627252de60b6a5744
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
"""
This script will take one variable 'ip_address' and return the IP address in
dotted binary format always padded to eight binary digits (for example,
00001010.01011000.00010001.00010111).
"""
import sys
def ip_bin_convert(ip_addr):
ip_list = ip_addr.split('.')
bin_list = ip_list[:]
for i,element in enumerate(bin_list):
bin_list[i] = bin (int (element))
bin_str_list = bin_list[:]
bin_ip = []
for i,element in enumerate(bin_str_list):
str = list(element)
str = str[2:]
count = range(8 - len(str))
for i in count:
var = '0'
str.insert(0,var)
bin_ip_octet = ''.join(str)
bin_ip.append(bin_ip_octet)
bin_str = '{0}.{1}.{2}.{3}'.format(bin_ip[0], bin_ip[1], bin_ip[2], bin_ip[3])
return bin_str
| 22.305556
| 82
| 0.636364
|
1fca27e4be03dd6bf037b15519204c0c6681fbbe
| 341
|
py
|
Python
|
src/DbMeta_1.py
|
ytyaru/Python.Metaclass.Private.20180224190000
|
523a7e90f5752926a6447bc42e617b98e5c2c4a6
|
[
"CC0-1.0"
] | null | null | null |
src/DbMeta_1.py
|
ytyaru/Python.Metaclass.Private.20180224190000
|
523a7e90f5752926a6447bc42e617b98e5c2c4a6
|
[
"CC0-1.0"
] | null | null | null |
src/DbMeta_1.py
|
ytyaru/Python.Metaclass.Private.20180224190000
|
523a7e90f5752926a6447bc42e617b98e5c2c4a6
|
[
"CC0-1.0"
] | null | null | null |
class DbMeta(type):
def __new__(cls, name, bases, attrs):
attrs['_{0}__secret'.format(name)] = 'my_secret' # Db.__secret 定義
return type.__new__(cls, name, bases, attrs)
def __init__(cls, name, bases, attrs):
setattr(cls, 'Secret', property(lambda cls: attrs['_{0}__secret'.format(name)])) # Db.Secretプロパティ定義
| 42.625
| 107
| 0.653959
|
2c8c0e2284d5557163ecf63c940dd999f70eef8a
| 5,315
|
py
|
Python
|
cookie_consent/util.py
|
nicktgr15/django-cookie-consent
|
5e455baa37c6216cc6989ee285e0c20253c7ffdc
|
[
"BSD-2-Clause"
] | 1
|
2020-08-02T00:38:50.000Z
|
2020-08-02T00:38:50.000Z
|
cookie_consent/util.py
|
nicktgr15/django-cookie-consent
|
5e455baa37c6216cc6989ee285e0c20253c7ffdc
|
[
"BSD-2-Clause"
] | null | null | null |
cookie_consent/util.py
|
nicktgr15/django-cookie-consent
|
5e455baa37c6216cc6989ee285e0c20253c7ffdc
|
[
"BSD-2-Clause"
] | 4
|
2019-05-02T15:12:51.000Z
|
2021-01-20T13:12:44.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.utils.encoding import smart_str
from cookie_consent.cache import (
get_cookie_group,
all_cookie_groups,
get_cookie,
)
from cookie_consent.models import (
ACTION_ACCEPTED,
ACTION_DECLINED,
LogItem,
)
from cookie_consent.conf import settings
def parse_cookie_str(cookie):
dic = {}
if not cookie:
return dic
for c in cookie.split("|"):
key, value = c.split("=")
dic[key] = value
return dic
def dict_to_cookie_str(dic):
return "|".join(["%s=%s" % (k, v) for k, v in dic.items() if v])
def get_cookie_dict_from_request(request):
cookie_str = request.COOKIES.get(settings.COOKIE_CONSENT_NAME)
return parse_cookie_str(cookie_str)
def set_cookie_dict_to_response(response, dic):
response.set_cookie(settings.COOKIE_CONSENT_NAME,
dict_to_cookie_str(dic),
settings.COOKIE_CONSENT_MAX_AGE)
def get_cookie_value_from_request(request, varname, cookie=None):
"""
Returns if cookie group or its specific cookie has been accepted.
Returns True or False when cookie is accepted or declined or None
if cookie is not set.
"""
cookie_dic = get_cookie_dict_from_request(request)
if not cookie_dic:
return None
cookie_group = get_cookie_group(varname=varname)
if not cookie_group:
return None
if cookie:
name, domain = cookie.split(":")
cookie = get_cookie(cookie_group, name, domain)
else:
cookie = None
version = cookie_dic.get(varname, None)
if version == settings.COOKIE_CONSENT_DECLINE:
return False
if version is None:
return None
if not cookie:
v = cookie_group.get_version()
else:
v = cookie.get_version()
if version >= v:
return True
return None
def get_cookie_groups(varname=None):
if not varname:
return all_cookie_groups().values()
keys = varname.split(",")
return [g for k, g in all_cookie_groups().items() if k in keys]
def accept_cookies(request, response, varname=None):
"""
Accept cookies in Cookie Group specified by ``varname``.
"""
cookie_dic = get_cookie_dict_from_request(request)
for cookie_group in get_cookie_groups(varname):
cookie_dic[cookie_group.varname] = cookie_group.get_version()
LogItem.objects.create(action=ACTION_ACCEPTED,
cookiegroup=cookie_group,
version=cookie_group.get_version())
set_cookie_dict_to_response(response, cookie_dic)
def delete_cookies(response, cookie_group):
if cookie_group.is_deletable:
for cookie in cookie_group.cookie_set.all():
response.delete_cookie(smart_str(cookie.name),
cookie.path, cookie.domain)
def decline_cookies(request, response, varname=None):
"""
Decline and delete cookies in CookieGroup specified by ``varname``.
"""
cookie_dic = get_cookie_dict_from_request(request)
for cookie_group in get_cookie_groups(varname):
cookie_dic[cookie_group.varname] = settings.COOKIE_CONSENT_DECLINE
delete_cookies(response, cookie_group)
LogItem.objects.create(action=ACTION_DECLINED,
cookiegroup=cookie_group,
version=cookie_group.get_version())
set_cookie_dict_to_response(response, cookie_dic)
def are_all_cookies_accepted(request):
"""
Returns if all cookies are accepted.
"""
return all([get_cookie_value_from_request(request, cookie_group.varname)
for cookie_group in get_cookie_groups()])
def get_not_accepted_or_declined_cookie_groups(request):
"""
Returns all cookie groups that are neither accepted nor declined.
"""
return [cookie_group for cookie_group in get_cookie_groups()
if get_cookie_value_from_request(
request, cookie_group.varname) is None]
def is_cookie_consent_enabled(request):
"""
Returns if django-cookie-consent is enabled for given request.
"""
enabled = settings.COOKIE_CONSENT_ENABLED
if callable(enabled):
return enabled(request)
else:
return enabled
def get_cookie_string(cookie_dic):
"""
Returns cookie in format suitable for use in javascript.
"""
expires = datetime.datetime.now() + datetime.timedelta(
seconds=settings.COOKIE_CONSENT_MAX_AGE)
cookie_str = "%s=%s; expires=%s; path=/" % (
settings.COOKIE_CONSENT_NAME,
dict_to_cookie_str(cookie_dic),
expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
)
return cookie_str
def get_accepted_cookies(request):
"""
Returns all accepted cookies.
"""
cookie_dic = get_cookie_dict_from_request(request)
accepted_cookies = []
for cookie_group in all_cookie_groups().values():
version = cookie_dic.get(cookie_group.varname, None)
if not version or version == settings.COOKIE_CONSENT_DECLINE:
continue
for cookie in cookie_group.cookie_set.all():
if version >= cookie.get_version():
accepted_cookies.append(cookie)
return accepted_cookies
| 29.859551
| 76
| 0.670555
|
31a1bd54d4d443dd1e5c810697c84f9e56829da4
| 2,080
|
py
|
Python
|
cerebralcortex/test_suite/test_gps_cluster_udf.py
|
MD2Korg/CerebralCortex-2.0
|
8dfcef1ba96fb8653980d1cd3eee7ed3d7f28b60
|
[
"BSD-2-Clause"
] | 1
|
2020-10-19T14:36:59.000Z
|
2020-10-19T14:36:59.000Z
|
cerebralcortex/test_suite/test_gps_cluster_udf.py
|
MD2Korg/CerebralCortex-2.0
|
8dfcef1ba96fb8653980d1cd3eee7ed3d7f28b60
|
[
"BSD-2-Clause"
] | 4
|
2019-05-16T15:41:06.000Z
|
2020-04-07T06:41:39.000Z
|
cerebralcortex/test_suite/test_gps_cluster_udf.py
|
MD2Korg/CerebralCortex-2.0
|
8dfcef1ba96fb8653980d1cd3eee7ed3d7f28b60
|
[
"BSD-2-Clause"
] | 10
|
2019-01-25T20:16:54.000Z
|
2021-05-04T16:53:46.000Z
|
# Copyright (c) 2019, MD2K Center of Excellence
# - Nasir Ali <nasir.ali08@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import unittest
from cerebralcortex.algorithms.gps.clustering import cluster_gps
from cerebralcortex.test_suite.util.data_helper import gen_location_datastream
class TestDataframeUDF(unittest.TestCase):
def test_01_udf_on_gps(self):
"""
Window datastream and perform a gps clustering udf on top of it
"""
os.environ["MPROV_USER"] = "neo4j"
os.environ["MPROV_PASSWORD"] = "habitat1"
ds_gps = gen_location_datastream(user_id="bfb2ca0c-e19c-3956-9db2-5459ccadd40c",
stream_name="gps--org.md2k.phonesensor--phone")
d2 = ds_gps.window()
dd = cluster_gps(d2)
dd.show()
| 42.44898
| 88
| 0.745192
|
6d9fcdf9f78f417ce9771c2072463fec42fc0595
| 4,405
|
py
|
Python
|
ReferentialGym/utils/streamHandler/StreamHandler.py
|
Near32/ReferentialGameGym
|
afe22da2ac20c0d24e93b4dbd1f1ad61374d1a6c
|
[
"MIT"
] | 19
|
2019-11-20T05:50:37.000Z
|
2022-03-15T09:10:25.000Z
|
ReferentialGym/utils/streamHandler/StreamHandler.py
|
Near32/ReferentialGameGym
|
afe22da2ac20c0d24e93b4dbd1f1ad61374d1a6c
|
[
"MIT"
] | null | null | null |
ReferentialGym/utils/streamHandler/StreamHandler.py
|
Near32/ReferentialGameGym
|
afe22da2ac20c0d24e93b4dbd1f1ad61374d1a6c
|
[
"MIT"
] | 5
|
2020-12-20T23:00:23.000Z
|
2022-01-06T08:42:24.000Z
|
from typing import Dict, List
import copy
class StreamHandler(object):
def __init__(self):
self.placeholders = {}
def register(self, placeholder_id:str):
self.update(placeholder_id=placeholder_id, stream_data={})
def reset(self, placeholder_id:str):
self.update(placeholder_id=placeholder_id, stream_data={}, reset=True)
def update(self,
placeholder_id:str,
stream_data:Dict[str,object],
p_ptr:Dict[str,object]=None,
reset=False):
'''
Updates the streams of a given placeholder.
Hierarchically explores the placeholders and their streams.
:param placeholder_id: string formatted with ':' between the name of the streaming module/placeholder and the name of the stream.
:param stream_data: data or dict of str and torch.Tensor/List to update the stream with.
:param p_ptr: None, except when called by self in a recurrent fashion.
'''
p_ptr = self.placeholders
if stream_data is {}: return
previous_placeholder = {}
while ':' in placeholder_id:
ptr, next_placeholder_id = placeholder_id.split(":", 1)
if ptr not in p_ptr: p_ptr[ptr] = {}
placeholder_id=next_placeholder_id
p_ptr=p_ptr[ptr]
if placeholder_id not in p_ptr:
p_ptr[placeholder_id] = {}
# Not possible to copy leaves tensor at the moment with PyTorch...
previous_placeholder = None #copy.deepcopy(p_ptr[placeholder_id])
if isinstance(stream_data, dict) and not(reset):
for k,v in stream_data.items():
p_ptr[placeholder_id][k] = v
else:
p_ptr[placeholder_id] = stream_data
return
def serve(self, pipeline:List[object]):
for module_id in pipeline:
module = self[f"modules:{module_id}:ref"]
module_input_stream_dict = self._serve_module(module)
module_output_stream_dict = module.compute(input_streams_dict=module_input_stream_dict)
for stream_id, stream_data in module_output_stream_dict.items():
if ":" in stream_id:
self.update(stream_id, stream_data)
else:
self.update(f"modules:{module_id}:{stream_id}", stream_data)
def _serve_module(self, module:object):
module_input_stream_ids = module.get_input_stream_ids()
module_input_stream_dict = {}
for k_out, k_in in module_input_stream_ids.items():
module_input_stream_dict[k_out] = self[k_in]
return module_input_stream_dict
def __getitem__(self, stream_id):
'''
Hierarchically explores the streaming modules/placeholders and their streams.
:param stream_id: string formatted with ':' between the name of the streaming module/placeholder and the name of the stream.
'''
if stream_id == "None": return None
stream_id = stream_id.split(":")
p_ptr = self.placeholders
for ptr in stream_id[:-1]:
if isinstance(p_ptr,dict):
if ptr not in p_ptr.keys():
raise AssertionError("The required stream does not exists...")
elif not(hasattr(p_ptr, ptr)):
raise AssertionError("The required stream does not exists...")
if isinstance(p_ptr, dict):
p_ptr = p_ptr[ptr]
else:
p_ptr = getattr(p_ptr, ptr)
# Do we need to perform some operations on the data stream?
operations = []
if '.' in stream_id[-1]:
operations = stream_id[-1].split(".")
stream_id[-1] = operations.pop(0)
if hasattr(p_ptr, stream_id[-1]):
output = getattr(p_ptr, stream_id[-1])
elif stream_id[-1] in p_ptr:
output = p_ptr[stream_id[-1]]
else:
#raise AssertionError("The required stream does not exists...")
output = None
return self._operate(output, operations)
def _operate(self, data:object, operations:List[str]) -> object:
for operation in operations:
op = getattr(data, operation, None)
if op is not None:
data = op()
return data
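# --- Usage sketch (illustrative, not from the original project) ---
# Minimal example of the ':'-separated stream ids and '.'-suffixed operations
# described in the docstrings above; all module and stream names are made up.
if __name__ == "__main__":
    handler = StreamHandler()
    handler.update("modules:encoder:output", {"logits": [0.1, 0.9]})
    print(handler["modules:encoder:output"])        # {'logits': [0.1, 0.9]}
    print(handler["modules:encoder:output.keys"])   # dict_keys(['logits'])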
| 37.974138
| 138
| 0.598638
|
611a0d163f8ca7a02d7de820f9d406d9b14c0f74
| 1,035
|
py
|
Python
|
Fluids/PointRendererFluxSimple.py
|
samgeen/Hegelian
|
5fc9b939f2e40119f18c0f6c8d314b33e27442c9
|
[
"MIT"
] | null | null | null |
Fluids/PointRendererFluxSimple.py
|
samgeen/Hegelian
|
5fc9b939f2e40119f18c0f6c8d314b33e27442c9
|
[
"MIT"
] | null | null | null |
Fluids/PointRendererFluxSimple.py
|
samgeen/Hegelian
|
5fc9b939f2e40119f18c0f6c8d314b33e27442c9
|
[
"MIT"
] | null | null | null |
'''
Created on Nov 8, 2012
@author: samgeen
'''
from OpenGL.GL import *
from PointRenderer import PointRenderer
class PointRendererSimple(PointRenderer):
'''
Render the flux from
TODO: DECOUPLE FLUX CALCULATIONS FROM POINT RENDERER
'''
def __init__(self):
'''
Constructor
'''
self._points = None
self._weight = 1.0
self._displayList = 0
self._name = "PointRenderSimple"
def Name(self):
return self._name
def Weight(self, weight):
self._weight = weight
def Build(self, points,dummy=None):
self._points = points
self._displayList = glGenLists(1)
glNewList(self._displayList,GL_COMPILE)
glBegin(GL_POINTS)
for it in self._points:
glVertex3fv(it)
glEnd()
glEndList()
def Draw(self):
glColor4f(1.0,1.0,1.0,0.4*self._weight)
#print self.__displayList
glCallList(self._displayList)
| 22.021277
| 56
| 0.575845
|
3e060222d2bf17bf38e5b2e8c376ab22ee4c915a
| 42,687
|
py
|
Python
|
test/functional/test_framework/test_framework.py
|
gorzek/jokecoin
|
c3edbbc9b5bb26718e8cb63eded9e05c7852df1b
|
[
"MIT"
] | null | null | null |
test/functional/test_framework/test_framework.py
|
gorzek/jokecoin
|
c3edbbc9b5bb26718e8cb63eded9e05c7852df1b
|
[
"MIT"
] | null | null | null |
test/functional/test_framework/test_framework.py
|
gorzek/jokecoin
|
c3edbbc9b5bb26718e8cb63eded9e05c7852df1b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Copyright (c) 2014-2020 The Dash Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Base class for RPC testing."""
import copy
from collections import deque
from enum import Enum
import logging
import optparse
import os
import pdb
import shutil
import sys
import tempfile
import time
import traceback
from concurrent.futures import ThreadPoolExecutor
from .authproxy import JSONRPCException
from . import coverage
from .test_node import TestNode
from .util import (
PortSeed,
MAX_NODES,
assert_equal,
check_json_precision,
connect_nodes_bi,
connect_nodes,
copy_datadir,
disconnect_nodes,
force_finish_mnsync,
get_datadir_path,
initialize_datadir,
p2p_port,
set_node_times,
satoshi_round,
sync_blocks,
sync_mempools,
wait_until,
)
class TestStatus(Enum):
PASSED = 1
FAILED = 2
SKIPPED = 3
TEST_EXIT_PASSED = 0
TEST_EXIT_FAILED = 1
TEST_EXIT_SKIPPED = 77
GENESISTIME = 1417713337
class BitcoinTestFramework():
"""Base class for a bitcoin test script.
Individual bitcoin test scripts should subclass this class and override the set_test_params() and run_test() methods.
Individual tests can also override the following methods to customize the test setup:
- add_options()
- setup_chain()
- setup_network()
- setup_nodes()
The __init__() and main() methods should not be overridden.
This class also contains various public and private helper methods."""
def __init__(self):
"""Sets test framework defaults. Do not override this method. Instead, override the set_test_params() method"""
self.setup_clean_chain = False
self.nodes = []
self.mocktime = 0
self.supports_cli = False
self.extra_args_from_options = []
self.set_test_params()
assert hasattr(self, "num_nodes"), "Test must set self.num_nodes in set_test_params()"
def main(self):
"""Main function. This should not be overridden by the subclass test scripts."""
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true",
help="Leave jokecoinds and test.* datadir on exit or error")
parser.add_option("--noshutdown", dest="noshutdown", default=False, action="store_true",
help="Don't stop jokecoinds after the test execution")
parser.add_option("--srcdir", dest="srcdir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../../../src"),
help="Source directory containing jokecoind/jokecoin-cli (default: %default)")
parser.add_option("--cachedir", dest="cachedir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../../cache"),
help="Directory for caching pregenerated datadirs")
parser.add_option("--tmpdir", dest="tmpdir", help="Root directory for datadirs")
parser.add_option("-l", "--loglevel", dest="loglevel", default="INFO",
help="log events at this level and higher to the console. Can be set to DEBUG, INFO, WARNING, ERROR or CRITICAL. Passing --loglevel DEBUG will output all logs to console. Note that logs at all levels are always written to the test_framework.log file in the temporary test directory.")
parser.add_option("--tracerpc", dest="trace_rpc", default=False, action="store_true",
help="Print out all RPC calls as they are made")
parser.add_option("--portseed", dest="port_seed", default=os.getpid(), type='int',
help="The seed to use for assigning port numbers (default: current process id)")
parser.add_option("--coveragedir", dest="coveragedir",
help="Write tested RPC commands into this directory")
parser.add_option("--configfile", dest="configfile",
help="Location of the test framework config file")
parser.add_option("--pdbonfailure", dest="pdbonfailure", default=False, action="store_true",
help="Attach a python debugger if test fails")
parser.add_option("--usecli", dest="usecli", default=False, action="store_true",
help="use jokecoin-cli instead of RPC for all commands")
parser.add_option("--jokecoind-arg", dest="jokecoind_extra_args", default=[], type='string', action='append',
help="Pass extra args to all jokecoind instances")
self.add_options(parser)
(self.options, self.args) = parser.parse_args()
PortSeed.n = self.options.port_seed
os.environ['PATH'] = self.options.srcdir + ":" + self.options.srcdir + "/qt:" + os.environ['PATH']
check_json_precision()
self.options.cachedir = os.path.abspath(self.options.cachedir)
self.extra_args_from_options = self.options.jokecoind_extra_args
# Set up temp directory and start logging
if self.options.tmpdir:
self.options.tmpdir = os.path.abspath(self.options.tmpdir)
os.makedirs(self.options.tmpdir, exist_ok=False)
else:
self.options.tmpdir = tempfile.mkdtemp(prefix="test")
self._start_logging()
success = TestStatus.FAILED
try:
if self.options.usecli and not self.supports_cli:
raise SkipTest("--usecli specified but test does not support using CLI")
self.setup_chain()
self.setup_network()
self.run_test()
success = TestStatus.PASSED
except JSONRPCException as e:
self.log.exception("JSONRPC error")
except SkipTest as e:
self.log.warning("Test Skipped: %s" % e.message)
success = TestStatus.SKIPPED
except AssertionError as e:
self.log.exception("Assertion failed")
except KeyError as e:
self.log.exception("Key error")
except Exception as e:
self.log.exception("Unexpected exception caught during testing")
except KeyboardInterrupt as e:
self.log.warning("Exiting after keyboard interrupt")
if success == TestStatus.FAILED and self.options.pdbonfailure:
print("Testcase failed. Attaching python debugger. Enter ? for help")
pdb.set_trace()
if not self.options.noshutdown:
self.log.info("Stopping nodes")
try:
if self.nodes:
self.stop_nodes()
except BaseException as e:
success = TestStatus.FAILED
self.log.exception("Unexpected exception caught during shutdown")
else:
for node in self.nodes:
node.cleanup_on_exit = False
self.log.info("Note: jokecoinds were not stopped and may still be running")
if not self.options.nocleanup and not self.options.noshutdown and success != TestStatus.FAILED:
self.log.info("Cleaning up")
shutil.rmtree(self.options.tmpdir)
else:
self.log.warning("Not cleaning up dir %s" % self.options.tmpdir)
if os.getenv("PYTHON_DEBUG", ""):
# Dump the end of the debug logs, to aid in debugging rare
# travis failures.
import glob
filenames = [self.options.tmpdir + "/test_framework.log"]
filenames += glob.glob(self.options.tmpdir + "/node*/regtest/debug.log")
MAX_LINES_TO_PRINT = 1000
for fn in filenames:
try:
with open(fn, 'r') as f:
print("From", fn, ":")
print("".join(deque(f, MAX_LINES_TO_PRINT)))
except OSError:
print("Opening file %s failed." % fn)
traceback.print_exc()
if success == TestStatus.PASSED:
self.log.info("Tests successful")
sys.exit(TEST_EXIT_PASSED)
elif success == TestStatus.SKIPPED:
self.log.info("Test skipped")
sys.exit(TEST_EXIT_SKIPPED)
else:
self.log.error("Test failed. Test logging available at %s/test_framework.log", self.options.tmpdir)
logging.shutdown()
sys.exit(TEST_EXIT_FAILED)
# Methods to override in subclass test scripts.
def set_test_params(self):
"""Tests must this method to change default values for number of nodes, topology, etc"""
raise NotImplementedError
def add_options(self, parser):
"""Override this method to add command-line options to the test"""
pass
def setup_chain(self):
"""Override this method to customize blockchain setup"""
self.log.info("Initializing test directory " + self.options.tmpdir)
if self.setup_clean_chain:
self._initialize_chain_clean()
self.set_genesis_mocktime()
else:
self._initialize_chain()
self.set_cache_mocktime()
def setup_network(self):
"""Override this method to customize test network topology"""
self.setup_nodes()
# Connect the nodes as a "chain". This allows us
# to split the network between nodes 1 and 2 to get
# two halves that can work on competing chains.
for i in range(self.num_nodes - 1):
connect_nodes_bi(self.nodes, i, i + 1)
self.sync_all()
def setup_nodes(self):
"""Override this method to customize test node setup"""
extra_args = None
stderr = None
if hasattr(self, "extra_args"):
extra_args = self.extra_args
if hasattr(self, "stderr"):
stderr = self.stderr
self.add_nodes(self.num_nodes, extra_args, stderr=stderr)
self.start_nodes()
def run_test(self):
"""Tests must override this method to define test logic"""
raise NotImplementedError
# Public helper methods. These can be accessed by the subclass test scripts.
def add_nodes(self, num_nodes, extra_args=None, rpchost=None, timewait=None, binary=None, stderr=None):
"""Instantiate TestNode objects"""
if extra_args is None:
extra_args = [[]] * num_nodes
if binary is None:
binary = [None] * num_nodes
assert_equal(len(extra_args), num_nodes)
assert_equal(len(binary), num_nodes)
old_num_nodes = len(self.nodes)
for i in range(num_nodes):
self.nodes.append(TestNode(old_num_nodes + i, self.options.tmpdir, extra_args[i], self.extra_args_from_options, rpchost, timewait=timewait, binary=binary[i], stderr=stderr, mocktime=self.mocktime, coverage_dir=self.options.coveragedir, use_cli=self.options.usecli))
def start_node(self, i, *args, **kwargs):
"""Start a jokecoind"""
node = self.nodes[i]
node.start(*args, **kwargs)
node.wait_for_rpc_connection()
if self.options.coveragedir is not None:
coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)
def start_nodes(self, extra_args=None, stderr=None, *args, **kwargs):
"""Start multiple jokecoinds"""
if extra_args is None:
extra_args = [None] * self.num_nodes
assert_equal(len(extra_args), self.num_nodes)
try:
for i, node in enumerate(self.nodes):
node.start(extra_args[i], stderr, *args, **kwargs)
for node in self.nodes:
node.wait_for_rpc_connection()
except:
# If one node failed to start, stop the others
self.stop_nodes()
raise
if self.options.coveragedir is not None:
for node in self.nodes:
coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)
def stop_node(self, i, wait=0):
"""Stop a jokecoind test node"""
self.nodes[i].stop_node(wait=wait)
self.nodes[i].wait_until_stopped()
def stop_nodes(self, wait=0):
"""Stop multiple jokecoind test nodes"""
for node in self.nodes:
# Issue RPC to stop nodes
node.stop_node(wait=wait)
for node in self.nodes:
# Wait for nodes to stop
node.wait_until_stopped()
def restart_node(self, i, extra_args=None):
"""Stop and start a test node"""
self.stop_node(i)
self.start_node(i, extra_args)
def assert_start_raises_init_error(self, i, extra_args=None, expected_msg=None, *args, **kwargs):
with tempfile.SpooledTemporaryFile(max_size=2**16) as log_stderr:
try:
self.start_node(i, extra_args, stderr=log_stderr, *args, **kwargs)
self.stop_node(i)
except Exception as e:
assert 'jokecoind exited' in str(e) # node must have shutdown
self.nodes[i].running = False
self.nodes[i].process = None
if expected_msg is not None:
log_stderr.seek(0)
stderr = log_stderr.read().decode('utf-8')
if expected_msg not in stderr:
raise AssertionError("Expected error \"" + expected_msg + "\" not found in:\n" + stderr)
else:
if expected_msg is None:
assert_msg = "jokecoind should have exited with an error"
else:
assert_msg = "jokecoind should have exited with expected error " + expected_msg
raise AssertionError(assert_msg)
def wait_for_node_exit(self, i, timeout):
self.nodes[i].process.wait(timeout)
def split_network(self):
"""
Split the network of four nodes into nodes 0/1 and 2/3.
"""
disconnect_nodes(self.nodes[1], 2)
disconnect_nodes(self.nodes[2], 1)
self.sync_all(self.nodes[:2])
self.sync_all(self.nodes[2:])
def join_network(self):
"""
Join the (previously split) network halves together.
"""
connect_nodes_bi(self.nodes, 1, 2)
self.sync_all()
def sync_blocks(self, nodes=None, **kwargs):
sync_blocks(nodes or self.nodes, **kwargs)
def sync_mempools(self, nodes=None, **kwargs):
if self.mocktime != 0:
if 'wait' not in kwargs:
kwargs['wait'] = 0.1
if 'wait_func' not in kwargs:
kwargs['wait_func'] = lambda: self.bump_mocktime(3, nodes=nodes)
sync_mempools(nodes or self.nodes, **kwargs)
def sync_all(self, nodes=None, **kwargs):
self.sync_blocks(nodes, **kwargs)
self.sync_mempools(nodes, **kwargs)
def disable_mocktime(self):
self.mocktime = 0
for node in self.nodes:
node.mocktime = 0
def bump_mocktime(self, t, update_nodes=True, nodes=None):
self.mocktime += t
if update_nodes:
set_node_times(nodes or self.nodes, self.mocktime)
def set_cache_mocktime(self):
# For backward compatibility of the python scripts
# with previous versions of the cache, set MOCKTIME
# to regtest genesis time + (201 * 156)
self.mocktime = GENESISTIME + (201 * 156)
for node in self.nodes:
node.mocktime = self.mocktime
def set_genesis_mocktime(self):
self.mocktime = GENESISTIME
for node in self.nodes:
node.mocktime = self.mocktime
# Private helper methods. These should not be accessed by the subclass test scripts.
def _start_logging(self):
# Add logger and logging handlers
self.log = logging.getLogger('TestFramework')
self.log.setLevel(logging.DEBUG)
# Create file handler to log all messages
fh = logging.FileHandler(self.options.tmpdir + '/test_framework.log')
fh.setLevel(logging.DEBUG)
# Create console handler to log messages to stderr. By default this logs only error messages, but can be configured with --loglevel.
ch = logging.StreamHandler(sys.stdout)
# User can provide log level as a number or string (eg DEBUG). loglevel was caught as a string, so try to convert it to an int
ll = int(self.options.loglevel) if self.options.loglevel.isdigit() else self.options.loglevel.upper()
ch.setLevel(ll)
# Format logs the same as jokecoind's debug.log with microprecision (so log files can be concatenated and sorted)
formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)03d000 %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
formatter.converter = time.gmtime
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
self.log.addHandler(fh)
self.log.addHandler(ch)
if self.options.trace_rpc:
rpc_logger = logging.getLogger("BitcoinRPC")
rpc_logger.setLevel(logging.DEBUG)
rpc_handler = logging.StreamHandler(sys.stdout)
rpc_handler.setLevel(logging.DEBUG)
rpc_logger.addHandler(rpc_handler)
def _initialize_chain(self, extra_args=None, stderr=None):
"""Initialize a pre-mined blockchain for use by the test.
Create a cache of a 200-block-long chain (with wallet) for MAX_NODES
Afterward, create num_nodes copies from the cache."""
assert self.num_nodes <= MAX_NODES
create_cache = False
for i in range(MAX_NODES):
if not os.path.isdir(get_datadir_path(self.options.cachedir, i)):
create_cache = True
break
if create_cache:
self.log.debug("Creating data directories from cached datadir")
# find and delete old cache directories if any exist
for i in range(MAX_NODES):
if os.path.isdir(get_datadir_path(self.options.cachedir, i)):
shutil.rmtree(get_datadir_path(self.options.cachedir, i))
# Create cache directories, run jokecoinds:
self.set_genesis_mocktime()
for i in range(MAX_NODES):
datadir = initialize_datadir(self.options.cachedir, i)
args = [os.getenv("JOKECOIND", "jokecoind"), "-server", "-keypool=1", "-datadir=" + datadir, "-discover=0", "-mocktime="+str(GENESISTIME)]
if i > 0:
args.append("-connect=127.0.0.1:" + str(p2p_port(0)))
if extra_args is not None:
args.extend(extra_args)
self.nodes.append(TestNode(i, self.options.cachedir, extra_args=[], extra_args_from_options=self.extra_args_from_options, rpchost=None, timewait=None, binary=None, stderr=stderr, mocktime=self.mocktime, coverage_dir=None))
self.nodes[i].args = args
self.start_node(i)
# Wait for RPC connections to be ready
for node in self.nodes:
node.wait_for_rpc_connection()
# Create a 200-block-long chain; each of the 4 first nodes
# gets 25 mature blocks and 25 immature.
# Note: To preserve compatibility with older versions of
# initialize_chain, only 4 nodes will generate coins.
#
# blocks are created with timestamps 10 minutes apart
# starting from 2010 minutes in the past
block_time = GENESISTIME
for i in range(2):
for peer in range(4):
for j in range(25):
set_node_times(self.nodes, block_time)
self.nodes[peer].generate(1)
block_time += 156
# Must sync before next peer starts generating blocks
self.sync_blocks()
# Shut them down, and clean up cache directories:
self.stop_nodes()
self.nodes = []
self.disable_mocktime()
def cache_path(n, *paths):
return os.path.join(get_datadir_path(self.options.cachedir, n), "regtest", *paths)
for i in range(MAX_NODES):
for entry in os.listdir(cache_path(i)):
if entry not in ['wallets', 'chainstate', 'blocks', 'evodb', 'llmq', 'backups']:
os.remove(cache_path(i, entry))
for i in range(self.num_nodes):
from_dir = get_datadir_path(self.options.cachedir, i)
to_dir = get_datadir_path(self.options.tmpdir, i)
shutil.copytree(from_dir, to_dir)
initialize_datadir(self.options.tmpdir, i) # Overwrite port/rpcport in jokecoin.conf
def _initialize_chain_clean(self):
"""Initialize empty blockchain for use by the test.
Create an empty blockchain and num_nodes wallets.
Useful if a test case wants complete control over initialization."""
for i in range(self.num_nodes):
initialize_datadir(self.options.tmpdir, i)
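# --- Illustrative subclass (sketch only, not part of the framework) ---
# Minimal example of the override pattern described in the BitcoinTestFramework
# docstring; a real functional test would live in its own file and end with
# "ExampleSmokeTest().main()". Node count and assertions here are arbitrary.
class ExampleSmokeTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True

    def run_test(self):
        self.nodes[0].generate(10)
        self.sync_all()
        assert_equal(self.nodes[0].getblockcount(), self.nodes[1].getblockcount())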
MASTERNODE_COLLATERAL = 1000
class MasternodeInfo:
def __init__(self, proTxHash, ownerAddr, votingAddr, pubKeyOperator, keyOperator, collateral_address, collateral_txid, collateral_vout):
self.proTxHash = proTxHash
self.ownerAddr = ownerAddr
self.votingAddr = votingAddr
self.pubKeyOperator = pubKeyOperator
self.keyOperator = keyOperator
self.collateral_address = collateral_address
self.collateral_txid = collateral_txid
self.collateral_vout = collateral_vout
class DashTestFramework(BitcoinTestFramework):
def set_jokecoin_test_params(self, num_nodes, masterodes_count, extra_args=None, fast_dip3_enforcement=False):
self.mn_count = masterodes_count
self.num_nodes = num_nodes
self.mninfo = []
self.setup_clean_chain = True
self.is_network_split = False
# additional args
if extra_args is None:
extra_args = [[]] * num_nodes
assert_equal(len(extra_args), num_nodes)
self.extra_args = [copy.deepcopy(a) for a in extra_args]
self.extra_args[0] += ["-sporkkey=cP4EKFyJsHT39LDqgdcB43Y3YXjNyjb5Fuas1GQSeAtjnZWmZEQK"]
self.fast_dip3_enforcement = fast_dip3_enforcement
if fast_dip3_enforcement:
for i in range(0, num_nodes):
self.extra_args[i].append("-dip3params=30:50")
# LLMQ default test params (no need to pass -llmqtestparams)
self.llmq_size = 3
self.llmq_threshold = 2
def set_jokecoin_dip8_activation(self, activate_after_block):
window = int((activate_after_block + 2) / 3)
threshold = int((window + 1) / 2)
for i in range(0, self.num_nodes):
self.extra_args[i].append("-vbparams=dip0008:0:999999999999:%d:%d" % (window, threshold))
def set_jokecoin_llmq_test_params(self, llmq_size, llmq_threshold):
self.llmq_size = llmq_size
self.llmq_threshold = llmq_threshold
for i in range(0, self.num_nodes):
self.extra_args[i].append("-llmqtestparams=%d:%d" % (self.llmq_size, self.llmq_threshold))
def create_simple_node(self):
idx = len(self.nodes)
self.add_nodes(1, extra_args=[self.extra_args[idx]])
self.start_node(idx)
for i in range(0, idx):
connect_nodes(self.nodes[i], idx)
def prepare_masternodes(self):
self.log.info("Preparing %d masternodes" % self.mn_count)
for idx in range(0, self.mn_count):
self.prepare_masternode(idx)
def prepare_masternode(self, idx):
bls = self.nodes[0].bls('generate')
address = self.nodes[0].getnewaddress()
txid = self.nodes[0].sendtoaddress(address, MASTERNODE_COLLATERAL)
txraw = self.nodes[0].getrawtransaction(txid, True)
collateral_vout = 0
for vout_idx in range(0, len(txraw["vout"])):
vout = txraw["vout"][vout_idx]
if vout["value"] == MASTERNODE_COLLATERAL:
collateral_vout = vout_idx
self.nodes[0].lockunspent(False, [{'txid': txid, 'vout': collateral_vout}])
# send to same address to reserve some funds for fees
self.nodes[0].sendtoaddress(address, 0.001)
ownerAddr = self.nodes[0].getnewaddress()
votingAddr = self.nodes[0].getnewaddress()
rewardsAddr = self.nodes[0].getnewaddress()
port = p2p_port(len(self.nodes) + idx)
if (idx % 2) == 0:
self.nodes[0].lockunspent(True, [{'txid': txid, 'vout': collateral_vout}])
proTxHash = self.nodes[0].protx('register_fund', address, '127.0.0.1:%d' % port, ownerAddr, bls['public'], votingAddr, 0, rewardsAddr, address)
else:
self.nodes[0].generate(1)
proTxHash = self.nodes[0].protx('register', txid, collateral_vout, '127.0.0.1:%d' % port, ownerAddr, bls['public'], votingAddr, 0, rewardsAddr, address)
self.nodes[0].generate(1)
self.mninfo.append(MasternodeInfo(proTxHash, ownerAddr, votingAddr, bls['public'], bls['secret'], address, txid, collateral_vout))
self.sync_all()
self.log.info("Prepared masternode %d: collateral_txid=%s, collateral_vout=%d, protxHash=%s" % (idx, txid, collateral_vout, proTxHash))
def remove_mastermode(self, idx):
mn = self.mninfo[idx]
rawtx = self.nodes[0].createrawtransaction([{"txid": mn.collateral_txid, "vout": mn.collateral_vout}], {self.nodes[0].getnewaddress(): 999.9999})
rawtx = self.nodes[0].signrawtransaction(rawtx)
self.nodes[0].sendrawtransaction(rawtx["hex"])
self.nodes[0].generate(1)
self.sync_all()
self.mninfo.remove(mn)
self.log.info("Removed masternode %d", idx)
def prepare_datadirs(self):
# stop faucet node so that we can copy the datadir
self.stop_node(0)
start_idx = len(self.nodes)
for idx in range(0, self.mn_count):
copy_datadir(0, idx + start_idx, self.options.tmpdir)
# restart faucet node
self.start_node(0)
def start_masternodes(self):
self.log.info("Starting %d masternodes", self.mn_count)
start_idx = len(self.nodes)
self.add_nodes(self.mn_count)
executor = ThreadPoolExecutor(max_workers=20)
def do_connect(idx):
# Connect to the control node only, masternodes should take care of intra-quorum connections themselves
connect_nodes(self.mninfo[idx].node, 0)
jobs = []
# start up nodes in parallel
for idx in range(0, self.mn_count):
self.mninfo[idx].nodeIdx = idx + start_idx
jobs.append(executor.submit(self.start_masternode, self.mninfo[idx]))
# wait for all nodes to start up
for job in jobs:
job.result()
jobs.clear()
# connect nodes in parallel
for idx in range(0, self.mn_count):
jobs.append(executor.submit(do_connect, idx))
# wait for all nodes to connect
for job in jobs:
job.result()
jobs.clear()
executor.shutdown()
def start_masternode(self, mninfo, extra_args=None):
args = ['-masternodeblsprivkey=%s' % mninfo.keyOperator] + self.extra_args[mninfo.nodeIdx]
if extra_args is not None:
args += extra_args
self.start_node(mninfo.nodeIdx, extra_args=args)
mninfo.node = self.nodes[mninfo.nodeIdx]
force_finish_mnsync(mninfo.node)
def setup_network(self):
self.log.info("Creating and starting controller node")
self.add_nodes(1, extra_args=[self.extra_args[0]])
self.start_node(0)
required_balance = MASTERNODE_COLLATERAL * self.mn_count + 1
self.log.info("Generating %d coins" % required_balance)
while self.nodes[0].getbalance() < required_balance:
self.bump_mocktime(1)
self.nodes[0].generate(10)
num_simple_nodes = self.num_nodes - self.mn_count - 1
self.log.info("Creating and starting %s simple nodes", num_simple_nodes)
for i in range(0, num_simple_nodes):
self.create_simple_node()
self.log.info("Activating DIP3")
if not self.fast_dip3_enforcement:
while self.nodes[0].getblockcount() < 500:
self.nodes[0].generate(10)
self.sync_all()
# create masternodes
self.prepare_masternodes()
self.prepare_datadirs()
self.start_masternodes()
        # non-masternode nodes were disconnected from the control node during prepare_datadirs,
        # so let's reconnect them to make sure they receive updates
for i in range(0, num_simple_nodes):
connect_nodes(self.nodes[i+1], 0)
self.bump_mocktime(1)
self.nodes[0].generate(1)
# sync nodes
self.sync_all()
self.bump_mocktime(1)
mn_info = self.nodes[0].masternodelist("status")
assert (len(mn_info) == self.mn_count)
for status in mn_info.values():
assert (status == 'ENABLED')
def create_raw_tx(self, node_from, node_to, amount, min_inputs, max_inputs):
assert (min_inputs <= max_inputs)
# fill inputs
inputs = []
balances = node_from.listunspent()
in_amount = 0.0
last_amount = 0.0
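        # Greedy input selection: always take at least min_inputs, keep adding UTXOs until
        # the target amount is covered, and once max_inputs is reached swap the most
        # recently added input for the current one instead of appending.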
for tx in balances:
if len(inputs) < min_inputs:
input = {}
input["txid"] = tx['txid']
input['vout'] = tx['vout']
in_amount += float(tx['amount'])
inputs.append(input)
elif in_amount > amount:
break
elif len(inputs) < max_inputs:
input = {}
input["txid"] = tx['txid']
input['vout'] = tx['vout']
in_amount += float(tx['amount'])
inputs.append(input)
else:
input = {}
input["txid"] = tx['txid']
input['vout'] = tx['vout']
in_amount -= last_amount
in_amount += float(tx['amount'])
inputs[-1] = input
last_amount = float(tx['amount'])
assert (len(inputs) >= min_inputs)
assert (len(inputs) <= max_inputs)
assert (in_amount >= amount)
# fill outputs
receiver_address = node_to.getnewaddress()
change_address = node_from.getnewaddress()
fee = 0.001
outputs = {}
outputs[receiver_address] = satoshi_round(amount)
outputs[change_address] = satoshi_round(in_amount - amount - fee)
rawtx = node_from.createrawtransaction(inputs, outputs)
ret = node_from.signrawtransaction(rawtx)
decoded = node_from.decoderawtransaction(ret['hex'])
ret = {**decoded, **ret}
return ret
def wait_for_tx(self, txid, node, expected=True, timeout=15):
def check_tx():
try:
return node.getrawtransaction(txid)
except:
return False
if wait_until(check_tx, timeout=timeout, sleep=0.5, do_assert=expected) and not expected:
raise AssertionError("waiting unexpectedly succeeded")
def wait_for_instantlock(self, txid, node, expected=True, timeout=15):
def check_instantlock():
try:
return node.getrawtransaction(txid, True)["instantlock"]
except:
return False
if wait_until(check_instantlock, timeout=timeout, sleep=0.5, do_assert=expected) and not expected:
raise AssertionError("waiting unexpectedly succeeded")
def wait_for_chainlocked_block(self, node, block_hash, expected=True, timeout=15):
def check_chainlocked_block():
try:
block = node.getblock(block_hash)
return block["confirmations"] > 0 and block["chainlock"]
except:
return False
if wait_until(check_chainlocked_block, timeout=timeout, sleep=0.1, do_assert=expected) and not expected:
raise AssertionError("waiting unexpectedly succeeded")
def wait_for_chainlocked_block_all_nodes(self, block_hash, timeout=15):
for node in self.nodes:
self.wait_for_chainlocked_block(node, block_hash, timeout=timeout)
def wait_for_best_chainlock(self, node, block_hash, timeout=15):
wait_until(lambda: node.getbestchainlock()["blockhash"] == block_hash, timeout=timeout, sleep=0.1)
def wait_for_sporks_same(self, timeout=30):
def check_sporks_same():
sporks = self.nodes[0].spork('show')
return all(node.spork('show') == sporks for node in self.nodes[1:])
wait_until(check_sporks_same, timeout=timeout, sleep=0.5)
def wait_for_quorum_connections(self, expected_connections, nodes, timeout = 60, wait_proc=None):
def check_quorum_connections():
all_ok = True
for node in nodes:
s = node.quorum("dkgstatus")
if s["session"] == {}:
continue
if "quorumConnections" not in s:
all_ok = False
break
s = s["quorumConnections"]
if "llmq_test" not in s:
all_ok = False
break
cnt = 0
for c in s["llmq_test"]:
if c["connected"]:
cnt += 1
if cnt < expected_connections:
all_ok = False
break
if not all_ok and wait_proc is not None:
wait_proc()
return all_ok
wait_until(check_quorum_connections, timeout=timeout, sleep=1)
def wait_for_masternode_probes(self, mninfos, timeout = 30, wait_proc=None):
def check_probes():
def ret():
if wait_proc is not None:
wait_proc()
return False
for mn in mninfos:
s = mn.node.quorum('dkgstatus')
if s["session"] == {}:
continue
if "quorumConnections" not in s:
return ret()
s = s["quorumConnections"]
if "llmq_test" not in s:
return ret()
for c in s["llmq_test"]:
if c["proTxHash"] == mn.proTxHash:
continue
if not c["outbound"]:
mn2 = mn.node.protx('info', c["proTxHash"])
if [m for m in mninfos if c["proTxHash"] == m.proTxHash]:
# MN is expected to be online and functioning, so let's verify that the last successful
# probe is not too old. Probes are retried after 50 minutes, while DKGs consider a probe
# as failed after 60 minutes
if mn2['metaInfo']['lastOutboundSuccessElapsed'] > 55 * 60:
return ret()
else:
# MN is expected to be offline, so let's only check that the last probe is not too long ago
if mn2['metaInfo']['lastOutboundAttemptElapsed'] > 55 * 60 and mn2['metaInfo']['lastOutboundSuccessElapsed'] > 55 * 60:
return ret()
return True
wait_until(check_probes, timeout=timeout, sleep=1)
def wait_for_quorum_phase(self, quorum_hash, phase, expected_member_count, check_received_messages, check_received_messages_count, mninfos, timeout=30, sleep=0.1):
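        # Poll each masternode's dkgstatus until the expected number of members report the
        # given phase for quorum_hash, optionally also requiring a minimum count of received
        # messages (contributions, complaints, justifications or premature commitments).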
def check_dkg_session():
all_ok = True
member_count = 0
for mn in mninfos:
s = mn.node.quorum("dkgstatus")["session"]
if "llmq_test" not in s:
continue
member_count += 1
s = s["llmq_test"]
if s["quorumHash"] != quorum_hash:
all_ok = False
break
if "phase" not in s:
all_ok = False
break
if s["phase"] != phase:
all_ok = False
break
if check_received_messages is not None:
if s[check_received_messages] < check_received_messages_count:
all_ok = False
break
if all_ok and member_count != expected_member_count:
return False
return all_ok
wait_until(check_dkg_session, timeout=timeout, sleep=sleep)
def wait_for_quorum_commitment(self, quorum_hash, nodes, timeout = 15):
        def check_dkg_commitments():
all_ok = True
for node in nodes:
s = node.quorum("dkgstatus")
if "minableCommitments" not in s:
all_ok = False
break
s = s["minableCommitments"]
if "llmq_test" not in s:
all_ok = False
break
s = s["llmq_test"]
if s["quorumHash"] != quorum_hash:
all_ok = False
break
return all_ok
        wait_until(check_dkg_commitments, timeout=timeout, sleep=0.1)
def mine_quorum(self, expected_members=None, expected_connections=2, expected_contributions=None, expected_complaints=0, expected_justifications=0, expected_commitments=None, mninfos=None):
if expected_members is None:
expected_members = self.llmq_size
if expected_contributions is None:
expected_contributions = self.llmq_size
if expected_commitments is None:
expected_commitments = self.llmq_size
if mninfos is None:
mninfos = self.mninfo
self.log.info("Mining quorum: expected_members=%d, expected_connections=%d, expected_contributions=%d, expected_complaints=%d, expected_justifications=%d, "
"expected_commitments=%d" % (expected_members, expected_connections, expected_contributions, expected_complaints,
expected_justifications, expected_commitments))
nodes = [self.nodes[0]] + [mn.node for mn in mninfos]
quorums = self.nodes[0].quorum("list")
# move forward to next DKG
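        # (on regtest the llmq_test DKG interval is 24 blocks, so mine up to the next
        # interval boundary before stepping through the six DKG phases below)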
skip_count = 24 - (self.nodes[0].getblockcount() % 24)
if skip_count != 0:
self.bump_mocktime(1, nodes=nodes)
self.nodes[0].generate(skip_count)
sync_blocks(nodes)
q = self.nodes[0].getbestblockhash()
self.log.info("Waiting for phase 1 (init)")
self.wait_for_quorum_phase(q, 1, expected_members, None, 0, mninfos)
self.wait_for_quorum_connections(expected_connections, nodes, wait_proc=lambda: self.bump_mocktime(1, nodes=nodes))
if self.nodes[0].spork('show')['SPORK_21_QUORUM_ALL_CONNECTED'] == 0:
self.wait_for_masternode_probes(mninfos, wait_proc=lambda: self.bump_mocktime(1, nodes=nodes))
self.bump_mocktime(1, nodes=nodes)
self.nodes[0].generate(2)
sync_blocks(nodes)
self.log.info("Waiting for phase 2 (contribute)")
self.wait_for_quorum_phase(q, 2, expected_members, "receivedContributions", expected_contributions, mninfos)
self.bump_mocktime(1, nodes=nodes)
self.nodes[0].generate(2)
sync_blocks(nodes)
self.log.info("Waiting for phase 3 (complain)")
self.wait_for_quorum_phase(q, 3, expected_members, "receivedComplaints", expected_complaints, mninfos)
self.bump_mocktime(1, nodes=nodes)
self.nodes[0].generate(2)
sync_blocks(nodes)
self.log.info("Waiting for phase 4 (justify)")
self.wait_for_quorum_phase(q, 4, expected_members, "receivedJustifications", expected_justifications, mninfos)
self.bump_mocktime(1, nodes=nodes)
self.nodes[0].generate(2)
sync_blocks(nodes)
self.log.info("Waiting for phase 5 (commit)")
self.wait_for_quorum_phase(q, 5, expected_members, "receivedPrematureCommitments", expected_commitments, mninfos)
self.bump_mocktime(1, nodes=nodes)
self.nodes[0].generate(2)
sync_blocks(nodes)
self.log.info("Waiting for phase 6 (mining)")
self.wait_for_quorum_phase(q, 6, expected_members, None, 0, mninfos)
self.log.info("Waiting final commitment")
self.wait_for_quorum_commitment(q, nodes)
self.log.info("Mining final commitment")
self.bump_mocktime(1, nodes=nodes)
self.nodes[0].generate(1)
while quorums == self.nodes[0].quorum("list"):
time.sleep(2)
self.bump_mocktime(1, nodes=nodes)
self.nodes[0].generate(1)
sync_blocks(nodes)
new_quorum = self.nodes[0].quorum("list", 1)["llmq_test"][0]
quorum_info = self.nodes[0].quorum("info", 100, new_quorum)
        # Mine 8 (SIGN_HEIGHT_OFFSET) more blocks to make sure that the new quorum becomes eligible for signing sessions
self.nodes[0].generate(8)
sync_blocks(nodes)
self.log.info("New quorum: height=%d, quorumHash=%s, minedBlock=%s" % (quorum_info["height"], new_quorum, quorum_info["minedBlock"]))
return new_quorum
def get_quorum_masternodes(self, q):
qi = self.nodes[0].quorum('info', 100, q)
result = []
for m in qi['members']:
result.append(self.get_mninfo(m['proTxHash']))
return result
def get_mninfo(self, proTxHash):
for mn in self.mninfo:
if mn.proTxHash == proTxHash:
return mn
return None
def wait_for_mnauth(self, node, count, timeout=10):
def test():
pi = node.getpeerinfo()
c = 0
for p in pi:
if "verified_proregtx_hash" in p and p["verified_proregtx_hash"] != "":
c += 1
return c >= count
wait_until(test, timeout=timeout)
class SkipTest(Exception):
"""This exception is raised to skip a test"""
def __init__(self, message):
self.message = message
| 41.93222 | 310 | 0.606227 |
492d501df38fe7dde4f8419e65c4ae98bd10b9db | 9,061 | py | Python | src/original/mobile_net_features_superpixels.py | virtualgraham/fgraph | d31a6253ea2fbc391d3e8e7c2d570ce83abeef86 | ["Apache-2.0"] | null | null | null | src/original/mobile_net_features_superpixels.py | virtualgraham/fgraph | d31a6253ea2fbc391d3e8e7c2d570ce83abeef86 | ["Apache-2.0"] | null | null | null | src/original/mobile_net_features_superpixels.py | virtualgraham/fgraph | d31a6253ea2fbc391d3e8e7c2d570ce83abeef86 | ["Apache-2.0"] | null | null | null |
from keras.applications import mobilenetv2
import numpy as np
import cv2
import hnswlib
from os import listdir, path
from database_v2 import PatchGraphDatabase
import time
import pymeanshift as pms
patch_size = 96
window_size = 96
channel_count = 3
patch_occupancy_threshold = .75
images_directory = "/Users/user/Desktop/household_images"
feature_net = mobilenetv2.MobileNetV2(weights="imagenet", include_top=False, input_shape=(96, 96, 3))
database = PatchGraphDatabase()
desc_index = hnswlib.Index(space='cosine', dim=1280)
desc_index.init_index(max_elements=7000000, ef_construction=500, M=32)
desc_index.set_ef(500)
images = [(e, path.join(images_directory, e)) for e in listdir(images_directory)]
def open_and_prepare_image(image_path):
image = cv2.imread(image_path)
    # shrink the image to one third of its original size (roughly 1088 x 816 here)
image = cv2.resize(image, (image.shape[1]//3, image.shape[0]//3), interpolation=cv2.INTER_CUBIC)
return image
def extract_windows(image, superpixels, window_size=96, interior_w=False, interior_h=False):
n_cells = (image.shape[0] // window_size, image.shape[1] // window_size)
# print('n_cells', n_cells)
if interior_w:
n_cells = (n_cells[0] - 1, n_cells[1])
if interior_h:
n_cells = (n_cells[0], n_cells[1] - 1)
img_shape = (n_cells[0] * window_size, n_cells[1] * window_size)
margins = ((image.shape[0] - img_shape[0])//2, (image.shape[1] - img_shape[1])//2)
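    # Lay a non-overlapping grid of window_size x window_size cells over the image,
    # centred via the margins; the interior_w / interior_h flags drop one row/column of
    # cells, which shifts the grid by roughly half a window between calls with different flags.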
superpixel_windows = np.zeros((n_cells[0] * n_cells[1], window_size, window_size))
image_windows = np.zeros((n_cells[0] * n_cells[1], window_size, window_size, channel_count))
coords = np.zeros((n_cells[0] * n_cells[1], 2))
for i in range(n_cells[0]):
for j in range(n_cells[1]):
superpixel_window = superpixels[(margins[0] + window_size*i):(margins[0] + window_size*(i+1)), (margins[1] + window_size*j):(margins[1] + window_size*(j+1))]
superpixel_windows[i * n_cells[1] + j] = superpixel_window
image_window = image[(margins[0] + window_size*i):(margins[0] + window_size*(i+1)), (margins[1] + window_size*j):(margins[1] + window_size*(j+1))]
image_windows[i * n_cells[1] + j] = image_window
coords[i * n_cells[1] + j] = (margins[0] + window_size*i + window_size//2, margins[1] + window_size*j + window_size//2)
return image_windows, superpixel_windows, coords
def get_patch_descriptors(image_windows):
if image_windows.shape[1] != patch_size or image_windows.shape[2] != patch_size:
resized_image_patches = np.empty((image_windows.shape[0], patch_size, patch_size, image_windows.shape[3]))
for i in range(image_windows.shape[0]):
resized_image_patches[i] = cv2.resize(image_windows[i], (patch_size, patch_size), interpolation=cv2.INTER_CUBIC)
else:
resized_image_patches = image_windows
feats = feature_net.predict(resized_image_patches)
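    # With include_top=False and 96x96 inputs, MobileNetV2 returns a 3x3x1280 feature map
    # per patch; average over the 3x3 spatial positions to get a single 1280-d descriptor.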
reduced_feats = np.zeros((feats.shape[0], feats.shape[3]))
for i in range(feats.shape[0]):
patch_feats = feats[i] # 3x3x1280
tot = np.zeros((patch_feats.shape[2],))
for j in range(patch_feats.shape[0]):
for k in range(patch_feats.shape[1]):
tot = tot + patch_feats[j, k]
avg = tot / (patch_feats.shape[0] * patch_feats.shape[1])
reduced_feats[i] = avg
return reduced_feats
def get_superpixel_connections(superpixels):
connections = set()
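    # Two superpixels are neighbours when any horizontally or vertically adjacent pixels
    # belong to different regions; both directions of each edge are recorded.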
for i in range(superpixels.shape[0]):
for j in range(superpixels.shape[1] - 1):
if superpixels[i, j] == superpixels[i, j+1]:
continue
connections.add((superpixels[i, j], superpixels[i, j+1]))
connections.add((superpixels[i, j+1], superpixels[i, j]))
for j in range(superpixels.shape[1]):
for i in range(superpixels.shape[0] - 1):
if superpixels[i, j] == superpixels[i+1, j]:
continue
connections.add((superpixels[i, j], superpixels[i+1, j]))
connections.add((superpixels[i+1, j], superpixels[i, j]))
return connections
def extract_features_to_db(image_path, image_name, scene_node):
img_data = open_and_prepare_image(image_path)
(segmented_image, superpixels, number_regions) = pms.segment(img_data, spatial_radius=6, range_radius=4.5, min_density=50)
# cv2.imshow('image', segmented_image)
# cv2.waitKey(0)
windows_a, sp_windows_a, coords_a = extract_windows(img_data, superpixels, window_size, False, False)
windows_b, sp_windows_b, coords_b = extract_windows(img_data, superpixels, window_size, False, True)
windows_c, sp_windows_c, coords_c = extract_windows(img_data, superpixels, window_size, True, False)
windows_d, sp_windows_d, coords_d = extract_windows(img_data, superpixels, window_size, True, True)
windows = np.concatenate((windows_a, windows_b, windows_c, windows_d), axis=0)
sp_windows = np.concatenate((sp_windows_a, sp_windows_b, sp_windows_c, sp_windows_d), axis=0)
coords = np.concatenate((coords_a, coords_b, coords_c, coords_d), axis=0)
windows_f = []
coords_f = []
patch_contains_superpixels = []
    # filter out windows where a single superpixel covers more than patch_occupancy_threshold (75%) of the pixels
for i in range(windows.shape[0]):
counts = np.zeros((number_regions,))
total = 0
s = set()
for sp in np.nditer(sp_windows[i]):
total += 1
counts[int(sp)] = counts[int(sp)] + 1
s.add(int(sp))
if np.amax(counts) > (total * patch_occupancy_threshold):
continue
patch_contains_superpixels.append(s)
windows_f.append(windows[i])
coords_f.append(coords[i])
print("Discarding non-union windows:", len(windows) - len(windows_f), len(windows))
windows = np.array(windows_f)
coords = np.array(coords_f)
# insert superpixels into database
superpixel_dicts = []
for i in range(number_regions):
superpixel_dicts.append({'scene':image_name})
insert_superpixels_result = database.insert_superpixels(superpixel_dicts)
superpixel_db_ids = np.array([p['id'] for p in insert_superpixels_result])
# insert patches into database
descriptors = get_patch_descriptors(windows)
patches = []
for i in range(descriptors.shape[0]):
patches.append({'scene': image_name, 'size': window_size, 'loc': coords[i], 'des': descriptors[i]})
insert_patches_result = database.insert_patches(patches)
patch_descriptors = np.array([p['des'] for p in insert_patches_result], dtype=np.float32)
patch_db_ids = np.array([p['id'] for p in insert_patches_result])
# add patch descriptors to index
desc_index.add_items(patch_descriptors, patch_db_ids)
# insert scene contains patch relationships
scene_contains_patch_relationships = [{"from": scene_node['id'], "to": label} for label in patch_db_ids]
result = database.insert_contains_relationships(scene_contains_patch_relationships)
print("inserted scene contains patch relationships", len(result))
# insert scene contains superpixel relationships
scene_contains_superpixel_relationships = [{"from": scene_node['id'], "to": label} for label in superpixel_db_ids]
result = database.insert_contains_relationships(scene_contains_superpixel_relationships)
print("inserted scene contains superpixel relationships", len(result))
# insert patch contains superpixel relationships
patch_contains_superpixel_relationships = []
for i in range(len(patch_contains_superpixels)):
patch_superpixels = patch_contains_superpixels[i]
patch_contains_superpixel_relationships.extend([{"from": patch_db_ids[i], "to": superpixel_db_ids[int(superpixel)]} for superpixel in patch_superpixels])
result = database.insert_contains_relationships(patch_contains_superpixel_relationships)
print("inserted patch contains superpixel relationships", len(result))
# insert superpixel neighbor relationships
superpixel_connections = get_superpixel_connections(superpixels)
superpixel_neighbor_relationships = [{"from": superpixel_db_ids[int(a)], "to": superpixel_db_ids[int(b)]} for (a, b) in superpixel_connections]
result = database.insert_neighbors_relationships(superpixel_neighbor_relationships)
print("inserted superpixel neighbor relationships", len(result))
print('starting', len(images))
last_finished_index = -1
start = time.time()
for i in range(len(images)):
print('last_finished_index', last_finished_index)
image_path = images[i][1]
image_name = images[i][0]
print(image_path, image_name)
# insert scene node
insert_scene_result = database.insert_scene({"scene": image_name})
scene_node = insert_scene_result[0]
print("inserted scene node")
extract_features_to_db(image_path, image_name, scene_node)
last_finished_index = i
end = time.time()
print('time elapsed', end - start)
print('saving desc_index')
desc_index.save_index("desc_index.bin")
print('finished')
| 36.684211 | 169 | 0.702351 |
e633e7b3ef3ff4d1a2f44f21cfa9bab3152465f4 | 15,682 | py | Python | ledger/payments/invoice/models.py | wilsonc86/ledger | a60a681e547f37e4ac81cb93dffaf90aea8c8151 | ["Apache-2.0"] | null | null | null | ledger/payments/invoice/models.py | wilsonc86/ledger | a60a681e547f37e4ac81cb93dffaf90aea8c8151 | ["Apache-2.0"] | null | null | null | ledger/payments/invoice/models.py | wilsonc86/ledger | a60a681e547f37e4ac81cb93dffaf90aea8c8151 | ["Apache-2.0"] | null | null | null |
from __future__ import unicode_literals
import traceback
import decimal
from django.db import models,transaction
from django.db.models import Q
from django.db.models import Sum
from django.db.models.functions import Coalesce
from django.conf import settings
from django.dispatch import receiver
from django.db.models.signals import post_delete, pre_save, post_save
from django.utils.encoding import python_2_unicode_compatible
from django.core.exceptions import ValidationError
from oscar.apps.order.models import Order
from ledger.payments.bpay.crn import getCRN
from ledger.payments.bpay.models import BpayTransaction
from ledger.payments.bpoint.models import BpointTransaction, TempBankCard, BpointToken, UsedBpointToken
class Invoice(models.Model):
created = models.DateTimeField(auto_now_add=True)
text = models.TextField(null=True,blank=True)
amount = models.DecimalField(decimal_places=2,max_digits=12)
order_number = models.CharField(max_length=50,unique=True)
reference = models.CharField(max_length=50, unique=True)
system = models.CharField(max_length=4,blank=True,null=True)
token = models.CharField(max_length=80,null=True,blank=True)
voided = models.BooleanField(default=False)
previous_invoice = models.ForeignKey('self',null=True,blank=True)
settlement_date = models.DateField(blank=True, null=True)
def __unicode__(self):
return 'Invoice #{0}'.format(self.reference)
class Meta:
db_table = 'payments_invoice'
# Properties
# =============================================
@property
def biller_code(self):
''' Return the biller code for bpay.
'''
return settings.BPAY_BILLER_CODE
@property
def order(self):
''' Get order matched to this invoice.
'''
try:
return Order.objects.get(number=self.order_number)
except Order.DoesNotExist:
return None
@property
def number(self):
length = len(str(self.id))
val = '0'
return '{}{}'.format((val*(6-length)),self.id)
@property
def owner(self):
if self.order:
return self.order.user
return None
@property
def refundable_amount(self):
return self.total_payment_amount - self.__calculate_total_refunds()
@property
def refundable(self):
if self.refundable_amount > 0:
return True
return False
@property
def num_items(self):
''' Get the number of items in this invoice.
'''
return self.order.num_items
@property
def shipping_required(self):
return self.order.basket.is_shipping_required() if self.order else False
@property
def linked_bpay_transactions(self):
linked = InvoiceBPAY.objects.filter(invoice=self).values('bpay')
txns = BpayTransaction.objects.filter(id__in=linked)
return txns
@property
def bpay_transactions(self):
''' Get this invoice's bpay transactions.
'''
txns = BpayTransaction.objects.filter(crn=self.reference)
linked_txns = BpayTransaction.objects.filter(id__in=InvoiceBPAY.objects.filter(invoice=self).values('bpay'))
return txns | linked_txns
@property
def bpoint_transactions(self):
''' Get this invoice's bpoint transactions.
'''
return BpointTransaction.objects.filter(crn1=self.reference)
@property
def payment_amount(self):
        ''' Total amount paid from bpay, bpoint and cash.
'''
return self.__calculate_bpay_payments() + self.__calculate_bpoint_payments() + self.__calculate_cash_payments() - self.__calculate_total_refunds()
@property
def total_payment_amount(self):
        ''' Total amount paid from bpay, bpoint and cash.
'''
return self.__calculate_bpay_payments() + self.__calculate_bpoint_payments() + self.__calculate_cash_payments()
@property
def refund_amount(self):
return self.__calculate_total_refunds()
@property
def deduction_amount(self):
return self.__calculate_deductions()
@property
def transferable_amount(self):
return self.__calculate_cash_payments()
@property
def balance(self):
if self.voided:
return decimal.Decimal(0)
amount = decimal.Decimal(self.amount - self.payment_amount)
if amount < 0:
amount = decimal.Decimal(0)
return amount
@property
def payment_status(self):
''' Payment status of the invoice.
'''
amount_paid = self.__calculate_bpay_payments() + self.__calculate_bpoint_payments() + self.__calculate_cash_payments() - self.__calculate_total_refunds()
if amount_paid == decimal.Decimal('0') and self.amount > 0:
return 'unpaid'
elif amount_paid < self.amount:
return 'partially_paid'
elif amount_paid == self.amount:
return 'paid'
else:
return 'over_paid'
@property
def single_card_payment(self):
card = self.bpoint_transactions.count()
bpay = self.bpay_transactions
cash = self.cash_transactions
if bpay or cash:
return False
if card > 1:
return False
return True
@property
def refundable_cards(self):
cards = []
refunds = self.bpoint_transactions.filter(Q(action='payment') | Q(action='capture'),dvtoken__isnull=False)
for r in refunds:
if r.refundable_amount > 0:
cards.append(r)
return cards
# Helper Functions
# =============================================
def __calculate_cash_payments(self):
        ''' Calculate the amount of cash payments made
less the reversals for this invoice.
'''
payments = dict(self.cash_transactions.filter(type='payment').aggregate(amount__sum=Coalesce(Sum('amount'), decimal.Decimal('0')))).get('amount__sum')
move_ins = dict(self.cash_transactions.filter(type='move_in').aggregate(amount__sum=Coalesce(Sum('amount'), decimal.Decimal('0')))).get('amount__sum')
reversals = dict(self.cash_transactions.filter(type='reversal').aggregate(amount__sum=Coalesce(Sum('amount'), decimal.Decimal('0')))).get('amount__sum')
move_outs = dict(self.cash_transactions.filter(type='move_out').aggregate(amount__sum=Coalesce(Sum('amount'), decimal.Decimal('0')))).get('amount__sum')
return (payments + move_ins) - (reversals + move_outs)
def __calculate_deductions(self):
'''Calculate all the move out transactions for this invoice
'''
return dict(self.cash_transactions.filter(type='move_out').aggregate(amount__sum=Coalesce(Sum('amount'), decimal.Decimal('0')))).get('amount__sum')
def __calculate_bpoint_payments(self):
        ''' Calculate the total amount of bpoint payments and
captures made less the reversals for this invoice.
'''
payments = reversals = 0
payments = payments + dict(self.bpoint_transactions.filter(action='payment', response_code='0').aggregate(amount__sum=Coalesce(Sum('amount'), decimal.Decimal('0')))).get('amount__sum')
payments = payments + dict(self.bpoint_transactions.filter(action='capture', response_code='0').aggregate(amount__sum=Coalesce(Sum('amount'), decimal.Decimal('0')))).get('amount__sum')
reversals = dict(self.bpoint_transactions.filter(action='reversal', response_code='0').aggregate(amount__sum=Coalesce(Sum('amount'), decimal.Decimal('0')))).get('amount__sum')
return payments - reversals
def __calculate_bpay_payments(self):
        ''' Calculate the amount of bpay payments made
less the reversals for this invoice.
'''
payments = 0
reversals = 0
if self.bpay_transactions:
payments = payments + dict(self.bpay_transactions.filter(p_instruction_code='05', type=399).aggregate(amount__sum=Coalesce(Sum('amount'), decimal.Decimal('0')))).get('amount__sum')
reversals = dict(self.bpay_transactions.filter(p_instruction_code='25', type=699).aggregate(amount__sum=Coalesce(Sum('amount'), decimal.Decimal('0')))).get('amount__sum')
return payments - reversals
def __calculate_total_refunds(self):
        ''' Calculate the total amount of refunds
for this invoice.
'''
refunds = 0
cash_refunds = dict(self.cash_transactions.filter(type='refund').aggregate(amount__sum=Coalesce(Sum('amount'), decimal.Decimal('0')))).get('amount__sum')
card_refunds = dict(self.bpoint_transactions.filter(action='refund', response_code='0').aggregate(amount__sum=Coalesce(Sum('amount'), decimal.Decimal('0')))).get('amount__sum')
bpay_refunds = dict(self.bpay_transactions.filter(p_instruction_code='15', type=699).aggregate(amount__sum=Coalesce(Sum('amount'), decimal.Decimal('0')))).get('amount__sum')
refunds = cash_refunds + card_refunds + bpay_refunds
return refunds
# Functions
# =============================================
def save(self,*args,**kwargs):
# prevent circular import
from ledger.payments.utils import systemid_check
if self.pk:
self.system = systemid_check(self.system)
super(Invoice,self).save(*args,**kwargs)
def make_payment(self):
''' Pay this invoice with the token attached to it.
:return: BpointTransaction
'''
from ledger.payments.facade import bpoint_facade
from ledger.payments.utils import update_payments
try:
if self.token:
card_details = self.token.split('|')
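                # The stored token is pipe-separated card data: a BPOINT DVToken, a second
                # field passed straight to TempBankCard (presumably the card expiry), and
                # optionally the card's last digits.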
card = TempBankCard(
card_details[0],
card_details[1]
)
if len(card_details) == 3:
card.last_digits = card_details[2]
else:
card.last_digits = None
txn = bpoint_facade.pay_with_temptoken(
'payment',
'telephoneorder',
'single',
card,
self.order_number,
self.reference,
self.amount,
None
)
if txn.approved:
try:
BpointToken.objects.get(DVToken=card_details[0])
self.token = ''
self.save()
except BpointToken.DoesNotExist:
UsedBpointToken.objects.create(DVToken=card_details[0])
self.token = ''
self.save()
update_payments(self.reference)
return txn
else:
raise ValidationError('This invoice doesn\'t have any tokens attached to it.')
except Exception as e:
traceback.print_exc()
raise
def move_funds(self,amount,invoice,details):
from ledger.payments.models import CashTransaction
from ledger.payments.utils import update_payments
with transaction.atomic():
try:
# Move all the bpoint transactions to the new invoice
for txn in self.bpoint_transactions:
txn.crn1 = invoice.reference
txn.save()
# Move all the bpay transactions to the new invoice
for txn in self.bpay_transactions:
txn.crn = invoice.reference
txn.save()
                # Move the remainder of the amount as a cash transaction
new_amount = self.__calculate_cash_payments()
if self.transferable_amount < new_amount:
raise ValidationError('The amount to be moved is more than the allowed transferable amount')
if new_amount > 0:
# Create a moveout transaction for current invoice
CashTransaction.objects.create(
invoice = self,
amount = amount,
type = 'move_out',
source = 'cash',
details = 'Move funds to invoice {}'.format(invoice.reference),
movement_reference = invoice.reference
)
update_payments(self.reference)
# Create a move in transaction for other invoice
CashTransaction.objects.create(
invoice = invoice,
amount = amount,
type = 'move_in',
source = 'cash',
details = 'Move funds from invoice {}'.format(self.reference),
movement_reference = self.reference
)
# set the previous invoice in the new invoice
invoice.previous_invoice = self
invoice.save()
                # Update the oracle interface invoices so as to prevent duplicate sending of amounts to oracle
from ledger.payments.models import OracleParserInvoice
OracleParserInvoice.objects.filter(reference=self.reference).update(reference=invoice.reference)
update_payments(invoice.reference)
except:
raise
class InvoiceBPAY(models.Model):
''' Link between unmatched bpay payments and invoices
'''
invoice = models.ForeignKey(Invoice)
bpay = models.ForeignKey('bpay.BpayTransaction')
class Meta:
db_table = 'payments_invoicebpay'
def __str__(self):
return 'Invoice No. {}: BPAY CRN {}'.format(self.invoice.reference,self.bpay.crn)
def clean(self, *args, **kwargs):
if (self.invoice.payment_status == 'paid' or self.invoice.payment_status == 'over_paid') and not self.pk:
raise ValidationError('This invoice has already been paid for.')
def save(self, *args, **kwargs):
self.full_clean()
super(InvoiceBPAY,self).save(*args, **kwargs)
class InvoiceBPAYListener(object):
"""
Event listener for InvoiceBPAY
"""
@staticmethod
@receiver(pre_save, sender=InvoiceBPAY)
def _pre_save(sender, instance, **kwargs):
if instance.pk:
original_instance = InvoiceBPAY.objects.get(pk=instance.pk)
setattr(instance, "_original_instance", original_instance)
elif hasattr(instance, "_original_instance"):
delattr(instance, "_original_instance")
@staticmethod
@receiver(post_save, sender=InvoiceBPAY)
def _post_save(sender, instance, **kwargs):
from ledger.payments.utils import update_payments
original_instance = getattr(instance, "_original_instance") if hasattr(instance, "_original_instance") else None
if not original_instance:
update_payments(instance.invoice.reference)
@staticmethod
@receiver(post_delete, sender=InvoiceBPAY)
def _post_delete(sender, instance, **kwargs):
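        # When an InvoiceBPAY link is deleted, scrub that BPAY transaction's id from the
        # payment_details of every order line on the linked invoice.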
for item in instance.invoice.order.lines.all():
removable = []
payment_details = item.payment_details['bpay']
for k,v in payment_details.items():
if k == str(instance.bpay.id):
removable.append(k)
if removable:
for r in removable:
del item.payment_details['bpay'][r]
item.save()
| 40.107417 | 192 | 0.61612 |
c30abf35cd63d77ae1bb20422af9a935a291aeb9 | 617 | py | Python | test/test_log.py | jakubgs/rbackup | d85c3397f53d7a04ac1b5241f0ea8cede339cc86 | ["BSD-3-Clause"] | 1 | 2018-01-02T16:28:43.000Z | 2018-01-02T16:28:43.000Z | test/test_log.py | jakubgs/rbackup | d85c3397f53d7a04ac1b5241f0ea8cede339cc86 | ["BSD-3-Clause"] | 3 | 2017-12-16T00:46:42.000Z | 2017-12-19T13:08:43.000Z | test/test_log.py | jakubgs/rbackup | d85c3397f53d7a04ac1b5241f0ea8cede339cc86 | ["BSD-3-Clause"] | null | null | null |
from mock import patch
from logging import Logger, INFO, DEBUG
from rbackup.log import setup_logging
@patch('rbackup.log.logging.FileHandler')
def test_setup_logging(m_fhandler):
log = setup_logging('test_file')
assert isinstance(log, Logger)
assert log.level == INFO
assert m_fhandler.called
log.removeHandler(log.handlers[0])
@patch('rbackup.log.logging.FileHandler')
def test_setup_logging_debug(m_fhandler):
log = setup_logging('test_file', debug=True)
assert isinstance(log, Logger)
assert log.level == DEBUG
assert m_fhandler.called
log.removeHandler(log.handlers[0])
| 29.380952 | 48 | 0.752026 |
5f37d00cdf59b0ca3bebf0e51cbf4af1a6b811b2 | 6,599 | py | Python | indy_node/test/request_handlers/test_revoc_reg_entry_handler.py | harunpehlivan/indy-node | 3c783228dc06c33c2b1cf5c124c7dc3e1721c894 | ["Apache-2.0"] | null | null | null | indy_node/test/request_handlers/test_revoc_reg_entry_handler.py | harunpehlivan/indy-node | 3c783228dc06c33c2b1cf5c124c7dc3e1721c894 | ["Apache-2.0"] | null | null | null | indy_node/test/request_handlers/test_revoc_reg_entry_handler.py | harunpehlivan/indy-node | 3c783228dc06c33c2b1cf5c124c7dc3e1721c894 | ["Apache-2.0"] | null | null | null |
import pytest
from indy_common.constants import REVOC_REG_ENTRY, REVOC_REG_DEF_ID, ISSUANCE_BY_DEFAULT, \
VALUE, ISSUANCE_TYPE, ISSUED, REVOKED, ACCUM
from indy_node.server.request_handlers.domain_req_handlers.revoc_reg_def_handler import RevocRegDefHandler
from indy_node.server.request_handlers.domain_req_handlers.revoc_reg_entry_handler import RevocRegEntryHandler
from indy_node.test.request_handlers.helper import add_to_idr
from plenum.common.constants import TXN_TIME, TRUSTEE
from plenum.common.exceptions import InvalidClientRequest, UnauthorizedClientRequest
from plenum.common.request import Request
from plenum.common.txn_util import reqToTxn, append_txn_metadata, get_payload_data
from plenum.common.types import f
from plenum.common.util import randomString
from plenum.server.request_handlers.utils import encode_state_value
@pytest.fixture(scope="module")
def revoc_reg_entry_handler(db_manager, write_auth_req_validator):
return RevocRegEntryHandler(db_manager, write_auth_req_validator,
RevocRegDefHandler.get_revocation_strategy)
@pytest.fixture(scope="function")
def revoc_reg_entry_request():
identifier = randomString()
return Request(identifier= identifier,
reqId=5,
operation={'type': REVOC_REG_ENTRY,
REVOC_REG_DEF_ID: identifier,
VALUE: {ACCUM: 5}},
signature="randomString")
def test_revoc_reg_entry_dynamic_validation_without_req_def(revoc_reg_entry_handler,
revoc_reg_entry_request):
with pytest.raises(InvalidClientRequest,
match="There is no any REVOC_REG_DEF by path"):
revoc_reg_entry_handler.dynamic_validation(revoc_reg_entry_request)
def test_revoc_reg_entry_dynamic_validation_passes(revoc_reg_entry_handler,
revoc_reg_entry_request):
add_to_idr(revoc_reg_entry_handler.database_manager.idr_cache,
revoc_reg_entry_request.identifier,
TRUSTEE)
revoc_reg_entry_handler.state.set(revoc_reg_entry_request.operation[REVOC_REG_DEF_ID].encode(),
encode_state_value({VALUE: {ISSUANCE_TYPE: ISSUANCE_BY_DEFAULT}},
"seqNo", "txnTime"))
revoc_reg_entry_handler.dynamic_validation(revoc_reg_entry_request)
def test_revoc_reg_entry_dynamic_validation_fail_in_strategy(revoc_reg_entry_handler,
revoc_reg_entry_request):
add_to_idr(revoc_reg_entry_handler.database_manager.idr_cache,
revoc_reg_entry_request.identifier,
TRUSTEE)
revoc_reg_entry_handler.state.set(revoc_reg_entry_request.operation[REVOC_REG_DEF_ID].encode(),
encode_state_value({VALUE: {ISSUANCE_TYPE: ISSUANCE_BY_DEFAULT}},
"seqNo", "txnTime"))
revoc_reg_entry_request.operation[VALUE] = {ISSUED: [1],
REVOKED: [1]}
with pytest.raises(InvalidClientRequest, match="Can not have an index in both "
"'issued' and 'revoked' lists"):
revoc_reg_entry_handler.dynamic_validation(revoc_reg_entry_request)
def test_revoc_reg_entry_dynamic_validation_without_permission(revoc_reg_entry_handler,
revoc_reg_entry_request):
add_to_idr(revoc_reg_entry_handler.database_manager.idr_cache,
revoc_reg_entry_request.identifier,
None)
revoc_reg_entry_handler.state.set(revoc_reg_entry_request.operation[REVOC_REG_DEF_ID].encode(),
encode_state_value({VALUE: {ISSUANCE_TYPE: ISSUANCE_BY_DEFAULT}},
"seqNo", "txnTime"))
revoc_reg_entry_request.operation[VALUE] = {ISSUED: [1],
REVOKED: [1]}
with pytest.raises(UnauthorizedClientRequest, match="1 TRUSTEE signature is required and needs to be owner OR "
"1 STEWARD signature is required and needs to be owner OR "
"1 ENDORSER signature is required and needs to be owner"):
revoc_reg_entry_handler.dynamic_validation(revoc_reg_entry_request)
def test_failed_update_state(revoc_reg_entry_handler, revoc_reg_entry_request):
seq_no = 1
txn_time = 1560241033
txn = reqToTxn(revoc_reg_entry_request)
append_txn_metadata(txn, seq_no, txn_time)
with pytest.raises(InvalidClientRequest,
match="There is no any REVOC_REG_DEF by path"):
revoc_reg_entry_handler.update_state(txn, None, revoc_reg_entry_request)
def test_update_state(revoc_reg_entry_handler, revoc_reg_entry_request,
revoc_reg_def_handler, revoc_reg_def_request):
# create revoc_req_def
seq_no = 1
txn_time = 1560241030
revoc_reg_def_request.operation[VALUE] = {}
revoc_reg_def_request.operation[VALUE][ISSUANCE_TYPE] = ISSUANCE_BY_DEFAULT
txn = reqToTxn(revoc_reg_def_request)
append_txn_metadata(txn, seq_no, txn_time)
path = RevocRegDefHandler.prepare_revoc_def_for_state(txn,
path_only=True)
revoc_reg_def_handler.update_state(txn, None, revoc_reg_def_request)
# create revoc_req_entry
seq_no = 2
txn_time = 1560241033
revoc_reg_entry_request.operation[REVOC_REG_DEF_ID] = path.decode()
txn = reqToTxn(revoc_reg_entry_request)
append_txn_metadata(txn, seq_no, txn_time)
revoc_reg_entry_handler.update_state(txn, None, revoc_reg_entry_request)
# check state for revoc_reg_entry
txn_data = get_payload_data(txn)
txn_data[f.SEQ_NO.nm] = seq_no
txn_data[TXN_TIME] = txn_time
assert revoc_reg_entry_handler.get_from_state(
RevocRegEntryHandler.prepare_revoc_reg_entry_for_state(txn,
path_only=True)) == (txn_data, seq_no, txn_time)
# check state for revoc_reg_entry
txn_data[VALUE] = {ACCUM: txn_data[VALUE][ACCUM]}
path, _ = RevocRegEntryHandler.prepare_revoc_reg_entry_accum_for_state(txn)
assert revoc_reg_entry_handler.get_from_state(path) == (txn_data, seq_no, txn_time)
| 51.96063 | 115 | 0.668889 |
36f25d29f1ba01c06a587cd53c1927e06d9ebc3d | 11,004 | py | Python | arcade/examples/platform_tutorial/09_endgame.py | LiorAvrahami/arcade | fce254a9eb89629de1f99d57a63759a2953184e9 | ["MIT"] | 1 | 2021-03-04T14:02:29.000Z | 2021-03-04T14:02:29.000Z | arcade/examples/platform_tutorial/09_endgame.py | LiorAvrahami/arcade | fce254a9eb89629de1f99d57a63759a2953184e9 | ["MIT"] | 1 | 2019-08-11T18:47:27.000Z | 2019-08-12T03:02:11.000Z | arcade/examples/platform_tutorial/09_endgame.py | LiorAvrahami/arcade | fce254a9eb89629de1f99d57a63759a2953184e9 | ["MIT"] | null | null | null |
"""
Platformer Game
"""
import arcade
# Constants
SCREEN_WIDTH = 1000
SCREEN_HEIGHT = 650
SCREEN_TITLE = "Platformer"
# Constants used to scale our sprites from their original size
CHARACTER_SCALING = 1
TILE_SCALING = 0.5
COIN_SCALING = 0.5
SPRITE_PIXEL_SIZE = 128
GRID_PIXEL_SIZE = (SPRITE_PIXEL_SIZE * TILE_SCALING)
# Movement speed of player, in pixels per frame
PLAYER_MOVEMENT_SPEED = 10
GRAVITY = 1
PLAYER_JUMP_SPEED = 20
# How many pixels to keep as a minimum margin between the character
# and the edge of the screen.
LEFT_VIEWPORT_MARGIN = 200
RIGHT_VIEWPORT_MARGIN = 200
BOTTOM_VIEWPORT_MARGIN = 150
TOP_VIEWPORT_MARGIN = 100
PLAYER_START_X = 64
PLAYER_START_Y = 225
class MyGame(arcade.Window):
"""
Main application class.
"""
def __init__(self):
# Call the parent class and set up the window
super().__init__(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
# These are 'lists' that keep track of our sprites. Each sprite should
# go into a list.
self.coin_list = None
self.wall_list = None
self.foreground_list = None
self.background_list = None
self.dont_touch_list = None
self.player_list = None
# Separate variable that holds the player sprite
self.player_sprite = None
# Our physics engine
self.physics_engine = None
# Used to keep track of our scrolling
self.view_bottom = 0
self.view_left = 0
# Keep track of the score
self.score = 0
# Where is the right edge of the map?
self.end_of_map = 0
# Level
self.level = 1
# Load sounds
self.collect_coin_sound = arcade.load_sound(":resources:sounds/coin1.wav")
self.jump_sound = arcade.load_sound(":resources:sounds/jump1.wav")
self.game_over = arcade.load_sound(":resources:sounds/gameover1.wav")
def setup(self, level):
""" Set up the game here. Call this function to restart the game. """
# Used to keep track of our scrolling
self.view_bottom = 0
self.view_left = 0
# Keep track of the score
self.score = 0
# Create the Sprite lists
self.player_list = arcade.SpriteList()
self.foreground_list = arcade.SpriteList()
self.background_list = arcade.SpriteList()
self.wall_list = arcade.SpriteList()
self.coin_list = arcade.SpriteList()
# Set up the player, specifically placing it at these coordinates.
image_source = ":resources:images/animated_characters/female_adventurer/femaleAdventurer_idle.png"
self.player_sprite = arcade.Sprite(image_source, CHARACTER_SCALING)
self.player_sprite.center_x = PLAYER_START_X
self.player_sprite.center_y = PLAYER_START_Y
self.player_list.append(self.player_sprite)
# --- Load in a map from the tiled editor ---
# Name of the layer in the file that has our platforms/walls
platforms_layer_name = 'Platforms'
# Name of the layer that has items for pick-up
coins_layer_name = 'Coins'
# Name of the layer that has items for foreground
foreground_layer_name = 'Foreground'
# Name of the layer that has items for background
background_layer_name = 'Background'
# Name of the layer that has items we shouldn't touch
dont_touch_layer_name = "Don't Touch"
# Map name
map_name = f":resources:tmx_maps/map2_level_{level}.tmx"
# Read in the tiled map
my_map = arcade.tilemap.read_tmx(map_name)
# Calculate the right edge of the my_map in pixels
self.end_of_map = my_map.map_size.width * GRID_PIXEL_SIZE
# -- Background
self.background_list = arcade.tilemap.process_layer(my_map,
background_layer_name,
TILE_SCALING)
# -- Foreground
self.foreground_list = arcade.tilemap.process_layer(my_map,
foreground_layer_name,
TILE_SCALING)
# -- Platforms
self.wall_list = arcade.tilemap.process_layer(map_object=my_map,
layer_name=platforms_layer_name,
scaling=TILE_SCALING,
use_spatial_hash=True)
# -- Coins
self.coin_list = arcade.tilemap.process_layer(my_map,
coins_layer_name,
TILE_SCALING,
use_spatial_hash=True)
# -- Don't Touch Layer
self.dont_touch_list = arcade.tilemap.process_layer(my_map,
dont_touch_layer_name,
TILE_SCALING,
use_spatial_hash=True)
# --- Other stuff
# Set the background color
if my_map.background_color:
arcade.set_background_color(my_map.background_color)
# Create the 'physics engine'
self.physics_engine = arcade.PhysicsEnginePlatformer(self.player_sprite,
self.wall_list,
GRAVITY)
def on_draw(self):
""" Render the screen. """
# Clear the screen to the background color
arcade.start_render()
# Draw our sprites
self.wall_list.draw()
self.background_list.draw()
self.wall_list.draw()
self.coin_list.draw()
self.dont_touch_list.draw()
self.player_list.draw()
self.foreground_list.draw()
# Draw our score on the screen, scrolling it with the viewport
score_text = f"Score: {self.score}"
arcade.draw_text(score_text, 10 + self.view_left, 10 + self.view_bottom,
arcade.csscolor.BLACK, 18)
def on_key_press(self, key, modifiers):
"""Called whenever a key is pressed. """
if key == arcade.key.UP or key == arcade.key.W:
if self.physics_engine.can_jump():
self.player_sprite.change_y = PLAYER_JUMP_SPEED
arcade.play_sound(self.jump_sound)
elif key == arcade.key.LEFT or key == arcade.key.A:
self.player_sprite.change_x = -PLAYER_MOVEMENT_SPEED
elif key == arcade.key.RIGHT or key == arcade.key.D:
self.player_sprite.change_x = PLAYER_MOVEMENT_SPEED
def on_key_release(self, key, modifiers):
"""Called when the user releases a key. """
if key == arcade.key.LEFT or key == arcade.key.A:
self.player_sprite.change_x = 0
elif key == arcade.key.RIGHT or key == arcade.key.D:
self.player_sprite.change_x = 0
def update(self, delta_time):
""" Movement and game logic """
# Move the player with the physics engine
self.physics_engine.update()
# See if we hit any coins
coin_hit_list = arcade.check_for_collision_with_list(self.player_sprite,
self.coin_list)
# Loop through each coin we hit (if any) and remove it
for coin in coin_hit_list:
# Remove the coin
coin.remove_from_sprite_lists()
# Play a sound
arcade.play_sound(self.collect_coin_sound)
# Add one to the score
self.score += 1
# Track if we need to change the viewport
changed_viewport = False
# Did the player fall off the map?
if self.player_sprite.center_y < -100:
self.player_sprite.center_x = PLAYER_START_X
self.player_sprite.center_y = PLAYER_START_Y
# Set the camera to the start
self.view_left = 0
self.view_bottom = 0
changed_viewport = True
arcade.play_sound(self.game_over)
# Did the player touch something they should not?
if arcade.check_for_collision_with_list(self.player_sprite,
self.dont_touch_list):
self.player_sprite.change_x = 0
self.player_sprite.change_y = 0
self.player_sprite.center_x = PLAYER_START_X
self.player_sprite.center_y = PLAYER_START_Y
# Set the camera to the start
self.view_left = 0
self.view_bottom = 0
changed_viewport = True
arcade.play_sound(self.game_over)
# See if the user got to the end of the level
if self.player_sprite.center_x >= self.end_of_map:
# Advance to the next level
self.level += 1
# Load the next level
self.setup(self.level)
# Set the camera to the start
self.view_left = 0
self.view_bottom = 0
changed_viewport = True
# --- Manage Scrolling ---
# Scroll left
left_boundary = self.view_left + LEFT_VIEWPORT_MARGIN
if self.player_sprite.left < left_boundary:
self.view_left -= left_boundary - self.player_sprite.left
changed_viewport = True
# Scroll right
right_boundary = self.view_left + SCREEN_WIDTH - RIGHT_VIEWPORT_MARGIN
if self.player_sprite.right > right_boundary:
self.view_left += self.player_sprite.right - right_boundary
changed_viewport = True
# Scroll up
top_boundary = self.view_bottom + SCREEN_HEIGHT - TOP_VIEWPORT_MARGIN
if self.player_sprite.top > top_boundary:
self.view_bottom += self.player_sprite.top - top_boundary
changed_viewport = True
# Scroll down
bottom_boundary = self.view_bottom + BOTTOM_VIEWPORT_MARGIN
if self.player_sprite.bottom < bottom_boundary:
self.view_bottom -= bottom_boundary - self.player_sprite.bottom
changed_viewport = True
if changed_viewport:
# Only scroll to integers. Otherwise we end up with pixels that
# don't line up on the screen
self.view_bottom = int(self.view_bottom)
self.view_left = int(self.view_left)
# Do the scrolling
arcade.set_viewport(self.view_left,
SCREEN_WIDTH + self.view_left,
self.view_bottom,
SCREEN_HEIGHT + self.view_bottom)
def main():
""" Main method """
window = MyGame()
window.setup(window.level)
arcade.run()
if __name__ == "__main__":
main()
| 35.61165 | 106 | 0.583788 |
f4398e3d197cfb649f017d11b19283cfa385f3dc | 1,079 | py | Python | sis/clients/migrations/0005_auto_20150205_1519.py | delphcf/sis | 8a77df020463bb9c28b1a7555574014e830eeab3 | ["BSD-3-Clause"] | null | null | null | sis/clients/migrations/0005_auto_20150205_1519.py | delphcf/sis | 8a77df020463bb9c28b1a7555574014e830eeab3 | ["BSD-3-Clause"] | null | null | null | sis/clients/migrations/0005_auto_20150205_1519.py | delphcf/sis | 8a77df020463bb9c28b1a7555574014e830eeab3 | ["BSD-3-Clause"] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('clients', '0004_auto_20150128_1412'),
]
operations = [
migrations.AlterField(
model_name='referral',
name='notes',
field=models.TextField(default=b'', max_length=250, verbose_name='Referral notes', blank=True),
),
migrations.AlterField(
model_name='referral',
name='reasons',
field=models.ManyToManyField(db_constraint='Reasons for referral', to=b'clients.ReferralReason'),
),
migrations.AlterField(
model_name='referral',
name='ref_date',
field=models.DateField(verbose_name='Referral date'),
),
migrations.AlterField(
model_name='relationtype',
name='contact_type',
field=models.CharField(default=b'N', max_length=1, choices=[(b'N', 'Next of kin'), (b'W', 'Reference')]),
),
]
| 30.828571 | 117 | 0.591288 |
64298c5cb303b8ed3d219900a9389d8bf4187eb2 | 3,100 | py | Python | Code_Python/dashboard/web_app/table.py | RichardScottOZ/QMineral_Modeller | 98a43b7c5aeacc9ea0a49c7dc0980b283562d926 | ["BSD-3-Clause"] | 14 | 2021-05-08T01:35:18.000Z | 2022-01-23T09:54:03.000Z | Code_Python/dashboard/web_app/table.py | RichardScottOZ/QMineral_Modeller | 98a43b7c5aeacc9ea0a49c7dc0980b283562d926 | ["BSD-3-Clause"] | 3 | 2021-06-10T19:38:50.000Z | 2021-06-22T23:39:27.000Z | Code_Python/dashboard/web_app/table.py | RichardScottOZ/QMineral_Modeller | 98a43b7c5aeacc9ea0a49c7dc0980b283562d926 | ["BSD-3-Clause"] | 3 | 2021-06-22T21:40:39.000Z | 2022-01-16T19:39:07.000Z |
import dash_html_components as html
import base64
import datetime
import io
import dash_table
import dash_core_components as dcc
def parse_contents(contents, filename, date):
import qmin
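    # dcc.Upload hands the file over as a data URI ("data:<type>;base64,<payload>"),
    # so split on the comma and base64-decode the payload before parsing it.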
content_type, content_string = contents.split(',')
decoded = base64.b64decode(content_string)
try:
if 'csv' in filename:
            # Assume that the user uploaded a CSV file in CPRM style (evandro)
df = qmin.test_cprm_datasets_web(io.StringIO(decoded.decode('ISO-8859-1')))
elif 'xls' in filename:
# Assume that the user uploaded an excel file
            # This Excel file is in the Microssonda (microprobe) format
df = qmin.load_data_ms_web(io.BytesIO(decoded))
# csv_string = df.to_csv(index=False, encoding='utf-8')
#csv_string = "data:text/csv;charset=utf-8," + urllib.quote(csv_string)
#update_download_link(df)
except Exception as e:
print(e)
return html.Div([
'There was an error processing this file.'
])
return html.Div([
html.H5(filename),
html.H6(datetime.datetime.fromtimestamp(date)),
dash_table.DataTable(
data=df.to_dict('records'),
columns=[{'name': i, 'id': i} for i in df.columns]
),
html.Hr(), # horizontal line
# For debugging, display the raw contents provided by the web browser
html.Div('Raw Content'),
html.Pre(contents[0:200] + '...', style={
'whiteSpace': 'pre-wrap',
'wordBreak': 'break-all'
})
])
def parse_contents_df(contents, filename, date):
import qmin
content_type, content_string = contents.split(',')
decoded = base64.b64decode(content_string)
try:
if 'csv' in filename:
            # Assume that the user uploaded a CSV file in CPRM style (evandro)
df = qmin.test_cprm_datasets_web(io.StringIO(decoded.decode('ISO-8859-1')))
elif 'xls' in filename:
# Assume that the user uploaded an excel file
            # This Excel file is in the Microssonda (microprobe) format
df = qmin.load_data_ms_web(io.BytesIO(decoded))
# csv_string = df.to_csv(index=False, encoding='utf-8')
#csv_string = "data:text/csv;charset=utf-8," + urllib.quote(csv_string)
#update_download_link(df)
except Exception as e:
print(e)
return html.Div([
'There was an error processing this file.'
])
return
layout = html.Div([
html.H4("Upload Files"),
dcc.Upload(
id='upload-data',
children=html.Div([
'Drag and Drop or ',
html.A('Select Files')
]),
style={
'width': '50%',
'height': '60px',
'lineHeight': '60px',
'borderWidth': '1px',
'borderStyle': 'dashed',
'borderRadius': '5px',
'textAlign': 'center',
'margin': '10px'
},
# Allow multiple files to be uploaded
multiple=True
),
html.Div(id='output-data-upload')
])
| 31.958763 | 87 | 0.577097 |
d45c04cb30bf08619081b98502352279147789b4 | 19,670 | py | Python | samtranslator/model/stepfunctions/events.py | hawflau/serverless-application-model | d2cf4b7e23d26cdf677c564d53bb58e6a5b6cac2 | ["Apache-2.0"] | 1,279 | 2020-08-25T03:33:15.000Z | 2022-03-31T09:49:22.000Z | samtranslator/model/stepfunctions/events.py | hawflau/serverless-application-model | d2cf4b7e23d26cdf677c564d53bb58e6a5b6cac2 | ["Apache-2.0"] | 797 | 2020-08-24T23:30:05.000Z | 2022-03-31T22:28:29.000Z | samtranslator/model/stepfunctions/events.py | hawflau/serverless-application-model | d2cf4b7e23d26cdf677c564d53bb58e6a5b6cac2 | ["Apache-2.0"] | 431 | 2020-08-27T20:47:26.000Z | 2022-03-31T23:57:55.000Z |
from six import string_types
import json
from samtranslator.metrics.method_decorator import cw_timer
from samtranslator.model import PropertyType, ResourceMacro
from samtranslator.model.events import EventsRule
from samtranslator.model.iam import IAMRole, IAMRolePolicies
from samtranslator.model.types import dict_of, is_str, is_type, list_of, one_of
from samtranslator.model.intrinsics import fnSub
from samtranslator.translator import logical_id_generator
from samtranslator.model.exceptions import InvalidEventException, InvalidResourceException
from samtranslator.model.eventbridge_utils import EventBridgeRuleUtils
from samtranslator.model.eventsources.push import Api as PushApi
from samtranslator.translator.arn_generator import ArnGenerator
from samtranslator.swagger.swagger import SwaggerEditor
from samtranslator.open_api.open_api import OpenApiEditor
CONDITION = "Condition"
SFN_EVETSOURCE_METRIC_PREFIX = "SFNEventSource"
class EventSource(ResourceMacro):
"""Base class for event sources for SAM State Machine.
:cvar str principal: The AWS service principal of the source service.
"""
principal = None
def _generate_logical_id(self, prefix, suffix, resource_type):
"""Helper utility to generate a logicial ID for a new resource
:param string prefix: Prefix to use for the logical ID of the resource
:param string suffix: Suffix to add for the logical ID of the resource
:param string resource_type: Type of the resource
:returns: the logical ID for the new resource
:rtype: string
"""
if prefix is None:
prefix = self.logical_id
if suffix.isalnum():
logical_id = prefix + resource_type + suffix
else:
generator = logical_id_generator.LogicalIdGenerator(prefix + resource_type, suffix)
logical_id = generator.gen()
return logical_id
def _construct_role(self, resource, permissions_boundary=None, prefix=None, suffix=""):
"""Constructs the IAM Role resource allowing the event service to invoke
the StartExecution API of the state machine resource it is associated with.
:param model.stepfunctions.StepFunctionsStateMachine resource: The state machine resource associated with the event
:param string permissions_boundary: The ARN of the policy used to set the permissions boundary for the role
:param string prefix: Prefix to use for the logical ID of the IAM role
:param string suffix: Suffix to add for the logical ID of the IAM role
:returns: the IAM Role resource
:rtype: model.iam.IAMRole
"""
role_logical_id = self._generate_logical_id(prefix=prefix, suffix=suffix, resource_type="Role")
event_role = IAMRole(role_logical_id, attributes=resource.get_passthrough_resource_attributes())
event_role.AssumeRolePolicyDocument = IAMRolePolicies.construct_assume_role_policy_for_service_principal(
self.principal
)
state_machine_arn = resource.get_runtime_attr("arn")
event_role.Policies = [
IAMRolePolicies.step_functions_start_execution_role_policy(state_machine_arn, role_logical_id)
]
if permissions_boundary:
event_role.PermissionsBoundary = permissions_boundary
return event_role
class Schedule(EventSource):
"""Scheduled executions for SAM State Machine."""
resource_type = "Schedule"
principal = "events.amazonaws.com"
property_types = {
"Schedule": PropertyType(True, is_str()),
"Input": PropertyType(False, is_str()),
"Enabled": PropertyType(False, is_type(bool)),
"Name": PropertyType(False, is_str()),
"Description": PropertyType(False, is_str()),
"DeadLetterConfig": PropertyType(False, is_type(dict)),
"RetryPolicy": PropertyType(False, is_type(dict)),
}
@cw_timer(prefix=SFN_EVETSOURCE_METRIC_PREFIX)
def to_cloudformation(self, resource, **kwargs):
"""Returns the EventBridge Rule and IAM Role to which this Schedule event source corresponds.
:param dict kwargs: no existing resources need to be modified
:returns: a list of vanilla CloudFormation Resources, to which this Schedule event expands
:rtype: list
"""
resources = []
permissions_boundary = kwargs.get("permissions_boundary")
passthrough_resource_attributes = resource.get_passthrough_resource_attributes()
events_rule = EventsRule(self.logical_id, attributes=passthrough_resource_attributes)
resources.append(events_rule)
events_rule.ScheduleExpression = self.Schedule
if self.Enabled is not None:
events_rule.State = "ENABLED" if self.Enabled else "DISABLED"
events_rule.Name = self.Name
events_rule.Description = self.Description
role = self._construct_role(resource, permissions_boundary)
resources.append(role)
source_arn = events_rule.get_runtime_attr("arn")
dlq_queue_arn = None
if self.DeadLetterConfig is not None:
EventBridgeRuleUtils.validate_dlq_config(self.logical_id, self.DeadLetterConfig)
dlq_queue_arn, dlq_resources = EventBridgeRuleUtils.get_dlq_queue_arn_and_resources(
self, source_arn, passthrough_resource_attributes
)
resources.extend(dlq_resources)
events_rule.Targets = [self._construct_target(resource, role, dlq_queue_arn)]
return resources
def _construct_target(self, resource, role, dead_letter_queue_arn=None):
"""Constructs the Target property for the EventBridge Rule.
:returns: the Target property
:rtype: dict
"""
target = {
"Arn": resource.get_runtime_attr("arn"),
"Id": self.logical_id + "StepFunctionsTarget",
"RoleArn": role.get_runtime_attr("arn"),
}
if self.Input is not None:
target["Input"] = self.Input
if self.DeadLetterConfig is not None:
target["DeadLetterConfig"] = {"Arn": dead_letter_queue_arn}
if self.RetryPolicy is not None:
target["RetryPolicy"] = self.RetryPolicy
return target
class CloudWatchEvent(EventSource):
"""CloudWatch Events/EventBridge event source for SAM State Machine."""
resource_type = "CloudWatchEvent"
principal = "events.amazonaws.com"
property_types = {
"EventBusName": PropertyType(False, is_str()),
"Pattern": PropertyType(False, is_type(dict)),
"Input": PropertyType(False, is_str()),
"InputPath": PropertyType(False, is_str()),
"DeadLetterConfig": PropertyType(False, is_type(dict)),
"RetryPolicy": PropertyType(False, is_type(dict)),
}
@cw_timer(prefix=SFN_EVETSOURCE_METRIC_PREFIX)
def to_cloudformation(self, resource, **kwargs):
"""Returns the CloudWatch Events/EventBridge Rule and IAM Role to which this
CloudWatch Events/EventBridge event source corresponds.
:param dict kwargs: no existing resources need to be modified
:returns: a list of vanilla CloudFormation Resources, to which this CloudWatch Events/EventBridge event expands
:rtype: list
"""
resources = []
permissions_boundary = kwargs.get("permissions_boundary")
passthrough_resource_attributes = resource.get_passthrough_resource_attributes()
events_rule = EventsRule(self.logical_id, attributes=passthrough_resource_attributes)
events_rule.EventBusName = self.EventBusName
events_rule.EventPattern = self.Pattern
resources.append(events_rule)
role = self._construct_role(resource, permissions_boundary)
resources.append(role)
source_arn = events_rule.get_runtime_attr("arn")
dlq_queue_arn = None
if self.DeadLetterConfig is not None:
EventBridgeRuleUtils.validate_dlq_config(self.logical_id, self.DeadLetterConfig)
dlq_queue_arn, dlq_resources = EventBridgeRuleUtils.get_dlq_queue_arn_and_resources(
self, source_arn, passthrough_resource_attributes
)
resources.extend(dlq_resources)
events_rule.Targets = [self._construct_target(resource, role, dlq_queue_arn)]
return resources
def _construct_target(self, resource, role, dead_letter_queue_arn=None):
"""Constructs the Target property for the CloudWatch Events/EventBridge Rule.
:returns: the Target property
:rtype: dict
"""
target = {
"Arn": resource.get_runtime_attr("arn"),
"Id": self.logical_id + "StepFunctionsTarget",
"RoleArn": role.get_runtime_attr("arn"),
}
if self.Input is not None:
target["Input"] = self.Input
if self.InputPath is not None:
target["InputPath"] = self.InputPath
if self.DeadLetterConfig is not None:
target["DeadLetterConfig"] = {"Arn": dead_letter_queue_arn}
if self.RetryPolicy is not None:
target["RetryPolicy"] = self.RetryPolicy
return target
class EventBridgeRule(CloudWatchEvent):
"""EventBridge Rule event source for SAM State Machine."""
resource_type = "EventBridgeRule"
class Api(EventSource):
"""Api method event source for SAM State Machines."""
resource_type = "Api"
principal = "apigateway.amazonaws.com"
property_types = {
"Path": PropertyType(True, is_str()),
"Method": PropertyType(True, is_str()),
# Api Event sources must "always" be paired with a Serverless::Api
"RestApiId": PropertyType(True, is_str()),
"Stage": PropertyType(False, is_str()),
"Auth": PropertyType(False, is_type(dict)),
}
def resources_to_link(self, resources):
"""
If this API Event Source refers to an explicit API resource, resolve the reference and grab
necessary data from the explicit API
"""
        # If RestApiId is a resource in the same template, then we try to find the StageName by following the reference
# Otherwise we default to a wildcard. This stage name is solely used to construct the permission to
# allow this stage to invoke the State Machine. If we are unable to resolve the stage name, we will
# simply permit all stages to invoke this State Machine
# This hack is necessary because customers could use !ImportValue, !Ref or other intrinsic functions which
        # can sometimes be impossible to resolve (i.e. when they involve cross-stack references)
permitted_stage = "*"
stage_suffix = "AllStages"
explicit_api = None
rest_api_id = PushApi.get_rest_api_id_string(self.RestApiId)
if isinstance(rest_api_id, string_types):
if (
rest_api_id in resources
and "Properties" in resources[rest_api_id]
and "StageName" in resources[rest_api_id]["Properties"]
):
explicit_api = resources[rest_api_id]["Properties"]
permitted_stage = explicit_api["StageName"]
                # Stage could be an intrinsic, in which case leave the suffix at its default value
if isinstance(permitted_stage, string_types):
stage_suffix = permitted_stage
else:
stage_suffix = "Stage"
else:
# RestApiId is a string, not an intrinsic, but we did not find a valid API resource for this ID
raise InvalidEventException(
self.relative_id,
"RestApiId property of Api event must reference a valid resource in the same template.",
)
return {"explicit_api": explicit_api, "explicit_api_stage": {"suffix": stage_suffix}}
@cw_timer(prefix=SFN_EVETSOURCE_METRIC_PREFIX)
def to_cloudformation(self, resource, **kwargs):
"""If the Api event source has a RestApi property, then simply return the IAM role resource
allowing API Gateway to start the state machine execution. If no RestApi is provided, then
additionally inject the path, method, and the x-amazon-apigateway-integration into the
Swagger body for a provided implicit API.
        :param model.stepfunctions.resources.StepFunctionsStateMachine resource: the state machine \
resource to which the Api event source must be associated
:param dict kwargs: a dict containing the implicit RestApi to be modified, should no \
explicit RestApi be provided.
:returns: a list of vanilla CloudFormation Resources, to which this Api event expands
:rtype: list
"""
resources = []
intrinsics_resolver = kwargs.get("intrinsics_resolver")
permissions_boundary = kwargs.get("permissions_boundary")
if self.Method is not None:
# Convert to lower case so that user can specify either GET or get
self.Method = self.Method.lower()
role = self._construct_role(resource, permissions_boundary)
resources.append(role)
explicit_api = kwargs["explicit_api"]
if explicit_api.get("__MANAGE_SWAGGER"):
self._add_swagger_integration(explicit_api, resource, role, intrinsics_resolver)
return resources
def _add_swagger_integration(self, api, resource, role, intrinsics_resolver):
"""Adds the path and method for this Api event source to the Swagger body for the provided RestApi.
        :param model.apigateway.ApiGatewayRestApi api: the RestApi to which the path and method should be added.
"""
swagger_body = api.get("DefinitionBody")
if swagger_body is None:
return
resource_arn = resource.get_runtime_attr("arn")
integration_uri = fnSub("arn:${AWS::Partition}:apigateway:${AWS::Region}:states:action/StartExecution")
editor = SwaggerEditor(swagger_body)
if editor.has_integration(self.Path, self.Method):
# Cannot add the integration, if it is already present
raise InvalidEventException(
self.relative_id,
'API method "{method}" defined multiple times for path "{path}".'.format(
method=self.Method, path=self.Path
),
)
condition = None
if CONDITION in resource.resource_attributes:
condition = resource.resource_attributes[CONDITION]
editor.add_state_machine_integration(
self.Path,
self.Method,
integration_uri,
role.get_runtime_attr("arn"),
self._generate_request_template(resource),
condition=condition,
)
# Note: Refactor and combine the section below with the Api eventsource for functions
if self.Auth:
method_authorizer = self.Auth.get("Authorizer")
api_auth = api.get("Auth")
api_auth = intrinsics_resolver.resolve_parameter_refs(api_auth)
if method_authorizer:
api_authorizers = api_auth and api_auth.get("Authorizers")
if method_authorizer != "AWS_IAM":
if method_authorizer != "NONE" and not api_authorizers:
raise InvalidEventException(
self.relative_id,
"Unable to set Authorizer [{authorizer}] on API method [{method}] for path [{path}] "
"because the related API does not define any Authorizers.".format(
authorizer=method_authorizer, method=self.Method, path=self.Path
),
)
if method_authorizer != "NONE" and not api_authorizers.get(method_authorizer):
raise InvalidEventException(
self.relative_id,
"Unable to set Authorizer [{authorizer}] on API method [{method}] for path [{path}] "
"because it wasn't defined in the API's Authorizers.".format(
authorizer=method_authorizer, method=self.Method, path=self.Path
),
)
if method_authorizer == "NONE":
if not api_auth or not api_auth.get("DefaultAuthorizer"):
raise InvalidEventException(
self.relative_id,
"Unable to set Authorizer on API method [{method}] for path [{path}] because 'NONE' "
"is only a valid value when a DefaultAuthorizer on the API is specified.".format(
method=self.Method, path=self.Path
),
)
if self.Auth.get("AuthorizationScopes") and not isinstance(self.Auth.get("AuthorizationScopes"), list):
raise InvalidEventException(
self.relative_id,
"Unable to set Authorizer on API method [{method}] for path [{path}] because "
"'AuthorizationScopes' must be a list of strings.".format(method=self.Method, path=self.Path),
)
apikey_required_setting = self.Auth.get("ApiKeyRequired")
apikey_required_setting_is_false = apikey_required_setting is not None and not apikey_required_setting
if apikey_required_setting_is_false and (not api_auth or not api_auth.get("ApiKeyRequired")):
raise InvalidEventException(
self.relative_id,
"Unable to set ApiKeyRequired [False] on API method [{method}] for path [{path}] "
"because the related API does not specify any ApiKeyRequired.".format(
method=self.Method, path=self.Path
),
)
if method_authorizer or apikey_required_setting is not None:
editor.add_auth_to_method(api=api, path=self.Path, method_name=self.Method, auth=self.Auth)
if self.Auth.get("ResourcePolicy"):
resource_policy = self.Auth.get("ResourcePolicy")
editor.add_resource_policy(resource_policy=resource_policy, path=self.Path, stage=self.Stage)
if resource_policy.get("CustomStatements"):
editor.add_custom_statements(resource_policy.get("CustomStatements"))
api["DefinitionBody"] = editor.swagger
def _generate_request_template(self, resource):
"""Generates the Body mapping request template for the Api. This allows for the input
request to the Api to be passed as the execution input to the associated state machine resource.
        :param model.stepfunctions.resources.StepFunctionsStateMachine resource: the state machine
resource to which the Api event source must be associated
:returns: a body mapping request which passes the Api input to the state machine execution
:rtype: dict
"""
request_templates = {
"application/json": fnSub(
json.dumps(
{
"input": "$util.escapeJavaScript($input.json('$'))",
"stateMachineArn": "${" + resource.logical_id + "}",
}
)
)
}
return request_templates
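# Illustrative note (not part of the original module; the logical ID is hypothetical):
# for a state machine with logical ID "MyStateMachine", _generate_request_template()
# produces roughly the following mapping, so API Gateway passes the request body as the
# execution input and substitutes the state machine ARN at deploy time:
#
#   "application/json" -> Fn::Sub of
#       {"input": "$util.escapeJavaScript($input.json('$'))", "stateMachineArn": "${MyStateMachine}"}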
| 44.004474
| 123
| 0.649161
|
3a88fcfdb3a4a5798d74b3914ee9bde0047d0860
| 494
|
py
|
Python
|
plotly/validators/contourcarpet/colorbar/_title.py
|
gnestor/plotly.py
|
a8ae062795ddbf9867b8578fe6d9e244948c15ff
|
[
"MIT"
] | 2
|
2018-12-03T15:20:42.000Z
|
2018-12-03T15:20:47.000Z
|
plotly/validators/contourcarpet/colorbar/_title.py
|
gnestor/plotly.py
|
a8ae062795ddbf9867b8578fe6d9e244948c15ff
|
[
"MIT"
] | 1
|
2020-12-15T16:56:11.000Z
|
2020-12-15T16:56:11.000Z
|
plotly/validators/contourcarpet/colorbar/_title.py
|
gnestor/plotly.py
|
a8ae062795ddbf9867b8578fe6d9e244948c15ff
|
[
"MIT"
] | 2
|
2019-06-17T01:35:57.000Z
|
2020-11-03T01:07:19.000Z
|
import _plotly_utils.basevalidators
class TitleValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(
self,
plotly_name='title',
parent_name='contourcarpet.colorbar',
**kwargs
):
super(TitleValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'colorbars'),
role=kwargs.pop('role', 'info'),
**kwargs
)
| 26
| 67
| 0.609312
|
2d2e709f39f705a5bc1d3aa05cd72377635b49b4
| 2,144
|
py
|
Python
|
Media/common/Interface/FrameXML/MvUnit.py
|
dmacka/MultiverseClientServer
|
b64d7d754a0b2b1a3e5acabd4d6ebb80ab1d9379
|
[
"MIT"
] | 5
|
2020-04-29T19:14:57.000Z
|
2022-02-18T08:48:37.000Z
|
Media/common/Interface/FrameXML/MvUnit.py
|
dmacka/MultiverseClientServer
|
b64d7d754a0b2b1a3e5acabd4d6ebb80ab1d9379
|
[
"MIT"
] | null | null | null |
Media/common/Interface/FrameXML/MvUnit.py
|
dmacka/MultiverseClientServer
|
b64d7d754a0b2b1a3e5acabd4d6ebb80ab1d9379
|
[
"MIT"
] | 2
|
2021-03-09T06:53:30.000Z
|
2021-03-27T12:02:39.000Z
|
def UnitFrame_OnEvent(frame, event):
if event.eventType == "UNIT_NAME_UPDATE":
unit = event.eventArgs[0]
if unit == frame.Properties["unit"]:
frame.Properties["name"].SetText(MarsUnit.UnitName(unit))
def UnitFrame_Update(frame):
unit = frame.Properties["unit"]
if not UnitExists(unit):
frame.Hide()
else:
frame.Show()
frame.Properties["name"].SetText(MarsUnit.UnitName(unit))
UnitFrameHealthBar_Update(frame.Properties["healthbar"], unit)
UnitFrameManaBar_Update(frame.Properties["manabar"], unit)
def UnitFrame_Initialize(frame, unit, name, portrait, healthbar, healthtext, manabar, manatext):
frame.Properties["unit"] = unit
frame.Properties["name"] = name
frame.Properties["portrait"] = portrait
frame.Properties["healthbar"] = healthbar
frame.Properties["manabar"] = manabar
UnitFrameHealthBar_Initialize(unit, healthbar, healthtext)
UnitFrameManaBar_Initialize(unit, manabar, manatext)
UnitFrame_Update(frame)
frame.RegisterEvent("UNIT_NAME_UPDATE")
def UnitFrameHealthBar_Initialize(unit, statusbar, statustext):
if not statusbar:
return
statusbar.Properties["unit"] = unit
# SetTextStatusBarText(statusbar, statustext)
statusbar.RegisterEvent("UNIT_HEALTH")
statusbar.RegisterEvent("UNIT_MAXHEALTH")
def UnitFrameManaBar_Initialize(unit, statusbar, statustext):
if not statusbar:
return
statusbar.Properties["unit"] = unit
# SetTextStatusBarText(statusbar, statustext)
statusbar.RegisterEvent("UNIT_MANA")
statusbar.RegisterEvent("UNIT_MAXMANA")
def UnitFrameHealthBar_Update(statusbar, unit):
if not statusbar:
return
if unit == statusbar.Properties["unit"]:
statusbar.SetMinMaxValues(0, MarsUnit.UnitHealthMax(unit))
statusbar.SetValue(MarsUnit.UnitHealth(unit))
def UnitFrameManaBar_Update(statusbar, unit):
if not statusbar:
return
if unit == statusbar.Properties["unit"]:
statusbar.SetMinMaxValues(0, MarsUnit.UnitManaMax(unit))
statusbar.SetValue(MarsUnit.UnitMana(unit))
| 36.965517
| 96
| 0.710821
|
ab173994fd5ea219de2a65bf025b29fc3eccae70
| 5,256
|
bzl
|
Python
|
repositories.bzl
|
jakebiesinger-storyhealth/rules_nodejs
|
8df86ccb799e4f9f3c4b26174f09b58a89ef3639
|
[
"Apache-2.0"
] | null | null | null |
repositories.bzl
|
jakebiesinger-storyhealth/rules_nodejs
|
8df86ccb799e4f9f3c4b26174f09b58a89ef3639
|
[
"Apache-2.0"
] | null | null | null |
repositories.bzl
|
jakebiesinger-storyhealth/rules_nodejs
|
8df86ccb799e4f9f3c4b26174f09b58a89ef3639
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dependency-related rules defining our dependency versions.
Fulfills similar role as the package.json file.
"""
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
def build_bazel_rules_nodejs_dependencies():
maybe(
http_archive,
name = "bazel_skylib",
sha256 = "c6966ec828da198c5d9adbaa94c05e3a1c7f21bd012a0b29ba8ddbccb2c93b0d",
urls = [
"https://github.com/bazelbuild/bazel-skylib/releases/download/1.1.1/bazel-skylib-1.1.1.tar.gz",
"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.1.1/bazel-skylib-1.1.1.tar.gz",
],
)
core_sha = "03b542b22a95c4b6591630f3f6b176294033e190e08e044bdb23883693702b6b"
maybe(
http_archive,
name = "rules_nodejs",
sha256 = core_sha,
urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/5.1.0/rules_nodejs-core-5.1.0.tar.gz"],
)
def build_bazel_rules_nodejs_dev_dependencies():
"""
Fetch dependencies needed for local development, but not needed by users.
These are in this file to keep version information in one place, and make the WORKSPACE
shorter.
"""
build_bazel_rules_nodejs_dependencies()
# Dependencies for generating documentation
maybe(
http_archive,
name = "io_bazel_rules_sass",
sha256 = "c6249cf64dffbc81312191800b0984b5197d77864c13d0dc4d469937cc3f8108",
strip_prefix = "rules_sass-1.32.11",
urls = [
"https://github.com/bazelbuild/rules_sass/archive/1.32.11.zip",
"https://mirror.bazel.build/github.com/bazelbuild/rules_sass/archive/1.32.11.zip",
],
)
# Needed for com_google_protobuf
maybe(
http_archive,
name = "zlib",
build_file = "@com_google_protobuf//:third_party/zlib.BUILD",
sha256 = "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1",
strip_prefix = "zlib-1.2.11",
urls = [
"https://mirror.bazel.build/zlib.net/zlib-1.2.11.tar.gz",
"https://zlib.net/zlib-1.2.11.tar.gz",
],
)
maybe(
http_archive,
name = "io_bazel_stardoc",
sha256 = "d681269c40a368c6eb7e9eccfee44a9919d22f84f80e331e41e74bdf99a3108e",
strip_prefix = "stardoc-8f6d22452d088b49b13ba2c224af69ccc8ccbc90",
urls = [
"https://github.com/bazelbuild/stardoc/archive/8f6d22452d088b49b13ba2c224af69ccc8ccbc90.tar.gz",
],
)
# Needed for Remote Build Execution
# See https://releases.bazel.build/bazel-toolchains.html
maybe(
http_archive,
name = "bazel_toolchains",
sha256 = "179ec02f809e86abf56356d8898c8bd74069f1bd7c56044050c2cd3d79d0e024",
strip_prefix = "bazel-toolchains-4.1.0",
urls = [
"https://mirror.bazel.build/github.com/bazelbuild/bazel-toolchains/releases/download/4.1.0/bazel-toolchains-4.1.0.tar.gz",
"https://github.com/bazelbuild/bazel-toolchains/releases/download/4.1.0/bazel-toolchains-4.1.0.tar.gz",
],
)
maybe(
http_archive,
name = "build_bazel_integration_testing",
urls = [
"https://github.com/bazelbuild/bazel-integration-testing/archive/165440b2dbda885f8d1ccb8d0f417e6cf8c54f17.zip",
],
strip_prefix = "bazel-integration-testing-165440b2dbda885f8d1ccb8d0f417e6cf8c54f17",
sha256 = "2401b1369ef44cc42f91dc94443ef491208dbd06da1e1e10b702d8c189f098e3",
)
maybe(
http_archive,
name = "rules_codeowners",
strip_prefix = "rules_codeowners-27fe3bbe6e5b0df196e360fc9e081835f22a10be",
sha256 = "0aada1d5df72cb13161a78dff965e02575930f3ea9550e778f6fa45f3f4e2537",
urls = [
"https://github.com/zegl/rules_codeowners/archive/27fe3bbe6e5b0df196e360fc9e081835f22a10be.zip",
],
)
maybe(
http_archive,
name = "rules_pkg",
urls = [
"https://github.com/bazelbuild/rules_pkg/releases/download/0.2.6-1/rules_pkg-0.2.6.tar.gz",
"https://mirror.bazel.build/github.com/bazelbuild/rules_pkg/releases/download/0.2.6/rules_pkg-0.2.6.tar.gz",
],
sha256 = "aeca78988341a2ee1ba097641056d168320ecc51372ef7ff8e64b139516a4937",
)
maybe(
http_archive,
name = "io_bazel_rules_webtesting",
sha256 = "e9abb7658b6a129740c0b3ef6f5a2370864e102a5ba5ffca2cea565829ed825a",
urls = ["https://github.com/bazelbuild/rules_webtesting/releases/download/0.3.5/rules_webtesting.tar.gz"],
)
| 38.933333
| 134
| 0.685502
|
d3c1642333f1db1e8379565c1911deabc65e190f
| 2,099
|
py
|
Python
|
examples/dfp/v201505/contact_service/get_uninvited_contacts.py
|
wbrp/googleads-python-lib
|
c0f8ce6c4acfe88ce8f913a4f0e0e92b548e1022
|
[
"Apache-2.0"
] | 1
|
2020-05-23T11:32:32.000Z
|
2020-05-23T11:32:32.000Z
|
examples/dfp/v201505/contact_service/get_uninvited_contacts.py
|
cmm08/googleads-python-lib
|
97743df32eff92cf00cb8beaddcda42dfa0a37f4
|
[
"Apache-2.0"
] | null | null | null |
examples/dfp/v201505/contact_service/get_uninvited_contacts.py
|
cmm08/googleads-python-lib
|
97743df32eff92cf00cb8beaddcda42dfa0a37f4
|
[
"Apache-2.0"
] | 2
|
2018-04-20T02:16:33.000Z
|
2020-11-12T20:58:54.000Z
|
#!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets all contacts that aren't invited yet.
To create contacts, run create_contacts.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
# Import appropriate modules from the client library.
from googleads import dfp
def main(client):
# Initialize appropriate service.
contact_service = client.GetService('ContactService', version='v201505')
# Create statement object to only select contacts that are uninvited.
values = [{
'key': 'status',
'value': {
'xsi_type': 'TextValue',
'value': 'UNINVITED'
}
}]
query = 'WHERE status = :status'
statement = dfp.FilterStatement(query, values)
# Get contacts by statement.
while True:
response = contact_service.getContactsByStatement(
statement.ToStatement())
if 'results' in response:
# Display results.
for contact in response['results']:
print ('Contact with ID \'%s\' and name \'%s\' was found.'
% (contact['id'], contact['name']))
statement.offset += dfp.SUGGESTED_PAGE_LIMIT
else:
break
print '\nNumber of results found: %s' % response['totalResultSetSize']
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client)
| 31.80303
| 77
| 0.708433
|
ed9d57fe8edbace6d5bfc309507e23602ae1a69d
| 382
|
py
|
Python
|
dataset/DeepFakes/faceswap-master/lib/multithreading.py
|
MrThiago/FaceForensics
|
1806e70d0dd2294a12a8afd1c3f59d6ecac639bf
|
[
"MIT"
] | 1,930
|
2018-04-20T14:52:01.000Z
|
2022-03-30T13:53:31.000Z
|
dataset/DeepFakes/faceswap-master/lib/multithreading.py
|
chrisgorgo/FaceForensics
|
a815daa9ebb7c12240a4b7162c431af0e1b959fa
|
[
"MIT"
] | 68
|
2019-02-14T09:09:02.000Z
|
2022-03-23T08:55:23.000Z
|
dataset/DeepFakes/faceswap-master/lib/multithreading.py
|
chrisgorgo/FaceForensics
|
a815daa9ebb7c12240a4b7162c431af0e1b959fa
|
[
"MIT"
] | 499
|
2018-04-20T11:27:11.000Z
|
2022-03-29T16:29:50.000Z
|
import multiprocessing as mp
method = None
def pool_process(method_to_run, data, processes=None):
global method
if processes is None:
processes = mp.cpu_count()
method = method_to_run
pool = mp.Pool(processes=processes)
for i in pool.imap_unordered(runner, data):
yield i if i is not None else 0
def runner(item):
return method(item)
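# Illustrative usage sketch (not part of the original module): pool_process() is a
# generator, so results are yielded lazily and in completion order. The worker below is
# hypothetical; on platforms that spawn rather than fork, the call should sit under an
# ``if __name__ == "__main__":`` guard.
#
#   def square(x):
#       return x * x
#
#   for value in pool_process(square, range(10), processes=2):
#       print(value)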
| 22.470588
| 54
| 0.685864
|
2abeef8bfbb079384643c58d0177f6538c878d40
| 4,818
|
py
|
Python
|
g2pw/utils.py
|
ishine/g2pW
|
e4e25f0607f7499c2dd0ed9bcb35e60d7676d323
|
[
"Apache-2.0"
] | 3
|
2022-03-24T08:31:45.000Z
|
2022-03-29T16:46:07.000Z
|
g2pw/utils.py
|
ishine/g2pW
|
e4e25f0607f7499c2dd0ed9bcb35e60d7676d323
|
[
"Apache-2.0"
] | 1
|
2022-03-22T07:30:46.000Z
|
2022-03-22T07:30:46.000Z
|
g2pw/utils.py
|
ishine/g2pW
|
e4e25f0607f7499c2dd0ed9bcb35e60d7676d323
|
[
"Apache-2.0"
] | 2
|
2022-03-23T00:54:24.000Z
|
2022-03-24T08:31:47.000Z
|
import re
import logging
import sys
class RunningAverage:
def __init__(self):
self.values = []
def add(self, val):
self.values.append(val)
def add_all(self, vals):
self.values += vals
def get(self):
if len(self.values) == 0:
return None
return sum(self.values) / len(self.values)
def flush(self):
self.values = []
def wordize_and_map(text):
words = []
index_map_from_text_to_word = []
index_map_from_word_to_text = []
while len(text) > 0:
match_space = re.match(r'^ +', text)
if match_space:
space_str = match_space.group(0)
index_map_from_text_to_word += [None] * len(space_str)
text = text[len(space_str):]
continue
match_en = re.match(r'^[a-zA-Z0-9]+', text)
if match_en:
en_word = match_en.group(0)
word_start_pos = len(index_map_from_text_to_word)
word_end_pos = word_start_pos + len(en_word)
index_map_from_word_to_text.append((word_start_pos, word_end_pos))
index_map_from_text_to_word += [len(words)] * len(en_word)
words.append(en_word)
text = text[len(en_word):]
else:
word_start_pos = len(index_map_from_text_to_word)
word_end_pos = word_start_pos + 1
index_map_from_word_to_text.append((word_start_pos, word_end_pos))
index_map_from_text_to_word += [len(words)]
words.append(text[0])
text = text[1:]
return words, index_map_from_text_to_word, index_map_from_word_to_text
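# Illustrative example (not part of the original module): wordize_and_map() groups
# consecutive ASCII alphanumerics into one word, maps spaces to None, and treats every
# other character as its own word. For the hypothetical input "hi 你好":
#
#   words                        -> ['hi', '你', '好']
#   index_map_from_text_to_word  -> [0, 0, None, 1, 2]
#   index_map_from_word_to_text  -> [(0, 2), (3, 4), (4, 5)]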
def tokenize_and_map(tokenizer, text):
words, text2word, word2text = wordize_and_map(text)
tokens = []
index_map_from_token_to_text = []
for word, (word_start, word_end) in zip(words, word2text):
word_tokens = tokenizer.tokenize(word)
if len(word_tokens) == 0 or word_tokens == ['[UNK]']:
index_map_from_token_to_text.append((word_start, word_end))
tokens.append('[UNK]')
else:
current_word_start = word_start
for word_token in word_tokens:
word_token_len = len(re.sub(r'^##', '', word_token))
index_map_from_token_to_text.append(
(current_word_start, current_word_start + word_token_len))
current_word_start = current_word_start + word_token_len
tokens.append(word_token)
index_map_from_text_to_token = text2word
for i, (token_start, token_end) in enumerate(index_map_from_token_to_text):
for token_pos in range(token_start, token_end):
index_map_from_text_to_token[token_pos] = i
return tokens, index_map_from_text_to_token, index_map_from_token_to_text
def _load_config(config_path):
import importlib.util
spec = importlib.util.spec_from_file_location('__init__', config_path)
config = importlib.util.module_from_spec(spec)
spec.loader.exec_module(config)
return config
default_config_dict = {
'manual_seed': 1313,
'model_source': 'bert-base-chinese',
'window_size': 32,
'num_workers': 2,
'use_mask': True,
'use_char_phoneme': False,
'use_conditional': True,
'param_conditional': {
'affect_location': 'softmax',
'bias': True,
'char-linear': True,
'pos-linear': False,
'char+pos-second': True,
'char+pos-second_lowrank': False,
'lowrank_size': 0,
'char+pos-second_fm': False,
'fm_size': 0,
'fix_mode': None,
'count_json': 'train.count.json'
},
'lr': 5e-5,
'val_interval': 200,
'num_iter': 10000,
'use_focal': False,
'param_focal': {
'alpha': 0.0,
'gamma': 0.7
},
'use_pos': True,
'param_pos ': {
'weight': 0.1,
'pos_joint_training': True,
'train_pos_path': 'train.pos',
'valid_pos_path': 'dev.pos',
'test_pos_path': 'test.pos'
}
}
def load_config(config_path, use_default=False):
config = _load_config(config_path)
if use_default:
for attr, val in default_config_dict.items():
if not hasattr(config, attr):
setattr(config, attr, val)
elif isinstance(val, dict):
d = getattr(config, attr)
for dict_k, dict_v in val.items():
if dict_k not in d:
d[dict_k] = dict_v
return config
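# Illustrative usage sketch (not part of the original module; the path is hypothetical):
# a config file is just a plain Python module, and use_default=True back-fills any
# attribute missing from it (including nested dict keys) from default_config_dict above.
#
#   config = load_config('path/to/config.py', use_default=True)
#   print(config.model_source, config.window_size)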
def get_logger(file_path):
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
output_file_handler = logging.FileHandler(file_path)
stdout_handler = logging.StreamHandler(sys.stdout)
logger.addHandler(output_file_handler)
logger.addHandler(stdout_handler)
return logger
| 29.740741
| 79
| 0.615816
|
75f7e973e6cc8b0752c0d0fdcafb190907b2ef4e
| 3,858
|
py
|
Python
|
src/lambda_router/config.py
|
jpaidoussi/lambda-router
|
c7909e6667f2fc837f34f54ccffcc409e33cebb6
|
[
"BSD-3-Clause"
] | null | null | null |
src/lambda_router/config.py
|
jpaidoussi/lambda-router
|
c7909e6667f2fc837f34f54ccffcc409e33cebb6
|
[
"BSD-3-Clause"
] | null | null | null |
src/lambda_router/config.py
|
jpaidoussi/lambda-router
|
c7909e6667f2fc837f34f54ccffcc409e33cebb6
|
[
"BSD-3-Clause"
] | 1
|
2021-03-05T06:50:26.000Z
|
2021-03-05T06:50:26.000Z
|
import os
from typing import Any, Dict, Mapping
from . import exceptions
def str_to_bool(bool_as_string: str) -> bool:
"""
A converter that converts a string representation of ``True`` into a boolean.
    The following (case insensitive) strings will return a ``True`` result:
'true', 't', '1', 'yes', 'y', everything else will return a ``False``.
:param bool_as_string: The string to be converted to a bool.
:type bool_as_string: str
:rtype: bool
:raises TypeError: Raised when any type other than a string is passed.
"""
if not isinstance(bool_as_string, str):
raise TypeError("Only string types supported")
return bool_as_string.lower() in ["true", "t", "1", "yes", "y"]
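# Illustrative sketch (not part of the original module) of the converter's behaviour,
# kept as an uncalled helper so importing this module stays side-effect free.
def _str_to_bool_examples() -> None:
    assert str_to_bool("True") is True
    assert str_to_bool("y") is True
    assert str_to_bool("0") is False
    assert str_to_bool("no") is False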
def _get_field_value(value: Any, *, params: Mapping[str, Any]) -> Any:
"""
Returns the given value with any converters applied.
"""
converter = params.get("converter", None)
if converter is not None:
return converter(value)
return value
def _filter_with_template(
unfiltered: Mapping[str, Any],
*,
template: Mapping[str, Any],
) -> Dict[str, Any]:
"""
Applies a given config template to a given mapping.
:param unfiltered: The mapping to apply the config template to.
:param template: The config template to apply.
"""
filtered: dict = {}
for field, params in template.items():
if params.get("required", False):
# Ensure required fields are present in the unfiltered dict.
try:
filtered[field] = _get_field_value(unfiltered[field], params=params)
except KeyError:
raise exceptions.ConfigError(f"Required config parameters ({field}) is missing")
else:
# Use the optional value from the unfiltered dict, otherwise fallback
# to the default from the template.
if field in unfiltered:
filtered[field] = _get_field_value(unfiltered[field], params=params)
else:
if "default" in params:
filtered[field] = params["default"]
return filtered
class Config(dict):
"""
A subclass of ``dict`` that adds helper methods to load values
from environment variables or another dictionary, optionally
appling a congifuration template to the loaded variables.
"""
def load_from_dict(
self,
unfiltered: Mapping[str, Any],
*,
prefix: str = None,
template: Mapping[str, Any] = None,
) -> None:
"""
        Updates the config from an existing mapping, optionally applying
        a configuration template or filtering out fields that don't
match a given prefix.
:param unfiltered: The mapping to update the config from.
:param prefix: The prefix to check for.
:type prefix: str
:param template: The config template to apply.
"""
if template is not None:
filtered = _filter_with_template(unfiltered, template=template)
else:
if prefix is not None:
prefix_length = len(prefix)
filtered = {key[prefix_length:]: value for key, value in unfiltered.items() if key.startswith(prefix)}
else:
filtered = unfiltered
self.update(filtered)
def load_from_environment(self, *, prefix: str = None, template: Mapping[str, Any] = None) -> None:
"""
        Updates the config from the current running environment, optionally applying
        a configuration template or filtering out fields that don't
match a given prefix.
:param prefix: The prefix to check for.
:type prefix: str
:param template: The config template to apply.
"""
self.load_from_dict(os.environ, prefix=prefix, template=template)
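# Illustrative usage sketch (not part of the original module; the field names are
# hypothetical), kept as an uncalled helper so importing the module has no side effects.
def _config_example() -> "Config":
    template = {
        "TABLE_NAME": {"required": True},
        "DEBUG": {"required": False, "default": False, "converter": str_to_bool},
    }
    config = Config()
    config.load_from_dict({"TABLE_NAME": "orders", "DEBUG": "yes"}, template=template)
    # config is now {"TABLE_NAME": "orders", "DEBUG": True}
    return config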
| 35.072727
| 118
| 0.629601
|
28b0251dfe32632ac393f6298bacdebad9ec7e6a
| 298
|
py
|
Python
|
ch0/ch0-12.py
|
ValentynaGorbachenko/cd2
|
ad9e05092ddca9bfef29f185b23fdf5b22099e05
|
[
"MIT"
] | null | null | null |
ch0/ch0-12.py
|
ValentynaGorbachenko/cd2
|
ad9e05092ddca9bfef29f185b23fdf5b22099e05
|
[
"MIT"
] | null | null | null |
ch0/ch0-12.py
|
ValentynaGorbachenko/cd2
|
ad9e05092ddca9bfef29f185b23fdf5b22099e05
|
[
"MIT"
] | null | null | null |
# Whoa, that sucker's huge
'''
Add odd integers from -300000 to 300000, and print the final sum
'''
def oddSum(beg, end):
sum=0
    if beg > end:
        print "Error"
        return
    if beg+end==0: # symmetric range: the odd values cancel, so the sum stays 0
print sum
else:
for i in range(beg, end+1):
if i%2==1:
sum+=i
print sum
# print sum
oddSum(10,27)
| 16.555556
| 70
| 0.624161
|
7d2330719a3d114b3844a433390b4717a48bca4a
| 2,337
|
py
|
Python
|
EducationalRound108/F_Chests_and_Keys.py
|
tqa236/codeforces
|
81ad7bdb7786455f83d48d59a8884f62ded66caf
|
[
"MIT"
] | null | null | null |
EducationalRound108/F_Chests_and_Keys.py
|
tqa236/codeforces
|
81ad7bdb7786455f83d48d59a8884f62ded66caf
|
[
"MIT"
] | null | null | null |
EducationalRound108/F_Chests_and_Keys.py
|
tqa236/codeforces
|
81ad7bdb7786455f83d48d59a8884f62ded66caf
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import os
import sys
from io import BytesIO, IOBase
import math
from collections import Counter
def func(n, m, chests, keys, prices):
if sum(keys) < sum(chests):
return -1
pass
def main():
n, m = [int(i) for i in parse_input().split()]
chests = [int(i) for i in parse_input().split()]
keys = [int(i) for i in parse_input().split()]
prices = []
for i in range(n):
prices.append([int(i) for i in parse_input().split()])
print(func(n, m, chests, keys, prices))
# region fastio
# BUFSIZE = 8192
# class FastIO(IOBase):
# newlines = 0
# def __init__(self, file):
# self._fd = file.fileno()
# self.buffer = BytesIO()
# self.writable = "x" in file.mode or "r" not in file.mode
# self.write = self.buffer.write if self.writable else None
# def read(self):
# while True:
# b = os.read(self._fd, max(os.fstat(self._fd).st_size, BUFSIZE))
# if not b:
# break
# ptr = self.buffer.tell()
# self.buffer.seek(0, 2), self.buffer.write(b), self.buffer.seek(ptr)
# self.newlines = 0
# return self.buffer.read()
# def readline(self):
# while self.newlines == 0:
# b = os.read(self._fd, max(os.fstat(self._fd).st_size, BUFSIZE))
# self.newlines = b.count(b"\n") + (not b)
# ptr = self.buffer.tell()
# self.buffer.seek(0, 2), self.buffer.write(b), self.buffer.seek(ptr)
# self.newlines -= 1
# return self.buffer.readline()
# def flush(self):
# if self.writable:
# os.write(self._fd, self.buffer.getvalue())
# self.buffer.truncate(0), self.buffer.seek(0)
# class IOWrapper(IOBase):
# def __init__(self, file):
# self.buffer = FastIO(file)
# self.flush = self.buffer.flush
# self.writable = self.buffer.writable
# self.write = lambda s: self.buffer.write(s.encode("ascii"))
# self.read = lambda: self.buffer.read().decode("ascii")
# self.readline = lambda: self.buffer.readline().decode("ascii")
# sys.stdin, sys.stdout = IOWrapper(sys.stdin), IOWrapper(sys.stdout)
parse_input = lambda: sys.stdin.readline().rstrip("\r\n")
# endregion
if __name__ == "__main__":
main()
| 28.5
| 81
| 0.579375
|
5087b6c7840379cd600500b431ffb25ab9ab05d2
| 1,058
|
py
|
Python
|
software/glasgow/auth/challenge.py
|
DurandA/glasgow
|
ba322cb1fc1a974adeeea1efe41e959148f296f3
|
[
"Apache-2.0",
"0BSD"
] | null | null | null |
software/glasgow/auth/challenge.py
|
DurandA/glasgow
|
ba322cb1fc1a974adeeea1efe41e959148f296f3
|
[
"Apache-2.0",
"0BSD"
] | null | null | null |
software/glasgow/auth/challenge.py
|
DurandA/glasgow
|
ba322cb1fc1a974adeeea1efe41e959148f296f3
|
[
"Apache-2.0",
"0BSD"
] | null | null | null |
import usb1
import asyncio
import os
from ..device.hardware import GlasgowHardwareDevice
from ecdsa import NIST256p, VerifyingKey
from ecdsa.ellipticcurve import Point as ECDSA_Point
from binascii import hexlify, unhexlify
x, y = (
int('4addc7fb8998e8d9864d96a8899bab108ba47abe9087de214ded324a47fd87df', 16),
int('4c83f25c2a1923e6b0b7a65e618af72215f4b0b042454755ef738f26186d192f', 16)
)
point = ECDSA_Point(NIST256p.curve, x, y)
vk = VerifyingKey.from_public_point(point, curve=NIST256p)
REQ_ATECC_SIGN = 0x20
REQ_ATECC_CERT = 0x21
async def main(loop):
device = GlasgowHardwareDevice()
cert = await device.control_read(usb1.REQUEST_TYPE_VENDOR, REQ_ATECC_CERT, 0, 0, 32)
challenge = os.urandom(32)
await device.control_write(usb1.REQUEST_TYPE_VENDOR, REQ_ATECC_SIGN, 0, 0, challenge)
signature = await device.control_read(usb1.REQUEST_TYPE_VENDOR, REQ_ATECC_SIGN, 0, 0, 64)
vk.verify_digest(signature, challenge)
if __name__ == "__main__":
loop = asyncio.get_event_loop()
loop.run_until_complete(main(loop))
| 34.129032
| 93
| 0.783554
|
612b4e0474a1886fa8801bde7fdd29ce0a9c7e21
| 4,668
|
py
|
Python
|
degmo/vae/vae.py
|
IcarusWizard/Deep-Generative-Models
|
4117c11ad944bdeff106a80adbb3642a076af64e
|
[
"MIT"
] | 2
|
2019-11-21T15:50:59.000Z
|
2019-12-17T02:44:19.000Z
|
degmo/vae/vae.py
|
IcarusWizard/Deep-Generative-Models
|
4117c11ad944bdeff106a80adbb3642a076af64e
|
[
"MIT"
] | null | null | null |
degmo/vae/vae.py
|
IcarusWizard/Deep-Generative-Models
|
4117c11ad944bdeff106a80adbb3642a076af64e
|
[
"MIT"
] | 1
|
2021-07-02T05:49:29.000Z
|
2021-07-02T05:49:29.000Z
|
import torch
from torch.functional import F
import numpy as np
from .modules import MLPEncoder, MLPDecoder, ConvEncoder, ConvDecoder
from .utils import get_kl, LOG2PI
from .trainer import VAETrainer
class VAE(torch.nn.Module):
r"""
Variational Auto-Encoder: http://arxiv.org/abs/1312.6114
Inputs:
c : int, channel of the input image
h : int, height of the input image
w : int, width of the input image
latent_dim : int, dimension of the latent variable
network_type : str, type of the encoder and decoder, choose from conv and mlp, default: conv
        config : dict, parameters for constructing the encoder and decoder
output_type : str, type of the distribution p(x|z), choose from fix_std(std=1), gauss and bernoulli, default: gauss
        use_mce : bool, whether to compute KL by Monte Carlo Estimation, default: False
"""
def __init__(self, c=3, h=32, w=32, latent_dim=2, network_type='conv', config={},
output_type='gauss', use_mce=False):
super().__init__()
self.latent_dim = latent_dim
self.output_type = output_type
self.use_mce = use_mce
self.input_dim = c * h * w
output_c = 2 * c if self.output_type == 'gauss' else c
if network_type == 'mlp':
self.encoder = MLPEncoder(c, h, w, latent_dim, **config)
self.decoder = MLPDecoder(output_c, h, w, latent_dim, **config)
elif network_type == 'conv':
self.encoder = ConvEncoder(c, h, w, latent_dim, **config)
self.decoder = ConvDecoder(output_c, h, w, latent_dim, **config)
else:
raise ValueError('unsupport network type: {}'.format(network_type))
self.prior = torch.distributions.Normal(0, 1)
def forward(self, x):
mu, logs = torch.chunk(self.encoder(x), 2, dim=1)
logs = torch.clamp_max(logs, 10) # limit the max logs, prevent inf in kl
# reparameterize trick
epsilon = torch.randn_like(logs)
z = mu + epsilon * torch.exp(logs)
# compute kl divergence
        if self.use_mce: # Use Monte Carlo Estimation
# kl = log q_{\phi}(z|x) - log p_{\theta}(z)
kl = torch.sum(- epsilon ** 2 / 2 - LOG2PI - logs - self.prior.log_prob(z), dim=1)
else:
kl = get_kl(mu, logs)
_x = self.decoder(z)
# compute reconstruction loss
if self.output_type == 'fix_std':
            # output is a gauss with a fixed variance of 1,
            # reconstruction loss is MSE plus a constant
reconstruction_loss = (x - _x) ** 2 / 2 + LOG2PI
elif self.output_type == 'gauss':
# output is a gauss with diagonal variance
_mu, _logs = torch.chunk(_x, 2, dim=1)
_logs = torch.tanh(_logs)
reconstruction_loss = (x - _mu) ** 2 / 2 * torch.exp(-2 * _logs) + LOG2PI + _logs
elif self.output_type == 'bernoulli':
            # output is the logit of a bernoulli distribution,
# reconstruction loss is cross-entropy
p = torch.sigmoid(_x)
reconstruction_loss = - x * torch.log(p + 1e-8) - (1 - x) * torch.log(1 - p + 1e-8)
kl = torch.mean(kl)
reconstruction_loss = torch.mean(torch.sum(reconstruction_loss, dim=(1, 2, 3)))
return kl + reconstruction_loss, {
"KL divergence" : kl.item(),
"reconstruction loss" : reconstruction_loss.item(),
}
def encode(self, x):
mu, logs = torch.chunk(self.encoder(x), 2, dim=1)
logs = torch.clamp_max(logs, 10)
return mu
def decode(self, z, deterministic=True):
_x = self.decoder(z)
if self.output_type == 'fix_std':
x = _x
if not deterministic:
x = x + torch.randn_like(x)
elif self.output_type == 'gauss':
_mu, _logs = torch.chunk(_x, 2, dim=1)
_logs = torch.tanh(_logs)
x = _mu
if not deterministic:
x = x + torch.exp(_logs) * torch.randn_like(x)
elif self.output_type == 'bernoulli':
p = torch.sigmoid(_x)
if not deterministic:
x = (torch.rand_like(p) < p).float()
else:
x = (p > 0.5).float()
return x
def sample(self, number=1000, deterministic=True):
device = next(self.parameters()).device
z = torch.randn(number, self.latent_dim, device=device)
return self.decode(z, deterministic=deterministic)
def get_trainer(self):
return VAETrainer
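# Illustrative usage sketch (not part of the original module): building a VAE for
# 3x32x32 images and running one forward pass. The shapes and hyper-parameters below are
# assumptions and rely on the default ConvEncoder/ConvDecoder configs from .modules.
#
#   model = VAE(c=3, h=32, w=32, latent_dim=16, network_type='conv', output_type='gauss')
#   x = torch.rand(8, 3, 32, 32)
#   loss, stats = model(x)            # stats has 'KL divergence' and 'reconstruction loss'
#   samples = model.sample(number=4)  # (4, 3, 32, 32)
#   codes = model.encode(x)           # (8, 16)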
| 37.645161
| 127
| 0.578406
|
d99471b8b9c22a202bc93f7497dfd6ecc1031038
| 577
|
py
|
Python
|
8kyu/test_pre-fizzbuzz_workout_1.py
|
adun/codewars.py
|
89e7d81e9ca05a432007d634892c1cba28f5b715
|
[
"MIT"
] | null | null | null |
8kyu/test_pre-fizzbuzz_workout_1.py
|
adun/codewars.py
|
89e7d81e9ca05a432007d634892c1cba28f5b715
|
[
"MIT"
] | null | null | null |
8kyu/test_pre-fizzbuzz_workout_1.py
|
adun/codewars.py
|
89e7d81e9ca05a432007d634892c1cba28f5b715
|
[
"MIT"
] | null | null | null |
# This is the first step to understanding FizzBuzz.
# Your inputs: a positive integer, n, greater than or equal to one. n is provided,
# you have NO CONTROL over its value.
# Your expected output is an array of positive integers from 1 to n (inclusive).
# Your job is to write an algorithm that gets you from the input to the output.
def pre_fizz(n):
return list(range(1, n+1))
def test_pre_fizz():
assert pre_fizz(1) == [1]
assert pre_fizz(2) == [1,2]
assert pre_fizz(3) == [1,2,3]
assert pre_fizz(4) == [1,2,3,4]
assert pre_fizz(5) == [1,2,3,4,5]
| 32.055556
| 82
| 0.674177
|
0631c3e675c28455cba691021c7efe417a1e961f
| 856
|
py
|
Python
|
resources/DataMining/APIs_Call/APICallSample.py
|
andy897221/Proof-of-Play-Flow-Demo
|
018ec382801f1363711b7680e728535a2ac94d26
|
[
"MIT"
] | null | null | null |
resources/DataMining/APIs_Call/APICallSample.py
|
andy897221/Proof-of-Play-Flow-Demo
|
018ec382801f1363711b7680e728535a2ac94d26
|
[
"MIT"
] | null | null | null |
resources/DataMining/APIs_Call/APICallSample.py
|
andy897221/Proof-of-Play-Flow-Demo
|
018ec382801f1363711b7680e728535a2ac94d26
|
[
"MIT"
] | null | null | null |
# the sampled days counted here are defined by randSampledDailyPlayerCount.py
import math, time, requests, json
url = 'https://api.opendota.com/api/explorer?sql={}'
SQL = "SELECT COUNT(match_id) FROM public_matches WHERE lobby_type = 7 and game_mode = 22 and start_time >= {} and start_time < {}"
day_start_time = 1533081600
aDay = 86400
null = None
false = False
numOfMatchPerDay = [65395, 68107, 69458, 75243, 76506, 69145, 67965, 67703, 68631, 67484, 72571, 77109, 66604, 60311, 60784, 65396, 72504, 77111, 67813, 67429, 67628, 65283, 68951, 74079, 74079, 83148, 77678, 76604, 75061, 76196, 77657]
for i in range(31,31):
curSQL = SQL.format(day_start_time+(aDay*i), day_start_time+(aDay*(i+1)))
res = json.loads(requests.get(url.format(curSQL)).text)
print(res)
numOfMatchPerDay += [res["rows"][0]["count"]]
print(numOfMatchPerDay)
time.sleep(1)
| 42.8
| 236
| 0.71028
|
8d55f8262d8f6ce9aa7d1b75088011b4d2641fb9
| 46,095
|
py
|
Python
|
contrib/runners/python_runner/tests/unit/test_pythonrunner.py
|
timgates42/st2
|
0e8ae756f30ffe2e017c64bff67830abdee7f7c9
|
[
"Apache-2.0"
] | null | null | null |
contrib/runners/python_runner/tests/unit/test_pythonrunner.py
|
timgates42/st2
|
0e8ae756f30ffe2e017c64bff67830abdee7f7c9
|
[
"Apache-2.0"
] | 15
|
2021-02-11T22:58:54.000Z
|
2021-08-06T18:03:47.000Z
|
contrib/runners/python_runner/tests/unit/test_pythonrunner.py
|
timgates42/st2
|
0e8ae756f30ffe2e017c64bff67830abdee7f7c9
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
import re
import sys
import six
import mock
from oslo_config import cfg
from python_runner import python_runner
from st2actions.container.base import RunnerContainer
from st2common.runners.base_action import Action
from st2common.runners.utils import get_action_class_instance
from st2common.services import config as config_service
from st2common.constants.action import ACTION_OUTPUT_RESULT_DELIMITER
from st2common.constants.action import LIVEACTION_STATUS_SUCCEEDED, LIVEACTION_STATUS_FAILED
from st2common.constants.action import LIVEACTION_STATUS_TIMED_OUT
from st2common.constants.action import MAX_PARAM_LENGTH
from st2common.constants.pack import SYSTEM_PACK_NAME
from st2common.persistence.execution import ActionExecutionOutput
from python_runner.python_action_wrapper import PythonActionWrapper
from st2tests.base import RunnerTestCase
from st2tests.base import CleanDbTestCase
from st2tests.base import blocking_eventlet_spawn
from st2tests.base import make_mock_stream_readline
from st2tests.fixturesloader import assert_submodules_are_checked_out
import st2tests.base as tests_base
PASCAL_ROW_ACTION_PATH = os.path.join(tests_base.get_resources_path(), 'packs',
'pythonactions/actions/pascal_row.py')
ECHOER_ACTION_PATH = os.path.join(tests_base.get_resources_path(), 'packs',
'pythonactions/actions/echoer.py')
TEST_ACTION_PATH = os.path.join(tests_base.get_resources_path(), 'packs',
'pythonactions/actions/test.py')
PATHS_ACTION_PATH = os.path.join(tests_base.get_resources_path(), 'packs',
'pythonactions/actions/python_paths.py')
ACTION_1_PATH = os.path.join(tests_base.get_fixtures_path(),
'packs/dummy_pack_9/actions/list_repos_doesnt_exist.py')
ACTION_2_PATH = os.path.join(tests_base.get_fixtures_path(),
'packs/dummy_pack_9/actions/invalid_syntax.py')
NON_SIMPLE_TYPE_ACTION = os.path.join(tests_base.get_resources_path(), 'packs',
'pythonactions/actions/non_simple_type.py')
PRINT_VERSION_ACTION = os.path.join(tests_base.get_fixtures_path(), 'packs',
'test_content_version/actions/print_version.py')
PRINT_VERSION_LOCAL_MODULE_ACTION = os.path.join(tests_base.get_fixtures_path(), 'packs',
'test_content_version/actions/print_version_local_import.py')
PRINT_CONFIG_ITEM_ACTION = os.path.join(tests_base.get_resources_path(), 'packs',
'pythonactions/actions/print_config_item_doesnt_exist.py')
PRINT_TO_STDOUT_STDERR_ACTION = os.path.join(tests_base.get_resources_path(), 'packs',
'pythonactions/actions/print_to_stdout_and_stderr.py')
# Note: runner inherits parent args which doesn't work with tests since tests pass additional
# unrecognized args
mock_sys = mock.Mock()
mock_sys.argv = []
mock_sys.executable = sys.executable
MOCK_EXECUTION = mock.Mock()
MOCK_EXECUTION.id = '598dbf0c0640fd54bffc688b'
@mock.patch('python_runner.python_runner.sys', mock_sys)
class PythonRunnerTestCase(RunnerTestCase, CleanDbTestCase):
register_packs = True
register_pack_configs = True
@classmethod
def setUpClass(cls):
super(PythonRunnerTestCase, cls).setUpClass()
assert_submodules_are_checked_out()
def test_runner_creation(self):
runner = python_runner.get_runner()
self.assertIsNotNone(runner, 'Creation failed. No instance.')
self.assertEqual(type(runner), python_runner.PythonRunner, 'Creation failed. No instance.')
def test_action_returns_non_serializable_result(self):
        # Action returns a non-simple type which can't be serialized; verify the result is a simple str()
# representation of the result
runner = self._get_mock_runner_obj()
runner.entry_point = NON_SIMPLE_TYPE_ACTION
runner.pre_run()
(status, output, _) = runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
self.assertIsNotNone(output)
if six.PY2:
expected_result_re = (r"\[{'a': '1'}, {'h': 3, 'c': 2}, {'e': "
r"<non_simple_type.Test object at .*?>}\]")
else:
expected_result_re = (r"\[{'a': '1'}, {'c': 2, 'h': 3}, {'e': "
r"<non_simple_type.Test object at .*?>}\]")
match = re.match(expected_result_re, output['result'])
self.assertTrue(match)
def test_simple_action_with_result_no_status(self):
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(status, output, _) = runner.run({'row_index': 5})
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
self.assertIsNotNone(output)
self.assertEqual(output['result'], [1, 5, 10, 10, 5, 1])
def test_simple_action_with_result_as_None_no_status(self):
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(status, output, _) = runner.run({'row_index': 'b'})
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
self.assertIsNotNone(output)
self.assertEqual(output['exit_code'], 0)
self.assertEqual(output['result'], None)
def test_simple_action_timeout(self):
timeout = 0
runner = self._get_mock_runner_obj()
runner.runner_parameters = {python_runner.RUNNER_TIMEOUT: timeout}
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(status, output, _) = runner.run({'row_index': 4})
self.assertEqual(status, LIVEACTION_STATUS_TIMED_OUT)
self.assertIsNotNone(output)
self.assertEqual(output['result'], 'None')
self.assertEqual(output['error'], 'Action failed to complete in 0 seconds')
self.assertEqual(output['exit_code'], -9)
def test_simple_action_with_status_succeeded(self):
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(status, output, _) = runner.run({'row_index': 4})
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
self.assertIsNotNone(output)
self.assertEqual(output['result'], [1, 4, 6, 4, 1])
def test_simple_action_with_status_failed(self):
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(status, output, _) = runner.run({'row_index': 'a'})
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertIsNotNone(output)
self.assertEqual(output['result'], "This is suppose to fail don't worry!!")
def test_simple_action_with_status_complex_type_returned_for_result(self):
# Result containing a complex type shouldn't break the returning a tuple with status
# behavior
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(status, output, _) = runner.run({'row_index': 'complex_type'})
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertIsNotNone(output)
self.assertIn('<pascal_row.PascalRowAction object at', output['result'])
def test_simple_action_with_status_failed_result_none(self):
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(status, output, _) = runner.run({'row_index': 'c'})
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertIsNotNone(output)
self.assertEqual(output['result'], None)
def test_exception_in_simple_action_with_invalid_status(self):
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
self.assertRaises(ValueError,
runner.run, action_parameters={'row_index': 'd'})
def test_simple_action_no_status_backward_compatibility(self):
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(status, output, _) = runner.run({'row_index': 'e'})
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
self.assertIsNotNone(output)
self.assertEqual(output['result'], [1, 2])
def test_simple_action_config_value_provided_overriden_in_datastore(self):
pack = 'dummy_pack_5'
user = 'joe'
# No values provided in the datastore
runner = self._get_mock_runner_obj_from_container(pack=pack, user=user)
self.assertEqual(runner._config['api_key'], 'some_api_key') # static value
self.assertEqual(runner._config['regions'], ['us-west-1']) # static value
self.assertEqual(runner._config['api_secret'], None)
self.assertEqual(runner._config['private_key_path'], None)
# api_secret overriden in the datastore (user scoped value)
config_service.set_datastore_value_for_config_key(pack_name='dummy_pack_5',
key_name='api_secret',
user='joe',
value='foosecret',
secret=True)
# private_key_path overriden in the datastore (global / non-user scoped value)
config_service.set_datastore_value_for_config_key(pack_name='dummy_pack_5',
key_name='private_key_path',
value='foopath')
runner = self._get_mock_runner_obj_from_container(pack=pack, user=user)
self.assertEqual(runner._config['api_key'], 'some_api_key') # static value
self.assertEqual(runner._config['regions'], ['us-west-1']) # static value
self.assertEqual(runner._config['api_secret'], 'foosecret')
self.assertEqual(runner._config['private_key_path'], 'foopath')
def test_simple_action_fail(self):
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(status, result, _) = runner.run({'row_index': '4'})
self.assertIsNotNone(result)
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
def test_simple_action_no_file(self):
runner = self._get_mock_runner_obj()
runner.entry_point = 'foo.py'
runner.pre_run()
(status, result, _) = runner.run({})
self.assertIsNotNone(result)
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
def test_simple_action_no_entry_point(self):
runner = self._get_mock_runner_obj()
runner.entry_point = ''
expected_msg = 'Action .*? is missing entry_point attribute'
self.assertRaisesRegexp(Exception, expected_msg, runner.run, {})
@mock.patch('st2common.util.concurrency.subprocess_popen')
def test_action_with_user_supplied_env_vars(self, mock_popen):
env_vars = {'key1': 'val1', 'key2': 'val2', 'PYTHONPATH': 'foobar'}
mock_process = mock.Mock()
mock_process.communicate.return_value = ('', '')
mock_popen.return_value = mock_process
runner = self._get_mock_runner_obj()
runner.runner_parameters = {'env': env_vars}
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(_, _, _) = runner.run({'row_index': 4})
_, call_kwargs = mock_popen.call_args
actual_env = call_kwargs['env']
for key, value in env_vars.items():
            # Verify that a blacklisted PYTHONPATH has been filtered out
if key == 'PYTHONPATH':
self.assertTrue(actual_env[key] != value)
else:
self.assertEqual(actual_env[key], value)
@mock.patch('st2common.util.concurrency.subprocess_popen')
@mock.patch('st2common.util.concurrency.spawn')
def test_action_stdout_and_stderr_is_not_stored_in_db_by_default(self, mock_spawn, mock_popen):
# Feature should be disabled by default
values = {'delimiter': ACTION_OUTPUT_RESULT_DELIMITER}
# Note: We need to mock spawn function so we can test everything in single event loop
# iteration
mock_spawn.side_effect = blocking_eventlet_spawn
# No output to stdout and no result (implicit None)
mock_stdout = [
'pre result line 1\n',
'%(delimiter)sTrue%(delimiter)s' % values,
'post result line 1'
]
mock_stderr = [
'stderr line 1\n',
'stderr line 2\n',
'stderr line 3\n'
]
mock_process = mock.Mock()
mock_process.returncode = 0
mock_popen.return_value = mock_process
mock_process.stdout.closed = False
mock_process.stderr.closed = False
mock_process.stdout.readline = make_mock_stream_readline(mock_process.stdout, mock_stdout,
stop_counter=3)
mock_process.stderr.readline = make_mock_stream_readline(mock_process.stderr, mock_stderr,
stop_counter=3)
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(_, output, _) = runner.run({'row_index': 4})
self.assertMultiLineEqual(output['stdout'], 'pre result line 1\npost result line 1')
self.assertMultiLineEqual(output['stderr'], 'stderr line 1\nstderr line 2\nstderr line 3\n')
self.assertEqual(output['result'], 'True')
self.assertEqual(output['exit_code'], 0)
output_dbs = ActionExecutionOutput.get_all()
self.assertEqual(len(output_dbs), 0)
# False is a default behavior so end result should be the same
cfg.CONF.set_override(name='stream_output', group='actionrunner', override=False)
mock_process = mock.Mock()
mock_process.returncode = 0
mock_popen.return_value = mock_process
mock_process.stdout.closed = False
mock_process.stderr.closed = False
mock_process.stdout.readline = make_mock_stream_readline(mock_process.stdout, mock_stdout,
stop_counter=3)
mock_process.stderr.readline = make_mock_stream_readline(mock_process.stderr, mock_stderr,
stop_counter=3)
runner.pre_run()
(_, output, _) = runner.run({'row_index': 4})
self.assertMultiLineEqual(output['stdout'], 'pre result line 1\npost result line 1')
self.assertMultiLineEqual(output['stderr'], 'stderr line 1\nstderr line 2\nstderr line 3\n')
self.assertEqual(output['result'], 'True')
self.assertEqual(output['exit_code'], 0)
output_dbs = ActionExecutionOutput.get_all()
self.assertEqual(len(output_dbs), 0)
@mock.patch('st2common.util.concurrency.subprocess_popen')
@mock.patch('st2common.util.concurrency.spawn')
def test_action_stdout_and_stderr_is_stored_in_the_db(self, mock_spawn, mock_popen):
# Feature is enabled
cfg.CONF.set_override(name='stream_output', group='actionrunner', override=True)
values = {'delimiter': ACTION_OUTPUT_RESULT_DELIMITER}
# Note: We need to mock spawn function so we can test everything in single event loop
# iteration
mock_spawn.side_effect = blocking_eventlet_spawn
# No output to stdout and no result (implicit None)
mock_stdout = [
'pre result line 1\n',
'pre result line 2\n',
'%(delimiter)sTrue%(delimiter)s' % values,
'post result line 1'
]
mock_stderr = [
'stderr line 1\n',
'stderr line 2\n',
'stderr line 3\n'
]
mock_process = mock.Mock()
mock_process.returncode = 0
mock_popen.return_value = mock_process
mock_process.stdout.closed = False
mock_process.stderr.closed = False
mock_process.stdout.readline = make_mock_stream_readline(mock_process.stdout, mock_stdout,
stop_counter=4)
mock_process.stderr.readline = make_mock_stream_readline(mock_process.stderr, mock_stderr,
stop_counter=3)
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(_, output, _) = runner.run({'row_index': 4})
self.assertMultiLineEqual(output['stdout'],
'pre result line 1\npre result line 2\npost result line 1')
self.assertMultiLineEqual(output['stderr'], 'stderr line 1\nstderr line 2\nstderr line 3\n')
self.assertEqual(output['result'], 'True')
self.assertEqual(output['exit_code'], 0)
# Verify stdout and stderr lines have been correctly stored in the db
# Note - result delimiter should not be stored in the db
output_dbs = ActionExecutionOutput.query(output_type='stdout')
self.assertEqual(len(output_dbs), 3)
self.assertEqual(output_dbs[0].runner_ref, 'python-script')
self.assertEqual(output_dbs[0].data, mock_stdout[0])
self.assertEqual(output_dbs[1].data, mock_stdout[1])
self.assertEqual(output_dbs[2].data, mock_stdout[3])
output_dbs = ActionExecutionOutput.query(output_type='stderr')
self.assertEqual(len(output_dbs), 3)
self.assertEqual(output_dbs[0].runner_ref, 'python-script')
self.assertEqual(output_dbs[0].data, mock_stderr[0])
self.assertEqual(output_dbs[1].data, mock_stderr[1])
self.assertEqual(output_dbs[2].data, mock_stderr[2])
def test_real_time_output_streaming_bufsize(self):
# Test various values for bufsize and verify it works / doesn't hang the process
cfg.CONF.set_override(name='stream_output', group='actionrunner', override=True)
bufsize_values = [-100, -2, -1, 0, 1, 2, 1024, 2048, 4096, 10000]
for index, bufsize in enumerate(bufsize_values, 1):
cfg.CONF.set_override(name='stream_output_buffer_size', override=bufsize,
group='actionrunner')
output_dbs = ActionExecutionOutput.get_all()
# Unexpected third party warnings will also inflate this number
self.assertGreaterEqual(len(output_dbs), (index - 1) * 4)
runner = self._get_mock_runner_obj()
runner.entry_point = PRINT_TO_STDOUT_STDERR_ACTION
runner.pre_run()
(_, output, _) = runner.run({'stdout_count': 2, 'stderr_count': 2})
# assertMultiLineEqual displays a diff if the two don't match
self.assertMultiLineEqual(output['stdout'], 'stdout line 0\nstdout line 1\n')
# Third party packages can unexpectedly emit warnings and add more
# output to the streamed stderr, so we check that the expected
# lines occurred, but we allow additional lines to exist
self.assertIn('stderr line 0\n', output['stderr'])
self.assertIn('stderr line 1\n', output['stderr'])
self.assertEqual(output['exit_code'], 0)
output_dbs = ActionExecutionOutput.get_all()
# Unexpected third party warnings will also inflate this number
self.assertGreaterEqual(len(output_dbs), (index) * 4)
@mock.patch('st2common.util.concurrency.subprocess_popen')
def test_stdout_interception_and_parsing(self, mock_popen):
values = {'delimiter': ACTION_OUTPUT_RESULT_DELIMITER}
# No output to stdout and no result (implicit None)
mock_stdout = ['%(delimiter)sNone%(delimiter)s' % values]
mock_stderr = ['foo stderr']
mock_process = mock.Mock()
mock_process.returncode = 0
mock_popen.return_value = mock_process
mock_process.stdout.closed = False
mock_process.stderr.closed = False
mock_process.stdout.readline = make_mock_stream_readline(mock_process.stdout, mock_stdout)
mock_process.stderr.readline = make_mock_stream_readline(mock_process.stderr, mock_stderr)
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(_, output, _) = runner.run({'row_index': 4})
self.assertEqual(output['stdout'], '')
self.assertEqual(output['stderr'], mock_stderr[0])
self.assertEqual(output['result'], 'None')
self.assertEqual(output['exit_code'], 0)
# Output to stdout, no result (implicit None), return_code 1 and status failed
mock_stdout = ['pre result%(delimiter)sNone%(delimiter)spost result' % values]
mock_stderr = ['foo stderr']
mock_process = mock.Mock()
mock_process.returncode = 1
mock_popen.return_value = mock_process
mock_process.stdout.closed = False
mock_process.stderr.closed = False
mock_process.stdout.readline = make_mock_stream_readline(mock_process.stdout, mock_stdout)
mock_process.stderr.readline = make_mock_stream_readline(mock_process.stderr, mock_stderr)
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(status, output, _) = runner.run({'row_index': 4})
self.assertEqual(output['stdout'], 'pre resultpost result')
self.assertEqual(output['stderr'], mock_stderr[0])
self.assertEqual(output['result'], 'None')
self.assertEqual(output['exit_code'], 1)
self.assertEqual(status, 'failed')
# Output to stdout, no result (implicit None), return_code 1 and status succeeded
mock_stdout = ['pre result%(delimiter)sNone%(delimiter)spost result' % values]
mock_stderr = ['foo stderr']
mock_process = mock.Mock()
mock_process.returncode = 0
mock_popen.return_value = mock_process
mock_process.stdout.closed = False
mock_process.stderr.closed = False
mock_process.stdout.readline = make_mock_stream_readline(mock_process.stdout, mock_stdout)
mock_process.stderr.readline = make_mock_stream_readline(mock_process.stderr, mock_stderr)
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(status, output, _) = runner.run({'row_index': 4})
self.assertEqual(output['stdout'], 'pre resultpost result')
self.assertEqual(output['stderr'], mock_stderr[0])
self.assertEqual(output['result'], 'None')
self.assertEqual(output['exit_code'], 0)
self.assertEqual(status, 'succeeded')
@mock.patch('st2common.util.concurrency.subprocess_popen')
def test_common_st2_env_vars_are_available_to_the_action(self, mock_popen):
mock_process = mock.Mock()
mock_process.communicate.return_value = ('', '')
mock_popen.return_value = mock_process
runner = self._get_mock_runner_obj()
runner.auth_token = mock.Mock()
runner.auth_token.token = 'ponies'
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(_, _, _) = runner.run({'row_index': 4})
_, call_kwargs = mock_popen.call_args
actual_env = call_kwargs['env']
self.assertCommonSt2EnvVarsAvailableInEnv(env=actual_env)
@mock.patch('st2common.util.concurrency.subprocess_popen')
def test_pythonpath_env_var_contains_common_libs_config_enabled(self, mock_popen):
mock_process = mock.Mock()
mock_process.communicate.return_value = ('', '')
mock_popen.return_value = mock_process
runner = self._get_mock_runner_obj()
runner._enable_common_pack_libs = True
runner.auth_token = mock.Mock()
runner.auth_token.token = 'ponies'
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(_, _, _) = runner.run({'row_index': 4})
_, call_kwargs = mock_popen.call_args
actual_env = call_kwargs['env']
pack_common_lib_path = 'fixtures/packs/core/lib'
self.assertIn('PYTHONPATH', actual_env)
self.assertIn(pack_common_lib_path, actual_env['PYTHONPATH'])
@mock.patch('st2common.util.concurrency.subprocess_popen')
def test_pythonpath_env_var_not_contains_common_libs_config_disabled(self, mock_popen):
mock_process = mock.Mock()
mock_process.communicate.return_value = ('', '')
mock_popen.return_value = mock_process
runner = self._get_mock_runner_obj()
runner._enable_common_pack_libs = False
runner.auth_token = mock.Mock()
runner.auth_token.token = 'ponies'
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(_, _, _) = runner.run({'row_index': 4})
_, call_kwargs = mock_popen.call_args
actual_env = call_kwargs['env']
pack_common_lib_path = '/mnt/src/storm/st2/st2tests/st2tests/fixtures/packs/core/lib'
self.assertIn('PYTHONPATH', actual_env)
self.assertNotIn(pack_common_lib_path, actual_env['PYTHONPATH'])
def test_action_class_instantiation_action_service_argument(self):
class Action1(Action):
            # Constructor not overridden so no issue here
pass
def run(self):
pass
class Action2(Action):
            # Constructor overridden, but takes the action_service argument
def __init__(self, config, action_service=None):
super(Action2, self).__init__(config=config,
action_service=action_service)
def run(self):
pass
class Action3(Action):
            # Constructor overridden, but doesn't take the action_service argument
def __init__(self, config):
super(Action3, self).__init__(config=config)
def run(self):
pass
config = {'a': 1, 'b': 2}
action_service = 'ActionService!'
action1 = get_action_class_instance(action_cls=Action1, config=config,
action_service=action_service)
self.assertEqual(action1.config, config)
self.assertEqual(action1.action_service, action_service)
action2 = get_action_class_instance(action_cls=Action2, config=config,
action_service=action_service)
self.assertEqual(action2.config, config)
self.assertEqual(action2.action_service, action_service)
action3 = get_action_class_instance(action_cls=Action3, config=config,
action_service=action_service)
self.assertEqual(action3.config, config)
self.assertEqual(action3.action_service, action_service)
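    # --- Illustrative note (not part of the original tests) ---
    # get_action_class_instance() has to cope with both constructor shapes
    # exercised above. One common pattern (hypothetical here -- the real
    # implementation may differ) is to try the action_service-aware call
    # first and fall back to setting the attribute afterwards:
    #
    #   try:
    #       instance = action_cls(config=config, action_service=action_service)
    #   except TypeError:
    #       instance = action_cls(config=config)
    #       instance.action_service = action_service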
def test_action_with_same_module_name_as_module_in_stdlib(self):
runner = self._get_mock_runner_obj()
runner.entry_point = TEST_ACTION_PATH
runner.pre_run()
(status, output, _) = runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
self.assertIsNotNone(output)
self.assertEqual(output['result'], 'test action')
def test_python_action_wrapper_script_doesnt_get_added_to_sys_path(self):
# Validate that the directory where python_action_wrapper.py script is located
# (st2common/runners) doesn't get added to sys.path
runner = self._get_mock_runner_obj()
runner.entry_point = PATHS_ACTION_PATH
runner.pre_run()
(status, output, _) = runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
self.assertIsNotNone(output)
lines = output['stdout'].split('\n')
process_sys_path = lines[0]
process_pythonpath = lines[1]
assert 'sys.path' in process_sys_path
assert 'PYTHONPATH' in process_pythonpath
wrapper_script_path = 'st2common/runners'
assertion_msg = 'Found python wrapper script path in subprocess path'
self.assertNotIn(wrapper_script_path, process_sys_path, assertion_msg)
self.assertNotIn(wrapper_script_path, process_pythonpath, assertion_msg)
def test_python_action_wrapper_action_script_file_doesnt_exist_friendly_error(self):
# File in a directory which is not a Python package
wrapper = PythonActionWrapper(pack='dummy_pack_5', file_path='/tmp/doesnt.exist',
user='joe')
expected_msg = 'File "/tmp/doesnt.exist" has no action class or the file doesn\'t exist.'
self.assertRaisesRegexp(Exception, expected_msg, wrapper._get_action_instance)
# File in a directory which is a Python package
wrapper = PythonActionWrapper(pack='dummy_pack_5', file_path=ACTION_1_PATH,
user='joe')
expected_msg = (r'Failed to load action class from file ".*?list_repos_doesnt_exist.py" '
r'\(action file most likely doesn\'t exist or contains invalid syntax\): '
r'\[Errno 2\] No such file or directory')
self.assertRaisesRegexp(Exception, expected_msg, wrapper._get_action_instance)
def test_python_action_wrapper_action_script_file_contains_invalid_syntax_friendly_error(self):
wrapper = PythonActionWrapper(pack='dummy_pack_5', file_path=ACTION_2_PATH,
user='joe')
expected_msg = (r'Failed to load action class from file ".*?invalid_syntax.py" '
r'\(action file most likely doesn\'t exist or contains invalid syntax\): '
r'No module named \'?invalid\'?')
self.assertRaisesRegexp(Exception, expected_msg, wrapper._get_action_instance)
def test_simple_action_log_messages_and_log_level_runner_param(self):
expected_msg_1 = 'st2.actions.python.PascalRowAction: DEBUG Creating new Client object.'
expected_msg_2 = 'Retrieving all the values from the datastore'
expected_msg_3 = 'st2.actions.python.PascalRowAction: INFO test info log message'
expected_msg_4 = 'st2.actions.python.PascalRowAction: DEBUG test debug log message'
expected_msg_5 = 'st2.actions.python.PascalRowAction: ERROR test error log message'
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(status, output, _) = runner.run({'row_index': 'e'})
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
self.assertIsNotNone(output)
self.assertEqual(output['result'], [1, 2])
self.assertIn(expected_msg_1, output['stderr'])
self.assertIn(expected_msg_2, output['stderr'])
self.assertIn(expected_msg_3, output['stderr'])
self.assertIn(expected_msg_4, output['stderr'])
self.assertIn(expected_msg_5, output['stderr'])
stderr = output['stderr'].strip().split('\n')
expected_count = 5
# Remove lines we don't care about
lines = []
for line in stderr:
if 'configuration option is not configured' in line:
continue
if 'No handlers could be found for logger' in line:
continue
lines.append(line)
msg = ('Expected %s lines, got %s - "%s"' % (expected_count, len(lines), str(lines)))
# Dependencies can inject their own warnings, which increases the
# number of lines to more than we expect with simple equality checks
self.assertGreaterEqual(len(lines), expected_count, msg)
# Only log messages with level info and above should be displayed
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.runner_parameters = {
'log_level': 'info'
}
runner.pre_run()
(status, output, _) = runner.run({'row_index': 'e'})
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
self.assertIsNotNone(output)
self.assertEqual(output['result'], [1, 2])
self.assertIn(expected_msg_3, output['stderr'])
self.assertNotIn(expected_msg_4, output['stderr'])
self.assertIn(expected_msg_5, output['stderr'])
# Only log messages with level error and above should be displayed
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.runner_parameters = {
'log_level': 'error'
}
runner.pre_run()
(status, output, _) = runner.run({'row_index': 'e'})
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
self.assertIsNotNone(output)
self.assertEqual(output['result'], [1, 2])
self.assertNotIn(expected_msg_3, output['stderr'])
self.assertNotIn(expected_msg_4, output['stderr'])
self.assertIn(expected_msg_5, output['stderr'])
# Default log level is changed in st2.config
cfg.CONF.set_override(name='python_runner_log_level', override='INFO',
group='actionrunner')
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.runner_parameters = {}
runner.pre_run()
(status, output, _) = runner.run({'row_index': 'e'})
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
self.assertIsNotNone(output)
self.assertEqual(output['result'], [1, 2])
self.assertIn(expected_msg_3, output['stderr'])
self.assertNotIn(expected_msg_4, output['stderr'])
self.assertIn(expected_msg_5, output['stderr'])
def test_traceback_messages_are_not_duplicated_in_stderr(self):
# Verify tracebacks are not duplicated
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.pre_run()
(status, output, _) = runner.run({'row_index': 'f'})
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertIsNotNone(output)
expected_msg_1 = 'Traceback (most recent'
expected_msg_2 = 'ValueError: Duplicate traceback test'
self.assertIn(expected_msg_1, output['stderr'])
self.assertIn(expected_msg_2, output['stderr'])
self.assertEqual(output['stderr'].count(expected_msg_1), 1)
self.assertEqual(output['stderr'].count(expected_msg_2), 1)
def test_execution_with_very_large_parameter(self):
runner = self._get_mock_runner_obj()
runner.entry_point = ECHOER_ACTION_PATH
runner.pre_run()
large_value = ''.join(['1' for _ in range(MAX_PARAM_LENGTH)])
(status, output, _) = runner.run({'action_input': large_value})
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
self.assertIsNotNone(output)
self.assertEqual(output['result']['action_input'], large_value)
def test_execution_with_close_to_very_large_parameter(self):
runner = self._get_mock_runner_obj()
runner.entry_point = ECHOER_ACTION_PATH
runner.pre_run()
# 21 is the minimum overhead required to make the param fall back to
# param based payload. The linux max includes all parts of the param
# not just the value portion. So we need to subtract the remaining
# overhead from the initial padding.
large_value = ''.join(['1' for _ in range(MAX_PARAM_LENGTH - 21)])
(status, output, _) = runner.run({'action_input': large_value})
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
self.assertIsNotNone(output)
self.assertEqual(output['result']['action_input'], large_value)
@mock.patch('python_runner.python_runner.get_sandbox_virtualenv_path')
def test_content_version_success(self, mock_get_sandbox_virtualenv_path):
mock_get_sandbox_virtualenv_path.return_value = None
# 1. valid version - 0.2.0
runner = self._get_mock_runner_obj(pack='test_content_version', sandbox=False)
runner.entry_point = PRINT_VERSION_ACTION
runner.runner_parameters = {'content_version': 'v0.2.0'}
runner.pre_run()
(status, output, _) = runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
self.assertEqual(output['result'], 'v0.2.0')
self.assertEqual(output['stdout'].strip(), 'v0.2.0')
        # 2. valid version - 0.3.0
runner = self._get_mock_runner_obj(pack='test_content_version', sandbox=False)
runner.entry_point = PRINT_VERSION_ACTION
runner.runner_parameters = {'content_version': 'v0.3.0'}
runner.pre_run()
(status, output, _) = runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
self.assertEqual(output['result'], 'v0.3.0')
self.assertEqual(output['stdout'].strip(), 'v0.3.0')
        # 3. invalid version - 0.30.0
runner = self._get_mock_runner_obj(pack='test_content_version', sandbox=False)
runner.entry_point = PRINT_VERSION_ACTION
runner.runner_parameters = {'content_version': 'v0.30.0'}
expected_msg = (r'Failed to create git worktree for pack "test_content_version": '
'Invalid content_version '
'"v0.30.0" provided. Make sure that git repository is up '
'to date and contains that revision.')
self.assertRaisesRegexp(ValueError, expected_msg, runner.pre_run)
@mock.patch('python_runner.python_runner.get_sandbox_virtualenv_path')
@mock.patch('st2common.util.concurrency.subprocess_popen')
def test_content_version_contains_common_libs_config_enabled(self, mock_popen,
mock_get_sandbox_virtualenv_path):
# Verify that the common libs path correctly reflects directory in git worktree
mock_get_sandbox_virtualenv_path.return_value = None
mock_process = mock.Mock()
mock_process.communicate.return_value = ('', '')
mock_popen.return_value = mock_process
runner = self._get_mock_runner_obj(pack='test_content_version', sandbox=False)
runner._enable_common_pack_libs = True
runner.auth_token = mock.Mock()
runner.auth_token.token = 'ponies'
runner.runner_parameters = {'content_version': 'v0.3.0'}
runner.entry_point = PRINT_VERSION_ACTION
runner.pre_run()
(_, _, _) = runner.run({'row_index': 4})
_, call_kwargs = mock_popen.call_args
actual_env = call_kwargs['env']
pack_common_lib_path = os.path.join(runner.git_worktree_path, 'lib')
self.assertIn('PYTHONPATH', actual_env)
self.assertIn(pack_common_lib_path, actual_env['PYTHONPATH'])
@mock.patch('python_runner.python_runner.get_sandbox_virtualenv_path')
def test_content_version_success_local_modules_work_fine(self,
mock_get_sandbox_virtualenv_path):
# Verify that local module import correctly use git worktree directory
mock_get_sandbox_virtualenv_path.return_value = None
runner = self._get_mock_runner_obj(pack='test_content_version', sandbox=False)
runner.entry_point = PRINT_VERSION_LOCAL_MODULE_ACTION
runner.runner_parameters = {'content_version': 'v0.2.0'}
runner.pre_run()
(status, output, _) = runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
self.assertEqual(output['result'], 'v0.2.0')
# Verify local_module has been correctly loaded from git work tree directory
expected_stdout = ("<module '?local_module'? from '?%s/actions/local_module.py'?>.*" %
runner.git_worktree_path)
self.assertRegexpMatches(output['stdout'].strip(), expected_stdout)
@mock.patch('st2common.runners.base.run_command')
def test_content_version_old_git_version(self, mock_run_command):
mock_stdout = ''
mock_stderr = '''
git: 'worktree' is not a git command. See 'git --help'.
'''
mock_stderr = six.text_type(mock_stderr)
mock_run_command.return_value = 1, mock_stdout, mock_stderr, False
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.runner_parameters = {'content_version': 'v0.10.0'}
expected_msg = (r'Failed to create git worktree for pack "core": Installed git version '
'doesn\'t support git worktree command. To be able to utilize this '
'functionality you need to use git >= 2.5.0.')
self.assertRaisesRegexp(ValueError, expected_msg, runner.pre_run)
@mock.patch('st2common.runners.base.run_command')
def test_content_version_pack_repo_not_git_repository(self, mock_run_command):
mock_stdout = ''
mock_stderr = '''
fatal: Not a git repository (or any parent up to mount point /home)
Stopping at filesystem boundary (GIT_DISCOVERY_ACROSS_FILESYSTEM not set).
'''
mock_stderr = six.text_type(mock_stderr)
mock_run_command.return_value = 1, mock_stdout, mock_stderr, False
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.runner_parameters = {'content_version': 'v0.10.0'}
expected_msg = (r'Failed to create git worktree for pack "core": Pack directory '
'".*" is not a '
'git repository. To utilize this functionality, pack directory needs to '
'be a git repository.')
self.assertRaisesRegexp(ValueError, expected_msg, runner.pre_run)
@mock.patch('st2common.runners.base.run_command')
def test_content_version_invalid_git_revision(self, mock_run_command):
mock_stdout = ''
mock_stderr = '''
fatal: invalid reference: vinvalid
'''
mock_stderr = six.text_type(mock_stderr)
mock_run_command.return_value = 1, mock_stdout, mock_stderr, False
runner = self._get_mock_runner_obj()
runner.entry_point = PASCAL_ROW_ACTION_PATH
runner.runner_parameters = {'content_version': 'vinvalid'}
expected_msg = (r'Failed to create git worktree for pack "core": Invalid content_version '
'"vinvalid" provided. Make sure that git repository is up '
'to date and contains that revision.')
self.assertRaisesRegexp(ValueError, expected_msg, runner.pre_run)
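    # --- Illustrative note (not part of the original tests) ---
    # The content_version tests above revolve around "git worktree", which
    # checks out an extra working tree for a given revision without touching
    # the main checkout, roughly:
    #
    #   git worktree add /tmp/pack-worktree-example v0.3.0
    #
    # (the path above is a placeholder). The command only exists in git >= 2.5.0
    # and only works inside a git repository, which is exactly what the
    # error-path tests assert.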
def test_missing_config_item_user_friendly_error(self):
runner = self._get_mock_runner_obj()
runner.entry_point = PRINT_CONFIG_ITEM_ACTION
runner.pre_run()
(status, output, _) = runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertIsNotNone(output)
self.assertIn('{}', output['stdout'])
self.assertIn('default_value', output['stdout'])
self.assertIn('Config for pack "core" is missing key "key"', output['stderr'])
self.assertIn('make sure you run "st2ctl reload --register-configs"', output['stderr'])
def _get_mock_runner_obj(self, pack=None, sandbox=None):
runner = python_runner.get_runner()
runner.execution = MOCK_EXECUTION
runner.action = self._get_mock_action_obj()
runner.runner_parameters = {}
if pack:
runner.action.pack = pack
if sandbox is not None:
runner._sandbox = sandbox
return runner
@mock.patch('st2actions.container.base.ActionExecution.get', mock.Mock())
def _get_mock_runner_obj_from_container(self, pack, user, sandbox=None):
container = RunnerContainer()
runnertype_db = mock.Mock()
runnertype_db.name = 'python-script'
runnertype_db.runner_package = 'python_runner'
runnertype_db.runner_module = 'python_runner'
action_db = mock.Mock()
action_db.pack = pack
action_db.entry_point = 'foo.py'
liveaction_db = mock.Mock()
liveaction_db.id = '123'
liveaction_db.context = {'user': user}
runner = container._get_runner(runner_type_db=runnertype_db, action_db=action_db,
liveaction_db=liveaction_db)
runner.execution = MOCK_EXECUTION
runner.action = action_db
runner.runner_parameters = {}
if sandbox is not None:
runner._sandbox = sandbox
return runner
def _get_mock_action_obj(self):
"""
Return mock action object.
Pack gets set to the system pack so the action doesn't require a separate virtualenv.
"""
action = mock.Mock()
action.ref = 'dummy.action'
action.pack = SYSTEM_PACK_NAME
action.entry_point = 'foo.py'
action.runner_type = {
'name': 'python-script'
}
return action
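# --- Illustrative sketch (not part of the original test module) ---
# The streaming tests above drive the runner with make_mock_stream_readline(),
# a helper that feeds canned lines to the stdout/stderr reader loops and then
# marks the mock stream as closed. A minimal helper with that calling
# convention could look roughly like this; the project's real implementation
# may differ.
def example_make_mock_stream_readline(mock_stream, mock_lines, stop_counter=1):
    state = {'calls': 0}

    def readline():
        index = state['calls']
        state['calls'] += 1
        # Once enough lines have been handed out, close the stream so the
        # consumer loop in the runner stops reading.
        if state['calls'] >= stop_counter:
            mock_stream.closed = True
        return mock_lines[index]

    return readline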
| 45.45858 | 100 | 0.660202 |
bc6fbe83e9eeef092cfc061b984a87c396996a52 | 3,484 | py | Python | gimmemotifs/commands/cluster.py | simonvh/gimmemotifs | e40ab914a93210864c358b39ae677ac0792a80f2 | ["MIT"] | 20 | 2015-08-22T23:14:12.000Z | 2018-07-24T15:41:58.000Z | gimmemotifs/commands/cluster.py | simonvh/gimmemotifs | e40ab914a93210864c358b39ae677ac0792a80f2 | ["MIT"] | 62 | 2015-05-01T10:02:47.000Z | 2018-11-05T15:39:44.000Z | gimmemotifs/commands/cluster.py | simonvh/gimmemotifs | e40ab914a93210864c358b39ae677ac0792a80f2 | ["MIT"] | 8 | 2016-01-06T15:54:41.000Z | 2018-08-24T15:26:12.000Z |
#!/usr/bin/env python
# Copyright (c) 2009-2019 Simon van Heeringen <s.vanheeringen@science.ru.nl>
#
# This module is free software. You can redistribute it and/or modify it under
# the terms of the MIT License, see the file COPYING included with this
# distribution.
from gimmemotifs.motif import read_motifs
from gimmemotifs.comparison import MotifComparer
from gimmemotifs.cluster import cluster_motifs
from gimmemotifs.config import MotifConfig
import sys
import os
import jinja2
def _write_report(outdir, ids, tree, clusters):
config = MotifConfig()
env = jinja2.Environment(
loader=jinja2.FileSystemLoader([config.get_template_dir()])
)
template = env.get_template("cluster_template.jinja.html")
result = template.render(motifs=ids)
with open(os.path.join(outdir, "gimme.clustered.html"), "w") as f:
f.write(result)
f = open(os.path.join(outdir, "cluster_key.txt"), "w")
for motif_id in ids:
f.write("%s\t%s\n" % (motif_id[0], ",".join([x["alt"] for x in motif_id[2]])))
f.close()
f = open(os.path.join(outdir, "clustered_motifs.pfm"), "w")
if len(clusters) == 1 and len(clusters[0][1]) == 1:
f.write("%s\n" % clusters[0][0].to_ppm())
else:
for motif in tree.get_clustered_motifs():
f.write("%s\n" % motif.to_ppm())
f.close()
def _create_images(outdir, clusters):
ids = []
mc = MotifComparer()
trim_ic = 0.2
sys.stderr.write("Creating images\n")
for cluster, members in clusters:
cluster.trim(trim_ic)
cluster.plot_logo(fname=os.path.join(outdir, "%s.png" % cluster.id))
ids.append([cluster.id, {"src": "%s.png" % cluster.id}, []])
if len(members) > 1:
scores = {}
for motif in members:
scores[motif] = mc.compare_motifs(
cluster, motif, "total", "wic", "mean", pval=True
)
add_pos = sorted(scores.values(), key=lambda x: x[1])[0][1]
for motif in members:
_, pos, strand = scores[motif]
add = pos - add_pos
if strand in [1, "+"]:
pass
else:
# print "RC %s" % motif.id
rc = motif.rc()
rc.id = motif.id
motif = rc
# print "%s\t%s" % (motif.id, add)
motif.plot_logo(
fname=os.path.join(outdir, "%s.png" % motif.id.replace(" ", "_")),
add_left=add,
)
ids[-1][2] = [
dict(
[
("src", "%s.png" % m.id.replace(" ", "_")),
("alt", m.id.replace(" ", "_")),
]
)
for m in members
]
return ids
def cluster(args):
outdir = os.path.abspath(args.outdir)
if not os.path.exists(outdir):
os.mkdir(outdir)
ncpus = args.ncpus
    clusters = []
    # Initialized here so the single-motif branch below cannot hit a NameError
    # when tree is passed to _write_report().
    tree = None
motifs = read_motifs(args.inputfile)
if len(motifs) == 1:
clusters = [[motifs[0], motifs]]
else:
tree = cluster_motifs(
args.inputfile,
"total",
"wic",
"mean",
True,
threshold=args.threshold,
include_bg=True,
ncpus=ncpus,
)
clusters = tree.getResult()
ids = _create_images(outdir, clusters)
_write_report(outdir, ids, tree, clusters)
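# --- Illustrative sketch (not part of the original module) ---
# cluster() only reads a handful of attributes from its argument (inputfile,
# outdir, threshold, ncpus), so it can be driven directly with a small
# argparse namespace. The defaults below are placeholders, not gimmemotifs'
# documented defaults.
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="Cluster motifs in a PFM/PWM file")
    parser.add_argument("inputfile", help="file with input motifs")
    parser.add_argument("outdir", help="directory for the clustering report")
    parser.add_argument("-t", "--threshold", type=float, default=0.95)
    parser.add_argument("-N", "--ncpus", type=int, default=1)
    cluster(parser.parse_args())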
| 30.295652 | 86 | 0.535878 |
1c443523f65f5f8c973e9eb58b9cf057505dc784 | 630 | py | Python | tests/unit/db/test_users.py | jaimecruz21/lifeloopweb | ba0ffe1ea94ba3323a4e9c66c9506a338cae3212 | ["MIT"] | null | null | null | tests/unit/db/test_users.py | jaimecruz21/lifeloopweb | ba0ffe1ea94ba3323a4e9c66c9506a338cae3212 | ["MIT"] | null | null | null | tests/unit/db/test_users.py | jaimecruz21/lifeloopweb | ba0ffe1ea94ba3323a4e9c66c9506a338cae3212 | ["MIT"] | null | null | null |
import pytest
from lifeloopweb.db.models import User
from lifeloopweb import exception
import tests
class TestUser(tests.TestBase):
def test_get_email_from_full_name_and_email(self):
full_name_and_email = "Jason Meridth (jason@meridth.io)"
result = User.get_email_from_full_name_and_email(
full_name_and_email)
assert result == 'jason@meridth.io'
def test_get_email_from_full_name_and_email_with_invalid_email(self):
full_name_and_email = "invalid"
with pytest.raises(exception.InvalidEmail):
User.get_email_from_full_name_and_email(full_name_and_email)
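# --- Illustrative sketch (not part of the original test module) ---
# The behaviour exercised above (pull the address out of "Full Name (email)"
# and reject anything else) can be reproduced with a small standalone parser.
# The names below are hypothetical; the real User model and InvalidEmail
# exception in lifeloopweb may be implemented differently.
import re

class ExampleInvalidEmail(Exception):
    pass

_EMAIL_IN_PARENS = re.compile(r"\(([^()\s]+@[^()\s]+)\)\s*$")

def example_get_email_from_full_name_and_email(full_name_and_email):
    match = _EMAIL_IN_PARENS.search(full_name_and_email)
    if match is None:
        raise ExampleInvalidEmail(full_name_and_email)
    return match.group(1)

# example_get_email_from_full_name_and_email("Jason Meridth (jason@meridth.io)")
# returns "jason@meridth.io"; passing "invalid" raises ExampleInvalidEmail.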
| 33.157895 | 73 | 0.757143 |
2edde60e5f0ae8d4aaaa16e19837731138d6f997 | 29,791 | py | Python | tests/postgres.py | Walicen/peewee | e9c8bbf912903e167e052d07f6247801dd0346aa | ["MIT"] | 1 | 2019-05-06T08:20:41.000Z | 2019-05-06T08:20:41.000Z | tests/postgres.py | Walicen/peewee | e9c8bbf912903e167e052d07f6247801dd0346aa | ["MIT"] | null | null | null | tests/postgres.py | Walicen/peewee | e9c8bbf912903e167e052d07f6247801dd0346aa | ["MIT"] | null | null | null |
#coding:utf-8
import datetime
import uuid
from decimal import Decimal as Dc
from types import MethodType
from peewee import *
from playhouse.postgres_ext import *
from .base import BaseTestCase
from .base import ModelTestCase
from .base import TestModel
from .base import db_loader
from .base import requires_models
from .base import skip_unless
from .base_models import Register
db = db_loader('postgres', db_class=PostgresqlExtDatabase)
class HStoreModel(TestModel):
name = CharField()
data = HStoreField()
D = HStoreModel.data
class ArrayModel(TestModel):
tags = ArrayField(CharField)
ints = ArrayField(IntegerField, dimensions=2)
class UUIDList(TestModel):
key = CharField()
id_list = ArrayField(BinaryUUIDField, convert_values=True, index=False)
id_list_native = ArrayField(UUIDField, index=False)
class ArrayTSModel(TestModel):
key = CharField(max_length=100, primary_key=True)
timestamps = ArrayField(TimestampField, convert_values=True)
class DecimalArray(TestModel):
values = ArrayField(DecimalField, field_kwargs={'decimal_places': 1})
class FTSModel(TestModel):
title = CharField()
data = TextField()
fts_data = TSVectorField()
try:
class JsonModel(TestModel):
data = JSONField()
class JsonModelNull(TestModel):
data = JSONField(null=True)
except:
JsonModel = JsonModelNull = None
try:
class BJson(TestModel):
data = BinaryJSONField()
except:
BJson = None
class Normal(TestModel):
data = TextField()
class Event(TestModel):
name = CharField()
duration = IntervalField()
class TZModel(TestModel):
dt = DateTimeTZField()
class TestTZField(ModelTestCase):
database = db
requires = [TZModel]
def test_tz_field(self):
self.database.execute_sql('set time zone "us/central";')
dt = datetime.datetime.now()
tz = TZModel.create(dt=dt)
self.assertTrue(tz.dt.tzinfo is None)
tz = TZModel.get(TZModel.id == tz.id)
class TestHStoreField(ModelTestCase):
database = db_loader('postgres', db_class=PostgresqlExtDatabase,
register_hstore=True)
requires = [HStoreModel]
def setUp(self):
super(TestHStoreField, self).setUp()
self.t1 = HStoreModel.create(name='t1', data={'k1': 'v1', 'k2': 'v2'})
self.t2 = HStoreModel.create(name='t2', data={'k2': 'v2', 'k3': 'v3'})
def by_name(self, name):
return HStoreModel.get(HStoreModel.name == name).data
def test_hstore_storage(self):
self.assertEqual(self.by_name('t1'), {'k1': 'v1', 'k2': 'v2'})
self.assertEqual(self.by_name('t2'), {'k2': 'v2', 'k3': 'v3'})
self.t1.data = {'k4': 'v4'}
self.t1.save()
self.assertEqual(self.by_name('t1'), {'k4': 'v4'})
HStoreModel.create(name='t3', data={})
self.assertEqual(self.by_name('t3'), {})
def query(self, *cols):
return (HStoreModel
.select(HStoreModel.name, *cols)
.order_by(HStoreModel.id))
def test_hstore_selecting(self):
query = self.query(D.keys().alias('keys'))
self.assertEqual([(x.name, sorted(x.keys)) for x in query], [
('t1', ['k1', 'k2']), ('t2', ['k2', 'k3'])])
query = self.query(D.values().alias('vals'))
self.assertEqual([(x.name, sorted(x.vals)) for x in query], [
('t1', ['v1', 'v2']), ('t2', ['v2', 'v3'])])
query = self.query(D.items().alias('mtx'))
self.assertEqual([(x.name, sorted(x.mtx)) for x in query], [
('t1', [['k1', 'v1'], ['k2', 'v2']]),
('t2', [['k2', 'v2'], ['k3', 'v3']])])
query = self.query(D.slice('k2', 'k3').alias('kz'))
self.assertEqual([(x.name, x.kz) for x in query], [
('t1', {'k2': 'v2'}),
('t2', {'k2': 'v2', 'k3': 'v3'})])
query = self.query(D.slice('k4').alias('kz'))
self.assertEqual([(x.name, x.kz) for x in query], [
('t1', {}), ('t2', {})])
query = self.query(D.exists('k3').alias('ke'))
self.assertEqual([(x.name, x.ke) for x in query], [
('t1', False), ('t2', True)])
query = self.query(D.defined('k3').alias('ke'))
self.assertEqual([(x.name, x.ke) for x in query], [
('t1', False), ('t2', True)])
query = self.query(D['k1'].alias('k1'))
self.assertEqual([(x.name, x.k1) for x in query], [
('t1', 'v1'), ('t2', None)])
query = self.query().where(D['k1'] == 'v1')
self.assertEqual([x.name for x in query], ['t1'])
def assertWhere(self, expr, names):
query = HStoreModel.select().where(expr)
self.assertEqual([x.name for x in query], names)
def test_hstore_filtering(self):
self.assertWhere(D == {'k1': 'v1', 'k2': 'v2'}, ['t1'])
self.assertWhere(D == {'k2': 'v2'}, [])
self.assertWhere(D.contains('k3'), ['t2'])
self.assertWhere(D.contains(['k2', 'k3']), ['t2'])
self.assertWhere(D.contains(['k2']), ['t1', 't2'])
# test dict
self.assertWhere(D.contains({'k2': 'v2', 'k3': 'v3'}), ['t2'])
self.assertWhere(D.contains({'k2': 'v2'}), ['t1', 't2'])
self.assertWhere(D.contains({'k2': 'v3'}), [])
# test contains any.
self.assertWhere(D.contains_any('k3', 'kx'), ['t2'])
self.assertWhere(D.contains_any('k2', 'x', 'k3'), ['t1', 't2'])
self.assertWhere(D.contains_any('x', 'kx', 'y'), [])
def test_hstore_filter_functions(self):
self.assertWhere(D.exists('k2') == True, ['t1', 't2'])
self.assertWhere(D.exists('k3') == True, ['t2'])
self.assertWhere(D.defined('k2') == True, ['t1', 't2'])
self.assertWhere(D.defined('k3') == True, ['t2'])
def test_hstore_update(self):
rc = (HStoreModel
.update(data=D.update(k4='v4'))
.where(HStoreModel.name == 't1')
.execute())
self.assertTrue(rc > 0)
self.assertEqual(self.by_name('t1'),
{'k1': 'v1', 'k2': 'v2', 'k4': 'v4'})
rc = (HStoreModel
.update(data=D.update(k5='v5', k6='v6'))
.where(HStoreModel.name == 't2')
.execute())
self.assertTrue(rc > 0)
self.assertEqual(self.by_name('t2'),
{'k2': 'v2', 'k3': 'v3', 'k5': 'v5', 'k6': 'v6'})
HStoreModel.update(data=D.update(k2='vxxx')).execute()
self.assertEqual([x.data for x in self.query(D)], [
{'k1': 'v1', 'k2': 'vxxx', 'k4': 'v4'},
{'k2': 'vxxx', 'k3': 'v3', 'k5': 'v5', 'k6': 'v6'}])
(HStoreModel
.update(data=D.delete('k4'))
.where(HStoreModel.name == 't1')
.execute())
self.assertEqual(self.by_name('t1'), {'k1': 'v1', 'k2': 'vxxx'})
HStoreModel.update(data=D.delete('k5')).execute()
self.assertEqual([x.data for x in self.query(D)], [
{'k1': 'v1', 'k2': 'vxxx'},
{'k2': 'vxxx', 'k3': 'v3', 'k6': 'v6'}
])
HStoreModel.update(data=D.delete('k1', 'k2')).execute()
self.assertEqual([x.data for x in self.query(D)], [
{},
{'k3': 'v3', 'k6': 'v6'}])
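# --- Illustrative note (not part of the original test module) ---
# The hstore tests above assume the hstore extension is installed in the
# target database and that the connection was opened with
# register_hstore=True (as this test case's database is). On a fresh
# database that typically means running, once, as a superuser:
#
#   CREATE EXTENSION IF NOT EXISTS hstore;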
class TestArrayField(ModelTestCase):
database = db
requires = [ArrayModel]
def create_sample(self):
return ArrayModel.create(
tags=['alpha', 'beta', 'gamma', 'delta'],
ints=[[1, 2], [3, 4], [5, 6]])
def test_array_get_set(self):
am = self.create_sample()
am_db = ArrayModel.get(ArrayModel.id == am.id)
self.assertEqual(am_db.tags, ['alpha', 'beta', 'gamma', 'delta'])
self.assertEqual(am_db.ints, [[1, 2], [3, 4], [5, 6]])
def test_array_equality(self):
am1 = ArrayModel.create(tags=['t1'], ints=[[1, 2]])
am2 = ArrayModel.create(tags=['t2'], ints=[[3, 4]])
obj = ArrayModel.get(ArrayModel.tags == ['t1'])
self.assertEqual(obj.id, am1.id)
self.assertEqual(obj.tags, ['t1'])
obj = ArrayModel.get(ArrayModel.ints == [[3, 4]])
self.assertEqual(obj.id, am2.id)
obj = ArrayModel.get(ArrayModel.tags != ['t1'])
self.assertEqual(obj.id, am2.id)
def test_array_db_value(self):
am = ArrayModel.create(tags=('foo', 'bar'), ints=[])
am_db = ArrayModel.get(ArrayModel.id == am.id)
self.assertEqual(am_db.tags, ['foo', 'bar'])
def test_array_search(self):
def assertAM(where, *instances):
query = (ArrayModel
.select()
.where(where)
.order_by(ArrayModel.id))
self.assertEqual([x.id for x in query], [x.id for x in instances])
am = self.create_sample()
am2 = ArrayModel.create(tags=['alpha', 'beta'], ints=[[1, 1]])
am3 = ArrayModel.create(tags=['delta'], ints=[[3, 4]])
am4 = ArrayModel.create(tags=['中文'], ints=[[3, 4]])
am5 = ArrayModel.create(tags=['中文', '汉语'], ints=[[3, 4]])
AM = ArrayModel
T = AM.tags
assertAM((Value('beta') == fn.ANY(T)), am, am2)
assertAM((Value('delta') == fn.Any(T)), am, am3)
assertAM(Value('omega') == fn.Any(T))
# Check the contains operator.
assertAM(SQL("tags::text[] @> ARRAY['beta']"), am, am2)
# Use the nicer API.
assertAM(T.contains('beta'), am, am2)
assertAM(T.contains('omega', 'delta'))
assertAM(T.contains('汉语'), am5)
assertAM(T.contains('alpha', 'delta'), am)
# Check for any.
assertAM(T.contains_any('beta'), am, am2)
assertAM(T.contains_any('中文'), am4, am5)
assertAM(T.contains_any('omega', 'delta'), am, am3)
assertAM(T.contains_any('alpha', 'delta'), am, am2, am3)
def test_array_index_slice(self):
self.create_sample()
AM = ArrayModel
I, T = AM.ints, AM.tags
row = AM.select(T[1].alias('arrtags')).dicts().get()
self.assertEqual(row['arrtags'], 'beta')
row = AM.select(T[2:4].alias('foo')).dicts().get()
self.assertEqual(row['foo'], ['gamma', 'delta'])
row = AM.select(I[1][1].alias('ints')).dicts().get()
self.assertEqual(row['ints'], 4)
row = AM.select(I[1:2][0].alias('ints')).dicts().get()
self.assertEqual(row['ints'], [[3], [5]])
@requires_models(DecimalArray)
def test_field_kwargs(self):
vl1, vl2 = [Dc('3.1'), Dc('1.3')], [Dc('3.14'), Dc('1')]
da1, da2 = [DecimalArray.create(values=vl) for vl in (vl1, vl2)]
da1_db = DecimalArray.get(DecimalArray.id == da1.id)
da2_db = DecimalArray.get(DecimalArray.id == da2.id)
self.assertEqual(da1_db.values, [Dc('3.1'), Dc('1.3')])
self.assertEqual(da2_db.values, [Dc('3.1'), Dc('1.0')])
class TestArrayFieldConvertValues(ModelTestCase):
database = db
requires = [ArrayTSModel]
def test_value_conversion(self):
def dt(day, hour=0, minute=0, second=0):
return datetime.datetime(2018, 1, day, hour, minute, second)
data = {
'k1': [dt(1), dt(2), dt(3)],
'k2': [],
'k3': [dt(4, 5, 6, 7), dt(10, 11, 12, 13)],
}
for key in sorted(data):
ArrayTSModel.create(key=key, timestamps=data[key])
for key in sorted(data):
am = ArrayTSModel.get(ArrayTSModel.key == key)
self.assertEqual(am.timestamps, data[key])
# Perform lookup using timestamp values.
ts = ArrayTSModel.get(ArrayTSModel.timestamps.contains(dt(3)))
self.assertEqual(ts.key, 'k1')
ts = ArrayTSModel.get(ArrayTSModel.timestamps.contains(dt(4, 5, 6, 7)))
self.assertEqual(ts.key, 'k3')
self.assertRaises(ArrayTSModel.DoesNotExist, ArrayTSModel.get,
ArrayTSModel.timestamps.contains(dt(4, 5, 6)))
class TestArrayUUIDField(ModelTestCase):
database = db
requires = [UUIDList]
def setUp(self):
super(TestArrayUUIDField, self).setUp()
import psycopg2.extras
psycopg2.extras.register_uuid()
def test_array_of_uuids(self):
u1, u2, u3, u4 = [uuid.uuid4() for _ in range(4)]
a = UUIDList.create(key='a', id_list=[u1, u2, u3],
id_list_native=[u1, u2, u3])
b = UUIDList.create(key='b', id_list=[u2, u3, u4],
id_list_native=[u2, u3, u4])
a_db = UUIDList.get(UUIDList.key == 'a')
b_db = UUIDList.get(UUIDList.key == 'b')
self.assertEqual(a.id_list, [u1, u2, u3])
self.assertEqual(b.id_list, [u2, u3, u4])
self.assertEqual(a.id_list_native, [u1, u2, u3])
self.assertEqual(b.id_list_native, [u2, u3, u4])
class TestTSVectorField(ModelTestCase):
database = db
requires = [FTSModel]
messages = [
'A faith is a necessity to a man. Woe to him who believes in nothing.',
'All who call on God in true faith, earnestly from the heart, will '
'certainly be heard, and will receive what they have asked and desired.',
'Be faithful in small things because it is in them that your strength lies.',
'Faith consists in believing when it is beyond the power of reason to believe.',
'Faith has to do with things that are not seen and hope with things that are not at hand.',
]
def setUp(self):
super(TestTSVectorField, self).setUp()
for idx, message in enumerate(self.messages):
FTSModel.create(title=str(idx), data=message,
fts_data=fn.to_tsvector(message))
def assertMessages(self, expr, expected):
query = FTSModel.select().where(expr).order_by(FTSModel.id)
titles = [row.title for row in query]
self.assertEqual(list(map(int, titles)), expected)
def test_sql(self):
query = FTSModel.select().where(Match(FTSModel.data, 'foo bar'))
self.assertSQL(query, (
'SELECT "t1"."id", "t1"."title", "t1"."data", "t1"."fts_data" '
'FROM "fts_model" AS "t1" '
'WHERE (to_tsvector("t1"."data") @@ to_tsquery(?))'), ['foo bar'])
def test_match_function(self):
D = FTSModel.data
self.assertMessages(Match(D, 'heart'), [1])
self.assertMessages(Match(D, 'god'), [1])
self.assertMessages(Match(D, 'faith'), [0, 1, 2, 3, 4])
self.assertMessages(Match(D, 'thing'), [2, 4])
self.assertMessages(Match(D, 'faith & things'), [2, 4])
self.assertMessages(Match(D, 'god | things'), [1, 2, 4])
self.assertMessages(Match(D, 'god & things'), [])
def test_tsvector_field(self):
M = FTSModel.fts_data.match
self.assertMessages(M('heart'), [1])
self.assertMessages(M('god'), [1])
self.assertMessages(M('faith'), [0, 1, 2, 3, 4])
self.assertMessages(M('thing'), [2, 4])
self.assertMessages(M('faith & things'), [2, 4])
self.assertMessages(M('god | things'), [1, 2, 4])
self.assertMessages(M('god & things'), [])
class BaseJsonFieldTestCase(object):
M = None # Subclasses must define this.
def test_json_field(self):
data = {'k1': ['a1', 'a2'], 'k2': {'k3': 'v3'}}
j = self.M.create(data=data)
j_db = self.M.get(j._pk_expr())
self.assertEqual(j_db.data, data)
def test_joining_on_json_key(self):
values = [
{'foo': 'bar', 'baze': {'nugget': 'alpha'}},
{'foo': 'bar', 'baze': {'nugget': 'beta'}},
{'herp': 'derp', 'baze': {'nugget': 'epsilon'}},
{'herp': 'derp', 'bar': {'nuggie': 'alpha'}},
]
for data in values:
self.M.create(data=data)
for value in ['alpha', 'beta', 'gamma', 'delta']:
Normal.create(data=value)
query = (self.M
.select()
.join(Normal, on=(
Normal.data == self.M.data['baze']['nugget']))
.order_by(self.M.id))
results = [jm.data for jm in query]
self.assertEqual(results, [
{'foo': 'bar', 'baze': {'nugget': 'alpha'}},
{'foo': 'bar', 'baze': {'nugget': 'beta'}},
])
def test_json_lookup_methods(self):
data = {
'gp1': {
'p1': {'c1': 'foo'},
'p2': {'c2': 'bar'}},
'gp2': {}}
j = self.M.create(data=data)
def assertLookup(lookup, expected):
query = (self.M
.select(lookup)
.where(j._pk_expr())
.dicts())
self.assertEqual(query.get(), expected)
expr = self.M.data['gp1']['p1']
assertLookup(expr.alias('p1'), {'p1': '{"c1": "foo"}'})
assertLookup(expr.as_json().alias('p2'), {'p2': {'c1': 'foo'}})
expr = self.M.data['gp1']['p1']['c1']
assertLookup(expr.alias('c1'), {'c1': 'foo'})
assertLookup(expr.as_json().alias('c2'), {'c2': 'foo'})
j.data = [
{'i1': ['foo', 'bar', 'baz']},
['nugget', 'mickey']]
j.save()
expr = self.M.data[0]['i1']
assertLookup(expr.alias('i1'), {'i1': '["foo", "bar", "baz"]'})
assertLookup(expr.as_json().alias('i2'), {'i2': ['foo', 'bar', 'baz']})
expr = self.M.data[1][1]
assertLookup(expr.alias('l1'), {'l1': 'mickey'})
assertLookup(expr.as_json().alias('l2'), {'l2': 'mickey'})
def test_json_cast(self):
self.M.create(data={'foo': {'bar': 3}})
self.M.create(data={'foo': {'bar': 5}})
query = (self.M
.select(Cast(self.M.data['foo']['bar'], 'float') * 1.5)
.order_by(self.M.id)
.tuples())
self.assertEqual(query[:], [(4.5,), (7.5,)])
def test_json_path(self):
data = {
'foo': {
'baz': {
'bar': ['i1', 'i2', 'i3'],
'baze': ['j1', 'j2'],
}}}
j = self.M.create(data=data)
def assertPath(path, expected):
query = (self.M
.select(path)
.where(j._pk_expr())
.dicts())
self.assertEqual(query.get(), expected)
expr = self.M.data.path('foo', 'baz', 'bar')
assertPath(expr.alias('p1'), {'p1': '["i1", "i2", "i3"]'})
assertPath(expr.as_json().alias('p2'), {'p2': ['i1', 'i2', 'i3']})
expr = self.M.data.path('foo', 'baz', 'baze', 1)
assertPath(expr.alias('p1'), {'p1': 'j2'})
assertPath(expr.as_json().alias('p2'), {'p2': 'j2'})
def test_json_field_sql(self):
j = (self.M
.select()
.where(self.M.data == {'foo': 'bar'}))
table = self.M._meta.table_name
self.assertSQL(j, (
'SELECT "t1"."id", "t1"."data" '
'FROM "%s" AS "t1" WHERE ("t1"."data" = ?)') % table)
j = (self.M
.select()
.where(self.M.data['foo'] == 'bar'))
self.assertSQL(j, (
'SELECT "t1"."id", "t1"."data" '
'FROM "%s" AS "t1" WHERE ("t1"."data"->>? = ?)') % table)
def assertItems(self, where, *items):
query = (self.M
.select()
.where(where)
.order_by(self.M.id))
self.assertEqual(
[item.id for item in query],
[item.id for item in items])
def test_lookup(self):
t1 = self.M.create(data={'k1': 'v1', 'k2': {'k3': 'v3'}})
t2 = self.M.create(data={'k1': 'x1', 'k2': {'k3': 'x3'}})
t3 = self.M.create(data={'k1': 'v1', 'j2': {'j3': 'v3'}})
self.assertItems((self.M.data['k2']['k3'] == 'v3'), t1)
self.assertItems((self.M.data['k1'] == 'v1'), t1, t3)
# Valid key, no matching value.
self.assertItems((self.M.data['k2'] == 'v1'))
# Non-existent key.
self.assertItems((self.M.data['not-here'] == 'v1'))
# Non-existent nested key.
self.assertItems((self.M.data['not-here']['xxx'] == 'v1'))
self.assertItems((self.M.data['k2']['xxx'] == 'v1'))
def pg93():
with db:
return db.connection().server_version >= 90300
JSON_SUPPORT = (JsonModel is not None) and pg93()
@skip_unless(JSON_SUPPORT, 'json support unavailable')
class TestJsonField(BaseJsonFieldTestCase, ModelTestCase):
M = JsonModel
database = db
requires = [JsonModel, Normal, JsonModelNull]
def test_json_null(self):
tjn = JsonModelNull.create(data=None)
tj = JsonModelNull.create(data={'k1': 'v1'})
results = JsonModelNull.select().order_by(JsonModelNull.id)
self.assertEqual(
[tj_db.data for tj_db in results],
[None, {'k1': 'v1'}])
query = JsonModelNull.select().where(
JsonModelNull.data.is_null(True))
self.assertEqual(query.get(), tjn)
@skip_unless(JSON_SUPPORT, 'json support unavailable')
class TestBinaryJsonField(BaseJsonFieldTestCase, ModelTestCase):
M = BJson
database = db
requires = [BJson, Normal]
def _create_test_data(self):
data = [
{'k1': 'v1', 'k2': 'v2', 'k3': {'k4': ['i1', 'i2'], 'k5': {}}},
['a1', 'a2', {'a3': 'a4'}],
{'a1': 'x1', 'a2': 'x2', 'k4': ['i1', 'i2']},
list(range(10)),
list(range(5, 15)),
['k4', 'k1']]
self._bjson_objects = []
for json_value in data:
self._bjson_objects.append(BJson.create(data=json_value))
def assertObjects(self, expr, *indexes):
query = (BJson
.select()
.where(expr)
.order_by(BJson.id))
self.assertEqual(
[bjson.data for bjson in query],
[self._bjson_objects[index].data for index in indexes])
def test_contained_by(self):
self._create_test_data()
item1 = ['a1', 'a2', {'a3': 'a4'}, 'a5']
self.assertObjects(BJson.data.contained_by(item1), 1)
item2 = {'a1': 'x1', 'a2': 'x2', 'k4': ['i0', 'i1', 'i2'], 'x': 'y'}
self.assertObjects(BJson.data.contained_by(item2), 2)
def test_equality(self):
data = {'k1': ['a1', 'a2'], 'k2': {'k3': 'v3'}}
j = BJson.create(data=data)
j_db = BJson.get(BJson.data == data)
self.assertEqual(j.id, j_db.id)
def test_subscript_contains(self):
self._create_test_data()
D = BJson.data
        # 'k3' is mapped to another dictionary {'k4': [...]}. Therefore,
# 'k3' is said to contain 'k4', but *not* ['k4'] or ['k4', 'k5'].
self.assertObjects(D['k3'].contains('k4'), 0)
self.assertObjects(D['k3'].contains(['k4']))
self.assertObjects(D['k3'].contains(['k4', 'k5']))
# We can check for the keys this way, though.
self.assertObjects(D['k3'].contains_all('k4', 'k5'), 0)
self.assertObjects(D['k3'].contains_any('k4', 'kx'), 0)
# However, in test object index=2, 'k4' can be said to contain
# both 'i1' and ['i1'].
self.assertObjects(D['k4'].contains('i1'), 2)
self.assertObjects(D['k4'].contains(['i1']), 2)
# Interestingly, we can also specify the list of contained values
# out-of-order.
self.assertObjects(D['k4'].contains(['i2', 'i1']), 2)
# We can test whether an object contains another JSON object fragment.
self.assertObjects(D['k3'].contains({'k4': ['i1']}), 0)
self.assertObjects(D['k3'].contains({'k4': ['i1', 'i2']}), 0)
# Check multiple levels of nesting / containment.
self.assertObjects(D['k3']['k4'].contains('i2'), 0)
self.assertObjects(D['k3']['k4'].contains_all('i1', 'i2'), 0)
self.assertObjects(D['k3']['k4'].contains_all('i0', 'i2'))
self.assertObjects(D['k4'].contains_all('i1', 'i2'), 2)
# Check array indexes.
self.assertObjects(D[2].contains('a3'), 1)
self.assertObjects(D[0].contains('a1'), 1)
self.assertObjects(D[0].contains('k1'))
def test_contains(self):
self._create_test_data()
D = BJson.data
# Test for keys. 'k4' is both an object key and an array element.
self.assertObjects(D.contains('k4'), 2, 5)
self.assertObjects(D.contains('a1'), 1, 2)
self.assertObjects(D.contains('k3'), 0)
# We can test for multiple top-level keys/indexes.
self.assertObjects(D.contains_all('a1', 'a2'), 1, 2)
# If we test for both with .contains(), though, it is treated as
# an object match.
self.assertObjects(D.contains(['a1', 'a2']), 1)
# Check numbers.
self.assertObjects(D.contains([2, 5, 6, 7, 8]), 3)
self.assertObjects(D.contains([5, 6, 7, 8, 9]), 3, 4)
# We can check for partial objects.
self.assertObjects(D.contains({'a1': 'x1'}), 2)
self.assertObjects(D.contains({'k3': {'k4': []}}), 0)
self.assertObjects(D.contains([{'a3': 'a4'}]), 1)
# Check for simple keys.
self.assertObjects(D.contains('a1'), 1, 2)
self.assertObjects(D.contains('k3'), 0)
# Contains any.
self.assertObjects(D.contains_any('a1', 'k1'), 0, 1, 2, 5)
self.assertObjects(D.contains_any('k4', 'xx', 'yy', '2'), 2, 5)
self.assertObjects(D.contains_any('i1', 'i2', 'a3'))
# Contains all.
self.assertObjects(D.contains_all('k1', 'k2', 'k3'), 0)
self.assertObjects(D.contains_all('k1', 'k2', 'k3', 'k4'))
def test_integer_index_weirdness(self):
self._create_test_data()
def fails():
with self.database.atomic():
expr = BJson.data.contains_any(2, 8, 12)
results = list(BJson.select().where(
BJson.data.contains_any(2, 8, 12)))
# Complains of a missing cast/conversion for the data-type?
self.assertRaises(ProgrammingError, fails)
def test_selecting(self):
self._create_test_data()
query = (BJson
.select(BJson.data['k3']['k4'].as_json().alias('k3k4'))
.order_by(BJson.id))
k3k4_data = [obj.k3k4 for obj in query]
self.assertEqual(k3k4_data, [
['i1', 'i2'],
None,
None,
None,
None,
None])
query = (BJson
.select(
BJson.data[0].as_json(),
BJson.data[2].as_json())
.order_by(BJson.id)
.tuples())
self.assertEqual(list(query), [
(None, None),
('a1', {'a3': 'a4'}),
(None, None),
(0, 2),
(5, 7),
('k4', None)])
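# --- Illustrative note (not part of the original test module) ---
# The containment behaviour exercised above maps onto PostgreSQL's native
# jsonb operators, roughly (the exact SQL peewee emits may differ):
#   data.contains(x)        -> "data" @> x        (jsonb containment)
#   data.contained_by(x)    -> "data" <@ x
#   data.contains_any(*ks)  -> "data" ?| array[...]
#   data.contains_all(*ks)  -> "data" ?& array[...]
# A quick way to inspect the generated SQL for any of these expressions:
#
#   print(BJson.select().where(BJson.data['k3'].contains('k4')).sql())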
class TestIntervalField(ModelTestCase):
database = db
requires = [Event]
def test_interval_field(self):
e1 = Event.create(name='hour', duration=datetime.timedelta(hours=1))
e2 = Event.create(name='mix', duration=datetime.timedelta(
days=1,
hours=2,
minutes=3,
seconds=4))
events = [(e.name, e.duration)
for e in Event.select().order_by(Event.duration)]
self.assertEqual(events, [
('hour', datetime.timedelta(hours=1)),
('mix', datetime.timedelta(days=1, hours=2, minutes=3, seconds=4))
])
class TestIndexedField(BaseTestCase):
def test_indexed_field_ddl(self):
class FakeIndexedField(IndexedFieldMixin, CharField):
index_type = 'FAKE'
class IndexedModel(TestModel):
array_index = ArrayField(CharField)
            array_noindex = ArrayField(IntegerField, index=False)
fake_index = FakeIndexedField()
fake_index_with_type = FakeIndexedField(index_type='MAGIC')
fake_noindex = FakeIndexedField(index=False)
class Meta:
database = db
create_sql, _ = IndexedModel._schema._create_table(False).query()
self.assertEqual(create_sql, (
'CREATE TABLE "indexed_model" ('
'"id" SERIAL NOT NULL PRIMARY KEY, '
'"array_index" VARCHAR(255)[] NOT NULL, '
'"array_noindex" INTEGER[] NOT NULL, '
'"fake_index" VARCHAR(255) NOT NULL, '
'"fake_index_with_type" VARCHAR(255) NOT NULL, '
'"fake_noindex" VARCHAR(255) NOT NULL)'))
indexes = [idx.query()[0]
for idx in IndexedModel._schema._create_indexes(False)]
self.assertEqual(indexes, [
('CREATE INDEX "indexed_model_array_index" ON "indexed_model" '
'USING GIN ("array_index")'),
('CREATE INDEX "indexed_model_fake_index" ON "indexed_model" '
'USING GiST ("fake_index")'),
('CREATE INDEX "indexed_model_fake_index_with_type" '
'ON "indexed_model" '
'USING MAGIC ("fake_index_with_type")')])
class TestServerSide(ModelTestCase):
database = db
requires = [Register]
def setUp(self):
super(TestServerSide, self).setUp()
with db.atomic():
for i in range(100):
Register.create(value=i)
def test_server_side_cursor(self):
query = Register.select().order_by(Register.value)
with self.assertQueryCount(1):
data = [row.value for row in ServerSide(query)]
self.assertEqual(data, list(range(100)))
ss_query = ServerSide(query.limit(10), array_size=3)
self.assertEqual([row.value for row in ss_query], list(range(10)))
ss_query = ServerSide(query.where(SQL('1 = 0')))
self.assertEqual(list(ss_query), [])
| 34.965962 | 99 | 0.543151 |
0050f4b8b7750ffca4ce7ad4c8709b2cae606b1c | 5,457 | py | Python | benchmarks/f3_wrong_hints_permutations/scaling_software_termination/9-2Nested_false-termination_3.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | ["MIT"] | 3 | 2021-04-23T23:29:26.000Z | 2022-03-23T10:00:30.000Z | benchmarks/f3_wrong_hints_permutations/scaling_software_termination/9-2Nested_false-termination_3.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | ["MIT"] | null | null | null | benchmarks/f3_wrong_hints_permutations/scaling_software_termination/9-2Nested_false-termination_3.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | ["MIT"] | 1 | 2021-11-17T22:02:56.000Z | 2021-11-17T22:02:56.000Z |
from typing import Tuple, FrozenSet
from pysmt.environment import Environment as PysmtEnv
from pysmt.fnode import FNode
import pysmt.typing as types
from utils import symb_to_next
from hint import Hint, Location
def transition_system(env: PysmtEnv) -> Tuple[FrozenSet[FNode], FNode, FNode,
FNode]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
pc = mgr.Symbol("pc", types.INT)
x = mgr.Symbol("x", types.INT)
y = mgr.Symbol("y", types.INT)
x_pc = symb_to_next(mgr, pc)
x_x = symb_to_next(mgr, x)
x_y = symb_to_next(mgr, y)
symbols = frozenset([pc, x, y])
m_1 = mgr.Int(-1)
n_locs = 3
max_int = n_locs
ints = []
pcs = []
x_pcs = []
for idx in range(n_locs):
num = mgr.Int(idx)
ints.append(num)
pcs.append(mgr.Equals(pc, num))
x_pcs.append(mgr.Equals(x_pc, num))
for idx in range(n_locs, max_int):
num = mgr.Int(idx)
ints.append(num)
pcend = mgr.Equals(pc, m_1)
x_pcend = mgr.Equals(x_pc, m_1)
init = pcs[0]
cfg = []
# pc = 0 & (x >= 0) -> pc' = 1
cond = mgr.GE(x, ints[0])
cfg.append(mgr.Implies(mgr.And(pcs[0], cond), x_pcs[1]))
# pc = 0 & !(x >= 0) -> pc' = -1
cfg.append(mgr.Implies(mgr.And(pcs[0], mgr.Not(cond)), x_pcend))
# pc = 1 -> pc' = 2
cfg.append(mgr.Implies(pcs[1], x_pcs[2]))
# pc = 2 -> pc' = 0
cfg.append(mgr.Implies(pcs[2], x_pcs[0]))
# pc = -1 -> pc' = -1
cfg.append(mgr.Implies(pcend, x_pcend))
trans = []
same_x = mgr.Equals(x_x, x)
same_y = mgr.Equals(x_y, y)
same = mgr.And(same_x, same_y)
# pc = 0 -> same
trans.append(mgr.Implies(pcs[0], same))
# pc = 1 -> x' = x + y & same_y
trans.append(mgr.Implies(pcs[1],
mgr.And(mgr.Equals(x_x, mgr.Plus(x, y)),
same_y)))
# pc = 2 -> same_x & y' = y + 1
trans.append(mgr.Implies(pcs[2],
mgr.And(same_x,
mgr.Equals(x_y, mgr.Plus(y, ints[1])))))
# pc = end -> same
trans.append(mgr.Implies(pcend, same))
trans = mgr.And(*cfg, *trans)
fairness = mgr.Not(mgr.Equals(pc, m_1))
return symbols, init, trans, fairness
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
pc = mgr.Symbol("pc", types.INT)
x = mgr.Symbol("x", types.INT)
y = mgr.Symbol("y", types.INT)
symbs = frozenset([pc, x, y])
m_100 = mgr.Int(-100)
m_1 = mgr.Int(-1)
i_0 = mgr.Int(0)
i_1 = mgr.Int(1)
i_2 = mgr.Int(2)
i_4 = mgr.Int(4)
i_20 = mgr.Int(20)
x_pc = symb_to_next(mgr, pc)
x_x = symb_to_next(mgr, x)
x_y = symb_to_next(mgr, y)
res = []
stutter = mgr.Equals(x_y, y)
loc = Location(env, mgr.TRUE(), mgr.LE(x, i_20), stutterT=stutter)
loc.set_progress(0, mgr.Equals(x_y, mgr.Plus(x, y)))
h_y = Hint("h_y1", env, frozenset([y]), symbs)
h_y.set_locs([loc])
res.append(h_y)
loc0 = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1))
loc0.set_progress(1, mgr.Equals(x_x, mgr.Plus(x, y)))
loc1 = Location(env, mgr.GE(x, i_2), mgr.GE(y, i_1))
loc1.set_progress(0, mgr.Equals(x_x, y))
h_x = Hint("h_x2", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1])
res.append(h_x)
loc0 = Location(env, mgr.LE(x, i_20))
loc0.set_progress(1, mgr.Equals(x_x, mgr.Plus(mgr.Times(x, x), i_1)))
loc1 = Location(env, mgr.GE(x, i_20))
loc1.set_progress(0, mgr.LT(x_x, mgr.Times(m_1, x, x)))
h_x = Hint("h_x6", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1])
res.append(h_x)
stutter = mgr.Equals(x_y, y)
loc = Location(env, mgr.TRUE(), mgr.LE(x, i_20), stutterT=stutter)
loc.set_progress(0, mgr.Equals(x_y, mgr.Plus(i_1, y)))
h_y = Hint("h_y0", env, frozenset([y]), symbs)
h_y.set_locs([loc])
res.append(h_y)
stutter = mgr.Equals(x_x, x)
loc = Location(env, mgr.GE(x, i_20), mgr.GE(y, i_1), stutterT=stutter)
loc.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, y)))
h_x = Hint("h_x0", env, frozenset([x]), symbs)
h_x.set_locs([loc])
res.append(h_x)
stutter = mgr.Equals(x_x, x)
loc = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1), stutterT=stutter)
loc.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, y)))
h_x = Hint("h_x1", env, frozenset([x]), symbs)
h_x.set_locs([loc])
res.append(h_x)
loc0 = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1))
loc0.set_progress(1, mgr.Equals(x_x, mgr.Times(x, y)))
loc1 = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1))
loc1.set_progress(2, mgr.GT(x_x, y))
loc2 = Location(env, mgr.GE(x, i_2))
loc2.set_progress(0, mgr.GE(x_x, i_20))
h_x = Hint("h_x4", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1, loc2])
res.append(h_x)
loc0 = Location(env, mgr.GE(y, m_100))
loc0.set_progress(0, mgr.Equals(x_y, mgr.Times(y, y)))
h_y = Hint("h_y5", env, frozenset([y]), symbs)
h_y.set_locs([loc0])
res.append(h_y)
loc0 = Location(env, mgr.Equals(pc, i_1))
loc0.set_progress(1, mgr.GT(x_pc, mgr.Plus(pc, i_1)))
loc1 = Location(env, mgr.GT(pc, i_2))
loc1.set_progress(0, mgr.Equals(x_pc, i_1))
h_pc = Hint("h_pc0", env, frozenset([pc]), symbs)
h_pc.set_locs([loc0, loc1])
res.append(h_pc)
return frozenset(res)
| 29.819672
| 77
| 0.577973
|
01a9f6b6c792792f496eb5566de27b76dec66348
| 3,887
|
py
|
Python
|
chimera/calibrate.py
|
caltech-chimera/PyChimera
|
61beeacea703d745b2f385b4f2197ce9c842f650
|
[
"MIT"
] | 1
|
2019-03-20T20:58:49.000Z
|
2019-03-20T20:58:49.000Z
|
chimera/calibrate.py
|
caltech-chimera/PyChimera
|
61beeacea703d745b2f385b4f2197ce9c842f650
|
[
"MIT"
] | null | null | null |
chimera/calibrate.py
|
caltech-chimera/PyChimera
|
61beeacea703d745b2f385b4f2197ce9c842f650
|
[
"MIT"
] | 3
|
2020-01-29T04:59:36.000Z
|
2020-09-29T22:51:53.000Z
|
from __future__ import division, print_function
import numpy as np
def masterbias(image, skip = 0):
"""
Create an average master bias frame.
Parameters
----------
image : numpy array
3D bias array from CHIMERA
skip : int
Number of frames to skip from the start. Default is 0 (no frame is skipped).
Returns
-------
avg_bias : numpy array
2D average bias image
"""
if image.ndim == 3:
avg_bias = np.mean(image[skip:,:,:], axis = 0, dtype = np.float32)
return avg_bias
else:
print "MASTERBIAS: Only 3D image arrays are supported"
return
def masterflat(flat_image, bias_image, threshold = 0.8):
"""
Generate a normalized flat field.
Parameters
----------
flat_image : numpy array
3D flat field image
bias_image : numpy array
2D master bias image
threshold : float
Normalized threshold used to normalize the flat field
Returns
-------
norm_flat : numpy array
Normalized flat field image
"""
# Check that the flat field is a 3D array and the bias frame is 2D
if flat_image.ndim == 3 and bias_image.ndim == 2:
# Promote the flat field image from int to float
flat_image = flat_image.astype(np.float64)
# Subtract bias dc from flat fields
flat_image -= bias_image
# Normalize each frame by mean value
nframes, ny, nx = flat_image.shape
norm_flat_data = np.zeros([nframes, ny, nx])
for i in range(nframes):
norm_flat_data[i,:,:] = flat_image[i,:,:] / np.mean(flat_image[i,:,:])
# Median normalized flat field
median_flat = np.median(norm_flat_data, axis = 0)
mask = median_flat > threshold
median_flat /= np.mean(median_flat[mask])
return median_flat
else:
print "MASTERFLAT: Only 3D flat field and 2D master bias supported."
return
def debias(sci_image, bias_image):
"""
De-bias science frames.
Parameters
----------
sci_image : numpy array
Raw science image
bias_image : numpy array
Master bias image
Returns
-------
debias_sci_image : numpy array
Debias science image
"""
if bias_image.ndim == 2 and (sci_image.ndim == 2 or sci_image.ndim == 3):
debias_sci_image = sci_image - bias_image
return debias_sci_image
else:
print "DEBIAS: Only 2D master bias and 2D/3D science images are supported."
return
def imreduce(sci_image, bias_image, flat_image):
"""
De-bias and flat-field the science frames, and generate an average
science frame (if the tracking is poor, a streaked star image will be
generated).
Parameters
----------
sci_image : numpy array
2D or 3D science image array
bias_image : numpy array
2D master bias image array
flat_image : numpy array
2D normalized flat field image array
Returns
-------
sci_red_image : numpy array
2D or 3D reduced image array
avg_sci_image : numpy array
2D average science image array
Note: the average science image is only returned if the input science image
array is 3D; otherwise only the reduced science image is returned.
"""
# De-bias science frames
debias_sci_image = sci_image - bias_image
# Flat-field the science frames
sci_red_image = debias_sci_image/flat_image
sci_red_image = np.asarray(sci_red_image, dtype = np.float32)
# Average the frames
if sci_image.ndim == 3:
avg_sci_image = np.mean(sci_image, axis = 0, dtype = np.float32)
return sci_red_image, avg_sci_image
else:
return sci_red_image
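# Illustrative usage sketch for the calibration pipeline above. The array names
# are hypothetical and stand in for 3D bias/flat/science stacks loaded elsewhere
# (e.g. from FITS files); this is not part of the original module.
#
#     avg_bias = masterbias(bias_stack, skip=1)
#     norm_flat = masterflat(flat_stack, avg_bias, threshold=0.8)
#     sci_red, sci_avg = imreduce(sci_stack, avg_bias, norm_flat)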
| 26.993056
| 83
| 0.603036
|
7ebae7f0116f484f30d0c2dc2a5ad55dd7ee34bd
| 1,647
|
py
|
Python
|
model-optimizer/mo/utils/utils_test.py
|
apexxs/dldt
|
17e66dc5a6631d630da454506902bd7c25d4170b
|
[
"Apache-2.0"
] | 2
|
2021-04-19T06:08:35.000Z
|
2021-08-25T02:43:43.000Z
|
model-optimizer/mo/utils/utils_test.py
|
apexxs/dldt
|
17e66dc5a6631d630da454506902bd7c25d4170b
|
[
"Apache-2.0"
] | 6
|
2022-01-11T18:56:22.000Z
|
2022-02-21T13:20:20.000Z
|
model-optimizer/mo/utils/utils_test.py
|
apexxs/dldt
|
17e66dc5a6631d630da454506902bd7c25d4170b
|
[
"Apache-2.0"
] | 3
|
2021-02-05T17:11:17.000Z
|
2021-04-19T08:33:31.000Z
|
"""
Copyright (c) 2018 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import networkx as nx
import numpy as np
from mo.utils.utils import match_shapes
class TestMatchShapes(unittest.TestCase):
def run_match_shapes(self, pattern: list, shape: list):
return match_shapes(np.array(pattern, dtype=np.int64), np.array(shape, dtype=np.int64))
def test_positive(self):
self.assertTrue(self.run_match_shapes([], []))
self.assertTrue(self.run_match_shapes([1,2,3], [1,2,3]))
self.assertTrue(self.run_match_shapes([-1,2,3], [1,2,3]))
self.assertTrue(self.run_match_shapes([1,-1,3], [1,2,3]))
self.assertTrue(self.run_match_shapes([-1,-1,-1], [1,2,3]))
self.assertTrue(self.run_match_shapes([-1], [2]))
def test_negative(self):
self.assertFalse(self.run_match_shapes([-1], []))
self.assertFalse(self.run_match_shapes([-1], [1,2,3]))
self.assertFalse(self.run_match_shapes([-1,2,3], [1,3,3]))
self.assertFalse(self.run_match_shapes([1,-1,3], [2,2]))
self.assertFalse(self.run_match_shapes([-1, -1, -1], [2, 3, 4, 5]))
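# Contract implied by the tests above: two shapes match only when they have the
# same rank and every pattern entry equals the corresponding dimension or is the
# wildcard -1, e.g. match_shapes(np.array([-1, 2]), np.array([5, 2])) is True.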
| 39.214286
| 95
| 0.693382
|
06e3d5b534a394b949e71192382de38477050c22
| 17
|
py
|
Python
|
login.py
|
zerotsou/test
|
409c3bafe36ff7aebb3c8eacdc36d26752ff24f1
|
[
"MIT"
] | null | null | null |
login.py
|
zerotsou/test
|
409c3bafe36ff7aebb3c8eacdc36d26752ff24f1
|
[
"MIT"
] | null | null | null |
login.py
|
zerotsou/test
|
409c3bafe36ff7aebb3c8eacdc36d26752ff24f1
|
[
"MIT"
] | null | null | null |
num = 1
num2 = 2
| 5.666667
| 8
| 0.529412
|
2e595684e454fd8c14f60599a2410a0f2b883780
| 5,896
|
py
|
Python
|
22/22b.py
|
atnguyen1/Adventofcode2020
|
c57ed78354d21eb60a699ac11e4708093922cc33
|
[
"MIT"
] | null | null | null |
22/22b.py
|
atnguyen1/Adventofcode2020
|
c57ed78354d21eb60a699ac11e4708093922cc33
|
[
"MIT"
] | null | null | null |
22/22b.py
|
atnguyen1/Adventofcode2020
|
c57ed78354d21eb60a699ac11e4708093922cc33
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import argparse
import re
from collections import defaultdict, deque
import sys
# Memoize deck states and who wins
GLOBAL_STATES = dict()
GLOBAL_GAME_COUNT = 1
class Combat():
def __init__(self, one, two, game_count):
self.player1 = deque(one)
self.player2 = deque(two)
self.round = 1
# self.game_count = game_count
self.current_game_id = game_count
self.round_record = list()
#! print('=== Game ' + str(game_count) + ' ===')
#! print('')
def trick(self):
global GLOBAL_GAME_COUNT
global GLOBAL_STATES
# Check infinite case
start_record_1 = ''.join([str(x) for x in self.player1])
start_record_2 = ''.join([str(y) for y in self.player2])
record = start_record_1 + '-' + start_record_2
if record in self.round_record:
# We've played this state before
#! print('-- Round ' + str(self.round) + ' (Game ' + str(self.current_game_id) + ') --')
#! print('Player 1\'s deck:', self.player1)
#! print('Player 2\'s deck:', self.player2)
#! print('Previous State Player 1 Wins')
return 0
else:
self.round_record.append(record)
#! print('-- Round ' + str(self.round) + ' (Game ' + str(self.current_game_id) + ') --')
#! print('Player 1\'s deck:', self.player1)
#! print('Player 2\'s deck:', self.player2)
card_one = self.player1.popleft()
card_two = self.player2.popleft()
#! print('Player 1 plays', card_one)
#! print('Player 2 plays', card_two)
if (len(self.player1) >= card_one) and (len(self.player2) >= card_two):
# Recurse
#! print('Playing a sub-game to determine the winner...')
#! print('')
new_1_deck = [x for z, x in enumerate(self.player1) if z < card_one]
new_2_deck = [y for w, y in enumerate(self.player2) if w < card_two]
GLOBAL_GAME_COUNT += 1
# memoize string of deck + winner
subgame1_str = [str(x) for x in new_1_deck]
subgame1_str = ''.join(subgame1_str)
subgame2_str = [str(y) for y in new_2_deck]
subgame2_str = ''.join(subgame2_str)
subgame_record = subgame1_str + '-' + subgame2_str
if subgame_record in GLOBAL_STATES:
ng_winner = GLOBAL_STATES[subgame_record]
else:
ng = Combat(new_1_deck, new_2_deck, GLOBAL_GAME_COUNT)
ng_winner = ng.complete()
GLOBAL_STATES[subgame_record] = ng_winner
#! print('...anyway, back to game ' + str(self.current_game_id) + '.')
if ng_winner == 1:
# Player one wins trick
#! print('Player 1 Wins round ' + str(self.round) + ' of game ' + str(self.current_game_id))
#! print('')
self.player1.append(card_one)
self.player1.append(card_two)
elif ng_winner == 2:
#! print('Player 2 Wins round ' + str(self.round) + ' of game ' + str(self.current_game_id))
#! print('')
self.player2.append(card_two)
self.player2.append(card_one)
else:
if card_one > card_two:
#! print('Player 1 Wins round ' + str(self.round) + ' of game ' + str(self.current_game_id))
#! print('')
self.player1.append(card_one)
self.player1.append(card_two)
elif card_two > card_one:
#! print('Player 2 Wins round ' + str(self.round) + ' of game ' + str(self.current_game_id))
#! print('')
self.player2.append(card_two)
self.player2.append(card_one)
else:
print('Error State not >', card_one, card_two)
sys.exit()
self.round += 1
return 1 # Use return states to stop infinite recursion
def complete(self):
while self.player1 != deque() and self.player2 != deque():
success = self.trick()
if success == 0:
return 1 # If we hit previous round state, player 1 wins
# Return Winner
if self.player1 == deque():
#! print('The winner of game ' + str(self.current_game_id) + ' is player ' + str(2) + '!')
#! print('')
return 2
else:
#! print('The winner of game ' + str(self.current_game_id) + ' is player ' + str(1) + '!')
#! print('')
return 1
def compute_score(self):
if len(self.player1) > 0:
winner = self.player1
else:
winner = self.player2
score_multiplier = [x for x in range(1, len(winner) + 1)]
score_multiplier = score_multiplier[::-1]
#print(score_multiplier)
#print(winner)
score = 0
for z, s in enumerate(winner):
score += s * score_multiplier[z]
print(score)
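# Worked example for the scoring above: the winner's deck is scored top-to-bottom
# with multipliers len(deck)..1, so the bottom card counts once. For a winning
# deck [3, 2, 10, 6, 8, 5, 9, 4, 7, 1] this gives
# 3*10 + 2*9 + 10*8 + 6*7 + 8*6 + 5*5 + 9*4 + 4*3 + 7*2 + 1*1 = 306.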
def print_state(self):
print(self.player1)
print(self.player2)
def main(args):
"""
"""
with open(args.input, 'r') as fh:
data = fh.read().split('\n\n')
player1 = data[0].split('\n')[1:]
player1 = [int(x) for x in player1]
player2 = data[1].split('\n')[1:]
player2 = [int(x) for x in player2]
# print('Starting Player1:', player1)
# print('Starting Player2:', player2)
game = Combat(player1, player2, 1)
winner = game.complete()
game.compute_score()
if __name__ == '__main__':
desc = 'Advent 22b'
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('--input', type=str, help='Puzzle Input')
args = parser.parse_args()
main(args)
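# Expected puzzle input format (per the parsing in main): two blocks separated by
# a blank line, each starting with a header line followed by one card value per
# line, e.g.
#     Player 1:
#     9
#     2
#
#     Player 2:
#     5
#     8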
| 34.682353
| 108
| 0.540706
|
72ad61b9c560d01b439b0fdd0242a76b7d44e507
| 5,388
|
py
|
Python
|
tests/test_training_topic.py
|
martenson/planemo
|
dd4814afef0b7019ff0b5cea175724f70cc51dcf
|
[
"CC-BY-3.0"
] | null | null | null |
tests/test_training_topic.py
|
martenson/planemo
|
dd4814afef0b7019ff0b5cea175724f70cc51dcf
|
[
"CC-BY-3.0"
] | null | null | null |
tests/test_training_topic.py
|
martenson/planemo
|
dd4814afef0b7019ff0b5cea175724f70cc51dcf
|
[
"CC-BY-3.0"
] | null | null | null |
"""Training:topic functions."""
import os
import shutil
from planemo.training.topic import Topic
from planemo.training.utils import load_yaml
from .test_utils import TEST_DATA_DIR
def test_topic_init():
"""Test :func:`planemo.training.topic.Topic.init`."""
# test requirement with default parameter
topic = Topic()
assert topic.name == "new_topic"
assert topic.type == "use"
assert topic.title == "The new topic"
assert topic.summary == "Summary"
assert topic.docker_image == ""
assert "maintainers" in topic.maintainers
assert topic.parent_dir == "topics"
assert topic.dir == "topics/new_topic"
assert topic.requirements[0].link == "/introduction/"
assert topic.references[0].link == "link"
# test requirement with non default
topic = Topic(name="topic2", target="admin", title="The 2nd topic", summary="", parent_dir="dir")
assert topic.name == "topic2"
assert topic.type == "admin"
assert topic.title == "The 2nd topic"
assert topic.summary == ""
assert topic.parent_dir == "dir"
assert topic.dir == "dir/topic2"
assert len(topic.requirements) == 0
assert len(topic.references) == 0
def test_topic_init_from_kwds():
"""Test :func:`planemo.training.topic.Topic.init_from_kwds`."""
topic = Topic()
topic.init_from_kwds({
'topic_name': "topic",
'topic_title': "New topic",
'topic_target': "admin",
'topic_summary': "Topic summary"
})
assert topic.name == "topic"
assert topic.type == "admin"
assert topic.title == "New topic"
assert topic.summary == "Topic summary"
assert topic.dir == "topics/topic"
assert len(topic.requirements) == 0
assert len(topic.references) == 0
def test_topic_init_from_metadata():
"""Test :func:`planemo.training.topic.Topic.init_from_metadata`."""
topic = Topic()
os.makedirs(topic.dir)
shutil.copy(os.path.join(TEST_DATA_DIR, 'training_metadata.yaml'), topic.metadata_fp)
topic.init_from_metadata()
assert topic.name == 'test'
assert topic.title == 'Test'
assert topic.summary == 'Summary'
assert topic.requirements[0].title == 'Galaxy introduction'
assert 'maintainer1' in topic.maintainers
shutil.rmtree(topic.parent_dir)
def test_topic_get_requirements():
"""Test :func:`planemo.training.topic.Topic.get_requirements`."""
topic = Topic()
reqs = topic.get_requirements()
assert len(reqs) == 1
assert 'title' in reqs[0]
def test_topic_get_references():
"""Test :func:`planemo.training.topic.Topic.get_references`."""
topic = Topic()
refs = topic.get_references()
assert len(refs) == 1
assert 'authors' in refs[0]
def test_topic_export_metadata_to_ordered_dict():
"""Test :func:`planemo.training.topic.Topic.export_metadata_to_ordered_dict`."""
topic = Topic()
metadata = topic.export_metadata_to_ordered_dict()
assert 'name' in metadata
assert metadata['name'] == "new_topic"
assert 'type' in metadata
assert 'title' in metadata
assert 'summary' in metadata
assert 'requirements' in metadata
assert 'docker_image' in metadata
assert 'maintainers' in metadata
assert 'references' in metadata
def test_topic_set_paths():
"""Test :func:`planemo.training.topic.Topic.set_paths`."""
new_name = 'the_new_name'
topic = Topic()
topic.name = new_name
topic.set_paths()
assert new_name in topic.dir
assert new_name in topic.img_folder
assert new_name in topic.tuto_folder
assert new_name in topic.index_fp
assert new_name in topic.readme_fp
assert new_name in topic.metadata_fp
assert new_name in topic.docker_folder
assert new_name in topic.dockerfile_fp
assert new_name in topic.slides_folder
def test_topic_exists():
"""Test :func:`planemo.training.topic.Topic.exists`."""
topic = Topic()
assert not topic.exists()
os.makedirs(topic.dir)
assert topic.exists()
shutil.rmtree(topic.parent_dir)
def test_topic_create_topic_structure():
"""Test :func:`planemo.training.topic.Topic.create_topic_structure`."""
topic = Topic()
topic.create_topic_structure()
topic_name = "new_topic"
topic_title = "The new topic"
# check the folder and its structure
assert topic.exists()
assert os.path.exists(topic.img_folder)
assert os.path.exists(topic.tuto_folder)
# create the index.md and the topic name
assert os.path.exists(topic.index_fp)
assert topic_name in open(topic.index_fp, 'r').read()
# create the README.md and the topic name
assert os.path.exists(topic.readme_fp)
assert topic_title in open(topic.readme_fp, 'r').read()
# check metadata content
assert os.path.exists(topic.metadata_fp)
metadata = load_yaml(topic.metadata_fp)
assert metadata['name'] == topic_name
# check dockerfile
assert os.path.exists(topic.dockerfile_fp)
assert topic_name in open(topic.dockerfile_fp, 'r').read()
assert topic_title in open(topic.dockerfile_fp, 'r').read()
# check introduction slide
assert os.path.exists(topic.intro_slide_fp)
assert topic_title in open(topic.intro_slide_fp, 'r').read()
# check in metadata directory
assert os.path.exists(os.path.join("metadata", "%s.yaml" % topic_name))
# clean
shutil.rmtree(topic.parent_dir)
shutil.rmtree("metadata")
| 34.318471
| 101
| 0.694878
|
2d97d842c153bb8dff9d96b8ffacb20e915f1ddf
| 435
|
py
|
Python
|
less2_task5.py
|
rezapci/Algorithms-with-Python
|
5f4faf2d463f33375856f5a5ab525467d303aa24
|
[
"MIT"
] | null | null | null |
less2_task5.py
|
rezapci/Algorithms-with-Python
|
5f4faf2d463f33375856f5a5ab525467d303aa24
|
[
"MIT"
] | null | null | null |
less2_task5.py
|
rezapci/Algorithms-with-Python
|
5f4faf2d463f33375856f5a5ab525467d303aa24
|
[
"MIT"
] | null | null | null |
# Link to flowcharts:
# https://drive.google.com/file/d/12xTQSyUeeSIWUDkwn3nWW-KHMmj31Rxy/view?usp=sharing
# Display the codes and characters of the ASCII table, starting with the character at number 32 and ending with the 127th inclusive.
# Print the output in tabular form: ten "code-symbol" pairs per line.
for i in range(32, 128):
print("{} - {}".format(i, chr(i)), end=' ')
if (i - 31) % 10 == 0: # line break after every tenth pair
print()
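# The first few printed pairs look like:
# 32 -   33 - ! 34 - " 35 - # 36 - $ 37 - % 38 - & 39 - '
# with a line break after every tenth pair.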
| 39.545455
| 132
| 0.685057
|
eb92fcee003eb3966bbb74f3b56473341c576e06
| 3,616
|
py
|
Python
|
scripts/tree_collapse_species.py
|
CGATOxford/Optic
|
2df92e953b5139ff4e5c383cb4383e6367cd47f1
|
[
"MIT"
] | null | null | null |
scripts/tree_collapse_species.py
|
CGATOxford/Optic
|
2df92e953b5139ff4e5c383cb4383e6367cd47f1
|
[
"MIT"
] | null | null | null |
scripts/tree_collapse_species.py
|
CGATOxford/Optic
|
2df92e953b5139ff4e5c383cb4383e6367cd47f1
|
[
"MIT"
] | 1
|
2020-03-31T22:55:50.000Z
|
2020-03-31T22:55:50.000Z
|
"""
tree_collapse_species.py - collapse single species nodes
========================================================
:Author: Andreas Heger
:Release: $Id$
:Date: |today|
:Tags: Python
Purpose
-------
This script reads a collection of trees from stdin and
collapses nodes that have children of the same species.
Usage
-----
Example::
python <script_name>.py --help
Type::
python <script_name>.py --help
for command line help.
Command line options
--------------------
"""
import sys
import string
import re
import getopt
from Bio.Nexus import Nexus
import CGAT.Experiment as E
USAGE = """python %s [OPTIONS] < tree.in > tree.out
Version: $Id: tree_collapse_species.py 2782 2009-09-10 11:40:29Z andreas $
Options:
-h, --help print this message.
-v, --verbose= loglevel.
-p, --pattern-species= regex pattern to extract species from identifier
""" % sys.argv[0]
param_loglevel = 1
param_long_options = ["verbose=", "help",
"pattern-species=",
"version"]
param_short_options = "v:hp:"
param_pattern_species = "^([^@:]+)[@:]"
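# With this default pattern, leaf identifiers are expected to carry a species
# prefix before "@" or ":", e.g. "hsapiens:gene1" or "hsapiens@gene1" (names here
# are hypothetical). A clade such as ((hsapiens:gene1, hsapiens:gene2),
# mmusculus:gene3) would therefore be collapsed so only one hsapiens leaf remains.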
def PruneTree(tree, id):
if id not in tree.get_terminals():
raise "Not a terminal taxon: %i" % id
prev = tree.unlink(id)
tree.kill(id)
if not prev == tree.root and len(tree.node(prev).succ) == 1:
succ = tree.node(prev).get_succ()[0]
new_bl = tree.node(prev).data.branchlength + \
tree.node(succ).data.branchlength
tree.collapse(prev)
tree.node(succ).data.branchlength = new_bl
return prev
def main(argv=None):
"""script main.
parses command line options in sys.argv, unless *argv* is given.
"""
global param_loglevel, param_pattern_species
if argv is None:
argv = sys.argv
try:
optlist, args = getopt.getopt(
sys.argv[1:], param_short_options, param_long_options)
except getopt.error, msg:
print USAGE, msg
sys.exit(2)
for o, a in optlist:
if o in ("-v", "--verbose"):
param_loglevel = int(a)
elif o in ("-h", "--help"):
print USAGE
sys.exit(0)
elif o in ("--version", ):
print "version="
sys.exit(0)
elif o in ("-p", "--pattern-species"):
param_pattern_species = a
print E.GetHeader()
print E.GetParams()
lines = ["#NEXUS\nBegin trees;\ntree tree = "] + \
sys.stdin.readlines() + ["End;"]
nexus = Nexus.Nexus(string.join(lines, ""))
if len(nexus.trees) != 1:
raise "no tree found in file."
tree = nexus.trees[0]
if param_loglevel >= 2:
tree.display()
rx = re.compile(param_pattern_species)
changed = True
while changed:
changed = False
leaves = tree.get_terminals()
for x in range(0, len(leaves) - 1):
nx = leaves[x]
t1 = tree.node(nx).get_data().taxon
s1 = rx.search(t1).groups()[0]
p1 = tree.node(nx).get_prev()
for y in range(x + 1, len(leaves)):
ny = leaves[y]
t2 = tree.node(ny).get_data().taxon
s2 = rx.search(t2).groups()[0]
p2 = tree.node(ny).get_prev()
if s1 == s2 and tree.is_monophyletic((nx, ny)) != -1:
print "collapsing nodes", t1, t2, nx, ny, p1, p2
PruneTree(tree, nx)
if param_loglevel >= 2:
tree.display()
changed = True
print E.GetFooter()
if __name__ == "__main__":
sys.exit(main(sys.argv))
| 23.94702
| 80
| 0.545077
|
7f8acb8c32c99c7ba80d8e3a3829f071d7d4f17b
| 8,329
|
py
|
Python
|
tools/efro/terminal.py
|
BombSquaders/ballistica
|
bd1b98dfb9bf1c8c0a05a5feed44a6c8d9513e17
|
[
"MIT"
] | null | null | null |
tools/efro/terminal.py
|
BombSquaders/ballistica
|
bd1b98dfb9bf1c8c0a05a5feed44a6c8d9513e17
|
[
"MIT"
] | null | null | null |
tools/efro/terminal.py
|
BombSquaders/ballistica
|
bd1b98dfb9bf1c8c0a05a5feed44a6c8d9513e17
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2011-2020 Eric Froemling
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -----------------------------------------------------------------------------
"""Functionality related to terminal IO."""
from __future__ import annotations
import sys
import os
from enum import Enum, unique
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Any
@unique
class TerminalColor(Enum):
"""Color codes for printing to terminals.
Generally the Clr class should be used when incorporating color into
terminal output, as it handles non-color-supporting terminals/etc.
"""
# Styles
RESET = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
INVERSE = '\033[7m'
# Normal foreground colors
BLACK = '\033[30m'
RED = '\033[31m'
GREEN = '\033[32m'
YELLOW = '\033[33m'
BLUE = '\033[34m'
MAGENTA = '\033[35m'
CYAN = '\033[36m'
WHITE = '\033[37m'
# Normal background colors.
BG_BLACK = '\033[40m'
BG_RED = '\033[41m'
BG_GREEN = '\033[42m'
BG_YELLOW = '\033[43m'
BG_BLUE = '\033[44m'
BG_MAGENTA = '\033[45m'
BG_CYAN = '\033[46m'
BG_WHITE = '\033[47m'
# Strong foreground colors
STRONG_BLACK = '\033[90m'
STRONG_RED = '\033[91m'
STRONG_GREEN = '\033[92m'
STRONG_YELLOW = '\033[93m'
STRONG_BLUE = '\033[94m'
STRONG_MAGENTA = '\033[95m'
STRONG_CYAN = '\033[96m'
STRONG_WHITE = '\033[97m'
# Strong background colors.
STRONG_BG_BLACK = '\033[100m'
STRONG_BG_RED = '\033[101m'
STRONG_BG_GREEN = '\033[102m'
STRONG_BG_YELLOW = '\033[103m'
STRONG_BG_BLUE = '\033[104m'
STRONG_BG_MAGENTA = '\033[105m'
STRONG_BG_CYAN = '\033[106m'
STRONG_BG_WHITE = '\033[107m'
def _default_color_enabled() -> bool:
"""Return whether we should enable ANSI color codes by default."""
import platform
# If we're not attached to a terminal, go with no-color.
if not sys.__stdout__.isatty():
return False
# On windows, try to enable ANSI color mode.
if platform.system() == 'Windows':
return _windows_enable_color()
# We seem to be a terminal with color support; let's do it!
return True
# noinspection PyPep8Naming
def _windows_enable_color() -> bool:
"""Attempt to enable ANSI color on windows terminal; return success."""
# pylint: disable=invalid-name, import-error, undefined-variable
# Pulled from: https://bugs.python.org/issue30075
import msvcrt
import ctypes
from ctypes import wintypes
kernel32 = ctypes.WinDLL('kernel32', use_last_error=True) # type: ignore
ERROR_INVALID_PARAMETER = 0x0057
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004
def _check_bool(result: Any, _func: Any, args: Any) -> Any:
if not result:
raise ctypes.WinError(ctypes.get_last_error()) # type: ignore
return args
LPDWORD = ctypes.POINTER(wintypes.DWORD)
kernel32.GetConsoleMode.errcheck = _check_bool
kernel32.GetConsoleMode.argtypes = (wintypes.HANDLE, LPDWORD)
kernel32.SetConsoleMode.errcheck = _check_bool
kernel32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD)
def set_conout_mode(new_mode: int, mask: int = 0xffffffff) -> int:
# don't assume StandardOutput is a console.
# open CONOUT$ instead
fdout = os.open('CONOUT$', os.O_RDWR)
try:
hout = msvcrt.get_osfhandle(fdout)
old_mode = wintypes.DWORD()
kernel32.GetConsoleMode(hout, ctypes.byref(old_mode))
mode = (new_mode & mask) | (old_mode.value & ~mask)
kernel32.SetConsoleMode(hout, mode)
return old_mode.value
finally:
os.close(fdout)
def enable_vt_mode() -> int:
mode = mask = ENABLE_VIRTUAL_TERMINAL_PROCESSING
try:
return set_conout_mode(mode, mask)
except WindowsError as exc:
if exc.winerror == ERROR_INVALID_PARAMETER:
raise NotImplementedError
raise
try:
enable_vt_mode()
return True
except NotImplementedError:
return False
class Clr:
"""Convenience class for color terminal output.
These will be set to ANSI color escape sequences if the current process
seems to be an interactive terminal (sys.__stdout__.isatty()), otherwise
they will be empty strings.
If the environment variable EFRO_TERMCOLORS is set to 0 or 1, that
value will be used instead.
"""
_envval = os.environ.get('EFRO_TERMCOLORS')
color_enabled = (True if _envval == '1' else
False if _envval == '0' else _default_color_enabled())
if color_enabled:
# Styles
RST = TerminalColor.RESET.value
BLD = TerminalColor.BOLD.value
UND = TerminalColor.UNDERLINE.value
INV = TerminalColor.INVERSE.value
# Normal foreground colors
BLK = TerminalColor.BLACK.value
RED = TerminalColor.RED.value
GRN = TerminalColor.GREEN.value
YLW = TerminalColor.YELLOW.value
BLU = TerminalColor.BLUE.value
MAG = TerminalColor.MAGENTA.value
CYN = TerminalColor.CYAN.value
WHT = TerminalColor.WHITE.value
# Normal background colors.
BBLK = TerminalColor.BG_BLACK.value
BRED = TerminalColor.BG_RED.value
BGRN = TerminalColor.BG_GREEN.value
BYLW = TerminalColor.BG_YELLOW.value
BBLU = TerminalColor.BG_BLUE.value
BMAG = TerminalColor.BG_MAGENTA.value
BCYN = TerminalColor.BG_CYAN.value
BWHT = TerminalColor.BG_WHITE.value
# Strong foreground colors
SBLK = TerminalColor.STRONG_BLACK.value
SRED = TerminalColor.STRONG_RED.value
SGRN = TerminalColor.STRONG_GREEN.value
SYLW = TerminalColor.STRONG_YELLOW.value
SBLU = TerminalColor.STRONG_BLUE.value
SMAG = TerminalColor.STRONG_MAGENTA.value
SCYN = TerminalColor.STRONG_CYAN.value
SWHT = TerminalColor.STRONG_WHITE.value
# Strong background colors.
SBBLK = TerminalColor.STRONG_BG_BLACK.value
SBRED = TerminalColor.STRONG_BG_RED.value
SBGRN = TerminalColor.STRONG_BG_GREEN.value
SBYLW = TerminalColor.STRONG_BG_YELLOW.value
SBBLU = TerminalColor.STRONG_BG_BLUE.value
SBMAG = TerminalColor.STRONG_BG_MAGENTA.value
SBCYN = TerminalColor.STRONG_BG_CYAN.value
SBWHT = TerminalColor.STRONG_BG_WHITE.value
else:
# Styles
RST = ''
BLD = ''
UND = ''
INV = ''
# Normal foreground colors
BLK = ''
RED = ''
GRN = ''
YLW = ''
BLU = ''
MAG = ''
CYN = ''
WHT = ''
# Normal background colors.
BBLK = ''
BRED = ''
BGRN = ''
BYLW = ''
BBLU = ''
BMAG = ''
BCYN = ''
BWHT = ''
# Strong foreground colors
SBLK = ''
SRED = ''
SGRN = ''
SYLW = ''
SBLU = ''
SMAG = ''
SCYN = ''
SWHT = ''
# Strong background colors.
SBBLK = ''
SBRED = ''
SBGRN = ''
SBYLW = ''
SBBLU = ''
SBMAG = ''
SBCYN = ''
SBWHT = ''
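# Minimal usage sketch: wrap the coloured part of a message in a colour constant
# and finish with Clr.RST so later output is unaffected. On terminals without
# colour support the constants are empty strings, so the same code is safe.
#
#     print(Clr.RED + 'error: something went wrong' + Clr.RST)
#     print(Clr.BLD + Clr.GRN + 'ok' + Clr.RST)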
| 31.549242
| 79
| 0.632849
|
b14cd68fe652ab1fca376338561e4b0e51d2104c
| 86,639
|
py
|
Python
|
Bio/Seq.py
|
bneron/biopython
|
2c52e57661c8f6cdf4a191850b2f6871f8582af7
|
[
"PostgreSQL"
] | null | null | null |
Bio/Seq.py
|
bneron/biopython
|
2c52e57661c8f6cdf4a191850b2f6871f8582af7
|
[
"PostgreSQL"
] | null | null | null |
Bio/Seq.py
|
bneron/biopython
|
2c52e57661c8f6cdf4a191850b2f6871f8582af7
|
[
"PostgreSQL"
] | null | null | null |
# Copyright 2000-2002 Brad Chapman.
# Copyright 2004-2005 by M de Hoon.
# Copyright 2007-2014 by Peter Cock.
# All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Provides objects to represent biological sequences with alphabets.
See also the Seq_ wiki and the chapter in our tutorial:
- `HTML Tutorial`_
- `PDF Tutorial`_
.. _Seq: http://biopython.org/wiki/Seq
.. _`HTML Tutorial`: http://biopython.org/DIST/docs/tutorial/Tutorial.html
.. _`PDF Tutorial`: http://biopython.org/DIST/docs/tutorial/Tutorial.pdf
"""
from __future__ import print_function
__docformat__ = "restructuredtext en" # Don't just use plain text in epydoc API pages!
import string # for maketrans only
import array
import sys
import warnings
from Bio._py3k import range
from Bio._py3k import basestring
from Bio import BiopythonWarning
from Bio import Alphabet
from Bio.Alphabet import IUPAC
from Bio.Data.IUPACData import ambiguous_dna_complement, ambiguous_rna_complement
from Bio.Data import CodonTable
def _maketrans(complement_mapping):
"""Makes a python string translation table (PRIVATE).
Arguments:
- complement_mapping - a dictionary such as ambiguous_dna_complement
and ambiguous_rna_complement from Data.IUPACData.
Returns a translation table (a string of length 256) for use with the
python string's translate method to use in a (reverse) complement.
Compatible with lower case and upper case sequences.
For internal use only.
"""
before = ''.join(complement_mapping.keys())
after = ''.join(complement_mapping.values())
before += before.lower()
after += after.lower()
if sys.version_info[0] == 3:
return str.maketrans(before, after)
else:
return string.maketrans(before, after)
_dna_complement_table = _maketrans(ambiguous_dna_complement)
_rna_complement_table = _maketrans(ambiguous_rna_complement)
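# With these tables, complementing is a plain string translation, for example:
#     "ACGTacgt".translate(_dna_complement_table) == "TGCAtgca"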
class Seq(object):
"""A read-only sequence object (essentially a string with an alphabet).
Like normal python strings, our basic sequence object is immutable.
This prevents you from doing my_seq[5] = "A" for example, but does allow
Seq objects to be used as dictionary keys.
The Seq object provides a number of string like methods (such as count,
find, split and strip), which are alphabet aware where appropriate.
In addition to the string like sequence, the Seq object has an alphabet
property. This is an instance of an Alphabet class from Bio.Alphabet,
for example generic DNA, or IUPAC DNA. This describes the type of molecule
(e.g. RNA, DNA, protein) and may also indicate the expected symbols
(letters).
The Seq object also provides some biological methods, such as complement,
reverse_complement, transcribe, back_transcribe and translate (which are
not applicable to sequences with a protein alphabet).
"""
def __init__(self, data, alphabet=Alphabet.generic_alphabet):
"""Create a Seq object.
Arguments:
- seq - Sequence, required (string)
- alphabet - Optional argument, an Alphabet object from Bio.Alphabet
You will typically use Bio.SeqIO to read in sequences from files as
SeqRecord objects, whose sequence will be exposed as a Seq object via
the seq property.
However, you will often want to create your own Seq objects directly:
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import IUPAC
>>> my_seq = Seq("MKQHKAMIVALIVICITAVVAALVTRKDLCEVHIRTGQTEVAVF",
... IUPAC.protein)
>>> my_seq
Seq('MKQHKAMIVALIVICITAVVAALVTRKDLCEVHIRTGQTEVAVF', IUPACProtein())
>>> print(my_seq)
MKQHKAMIVALIVICITAVVAALVTRKDLCEVHIRTGQTEVAVF
>>> my_seq.alphabet
IUPACProtein()
"""
# Enforce string storage
if not isinstance(data, basestring):
raise TypeError("The sequence data given to a Seq object should "
"be a string (not another Seq object etc)")
self._data = data
self.alphabet = alphabet # Seq API requirement
def __repr__(self):
"""Returns a (truncated) representation of the sequence for debugging."""
if len(self) > 60:
# Shows the last three letters as it is often useful to see if there
# is a stop codon at the end of a sequence.
# Note total length is 54+3+3=60
return "{0}('{1}...{2}', {3!r})".format(self.__class__.__name__,
str(self)[:54],
str(self)[-3:],
self.alphabet)
else:
return '{0}({1!r}, {2!r})'.format(self.__class__.__name__,
self._data,
self.alphabet)
def __str__(self):
"""Returns the full sequence as a python string, use str(my_seq).
Note that Biopython 1.44 and earlier would give a truncated
version of repr(my_seq) for str(my_seq). If you are writing code
which needs to be backwards compatible with old Biopython, you
should continue to use my_seq.tostring() rather than str(my_seq).
"""
return self._data
def __hash__(self):
"""Hash for comparison.
See the __cmp__ documentation - this has changed from past
versions of Biopython!
"""
# TODO - remove this warning in a future release
warnings.warn("Biopython Seq objects now use string comparison. "
"Older versions of Biopython used object comparison. "
"During this transition, please use hash(id(my_seq)) "
"or my_dict[id(my_seq)] if you want the old behaviour, "
"or use hash(str(my_seq)) or my_dict[str(my_seq)] for "
"the new string hashing behaviour.", BiopythonWarning)
return hash(str(self))
def __eq__(self, other):
"""Compare the sequence to another sequence or a string (README).
Historically comparing Seq objects has done Python object comparison.
After considerable discussion (keeping in mind constraints of the
Python language, hashes and dictionary support), Biopython now uses
simple string comparison (with a warning about the change).
Note that incompatible alphabets (e.g. DNA to RNA) will trigger a
warning.
During this transition period, please just do explicit comparisons:
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import generic_dna
>>> seq1 = Seq("ACGT")
>>> seq2 = Seq("ACGT")
>>> id(seq1) == id(seq2)
False
>>> str(seq1) == str(seq2)
True
The new behaviour is to use string-like equality:
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import generic_dna
>>> seq1 == seq2
True
>>> seq1 == "ACGT"
True
>>> seq1 == Seq("ACGT", generic_dna)
True
"""
if hasattr(other, "alphabet"):
# other could be a Seq or a MutableSeq
if not Alphabet._check_type_compatible([self.alphabet,
other.alphabet]):
warnings.warn("Incompatible alphabets {0!r} and {1!r}".format(
self.alphabet, other.alphabet),
BiopythonWarning)
return str(self) == str(other)
def __ne__(self, other):
"""Not equal, see __eq__ documentation."""
# Seems to be required under Python 2 but not needed on Python 3?
return not (self == other)
def __lt__(self, other):
"""Less than, see __eq__ documentation."""
if hasattr(other, "alphabet"):
if not Alphabet._check_type_compatible([self.alphabet,
other.alphabet]):
warnings.warn("Incompatible alphabets {0!r} and {1!r}".format(
self.alphabet, other.alphabet),
BiopythonWarning)
return str(self) < str(other)
def __le__(self, other):
"""Less than or equal, see __eq__ documentation."""
if hasattr(other, "alphabet"):
if not Alphabet._check_type_compatible([self.alphabet,
other.alphabet]):
warnings.warn("Incompatible alphabets {0!r} and {1!r}".format(
self.alphabet, other.alphabet),
BiopythonWarning)
return str(self) <= str(other)
def __len__(self):
"""Returns the length of the sequence, use len(my_seq)."""
return len(self._data) # Seq API requirement
def __getitem__(self, index): # Seq API requirement
"""Returns a subsequence of single letter, use my_seq[index]."""
# Note since Python 2.0, __getslice__ is deprecated
# and __getitem__ is used instead.
# See http://docs.python.org/ref/sequence-methods.html
if isinstance(index, int):
# Return a single letter as a string
return self._data[index]
else:
# Return the (sub)sequence as another Seq object
return Seq(self._data[index], self.alphabet)
def __add__(self, other):
"""Add another sequence or string to this sequence.
If adding a string to a Seq, the alphabet is preserved:
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import generic_protein
>>> Seq("MELKI", generic_protein) + "LV"
Seq('MELKILV', ProteinAlphabet())
When adding two Seq (like) objects, the alphabets are important.
Consider this example:
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet.IUPAC import unambiguous_dna, ambiguous_dna
>>> unamb_dna_seq = Seq("ACGT", unambiguous_dna)
>>> ambig_dna_seq = Seq("ACRGT", ambiguous_dna)
>>> unamb_dna_seq
Seq('ACGT', IUPACUnambiguousDNA())
>>> ambig_dna_seq
Seq('ACRGT', IUPACAmbiguousDNA())
If we add the ambiguous and unambiguous IUPAC DNA alphabets, we get
the more general ambiguous IUPAC DNA alphabet:
>>> unamb_dna_seq + ambig_dna_seq
Seq('ACGTACRGT', IUPACAmbiguousDNA())
However, if the default generic alphabet is included, the result is
a generic alphabet:
>>> Seq("") + ambig_dna_seq
Seq('ACRGT', Alphabet())
You can't add RNA and DNA sequences:
>>> from Bio.Alphabet import generic_dna, generic_rna
>>> Seq("ACGT", generic_dna) + Seq("ACGU", generic_rna)
Traceback (most recent call last):
...
TypeError: Incompatible alphabets DNAAlphabet() and RNAAlphabet()
You can't add nucleotide and protein sequences:
>>> from Bio.Alphabet import generic_dna, generic_protein
>>> Seq("ACGT", generic_dna) + Seq("MELKI", generic_protein)
Traceback (most recent call last):
...
TypeError: Incompatible alphabets DNAAlphabet() and ProteinAlphabet()
"""
if hasattr(other, "alphabet"):
# other should be a Seq or a MutableSeq
if not Alphabet._check_type_compatible([self.alphabet,
other.alphabet]):
raise TypeError("Incompatible alphabets {0!r} and {1!r}".format(
self.alphabet, other.alphabet))
# They should be the same sequence type (or one of them is generic)
a = Alphabet._consensus_alphabet([self.alphabet, other.alphabet])
return self.__class__(str(self) + str(other), a)
elif isinstance(other, basestring):
# other is a plain string - use the current alphabet
return self.__class__(str(self) + other, self.alphabet)
from Bio.SeqRecord import SeqRecord # Lazy to avoid circular imports
if isinstance(other, SeqRecord):
# Get the SeqRecord's __radd__ to handle this
return NotImplemented
else:
raise TypeError
def __radd__(self, other):
"""Adding a sequence on the left.
If adding a string to a Seq, the alphabet is preserved:
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import generic_protein
>>> "LV" + Seq("MELKI", generic_protein)
Seq('LVMELKI', ProteinAlphabet())
Adding two Seq (like) objects is handled via the __add__ method.
"""
if hasattr(other, "alphabet"):
# other should be a Seq or a MutableSeq
if not Alphabet._check_type_compatible([self.alphabet,
other.alphabet]):
raise TypeError("Incompatible alphabets {0!r} and {1!r}".format(
self.alphabet, other.alphabet))
# They should be the same sequence type (or one of them is generic)
a = Alphabet._consensus_alphabet([self.alphabet, other.alphabet])
return self.__class__(str(other) + str(self), a)
elif isinstance(other, basestring):
# other is a plain string - use the current alphabet
return self.__class__(other + str(self), self.alphabet)
else:
raise TypeError
def tostring(self): # Seq API requirement
"""Returns the full sequence as a python string (DEPRECATED).
You are now encouraged to use str(my_seq) instead of
my_seq.tostring()."""
from Bio import BiopythonDeprecationWarning
warnings.warn("This method is obsolete; please use str(my_seq) "
"instead of my_seq.tostring().",
BiopythonDeprecationWarning)
return str(self)
def tomutable(self): # Needed? Or use a function?
"""Returns the full sequence as a MutableSeq object.
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import IUPAC
>>> my_seq = Seq("MKQHKAMIVALIVICITAVVAAL",
... IUPAC.protein)
>>> my_seq
Seq('MKQHKAMIVALIVICITAVVAAL', IUPACProtein())
>>> my_seq.tomutable()
MutableSeq('MKQHKAMIVALIVICITAVVAAL', IUPACProtein())
Note that the alphabet is preserved.
"""
return MutableSeq(str(self), self.alphabet)
def _get_seq_str_and_check_alphabet(self, other_sequence):
"""string/Seq/MutableSeq to string, checking alphabet (PRIVATE).
For a string argument, returns the string.
For a Seq or MutableSeq, it checks the alphabet is compatible
(raising an exception if it isn't), and then returns a string.
"""
try:
other_alpha = other_sequence.alphabet
except AttributeError:
# Assume other_sequence is a string
return other_sequence
# Other should be a Seq or a MutableSeq
if not Alphabet._check_type_compatible([self.alphabet, other_alpha]):
raise TypeError("Incompatible alphabets {0!r} and {1!r}".format(
self.alphabet, other_alpha))
# Return as a string
return str(other_sequence)
def count(self, sub, start=0, end=sys.maxsize):
"""Non-overlapping count method, like that of a python string.
This behaves like the python string method of the same name,
which does a non-overlapping count!
Returns an integer, the number of occurrences of substring
argument sub in the (sub)sequence given by [start:end].
Optional arguments start and end are interpreted as in slice
notation.
Arguments:
- sub - a string or another Seq object to look for
- start - optional integer, slice start
- end - optional integer, slice end
e.g.
>>> from Bio.Seq import Seq
>>> my_seq = Seq("AAAATGA")
>>> print(my_seq.count("A"))
5
>>> print(my_seq.count("ATG"))
1
>>> print(my_seq.count(Seq("AT")))
1
>>> print(my_seq.count("AT", 2, -1))
1
HOWEVER, please note that because python strings and Seq objects (and
MutableSeq objects) do a non-overlapping search, this may not give
the answer you expect:
>>> "AAAA".count("AA")
2
>>> print(Seq("AAAA").count("AA"))
2
An overlapping search would give the answer as three!
"""
# If it has one, check the alphabet:
sub_str = self._get_seq_str_and_check_alphabet(sub)
return str(self).count(sub_str, start, end)
def __contains__(self, char):
"""Implements the 'in' keyword, like a python string.
e.g.
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import generic_dna, generic_rna, generic_protein
>>> my_dna = Seq("ATATGAAATTTGAAAA", generic_dna)
>>> "AAA" in my_dna
True
>>> Seq("AAA") in my_dna
True
>>> Seq("AAA", generic_dna) in my_dna
True
Like other Seq methods, this will raise a type error if another Seq
(or Seq like) object with an incompatible alphabet is used:
>>> Seq("AAA", generic_rna) in my_dna
Traceback (most recent call last):
...
TypeError: Incompatible alphabets DNAAlphabet() and RNAAlphabet()
>>> Seq("AAA", generic_protein) in my_dna
Traceback (most recent call last):
...
TypeError: Incompatible alphabets DNAAlphabet() and ProteinAlphabet()
"""
# If it has one, check the alphabet:
sub_str = self._get_seq_str_and_check_alphabet(char)
return sub_str in str(self)
def find(self, sub, start=0, end=sys.maxsize):
"""Find method, like that of a python string.
This behaves like the python string method of the same name.
Returns an integer, the index of the first occurrence of substring
argument sub in the (sub)sequence given by [start:end].
Arguments:
- sub - a string or another Seq object to look for
- start - optional integer, slice start
- end - optional integer, slice end
Returns -1 if the subsequence is NOT found.
e.g. Locating the first typical start codon, AUG, in an RNA sequence:
>>> from Bio.Seq import Seq
>>> my_rna = Seq("GUCAUGGCCAUUGUAAUGGGCCGCUGAAAGGGUGCCCGAUAGUUG")
>>> my_rna.find("AUG")
3
"""
# If it has one, check the alphabet:
sub_str = self._get_seq_str_and_check_alphabet(sub)
return str(self).find(sub_str, start, end)
def rfind(self, sub, start=0, end=sys.maxsize):
"""Find from right method, like that of a python string.
This behaves like the python string method of the same name.
Returns an integer, the index of the last (right most) occurrence of
substring argument sub in the (sub)sequence given by [start:end].
Arguments:
- sub - a string or another Seq object to look for
- start - optional integer, slice start
- end - optional integer, slice end
Returns -1 if the subsequence is NOT found.
e.g. Locating the last typical start codon, AUG, in an RNA sequence:
>>> from Bio.Seq import Seq
>>> my_rna = Seq("GUCAUGGCCAUUGUAAUGGGCCGCUGAAAGGGUGCCCGAUAGUUG")
>>> my_rna.rfind("AUG")
15
"""
# If it has one, check the alphabet:
sub_str = self._get_seq_str_and_check_alphabet(sub)
return str(self).rfind(sub_str, start, end)
def startswith(self, prefix, start=0, end=sys.maxsize):
"""Does the Seq start with the given prefix? Returns True/False.
This behaves like the python string method of the same name.
Return True if the sequence starts with the specified prefix
(a string or another Seq object), False otherwise.
With optional start, test sequence beginning at that position.
With optional end, stop comparing sequence at that position.
prefix can also be a tuple of strings to try. e.g.
>>> from Bio.Seq import Seq
>>> my_rna = Seq("GUCAUGGCCAUUGUAAUGGGCCGCUGAAAGGGUGCCCGAUAGUUG")
>>> my_rna.startswith("GUC")
True
>>> my_rna.startswith("AUG")
False
>>> my_rna.startswith("AUG", 3)
True
>>> my_rna.startswith(("UCC", "UCA", "UCG"), 1)
True
"""
# If it has one, check the alphabet:
if isinstance(prefix, tuple):
prefix_strs = tuple(self._get_seq_str_and_check_alphabet(p)
for p in prefix)
return str(self).startswith(prefix_strs, start, end)
else:
prefix_str = self._get_seq_str_and_check_alphabet(prefix)
return str(self).startswith(prefix_str, start, end)
def endswith(self, suffix, start=0, end=sys.maxsize):
"""Does the Seq end with the given suffix? Returns True/False.
This behaves like the python string method of the same name.
Return True if the sequence ends with the specified suffix
(a string or another Seq object), False otherwise.
With optional start, test sequence beginning at that position.
With optional end, stop comparing sequence at that position.
suffix can also be a tuple of strings to try. e.g.
>>> from Bio.Seq import Seq
>>> my_rna = Seq("GUCAUGGCCAUUGUAAUGGGCCGCUGAAAGGGUGCCCGAUAGUUG")
>>> my_rna.endswith("UUG")
True
>>> my_rna.endswith("AUG")
False
>>> my_rna.endswith("AUG", 0, 18)
True
>>> my_rna.endswith(("UCC", "UCA", "UUG"))
True
"""
# If it has one, check the alphabet:
if isinstance(suffix, tuple):
suffix_strs = tuple(self._get_seq_str_and_check_alphabet(p)
for p in suffix)
return str(self).endswith(suffix_strs, start, end)
else:
suffix_str = self._get_seq_str_and_check_alphabet(suffix)
return str(self).endswith(suffix_str, start, end)
def split(self, sep=None, maxsplit=-1):
"""Split method, like that of a python string.
This behaves like the python string method of the same name.
Return a list of the 'words' in the string (as Seq objects),
using sep as the delimiter string. If maxsplit is given, at
most maxsplit splits are done. If maxsplit is omitted, all
splits are made.
Following the python string method, sep will by default be any
white space (tabs, spaces, newlines) but this is unlikely to
apply to biological sequences.
e.g.
>>> from Bio.Seq import Seq
>>> my_rna = Seq("GUCAUGGCCAUUGUAAUGGGCCGCUGAAAGGGUGCCCGAUAGUUG")
>>> my_aa = my_rna.translate()
>>> my_aa
Seq('VMAIVMGR*KGAR*L', HasStopCodon(ExtendedIUPACProtein(), '*'))
>>> my_aa.split("*")
[Seq('VMAIVMGR', HasStopCodon(ExtendedIUPACProtein(), '*')), Seq('KGAR', HasStopCodon(ExtendedIUPACProtein(), '*')), Seq('L', HasStopCodon(ExtendedIUPACProtein(), '*'))]
>>> my_aa.split("*", 1)
[Seq('VMAIVMGR', HasStopCodon(ExtendedIUPACProtein(), '*')), Seq('KGAR*L', HasStopCodon(ExtendedIUPACProtein(), '*'))]
See also the rsplit method:
>>> my_aa.rsplit("*", 1)
[Seq('VMAIVMGR*KGAR', HasStopCodon(ExtendedIUPACProtein(), '*')), Seq('L', HasStopCodon(ExtendedIUPACProtein(), '*'))]
"""
# If it has one, check the alphabet:
sep_str = self._get_seq_str_and_check_alphabet(sep)
# TODO - If the sep is the defined stop symbol, or gap char,
# should we adjust the alphabet?
return [Seq(part, self.alphabet)
for part in str(self).split(sep_str, maxsplit)]
def rsplit(self, sep=None, maxsplit=-1):
"""Right split method, like that of a python string.
This behaves like the python string method of the same name.
Return a list of the 'words' in the string (as Seq objects),
using sep as the delimiter string. If maxsplit is given, at
most maxsplit splits are done COUNTING FROM THE RIGHT.
If maxsplit is omitted, all splits are made.
Following the python string method, sep will by default be any
white space (tabs, spaces, newlines) but this is unlikely to
apply to biological sequences.
e.g. print(my_seq.rsplit("*",1))
See also the split method.
"""
# If it has one, check the alphabet:
sep_str = self._get_seq_str_and_check_alphabet(sep)
return [Seq(part, self.alphabet)
for part in str(self).rsplit(sep_str, maxsplit)]
def strip(self, chars=None):
"""Returns a new Seq object with leading and trailing ends stripped.
This behaves like the python string method of the same name.
Optional argument chars defines which characters to remove. If
omitted or None (default) then as for the python string method,
this defaults to removing any white space.
e.g. print(my_seq.strip("-"))
See also the lstrip and rstrip methods.
"""
# If it has one, check the alphabet:
strip_str = self._get_seq_str_and_check_alphabet(chars)
return Seq(str(self).strip(strip_str), self.alphabet)
def lstrip(self, chars=None):
"""Returns a new Seq object with leading (left) end stripped.
This behaves like the python string method of the same name.
Optional argument chars defines which characters to remove. If
omitted or None (default) then as for the python string method,
this defaults to removing any white space.
e.g. print(my_seq.lstrip("-"))
See also the strip and rstrip methods.
"""
# If it has one, check the alphabet:
strip_str = self._get_seq_str_and_check_alphabet(chars)
return Seq(str(self).lstrip(strip_str), self.alphabet)
def rstrip(self, chars=None):
"""Returns a new Seq object with trailing (right) end stripped.
This behaves like the python string method of the same name.
Optional argument chars defines which characters to remove. If
omitted or None (default) then as for the python string method,
this defaults to removing any white space.
e.g. Removing a nucleotide sequence's polyadenylation (poly-A tail):
>>> from Bio.Alphabet import IUPAC
>>> from Bio.Seq import Seq
>>> my_seq = Seq("CGGTACGCTTATGTCACGTAGAAAAAA", IUPAC.unambiguous_dna)
>>> my_seq
Seq('CGGTACGCTTATGTCACGTAGAAAAAA', IUPACUnambiguousDNA())
>>> my_seq.rstrip("A")
Seq('CGGTACGCTTATGTCACGTAG', IUPACUnambiguousDNA())
See also the strip and lstrip methods.
"""
# If it has one, check the alphabet:
strip_str = self._get_seq_str_and_check_alphabet(chars)
return Seq(str(self).rstrip(strip_str), self.alphabet)
def upper(self):
"""Returns an upper case copy of the sequence.
>>> from Bio.Alphabet import HasStopCodon, generic_protein
>>> from Bio.Seq import Seq
>>> my_seq = Seq("VHLTPeeK*", HasStopCodon(generic_protein))
>>> my_seq
Seq('VHLTPeeK*', HasStopCodon(ProteinAlphabet(), '*'))
>>> my_seq.lower()
Seq('vhltpeek*', HasStopCodon(ProteinAlphabet(), '*'))
>>> my_seq.upper()
Seq('VHLTPEEK*', HasStopCodon(ProteinAlphabet(), '*'))
This will adjust the alphabet if required. See also the lower method.
"""
return Seq(str(self).upper(), self.alphabet._upper())
def lower(self):
"""Returns a lower case copy of the sequence.
This will adjust the alphabet if required. Note that the IUPAC alphabets
are upper case only, and thus a generic alphabet must be substituted.
>>> from Bio.Alphabet import Gapped, generic_dna
>>> from Bio.Alphabet import IUPAC
>>> from Bio.Seq import Seq
>>> my_seq = Seq("CGGTACGCTTATGTCACGTAG*AAAAAA", Gapped(IUPAC.unambiguous_dna, "*"))
>>> my_seq
Seq('CGGTACGCTTATGTCACGTAG*AAAAAA', Gapped(IUPACUnambiguousDNA(), '*'))
>>> my_seq.lower()
Seq('cggtacgcttatgtcacgtag*aaaaaa', Gapped(DNAAlphabet(), '*'))
See also the upper method.
"""
return Seq(str(self).lower(), self.alphabet._lower())
def complement(self):
"""Returns the complement sequence. New Seq object.
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import IUPAC
>>> my_dna = Seq("CCCCCGATAG", IUPAC.unambiguous_dna)
>>> my_dna
Seq('CCCCCGATAG', IUPACUnambiguousDNA())
>>> my_dna.complement()
Seq('GGGGGCTATC', IUPACUnambiguousDNA())
You can of course use mixed case sequences,
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import generic_dna
>>> my_dna = Seq("CCCCCgatA-GD", generic_dna)
>>> my_dna
Seq('CCCCCgatA-GD', DNAAlphabet())
>>> my_dna.complement()
Seq('GGGGGctaT-CH', DNAAlphabet())
Note in the above example, ambiguous character D denotes
G, A or T so its complement is H (for C, T or A).
Trying to complement a protein sequence raises an exception.
>>> my_protein = Seq("MAIVMGR", IUPAC.protein)
>>> my_protein.complement()
Traceback (most recent call last):
...
ValueError: Proteins do not have complements!
"""
base = Alphabet._get_base_alphabet(self.alphabet)
if isinstance(base, Alphabet.ProteinAlphabet):
raise ValueError("Proteins do not have complements!")
if isinstance(base, Alphabet.DNAAlphabet):
ttable = _dna_complement_table
elif isinstance(base, Alphabet.RNAAlphabet):
ttable = _rna_complement_table
elif ('U' in self._data or 'u' in self._data) \
and ('T' in self._data or 't' in self._data):
# TODO - Handle this cleanly?
raise ValueError("Mixed RNA/DNA found")
elif 'U' in self._data or 'u' in self._data:
ttable = _rna_complement_table
else:
ttable = _dna_complement_table
# Much faster on really long sequences than the previous loop based one.
# thx to Michael Palmer, University of Waterloo
return Seq(str(self).translate(ttable), self.alphabet)
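# Implementation note: _dna_complement_table / _rna_complement_table are
# str.translate mapping tables (built earlier in this module from the IUPAC
# ambiguous complement mappings), so complementing is a single pass over the
# string. Illustrative sketch, not an original doctest (assumes generic_dna
# from Bio.Alphabet):
#     Seq("ACGTacgt", generic_dna).complement()  ->  Seq('TGCAtgca', DNAAlphabet())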
def reverse_complement(self):
"""Returns the reverse complement sequence. New Seq object.
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import IUPAC
>>> my_dna = Seq("CCCCCGATAGNR", IUPAC.ambiguous_dna)
>>> my_dna
Seq('CCCCCGATAGNR', IUPACAmbiguousDNA())
>>> my_dna.reverse_complement()
Seq('YNCTATCGGGGG', IUPACAmbiguousDNA())
Note in the above example, since R = G or A, its complement
is Y (which denotes C or T).
You can of course use mixed case sequences,
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import generic_dna
>>> my_dna = Seq("CCCCCgatA-G", generic_dna)
>>> my_dna
Seq('CCCCCgatA-G', DNAAlphabet())
>>> my_dna.reverse_complement()
Seq('C-TatcGGGGG', DNAAlphabet())
Trying to complement a protein sequence raises an exception:
>>> my_protein = Seq("MAIVMGR", IUPAC.protein)
>>> my_protein.reverse_complement()
Traceback (most recent call last):
...
ValueError: Proteins do not have complements!
"""
# Use -1 stride/step to reverse the complement
return self.complement()[::-1]
def transcribe(self):
"""Returns the RNA sequence from a DNA sequence. New Seq object.
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import IUPAC
>>> coding_dna = Seq("ATGGCCATTGTAATGGGCCGCTGAAAGGGTGCCCGATAG",
... IUPAC.unambiguous_dna)
>>> coding_dna
Seq('ATGGCCATTGTAATGGGCCGCTGAAAGGGTGCCCGATAG', IUPACUnambiguousDNA())
>>> coding_dna.transcribe()
Seq('AUGGCCAUUGUAAUGGGCCGCUGAAAGGGUGCCCGAUAG', IUPACUnambiguousRNA())
Trying to transcribe a protein or RNA sequence raises an exception:
>>> my_protein = Seq("MAIVMGR", IUPAC.protein)
>>> my_protein.transcribe()
Traceback (most recent call last):
...
ValueError: Proteins cannot be transcribed!
"""
base = Alphabet._get_base_alphabet(self.alphabet)
if isinstance(base, Alphabet.ProteinAlphabet):
raise ValueError("Proteins cannot be transcribed!")
if isinstance(base, Alphabet.RNAAlphabet):
raise ValueError("RNA cannot be transcribed!")
if self.alphabet == IUPAC.unambiguous_dna:
alphabet = IUPAC.unambiguous_rna
elif self.alphabet == IUPAC.ambiguous_dna:
alphabet = IUPAC.ambiguous_rna
else:
alphabet = Alphabet.generic_rna
return Seq(str(self).replace('T', 'U').replace('t', 'u'), alphabet)
def back_transcribe(self):
"""Returns the DNA sequence from an RNA sequence. New Seq object.
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import IUPAC
>>> messenger_rna = Seq("AUGGCCAUUGUAAUGGGCCGCUGAAAGGGUGCCCGAUAG",
... IUPAC.unambiguous_rna)
>>> messenger_rna
Seq('AUGGCCAUUGUAAUGGGCCGCUGAAAGGGUGCCCGAUAG', IUPACUnambiguousRNA())
>>> messenger_rna.back_transcribe()
Seq('ATGGCCATTGTAATGGGCCGCTGAAAGGGTGCCCGATAG', IUPACUnambiguousDNA())
Trying to back-transcribe a protein or DNA sequence raises an
exception:
>>> my_protein = Seq("MAIVMGR", IUPAC.protein)
>>> my_protein.back_transcribe()
Traceback (most recent call last):
...
ValueError: Proteins cannot be back transcribed!
"""
base = Alphabet._get_base_alphabet(self.alphabet)
if isinstance(base, Alphabet.ProteinAlphabet):
raise ValueError("Proteins cannot be back transcribed!")
if isinstance(base, Alphabet.DNAAlphabet):
raise ValueError("DNA cannot be back transcribed!")
if self.alphabet == IUPAC.unambiguous_rna:
alphabet = IUPAC.unambiguous_dna
elif self.alphabet == IUPAC.ambiguous_rna:
alphabet = IUPAC.ambiguous_dna
else:
alphabet = Alphabet.generic_dna
return Seq(str(self).replace("U", "T").replace("u", "t"), alphabet)
def translate(self, table="Standard", stop_symbol="*", to_stop=False,
cds=False):
"""Turns a nucleotide sequence into a protein sequence. New Seq object.
This method will translate DNA or RNA sequences, and those with a
nucleotide or generic alphabet. Trying to translate a protein
sequence raises an exception.
Arguments:
- table - Which codon table to use? This can be either a name
(string), an NCBI identifier (integer), or a CodonTable
object (useful for non-standard genetic codes). This
defaults to the "Standard" table.
- stop_symbol - Single character string, what to use for terminators.
This defaults to the asterisk, "*".
- to_stop - Boolean, defaults to False meaning do a full translation
continuing on past any stop codons (translated as the
specified stop_symbol). If True, translation is
terminated at the first in frame stop codon (and the
stop_symbol is not appended to the returned protein
sequence).
- cds - Boolean, indicates this is a complete CDS. If True,
this checks the sequence starts with a valid alternative start
codon (which will be translated as methionine, M), that the
sequence length is a multiple of three, and that there is a
single in frame stop codon at the end (this will be excluded
from the protein sequence, regardless of the to_stop option).
If these tests fail, an exception is raised.
e.g. Using the standard table:
>>> coding_dna = Seq("GTGGCCATTGTAATGGGCCGCTGAAAGGGTGCCCGATAG")
>>> coding_dna.translate()
Seq('VAIVMGR*KGAR*', HasStopCodon(ExtendedIUPACProtein(), '*'))
>>> coding_dna.translate(stop_symbol="@")
Seq('VAIVMGR@KGAR@', HasStopCodon(ExtendedIUPACProtein(), '@'))
>>> coding_dna.translate(to_stop=True)
Seq('VAIVMGR', ExtendedIUPACProtein())
Now using NCBI table 2, where TGA is not a stop codon:
>>> coding_dna.translate(table=2)
Seq('VAIVMGRWKGAR*', HasStopCodon(ExtendedIUPACProtein(), '*'))
>>> coding_dna.translate(table=2, to_stop=True)
Seq('VAIVMGRWKGAR', ExtendedIUPACProtein())
In fact, GTG is an alternative start codon under NCBI table 2, meaning
this sequence could be a complete CDS:
>>> coding_dna.translate(table=2, cds=True)
Seq('MAIVMGRWKGAR', ExtendedIUPACProtein())
It isn't a valid CDS under NCBI table 1, due to both the start codon and
also the in frame stop codons:
>>> coding_dna.translate(table=1, cds=True)
Traceback (most recent call last):
...
TranslationError: First codon 'GTG' is not a start codon
If the sequence has no in-frame stop codon, then the to_stop argument
has no effect:
>>> coding_dna2 = Seq("TTGGCCATTGTAATGGGCCGC")
>>> coding_dna2.translate()
Seq('LAIVMGR', ExtendedIUPACProtein())
>>> coding_dna2.translate(to_stop=True)
Seq('LAIVMGR', ExtendedIUPACProtein())
NOTE - Ambiguous codons like "TAN" or "NNN" could be an amino acid
or a stop codon. These are translated as "X". Any invalid codon
(e.g. "TA?" or "T-A") will throw a TranslationError.
NOTE - Does NOT support gapped sequences.
NOTE - This does NOT behave like the python string's translate
method. For that use str(my_seq).translate(...) instead.
"""
if isinstance(table, str) and len(table) == 256:
raise ValueError("The Seq object translate method DOES NOT take "
"a 256 character string mapping table like "
"the python string object's translate method. "
"Use str(my_seq).translate(...) instead.")
if isinstance(Alphabet._get_base_alphabet(self.alphabet),
Alphabet.ProteinAlphabet):
raise ValueError("Proteins cannot be translated!")
try:
table_id = int(table)
except ValueError:
# Assume it's a table name
if self.alphabet == IUPAC.unambiguous_dna:
# Will use standard IUPAC protein alphabet, no need for X
codon_table = CodonTable.unambiguous_dna_by_name[table]
elif self.alphabet == IUPAC.unambiguous_rna:
# Will use standard IUPAC protein alphabet, no need for X
codon_table = CodonTable.unambiguous_rna_by_name[table]
else:
# This will use the extended IUPAC protein alphabet with X etc.
# The same table can be used for RNA or DNA (we use this for
# translating strings).
codon_table = CodonTable.ambiguous_generic_by_name[table]
except (AttributeError, TypeError):
# Assume it's a CodonTable object
if isinstance(table, CodonTable.CodonTable):
codon_table = table
else:
raise ValueError('Bad table argument')
else:
# Assume it's a table ID
if self.alphabet == IUPAC.unambiguous_dna:
# Will use standard IUPAC protein alphabet, no need for X
codon_table = CodonTable.unambiguous_dna_by_id[table_id]
elif self.alphabet == IUPAC.unambiguous_rna:
# Will use standard IUPAC protein alphabet, no need for X
codon_table = CodonTable.unambiguous_rna_by_id[table_id]
else:
# This will use the extended IUPAC protein alphabet with X etc.
# The same table can be used for RNA or DNA (we use this for
# translating strings).
codon_table = CodonTable.ambiguous_generic_by_id[table_id]
protein = _translate_str(str(self), codon_table,
stop_symbol, to_stop, cds)
if stop_symbol in protein:
alphabet = Alphabet.HasStopCodon(codon_table.protein_alphabet,
stop_symbol=stop_symbol)
else:
alphabet = codon_table.protein_alphabet
return Seq(protein, alphabet)
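# Recap of the table lookup above: an integer argument is treated as an NCBI
# table id, a plain string as a table name, and a CodonTable instance is used
# directly; unambiguous IUPAC alphabets select the stricter unambiguous codon
# tables, everything else falls back to the ambiguous generic tables (protein
# alphabet extended with "X"). Illustrative sketch, not an original doctest:
#     Seq("ATGGCCTAA").translate(to_stop=True)  ->  Seq('MA', ...)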
def ungap(self, gap=None):
"""Return a copy of the sequence without the gap character(s).
The gap character can be specified in two ways - either as an explicit
argument, or via the sequence's alphabet. For example:
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import generic_dna
>>> my_dna = Seq("-ATA--TGAAAT-TTGAAAA", generic_dna)
>>> my_dna
Seq('-ATA--TGAAAT-TTGAAAA', DNAAlphabet())
>>> my_dna.ungap("-")
Seq('ATATGAAATTTGAAAA', DNAAlphabet())
If the gap character is not given as an argument, it will be taken from
the sequence's alphabet (if defined). Notice that the returned sequence's
alphabet is adjusted since it no longer requires a gapped alphabet:
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import IUPAC, Gapped, HasStopCodon
>>> my_pro = Seq("MVVLE=AD*", HasStopCodon(Gapped(IUPAC.protein, "=")))
>>> my_pro
Seq('MVVLE=AD*', HasStopCodon(Gapped(IUPACProtein(), '='), '*'))
>>> my_pro.ungap()
Seq('MVVLEAD*', HasStopCodon(IUPACProtein(), '*'))
Or, with a simpler gapped DNA example:
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import IUPAC, Gapped
>>> my_seq = Seq("CGGGTAG=AAAAAA", Gapped(IUPAC.unambiguous_dna, "="))
>>> my_seq
Seq('CGGGTAG=AAAAAA', Gapped(IUPACUnambiguousDNA(), '='))
>>> my_seq.ungap()
Seq('CGGGTAGAAAAAA', IUPACUnambiguousDNA())
As long as it is consistent with the alphabet, although it is redundant,
you can still supply the gap character as an argument to this method:
>>> my_seq
Seq('CGGGTAG=AAAAAA', Gapped(IUPACUnambiguousDNA(), '='))
>>> my_seq.ungap("=")
Seq('CGGGTAGAAAAAA', IUPACUnambiguousDNA())
However, if the gap character given as the argument disagrees with that
declared in the alphabet, an exception is raised:
>>> my_seq
Seq('CGGGTAG=AAAAAA', Gapped(IUPACUnambiguousDNA(), '='))
>>> my_seq.ungap("-")
Traceback (most recent call last):
...
ValueError: Gap '-' does not match '=' from alphabet
Finally, if a gap character is not supplied, and the alphabet does not
define one, an exception is raised:
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import generic_dna
>>> my_dna = Seq("ATA--TGAAAT-TTGAAAA", generic_dna)
>>> my_dna
Seq('ATA--TGAAAT-TTGAAAA', DNAAlphabet())
>>> my_dna.ungap()
Traceback (most recent call last):
...
ValueError: Gap character not given and not defined in alphabet
"""
if hasattr(self.alphabet, "gap_char"):
if not gap:
gap = self.alphabet.gap_char
elif gap != self.alphabet.gap_char:
raise ValueError("Gap {0!r} does not match {1!r} from alphabet".format(
gap, self.alphabet.gap_char))
alpha = Alphabet._ungap(self.alphabet)
elif not gap:
raise ValueError("Gap character not given and not defined in alphabet")
else:
alpha = self.alphabet # modify!
if len(gap) != 1 or not isinstance(gap, str):
raise ValueError("Unexpected gap character, {0!r}".format(gap))
return Seq(str(self).replace(gap, ""), alpha)
class UnknownSeq(Seq):
"""A read-only sequence object of known length but unknown contents.
If you have an unknown sequence, you can represent this with a normal
Seq object, for example:
>>> my_seq = Seq("N"*5)
>>> my_seq
Seq('NNNNN', Alphabet())
>>> len(my_seq)
5
>>> print(my_seq)
NNNNN
However, this is rather wasteful of memory (especially for large
sequences), which is where this class is most useful:
>>> unk_five = UnknownSeq(5)
>>> unk_five
UnknownSeq(5, alphabet = Alphabet(), character = '?')
>>> len(unk_five)
5
>>> print(unk_five)
?????
You can add unknown sequence together, provided their alphabets and
characters are compatible, and get another memory saving UnknownSeq:
>>> unk_four = UnknownSeq(4)
>>> unk_four
UnknownSeq(4, alphabet = Alphabet(), character = '?')
>>> unk_four + unk_five
UnknownSeq(9, alphabet = Alphabet(), character = '?')
If the alphabet or characters don't match up, the addition gives an
ordinary Seq object:
>>> unk_nnnn = UnknownSeq(4, character = "N")
>>> unk_nnnn
UnknownSeq(4, alphabet = Alphabet(), character = 'N')
>>> unk_nnnn + unk_four
Seq('NNNN????', Alphabet())
Combining with a real Seq gives a new Seq object:
>>> known_seq = Seq("ACGT")
>>> unk_four + known_seq
Seq('????ACGT', Alphabet())
>>> known_seq + unk_four
Seq('ACGT????', Alphabet())
"""
def __init__(self, length, alphabet=Alphabet.generic_alphabet, character=None):
"""Create a new UnknownSeq object.
If character is omitted, it is determined from the alphabet, "N" for
nucleotides, "X" for proteins, and "?" otherwise.
"""
self._length = int(length)
if self._length < 0:
# TODO - Block zero length UnknownSeq? You can just use a Seq!
raise ValueError("Length must not be negative.")
self.alphabet = alphabet
if character:
if len(character) != 1:
raise ValueError("character argument should be a single letter string.")
self._character = character
else:
base = Alphabet._get_base_alphabet(alphabet)
# TODO? Check the case of the letters in the alphabet?
# We may have to use "n" instead of "N" etc.
if isinstance(base, Alphabet.NucleotideAlphabet):
self._character = "N"
elif isinstance(base, Alphabet.ProteinAlphabet):
self._character = "X"
else:
self._character = "?"
def __len__(self):
"""Returns the stated length of the unknown sequence."""
return self._length
def __str__(self):
"""Returns the unknown sequence as full string of the given length."""
return self._character * self._length
def __repr__(self):
return "UnknownSeq({0}, alphabet = {1!r}, character = {2!r})".format(
self._length, self.alphabet, self._character)
def __add__(self, other):
"""Add another sequence or string to this sequence.
Adding two UnknownSeq objects returns another UnknownSeq object
provided the character is the same and the alphabets are compatible.
>>> from Bio.Seq import UnknownSeq
>>> from Bio.Alphabet import generic_protein
>>> UnknownSeq(10, generic_protein) + UnknownSeq(5, generic_protein)
UnknownSeq(15, alphabet = ProteinAlphabet(), character = 'X')
If the characters differ, an UnknownSeq object cannot be used, so a
Seq object is returned:
>>> from Bio.Seq import UnknownSeq
>>> from Bio.Alphabet import generic_protein
>>> UnknownSeq(10, generic_protein) + UnknownSeq(5, generic_protein,
... character="x")
Seq('XXXXXXXXXXxxxxx', ProteinAlphabet())
If adding a string to an UnknownSeq, a new Seq is returned with the
same alphabet:
>>> from Bio.Seq import UnknownSeq
>>> from Bio.Alphabet import generic_protein
>>> UnknownSeq(5, generic_protein) + "LV"
Seq('XXXXXLV', ProteinAlphabet())
"""
if isinstance(other, UnknownSeq) and other._character == self._character:
# TODO - Check the alphabets match
return UnknownSeq(len(self) + len(other),
self.alphabet, self._character)
# Offload to the base class...
return Seq(str(self), self.alphabet) + other
def __radd__(self, other):
# If other is an UnknownSeq, then __add__ would be called.
# Offload to the base class...
return other + Seq(str(self), self.alphabet)
def __getitem__(self, index):
"""Get a subsequence from the UnknownSeq object.
>>> unk = UnknownSeq(8, character="N")
>>> print(unk[:])
NNNNNNNN
>>> print(unk[5:3])
<BLANKLINE>
>>> print(unk[1:-1])
NNNNNN
>>> print(unk[1:-1:2])
NNN
"""
if isinstance(index, int):
# TODO - Check the bounds without wasting memory
return str(self)[index]
old_length = self._length
step = index.step
if step is None or step == 1:
# This calculates the length you'd get from ("N"*old_length)[index]
start = index.start
end = index.stop
if start is None:
start = 0
elif start < 0:
start = max(0, old_length + start)
elif start > old_length:
start = old_length
if end is None:
end = old_length
elif end < 0:
end = max(0, old_length + end)
elif end > old_length:
end = old_length
new_length = max(0, end - start)
elif step == 0:
raise ValueError("slice step cannot be zero")
else:
# TODO - handle step efficiently
new_length = len(("X" * old_length)[index])
# assert new_length == len(("X"*old_length)[index]), \
# (index, start, end, step, old_length,
# new_length, len(("X"*old_length)[index]))
return UnknownSeq(new_length, self.alphabet, self._character)
def count(self, sub, start=0, end=sys.maxsize):
"""Non-overlapping count method, like that of a python string.
This behaves like the python string (and Seq object) method of the
same name, which does a non-overlapping count!
Returns an integer, the number of occurrences of substring
argument sub in the (sub)sequence given by [start:end].
Optional arguments start and end are interpreted as in slice
notation.
Arguments:
- sub - a string or another Seq object to look for
- start - optional integer, slice start
- end - optional integer, slice end
>>> "NNNN".count("N")
4
>>> Seq("NNNN").count("N")
4
>>> UnknownSeq(4, character="N").count("N")
4
>>> UnknownSeq(4, character="N").count("A")
0
>>> UnknownSeq(4, character="N").count("AA")
0
HOWEVER, please note that because python strings and Seq objects (and
MutableSeq objects) do a non-overlapping search, this may not give
the answer you expect:
>>> UnknownSeq(4, character="N").count("NN")
2
>>> UnknownSeq(4, character="N").count("NNN")
1
"""
sub_str = self._get_seq_str_and_check_alphabet(sub)
if len(sub_str) == 1:
if str(sub_str) == self._character:
if start == 0 and end >= self._length:
return self._length
else:
# This could be done more cleverly...
return str(self).count(sub_str, start, end)
else:
return 0
else:
if set(sub_str) == set(self._character):
if start == 0 and end >= self._length:
return self._length // len(sub_str)
else:
# This could be done more cleverly...
return str(self).count(sub_str, start, end)
else:
return 0
def complement(self):
"""The complement of an unknown nucleotide equals itself.
>>> my_nuc = UnknownSeq(8)
>>> my_nuc
UnknownSeq(8, alphabet = Alphabet(), character = '?')
>>> print(my_nuc)
????????
>>> my_nuc.complement()
UnknownSeq(8, alphabet = Alphabet(), character = '?')
>>> print(my_nuc.complement())
????????
"""
if isinstance(Alphabet._get_base_alphabet(self.alphabet),
Alphabet.ProteinAlphabet):
raise ValueError("Proteins do not have complements!")
return self
def reverse_complement(self):
"""The reverse complement of an unknown nucleotide equals itself.
>>> my_nuc = UnknownSeq(10)
>>> my_nuc
UnknownSeq(10, alphabet = Alphabet(), character = '?')
>>> print(my_nuc)
??????????
>>> my_nuc.reverse_complement()
UnknownSeq(10, alphabet = Alphabet(), character = '?')
>>> print(my_nuc.reverse_complement())
??????????
"""
if isinstance(Alphabet._get_base_alphabet(self.alphabet),
Alphabet.ProteinAlphabet):
raise ValueError("Proteins do not have complements!")
return self
def transcribe(self):
"""Returns unknown RNA sequence from an unknown DNA sequence.
>>> my_dna = UnknownSeq(10, character="N")
>>> my_dna
UnknownSeq(10, alphabet = Alphabet(), character = 'N')
>>> print(my_dna)
NNNNNNNNNN
>>> my_rna = my_dna.transcribe()
>>> my_rna
UnknownSeq(10, alphabet = RNAAlphabet(), character = 'N')
>>> print(my_rna)
NNNNNNNNNN
"""
# Offload the alphabet stuff
s = Seq(self._character, self.alphabet).transcribe()
return UnknownSeq(self._length, s.alphabet, self._character)
def back_transcribe(self):
"""Returns unknown DNA sequence from an unknown RNA sequence.
>>> my_rna = UnknownSeq(20, character="N")
>>> my_rna
UnknownSeq(20, alphabet = Alphabet(), character = 'N')
>>> print(my_rna)
NNNNNNNNNNNNNNNNNNNN
>>> my_dna = my_rna.back_transcribe()
>>> my_dna
UnknownSeq(20, alphabet = DNAAlphabet(), character = 'N')
>>> print(my_dna)
NNNNNNNNNNNNNNNNNNNN
"""
# Offload the alphabet stuff
s = Seq(self._character, self.alphabet).back_transcribe()
return UnknownSeq(self._length, s.alphabet, self._character)
def upper(self):
"""Returns an upper case copy of the sequence.
>>> from Bio.Alphabet import generic_dna
>>> from Bio.Seq import UnknownSeq
>>> my_seq = UnknownSeq(20, generic_dna, character="n")
>>> my_seq
UnknownSeq(20, alphabet = DNAAlphabet(), character = 'n')
>>> print(my_seq)
nnnnnnnnnnnnnnnnnnnn
>>> my_seq.upper()
UnknownSeq(20, alphabet = DNAAlphabet(), character = 'N')
>>> print(my_seq.upper())
NNNNNNNNNNNNNNNNNNNN
This will adjust the alphabet if required. See also the lower method.
"""
return UnknownSeq(self._length, self.alphabet._upper(), self._character.upper())
def lower(self):
"""Returns a lower case copy of the sequence.
This will adjust the alphabet if required:
>>> from Bio.Alphabet import IUPAC
>>> from Bio.Seq import UnknownSeq
>>> my_seq = UnknownSeq(20, IUPAC.extended_protein)
>>> my_seq
UnknownSeq(20, alphabet = ExtendedIUPACProtein(), character = 'X')
>>> print(my_seq)
XXXXXXXXXXXXXXXXXXXX
>>> my_seq.lower()
UnknownSeq(20, alphabet = ProteinAlphabet(), character = 'x')
>>> print(my_seq.lower())
xxxxxxxxxxxxxxxxxxxx
See also the upper method.
"""
return UnknownSeq(self._length, self.alphabet._lower(), self._character.lower())
def translate(self, **kwargs):
"""Translate an unknown nucleotide sequence into an unknown protein.
e.g.
>>> my_seq = UnknownSeq(9, character="N")
>>> print(my_seq)
NNNNNNNNN
>>> my_protein = my_seq.translate()
>>> my_protein
UnknownSeq(3, alphabet = ProteinAlphabet(), character = 'X')
>>> print(my_protein)
XXX
In comparison, using a normal Seq object:
>>> my_seq = Seq("NNNNNNNNN")
>>> print(my_seq)
NNNNNNNNN
>>> my_protein = my_seq.translate()
>>> my_protein
Seq('XXX', ExtendedIUPACProtein())
>>> print(my_protein)
XXX
"""
if isinstance(Alphabet._get_base_alphabet(self.alphabet),
Alphabet.ProteinAlphabet):
raise ValueError("Proteins cannot be translated!")
return UnknownSeq(self._length // 3, Alphabet.generic_protein, "X")
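# Note: the floor division by 3 silently drops a trailing partial codon,
# e.g. UnknownSeq(10).translate() has length 3, mirroring how the Seq version
# warns about (and ignores) an incomplete final codon.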
def ungap(self, gap=None):
"""Return a copy of the sequence without the gap character(s).
The gap character can be specified in two ways - either as an explicit
argument, or via the sequence's alphabet. For example:
>>> from Bio.Seq import UnknownSeq
>>> from Bio.Alphabet import Gapped, generic_dna
>>> my_dna = UnknownSeq(20, Gapped(generic_dna, "-"))
>>> my_dna
UnknownSeq(20, alphabet = Gapped(DNAAlphabet(), '-'), character = 'N')
>>> my_dna.ungap()
UnknownSeq(20, alphabet = DNAAlphabet(), character = 'N')
>>> my_dna.ungap("-")
UnknownSeq(20, alphabet = DNAAlphabet(), character = 'N')
If the UnknownSeq is using the gap character, then an empty Seq is
returned:
>>> my_gap = UnknownSeq(20, Gapped(generic_dna, "-"), character="-")
>>> my_gap
UnknownSeq(20, alphabet = Gapped(DNAAlphabet(), '-'), character = '-')
>>> my_gap.ungap()
Seq('', DNAAlphabet())
>>> my_gap.ungap("-")
Seq('', DNAAlphabet())
Notice that the returned sequence's alphabet is adjusted to remove any
explicit gap character declaration.
"""
# Offload the alphabet stuff
s = Seq(self._character, self.alphabet).ungap()
if s:
return UnknownSeq(self._length, s.alphabet, self._character)
else:
return Seq("", s.alphabet)
class MutableSeq(object):
"""An editable sequence object (with an alphabet).
Unlike normal python strings and our basic sequence object (the Seq class)
which are immutable, the MutableSeq lets you edit the sequence in place.
However, this means you cannot use a MutableSeq object as a dictionary key.
>>> from Bio.Seq import MutableSeq
>>> from Bio.Alphabet import generic_dna
>>> my_seq = MutableSeq("ACTCGTCGTCG", generic_dna)
>>> my_seq
MutableSeq('ACTCGTCGTCG', DNAAlphabet())
>>> my_seq[5]
'T'
>>> my_seq[5] = "A"
>>> my_seq
MutableSeq('ACTCGACGTCG', DNAAlphabet())
>>> my_seq[5]
'A'
>>> my_seq[5:8] = "NNN"
>>> my_seq
MutableSeq('ACTCGNNNTCG', DNAAlphabet())
>>> len(my_seq)
11
Note that the MutableSeq object does not support as many string-like
or biological methods as the Seq object.
"""
def __init__(self, data, alphabet=Alphabet.generic_alphabet):
if sys.version_info[0] == 3:
self.array_indicator = "u"
else:
self.array_indicator = "c"
if isinstance(data, str): # TODO - What about unicode?
self.data = array.array(self.array_indicator, data)
else:
self.data = data # assumes the input is an array
self.alphabet = alphabet
def __repr__(self):
"""Returns a (truncated) representation of the sequence for debugging."""
if len(self) > 60:
# Shows the last three letters as it is often useful to see if there
# is a stop codon at the end of a sequence.
# Note total length is 54+3+3=60
return "{0}('{1}...{2}', {3!r})".format(self.__class__.__name__,
str(self[:54]),
str(self[-3:]),
self.alphabet)
else:
return "{0}('{1}', {2!r})".format(self.__class__.__name__,
str(self),
self.alphabet)
def __str__(self):
"""Returns the full sequence as a python string.
Note that Biopython 1.44 and earlier would give a truncated
version of repr(my_seq) for str(my_seq). If you are writing code
which needs to be backwards compatible with old Biopython, you
should continue to use my_seq.tostring() rather than str(my_seq).
"""
# See test_GAQueens.py for an historic usage of a non-string alphabet!
return "".join(self.data)
def __eq__(self, other):
"""Compare the sequence to another sequence or a string (README).
Currently if compared to another sequence the alphabets must be
compatible. Comparing DNA to RNA, or Nucleotide to Protein will raise
an exception. Otherwise only the sequence itself is compared, not the
precise alphabet.
A future release of Biopython will change this (and the Seq object etc)
to use simple string comparison. The plan is that comparing sequences
with incompatible alphabets (e.g. DNA to RNA) will trigger a warning
but not an exception.
During this transition period, please just do explicit comparisons:
>>> seq1 = MutableSeq("ACGT")
>>> seq2 = MutableSeq("ACGT")
>>> id(seq1) == id(seq2)
False
>>> str(seq1) == str(seq2)
True
Biopython now does:
>>> seq1 == seq2
True
>>> seq1 == Seq("ACGT")
True
>>> seq1 == "ACGT"
True
"""
if hasattr(other, "alphabet"):
if not Alphabet._check_type_compatible([self.alphabet,
other.alphabet]):
warnings.warn("Incompatible alphabets {0!r} and {1!r}".format(
self.alphabet, other.alphabet),
BiopythonWarning)
if isinstance(other, MutableSeq):
return self.data == other.data
return str(self) == str(other)
def __ne__(self, other):
"""Not equal, see __eq__ documentation."""
# Seem to require this method under Python 2 but not needed on Python 3?
return not (self == other)
def __lt__(self, other):
"""Less than, see __eq__ documentation."""
if hasattr(other, "alphabet"):
if not Alphabet._check_type_compatible([self.alphabet,
other.alphabet]):
warnings.warn("Incompatible alphabets {0!r} and {1!r}".format(
self.alphabet, other.alphabet),
BiopythonWarning)
if isinstance(other, MutableSeq):
return self.data < other.data
return str(self) < str(other)
def __le__(self, other):
"""Less than or equal, see __eq__ documentation."""
if hasattr(other, "alphabet"):
if not Alphabet._check_type_compatible([self.alphabet,
other.alphabet]):
warnings.warn("Incompatible alphabets {0!r} and {1!r}".format(
self.alphabet, other.alphabet),
BiopythonWarning)
if isinstance(other, MutableSeq):
return self.data <= other.data
return str(self) <= str(other)
def __len__(self):
return len(self.data)
def __getitem__(self, index):
# Note since Python 2.0, __getslice__ is deprecated
# and __getitem__ is used instead.
# See http://docs.python.org/ref/sequence-methods.html
if isinstance(index, int):
# Return a single letter as a string
return self.data[index]
else:
# Return the (sub)sequence as another Seq object
return MutableSeq(self.data[index], self.alphabet)
def __setitem__(self, index, value):
# Note since Python 2.0, __setslice__ is deprecated
# and __setitem__ is used instead.
# See http://docs.python.org/ref/sequence-methods.html
if isinstance(index, int):
# Replacing a single letter with a new string
self.data[index] = value
else:
# Replacing a sub-sequence
if isinstance(value, MutableSeq):
self.data[index] = value.data
elif isinstance(value, type(self.data)):
self.data[index] = value
else:
self.data[index] = array.array(self.array_indicator,
str(value))
def __delitem__(self, index):
# Note since Python 2.0, __delslice__ is deprecated
# and __delitem__ is used instead.
# See http://docs.python.org/ref/sequence-methods.html
# Could be deleting a single letter, or a slice
del self.data[index]
def __add__(self, other):
"""Add another sequence or string to this sequence.
Returns a new MutableSeq object."""
if hasattr(other, "alphabet"):
# other should be a Seq or a MutableSeq
if not Alphabet._check_type_compatible([self.alphabet,
other.alphabet]):
raise TypeError("Incompatible alphabets {0!r} and {1!r}".format(
self.alphabet, other.alphabet))
# They should be the same sequence type (or one of them is generic)
a = Alphabet._consensus_alphabet([self.alphabet, other.alphabet])
if isinstance(other, MutableSeq):
# See test_GAQueens.py for an historic usage of a non-string
# alphabet! Adding the arrays should support this.
return self.__class__(self.data + other.data, a)
else:
return self.__class__(str(self) + str(other), a)
elif isinstance(other, basestring):
# other is a plain string - use the current alphabet
return self.__class__(str(self) + str(other), self.alphabet)
else:
raise TypeError
def __radd__(self, other):
if hasattr(other, "alphabet"):
# other should be a Seq or a MutableSeq
if not Alphabet._check_type_compatible([self.alphabet,
other.alphabet]):
raise TypeError("Incompatible alphabets {0!r} and {1!r}".format(
self.alphabet, other.alphabet))
# They should be the same sequence type (or one of them is generic)
a = Alphabet._consensus_alphabet([self.alphabet, other.alphabet])
if isinstance(other, MutableSeq):
# See test_GAQueens.py for an historic usage of a non-string
# alphabet! Adding the arrays should support this.
return self.__class__(other.data + self.data, a)
else:
return self.__class__(str(other) + str(self), a)
elif isinstance(other, basestring):
# other is a plain string - use the current alphabet
return self.__class__(str(other) + str(self), self.alphabet)
else:
raise TypeError
def append(self, c):
self.data.append(c)
def insert(self, i, c):
self.data.insert(i, c)
def pop(self, i=(-1)):
c = self.data[i]
del self.data[i]
return c
def remove(self, item):
for i in range(len(self.data)):
if self.data[i] == item:
del self.data[i]
return
raise ValueError("MutableSeq.remove(x): x not in list")
def count(self, sub, start=0, end=sys.maxsize):
"""Non-overlapping count method, like that of a python string.
This behaves like the python string method of the same name,
which does a non-overlapping count!
Returns an integer, the number of occurrences of substring
argument sub in the (sub)sequence given by [start:end].
Optional arguments start and end are interpreted as in slice
notation.
Arguments:
- sub - a string or another Seq object to look for
- start - optional integer, slice start
- end - optional integer, slice end
e.g.
>>> from Bio.Seq import MutableSeq
>>> my_mseq = MutableSeq("AAAATGA")
>>> print(my_mseq.count("A"))
5
>>> print(my_mseq.count("ATG"))
1
>>> print(my_mseq.count(Seq("AT")))
1
>>> print(my_mseq.count("AT", 2, -1))
1
HOWEVER, please note that because python strings, Seq objects and
MutableSeq objects do a non-overlapping search, this may not give
the answer you expect:
>>> "AAAA".count("AA")
2
>>> print(MutableSeq("AAAA").count("AA"))
2
An overlapping search would give the answer as three!
"""
try:
# TODO - Should we check the alphabet?
search = str(sub)
except AttributeError:
search = sub
if not isinstance(search, basestring):
raise TypeError("expected a string, Seq or MutableSeq")
if len(search) == 1:
# Try and be efficient and work directly from the array.
count = 0
for c in self.data[start:end]:
if c == search:
count += 1
return count
else:
# TODO - Can we do this more efficiently?
return str(self).count(search, start, end)
def index(self, item):
for i in range(len(self.data)):
if self.data[i] == item:
return i
raise ValueError("MutableSeq.index(x): x not in list")
def reverse(self):
"""Modify the mutable sequence to reverse itself.
No return value.
"""
self.data.reverse()
def complement(self):
"""Modify the mutable sequence to take on its complement.
Trying to complement a protein sequence raises an exception.
No return value.
"""
if isinstance(Alphabet._get_base_alphabet(self.alphabet),
Alphabet.ProteinAlphabet):
raise ValueError("Proteins do not have complements!")
if self.alphabet in (IUPAC.ambiguous_dna, IUPAC.unambiguous_dna):
d = ambiguous_dna_complement
elif self.alphabet in (IUPAC.ambiguous_rna, IUPAC.unambiguous_rna):
d = ambiguous_rna_complement
elif 'U' in self.data and 'T' in self.data:
# TODO - Handle this cleanly?
raise ValueError("Mixed RNA/DNA found")
elif 'U' in self.data:
d = ambiguous_rna_complement
else:
d = ambiguous_dna_complement
c = dict([(x.lower(), y.lower()) for x, y in d.items()])
d.update(c)
self.data = [d[c] for c in self.data]
self.data = array.array(self.array_indicator, self.data)
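# Note: unlike Seq.complement(), this works in place via a plain dict lookup
# per character; the lowercase mappings are merged into the same dict so
# mixed-case sequences are handled, and the resulting list is converted back
# to an array afterwards.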
def reverse_complement(self):
"""Modify the mutable sequence to take on its reverse complement.
Trying to reverse complement a protein sequence raises an exception.
No return value.
"""
self.complement()
self.data.reverse()
# Sorting a sequence makes no sense.
# def sort(self, *args): self.data.sort(*args)
def extend(self, other):
if isinstance(other, MutableSeq):
for c in other.data:
self.data.append(c)
else:
for c in other:
self.data.append(c)
def tostring(self):
"""Returns the full sequence as a python string (DEPRECATED).
You are now encouraged to use str(my_seq) instead of my_seq.tostring()
as this method is officially deprecated.
Because str(my_seq) will give you the full sequence as a python string,
there is often no need to make an explicit conversion. For example,
>>> my_seq = Seq("ATCGTG")
>>> my_name = "seq_1"
>>> print("ID={%s}, sequence={%s}" % (my_name, my_seq))
ID={seq_1}, sequence={ATCGTG}
On Biopython 1.44 or older you would have to have done this:
>>> print("ID={%s}, sequence={%s}" % (my_name, my_seq.tostring()))
ID={seq_1}, sequence={ATCGTG}
"""
from Bio import BiopythonDeprecationWarning
warnings.warn("This method is obsolete; please use str(my_seq) "
"instead of my_seq.tostring().",
BiopythonDeprecationWarning)
return "".join(self.data)
def toseq(self):
"""Returns the full sequence as a new immutable Seq object.
>>> from Bio.Seq import Seq
>>> from Bio.Alphabet import IUPAC
>>> my_mseq = MutableSeq("MKQHKAMIVALIVICITAVVAAL",
... IUPAC.protein)
>>> my_mseq
MutableSeq('MKQHKAMIVALIVICITAVVAAL', IUPACProtein())
>>> my_mseq.toseq()
Seq('MKQHKAMIVALIVICITAVVAAL', IUPACProtein())
Note that the alphabet is preserved.
"""
return Seq("".join(self.data), self.alphabet)
# The transcribe, backward_transcribe, and translate functions are
# user-friendly versions of the corresponding functions in Bio.Transcribe
# and Bio.Translate. The functions work both on Seq objects, and on strings.
def transcribe(dna):
"""Transcribes a DNA sequence into RNA.
If given a string, returns a new string object.
Given a Seq or MutableSeq, returns a new Seq object with an RNA alphabet.
Trying to transcribe a protein or RNA sequence raises an exception.
e.g.
>>> transcribe("ACTGN")
'ACUGN'
"""
if isinstance(dna, Seq):
return dna.transcribe()
elif isinstance(dna, MutableSeq):
return dna.toseq().transcribe()
else:
return dna.replace('T', 'U').replace('t', 'u')
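# Illustrative sketch of both branches (not an original doctest):
#     transcribe("ACTGt")      ->  'ACUGu'
#     transcribe(Seq("ACTG"))  ->  Seq('ACUG', RNAAlphabet())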
def back_transcribe(rna):
"""Back-transcribes an RNA sequence into DNA.
If given a string, returns a new string object.
Given a Seq or MutableSeq, returns a new Seq object with a DNA alphabet.
Trying to back-transcribe a protein or DNA sequence raises an exception.
e.g.
>>> back_transcribe("ACUGN")
'ACTGN'
"""
if isinstance(rna, Seq):
return rna.back_transcribe()
elif isinstance(rna, MutableSeq):
return rna.toseq().back_transcribe()
else:
return rna.replace('U', 'T').replace('u', 't')
def _translate_str(sequence, table, stop_symbol="*", to_stop=False,
cds=False, pos_stop="X"):
"""Helper function to translate a nucleotide string (PRIVATE).
Arguments:
- sequence - a string
- table - a CodonTable object (NOT a table name or id number)
- stop_symbol - a single character string, what to use for terminators.
- to_stop - boolean, should translation terminate at the first
in frame stop codon? If there is no in-frame stop codon
then translation continues to the end.
- pos_stop - a single character string for a possible stop codon
(e.g. TAN or NNN)
- cds - Boolean, indicates this is a complete CDS. If True, this
checks the sequence starts with a valid alternative start
codon (which will be translated as methionine, M), that the
sequence length is a multiple of three, and that there is a
single in frame stop codon at the end (this will be excluded
from the protein sequence, regardless of the to_stop option).
If these tests fail, an exception is raised.
Returns a string.
e.g.
>>> from Bio.Data import CodonTable
>>> table = CodonTable.ambiguous_dna_by_id[1]
>>> _translate_str("AAA", table)
'K'
>>> _translate_str("TAR", table)
'*'
>>> _translate_str("TAN", table)
'X'
>>> _translate_str("TAN", table, pos_stop="@")
'@'
>>> _translate_str("TA?", table)
Traceback (most recent call last):
...
TranslationError: Codon 'TA?' is invalid
In a change to older versions of Biopython, partial codons are now
always regarded as an error (previously only checked if cds=True)
and will trigger a warning (likely to become an exception in a
future release).
If **cds=True**, the start and stop codons are checked, and the start
codon will be translated as methionine. The sequence must be a
whole number of codons.
>>> _translate_str("ATGCCCTAG", table, cds=True)
'MP'
>>> _translate_str("AAACCCTAG", table, cds=True)
Traceback (most recent call last):
...
TranslationError: First codon 'AAA' is not a start codon
>>> _translate_str("ATGCCCTAGCCCTAG", table, cds=True)
Traceback (most recent call last):
...
TranslationError: Extra in frame stop codon found.
"""
sequence = sequence.upper()
amino_acids = []
forward_table = table.forward_table
stop_codons = table.stop_codons
if table.nucleotide_alphabet.letters is not None:
valid_letters = set(table.nucleotide_alphabet.letters.upper())
else:
# Assume the worst case, ambiguous DNA or RNA:
valid_letters = set(IUPAC.ambiguous_dna.letters.upper() +
IUPAC.ambiguous_rna.letters.upper())
n = len(sequence)
if cds:
if str(sequence[:3]).upper() not in table.start_codons:
raise CodonTable.TranslationError(
"First codon '{0}' is not a start codon".format(sequence[:3]))
if n % 3 != 0:
raise CodonTable.TranslationError(
"Sequence length {0} is not a multiple of three".format(n))
if str(sequence[-3:]).upper() not in stop_codons:
raise CodonTable.TranslationError(
"Final codon '{0}' is not a stop codon".format(sequence[-3:]))
# Don't translate the stop symbol, and manually translate the M
sequence = sequence[3:-3]
n -= 6
amino_acids = ["M"]
elif n % 3 != 0:
from Bio import BiopythonWarning
warnings.warn("Partial codon, len(sequence) not a multiple of three. "
"Explicitly trim the sequence or add trailing N before "
"translation. This may become an error in future.",
BiopythonWarning)
for i in range(0, n - n % 3, 3):
codon = sequence[i:i + 3]
try:
amino_acids.append(forward_table[codon])
except (KeyError, CodonTable.TranslationError):
# TODO? Treat "---" as a special case (gapped translation)
if codon in table.stop_codons:
if cds:
raise CodonTable.TranslationError(
"Extra in frame stop codon found.")
if to_stop:
break
amino_acids.append(stop_symbol)
elif valid_letters.issuperset(set(codon)):
# Possible stop codon (e.g. NNN or TAN)
amino_acids.append(pos_stop)
else:
raise CodonTable.TranslationError(
"Codon '{0}' is invalid".format(codon))
return "".join(amino_acids)
def translate(sequence, table="Standard", stop_symbol="*", to_stop=False,
cds=False):
"""Translate a nucleotide sequence into amino acids.
If given a string, returns a new string object. Given a Seq or
MutableSeq, returns a Seq object with a protein alphabet.
Arguments:
- table - Which codon table to use? This can be either a name (string),
an NCBI identifier (integer), or a CodonTable object (useful
for non-standard genetic codes). Defaults to the "Standard"
table.
- stop_symbol - Single character string, what to use for any
terminators, defaults to the asterisk, "*".
- to_stop - Boolean, defaults to False meaning do a full
translation continuing on past any stop codons
(translated as the specified stop_symbol). If
True, translation is terminated at the first in
frame stop codon (and the stop_symbol is not
appended to the returned protein sequence).
- cds - Boolean, indicates this is a complete CDS. If True, this
checks the sequence starts with a valid alternative start
codon (which will be translated as methionine, M), that the
sequence length is a multiple of three, and that there is a
single in frame stop codon at the end (this will be excluded
from the protein sequence, regardless of the to_stop option).
If these tests fail, an exception is raised.
A simple string example using the default (standard) genetic code:
>>> coding_dna = "GTGGCCATTGTAATGGGCCGCTGAAAGGGTGCCCGATAG"
>>> translate(coding_dna)
'VAIVMGR*KGAR*'
>>> translate(coding_dna, stop_symbol="@")
'VAIVMGR@KGAR@'
>>> translate(coding_dna, to_stop=True)
'VAIVMGR'
Now using NCBI table 2, where TGA is not a stop codon:
>>> translate(coding_dna, table=2)
'VAIVMGRWKGAR*'
>>> translate(coding_dna, table=2, to_stop=True)
'VAIVMGRWKGAR'
In fact this example uses an alternative start codon valid under NCBI table 2,
GTG, which means this example is a complete valid CDS which when translated
should really start with methionine (not valine):
>>> translate(coding_dna, table=2, cds=True)
'MAIVMGRWKGAR'
Note that if the sequence has no in-frame stop codon, then the to_stop
argument has no effect:
>>> coding_dna2 = "GTGGCCATTGTAATGGGCCGC"
>>> translate(coding_dna2)
'VAIVMGR'
>>> translate(coding_dna2, to_stop=True)
'VAIVMGR'
NOTE - Ambiguous codons like "TAN" or "NNN" could be an amino acid
or a stop codon. These are translated as "X". Any invalid codon
(e.g. "TA?" or "T-A") will throw a TranslationError.
NOTE - Does NOT support gapped sequences.
It will however translate either DNA or RNA.
"""
if isinstance(sequence, Seq):
return sequence.translate(table, stop_symbol, to_stop, cds)
elif isinstance(sequence, MutableSeq):
# Return a Seq object
return sequence.toseq().translate(table, stop_symbol, to_stop, cds)
else:
# Assume it's a string, return a string
try:
codon_table = CodonTable.ambiguous_generic_by_id[int(table)]
except ValueError:
codon_table = CodonTable.ambiguous_generic_by_name[table]
except (AttributeError, TypeError):
if isinstance(table, CodonTable.CodonTable):
codon_table = table
else:
raise ValueError('Bad table argument')
return _translate_str(sequence, codon_table, stop_symbol, to_stop, cds)
def reverse_complement(sequence):
"""Returns the reverse complement sequence of a nucleotide string.
If given a string, returns a new string object.
Given a Seq or a MutableSeq, returns a new Seq object with the same alphabet.
Supports unambiguous and ambiguous nucleotide sequences.
e.g.
>>> reverse_complement("ACTG-NH")
'DN-CAGT'
"""
if isinstance(sequence, Seq):
# Return a Seq
return sequence.reverse_complement()
elif isinstance(sequence, MutableSeq):
# Return a Seq
# Don't use the MutableSeq reverse_complement method as it is 'in place'.
return sequence.toseq().reverse_complement()
# Assume it's a string.
# In order to avoid some code duplication, the old code would turn the string
# into a Seq, use the reverse_complement method, and convert back to a string.
# This worked, but is over five times slower on short sequences!
if ('U' in sequence or 'u' in sequence) \
and ('T' in sequence or 't' in sequence):
raise ValueError("Mixed RNA/DNA found")
elif 'U' in sequence or 'u' in sequence:
ttable = _rna_complement_table
else:
ttable = _dna_complement_table
return sequence.translate(ttable)[::-1]
def _test():
"""Run the Bio.Seq module's doctests (PRIVATE)."""
if sys.version_info[0:2] == (3, 1):
print("Not running Bio.Seq doctest on Python 3.1")
print("See http://bugs.python.org/issue7490")
else:
print("Running doctests...")
import doctest
doctest.testmod(optionflags=doctest.IGNORE_EXCEPTION_DETAIL)
print("Done")
if __name__ == "__main__":
_test()
avg_line_length: 39.309891
max_line_length: 177
alphanum_fraction: 0.601542

hexsha: 4fc681dde9753227d2b0614cbf87a3230048c605
size: 7,814
ext: py
lang: Python
max_stars_repo_path: Lib/asyncio/queues.py
max_stars_repo_name: hroncok/cpython
max_stars_repo_head_hexsha: b64de46aae148cfab0980e0ad478da7aafc44900
max_stars_repo_licenses: ["PSF-2.0"]
max_stars_count: 32
max_stars_repo_stars_event_min_datetime: 2018-06-02T16:55:29.000Z
max_stars_repo_stars_event_max_datetime: 2022-03-10T19:39:13.000Z
max_issues_repo_path: Lib/asyncio/queues.py
max_issues_repo_name: hroncok/cpython
max_issues_repo_head_hexsha: b64de46aae148cfab0980e0ad478da7aafc44900
max_issues_repo_licenses: ["PSF-2.0"]
max_issues_count: 26
max_issues_repo_issues_event_min_datetime: 2018-05-24T16:37:47.000Z
max_issues_repo_issues_event_max_datetime: 2021-03-02T04:57:11.000Z
max_forks_repo_path: Lib/asyncio/queues.py
max_forks_repo_name: hroncok/cpython
max_forks_repo_head_hexsha: b64de46aae148cfab0980e0ad478da7aafc44900
max_forks_repo_licenses: ["PSF-2.0"]
max_forks_count: 8
max_forks_repo_forks_event_min_datetime: 2019-11-25T13:37:19.000Z
max_forks_repo_forks_event_max_datetime: 2021-11-04T16:59:36.000Z
"""Queues"""
__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'QueueFull', 'QueueEmpty']
import collections
import heapq
from . import events
from . import locks
from .coroutines import coroutine
class QueueEmpty(Exception):
"""Exception raised when Queue.get_nowait() is called on a Queue object
which is empty.
"""
pass
class QueueFull(Exception):
"""Exception raised when the Queue.put_nowait() method is called on a Queue
object which is full.
"""
pass
class Queue:
"""A queue, useful for coordinating producer and consumer coroutines.
If maxsize is less than or equal to zero, the queue size is infinite. If it
is an integer greater than 0, then "yield from put()" will block when the
queue reaches maxsize, until an item is removed by get().
Unlike the standard library Queue, you can reliably know this Queue's size
with qsize(), since your single-threaded asyncio application won't be
interrupted between calling qsize() and doing an operation on the Queue.
"""
def __init__(self, maxsize=0, *, loop=None):
if loop is None:
self._loop = events.get_event_loop()
else:
self._loop = loop
self._maxsize = maxsize
# Futures.
self._getters = collections.deque()
# Futures.
self._putters = collections.deque()
self._unfinished_tasks = 0
self._finished = locks.Event(loop=self._loop)
self._finished.set()
self._init(maxsize)
# These three are overridable in subclasses.
def _init(self, maxsize):
self._queue = collections.deque()
def _get(self):
return self._queue.popleft()
def _put(self, item):
self._queue.append(item)
# End of the overridable methods.
def _wakeup_next(self, waiters):
# Wake up the next waiter (if any) that isn't cancelled.
while waiters:
waiter = waiters.popleft()
if not waiter.done():
waiter.set_result(None)
break
def __repr__(self):
return '<{} at {:#x} {}>'.format(
type(self).__name__, id(self), self._format())
def __str__(self):
return '<{} {}>'.format(type(self).__name__, self._format())
def _format(self):
result = 'maxsize={!r}'.format(self._maxsize)
if getattr(self, '_queue', None):
result += ' _queue={!r}'.format(list(self._queue))
if self._getters:
result += ' _getters[{}]'.format(len(self._getters))
if self._putters:
result += ' _putters[{}]'.format(len(self._putters))
if self._unfinished_tasks:
result += ' tasks={}'.format(self._unfinished_tasks)
return result
def qsize(self):
"""Number of items in the queue."""
return len(self._queue)
@property
def maxsize(self):
"""Number of items allowed in the queue."""
return self._maxsize
def empty(self):
"""Return True if the queue is empty, False otherwise."""
return not self._queue
def full(self):
"""Return True if there are maxsize items in the queue.
Note: if the Queue was initialized with maxsize=0 (the default),
then full() is never True.
"""
if self._maxsize <= 0:
return False
else:
return self.qsize() >= self._maxsize
@coroutine
def put(self, item):
"""Put an item into the queue.
Put an item into the queue. If the queue is full, wait until a free
slot is available before adding item.
This method is a coroutine.
"""
while self.full():
putter = self._loop.create_future()
self._putters.append(putter)
try:
yield from putter
except:
putter.cancel() # Just in case putter is not done yet.
if not self.full() and not putter.cancelled():
# We were woken up by get_nowait(), but can't take
# the call. Wake up the next in line.
self._wakeup_next(self._putters)
raise
return self.put_nowait(item)
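# Cancellation detail: if the waiting putter future is cancelled right after
# get_nowait() woke it, the free slot it was offered would otherwise be lost,
# so the except-block above passes the wake-up on to the next queued putter.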
def put_nowait(self, item):
"""Put an item into the queue without blocking.
If no free slot is immediately available, raise QueueFull.
"""
if self.full():
raise QueueFull
self._put(item)
self._unfinished_tasks += 1
self._finished.clear()
self._wakeup_next(self._getters)
@coroutine
def get(self):
"""Remove and return an item from the queue.
If queue is empty, wait until an item is available.
This method is a coroutine.
"""
while self.empty():
getter = self._loop.create_future()
self._getters.append(getter)
try:
yield from getter
except:
getter.cancel() # Just in case getter is not done yet.
try:
self._getters.remove(getter)
except ValueError:
pass
if not self.empty() and not getter.cancelled():
# We were woken up by put_nowait(), but can't take
# the call. Wake up the next in line.
self._wakeup_next(self._getters)
raise
return self.get_nowait()
def get_nowait(self):
"""Remove and return an item from the queue.
Return an item if one is immediately available, else raise QueueEmpty.
"""
if self.empty():
raise QueueEmpty
item = self._get()
self._wakeup_next(self._putters)
return item
def task_done(self):
"""Indicate that a formerly enqueued task is complete.
Used by queue consumers. For each get() used to fetch a task,
a subsequent call to task_done() tells the queue that the processing
on the task is complete.
If a join() is currently blocking, it will resume when all items have
been processed (meaning that a task_done() call was received for every
item that had been put() into the queue).
Raises ValueError if called more times than there were items placed in
the queue.
"""
if self._unfinished_tasks <= 0:
raise ValueError('task_done() called too many times')
self._unfinished_tasks -= 1
if self._unfinished_tasks == 0:
self._finished.set()
@coroutine
def join(self):
"""Block until all items in the queue have been gotten and processed.
The count of unfinished tasks goes up whenever an item is added to the
queue. The count goes down whenever a consumer calls task_done() to
indicate that the item was retrieved and all work on it is complete.
When the count of unfinished tasks drops to zero, join() unblocks.
"""
if self._unfinished_tasks > 0:
yield from self._finished.wait()
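# Typical join()/task_done() pattern (illustrative sketch in the pre-async/await
# coroutine style used by this module; 'process' is a placeholder):
#
#     @coroutine
#     def worker(queue):
#         while True:
#             item = yield from queue.get()
#             process(item)
#             queue.task_done()
#
# After putting N items, 'yield from queue.join()' returns once every item has
# been matched by a task_done() call.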
class PriorityQueue(Queue):
"""A subclass of Queue; retrieves entries in priority order (lowest first).
Entries are typically tuples of the form: (priority number, data).
"""
def _init(self, maxsize):
self._queue = []
def _put(self, item, heappush=heapq.heappush):
heappush(self._queue, item)
def _get(self, heappop=heapq.heappop):
return heappop(self._queue)
class LifoQueue(Queue):
"""A subclass of Queue that retrieves most recently added entries first."""
def _init(self, maxsize):
self._queue = []
def _put(self, item):
self._queue.append(item)
def _get(self):
return self._queue.pop()
avg_line_length: 30.885375
max_line_length: 79
alphanum_fraction: 0.59611

hexsha: ba0d085f6cc1892db75c0b30e3398f97e9eccdf2
size: 900
ext: py
lang: Python
max_stars_repo_path: lab1-python/src/sandbox.py
max_stars_repo_name: ArtemK123/MachineLearning
max_stars_repo_head_hexsha: 022fa1ab231b3011ce3d4a868a87a8cc9ebeb70e
max_stars_repo_licenses: ["MIT"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: lab1-python/src/sandbox.py
max_issues_repo_name: ArtemK123/MachineLearning
max_issues_repo_head_hexsha: 022fa1ab231b3011ce3d4a868a87a8cc9ebeb70e
max_issues_repo_licenses: ["MIT"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: lab1-python/src/sandbox.py
max_forks_repo_name: ArtemK123/MachineLearning
max_forks_repo_head_hexsha: 022fa1ab231b3011ce3d4a868a87a8cc9ebeb70e
max_forks_repo_licenses: ["MIT"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
import pandas as pd
from sklearn.model_selection import train_test_split
df = pd.read_csv('../resources/wikipedia-detox-250-line-data.tsv', sep='\t')
text_input = df['SentimentText']
y_input = df['Sentiment']
X_text_train, X_text_test, y_train, y_test = train_test_split(text_input, y_input, test_size=0.25, random_state=0)
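# Hold out 25% of the labelled comments for testing; random_state=0 makes the
# split reproducible.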
from sklearn.feature_extraction.text import CountVectorizer
cv = CountVectorizer()
cv.fit(X_text_train)
X_train = cv.transform(X_text_train)
X_test = cv.transform(X_text_test)
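# The bag-of-words vocabulary is learned from the training texts only and then
# applied to both splits, giving sparse count matrices for the classifier.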
from sklearn.linear_model import LogisticRegression
logit = LogisticRegression(C = 5, n_jobs = -1)
logit.fit(X_train, y_train)
sentiment = ["This is very bad!"]
X_sentiment = cv.transform(sentiment)[0]
result = logit.predict(X_sentiment)
if (result == 1):
print("Bad sentiment")
else:
print("Good sentiment")
avg_line_length: 33.333333
max_line_length: 115
alphanum_fraction: 0.755556

hexsha: 75d3a59c2cccd8a19788fb416ceee975185a91b5
size: 7,503
ext: py
lang: Python
max_stars_repo_path: docs/examples/use_cases/pytorch/single_stage_detector/main.py
max_stars_repo_name: cyyever/DALI
max_stars_repo_head_hexsha: e2b2d5a061da605e3e9e681017a7b2d53fe41a62
max_stars_repo_licenses: ["ECL-2.0", "Apache-2.0"]
max_stars_count: 3,967
max_stars_repo_stars_event_min_datetime: 2018-06-19T04:39:09.000Z
max_stars_repo_stars_event_max_datetime: 2022-03-31T10:57:53.000Z
max_issues_repo_path: docs/examples/use_cases/pytorch/single_stage_detector/main.py
max_issues_repo_name: cyyever/DALI
max_issues_repo_head_hexsha: e2b2d5a061da605e3e9e681017a7b2d53fe41a62
max_issues_repo_licenses: ["ECL-2.0", "Apache-2.0"]
max_issues_count: 3,494
max_issues_repo_issues_event_min_datetime: 2018-06-21T07:09:58.000Z
max_issues_repo_issues_event_max_datetime: 2022-03-31T19:44:51.000Z
max_forks_repo_path: docs/examples/use_cases/pytorch/single_stage_detector/main.py
max_forks_repo_name: cyyever/DALI
max_forks_repo_head_hexsha: e2b2d5a061da605e3e9e681017a7b2d53fe41a62
max_forks_repo_licenses: ["ECL-2.0", "Apache-2.0"]
max_forks_count: 531
max_forks_repo_forks_event_min_datetime: 2018-06-19T23:53:10.000Z
max_forks_repo_forks_event_max_datetime: 2022-03-30T08:35:59.000Z
import os
import sys
import time
from argparse import ArgumentParser
import math
import numpy as np
import time
import torch
from torch.optim.lr_scheduler import MultiStepLR
import torch.utils.data.distributed
from src.model import model, Loss
from src.utils import dboxes300_coco, Encoder
from src.evaluate import evaluate
from src.train import train_loop, tencent_trick
from src.data import *
# Apex imports
try:
from apex.parallel.LARC import LARC
from apex import amp
from apex.fp16_utils import *
except ImportError:
raise ImportError("Please install APEX from https://github.com/nvidia/apex")
class Logger:
def __init__(self, batch_size, local_rank, n_gpu, print_freq=20):
self.batch_size = batch_size
self.local_rank = local_rank
self.n_gpu = n_gpu
self.print_freq = print_freq
self.processed_samples = 0
self.epochs_times = []
self.epochs_speeds = []
def update_iter(self, epoch, iteration, loss):
if self.local_rank != 0:
return
if iteration % self.print_freq == 0:
print('Epoch: {:2d}, Iteration: {}, Loss: {}'.format(epoch, iteration, loss))
self.processed_samples = self.processed_samples + self.batch_size
def start_epoch(self):
self.epoch_start = time.time()
def end_epoch(self):
epoch_time = time.time() - self.epoch_start
epoch_speed = self.processed_samples / epoch_time
self.epochs_times.append(epoch_time)
self.epochs_speeds.append(epoch_speed)
self.processed_samples = 0
if self.local_rank == 0:
print('Epoch {:2d} finished. Time: {:4f} s, Speed: {:4f} img/sec, Average speed: {:4f}'
.format(len(self.epochs_times)-1, epoch_time, epoch_speed * self.n_gpu, self.average_speed() * self.n_gpu))
def average_speed(self):
return sum(self.epochs_speeds) / len(self.epochs_speeds)
def make_parser():
parser = ArgumentParser(
description="Train Single Shot MultiBox Detector on COCO")
parser.add_argument(
'--data', '-d', type=str, default='/coco', required=True,
help='path to test and training data files')
parser.add_argument(
'--epochs', '-e', type=int, default=65,
help='number of epochs for training')
parser.add_argument(
'--batch-size', '--bs', type=int, default=32,
help='number of examples for each iteration')
parser.add_argument(
'--eval-batch-size', '--ebs', type=int, default=32,
help='number of examples for each evaluation iteration')
parser.add_argument(
'--seed', '-s', type=int, default=0,
help='manually set random seed for torch')
parser.add_argument(
'--evaluation', nargs='*', type=int,
default=[3, 21, 31, 37, 42, 48, 53, 59, 64],
help='epochs at which to evaluate')
parser.add_argument(
'--multistep', nargs='*', type=int, default=[43, 54],
help='epochs at which to decay learning rate')
parser.add_argument(
'--target', type=float, default=None,
help='target mAP to assert against at the end')
# Hyperparameters
parser.add_argument(
'--learning-rate', '--lr', type=float, default=2.6e-3, help='learning rate')
parser.add_argument(
'--momentum', '-m', type=float, default=0.9,
help='momentum argument for SGD optimizer')
parser.add_argument(
'--weight-decay', '--wd', type=float, default=0.0005,
help='weight decay argument for SGD optimizer')
parser.add_argument('--warmup', type=int, default=None)
parser.add_argument(
'--backbone', type=str, default='resnet50',
choices=['resnet18', 'resnet34', 'resnet50', 'resnet101', 'resnet152'])
parser.add_argument('--num-workers', type=int, default=4)
parser.add_argument('--fp16-mode', type=str, default='static', choices=['off', 'static', 'amp'],
help='Half precision mode to use')
# Distributed
parser.add_argument('--local_rank', default=0, type=int,
help='Used for multi-process training. Can either be manually set ' +
'or automatically set by using \'python -m multiproc\'.')
# Pipeline control
parser.add_argument(
'--data_pipeline', type=str, default='dali', choices=['dali', 'no_dali'],
help='data preprocessing pipeline to use')
return parser
def train(args):
if args.amp:
amp_handle = amp.init(enabled=args.fp16)
args.distributed = False
if 'WORLD_SIZE' in os.environ:
args.distributed = int(os.environ['WORLD_SIZE']) > 1
if args.distributed:
torch.cuda.set_device(args.local_rank)
torch.distributed.init_process_group(backend='nccl', init_method='env://')
args.N_gpu = torch.distributed.get_world_size()
else:
args.N_gpu = 1
dboxes = dboxes300_coco()
encoder = Encoder(dboxes)
cocoGt = get_coco_ground_truth(args)
ssd300 = model(args)
args.learning_rate = args.learning_rate * args.N_gpu * (args.batch_size / 32)
iteration = 0
loss_func = Loss(dboxes)
loss_func.cuda()
optimizer = torch.optim.SGD(
tencent_trick(ssd300),
lr=args.learning_rate,
momentum=args.momentum,
weight_decay=args.weight_decay)
scheduler = MultiStepLR(
optimizer=optimizer,
milestones=args.multistep,
gamma=0.1)
if args.fp16:
if args.amp:
optimizer = amp_handle.wrap_optimizer(optimizer)
else:
optimizer = FP16_Optimizer(optimizer, static_loss_scale=128.)
val_dataloader, inv_map = get_val_dataloader(args)
train_loader = get_train_loader(args, dboxes)
acc = 0
logger = Logger(args.batch_size, args.local_rank, args.N_gpu)
for epoch in range(0, args.epochs):
logger.start_epoch()
scheduler.step()
iteration = train_loop(
ssd300, loss_func, epoch, optimizer,
train_loader, iteration, logger, args)
logger.end_epoch()
if epoch in args.evaluation:
acc = evaluate(ssd300, val_dataloader, cocoGt, encoder, inv_map, args)
if args.local_rank == 0:
print('Epoch {:2d}, Accuracy: {:4f} mAP'.format(epoch, acc))
if args.data_pipeline == 'dali':
train_loader.reset()
return acc, logger.average_speed()
if __name__ == "__main__":
parser = make_parser()
args = parser.parse_args()
if args.local_rank == 0:
os.makedirs('./models', exist_ok=True)
torch.backends.cudnn.benchmark = True
if args.fp16_mode != 'off':
args.fp16 = True
args.amp = (args.fp16_mode == 'amp')
else:
args.fp16 = False
args.amp = False
start_time = time.time()
acc, avg_speed = train(args)
# avg_speed is reported per node, adjust for the global speed
try:
num_shards = torch.distributed.get_world_size()
except RuntimeError:
num_shards = 1
avg_speed = num_shards * avg_speed
training_time = time.time() - start_time
if args.local_rank == 0:
print("Training end: Average speed: {:3f} img/sec, Total time: {:3f} sec, Final accuracy: {:3f} mAP"
.format(avg_speed, training_time, acc))
if args.target is not None:
if args.target > acc:
print('Target mAP of {} not met. Possible regression'.format(args.target))
sys.exit(1)
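# Worked example (illustrative, not part of the original script): train() above
# scales the base learning rate linearly with the number of GPUs and the per-GPU
# batch size via `args.learning_rate * args.N_gpu * (args.batch_size / 32)`.
# The GPU count and batch size below are hypothetical values used only to show
# the arithmetic.
base_lr, n_gpu, per_gpu_batch = 2.6e-3, 8, 64
print('scaled learning rate:', base_lr * n_gpu * (per_gpu_batch / 32))  # 0.0416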
| 32.480519
| 123
| 0.637212
|
1b48302049dd3785bf8fa87dcf0b7447e647557c
| 3,873
|
py
|
Python
|
jsonism/checker.py
|
bmcollier/jsonvalidator
|
1d45cc2047958402773da849d7984285d1676cce
|
[
"BSD-3-Clause"
] | 1
|
2022-03-09T07:23:10.000Z
|
2022-03-09T07:23:10.000Z
|
jsonism/checker.py
|
bmcollier/jsonvalidator
|
1d45cc2047958402773da849d7984285d1676cce
|
[
"BSD-3-Clause"
] | 3
|
2022-03-09T10:16:40.000Z
|
2022-03-12T07:58:12.000Z
|
jsonism/checker.py
|
bmcollier/jsonvalidator
|
1d45cc2047958402773da849d7984285d1676cce
|
[
"BSD-3-Clause"
] | null | null | null |
import logging
from typing import Any
from jsonism.types import Integer, String, Boolean, Float
def validate(input: Any, schema: Any, allow_empty_lists=False, parent=None):
""" Validate a top-level element. JSON supports lists, dicts and raw
elements at the top level.
:param input: JSON to be validated, as Python list, dict or base type
:param schema: JSON schema, as Python list, dict or base type
:return: True or False
"""
if schema in [Integer, String, Boolean, Float]:
return _validate_custom(input, schema, parent=parent)
if schema in [int, str, bool, float]:
return _validate_generic(input, schema, parent=parent)
if type(schema) == dict:
return _validate_dict(input, schema, allow_empty_lists=allow_empty_lists, parent=parent)
elif type(schema) == list:
return _validate_list(input, schema, allow_empty_lists=allow_empty_lists, parent=parent)
else:
raise NotImplementedError("Unknown type found in schema")
def _validate_generic(input, schema, parent=None):
""" Validate a base type.
:param input: A base value to be validated
:param schema: A base type to validate the input against
:return: True or False
"""
if type(input) != schema:
if parent:
logging.info(f"Fault in {parent}")
return False
else:
return True
def _validate_custom(input, schema, parent=None):
""" Validate a custom type.
:param input: A base value to be validated
:param schema: A base type to validate the input against
:return: True or False
"""
if not schema.validate(input):
if parent:
logging.info(f"Fault in {parent}")
return False
else:
return True
def _validate_dict(input, schema, allow_empty_lists, parent=None):
""" Validate a dictionary.
:param input: The dictionary to be validated
:param schema: The schema against which to validate, as a dictionary
:return: True or False
"""
for key, value in schema.items():
if type(value) in [dict, list]:
if type(input.get(key)) == type(value):
if not validate(input.get(key), value, allow_empty_lists=allow_empty_lists, parent=key):
return False
else:
if parent:
logging.info(f"In {parent}:")
logging.info(f"Schema field '{key}': Expected {str(dict)}, got {str(type(input.get(key)))}")
return False
elif key in input:
if type(value) in [String, Integer, Float, Boolean]:
if not value.validate(input.get(key)):
if parent:
logging.info(f"...in {parent}")
return False
elif type(input.get(key)) != value:
if parent:
logging.info(f"In {parent}:")
logging.info(f"Schema field '{key}': Expected {str(value)}, got {str(type(input.get(key)))}")
return False
else:
if parent:
logging.info(f"In {parent}:")
logging.info(f"Schema field '{key}' not found in input.")
return False
return True
def _validate_list(input, schema, allow_empty_lists, parent=None):
""" Validate a list.
:param input: The list to be validated
:param schema: The schema against which to validate, as a list
:return: True or False
"""
if len(input) == 0:
if parent:
logging.info(f"In {parent}:")
logging.info(f"Warning! Empty list encountered. 'allow_empty_lists' is currently set to {allow_empty_lists}")
return allow_empty_lists
list_item_type = schema[0]
for item in input:
if not validate(item, list_item_type, allow_empty_lists=allow_empty_lists):
return False
return True
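# Illustrative usage sketch (not part of the original module): validating a small
# payload against a schema built from plain Python base types. The schema and
# payloads below are assumptions chosen only to demonstrate the call flow of
# validate() above.
if __name__ == "__main__":
    schema = {"name": str, "age": int, "tags": [str]}
    assert validate({"name": "Ada", "age": 36, "tags": ["admin"]}, schema)
    assert not validate({"name": "Ada", "age": "thirty-six", "tags": ["admin"]}, schema)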
| 36.196262
| 117
| 0.615027
|
b1860fb601a7694c018fb877b37b48ad74188426
| 4,014
|
py
|
Python
|
tests/test_mdp.py
|
bgalbraith/macarico
|
448e3e7f088dde0f4eb016fbdee857221b9523fb
|
[
"MIT"
] | 121
|
2019-04-09T15:44:26.000Z
|
2022-03-29T19:56:19.000Z
|
tests/test_mdp.py
|
bgalbraith/macarico
|
448e3e7f088dde0f4eb016fbdee857221b9523fb
|
[
"MIT"
] | 1
|
2019-04-10T16:07:04.000Z
|
2019-05-09T00:41:19.000Z
|
tests/test_mdp.py
|
bgalbraith/macarico
|
448e3e7f088dde0f4eb016fbdee857221b9523fb
|
[
"MIT"
] | 11
|
2019-04-09T16:13:34.000Z
|
2019-09-30T23:31:14.000Z
|
from __future__ import division, generators, print_function
import random
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable as Var
import macarico.util
macarico.util.reseed()
from macarico.annealing import ExponentialAnnealing, stochastic
from macarico.lts.maximum_likelihood import MaximumLikelihood
from macarico.lts.dagger import DAgger, TwistedDAgger
from macarico.lts.aggrevate import AggreVaTe
from macarico.lts.lols import lols
from macarico.annealing import EWMA
from macarico.features.actor import TransitionRNN, TransitionBOW
from macarico.features.sequence import AttendAt
from macarico.policies.linear import LinearPolicy
from macarico.tasks import mdp
class LearnerOpts:
MAXLIK = 'MaximumLikelihood'
DAGGER = 'DAgger'
TWISTED = 'TwistedDAgger'
AGGREVATE = 'AggreVaTe'
LOLS = 'LOLS'
def make_ross_mdp(T=100, reset_prob=0):
initial = [(0, 1/3), (1, 1/3)]
# s a s' p()
half_rp = reset_prob/2
default = 1-reset_prob
transitions = { 0: { 0: [(1, default), (0, half_rp), (2, half_rp)],
1: [(2, default), (0, half_rp), (1, half_rp)] },
1: { 0: [(2, default), (0, half_rp), (1, half_rp)],
1: [(1, default), (0, half_rp), (2, half_rp)] },
2: { 0: [(1, default), (1, half_rp), (2, half_rp)],
1: [(2, default), (0, half_rp), (2, half_rp)] } }
def pi_ref(s):
if isinstance(s, mdp.MDP):
s = s.s
# expert: s0->a0 s1->a1 s2->a0
if s == 0: return 0
if s == 1: return 1
if s == 2: return 0
assert False
def costs(s, a, s1):
# this is just Cmax=1 whenever we disagree with expert, and c=0 otherwise
return 0 if a == pi_ref(s) else 1
return mdp.MDPExample(initial, transitions, costs, T), \
mdp.DeterministicReference(pi_ref)
def test1(LEARNER=LearnerOpts.DAGGER):
print()
print('Running test 1 with learner=%s' % LEARNER)
print('=======================================================')
n_states = 3
n_actions = 2
tRNN = TransitionRNN(
[mdp.MDPFeatures(n_states, noise_rate=0.5)],
[AttendAt(lambda _: 0, 's')],
n_actions)
policy = LinearPolicy(tRNN, n_actions)
p_rollin_ref = stochastic(ExponentialAnnealing(0.99))
p_rollout_ref = stochastic(ExponentialAnnealing(1))
optimizer = torch.optim.Adam(policy.parameters(), lr=0.01)
test_mdp, pi_ref = make_ross_mdp()
if LEARNER == LearnerOpts.DAGGER:
learner = lambda: DAgger(pi_ref, policy, p_rollin_ref)
elif LEARNER == LearnerOpts.TWISTED:
learner = lambda: TwistedDAgger(pi_ref, policy, p_rollin_ref)
elif LEARNER == LearnerOpts.MAXLIK:
learner = lambda: MaximumLikelihood(pi_ref, policy)
elif LEARNER == LearnerOpts.AGGREVATE:
learner = lambda: AggreVaTe(pi_ref, policy, p_rollin_ref)
elif LEARNER == LearnerOpts.LOLS:
learner = None
losses = []
for epoch in range(101):
optimizer.zero_grad()
if learner is not None:
l = learner()
env = test_mdp.mk_env()
res = env.run_episode(l)
loss = mdp.MDPLoss()(test_mdp, env)
l.update(loss)
elif LEARNER == LearnerOpts.LOLS:
lols(test_mdp, mdp.MDPLoss, pi_ref, policy, p_rollin_ref, p_rollout_ref)
optimizer.step()
p_rollin_ref.step()
p_rollout_ref.step()
env = test_mdp.mk_env()
res = env.run_episode(policy)
loss = mdp.MDPLoss()(test_mdp, env)
losses.append(loss)
if epoch % 20 == 0:
print(epoch, sum(losses[-100:]) / len(losses[-100:]), '\t', res)
if __name__ == '__main__':
test1(LearnerOpts.MAXLIK)
test1(LearnerOpts.DAGGER)
test1(LearnerOpts.AGGREVATE)
test1(LearnerOpts.LOLS)
| 34.016949
| 84
| 0.604136
|
b61e0ca05e1005455fba07a43269d7ea930eca57
| 4,848
|
py
|
Python
|
app/__init__.py
|
ykatzir/qapp-gsuite-react
|
a2935b27049372d65f7c995fef14642685306a0e
|
[
"Apache-2.0"
] | null | null | null |
app/__init__.py
|
ykatzir/qapp-gsuite-react
|
a2935b27049372d65f7c995fef14642685306a0e
|
[
"Apache-2.0"
] | null | null | null |
app/__init__.py
|
ykatzir/qapp-gsuite-react
|
a2935b27049372d65f7c995fef14642685306a0e
|
[
"Apache-2.0"
] | null | null | null |
# pylint: skip-file
__author__ = 'IBM'
import os.path
import sys
import json
import re
from flask import Flask, Blueprint
from flask import send_from_directory, render_template, request
from flask_restplus import Api
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow
from qpylib import qpylib
app = Flask(__name__)
marshal = Marshmallow(app)
app.config['SECRET_KEY'] = os.urandom(16)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + qpylib.get_store_path('gsuite.db')
db = SQLAlchemy(app)
api_blueprint = Blueprint('api', __name__, url_prefix='/api')
api = Api(api_blueprint, doc='/doc/')
app.register_blueprint(api_blueprint)
# Create log here to prevent race condition when importing views
qpylib.create_log()
try:
qpylib.register_jsonld_endpoints()
except AttributeError:
qpylib.log("Application's qpylib directory has been detected as outdated. Please consider updating to the latest version.", level='warn')
from app import views
from app import views_api
from app.models import database
db.create_all()
@app.after_request
def obscure_server_header(resp):
resp.headers['Server'] = 'QRadar App {0}'.format(qpylib.get_app_id())
return resp
@app.route('/debug')
def debug():
return 'Pong!'
@app.route('/debug_view')
def debug_view():
debug_content = open('/store/log/app.log').read()
return render_template('debug.html', debug_content=debug_content)
@app.route('/resources/<path:filename>')
def send_file(filename):
qpylib.log(" >>> route resources >>>")
qpylib.log(" filename=" + filename)
qpylib.log(" app.static_folder=" + app.static_folder)
qpylib.log(" full file path =" + app.static_folder + '/resources/'+filename )
return send_from_directory(app.static_folder, 'resources/'+filename)
@app.route('/log_level', methods=['POST'])
def log_level():
level = request.form['level'].upper()
levels = ['INFO', 'DEBUG', 'ERROR', 'WARNING', 'CRITICAL']
if level in levels:
qpylib.set_log_level(request.form['level'])
else:
return 'level parameter missing or unsupported - ' + str(levels), 42
return 'log level set to ' + level
# Untested or compiled code
@app.route('/react-intl/<path:requested>', methods=['GET'])
def reactIntl(requested):
def put_in_container( container, key, value ):
key_parts = key.split(".")
s = len(key_parts)
l = 0
while s > 1:
part = key_parts[l]
if not part in container:
container[part] = {}
container = container[part]
s = s-1
l = l+1
container[key_parts[l]] = value
resources = os.path.dirname( os.path.abspath( sys.argv[0] ) ) + "/app/static/resources"
requested_language = None
requested_locale = None
lang_locale = re.compile("[_\\-]").split(requested)
if len(lang_locale) == 2:
requested_language = lang_locale[0]
requested_locale = lang_locale[1]
else:
requested_language = requested
requested_locale = None
qpylib.log("Requested language {0}, locale {1}".format(requested_language,requested_locale), "DEBUG")
result = { "locales":[], "messages":{} }
for f in os.listdir(resources):
bundle_lang = f.split("_")
# This will be either application_<lang>.properties, in which case we have 2 parts, or application_<lang>_<locale>.properties, which is 3 parts
locale = None
if len(bundle_lang) == 2:
language = bundle_lang[1].split(".")[0]
else:
language = bundle_lang[1]
locale = bundle_lang[2].split(".")[0]
qpylib.log("Bundle {0} language {1}, locale {2}".format(f,language,locale), "DEBUG")
if language == requested_language:
filepath = os.path.join( resources, f )
if os.path.isfile(filepath):
with open(filepath) as thefile:
lang = {}
for line in thefile:
line = line.strip()
if len(line) > 0:
key_value = line.split("=")
put_in_container( lang, key_value[0].strip(), key_value[1].decode('unicode-escape') )
if locale is None:
result["locales"].append( language )
else:
result["locales"].append( language + "_" + locale )
result["messages"].update(lang)
return json.dumps(result)
# register the new q_url_for() method for use with Jinja2 templates
app.add_template_global(qpylib.q_url_for, 'q_url_for')
| 31.277419
| 152
| 0.613036
|
620919a7422d7c64f5d1cb8817f78e8ccc82aa86
| 7,388
|
py
|
Python
|
flaskr/templates/solver_google.py
|
sa-tony/full-stack-vrp
|
803ea720b99c46f819b4d9a3d076b655383eec75
|
[
"MIT"
] | null | null | null |
flaskr/templates/solver_google.py
|
sa-tony/full-stack-vrp
|
803ea720b99c46f819b4d9a3d076b655383eec75
|
[
"MIT"
] | 7
|
2020-08-19T00:57:33.000Z
|
2022-02-26T18:42:12.000Z
|
flaskr/templates/solver_google.py
|
sa-tony/full-stack-vrp
|
803ea720b99c46f819b4d9a3d076b655383eec75
|
[
"MIT"
] | 2
|
2020-05-21T23:20:18.000Z
|
2020-07-06T14:51:34.000Z
|
from .solver import BaseSolver
from typing import List
from ortools.constraint_solver import routing_enums_pb2
from ortools.constraint_solver import pywrapcp
import os
class GoogleLocalSearchSolver(BaseSolver):
def solve(self) -> List[List[int]]:
def print_solution(manager, routing, assignment):
"""Prints assignment on console."""
total_distance = 0
total_load = 0
result = []
for vehicle_id in range(len(self.capacities)):
temp = []
index = routing.Start(vehicle_id)
plan_output = 'Route for vehicle {}:\n'.format(vehicle_id)
route_distance = 0
route_load = 0
while not routing.IsEnd(index):
node_index = manager.IndexToNode(index)
route_load += 0 if node_index <= len(self.origins) else 1
temp.append(node_index)
plan_output += ' {0} Load({1}) -> '.format(node_index, route_load)
previous_index = index
index = assignment.Value(routing.NextVar(index))
distance = routing.GetArcCostForVehicle(
previous_index, index, vehicle_id)
route_distance += distance
plan_output += ' {0}m -> '.format(distance)
temp.append(manager.IndexToNode(index))
result.append(temp)
plan_output += ' {0} Load({1})\n'.format(
manager.IndexToNode(index), route_load)
plan_output += 'Distance of the route: {}m\n'.format(route_distance)
plan_output += 'Load of the route: {}\n'.format(route_load)
print(plan_output)
total_distance += route_distance
total_load += route_load
print('Total distance of all routes: {}m'.format(total_distance))
print('Total load of all routes: {}'.format(total_load))
return result
manager = pywrapcp.RoutingIndexManager(
self.num_locations, len(self.capacities), self.origins, [self.destination] * len(self.capacities)
)
routing = pywrapcp.RoutingModel(manager)
def distance_callback(from_index, to_index):
from_node = manager.IndexToNode(from_index)
to_node = manager.IndexToNode(to_index)
return self.distance_matrix[from_node][to_node] * 100
transit_callback_index = routing.RegisterTransitCallback(distance_callback)
routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)
routing.AddDimension(
transit_callback_index,
0,
300000,
True,
'Distance'
)
def demand_callback(from_index):
return 0 if manager.IndexToNode(from_index) in self.origins else 1
demand_callback_index = routing.RegisterUnaryTransitCallback(demand_callback)
routing.AddDimensionWithVehicleCapacity(
demand_callback_index,
0,
self.capacities,
True,
'Capacity'
)
distance_dimension = routing.GetDimensionOrDie('Distance')
distance_dimension.SetGlobalSpanCostCoefficient(10000)
search_parameter = pywrapcp.DefaultRoutingSearchParameters()
# search_parameter.first_solution_strategy = (routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC)
search_parameter.local_search_metaheuristic = (
routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH)
search_parameter.time_limit.seconds = int(os.getenv('SOLVER_TIME_SPAN'))
assignment = routing.SolveWithParameters(search_parameter)
if assignment:
return print_solution(manager, routing, assignment)
class GoogleFirstSolutionSolver(BaseSolver):
def solve(self) -> List[List[int]]:
def print_solution(manager, routing, assignment):
"""Prints assignment on console."""
total_distance = 0
total_load = 0
result = []
for vehicle_id in range(len(self.capacities)):
temp = []
index = routing.Start(vehicle_id)
plan_output = 'Route for vehicle {}:\n'.format(vehicle_id)
route_distance = 0
route_load = 0
while not routing.IsEnd(index):
node_index = manager.IndexToNode(index)
route_load += 0 if node_index <= len(self.origins) else 1
temp.append(node_index)
plan_output += ' {0} Load({1}) -> '.format(node_index, route_load)
previous_index = index
index = assignment.Value(routing.NextVar(index))
distance = routing.GetArcCostForVehicle(
previous_index, index, vehicle_id)
route_distance += distance
plan_output += ' {0}m -> '.format(distance)
temp.append(manager.IndexToNode(index))
result.append(temp)
plan_output += ' {0} Load({1})\n'.format(
manager.IndexToNode(index), route_load)
plan_output += 'Distance of the route: {}m\n'.format(route_distance)
plan_output += 'Load of the route: {}\n'.format(route_load)
print(plan_output)
total_distance += route_distance
total_load += route_load
print('Total distance of all routes: {}m'.format(total_distance))
print('Total load of all routes: {}'.format(total_load))
return result
manager = pywrapcp.RoutingIndexManager(
self.num_locations, len(self.capacities), self.origins, [self.destination] * len(self.capacities)
)
routing = pywrapcp.RoutingModel(manager)
def distance_callback(from_index, to_index):
from_node = manager.IndexToNode(from_index)
to_node = manager.IndexToNode(to_index)
return self.distance_matrix[from_node][to_node]  # distances used as-is here, without the *100 scaling applied in the local-search solver
transit_callback_index = routing.RegisterTransitCallback(distance_callback)
routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)
routing.AddDimension(
transit_callback_index,
0,
300000,
True,
'Distance'
)
def demand_callback(from_index):
return 0 if manager.IndexToNode(from_index) in self.origins else 1
demand_callback_index = routing.RegisterUnaryTransitCallback(demand_callback)
routing.AddDimensionWithVehicleCapacity(
demand_callback_index,
0,
self.capacities,
True,
'Capacity'
)
distance_dimension = routing.GetDimensionOrDie('Distance')
distance_dimension.SetGlobalSpanCostCoefficient(10000)
search_parameter = pywrapcp.DefaultRoutingSearchParameters()
search_parameter.first_solution_strategy = (routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC)
assignment = routing.SolveWithParameters(search_parameter)
if assignment:
return print_solution(manager, routing, assignment)
| 43.204678
| 112
| 0.607742
|
819db9bafffd87422a6ee29a1b2d1cf9979a194b
| 547
|
py
|
Python
|
backend/domain/rules/auth/change_password.py
|
uesleicarvalhoo/Ecommerce
|
1d8d0f0c522dcd27fd90e315989b6fa93caf62b8
|
[
"MIT"
] | null | null | null |
backend/domain/rules/auth/change_password.py
|
uesleicarvalhoo/Ecommerce
|
1d8d0f0c522dcd27fd90e315989b6fa93caf62b8
|
[
"MIT"
] | null | null | null |
backend/domain/rules/auth/change_password.py
|
uesleicarvalhoo/Ecommerce
|
1d8d0f0c522dcd27fd90e315989b6fa93caf62b8
|
[
"MIT"
] | null | null | null |
from uuid import UUID
from backend.domain.contracts import AuthenticationService, Client, ClientService
class ChangePassword:
def __init__(self, service: ClientService, authentication: AuthenticationService) -> None:
self.service = service
self.auth = authentication
def handle(self, client_id: UUID, new_password: str) -> Client:
client = self.service.get_by_id(client_id)
client.password_hash = self.auth.generate_password_hash(new_password)
self.service.save(client)
return client
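# Illustrative sketch (not part of the original module): exercising the use case
# with minimal in-memory fakes to show the call flow. The fake service and
# authenticator below are assumptions for demonstration only; the real contracts
# are the ClientService and AuthenticationService protocols imported above.
if __name__ == "__main__":
    from types import SimpleNamespace
    from uuid import uuid4
    client = SimpleNamespace(id=uuid4(), password_hash=None)
    fake_service = SimpleNamespace(get_by_id=lambda _id: client, save=lambda c: None)
    fake_auth = SimpleNamespace(generate_password_hash=lambda pw: "hashed:" + pw)
    updated = ChangePassword(fake_service, fake_auth).handle(client.id, "s3cret")
    assert updated.password_hash == "hashed:s3cret"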
| 28.789474
| 94
| 0.729433
|
dd231e5b9d281b72ec25bebecc447b8d6b38c37f
| 6,806
|
py
|
Python
|
tests/test_cli.py
|
jayvdb/django-codemod
|
8626e6bb5383d9ef3b733861d6996213d90cf26d
|
[
"MIT"
] | null | null | null |
tests/test_cli.py
|
jayvdb/django-codemod
|
8626e6bb5383d9ef3b733861d6996213d90cf26d
|
[
"MIT"
] | 2
|
2022-01-19T14:30:11.000Z
|
2022-01-28T14:32:34.000Z
|
tests/test_cli.py
|
jayvdb/django-codemod
|
8626e6bb5383d9ef3b733861d6996213d90cf26d
|
[
"MIT"
] | null | null | null |
import click
import pytest
from click.testing import CliRunner
from libcst.codemod import ParallelTransformResult
from django_codemod import cli
from django_codemod.cli import DEPRECATED_IN, REMOVED_IN, build_command, call_command
@pytest.fixture()
def cli_runner():
return CliRunner()
@pytest.fixture()
def gather_files_mocked(mocker):
"""Mock return value of gather_files from libCST."""
gather_files = mocker.patch("django_codemod.cli.gather_files")
gather_files.return_value = ["some/file.py"]
@pytest.fixture()
def command_instance():
"""Dummy command instance to test call_command."""
return build_command([])
def test_missing_argument(cli_runner):
"""Should explain missing arguments."""
result = cli_runner.invoke(cli.djcodemod)
assert result.exit_code == 2
assert "Error: Missing argument 'PATH'" in result.output
@pytest.mark.parametrize(
"command_line",
[
# Missing options
["."],
# Too many options
["--removed-in", "3.0", "--deprecated-in", "2.0", "."],
],
)
def test_invalid_options(cli_runner, command_line):
"""Should explain missing option."""
result = cli_runner.invoke(cli.djcodemod, command_line)
assert result.exit_code == 2
assert (
"Error: You must specify either '--removed-in' or "
"'--deprecated-in' but not both." in result.output
)
def test_help(cli_runner):
"""The --help option should be available."""
help_result = cli_runner.invoke(cli.djcodemod, ["--help"])
assert help_result.exit_code == 0
assert "--help" in help_result.output
assert "Show this message and exit." in help_result.output
def test_missing_version_parts(cli_runner):
result = cli_runner.invoke(cli.djcodemod, ["--removed-in", "3", "."])
assert result.exit_code == 2
assert "missing version parts." in result.output
@pytest.mark.parametrize(
("option", "version"),
[
# Removed in option
("--removed-in", "1.0"),
("--removed-in", "11.0"),
# Deprecated in option
("--deprecated-in", "1.0"),
("--deprecated-in", "3.4"),
],
)
def test_non_supported_version(cli_runner, option, version):
result = cli_runner.invoke(cli.djcodemod, [option, version, "."])
assert result.exit_code == 2
assert f"'{version}' is not supported. Versions supported:" in result.output
def test_invalid_version(cli_runner):
result = cli_runner.invoke(cli.djcodemod, ["--removed-in", "not.a.version", "."])
assert result.exit_code == 2
assert "'not.a.version' is not a valid version" in result.output
@pytest.mark.parametrize(
("option", "version"),
[
# Removed in option
("--removed-in", "3.0"),
("--deprecated-in", "2.0"),
],
)
def test_basic_arguments(mocker, cli_runner, option, version):
call_command = mocker.patch("django_codemod.cli.call_command")
result = cli_runner.invoke(cli.djcodemod, [option, version, "."])
assert result.exit_code == 0
call_command.assert_called_once()
@pytest.mark.usefixtures("gather_files_mocked")
def test_call_command_success(command_instance, mocker):
executor = mocker.patch(
"django_codemod.cli.parallel_exec_transform_with_prettyprint"
)
executor.return_value = ParallelTransformResult(
successes=1, failures=0, warnings=0, skips=0
)
result = call_command(command_instance, ".")
assert result is None
@pytest.mark.usefixtures("gather_files_mocked")
def test_call_command_failure(command_instance, mocker):
executor = mocker.patch(
"django_codemod.cli.parallel_exec_transform_with_prettyprint"
)
executor.return_value = ParallelTransformResult(
successes=0, failures=1, warnings=0, skips=0
)
with pytest.raises(click.exceptions.Exit):
call_command(command_instance, ".")
@pytest.mark.usefixtures("gather_files_mocked")
def test_call_command_interrupted(command_instance, mocker):
executor = mocker.patch(
"django_codemod.cli.parallel_exec_transform_with_prettyprint",
side_effect=KeyboardInterrupt(),
)
executor.return_value = ParallelTransformResult(
successes=1, failures=0, warnings=0, skips=0
)
with pytest.raises(click.Abort):
call_command(command_instance, ".")
def _mapping_repr(mapping):
"""Helper to return class names in the dict values."""
return {
version: [klass.__name__ for klass in classes_list]
for version, classes_list in mapping.items()
}
def test_deprecated_in_mapping():
"""Transformers found by the ``DEPRECATED_IN`` mapping."""
assert _mapping_repr(DEPRECATED_IN) == {
(3, 0): [
"ForceTextTransformer",
"HttpUrlQuotePlusTransformer",
"HttpUrlQuoteTransformer",
"HttpUrlUnQuotePlusTransformer",
"HttpUrlUnQuoteTransformer",
"IsSafeUrlTransformer",
"SmartTextTransformer",
"UGetTextLazyTransformer",
"UGetTextNoopTransformer",
"UGetTextTransformer",
"UNGetTextLazyTransformer",
"UNGetTextTransformer",
"URLTransformer",
"UnescapeEntitiesTransformer",
],
(2, 1): ["InlineHasAddPermissionsTransformer"],
(2, 0): [
"AbsPathTransformer",
"AvailableAttrsTransformer",
"ContextDecoratorTransformer",
"LRUCacheTransformer",
"RenderToResponseTransformer",
"UnicodeCompatibleTransformer",
],
(1, 11): ["ModelsPermalinkTransformer"],
(1, 9): ["OnDeleteTransformer"],
}
def test_removed_in_mapping():
"""Transformers found by the ``REMOVED_IN`` mapping."""
assert _mapping_repr(REMOVED_IN) == {
(4, 0): [
"ForceTextTransformer",
"HttpUrlQuotePlusTransformer",
"HttpUrlQuoteTransformer",
"HttpUrlUnQuotePlusTransformer",
"HttpUrlUnQuoteTransformer",
"IsSafeUrlTransformer",
"SmartTextTransformer",
"UGetTextLazyTransformer",
"UGetTextNoopTransformer",
"UGetTextTransformer",
"UNGetTextLazyTransformer",
"UNGetTextTransformer",
"URLTransformer",
"UnescapeEntitiesTransformer",
],
(3, 0): [
"AbsPathTransformer",
"AvailableAttrsTransformer",
"ContextDecoratorTransformer",
"InlineHasAddPermissionsTransformer",
"LRUCacheTransformer",
"RenderToResponseTransformer",
"UnicodeCompatibleTransformer",
],
(2, 1): ["ModelsPermalinkTransformer"],
(2, 0): ["OnDeleteTransformer"],
}
| 29.982379
| 85
| 0.647223
|
ad32cf011a074e00aeab31c04da3d497ac400c4d
| 566
|
py
|
Python
|
tests/nose_plugin.py
|
xzchsia/shadowsocks-analysis
|
9d8e3e2d3b3e2b41cee51bed99231803e9b13539
|
[
"MIT"
] | 42
|
2016-09-21T09:48:48.000Z
|
2022-02-10T14:57:35.000Z
|
tests/nose_plugin.py
|
wyqwyq/shadowsocks
|
ea535c74b21144c769dd41e6ccba36498801db85
|
[
"MIT"
] | null | null | null |
tests/nose_plugin.py
|
wyqwyq/shadowsocks
|
ea535c74b21144c769dd41e6ccba36498801db85
|
[
"MIT"
] | 25
|
2015-08-22T05:41:51.000Z
|
2019-11-11T09:58:35.000Z
|
import nose
from nose.plugins.base import Plugin
class ExtensionPlugin(Plugin):
name = "ExtensionPlugin"
def options(self, parser, env):
Plugin.options(self, parser, env)
def configure(self, options, config):
Plugin.configure(self, options, config)
self.enabled = True
def wantFile(self, file):
return file.endswith('.py')
def wantDirectory(self, directory):
return True
def wantModule(self, file):
return True
if __name__ == '__main__':
nose.main(addplugins=[ExtensionPlugin()])
| 20.214286
| 47
| 0.655477
|
e105c463b3b7f4ab305c4ff5fddc29fa9faa8246
| 6,484
|
py
|
Python
|
homeassistant/components/rfxtrx/sensor.py
|
djbowers/home-assistant
|
cecdce07cc8b24ab353e74bec6bb78ef9c0fdf7f
|
[
"Apache-2.0"
] | null | null | null |
homeassistant/components/rfxtrx/sensor.py
|
djbowers/home-assistant
|
cecdce07cc8b24ab353e74bec6bb78ef9c0fdf7f
|
[
"Apache-2.0"
] | 37
|
2020-07-17T23:20:00.000Z
|
2022-03-31T06:01:48.000Z
|
homeassistant/components/rfxtrx/sensor.py
|
djbowers/home-assistant
|
cecdce07cc8b24ab353e74bec6bb78ef9c0fdf7f
|
[
"Apache-2.0"
] | null | null | null |
"""Support for RFXtrx sensors."""
import logging
from RFXtrx import ControlEvent, SensorEvent
from homeassistant.components.sensor import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_SIGNAL_STRENGTH,
DEVICE_CLASS_TEMPERATURE,
)
from homeassistant.const import CONF_DEVICES
from homeassistant.core import callback
from homeassistant.helpers.restore_state import RestoreEntity
from . import (
CONF_AUTOMATIC_ADD,
DATA_TYPES,
DOMAIN,
SIGNAL_EVENT,
get_device_id,
get_rfx_object,
)
from .const import ATTR_EVENT, DATA_RFXTRX_CONFIG
_LOGGER = logging.getLogger(__name__)
def _battery_convert(value):
"""Battery is given as a value between 0 and 9."""
if value is None:
return None
return value * 10
def _rssi_convert(value):
"""Rssi is given as dBm value."""
if value is None:
return None
return f"{value*8-120}"
DEVICE_CLASSES = {
"Battery numeric": DEVICE_CLASS_BATTERY,
"Rssi numeric": DEVICE_CLASS_SIGNAL_STRENGTH,
"Humidity": DEVICE_CLASS_HUMIDITY,
"Temperature": DEVICE_CLASS_TEMPERATURE,
}
CONVERT_FUNCTIONS = {
"Battery numeric": _battery_convert,
"Rssi numeric": _rssi_convert,
}
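# Illustrative check (not part of the original integration): per the helpers
# above, a raw battery level of 9 maps to 90 (per cent) and a raw RSSI reading
# of 10 maps to "-40" (dBm).
if __name__ == "__main__":
    assert _battery_convert(9) == 90
    assert _rssi_convert(10) == "-40"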
async def async_setup_entry(
hass, config_entry, async_add_entities,
):
"""Set up platform."""
discovery_info = hass.data[DATA_RFXTRX_CONFIG]
data_ids = set()
def supported(event):
return isinstance(event, (ControlEvent, SensorEvent))
entities = []
for packet_id in discovery_info[CONF_DEVICES]:
event = get_rfx_object(packet_id)
if event is None:
_LOGGER.error("Invalid device: %s", packet_id)
continue
if not supported(event):
continue
device_id = get_device_id(event.device)
for data_type in set(event.values) & set(DATA_TYPES):
data_id = (*device_id, data_type)
if data_id in data_ids:
continue
data_ids.add(data_id)
entity = RfxtrxSensor(event.device, device_id, data_type)
entities.append(entity)
async_add_entities(entities)
@callback
def sensor_update(event, device_id):
"""Handle sensor updates from the RFXtrx gateway."""
if not supported(event):
return
for data_type in set(event.values) & set(DATA_TYPES):
data_id = (*device_id, data_type)
if data_id in data_ids:
continue
data_ids.add(data_id)
_LOGGER.info(
"Added sensor (Device ID: %s Class: %s Sub: %s, Event: %s)",
event.device.id_string.lower(),
event.device.__class__.__name__,
event.device.subtype,
"".join(f"{x:02x}" for x in event.data),
)
entity = RfxtrxSensor(event.device, device_id, data_type, event=event)
async_add_entities([entity])
# Subscribe to main RFXtrx events
if discovery_info[CONF_AUTOMATIC_ADD]:
hass.helpers.dispatcher.async_dispatcher_connect(SIGNAL_EVENT, sensor_update)
class RfxtrxSensor(RestoreEntity):
"""Representation of a RFXtrx sensor."""
def __init__(self, device, device_id, data_type, event=None):
"""Initialize the sensor."""
self._event = None
self._device = device
self._name = f"{device.type_string} {device.id_string} {data_type}"
self.data_type = data_type
self._unit_of_measurement = DATA_TYPES.get(data_type, "")
self._device_id = device_id
self._unique_id = "_".join(x for x in (*self._device_id, data_type))
self._device_class = DEVICE_CLASSES.get(data_type)
self._convert_fun = CONVERT_FUNCTIONS.get(data_type, lambda x: x)
if event:
self._apply_event(event)
async def async_added_to_hass(self):
"""Restore RFXtrx switch device state (ON/OFF)."""
await super().async_added_to_hass()
if self._event is None:
old_state = await self.async_get_last_state()
if old_state is not None:
event = old_state.attributes.get(ATTR_EVENT)
if event:
self._apply_event(get_rfx_object(event))
self.async_on_remove(
self.hass.helpers.dispatcher.async_dispatcher_connect(
SIGNAL_EVENT, self._handle_event
)
)
def __str__(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
if not self._event:
return None
value = self._event.values.get(self.data_type)
return self._convert_fun(value)
@property
def name(self):
"""Get the name of the sensor."""
return self._name
@property
def device_state_attributes(self):
"""Return the device state attributes."""
if not self._event:
return None
return {ATTR_EVENT: "".join(f"{x:02x}" for x in self._event.data)}
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return self._unit_of_measurement
@property
def device_class(self):
"""Return a device class for sensor."""
return self._device_class
@property
def unique_id(self):
"""Return unique identifier of remote device."""
return self._unique_id
@property
def device_info(self):
"""Return the device info."""
return {
"identifiers": {(DOMAIN, *self._device_id)},
"name": f"{self._device.type_string} {self._device.id_string}",
"model": self._device.type_string,
}
def _apply_event(self, event):
"""Apply command from rfxtrx."""
self._event = event
@callback
def _handle_event(self, event, device_id):
"""Check if event applies to me and update."""
if not isinstance(event, SensorEvent):
return
if device_id != self._device_id:
return
if self.data_type not in event.values:
return
_LOGGER.debug(
"Sensor update (Device ID: %s Class: %s Sub: %s)",
event.device.id_string,
event.device.__class__.__name__,
event.device.subtype,
)
self._apply_event(event)
self.async_write_ha_state()
| 28.690265
| 85
| 0.623689
|
222760d9c1a74a14a1fa8b22a7634a35a0b00165
| 7,300
|
py
|
Python
|
examples/point_pillars/configs/kitti_point_pillars_mghead_syncbn.py
|
yjmade/Det3D
|
5cefdde927d9a1d3ee53316300daf023add3fa1e
|
[
"Apache-2.0"
] | null | null | null |
examples/point_pillars/configs/kitti_point_pillars_mghead_syncbn.py
|
yjmade/Det3D
|
5cefdde927d9a1d3ee53316300daf023add3fa1e
|
[
"Apache-2.0"
] | null | null | null |
examples/point_pillars/configs/kitti_point_pillars_mghead_syncbn.py
|
yjmade/Det3D
|
5cefdde927d9a1d3ee53316300daf023add3fa1e
|
[
"Apache-2.0"
] | null | null | null |
import itertools
import logging
import os
from det3d.builder import build_box_coder
from det3d.utils.config_tool import get_downsample_factor
# norm_cfg = dict(type='SyncBN', eps=1e-3, momentum=0.01)
norm_cfg = None
pc_range = [0, -39.68, -3, 69.12, 39.68, 1]
voxel_size = [0.16, 0.16, 4.0]
tasks = [
dict(num_class=1, class_names=["Car",],),
]
class_names = list(itertools.chain(*[t["class_names"] for t in tasks]))
# training and testing settings
target_assigner = dict(
type="iou",
anchor_generators=[
dict(
type="anchor_generator_range",
sizes=[1.6, 3.9, 1.56],
anchor_ranges=[0, -39.68, -1.0, 69.12, 39.68, -1.0],
rotations=[0, 1.57],
matched_threshold=0.6,
unmatched_threshold=0.45,
class_name="Car",
),
],
sample_positive_fraction=-1,
sample_size=512,
region_similarity_calculator=dict(type="nearest_iou_similarity",),
pos_area_threshold=-1,
tasks=tasks,
)
box_coder = dict(
type="ground_box3d_coder", n_dim=7, linear_dim=False, encode_angle_vector=False,
)
# model settings
model = dict(
type="PointPillars",
pretrained=None,
reader=dict(
type="PillarFeatureNet",
num_filters=[64],
voxel_size=voxel_size,
pc_range=pc_range,
with_distance=False,
norm_cfg=norm_cfg,
),
backbone=dict(type="PointPillarsScatter", ds_factor=1, norm_cfg=norm_cfg,),
neck=dict(
type="RPN",
layer_nums=[3, 5, 5],
ds_layer_strides=[2, 2, 2],
ds_num_filters=[64, 128, 256],
us_layer_strides=[1, 2, 4],
us_num_filters=[128, 128, 128],
num_input_features=64,
norm_cfg=norm_cfg,
logger=logging.getLogger("RPN"),
),
bbox_head=dict(
# type='RPNHead',
type="MultiGroupHead",
mode="3d",
in_channels=sum([128, 128, 128]), # this is linked to 'neck' us_num_filters
norm_cfg=norm_cfg,
tasks=tasks,
weights=[1,],
box_coder=build_box_coder(box_coder),
encode_background_as_zeros=True,
loss_norm=dict(
type="NormByNumPositives", pos_cls_weight=1.0, neg_cls_weight=1.0,
),
loss_cls=dict(type="SigmoidFocalLoss", alpha=0.25, gamma=2.0, loss_weight=1.0,),
use_sigmoid_score=True,
loss_bbox=dict(
type="WeightedSmoothL1Loss",
sigma=3.0,
code_weights=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
codewise=True,
loss_weight=2.0,
),
encode_rad_error_by_sin=True,
loss_aux=dict(
type="WeightedSoftmaxClassificationLoss",
name="direction_classifier",
loss_weight=0.2,
),
direction_offset=0.0,
),
)
assigner = dict(
box_coder=box_coder,
target_assigner=target_assigner,
out_size_factor=get_downsample_factor(model),
)
train_cfg = dict(assigner=assigner)
test_cfg = dict(
nms=dict(
use_rotate_nms=True,
use_multi_class_nms=False,
nms_pre_max_size=1000,
nms_post_max_size=300,
nms_iou_threshold=0.5,
),
score_threshold=0.05,
post_center_limit_range=[0, -40.0, -5.0, 70.4, 40.0, 5.0],
max_per_img=100,
)
# dataset settings
dataset_type = "KittiDataset"
data_root = os.environ.get("KITTI_DATA_ROOT", "/data/8-1/datasets/kitti/kitti_tiny")
db_sampler = dict(
type="GT-AUG",
enable=True,
db_info_path=f"{data_root}/dbinfos_train.pkl",
sample_groups=[dict(Car=15,),],
db_prep_steps=[
dict(filter_by_min_num_points=dict(Car=5,)),
dict(filter_by_difficulty=[-1],),
],
global_random_rotation_range_per_object=[0, 0],
rate=1.0,
)
train_preprocessor = dict(
mode="train",
shuffle_points=True,
gt_loc_noise=[0.25, 0.25, 0.25],
gt_rot_noise=[-0.15707963267, 0.15707963267],
global_rot_noise=[-0.78539816, 0.78539816],
global_scale_noise=[0.95, 1.05],
global_rot_per_obj_range=[0, 0],
global_trans_noise=[0.0, 0.0, 0.0],
remove_points_after_sample=True,
gt_drop_percentage=0.0,
gt_drop_max_keep_points=15,
remove_unknown_examples=False,
remove_environment=False,
db_sampler=db_sampler,
class_names=class_names,
)
val_preprocessor = dict(
mode="val",
shuffle_points=False,
remove_environment=False,
remove_unknown_examples=False,
)
voxel_generator = dict(
range=pc_range,
voxel_size=voxel_size,
max_points_in_voxel=100,
max_voxel_num=12000,
)
train_pipeline = [
dict(type="LoadPointCloudFromFile"),
dict(type="LoadPointCloudAnnotations", with_bbox=True),
dict(type="Preprocess", cfg=train_preprocessor),
dict(type="Voxelization", cfg=voxel_generator),
dict(type="AssignTarget", cfg=train_cfg["assigner"]),
dict(type="Reformat"),
# dict(type='PointCloudCollect', keys=['points', 'voxels', 'annotations', 'calib']),
]
test_pipeline = [
dict(type="LoadPointCloudFromFile"),
dict(type="LoadPointCloudAnnotations", with_bbox=True),
dict(type="Preprocess", cfg=val_preprocessor),
dict(type="Voxelization", cfg=voxel_generator),
dict(type="AssignTarget", cfg=train_cfg["assigner"]),
dict(type="Reformat"),
]
train_anno = f"{data_root}/kitti_infos_train.pkl"
val_anno = f"{data_root}/kitti_infos_val.pkl"
test_anno = None
data = dict(
samples_per_gpu=3,
workers_per_gpu=3,
train=dict(
type=dataset_type,
root_path=data_root,
info_path=data_root + "/kitti_infos_train.pkl",
ann_file=train_anno,
class_names=class_names,
pipeline=train_pipeline,
),
val=dict(
type=dataset_type,
root_path=data_root,
info_path=data_root + "/kitti_infos_val.pkl",
ann_file=val_anno,
class_names=class_names,
pipeline=test_pipeline,
),
test=dict(
type=dataset_type,
root_path=data_root,
info_path=test_anno,
ann_file=test_anno,
class_names=class_names,
pipeline=test_pipeline,
),
)
# optimizer = dict(type='SGD', lr=0.0002, momentum=0.9, weight_decay=0.0001)
# optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# # learning policy
# lr_config = dict(
# type='multinomial',
# policy='step',
# warmup='linear',
# warmup_iters=500,
# warmup_ratio=1.0 / 3,
# step=[15, 30, 45, 60, 75, 90, 105, 120, 135, 150])
# optimizer
optimizer = dict(
TYPE="adam",
VALUE=dict(amsgrad=0.0, wd=0.01),
FIXED_WD=True,
MOVING_AVERAGE=False,
)
"""training hooks """
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy in training hooks
lr_config = dict(
type="one_cycle", lr_max=3e-3, moms=[0.95, 0.85], div_factor=10.0, pct_start=0.4,
)
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=20,
hooks=[
dict(type="TextLoggerHook"),
# dict(type='TensorboardLoggerHook')
],
)
# yapf:enable
# runtime settings
total_epochs = 100
device_ids = range(8)
dist_params = dict(backend="nccl", init_method="env://")
log_level = "INFO"
work_dir = "/data/Outputs/det3d_Outputs/Point_Pillars"
load_from = None
resume_from = None
workflow = [("train", 5), ("val", 1)]
| 27.54717
| 88
| 0.646849
|
d6d755a3ca524f0c96bbce69357e50df56960524
| 88
|
py
|
Python
|
interviews/google-2018/longest_increasing_subsequence.py
|
ridhwaans/exercise-vault
|
eaa4a33d08988b43fb627781d9edf5fe85ca9072
|
[
"MIT"
] | null | null | null |
interviews/google-2018/longest_increasing_subsequence.py
|
ridhwaans/exercise-vault
|
eaa4a33d08988b43fb627781d9edf5fe85ca9072
|
[
"MIT"
] | 8
|
2021-03-18T23:06:00.000Z
|
2021-06-14T21:46:01.000Z
|
interviews/google-2018/longest_increasing_subsequence.py
|
ridhwaans/practice-vault
|
46d3afb62377ae6e9c62c5a388fdffa6d4db3a1a
|
[
"MIT"
] | null | null | null |
'''
Given an unsorted array of integers, find the longest increasing subsequence
'''
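# A minimal sketch of one standard solution (added for illustration; the original
# file contains only the problem statement above): the O(n log n) patience-sorting
# approach, where tails[k] holds the smallest possible tail value of an increasing
# subsequence of length k + 1. It returns the length of the longest increasing
# subsequence.
from bisect import bisect_left
def longest_increasing_subsequence_length(nums):
    tails = []
    for x in nums:
        i = bisect_left(tails, x)
        if i == len(tails):
            tails.append(x)  # x extends the longest subsequence seen so far
        else:
            tails[i] = x  # x becomes a smaller tail for subsequences of length i + 1
    return len(tails)
# Example: [10, 9, 2, 5, 3, 7, 101, 18] -> 4 (one such subsequence is 2, 3, 7, 18)
assert longest_increasing_subsequence_length([10, 9, 2, 5, 3, 7, 101, 18]) == 4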
| 22
| 77
| 0.738636
|
f43c34ba5ca6b4d4962ac38746ca77a772f20e94
| 9,154
|
py
|
Python
|
cibyl/cli/validator.py
|
adrianfusco/cibyl
|
421fb537e21abddeecdde42c18f4de60425cc804
|
[
"Apache-2.0"
] | null | null | null |
cibyl/cli/validator.py
|
adrianfusco/cibyl
|
421fb537e21abddeecdde42c18f4de60425cc804
|
[
"Apache-2.0"
] | null | null | null |
cibyl/cli/validator.py
|
adrianfusco/cibyl
|
421fb537e21abddeecdde42c18f4de60425cc804
|
[
"Apache-2.0"
] | null | null | null |
"""
# Copyright 2022 Red Hat
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
import logging
from cibyl.exceptions.model import (InvalidEnvironment, InvalidSystem,
NoEnabledSystem, NoValidSystem)
from cibyl.exceptions.source import NoValidSources
LOG = logging.getLogger(__name__)
class Validator:
"""This is a helper class that will filter the configuration according to
the user input.
"""
def __init__(self, ci_args: dict):
self.ci_args = ci_args
def _check_input_environments(self, all_envs, argument,
exception_to_raise):
"""Check if the user input environments exist in the configuration.
:param all_envs: Environments defined in the configuration
:type all_envs: list
:param argument: Name of the cli argument to check
:type argument: str
:raises: InvalidEnvironment
"""
env_user_input = self.ci_args.get(argument)
if env_user_input:
# check if user input environment name exists
for env_name in env_user_input.value:
if env_name not in all_envs:
raise exception_to_raise(env_name, all_envs)
def _consistent_environment(self, env):
"""Check if an environment should be used according to user input.
:param env: Model to validate
:type env: :class:`.Environment`
:returns: Whether the environment is consistent with user input
:rtype: bool
"""
user_env = self.ci_args.get("env_name")
if user_env:
return env.name.value in user_env.value
return True
def _consistent_system(self, system):
"""Check if a system should be used according to user input.
:param system: Model to validate
:type system: :class:`.System`
:returns: Whether the system is consistent with user input
:rtype: bool
"""
name = system.name.value
system_type = system.system_type.value
user_system_type = self.ci_args.get("system_type")
if user_system_type and system_type not in user_system_type.value:
return False
user_systems = self.ci_args.get("systems")
if user_systems and name not in user_systems.value:
return False
return True
def _system_has_valid_sources(self, system):
"""Check if a system should be used according to user input from
sources point of view.
:param system: Model to validate
:type system: :class:`.System`
:returns: Whether the system is consistent with user input
:rtype: bool
"""
system_sources = set(source.name for source in system.sources)
user_sources = self.ci_args.get("sources")
if user_sources:
user_sources_names = set(user_sources.value)
unused_sources = system_sources-user_sources_names
for source in system.sources:
if source.name in unused_sources:
source.disable()
if not user_sources_names & system_sources:
return False
return True
def check_envs(self, environments, systems_check, envs_check,
systems_msg, envs_msg):
"""Iterate over environments and systems and apply some check to each
of them. Only return those that satisfy the checks.
:param environments: Environments to validate
:type environments: list
:param systems_check: Function to use to check a system
:type systems_check: callable
:param envs_check: Function to use to check an environment
:type envs_check: callable
:param systems_msg: Message template to log in case of system check
failure
:type systems_msg: str
:param envs_msg: Message template to log in case of environment check
failure
:type envs_msg: str
:returns: Environments and systems that pass the checks
:rtype: list, list
"""
user_systems = []
user_envs = []
for env in environments:
env_systems = []
if not envs_check(env):
LOG.debug(envs_msg, env.name.value)
continue
for system in env.systems:
if not systems_check(system):
LOG.debug(systems_msg, system.name.value)
continue
env_systems.append(system)
if env_systems:
# if the environment has no valid systems, we will not pass it
# back, so for the rest of the execution we will only consider
# valid environments and systems
env.systems.value = env_systems
user_envs.append(env)
user_systems.extend(env_systems)
return user_envs, user_systems
def _system_is_enabled(self, system):
"""Check if a system should be used according to enabled parameter in
configuration file.
:param system: Model to validate
:type system: :class:`.System`
:returns: Whether the system is enabled
:rtype: bool
"""
return system.is_enabled()
def override_enabled_systems(self, systems):
"""Ensure that systems specified by the user with the --systems
argument are enabled.
:param systems: systems to check
:type systems: list
"""
user_systems = self.ci_args.get("systems")
if not user_systems or not user_systems.value:
# if the user did not specify anything for --systems, nothing to do
# here
return
for system in systems:
if system.name.value in user_systems.value:
system.enable()
def validate_environments(self, environments):
"""Filter environments and systems according to user input.
:returns: Environments and systems that can be used according to user
input
:rtype: list, list
"""
all_envs = []
all_systems = []
# first get all environments and systems so we can ensure that the user
# input is found within the configuration
for env in environments:
all_envs.append(env.name.value)
for system in env.systems:
all_systems.append(system)
self._check_input_environments(all_envs, "env_name",
InvalidEnvironment)
system_names = [system.name.value for system in all_systems]
self._check_input_environments(system_names, "systems", InvalidSystem)
# if the user input is good, then filter the environments and systems
# so we only keep the ones consistent with user input
user_envs, user_systems = self.check_envs(environments,
self._consistent_system,
self._consistent_environment,
"System %s is not consistent"
" with user input",
"Environment %s is not "
"consistent with user input")
if not user_systems:
raise NoValidSystem(system_names)
self.override_enabled_systems(user_systems)
user_envs, user_systems = self.check_envs(user_envs,
self._system_is_enabled,
lambda _: True,
"System %s is disabled ",
"")
if not user_systems:
raise NoEnabledSystem()
system_sources_check = self._system_has_valid_sources
user_envs, user_systems = self.check_envs(user_envs,
system_sources_check,
lambda _: True,
"System %s has no sources "
"consistent with user input",
"")
if not user_systems:
sources = [source['name'] for system in all_systems
for source in system.sources]
raise NoValidSources(sources=sources)
return user_envs
| 38.953191
| 79
| 0.582041
|
dac10da468025e16410ed11f05b8e3389503e195
| 529
|
py
|
Python
|
main.py
|
FelipeCapati/ann-artificial-neural-network-perceptron
|
4e934b9bc34a1dd3e2966404625ad29e6fd3902a
|
[
"MIT"
] | null | null | null |
main.py
|
FelipeCapati/ann-artificial-neural-network-perceptron
|
4e934b9bc34a1dd3e2966404625ad29e6fd3902a
|
[
"MIT"
] | null | null | null |
main.py
|
FelipeCapati/ann-artificial-neural-network-perceptron
|
4e934b9bc34a1dd3e2966404625ad29e6fd3902a
|
[
"MIT"
] | null | null | null |
import numpy as np
from perceptron import Perceptron
training_inputs = []
training_inputs.append(np.array([1, 1]))
training_inputs.append(np.array([1, 0]))
training_inputs.append(np.array([0, 1]))
training_inputs.append(np.array([0, 0]))
labels = np.array([1, 0, 0, 0])
perceptron = Perceptron(2)
perceptron.train(training_inputs, labels)
inputs = np.array([1, 1])
perceptron.predict(inputs)
print(perceptron.predict(inputs))
#=> 1
inputs = np.array([0, 1])
perceptron.predict(inputs)
print(perceptron.predict(inputs))
#=> 0
| 23
| 41
| 0.733459
|
12c52eeed8a6c832531005e8790d6e64b273ecac
| 13,459
|
py
|
Python
|
live_test.py
|
ewh/sendgrid-python
|
b07bbfaeb9d43cca7f87a17829706af5e1297b3f
|
[
"MIT"
] | 2
|
2019-08-31T17:06:44.000Z
|
2019-10-02T20:38:49.000Z
|
live_test.py
|
ewh/sendgrid-python
|
b07bbfaeb9d43cca7f87a17829706af5e1297b3f
|
[
"MIT"
] | 1
|
2020-05-08T03:38:07.000Z
|
2020-05-08T03:38:07.000Z
|
live_test.py
|
ewh/sendgrid-python
|
b07bbfaeb9d43cca7f87a17829706af5e1297b3f
|
[
"MIT"
] | null | null | null |
## Send a Single Email to a Single Recipient
import os
import json
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail, From, To, Subject, PlainTextContent, HtmlContent, SendGridException
message = Mail(from_email=From('dx@sendgrid.com', 'DX'),
to_emails=To('elmer.thomas@sendgrid.com', 'Elmer Thomas'),
subject=Subject('Sending with SendGrid is Fun'),
plain_text_content=PlainTextContent('and easy to do anywhere, even with Python'),
html_content=HtmlContent('<strong>and easy to do anywhere, even with Python</strong>'))
try:
print(json.dumps(message.get(), sort_keys=True, indent=4))
sendgrid_client = SendGridAPIClient(api_key=os.environ.get('SENDGRID_API_KEY'))
response = sendgrid_client.send(message=message)
print(response.status_code)
print(response.body)
print(response.headers)
except SendGridException as e:
print(e.message)
# Send a Single Email to Multiple Recipients
import os
import json
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail, From, To, Subject, PlainTextContent, HtmlContent, SendGridException
to_emails = [
To('elmer.thomas@sendgrid.com', 'Elmer SendGrid'),
To('elmer.thomas@gmail.com', 'Elmer Thomas')
]
message = Mail(from_email=From('dx@sendgrid.com', 'DX'),
to_emails=to_emails,
subject=Subject('Sending with SendGrid is Fun'),
plain_text_content=PlainTextContent('and easy to do anywhere, even with Python'),
html_content=HtmlContent('<strong>and easy to do anywhere, even with Python</strong>'))
try:
print(json.dumps(message.get(), sort_keys=True, indent=4))
sendgrid_client = SendGridAPIClient(api_key=os.environ.get('SENDGRID_API_KEY'))
response = sendgrid_client.send(message=message)
print(response.status_code)
print(response.body)
print(response.headers)
except SendGridException as e:
print(e.message)
# Send Multiple Emails to Multiple Recipients
import os
import json
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail, From, To, Subject, PlainTextContent, HtmlContent, SendGridException, Substitution
import time
import datetime
to_emails = [
To(email='elmer.thomas@sendgrid.com',
name='Elmer SendGrid',
substitutions={
Substitution('-name-', 'Elmer SendGrid'),
Substitution('-github-', 'http://github.com/ethomas'),
},
subject=Subject('Override Global Subject')),
To(email='elmer.thomas@gmail.com',
name='Elmer Thomas',
substitutions={
Substitution('-name-', 'Elmer Thomas'),
Substitution('-github-', 'http://github.com/thinkingserious'),
})
]
ts = time.time()
global_substitutions = Substitution('-time-', datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S'))
message = Mail(from_email=From('dx@sendgrid.com', 'DX'),
to_emails=to_emails,
subject=Subject('Hi -name-'),
plain_text_content=PlainTextContent('Hello -name-, your github is -github-, email sent at -time-'),
html_content=HtmlContent('<strong>Hello -name-, your github is <a href=\"-github-\">here</a></strong> email sent at -time-'),
global_substitutions=global_substitutions,
is_multiple=True)
try:
print(json.dumps(message.get(), sort_keys=True, indent=4))
sendgrid_client = SendGridAPIClient(api_key=os.environ.get('SENDGRID_API_KEY'))
response = sendgrid_client.send(message=message)
print(response.status_code)
print(response.body)
print(response.headers)
except SendGridException as e:
print(e.message)
# Kitchen Sink - an example with all settings used
import os
import json
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import (
Mail, From, To, Cc, Bcc, Subject, PlainTextContent,
HtmlContent, SendGridException, Substitution,
Header, CustomArg, SendAt, Content, MimeType, Attachment,
FileName, FileContent, FileType, Disposition, ContentId,
TemplateId, Section, ReplyTo, Category, BatchId, Asm,
GroupId, GroupsToDisplay, IpPoolName, MailSettings,
BccSettings, BccSettingsEmail, BypassListManagement,
FooterSettings, FooterText, FooterHtml, SandBoxMode,
SpamCheck, SpamThreshold, SpamUrl, TrackingSettings,
ClickTracking, SubscriptionTracking, SubscriptionText,
SubscriptionHtml, SubscriptionSubstitutionTag,
OpenTracking, OpenTrackingSubstitutionTag, Ganalytics,
UtmSource, UtmMedium, UtmTerm, UtmContent, UtmCampaign)
import time
import datetime
message = Mail()
# Define Personalizations
message.to = To('elmer+test1@sendgrid.com', 'Example User1', p=0)
message.to = [
To('elmer+test2@sendgrid.com', 'Example User2', p=0),
To('elmer+test3@sendgrid.com', 'Example User3', p=0)
]
message.cc = Cc('test4@example.com', 'Example User4', p=0)
message.cc = [
Cc('test5@example.com', 'Example User5', p=0),
Cc('test6@example.com', 'Example User6', p=0)
]
message.bcc = Bcc('test7@example.com', 'Example User7', p=0)
message.bcc = [
Bcc('test8@example.com', 'Example User8', p=0),
Bcc('test9@example.com', 'Example User9', p=0)
]
message.subject = Subject('Sending with SendGrid is Fun 0', p=0)
message.header = Header('X-Test1', 'Test1', p=0)
message.header = Header('X-Test2', 'Test2', p=0)
message.header = [
Header('X-Test3', 'Test3', p=0),
Header('X-Test4', 'Test4', p=0)
]
message.substitution = Substitution('%name1%', 'Example Name 1', p=0)
message.substitution = Substitution('%city1%', 'Example City 1', p=0)
message.substitution = [
Substitution('%name2%', 'Example Name 2', p=0),
Substitution('%city2%', 'Example City 2', p=0)
]
message.custom_arg = CustomArg('marketing1', 'true', p=0)
message.custom_arg = CustomArg('transactional1', 'false', p=0)
message.custom_arg = [
CustomArg('marketing2', 'false', p=0),
CustomArg('transactional2', 'true', p=0)
]
message.send_at = SendAt(1461775051, p=0)
message.to = To('test10@example.com', 'Example User10', p=1)
message.to = [
To('test11@example.com', 'Example User11', p=1),
To('test12@example.com', 'Example User12', p=1)
]
message.cc = Cc('test13@example.com', 'Example User13', p=1)
message.cc = [
Cc('test14@example.com', 'Example User14', p=1),
Cc('test15@example.com', 'Example User15', p=1)
]
message.bcc = Bcc('test16@example.com', 'Example User16', p=1)
message.bcc = [
Bcc('test17@example.com', 'Example User17', p=1),
Bcc('test18@example.com', 'Example User18', p=1)
]
message.header = Header('X-Test5', 'Test5', p=1)
message.header = Header('X-Test6', 'Test6', p=1)
message.header = [
Header('X-Test7', 'Test7', p=1),
Header('X-Test8', 'Test8', p=1)
]
message.substitution = Substitution('%name3%', 'Example Name 3', p=1)
message.substitution = Substitution('%city3%', 'Example City 3', p=1)
message.substitution = [
Substitution('%name4%', 'Example Name 4', p=1),
Substitution('%city4%', 'Example City 4', p=1)
]
message.custom_arg = CustomArg('marketing3', 'true', p=1)
message.custom_arg = CustomArg('transactional3', 'false', p=1)
message.custom_arg = [
CustomArg('marketing4', 'false', p=1),
CustomArg('transactional4', 'true', p=1)
]
message.send_at = SendAt(1461775052, p=1)
message.subject = Subject('Sending with SendGrid is Fun 1', p=1)
# The values below this comment are global to entire message
message.from_email = From('dx@sendgrid.com', 'DX')
message.reply_to = ReplyTo('dx_reply@sendgrid.com', 'DX Reply')
message.subject = Subject('Sending with SendGrid is Fun 2')
message.content = Content(MimeType.text, 'and easy to do anywhere, even with Python')
message.content = Content(MimeType.html, '<strong>and easy to do anywhere, even with Python</strong>')
message.content = [
Content('text/calendar', 'Party Time!!'),
Content('text/custom', 'Party Time 2!!')
]
message.attachment = Attachment(FileContent('base64 encoded content 1'),
FileType('application/pdf'),
FileName('balance_001.pdf'),
Disposition('attachment'),
ContentId('Content ID 1'))
message.attachment = [
Attachment(FileContent('base64 encoded content 2'),
FileType('image/png'),
FileName('banner.png'),
Disposition('inline'),
ContentId('Content ID 2')),
Attachment(FileContent('base64 encoded content 3'),
FileType('image/png'),
FileName('banner2.png'),
Disposition('inline'),
ContentId('Content ID 3'))
]
message.template_id = TemplateId('13b8f94f-bcae-4ec6-b752-70d6cb59f932')
message.section = Section('%section1%', 'Substitution for Section 1 Tag')
message.section = [
Section('%section2%', 'Substitution for Section 2 Tag'),
Section('%section3%', 'Substitution for Section 3 Tag')
]
message.header = Header('X-Test9', 'Test9')
message.header = Header('X-Test10', 'Test10')
message.header = [
Header('X-Test11', 'Test11'),
Header('X-Test12', 'Test12')
]
message.category = Category('Category 1')
message.category = Category('Category 2')
message.category = [
Category('Category 1'),
Category('Category 2')
]
message.custom_arg = CustomArg('marketing5', 'false')
message.custom_arg = CustomArg('transactional5', 'true')
message.custom_arg = [
CustomArg('marketing6', 'true'),
CustomArg('transactional6', 'false')
]
message.send_at = SendAt(1461775053)
message.batch_id = BatchId("HkJ5yLYULb7Rj8GKSx7u025ouWVlMgAi")
message.asm = Asm(GroupId(1), GroupsToDisplay([1,2,3,4]))
message.ip_pool_name = IpPoolName("IP Pool Name")
mail_settings = MailSettings()
mail_settings.bcc_settings = BccSettings(False, BccSettingsEmail("bcc@twilio.com"))
mail_settings.bypass_list_management = BypassListManagement(False)
mail_settings.footer_settings = FooterSettings(True, FooterText("w00t"), FooterHtml("<strong>w00t!</strong>"))
mail_settings.sandbox_mode = SandBoxMode(True)
mail_settings.spam_check = SpamCheck(True, SpamThreshold(5), SpamUrl("https://example.com"))
message.mail_settings = mail_settings
tracking_settings = TrackingSettings()
tracking_settings.click_tracking = ClickTracking(True, False)
tracking_settings.open_tracking = OpenTracking(True, OpenTrackingSubstitutionTag("open_tracking"))
tracking_settings.subscription_tracking = SubscriptionTracking(
True,
SubscriptionText("Goodbye"),
SubscriptionHtml("<strong>Goodbye!</strong>"),
SubscriptionSubstitutionTag("unsubscribe"))
tracking_settings.ganalytics = Ganalytics(
True,
UtmSource("utm_source"),
UtmMedium("utm_medium"),
UtmTerm("utm_term"),
UtmContent("utm_content"),
UtmCampaign("utm_campaign"))
message.tracking_settings = tracking_settings
try:
sendgrid_client = SendGridAPIClient(api_key=os.environ.get('SENDGRID_API_KEY'))
print(json.dumps(message.get(), sort_keys=True, indent=4))
response = sendgrid_client.send(message=message)
print(response.status_code)
print(response.body)
print(response.headers)
except SendGridException as e:
print(e.message)
## Send a Single Email to a Single Recipient with a Dynamic Template
import os
import json
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail, From, To, Subject, PlainTextContent, HtmlContent, SendGridException, DynamicTemplateData
message = Mail(from_email=From('dx@sendgrid.com', 'DX'),
to_emails=To('elmer.thomas@sendgrid.com', 'Elmer Thomas'),
subject=Subject('Sending with SendGrid is Fun'),
plain_text_content=PlainTextContent('and easy to do anywhere, even with Python'),
html_content=HtmlContent('<strong>and easy to do anywhere, even with Python</strong>'))
message.dynamic_template_data = DynamicTemplateData({
"total":"$ 239.85",
"items":[
{
"text":"New Line Sneakers",
"image":"https://marketing-image-production.s3.amazonaws.com/uploads/8dda1131320a6d978b515cc04ed479df259a458d5d45d58b6b381cae0bf9588113e80ef912f69e8c4cc1ef1a0297e8eefdb7b270064cc046b79a44e21b811802.png",
"price":"$ 79.95"
},
{
"text":"Old Line Sneakers",
"image":"https://marketing-image-production.s3.amazonaws.com/uploads/3629f54390ead663d4eb7c53702e492de63299d7c5f7239efdc693b09b9b28c82c924225dcd8dcb65732d5ca7b7b753c5f17e056405bbd4596e4e63a96ae5018.png",
"price":"$ 79.95"
},
{
"text":"Blue Line Sneakers",
"image":"https://marketing-image-production.s3.amazonaws.com/uploads/00731ed18eff0ad5da890d876c456c3124a4e44cb48196533e9b95fb2b959b7194c2dc7637b788341d1ff4f88d1dc88e23f7e3704726d313c57f350911dd2bd0.png",
"price":"$ 79.95"
}
],
"receipt":True,
"name":"Sample Name",
"address01":"1234 Fake St.",
"address02":"Apt. 123",
"city":"Place",
"state":"CO",
"zip":"80202"
})
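# The dynamic template fields above are only rendered when the message also
# references a dynamic transactional template. A minimal sketch, assuming a
# placeholder template id (replace it with a real 'd-...' id from your account):
from sendgrid.helpers.mail import TemplateId
message.template_id = TemplateId('d-your-dynamic-template-id')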
try:
print(json.dumps(message.get(), sort_keys=True, indent=4))
sendgrid_client = SendGridAPIClient(api_key=os.environ.get('SENDGRID_API_KEY'))
response = sendgrid_client.send(message=message)
print(response.status_code)
print(response.body)
print(response.headers)
except SendGridException as e:
print(e.message)
| 37.70028 | 215 | 0.692919 |
12dfc180b0b8b699aa8d60c6a9c4d59ade498f22 | 66,593 | py | Python | pylinkvalidator/included/bs4/element.py | airsource/pylinkvalidator | 5a52fea64ecdb867a3390a97c023765265d88d23 | ["MIT"] | 127 | 2015-07-06T03:19:23.000Z | 2022-03-14T18:34:11.000Z | pylinkvalidator/included/bs4/element.py | airsource/pylinkvalidator | 5a52fea64ecdb867a3390a97c023765265d88d23 | ["MIT"] | 32 | 2015-07-06T03:18:46.000Z | 2020-12-14T13:14:23.000Z | pylinkvalidator/included/bs4/element.py | airsource/pylinkvalidator | 5a52fea64ecdb867a3390a97c023765265d88d23 | ["MIT"] | 36 | 2015-08-06T18:44:53.000Z | 2022-01-09T12:38:29.000Z |
import sys
if sys.version_info[0] < 3:
import collections
import re
import sys
import warnings
from pylinkvalidator.included.bs4.dammit import EntitySubstitution
DEFAULT_OUTPUT_ENCODING = "utf-8"
PY3K = (sys.version_info[0] > 2)
whitespace_re = re.compile("\s+")
def _alias(attr):
"""Alias one attribute name to another for backward compatibility"""
@property
def alias(self):
return getattr(self, attr)
@alias.setter
    def alias(self, value):
        return setattr(self, attr, value)
return alias
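# Usage sketch (illustrative; mirrors the BS3 aliases defined later in this
# file, e.g. ``nextSibling = _alias("next_sibling")``): defining the alias at
# class level lets the old camelCase name read and write through to the new
# snake_case attribute.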
class NamespacedAttribute(unicode):
def __new__(cls, prefix, name, namespace=None):
if name is None:
obj = unicode.__new__(cls, prefix)
elif prefix is None:
# Not really namespaced.
obj = unicode.__new__(cls, name)
else:
obj = unicode.__new__(cls, prefix + ":" + name)
obj.prefix = prefix
obj.name = name
obj.namespace = namespace
return obj
class AttributeValueWithCharsetSubstitution(unicode):
"""A stand-in object for a character encoding specified in HTML."""
class CharsetMetaAttributeValue(AttributeValueWithCharsetSubstitution):
"""A generic stand-in for the value of a meta tag's 'charset' attribute.
When Beautiful Soup parses the markup '<meta charset="utf8">', the
value of the 'charset' attribute will be one of these objects.
"""
def __new__(cls, original_value):
obj = unicode.__new__(cls, original_value)
obj.original_value = original_value
return obj
def encode(self, encoding):
return encoding
class ContentMetaAttributeValue(AttributeValueWithCharsetSubstitution):
"""A generic stand-in for the value of a meta tag's 'content' attribute.
When Beautiful Soup parses the markup:
<meta http-equiv="content-type" content="text/html; charset=utf8">
The value of the 'content' attribute will be one of these objects.
"""
CHARSET_RE = re.compile("((^|;)\s*charset=)([^;]*)", re.M)
def __new__(cls, original_value):
match = cls.CHARSET_RE.search(original_value)
if match is None:
# No substitution necessary.
return unicode.__new__(unicode, original_value)
obj = unicode.__new__(cls, original_value)
obj.original_value = original_value
return obj
def encode(self, encoding):
def rewrite(match):
return match.group(1) + encoding
return self.CHARSET_RE.sub(rewrite, self.original_value)
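    # Illustration (an assumed example, not taken from the original source):
    # encoding a parsed value rewrites only the charset portion, e.g.
    #   ContentMetaAttributeValue('text/html; charset=utf-8').encode('euc-jp')
    #   == 'text/html; charset=euc-jp'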
class HTMLAwareEntitySubstitution(EntitySubstitution):
"""Entity substitution rules that are aware of some HTML quirks.
Specifically, the contents of <script> and <style> tags should not
undergo entity substitution.
Incoming NavigableString objects are checked to see if they're the
direct children of a <script> or <style> tag.
"""
cdata_containing_tags = set(["script", "style"])
preformatted_tags = set(["pre"])
@classmethod
def _substitute_if_appropriate(cls, ns, f):
if (isinstance(ns, NavigableString)
and ns.parent is not None
and ns.parent.name in cls.cdata_containing_tags):
# Do nothing.
return ns
# Substitute.
return f(ns)
@classmethod
def substitute_html(cls, ns):
return cls._substitute_if_appropriate(
ns, EntitySubstitution.substitute_html)
@classmethod
def substitute_xml(cls, ns):
return cls._substitute_if_appropriate(
ns, EntitySubstitution.substitute_xml)
class PageElement(object):
"""Contains the navigational information for some part of the page
(either a tag or a piece of text)"""
# There are five possible values for the "formatter" argument passed in
# to methods like encode() and prettify():
#
# "html" - All Unicode characters with corresponding HTML entities
# are converted to those entities on output.
# "minimal" - Bare ampersands and angle brackets are converted to
# XML entities: & < >
# None - The null formatter. Unicode characters are never
# converted to entities. This is not recommended, but it's
# faster than "minimal".
# A function - This function will be called on every string that
# needs to undergo entity substitution.
#
# In an HTML document, the default "html" and "minimal" functions
# will leave the contents of <script> and <style> tags alone. For
# an XML document, all tags will be given the same treatment.
HTML_FORMATTERS = {
"html" : HTMLAwareEntitySubstitution.substitute_html,
"minimal" : HTMLAwareEntitySubstitution.substitute_xml,
None : None
}
XML_FORMATTERS = {
"html" : EntitySubstitution.substitute_html,
"minimal" : EntitySubstitution.substitute_xml,
None : None
}
def format_string(self, s, formatter='minimal'):
"""Format the given string using the given formatter."""
if not callable(formatter):
formatter = self._formatter_for_name(formatter)
if formatter is None:
output = s
else:
output = formatter(s)
return output
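    # Illustration (an assumed example): on an HTML tree, calling
    # format_string(u'AT&T', 'minimal') escapes the bare ampersand and returns
    # u'AT&amp;T'; with formatter=None the string is returned unchanged.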
@property
def _is_xml(self):
"""Is this element part of an XML tree or an HTML tree?
This is used when mapping a formatter name ("minimal") to an
appropriate function (one that performs entity-substitution on
the contents of <script> and <style> tags, or not). It's
inefficient, but it should be called very rarely.
"""
if self.parent is None:
# This is the top-level object. It should have .is_xml set
# from tree creation. If not, take a guess--BS is usually
# used on HTML markup.
return getattr(self, 'is_xml', False)
return self.parent._is_xml
def _formatter_for_name(self, name):
"Look up a formatter function based on its name and the tree."
if self._is_xml:
return self.XML_FORMATTERS.get(
name, EntitySubstitution.substitute_xml)
else:
return self.HTML_FORMATTERS.get(
name, HTMLAwareEntitySubstitution.substitute_xml)
def setup(self, parent=None, previous_element=None):
"""Sets up the initial relations between this element and
other elements."""
self.parent = parent
self.previous_element = previous_element
if previous_element is not None:
self.previous_element.next_element = self
self.next_element = None
self.previous_sibling = None
self.next_sibling = None
if self.parent is not None and self.parent.contents:
self.previous_sibling = self.parent.contents[-1]
self.previous_sibling.next_sibling = self
nextSibling = _alias("next_sibling") # BS3
previousSibling = _alias("previous_sibling") # BS3
def replace_with(self, replace_with):
if replace_with is self:
return
if replace_with is self.parent:
raise ValueError("Cannot replace a Tag with its parent.")
old_parent = self.parent
my_index = self.parent.index(self)
self.extract()
old_parent.insert(my_index, replace_with)
return self
replaceWith = replace_with # BS3
def unwrap(self):
my_parent = self.parent
my_index = self.parent.index(self)
self.extract()
for child in reversed(self.contents[:]):
my_parent.insert(my_index, child)
return self
replace_with_children = unwrap
replaceWithChildren = unwrap # BS3
def wrap(self, wrap_inside):
me = self.replace_with(wrap_inside)
wrap_inside.append(me)
return wrap_inside
def extract(self):
"""Destructively rips this element out of the tree."""
if self.parent is not None:
del self.parent.contents[self.parent.index(self)]
#Find the two elements that would be next to each other if
#this element (and any children) hadn't been parsed. Connect
#the two.
last_child = self._last_descendant()
next_element = last_child.next_element
if self.previous_element is not None:
self.previous_element.next_element = next_element
if next_element is not None:
next_element.previous_element = self.previous_element
self.previous_element = None
last_child.next_element = None
self.parent = None
if self.previous_sibling is not None:
self.previous_sibling.next_sibling = self.next_sibling
if self.next_sibling is not None:
self.next_sibling.previous_sibling = self.previous_sibling
self.previous_sibling = self.next_sibling = None
return self
def _last_descendant(self):
"Finds the last element beneath this object to be parsed."
last_child = self
while hasattr(last_child, 'contents') and last_child.contents:
last_child = last_child.contents[-1]
return last_child
# BS3: Not part of the API!
_lastRecursiveChild = _last_descendant
def insert(self, position, new_child):
if new_child is self:
raise ValueError("Cannot insert a tag into itself.")
if (isinstance(new_child, basestring)
and not isinstance(new_child, NavigableString)):
new_child = NavigableString(new_child)
position = min(position, len(self.contents))
if hasattr(new_child, 'parent') and new_child.parent is not None:
# We're 'inserting' an element that's already one
# of this object's children.
if new_child.parent is self:
current_index = self.index(new_child)
if current_index < position:
# We're moving this element further down the list
# of this object's children. That means that when
# we extract this element, our target index will
# jump down one.
position -= 1
new_child.extract()
new_child.parent = self
previous_child = None
if position == 0:
new_child.previous_sibling = None
new_child.previous_element = self
else:
previous_child = self.contents[position - 1]
new_child.previous_sibling = previous_child
new_child.previous_sibling.next_sibling = new_child
new_child.previous_element = previous_child._last_descendant()
if new_child.previous_element is not None:
new_child.previous_element.next_element = new_child
new_childs_last_element = new_child._last_descendant()
if position >= len(self.contents):
new_child.next_sibling = None
parent = self
parents_next_sibling = None
while parents_next_sibling is None and parent is not None:
parents_next_sibling = parent.next_sibling
parent = parent.parent
if parents_next_sibling is not None:
# We found the element that comes next in the document.
break
if parents_next_sibling is not None:
new_childs_last_element.next_element = parents_next_sibling
else:
# The last element of this tag is the last element in
# the document.
new_childs_last_element.next_element = None
else:
next_child = self.contents[position]
new_child.next_sibling = next_child
if new_child.next_sibling is not None:
new_child.next_sibling.previous_sibling = new_child
new_childs_last_element.next_element = next_child
if new_childs_last_element.next_element is not None:
new_childs_last_element.next_element.previous_element = new_childs_last_element
self.contents.insert(position, new_child)
def append(self, tag):
"""Appends the given tag to the contents of this tag."""
self.insert(len(self.contents), tag)
def insert_before(self, predecessor):
"""Makes the given element the immediate predecessor of this one.
The two elements will have the same parent, and the given element
will be immediately before this one.
"""
if self is predecessor:
raise ValueError("Can't insert an element before itself.")
parent = self.parent
if parent is None:
raise ValueError(
"Element has no parent, so 'before' has no meaning.")
# Extract first so that the index won't be screwed up if they
# are siblings.
if isinstance(predecessor, PageElement):
predecessor.extract()
index = parent.index(self)
parent.insert(index, predecessor)
def insert_after(self, successor):
"""Makes the given element the immediate successor of this one.
The two elements will have the same parent, and the given element
will be immediately after this one.
"""
if self is successor:
raise ValueError("Can't insert an element after itself.")
parent = self.parent
if parent is None:
raise ValueError(
"Element has no parent, so 'after' has no meaning.")
# Extract first so that the index won't be screwed up if they
# are siblings.
if isinstance(successor, PageElement):
successor.extract()
index = parent.index(self)
parent.insert(index+1, successor)
def find_next(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the first item that matches the given criteria and
appears after this Tag in the document."""
return self._find_one(self.find_all_next, name, attrs, text, **kwargs)
findNext = find_next # BS3
def find_all_next(self, name=None, attrs={}, text=None, limit=None,
**kwargs):
"""Returns all items that match the given criteria and appear
after this Tag in the document."""
return self._find_all(name, attrs, text, limit, self.next_elements,
**kwargs)
findAllNext = find_all_next # BS3
def find_next_sibling(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the closest sibling to this Tag that matches the
given criteria and appears after this Tag in the document."""
return self._find_one(self.find_next_siblings, name, attrs, text,
**kwargs)
findNextSibling = find_next_sibling # BS3
def find_next_siblings(self, name=None, attrs={}, text=None, limit=None,
**kwargs):
"""Returns the siblings of this Tag that match the given
criteria and appear after this Tag in the document."""
return self._find_all(name, attrs, text, limit,
self.next_siblings, **kwargs)
findNextSiblings = find_next_siblings # BS3
fetchNextSiblings = find_next_siblings # BS2
def find_previous(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the first item that matches the given criteria and
appears before this Tag in the document."""
return self._find_one(
self.find_all_previous, name, attrs, text, **kwargs)
findPrevious = find_previous # BS3
def find_all_previous(self, name=None, attrs={}, text=None, limit=None,
**kwargs):
"""Returns all items that match the given criteria and appear
before this Tag in the document."""
return self._find_all(name, attrs, text, limit, self.previous_elements,
**kwargs)
findAllPrevious = find_all_previous # BS3
fetchPrevious = find_all_previous # BS2
def find_previous_sibling(self, name=None, attrs={}, text=None, **kwargs):
"""Returns the closest sibling to this Tag that matches the
given criteria and appears before this Tag in the document."""
return self._find_one(self.find_previous_siblings, name, attrs, text,
**kwargs)
findPreviousSibling = find_previous_sibling # BS3
def find_previous_siblings(self, name=None, attrs={}, text=None,
limit=None, **kwargs):
"""Returns the siblings of this Tag that match the given
criteria and appear before this Tag in the document."""
return self._find_all(name, attrs, text, limit,
self.previous_siblings, **kwargs)
findPreviousSiblings = find_previous_siblings # BS3
fetchPreviousSiblings = find_previous_siblings # BS2
def find_parent(self, name=None, attrs={}, **kwargs):
"""Returns the closest parent of this Tag that matches the given
criteria."""
# NOTE: We can't use _find_one because findParents takes a different
# set of arguments.
r = None
l = self.find_parents(name, attrs, 1, **kwargs)
if l:
r = l[0]
return r
findParent = find_parent # BS3
def find_parents(self, name=None, attrs={}, limit=None, **kwargs):
"""Returns the parents of this Tag that match the given
criteria."""
return self._find_all(name, attrs, None, limit, self.parents,
**kwargs)
findParents = find_parents # BS3
fetchParents = find_parents # BS2
@property
def next(self):
return self.next_element
@property
def previous(self):
return self.previous_element
#These methods do the real heavy lifting.
def _find_one(self, method, name, attrs, text, **kwargs):
r = None
l = method(name, attrs, text, 1, **kwargs)
if l:
r = l[0]
return r
def _find_all(self, name, attrs, text, limit, generator, **kwargs):
"Iterates over a generator looking for things that match."
if isinstance(name, SoupStrainer):
strainer = name
elif text is None and not limit and not attrs and not kwargs:
# Optimization to find all tags.
if name is True or name is None:
return [element for element in generator
if isinstance(element, Tag)]
# Optimization to find all tags with a given name.
elif isinstance(name, basestring):
return [element for element in generator
if isinstance(element, Tag) and element.name == name]
else:
strainer = SoupStrainer(name, attrs, text, **kwargs)
else:
# Build a SoupStrainer
strainer = SoupStrainer(name, attrs, text, **kwargs)
results = ResultSet(strainer)
while True:
try:
i = next(generator)
except StopIteration:
break
if i:
found = strainer.search(i)
if found:
results.append(found)
if limit and len(results) >= limit:
break
return results
#These generators can be used to navigate starting from both
#NavigableStrings and Tags.
@property
def next_elements(self):
i = self.next_element
while i is not None:
yield i
i = i.next_element
@property
def next_siblings(self):
i = self.next_sibling
while i is not None:
yield i
i = i.next_sibling
@property
def previous_elements(self):
i = self.previous_element
while i is not None:
yield i
i = i.previous_element
@property
def previous_siblings(self):
i = self.previous_sibling
while i is not None:
yield i
i = i.previous_sibling
@property
def parents(self):
i = self.parent
while i is not None:
yield i
i = i.parent
# Methods for supporting CSS selectors.
tag_name_re = re.compile('^[a-z0-9]+$')
# /^(\w+)\[(\w+)([=~\|\^\$\*]?)=?"?([^\]"]*)"?\]$/
# \---/ \---/\-------------/ \-------/
# | | | |
# | | | The value
# | | ~,|,^,$,* or =
# | Attribute
# Tag
attribselect_re = re.compile(
r'^(?P<tag>\w+)?\[(?P<attribute>\w+)(?P<operator>[=~\|\^\$\*]?)' +
r'=?"?(?P<value>[^\]"]*)"?\]$'
)
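    # Example (illustrative) of what the pattern above captures: for the
    # selector a[href^="http"] the groups are tag='a', attribute='href',
    # operator='^', value='http'.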
def _attr_value_as_string(self, value, default=None):
"""Force an attribute value into a string representation.
A multi-valued attribute will be converted into a
        space-separated string.
"""
value = self.get(value, default)
if isinstance(value, list) or isinstance(value, tuple):
            value = " ".join(value)
return value
def _tag_name_matches_and(self, function, tag_name):
if not tag_name:
return function
else:
def _match(tag):
return tag.name == tag_name and function(tag)
return _match
def _attribute_checker(self, operator, attribute, value=''):
"""Create a function that performs a CSS selector operation.
Takes an operator, attribute and optional value. Returns a
function that will return True for elements that match that
combination.
"""
if operator == '=':
# string representation of `attribute` is equal to `value`
return lambda el: el._attr_value_as_string(attribute) == value
elif operator == '~':
# space-separated list representation of `attribute`
# contains `value`
def _includes_value(element):
attribute_value = element.get(attribute, [])
if not isinstance(attribute_value, list):
attribute_value = attribute_value.split()
return value in attribute_value
return _includes_value
elif operator == '^':
# string representation of `attribute` starts with `value`
return lambda el: el._attr_value_as_string(
attribute, '').startswith(value)
elif operator == '$':
            # string representation of `attribute` ends with `value`
return lambda el: el._attr_value_as_string(
attribute, '').endswith(value)
elif operator == '*':
# string representation of `attribute` contains `value`
return lambda el: value in el._attr_value_as_string(attribute, '')
elif operator == '|':
# string representation of `attribute` is either exactly
# `value` or starts with `value` and then a dash.
def _is_or_starts_with_dash(element):
attribute_value = element._attr_value_as_string(attribute, '')
return (attribute_value == value or attribute_value.startswith(
value + '-'))
return _is_or_starts_with_dash
else:
return lambda el: el.has_attr(attribute)
# Old non-property versions of the generators, for backwards
# compatibility with BS3.
def nextGenerator(self):
return self.next_elements
def nextSiblingGenerator(self):
return self.next_siblings
def previousGenerator(self):
return self.previous_elements
def previousSiblingGenerator(self):
return self.previous_siblings
def parentGenerator(self):
return self.parents
class NavigableString(unicode, PageElement):
PREFIX = ''
SUFFIX = ''
def __new__(cls, value):
"""Create a new NavigableString.
When unpickling a NavigableString, this method is called with
the string in DEFAULT_OUTPUT_ENCODING. That encoding needs to be
passed in to the superclass's __new__ or the superclass won't know
how to handle non-ASCII characters.
"""
if isinstance(value, unicode):
return unicode.__new__(cls, value)
return unicode.__new__(cls, value, DEFAULT_OUTPUT_ENCODING)
def __copy__(self):
return self
def __getnewargs__(self):
return (unicode(self),)
def __getattr__(self, attr):
"""text.string gives you text. This is for backwards
compatibility for Navigable*String, but for CData* it lets you
get the string without the CData wrapper."""
if attr == 'string':
return self
else:
raise AttributeError(
"'%s' object has no attribute '%s'" % (
self.__class__.__name__, attr))
def output_ready(self, formatter="minimal"):
output = self.format_string(self, formatter)
return self.PREFIX + output + self.SUFFIX
class PreformattedString(NavigableString):
"""A NavigableString not subject to the normal formatting rules.
The string will be passed into the formatter (to trigger side effects),
but the return value will be ignored.
"""
def output_ready(self, formatter="minimal"):
"""CData strings are passed into the formatter.
But the return value is ignored."""
self.format_string(self, formatter)
return self.PREFIX + self + self.SUFFIX
class CData(PreformattedString):
PREFIX = u'<![CDATA['
SUFFIX = u']]>'
class ProcessingInstruction(PreformattedString):
PREFIX = u'<?'
SUFFIX = u'?>'
class Comment(PreformattedString):
PREFIX = u'<!--'
SUFFIX = u'-->'
class Declaration(PreformattedString):
PREFIX = u'<!'
SUFFIX = u'!>'
class Doctype(PreformattedString):
@classmethod
def for_name_and_ids(cls, name, pub_id, system_id):
value = name or ''
if pub_id is not None:
value += ' PUBLIC "%s"' % pub_id
if system_id is not None:
value += ' "%s"' % system_id
elif system_id is not None:
value += ' SYSTEM "%s"' % system_id
return Doctype(value)
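    # Illustration (assumed): Doctype.for_name_and_ids('html', None, None)
    # yields a Doctype that renders as '<!DOCTYPE html>\n' once the PREFIX and
    # SUFFIX below are applied.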
PREFIX = u'<!DOCTYPE '
SUFFIX = u'>\n'
class Tag(PageElement):
"""Represents a found HTML tag with its attributes and contents."""
def __init__(self, parser=None, builder=None, name=None, namespace=None,
prefix=None, attrs=None, parent=None, previous=None):
"Basic constructor."
if parser is None:
self.parser_class = None
else:
# We don't actually store the parser object: that lets extracted
# chunks be garbage-collected.
self.parser_class = parser.__class__
if name is None:
raise ValueError("No value provided for new tag's name.")
self.name = name
self.namespace = namespace
self.prefix = prefix
if attrs is None:
attrs = {}
elif builder.cdata_list_attributes:
attrs = builder._replace_cdata_list_attribute_values(
self.name, attrs)
else:
attrs = dict(attrs)
self.attrs = attrs
self.contents = []
self.setup(parent, previous)
self.hidden = False
# Set up any substitutions, such as the charset in a META tag.
if builder is not None:
builder.set_up_substitutions(self)
self.can_be_empty_element = builder.can_be_empty_element(name)
else:
self.can_be_empty_element = False
parserClass = _alias("parser_class") # BS3
@property
def is_empty_element(self):
"""Is this tag an empty-element tag? (aka a self-closing tag)
A tag that has contents is never an empty-element tag.
A tag that has no contents may or may not be an empty-element
tag. It depends on the builder used to create the tag. If the
builder has a designated list of empty-element tags, then only
a tag whose name shows up in that list is considered an
empty-element tag.
If the builder has no designated list of empty-element tags,
then any tag with no contents is an empty-element tag.
"""
return len(self.contents) == 0 and self.can_be_empty_element
isSelfClosing = is_empty_element # BS3
@property
def string(self):
"""Convenience property to get the single string within this tag.
:Return: If this tag has a single string child, return value
is that string. If this tag has no children, or more than one
child, return value is None. If this tag has one child tag,
return value is the 'string' attribute of the child tag,
recursively.
"""
if len(self.contents) != 1:
return None
child = self.contents[0]
if isinstance(child, NavigableString):
return child
return child.string
@string.setter
def string(self, string):
self.clear()
self.append(string.__class__(string))
def _all_strings(self, strip=False, types=(NavigableString, CData)):
"""Yield all strings of certain classes, possibly stripping them.
By default, yields only NavigableString and CData objects. So
no comments, processing instructions, etc.
"""
for descendant in self.descendants:
if (
(types is None and not isinstance(descendant, NavigableString))
or
(types is not None and type(descendant) not in types)):
continue
if strip:
descendant = descendant.strip()
if len(descendant) == 0:
continue
yield descendant
strings = property(_all_strings)
@property
def stripped_strings(self):
for string in self._all_strings(True):
yield string
def get_text(self, separator=u"", strip=False,
types=(NavigableString, CData)):
"""
Get all child strings, concatenated using the given separator.
"""
return separator.join([s for s in self._all_strings(
strip, types=types)])
getText = get_text
text = property(get_text)
def decompose(self):
"""Recursively destroys the contents of this tree."""
self.extract()
i = self
while i is not None:
next = i.next_element
i.__dict__.clear()
i.contents = []
i = next
def clear(self, decompose=False):
"""
Extract all children. If decompose is True, decompose instead.
"""
if decompose:
for element in self.contents[:]:
if isinstance(element, Tag):
element.decompose()
else:
element.extract()
else:
for element in self.contents[:]:
element.extract()
def index(self, element):
"""
Find the index of a child by identity, not value. Avoids issues with
tag.contents.index(element) getting the index of equal elements.
"""
for i, child in enumerate(self.contents):
if child is element:
return i
raise ValueError("Tag.index: element not in tag")
def get(self, key, default=None):
"""Returns the value of the 'key' attribute for the tag, or
the value given for 'default' if it doesn't have that
attribute."""
return self.attrs.get(key, default)
def has_attr(self, key):
return key in self.attrs
def __hash__(self):
return str(self).__hash__()
def __getitem__(self, key):
"""tag[key] returns the value of the 'key' attribute for the tag,
and throws an exception if it's not there."""
return self.attrs[key]
def __iter__(self):
"Iterating over a tag iterates over its contents."
return iter(self.contents)
def __len__(self):
"The length of a tag is the length of its list of contents."
return len(self.contents)
def __contains__(self, x):
return x in self.contents
def __nonzero__(self):
"A tag is non-None even if it has no contents."
return True
def __setitem__(self, key, value):
"""Setting tag[key] sets the value of the 'key' attribute for the
tag."""
self.attrs[key] = value
def __delitem__(self, key):
"Deleting tag[key] deletes all 'key' attributes for the tag."
self.attrs.pop(key, None)
def __call__(self, *args, **kwargs):
"""Calling a tag like a function is the same as calling its
find_all() method. Eg. tag('a') returns a list of all the A tags
found within this tag."""
return self.find_all(*args, **kwargs)
def __getattr__(self, tag):
#print "Getattr %s.%s" % (self.__class__, tag)
if len(tag) > 3 and tag.endswith('Tag'):
# BS3: soup.aTag -> "soup.find("a")
tag_name = tag[:-3]
warnings.warn(
'.%sTag is deprecated, use .find("%s") instead.' % (
tag_name, tag_name))
return self.find(tag_name)
# We special case contents to avoid recursion.
elif not tag.startswith("__") and not tag=="contents":
return self.find(tag)
raise AttributeError(
"'%s' object has no attribute '%s'" % (self.__class__, tag))
def __eq__(self, other):
"""Returns true iff this tag has the same name, the same attributes,
and the same contents (recursively) as the given tag."""
if self is other:
return True
if (not hasattr(other, 'name') or
not hasattr(other, 'attrs') or
not hasattr(other, 'contents') or
self.name != other.name or
self.attrs != other.attrs or
len(self) != len(other)):
return False
for i, my_child in enumerate(self.contents):
if my_child != other.contents[i]:
return False
return True
def __ne__(self, other):
"""Returns true iff this tag is not identical to the other tag,
as defined in __eq__."""
return not self == other
def __repr__(self, encoding=DEFAULT_OUTPUT_ENCODING):
"""Renders this tag as a string."""
return self.encode(encoding)
def __unicode__(self):
return self.decode()
def __str__(self):
return self.encode()
if PY3K:
__str__ = __repr__ = __unicode__
def encode(self, encoding=DEFAULT_OUTPUT_ENCODING,
indent_level=None, formatter="minimal",
errors="xmlcharrefreplace"):
# Turn the data structure into Unicode, then encode the
# Unicode.
u = self.decode(indent_level, encoding, formatter)
return u.encode(encoding, errors)
def _should_pretty_print(self, indent_level):
"""Should this tag be pretty-printed?"""
return (
indent_level is not None and
(self.name not in HTMLAwareEntitySubstitution.preformatted_tags
or self._is_xml))
def decode(self, indent_level=None,
eventual_encoding=DEFAULT_OUTPUT_ENCODING,
formatter="minimal"):
"""Returns a Unicode representation of this tag and its contents.
:param eventual_encoding: The tag is destined to be
encoded into this encoding. This method is _not_
responsible for performing that encoding. This information
is passed in so that it can be substituted in if the
document contains a <META> tag that mentions the document's
encoding.
"""
# First off, turn a string formatter into a function. This
# will stop the lookup from happening over and over again.
if not callable(formatter):
formatter = self._formatter_for_name(formatter)
attrs = []
if self.attrs:
for key, val in sorted(self.attrs.items()):
if val is None:
decoded = key
else:
if isinstance(val, list) or isinstance(val, tuple):
val = ' '.join(val)
elif not isinstance(val, basestring):
val = unicode(val)
elif (
isinstance(val, AttributeValueWithCharsetSubstitution)
and eventual_encoding is not None):
val = val.encode(eventual_encoding)
text = self.format_string(val, formatter)
decoded = (
unicode(key) + '='
+ EntitySubstitution.quoted_attribute_value(text))
attrs.append(decoded)
close = ''
closeTag = ''
prefix = ''
if self.prefix:
prefix = self.prefix + ":"
if self.is_empty_element:
close = '/'
else:
closeTag = '</%s%s>' % (prefix, self.name)
pretty_print = self._should_pretty_print(indent_level)
space = ''
indent_space = ''
if indent_level is not None:
indent_space = (' ' * (indent_level - 1))
if pretty_print:
space = indent_space
indent_contents = indent_level + 1
else:
indent_contents = None
contents = self.decode_contents(
indent_contents, eventual_encoding, formatter)
if self.hidden:
# This is the 'document root' object.
s = contents
else:
s = []
attribute_string = ''
if attrs:
attribute_string = ' ' + ' '.join(attrs)
if indent_level is not None:
# Even if this particular tag is not pretty-printed,
# we should indent up to the start of the tag.
s.append(indent_space)
s.append('<%s%s%s%s>' % (
prefix, self.name, attribute_string, close))
if pretty_print:
s.append("\n")
s.append(contents)
if pretty_print and contents and contents[-1] != "\n":
s.append("\n")
if pretty_print and closeTag:
s.append(space)
s.append(closeTag)
if indent_level is not None and closeTag and self.next_sibling:
# Even if this particular tag is not pretty-printed,
# we're now done with the tag, and we should add a
# newline if appropriate.
s.append("\n")
s = ''.join(s)
return s
def prettify(self, encoding=None, formatter="minimal"):
if encoding is None:
return self.decode(True, formatter=formatter)
else:
return self.encode(encoding, True, formatter=formatter)
def decode_contents(self, indent_level=None,
eventual_encoding=DEFAULT_OUTPUT_ENCODING,
formatter="minimal"):
"""Renders the contents of this tag as a Unicode string.
:param eventual_encoding: The tag is destined to be
encoded into this encoding. This method is _not_
responsible for performing that encoding. This information
is passed in so that it can be substituted in if the
document contains a <META> tag that mentions the document's
encoding.
"""
# First off, turn a string formatter into a function. This
# will stop the lookup from happening over and over again.
if not callable(formatter):
formatter = self._formatter_for_name(formatter)
pretty_print = (indent_level is not None)
s = []
for c in self:
text = None
if isinstance(c, NavigableString):
text = c.output_ready(formatter)
elif isinstance(c, Tag):
s.append(c.decode(indent_level, eventual_encoding,
formatter))
if text and indent_level and not self.name == 'pre':
text = text.strip()
if text:
if pretty_print and not self.name == 'pre':
s.append(" " * (indent_level - 1))
s.append(text)
if pretty_print and not self.name == 'pre':
s.append("\n")
return ''.join(s)
def encode_contents(
self, indent_level=None, encoding=DEFAULT_OUTPUT_ENCODING,
formatter="minimal"):
"""Renders the contents of this tag as a bytestring."""
contents = self.decode_contents(indent_level, encoding, formatter)
return contents.encode(encoding)
# Old method for BS3 compatibility
def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING,
prettyPrint=False, indentLevel=0):
if not prettyPrint:
indentLevel = None
return self.encode_contents(
indent_level=indentLevel, encoding=encoding)
#Soup methods
def find(self, name=None, attrs={}, recursive=True, text=None,
**kwargs):
"""Return only the first child of this Tag matching the given
criteria."""
r = None
l = self.find_all(name, attrs, recursive, text, 1, **kwargs)
if l:
r = l[0]
return r
findChild = find
def find_all(self, name=None, attrs={}, recursive=True, text=None,
limit=None, **kwargs):
"""Extracts a list of Tag objects that match the given
criteria. You can specify the name of the Tag and any
attributes you want the Tag to have.
The value of a key-value pair in the 'attrs' map can be a
string, a list of strings, a regular expression object, or a
callable that takes a string and returns whether or not the
string matches for some custom definition of 'matches'. The
same is true of the tag name."""
generator = self.descendants
if not recursive:
generator = self.children
return self._find_all(name, attrs, text, limit, generator, **kwargs)
findAll = find_all # BS3
findChildren = find_all # BS2
#Generator methods
@property
def children(self):
# return iter() to make the purpose of the method clear
return iter(self.contents) # XXX This seems to be untested.
@property
def descendants(self):
if not len(self.contents):
return
stopNode = self._last_descendant().next_element
current = self.contents[0]
while current is not stopNode:
yield current
current = current.next_element
# CSS selector code
_selector_combinators = ['>', '+', '~']
_select_debug = False
def select(self, selector, _candidate_generator=None):
"""Perform a CSS selection operation on the current element."""
tokens = selector.split()
current_context = [self]
if tokens[-1] in self._selector_combinators:
raise ValueError(
'Final combinator "%s" is missing an argument.' % tokens[-1])
if self._select_debug:
print 'Running CSS selector "%s"' % selector
for index, token in enumerate(tokens):
if self._select_debug:
print ' Considering token "%s"' % token
recursive_candidate_generator = None
tag_name = None
if tokens[index-1] in self._selector_combinators:
# This token was consumed by the previous combinator. Skip it.
if self._select_debug:
print ' Token was consumed by the previous combinator.'
continue
# Each operation corresponds to a checker function, a rule
# for determining whether a candidate matches the
# selector. Candidates are generated by the active
# iterator.
checker = None
m = self.attribselect_re.match(token)
if m is not None:
# Attribute selector
tag_name, attribute, operator, value = m.groups()
checker = self._attribute_checker(operator, attribute, value)
elif '#' in token:
# ID selector
tag_name, tag_id = token.split('#', 1)
def id_matches(tag):
return tag.get('id', None) == tag_id
checker = id_matches
elif '.' in token:
# Class selector
tag_name, klass = token.split('.', 1)
classes = set(klass.split('.'))
def classes_match(candidate):
return classes.issubset(candidate.get('class', []))
checker = classes_match
elif ':' in token:
# Pseudo-class
tag_name, pseudo = token.split(':', 1)
if tag_name == '':
raise ValueError(
"A pseudo-class must be prefixed with a tag name.")
pseudo_attributes = re.match('([a-zA-Z\d-]+)\(([a-zA-Z\d]+)\)', pseudo)
found = []
if pseudo_attributes is not None:
pseudo_type, pseudo_value = pseudo_attributes.groups()
if pseudo_type == 'nth-of-type':
try:
pseudo_value = int(pseudo_value)
except:
raise NotImplementedError(
'Only numeric values are currently supported for the nth-of-type pseudo-class.')
if pseudo_value < 1:
raise ValueError(
'nth-of-type pseudo-class value must be at least 1.')
class Counter(object):
def __init__(self, destination):
self.count = 0
self.destination = destination
def nth_child_of_type(self, tag):
self.count += 1
if self.count == self.destination:
return True
if self.count > self.destination:
# Stop the generator that's sending us
# these things.
raise StopIteration()
return False
checker = Counter(pseudo_value).nth_child_of_type
else:
raise NotImplementedError(
'Only the following pseudo-classes are implemented: nth-of-type.')
elif token == '*':
# Star selector -- matches everything
pass
elif token == '>':
# Run the next token as a CSS selector against the
# direct children of each tag in the current context.
recursive_candidate_generator = lambda tag: tag.children
elif token == '~':
# Run the next token as a CSS selector against the
# siblings of each tag in the current context.
recursive_candidate_generator = lambda tag: tag.next_siblings
elif token == '+':
# For each tag in the current context, run the next
# token as a CSS selector against the tag's next
# sibling that's a tag.
def next_tag_sibling(tag):
yield tag.find_next_sibling(True)
recursive_candidate_generator = next_tag_sibling
elif self.tag_name_re.match(token):
# Just a tag name.
tag_name = token
else:
raise ValueError(
'Unsupported or invalid CSS selector: "%s"' % token)
if recursive_candidate_generator:
# This happens when the selector looks like "> foo".
#
# The generator calls select() recursively on every
# member of the current context, passing in a different
# candidate generator and a different selector.
#
# In the case of "> foo", the candidate generator is
# one that yields a tag's direct children (">"), and
# the selector is "foo".
next_token = tokens[index+1]
def recursive_select(tag):
if self._select_debug:
print ' Calling select("%s") recursively on %s %s' % (next_token, tag.name, tag.attrs)
print '-' * 40
for i in tag.select(next_token, recursive_candidate_generator):
if self._select_debug:
print '(Recursive select picked up candidate %s %s)' % (i.name, i.attrs)
yield i
if self._select_debug:
print '-' * 40
_use_candidate_generator = recursive_select
elif _candidate_generator is None:
# By default, a tag's candidates are all of its
# children. If tag_name is defined, only yield tags
# with that name.
if self._select_debug:
                    if tag_name:
                        check = tag_name
                    else:
                        check = "[any]"
print ' Default candidate generator, tag name="%s"' % check
if self._select_debug:
# This is redundant with later code, but it stops
# a bunch of bogus tags from cluttering up the
# debug log.
def default_candidate_generator(tag):
for child in tag.descendants:
if not isinstance(child, Tag):
continue
if tag_name and not child.name == tag_name:
continue
yield child
_use_candidate_generator = default_candidate_generator
else:
_use_candidate_generator = lambda tag: tag.descendants
else:
_use_candidate_generator = _candidate_generator
new_context = []
new_context_ids = set([])
for tag in current_context:
if self._select_debug:
print " Running candidate generator on %s %s" % (
tag.name, repr(tag.attrs))
for candidate in _use_candidate_generator(tag):
if not isinstance(candidate, Tag):
continue
if tag_name and candidate.name != tag_name:
continue
if checker is not None:
try:
result = checker(candidate)
except StopIteration:
# The checker has decided we should no longer
# run the generator.
break
if checker is None or result:
if self._select_debug:
print " SUCCESS %s %s" % (candidate.name, repr(candidate.attrs))
if id(candidate) not in new_context_ids:
# If a tag matches a selector more than once,
# don't include it in the context more than once.
new_context.append(candidate)
new_context_ids.add(id(candidate))
elif self._select_debug:
print " FAILURE %s %s" % (candidate.name, repr(candidate.attrs))
current_context = new_context
if self._select_debug:
print "Final verdict:"
for i in current_context:
print " %s %s" % (i.name, i.attrs)
return current_context
# Old names for backwards compatibility
def childGenerator(self):
return self.children
def recursiveChildGenerator(self):
return self.descendants
def has_key(self, key):
"""This was kind of misleading because has_key() (attributes)
was different from __in__ (contents). has_key() is gone in
Python 3, anyway."""
warnings.warn('has_key is deprecated. Use has_attr("%s") instead.' % (
key))
return self.has_attr(key)
# Next, a couple classes to represent queries and their results.
class SoupStrainer(object):
"""Encapsulates a number of ways of matching a markup element (tag or
text)."""
def __init__(self, name=None, attrs={}, text=None, **kwargs):
self.name = self._normalize_search_value(name)
if not isinstance(attrs, dict):
# Treat a non-dict value for attrs as a search for the 'class'
# attribute.
kwargs['class'] = attrs
attrs = None
if 'class_' in kwargs:
# Treat class_="foo" as a search for the 'class'
# attribute, overriding any non-dict value for attrs.
kwargs['class'] = kwargs['class_']
del kwargs['class_']
if kwargs:
if attrs:
attrs = attrs.copy()
attrs.update(kwargs)
else:
attrs = kwargs
normalized_attrs = {}
for key, value in attrs.items():
normalized_attrs[key] = self._normalize_search_value(value)
self.attrs = normalized_attrs
self.text = self._normalize_search_value(text)
def _normalize_search_value(self, value):
# Leave it alone if it's a Unicode string, a callable, a
# regular expression, a boolean, or None.
if (isinstance(value, unicode) or callable(value) or hasattr(value, 'match')
or isinstance(value, bool) or value is None):
return value
# If it's a bytestring, convert it to Unicode, treating it as UTF-8.
if isinstance(value, bytes):
return value.decode("utf8")
# If it's listlike, convert it into a list of strings.
if hasattr(value, '__iter__'):
new_value = []
for v in value:
if (hasattr(v, '__iter__') and not isinstance(v, bytes)
and not isinstance(v, unicode)):
# This is almost certainly the user's mistake. In the
# interests of avoiding infinite loops, we'll let
# it through as-is rather than doing a recursive call.
new_value.append(v)
else:
new_value.append(self._normalize_search_value(v))
return new_value
# Otherwise, convert it into a Unicode string.
# The unicode(str()) thing is so this will do the same thing on Python 2
# and Python 3.
return unicode(str(value))
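    # Illustration (assumed examples): _normalize_search_value(b"div") returns
    # u"div"; a list such as [b"a", b"b"] becomes [u"a", u"b"]; Unicode strings,
    # compiled regexes, callables, booleans and None pass through unchanged.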
def __str__(self):
if self.text:
return self.text
else:
return "%s|%s" % (self.name, self.attrs)
def search_tag(self, markup_name=None, markup_attrs={}):
found = None
markup = None
if isinstance(markup_name, Tag):
markup = markup_name
markup_attrs = markup
call_function_with_tag_data = (
isinstance(self.name, collections.Callable)
and not isinstance(markup_name, Tag))
if ((not self.name)
or call_function_with_tag_data
or (markup and self._matches(markup, self.name))
or (not markup and self._matches(markup_name, self.name))):
if call_function_with_tag_data:
match = self.name(markup_name, markup_attrs)
else:
match = True
markup_attr_map = None
for attr, match_against in list(self.attrs.items()):
if not markup_attr_map:
if hasattr(markup_attrs, 'get'):
markup_attr_map = markup_attrs
else:
markup_attr_map = {}
for k, v in markup_attrs:
markup_attr_map[k] = v
attr_value = markup_attr_map.get(attr)
if not self._matches(attr_value, match_against):
match = False
break
if match:
if markup:
found = markup
else:
found = markup_name
if found and self.text and not self._matches(found.string, self.text):
found = None
return found
searchTag = search_tag
def search(self, markup):
# print 'looking for %s in %s' % (self, markup)
found = None
# If given a list of items, scan it for a text element that
# matches.
if hasattr(markup, '__iter__') and not isinstance(markup, (Tag, basestring)):
for element in markup:
if isinstance(element, NavigableString) \
and self.search(element):
found = element
break
# If it's a Tag, make sure its name or attributes match.
# Don't bother with Tags if we're searching for text.
elif isinstance(markup, Tag):
if not self.text or self.name or self.attrs:
found = self.search_tag(markup)
# If it's text, make sure the text matches.
elif isinstance(markup, NavigableString) or \
isinstance(markup, basestring):
if not self.name and not self.attrs and self._matches(markup, self.text):
found = markup
else:
raise Exception(
"I don't know how to match against a %s" % markup.__class__)
return found
def _matches(self, markup, match_against):
# print u"Matching %s against %s" % (markup, match_against)
result = False
if isinstance(markup, list) or isinstance(markup, tuple):
# This should only happen when searching a multi-valued attribute
# like 'class'.
if (isinstance(match_against, unicode)
and ' ' in match_against):
# A bit of a special case. If they try to match "foo
# bar" on a multivalue attribute's value, only accept
# the literal value "foo bar"
#
# XXX This is going to be pretty slow because we keep
# splitting match_against. But it shouldn't come up
# too often.
return (whitespace_re.split(match_against) == markup)
else:
for item in markup:
if self._matches(item, match_against):
return True
return False
if match_against is True:
# True matches any non-None value.
return markup is not None
if isinstance(match_against, collections.Callable):
return match_against(markup)
# Custom callables take the tag as an argument, but all
# other ways of matching match the tag name as a string.
if isinstance(markup, Tag):
markup = markup.name
# Ensure that `markup` is either a Unicode string, or None.
markup = self._normalize_search_value(markup)
if markup is None:
# None matches None, False, an empty string, an empty list, and so on.
return not match_against
if isinstance(match_against, unicode):
# Exact string match
return markup == match_against
if hasattr(match_against, 'match'):
# Regexp match
return match_against.search(markup)
if hasattr(match_against, '__iter__'):
# The markup must be an exact match against something
# in the iterable.
return markup in match_against
class ResultSet(list):
"""A ResultSet is just a list that keeps track of the SoupStrainer
that created it."""
def __init__(self, source):
list.__init__([])
self.source = source
| 41.542732 | 117 | 0.528149 |
2e9e1648a52c645156673c4d8a46e6b35acfb485 | 425 | py | Python | samples/get_users.py | voximplant/apiclient-python | 860fc054f546152a101e29b1af388c381075ac47 | ["MIT"] | 8 | 2019-06-11T07:46:38.000Z | 2021-04-02T17:37:08.000Z | samples/get_users.py | NikolasMelui/apiclient-python | c94caacc07e2cdc11edd022c6f9dff3feffe8edc | ["MIT"] | 5 | 2019-06-05T18:34:40.000Z | 2020-07-23T10:45:19.000Z | samples/get_users.py | NikolasMelui/apiclient-python | c94caacc07e2cdc11edd022c6f9dff3feffe8edc | ["MIT"] | 6 | 2019-07-09T05:59:35.000Z | 2021-07-08T22:37:59.000Z |
from voximplant.apiclient import VoximplantAPI, VoximplantException
if __name__ == "__main__":
voxapi = VoximplantAPI("credentials.json")
    # Get the first two users of the application.
APPLICATION_ID = 1
COUNT = 2
try:
res = voxapi.get_users(application_id=APPLICATION_ID,
count=COUNT)
print(res)
except VoximplantException as e:
print("Error: {}".format(e.message))
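    # Pagination sketch (assumes the Management API 'offset' parameter is exposed
    # by this client; verify against your apiclient-python version before use):
    #
    #     res = voxapi.get_users(application_id=APPLICATION_ID, count=COUNT, offset=COUNT)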
| 23.611111 | 67 | 0.644706 |
9454e92c1dbe7163af41e1f364c044175b824dd6 | 7,040 | py | Python | examples_python/interactive_xy.py | notpeter/AxiDraw_API | d9c35eb93fd85f96cf197908415822af9a725b41 | ["MIT"] | null | null | null | examples_python/interactive_xy.py | notpeter/AxiDraw_API | d9c35eb93fd85f96cf197908415822af9a725b41 | ["MIT"] | 3 | 2021-01-17T04:31:57.000Z | 2021-01-17T04:36:41.000Z | examples_python/interactive_xy.py | notpeter/AxiDraw_API | d9c35eb93fd85f96cf197908415822af9a725b41 | ["MIT"] | null | null | null |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
'''
interactive_xy.py
Demonstrate use of axidraw module in "interactive" mode.
Run this demo by calling: python interactive_xy.py
(There is also a separate "plot" mode, which can be used for plotting an
SVG file, rather than moving to various points upon command.)
AxiDraw python API documentation is hosted at: https://axidraw.com/doc/py_api/
'''
'''
About this software:
The AxiDraw writing and drawing machine is a product of Evil Mad Scientist
Laboratories. https://axidraw.com https://shop.evilmadscientist.com
This open source software is written and maintained by Evil Mad Scientist
to support AxiDraw users across a wide range of applications. Please help
support Evil Mad Scientist and open source software development by purchasing
genuine AxiDraw hardware.
AxiDraw software development is hosted at https://github.com/evil-mad/axidraw
Additional AxiDraw documentation is available at http://axidraw.com/docs
AxiDraw owners may request technical support for this software through our
github issues page, support forums, or by contacting us directly at:
https://shop.evilmadscientist.com/contact
Copyright 2020 Windell H. Oskay, Evil Mad Scientist Laboratories
The MIT License (MIT)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
'''
Interactive mode is a mode of use, designed for plotting individual motion
segments upon request. It is a complement to the usual plotting modes, which
take an SVG document as input.
So long as the AxiDraw is started in the home corner, moves are limit checked,
and constrained to be within the safe travel range of the AxiDraw.
Recommended usage:
ad = axidraw.AxiDraw() # Initialize class
ad.interactive() # Enter interactive mode
[Optional: Apply custom settings]
ad.connect() # Open serial port to AxiDraw
[One or more motion commands]
[Optional: Update settings, followed by calling update().]
ad.disconnect() # Close connection to AxiDraw
The motion commands are as follows:
goto(x,y) # Absolute XY move to new location
moveto(x,y) # Absolute XY pen-up move. Lift pen before moving, if it is down.
lineto(x,y) # Absolute XY pen-down move. Lower pen before moving, if it is up.
go(x,y) # XY relative move.
move(x,y) # XY relative pen-up move. Lift pen before moving, if it is down.
line(x,y) # XY relative pen-down move. Lower pen before moving, if it is up.
penup() # lift pen
pendown() # lower pen
Utility commands:
interactive() # Enter interactive mode
connect() # Open serial connection to AxiDraw. Returns True if connected successfully.
update() # Apply changes to options
disable() # Disable XY motors, for example to manually move carriage to home position.
disconnect() # Terminate serial session to AxiDraw. (Required.)
The available options are as follows:
options.speed_pendown # Range: 1-110 (percent).
options.speed_penup # Range: 1-110 (percent).
options.accel # Range: 1-100 (percent).
options.pen_pos_down # Range: 0-100 (percent).
options.pen_pos_up # Range: 0-100 (percent).
options.pen_rate_lower # Range: 1-100 (percent).
options.pen_rate_raise # Range: 1-100 (percent).
options.pen_delay_down # Range: -500 - 500 (ms).
options.pen_delay_up # Range: -500 - 500 (ms).
options.const_speed # True or False. Default: False
options.units # Range: 0-1. 0: Inches (default), 1: cm
options.model # Range: 1-3. 1: AxiDraw V2 or V3 ( Default)
# 2: AxiDraw V3/A3
# 3: AxiDraw V3 XLX
options.port # String: Port name or USB nickname
options.port_config # Range: 0-1. 0: Plot to first unit found, unless port specified. (Default)
# 1: Plot to first unit found
One or more options can be set after the interactive() call, and before connect()
for example as:
ad.options.speed_pendown = 75
All options except port and port_config can be changed after connect(). However,
you must call update() after changing the options and before calling any
additional motion commands.
'''
import sys
from pyaxidraw import axidraw
ad = axidraw.AxiDraw() # Initialize class
ad.interactive() # Enter interactive mode
connected = ad.connect() # Open serial port to AxiDraw
if not connected:
sys.exit() # end script
# Draw square, using "moveto/lineto" (absolute move) syntax:
ad.moveto(1,1) # Absolute pen-up move, to (1 inch, 1 inch)
ad.lineto(2,1) # Absolute pen-down move, to (2 inches, 1 inch)
ad.lineto(2,2)
ad.lineto(1,2)
ad.lineto(1,1) # Finish drawing square
ad.moveto(0,0) # Absolute pen-up move, back to origin.
# Change some options:
ad.options.units = 1 # set working units to cm.
ad.options.speed_pendown = 10 # set pen-down speed to slow
ad.update() # Process changes to options
# Draw an "X" through the square, using "move/line" (relative move) syntax:
# Note that we have just changed the units to be in cm.
ad.move(5.08,5.08) # Relative move to (2 inches,2 inches), in cm
ad.line(-2.54,-2.54) # Relative move 2.54 cm in X and Y
ad.move(0,2.54)
ad.line(2.54,-2.54) # Relative move 2.54 cm in X and Y
ad.moveto(0,0) # Return home
# Change some options, just to show how we do so:
ad.options.units = 0 # set working units back to inches.
ad.options.speed_pendown = 75 # set pen-down speed to fast
ad.options.pen_rate_lower = 10 # Set pen down very slowly
ad.update() # Process changes to options
# Draw a "+" through the square, using "go/goto" commands,
# which do not automatically set the pen up or down:
ad.goto(1.5,1.0)
ad.pendown()
ad.go(0,1)
ad.penup()
ad.goto(1.0,1.5)
ad.pendown()
ad.go(1,0)
ad.penup()
ad.goto(0,0) # Return home
ad.disconnect() # Close serial port to AxiDraw
| 32.442396
| 101
| 0.699148
|
5decb5e3f5cbaf569d68ed5f97b6cfd7ed323324
| 842
|
py
|
Python
|
three-connector/events.py
|
ukonpower/three-connector
|
9583312c4955000da7d16b7535ffac9c4dd37ab6
|
[
"MIT"
] | 3
|
2022-03-26T09:36:26.000Z
|
2022-03-30T00:41:57.000Z
|
three-connector/events.py
|
ukonpower/three-connector
|
9583312c4955000da7d16b7535ffac9c4dd37ab6
|
[
"MIT"
] | 1
|
2022-03-22T00:56:39.000Z
|
2022-03-22T00:56:39.000Z
|
three-connector/events.py
|
ukonpower/three-connector
|
9583312c4955000da7d16b7535ffac9c4dd37ab6
|
[
"MIT"
] | null | null | null |
import bpy;
from bpy.app.handlers import persistent
from .managers.fcurve import FCurveManager
from .operators.export_gltf import THREECONNECTOR_OT_ExportGLTF
@persistent
def on_load(scene = None, context = None):
FCurveManager.update()
@persistent
def on_save(scene = None, context = None):
THREECONNECTOR_OT_ExportGLTF.on_save()
@persistent
def on_depsgraph_update(scene = None, context = None):
FCurveManager.update()
def register():
bpy.app.handlers.load_post.append(on_load)
bpy.app.handlers.save_post.append(on_save)
bpy.app.handlers.depsgraph_update_post.append(on_depsgraph_update)
def unregister():
try:
bpy.app.handlers.load_post.remove(on_load)
bpy.app.handlers.save_post.remove(on_save)
bpy.app.handlers.depsgraph_update_post.remove(on_depsgraph_update)
except ValueError:
pass
| 27.16129
| 69
| 0.776722
|
81e5e2d1ce3ea14536c2151274d5002615372b21
| 4,380
|
py
|
Python
|
test/UniformPDF.py
|
zoidy/puq
|
ed70f113f7c135ee61adeebfc9bd18c541970caf
|
[
"MIT"
] | null | null | null |
test/UniformPDF.py
|
zoidy/puq
|
ed70f113f7c135ee61adeebfc9bd18c541970caf
|
[
"MIT"
] | null | null | null |
test/UniformPDF.py
|
zoidy/puq
|
ed70f113f7c135ee61adeebfc9bd18c541970caf
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
'''
Testsuite for the UniformPDF class
'''
import numpy as np
from puq import *
import scipy.stats as stats
def _hisplot(y, nbins):
    n, bins = np.histogram(y, nbins, density=True)
mids = bins[:-1] + np.diff(bins) / 2.0
return mids, n
def compare_curves(x1, y1, x2, y2, **args):
ay = np.interp(x2, x1, y1)
rmse = np.sqrt(np.sum((ay - y2)**2))
    print("maximum difference is", np.max(np.abs(ay - y2)))
    print("RMSE=%s" % rmse)
#assert rmse < .002
assert np.allclose(ay, y2, **args)
def _test_updf(min, max):
options['pdf']['samples'] = 1000
c = UniformPDF(min=min, max=max)
assert isinstance(c, PDF)
x = c.x
y = stats.uniform(min, max-min).pdf(x)
rmse = np.sqrt(np.sum((c.y - y)**2))
    print("RMSE=%s" % rmse)
    print("MaxError=", np.max(abs(c.y - y)))
assert rmse < 1e-11
def _test_ucdf(min, max):
options['pdf']['samples'] = 1000
c = UniformPDF(min=min, max=max)
cdfy = stats.uniform(min, max-min).cdf(c.x)
rmse = np.sqrt(np.sum((c.cdfy - cdfy)**2))
    print("RMSE=%s" % rmse)
    print("MaxError=", np.max(abs(c.cdfy - cdfy)))
assert rmse < 1e-11
"""
import matplotlib.pyplot as plt
plt.plot(c.x, c.cdfy, color='green')
plt.plot(c.x, cdfy, color='red')
plt.show()
"""
# test mean, min, max and deviation
def _test_uniform_minmeanmax(min, mean, max):
c = UniformPDF(min=min, mean=mean, max=max)
cmin, cmax = c.range
    print("min=%s mean=%s max=%s" % (cmin, c.mean, cmax))
if min != None:
assert min == cmin
else:
assert cmin == mean - (max - mean)
if max != None:
assert max == cmax
else:
assert cmax == mean + (mean - min)
if mean != None:
assert np.allclose(mean, c.mean)
else:
assert np.allclose(c.mean, (min + max) / 2.0)
# test lhs()
def _test_uniform_lhs(min, max):
c = UniformPDF(min=min, max=max)
# test the lhs() function to see if the curve it generates is
# close enough
data = c.ds(10000)
assert len(data) == 10000
assert np.min(data) >= min
assert np.max(data) <= max
dx,dy = _hisplot(data, 20)
x = dx
y = stats.uniform(min, max-min).pdf(x)
compare_curves(x, y, dx, dy, atol=.0001)
"""
import matplotlib.pyplot as plt
plt.plot(x, y, color='red')
plt.plot(dx, dy, color='blue')
plt.show()
"""
assert np.allclose(c.mean, np.mean(data), rtol=.001), 'mean=%s' % np.mean(data)
# test lhs1()
def _test_uniform_lhs1(min, max):
c = UniformPDF(min=min, max=max)
data = c.ds1(1000)
xs = data
assert len(xs) == 1000
    assert (min, max) == c.range
# scale [-1,1] back to original size
mean = (min + max)/2.0
xs *= max - mean
xs += mean
dx, dy = _hisplot(xs, 20)
x = dx
y = stats.uniform(min, max-min).pdf(x)
compare_curves(x, y, dx, dy, atol=.001)
"""
import matplotlib.pyplot as plt
plt.plot(x, y, color='green')
plt.plot(dx, dy, color='blue')
plt.show()
"""
assert np.allclose(c.mean, np.mean(data), rtol=.001), 'mean=%s' % np.mean(data)
def _test_uniform_random(min, max):
c = UniformPDF(min=min, max=max)
data = c.random(1000000)
assert len(data) == 1000000
dx,dy = _hisplot(data, 20)
x = dx
y = stats.uniform(min, max-min).pdf(x)
compare_curves(x, y, dx, dy, atol=.02)
assert np.min(data) >= min
assert np.max(data) <= max
"""
import matplotlib.pyplot as plt
plt.plot(dx, dy, color='red')
plt.plot(x, y, color='blue')
plt.show()
"""
assert np.allclose(c.mean, np.mean(data), rtol=.001), 'mean=%s' % np.mean(data)
def test_updf():
_test_updf(10,20)
_test_updf(-20,-10)
def test_ucdf():
_test_ucdf(100,105)
_test_ucdf(-1,2)
def test_uniform_minmeanmax():
_test_uniform_minmeanmax(0,None,20)
_test_uniform_minmeanmax(None,0.5,2)
_test_uniform_minmeanmax(5,10,15)
_test_uniform_minmeanmax(5,10,None)
def test_uniform_lhs():
_test_uniform_lhs(10,20)
_test_uniform_lhs(-100, -50)
def test_uniform_lhs1():
_test_uniform_lhs1(10,20)
_test_uniform_lhs1(-100, -50)
def test_uniform_random():
_test_uniform_random(10,20)
if __name__ == "__main__":
test_updf()
test_ucdf()
test_uniform_minmeanmax()
test_uniform_lhs()
test_uniform_lhs1()
test_uniform_random()
| 24.886364
| 83
| 0.603196
|
0129e133debdd0bb7a9efc7213f0153f83e22422
| 2,220
|
py
|
Python
|
src/chembl_core_model/models/general.py
|
BNext-IQT/chembl_webservices_py3
|
42ccb39f0121835ca7ee9ac8ddd04cb513998079
|
[
"Apache-2.0"
] | 5
|
2020-10-23T11:56:59.000Z
|
2021-06-05T16:30:10.000Z
|
src/chembl_core_model/models/general.py
|
BNext-IQT/chembl_webservices_py3
|
42ccb39f0121835ca7ee9ac8ddd04cb513998079
|
[
"Apache-2.0"
] | 9
|
2020-02-11T08:01:40.000Z
|
2021-06-10T19:41:03.000Z
|
src/chembl_core_model/models/general.py
|
BNext-IQT/chembl_webservices_py3
|
42ccb39f0121835ca7ee9ac8ddd04cb513998079
|
[
"Apache-2.0"
] | 4
|
2020-02-11T10:45:22.000Z
|
2021-06-07T01:48:02.000Z
|
__author__ = 'mnowotka'
from chembl_core_db.db.customFields import *
from chembl_core_db.db.models.abstractModel import ChemblCoreAbstractModel
from chembl_core_db.db.models.abstractModel import ChemblModelMetaClass
from django.utils import six
# ----------------------------------------------------------------------------------------------------------------------
class Version(six.with_metaclass(ChemblModelMetaClass, ChemblCoreAbstractModel)):
name = models.CharField(primary_key=True, max_length=20, help_text='Name of release version')
creation_date = ChemblDateField(blank=True, null=True, help_text='Date database created')
comments = models.CharField(max_length=2000, blank=True, null=True, help_text='Description of release version')
class Meta(ChemblCoreAbstractModel.Meta):
pass
# ----------------------------------------------------------------------------------------------------------------------
class ChemblIdLookup(six.with_metaclass(ChemblModelMetaClass, ChemblCoreAbstractModel)):
ENTITY_TYPE_CHOICES = (
('ASSAY', 'ASSAY'),
('CELL', 'CELL'),
('COMPOUND', 'COMPOUND'),
('DOCUMENT', 'DOCUMENT'),
('TARGET', 'TARGET'),
('TISSUE', 'TISSUE'),
)
STATUS_CHOICES = (
('ACTIVE', 'ACTIVE'),
('INACTIVE', 'INACTIVE'),
('OBS', 'OBS'),
)
chembl_id = models.CharField(primary_key=True, max_length=20, help_text='ChEMBL identifier')
entity_type = models.CharField(max_length=50, choices=ENTITY_TYPE_CHOICES, help_text='Type of entity (e.g., COMPOUND, ASSAY, TARGET)')
entity_id = ChemblIntegerField(length=9, help_text='Primary key for that entity in corresponding table (e.g., molregno for compounds, tid for targets)')
status = models.CharField(max_length=10, default='ACTIVE', choices=STATUS_CHOICES, help_text='Indicates whether the status of the entity within the database - ACTIVE, INACTIVE (downgraded), OBS (obsolete/removed).')
class Meta(ChemblCoreAbstractModel.Meta):
unique_together = (("entity_id", "entity_type"),)
# ----------------------------------------------------------------------------------------------------------------------
| 43.529412
| 219
| 0.598198
|
5b46edac2953d91449e5ded97c4d7d216352df1f
| 1,543
|
py
|
Python
|
data/dataloaders/loaders/d10_1038_s41467_021_24110_y/homosapiens_dermis_2021_10x3v2_deng_001.py
|
johnmous/sfaira
|
c50240a74530e614ab7681bf9c63b04cb815b361
|
[
"BSD-3-Clause"
] | null | null | null |
data/dataloaders/loaders/d10_1038_s41467_021_24110_y/homosapiens_dermis_2021_10x3v2_deng_001.py
|
johnmous/sfaira
|
c50240a74530e614ab7681bf9c63b04cb815b361
|
[
"BSD-3-Clause"
] | null | null | null |
data/dataloaders/loaders/d10_1038_s41467_021_24110_y/homosapiens_dermis_2021_10x3v2_deng_001.py
|
johnmous/sfaira
|
c50240a74530e614ab7681bf9c63b04cb815b361
|
[
"BSD-3-Clause"
] | null | null | null |
import anndata
import gzip
import os
import pandas as pd
import scipy.io
import tarfile
sample_dict = {
'GSM4994379_KL1': "KF1",
'GSM4994380_KL2': "KF2",
'GSM4994381_KL3': "KF3",
'GSM4994382_NS1': "NF1",
'GSM4994383_NS2': "NF2",
'GSM4994384_NS3': "NF3",
}
def load(data_dir, sample_fn, **kwargs):
fn = os.path.join(data_dir, "GSE163973_RAW.tar")
with tarfile.open(fn) as tar:
with tarfile.open(fileobj=tar.extractfile(sample_fn + "_matrix.tar.gz")) as tar2:
with gzip.open(tar2.extractfile(f"{tar2.getnames()[0]}/matrix.mtx.gz"), "rb") as mm:
x = scipy.io.mmread(mm).T.tocsr()
obs = pd.read_csv(tar2.extractfile(f"{tar2.getnames()[0]}/barcodes.tsv.gz"), compression="gzip", header=None, sep="\t", index_col=0)
var = pd.read_csv(tar2.extractfile(f"{tar2.getnames()[0]}/features.tsv.gz"), compression="gzip", header=None, sep="\t")
obs.index.name = None
obs.index = [f"{i.split('-')[0]}_{sample_dict[sample_fn]}" for i in obs.index]
var.columns = ["ensembl", "symbol", "feature_class"]
var.index = var["symbol"].values
adata = anndata.AnnData(X=x, obs=obs, var=var)
meta = pd.read_csv(os.path.join(data_dir, "GSE163973_integrate.all.NS.all.KL_cell.meta.data.csv.gz"), index_col=0)
meta.index = [f"{ind.split('_')[0]}_{meta['orig.ident'].iloc[j]}" for j, ind in enumerate(meta.index)]
meta = meta[meta["orig.ident"] == sample_dict[sample_fn]]
adata = adata[meta.index].copy()
adata.obs = meta
return adata
| 40.605263
| 144
| 0.645496
|
0bcd2334242a2a4303f99f4b4b3fe5d77c6dd774
| 4,391
|
py
|
Python
|
data_preprocessing/plot/counties_plot.py
|
facebookresearch/Context-Aware-Representation-Crop-Yield-Prediction
|
9c29459e9521303f40d9d6aaa938da0c23ab4ad8
|
[
"MIT"
] | 12
|
2020-09-17T21:55:18.000Z
|
2022-01-14T21:05:23.000Z
|
data_preprocessing/plot/counties_plot.py
|
hulaba/Context-Aware-Representation-Crop-Yield-Prediction
|
9c29459e9521303f40d9d6aaa938da0c23ab4ad8
|
[
"MIT"
] | null | null | null |
data_preprocessing/plot/counties_plot.py
|
hulaba/Context-Aware-Representation-Crop-Yield-Prediction
|
9c29459e9521303f40d9d6aaa938da0c23ab4ad8
|
[
"MIT"
] | 5
|
2020-10-10T10:18:14.000Z
|
2021-12-21T07:36:27.000Z
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from bs4 import BeautifulSoup
from pathlib import Path
import matplotlib as mpl
import matplotlib.pyplot as plt
import pandas as pd
from collections import defaultdict
import numpy as np
import seaborn as sns
# colors = sns.color_palette("RdYlBu", 10).as_hex()
colors = ['#cdeaf3', '#9bcce2', '#fff1aa', '#fece7f', '#fa9b58', '#ee613e', '#d22b27']
def counties_plot(data_dict, savepath, quantiles):
"""
For the most part, reformatting of
https://github.com/JiaxuanYou/crop_yield_prediction/blob/master/6%20result_analysis/yield_map.py
"""
# load the svg file
svg = Path('../../processed_data/counties/counties.svg').open('r').read()
# Load into Beautiful Soup
soup = BeautifulSoup(svg, features="html.parser")
# Find counties
paths = soup.findAll('path')
path_style = 'font-size:12px;fill-rule:nonzero;stroke:#FFFFFF;stroke-opacity:1;stroke-width:0.1' \
';stroke-miterlimit:4;stroke-dasharray:none;stroke-linecap:butt;marker-start' \
':none;stroke-linejoin:bevel;fill:'
for p in paths:
if p['id'] not in ["State_Lines", "separator"]:
try:
rate = data_dict[p['id']]
except KeyError:
continue
if rate > quantiles[0.95]:
color_class = 6
elif rate > quantiles[0.8]:
color_class = 5
elif rate > quantiles[0.6]:
color_class = 4
elif rate > quantiles[0.4]:
color_class = 3
elif rate > quantiles[0.2]:
color_class = 2
elif rate > quantiles[0.05]:
color_class = 1
else:
color_class = 0
color = colors[color_class]
p['style'] = path_style + color
soup = soup.prettify()
with savepath.open('w') as f:
f.write(soup)
def save_colorbar(savedir, quantiles):
"""
For the most part, reformatting of
https://github.com/JiaxuanYou/crop_yield_prediction/blob/master/6%20result_analysis/yield_map.py
"""
fig = plt.figure()
ax = fig.add_axes([0.1, 0.1, 0.02, 0.8])
cmap = mpl.colors.ListedColormap(colors[1:-1])
cmap.set_over(colors[-1])
cmap.set_under(colors[0])
bounds = [quantiles[x] for x in [0.05, 0.2, 0.4, 0.6, 0.8, 0.95]]
norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
cb = mpl.colorbar.ColorbarBase(ax, cmap=cmap,
norm=norm,
# to use 'extend', you must
# specify two extra boundaries:
boundaries=[quantiles[0.0]] + bounds + [quantiles[1.0]],
extend='both',
ticks=bounds, # optional
spacing='proportional',
orientation='vertical')
plt.savefig('{}/colorbar.jpg'.format(savedir), dpi=300, bbox_inches='tight')
def process_yield_data():
important_columns = ['Year', 'State ANSI', 'County ANSI', 'Value']
yield_data = pd.read_csv('../../processed_data/crop_yield/yield_data.csv').dropna(
subset=important_columns, how='any')[['Year', 'State ANSI', 'County ANSI', 'Value']]
yield_data.columns = ['Year', 'State', 'County', 'Value']
yield_per_year_dic = defaultdict(dict)
for yd in yield_data.itertuples():
year, state, county, value = yd.Year, yd.State, int(yd.County), yd.Value
state = str(state).zfill(2)
county = str(county).zfill(3)
yield_per_year_dic[year][state+county] = value
return yield_per_year_dic
if __name__ == '__main__':
yield_data = process_yield_data()
for year in range(2003, 2017):
        values = np.array(list(yield_data[year].values()))
        # counties_plot() and save_colorbar() require the quantile thresholds, so build them first.
        quantiles = {q: np.percentile(values, q * 100)
                     for q in (0.0, 0.05, 0.2, 0.4, 0.6, 0.8, 0.95, 1.0)}
        counties_plot(yield_data[year],
                      Path('../../processed_data/crop_yield/plots/{}_yield.html'.format(year)),
                      quantiles)
        print(year, np.percentile(values, 0), np.percentile(values, 25), np.percentile(values, 50),
              np.percentile(values, 75), np.percentile(values, 100))
    save_colorbar('../../processed_data/crop_yield/plots', quantiles)
| 37.211864
| 113
| 0.592348
|
3c8b39a76fe36f52bcfe0452b7b638423a302fd0
| 1,514
|
py
|
Python
|
ioos_metrics.py
|
MathewBiddle/ioos_by_the_numbers
|
635d44b1dbedb196eea20d8c469eee8d39fbdbcc
|
[
"MIT"
] | null | null | null |
ioos_metrics.py
|
MathewBiddle/ioos_by_the_numbers
|
635d44b1dbedb196eea20d8c469eee8d39fbdbcc
|
[
"MIT"
] | null | null | null |
ioos_metrics.py
|
MathewBiddle/ioos_by_the_numbers
|
635d44b1dbedb196eea20d8c469eee8d39fbdbcc
|
[
"MIT"
] | null | null | null |
import datetime as dt
import pandas as pd
from fiscalyear import *
# gather FY start/end dates for previous quarter
fq = FiscalQuarter.current().prev_fiscal_quarter
start_date = fq.start.strftime('%Y-%m-%d')
end_date = fq.end.strftime('%Y-%m-%d')
start = dt.datetime.strptime(start_date,'%Y-%m-%d')
end = dt.datetime.strptime(end_date,'%Y-%m-%d')
# build an array for days between dates
date_array = (start + dt.timedelta(days=x) for x in range(0, (end - start).days))
# get a unique list of year-months for url build
months=[]
for date_object in date_array:
months.append(date_object.strftime("%Y-%m"))
months = sorted(set(months))
df = pd.DataFrame(columns=['locationID', 'region', 'sponsor', 'met', 'wave'])
for month in months:
url = 'https://www.ndbc.noaa.gov/ioosstats/rpts/%s_ioos_regional.csv' % month.replace("-","_")
print('Loading %s' % url)
df1 = pd.read_csv(url, dtype={'met':float, 'wave':float})
df1['time (UTC)'] = pd.to_datetime(month)
df = pd.concat([df,df1])
df["time (UTC)"] = pd.to_datetime(df["time (UTC)"])
# Remove time-zone info for easier plotting, it is all UTC.
df["time (UTC)"] = df["time (UTC)"].dt.tz_localize(None)
groups = df.groupby(pd.Grouper(key="time (UTC)", freq="M"))
s = groups[['time (UTC)','met','wave']].sum() # reducing the columns so the summary is digestible
totals = s.assign(total=s["met"] + s["wave"])
totals.index = totals.index.to_period("M")
fname = 'gts/GTS_totals_FY%s_Q%s.csv' % (fq.fiscal_year,fq.fiscal_quarter)
totals.to_csv(fname)
| 35.209302
| 97
| 0.687583
|
c41a098037ecd852ef12dd9f252ed586e21e1cad
| 748
|
py
|
Python
|
local_server/test/fixtures/dataset_config_outline.py
|
cellgeni/cellxgene
|
2c0a36d6aea1e0f469c613105a363f4eff53abfd
|
[
"MIT"
] | 3
|
2019-11-11T15:41:07.000Z
|
2020-12-14T08:47:35.000Z
|
local_server/test/fixtures/dataset_config_outline.py
|
cellgeni/cellxgene
|
2c0a36d6aea1e0f469c613105a363f4eff53abfd
|
[
"MIT"
] | null | null | null |
local_server/test/fixtures/dataset_config_outline.py
|
cellgeni/cellxgene
|
2c0a36d6aea1e0f469c613105a363f4eff53abfd
|
[
"MIT"
] | 1
|
2021-05-12T15:15:05.000Z
|
2021-05-12T15:15:05.000Z
|
f"""
dataset:
app:
scripts: {scripts} #list of strs (filenames) or dicts containing keys
inline_scripts: {inline_scripts} #list of strs (filenames)
authentication_enable: {authentication_enable}
presentation:
max_categories: {max_categories}
custom_colors: {custom_colors}
user_annotations:
enable: {enable_users_annotations}
type: {annotation_type}
local_file_csv:
directory: {local_file_csv_directory}
file: {local_file_csv_file}
ontology:
enable: {ontology_enabled}
obo_location: {obo_location}
embeddings:
names: {embedding_names}
enable_reembedding: {enable_reembedding}
diffexp:
enable: {enable_difexp}
lfc_cutoff: {lfc_cutoff}
top_n: {top_n}
"""
| 23.375
| 73
| 0.709893
|
0edb2bf7b6c2a4d60ef24f4759c3b54d846f79af
| 582
|
py
|
Python
|
coinflip.py
|
Nahom2162002/pythonprograms
|
e0acc561e568aa1e67f9d80908d43780209e50f0
|
[
"MIT"
] | null | null | null |
coinflip.py
|
Nahom2162002/pythonprograms
|
e0acc561e568aa1e67f9d80908d43780209e50f0
|
[
"MIT"
] | null | null | null |
coinflip.py
|
Nahom2162002/pythonprograms
|
e0acc561e568aa1e67f9d80908d43780209e50f0
|
[
"MIT"
] | null | null | null |
import random
def main():
userguess = input("Let's do a coin flip, heads or tails?")
flip = random.randint(1, 10)
if flip >= 1 and flip <= 5:
result = "heads"
else:
result = "tails"
correct = False
while correct == False:
if userguess == result:
correct = True
print(result)
print("Good job, you got it")
else:
            print("Nope, it was ", end = "")
            print(result)
            # Re-prompt so the loop can end instead of repeating the same message forever.
            userguess = input("Try again, heads or tails?")
if __name__ == "__main__":
main()
| 24.25
| 63
| 0.491409
|
608fe9f1fea1bbf2e74f87c91dac0908c64f13e9
| 31,677
|
py
|
Python
|
conans/test/integration/command/info/info_test.py
|
Wonders11/conan
|
28ec09f6cbf1d7e27ec27393fd7bbc74891e74a8
|
[
"MIT"
] | 6,205
|
2015-12-01T13:40:05.000Z
|
2022-03-31T07:30:25.000Z
|
conans/test/integration/command/info/info_test.py
|
Wonders11/conan
|
28ec09f6cbf1d7e27ec27393fd7bbc74891e74a8
|
[
"MIT"
] | 8,747
|
2015-12-01T16:28:48.000Z
|
2022-03-31T23:34:53.000Z
|
conans/test/integration/command/info/info_test.py
|
Wonders11/conan
|
28ec09f6cbf1d7e27ec27393fd7bbc74891e74a8
|
[
"MIT"
] | 961
|
2015-12-01T16:56:43.000Z
|
2022-03-31T13:50:52.000Z
|
import json
import os
import textwrap
import unittest
from datetime import datetime
from conans import __version__ as client_version
from conans.test.utils.tools import TestClient, GenConanfile, NO_SETTINGS_PACKAGE_ID
from conans.util.files import save, load
class InfoTest(unittest.TestCase):
def _create(self, name, version, deps=None, export=True):
conanfile = textwrap.dedent("""
from conans import ConanFile
class Pkg(ConanFile):
name = "{name}"
version = "{version}"
license = {license}
description = "blah"
url = "myurl"
{requires}
""")
requires = ""
if deps:
requires = "requires = {}".format(", ".join('"{}"'.format(d) for d in deps))
conanfile = conanfile.format(name=name, version=version, requires=requires,
license='"MIT"')
self.client.save({"conanfile.py": conanfile}, clean_first=True)
if export:
self.client.run("export . lasote/stable")
def test_graph(self):
self.client = TestClient()
test_deps = {
"Hello0": ["Hello1", "Hello2", "Hello3"],
"Hello1": ["Hello4"],
"Hello2": [],
"Hello3": ["Hello7"],
"Hello4": ["Hello5", "Hello6"],
"Hello5": [],
"Hello6": [],
"Hello7": ["Hello8"],
"Hello8": ["Hello9", "Hello10"],
"Hello9": [],
"Hello10": [],
}
def create_export(testdeps, name):
deps = testdeps[name]
for dep in deps:
create_export(testdeps, dep)
expanded_deps = ["%s/0.1@lasote/stable" % dep for dep in deps]
export = False if name == "Hello0" else True
self._create(name, "0.1", expanded_deps, export=export)
create_export(test_deps, "Hello0")
self.client.run("info . --graph", assert_error=True)
# arbitrary case - file will be named according to argument
arg_filename = "test.dot"
self.client.run("info . --graph=%s" % arg_filename)
dot_file = os.path.join(self.client.current_folder, arg_filename)
contents = load(dot_file)
expected = textwrap.dedent("""
"Hello8/0.1@lasote/stable" -> "Hello9/0.1@lasote/stable"
"Hello8/0.1@lasote/stable" -> "Hello10/0.1@lasote/stable"
"Hello4/0.1@lasote/stable" -> "Hello5/0.1@lasote/stable"
"Hello4/0.1@lasote/stable" -> "Hello6/0.1@lasote/stable"
"Hello3/0.1@lasote/stable" -> "Hello7/0.1@lasote/stable"
"Hello7/0.1@lasote/stable" -> "Hello8/0.1@lasote/stable"
"conanfile.py (Hello0/0.1)" -> "Hello1/0.1@lasote/stable"
"conanfile.py (Hello0/0.1)" -> "Hello2/0.1@lasote/stable"
"conanfile.py (Hello0/0.1)" -> "Hello3/0.1@lasote/stable"
"Hello1/0.1@lasote/stable" -> "Hello4/0.1@lasote/stable"
""")
for line in expected.splitlines():
assert line in contents
def test_graph_html(self):
self.client = TestClient()
test_deps = {
"Hello0": ["Hello1"],
"Hello1": [],
}
def create_export(testdeps, name):
deps = testdeps[name]
for dep in deps:
create_export(testdeps, dep)
expanded_deps = ["%s/0.1@lasote/stable" % dep for dep in deps]
export = False if name == "Hello0" else True
self._create(name, "0.1", expanded_deps, export=export)
create_export(test_deps, "Hello0")
# arbitrary case - file will be named according to argument
arg_filename = "test.html"
self.client.run("info . --graph=%s" % arg_filename)
html = self.client.load(arg_filename)
self.assertIn("<body>", html)
self.assertIn("{ from: 0, to: 1 }", html)
self.assertIn("id: 0,\n label: 'Hello0/0.1',", html)
self.assertIn("Conan <b>v{}</b> <script>document.write(new Date().getFullYear())</script>"
" JFrog LTD. <a>https://conan.io</a>"
.format(client_version, datetime.today().year), html)
def test_only_names(self):
self.client = TestClient()
self._create("Hello0", "0.1")
self._create("Hello1", "0.1", ["Hello0/0.1@lasote/stable"])
self._create("Hello2", "0.1", ["Hello1/0.1@lasote/stable"], export=False)
self.client.run("info . --only None")
self.assertEqual(["Hello0/0.1@lasote/stable", "Hello1/0.1@lasote/stable",
"conanfile.py (Hello2/0.1)"],
str(self.client.out).splitlines()[-3:])
self.client.run("info . --only=date")
lines = [(line if "date" not in line else "Date")
for line in str(self.client.out).splitlines()]
self.assertEqual(["Hello0/0.1@lasote/stable", "Date",
"Hello1/0.1@lasote/stable", "Date",
"conanfile.py (Hello2/0.1)"], lines)
self.client.run("info . --only=invalid", assert_error=True)
self.assertIn("Invalid --only value", self.client.out)
self.assertNotIn("with --path specified, allowed values:", self.client.out)
self.client.run("info . --paths --only=bad", assert_error=True)
self.assertIn("Invalid --only value", self.client.out)
self.assertIn("with --path specified, allowed values:", self.client.out)
def test_info_virtual(self):
        # Checking that "Required by: virtual" doesn't appear in the output
self.client = TestClient()
self._create("Hello", "0.1")
self.client.run("info Hello/0.1@lasote/stable")
self.assertNotIn("virtual", self.client.out)
self.assertNotIn("Required", self.client.out)
def test_reuse(self):
self.client = TestClient()
self._create("Hello0", "0.1")
self._create("Hello1", "0.1", ["Hello0/0.1@lasote/stable"])
self._create("Hello2", "0.1", ["Hello1/0.1@lasote/stable"], export=False)
self.client.run("info . -u")
self.assertIn("Creation date: ", self.client.out)
self.assertIn("ID: ", self.client.out)
self.assertIn("BuildID: ", self.client.out)
expected_output = textwrap.dedent("""\
Hello0/0.1@lasote/stable
Remote: None
URL: myurl
License: MIT
Description: blah
Provides: Hello0
Recipe: No remote%s
Binary: Missing
Binary remote: None
Required by:
Hello1/0.1@lasote/stable
Hello1/0.1@lasote/stable
Remote: None
URL: myurl
License: MIT
Description: blah
Provides: Hello1
Recipe: No remote%s
Binary: Missing
Binary remote: None
Required by:
conanfile.py (Hello2/0.1)
Requires:
Hello0/0.1@lasote/stable
conanfile.py (Hello2/0.1)
URL: myurl
License: MIT
Description: blah
Provides: Hello2
Requires:
Hello1/0.1@lasote/stable""")
expected_output = expected_output % (
"\n Revision: d6727bc577b5c6bd8ac7261eff98be93"
"\n Package revision: None",
"\n Revision: 7c5e142433a3ee0acaeffb4454a6d42f"
"\n Package revision: None",) \
if self.client.cache.config.revisions_enabled else expected_output % ("", "")
def clean_output(output):
return "\n".join([line for line in str(output).splitlines()
if not line.strip().startswith("Creation date") and
not line.strip().startswith("ID") and
not line.strip().startswith("Context") and
not line.strip().startswith("BuildID") and
not line.strip().startswith("export_folder") and
not line.strip().startswith("build_folder") and
not line.strip().startswith("source_folder") and
not line.strip().startswith("package_folder")])
# The timestamp is variable so we can't check the equality
self.assertIn(expected_output, clean_output(self.client.out))
self.client.run("info . -u --only=url")
expected_output = textwrap.dedent("""\
Hello0/0.1@lasote/stable
URL: myurl
Hello1/0.1@lasote/stable
URL: myurl
conanfile.py (Hello2/0.1)
URL: myurl""")
self.assertIn(expected_output, clean_output(self.client.out))
self.client.run("info . -u --only=url --only=license")
expected_output = textwrap.dedent("""\
Hello0/0.1@lasote/stable
URL: myurl
License: MIT
Hello1/0.1@lasote/stable
URL: myurl
License: MIT
conanfile.py (Hello2/0.1)
URL: myurl
License: MIT""")
self.assertIn(expected_output, clean_output(self.client.out))
self.client.run("info . -u --only=url --only=license --only=description")
expected_output = textwrap.dedent("""\
Hello0/0.1@lasote/stable
URL: myurl
License: MIT
Description: blah
Hello1/0.1@lasote/stable
URL: myurl
License: MIT
Description: blah
conanfile.py (Hello2/0.1)
URL: myurl
License: MIT
Description: blah""")
self.assertIn(expected_output, clean_output(self.client.out))
def test_json_info_outputs(self):
self.client = TestClient()
self._create("LibA", "0.1")
self._create("LibE", "0.1")
self._create("LibF", "0.1")
self._create("LibB", "0.1", ["LibA/0.1@lasote/stable", "LibE/0.1@lasote/stable"])
self._create("LibC", "0.1", ["LibA/0.1@lasote/stable", "LibF/0.1@lasote/stable"])
self._create("LibD", "0.1", ["LibB/0.1@lasote/stable", "LibC/0.1@lasote/stable"],
export=False)
self.client.run("info . -u --json=output.json")
# Check a couple of values in the generated JSON
content = json.loads(self.client.load("output.json"))
self.assertEqual(content[0]["reference"], "LibA/0.1@lasote/stable")
self.assertEqual(content[0]["license"][0], "MIT")
self.assertEqual(content[0]["description"], "blah")
self.assertEqual(content[0]["revision"], "33574249dee63395e86d2caee3f6c638")
self.assertEqual(content[0]["package_revision"], None)
self.assertEqual(content[1]["url"], "myurl")
self.assertEqual(content[1]["required_by"][0], "conanfile.py (LibD/0.1)")
def test_build_order(self):
self.client = TestClient()
self._create("Hello0", "0.1")
self._create("Hello1", "0.1", ["Hello0/0.1@lasote/stable"])
self._create("Hello2", "0.1", ["Hello1/0.1@lasote/stable"], export=False)
self.client.run("info ./conanfile.py -bo=Hello0/0.1@lasote/stable")
self.assertIn("[Hello0/0.1@lasote/stable], [Hello1/0.1@lasote/stable]",
self.client.out)
self.client.run("info conanfile.py -bo=Hello1/0.1@lasote/stable")
self.assertIn("[Hello1/0.1@lasote/stable]", self.client.out)
self.client.run("info ./ -bo=Hello1/0.1@lasote/stable -bo=Hello0/0.1@lasote/stable")
self.assertIn("[Hello0/0.1@lasote/stable], [Hello1/0.1@lasote/stable]",
self.client.out)
self.client.run("info Hello1/0.1@lasote/stable -bo=Hello0/0.1@lasote/stable")
self.assertIn("[Hello0/0.1@lasote/stable], [Hello1/0.1@lasote/stable]\n", self.client.out)
self.client.run("info Hello1/0.1@lasote/stable -bo=Hello0/0.1@lasote/stable "
"--json=file.json")
self.assertEqual('{"groups": [["Hello0/0.1@lasote/stable"], ["Hello1/0.1@lasote/stable"]]}',
self.client.load("file.json"))
self.client.run("info Hello1/0.1@lasote/stable -bo=Hello0/0.1@lasote/stable --json")
self.assertIn('{"groups": [["Hello0/0.1@lasote/stable"], ["Hello1/0.1@lasote/stable"]]}',
self.client.out)
self.client.run("info Hello1/0.1@lasote/stable --build-order=Hello0/0.1@lasote/stable "
"--graph=index.html", assert_error=True)
self.assertIn("--build-order cannot be used together with --graph", self.client.out)
def test_diamond_build_order(self):
self.client = TestClient()
self._create("LibA", "0.1")
self._create("LibE", "0.1")
self._create("LibF", "0.1")
self._create("LibB", "0.1", ["LibA/0.1@lasote/stable", "LibE/0.1@lasote/stable"])
self._create("LibC", "0.1", ["LibA/0.1@lasote/stable", "LibF/0.1@lasote/stable"])
self._create("LibD", "0.1", ["LibB/0.1@lasote/stable", "LibC/0.1@lasote/stable"],
export=False)
self.client.run("info . -bo=LibA/0.1@lasote/stable")
self.assertIn("[LibA/0.1@lasote/stable], "
"[LibB/0.1@lasote/stable, LibC/0.1@lasote/stable]",
self.client.out)
self.client.run("info . -bo=LibB/0.1@lasote/stable")
self.assertIn("[LibB/0.1@lasote/stable]", self.client.out)
self.client.run("info . -bo=LibE/0.1@lasote/stable")
self.assertIn("[LibE/0.1@lasote/stable], [LibB/0.1@lasote/stable]",
self.client.out)
self.client.run("info . -bo=LibF/0.1@lasote/stable")
self.assertIn("[LibF/0.1@lasote/stable], [LibC/0.1@lasote/stable]",
self.client.out)
self.client.run("info . -bo=Dev1/0.1@lasote/stable")
self.assertEqual("WARN: Usage of `--build-order` argument is deprecated and can return wrong"
" results. Use `conan lock build-order ...` instead.\n\n", self.client.out)
self.client.run("info . -bo=LibG/0.1@lasote/stable")
self.assertEqual("WARN: Usage of `--build-order` argument is deprecated and can return wrong"
" results. Use `conan lock build-order ...` instead.\n\n", self.client.out)
self.client.run("info . --build-order=ALL")
self.assertIn("[LibA/0.1@lasote/stable, LibE/0.1@lasote/stable, LibF/0.1@lasote/stable], "
"[LibB/0.1@lasote/stable, LibC/0.1@lasote/stable]",
self.client.out)
self.client.run("info . --build-order=ALL")
self.assertIn("[LibA/0.1@lasote/stable, LibE/0.1@lasote/stable, "
"LibF/0.1@lasote/stable], [LibB/0.1@lasote/stable, LibC/0.1@lasote/stable]",
self.client.out)
class InfoTest2(unittest.TestCase):
def test_not_found_package_dirty_cache(self):
# Conan does a lock on the cache, and even if the package doesn't exist
        # it leaves a trailing folder with the filelocks. This test checks
# it will be cleared
client = TestClient()
client.run("info nothing/0.1@user/testing", assert_error=True)
self.assertEqual(os.listdir(client.cache.store), [])
# This used to fail in Windows, because of the different case
client.save({"conanfile.py": GenConanfile().with_name("Nothing").with_version("0.1")})
client.run("export . user/testing")
def test_failed_info(self):
client = TestClient()
client.save({"conanfile.py": GenConanfile().with_require("Pkg/1.0.x@user/testing")})
client.run("info .", assert_error=True)
self.assertIn("Pkg/1.0.x@user/testing: Not found in local cache", client.out)
client.run("search")
self.assertIn("There are no packages", client.out)
self.assertNotIn("Pkg/1.0.x@user/testing", client.out)
def test_install_folder(self):
conanfile = GenConanfile("Pkg", "0.1").with_setting("build_type")
client = TestClient()
client.save({"conanfile.py": conanfile})
client.run("info . -s build_type=Debug")
self.assertNotIn("ID: 4024617540c4f240a6a5e8911b0de9ef38a11a72", client.out)
self.assertIn("ID: 5a67a79dbc25fd0fa149a0eb7a20715189a0d988", client.out)
client.run('install . -s build_type=Debug')
client.run("info .") # Re-uses debug from curdir
self.assertNotIn("ID: 4024617540c4f240a6a5e8911b0de9ef38a11a72", client.out)
self.assertIn("ID: 5a67a79dbc25fd0fa149a0eb7a20715189a0d988", client.out)
client.run('install . -s build_type=Release --install-folder=MyInstall')
client.run("info . --install-folder=MyInstall") # Re-uses debug from MyInstall folder
self.assertIn("ID: 4024617540c4f240a6a5e8911b0de9ef38a11a72", client.out)
self.assertNotIn("ID: 5a67a79dbc25fd0fa149a0eb7a20715189a0d988", client.out)
client.run('install . -s build_type=Debug --install-folder=MyInstall')
client.run("info . --install-folder=MyInstall") # Re-uses debug from MyInstall folder
self.assertNotIn("ID: 4024617540c4f240a6a5e8911b0de9ef38a11a72", client.out)
self.assertIn("ID: 5a67a79dbc25fd0fa149a0eb7a20715189a0d988", client.out)
# Both should raise
client.run("info . --install-folder=MyInstall -s build_type=Release",
assert_error=True) # Re-uses debug from MyInstall folder
self.assertIn("--install-folder cannot be used together with a"
" host profile (-s, -o, -e, -pr or -c)", client.out)
def test_graph_html_embedded_visj(self):
client = TestClient()
visjs_path = os.path.join(client.cache_folder, "vis.min.js")
viscss_path = os.path.join(client.cache_folder, "vis.min.css")
save(visjs_path, "")
save(viscss_path, "")
client.save({"conanfile.txt": ""})
client.run("info . --graph=file.html")
html = client.load("file.html")
self.assertIn("<body>", html)
self.assertNotIn("cloudflare", html)
self.assertIn(visjs_path, html)
self.assertIn(viscss_path, html)
def test_info_build_requires(self):
client = TestClient()
client.save({"conanfile.py": GenConanfile()})
client.run("create . tool/0.1@user/channel")
client.run("create . dep/0.1@user/channel")
conanfile = GenConanfile().with_require("dep/0.1@user/channel")
client.save({"conanfile.py": conanfile})
client.run("export . Pkg/0.1@user/channel")
client.run("export . Pkg2/0.1@user/channel")
client.save({"conanfile.txt": "[requires]\nPkg/0.1@user/channel\nPkg2/0.1@user/channel",
"myprofile": "[build_requires]\ntool/0.1@user/channel"}, clean_first=True)
client.run("info . -pr=myprofile --dry-build=missing")
# Check that there is only 1 output for tool, not repeated many times
pkgs = [line for line in str(client.out).splitlines() if line.startswith("tool")]
self.assertEqual(len(pkgs), 1)
client.run("info . -pr=myprofile --dry-build=missing --graph=file.html")
html = client.load("file.html")
self.assertIn("html", html)
# To check that this node is not duplicated
self.assertEqual(1, html.count("label: 'dep/0.1'"))
self.assertIn("label: 'Pkg2/0.1',\n "
"shape: 'box',\n "
"color: { background: 'Khaki'},", html)
self.assertIn("label: 'Pkg/0.1',\n "
"shape: 'box',\n "
"color: { background: 'Khaki'},", html)
self.assertIn("label: 'tool/0.1',\n "
"shape: 'ellipse',\n "
"color: { background: 'SkyBlue'},", html)
def test_cwd(self):
client = TestClient()
conanfile = GenConanfile("Pkg", "0.1").with_setting("build_type")
client.save({"subfolder/conanfile.py": conanfile})
client.run("export ./subfolder lasote/testing")
client.run("info ./subfolder")
self.assertIn("conanfile.py (Pkg/0.1)", client.out)
client.run("info ./subfolder --build-order Pkg/0.1@lasote/testing --json=jsonfile.txt")
path = os.path.join(client.current_folder, "jsonfile.txt")
self.assertTrue(os.path.exists(path))
def test_build_order_build_requires(self):
# https://github.com/conan-io/conan/issues/3267
client = TestClient()
conanfile = str(GenConanfile())
client.save({"conanfile.py": conanfile})
client.run("create . tool/0.1@user/channel")
client.run("create . dep/0.1@user/channel")
conanfile = conanfile + 'requires = "dep/0.1@user/channel"'
client.save({"conanfile.py": conanfile})
client.run("export . Pkg/0.1@user/channel")
client.run("export . Pkg2/0.1@user/channel")
client.save({"conanfile.txt": "[requires]\nPkg/0.1@user/channel\nPkg2/0.1@user/channel",
"myprofile": "[build_requires]\ntool/0.1@user/channel"}, clean_first=True)
client.run("info . -pr=myprofile -bo=tool/0.1@user/channel")
self.assertIn("[tool/0.1@user/channel], [Pkg/0.1@user/channel, Pkg2/0.1@user/channel]",
client.out)
def test_build_order_privates(self):
# https://github.com/conan-io/conan/issues/3267
client = TestClient()
client.save({"conanfile.py": GenConanfile()})
client.run("create . tool/0.1@user/channel")
client.save({"conanfile.py": GenConanfile().with_require("tool/0.1@user/channel")})
client.run("create . dep/0.1@user/channel")
client.save({"conanfile.py": GenConanfile().with_require("dep/0.1@user/channel",
private=True)})
client.run("export . Pkg/0.1@user/channel")
client.run("export . Pkg2/0.1@user/channel")
client.save({"conanfile.txt": "[requires]\nPkg/0.1@user/channel\nPkg2/0.1@user/channel"},
clean_first=True)
client.run("info . -bo=tool/0.1@user/channel")
self.assertIn("[tool/0.1@user/channel], [dep/0.1@user/channel], "
"[Pkg/0.1@user/channel, Pkg2/0.1@user/channel]",
client.out)
def test_wrong_path_parameter(self):
client = TestClient()
client.run("info", assert_error=True)
self.assertIn("ERROR: Exiting with code: 2", client.out)
client.run("info not_real_path", assert_error=True)
self.assertIn("ERROR: Conanfile not found", client.out)
client.run("info conanfile.txt", assert_error=True)
self.assertIn("ERROR: Conanfile not found", client.out)
def test_common_attributes(self):
client = TestClient()
conanfile = GenConanfile("Pkg", "0.1").with_setting("build_type")
client.save({"subfolder/conanfile.py": conanfile})
client.run("export ./subfolder lasote/testing")
client.run("info ./subfolder")
self.assertIn("conanfile.py (Pkg/0.1)", client.out)
self.assertNotIn("License:", client.out)
self.assertNotIn("Author:", client.out)
self.assertNotIn("Topics:", client.out)
self.assertNotIn("Homepage:", client.out)
self.assertNotIn("URL:", client.out)
def test_full_attributes(self):
client = TestClient()
conanfile = textwrap.dedent("""
from conans import ConanFile
class MyTest(ConanFile):
name = "Pkg"
version = "0.2"
settings = "build_type"
author = "John Doe"
license = "MIT"
url = "https://foo.bar.baz"
homepage = "https://foo.bar.site"
topics = ("foo", "bar", "qux")
provides = ("libjpeg", "libjpg")
deprecated = "other-pkg"
""")
client.save({"subfolder/conanfile.py": conanfile})
client.run("export ./subfolder lasote/testing")
client.run("info ./subfolder")
self.assertIn("conanfile.py (Pkg/0.2)", client.out)
self.assertIn("License: MIT", client.out)
self.assertIn("Author: John Doe", client.out)
self.assertIn("Topics: foo, bar, qux", client.out)
self.assertIn("URL: https://foo.bar.baz", client.out)
self.assertIn("Homepage: https://foo.bar.site", client.out)
self.assertIn("Provides: libjpeg, libjpg", client.out)
self.assertIn("Deprecated: other-pkg", client.out)
client.run("info ./subfolder --json=output.json")
output = json.loads(client.load('output.json'))[0]
self.assertEqual(output['reference'], 'conanfile.py (Pkg/0.2)')
self.assertListEqual(output['license'], ['MIT', ])
self.assertEqual(output['author'], 'John Doe')
self.assertListEqual(output['topics'], ['foo', 'bar', 'qux'])
self.assertEqual(output['url'], 'https://foo.bar.baz')
self.assertEqual(output['homepage'], 'https://foo.bar.site')
self.assertListEqual(output['provides'], ['libjpeg', 'libjpg'])
self.assertEqual(output['deprecated'], 'other-pkg')
def test_topics_graph(self):
conanfile = textwrap.dedent("""
from conans import ConanFile
class MyTest(ConanFile):
name = "Pkg"
version = "0.2"
topics = ("foo", "bar", "qux")
""")
client = TestClient()
client.save({"conanfile.py": conanfile})
client.run("export . lasote/testing")
# Topics as tuple
client.run("info Pkg/0.2@lasote/testing --graph file.html")
html_content = client.load("file.html")
self.assertIn("<h3>Pkg/0.2@lasote/testing</h3>", html_content)
self.assertIn("<li><b>topics</b>: foo, bar, qux</li>", html_content)
# Topics as a string
conanfile = conanfile.replace("(\"foo\", \"bar\", \"qux\")", "\"foo\"")
client.save({"conanfile.py": conanfile}, clean_first=True)
client.run("export . lasote/testing")
client.run("info Pkg/0.2@lasote/testing --graph file.html")
html_content = client.load("file.html")
self.assertIn("<h3>Pkg/0.2@lasote/testing</h3>", html_content)
self.assertIn("<li><b>topics</b>: foo", html_content)
def test_wrong_graph_info(self):
# https://github.com/conan-io/conan/issues/4443
conanfile = GenConanfile().with_name("Hello").with_version("0.1")
client = TestClient()
client.save({"conanfile.py": str(conanfile)})
client.run("install .")
path = os.path.join(client.current_folder, "graph_info.json")
graph_info = client.load(path)
graph_info = json.loads(graph_info)
graph_info.pop("root")
save(path, json.dumps(graph_info))
client.run("info .")
self.assertIn("conanfile.py (Hello/0.1)", client.out)
save(path, "broken thing")
client.run("info .", assert_error=True)
self.assertIn("ERROR: Error parsing GraphInfo from file", client.out)
def test_previous_lockfile_error(self):
# https://github.com/conan-io/conan/issues/5479
client = TestClient()
client.save({"conanfile.py": GenConanfile().with_name("pkg").with_version("0.1")})
client.run("create . user/testing")
client.save({"conanfile.py": GenConanfile().with_name("other").with_version("0.1")
.with_option("shared", [True, False])
.with_default_option("shared", False)})
client.run("install . -o shared=True")
client.run("info pkg/0.1@user/testing")
self.assertIn("pkg/0.1@user/testing", client.out)
self.assertNotIn("shared", client.out)
def test_scm_info():
# https://github.com/conan-io/conan/issues/8377
conanfile = textwrap.dedent("""
from conans import ConanFile
class Pkg(ConanFile):
scm = {"type": "git",
"url": "some-url/path",
"revision": "some commit hash"}
""")
client = TestClient()
client.save({"conanfile.py": conanfile})
client.run("export . pkg/0.1@")
client.run("info .")
assert "'revision': 'some commit hash'" in client.out
assert "'url': 'some-url/path'" in client.out
client.run("info pkg/0.1@")
assert "'revision': 'some commit hash'" in client.out
assert "'url': 'some-url/path'" in client.out
client.run("info . --json=file.json")
file_json = client.load("file.json")
info_json = json.loads(file_json)
node = info_json[0]
assert node["scm"] == {"type": "git", "url": "some-url/path", "revision": "some commit hash"}
class TestInfoContext:
# https://github.com/conan-io/conan/issues/9121
def test_context_info(self):
client = TestClient()
client.save({"conanfile.py": GenConanfile()})
client.run("info .")
assert "Context: host" in client.out
client.run("info . --json=file.json")
info = json.loads(client.load("file.json"))
assert info[0]["context"] == "host"
def test_context_build(self):
client = TestClient()
client.save({"cmake/conanfile.py": GenConanfile(),
"pkg/conanfile.py": GenConanfile().with_build_requires("cmake/1.0")})
client.run("create cmake cmake/1.0@")
client.run("export pkg pkg/1.0@")
client.run("info pkg/1.0@ -pr:b=default -pr:h=default --dry-build")
assert "cmake/1.0\n"\
" ID: {}\n"\
" BuildID: None\n"\
" Context: build".format(NO_SETTINGS_PACKAGE_ID) in client.out
assert "pkg/1.0\n" \
" ID: {}\n" \
" BuildID: None\n" \
" Context: host".format(NO_SETTINGS_PACKAGE_ID) in client.out
client.run("info pkg/1.0@ -pr:b=default -pr:h=default --dry-build --json=file.json")
info = json.loads(client.load("file.json"))
assert info[0]["reference"] == "cmake/1.0"
assert info[0]["context"] == "build"
assert info[1]["reference"] == "pkg/1.0"
assert info[1]["context"] == "host"
class TestInfoPythonRequires:
# https://github.com/conan-io/conan/issues/9277
def test_python_requires(self):
client = TestClient()
client.save({"conanfile.py": GenConanfile()})
client.run("export . tool/0.1@")
conanfile = textwrap.dedent("""
from conans import ConanFile
class Pkg(ConanFile):
python_requires = "tool/0.1"
""")
client.save({"conanfile.py": conanfile})
client.run("info .")
assert "Python-requires:" in client.out
assert "tool/0.1#f3367e0e7d170aa12abccb175fee5f97" in client.out
client.run("info . --json=file.json")
info = json.loads(client.load("file.json"))
assert info[0]["python_requires"] == ['tool/0.1#f3367e0e7d170aa12abccb175fee5f97']
| 43.813278
| 101
| 0.574234
|
9842a3ccf256b66e396092595e8c461d2789a109
| 2,657
|
py
|
Python
|
experiments/plot_tools.py
|
stevenengler/witness-encryption
|
061882a69b8d55571186b3aa1e402335e3e9bf09
|
[
"MIT"
] | null | null | null |
experiments/plot_tools.py
|
stevenengler/witness-encryption
|
061882a69b8d55571186b3aa1e402335e3e9bf09
|
[
"MIT"
] | 1
|
2021-06-27T15:14:29.000Z
|
2021-06-27T21:01:06.000Z
|
experiments/plot_tools.py
|
stevenengler/witness-encryption
|
061882a69b8d55571186b3aa1e402335e3e9bf09
|
[
"MIT"
] | 1
|
2021-06-27T15:15:46.000Z
|
2021-06-27T15:15:46.000Z
|
import matplotlib.pylab as plt
import matplotlib.ticker as ticker
from matplotlib.ticker import FormatStrFormatter
from matplotlib import transforms
from matplotlib import lines
from matplotlib.ticker import ScalarFormatter
#
import statistics
import math
import numpy as np
#
CB_color_cycle = ['#377eb8', '#ff7f00', '#4daf4a',
'#984ea3', '#a65628', '#f781bf',
'#999999', '#e41a1c', '#dede00']
#
def graph(data, keys_to_graph, keys_readable, xlabel, ylabel, color_index=0, x_log=False, ymin=None, ymax=None):
plt.xlabel(xlabel)
#
if not all([data['y'][0][0][key]['unit']==data['y'][0][0][keys_to_graph[0]]['unit'] for key in keys_to_graph]):
raise Exception('Not all units were the same.')
#
unit = data['y'][0][0][keys_to_graph[0]]['unit']
#
if unit != None:
plt.ylabel('{} ({})'.format(ylabel, unit))
else:
plt.ylabel(ylabel)
#
plt.minorticks_on()
plt.grid(color='lightgrey')
#
if x_log:
plt.gca().set_xscale('log', basex=2)
plt.gca().set_xticks([0.5, 1, 2, 4, 8, 16])
plt.gca().xaxis.set_major_formatter(ScalarFormatter())
#
markers = ['o', 's', '^', 'v', 'D']
#
for i in range(len(keys_to_graph)):
key = keys_to_graph[i]
values = [[repeat[key]['data'] for repeat in data['y'][j]] for j in range(len(data['y']))]
#
x = data['x']
y = [statistics.mean(i) for i in values]
#
import numpy as np, scipy.stats as st
conf_intervals = np.array([st.t.interval(0.95, len(repeat_vals)-1, loc=np.mean(repeat_vals), scale=st.sem(repeat_vals)) for repeat_vals in values])
        # errorbar() expects positive magnitudes: [distance below the mean, distance above the mean].
        yerr_down = y - conf_intervals[:,0]
        yerr_up = conf_intervals[:,1] - y
        #
        plt.errorbar(x, y, fmt='.-', label=keys_readable[i], color=CB_color_cycle[i+color_index], marker=markers[i+color_index], markersize=5, ecolor='#333333', yerr=[yerr_down, yerr_up], capsize=3)
#
if ymax is not None:
plt.ylim(ymax=ymax)
#
if plt.gca().get_ylim()[0]>0:
plt.ylim(ymin=ymin)
#
#
def plot_experiment(data, title, keys_to_graph, keys_readable, xlabel, ylabel, ymin=None, x_log=False, ymax=None, special_legend=0, color_index=0):
plt.figure(figsize=(4, 3))
#
graph(data, keys_to_graph, keys_readable, xlabel, ylabel, color_index, x_log, ymin, ymax)
#
if not all([x==None for x in keys_readable]):
# get handles
handles, labels = plt.gca().get_legend_handles_labels()
# remove the errorbars
handles = [h[0] for h in handles]
if special_legend == 1:
plt.legend(handles, labels, markerscale=1.5, bbox_to_anchor=(0, 0.3), loc='center left')
else:
plt.legend(handles, labels, markerscale=1.5)
#
#
plt.tight_layout()
plt.savefig('figures/'+title.replace(' ', '_')+'.pdf', format='pdf', bbox_inches='tight')
plt.show()
#
| 33.2125
| 192
| 0.679714
|
2ba29a9d2039b21196a30b2b905d5fc5297671ed
| 20,922
|
py
|
Python
|
tests/test_proxy_functional.py
|
ods/aiohttp
|
8c45b4ea5faa93c70eded0e4a3f6fe483ac987c0
|
[
"Apache-2.0"
] | 3
|
2019-01-15T04:17:33.000Z
|
2019-03-13T13:12:15.000Z
|
tests/test_proxy_functional.py
|
ods/aiohttp
|
8c45b4ea5faa93c70eded0e4a3f6fe483ac987c0
|
[
"Apache-2.0"
] | null | null | null |
tests/test_proxy_functional.py
|
ods/aiohttp
|
8c45b4ea5faa93c70eded0e4a3f6fe483ac987c0
|
[
"Apache-2.0"
] | 1
|
2019-01-06T11:51:49.000Z
|
2019-01-06T11:51:49.000Z
|
import asyncio
import os
import pathlib
from unittest import mock
import pytest
from yarl import URL
import aiohttp
from aiohttp import web
@pytest.fixture
def proxy_test_server(aiohttp_raw_server, loop, monkeypatch):
"""Handle all proxy requests and imitate remote server response."""
_patch_ssl_transport(monkeypatch)
default_response = dict(
status=200,
headers=None,
body=None)
proxy_mock = mock.Mock()
async def proxy_handler(request):
proxy_mock.request = request
proxy_mock.requests_list.append(request)
response = default_response.copy()
if isinstance(proxy_mock.return_value, dict):
response.update(proxy_mock.return_value)
headers = response['headers']
if not headers:
headers = {}
if request.method == 'CONNECT':
response['body'] = None
response['headers'] = headers
resp = web.Response(**response)
await resp.prepare(request)
await resp.write_eof()
return resp
async def proxy_server():
proxy_mock.request = None
proxy_mock.auth = None
proxy_mock.requests_list = []
server = await aiohttp_raw_server(proxy_handler)
proxy_mock.server = server
proxy_mock.url = server.make_url('/')
return proxy_mock
return proxy_server
@pytest.fixture()
def get_request(loop):
async def _request(method='GET', *, url, trust_env=False, **kwargs):
connector = aiohttp.TCPConnector(ssl=False, loop=loop)
client = aiohttp.ClientSession(connector=connector,
trust_env=trust_env)
try:
resp = await client.request(method, url, **kwargs)
await resp.release()
return resp
finally:
await client.close()
return _request
async def test_proxy_http_absolute_path(proxy_test_server,
get_request) -> None:
url = 'http://aiohttp.io/path?query=yes'
proxy = await proxy_test_server()
await get_request(url=url, proxy=proxy.url)
assert len(proxy.requests_list) == 1
assert proxy.request.method == 'GET'
assert proxy.request.host == 'aiohttp.io'
assert proxy.request.path_qs == 'http://aiohttp.io/path?query=yes'
async def test_proxy_http_raw_path(proxy_test_server, get_request) -> None:
url = 'http://aiohttp.io:2561/space sheep?q=can:fly'
raw_url = 'http://aiohttp.io:2561/space%20sheep?q=can:fly'
proxy = await proxy_test_server()
await get_request(url=url, proxy=proxy.url)
assert proxy.request.host == 'aiohttp.io:2561'
assert proxy.request.path_qs == raw_url
async def test_proxy_http_idna_support(proxy_test_server, get_request) -> None:
url = 'http://éé.com/'
raw_url = 'http://xn--9caa.com/'
proxy = await proxy_test_server()
await get_request(url=url, proxy=proxy.url)
assert proxy.request.host == 'xn--9caa.com'
assert proxy.request.path_qs == raw_url
async def test_proxy_http_connection_error(get_request) -> None:
url = 'http://aiohttp.io/path'
proxy_url = 'http://localhost:2242/'
with pytest.raises(aiohttp.ClientConnectorError):
await get_request(url=url, proxy=proxy_url)
async def test_proxy_http_bad_response(proxy_test_server, get_request) -> None:
url = 'http://aiohttp.io/path'
proxy = await proxy_test_server()
proxy.return_value = dict(
status=502,
headers={'Proxy-Agent': 'TestProxy'})
resp = await get_request(url=url, proxy=proxy.url)
assert resp.status == 502
assert resp.headers['Proxy-Agent'] == 'TestProxy'
async def test_proxy_http_auth(proxy_test_server, get_request) -> None:
url = 'http://aiohttp.io/path'
proxy = await proxy_test_server()
await get_request(url=url, proxy=proxy.url)
assert 'Authorization' not in proxy.request.headers
assert 'Proxy-Authorization' not in proxy.request.headers
auth = aiohttp.BasicAuth('user', 'pass')
await get_request(url=url, auth=auth, proxy=proxy.url)
assert 'Authorization' in proxy.request.headers
assert 'Proxy-Authorization' not in proxy.request.headers
await get_request(url=url, proxy_auth=auth, proxy=proxy.url)
assert 'Authorization' not in proxy.request.headers
assert 'Proxy-Authorization' in proxy.request.headers
await get_request(url=url, auth=auth,
proxy_auth=auth, proxy=proxy.url)
assert 'Authorization' in proxy.request.headers
assert 'Proxy-Authorization' in proxy.request.headers
async def test_proxy_http_auth_utf8(proxy_test_server, get_request) -> None:
url = 'http://aiohttp.io/path'
auth = aiohttp.BasicAuth('юзер', 'пасс', 'utf-8')
proxy = await proxy_test_server()
await get_request(url=url, auth=auth, proxy=proxy.url)
assert 'Authorization' in proxy.request.headers
assert 'Proxy-Authorization' not in proxy.request.headers
async def test_proxy_http_auth_from_url(proxy_test_server,
get_request) -> None:
url = 'http://aiohttp.io/path'
proxy = await proxy_test_server()
auth_url = URL(url).with_user('user').with_password('pass')
await get_request(url=auth_url, proxy=proxy.url)
assert 'Authorization' in proxy.request.headers
assert 'Proxy-Authorization' not in proxy.request.headers
proxy_url = URL(proxy.url).with_user('user').with_password('pass')
await get_request(url=url, proxy=proxy_url)
assert 'Authorization' not in proxy.request.headers
assert 'Proxy-Authorization' in proxy.request.headers
async def test_proxy_http_acquired_cleanup(proxy_test_server, loop) -> None:
url = 'http://aiohttp.io/path'
conn = aiohttp.TCPConnector(loop=loop)
sess = aiohttp.ClientSession(connector=conn, loop=loop)
proxy = await proxy_test_server()
assert 0 == len(conn._acquired)
resp = await sess.get(url, proxy=proxy.url)
assert resp.closed
assert 0 == len(conn._acquired)
await sess.close()
@pytest.mark.skip('we need to reconsider how we test this')
async def test_proxy_http_acquired_cleanup_force(proxy_test_server,
loop) -> None:
url = 'http://aiohttp.io/path'
conn = aiohttp.TCPConnector(force_close=True, loop=loop)
sess = aiohttp.ClientSession(connector=conn, loop=loop)
proxy = await proxy_test_server()
assert 0 == len(conn._acquired)
async def request():
resp = await sess.get(url, proxy=proxy.url)
assert 1 == len(conn._acquired)
await resp.release()
await request()
assert 0 == len(conn._acquired)
await sess.close()
@pytest.mark.skip('we need to reconsider how we test this')
async def test_proxy_http_multi_conn_limit(proxy_test_server, loop) -> None:
url = 'http://aiohttp.io/path'
limit, multi_conn_num = 1, 5
conn = aiohttp.TCPConnector(limit=limit, loop=loop)
sess = aiohttp.ClientSession(connector=conn, loop=loop)
proxy = await proxy_test_server()
current_pid = None
async def request(pid):
# process requests only one by one
nonlocal current_pid
resp = await sess.get(url, proxy=proxy.url)
current_pid = pid
await asyncio.sleep(0.2, loop=loop)
assert current_pid == pid
await resp.release()
return resp
requests = [request(pid) for pid in range(multi_conn_num)]
responses = await asyncio.gather(*requests, loop=loop)
assert len(responses) == multi_conn_num
assert set(resp.status for resp in responses) == {200}
await sess.close()
@pytest.mark.xfail
async def xtest_proxy_https_connect(proxy_test_server, get_request):
proxy = await proxy_test_server()
url = 'https://www.google.com.ua/search?q=aiohttp proxy'
await get_request(url=url, proxy=proxy.url)
connect = proxy.requests_list[0]
assert connect.method == 'CONNECT'
assert connect.path == 'www.google.com.ua:443'
assert connect.host == 'www.google.com.ua'
assert proxy.request.host == 'www.google.com.ua'
assert proxy.request.path_qs == '/search?q=aiohttp+proxy'
@pytest.mark.xfail
async def xtest_proxy_https_connect_with_port(proxy_test_server, get_request):
proxy = await proxy_test_server()
url = 'https://secure.aiohttp.io:2242/path'
await get_request(url=url, proxy=proxy.url)
connect = proxy.requests_list[0]
assert connect.method == 'CONNECT'
assert connect.path == 'secure.aiohttp.io:2242'
assert connect.host == 'secure.aiohttp.io:2242'
assert proxy.request.host == 'secure.aiohttp.io:2242'
assert proxy.request.path_qs == '/path'
@pytest.mark.xfail
async def xtest_proxy_https_send_body(proxy_test_server, loop):
sess = aiohttp.ClientSession(loop=loop)
proxy = await proxy_test_server()
proxy.return_value = {'status': 200, 'body': b'1'*(2**20)}
url = 'https://www.google.com.ua/search?q=aiohttp proxy'
resp = await sess.get(url, proxy=proxy.url)
body = await resp.read()
await resp.release()
await sess.close()
assert body == b'1'*(2**20)
@pytest.mark.xfail
async def xtest_proxy_https_idna_support(proxy_test_server, get_request):
url = 'https://éé.com/'
proxy = await proxy_test_server()
await get_request(url=url, proxy=proxy.url)
connect = proxy.requests_list[0]
assert connect.method == 'CONNECT'
assert connect.path == 'xn--9caa.com:443'
assert connect.host == 'xn--9caa.com'
async def test_proxy_https_connection_error(get_request) -> None:
url = 'https://secure.aiohttp.io/path'
proxy_url = 'http://localhost:2242/'
with pytest.raises(aiohttp.ClientConnectorError):
await get_request(url=url, proxy=proxy_url)
async def test_proxy_https_bad_response(proxy_test_server,
get_request) -> None:
url = 'https://secure.aiohttp.io/path'
proxy = await proxy_test_server()
proxy.return_value = dict(
status=502,
headers={'Proxy-Agent': 'TestProxy'})
with pytest.raises(aiohttp.ClientHttpProxyError):
await get_request(url=url, proxy=proxy.url)
assert len(proxy.requests_list) == 1
assert proxy.request.method == 'CONNECT'
assert proxy.request.path == 'secure.aiohttp.io:443'
@pytest.mark.xfail
async def xtest_proxy_https_auth(proxy_test_server, get_request):
url = 'https://secure.aiohttp.io/path'
auth = aiohttp.BasicAuth('user', 'pass')
proxy = await proxy_test_server()
await get_request(url=url, proxy=proxy.url)
connect = proxy.requests_list[0]
assert 'Authorization' not in connect.headers
assert 'Proxy-Authorization' not in connect.headers
assert 'Authorization' not in proxy.request.headers
assert 'Proxy-Authorization' not in proxy.request.headers
proxy = await proxy_test_server()
await get_request(url=url, auth=auth, proxy=proxy.url)
connect = proxy.requests_list[0]
assert 'Authorization' not in connect.headers
assert 'Proxy-Authorization' not in connect.headers
assert 'Authorization' in proxy.request.headers
assert 'Proxy-Authorization' not in proxy.request.headers
proxy = await proxy_test_server()
await get_request(url=url, proxy_auth=auth, proxy=proxy.url)
connect = proxy.requests_list[0]
assert 'Authorization' not in connect.headers
assert 'Proxy-Authorization' in connect.headers
assert 'Authorization' not in proxy.request.headers
assert 'Proxy-Authorization' not in proxy.request.headers
proxy = await proxy_test_server()
await get_request(url=url, auth=auth,
proxy_auth=auth, proxy=proxy.url)
connect = proxy.requests_list[0]
assert 'Authorization' not in connect.headers
assert 'Proxy-Authorization' in connect.headers
assert 'Authorization' in proxy.request.headers
assert 'Proxy-Authorization' not in proxy.request.headers
@pytest.mark.xfail
async def xtest_proxy_https_acquired_cleanup(proxy_test_server, loop):
url = 'https://secure.aiohttp.io/path'
conn = aiohttp.TCPConnector(loop=loop)
sess = aiohttp.ClientSession(connector=conn, loop=loop)
proxy = await proxy_test_server()
assert 0 == len(conn._acquired)
async def request():
resp = await sess.get(url, proxy=proxy.url)
assert 1 == len(conn._acquired)
await resp.release()
await request()
assert 0 == len(conn._acquired)
await sess.close()
@pytest.mark.xfail
async def xtest_proxy_https_acquired_cleanup_force(proxy_test_server, loop):
url = 'https://secure.aiohttp.io/path'
conn = aiohttp.TCPConnector(force_close=True, loop=loop)
sess = aiohttp.ClientSession(connector=conn, loop=loop)
proxy = await proxy_test_server()
assert 0 == len(conn._acquired)
async def request():
resp = await sess.get(url, proxy=proxy.url)
assert 1 == len(conn._acquired)
await resp.release()
await request()
assert 0 == len(conn._acquired)
await sess.close()
@pytest.mark.xfail
async def xtest_proxy_https_multi_conn_limit(proxy_test_server, loop):
url = 'https://secure.aiohttp.io/path'
limit, multi_conn_num = 1, 5
conn = aiohttp.TCPConnector(limit=limit, loop=loop)
sess = aiohttp.ClientSession(connector=conn, loop=loop)
proxy = await proxy_test_server()
current_pid = None
async def request(pid):
# process requests only one by one
nonlocal current_pid
resp = await sess.get(url, proxy=proxy.url)
current_pid = pid
await asyncio.sleep(0.2, loop=loop)
assert current_pid == pid
await resp.release()
return resp
requests = [request(pid) for pid in range(multi_conn_num)]
responses = await asyncio.gather(*requests, loop=loop)
assert len(responses) == multi_conn_num
assert set(resp.status for resp in responses) == {200}
await sess.close()
def _patch_ssl_transport(monkeypatch):
"""Make ssl transport substitution to prevent ssl handshake."""
def _make_ssl_transport_dummy(self, rawsock, protocol, sslcontext,
waiter=None, **kwargs):
return self._make_socket_transport(rawsock, protocol, waiter,
extra=kwargs.get('extra'),
server=kwargs.get('server'))
monkeypatch.setattr(
"asyncio.selector_events.BaseSelectorEventLoop._make_ssl_transport",
_make_ssl_transport_dummy)
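A fixture along these lines could apply the substitution for the env-proxy tests below; this is a sketch only, and it assumes the original module wires _patch_ssl_transport into its proxy fixtures rather than exposing it exactly like this.
@pytest.fixture()
def no_ssl_handshake(monkeypatch):
    # Sketch only: reuse the helper above so "ssl" transports fall back to plain
    # socket transports, letting the dummy proxy answer HTTPS-style requests.
    _patch_ssl_transport(monkeypatch)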
original_is_file = pathlib.Path.is_file
def mock_is_file(self):
""" make real netrc file invisible in home dir """
if self.name in ['_netrc', '.netrc'] and self.parent == self.home():
return False
else:
return original_is_file(self)
async def test_proxy_from_env_http(proxy_test_server,
get_request, mocker) -> None:
url = 'http://aiohttp.io/path'
proxy = await proxy_test_server()
mocker.patch.dict(os.environ, {'http_proxy': str(proxy.url)})
mocker.patch('pathlib.Path.is_file', mock_is_file)
await get_request(url=url, trust_env=True)
assert len(proxy.requests_list) == 1
assert proxy.request.method == 'GET'
assert proxy.request.host == 'aiohttp.io'
assert proxy.request.path_qs == 'http://aiohttp.io/path'
assert 'Proxy-Authorization' not in proxy.request.headers
async def test_proxy_from_env_http_with_auth(proxy_test_server,
get_request, mocker):
url = 'http://aiohttp.io/path'
proxy = await proxy_test_server()
auth = aiohttp.BasicAuth('user', 'pass')
mocker.patch.dict(os.environ, {'http_proxy':
str(proxy.url
.with_user(auth.login)
.with_password(auth.password))})
await get_request(url=url, trust_env=True)
assert len(proxy.requests_list) == 1
assert proxy.request.method == 'GET'
assert proxy.request.host == 'aiohttp.io'
assert proxy.request.path_qs == 'http://aiohttp.io/path'
assert proxy.request.headers['Proxy-Authorization'] == auth.encode()
async def test_proxy_from_env_http_with_auth_from_netrc(
proxy_test_server, get_request, tmpdir, mocker):
url = 'http://aiohttp.io/path'
proxy = await proxy_test_server()
auth = aiohttp.BasicAuth('user', 'pass')
netrc_file = tmpdir.join('test_netrc')
netrc_file_data = 'machine 127.0.0.1 login %s password %s' % (
auth.login, auth.password)
with open(str(netrc_file), 'w') as f:
f.write(netrc_file_data)
mocker.patch.dict(os.environ, {'http_proxy': str(proxy.url),
'NETRC': str(netrc_file)})
await get_request(url=url, trust_env=True)
assert len(proxy.requests_list) == 1
assert proxy.request.method == 'GET'
assert proxy.request.host == 'aiohttp.io'
assert proxy.request.path_qs == 'http://aiohttp.io/path'
assert proxy.request.headers['Proxy-Authorization'] == auth.encode()
async def test_proxy_from_env_http_without_auth_from_netrc(
proxy_test_server, get_request, tmpdir, mocker):
url = 'http://aiohttp.io/path'
proxy = await proxy_test_server()
auth = aiohttp.BasicAuth('user', 'pass')
netrc_file = tmpdir.join('test_netrc')
netrc_file_data = 'machine 127.0.0.2 login %s password %s' % (
auth.login, auth.password)
with open(str(netrc_file), 'w') as f:
f.write(netrc_file_data)
mocker.patch.dict(os.environ, {'http_proxy': str(proxy.url),
'NETRC': str(netrc_file)})
await get_request(url=url, trust_env=True)
assert len(proxy.requests_list) == 1
assert proxy.request.method == 'GET'
assert proxy.request.host == 'aiohttp.io'
assert proxy.request.path_qs == 'http://aiohttp.io/path'
assert 'Proxy-Authorization' not in proxy.request.headers
async def test_proxy_from_env_http_without_auth_from_wrong_netrc(
proxy_test_server, get_request, tmpdir, mocker):
url = 'http://aiohttp.io/path'
proxy = await proxy_test_server()
auth = aiohttp.BasicAuth('user', 'pass')
netrc_file = tmpdir.join('test_netrc')
invalid_data = 'machine 127.0.0.1 %s pass %s' % (
auth.login, auth.password)
with open(str(netrc_file), 'w') as f:
f.write(invalid_data)
mocker.patch.dict(os.environ, {'http_proxy': str(proxy.url),
'NETRC': str(netrc_file)})
await get_request(url=url, trust_env=True)
assert len(proxy.requests_list) == 1
assert proxy.request.method == 'GET'
assert proxy.request.host == 'aiohttp.io'
assert proxy.request.path_qs == 'http://aiohttp.io/path'
assert 'Proxy-Authorization' not in proxy.request.headers
@pytest.mark.xfail
async def xtest_proxy_from_env_https(proxy_test_server, get_request, mocker):
url = 'https://aiohttp.io/path'
proxy = await proxy_test_server()
mocker.patch.dict(os.environ, {'https_proxy': str(proxy.url)})
    mocker.patch('pathlib.Path.is_file', mock_is_file)
await get_request(url=url, trust_env=True)
assert len(proxy.requests_list) == 2
assert proxy.request.method == 'GET'
assert proxy.request.host == 'aiohttp.io'
assert proxy.request.path_qs == 'https://aiohttp.io/path'
assert 'Proxy-Authorization' not in proxy.request.headers
@pytest.mark.xfail
async def xtest_proxy_from_env_https_with_auth(proxy_test_server,
get_request, mocker):
url = 'https://aiohttp.io/path'
proxy = await proxy_test_server()
auth = aiohttp.BasicAuth('user', 'pass')
mocker.patch.dict(os.environ, {'https_proxy':
str(proxy.url
.with_user(auth.login)
.with_password(auth.password))})
await get_request(url=url, trust_env=True)
assert len(proxy.requests_list) == 2
assert proxy.request.method == 'GET'
assert proxy.request.host == 'aiohttp.io'
assert proxy.request.path_qs == '/path'
assert 'Proxy-Authorization' not in proxy.request.headers
r2 = proxy.requests_list[0]
assert r2.method == 'CONNECT'
assert r2.host == 'aiohttp.io'
assert r2.path_qs == '/path'
assert r2.headers['Proxy-Authorization'] == auth.encode()
async def test_proxy_auth() -> None:
async with aiohttp.ClientSession() as session:
        with pytest.raises(
                ValueError,
                match=r"proxy_auth must be None or BasicAuth\(\) tuple"):
await session.get('http://python.org',
proxy='http://proxy.example.com',
proxy_auth=('user', 'pass'))
| 32.187692
| 79
| 0.66724
|
e8d02e720ff0f6c243f8be7c454e1d77dddab141
| 12,276
|
py
|
Python
|
test/programytest/client.py
|
motazsaad/fit-bot-fb-clt
|
580477aa1ec91855b621d9ae276f2705962f6a87
|
[
"MIT"
] | null | null | null |
test/programytest/client.py
|
motazsaad/fit-bot-fb-clt
|
580477aa1ec91855b621d9ae276f2705962f6a87
|
[
"MIT"
] | null | null | null |
test/programytest/client.py
|
motazsaad/fit-bot-fb-clt
|
580477aa1ec91855b621d9ae276f2705962f6a87
|
[
"MIT"
] | 4
|
2019-04-01T15:42:23.000Z
|
2020-11-05T08:14:27.000Z
|
import logging
import os
import os.path
from programy.clients.client import BotClient
from programy.config.programy import ProgramyConfiguration
from programy.clients.events.console.config import ConsoleConfiguration
from programy.storage.stores.file.config import FileStorageConfiguration
from programy.storage.stores.file.store.config import FileStoreConfiguration
from programy.storage.stores.file.engine import FileStorageEngine
from programy.storage.factory import StorageFactory
from programytest.clients.arguments import MockArguments
class TestClient(BotClient):
def __init__(self, debug=False, level=logging.ERROR):
if debug is True:
logging.getLogger().setLevel(level)
self._file_store_config = FileStorageConfiguration()
self._storage_engine = FileStorageEngine(self._file_store_config)
BotClient.__init__(self, "testclient")
def add_license_keys_store(self):
self._file_store_config._license_storage = FileStoreConfiguration(file=os.path.dirname(__file__) + os.sep + "testdata" + os.sep + "test_licenses.keys", format="text",
encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.LICENSE_KEYS] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.LICENSE_KEYS] = self._storage_engine
self.load_license_keys()
def add_spelling_store(self):
self._file_store_config._spelling_storage = FileStoreConfiguration(file=os.path.dirname(__file__) + os.sep + "testdata" + os.sep + "test_corpus.txt", format="text",
encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.SPELLING_CORPUS] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.SPELLING_CORPUS] = self._storage_engine
def add_usergroups_store(self):
self._file_store_config._usergroups_storage = FileStoreConfiguration(file=os.path.dirname(__file__) + os.sep + "testdata" + os.sep + "test_usergroups.yaml", format="yaml",
encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.USERGROUPS] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.USERGROUPS] = self._storage_engine
def add_categories_store(self, dirs):
self._file_store_config._categories_storage = FileStoreConfiguration(dirs=dirs, format="xml", extension="aiml", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.CATEGORIES] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.CATEGORIES] = self._storage_engine
def add_single_categories_store(self, file):
self._file_store_config._categories_storage = FileStoreConfiguration(file=file, format="xml", extension="aiml", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.CATEGORIES] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.CATEGORIES] = self._storage_engine
def add_learnf_store(self, dirs):
self._file_store_config._learnf_storage = FileStoreConfiguration(dirs=dirs, format="xml", extension="aiml", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.LEARNF] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.LEARNF] = self._storage_engine
def add_sets_store(self, dirs):
self._file_store_config._sets_storage = FileStoreConfiguration(dirs=dirs, format="text", extension="txt", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.SETS] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.SETS] = self._storage_engine
def add_set_store(self, file):
self._file_store_config._sets_storage = FileStoreConfiguration(file=file, format="text", extension="txt", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.SETS] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.SETS] = self._storage_engine
def add_maps_store(self, dirs):
self._file_store_config._maps_storage = FileStoreConfiguration(dirs=dirs, format="text", extension="txt", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.MAPS] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.MAPS] = self._storage_engine
def add_map_store(self, file):
self._file_store_config._maps_storage = FileStoreConfiguration(file=file, format="text", extension="txt", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.MAPS] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.MAPS] = self._storage_engine
def add_denormal_store(self, file):
self._file_store_config._denormal_storage = FileStoreConfiguration(dirs=file, format="text", extension="txt", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.DENORMAL] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.DENORMAL] = self._storage_engine
def add_normal_store(self, file):
self._file_store_config._normal_storage = FileStoreConfiguration(dirs=file, format="text", extension="txt", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.NORMAL] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.NORMAL] = self._storage_engine
def add_person_store(self, file):
self._file_store_config._person_storage = FileStoreConfiguration(dirs=file, format="text", extension="txt", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.PERSON] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.PERSON] = self._storage_engine
def add_person2_store(self, file):
self._file_store_config._person2_storage = FileStoreConfiguration(dirs=file, format="text", extension="txt", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.PERSON2] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.PERSON2] = self._storage_engine
def add_gender_store(self, file):
self._file_store_config._gender_storage = FileStoreConfiguration(file=file, format="text", extension="txt", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.GENDER] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.GENDER] = self._storage_engine
def add_pattern_nodes_store(self, file=None):
if file is None:
file = os.path.dirname(__file__) + os.sep + "testdata" + os.sep + "nodes" + os.sep + "test_pattern_nodes.txt"
self._file_store_config._pattern_nodes_storage = FileStoreConfiguration(file=file, format="text",
encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.PATTERN_NODES] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.PATTERN_NODES] = self._storage_engine
def add_template_nodes_store(self, file=None):
if file is None:
file = os.path.dirname(__file__) + os.sep + "testdata" + os.sep + "nodes" + os.sep + "test_template_nodes.txt"
self._file_store_config._template_nodes_storage = FileStoreConfiguration(file=file, format="text",
encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.TEMPLATE_NODES] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.TEMPLATE_NODES] = self._storage_engine
def add_twitter_store(self):
self._file_store_config._twitter_storage = FileStoreConfiguration(dirs=os.path.dirname(__file__) + os.sep + "testdata", format="text", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.TWITTER] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.TWITTER] = self._storage_engine
def add_properties_store(self, file):
self._file_store_config._properties_storage = FileStoreConfiguration(file=file, format="text", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.PROPERTIES] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.PROPERTIES] = self._storage_engine
def add_defaults_store(self, file):
self._file_store_config._defaults_storage = FileStoreConfiguration(file=file, format="text", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.DEFAULTS] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.DEFAULTS] = self._storage_engine
def add_variables_store(self, file):
self._file_store_config._variables_storage = FileStoreConfiguration(file=file, format="text", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.VARIABLES] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.VARIABLES] = self._storage_engine
def add_regex_templates_store(self, file):
self._file_store_config._regex_storage = FileStoreConfiguration(file=file, format="text", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.REGEX_TEMPLATES] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.REGEX_TEMPLATES] = self._storage_engine
def add_rdfs_store(self, dirs):
self._file_store_config._rdf_storage = FileStoreConfiguration(dirs=dirs, format="text", extension="txt", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.RDF] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.RDF] = self._storage_engine
def add_rdf_store(self, file):
self._file_store_config._rdf_storage = FileStoreConfiguration(file=file, format="text", extension="txt", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.RDF] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.RDF] = self._storage_engine
def add_conversation_store(self, dir):
self._file_store_config._conversation_storage = FileStoreConfiguration(dirs=dir, format="text", extension="txt", encoding="utf-8", delete_on_start=False)
self.storage_factory._storage_engines[StorageFactory.CONVERSATIONS] = self._storage_engine
self.storage_factory._store_to_engine_map[StorageFactory.CONVERSATIONS] = self._storage_engine
def add_default_stores(self):
self.add_license_keys_store()
self.add_spelling_store()
self.add_usergroups_store()
self.add_pattern_nodes_store()
self.add_template_nodes_store()
def parse_arguments(self, argument_parser):
return MockArguments()
def initiate_logging(self, arguments):
pass
def get_client_configuration(self):
return ConsoleConfiguration()
def load_configuration(self, arguments):
config = ConsoleConfiguration()
self._configuration = ProgramyConfiguration(config)
def set_environment(self):
"""For testing purposes we do nothing"""
return
def run(self):
"""For testing purposes we do nothing"""
return
def dump_graph(self, client_context):
client_context.brain.aiml_parser.pattern_parser.root.dump("", output_func=print)
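For orientation, a unit test typically drives this helper roughly as follows; the sketch below is illustrative only, and the testdata path plus the create_client_context/ask_question calls are assumptions about the surrounding programy API rather than part of this file.
# Illustrative usage only; paths and the create_client_context/ask_question calls are assumptions.
if __name__ == '__main__':
    example_client = TestClient(debug=True)
    example_client.add_default_stores()
    example_client.add_categories_store([os.path.dirname(__file__) + os.sep + "testdata"])
    example_context = example_client.create_client_context("testuser")
    print(example_context.bot.ask_question(example_context, "HELLO"))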
| 64.610526
| 183
| 0.755947
|
c37c67038f1ed963b86f401d68918aed7d88e455
| 3,468
|
py
|
Python
|
tests/network/test_floatingip.py
|
bclau/ceilometer
|
90ad86c08494596dfa03c8cbfcea2c2be58fc8dc
|
[
"Apache-2.0"
] | null | null | null |
tests/network/test_floatingip.py
|
bclau/ceilometer
|
90ad86c08494596dfa03c8cbfcea2c2be58fc8dc
|
[
"Apache-2.0"
] | null | null | null |
tests/network/test_floatingip.py
|
bclau/ceilometer
|
90ad86c08494596dfa03c8cbfcea2c2be58fc8dc
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright © 2012 eNovance <licensing@enovance.com>
#
# Copyright 2013 IBM Corp
# All Rights Reserved.
#
# Author: Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from ceilometer.central import manager
from ceilometer.network import floatingip
from ceilometer import nova_client
from ceilometer.openstack.common import context
from ceilometer.tests import base
class TestFloatingIPPollster(base.TestCase):
@mock.patch('ceilometer.pipeline.setup_pipeline', mock.MagicMock())
def setUp(self):
super(TestFloatingIPPollster, self).setUp()
self.context = context.get_admin_context()
self.manager = manager.AgentManager()
self.pollster = floatingip.FloatingIPPollster()
self.stubs.Set(nova_client.Client, 'floating_ip_get_all',
self.faux_get_ips)
@staticmethod
def faux_get_ips(self):
ips = []
for i in range(1, 4):
ip = mock.MagicMock()
ip.id = i
ip.ip = '1.1.1.%d' % i
ip.pool = 'public'
ips.append(ip)
return ips
# FIXME(dhellmann): Is there a useful way to define this
# test without a database?
#
# def test_get_samples_none_defined(self):
# try:
# list(self.pollster.get_samples(self.manager,
# self.context)
# )
# except exception.NoFloatingIpsDefined:
# pass
# else:
# assert False, 'Should have seen an error'
def test_get_samples_not_empty(self):
samples = list(self.pollster.get_samples(self.manager, {}))
self.assertEqual(len(samples), 3)
# It's necessary to verify all the attributes extracted by Nova
# API /os-floating-ips to make sure they're available and correct.
self.assertEqual(samples[0].resource_id, 1)
self.assertEqual(samples[0].resource_metadata["address"], "1.1.1.1")
self.assertEqual(samples[0].resource_metadata["pool"], "public")
self.assertEqual(samples[1].resource_id, 2)
self.assertEqual(samples[1].resource_metadata["address"], "1.1.1.2")
self.assertEqual(samples[1].resource_metadata["pool"], "public")
self.assertEqual(samples[2].resource_id, 3)
self.assertEqual(samples[2].resource_metadata["address"], "1.1.1.3")
self.assertEqual(samples[2].resource_metadata["pool"], "public")
def test_get_counter_names(self):
samples = list(self.pollster.get_samples(self.manager, {}))
self.assertEqual(set([s.name for s in samples]),
set(['ip.floating']))
def test_get_samples_cached(self):
cache = {}
cache['floating_ips'] = self.faux_get_ips(None)[:2]
samples = list(self.pollster.get_samples(self.manager, cache))
self.assertEqual(len(samples), 2)
| 36.893617
| 76
| 0.653979
|
faf515ccfce3b044647dc4b05409efb00571c48c
| 2,307
|
py
|
Python
|
huaweicloud-sdk-servicestage/huaweicloudsdkservicestage/v2/model/resource_refer_alias.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 64
|
2020-06-12T07:05:07.000Z
|
2022-03-30T03:32:50.000Z
|
huaweicloud-sdk-servicestage/huaweicloudsdkservicestage/v2/model/resource_refer_alias.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 11
|
2020-07-06T07:56:54.000Z
|
2022-01-11T11:14:40.000Z
|
huaweicloud-sdk-servicestage/huaweicloudsdkservicestage/v2/model/resource_refer_alias.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 24
|
2020-06-08T11:42:13.000Z
|
2022-03-04T06:44:08.000Z
|
# coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ResourceReferAlias:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
}
attribute_map = {
}
def __init__(self):
"""ResourceReferAlias - a model defined in huaweicloud sdk"""
self.discriminator = None
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ResourceReferAlias):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
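Because this generated alias declares no attributes, its helpers degenerate to trivial behaviour; a quick illustrative check (not part of the SDK) might look like this.
# Illustrative only: exercise the generated helpers of the attribute-less model.
if __name__ == "__main__":
    alias = ResourceReferAlias()
    assert alias.to_dict() == {}            # openapi_types is empty, so nothing serializes
    assert alias == ResourceReferAlias()    # __eq__ compares __dict__
    print(alias.to_str())                   # JSON via sanitize_for_serialization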
| 26.825581
| 79
| 0.53186
|
c7bc49f0b60379c2e135205a15a70d5b9fa91ef2
| 1,628
|
py
|
Python
|
convert_to_onnx.py
|
potipot/ultra_light_face
|
4b8489e2e207cf490549f94de1e35212e846f115
|
[
"MIT"
] | null | null | null |
convert_to_onnx.py
|
potipot/ultra_light_face
|
4b8489e2e207cf490549f94de1e35212e846f115
|
[
"MIT"
] | null | null | null |
convert_to_onnx.py
|
potipot/ultra_light_face
|
4b8489e2e207cf490549f94de1e35212e846f115
|
[
"MIT"
] | null | null | null |
"""
This code is used to convert the pytorch model into an onnx format model.
"""
import sys
from pathlib import Path
import torch.onnx
from vision.ssd.config.fd_config import define_img_size
input_img_size = 640  # define input size; default options: 128/160/320/480/640/1280
define_img_size(input_img_size)
from vision.ssd.mb_tiny_RFB_fd import create_Mb_Tiny_RFB_fd
from vision.ssd.mb_tiny_fd import create_mb_tiny_fd
# net_type = "slim"  # inference faster, lower precision
net_type = "RFB"  # inference slower, higher precision
label_path = "models/voc-model-labels.txt"
class_names = [name.strip() for name in open(label_path).readlines()]
num_classes = len(class_names)
if net_type == 'slim':
# model_path = "models/pretrained/version-slim-320.pth"
model_path = "models/pretrained/version-slim-640.pth"
net = create_mb_tiny_fd(len(class_names), is_test=True, without_postprocessing=True)
elif net_type == 'RFB':
# model_path = "models/pretrained/version-RFB-320.pth"
model_path = "models/pretrained/ir_faces/RFB-640-best.pth"
net = create_Mb_Tiny_RFB_fd(len(class_names), is_test=True, without_postprocessing=True)
else:
print("unsupport network type.")
sys.exit(1)
net.load(model_path)
net.eval()
net.to("cuda")
model_path = Path(model_path)
model_name = model_path.stem
model_path = model_path.parent/(model_name+'_wopp.onnx')
# dummy_input = torch.randn(1, 3, 240, 320).to("cuda")
dummy_input = torch.randn(1, 3, 480, 640).to("cuda") #if input size is 640*480
torch.onnx.export(net, dummy_input, model_path, verbose=False, input_names=['input'], output_names=['scores', 'boxes'])
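An optional post-export sanity check could look like the following; onnxruntime is an extra dependency that the original script does not use, so treat this as an assumption-laden sketch rather than part of the conversion.
# Optional sanity check (assumes the onnxruntime package is available; not part of the original script).
import onnxruntime as ort

sess = ort.InferenceSession(str(model_path))
scores, boxes = sess.run(['scores', 'boxes'], {'input': dummy_input.cpu().numpy()})
print("scores:", scores.shape, "boxes:", boxes.shape)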
| 36.177778
| 119
| 0.756143
|
c5cffa1b65cc3dc78b2a78bf73b409ec9244ae12
| 44
|
py
|
Python
|
src/notion/types.py
|
nvo87/education-backend
|
1f008bd396b5dde4483af611532826a9bca9fef5
|
[
"MIT"
] | 62
|
2021-09-22T18:38:26.000Z
|
2022-03-29T06:09:42.000Z
|
src/notion/types.py
|
nvo87/education-backend
|
1f008bd396b5dde4483af611532826a9bca9fef5
|
[
"MIT"
] | 50
|
2021-09-16T07:17:31.000Z
|
2022-03-26T12:06:58.000Z
|
src/notion/types.py
|
nvo87/education-backend
|
1f008bd396b5dde4483af611532826a9bca9fef5
|
[
"MIT"
] | 16
|
2021-10-17T17:43:31.000Z
|
2022-03-26T11:22:45.000Z
|
BlockId = str
__all__ = [
'BlockId',
]
| 7.333333
| 14
| 0.545455
|
a535734d0b9c7dd415d6a3ff58336ed10e3d4958
| 1,104
|
py
|
Python
|
LeetCode/count-primes.py
|
amgad01/algorithms
|
53eecf06e907cde806d4b78dc78fcd70d0271e3e
|
[
"MIT"
] | 1
|
2021-03-05T18:13:02.000Z
|
2021-03-05T18:13:02.000Z
|
LeetCode/count-primes.py
|
amgad01/algorithms
|
53eecf06e907cde806d4b78dc78fcd70d0271e3e
|
[
"MIT"
] | null | null | null |
LeetCode/count-primes.py
|
amgad01/algorithms
|
53eecf06e907cde806d4b78dc78fcd70d0271e3e
|
[
"MIT"
] | 1
|
2021-07-25T01:55:12.000Z
|
2021-07-25T01:55:12.000Z
|
def count_primes(n):
"""Count the number of prime numbers less than a non-negative number, n.
    # extra: also print the prime numbers themselves
:type n: int
:rtype: int
"""
prime_numbers = []
if n < 2:
return 0
prime = [1] * n # fill a list of length n with 1
for i in range(2, n):
if prime[i]:
prime[i * i:n:i] = [0] * len(prime[i * i:n:i]) # set all multiples of i to 0
for i, value in enumerate(prime): # to print the prime numbers themselves
if value == 1 and i >= 2: # only consider values= 1, since those are the prime numbers, disregard 1 and 0
prime_numbers.append(i) # append the (prime)number to the prime numbers list
print(prime_numbers)
return sum(prime[2:])
if __name__ == '__main__':
# Example 1:
# Input: n = 10
# Output: 4
# Explanation: There are 4 prime numbers less than 10, they are 2, 3, 5, 7.
print(count_primes(10))
# Example 2:
# Input: n = 0
# Output: 0
print(count_primes(0))
# Example 3:
# Input: n = 1
# Output: 0
print(count_primes(1))
| 32.470588
| 114
| 0.587862
|