blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
648b52fbcf90fe013704d34e152bc581ae3c40be | 0bce796dc4d44c3f8ce5653fedcb12adba37dcb4 | /app/shadowsocks/common.py | 4e07e913304cea11f600e41112dfd2dc1bbca612 | [
"Apache-2.0"
] | permissive | littleguyy/ShadowSocksShare-OpenShift | 2d79081bc14dd2e70add77e730373a1fdac2bc96 | 4e9e0021d573391e33f104e47e882122c988a187 | refs/heads/master | 2021-08-30T01:37:37.148308 | 2017-12-15T15:25:01 | 2017-12-15T15:25:01 | 109,275,324 | 2 | 1 | null | 2017-12-03T01:59:15 | 2017-11-02T14:26:28 | Python | UTF-8 | Python | false | false | 13,597 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import socket
import struct
import logging
import binascii
import re
from app.shadowsocks import lru_cache
def compat_ord(s):
if type(s) == int:
return s
return _ord(s)
def compat_chr(d):
if bytes == str:
return _chr(d)
return bytes([d])
_ord = ord
_chr = chr
ord = compat_ord
chr = compat_chr
connect_log = logging.debug
def to_bytes(s):
if bytes != str:
if type(s) == str:
return s.encode('utf-8')
return s
def to_str(s):
if bytes != str:
if type(s) == bytes:
return s.decode('utf-8')
return s
def int32(x):
if x > 0xFFFFFFFF or x < 0:
x &= 0xFFFFFFFF
if x > 0x7FFFFFFF:
x = int(0x100000000 - x)
if x < 0x80000000:
return -x
else:
return -2147483648
return x
def inet_ntop(family, ipstr):
if family == socket.AF_INET:
return to_bytes(socket.inet_ntoa(ipstr))
elif family == socket.AF_INET6:
import re
v6addr = ':'.join(('%02X%02X' % (ord(i), ord(j))).lstrip('0')
for i, j in zip(ipstr[::2], ipstr[1::2]))
v6addr = re.sub('::+', '::', v6addr, count=1)
return to_bytes(v6addr)
def inet_pton(family, addr):
addr = to_str(addr)
if family == socket.AF_INET:
return socket.inet_aton(addr)
elif family == socket.AF_INET6:
if '.' in addr: # a v4 addr
v4addr = addr[addr.rindex(':') + 1:]
v4addr = socket.inet_aton(v4addr)
v4addr = ['%02X' % ord(x) for x in v4addr]
v4addr.insert(2, ':')
newaddr = addr[:addr.rindex(':') + 1] + ''.join(v4addr)
return inet_pton(family, newaddr)
dbyts = [0] * 8 # 8 groups
grps = addr.split(':')
for i, v in enumerate(grps):
if v:
dbyts[i] = int(v, 16)
else:
for j, w in enumerate(grps[::-1]):
if w:
dbyts[7 - j] = int(w, 16)
else:
break
break
return b''.join((chr(i // 256) + chr(i % 256)) for i in dbyts)
else:
raise RuntimeError("What family?")
def is_ip(address):
for family in (socket.AF_INET, socket.AF_INET6):
try:
if type(address) != str:
address = address.decode('utf8')
inet_pton(family, address)
return family
except (TypeError, ValueError, OSError, IOError):
pass
return False
def match_regex(regex, text):
regex = re.compile(regex)
for item in regex.findall(text):
return True
return False
def patch_socket():
if not hasattr(socket, 'inet_pton'):
socket.inet_pton = inet_pton
if not hasattr(socket, 'inet_ntop'):
socket.inet_ntop = inet_ntop
patch_socket()
ADDRTYPE_IPV4 = 1
ADDRTYPE_IPV6 = 4
ADDRTYPE_HOST = 3
def pack_addr(address):
address_str = to_str(address)
for family in (socket.AF_INET, socket.AF_INET6):
try:
r = socket.inet_pton(family, address_str)
if family == socket.AF_INET6:
return b'\x04' + r
else:
return b'\x01' + r
except (TypeError, ValueError, OSError, IOError):
pass
if len(address) > 255:
address = address[:255] # TODO
return b'\x03' + chr(len(address)) + address
def pre_parse_header(data):
if not data:
return None
datatype = ord(data[0])
if datatype == 0x80:
if len(data) <= 2:
return None
rand_data_size = ord(data[1])
if rand_data_size + 2 >= len(data):
logging.warn('header too short, maybe wrong password or '
'encryption method')
return None
data = data[rand_data_size + 2:]
elif datatype == 0x81:
data = data[1:]
elif datatype == 0x82:
if len(data) <= 3:
return None
rand_data_size = struct.unpack('>H', data[1:3])[0]
if rand_data_size + 3 >= len(data):
logging.warn('header too short, maybe wrong password or '
'encryption method')
return None
data = data[rand_data_size + 3:]
elif datatype == 0x88 or (~datatype & 0xff) == 0x88:
if len(data) <= 7 + 7:
return None
data_size = struct.unpack('>H', data[1:3])[0]
ogn_data = data
data = data[:data_size]
crc = binascii.crc32(data) & 0xffffffff
if crc != 0xffffffff:
logging.warn('uncorrect CRC32, maybe wrong password or '
'encryption method')
return None
start_pos = 3 + ord(data[3])
data = data[start_pos:-4]
if data_size < len(ogn_data):
data += ogn_data[data_size:]
return data
def parse_header(data):
addrtype = ord(data[0])
dest_addr = None
dest_port = None
header_length = 0
connecttype = (addrtype & 0x8) and 1 or 0
addrtype &= ~0x8
if addrtype == ADDRTYPE_IPV4:
if len(data) >= 7:
dest_addr = socket.inet_ntoa(data[1:5])
dest_port = struct.unpack('>H', data[5:7])[0]
header_length = 7
else:
logging.warn('header is too short')
elif addrtype == ADDRTYPE_HOST:
if len(data) > 2:
addrlen = ord(data[1])
if len(data) >= 4 + addrlen:
dest_addr = data[2:2 + addrlen]
dest_port = struct.unpack('>H', data[2 + addrlen:4 +
addrlen])[0]
header_length = 4 + addrlen
else:
logging.warn('header is too short')
else:
logging.warn('header is too short')
elif addrtype == ADDRTYPE_IPV6:
if len(data) >= 19:
dest_addr = socket.inet_ntop(socket.AF_INET6, data[1:17])
dest_port = struct.unpack('>H', data[17:19])[0]
header_length = 19
else:
logging.warn('header is too short')
else:
logging.warn('unsupported addrtype %d, maybe wrong password or '
'encryption method' % addrtype)
if dest_addr is None:
return None
return connecttype, addrtype, to_bytes(dest_addr), dest_port, header_length
class IPNetwork(object):
ADDRLENGTH = {socket.AF_INET: 32, socket.AF_INET6: 128, False: 0}
def __init__(self, addrs):
self.addrs_str = addrs
self._network_list_v4 = []
self._network_list_v6 = []
if type(addrs) == str:
addrs = addrs.split(',')
list(map(self.add_network, addrs))
def add_network(self, addr):
if addr is "":
return
block = addr.split('/')
addr_family = is_ip(block[0])
addr_len = IPNetwork.ADDRLENGTH[addr_family]
if addr_family is socket.AF_INET:
ip, = struct.unpack("!I", socket.inet_aton(block[0]))
elif addr_family is socket.AF_INET6:
hi, lo = struct.unpack("!QQ", inet_pton(addr_family, block[0]))
ip = (hi << 64) | lo
else:
raise Exception("Not a valid CIDR notation: %s" % addr)
if len(block) is 1:
prefix_size = 0
while (ip & 1) == 0 and ip is not 0:
ip >>= 1
prefix_size += 1
logging.warn("You did't specify CIDR routing prefix size for %s, "
"implicit treated as %s/%d" % (addr, addr, addr_len))
elif block[1].isdigit() and int(block[1]) <= addr_len:
prefix_size = addr_len - int(block[1])
ip >>= prefix_size
else:
raise Exception("Not a valid CIDR notation: %s" % addr)
if addr_family is socket.AF_INET:
self._network_list_v4.append((ip, prefix_size))
else:
self._network_list_v6.append((ip, prefix_size))
def __contains__(self, addr):
addr_family = is_ip(addr)
if addr_family is socket.AF_INET:
ip, = struct.unpack("!I", socket.inet_aton(addr))
return any(map(lambda n_ps: n_ps[0] == ip >> n_ps[1],
self._network_list_v4))
elif addr_family is socket.AF_INET6:
hi, lo = struct.unpack("!QQ", inet_pton(addr_family, addr))
ip = (hi << 64) | lo
return any(map(lambda n_ps: n_ps[0] == ip >> n_ps[1],
self._network_list_v6))
else:
return False
def __cmp__(self, other):
return cmp(self.addrs_str, other.addrs_str)
def __eq__(self, other):
return self.addrs_str == other.addrs_str
def __ne__(self, other):
return self.addrs_str != other.addrs_str
class PortRange(object):
def __init__(self, range_str):
self.range_str = to_str(range_str)
self.range = set()
range_str = to_str(range_str).split(',')
for item in range_str:
try:
int_range = item.split('-')
if len(int_range) == 1:
if item:
self.range.add(int(item))
elif len(int_range) == 2:
int_range[0] = int(int_range[0])
int_range[1] = int(int_range[1])
if int_range[0] < 0:
int_range[0] = 0
if int_range[1] > 65535:
int_range[1] = 65535
i = int_range[0]
while i <= int_range[1]:
self.range.add(i)
i += 1
except Exception as e:
logging.error(e)
def __contains__(self, val):
return val in self.range
def __cmp__(self, other):
return cmp(self.range_str, other.range_str)
def __eq__(self, other):
return self.range_str == other.range_str
def __ne__(self, other):
return self.range_str != other.range_str
class UDPAsyncDNSHandler(object):
dns_cache = lru_cache.LRUCache(timeout=1800)
def __init__(self, params):
self.params = params
self.remote_addr = None
self.call_back = None
def resolve(self, dns_resolver, remote_addr, call_back):
if remote_addr in UDPAsyncDNSHandler.dns_cache:
if call_back:
call_back("", remote_addr, UDPAsyncDNSHandler.dns_cache[remote_addr], self.params)
else:
self.call_back = call_back
self.remote_addr = remote_addr
dns_resolver.resolve(remote_addr[0], self._handle_dns_resolved)
UDPAsyncDNSHandler.dns_cache.sweep()
def _handle_dns_resolved(self, result, error):
if error:
logging.error("%s when resolve DNS" % (error,)) #drop
return self.call_back(error, self.remote_addr, None, self.params)
if result:
ip = result[1]
if ip:
return self.call_back("", self.remote_addr, ip, self.params)
logging.warning("can't resolve %s" % (self.remote_addr,))
return self.call_back("fail to resolve", self.remote_addr, None, self.params)
def test_inet_conv():
ipv4 = b'8.8.4.4'
b = inet_pton(socket.AF_INET, ipv4)
assert inet_ntop(socket.AF_INET, b) == ipv4
ipv6 = b'2404:6800:4005:805::1011'
b = inet_pton(socket.AF_INET6, ipv6)
assert inet_ntop(socket.AF_INET6, b) == ipv6
def test_parse_header():
assert parse_header(b'\x03\x0ewww.google.com\x00\x50') == \
(0, b'www.google.com', 80, 18)
assert parse_header(b'\x01\x08\x08\x08\x08\x00\x35') == \
(0, b'8.8.8.8', 53, 7)
assert parse_header((b'\x04$\x04h\x00@\x05\x08\x05\x00\x00\x00\x00\x00'
b'\x00\x10\x11\x00\x50')) == \
(0, b'2404:6800:4005:805::1011', 80, 19)
def test_pack_header():
assert pack_addr(b'8.8.8.8') == b'\x01\x08\x08\x08\x08'
assert pack_addr(b'2404:6800:4005:805::1011') == \
b'\x04$\x04h\x00@\x05\x08\x05\x00\x00\x00\x00\x00\x00\x10\x11'
assert pack_addr(b'www.google.com') == b'\x03\x0ewww.google.com'
def test_ip_network():
ip_network = IPNetwork('127.0.0.0/24,::ff:1/112,::1,192.168.1.1,192.0.2.0')
assert '127.0.0.1' in ip_network
assert '127.0.1.1' not in ip_network
assert ':ff:ffff' in ip_network
assert '::ffff:1' not in ip_network
assert '::1' in ip_network
assert '::2' not in ip_network
assert '192.168.1.1' in ip_network
assert '192.168.1.2' not in ip_network
assert '192.0.2.1' in ip_network
assert '192.0.3.1' in ip_network # 192.0.2.0 is treated as 192.0.2.0/23
assert 'www.google.com' not in ip_network
if __name__ == '__main__':
test_inet_conv()
test_parse_header()
test_pack_header()
test_ip_network()
| [
"charl3s.xu@gmail.com"
] | charl3s.xu@gmail.com |
3996ea5205bce142f2d098be854ce4f05c36bccd | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/SUBNETVLAN-MIB.py | 95ca61b3c6a9edbac4884b175851f6f36864374d | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 7,163 | py | #
# PySNMP MIB module SUBNETVLAN-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/SUBNETVLAN-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:04:13 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion")
dlink_common_mgmt, = mibBuilder.importSymbols("DLINK-ID-REC-MIB", "dlink-common-mgmt")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Bits, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, IpAddress, Unsigned32, iso, Counter64, Gauge32, ObjectIdentity, MibIdentifier, ModuleIdentity, TimeTicks, NotificationType, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "IpAddress", "Unsigned32", "iso", "Counter64", "Gauge32", "ObjectIdentity", "MibIdentifier", "ModuleIdentity", "TimeTicks", "NotificationType", "Integer32")
RowStatus, DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "DisplayString", "TextualConvention")
swSubnetVlanMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 171, 12, 75))
if mibBuilder.loadTexts: swSubnetVlanMIB.setLastUpdated('0812020000Z')
if mibBuilder.loadTexts: swSubnetVlanMIB.setOrganization('D-Link Corp.')
class VlanId(Integer32):
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(1, 4094)
class Ipv6Address(TextualConvention, OctetString):
status = 'current'
displayHint = '2x:'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(16, 16)
fixedLength = 16
swSubnetVlanCtrl = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 12, 75, 1))
swSubnetVlanInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 12, 75, 2))
swSubnetVlanMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 12, 75, 3))
swVlanPrecedenceTable = MibTable((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 1), )
if mibBuilder.loadTexts: swVlanPrecedenceTable.setStatus('current')
swVlanPrecedenceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 1, 1), ).setIndexNames((0, "SUBNETVLAN-MIB", "swVlanPrecedencePortIndex"))
if mibBuilder.loadTexts: swVlanPrecedenceEntry.setStatus('current')
swVlanPrecedencePortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swVlanPrecedencePortIndex.setStatus('current')
swVlanPrecedenceClassification = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("macBased", 1), ("subnetBased", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swVlanPrecedenceClassification.setStatus('current')
swSubnetVLANTable = MibTable((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 2), )
if mibBuilder.loadTexts: swSubnetVLANTable.setStatus('current')
swSubnetVLANEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 2, 1), ).setIndexNames((0, "SUBNETVLAN-MIB", "swSubnetVLANIPAddress"), (0, "SUBNETVLAN-MIB", "swSubnetVLANIPMask"))
if mibBuilder.loadTexts: swSubnetVLANEntry.setStatus('current')
swSubnetVLANIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 2, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swSubnetVLANIPAddress.setStatus('current')
swSubnetVLANIPMask = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 2, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swSubnetVLANIPMask.setStatus('current')
swSubnetVLANID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 2, 1, 3), VlanId()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swSubnetVLANID.setStatus('current')
swSubnetVLANPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swSubnetVLANPriority.setStatus('current')
swSubnetVLANRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 2, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swSubnetVLANRowStatus.setStatus('current')
swSubnetVLANIPv6Table = MibTable((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 3), )
if mibBuilder.loadTexts: swSubnetVLANIPv6Table.setStatus('current')
swSubnetVLANIPv6Entry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 3, 1), ).setIndexNames((0, "SUBNETVLAN-MIB", "swSubnetVLANIPv6Address"), (0, "SUBNETVLAN-MIB", "swSubnetVLANIPv6PrefixLength"))
if mibBuilder.loadTexts: swSubnetVLANIPv6Entry.setStatus('current')
swSubnetVLANIPv6Address = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 3, 1, 1), Ipv6Address()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swSubnetVLANIPv6Address.setStatus('current')
swSubnetVLANIPv6PrefixLength = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 3, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swSubnetVLANIPv6PrefixLength.setStatus('current')
swSubnetVLANIPv6VID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 3, 1, 3), VlanId()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swSubnetVLANIPv6VID.setStatus('current')
swSubnetVLANIPv6Priority = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swSubnetVLANIPv6Priority.setStatus('current')
swSubnetVLANIPv6RowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 12, 75, 3, 3, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swSubnetVLANIPv6RowStatus.setStatus('current')
mibBuilder.exportSymbols("SUBNETVLAN-MIB", swVlanPrecedenceTable=swVlanPrecedenceTable, swSubnetVLANPriority=swSubnetVLANPriority, swVlanPrecedenceClassification=swVlanPrecedenceClassification, Ipv6Address=Ipv6Address, swSubnetVLANIPMask=swSubnetVLANIPMask, swSubnetVLANIPAddress=swSubnetVLANIPAddress, swSubnetVLANIPv6Priority=swSubnetVLANIPv6Priority, swSubnetVLANIPv6Table=swSubnetVLANIPv6Table, swSubnetVLANIPv6PrefixLength=swSubnetVLANIPv6PrefixLength, swSubnetVLANTable=swSubnetVLANTable, swVlanPrecedenceEntry=swVlanPrecedenceEntry, swSubnetVLANIPv6Address=swSubnetVLANIPv6Address, VlanId=VlanId, swSubnetVlanMgmt=swSubnetVlanMgmt, swSubnetVLANIPv6VID=swSubnetVLANIPv6VID, swSubnetVlanInfo=swSubnetVlanInfo, PYSNMP_MODULE_ID=swSubnetVlanMIB, swSubnetVLANID=swSubnetVLANID, swSubnetVLANEntry=swSubnetVLANEntry, swVlanPrecedencePortIndex=swVlanPrecedencePortIndex, swSubnetVLANIPv6Entry=swSubnetVLANIPv6Entry, swSubnetVlanMIB=swSubnetVlanMIB, swSubnetVLANIPv6RowStatus=swSubnetVLANIPv6RowStatus, swSubnetVLANRowStatus=swSubnetVLANRowStatus, swSubnetVlanCtrl=swSubnetVlanCtrl)
| [
"dcwangmit01@gmail.com"
] | dcwangmit01@gmail.com |
13a8a9192a0704ac0a5f613ec6e8a89444481bac | f2cb9b54e51e693e1a1f1c1b327b5b40038a8fbe | /src/bin/shipyard_airflow/shipyard_airflow/dags/config_path.py | 514e419d56879d5abf0cb8da4261ab92b9b04efb | [
"Apache-2.0"
] | permissive | airshipit/shipyard | 869b0c6d331e5b2d1c15145aee73397184290900 | 81066ae98fe2afd3a9c8c5c8556e9438ac47d5a2 | refs/heads/master | 2023-08-31T11:46:13.662886 | 2023-07-01T06:42:55 | 2023-08-30T16:04:47 | 133,844,902 | 6 | 2 | Apache-2.0 | 2023-09-12T19:09:02 | 2018-05-17T17:07:36 | Python | UTF-8 | Python | false | false | 811 | py | # Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Location of shiyard.conf
# Note that the shipyard.conf file needs to be placed on a volume
# that can be accessed by the containers
config_path = '/usr/local/airflow/plugins/shipyard.conf'
| [
"bryan.strassner@gmail.com"
] | bryan.strassner@gmail.com |
4fce15cbe97195207d61da778af98e7e72d75477 | e3a25b40812b6b70f10b52a6f66f9348dcc251a6 | /algorithm/0327야자/장기.py | 5c72cba9ab087f171a914be70dab9629329ed05f | [] | no_license | yoonwoo123/python101 | 75643cb5dcf411c9ddcf988bf09bb88e4523206c | 637dce64a6320a6f46eb941e33e8e9f6ee41c910 | refs/heads/master | 2020-04-14T11:30:43.018126 | 2019-07-25T08:28:31 | 2019-07-25T08:28:31 | 163,815,689 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 884 | py | import sys, collections
sys.stdin = open("장기_input.txt")
dx = [-2, -1, 1, 2, 2, 1, -1, -2]
dy = [1, 2, 2, 1, -1, -2, -2, -1]
def BFS(x, y, cnt):
global res
que = []
que.append([x, y, cnt])
arr[x][y] = 1 # 방문체크
while que:
x, y, cnt = que.pop(0)
cnt += 1
for i in range(8):
nx = x + dx[i]
ny = y + dy[i]
if nx < 0 or nx >= N or ny < 0 or ny >= M: continue
if arr[nx][ny] == 1: continue
if arr[nx][ny] == 2:
res = cnt
return
arr[nx][ny] = 1 # 방문처리
que.append([nx, ny, cnt])
N, M = map(int, input().split())
HX, HY, X, Y = map(int, input().split())
HX -= 1
HY -= 1
X -= 1
Y -= 1
arr = [[0 for _ in range(M)] for _ in range(N)]
arr[HX][HY] = 1 # 말
arr[X][Y] = 2 # 졸병
res = 0
BFS(HX, HY, 0)
print(res) | [
"lkkjasd@korea.ac.kr"
] | lkkjasd@korea.ac.kr |
c58cde042b033608e1766802b4f37d02faa1f2e1 | bca68c22ac8f44203a234d8dc69b37960ab813cc | /models/networks.py | 6c92fc2c50116ea33c1dc05c3bcb863985308acc | [
"MIT"
] | permissive | Jeozhao/AGIS-Net | a48362d4894de0c312008554640814677d49f333 | da0145e21d804da21ae7b9c66f7857316d0485d7 | refs/heads/master | 2022-05-29T13:33:09.355458 | 2020-04-30T11:10:07 | 2020-04-30T11:10:07 | 260,710,410 | 1 | 0 | null | 2020-05-02T14:57:21 | 2020-05-02T14:57:21 | null | UTF-8 | Python | false | false | 59,925 | py | import functools
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn import Parameter, init
from torch.optim import lr_scheduler
from .vgg import VGG19
###############################################################################
# Functions
###############################################################################
def init_weights(net, init_type='normal', gain=0.02):
def init_func(m):
classname = m.__class__.__name__
if hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1):
if init_type == 'normal':
init.normal_(m.weight.data, 0.0, gain)
elif init_type == 'xavier':
init.xavier_normal_(m.weight.data, gain=gain)
elif init_type == 'kaiming':
init.kaiming_normal_(m.weight.data, a=0, mode='fan_in')
elif init_type == 'orthogonal':
init.orthogonal_(m.weight.data, gain=gain)
else:
raise NotImplementedError(
'initialization method [%s] is not implemented' % init_type)
if hasattr(m, 'bias') and m.bias is not None:
init.constant_(m.bias.data, 0.0)
elif classname.find('BatchNorm2d') != -1:
init.normal_(m.weight.data, 1.0, gain)
init.constant_(m.bias.data, 0.0)
print('initialize network with %s' % init_type)
net.apply(init_func)
def init_net(net, init_type='normal', gpu_ids=[]):
if len(gpu_ids) > 0:
assert(torch.cuda.is_available())
net.cuda()
net = torch.nn.DataParallel(net)
init_weights(net, init_type)
return net
def get_scheduler(optimizer, opt):
if opt.lr_policy == 'lambda':
def lambda_rule(epoch):
lr_l = 1.0 - max(0, epoch - opt.niter) / float(opt.niter_decay + 1)
return lr_l
scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda_rule)
elif opt.lr_policy == 'step':
scheduler = lr_scheduler.StepLR(
optimizer, step_size=opt.lr_decay_iters, gamma=0.1)
elif opt.lr_policy == 'plateau':
scheduler = lr_scheduler.ReduceLROnPlateau(
optimizer, mode='min', factor=0.2, threshold=0.01, patience=5)
else:
return NotImplementedError('learning rate policy [%s] is not implemented', opt.lr_policy)
return scheduler
def get_norm_layer(layer_type='instance'):
if layer_type == 'batch':
norm_layer = functools.partial(nn.BatchNorm2d, affine=True)
elif layer_type == 'instance':
norm_layer = functools.partial(nn.InstanceNorm2d, affine=False)
elif layer_type == 'none':
norm_layer = None
else:
raise NotImplementedError(
'normalization layer [%s] is not found' % layer_type)
return norm_layer
def get_non_linearity(layer_type='relu'):
if layer_type == 'relu':
nl_layer = functools.partial(nn.ReLU, inplace=True)
elif layer_type == 'lrelu':
nl_layer = functools.partial(
nn.LeakyReLU, negative_slope=0.2, inplace=True)
elif layer_type == 'elu':
nl_layer = functools.partial(nn.ELU, inplace=True)
else:
raise NotImplementedError(
'nonlinearity activitation [%s] is not found' % layer_type)
return nl_layer
def get_self_attention_layer(in_dim):
self_attn_layer = SelfAttention(in_dim)
return self_attn_layer
def define_G(input_nc, output_nc, nz, ngf, nencode=4, netG='unet_128', use_spectral_norm=False,
norm='batch', nl='relu', use_dropout=False, use_attention=False,
init_type='xavier', gpu_ids=[], where_add='input', upsample='bilinear'):
net = None
norm_layer = get_norm_layer(layer_type=norm)
nl_layer = get_non_linearity(layer_type=nl)
if nz == 0:
where_add = 'input'
if netG == 'agisnet':
input_content = input_nc
input_style = input_nc * nencode
net = AGISNet(input_content, input_style, output_nc, 6, ngf,
norm_layer=norm_layer, nl_layer=nl_layer,
use_dropout=use_dropout, use_attention=use_attention,
use_spectral_norm=use_spectral_norm, upsample=upsample)
elif netG == 'unet_64' and where_add == 'input':
net = G_Unet_add_input(input_nc, output_nc, nz, 6, ngf, norm_layer=norm_layer, nl_layer=nl_layer,
use_dropout=use_dropout, use_attention=use_attention,
use_spectral_norm=use_spectral_norm, upsample=upsample)
elif netG == 'unet_128' and where_add == 'input':
net = G_Unet_add_input(input_nc, output_nc, nz, 7, ngf, norm_layer=norm_layer, nl_layer=nl_layer,
use_dropout=use_dropout, use_attention=use_attention,
use_spectral_norm=use_spectral_norm, upsample=upsample)
elif netG == 'unet_256' and where_add == 'input':
net = G_Unet_add_input(input_nc, output_nc, nz, 8, ngf, norm_layer=norm_layer, nl_layer=nl_layer,
use_dropout=use_dropout, use_attention=use_attention,
use_spectral_norm=use_spectral_norm, upsample=upsample)
elif netG == 'unet_64' and where_add == 'all':
net = G_Unet_add_all(input_nc, output_nc, nz, 6, ngf, norm_layer=norm_layer, nl_layer=nl_layer,
use_dropout=use_dropout, use_attention=use_attention,
use_spectral_norm=use_spectral_norm, upsample=upsample)
elif netG == 'unet_128' and where_add == 'all':
net = G_Unet_add_all(input_nc, output_nc, nz, 7, ngf, norm_layer=norm_layer, nl_layer=nl_layer,
use_dropout=use_dropout, use_attention=use_attention,
use_spectral_norm=use_spectral_norm, upsample=upsample)
elif netG == 'unet_256' and where_add == 'all':
net = G_Unet_add_all(input_nc, output_nc, nz, 8, ngf, norm_layer=norm_layer, nl_layer=nl_layer,
use_dropout=use_dropout, use_attention=use_attention,
use_spectral_norm=use_spectral_norm, upsample=upsample)
else:
raise NotImplementedError(
'Generator model name [%s] is not recognized' % net)
return init_net(net, init_type, gpu_ids)
def define_D(input_nc, ndf, netD,
norm='batch', nl='lrelu', use_spectral_norm=False,
use_sigmoid=False, init_type='xavier', num_Ds=1, gpu_ids=[]):
net = None
norm_layer = get_norm_layer(layer_type=norm)
nl = 'lrelu' # use leaky relu for D
nl_layer = get_non_linearity(layer_type=nl)
if netD == 'basic_32':
net = D_NLayers(input_nc, ndf, n_layers=2, norm_layer=norm_layer,
use_spectral_norm=use_spectral_norm, nl_layer=nl_layer, use_sigmoid=use_sigmoid)
elif netD == 'basic_64':
net = D_NLayers(input_nc, ndf, n_layers=2, norm_layer=norm_layer,
use_spectral_norm=use_spectral_norm, nl_layer=nl_layer, use_sigmoid=use_sigmoid)
elif netD == 'basic_128':
net = D_NLayers(input_nc, ndf, n_layers=2, norm_layer=norm_layer,
use_spectral_norm=use_spectral_norm, nl_layer=nl_layer, use_sigmoid=use_sigmoid)
elif netD == 'basic_256':
net = D_NLayers(input_nc, ndf, n_layers=3, norm_layer=norm_layer,
use_spectral_norm=use_spectral_norm, nl_layer=nl_layer, use_sigmoid=use_sigmoid)
elif netD == 'basic_64_multi':
net = D_NLayersMulti(input_nc=input_nc, ndf=ndf, n_layers=1, norm_layer=norm_layer,
use_spectral_norm=use_spectral_norm, use_sigmoid=use_sigmoid, num_D=num_Ds)
elif netD == 'basic_128_multi':
net = D_NLayersMulti(input_nc=input_nc, ndf=ndf, n_layers=2, norm_layer=norm_layer,
use_spectral_norm=use_spectral_norm, use_sigmoid=use_sigmoid, num_D=num_Ds)
elif netD == 'basic_256_multi':
net = D_NLayersMulti(input_nc=input_nc, ndf=ndf, n_layers=3, norm_layer=norm_layer,
use_spectral_norm=use_spectral_norm, use_sigmoid=use_sigmoid, num_D=num_Ds)
else:
raise NotImplementedError(
'Discriminator model name [%s] is not recognized' % net)
return init_net(net, init_type, gpu_ids)
def define_R(input_nc, ndf, netR, norm='instance', nl='lrelu', use_spectral_norm=False,
use_sigmoid=False, init_type='xavier', gpu_ids=[]):
net = None
norm_layer = get_norm_layer(layer_type=norm)
nl = 'lrelu'
nl_layer = get_non_linearity(layer_type=nl)
if netR == 'basic_64':
net = R_NLayers(input_nc, ndf, n_layers=4, norm_layer=norm_layer,
use_spectral_norm=use_spectral_norm, nl_layer=nl_layer, use_sigmoid=use_sigmoid)
elif netR == 'basic_128':
net = R_NLayers(input_nc, ndf, n_layers=5, norm_layer=norm_layer,
use_spectral_norm=use_spectral_norm, nl_layer=nl_layer, use_sigmoid=use_sigmoid)
elif netR == 'basic_256':
net = R_NLayers(input_nc, ndf, n_layers=6, norm_layer=norm_layer,
use_spectral_norm=use_spectral_norm, nl_layer=nl_layer, use_sigmoid=use_sigmoid)
else:
raise NotImplementedError(
'Reviser model name [%s] is not recognized' % net)
return init_net(net, init_type, gpu_ids)
def define_E(input_nc, output_nc, nef, netE,
norm='batch', nl='lrelu',
init_type='xavier', gpu_ids=[], vaeLike=False):
net = None
norm_layer = get_norm_layer(layer_type=norm)
nl = 'lrelu' # use leaky relu for E
nl_layer = get_non_linearity(layer_type=nl)
if netE == 'resnet_64':
net = E_ResNet(input_nc, output_nc, nef, n_blocks=3, norm_layer=norm_layer,
nl_layer=nl_layer, vaeLike=vaeLike)
elif netE == 'resnet_128':
net = E_ResNet(input_nc, output_nc, nef, n_blocks=4, norm_layer=norm_layer,
nl_layer=nl_layer, vaeLike=vaeLike)
elif netE == 'resnet_256':
net = E_ResNet(input_nc, output_nc, nef, n_blocks=5, norm_layer=norm_layer,
nl_layer=nl_layer, vaeLike=vaeLike)
elif netE == 'conv_64':
net = E_NLayers(input_nc, output_nc, nef, n_layers=3, norm_layer=norm_layer,
nl_layer=nl_layer, vaeLike=vaeLike)
elif netE == 'conv_128':
net = E_NLayers(input_nc, output_nc, nef, n_layers=4, norm_layer=norm_layer,
nl_layer=nl_layer, vaeLike=vaeLike)
elif netE == 'conv_256':
net = E_NLayers(input_nc, output_nc, nef, n_layers=5, norm_layer=norm_layer,
nl_layer=nl_layer, vaeLike=vaeLike)
else:
raise NotImplementedError(
'Encoder model name [%s] is not recognized' % net)
return init_net(net, init_type, gpu_ids)
class ListModule(object):
    """List-like container for submodules registered on a parent module.

    Each appended module is registered on ``module`` under the name
    ``prefix + index`` so PyTorch tracks it, while this wrapper provides
    ``len()`` and integer indexing like a plain list.
    """

    def __init__(self, module, prefix, *args):
        self.module = module
        self.prefix = prefix
        self.num_module = 0
        for item in args:
            self.append(item)

    def append(self, new_module):
        """Register one more module; rejects anything that is not nn.Module."""
        if isinstance(new_module, nn.Module):
            name = self.prefix + str(self.num_module)
            self.module.add_module(name, new_module)
            self.num_module += 1
        else:
            raise ValueError('Not a Module')

    def __len__(self):
        return self.num_module

    def __getitem__(self, i):
        if not 0 <= i < self.num_module:
            raise IndexError('Out of bound')
        return getattr(self.module, self.prefix + str(i))
class D_NLayersMulti(nn.Module):
    """Multi-scale N-layer PatchGAN-style discriminator.

    With ``num_D == 1`` this is a single N-layer discriminator.  With
    ``num_D > 1`` it holds one discriminator per scale: each subsequent
    discriminator sees a 2x average-pooled version of the input and uses
    half the base channel width of the previous one; ``forward`` then
    returns a list of per-scale predictions.
    """

    def __init__(self, input_nc, ndf=64, n_layers=3, use_spectral_norm=False,
                 norm_layer=nn.BatchNorm2d, use_sigmoid=False, num_D=1):
        super(D_NLayersMulti, self).__init__()
        # st()
        self.num_D = num_D
        if num_D == 1:
            layers = self.get_layers(
                input_nc, ndf, n_layers, norm_layer, use_sigmoid, use_spectral_norm)
            self.model = nn.Sequential(*layers)
        else:
            # ListModule registers each per-scale discriminator on this module
            # so their parameters are tracked.
            self.model = ListModule(self, 'model')
            layers = self.get_layers(
                input_nc, ndf, n_layers, norm_layer, use_sigmoid, use_spectral_norm)
            self.model.append(nn.Sequential(*layers))
            # 2x spatial downsampling applied between scales in forward()
            self.down = nn.AvgPool2d(3, stride=2, padding=[1, 1], count_include_pad=False)
            for i in range(num_D - 1):
                # halve the channel width for each coarser scale
                ndf_i = int(round(ndf / (2**(i + 1))))
                layers = self.get_layers(
                    input_nc, ndf_i, n_layers, norm_layer, use_sigmoid, use_spectral_norm)
                self.model.append(nn.Sequential(*layers))

    def get_layers(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm2d,
                   use_sigmoid=False, use_spectral_norm=False):
        # Build the layer list for one discriminator: a stack of stride-2
        # convs, one stride-1 conv, then a final 1-channel prediction map.
        kw = 4
        padw = 1
        if use_spectral_norm:
            sequence = [SpectralNorm(nn.Conv2d(input_nc, ndf, kernel_size=kw,
                                               stride=2, padding=padw)), nn.LeakyReLU(0.2, True)]
        else:
            sequence = [nn.Conv2d(input_nc, ndf, kernel_size=kw,
                                  stride=2, padding=padw), nn.LeakyReLU(0.2, True)]
        nf_mult = 1
        nf_mult_prev = 1
        for n in range(1, n_layers):
            nf_mult_prev = nf_mult
            nf_mult = min(2**n, 8)  # channel growth capped at 8x ndf
            if use_spectral_norm:
                sequence += [SpectralNorm(nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult,
                                                    kernel_size=kw, stride=2, padding=padw))]
            else:
                sequence += [nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult,
                                       kernel_size=kw, stride=2, padding=padw)]
            sequence += [
                norm_layer(ndf * nf_mult),
                nn.LeakyReLU(0.2, True)
            ]
        nf_mult_prev = nf_mult
        nf_mult = min(2**n_layers, 8)
        if use_spectral_norm:
            sequence += [SpectralNorm(nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult,
                                                kernel_size=kw, stride=1, padding=padw))]
        else:
            sequence += [nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult,
                                   kernel_size=kw, stride=1, padding=padw)]
        sequence += [
            norm_layer(ndf * nf_mult),
            nn.LeakyReLU(0.2, True)
        ]
        # Final conv collapses to a single-channel patch prediction map.
        if use_spectral_norm:
            sequence += [SpectralNorm(nn.Conv2d(ndf * nf_mult, 1,
                                                kernel_size=kw, stride=1, padding=padw))]
        else:
            sequence += [nn.Conv2d(ndf * nf_mult, 1,
                                   kernel_size=kw, stride=1, padding=padw)]
        if use_sigmoid:
            sequence += [nn.Sigmoid()]
        return sequence

    def forward(self, input):
        if self.num_D == 1:
            return self.model(input)
        # Multi-scale: run every discriminator, downsampling the input
        # between scales; returns a list of predictions.
        result = []
        down = input
        for i in range(self.num_D):
            result.append(self.model[i](down))
            if i != self.num_D - 1:
                down = self.down(down)
        return result
class G_NLayers(nn.Module):
    """Transposed-convolution generator mapping a latent code to an image.

    The code (nz x 1 x 1) is first projected to a 4x4 map with ngf*4
    channels, then upsampled by a chain of stride-2 transposed convs whose
    width shrinks down to ngf, and finally mapped to ``output_nc`` channels
    with a Tanh output.
    """

    def __init__(self, output_nc=3, nz=100, ngf=64, n_layers=3,
                 norm_layer=None, nl_layer=None):
        super(G_NLayers, self).__init__()
        kw, s, padw = 4, 2, 1
        # project z (nz x 1 x 1) to a 4x4 feature map
        layers = [nn.ConvTranspose2d(nz, ngf * 4, kernel_size=kw, stride=1,
                                     padding=0, bias=True)]
        if norm_layer is not None:
            layers.append(norm_layer(ngf * 4))
        layers.append(nl_layer())

        width, prev_width = 4, 4
        for n in range(n_layers, 0, -1):
            prev_width, width = width, min(n, 4)
            layers.append(nn.ConvTranspose2d(ngf * prev_width, ngf * width,
                                             kernel_size=kw, stride=s,
                                             padding=padw, bias=True))
            if norm_layer is not None:
                layers.append(norm_layer(ngf * width))
            layers.append(nl_layer())

        # final upsample to the output channel count, squashed to [-1, 1]
        layers.append(nn.ConvTranspose2d(ngf, output_nc, kernel_size=4,
                                         stride=s, padding=padw, bias=True))
        layers.append(nn.Tanh())
        self.model = nn.Sequential(*layers)

    def forward(self, input):
        return self.model(input)
# Defines the conv discriminator with the specified arguments.
class D_NLayers(nn.Module):
    """N-layer convolutional discriminator producing a 1-channel patch map.

    Structure: stride-2 conv stack (width doubling, capped at 8x ndf),
    one stride-1 conv, then a final padding-free conv to a single channel,
    optionally followed by a Sigmoid.
    """

    def __init__(self, input_nc=3, ndf=64, n_layers=3, use_spectral_norm=False,
                 norm_layer=None, nl_layer=None, use_sigmoid=False):
        super(D_NLayers, self).__init__()
        kw, padw, use_bias = 4, 1, True
        # st()

        def wrap(conv):
            # optionally apply spectral normalization to a conv layer
            return SpectralNorm(conv) if use_spectral_norm else conv

        layers = [wrap(nn.Conv2d(input_nc, ndf, kernel_size=kw,
                                 stride=2, padding=padw, bias=use_bias))]
        layers.append(nl_layer())

        width = 1
        for n in range(1, n_layers):
            prev_width, width = width, min(2 ** n, 8)
            layers.append(wrap(nn.Conv2d(ndf * prev_width, ndf * width,
                                         kernel_size=kw, stride=2,
                                         padding=padw, bias=use_bias)))
            if norm_layer is not None:
                layers.append(norm_layer(ndf * width))
            layers.append(nl_layer())

        prev_width, width = width, min(2 ** n_layers, 8)
        # stride-1 conv (never spectrally normalized, matching the original)
        layers.append(nn.Conv2d(ndf * prev_width, ndf * width, kernel_size=kw,
                                stride=1, padding=padw, bias=use_bias))
        if norm_layer is not None:
            layers.append(norm_layer(ndf * width))
        layers.append(nl_layer())

        # collapse to a single-channel prediction map (no padding)
        layers.append(wrap(nn.Conv2d(ndf * width, 1, kernel_size=kw,
                                     stride=1, padding=0, bias=use_bias)))
        if use_sigmoid:
            layers.append(nn.Sigmoid())
        self.model = nn.Sequential(*layers)

    def forward(self, input):
        return self.model(input)
class R_NLayers(nn.Module):
    """N-layer convolutional reviser; same layout as D_NLayers but with 3x3
    kernels: stride-2 conv stack, a stride-1 conv, then a padding-free conv
    to one channel, optionally followed by a Sigmoid.
    """

    def __init__(self, input_nc=3, ndf=64, n_layers=3, use_spectral_norm=False,
                 norm_layer=None, nl_layer=None, use_sigmoid=False):
        super(R_NLayers, self).__init__()
        kw, padw, use_bias = 3, 1, True
        # st()

        def wrap(conv):
            # optionally apply spectral normalization to a conv layer
            return SpectralNorm(conv) if use_spectral_norm else conv

        layers = [wrap(nn.Conv2d(input_nc, ndf, kernel_size=kw,
                                 stride=2, padding=padw, bias=use_bias))]
        layers.append(nl_layer())

        width = 1
        for n in range(1, n_layers):
            prev_width, width = width, min(2 ** n, 8)
            layers.append(wrap(nn.Conv2d(ndf * prev_width, ndf * width,
                                         kernel_size=kw, stride=2,
                                         padding=padw, bias=use_bias)))
            if norm_layer is not None:
                layers.append(norm_layer(ndf * width))
            layers.append(nl_layer())

        prev_width, width = width, min(2 ** n_layers, 8)
        # stride-1 conv (never spectrally normalized, matching the original)
        layers.append(nn.Conv2d(ndf * prev_width, ndf * width, kernel_size=kw,
                                stride=1, padding=padw, bias=use_bias))
        if norm_layer is not None:
            layers.append(norm_layer(ndf * width))
        layers.append(nl_layer())

        # collapse to a single-channel prediction map (no padding)
        layers.append(wrap(nn.Conv2d(ndf * width, 1, kernel_size=kw,
                                     stride=1, padding=0, bias=use_bias)))
        if use_sigmoid:
            layers.append(nn.Sigmoid())
        self.model = nn.Sequential(*layers)

    def forward(self, input):
        return self.model(input)
class E_NLayers(nn.Module):
    """Plain convolutional encoder; optionally VAE-like with a second head.

    A stack of stride-2 convs (width growth capped at 4x nef) followed by
    an 8x8 average pool and a linear head.  With ``vaeLike`` a second
    linear head is added and forward returns (output, outputVar).
    Assumes the conv trunk leaves an 8x8 spatial map before the pool.
    """

    def __init__(self, input_nc, output_nc=1, nef=64, n_layers=3,
                 norm_layer=None, nl_layer=None, vaeLike=False):
        super(E_NLayers, self).__init__()
        self.vaeLike = vaeLike
        kw, padw = 4, 1

        layers = [nn.Conv2d(input_nc, nef, kernel_size=kw, stride=2,
                            padding=padw), nl_layer()]
        mult = 1
        for n in range(1, n_layers):
            prev, mult = mult, min(2 ** n, 4)
            layers.append(nn.Conv2d(nef * prev, nef * mult, kernel_size=kw,
                                    stride=2, padding=padw))
            if norm_layer is not None:
                layers.append(norm_layer(nef * mult))
            layers.append(nl_layer())
        layers.append(nn.AvgPool2d(8))
        self.conv = nn.Sequential(*layers)
        self.fc = nn.Sequential(nn.Linear(nef * mult, output_nc))
        if vaeLike:
            self.fcVar = nn.Sequential(nn.Linear(nef * mult, output_nc))

    def forward(self, x):
        feat = self.conv(x)
        flat = feat.view(x.size(0), -1)
        out = self.fc(flat)
        if self.vaeLike:
            return out, self.fcVar(flat)
        return out
class E_ResNet(nn.Module):
    """ResNet-style encoder: strided conv stem, BasicBlocks, global pooling.

    Maps an image to a flat code of size ``output_nc``; with ``vaeLike`` a
    second linear head shares the convolutional trunk and forward returns
    (output, outputVar).  Assumes ``n_blocks >= 2`` (the channel bookkeeping
    needs at least one BasicBlock to define ``output_nef``) and an input
    large enough that the trunk output is 8x8 before the average pool.
    """

    def __init__(self, input_nc=3, output_nc=1, nef=64, n_blocks=4,
                 norm_layer=None, nl_layer=None, vaeLike=False):
        super(E_ResNet, self).__init__()
        self.vaeLike = vaeLike
        max_nef = 4  # channel growth is capped at 4x nef
        conv_layers = [
            nn.Conv2d(input_nc, nef, kernel_size=4, stride=2, padding=1, bias=True)]
        for n in range(1, n_blocks):
            input_nef = nef * min(max_nef, n)
            output_nef = nef * min(max_nef, n + 1)
            conv_layers += [BasicBlock(input_nef,
                                       output_nef, norm_layer, nl_layer)]
        conv_layers += [nl_layer(), nn.AvgPool2d(8)]
        # Cleanup: the original duplicated the self.fc construction in both
        # branches; only fcVar is VAE-specific.
        self.fc = nn.Sequential(nn.Linear(output_nef, output_nc))
        if vaeLike:
            self.fcVar = nn.Sequential(nn.Linear(output_nef, output_nc))
        self.conv = nn.Sequential(*conv_layers)

    def forward(self, x):
        x_conv = self.conv(x)
        conv_flat = x_conv.view(x.size(0), -1)
        output = self.fc(conv_flat)
        if self.vaeLike:
            # (the original also had an unreachable `return output` after
            # this if/else; removed)
            return output, self.fcVar(conv_flat)
        return output
##############################################################################
# Classes
##############################################################################
class CXLoss(nn.Module):
    """Contextual (CX) loss between two feature maps.

    Compares an "inference" feature map against a "target" feature map by
    matching each spatial location of the inference against all locations
    of the target via cosine similarity, converting distances to soft
    affinities, and scoring the best match per target location.

    Args:
        sigma: bandwidth of the exponential turning distances into affinities.
        b: bias applied before the exponential.
        similarity: stored but not referenced in the code below (note the
            historical misspelling 'consine' in the default).
    """

    def __init__(self, sigma=0.1, b=1.0, similarity="consine"):
        super(CXLoss, self).__init__()
        self.similarity = similarity
        self.sigma = sigma
        self.b = b

    def center_by_T(self, featureI, featureT):
        # Calculate mean channel vector for feature map.
        # Both maps are centered using the *target's* per-channel mean.
        meanT = featureT.mean(0, keepdim=True).mean(2, keepdim=True).mean(3, keepdim=True)
        return featureI - meanT, featureT - meanT

    def l2_normalize_channelwise(self, features):
        # Normalize on channel dimension (axis=1)
        norms = features.norm(p=2, dim=1, keepdim=True)
        features = features.div(norms)
        return features

    def patch_decomposition(self, features):
        # Turn a 1xCxHxW map into H*W 1x1 "patches" usable as conv kernels.
        N, C, H, W = features.shape
        assert N == 1
        P = H * W
        # NCHW --> 1x1xCXHW --> HWxCx1x1
        patches = features.view(1, 1, C, P).permute((3, 2, 0, 1))
        return patches

    def calc_relative_distances(self, raw_dist, axis=1):
        # Normalize each distance by the minimum distance along `axis`.
        epsilon = 1e-5
        # [0] means get the value, torch min will return the index as well
        div = torch.min(raw_dist, dim=axis, keepdim=True)[0]
        relative_dist = raw_dist / (div + epsilon)
        return relative_dist

    def calc_CX(self, dist, axis=1):
        # Soft affinities: exp((b - d) / sigma), normalized over `axis`.
        W = torch.exp((self.b - dist) / self.sigma)
        W_sum = W.sum(dim=axis, keepdim=True)
        return W.div(W_sum)

    def forward(self, featureT, featureI):
        '''
        :param featureT: target feature map (N, C, H, W)
        :param featureI: inference feature map (N, C, H, W)
        :return: (mean CX loss over the batch, per-sample CX loss CX_B)
        '''
        # print("featureT target size:", featureT.shape)
        # print("featureI inference size:", featureI.shape)
        featureI, featureT = self.center_by_T(featureI, featureT)
        featureI = self.l2_normalize_channelwise(featureI)
        featureT = self.l2_normalize_channelwise(featureT)
        dist = []
        N = featureT.size()[0]
        for i in range(N):
            # NCHW
            featureT_i = featureT[i, :, :, :].unsqueeze(0)
            # NCHW
            featureI_i = featureI[i, :, :, :].unsqueeze(0)
            featureT_patch = self.patch_decomposition(featureT_i)
            # Calculate cosine similarity
            # conv2d with 1x1 unit-norm patches == per-location dot products.
            dist_i = F.conv2d(featureI_i, featureT_patch)
            dist.append(dist_i)
        # NCHW
        dist = torch.cat(dist, dim=0)
        # Map cosine similarity in [-1, 1] to a distance in [0, 1].
        raw_dist = (1. - dist) / 2.
        relative_dist = self.calc_relative_distances(raw_dist)
        CX = self.calc_CX(relative_dist)
        # Best match per target location, averaged over locations.
        CX = CX.max(dim=3)[0].max(dim=2)[0]
        CX = CX.mean(1)
        CX_B = -torch.log(CX)
        CX = torch.mean(CX_B)
        return CX, CX_B
class RecLoss(nn.Module):
    """Reconstruction loss: squared error (default) or absolute error.

    Args:
        use_L2: if True use (input - target)^2, else |input - target|.
    """

    def __init__(self, use_L2=True):
        super(RecLoss, self).__init__()
        self.use_L2 = use_L2

    def __call__(self, input, target, batch_mean=True):
        """Return the scalar mean loss, or with ``batch_mean=False`` a
        per-sample loss vector of shape (N,)."""
        if self.use_L2:
            diff = (input - target) ** 2
        else:
            diff = torch.abs(input - target)
        if batch_mean:
            return torch.mean(diff)
        # Bug fix: the original chained torch.mean(.., dim=1), dim=2, dim=3
        # with stale indices — dimensions shift after each reduction, so the
        # final dim=3 raises for NCHW input.  Reduce all non-batch
        # dimensions in a single call instead.
        return torch.mean(diff, dim=tuple(range(1, diff.dim())))
# Defines the GAN loss which uses either LSGAN or the regular GAN.
# When LSGAN is used, it is basically same as MSELoss,
# but it abstracts away the need to create the target label tensor
# that has the same size as the input
class GANLoss(nn.Module):
    """GAN objective that hides the target-label tensor bookkeeping.

    With ``mse_loss=True`` this is the LSGAN objective (MSE against the
    real/fake label); otherwise the vanilla GAN objective (BCE).  Inputs
    are given as a list of discriminator outputs (e.g. one per scale); the
    returned loss is the sum over the list.
    """

    def __init__(self, mse_loss=True, target_real_label=1.0, target_fake_label=0.0):
        super(GANLoss, self).__init__()
        # Buffers follow the module across device/dtype moves.
        self.register_buffer('real_label', torch.tensor(target_real_label))
        self.register_buffer('fake_label', torch.tensor(target_fake_label))
        # Bug fix: nn.BCELoss was referenced without parentheses, storing the
        # class (not an instance) in self.loss and breaking the BCE path.
        self.loss = nn.MSELoss() if mse_loss else nn.BCELoss()

    def get_target_tensor(self, input, target_is_real):
        """Return the real/fake label broadcast to input's shape."""
        if target_is_real:
            target_tensor = self.real_label
        else:
            target_tensor = self.fake_label
        return target_tensor.expand_as(input)

    def __call__(self, inputs, target_is_real):
        """Compute the loss for a list of predictions.

        Returns:
            (total loss summed over the list, list of per-input losses)
        """
        all_losses = []
        for input in inputs:
            target_tensor = self.get_target_tensor(input, target_is_real)
            loss_input = self.loss(input, target_tensor)
            all_losses.append(loss_input)
        loss = sum(all_losses)
        return loss, all_losses
def upsampleLayer(inplanes, outplanes, upsample='basic', padding_type='zero', use_spectral_norm=False):
    """Return a list of modules that upsample 2x.

    'basic' uses a single stride-2 transposed conv; 'bilinear' uses
    bilinear upsampling + reflection pad + 3x3 conv.  With
    ``use_spectral_norm`` the conv is wrapped in SpectralNorm.
    """
    # padding_type = 'zero'
    if upsample == 'basic':
        conv = nn.ConvTranspose2d(inplanes, outplanes, kernel_size=4,
                                  stride=2, padding=1)
        upconv = [SpectralNorm(conv) if use_spectral_norm else conv]
    elif upsample == 'bilinear':
        conv = nn.Conv2d(inplanes, outplanes, kernel_size=3, stride=1, padding=0)
        upconv = [nn.Upsample(scale_factor=2, mode='bilinear'),
                  nn.ReflectionPad2d(1),
                  SpectralNorm(conv) if use_spectral_norm else conv]
    else:
        raise NotImplementedError(
            'upsample layer [%s] not implemented' % upsample)
    return upconv
def conv3x3(in_planes, out_planes):
    """3x3 convolution, stride 1, 'same' padding, with bias."""
    return nn.Conv2d(in_planes, out_planes, stride=1, kernel_size=3,
                     padding=1, bias=True)
# two usage cases, depend on kw and padw
def upsampleConv(inplanes, outplanes, kw, padw):
    """Nearest-neighbour 2x upsample followed by a stride-1 convolution."""
    return nn.Sequential(
        nn.Upsample(scale_factor=2, mode='nearest'),
        nn.Conv2d(inplanes, outplanes, kernel_size=kw, stride=1,
                  padding=padw, bias=True),
    )
def meanpoolConv(inplanes, outplanes):
    """2x average-pool downsample followed by a 1x1 convolution."""
    return nn.Sequential(
        nn.AvgPool2d(kernel_size=2, stride=2),
        nn.Conv2d(inplanes, outplanes, kernel_size=1, stride=1,
                  padding=0, bias=True),
    )
def convMeanpool(inplanes, outplanes):
    """3x3 convolution followed by a 2x average-pool downsample."""
    return nn.Sequential(conv3x3(inplanes, outplanes),
                         nn.AvgPool2d(kernel_size=2, stride=2))
def l2normalize(v, eps=1e-12):
    """Scale v to unit L2 norm; eps guards against division by zero."""
    norm = v.norm()
    return v / (norm + eps)
class SpectralNorm(nn.Module):
    """Spectral normalization wrapper (power-iteration based).

    Wraps ``module`` and, on every forward pass, rescales its ``name``
    weight by an estimate of the weight matrix's largest singular value.
    The raw weight is stored as ``<name>_bar`` together with persistent
    power-iteration vectors ``<name>_u`` / ``<name>_v``; the normalized
    weight is re-assigned to ``<name>`` before delegating to the wrapped
    module's forward.
    """

    def __init__(self, module, name='weight', power_iterations=1):
        super(SpectralNorm, self).__init__()
        self.module = module
        self.name = name
        self.power_iterations = power_iterations
        # Install u/v/bar parameters once; skip if already present.
        if not self._made_params():
            self._make_params()

    def _update_u_v(self):
        # Refine the dominant singular-vector estimates by power iteration,
        # then divide the weight by the estimated spectral norm.
        u = getattr(self.module, self.name + "_u")
        v = getattr(self.module, self.name + "_v")
        w = getattr(self.module, self.name + "_bar")
        height = w.data.shape[0]
        for _ in range(self.power_iterations):
            v.data = l2normalize(
                torch.mv(torch.t(w.view(height, -1).data), u.data))
            u.data = l2normalize(torch.mv(w.view(height, -1).data, v.data))
        # sigma = torch.dot(u.data, torch.mv(w.view(height,-1).data, v.data))
        sigma = u.dot(w.view(height, -1).mv(v))  # estimated largest singular value
        setattr(self.module, self.name, w / sigma.expand_as(w))

    def _made_params(self):
        # True iff the u/v/bar parameters have already been installed.
        try:
            getattr(self.module, self.name + "_u")
            getattr(self.module, self.name + "_v")
            getattr(self.module, self.name + "_bar")
            return True
        except AttributeError:
            return False

    def _make_params(self):
        # Replace the wrapped module's weight parameter with (u, v, w_bar):
        # u/v are frozen unit-norm vectors, w_bar holds the raw weight.
        w = getattr(self.module, self.name)
        height = w.data.shape[0]
        width = w.view(height, -1).data.shape[1]
        u = Parameter(w.data.new(height).normal_(0, 1), requires_grad=False)
        v = Parameter(w.data.new(width).normal_(0, 1), requires_grad=False)
        u.data = l2normalize(u.data)
        v.data = l2normalize(v.data)
        w_bar = Parameter(w.data)
        del self.module._parameters[self.name]
        self.module.register_parameter(self.name + "_u", u)
        self.module.register_parameter(self.name + "_v", v)
        self.module.register_parameter(self.name + "_bar", w_bar)

    def forward(self, *args):
        # Re-normalize the weight on every call, then delegate.
        self._update_u_v()
        return self.module.forward(*args)
# Self Attention module from self-attention gan
class SelfAttention(nn.Module):
    """Self attention layer (SAGAN-style).

    Computes attention over all spatial positions with 1x1 query/key/value
    convolutions and blends the attended features back into the input via a
    learnable scalar ``gamma`` (initialized to zero, so the layer starts as
    the identity).
    """

    def __init__(self, in_dim, activation=None):
        super(SelfAttention, self).__init__()
        self.chanel_in = in_dim
        self.activation = activation
        # 1x1 projections; query/key are reduced to in_dim // 8 channels
        self.query_conv = nn.Conv2d(
            in_channels=in_dim, out_channels=in_dim//8, kernel_size=1)
        self.key_conv = nn.Conv2d(
            in_channels=in_dim, out_channels=in_dim//8, kernel_size=1)
        self.value_conv = nn.Conv2d(
            in_channels=in_dim, out_channels=in_dim, kernel_size=1)
        self.gamma = nn.Parameter(torch.zeros(1))
        self.softmax = nn.Softmax(dim=-1)

    def forward(self, x):
        """
        inputs :
            x : input feature maps (B x C x W x H)
        returns :
            out : gamma * attended features + x  (same shape as x)
        """
        b, c, w, h = x.size()
        n = w * h
        # B x N x C' queries against B x C' x N keys -> B x N x N energies
        queries = self.query_conv(x).view(b, -1, n).permute(0, 2, 1)
        keys = self.key_conv(x).view(b, -1, n)
        attention = self.softmax(torch.bmm(queries, keys))
        values = self.value_conv(x).view(b, -1, n)
        attended = torch.bmm(values, attention.permute(0, 2, 1))
        attended = attended.view(b, c, w, h)
        # gamma starts at 0: the block is the identity until trained
        return self.gamma * attended + x
# Defines the submodule with skip connection.
# X -------------------identity---------------------- X
# |-- downsampling -- |submodule| -- upsampling --|
class UnetBlock(nn.Module):
    """One level of a recursively-built U-Net.

    X -------------------identity---------------------- X
      |-- downsampling -- |submodule| -- upsampling --|

    Non-outermost blocks concatenate their input to their output (skip
    connection); the outermost block returns the decoded output directly.
    """

    def __init__(self, input_nc, outer_nc, inner_nc,
                 submodule=None, outermost=False, innermost=False, use_spectral_norm=False,
                 norm_layer=None, nl_layer=None, use_dropout=False, use_attention=False,
                 upsample='basic', padding_type='zero'):
        super(UnetBlock, self).__init__()
        self.outermost = outermost
        p = 0
        downconv = []
        if padding_type == 'reflect':
            downconv += [nn.ReflectionPad2d(1)]
        elif padding_type == 'replicate':
            downconv += [nn.ReplicationPad2d(1)]
        elif padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError(
                'padding [%s] is not implemented' % padding_type)
        downconv += [nn.Conv2d(input_nc, inner_nc,
                               kernel_size=4, stride=2, padding=p)]
        # downsample is different from upsample
        downrelu = nn.LeakyReLU(0.2, True)
        downnorm = norm_layer(inner_nc) if norm_layer is not None else None
        uprelu = nl_layer()
        upnorm = norm_layer(outer_nc) if norm_layer is not None else None
        if use_attention:
            attn_layer = get_self_attention_layer(outer_nc)
        if outermost:
            # Outermost: no activation before the first down-conv; Tanh head.
            # The up path sees inner_nc * 2 channels from the submodule's
            # skip concatenation.
            upconv = upsampleLayer(
                inner_nc * 2, outer_nc, upsample=upsample, padding_type=padding_type,
                use_spectral_norm=use_spectral_norm)
            down = downconv
            up = [uprelu] + upconv + [nn.Tanh()]
            model = down + [submodule] + up
        elif innermost:
            # Innermost: no submodule; up path starts from inner_nc channels.
            upconv = upsampleLayer(
                inner_nc, outer_nc, upsample=upsample, padding_type=padding_type,
                use_spectral_norm=use_spectral_norm)
            down = [downrelu] + downconv
            up = [uprelu] + upconv
            if upnorm is not None:
                up += [upnorm]
            model = down + up
        else:
            # Intermediate: submodule output is doubled by its skip concat.
            upconv = upsampleLayer(
                inner_nc * 2, outer_nc, upsample=upsample, padding_type=padding_type,
                use_spectral_norm=use_spectral_norm)
            down = [downrelu] + downconv
            if downnorm is not None:
                down += [downnorm]
            up = [uprelu] + upconv
            if use_attention:
                up += [attn_layer]
            if upnorm is not None:
                up += [upnorm]
            if use_dropout:
                model = down + [submodule] + up + [nn.Dropout(0.5)]
            else:
                model = down + [submodule] + up
        self.model = nn.Sequential(*model)

    def forward(self, x):
        if self.outermost:
            return self.model(x)
        else:
            # Skip connection: concatenate the block's output with its input.
            return torch.cat([self.model(x), x], 1)
class UnetBlock_with_z(nn.Module):
    """U-Net block that injects a latent code z at its own level.

    When ``nz > 0``, z is spatially replicated and concatenated to the
    block's input along the channel dimension before the down-conv; the
    same z is also forwarded to the submodule so every level receives it.
    """

    def __init__(self, input_nc, outer_nc, inner_nc, nz=0,
                 submodule=None, outermost=False, innermost=False, use_spectral_norm=False,
                 norm_layer=None, nl_layer=None, use_dropout=False, use_attention=False,
                 upsample='basic', padding_type='zero'):
        super(UnetBlock_with_z, self).__init__()
        p = 0
        downconv = []
        if padding_type == 'reflect':
            downconv += [nn.ReflectionPad2d(1)]
        elif padding_type == 'replicate':
            downconv += [nn.ReplicationPad2d(1)]
        elif padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError(
                'padding [%s] is not implemented' % padding_type)
        self.outermost = outermost
        self.innermost = innermost
        self.nz = nz
        input_nc = input_nc + nz  # z channels are appended to the input
        downconv += [nn.Conv2d(input_nc, inner_nc,
                               kernel_size=4, stride=2, padding=p)]
        # downsample is different from upsample
        downrelu = nn.LeakyReLU(0.2, True)
        uprelu = nl_layer()
        if use_attention:
            attn_layer = get_self_attention_layer(outer_nc)
        if outermost:
            # Outermost: no activation before the down-conv; Tanh output head.
            upconv = upsampleLayer(
                inner_nc * 2, outer_nc, upsample=upsample, padding_type=padding_type,
                use_spectral_norm=use_spectral_norm)
            down = downconv
            up = [uprelu] + upconv + [nn.Tanh()]
        elif innermost:
            # Innermost: no submodule below; up path starts from inner_nc.
            upconv = upsampleLayer(
                inner_nc, outer_nc, upsample=upsample, padding_type=padding_type,
                use_spectral_norm=use_spectral_norm)
            down = [downrelu] + downconv
            up = [uprelu] + upconv
            if norm_layer is not None:
                up += [norm_layer(outer_nc)]
        else:
            # Intermediate: submodule output is doubled by its skip concat.
            upconv = upsampleLayer(
                inner_nc * 2, outer_nc, upsample=upsample, padding_type=padding_type,
                use_spectral_norm=use_spectral_norm)
            down = [downrelu] + downconv
            if norm_layer is not None:
                down += [norm_layer(inner_nc)]
            up = [uprelu] + upconv
            if use_attention:
                up += [attn_layer]
            if norm_layer is not None:
                up += [norm_layer(outer_nc)]
            if use_dropout:
                up += [nn.Dropout(0.5)]
        self.down = nn.Sequential(*down)
        self.submodule = submodule
        self.up = nn.Sequential(*up)

    def forward(self, x, z):
        # Broadcast z over the spatial grid and concatenate channelwise.
        if self.nz > 0:
            z_img = z.view(z.size(0), z.size(1), 1, 1).expand(
                z.size(0), z.size(1), x.size(2), x.size(3))
            x_and_z = torch.cat([x, z_img], 1)
        else:
            x_and_z = x
        if self.outermost:
            x1 = self.down(x_and_z)
            x2 = self.submodule(x1, z)
            return self.up(x2)
        elif self.innermost:
            x1 = self.up(self.down(x_and_z))
            return torch.cat([x1, x], 1)  # skip connection
        else:
            x1 = self.down(x_and_z)
            x2 = self.submodule(x1, z)
            return torch.cat([self.up(x2), x], 1)  # skip connection
class AGISNetBlock(nn.Module):
    """One level of the dual-branch AGIS-Net U-Net.

    Encodes a content input and a style input with separate down-convs
    (``down1`` / ``down2``), recurses into ``submodule``, and decodes two
    branches: ``up`` (fed by both branches' features) and ``up_B`` (an
    auxiliary output branch).  The outermost block combines both decoded
    branches through ``up_out`` and returns (fake_C, fake_B).  With
    ``wo_skip=True`` the skip connections are disabled, which also changes
    the channel counts the decoders expect.
    """

    def __init__(self, input_cont, input_style, outer_nc, inner_nc,
                 submodule=None, outermost=False, innermost=False, use_spectral_norm=False,
                 norm_layer=None, nl_layer=None, use_dropout=False, use_attention=False,
                 upsample='basic', padding_type='zero', wo_skip=False):
        super(AGISNetBlock, self).__init__()
        self.wo_skip = wo_skip
        p = 0
        downconv1 = []
        downconv2 = []
        if padding_type == 'reflect':
            downconv1 += [nn.ReflectionPad2d(1)]
            downconv2 += [nn.ReflectionPad2d(1)]
        elif padding_type == 'replicate':
            downconv1 += [nn.ReplicationPad2d(1)]
            downconv2 += [nn.ReplicationPad2d(1)]
        elif padding_type == 'zero':
            p = 1
        else:
            raise NotImplementedError(
                'padding [%s] is not implemented' % padding_type)
        self.outermost = outermost
        self.innermost = innermost
        # Separate stride-2 encoders for the content and style streams.
        downconv1 += [nn.Conv2d(input_cont, inner_nc, kernel_size=3, stride=2, padding=p)]
        downconv2 += [nn.Conv2d(input_style, inner_nc, kernel_size=3, stride=2, padding=p)]
        # downsample is different from upsample
        downrelu1 = nn.LeakyReLU(0.2, True)
        downrelu2 = nn.LeakyReLU(0.2, True)
        uprelu = nl_layer()
        uprelu2 = nl_layer()
        attn_layer = None
        if use_attention:
            attn_layer = get_self_attention_layer(outer_nc)
        if outermost:
            # Outermost: decodes both branches and fuses them via up_out.
            if self.wo_skip:
                upconv = upsampleLayer(
                    inner_nc, inner_nc, upsample=upsample, padding_type=padding_type,
                    use_spectral_norm=use_spectral_norm)
                upconv_B = upsampleLayer(
                    inner_nc, outer_nc, upsample=upsample, padding_type=padding_type,
                    use_spectral_norm=use_spectral_norm)
            else:
                # Channel counts reflect the submodule's skip concatenations.
                upconv = upsampleLayer(
                    inner_nc * 4, inner_nc, upsample=upsample, padding_type=padding_type,
                    use_spectral_norm=use_spectral_norm)
                upconv_B = upsampleLayer(
                    inner_nc * 3, outer_nc, upsample=upsample, padding_type=padding_type,
                    use_spectral_norm=use_spectral_norm)
            # Fuses the decoded main branch with fake_B into the final output.
            upconv_out = [nn.Conv2d(inner_nc + outer_nc, outer_nc, kernel_size=3, stride=1, padding=p)]
            down1 = downconv1
            down2 = downconv2
            up = [uprelu] + upconv
            up_B = [uprelu2] + upconv_B + [nn.Tanh()]
            uprelu3 = nl_layer()
            up_out = [uprelu3] + upconv_out + [nn.Tanh()]
            self.up_out = nn.Sequential(*up_out)
            if use_attention:
                up += [attn_layer]
            if norm_layer is not None:
                up += [norm_layer(outer_nc)]
            if use_dropout:
                up += [nn.Dropout(0.5)]
        elif innermost:
            # Innermost: no submodule; both branches decode the concatenated
            # content+style bottleneck features (inner_nc * 2 channels).
            upconv = upsampleLayer(
                inner_nc * 2, outer_nc, upsample=upsample, padding_type=padding_type,
                use_spectral_norm=use_spectral_norm)
            upconv_B = upsampleLayer(
                inner_nc * 2, outer_nc, upsample=upsample, padding_type=padding_type,
                use_spectral_norm=use_spectral_norm)
            down1 = [downrelu1] + downconv1
            down2 = [downrelu2] + downconv2
            up = [uprelu] + upconv
            up_B = [uprelu2] + upconv_B
            if norm_layer is not None:
                up += [norm_layer(outer_nc)]
                up_B += [norm_layer(outer_nc)]
        else:
            if self.wo_skip:  # without skip-connection
                upconv = upsampleLayer(
                    inner_nc, outer_nc, upsample=upsample, padding_type=padding_type,
                    use_spectral_norm=use_spectral_norm)
                upconv_B = upsampleLayer(
                    inner_nc, outer_nc, upsample=upsample, padding_type=padding_type,
                    use_spectral_norm=use_spectral_norm)
            else:
                # Channel counts reflect the submodule's skip concatenations.
                upconv = upsampleLayer(
                    inner_nc * 4, outer_nc, upsample=upsample, padding_type=padding_type,
                    use_spectral_norm=use_spectral_norm)
                upconv_B = upsampleLayer(
                    inner_nc * 3, outer_nc, upsample=upsample, padding_type=padding_type,
                    use_spectral_norm=use_spectral_norm)
            down1 = [downrelu1] + downconv1
            down2 = [downrelu2] + downconv2
            if norm_layer is not None:
                down1 += [norm_layer(inner_nc)]
                down2 += [norm_layer(inner_nc)]
            up = [uprelu] + upconv
            up_B = [uprelu2] + upconv_B
            if use_attention:
                up += [attn_layer]
                attn_layer2 = get_self_attention_layer(outer_nc)
                up_B += [attn_layer2]
            if norm_layer is not None:
                up += [norm_layer(outer_nc)]
                up_B += [norm_layer(outer_nc)]
            if use_dropout:
                up += [nn.Dropout(0.5)]
                up_B += [nn.Dropout(0.5)]
        self.down1 = nn.Sequential(*down1)
        self.down2 = nn.Sequential(*down2)
        self.submodule = submodule
        self.up = nn.Sequential(*up)
        self.up_B = nn.Sequential(*up_B)

    def forward(self, content, style):
        x1 = self.down1(content)
        x2 = self.down2(style)
        if self.outermost:
            mid_C, mid_B = self.submodule(x1, x2)
            fake_B = self.up_B(mid_B)
            mid_C2 = self.up(mid_C)
            # Final output fuses the main branch with the auxiliary output.
            fake_C = self.up_out(torch.cat([mid_C2, fake_B], 1))
            return fake_C, fake_B
        elif self.innermost:
            # Bottleneck: fuse the two encoded streams channelwise.
            mid_C = torch.cat([x1, x2], 1)
            mid_B = torch.cat([x1, x2], 1)
            fake_C = self.up(mid_C)
            fake_B = self.up_B(mid_B)
            tmp1 = torch.cat([content, style], 1)
            if self.wo_skip:
                return fake_C, fake_B
            else:
                # Skip connections: append the block inputs to both outputs.
                return torch.cat([torch.cat([fake_C, fake_B], 1), tmp1], 1), torch.cat([fake_B, tmp1], 1)
        else:
            mid, mid_B = self.submodule(x1, x2)
            fake_C = self.up(mid)
            fake_B = self.up_B(mid_B)
            tmp1 = torch.cat([content, style], 1)
            if self.wo_skip:
                return fake_C, fake_B
            else:
                # Skip connections: append the block inputs to both outputs.
                return torch.cat([torch.cat([fake_C, fake_B], 1), tmp1], 1), torch.cat([fake_B, tmp1], 1)
class BasicBlockUp(nn.Module):
    """Residual block that upsamples 2x.

    Main path: [norm] -> act -> upsampleConv(3x3) -> [norm] -> conv3x3;
    shortcut: upsampleConv with a 1x1 kernel.  Outputs are summed.
    """

    def __init__(self, inplanes, outplanes, norm_layer=None, nl_layer=None):
        super(BasicBlockUp, self).__init__()
        path = []
        if norm_layer is not None:
            path.append(norm_layer(inplanes))
        path.append(nl_layer())
        path.append(upsampleConv(inplanes, outplanes, kw=3, padw=1))
        if norm_layer is not None:
            path.append(norm_layer(outplanes))
        path.append(conv3x3(outplanes, outplanes))
        self.conv = nn.Sequential(*path)
        self.shortcut = upsampleConv(inplanes, outplanes, kw=1, padw=0)

    def forward(self, x):
        return self.conv(x) + self.shortcut(x)
class BasicBlock(nn.Module):
    """Residual block that downsamples 2x.

    Main path: [norm] -> act -> conv3x3 -> [norm] -> act -> convMeanpool;
    shortcut: meanpoolConv.  Outputs are summed.
    """

    def __init__(self, inplanes, outplanes, norm_layer=None, nl_layer=None):
        super(BasicBlock, self).__init__()
        path = []
        if norm_layer is not None:
            path.append(norm_layer(inplanes))
        path.append(nl_layer())
        path.append(conv3x3(inplanes, inplanes))
        if norm_layer is not None:
            path.append(norm_layer(inplanes))
        path.append(nl_layer())
        path.append(convMeanpool(inplanes, outplanes))
        self.conv = nn.Sequential(*path)
        self.shortcut = meanpoolConv(inplanes, outplanes)

    def forward(self, x):
        return self.conv(x) + self.shortcut(x)
# Defines the Unet generator.
# |num_downs|: number of downsamplings in UNet. For example,
# if |num_downs| == 7, image of size 128x128 will become of size 1x1
# at the bottleneck
class G_Unet_add_input(nn.Module):
    """U-Net generator that receives the latent code z only at the input.

    |num_downs|: number of downsamplings in UNet. For example,
    if |num_downs| == 7, image of size 128x128 will become of size 1x1
    at the bottleneck.  When nz > 0, z is spatially replicated and
    concatenated to the input image channels before entering the network.
    """

    def __init__(self, input_nc, output_nc, nz, num_downs, ngf=64,
                 norm_layer=None, nl_layer=None, use_dropout=False,
                 use_attention=False, use_spectral_norm=False, upsample='basic'):
        super(G_Unet_add_input, self).__init__()
        self.nz = nz
        max_nchn = 8  # max channel factor
        # construct unet structure: innermost block first, then wrap outwards
        unet_block = UnetBlock(ngf*max_nchn, ngf*max_nchn, ngf*max_nchn, use_spectral_norm=use_spectral_norm,
                               innermost=True, norm_layer=norm_layer, nl_layer=nl_layer, upsample=upsample)
        for i in range(num_downs - 5):
            unet_block = UnetBlock(ngf*max_nchn, ngf*max_nchn, ngf*max_nchn, unet_block,
                                   norm_layer=norm_layer, nl_layer=nl_layer, use_dropout=use_dropout,
                                   use_spectral_norm=use_spectral_norm, upsample=upsample)
        unet_block = UnetBlock(ngf*4, ngf*4, ngf*max_nchn, unet_block, use_attention=use_attention,
                               use_spectral_norm=use_spectral_norm, norm_layer=norm_layer,
                               nl_layer=nl_layer, upsample=upsample)
        unet_block = UnetBlock(ngf*2, ngf*2, ngf*4, unet_block, use_attention=use_attention,
                               use_spectral_norm=use_spectral_norm, norm_layer=norm_layer,
                               nl_layer=nl_layer, upsample=upsample)
        unet_block = UnetBlock(ngf, ngf, ngf*2, unet_block, use_attention=use_attention,
                               use_spectral_norm=use_spectral_norm, norm_layer=norm_layer,
                               nl_layer=nl_layer, upsample=upsample)
        # outermost block takes the image (+ z channels) and emits output_nc
        unet_block = UnetBlock(input_nc + nz, output_nc, ngf, unet_block,
                               use_spectral_norm=use_spectral_norm, outermost=True, norm_layer=norm_layer,
                               nl_layer=nl_layer, upsample=upsample)
        self.model = unet_block

    def forward(self, x, z=None):
        if self.nz > 0:
            # replicate z across the spatial grid and append as channels
            z_img = z.view(z.size(0), z.size(1), 1, 1).expand(
                z.size(0), z.size(1), x.size(2), x.size(3))
            x_with_z = torch.cat([x, z_img], 1)
        else:
            x_with_z = x  # no z
        return self.model(x_with_z)
# AGISNet Module
class AGISNet(nn.Module):
    """Dual-branch U-Net generator (AGIS-Net) over content and style inputs.

    Built recursively from AGISNetBlock; forward returns the pair
    (fake_C, fake_B) produced by the outermost block (see
    AGISNetBlock.forward for the meaning of the two outputs).
    """

    def __init__(self, input_content, input_style, output_nc, num_downs, ngf=64,
                 norm_layer=None, nl_layer=None, use_dropout=False,
                 use_attention=False, use_spectral_norm=False, upsample='basic', wo_skip=False):
        super(AGISNet, self).__init__()
        max_nchn = 8  # max channel factor
        # construct unet structure: innermost block first, then wrap outwards
        dual_block = AGISNetBlock(ngf*max_nchn, ngf*max_nchn, ngf*max_nchn, ngf*max_nchn,
                                  use_spectral_norm=use_spectral_norm, innermost=True,
                                  norm_layer=norm_layer, nl_layer=nl_layer, upsample=upsample, wo_skip=wo_skip)
        for i in range(num_downs - 5):
            dual_block = AGISNetBlock(ngf*max_nchn, ngf*max_nchn, ngf*max_nchn, ngf*max_nchn, dual_block,
                                      norm_layer=norm_layer, nl_layer=nl_layer, use_dropout=use_dropout,
                                      use_spectral_norm=use_spectral_norm, upsample=upsample, wo_skip=wo_skip)
        dual_block = AGISNetBlock(ngf*4, ngf*4, ngf*4, ngf*max_nchn, dual_block, use_attention=use_attention,
                                  use_spectral_norm=use_spectral_norm, norm_layer=norm_layer,
                                  nl_layer=nl_layer, upsample=upsample, wo_skip=wo_skip)
        dual_block = AGISNetBlock(ngf*2, ngf*2, ngf*2, ngf*4, dual_block, use_attention=use_attention,
                                  use_spectral_norm=use_spectral_norm, norm_layer=norm_layer,
                                  nl_layer=nl_layer, upsample=upsample, wo_skip=wo_skip)
        dual_block = AGISNetBlock(ngf, ngf, ngf, ngf*2, dual_block, use_attention=use_attention,
                                  use_spectral_norm=use_spectral_norm, norm_layer=norm_layer,
                                  nl_layer=nl_layer, upsample=upsample, wo_skip=wo_skip)
        dual_block = AGISNetBlock(input_content, input_style, output_nc, ngf, dual_block,
                                  use_spectral_norm=use_spectral_norm, outermost=True, norm_layer=norm_layer,
                                  nl_layer=nl_layer, upsample=upsample, wo_skip=wo_skip)
        self.model = dual_block

    def forward(self, content, style):
        return self.model(content, style)
# Defines the Unet generator.
# |num_downs|: number of downsamplings in UNet. For example,
# if |num_downs| == 7, image of size 128x128 will become of size 1x1
# at the bottleneck
class G_Unet_add_all(nn.Module):
    """U-Net generator that injects the latent code z at every layer.

    |num_downs|: number of downsamplings in UNet. For example,
    if |num_downs| == 7, image of size 128x128 will become of size 1x1
    at the bottleneck.  Built recursively from UnetBlock_with_z so each
    level concatenates z to its input (see UnetBlock_with_z).
    """

    def __init__(self, input_nc, output_nc, nz, num_downs, ngf=64,
                 norm_layer=None, nl_layer=None, use_dropout=False,
                 use_attention=False, use_spectral_norm=False, upsample='basic'):
        super(G_Unet_add_all, self).__init__()
        self.nz = nz
        # construct unet structure: innermost block first, then wrap outwards
        unet_block = UnetBlock_with_z(ngf*8, ngf*8, ngf*8, nz, None, innermost=True,
                                      use_spectral_norm=use_spectral_norm, norm_layer=norm_layer,
                                      nl_layer=nl_layer, upsample=upsample)
        unet_block = UnetBlock_with_z(ngf*8, ngf*8, ngf*8, nz, unet_block,
                                      use_spectral_norm=use_spectral_norm, norm_layer=norm_layer,
                                      nl_layer=nl_layer, use_dropout=use_dropout, upsample=upsample)
        for i in range(num_downs - 6):
            unet_block = UnetBlock_with_z(ngf*8, ngf*8, ngf*8, nz, unet_block, use_spectral_norm=use_spectral_norm,
                                          norm_layer=norm_layer, nl_layer=nl_layer,
                                          use_dropout=use_dropout, upsample=upsample)
        unet_block = UnetBlock_with_z(ngf*4, ngf*4, ngf*8, nz, unet_block, use_attention=use_attention,
                                      use_spectral_norm=use_spectral_norm, norm_layer=norm_layer,
                                      nl_layer=nl_layer, upsample=upsample)
        unet_block = UnetBlock_with_z(ngf*2, ngf*2, ngf*4, nz, unet_block, use_attention=use_attention,
                                      use_spectral_norm=use_spectral_norm,
                                      norm_layer=norm_layer, nl_layer=nl_layer, upsample=upsample)
        unet_block = UnetBlock_with_z(ngf, ngf, ngf*2, nz, unet_block, use_attention=use_attention,
                                      use_spectral_norm=use_spectral_norm,
                                      norm_layer=norm_layer, nl_layer=nl_layer, upsample=upsample)
        unet_block = UnetBlock_with_z(input_nc, output_nc, ngf, nz, unet_block, use_spectral_norm=use_spectral_norm,
                                      outermost=True, norm_layer=norm_layer, nl_layer=nl_layer, upsample=upsample)
        self.model = unet_block

    def forward(self, x, z):
        return self.model(x, z)
class PatchLoss(nn.Module):
    """Patch-level loss on VGG-19 features.

    For every spatial patch of the generator output, the best-matching
    patch of the shape reference is found by correlation, and the L1
    distance to the corresponding patch of the color reference is
    penalized.  The VGG-19 network is used purely as a frozen feature
    extractor (``conv2_2`` activations).
    """

    def __init__(self, device, opt):
        super(PatchLoss, self).__init__()
        self.device = device
        # Frozen, pre-trained VGG-19 feature extractor (weights from opt.vgg).
        self.vgg19 = VGG19().to(device)
        self.vgg19.load_model(opt.vgg)
        self.vgg19.eval()
        self.vgg_layer = 'conv2_2'
        self.loss = torch.nn.L1Loss()

    def l2_normalize_patch(self, features):
        """L2-normalize each (C, k1, k2) patch vector at every (H, W) site."""
        # Normalize on patch dimension (axis=1,4,5)
        N, C, H, W, k1, k2 = features.shape
        features = features.permute(0, 1, 4, 5, 2, 3).contiguous().view(N, C*k1*k2, H, W)
        norms = torch.norm(features, p=2, dim=1, keepdim=True)
        # NOTE(review): no epsilon — an all-zero patch would divide by zero.
        features = features.div(norms)
        features = features.view(N, C, k1, k2, H, W).permute(0, 1, 4, 5, 2, 3)
        return features

    def forward(self, output, reference, shape_ref, color_ref):
        '''
        output: G output
        reference: output reference
        shape_ref: color reference (sic — presumably the *shape* reference;
            the original labels for shape_ref/color_ref look swapped, TODO confirm)
        color_ref: colors, style input
        '''
        patch_size = 3
        output_feat = self.vgg19(output)[self.vgg_layer]  # N * C * 8 * 8
        ref_feat = self.vgg19(reference)[self.vgg_layer]
        color_feat = self.vgg19(color_ref)[self.vgg_layer]  # N * C * 16 * 16
        shape_feat = self.vgg19(shape_ref)[self.vgg_layer]
        N, C, H, W = output_feat.shape
        # Unfold extracts sliding windows as columns; the kernel sizes below
        # yield one patch_size x patch_size patch per spatial offset.
        unfolder1 = torch.nn.Unfold(kernel_size=(H-patch_size+1, W-patch_size+1))
        unfolder2 = torch.nn.Unfold(kernel_size=(H*2-patch_size+1, W*2-patch_size+1))
        output_pat = unfolder1(output_feat)  # N * (C*H-1*W-1) * (2*2)
        output_pat = output_pat.view((N, C, H-patch_size+1, W-patch_size+1, patch_size, patch_size))
        shape_pat = unfolder2(shape_feat)
        shape_pat = shape_pat.view((N, C, (H*2-patch_size+1)*(W*2-patch_size+1), patch_size, patch_size))
        shape_pat = torch.transpose(shape_pat, 1, 2)
        color_pat = unfolder2(color_feat)
        color_pat = color_pat.view((N, C, (H*2-patch_size+1)*(W*2-patch_size+1), patch_size, patch_size))
        dist = list()
        # Per-sample loop: correlate reference features against every
        # shape-reference patch by using the patches as conv filters.
        for i in range(N):
            ref_i = ref_feat[i].view(1, ref_feat[i].shape[0], ref_feat[i].shape[1], ref_feat[i].shape[2])
            shape_i = shape_pat[i]
            # NOTE(review): the declared kernel_size=2 is superseded by the
            # weight tensor assigned on the next line (the shape patches).
            conv1 = nn.Conv2d(C, (H*2-patch_size+1)*(W*2-patch_size+1), kernel_size=2, stride=1, bias=False)
            conv1.weight = torch.nn.Parameter(shape_i)
            net = nn.Sequential(conv1)
            similarity = net(ref_i)
            # Output channel j holds the correlation with shape patch j, so
            # argmax over channels picks the best-matching patch per site.
            argmax = torch.argmax(similarity, 1)
            matched = torch.zeros(output_pat[i].shape).to(self.device)
            for k in range(H-patch_size+1):
                for j in range(W-patch_size+1):
                    row = k
                    col = j
                    ind = argmax[0, row, col]
                    # Copy the color patch corresponding to the best match.
                    matched[:, row, col, ...] = color_pat[i, :, ind, ...]
            matched = torch.unsqueeze(matched, 0)
            dist.append(matched)
        dist = torch.cat(dist, dim=0)
        # output_pat = self.l2_normalize_patch(output_pat)
        # dist = self.l2_normalize_patch(dist)
        diff = (output_pat-dist).abs()
        l1_patch = diff.sum(dim=1).sum(dim=-2).sum(dim=-1)
        # Clamp the per-patch error to bound the influence of outliers.
        l1_clip = torch.clamp(l1_patch, 0, 1000)
        loss = l1_clip.sum()
        return loss
class GramMatrix(nn.Module):
    """Compute the normalized Gram matrix of a batch of feature maps."""

    def forward(self, input):
        # b: batch size, ch: number of feature maps, (h, w): map dimensions.
        b, ch, h, w = input.size()
        flat = input.view(b * ch, h * w)  # reshape F_XL into \hat F_XL
        gram = torch.mm(flat, flat.t())  # inner products between feature maps
        # Normalize by the number of elements in each feature map so the
        # result is independent of the feature-map size.
        return gram.div(b * ch * h * w)
# base style loss
class Base_StyleLoss(nn.Module):
    """MSE between the Gram matrices of two feature maps (style distance)."""

    def __init__(self):
        super(Base_StyleLoss, self).__init__()
        self.gram = GramMatrix()
        self.criterion = nn.MSELoss()

    def __call__(self, input, target):
        # Compare style statistics (Gram matrices) rather than raw activations.
        return self.criterion(self.gram(input), self.gram(target))
# define the style loss
class StyleLoss(nn.Module):
    """Gram-matrix style loss accumulated over several VGG-19 layers."""

    def __init__(self, device, opt):
        super(StyleLoss, self).__init__()
        self.device = device
        # Frozen VGG-19 feature extractor (weights from opt.vgg_font).
        self.vgg19 = VGG19().to(device)
        self.vgg19.load_model(opt.vgg_font)
        self.vgg19.eval()
        self.vgg_layers = ['conv2_2', 'conv3_2']
        self.criterion = Base_StyleLoss()

    def __call__(self, input, target):
        total = 0.0
        # Sum the per-layer style distances.
        for layer_name in self.vgg_layers:
            total += self.criterion(self.vgg19(input)[layer_name],
                                    self.vgg19(target)[layer_name])
        return total
| [
"hologerry@gmail.com"
] | hologerry@gmail.com |
145db62f75560083ee2bd45d160704022eef6670 | 41c605bf3a002a757cb2344cff526d7a7ae56ea9 | /plotly/validators/bar/marker/__init__.py | 2d3ccf70da92019456a5471e03e28185ae7217bc | [
"MIT"
] | permissive | Jonathan-MW/plotly.py | 9674b90b5de11fd9089e6afefd04b57bc4587829 | 7528c00772f44dee24c0df7e15d70a4852f171a8 | refs/heads/master | 2020-05-30T06:04:13.621478 | 2019-05-31T10:34:15 | 2019-05-31T10:34:15 | 189,571,988 | 2 | 0 | MIT | 2019-05-31T09:59:53 | 2019-05-31T09:59:53 | null | UTF-8 | Python | false | false | 22,600 | py |
import _plotly_utils.basevalidators
class ShowscaleValidator(_plotly_utils.basevalidators.BooleanValidator):
    """Validator for the ``bar.marker.showscale`` property."""

    def __init__(self, plotly_name='showscale', parent_name='bar.marker',
                 **kwargs):
        # Apply the generated defaults only where the caller has not.
        kwargs.setdefault('edit_type', 'calc')
        kwargs.setdefault('role', 'info')
        super(ShowscaleValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class ReversescaleValidator(_plotly_utils.basevalidators.BooleanValidator):
    """Validator for the ``bar.marker.reversescale`` property."""

    def __init__(self, plotly_name='reversescale', parent_name='bar.marker',
                 **kwargs):
        # Apply the generated defaults only where the caller has not.
        kwargs.setdefault('edit_type', 'plot')
        kwargs.setdefault('role', 'style')
        super(ReversescaleValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class OpacitysrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Validator for the ``bar.marker.opacitysrc`` property."""

    def __init__(self, plotly_name='opacitysrc', parent_name='bar.marker',
                 **kwargs):
        # Apply the generated defaults only where the caller has not.
        kwargs.setdefault('edit_type', 'none')
        kwargs.setdefault('role', 'info')
        super(OpacitysrcValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class OpacityValidator(_plotly_utils.basevalidators.NumberValidator):
    """Validator for the ``bar.marker.opacity`` property (0..1, array ok)."""

    def __init__(self, plotly_name='opacity', parent_name='bar.marker',
                 **kwargs):
        # Apply the generated defaults only where the caller has not.
        kwargs.setdefault('array_ok', True)
        kwargs.setdefault('edit_type', 'style')
        kwargs.setdefault('max', 1)
        kwargs.setdefault('min', 0)
        kwargs.setdefault('role', 'style')
        super(OpacityValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class LineValidator(_plotly_utils.basevalidators.CompoundValidator):
    """Compound validator for ``bar.marker.line`` (auto-generated code)."""

    def __init__(self, plotly_name='line', parent_name='bar.marker', **kwargs):
        super(LineValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            data_class_str=kwargs.pop('data_class_str', 'Line'),
            data_docs=kwargs.pop(
                'data_docs', """
            autocolorscale
                Determines whether the colorscale is a default
                palette (`autocolorscale: true`) or the palette
                determined by `marker.line.colorscale`. Has an
                effect only if in `marker.line.color`is set to
                a numerical array. In case `colorscale` is
                unspecified or `autocolorscale` is true, the
                default palette will be chosen according to
                whether numbers in the `color` array are all
                positive, all negative or mixed.
            cauto
                Determines whether or not the color domain is
                computed with respect to the input data (here
                in `marker.line.color`) or the bounds set in
                `marker.line.cmin` and `marker.line.cmax` Has
                an effect only if in `marker.line.color`is set
                to a numerical array. Defaults to `false` when
                `marker.line.cmin` and `marker.line.cmax` are
                set by the user.
            cmax
                Sets the upper bound of the color domain. Has
                an effect only if in `marker.line.color`is set
                to a numerical array. Value should have the
                same units as in `marker.line.color` and if
                set, `marker.line.cmin` must be set as well.
            cmid
                Sets the mid-point of the color domain by
                scaling `marker.line.cmin` and/or
                `marker.line.cmax` to be equidistant to this
                point. Has an effect only if in
                `marker.line.color`is set to a numerical array.
                Value should have the same units as in
                `marker.line.color`. Has no effect when
                `marker.line.cauto` is `false`.
            cmin
                Sets the lower bound of the color domain. Has
                an effect only if in `marker.line.color`is set
                to a numerical array. Value should have the
                same units as in `marker.line.color` and if
                set, `marker.line.cmax` must be set as well.
            color
                Sets themarker.linecolor. It accepts either a
                specific color or an array of numbers that are
                mapped to the colorscale relative to the max
                and min values of the array or relative to
                `marker.line.cmin` and `marker.line.cmax` if
                set.
            coloraxis
                Sets a reference to a shared color axis.
                References to these shared color axes are
                "coloraxis", "coloraxis2", "coloraxis3", etc.
                Settings for these shared color axes are set in
                the layout, under `layout.coloraxis`,
                `layout.coloraxis2`, etc. Note that multiple
                color scales can be linked to the same color
                axis.
            colorscale
                Sets the colorscale. Has an effect only if in
                `marker.line.color`is set to a numerical array.
                The colorscale must be an array containing
                arrays mapping a normalized value to an rgb,
                rgba, hex, hsl, hsv, or named color string. At
                minimum, a mapping for the lowest (0) and
                highest (1) values are required. For example,
                `[[0, 'rgb(0,0,255)', [1, 'rgb(255,0,0)']]`. To
                control the bounds of the colorscale in color
                space, use`marker.line.cmin` and
                `marker.line.cmax`. Alternatively, `colorscale`
                may be a palette name string of the following
                list: Greys,YlGnBu,Greens,YlOrRd,Bluered,RdBu,R
                eds,Blues,Picnic,Rainbow,Portland,Jet,Hot,Black
                body,Earth,Electric,Viridis,Cividis.
            colorsrc
                Sets the source reference on plot.ly for color
                .
            reversescale
                Reverses the color mapping if true. Has an
                effect only if in `marker.line.color`is set to
                a numerical array. If true, `marker.line.cmin`
                will correspond to the last color in the array
                and `marker.line.cmax` will correspond to the
                first color.
            width
                Sets the width (in px) of the lines bounding
                the marker points.
            widthsrc
                Sets the source reference on plot.ly for width
                .
"""
            ),
            **kwargs
        )
import _plotly_utils.basevalidators
class ColorsrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Validator for the ``bar.marker.colorsrc`` property."""

    def __init__(self, plotly_name='colorsrc', parent_name='bar.marker',
                 **kwargs):
        # Apply the generated defaults only where the caller has not.
        kwargs.setdefault('edit_type', 'none')
        kwargs.setdefault('role', 'info')
        super(ColorsrcValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class ColorscaleValidator(_plotly_utils.basevalidators.ColorscaleValidator):
    """Validator for the ``bar.marker.colorscale`` property."""

    def __init__(self, plotly_name='colorscale', parent_name='bar.marker',
                 **kwargs):
        # Apply the generated defaults only where the caller has not.
        kwargs.setdefault('edit_type', 'calc')
        # Setting an explicit colorscale implies autocolorscale=False.
        kwargs.setdefault('implied_edits', {'autocolorscale': False})
        kwargs.setdefault('role', 'style')
        super(ColorscaleValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class ColorBarValidator(_plotly_utils.basevalidators.CompoundValidator):
    """Compound validator for ``bar.marker.colorbar`` (auto-generated code)."""

    def __init__(
        self, plotly_name='colorbar', parent_name='bar.marker', **kwargs
    ):
        super(ColorBarValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            data_class_str=kwargs.pop('data_class_str', 'ColorBar'),
            data_docs=kwargs.pop(
                'data_docs', """
            bgcolor
                Sets the color of padded area.
            bordercolor
                Sets the axis line color.
            borderwidth
                Sets the width (in px) or the border enclosing
                this color bar.
            dtick
                Sets the step in-between ticks on this axis.
                Use with `tick0`. Must be a positive number, or
                special strings available to "log" and "date"
                axes. If the axis `type` is "log", then ticks
                are set every 10^(n*dtick) where n is the tick
                number. For example, to set a tick mark at 1,
                10, 100, 1000, ... set dtick to 1. To set tick
                marks at 1, 100, 10000, ... set dtick to 2. To
                set tick marks at 1, 5, 25, 125, 625, 3125, ...
                set dtick to log_10(5), or 0.69897000433. "log"
                has several special values; "L<f>", where `f`
                is a positive number, gives ticks linearly
                spaced in value (but not position). For example
                `tick0` = 0.1, `dtick` = "L0.5" will put ticks
                at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10
                plus small digits between, use "D1" (all
                digits) or "D2" (only 2 and 5). `tick0` is
                ignored for "D1" and "D2". If the axis `type`
                is "date", then you must convert the time to
                milliseconds. For example, to set the interval
                between ticks to one day, set `dtick` to
                86400000.0. "date" also has special values
                "M<n>" gives ticks spaced by a number of
                months. `n` must be a positive integer. To set
                ticks on the 15th of every third month, set
                `tick0` to "2000-01-15" and `dtick` to "M3". To
                set ticks every 4 years, set `dtick` to "M48"
            exponentformat
                Determines a formatting rule for the tick
                exponents. For example, consider the number
                1,000,000,000. If "none", it appears as
                1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
                "power", 1x10^9 (with 9 in a super script). If
                "SI", 1G. If "B", 1B.
            len
                Sets the length of the color bar This measure
                excludes the padding of both ends. That is, the
                color bar length is this length minus the
                padding on both ends.
            lenmode
                Determines whether this color bar's length
                (i.e. the measure in the color variation
                direction) is set in units of plot "fraction"
                or in *pixels. Use `len` to set the value.
            nticks
                Specifies the maximum number of ticks for the
                particular axis. The actual number of ticks
                will be chosen automatically to be less than or
                equal to `nticks`. Has an effect only if
                `tickmode` is set to "auto".
            outlinecolor
                Sets the axis line color.
            outlinewidth
                Sets the width (in px) of the axis line.
            separatethousands
                If "true", even 4-digit integers are separated
            showexponent
                If "all", all exponents are shown besides their
                significands. If "first", only the exponent of
                the first tick is shown. If "last", only the
                exponent of the last tick is shown. If "none",
                no exponents appear.
            showticklabels
                Determines whether or not the tick labels are
                drawn.
            showtickprefix
                If "all", all tick labels are displayed with a
                prefix. If "first", only the first tick is
                displayed with a prefix. If "last", only the
                last tick is displayed with a suffix. If
                "none", tick prefixes are hidden.
            showticksuffix
                Same as `showtickprefix` but for tick suffixes.
            thickness
                Sets the thickness of the color bar This
                measure excludes the size of the padding, ticks
                and labels.
            thicknessmode
                Determines whether this color bar's thickness
                (i.e. the measure in the constant color
                direction) is set in units of plot "fraction"
                or in "pixels". Use `thickness` to set the
                value.
            tick0
                Sets the placement of the first tick on this
                axis. Use with `dtick`. If the axis `type` is
                "log", then you must take the log of your
                starting tick (e.g. to set the starting tick to
                100, set the `tick0` to 2) except when
                `dtick`=*L<f>* (see `dtick` for more info). If
                the axis `type` is "date", it should be a date
                string, like date data. If the axis `type` is
                "category", it should be a number, using the
                scale where each category is assigned a serial
                number from zero in the order it appears.
            tickangle
                Sets the angle of the tick labels with respect
                to the horizontal. For example, a `tickangle`
                of -90 draws the tick labels vertically.
            tickcolor
                Sets the tick color.
            tickfont
                Sets the color bar's tick label font
            tickformat
                Sets the tick label formatting rule using d3
                formatting mini-languages which are very
                similar to those in Python. For numbers, see: h
                ttps://github.com/d3/d3-format/blob/master/READ
                ME.md#locale_format And for dates see:
                https://github.com/d3/d3-time-
                format/blob/master/README.md#locale_format We
                add one item to d3's date formatter: "%{n}f"
                for fractional seconds with n digits. For
                example, *2016-10-13 09:15:23.456* with
                tickformat "%H~%M~%S.%2f" would display
                "09~15~23.46"
            tickformatstops
                plotly.graph_objs.bar.marker.colorbar.Tickforma
                tstop instance or dict with compatible
                properties
            tickformatstopdefaults
                When used in a template (as layout.template.dat
                a.bar.marker.colorbar.tickformatstopdefaults),
                sets the default property values to use for
                elements of bar.marker.colorbar.tickformatstops
            ticklen
                Sets the tick length (in px).
            tickmode
                Sets the tick mode for this axis. If "auto",
                the number of ticks is set via `nticks`. If
                "linear", the placement of the ticks is
                determined by a starting position `tick0` and a
                tick step `dtick` ("linear" is the default
                value if `tick0` and `dtick` are provided). If
                "array", the placement of the ticks is set via
                `tickvals` and the tick text is `ticktext`.
                ("array" is the default value if `tickvals` is
                provided).
            tickprefix
                Sets a tick label prefix.
            ticks
                Determines whether ticks are drawn or not. If
                "", this axis' ticks are not drawn. If
                "outside" ("inside"), this axis' are drawn
                outside (inside) the axis lines.
            ticksuffix
                Sets a tick label suffix.
            ticktext
                Sets the text displayed at the ticks position
                via `tickvals`. Only has an effect if
                `tickmode` is set to "array". Used with
                `tickvals`.
            ticktextsrc
                Sets the source reference on plot.ly for
                ticktext .
            tickvals
                Sets the values at which ticks on this axis
                appear. Only has an effect if `tickmode` is set
                to "array". Used with `ticktext`.
            tickvalssrc
                Sets the source reference on plot.ly for
                tickvals .
            tickwidth
                Sets the tick width (in px).
            title
                plotly.graph_objs.bar.marker.colorbar.Title
                instance or dict with compatible properties
            titlefont
                Deprecated: Please use
                bar.marker.colorbar.title.font instead. Sets
                this color bar's title font. Note that the
                title's font used to be set by the now
                deprecated `titlefont` attribute.
            titleside
                Deprecated: Please use
                bar.marker.colorbar.title.side instead.
                Determines the location of color bar's title
                with respect to the color bar. Note that the
                title's location used to be set by the now
                deprecated `titleside` attribute.
            x
                Sets the x position of the color bar (in plot
                fraction).
            xanchor
                Sets this color bar's horizontal position
                anchor. This anchor binds the `x` position to
                the "left", "center" or "right" of the color
                bar.
            xpad
                Sets the amount of padding (in px) along the x
                direction.
            y
                Sets the y position of the color bar (in plot
                fraction).
            yanchor
                Sets this color bar's vertical position anchor
                This anchor binds the `y` position to the
                "top", "middle" or "bottom" of the color bar.
            ypad
                Sets the amount of padding (in px) along the y
                direction.
"""
            ),
            **kwargs
        )
import _plotly_utils.basevalidators
class ColoraxisValidator(_plotly_utils.basevalidators.SubplotidValidator):
    """Validator for the ``bar.marker.coloraxis`` property."""

    def __init__(self, plotly_name='coloraxis', parent_name='bar.marker',
                 **kwargs):
        # Apply the generated defaults only where the caller has not.
        kwargs.setdefault('dflt', None)
        kwargs.setdefault('edit_type', 'calc')
        # Accepts "coloraxis", "coloraxis2", "coloraxis3", ...
        kwargs.setdefault('regex', '/^coloraxis([2-9]|[1-9][0-9]+)?$/')
        kwargs.setdefault('role', 'info')
        super(ColoraxisValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
    """Validator for the ``bar.marker.color`` property (color or array)."""

    def __init__(self, plotly_name='color', parent_name='bar.marker',
                 **kwargs):
        # Apply the generated defaults only where the caller has not.
        kwargs.setdefault('array_ok', True)
        kwargs.setdefault('edit_type', 'style')
        kwargs.setdefault('role', 'style')
        # Numeric arrays are mapped through this colorscale property.
        kwargs.setdefault('colorscale_path', 'bar.marker.colorscale')
        super(ColorValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class CminValidator(_plotly_utils.basevalidators.NumberValidator):
    """Validator for the ``bar.marker.cmin`` property."""

    def __init__(self, plotly_name='cmin', parent_name='bar.marker', **kwargs):
        # Apply the generated defaults only where the caller has not.
        kwargs.setdefault('edit_type', 'plot')
        # Setting an explicit bound implies cauto=False.
        kwargs.setdefault('implied_edits', {'cauto': False})
        kwargs.setdefault('role', 'info')
        super(CminValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class CmidValidator(_plotly_utils.basevalidators.NumberValidator):
    """Validator for the ``bar.marker.cmid`` property."""

    def __init__(self, plotly_name='cmid', parent_name='bar.marker', **kwargs):
        # Apply the generated defaults only where the caller has not.
        kwargs.setdefault('edit_type', 'calc')
        kwargs.setdefault('implied_edits', {})
        kwargs.setdefault('role', 'info')
        super(CmidValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class CmaxValidator(_plotly_utils.basevalidators.NumberValidator):
    """Validator for the ``bar.marker.cmax`` property."""

    def __init__(self, plotly_name='cmax', parent_name='bar.marker', **kwargs):
        # Apply the generated defaults only where the caller has not.
        kwargs.setdefault('edit_type', 'plot')
        # Setting an explicit bound implies cauto=False.
        kwargs.setdefault('implied_edits', {'cauto': False})
        kwargs.setdefault('role', 'info')
        super(CmaxValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class CautoValidator(_plotly_utils.basevalidators.BooleanValidator):
    """Validator for the ``bar.marker.cauto`` property."""

    def __init__(self, plotly_name='cauto', parent_name='bar.marker',
                 **kwargs):
        # Apply the generated defaults only where the caller has not.
        kwargs.setdefault('edit_type', 'calc')
        kwargs.setdefault('implied_edits', {})
        kwargs.setdefault('role', 'info')
        super(CautoValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
import _plotly_utils.basevalidators
class AutocolorscaleValidator(_plotly_utils.basevalidators.BooleanValidator):
    """Validator for the ``bar.marker.autocolorscale`` property."""

    def __init__(self, plotly_name='autocolorscale', parent_name='bar.marker',
                 **kwargs):
        # Apply the generated defaults only where the caller has not.
        kwargs.setdefault('edit_type', 'calc')
        kwargs.setdefault('implied_edits', {})
        kwargs.setdefault('role', 'style')
        super(AutocolorscaleValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
| [
"noreply@github.com"
] | Jonathan-MW.noreply@github.com |
df6ab93663cc3b77221a50587f2e67a918d035eb | a977365234cad283d9f3edc022976a70501c1c41 | /API-Auto/testCase/salesman/test6_20case.py | 2c0fd2d7a472932f5ba3de597609b5174e6782ca | [] | no_license | quqiao/hezongyy | 849f2b9c8a4db562bde5e415ef012ff29c704fd8 | cde6805ada979afe0b24911f8c5d0977cfd92e5a | refs/heads/master | 2021-08-16T10:20:12.618268 | 2020-07-23T09:09:07 | 2020-07-23T09:09:07 | 205,772,625 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,687 | py | import json
import unittest
from common1.configHttp import RunMain
import paramunittest
import geturlParams
import urllib.parse
# import pythoncom
import time
import readExcel
# pythoncom.CoInitialize()
# url = geturlParams.geturlParams().get_Url1_3() # 调用我们的geturlParams获取我们拼接的URL
# Test data: one row per case, loaded from the "20订单详情" sheet of the workbook.
login_xls = readExcel.readExcel().get_xls('业务员APP.xlsx', '20订单详情')
@paramunittest.parametrized(*login_xls)
class testUserLogin(unittest.TestCase):
    """Data-driven tests for the order-detail endpoint (one test per Excel row)."""

    def setParameters(self, case_name, url, port, path, query, method, expected, result):
        """
        set params (called by paramunittest with one Excel row)
        :param case_name:
        :param path
        :param query
        :param method
        :return:
        """
        self.case_name = str(case_name)
        self.url = str(url)
        self.port = str(int(port))  # int() first — presumably Excel yields the port as a float; TODO confirm
        self.path = str(path)
        self.query = str(query)
        self.method = str(method)
        self.expected = str(expected)
        self.result = str(result)
    def description(self):
        """
        test report description
        :return:
        """
        self.case_name
    def setUp(self):
        """
        Log the case name and target URL before each test.
        :return:
        """
        print("执行用例:" + self.case_name)
        url = 'http://' + self.url + ':' + self.port + self.path
        print(url)
    def test1_02case(self):
        """Order-detail endpoint (case 20)."""
        self.checkResult()
    def tearDown(self):
        print("测试结束,输出log完结\n\n")
    def checkResult(self):  # assertion helper
        """
        check test result
        :return:
        """
        # url1 = "http://www.xxx.com/login?"
        # new_url = url1 + self.query
        # data1 = dict(urllib.parse.parse_qsl(urllib.parse.urlsplit(new_url).query))# converts the name=&pwd= query of a full URL into {'name': 'xxx', 'pwd': 'bbb'}
        url = 'http://' + self.url + ':' + self.port + self.path
        data1 = self.query.encode('utf-8')
        info = RunMain().run_main(self.method, url, data1)  # issue the request via run_main using the HTTP method from Excel, and capture the response
        ss = json.loads(info)  # parse the response body into a dict
        if self.case_name == 'url正确':  # valid-URL case: expect the success code (original note referenced login/200 — stale)
            self.assertEqual(ss['code'], '000000')
        if self.case_name == 'url错误':  # same as above, but expecting the error code
            self.assertEqual(ss['code'], "900004")
        if self.case_name == 'url为空':  # same as above
            self.assertEqual(ss['code'], "900004")
        print("返回信息:" + ss['message'])
# if __name__ == '__main__': # 测试一下,我们读取配置文件的方法是否可用
# print(testUserLogin().checkResult())
| [
"553248560@.com"
] | 553248560@.com |
def count_sheep(n):
    """Return the last number counted before every digit 0-9 has appeared.

    Counts n, 2n, 3n, ... and accumulates the decimal digits seen across
    all counted values.  Returns the final total as a string once all ten
    digits have appeared, or "INSOMNIA" when n == 0 (counting 0 forever
    never shows any new digit).
    """
    if n == 0:
        return "INSOMNIA"
    seen = set()
    total = 0
    # Stop as soon as all ten decimal digits have been observed.
    while len(seen) < 10:
        total += n
        seen.update(int(d) for d in str(total))
    return str(total)
# Driver (Python 2): read the number of cases, then one value of n per line.
len = int(raw_input())  # NOTE(review): shadows the builtin `len`
for case in range(0, len):
    n = int(raw_input())
    # Code Jam output format: "Case #<i>: <answer>"
    print "Case #" + str(case+1) + ":", count_sheep(n)
| [
"[dhuo@tcd.ie]"
] | [dhuo@tcd.ie] |
fc995d5b1e327ff5d62745113f22f6ee8701faf7 | 57db61160494659af43ee255d1e6ab2af6617114 | /ultron-api/advantages/migrations/0001_initial.py | 4896f942309121a367273a44dde8b46f7ed02bb3 | [] | no_license | gloompi/ultron-studio | fc667d563467b386a8dec04a6079e7cdcfedc5a7 | ec2ae8051644df2433b931c7e0228e75eaf20990 | refs/heads/master | 2023-06-25T19:22:45.119315 | 2019-12-08T05:53:02 | 2019-12-08T05:53:02 | 226,545,035 | 0 | 0 | null | 2023-06-10T00:22:15 | 2019-12-07T16:44:16 | JavaScript | UTF-8 | Python | false | false | 1,345 | py | # Generated by Django 2.2.3 on 2019-08-01 16:57
from django.db import migrations, models
import django.db.models.deletion
import tinymce.models
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates AdvantagesSection and Advantage."""

    # First migration of the app, so there are no dependencies.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='AdvantagesSection',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=30)),
                ('description', tinymce.models.HTMLField()),
            ],
            options={
                # NOTE(review): "Advatages" looks like a typo for "Advantages";
                # fixing it would require a follow-up migration.
                'verbose_name': 'Advatages Section',
                'verbose_name_plural': 'Advatages Section',
            },
        ),
        migrations.CreateModel(
            name='Advantage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('icon', models.ImageField(max_length=25, upload_to='')),
                ('title', models.CharField(max_length=30)),
                ('description', tinymce.models.HTMLField()),
                # Each Advantage belongs to one section; deleting the section
                # cascades to its advantages.
                ('section', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='advantages_section', to='advantages.AdvantagesSection')),
            ],
        ),
    ]
| [
"gloompi@gmail.com"
] | gloompi@gmail.com |
3f48495450dbab8a40f102e8fcf7f50b74aa16e7 | f8777c76ec7c8da686c72a2975c17bbd294edc0e | /eden/integration/fsck_test.py | 25fa242bb4141942126412262227e4313e8399cf | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | jmswen/eden | 3a8e96bf0fbbf6c987f4b17bbd79dcbe0964c033 | 5e0b051703fa946cc77fc43004435ae6b20599a1 | refs/heads/master | 2020-06-06T06:08:28.946268 | 2019-06-19T04:45:11 | 2019-06-19T04:45:11 | 192,659,804 | 0 | 0 | NOASSERTION | 2019-06-19T04:43:36 | 2019-06-19T04:43:36 | null | UTF-8 | Python | false | false | 6,885 | py | #!/usr/bin/env python3
#
# Copyright (c) 2016-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
import pathlib
import subprocess
import unittest
from pathlib import Path
from typing import Tuple
from eden.test_support.temporary_directory import TemporaryDirectoryMixin
from .lib import edenclient, overlay as overlay_mod, repobase, testcase
# Exit codes produced by `eden fsck`, mirrored here for the assertions below.
FSCK_RETCODE_OK = 0
FSCK_RETCODE_SKIPPED = 1
FSCK_RETCODE_WARNINGS = 2
FSCK_RETCODE_ERRORS = 3
class FsckTest(testcase.EdenRepoTest):
overlay: overlay_mod.OverlayStore
def populate_repo(self) -> None:
self.repo.write_file("README.md", "tbd\n")
self.repo.write_file("proj/src/main.c", "int main() { return 0; }\n")
self.repo.write_file("proj/src/lib.c", "void foo() {}\n")
self.repo.write_file("proj/src/include/lib.h", "#pragma once\nvoid foo();\n")
self.repo.write_file(
"proj/test/test.sh", "#!/bin/bash\necho test\n", mode=0o755
)
self.repo.write_file("doc/foo.txt", "foo\n")
self.repo.write_file("doc/bar.txt", "bar\n")
self.repo.symlink("proj/doc", "../doc")
self.repo.commit("Initial commit.")
def create_repo(self, name: str) -> repobase.Repository:
return self.create_hg_repo("main")
def setup_eden_test(self) -> None:
super().setup_eden_test()
self.overlay = overlay_mod.OverlayStore(self.eden, self.mount_path)
def run_fsck(self, *args: str) -> Tuple[int, str]:
"""Run `eden fsck [args]` and return a tuple of the return code and
the combined stdout and stderr.
The command output will be decoded as UTF-8 and returned as a string.
"""
cmd_result = self.eden.run_unchecked(
"fsck", *args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
fsck_out = cmd_result.stdout.decode("utf-8", errors="replace")
return (cmd_result.returncode, fsck_out)
def test_fsck_force_and_check_only(self) -> None:
"""Test the behavior of the --force and --check-only fsck flags."""
foo_overlay_path = self.overlay.materialize_file(pathlib.Path("doc/foo.txt"))
# Running fsck with the mount still mounted should fail
returncode, fsck_out = self.run_fsck(self.mount)
self.assertIn(f"Not checking {self.mount}", fsck_out)
self.assertEqual(FSCK_RETCODE_SKIPPED, returncode)
# Running fsck with --force should override that
returncode, fsck_out = self.run_fsck(self.mount, "--force")
self.assertIn(f"warning: could not obtain lock", fsck_out)
self.assertIn(f"scanning anyway due to --force", fsck_out)
self.assertIn(f"Checking {self.mount}", fsck_out)
self.assertEqual(FSCK_RETCODE_OK, returncode)
# fsck should perform the check normally without --force
# if the mount is not mounted
self.eden.run_cmd("unmount", self.mount)
returncode, fsck_out = self.run_fsck(self.mount)
self.assertIn(f"Checking {self.mount}", fsck_out)
self.assertIn("No issues found", fsck_out)
self.assertEqual(FSCK_RETCODE_OK, returncode)
# Truncate the overlay file for doc/foo.txt to 0 length
with foo_overlay_path.open("wb"):
pass
# Running fsck with --check-only should report the error but not try to fix it.
returncode, fsck_out = self.run_fsck("--check-only")
self.assertIn(f"Checking {self.mount}", fsck_out)
self.assertRegex(
fsck_out,
r"invalid overlay file for materialized file .* \(doc/foo.txt\).*: "
r"zero-sized overlay file",
)
self.assertRegex(fsck_out, r"\b1 errors")
self.assertRegex(fsck_out, "Not fixing errors: --check-only was specified")
self.assertEqual(FSCK_RETCODE_ERRORS, returncode)
# Running fsck with no arguments should attempt to fix the errors
returncode, fsck_out = self.run_fsck()
self.assertRegex(
fsck_out,
r"invalid overlay file for materialized file .* \(doc/foo.txt\).*: "
r"zero-sized overlay file",
)
self.assertRegex(fsck_out, r"\b1 errors")
self.assertRegex(fsck_out, "Beginning repairs")
self.assertRegex(
fsck_out, "replacing corrupt file inode 'doc/foo.txt' with an empty file"
)
self.assertRegex(fsck_out, "Fixed 1 of 1 issues")
self.assertEqual(FSCK_RETCODE_ERRORS, returncode)
# There should be no more errors if we run fsck again
returncode, fsck_out = self.run_fsck()
self.assertIn(f"Checking {self.mount}", fsck_out)
self.assertIn("No issues found", fsck_out)
self.assertEqual(FSCK_RETCODE_OK, returncode)
def test_fsck_multiple_mounts(self) -> None:
    """fsck scans every unmounted checkout, skips mounted ones unless --force."""
    extra_mounts = [
        Path(self.mounts_dir) / name
        for name in ("second_mount", "third_mount", "fourth_mount")
    ]
    for checkout in extra_mounts:
        self.eden.clone(self.repo_name, checkout)
    second, third, fourth = extra_mounts

    # Leave only the third checkout mounted.
    for checkout in (Path(self.mount), second, fourth):
        self.eden.unmount(checkout)

    # Without --force the still-mounted checkout must be skipped.
    returncode, fsck_out = self.run_fsck()
    for checked in (self.mount, second, fourth):
        self.assertIn(f"Checking {checked}", fsck_out)
    self.assertIn(f"Not checking {third}", fsck_out)
    self.assertEqual(FSCK_RETCODE_SKIPPED, returncode)

    # With --force everything is scanned, including the mounted checkout.
    returncode, fsck_out = self.run_fsck("--force")
    for checked in (self.mount, second, third, fourth):
        self.assertIn(f"Checking {checked}", fsck_out)
    self.assertEqual(FSCK_RETCODE_OK, returncode)
class FsckTestNoEdenfs(unittest.TestCase, TemporaryDirectoryMixin):
    """fsck behaviour when the eden instance has no checkouts configured."""

    def test_fsck_no_checkouts(self) -> None:
        state_dir = Path(self.make_temporary_directory())
        client = edenclient.EdenFS(state_dir)
        proc = client.run_unchecked(
            "fsck",
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            encoding="utf-8",
            errors="replace",
        )
        # A no-op fsck reports on stderr only and exits successfully.
        self.assertIn(
            "No Eden checkouts are configured. Nothing to check.", proc.stderr
        )
        self.assertEqual("", proc.stdout)
        self.assertEqual(0, proc.returncode)
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
f29c393fb5d26ef7471da24846610d659313a979 | 44c29ed593d91919752ba251eb328330cfa8ea79 | /logstash/urls.py | f2d42066d18134332b2391b397b6f446f7ef8507 | [
"MIT"
] | permissive | Azimut-Prod/azimut-gestion | 51b21732815a6f9dbe1d15baef746c777469c601 | fafb30599a26cd89cbe34bbf53179c2cfe0d5cb7 | refs/heads/master | 2020-12-24T14:26:51.160788 | 2014-10-02T17:34:15 | 2014-10-02T17:34:15 | 13,557,198 | 0 | 2 | null | 2014-06-20T07:56:43 | 2013-10-14T09:21:32 | Python | UTF-8 | Python | false | false | 575 | py | # -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
# URL routes for the logstash app.  All view names are resolved as strings
# against the ``logstash.views`` module via the ``patterns()`` prefix API.
# NOTE(review): ``patterns()`` and string view references were removed in
# Django 1.10 — this routing style assumes an older Django release.
urlpatterns = patterns(
    'logstash.views',
    # Log-file CRUD views, keyed by primary key.
    url(r'^$', 'file_list'),
    url(r'^(?P<pk>[0-9]*)/show/$', 'file_show'),
    url(r'^(?P<pk>[0-9]*)/edit/$', 'file_edit'),
    url(r'^(?P<pk>[0-9]*)/delete/$', 'file_delete'),
    # Rendered logstash shipper configuration for a named host.
    url(r'^(?P<name>.*)/shipper.conf$', 'generate_config'),
    # Auto-detection workflow, keyed by a numeric job id.
    url(r'^auto$', 'start_autodetect'),
    url(r'^auto/(?P<key>[0-9]*)$', 'watch_autodetect'),
    url(r'^auto/w/(?P<key>[0-9]*)$', 'watch_get_status'),
    url(r'^auto/confirm/(?P<key>[0-9]*)$', 'watch_final'),
)
| [
"maximilien@theglu.org"
] | maximilien@theglu.org |
4b4294d9cc6ed233881c46938df6a8a067bee583 | e146d44875fb44a13b3b004604694bccaa23ddf2 | /docs/Amadeus-master/pactravel-master/python-client/test/test_restricted_rate.py | 477739ad7b3413811f0c07b2ffdbd8a05be8093f | [] | no_license | shopglobal/travel | 8d959b66d77f2e1883b671628c856daf0f3b21bb | 0c33467cd2057da6e01f9240be2fd4b8f5490539 | refs/heads/master | 2022-12-23T00:13:02.597730 | 2017-09-26T06:03:15 | 2017-09-26T06:03:15 | 104,405,869 | 0 | 0 | null | 2022-12-08T00:35:36 | 2017-09-21T22:43:23 | PHP | UTF-8 | Python | false | false | 951 | py | # coding: utf-8
"""
Amadeus Travel Innovation Sandbox
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 1.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import swagger_client
from swagger_client.rest import ApiException
from swagger_client.models.restricted_rate import RestrictedRate
class TestRestrictedRate(unittest.TestCase):
    """Unit-test stubs for the generated RestrictedRate model."""

    def setUp(self):
        # No fixtures are required for this generated stub.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testRestrictedRate(self):
        """Placeholder test for constructing a RestrictedRate instance."""
        # FIXME: construct object with mandatory attributes with example values
        # model = swagger_client.models.restricted_rate.RestrictedRate()
        pass
# Allow executing this module directly: run the stubs via unittest's CLI.
if __name__ == '__main__':
    unittest.main()
| [
"president@worldvaporexpo.com"
] | president@worldvaporexpo.com |
1638e7347594a5b77e8ede5907e8984e480a8384 | 320bd873b6cf5db2fc9194cc4ad782a49373d6ee | /page/base_page.py | 7753eaa01b0d7f8d3fee1fec5a4453ad2c0757e7 | [] | no_license | donniezhanggit/AppiumDemo8_Android | 7b0aed903969e2101330b5da4e89c39e3d591723 | 7a2ed3be27ed6cb27bd4e30e13d48cc8f34aa654 | refs/heads/master | 2020-09-13T17:35:33.749237 | 2019-03-10T10:04:46 | 2019-03-10T10:04:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,774 | py | from appium.webdriver import WebElement
from appium.webdriver.common.mobileby import MobileBy
from lxml.html import Element
from selenium.webdriver.common.by import By
from driver.Appium import Appium
from lxml import etree
class BasePage(object):
    """Base class for page objects.

    Provides locator builders (UiAutomator and XPath) and element lookups
    that retry once after dismissing known blocking popup dialogs.
    """

    def __init__(self):
        pass

    @classmethod
    def byAndroid(cls, text=None, id=None):
        """Build a ``(MobileBy.ANDROID_UIAUTOMATOR, selector)`` locator tuple.

        :param text: exact UiSelector text to match, or None
        :param id: resource-id to match, or None
        :return: locator tuple usable with ``driver.find_element(*locator)``

        Fixed: the original crashed with a TypeError when ``text`` was None
        but ``id`` was given; the selector is now built per provided part.
        """
        selector = "new UiSelector()"
        if id is not None:
            selector += '.resourceId("' + id + '")'
        if text is not None:
            selector += '.text("' + text + '")'
        return (MobileBy.ANDROID_UIAUTOMATOR, selector)

    @classmethod
    def byAttribute(cls, text=None, id=None):
        """Build an XPath locator matching on @text and/or resource-id.

        Fixed: the original produced invalid XPath (``//*[ and ...]``) when
        only ``id`` was supplied; predicates are now joined as needed.
        """
        predicates = []
        if text is not None:
            predicates.append("@text='" + text + "'")
        if id is not None:
            predicates.append("contains(@resource-id, '" + id + "')")
        return "//*[" + " and ".join(predicates) + "]"

    def findBy(self, by=By.ID, value=None):
        """Find a single element; on failure dismiss known popups and retry once."""
        try:
            return Appium.getDriver().find_element(by, value)
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are not swallowed by the retry path.
        except Exception:
            self.exception_handle2()
            return Appium.getDriver().find_element(by, value)

    def find(self, locate) -> WebElement:
        """Find a single element from a ``(by, value)`` locator tuple."""
        return self.findBy(*locate)

    def findAll(self, locate) -> list:
        """Find all elements matching a ``(by, value)`` locator tuple."""
        return Appium.getDriver().find_elements(*locate)

    def exception_handle(self):
        """Dismiss a known popup by querying the driver for its buttons."""
        # Button labels of known blocking dialogs ("OK" / "remind me later").
        self.black_words = [self.byAttribute(text="好的"), self.byAttribute(text="下次再说")]
        for w in self.black_words:
            elements = Appium.getDriver().find_elements(By.XPATH, w)
            if len(elements) > 0:
                elements[0].click()
                return Appium.getDriver().find_element(By.XPATH, w)

    def exception_handle2(self):
        """Dismiss known popups by scanning the page source before re-querying.

        Parsing the XML dump first avoids repeated slow driver round-trips
        when no popup is present.
        """
        self.black_words = [self.byAttribute(text="好的"), self.byAttribute(text="下次再说")]
        # TODO: improve popup handling — detect toasts, auto-detect
        # compatibility issues, etc.
        page_source = Appium.getDriver().page_source
        print(page_source)
        xml = etree.XML(str(page_source).encode("utf-8"))
        for w in self.black_words:
            print(w)
            if len(xml.xpath(w)) > 0:
                Appium.getDriver().find_element(By.XPATH, w).click()
| [
"seveniruby@gmail.com"
] | seveniruby@gmail.com |
913718a80453266c1a4049cef48c9a4760dc651f | 0fd92b7d882a1edb5542f6600bb177dcad67ed50 | /powerful104/1003.py | 37f52fb9dbd627a8706e69ccc3234286a7f874e4 | [] | no_license | alpha-kwhn/Baekjun | bce71fdfbbc8302ec254db5901109087168801ed | f8b4136130995dab78f34e84dfa18736e95c8b55 | refs/heads/main | 2023-08-02T11:11:19.482020 | 2021-03-09T05:34:01 | 2021-03-09T05:34:01 | 358,347,708 | 0 | 0 | null | 2021-04-15T17:56:14 | 2021-04-15T17:56:13 | null | UTF-8 | Python | false | false | 340 | py | def fibo(n):
fn = 0
fn1 = 1
ans = fn1
for _ in range(n - 1):
ans = fn + fn1
fn = fn1
fn1 = ans
return ans
# Baekjoon 1003 driver: for each query N, print how many times fib(0) and
# fib(1) would be evaluated by the naive recursive algorithm — which equals
# F(N-1) and F(N) respectively.
num = int(input())  # number of test cases
for _ in range(num):
    n = int(input())
    if(n==0):
        # fib(0) alone: one zero-evaluation, no one-evaluations.
        print(1,0)
    elif(n==1):
        # fib(1) alone: no zero-evaluations, one one-evaluation.
        print(0,1)
    else:
        print(fibo(n-1),fibo(n))
"noreply@github.com"
] | alpha-kwhn.noreply@github.com |
136b32274f21e8d126ad13eadf2f5747cf24e7c9 | 99ef323acf05ae8c76f8ade8d2fe03d455c00302 | /tmp/fast_sufarr.py | 2ecb40e3773710a7258987394d63227cc5d35d3f | [
"LicenseRef-scancode-warranty-disclaimer",
"MIT"
] | permissive | kcarnold/suggestion | b166fd3d986eace138a9d6c877ee4f71d94a796a | 27650cbf724b77361f8f4e609774ecb9bcf9e3c9 | refs/heads/master | 2021-01-13T15:24:43.195938 | 2018-12-21T23:02:31 | 2018-12-21T23:02:31 | 76,397,078 | 1 | 0 | null | 2017-02-21T16:19:02 | 2016-12-13T20:48:02 | Python | UTF-8 | Python | false | false | 5,235 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Mar 20 14:48:33 2017
@author: kcarnold
"""
import numpy as np
from suggestion import suggestion_generator
import kenlm
import tqdm
#%%
def sizeof_fmt(num, suffix='B', units=None, power=None, sep='', precision=2, sign=False):
    """Render *num* human-readably, dividing by *power* through *units*.

    Integers are printed verbatim; floats with *precision* decimals.  Once
    every unit but the last is exhausted, the last unit is used regardless
    of magnitude.
    """
    prefix = '+' if sign and num > 0 else ''
    scaled = num
    for unit in units[:-1]:
        if abs(round(scaled, precision)) >= power:
            scaled /= float(power)
            continue
        if isinstance(scaled, int):
            return "{0}{1}{2}{3}{4}".format(prefix, scaled, sep, unit, suffix)
        return "{0}{1:3.{5}f}{2}{3}{4}".format(prefix, scaled, sep, unit, suffix, precision)
    return "{0}{1:.{5}f}{2}{3}{4}".format(prefix, scaled, sep, units[-1], suffix, precision)
def sizeof_fmt_iec(num, suffix='B', sep='', precision=2, sign=False):
    """Render *num* with binary (IEC, 1024-based) prefixes: KiB, MiB, ..."""
    iec_units = ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi']
    return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision,
                      sign=sign, units=iec_units, power=1024)
def sizeof_fmt_decimal(num, suffix='B', sep='', precision=2, sign=False):
    """Render *num* with decimal (SI, 1000-based) prefixes: kB, MB, ..."""
    si_units = ['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
    return sizeof_fmt(num, suffix=suffix, sep=sep, precision=precision,
                      sign=sign, units=si_units, power=1000)
#%%
# Load the trained Yelp language model, grab the shared suffix array, and
# count how many suffixes start with a comma context.
model = suggestion_generator.get_model('yelp_train')
start_state = model.get_state([','])[0]
sufarr = suggestion_generator.sufarr
a, b = sufarr.search_range((',', ''))
sizeof_fmt_decimal(b-a)
#%%
# IPython cell magic: benchmark the existing word-collection routine.
%timeit suggestion_generator.collect_words_in_range(a,b,1)
#%%
# For every token position, record the rarest unigram log-probability among
# the following 5 tokens (a lookahead "rarity" signal per position).
unigram_probs = model.unigram_probs
lookup_voc = model.model.vocab_index
rarest_words_flat = []
for doc in tqdm.tqdm(sufarr.docs):
    word_indices = [lookup_voc(w) for w in doc]
    ups = unigram_probs[word_indices]
    # NOTE(review): np.nanmin over the 5-token window assumes unigram_probs
    # may contain NaNs (e.g. for OOV entries) — confirm.
    rarest_words_flat.extend([np.nanmin(ups[i+1:i+6]) for i in range(len(doc)-1)])
    rarest_words_flat.append(0.) # for the last token in the document
    rarest_words_flat.append(0.) # for the end-of-document token
rarest_words_flat = np.array(rarest_words_flat)
#%%
# For every token, compute how many tokens remain until the next sentence
# boundary ('</S>'), with the document end acting as a final boundary.
words_before_eos = []
for doc in tqdm.tqdm(sufarr.docs):
    eos_indices = [idx for idx, tok in enumerate(doc) if tok == '</S>'] + [len(doc)]
    offset = 0
    cur_eos_idx = eos_indices[0]
    for i, tok in enumerate(doc):
        until_eos = cur_eos_idx - i
        if until_eos < 0:
            # Walked past the current sentence boundary; advance to the next.
            offset += 1
            cur_eos_idx = eos_indices[offset]
            until_eos = cur_eos_idx - i
        words_before_eos.append(until_eos)
    words_before_eos.append(0) # for end of document token
words_before_eos = np.array(words_before_eos)
#%%
# A filtered suffix array is one where only a subset of the possible indices exist in the lookup tables.
# To construct one, we create a mask over the existing indices.
# The suffix_array array maps from sorted index to master location.
# Keep only suffixes whose position lies more than 5 tokens before a sentence
# end, by masking the parallel doc/tok lookup tables.
filtered_doc_idx = sufarr.doc_idx[(words_before_eos > 5)[sufarr.suffix_array[1:]]]
filtered_tok_idx = sufarr.tok_idx[(words_before_eos > 5)[sufarr.suffix_array[1:]]]
#%%
# Spot-check: peek at the first 10 tokens of an arbitrary filtered suffix.
idx = 1000
sufarr.docs[filtered_doc_idx[idx]][filtered_tok_idx[idx]:][:10]
#%%
from suggestion.suffix_array import DocSuffixArray
# NOTE(review): two constructor arguments are passed as None — presumably the
# raw suffix_array and lcp fields, unneeded by get_partial_suffix; confirm
# against DocSuffixArray before wider use.
filtered_sufarr = DocSuffixArray(sufarr.docs, None, filtered_doc_idx, filtered_tok_idx, None)
#%%
a, b = filtered_sufarr.search_range(('the', ''))
import random
filtered_sufarr.get_partial_suffix(random.randrange(a,b), 0, 10)
#%%
# NOTE(review): rarest_words_by_doc is not defined in this file's visible
# cells — presumably created interactively in an earlier session.
rare_word_raw = np.array([tok for rw in rarest_words_by_doc for tok in rw + [0]])
#%%
# Flatten all documents into one token stream with '</d>' separators, so a
# flat offset can address any suffix directly.
docs_flat_raw = []
for doc in tqdm.tqdm(sufarr.docs):
    docs_flat_raw.extend(doc)
    docs_flat_raw.append('</d>')
#docs_flat_raw = [tok for doc in sufarr.docs for tok in doc + ['</d>']])
#%%
# Timing experiments (IPython magics): vectorized numpy lookup vs. flat-list
# slicing vs. the get_partial_suffix helper.
%timeit rarest_words_by_sufarr_idx = rare_word_raw[sufarr.suffix_array[a:b] + 1]
#[rarest_words_by_doc[sufarr.doc_idx[idx]][sufarr.tok_idx[idx] + 1] for idx in range(a,b)]
#%%
%timeit for idx in range(a,b): offset = sufarr.suffix_array[idx]; phrase = docs_flat_raw[offset:offset+5]
#%%
%timeit for idx in range(a,b): sufarr.get_partial_suffix(idx, 1, 5)
#%%
# Seed the incremental scoring experiment: find the first suffix (index a)
# with at least N_EVAL continuation words, then score its first N_EVAL words
# from the comma context, keeping per-prefix KenLM states and running scores.
context_words = 1
N_EVAL = 3
while True:
    phrase = sufarr.get_partial_suffix(a, context_words, context_words + N_EVAL)
    if len(phrase) < N_EVAL:
        a += 1
    else:
        break
states = [start_state]
scores = [0.]
while len(states) < N_EVAL + 1:
    state = kenlm.State()
    score = model.model.BaseScore(states[-1], phrase[len(states) - 1], state)
    scores.append(scores[-1] + score)
    states.append(state)
#%%
# Walk the suffix-array range, reusing LM states for the longest common
# prefix (lcp) shared with the previous suffix so only the tail is rescored.
skipped = 0
lcp = suggestion_generator.sufarr.lcp
for idx in tqdm.tqdm(range(a+1, b)):
    in_common = lcp[idx-1] - context_words
    new_phrase = sufarr.get_partial_suffix(idx, context_words, context_words + N_EVAL)
    # Truncate states/scores back to the shared prefix length.
    states[in_common+1:] = []
    scores[in_common+1:] = []
    if len(new_phrase) < N_EVAL or '</S>' in new_phrase:
        skipped += 1
        continue
    while len(states) < N_EVAL + 1:
        state = kenlm.State()
        # Actual scoring commented out here to time the bookkeeping alone.
        score = 0#model.model.BaseScore(states[-1], phrase[len(states) - 1], state)
        scores.append(scores[-1] + score)
        states.append(state)
# assert scores[-1] * suggestion_generator.LOG10 == model.score_seq(start_state, phrase[:N_EVAL])[0]
#%%
# Baseline for comparison: rescore every suffix from scratch.
for idx in tqdm.tqdm(range(a+1, b)):
    model.score_seq(start_state, sufarr.get_partial_suffix(idx, context_words, context_words + N_EVAL))[0]
"kcarnold@alum.mit.edu"
] | kcarnold@alum.mit.edu |
79b80e6a18f0426dadc135a21d26ca7e0eedd67b | 6371acdb640e62e4e6addac2ba1aa70002a8c1b1 | /Algorithms/pySINDy/env/lib/python3.6/site-packages/pylint/test/test_self.py | cbc23176b4c9b816cad25a9895ecb6ae69cadc63 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | M-Vause/SEED | 263307152ebac1e4f49cd81dcd5207ecbdf51139 | cda94a02a5ef47a1e9a885d330eef2821301ebed | refs/heads/master | 2022-12-13T20:11:58.893994 | 2020-04-27T16:10:09 | 2020-04-27T16:10:09 | 252,790,026 | 3 | 3 | MIT | 2022-12-08T01:52:05 | 2020-04-03T16:55:10 | Jupyter Notebook | UTF-8 | Python | false | false | 20,431 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Vlad Temian <vladtemian@gmail.com>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
# Copyright (c) 2017 Daniel Miller <millerdev@gmail.com>
# Copyright (c) 2017 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Copyright (c) 2017 Thomas Hisch <t.hisch@gmail.com>
# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
# Copyright (c) 2018 Jason Owen <jason.a.owen@gmail.com>
# Copyright (c) 2018 Jace Browning <jacebrowning@gmail.com>
# Copyright (c) 2018 Reverb C <reverbc@users.noreply.github.com>
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
import contextlib
import json
import re
import sys
import os
from os.path import join, dirname, abspath
import tempfile
import textwrap
import configparser
from io import StringIO
from pylint.lint import Run
from pylint.reporters import BaseReporter
from pylint.reporters.text import *
from pylint.reporters.json import JSONReporter
import pytest
from pylint import utils
HERE = abspath(dirname(__file__))
@contextlib.contextmanager
def _patch_streams(out):
sys.stderr = sys.stdout = out
try:
yield
finally:
sys.stderr = sys.__stderr__
sys.stdout = sys.__stdout__
@contextlib.contextmanager
def _configure_lc_ctype(lc_ctype):
lc_ctype_env = "LC_CTYPE"
original_lctype = os.environ.get(lc_ctype_env)
os.environ[lc_ctype_env] = lc_ctype
try:
yield
finally:
os.environ.pop(lc_ctype_env)
if original_lctype:
os.environ[lc_ctype_env] = original_lctype
class MultiReporter(BaseReporter):
    """Fan-out reporter that forwards every event to several sub-reporters."""

    def __init__(self, reporters):
        self._reporters = reporters
        self.path_strip_prefix = os.getcwd() + os.sep

    def on_set_current_module(self, *args, **kwargs):
        for sub in self._reporters:
            sub.on_set_current_module(*args, **kwargs)

    def handle_message(self, msg):
        for sub in self._reporters:
            sub.handle_message(msg)

    def display_reports(self, layout):
        # Report layouts are intentionally dropped; only messages matter here.
        pass

    @property
    def out(self):
        # The first sub-reporter's stream stands in for the whole group.
        return self._reporters[0].out

    @property
    def linter(self):
        return self._linter

    @linter.setter
    def linter(self, value):
        # Record locally and propagate to every sub-reporter.
        self._linter = value
        for sub in self._reporters:
            sub.linter = value
class TestRunTC(object):
    """End-to-end tests that invoke pylint's ``Run`` entry point in-process
    and assert on its exit status and captured output."""

    def _runtest(self, args, reporter=None, out=None, code=None):
        # Run pylint with *args* and assert the expected exit status,
        # attaching any captured output to the failure message.
        if out is None:
            out = StringIO()
        pylint_code = self._run_pylint(args, reporter=reporter, out=out)
        if reporter:
            output = reporter.out.getvalue()
        elif hasattr(out, "getvalue"):
            output = out.getvalue()
        else:
            output = None
        msg = "expected output status %s, got %s" % (code, pylint_code)
        if output is not None:
            msg = "%s. Below pylint output: \n%s" % (msg, output)
        assert pylint_code == code, msg

    def _run_pylint(self, args, out, reporter=None):
        # Run() always calls sys.exit(); capture the status via pytest.raises.
        args = args + ["--persistent=no"]
        with _patch_streams(out):
            with pytest.raises(SystemExit) as cm:
                with warnings.catch_warnings():
                    warnings.simplefilter("ignore")
                    Run(args, reporter=reporter)
            return cm.value.code

    def _clean_paths(self, output):
        """Remove version-specific tox parent directories from paths."""
        return re.sub(
            "^py.+/site-packages/", "", output.replace("\\", "/"), flags=re.MULTILINE
        )

    def _test_output(self, args, expected_output):
        # Assert that pylint's cleaned output contains *expected_output*.
        out = StringIO()
        self._run_pylint(args, out=out)
        actual_output = self._clean_paths(out.getvalue())
        assert expected_output.strip() in actual_output.strip()

    def test_pkginfo(self):
        """Make pylint check itself."""
        self._runtest(["pylint.__pkginfo__"], reporter=TextReporter(StringIO()), code=0)

    def test_all(self):
        """Make pylint check itself."""
        reporters = [
            TextReporter(StringIO()),
            ColorizedTextReporter(StringIO()),
            JSONReporter(StringIO()),
        ]
        self._runtest(
            [join(HERE, "functional/arguments.py")],
            reporter=MultiReporter(reporters),
            code=2,
        )

    def test_no_ext_file(self):
        self._runtest([join(HERE, "input", "noext")], code=0)

    def test_w0704_ignored(self):
        self._runtest([join(HERE, "input", "ignore_except_pass_by_default.py")], code=0)

    def test_exit_zero(self):
        self._runtest(
            ["--exit-zero", join(HERE, "regrtest_data", "syntax_error.py")], code=0
        )

    def test_generate_config_option(self):
        self._runtest(["--generate-rcfile"], code=0)

    def test_generate_config_option_order(self):
        # Generated configuration must be deterministic across runs.
        out1 = StringIO()
        out2 = StringIO()
        self._runtest(["--generate-rcfile"], code=0, out=out1)
        self._runtest(["--generate-rcfile"], code=0, out=out2)
        output1 = out1.getvalue()
        output2 = out2.getvalue()
        assert output1 == output2

    def test_generate_config_disable_symbolic_names(self):
        # Test that --generate-rcfile puts symbolic names in the --disable
        # option.
        out = StringIO()
        self._run_pylint(["--generate-rcfile", "--rcfile="], out=out)
        output = out.getvalue()
        # Get rid of the pesky messages that pylint emits if the
        # configuration file is not found.
        master = re.search(r"\[MASTER", output)
        out = StringIO(output[master.start() :])
        parser = configparser.RawConfigParser()
        # NOTE(review): readfp is deprecated in favour of read_file.
        parser.readfp(out)
        messages = utils._splitstrip(parser.get("MESSAGES CONTROL", "disable"))
        assert "suppressed-message" in messages

    def test_generate_rcfile_no_obsolete_methods(self):
        out = StringIO()
        self._run_pylint(["--generate-rcfile"], out=out)
        output = out.getvalue()
        assert "profile" not in output

    def test_inexisting_rcfile(self):
        out = StringIO()
        with pytest.raises(IOError) as excinfo:
            self._run_pylint(["--rcfile=/tmp/norcfile.txt"], out=out)
        assert "The config file /tmp/norcfile.txt doesn't exist!" == str(excinfo.value)

    def test_help_message_option(self):
        self._runtest(["--help-msg", "W0101"], code=0)

    def test_error_help_message_option(self):
        # Unknown message ids must not crash the help command.
        self._runtest(["--help-msg", "WX101"], code=0)

    def test_error_missing_arguments(self):
        self._runtest([], code=32)

    def test_no_out_encoding(self):
        """test redirection of stdout with non ascii caracters
        """
        # This test reproduces bug #48066 ; it happens when stdout is redirected
        # through '>' : the sys.stdout.encoding becomes then None, and if the
        # output contains non ascii, pylint will crash
        if sys.version_info < (3, 0):
            strio = tempfile.TemporaryFile()
        else:
            strio = StringIO()
        assert strio.encoding is None
        self._runtest(
            [join(HERE, "regrtest_data/no_stdout_encoding.py"), "--enable=all"],
            out=strio,
            code=28,
        )

    def test_parallel_execution(self):
        self._runtest(
            [
                "-j 2",
                join(HERE, "functional/arguments.py"),
                join(HERE, "functional/arguments.py"),
            ],
            code=2,
        )

    def test_parallel_execution_missing_arguments(self):
        self._runtest(["-j 2", "not_here", "not_here_too"], code=1)

    def test_py3k_option(self):
        # Test that --py3k flag works.
        rc_code = 0
        self._runtest(
            [join(HERE, "functional", "unpacked_exceptions.py"), "--py3k"], code=rc_code
        )

    def test_py3k_jobs_option(self):
        rc_code = 0
        self._runtest(
            [join(HERE, "functional", "unpacked_exceptions.py"), "--py3k", "-j 2"],
            code=rc_code,
        )

    @pytest.mark.skipif(sys.version_info[0] > 2, reason="Requires the --py3k flag.")
    def test_py3k_commutative_with_errors_only(self):
        # Test what gets emitted with -E only
        module = join(HERE, "regrtest_data", "py3k_error_flag.py")
        expected = textwrap.dedent(
            """
            ************* Module py3k_error_flag
            Explicit return in __init__
            """
        )
        self._test_output(
            [module, "-E", "--msg-template='{msg}'"], expected_output=expected
        )
        # Test what gets emitted with -E --py3k
        expected = textwrap.dedent(
            """
            ************* Module py3k_error_flag
            Use raise ErrorClass(args) instead of raise ErrorClass, args.
            """
        )
        self._test_output(
            [module, "-E", "--py3k", "--msg-template='{msg}'"], expected_output=expected
        )
        # Test what gets emitted with --py3k -E
        self._test_output(
            [module, "--py3k", "-E", "--msg-template='{msg}'"], expected_output=expected
        )

    @pytest.mark.skipif(sys.version_info[0] > 2, reason="Requires the --py3k flag.")
    def test_py3k_commutative_with_config_disable(self):
        module = join(HERE, "regrtest_data", "py3k_errors_and_warnings.py")
        rcfile = join(HERE, "regrtest_data", "py3k-disabled.rc")
        cmd = [module, "--msg-template='{msg}'", "--reports=n"]
        expected = textwrap.dedent(
            """
            ************* Module py3k_errors_and_warnings
            import missing `from __future__ import absolute_import`
            Use raise ErrorClass(args) instead of raise ErrorClass, args.
            Calling a dict.iter*() method
            print statement used
            """
        )
        self._test_output(cmd + ["--py3k"], expected_output=expected)
        expected = textwrap.dedent(
            """
            ************* Module py3k_errors_and_warnings
            Use raise ErrorClass(args) instead of raise ErrorClass, args.
            Calling a dict.iter*() method
            print statement used
            """
        )
        self._test_output(
            cmd + ["--py3k", "--rcfile", rcfile], expected_output=expected
        )
        expected = textwrap.dedent(
            """
            ************* Module py3k_errors_and_warnings
            Use raise ErrorClass(args) instead of raise ErrorClass, args.
            print statement used
            """
        )
        self._test_output(
            cmd + ["--py3k", "-E", "--rcfile", rcfile], expected_output=expected
        )
        self._test_output(
            cmd + ["-E", "--py3k", "--rcfile", rcfile], expected_output=expected
        )

    def test_abbreviations_are_not_supported(self):
        expected = "no such option: --load-plugin"
        self._test_output([".", "--load-plugin"], expected_output=expected)

    def test_enable_all_works(self):
        module = join(HERE, "data", "clientmodule_test.py")
        expected = textwrap.dedent(
            """
            ************* Module data.clientmodule_test
            pylint/test/data/clientmodule_test.py:10:8: W0612: Unused variable 'local_variable' (unused-variable)
            pylint/test/data/clientmodule_test.py:18:4: C0111: Missing method docstring (missing-docstring)
            pylint/test/data/clientmodule_test.py:22:0: C0111: Missing class docstring (missing-docstring)
            """
        )
        self._test_output(
            [module, "--disable=all", "--enable=all", "-rn"], expected_output=expected
        )

    def test_wrong_import_position_when_others_disabled(self):
        expected_output = textwrap.dedent(
            """
            ************* Module wrong_import_position
            pylint/test/regrtest_data/wrong_import_position.py:11:0: C0413: Import "import os" should be placed at the top of the module (wrong-import-position)
            """
        )
        module1 = join(HERE, "regrtest_data", "import_something.py")
        module2 = join(HERE, "regrtest_data", "wrong_import_position.py")
        args = [
            module2,
            module1,
            "--disable=all",
            "--enable=wrong-import-position",
            "-rn",
            "-sn",
        ]
        out = StringIO()
        self._run_pylint(args, out=out)
        actual_output = self._clean_paths(out.getvalue().strip())
        to_remove = "No config file found, using default configuration"
        if to_remove in actual_output:
            actual_output = actual_output[len(to_remove) :]
        if actual_output.startswith("Using config file "):
            # If ~/.pylintrc is present remove the
            # Using config file... line
            actual_output = actual_output[actual_output.find("\n") :]
        assert expected_output.strip() == actual_output.strip()

    def test_import_itself_not_accounted_for_relative_imports(self):
        expected = "Your code has been rated at 10.00/10"
        package = join(HERE, "regrtest_data", "dummy")
        self._test_output(
            [package, "--disable=locally-disabled", "-rn"], expected_output=expected
        )

    def test_reject_empty_indent_strings(self):
        expected = "indent string can't be empty"
        module = join(HERE, "data", "clientmodule_test.py")
        self._test_output([module, "--indent-string="], expected_output=expected)

    def test_json_report_when_file_has_syntax_error(self):
        out = StringIO()
        module = join(HERE, "regrtest_data", "syntax_error.py")
        self._runtest([module], code=2, reporter=JSONReporter(out))
        output = json.loads(out.getvalue())
        assert isinstance(output, list)
        assert len(output) == 1
        assert isinstance(output[0], dict)
        expected = {
            "obj": "",
            "column": 0,
            "line": 1,
            "type": "error",
            "symbol": "syntax-error",
            "module": "syntax_error",
        }
        message = output[0]
        for key, value in expected.items():
            assert key in message
            assert message[key] == value
        assert "invalid syntax" in message["message"].lower()

    def test_json_report_when_file_is_missing(self):
        out = StringIO()
        module = join(HERE, "regrtest_data", "totally_missing.py")
        self._runtest([module], code=1, reporter=JSONReporter(out))
        output = json.loads(out.getvalue())
        assert isinstance(output, list)
        assert len(output) == 1
        assert isinstance(output[0], dict)
        expected = {
            "obj": "",
            "column": 0,
            "line": 1,
            "type": "fatal",
            "symbol": "fatal",
            "module": module,
        }
        message = output[0]
        for key, value in expected.items():
            assert key in message
            assert message[key] == value
        assert message["message"].startswith("No module named")

    def test_information_category_disabled_by_default(self):
        expected = "Your code has been rated at 10.00/10"
        path = join(HERE, "regrtest_data", "meta.py")
        self._test_output([path], expected_output=expected)

    def test_error_mode_shows_no_score(self):
        expected_output = textwrap.dedent(
            """
            ************* Module application_crash
            pylint/test/regrtest_data/application_crash.py:1:6: E0602: Undefined variable 'something_undefined' (undefined-variable)
            """
        )
        module = join(HERE, "regrtest_data", "application_crash.py")
        self._test_output([module, "-E"], expected_output=expected_output)

    def test_evaluation_score_shown_by_default(self):
        expected_output = "Your code has been rated at "
        module = join(HERE, "regrtest_data", "application_crash.py")
        self._test_output([module], expected_output=expected_output)

    def test_confidence_levels(self):
        expected = "Your code has been rated at"
        path = join(HERE, "regrtest_data", "meta.py")
        self._test_output(
            [path, "--confidence=HIGH,INFERENCE"], expected_output=expected
        )

    def test_bom_marker(self):
        path = join(HERE, "regrtest_data", "meta.py")
        config_path = join(HERE, "regrtest_data", ".pylintrc")
        expected = "Your code has been rated at 10.00/10"
        self._test_output(
            [path, "--rcfile=%s" % config_path, "-rn"], expected_output=expected
        )

    def test_pylintrc_plugin_duplicate_options(self):
        dummy_plugin_path = join(HERE, "regrtest_data", "dummy_plugin")
        # Enable --load-plugins=dummy_plugin
        sys.path.append(dummy_plugin_path)
        config_path = join(HERE, "regrtest_data", "dummy_plugin.rc")
        expected = (
            ":dummy-message-01 (I9061): *Dummy short desc 01*\n"
            " Dummy long desc This message belongs to the dummy_plugin checker.\n\n"
            ":dummy-message-02 (I9060): *Dummy short desc 02*\n"
            " Dummy long desc This message belongs to the dummy_plugin checker."
        )
        self._test_output(
            [
                "--rcfile=%s" % config_path,
                "--help-msg=dummy-message-01,dummy-message-02",
            ],
            expected_output=expected,
        )
        expected = (
            "[DUMMY_PLUGIN]\n\n# Dummy option 1\ndummy_option_1=dummy value 1\n\n"
            "# Dummy option 2\ndummy_option_2=dummy value 2"
        )
        self._test_output(
            ["--rcfile=%s" % config_path, "--generate-rcfile"], expected_output=expected
        )
        sys.path.remove(dummy_plugin_path)

    def test_pylintrc_comments_in_values(self):
        path = join(HERE, "regrtest_data", "test_pylintrc_comments.py")
        config_path = join(HERE, "regrtest_data", "comments_pylintrc")
        expected = textwrap.dedent(
            """
            ************* Module test_pylintrc_comments
            pylint/test/regrtest_data/test_pylintrc_comments.py:2:0: W0311: Bad indentation. Found 1 spaces, expected 4 (bad-indentation)
            pylint/test/regrtest_data/test_pylintrc_comments.py:1:0: C0111: Missing module docstring (missing-docstring)
            pylint/test/regrtest_data/test_pylintrc_comments.py:1:0: C0111: Missing function docstring (missing-docstring)
            """
        )
        self._test_output(
            [path, "--rcfile=%s" % config_path, "-rn"], expected_output=expected
        )

    def test_no_crash_with_formatting_regex_defaults(self):
        self._runtest(
            ["--ignore-patterns=a"], reporter=TextReporter(StringIO()), code=32
        )

    def test_getdefaultencoding_crashes_with_lc_ctype_utf8(self):
        expected_output = textwrap.dedent(
            """
            ************* Module application_crash
            pylint/test/regrtest_data/application_crash.py:1:6: E0602: Undefined variable 'something_undefined' (undefined-variable)
            """
        )
        module = join(HERE, "regrtest_data", "application_crash.py")
        with _configure_lc_ctype("UTF-8"):
            self._test_output([module, "-E"], expected_output=expected_output)

    @pytest.mark.skipif(sys.platform == "win32", reason="only occurs on *nix")
    def test_parseable_file_path(self):
        file_name = "test_target.py"
        fake_path = HERE + os.getcwd()
        module = join(fake_path, file_name)
        try:
            # create module under directories which have the same name as reporter.path_strip_prefix
            # e.g. /src/some/path/src/test_target.py when reporter.path_strip_prefix = /src/
            os.makedirs(fake_path)
            with open(module, "w") as test_target:
                test_target.write("a,b = object()")
            self._test_output(
                [module, "--output-format=parseable"],
                expected_output=join(os.getcwd(), file_name),
            )
        finally:
            os.remove(module)
            os.removedirs(fake_path)
| [
"58262117+M-Vause@users.noreply.github.com"
] | 58262117+M-Vause@users.noreply.github.com |
87f02715b690985909049acdc08e8c98984300e1 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_259/ch89_2020_05_06_17_38_05_834845.py | e551edeb3e60c84128b1bced37b853fd9b3307c6 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 265 | py | class Circulo:
def __init__(self,centro,raio):
self.centro = p1
self.raio = numero
def Contem(self,ponto):
if ((self.centro.x-ponto.x)**2+(self.centro.y-ponto.y)**2)**0.5 < self.raio:
return True
return False | [
"you@example.com"
] | you@example.com |
250a4740724dde3382658dc89f01f5972e1f4cd5 | 8b5e9766ca9e2a3457e921f8fda0649b4f4019b5 | /src/pytest_benchmark/histogram.py | 99ed4eacc97952485d642a06d049181befa89fde | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | stanislavlevin/pytest-benchmark | b53b111c20a32f8d8bc4d32a3556740ed0012d96 | e6d9b63468319ec87504f6c81592826ee242887b | refs/heads/master | 2020-04-12T09:58:10.103228 | 2018-11-23T15:45:59 | 2018-11-25T04:03:23 | 162,414,447 | 0 | 0 | BSD-2-Clause | 2018-12-19T09:30:34 | 2018-12-19T09:30:33 | null | UTF-8 | Python | false | false | 3,478 | py | from collections import Iterable
import py
from .utils import TIME_UNITS
from .utils import slugify
try:
from pygal.graph.box import Box
from pygal.style import DefaultStyle
except ImportError as exc:
raise ImportError(exc.args, "Please install pygal and pygaljs or pytest-benchmark[histogram]")
class CustomBox(Box):
    """pygal Box chart fed with pre-computed Tukey statistics.

    Each serie is the 7-tuple (min, ld15iqr, q1, median, q3, hd15iqr, max)
    followed by a trailing path string, so pygal's own quartile computation
    is bypassed.  Overrides private ``Box`` hooks — keep in sync with the
    pinned pygal version.
    """
    def _box_points(self, serie, _):
        # The serie already holds the box points; whiskers are min and max.
        return serie, [serie[0], serie[6]]

    def _value_format(self, x):
        # Tooltip body for one box; x[:7] are the Tukey values in order.
        return "Min: {0[0]:.4f}\n" \
               "Q1-1.5IQR: {0[1]:.4f}\n" \
               "Q1: {0[2]:.4f}\nMedian: {0[3]:.4f}\nQ3: {0[4]:.4f}\n" \
               "Q3+1.5IQR: {0[5]:.4f}\n" \
               "Max: {0[6]:.4f}".format(x[:7])

    def _format(self, x, *args):
        # Our series values are iterable 8-tuples: render the custom tooltip
        # and return the trailing path (index 7) alongside it.  Anything
        # else falls back to pygal's default formatting.
        sup = super(CustomBox, self)._format
        if args:
            val = x.values
        else:
            val = x
        if isinstance(val, Iterable):
            return self._value_format(val), val[7]
        else:
            return sup(x, *args)

    def _tooltip_data(self, node, value, x, y, classes=None, xlabel=None):
        # value is the (tooltip_text, path) pair produced by _format; show
        # the text via the base implementation and expose the path as the
        # tooltip's x_label <desc> node.
        super(CustomBox, self)._tooltip_data(node, value[0], x, y, classes=classes, xlabel=None)
        self.svg.node(node, 'desc', class_="x_label").text = value[1]
def make_plot(benchmarks, title, adjustment):
    """Build a CustomBox plot from benchmark stat rows.

    ``benchmarks`` is a list of dicts with keys ``min``, ``ld15iqr``, ``q1``,
    ``median``, ``q3``, ``hd15iqr``, ``max``, ``name``, ``fullname``,
    ``rounds`` and ``path``; ``adjustment`` scales raw seconds into the
    display unit.  Returns the un-rendered pygal chart.
    """
    class Style(DefaultStyle):
        # Rows with a saved-data path (comparison runs) are drawn black;
        # everything else uses pygal's second default colour.
        colors = ["#000000" if row["path"] else DefaultStyle.colors[1]
                  for row in benchmarks]
        font_family = 'Consolas, "Deja Vu Sans Mono", "Bitstream Vera Sans Mono", "Courier New", monospace'
    # Y-axis range: floor of the smallest min, ceiling of the largest upper
    # whisker (max capped at hd15iqr so outliers don't blow up the scale).
    minimum = int(min(row["min"] * adjustment for row in benchmarks))
    maximum = int(max(
        min(row["max"], row["hd15iqr"]) * adjustment
        for row in benchmarks
    ) + 1)
    # pygaljs is optional; when present it provides the interactive
    # tooltip javascript bundle.
    try:
        import pygaljs
    except ImportError:
        opts = {}
    else:
        opts = {
            "js": [
                pygaljs.uri("2.0.x", "pygal-tooltips.js")
            ]
        }
    plot = CustomBox(
        box_mode='tukey',
        x_label_rotation=-90,
        x_labels=["{0[name]}".format(row) for row in benchmarks],
        show_legend=False,
        title=title,
        x_title="Trial",
        y_title="Duration",
        style=Style,
        min_scale=20,
        max_scale=20,
        truncate_label=50,
        range=(minimum, maximum),
        zero=minimum,
        css=[
            "file://style.css",
            "file://graph.css",
            """inline:
              .tooltip .value {
                font-size: 1em !important;
              }
              .axis text {
                font-size: 9px !important;
              }
            """
        ],
        **opts
    )
    # One serie per benchmark: seven scaled Tukey values plus the path,
    # which CustomBox surfaces in the tooltip.
    for row in benchmarks:
        serie = [row[field] * adjustment for field in ["min", "ld15iqr", "q1", "median", "q3", "hd15iqr", "max"]]
        serie.append(row["path"])
        plot.add("{0[fullname]} - {0[rounds]} rounds".format(row), serie)
    return plot
def make_histogram(output_prefix, name, benchmarks, unit, adjustment):
    """Render the benchmark box plot to an SVG file and return its path.

    The file name is derived from ``output_prefix`` plus a slug of *name*
    (when given); *unit*/*adjustment* control how durations are labelled
    and scaled.
    """
    unit_label = TIME_UNITS[unit]
    if name:
        path = "{0}-{1}.svg".format(output_prefix, slugify(name))
        title = "Speed in {0} of {1}".format(unit_label, name)
    else:
        path = "{0}.svg".format(output_prefix)
        title = "Speed in {0}".format(unit_label)
    output_file = py.path.local(path).ensure()
    chart = make_plot(
        benchmarks=benchmarks,
        title=title,
        adjustment=adjustment,
    )
    chart.render_to_file(str(output_file))
    return output_file
| [
"contact@ionelmc.ro"
] | contact@ionelmc.ro |
25443c2e8849e4b7ce67817990bccc5573455d64 | 1fc638bf8b485f6a0466da096a51038c2e10dad6 | /aidu_gui/src/aidu_gui/msg/__init__.py | 8f6452927883fd761b3f27ef21f14c6c61ced507 | [
"MIT"
] | permissive | MartienLagerweij/aidu | a2621288e78cc7c0d9df821ab60f597818158836 | a9b6e5a61f20bd60a7773495ba254e1bded1d7a1 | refs/heads/master | 2021-01-10T19:50:18.576658 | 2014-01-31T14:52:14 | 2014-01-31T14:52:14 | 30,254,215 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25 | py | from ._Solenoid import *
| [
"rjagerman@gmail.com"
] | rjagerman@gmail.com |
fe32a7e6d3393931c7e334cc368e537008db42c1 | c50e7eb190802d7849c0d0cea02fb4d2f0021777 | /src/securityinsight/azext_sentinel/aaz/latest/sentinel/entity_query/template/__cmd_group.py | 91049c1fe8f891761c3c334396713083e30a9df8 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | Azure/azure-cli-extensions | c1615b19930bba7166c282918f166cd40ff6609c | b8c2cf97e991adf0c0a207d810316b8f4686dc29 | refs/heads/main | 2023-08-24T12:40:15.528432 | 2023-08-24T09:17:25 | 2023-08-24T09:17:25 | 106,580,024 | 336 | 1,226 | MIT | 2023-09-14T10:48:57 | 2017-10-11T16:27:31 | Python | UTF-8 | Python | false | false | 648 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command_group(
    "sentinel entity-query template",
)
class __CMDGroup(AAZCommandGroup):
    """Manage entity query template with sentinel."""
__all__ = ["__CMDGroup"]
| [
"noreply@github.com"
] | Azure.noreply@github.com |
fd0fee403e8525c2faf6875daec96b184ed9db55 | 3f658c0098a66015840bd9d631987e6b937bb300 | /53.Flask_CafeWifi/main.py | 3e74bc359eee40baaa9c1fc79e03690a38a78d0a | [] | no_license | RohitPr/PythonProjects | 4cf7ec37cfba60afecc88ae542cc4155b72f4098 | 7dd807a45cd86cf0851cb95a1b1433805891f990 | refs/heads/main | 2023-06-01T06:42:40.147968 | 2021-06-13T00:57:05 | 2021-06-13T00:57:05 | 337,298,986 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,604 | py | from flask import Flask, render_template, redirect, url_for
from flask_bootstrap import Bootstrap
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, DateField
from wtforms.fields.core import SelectField
from wtforms_components import TimeField
from wtforms.validators import DataRequired, URL
import csv
# Flask application object plus Bootstrap integration for the templates.
app = Flask(__name__)
# SECURITY NOTE(review): the session secret key is hard-coded in source;
# load it from the environment or a config file before deploying.
app.config['SECRET_KEY'] = '8BYkEfBA6O6donzWlSihBXox7C0sKR6b'
Bootstrap(app)
class CafeForm(FlaskForm):
    """WTForms form for submitting a new cafe entry.

    Field order matches the columns of cafe-data.csv exactly; the rating
    fields store the chosen emoji string, not a number.
    """
    cafe = StringField('Cafe name', validators=[DataRequired()])
    location = StringField("Cafe Location on Google Maps (URL)", validators=[
                           DataRequired(), URL()])
    # NOTE: this field name shadows the builtin open() at class scope only;
    # renaming it would change the rendered HTML field name and the CSV.
    open = TimeField("Opening Time",
                     validators=[DataRequired()])
    close = TimeField("Closing Time",
                      validators=[DataRequired()])
    coffee_rating = SelectField("Coffee Rating", choices=[
        "☕️", "☕☕", "☕☕☕", "☕☕☕☕", "☕☕☕☕☕"], validators=[DataRequired()])
    wifi_rating = SelectField("Wifi Strength Rating", choices=[
        "✘", "💪", "💪💪", "💪💪💪", "💪💪💪💪", "💪💪💪💪💪"], validators=[DataRequired()])
    power_rating = SelectField("Power Socket Availability", choices=[
        "✘", "🔌", "🔌🔌", "🔌🔌🔌", "🔌🔌🔌🔌", "🔌🔌🔌🔌🔌"], validators=[DataRequired()])
    submit = SubmitField('Submit')
# all Flask routes below
@ app.route("/")
def home():
    """Landing page."""
    return render_template("index.html")
@ app.route('/add', methods=['GET', 'POST'])
def add_cafe():
    """Show the new-cafe form; append a CSV row on a valid submit."""
    form = CafeForm()
    if form.validate_on_submit():
        # NOTE(review): fields are joined with commas but never quoted, so a
        # cafe name containing a comma corrupts the CSV row; consider
        # csv.writer here (the read side already uses csv.reader).
        with open("cafe-data.csv", mode="a", encoding="utf-8") as csv_file:
            csv_file.write(f"\n{form.cafe.data},"
                           f"{form.location.data},"
                           f"{form.open.data},"
                           f"{form.close.data},"
                           f"{form.coffee_rating.data},"
                           f"{form.wifi_rating.data},"
                           f"{form.power_rating.data}")
        return redirect(url_for('cafes'))
    return render_template('add.html', form=form)
@app.route('/cafes')
def cafes():
    """Render every row of cafe-data.csv (header row included) as a table."""
    with open('cafe-data.csv', newline='', encoding="utf8") as csv_file:
        rows = [record for record in csv.reader(csv_file, delimiter=',')]
    return render_template('cafes.html', cafes=rows)
if __name__ == '__main__':
    # Development server only; debug mode must be disabled in production.
    app.run(debug=True)
| [
"bladekiller95@gmail.com"
] | bladekiller95@gmail.com |
46a2c839cd2adda1f78de52d264f9c9633dcced1 | eeeb97414f4182c373ba95e53353adb187bd3b0e | /app/main/views.py | 7272292e11c4878121ddde53a29021f4db377abc | [
"MIT"
] | permissive | tharcissie/pitchapp | 02ad1aad01a751c6bc3ddaa32e1c7bc7fd511e92 | c5c734694f7b68342db5e07e6312ed8605696263 | refs/heads/master | 2023-02-07T07:41:15.065913 | 2020-12-21T06:50:56 | 2020-12-21T06:50:56 | 316,588,283 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 4,124 | py | from flask import render_template, redirect, url_for,abort,request
from . import main
from flask_login import login_required,current_user
from ..models import User,Pitch,Comment,Upvote,Downvote
from .form import UpdateProfile,PitchForm,CommentForm
from .. import db,photos
@main.route('/')
def index():
    """Landing page: every pitch, plus the pitches grouped per category."""
    def by_category(label):
        return Pitch.query.filter_by(category=label).all()

    all_pitches = Pitch.query.all()
    return render_template(
        'index.html',
        hobbies=by_category('Hobbies'),
        experiences=by_category('Experiences'),
        pitch=all_pitches,
        skills=by_category('Skills'),
    )
@main.route('/new_pitch', methods=['POST', 'GET'])
@login_required
def new_pitch():
    """Render the pitch form and persist a new pitch on a valid submit.

    Cleanup: the original bound ``user_id = current_user`` and several
    intermediate locals that were never used; the values are read straight
    from the form instead.
    """
    form = PitchForm()
    if form.validate_on_submit():
        new_pitch_object = Pitch(
            pitch=form.pitch.data,
            user_id=current_user._get_current_object().id,
            category=form.category.data,
            title=form.title.data,
        )
        new_pitch_object.save_pitch()
        return redirect(url_for('main.index'))
    return render_template('pitch.html', form=form)
@main.route('/comment/<int:pitch_id>', methods=['POST', 'GET'])
@login_required
def comment(pitch_id):
    """Show one pitch with its comments; accept a new comment via POST."""
    form = CommentForm()
    pitch = Pitch.query.get(pitch_id)
    existing = Comment.query.filter_by(pitch_id=pitch_id).all()
    if form.validate_on_submit():
        posted = Comment(
            comment=form.comment.data,
            user_id=current_user._get_current_object().id,
            pitch_id=pitch_id,
        )
        posted.save_comment()
        # Redirect back to this page so a refresh cannot repost the form.
        return redirect(url_for('.comment', pitch_id=pitch_id))
    return render_template('comment.html', form=form, pitch=pitch, all_comments=existing)
@main.route('/user/<name>')
def profile(name):
    """Render a user's profile page, or 404 when *name* is unknown.

    Fix: the 404 guard now runs before any other work, so an unknown name
    no longer triggers a ``current_user`` access and a wasted pitch query
    (the original only aborted after both).
    """
    user = User.query.filter_by(username=name).first()
    if user is None:
        abort(404)
    # NOTE(review): this lists the *logged-in* user's pitches rather than
    # the profile owner's (user.id), and requires an authenticated session
    # even though the route has no @login_required — confirm both against
    # the template before changing.
    user_id = current_user._get_current_object().id
    pitch = Pitch.query.filter_by(user_id=user_id).all()
    return render_template("profile/profile.html", user=user, pitch=pitch)
@main.route('/user/<name>/updateprofile', methods=['POST', 'GET'])
@login_required
def updateprofile(name):
    """Bio-edit form for the named user; saves the bio on a valid submit."""
    form = UpdateProfile()
    user = User.query.filter_by(username=name).first()
    if user is None:
        abort(404)
    if not form.validate_on_submit():
        return render_template('profile/update.html', form=form)
    user.bio = form.bio.data
    user.save_user()
    return redirect(url_for('.profile', name=name))
@main.route('/user/<name>/update/profile', methods=['POST'])
@login_required
def update_profile(name):
    """Store an uploaded profile photo and persist its relative path."""
    user = User.query.filter_by(username=name).first()
    if 'photo' in request.files:
        filename = photos.save(request.files['photo'])
        # Bug fix: the saved filename was dropped and a literal placeholder
        # string was stored as the path; use the real relative path so the
        # template can resolve the image.
        path = f'photos/{filename}'
        user.profile_pic_path = path
        db.session.commit()
    return redirect(url_for('main.profile', name=name))
@main.route('/upvote/<int:id>', methods=['POST', 'GET'])
@login_required
def like(id):
    """Record at most one upvote per user per pitch, then return home."""
    valid_string = f'{current_user.id}:{id}'
    for vote in Upvote.get_upvotes(id):
        vote_repr = f'{vote}'
        print(valid_string + " " + vote_repr)
        if vote_repr == valid_string:
            # This user already upvoted this pitch: do not double-count.
            return redirect(url_for('main.index', id=id))
    fresh_vote = Upvote(user=current_user, pitch_id=id)
    fresh_vote.save_upvote()
    return redirect(url_for('main.index', id=id))
@main.route('/downvote/<int:id>', methods=['POST', 'GET'])
@login_required
def dislike(id):
    """Record at most one downvote per user per pitch; mirrors like().

    Bug fix: the original formatted the undefined name ``p`` (the loop
    variable is ``pitch``), raising NameError whenever any downvote already
    existed for the pitch.
    """
    pitches = Downvote.get_downvotes(id)
    valid_string = f'{current_user.id}:{id}'
    for pitch in pitches:
        to_str = f'{pitch}'
        print(valid_string + " " + to_str)
        if valid_string == to_str:
            # This user already downvoted this pitch: do not double-count.
            return redirect(url_for('main.index', id=id))
    new_downvote = Downvote(user=current_user, pitch_id=id)
    new_downvote.save_downvote()
    return redirect(url_for('main.index', id=id))
| [
"tharcissieidufashe@gmail.com"
] | tharcissieidufashe@gmail.com |
458015465088494352355186a88764a734c45216 | 234886b79094aa9905a4d44a5167b748c88e7961 | /nod32-2.py | 9777fd2516a205c1bf934bb15f489c493c391919 | [] | no_license | rurigeo/python | 929d3bde7dc90e38ce396831de8d6d016e5a9828 | 31a46533020fb716673e2464f573b2e15f364116 | refs/heads/master | 2021-01-01T17:42:05.071824 | 2015-01-05T03:56:42 | 2015-01-05T03:56:42 | 28,733,277 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 490 | py | #! /usr/bin/env python
# coding=utf-8
import urllib2
response = urllib2.urlopen('http://www.nod32jihuoma.cn/')
html = response.read()
with open('test','wb') as f : f.write(html)
f = open('test','rb')
for line in f:
#print line.decode('utf-8').rstrip()
if line.rstrip().startswith('<p>用户名'):
print line.rstrip()
user = line.rstrip()[15:31]
passwd = line.rstrip()[48:58]
print 'user:%s\npassword:%s' %(user,passwd)
break
| [
"you@example.com"
] | you@example.com |
b7eb06bd358d82f342e1a9fdbfe3a27e4a171857 | 494b763f2613d4447bc0013100705a0b852523c0 | /dnn/ex05_overfittingCheck.py | 9a0575cd56b68bdd68c4c397529f18b80d6362aa | [] | no_license | DL-DeepLearning/Neural-Network | dc4a2dd5efb1b4ef1a3480a1df6896c191ae487f | 3160c4af78dba6bd39552bb19f09a699aaab8e9e | refs/heads/master | 2021-06-17T05:16:22.583816 | 2017-06-07T01:21:39 | 2017-06-07T01:21:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,920 | py | #-*- coding: utf-8 -*-
''' Import theano and numpy '''
import theano
import numpy as np
execfile('00_readingInput.py')
''' set the size of mini-batch and number of epochs'''
batch_size = 16
nb_epoch = 50
''' Import keras to build a DL model '''
from keras.models import Sequential
from keras.layers.core import Dense Activation
print 'Building a model whose optimizer=adam, activation function=softplus'
model_adam = Sequential()
model_adam.add(Dense(128, input_dim=200))
model_adam.add(Activation('softplus'))
model_adam.add(Dense(256))
model_adam.add(Activation('softplus'))
model_adam.add(Dense(5))
model_adam.add(Activation('softmax'))
''' Setting optimizer as Adam '''
from keras.optimizers import SGD, Adam, RMSprop, Adagrad
model_adam.compile(loss= 'categorical_crossentropy',
optimizer='Adam',
metrics=['accuracy'])
'''Fit models and use validation_split=0.1 '''
history_adam = model_adam.fit(X_train, Y_train,
batch_size=batch_size,
nb_epoch=nb_epoch,
verbose=0,
shuffle=True,
validation_split=0.1)
loss_adam= history_adam.history.get('loss')
acc_adam = history_adam.history.get('acc')
''' Access the performance on validation data '''
val_loss_adam = history_adam.history.get('val_loss')
val_acc_adam = history_adam.history.get('val_acc')
''' Visualize the loss and accuracy of both models'''
import matplotlib.pyplot as plt
plt.figure(4)
plt.subplot(121)
plt.plot(range(len(loss_adam)), loss_adam,label='Training')
plt.plot(range(len(val_loss_adam)), val_loss_adam,label='Validation')
plt.title('Loss')
plt.legend(loc='upper left')
plt.subplot(122)
plt.plot(range(len(acc_adam)), acc_adam,label='Training')
plt.plot(range(len(val_acc_adam)), val_acc_adam,label='Validation')
plt.title('Accuracy')
#plt.show()
plt.savefig('05_overfittingCheck.png',dpi=300,format='png')
print 'Result saved into 05_overfittingCheck.png' | [
"teinhonglo@gmail.com"
] | teinhonglo@gmail.com |
2b6473bd7bb963c995ddf41427e148de56526422 | a59ec95fddc064ea9a554ad41e4ac8e82376701a | /xadmin/demo_app/repurchasearea/migrations/0002_sale.py | 02ff589f4ecd5b0fac9b11243a3304b878169813 | [
"BSD-3-Clause"
] | permissive | Nicholas86/PythonDemos | 449c08713c7c03633719a4ae7287b127783d7574 | 4f06639cc65a5e10cc993335d3d34e2d60aac983 | refs/heads/master | 2021-01-22T21:07:11.457179 | 2017-08-18T06:40:44 | 2017-08-18T06:40:44 | 100,681,216 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,954 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-07-17 10:03
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Creates the ``Sale`` table (verbose name "我要卖" / "I want to sell").

    Auto-generated by Django 1.9 ``makemigrations``; do not edit the field
    definitions by hand — create a follow-up migration instead.
    """
    dependencies = [
        ('users', '0007_address_user_id'),
        ('repurchasearea', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Sale',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('goods_name', models.CharField(max_length=255, verbose_name='\u5546\u54c1\u540d\u79f0')),
                ('goods_price', models.DecimalField(decimal_places=2, max_digits=10, verbose_name='\u5546\u54c1\u4ef7\u683c')),
                ('market_price', models.DecimalField(decimal_places=2, max_digits=10, verbose_name='\u5e02\u573a\u4ef7')),
                ('goods_tag', models.CharField(max_length=2555, verbose_name='\u5546\u54c1\u6807\u7b7e')),
                ('goods_picture', models.ImageField(upload_to='images/Sale', verbose_name='\u5546\u54c1\u56fe\u7247')),
                ('addtime', models.DateTimeField(auto_now_add=True, verbose_name='\u53d1\u5e03\u65f6\u95f4')),
                ('is_ok', models.CharField(choices=[('0', '\u5426'), ('1', '\u662f')], max_length=255, verbose_name='\u662f\u5426\u901a\u8fc7\u5ba1\u6838')),
                ('discount', models.CharField(max_length=255, verbose_name='\u4f18\u60e0\u6bd4\u4f8b')),
                ('goods_description', models.TextField(max_length=5555, verbose_name='\u5546\u54c1\u63cf\u8ff0')),
                ('user_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='users.TheUser', verbose_name='\u7528\u6237')),
            ],
            options={
                'verbose_name': '\u6211\u8981\u5356',
                'verbose_name_plural': '\u6211\u8981\u5356',
            },
        ),
    ]
| [
"970649831@qq.com"
] | 970649831@qq.com |
bb73d12a82c820135bba274b472bb02a2b25b8f0 | 9262e23dcff032dbbd05d724435d473778f47417 | /pywind/ofgem/StationSearch.py | cbb1acba5bd013e9b00481dfeda6c4be303982cf | [] | no_license | mseaborn/pywind | c825bd6c32db226a10a1389a6fd9e1e28656438a | ca89a305cdcfd56891afce33a0613208f1080ead | refs/heads/master | 2020-07-10T05:37:36.666758 | 2016-01-03T17:04:28 | 2016-01-03T17:04:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,380 | py | # coding=utf-8
#
# Copyright 2013-2015 david reid <zathrasorama@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
from .Base import OfgemForm
from pywind.ofgem.Station import Station
from lxml import etree
class StationSearch(object):
    """Drives the Ofgem accredited-stations report form and parses results.

    Use the ``filter_*`` methods to constrain the search, then call
    :meth:`get_data`; parsed :class:`Station` objects accumulate in
    ``self.stations`` and are reachable via ``len()`` and indexing.
    """

    START_URL = 'ReportViewer.aspx?ReportPath=/Renewables/Accreditation/AccreditedStationsExternalPublic&ReportVisibility=1&ReportCategory=1'

    def __init__(self):
        self.form = OfgemForm(self.START_URL)
        self.stations = []

    def __len__(self):
        return len(self.stations)

    def __getitem__(self, item):
        # Bug fix: the original condition `0 >= item < len(...)` only
        # accepted item <= 0, so every positive index silently returned
        # None.  Out-of-range indices still yield None (implicit return).
        if 0 <= item < len(self.stations):
            return self.stations[item]

    def get_data(self):
        """Submit the form and parse stations from the returned XML.

        Stations listing several generator ids separated by newlines are
        expanded into one Station per id.  Returns True on success.
        """
        if self.form.get_data():
            doc = etree.fromstring(self.form.data)
            for detail in doc.xpath("//*[local-name()='Detail']"):
                st = Station(detail)
                if b'\n' in st.generator_id:
                    ids = [x.strip() for x in st.generator_id.split(b'\n')]
                    st.generator_id = ids[0]
                    for _id in ids[1:]:
                        _st = copy.copy(st)
                        _st.generator_id = _id
                        self.stations.append(_st)
                self.stations.append(st)
            return True
        return False

    def filter_technology(self, what):
        return self.form.set_value("technology", what)

    def filter_scheme(self, scheme):
        return self.form.set_value("scheme", scheme.upper())

    def filter_name(self, name):
        return self.form.set_value("generating station search", name)

    def filter_generator_id(self, accno):
        return self.form.set_value("accreditation search", accno)
| [
"zathrasorama@gmail.com"
] | zathrasorama@gmail.com |
3ae79867f0f0711e57ceb1c0cedc23de74baf1d0 | 54277288865f738e44d7be1d6b41b19c63af267e | /pyvrl/datasets/frame_samplers/uniform_sampler.py | e5263cac4c5dfcdf043d1b756c2e3cb0057d981c | [] | no_license | scenarios/SR-SVRL | 7b41d29e16cff3020f333efc28a624d85bba4537 | 26e89ecb29355635b10a355f2f16f1b5db9c4e9b | refs/heads/master | 2023-02-26T06:16:13.314491 | 2021-01-30T16:30:57 | 2021-01-30T16:30:57 | 307,295,720 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,568 | py | import numpy as np
import random
from typing import List, Union
class UniformFrameSampler(object):
    """Uniformly place ``num_clips`` clips of ``clip_len`` frames in a video.

    The frame stride is drawn per call from ``strides``; when
    ``temporal_jitter`` is set, random per-frame offsets (< stride) are
    added.  Fix: the deprecated ``np.int`` alias (removed in NumPy 1.24)
    is replaced by the builtin ``int``, which is what the alias resolved to.
    """

    def __init__(self,
                 num_clips: int,
                 clip_len: int,
                 strides: Union[int, List[int]],
                 temporal_jitter: bool):
        self.num_clips = num_clips
        self.clip_len = clip_len
        # Normalise to a list so sample() can random.choice() uniformly.
        if isinstance(strides, (tuple, list)):
            self.strides = strides
        else:
            self.strides = [strides]
        self.temporal_jitter = temporal_jitter

    def sample(self, num_frames: int):
        """Return a ``(num_clips, clip_len)`` int array of frame indices.

        Indices are clamped to ``[0, num_frames - 1]``, so videos shorter
        than one clip simply repeat their last frame.
        """
        stride = random.choice(self.strides)
        base_length = self.clip_len * stride
        delta_length = num_frames - base_length + 1
        if delta_length > 0:
            # Centre each clip inside its tick-sized slot.
            tick = float(delta_length) / self.num_clips
            offsets = np.array([int(tick / 2.0 + tick * x)
                                for x in range(self.num_clips)], dtype=int)
        else:
            offsets = np.zeros((self.num_clips,), dtype=int)
        inds = np.arange(0, base_length, stride, dtype=int).reshape(1, self.clip_len)
        if self.num_clips > 1:
            inds = np.tile(inds, (self.num_clips, 1))
            # Apply the per-clip start offset.
            # NOTE(review): offsets are only applied when num_clips > 1, so
            # a single clip always starts at frame 0 — confirm this is
            # intended before changing (fixing it would alter outputs).
            inds = inds + offsets.reshape(self.num_clips, 1)
        if self.temporal_jitter and stride > 1:
            skip_offsets = np.random.randint(stride, size=self.clip_len)
            inds = inds + skip_offsets.reshape(1, self.clip_len)
        inds = np.clip(inds, a_min=0, a_max=num_frames - 1)
        return inds.astype(int)
| [
"zyz0205@hotmail.com"
] | zyz0205@hotmail.com |
bef650bd59c2153082ed917c021d96d1a21834a3 | 786232b3c9eac87728cbf2b5c5636d7b6f10f807 | /Leetcode/medium/130.py | f3efee7f4cf1148a224934ce604343805b35df73 | [] | no_license | luoyanhan/Algorithm-and-data-structure | c9ada2e123fae33826975665be37ca625940ddd4 | fb42c3a193f58360f6b6f3b7d5d755cd6e80ad5b | refs/heads/master | 2021-12-22T15:45:28.260386 | 2021-12-02T03:08:35 | 2021-12-02T03:08:35 | 251,007,078 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,850 | py | class Solution:
def solve(self, board) -> None:
if not board:
return
rows = len(board)
cols = len(board[0])
def dfs(board, i, j):
if i < 0 or i >= rows or j < 0 or j >= cols or board[i][j] != 'O':
return
board[i][j] = '#'
dfs(board, i - 1, j)
dfs(board, i + 1, j)
dfs(board, i, j - 1)
dfs(board, i, j + 1)
for i in [0, rows-1]:
for j in range(cols):
if board[i][j] == 'O':
dfs(board, i, j)
for i in range(rows):
for j in [0, cols-1]:
if board[i][j] == 'O':
dfs(board, i, j)
for i in range(rows):
for j in range(cols):
if board[i][j] == 'O':
board[i][j] = 'X'
if board[i][j] == '#':
board[i][j] = 'O'
class Solution:
def solve(self, board) -> None:
if not board:
return
rows = len(board)
cols = len(board[0])
def dfs(i, j):
nonlocal board
board[i][j] = '#'
stack = [[i, j]]
while stack:
x, y = stack[-1]
if 0 <= x+1 < rows and board[x+1][y] == 'O':
board[x+1][y] = '#'
stack.append([x+1, y])
continue
if 0 <= x-1 < rows and board[x-1][y] == 'O':
board[x-1][y] = '#'
stack.append([x-1, y])
continue
if 0 <= y+1 < cols and board[x][y+1] == 'O':
board[x][y+1] = '#'
stack.append([x, y+1])
continue
if 0 <= y-1 < cols and board[x][y-1] == 'O':
board[x][y-1] = '#'
stack.append([x, y-1])
continue
x, y = stack.pop()
for i in [0, rows-1]:
for j in range(cols):
if board[i][j] == 'O':
dfs(i, j)
for i in range(rows):
for j in [0, cols-1]:
if board[i][j] == 'O':
dfs(i, j)
for i in range(rows):
for j in range(cols):
if board[i][j] == 'O':
board[i][j] = 'X'
if board[i][j] == '#':
board[i][j] = 'O'
class Solution:
def solve(self, board) -> None:
if not board:
return
rows = len(board)
cols = len(board[0])
def bfs(i, j):
nonlocal board
board[i][j] = '#'
stack = [[i, j]]
while stack:
x, y = stack.pop(0)
if 0 <= x+1 < rows and board[x+1][y] == 'O':
board[x+1][y] = '#'
stack.append([x+1, y])
if 0 <= x-1 < rows and board[x-1][y] == 'O':
board[x-1][y] = '#'
stack.append([x-1, y])
if 0 <= y+1 < cols and board[x][y+1] == 'O':
board[x][y+1] = '#'
stack.append([x, y+1])
if 0 <= y-1 < cols and board[x][y-1] == 'O':
board[x][y-1] = '#'
stack.append([x, y-1])
for i in [0, rows-1]:
for j in range(cols):
if board[i][j] == 'O':
bfs(i, j)
for i in range(rows):
for j in [0, cols-1]:
if board[i][j] == 'O':
bfs(i, j)
for i in range(rows):
for j in range(cols):
if board[i][j] == 'O':
board[i][j] = 'X'
if board[i][j] == '#':
board[i][j] = 'O'
| [
"707025023@qq.com"
] | 707025023@qq.com |
9be1ccf4fb9057ff69eb200527dba9ac9462a585 | 6b9888a32733bc9d67f290cd006fb4dca84bcaf1 | /users/admin.py | ad1d88cde2247208a9212ee198fc22f718c790b8 | [] | no_license | Shatki/TanyaSite2.7 | a2008257a63134411139594c54473e88f21df8c0 | 69c7d6516d3d28dbe9370d94aacce0ac04070822 | refs/heads/master | 2020-04-30T02:31:10.274659 | 2019-03-27T19:41:59 | 2019-03-27T19:41:59 | 176,562,060 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,569 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth import forms
from .models import User, Feedback
class UserCreationForm(forms.UserCreationForm):
    """Admin "add user" form bound to the custom ``User`` model."""
    class Meta:
        model = User
        fields = (
            'username',
            'email',
            'first_name',
            'last_name',
        )
        # NOTE(review): `readonly_fields` is a ModelAdmin option, not a
        # ModelForm Meta option — Django ignores it here; move it to the
        # admin class if it is meant to take effect.
        readonly_fields = (
            'date_joined',
            'date_updated',)
class UserChangeForm(forms.UserChangeForm):
    """Admin "change user" form bound to the custom ``User`` model."""
    class Meta:
        model = User
        fields = (
            'username',
            'email',
            'password',
            'is_admin',
            'user_permissions',
        )
@admin.register(User)
class UserAdmin(UserAdmin):
    """Admin configuration for the custom ``User`` model.

    NOTE(review): the class deliberately reuses (and thereby shadows) the
    imported ``django.contrib.auth.admin.UserAdmin`` name while also
    subclassing it; renaming would be clearer but is a cosmetic change.
    """
    model = User
    # Custom change/add forms defined above in this module.
    form = UserChangeForm
    add_form = UserCreationForm
    list_display = (
        'username',
        'email',
        'first_name',
        'last_name',
        'last_login')
    list_filter = (
        'date_joined',
        'last_login',
    )
    # Timestamps are system-managed and must not be editable.
    readonly_fields = (
        'date_joined',
        'date_updated',
        'last_login',
    )
    # Section labels below are Russian UI strings; do not translate in code.
    fieldsets = (
        (None, {
            'fields': (
                'username',
                'password',
            )
        }),
        (u'Персональная информация', {
            'fields': (
                'first_name',
                'last_name',
                'photo',
            )
        }),
        (u'Права доступа', {
            'fields': (
                'groups',
                'user_permissions',
                'is_admin',
            )
        }),
        (u'Важные даты', {
            'fields': (
                'last_login',
                'date_joined',
                'date_updated',
            )
        }),
    )
    add_fieldsets = (
        (None, {
            'classes':
                ('wide',),
            'fields': (
                'email',
                'password',
                'is_admin',
            )
        }),
    )
    search_fields = (
        'email',)
    ordering = (
        'date_joined',)
    filter_horizontal = (
        'groups',
        'user_permissions',
    )
# Register your models here.
@admin.register(Feedback)
class FeedbackAdmin(admin.ModelAdmin):
    """List/search configuration for user feedback messages."""
    list_display = ('name',
                    'date',
                    'email',
                    'subject',
                    'message',
                    )
    search_fields = ('name',)
    ordering = ('date',)
| [
"Shatki@mail.ru"
] | Shatki@mail.ru |
509047a9ed74a663ba2172533a3ff084bfe7d217 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-eihealth/huaweicloudsdkeihealth/v1/model/list_image_tag_request.py | 90185a5e2726fddf941f3db5190f683b70ee737d | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 4,316 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ListImageTagRequest:
    """Request model for listing the tags of an image in an EIHealth project.

    Auto-generated Huawei Cloud SDK model; follows the SDK-wide
    getter/setter + serialization conventions.

    Attributes:
        openapi_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """

    sensitive_list = []

    openapi_types = {
        'eihealth_project_id': 'str',
        'image_id': 'str'
    }

    attribute_map = {
        'eihealth_project_id': 'eihealth_project_id',
        'image_id': 'image_id'
    }

    def __init__(self, eihealth_project_id=None, image_id=None):
        """ListImageTagRequest

        The model defined in huaweicloud sdk

        :param eihealth_project_id: EIHealth project ID. In the EIHealth
            console, click the target project name and check the project
            settings page to obtain it.
        :type eihealth_project_id: str
        :param image_id: Image ID
        :type image_id: str
        """

        self._eihealth_project_id = None
        self._image_id = None
        self.discriminator = None

        self.eihealth_project_id = eihealth_project_id
        self.image_id = image_id

    @property
    def eihealth_project_id(self):
        """Gets the eihealth_project_id of this ListImageTagRequest.

        EIHealth project ID. In the EIHealth console, click the target
        project name and check the project settings page to obtain it.

        :return: The eihealth_project_id of this ListImageTagRequest.
        :rtype: str
        """
        return self._eihealth_project_id

    @eihealth_project_id.setter
    def eihealth_project_id(self, eihealth_project_id):
        """Sets the eihealth_project_id of this ListImageTagRequest.

        EIHealth project ID. In the EIHealth console, click the target
        project name and check the project settings page to obtain it.

        :param eihealth_project_id: The eihealth_project_id of this ListImageTagRequest.
        :type eihealth_project_id: str
        """
        self._eihealth_project_id = eihealth_project_id

    @property
    def image_id(self):
        """Gets the image_id of this ListImageTagRequest.

        Image ID

        :return: The image_id of this ListImageTagRequest.
        :rtype: str
        """
        return self._image_id

    @image_id.setter
    def image_id(self, image_id):
        """Sets the image_id of this ListImageTagRequest.

        Image ID

        :param image_id: The image_id of this ListImageTagRequest.
        :type image_id: str
        """
        self._image_id = image_id

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                # Attributes in sensitive_list are masked in serialized output.
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        import simplejson as json
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """For `print`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ListImageTagRequest):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"hwcloudsdk@huawei.com"
] | hwcloudsdk@huawei.com |
8ee63e735b089b79918fd1a7e922e8c9e2efe5dd | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_017/ch19_2020_09_30_19_20_44_293987.py | ddb89c04d24c9ab8a2235c3a4071ba046f6c3af0 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 213 | py | def classifica_triangulo(a,b,c):
if a == b and b==c:
return "equilátero"
elif a == b and a != c or a==c and a!= b or b==c and b!=a:
return "isósceles"
else:
return "escaleno"
| [
"you@example.com"
] | you@example.com |
d2ffd9a8a0f8818b36082eb7be20047d65ee079e | 309556112e15b2b79c2e857d495d3f363a9f8c69 | /Python/GA_Class/6_logreg/example_logreg.py | d585a00b18074293f31e9f2431aeda4d7a7daddb | [] | no_license | karoljohnston/data-analysis-examples | 0aeefd8bc0780c0cc6135352a79605ac68484afd | fbd46483072bf544102bc5c02917743c5a3bb976 | refs/heads/master | 2023-03-17T15:09:29.290959 | 2016-03-22T12:15:53 | 2016-03-22T12:15:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | """ Logistic Regression is a type of probabilistic statistical classification
model used to predict a binary response from a binary predictor
* We use Linear Regression for a response that is continuous and binary for
one that is 0 and 1's, like alive or dead
* Used for predicting the outcome of a categorical dependent variable
(i.e., a class label) based on one or more predictor variables (features)
*
"""
| [
"William.Q.Liu@gmail.com"
] | William.Q.Liu@gmail.com |
c015ef847a2a849a4f5d0fbfe324e61baf9638bf | d1d79d0c3889316b298852834b346d4246825e66 | /blackbot/core/wss/ttp/art/art_T1071.004-1.py | 6a6eff49b2d77503a64c7522e67acc60bc7209ce | [] | no_license | ammasajan/Atomic-Red-Team-Intelligence-C2 | 78d1ed2de49af71d4c3c74db484e63c7e093809f | 5919804f0bdeb15ea724cd32a48f377bce208277 | refs/heads/master | 2023-07-17T12:48:15.249921 | 2021-08-21T20:10:30 | 2021-08-21T20:10:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,897 | py | from blackbot.core.utils import get_path_in_package
from blackbot.core.wss.atomic import Atomic
from terminaltables import SingleTable
import os
import json
class Atomic(Atomic):
def __init__(self):
self.name = 'CommandControl/T1071.004-1'
self.controller_type = ''
self.external_id = 'T1071.004'
self.blackbot_id = 'T1071.004-1'
self.version = ''
self.language = 'boo'
self.description = self.get_description()
self.last_updated_by = 'Blackbot, Inc. All Rights reserved'
self.references = ["System.Management.Automation"]
self.options = {
'OutString': {
'Description' : 'Appends Out-String to the PowerShellCode',
'Required' : False,
'Value' : True,
},
'BypassLogging': {
'Description' : 'Bypasses ScriptBlock and Techniques logging',
'Required' : False,
'Value' : True,
},
'BypassAmsi': {
'Description' : 'Bypasses AMSI',
'Required' : False,
'Value' : True,
}
}
def payload(self):
with open(get_path_in_package('core/wss/ttp/art/src/powershell.boo'), 'r') as ttp_src:
src = ttp_src.read()
pwsh_script = get_path_in_package('core/wss/ttp/art/pwsh_ttp/commandAndControl/T1071.004-1')
with open(pwsh_script) as pwsh:
src = src.replace("POWERSHELL_SCRIPT", pwsh.read())
src = src.replace("OUT_STRING", str(self.options["OutString"]["Value"]).lower())
src = src.replace("BYPASS_LOGGING", str(self.options["BypassLogging"]["Value"]).lower())
src = src.replace("BYPASS_AMSI", str(self.options["BypassAmsi"]["Value"]).lower())
return src
def get_description(self):
path = get_path_in_package('core/wss/ttp/art/pwsh_ttp/commandAndControl/T1071.004-1')
with open(path) as text:
head = [next(text) for l in range(4)]
technique_name = head[0].replace('#TechniqueName: ', '').strip('\n')
atomic_name = head[1].replace('#AtomicTestName: ', '').strip('\n')
description = head[2].replace('#Description: ', '').strip('\n')
language = head[3].replace('#Language: ', '').strip('\n')
aux = ''
count = 1
for char in description:
if char == '&':
continue
aux += char
if count % 126 == 0:
aux += '\n'
count += 1
out = '{}: {}\n{}\n\n{}\n'.format(technique_name, language, atomic_name, aux)
return out
| [
"root@uw2artic201.blackbot.net"
] | root@uw2artic201.blackbot.net |
c345aa80414650ac554092489ed0ab72952bae22 | eef659a707d87e979741cc11ad59344c911790f5 | /cc3/billing/migrations/0039_auto_20160907_1252.py | 99d7ee5b8028f8e281c4179cfbb988998ba510f3 | [] | no_license | qoin-open-source/samen-doen-cc3 | 1e5e40a9b677886aa78f980670df130cbbb95629 | 8b7806177e1e245af33b5112c551438b8c0af5d2 | refs/heads/master | 2020-05-04T02:26:07.039872 | 2019-04-02T21:19:54 | 2019-04-02T21:19:54 | 178,926,274 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 854 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('billing', '0038_auto_20160907_1132'),
]
operations = [
migrations.AlterModelOptions(
name='transactionparams',
options={'verbose_name_plural': 'Transaction parameters'},
),
migrations.AddField(
model_name='assignedproduct',
name='value',
field=models.FloatField(help_text='For percentage-based prices only', null=True, verbose_name='Value', blank=True),
),
migrations.AlterField(
model_name='transactionparams',
name='product',
field=models.OneToOneField(related_name='transaction_params', to='billing.Product'),
),
]
| [
"stephen.wolff@qoin.com"
] | stephen.wolff@qoin.com |
fcaed1402e3231f8c18ab3bd0e597b4762ef27b2 | e6e416ddd59ae047bc275201f1914fc19dda4083 | /pytest_asyncio/__init__.py | 4f3ba40050513ee00771465a6fdbd1b2ecd41099 | [
"Apache-2.0"
] | permissive | smagafurov/pytest-asyncio | 29652198a98349e8cc91e550fce6c32e049a444c | 16cc19d03d1922078372dc9f02ab1215f03fb1ca | refs/heads/master | 2021-01-01T17:51:28.065758 | 2017-09-11T08:47:09 | 2017-09-11T08:47:09 | 98,177,945 | 0 | 0 | null | 2017-07-24T10:16:49 | 2017-07-24T10:16:49 | null | UTF-8 | Python | false | false | 83 | py | """The main point for importing pytest-asyncio items."""
__version__ = '0.7.0-dev'
| [
"tinchester@gmail.com"
] | tinchester@gmail.com |
63cd8309b79898c60f030b6b5afa8c57404c8ecc | 6de5f28a1a87f7d5e609a8f198b286ef1b38e8b2 | /.history/__init___20191206200142.py | 04cf790acaa353cf64eac8fcf888d44c21e002dd | [] | no_license | RakshithHebbar/udacity_second_project | b8a87633ee1875b9ba49cd021871ac72e5d869dc | 25714663437aeb43dd68915b6fd0f4b11b428a0c | refs/heads/master | 2020-09-24T20:54:38.104382 | 2019-12-10T05:26:45 | 2019-12-10T05:26:45 | 225,841,384 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,778 | py | from flask import (Flask,
render_template,
request,
redirect,
jsonify,
url_for,
flash)
from sqlalchemy import create_engine, asc
from sqlalchemy.orm import sessionmaker
from database_setup import Base, User, Category, Item
from flask import session as login_session
import random
import string
from oauth2client.client import flow_from_clientsecrets
from oauth2client.client import FlowExchangeError
import httplib2
import json
from flask import make_response
import requests
app = Flask(__name__)
CLIENT_ID = json.loads(
open('client_secrets.json', 'r').read())['web']['client_id']
APPLICATION_NAME = "Item Catalog Application"
# Connect to Database and create database session
engine = create_engine('sqlite:///itemcatalog.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
xrange = range
@app.route('/login/')
def showLogin():
state = ''.join(random.choice(string.ascii_uppercase + string.digits)
for x in xrange(32))
login_session['state'] = state
# return "The current session state is %s" % login_session['state']
return render_template('login.html', STATE=state)
@app.route('/fbconnect', methods=['POST'])
def fbconnect():
if request.args.get('state') != login_session['state']:
response = make_response(json.dumps('Invalid state parameter.'), 401)
response.headers['Content-Type'] = 'application/json'
return response
access_token = request.data
print ("access token received %s " % access_token)
app_id = json.loads(open('fb_client_secrets.json', 'r').read())[
'web']['app_id']
app_secret = json.loads(
open('fb_client_secrets.json', 'r').read())['web']['app_secret']
url = 'https://graph.facebook.com/oauth/access_token?grant_type=\
fb_exchange_token&client_id=%s&client_secret=%s&fb_exchange_token=%s' % (
app_id, app_secret, access_token)
h = httplib2.Http()
result = h.request(url, 'GET')[1]
# Use token to get user info from API
userinfo_url = "https://graph.facebook.com/v2.8/me"
'''
Due to the formatting for the result from the server \
token exchange we have to split the token first on commas \
and select the first index which gives us the key : value for \
the server access token then we split it on colons to pull out \
the actual token value and replace the remaining quotes with nothing \
so that it can be used directly in the graph api calls '''
token = result.split(',')[0].split(':')[1].replace('"', '')
url = 'https://graph.facebook.com/v2.8/me?access_token=%s\
&fields=name,id,email' % token
h = httplib2.Http()
result = h.request(url, 'GET')[1]
# print "url sent for API access:%s"% url
# print "API JSON result: %s" % result
data = json.loads(result)
login_session['provider'] = 'facebook'
login_session['username'] = data["name"]
login_session['email'] = data["email"]
login_session['facebook_id'] = data["id"]
# The token must be stored in the login_session
# in order to properly logout
login_session['access_token'] = token
# Get user picture
url = 'https://graph.facebook.com/v2.8/me/picture?\
access_token=%s&redirect=0&height=200\
&width=200' % token
h = httplib2.Http()
result = h.request(url, 'GET')[1]
data = json.loads(result)
login_session['picture'] = data["data"]["url"]
# see if user exists
user_id = getUserID(login_session['email'])
if not user_id:
user_id = createUser(login_session)
login_session['user_id'] = user_id
output = ''
output += '<h1>Welcome, '
output += login_session['username']
output += '!</h1>'
output += '<img src="'
output += login_session['picture']
output += ' " style = "width: 300px; height: 300px;\
border-radius: 150px;-webkit-border-radius: 150px;\
-moz-border-radius: 150px;"> '
flash("Now logged in as %s" % login_session['username'])
return output
@app.route('/fbdisconnect')
def fbdisconnect():
facebook_id = login_session['facebook_id']
# The access token must me included to successfully logout
access_token = login_session['access_token']
url = 'https://graph.facebook.com/%s/permissions?\
access_token=%s' % (facebook_id, access_token)
h = httplib2.Http()
result = h.request(url, 'DELETE')[1]
return "you have been logged out"
@app.route('/gconnect', methods=['POST'])
def gconnect():
# Validate state token
if request.args.get('state') != login_session['state']:
response = make_response(json.dumps('Invalid state parameter.'), 401)
response.headers['Content-Type'] = 'application/json'
return response
# Obtain authorization code
code = request.data
try:
# Upgrade the authorization code into a credentials object
oauth_flow = flow_from_clientsecrets('client_secrets.json', scope='')
print('oauth flow is %s' % oauth_flow)
oauth_flow.redirect_uri = 'postmessage'
credentials = oauth_flow.step2_exchange(code)
print()
except FlowExchangeError:
response = make_response(
json.dumps('Failed to upgrade the authorization code.'), 401)
response.headers['Content-Type'] = 'application/json'
return response
# Check that the access token is valid.
access_token = credentials.access_token
url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token=%s'
% access_token)
h = httplib2.Http()
result = json.loads(h.request(url, 'GET')[1])
# If there was an error in the access token info, abort.
if result.get('error') is not None:
response = make_response(json.dumps(result.get('error')), 500)
response.headers['Content-Type'] = 'application/json'
return response
# Verify that the access token is used for the intended user.
gplus_id = credentials.id_token['sub']
if result['user_id'] != gplus_id:
response = make_response(
json.dumps("Token's user ID doesn't match given user ID."), 401)
response.headers['Content-Type'] = 'application/json'
return response
# Verify that the access token is valid for this app.
if result['issued_to'] != CLIENT_ID:
response = make_response(
json.dumps("Token's client ID does not match app's."), 401)
print ("Token's client ID does not match app's.")
response.headers['Content-Type'] = 'application/json'
return response
stored_access_token = login_session.get('access_token')
stored_gplus_id = login_session.get('gplus_id')
if stored_access_token is not None and gplus_id == stored_gplus_id:
response = make_response(json.dumps('Current user\
is already connected.'), 200)
response.headers['Content-Type'] = 'application/json'
return response
# Store the access token in the session for later use.
login_session['access_token'] = credentials.access_token
login_session['gplus_id'] = gplus_id
# Get user info
userinfo_url = "https://www.googleapis.com/oauth2/v1/userinfo"
params = {'access_token': credentials.access_token, 'alt': 'json'}
answer = requests.get(userinfo_url, params=params)
data = answer.json()
login_session['username'] = data.get('name', '')
login_session['picture'] = data['picture']
login_session['email'] = data['email']
# see if user exists, if it doesn't make a new one
user_id = getUserID(data["email"])
if not user_id:
user_id = createUser(login_session)
login_session['user_id'] = user_id
output = ''
output += '<h1>Welcome, '
output += login_session['username']
output += '!</h1>'
output += '<img src="'
output += login_session['picture']
output += ' " style = "width: 300px; height: 300px;\
border-radius: 150px;-webkit-border-radius: 150px;\
-moz-border-radius: 150px;"> '
flash("you are now logged in as %s" % login_session['username'])
print ("done!")
return output
# DISCONNECT - Revoke a current user's token and reset their login_session
# User Helper Functions
def createUser(login_session):
newUser = User(name=login_session['username'], email=login_session[
'email'], picture=login_session['picture'])
session.add(newUser)
session.commit()
user = session.query(User).fil\
ter_by(email=login_session['email']).one_or_none()
return user.id
def getUserInfo(user_id):
user = session.query(User).filter_by(id=user_id).one_or_none()
return user
def getUserID(email):
try:
user = session.query(User).filter_by(email=email).one_or_none()
return user.id
except:
return None
@app.route('/gdisconnect')
def gdisconnect():
access_token = login_session['access_token']
print ('In gdisconnect access token is %s', access_token)
print (login_session['username'])
if access_token is None:
response = make_re\
sponse(json.dumps('Current user not connected.'), 401)
response.headers['Content-Type'] = 'application/json'
return response
url = 'https://accounts.google.com/o/oauth2/revoke?\
token=%s' % login_session['access_token']
h = httplib2.Http()
result = h.request(url, 'GET')[0]
print ('result is ')
print (result)
if result['status'] == '200':
del login_session['access_token']
del login_session['gplus_id']
del login_session['username']
del login_session['email']
del login_session['picture']
del login_session['user_id']
response = make_response(json.dumps('Successfully disconnected.'), 200)
response.headers['Content-Type'] = 'application/json'
return response
else:
response = make_res\
ponse(json.dumps('Failed to revoke token for given user.', 400))
response.headers['Content-Type'] = 'application/json'
return response
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'username' in login_session:
return f(*args, **kwargs)
else:
flash("You were not authorised to access that page.")
return redirect('/login')
return decorated_function
@app.route('/')
@app.route('/catalog/')
@app.route('/catalog/items/')
def home():
"""Route to the homepage."""
print('inside home')
categories = session.query(Category).all()
items = session.query(Item).all()
return render_template(
'index.html', categories=categories, items=items)
# Add a Category
@login_required
@app.route('/category/new/', methods=['GET', 'POST'])
def add_category():
if request.method == 'POST':
newCategory = Category(name=request.form['name'],
user_id=login_session['user_id'])
session.add(newCategory)
flash('New Category %s Successfully Created' % newCategory.name)
session.commit()
return redirect(url_for('home'))
else:
return render_template('newCategory.html')
# Edit a Category
@login_required
@app.route('/category/<int:category_id>/edit/', methods=['GET', 'POST'])
def editCategory(category_id):
editedCategory = session.query(Category).filt\
er_by(id=category_id).one_or_none()
if request.method == 'POST':
if request.form['name']:
editedCategory.name = request.form['name']
flash('Category Successfully Edited %s' % editedCategory.name)
return redirect(url_for('home'))
else:
return render_template('editCategory.html', category=editedCategory)
# Delete a category
@login_required
@app.route('/category/<int:category_id>/delete/', methods=['GET', 'POST'])
def deleteCategory(category_id):
categoryToDelete = session.query(Category).fil\
ter_by(id=category_id).one_or_none()
if request.method == 'POST':
session.delete(categoryToDelete)
flash('%s Successfully Deleted' % categoryToDelete.name)
session.commit()
return redirect(url_for('home'))
else:
return render_template('deleteCategory.html',
category=categoryToDelete)
# Show items in a particular category.
@app.route('/catalog/category/<int:category_id>/items/')
def show_items_in_category(category_id):
print("inside show items")
if not exists_category(category_id):
flash("We are unable to process your request right now.")
return redirect(url_for('home'))
category = session.query(Category).filter_by(id=category_id).first()
items = session.query(Item).filter_by(category_id=category.id).all()
total = session.query(Item).filter_by(category_id=category.id).count()
return render_template(
'items.html',
category=category,
items=items,
total=total)
# Check if the category exists in the database.
def exists_category(category_id):
category = session.query(Category).filter_by(id=category_id).first()
if category is not None:
return True
else:
return False
# Check if the item exists in the database,
def exists_item(item_id):
item = session.query(Item).filter_by(id=item_id).first()
if item is not None:
return True
else:
return False
# View an item by its ID.
@app.route('/catalog/item/<int:item_id>/')
def view_item(item_id):
if exists_item(item_id):
item = session.query(Item).filter_by(id=item_id).first()
category = session.query(Category)\
.filter_by(id=item.category_id).first()
owner = session.query(User).filter_by(id=item.user_id).first()
return render_template(
"view_item.html",
item=item,
category=category,
owner=owner
)
else:
flash('We are unable to process your request right now.')
return redirect(url_for('home'))
# Create a new item
@login_required
@app.route('/category/<int:category_id>/new/', methods=['GET', 'POST'])
def add_item_by_category(category_id):
category = session.query(Category).filter_by(id=cate
gory_id).one_or_none()
if request.method == 'POST':
newItem = Item(name=request.form['name'],
description=request.form['description'],
category_id=category_id, user_id=category.user_id)
session.add(newItem)
session.commit()
flash('New Menu %s Item Successfully Created' % (newItem.name))
return redirect(url_for('show_items_in_category',
category_id=category_id))
else:
return render_template('newitem.html', category=category,
category_id=category_id)
@login_required
@app.route('/catalog/category/<int:category_id>/item/<int:item_id>/edit',
methods=['GET', 'POST'])
def edit_item(category_id, item_id):
editedItem = session.query(Item).filter_by(id=item_id).one_or_none()
if session['user_id'] != editedItem.user_id:
flash("You were not authorised to access that page.")
return redirect('/login')
category = session.query(Category).filter_by(id=category_id).one_or_none()
categories = session.query(Category).all()
if request.method == 'POST':
if request.form['name']:
editedItem.name = request.form['name']
if request.form['description']:
editedItem.description = request.form['description']
if request.form['category']:
editedItem.category_id = request.form['category']
session.add(editedItem)
session.commit()
flash('Item Successfully Edited')
return redirect(url_for('show_items_in_category',
category_id=request.form['category']))
else:
return render_template('edit_item.html', category_id=category_id,
item_id=item_id, item=editedItem,
categories=categories)
@login_required
@app.route('/catalog/category/<int:category_id>/item/<int:item_id>/delete',
methods=['GET', 'POST'])
def delete_item(category_id, item_id):
itemToDelete = session.query(Item).filter_by(id=item_id).one_or_none()
if session['user_id'] != itemToDelete.user_id:
flash("You were not authorised to access that page.")
return redirect('/login')
if request.method == 'POST':
session.delete(itemToDelete)
flash('%s Successfully Deleted' % itemToDelete.name)
session.commit()
return redir\
ect(url_for('show_items_in_category', category_id=category_id))
else:
return render_templ\
ate('delete_item.html', category_id=category_id, item=itemToDelete)
if __name__ == '__main__':
app.secret_key = 'super_secret_key'
app.debug = True
app.run(host='0.0.0.0', port=5000)
| [
"vagrant@vagrant.vm"
] | vagrant@vagrant.vm |
77843a19cd8bf835fad6c3b481a326aa9b391598 | 65329299fca8dcf2e204132624d9b0f8f8f39af7 | /napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_link/tlvs/tlv/state/__init__.py | 01dddb05a1e5b2f8b56876833126020f6f7dbddb | [
"Apache-2.0"
] | permissive | darylturner/napalm-yang | bf30420e22d8926efdc0705165ed0441545cdacf | b14946b884ad2019b896ee151285900c89653f44 | refs/heads/master | 2021-05-14T12:17:37.424659 | 2017-11-17T07:32:49 | 2017-11-17T07:32:49 | 116,404,171 | 0 | 0 | null | 2018-01-05T16:21:37 | 2018-01-05T16:21:36 | null | UTF-8 | Python | false | false | 12,940 | py |
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa-types/lsa-type/lsas/lsa/opaque-lsa/extended-link/tlvs/tlv/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State parameters relating to the sub-TLV of the extended link
LSA
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__type',)
_yang_name = 'state'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__type = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}, u'oc-ospf-types:ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}, u'oc-ospft:ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}},), is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'network-instances', u'network-instance', u'protocols', u'protocol', u'ospfv2', u'areas', u'area', u'lsdb', u'lsa-types', u'lsa-type', u'lsas', u'lsa', u'opaque-lsa', u'extended-link', u'tlvs', u'tlv', u'state']
def _get_type(self):
"""
Getter method for type, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_link/tlvs/tlv/state/type (identityref)
YANG Description: The type of the sub-TLV contained within the extended link TLV
"""
return self.__type
def _set_type(self, v, load=False):
"""
Setter method for type, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_link/tlvs/tlv/state/type (identityref)
If this variable is read-only (config: false) in the
source YANG file, then _set_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_type() directly.
YANG Description: The type of the sub-TLV contained within the extended link TLV
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}, u'oc-ospf-types:ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}, u'oc-ospft:ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}},), is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """type must be of a type compatible with identityref""",
'defined-type': "openconfig-network-instance:identityref",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}, u'oc-ospf-types:ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}, u'oc-ospft:ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}},), is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)""",
})
self.__type = t
if hasattr(self, '_set'):
self._set()
def _unset_type(self):
self.__type = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}, u'oc-ospf-types:ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}, u'oc-ospft:ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}},), is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)
type = __builtin__.property(_get_type)
_pyangbind_elements = {'type': type, }
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa-types/lsa-type/lsas/lsa/opaque-lsa/extended-link/tlvs/tlv/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State parameters relating to the sub-TLV of the extended link
LSA
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__type',)
_yang_name = 'state'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__type = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}, u'oc-ospf-types:ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}, u'oc-ospft:ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}},), is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'network-instances', u'network-instance', u'protocols', u'protocol', u'ospfv2', u'areas', u'area', u'lsdb', u'lsa-types', u'lsa-type', u'lsas', u'lsa', u'opaque-lsa', u'extended-link', u'tlvs', u'tlv', u'state']
def _get_type(self):
"""
Getter method for type, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_link/tlvs/tlv/state/type (identityref)
YANG Description: The type of the sub-TLV contained within the extended link TLV
"""
return self.__type
def _set_type(self, v, load=False):
"""
Setter method for type, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/extended_link/tlvs/tlv/state/type (identityref)
If this variable is read-only (config: false) in the
source YANG file, then _set_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_type() directly.
YANG Description: The type of the sub-TLV contained within the extended link TLV
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}, u'oc-ospf-types:ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}, u'oc-ospft:ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}},), is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """type must be of a type compatible with identityref""",
'defined-type': "openconfig-network-instance:identityref",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}, u'oc-ospf-types:ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}, u'oc-ospft:ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}},), is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)""",
})
self.__type = t
if hasattr(self, '_set'):
self._set()
def _unset_type(self):
self.__type = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}, u'oc-ospf-types:ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}, u'oc-ospft:ADJACENCY_SID': {'@namespace': u'http://openconfig.net/yang/ospf-types', '@module': u'openconfig-ospf-types'}},), is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)
type = __builtin__.property(_get_type)
_pyangbind_elements = {'type': type, }
| [
"dbarrosop@dravetech.com"
] | dbarrosop@dravetech.com |
b1789fc9bfd8eb03b63cd55618176bb3246ad504 | 8e90afd3f0dc945d9ebd6099a60094807c0067cf | /Kangho/pro_four_rules_calculation.py | 152ba821a381a63c190ada513f227f36378c3783 | [] | no_license | Deserve82/KK_Algorithm_Study | ffa109e02f1c9297597c9e07c7c3006628046740 | d3ec01b66d6e3852b7d68adaa8ba87c7e9617e24 | refs/heads/master | 2021-11-10T00:23:30.711422 | 2021-10-23T02:37:04 | 2021-10-23T02:37:04 | 231,358,800 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,099 | py | def solution(arr):
mx = [[None for i in range(len(arr))] for j in range(len(arr)) ]
mi = [[None for i in range(len(arr))] for j in range(len(arr)) ]
def mxdp(a, b):
if a == b:
mx[a][b] = int(arr[a])
mi[a][b] = int(arr[a])
if mx[a][b] != None:
return mx[a][b]
tm = []
for i in range(a+1,b,2):
op = arr[i]
if op == "+":
tm.append(mxdp(a, i-1) + mxdp(i+1, b))
elif op == "-":
tm.append(mxdp(a, i-1) - midp(i+1, b))
mx[a][b] = max(tm)
return mx[a][b]
def midp(a, b):
if a == b:
mx[a][b] = int(arr[a])
mi[a][b] = int(arr[a])
if mi[a][b] != None:
return mi[a][b]
tm = []
for i in range(a+1,b,2):
op = arr[i]
if op == "+":
tm.append(midp(a, i-1) + midp(i+1, b))
elif op == "-":
tm.append(midp(a, i-1) - mxdp(i+1, b))
mi[a][b] = min(tm)
return mi[a][b]
return mxdp(0, len(arr)-1)
| [
"noreply@github.com"
] | Deserve82.noreply@github.com |
d261be900134a7888155a860a7ba078f36751b08 | 9efe15e39ffda8391abd5a63b95e441648ba57c2 | /event_service/app.py | b02fd16c6784654ca29e61e272a3ad7fd1c15105 | [] | no_license | TechAcademy-Azerbaijan/mini_microservice_app | 3af2f80047b9a945f07ac1d4c7dd5a01980169e0 | b06c13a7feac4b9f46ab1d3bed19e36a7de3cd4e | refs/heads/master | 2023-08-15T19:49:22.058966 | 2021-10-22T08:21:40 | 2021-10-22T08:21:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | from flask import Flask
from flask_cors import CORS
app = Flask(__name__)
CORS(app)
from routers import *
if __name__ == '__main__':
app.run(port=5002, debug=True) | [
"idris.sabanli@gmail.com"
] | idris.sabanli@gmail.com |
129bfd5b719f491ee5efb2dd5d38136c3627af9c | a9e3f3ad54ade49c19973707d2beb49f64490efd | /Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/openedx/core/djangoapps/system_wide_roles/admin/forms.py | 7b0f8b93b9a1abffa55e9db24a5a78a83f9d01e9 | [
"AGPL-3.0-only",
"AGPL-3.0-or-later",
"MIT"
] | permissive | luque/better-ways-of-thinking-about-software | 8c3dda94e119f0f96edbfe5ba60ca6ec3f5f625d | 5809eaca7079a15ee56b0b7fcfea425337046c97 | refs/heads/master | 2021-11-24T15:10:09.785252 | 2021-11-22T12:14:34 | 2021-11-22T12:14:34 | 163,850,454 | 3 | 1 | MIT | 2021-11-22T12:12:31 | 2019-01-02T14:21:30 | JavaScript | UTF-8 | Python | false | false | 180 | py | """
Forms used for system wide roles admin.
"""
from edx_rbac.admin import UserRoleAssignmentAdminForm
class SystemWideRoleAssignmentForm(UserRoleAssignmentAdminForm):
pass
| [
"rafael.luque@osoco.es"
] | rafael.luque@osoco.es |
3e071265284220746e3abe9cf746007211033a47 | fef0991726dad837245b206c1720cf8da894ef53 | /TestWindow/testExample.py | 584e5edfee3c325f2ad42403555dc4ea96cd8fd2 | [] | no_license | TianD/TianDao-git-Test | 95dfbc644e9802b548c8c95201400fce7acd9e19 | 99cfbb742acf04c242a34657495bfa67bcbb68cd | refs/heads/master | 2020-04-15T19:10:57.386454 | 2014-07-04T15:08:35 | 2014-07-04T15:08:35 | 21,501,299 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 359 | py |
#coding=utf-8
import sys
from PyQt4 import QtCore, QtGui
class MyWindow( QtGui.QMainWindow ):
def __init__( self ):
QtGui.QMainWindow.__init__( self )
self.setWindowTitle( "PyQt" )
self.resize( 300, 200 )
app = QtGui.QApplication( sys.argv )
mywindow = MyWindow()
mywindow.show()
app.exec_() | [
"tiandao_dunjian@sina.cn"
] | tiandao_dunjian@sina.cn |
68c58fbec15f59c374e2ad7ea1ea5561ce6410d6 | 885d3e4017d96ed9fd56545d95ad63895e6dc01d | /rootpy/utils/tests/test_cpp.py | 24e5b87907e9d4dfe79ad31dbfa0cee3d658cc29 | [
"BSD-3-Clause"
] | permissive | rootpy/rootpy | c3eb7f70d29e4779a0bda8356fb96922bb95537f | 3926935e1f2100d8ba68070c2ab44055d4800f73 | refs/heads/master | 2021-01-17T04:08:51.330059 | 2019-01-05T17:05:50 | 2019-01-05T17:05:50 | 3,276,014 | 159 | 60 | BSD-3-Clause | 2019-12-08T12:35:08 | 2012-01-26T18:05:37 | Python | UTF-8 | Python | false | false | 1,191 | py | from __future__ import print_function
import sys
from ROOT import MethodProxy
import inspect
from rootpy.utils.cpp import CPPGrammar
from rootpy.utils.extras import iter_ROOT_classes
from nose.plugins.attrib import attr
@attr('slow')
def test_cpp():
i = 0
num_methods = 0
for cls in iter_ROOT_classes():
members = inspect.getmembers(cls)
# filter out those starting with "_" or "operator "
# and non-method members
# also split overloaded methods
methods = {}
for name, func in members:
if name.startswith('_') or name.startswith('operator'):
continue
if not isinstance(func, MethodProxy):
continue
methods[name] = (func, func.func_doc.split('\n'))
for name, (func, sigs) in methods.items():
for sig in sigs:
num_methods += 1
if CPPGrammar.parse_method(sig, silent=False):
i += 1
print("{0} / {1}".format(i, num_methods), end='\r')
sys.stdout.flush()
print("{0} / {1}".format(i, num_methods))
if __name__ == "__main__":
import nose
nose.runmodule()
| [
"noel.dawe@gmail.com"
] | noel.dawe@gmail.com |
f8660d21741423252b46a2563a116dcf317a8879 | 202e4511f8e89e2eb588581b6a1f9cd7fb1a89bf | /Server_Scripts/mitoGenome.py | 2d22301eb1cd22c0ba3a5824136faab8f1d6b2c7 | [] | no_license | labdevgen/FishHiC | 6b91e606401578e1415f3295570d5ecee085d1f7 | 27df46cf2597ae15ffb2e518ab7c04b0ec270e02 | refs/heads/master | 2021-01-18T12:58:26.279717 | 2019-12-09T05:42:42 | 2019-12-09T05:42:42 | 100,369,908 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 28 | py | from mirnylib import genome
| [
"zakazserov@gmail.com"
] | zakazserov@gmail.com |
19ee67b228a24979c31aff0bd9900dd58e0f1895 | 23545a102f6f937d59d39f95c682235113599096 | /main/models.py | 13076268296cd7d4b761496e8643fb26509001f6 | [] | no_license | Adi19471/vsu-rating-2021 | 11e6204d86e28320ad505bdf77150011fd041ff9 | 3360abf7b024058c60ce4a8d80f98300048e4a2d | refs/heads/main | 2023-07-13T18:56:02.395628 | 2021-06-24T06:17:01 | 2021-06-24T06:17:01 | 379,822,363 | 0 | 0 | null | 2021-08-29T09:58:45 | 2021-06-24T06:11:43 | JavaScript | UTF-8 | Python | false | false | 1,209 | py | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Movie(models.Model):
# fields for the movie table
name = models.CharField(max_length=300)
director = models.CharField(max_length=300)
cast = models.CharField(max_length=800)
description = models.TextField(max_length=5000)
release_date = models.DateField()
averageRating = models.FloatField(default=0)
image = models.URLField(default=None, null=True)
def __str__(self):
return self.name
def __unicode__(self):
return self.name
class Review(models.Model):
movie = models.ForeignKey(Movie, on_delete=models.CASCADE)
user = models.ForeignKey(User, on_delete=models.CASCADE)
comment = models.TextField(max_length=5000)
rating = models.FloatField(default=0)
def __str__(self):
return self.user.username
class Contact(models.Model):
first_name = models.CharField(max_length=100)
last_name = models.CharField(max_length=100)
email = models.EmailField(max_length=100)
# mobile = models.IntegerField()
comment = models.TextField(max_length=100)
def __str__(self):
return self.first_name | [
"akumatha@gmail.com"
] | akumatha@gmail.com |
313ef19914bc291335c072fdcf2d2a678887d9ff | 6c69b3229dc01621561330b67dca894799ea3804 | /PlantillaOpenGL.py | e479b259ec479cc2f03c41449ddd829dfdb32e91 | [] | no_license | DamianBurboa/PracticaCasa | 484cd86afd3890288c83c216cf24ceaee6afafa2 | 83222060594f0ff76d212a5214f0358387d22d71 | refs/heads/master | 2023-02-24T11:42:43.524122 | 2021-01-28T01:30:50 | 2021-01-28T01:30:50 | 333,610,506 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,207 | py | from OpenGL.GL import *
from glew_wish import *
import glfw
from math import *
def dibujarPasto():
glColor3f(0.427,0.819,0.321)
glBegin(GL_QUADS)
glVertex3f(-1.0,-0.6,0)
glVertex3f(1.0,-0.6,0)
glVertex3f(1.0,-1.0,0)
glVertex3f(-1.0,-1.0,0)
glEnd()
def dibujarSol():
glColor3f(0.921,0.898,0.301)
glBegin(GL_POLYGON)
for x in range(360):
angulo = x * 3.14159 / 180.0
glVertex3f(cos(angulo) * 0.2 - 0.6, sin(angulo) * 0.2 + 0.6,0.0)
glEnd()
def dibujarCasa():
glColor3f(0.788,0.596,0.364)
glBegin(GL_QUADS)
glVertex3f(-0.20,0.1,0)
glVertex3f(0.70,0.1,0)
glVertex3f(0.70,-0.70,0)
glVertex3f(-0.20,-0.70,0)
glEnd()
def dibujarnube():
glColor3f(1,1,1)
glBegin(GL_POLYGON)
for x in range(360):
angulo = x * 3.14159 / 180.0
glVertex3f(cos(angulo) * 0.15 - 0.2, sin(angulo) * 0.05 + 0.5 ,0.0)
glEnd()
glBegin(GL_POLYGON)
for x in range(360):
angulo = x * 3.14159 / 180.0
glVertex3f(cos(angulo) * 0.24 - 0.0, sin(angulo) * 0.08 + 0.57 ,0.0)
glEnd()
glBegin(GL_POLYGON)
for x in range(360):
angulo = x * 3.14159 / 180.0
glVertex3f(cos(angulo) * 0.20 + 0.50, sin(angulo) * 0.06 + 0.70 ,0.0)
glEnd()
glBegin(GL_POLYGON)
for x in range(360):
angulo = x * 3.14159 / 180.0
glVertex3f(cos(angulo) * 0.10 + 0.70, sin(angulo) * 0.05 + 0.75 ,0.0)
glEnd()
glBegin(GL_POLYGON)
for x in range(360):
angulo = x * 3.14159 / 180.0
glVertex3f(cos(angulo) * 0.10 + 0.75, sin(angulo) * 0.05 + 0.45 ,0.0)
glEnd()
glBegin(GL_POLYGON)
for x in range(360):
angulo = x * 3.14159 / 180.0
glVertex3f(cos(angulo) * 0.20 + 0.55, sin(angulo) * 0.05 + 0.50 ,0.0)
glEnd()
def dibujartecho():
glColor3f(0.890,0.333,0.054)
glBegin(GL_TRIANGLES)
glVertex3f(-0.40,0.1,0)
glVertex3f(0.25,0.45,0)
glVertex3f(0.90,0.1,0)
glEnd()
def dibujarventana():
#Marco de la ventana
glColor3f(0.658,0.560,0.058)
glBegin(GL_QUADS)
glVertex3f(0.30,-0.05,0)
glVertex3f(0.60,-0.05,0)
glVertex3f(0.60,-0.30,0)
glVertex3f(0.30,-0.3,0)
glEnd()
#Vidrio de la ventana
glColor3f(0.239,0.819,0.741)
glBegin(GL_QUADS)
glVertex3f(0.32,-0.07,0)
glVertex3f(0.58,-0.07,0)
glVertex3f(0.58,-0.28,0)
glVertex3f(0.32,-0.28,0)
glEnd()
#lineas de la ventana
glColor3f(0.658,0.560,0.058)
glBegin(GL_QUADS)
glVertex3f(0.32,-0.173,0) #izqArr
glVertex3f(0.58,-0.173,0) #derArr
glVertex3f(0.58,-0.182,0) #derAba
glVertex3f(0.32,-0.182,0) #IzqAba
glEnd()
glColor3f(0.658,0.560,0.058)
glBegin(GL_QUADS)
glVertex3f(0.44,-0.07,0) #izqArr
glVertex3f(0.46,-0.07,0) #derArr
glVertex3f(0.46,-0.28,0) #derAba
glVertex3f(0.44,-0.28,0) #IzqAba
glEnd()
def dibujarPuerta():
#Puerta
glColor3f(0.545,0.639,0.592)
glBegin(GL_QUADS)
glVertex3f(0.15,-0.38,0) #izqArr
glVertex3f(0.40,-0.38,0) #derArr
glVertex3f(0.40,-0.70,0) #derAba
glVertex3f(0.15,-0.70,0) #IzqAba
glEnd()
#Perilla
glColor3f(0.380,0.388,0.384)
glBegin(GL_POLYGON)
for x in range(360):
angulo = x * 3.14159 / 180.0
glVertex3f(cos(angulo) * 0.020 + 0.35, sin(angulo) * 0.020 - 0.53 ,0.0)
glEnd()
def dibujarArbol():
#Tronco
glColor3f(0.788,0.596,0.364)
glBegin(GL_QUADS)
glVertex3f(-0.75,-0.2,0) #izqArr
glVertex3f(-0.60,-0.2,0) #derArr
glVertex3f(-0.60,-0.70,0) #derAba
glVertex3f(-0.75,-0.70,0) #izqAba
glEnd()
#Hojas
glColor3f(0.196,0.470,0.180)
glBegin(GL_POLYGON)
for x in range(360):
angulo = x * 3.14159 / 180.0
glVertex3f(cos(angulo) * 0.2 - 0.675, sin(angulo) * 0.2 -0.2 ,0.0)
glEnd()
glColor3f(0.196,0.470,0.180)
glBegin(GL_POLYGON)
for x in range(360):
angulo = x * 3.14159 / 180.0
glVertex3f(cos(angulo) * 0.15 - 0.675, sin(angulo) * 0.15 - 0.03 ,0.0)
glEnd()
glColor3f(0.196,0.470,0.180)
glBegin(GL_POLYGON)
for x in range(360):
angulo = x * 3.14159 / 180.0
glVertex3f(cos(angulo) * 0.10 - 0.675, sin(angulo) * 0.10 + 0.1 ,0.0)
glEnd()
def dibujar():
#rutinas de dibujo
dibujarPasto()
dibujarSol()
dibujarCasa()
dibujarnube()
dibujartecho()
dibujarventana()
dibujarPuerta()
dibujarArbol()
def main():
#inicia glfw
if not glfw.init():
return
#crea la ventana,
# independientemente del SO que usemos
window = glfw.create_window(800,800,"Mi ventana", None, None)
#Configuramos OpenGL
glfw.window_hint(glfw.SAMPLES, 4)
glfw.window_hint(glfw.CONTEXT_VERSION_MAJOR,3)
glfw.window_hint(glfw.CONTEXT_VERSION_MINOR,3)
glfw.window_hint(glfw.OPENGL_FORWARD_COMPAT, GL_TRUE)
glfw.window_hint(glfw.OPENGL_PROFILE, glfw.OPENGL_CORE_PROFILE)
#Validamos que se cree la ventana
if not window:
glfw.terminate()
return
#Establecemos el contexto
glfw.make_context_current(window)
#Activamos la validación de
# funciones modernas de OpenGL
glewExperimental = True
#Inicializar GLEW
if glewInit() != GLEW_OK:
print("No se pudo inicializar GLEW")
return
#Obtenemos versiones de OpenGL y Shaders
version = glGetString(GL_VERSION)
print(version)
version_shaders = glGetString(GL_SHADING_LANGUAGE_VERSION)
print(version_shaders)
while not glfw.window_should_close(window):
#Establece regiond e dibujo
glViewport(0,0,800,800)
#Establece color de borrado
glClearColor(0.474,0.780,0.752,1)
#Borra el contenido de la ventana
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
#Dibujar
dibujar()
#Preguntar si hubo entradas de perifericos
#(Teclado, mouse, game pad, etc.)
glfw.poll_events()
#Intercambia los buffers
glfw.swap_buffers(window)
#Se destruye la ventana para liberar memoria
glfw.destroy_window(window)
#Termina los procesos que inició glfw.init
glfw.terminate()
if __name__ == "__main__":
main() | [
"you@example.com"
] | you@example.com |
afdd4a96f7bf490989dfa4d5e783bff6701247fa | 4d3999e06a63989cd4dc7fe928940e0ca533cbd4 | /test/test_inline_response20011.py | d78bbabcdfd3fc665194d388445a31299edf971d | [] | no_license | ribaguifi/orchestra-client-python | c8f1c4b9760c2df173222bb5bcaf73b231d2b4bb | a211f7f6c0353f4476176c6c2fed11a2be553db8 | refs/heads/main | 2023-06-03T00:28:57.108274 | 2021-06-22T12:22:28 | 2021-06-22T12:22:28 | 379,258,596 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 956 | py | # coding: utf-8
"""
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 0.0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.inline_response20011 import InlineResponse20011 # noqa: E501
from swagger_client.rest import ApiException
class TestInlineResponse20011(unittest.TestCase):
"""InlineResponse20011 unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testInlineResponse20011(self):
"""Test InlineResponse20011"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.inline_response20011.InlineResponse20011() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"santiago@ribaguifi.com"
] | santiago@ribaguifi.com |
50ba1c7ef8fbb7d9a24a7636649bee002763c5fa | 4eaab9327d25f851f9e9b2cf4e9687d5e16833f7 | /problems/validate_binary_search_tree/solution.py | 4b027255c5de04c726d305a51e41b228bcb44e00 | [] | no_license | kadhirash/leetcode | 42e372d5e77d7b3281e287189dcc1cd7ba820bc0 | 72aea7d43471e529ee757ff912b0267ca0ce015d | refs/heads/master | 2023-01-21T19:05:15.123012 | 2020-11-28T13:53:11 | 2020-11-28T13:53:11 | 250,115,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 647 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def isValidBST(self, root: TreeNode) -> bool:
stack = []
prev_node = float('-inf')
while stack or root:
if root:
stack.append(root)
root = root.left
else:
root = stack.pop()
if prev_node >= root.val:
return False
prev_node = root.val
root = root.right
return True | [
"kadhirash@gmail.com"
] | kadhirash@gmail.com |
4974c067418ad8b5e155e0a7dcd40cd94d0f920c | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_259/ch147_2020_04_08_17_39_35_490926.py | 1cb72ae4df13990fa8e046149c2139db0e4e5da0 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | def mais_frequente(lista):
ocorrencias = {}
for i in lista:
ocorrencias[i] = 0
for i in lista:
if i in ocorrencias:
ocorrencias[i]+=1
contadores = occorencias.values()
max = 0
for i in contadores:
for j in contadores:
if i>j:
max = i
for i in ocorrencias:
if ocorrencias[i] = max
return ocorrencias | [
"you@example.com"
] | you@example.com |
6a5bce666dbf467bf0894436cd1fcb1414585edc | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03127/s858089481.py | 6247fc003f271e8077da902951482327a7e441e4 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 166 | py | n=int(input())
a=list(map(int,input().split()))
def gcd(x,y):
if y==0:
return x
return gcd(y,x%y)
ans=a[0]
for i in a:
ans=gcd(ans,i)
print(ans) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
063a2ab6fefbfe29cb136d4ed2c17a6cb71ffe90 | 5ce1c0ab1b6147428fc30bcd1698e4d0e53b688e | /1094.py | fc044748f63df33ff98d34ab4b9de72e8658f50b | [] | no_license | junyang10734/leetcode-python | 035b12df3f7d9fc33553140d1eb0692750b44f0a | eff322f04d22ffbc4f9b10e77f97c28aac5c7004 | refs/heads/master | 2023-07-22T11:16:38.740863 | 2023-07-14T00:22:00 | 2023-07-14T00:22:00 | 189,197,380 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 555 | py | # 1094. Car Pooling
# Array / 差分数组
# 差分数组
# time: O(n)
class Solution:
def carPooling(self, trips: List[List[int]], capacity: int) -> bool:
maxD = 0
for n,f,t in trips:
maxD = max(maxD, t)
diff = [0 for _ in range(maxD+1)]
for n,f,t in trips:
diff[f] += n
diff[t] -= n
used_capacity = 0
for d in diff:
used_capacity += d
if used_capacity > capacity:
return False
return True | [
"48000364+junyang10734@users.noreply.github.com"
] | 48000364+junyang10734@users.noreply.github.com |
4a4a2f467172452c9d4cb3d75181baf874606b76 | 3f0a886b4ea2c8de185511d74dc5ff4175d9abc6 | /myenv/bin/easy_install-2.7 | 40fc82520ca304f4b3bee2dc9eb01436be85bda8 | [] | no_license | feminas-k/poll_app | 3a4d59810a4840dfee25ae0c2475721af2bc6741 | 405bb3377a24ff80dac6dadefbdcf7f8d3828a0b | refs/heads/master | 2021-01-22T00:52:46.012103 | 2017-03-01T17:22:50 | 2017-03-01T17:22:50 | 83,581,193 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | 7 | #!/home/feminas/pollapp_feb17/myenv/bin/python2
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"femi1991@gmail.com"
] | femi1991@gmail.com |
d7e6712811fdf092b1950a8d0880765d318b7cd9 | 4e308e8bb7056f1fd6914777b38d98b18254867f | /DECOMPYLED/LV2_LX2_LC2_LD2/Params.py | a509f673e5d1e85d5e6b3c414e982b62497335bd | [] | no_license | bschreck/cuttlefish | 165aae651bf58c1142cc47934802a7a3614e39da | 0f44ccca0ebf1a6f78165001586fcb67b98b406a | refs/heads/master | 2020-05-19T23:07:11.520086 | 2014-02-25T05:26:18 | 2014-02-25T05:26:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,418 | py | # emacs-mode: -*- python-*-
AUTO_FILTER_PARAMS = {'LFO Sync Rate': 'LFO Synced Rate'}
AUTO_PAN_PARAMS = {'Sync Rate': 'Synced Rate'}
BEAT_REPEAT_PARAMS = {'Decay': 'Damp Volume',
'Pitch Decay': 'Damp Pitch',
'Volume': 'Wet Level',
'Repeat': 'Instant Repeat'}
EQ_FOUR_PARAMS = {'1 Filter On A': '1 Filter On',
'1 Frequency A': '1 Frequency',
'1 Gain A': '1 Gain',
'1 Filter Type A': '1 Filter Type',
'1 Resonance A': '1 Resonance',
'2 Filter On A': '2 Filter On',
'2 Frequency A': '2 Frequency',
'2 Gain A': '2 Gain',
'2 Filter Type A': '2 Filter Type',
'2 Resonance A': '2 Resonance',
'3 Filter On A': '3 Filter On',
'3 Frequency A': '3 Frequency',
'3 Gain A': '3 Gain',
'3 Filter Type A': '3 Filter Type',
'3 Resonance A': '3 Resonance',
'4 Filter On A': '4 Filter On',
'4 Frequency A': '4 Frequency',
'4 Gain A': '4 Gain',
'4 Filter Type A': '4 Filter Type',
'4 Resonance A': '4 Resonance'}
FLANGER_PARAMS = {'Frequency': 'LFO Frequency',
'Sync': 'LFO Sync',
'Sync Rate': 'LFO Synced Rate'}
PHASER_PARAMS = {'Sync': 'LFO Sync',
'Sync Rate': 'LFO Synced Rate'}
SATURATOR_PARAMS = {'Base': 'BaseDrive',
'Drive': 'PreDrive'}
FIVETOSIX_PARAMS_DICT = {'AutoFilter': AUTO_FILTER_PARAMS,
'AutoPan': AUTO_PAN_PARAMS,
'BeatRepeat': BEAT_REPEAT_PARAMS,
'Eq8': EQ_FOUR_PARAMS,
'Flanger': FLANGER_PARAMS,
'Phaser': PHASER_PARAMS,
'Saturator': SATURATOR_PARAMS}
# local variables:
# tab-width: 4
| [
"bschreck@mit.edu"
] | bschreck@mit.edu |
b844fe2134f61d12924df1dbdb2e53f8a9063706 | 6fbda2fa7d0741813b5141dfbae7fec76a471fc9 | /modal/popup/urls.py | ccd86b0fcb72169d2e0d2ea843b8b97b17ac1fc7 | [] | no_license | innotexak/Popup-in-Django | f31d1bb9539d38a6bba3b29b7a9e41c7a0187125 | bfefd0feb24eb7729de1cdc3aa4c23cdf75273c7 | refs/heads/master | 2023-03-09T13:56:38.891788 | 2021-03-01T14:36:09 | 2021-03-01T14:36:09 | 343,439,152 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 380 | py | from django.urls import path
from django.conf.urls.static import static
from django.conf import settings
from . import views
urlpatterns = [
path("", views.home, name ="home"),
path('signup/', views.SignUpView.as_view(), name='signup'),
path('login/', views.CustomLoginView.as_view(), name='login'),
]+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"akuhinnocent2016@gmail.com"
] | akuhinnocent2016@gmail.com |
573e0d1262f5d6af7cbc0a4e35a0616f147502be | 2bb633b7b0590d64a1055fb9bf6fcd0e22701ef5 | /main/drive_MTM_study.py | 860ef499307487a32d38c12b2d05a12dd79a69a4 | [] | no_license | nag92/haptic_mtm | 2df0ed46ce284e6726d9352b89d7ba5f9d4b31e6 | 503858d98a395e7465ec12f46657586453d25c53 | refs/heads/master | 2022-04-07T06:29:39.937383 | 2020-02-28T00:02:08 | 2020-02-28T00:02:08 | 241,441,056 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,953 | py | from Tkinter import *
import rosbag
from datetime import datetime
import subprocess
import signal
import shlex
import rospy
from std_msgs.msg import Time
from std_msgs.msg import Empty
from geometry_msgs.msg import Pose, PoseStamped
import time
import os
import threading
class MTM_DRIVER:
def __init__(self):
rospy.init_node('user_study_data')
self._time_pub = rospy.Publisher('/ambf/env/user_study_time', Time, queue_size=1)
self._dvrk_on_pub = rospy.Publisher('/dvrk/console/power_on', Empty, queue_size=1)
self._dvrk_off_pub = rospy.Publisher('/dvrk/console/power_off', Empty, queue_size=1)
self._dvrk_home_pub = rospy.Publisher('/dvrk/console/home', Empty, queue_size=1)
self._goal_pub = rospy.Publisher('/dvrk/MTMR/set_position_goal_cartesian', Pose, queue_size=1)
rospy.Subscriber("/dvrk/MTMR/position_cartesian_current", PoseStamped, self.updatePose)
self._time_msg = 0
self._start_time = 0
self._active = False
self._time_pub_thread = 0
self._my_bag = 0
self._current_pose = PoseStamped()
self._topic_names = ["/dvrk/MTMR/twist_body_current",
"/dvrk/MTMR/set_position_goal_cartesian",
"ambf/env/psm/baselink/State",
"ambf/env/psm/baselink/Command",
"/ambf/image_data/camera1/compressed",
"/ambf/env/World/State",
"/ambf/env/user_study/Base/State",
"/ambf/env/user_study/PuzzleRed/State",
"/ambf/env/user_study/PuzzleYellow/State",
"/ambf/env/camera1/State",
"/ambf/env/camera2/State",
"/ambf/env/simulated_device_1/MTML/State",
"/ambf/env/simulated_device_1/MTMR/State",
"/ambf/env/HandleLeft/State",
"/ambf/env/HandleRight/State",
"/ambf/env/FixedBase/State",
"/ambf/env/MovingBase/State",
"/dvrk/MTML/position_cartesian_current",
"/dvrk/MTMR/position_cartesian_current",
"/dvrk/MTMR/position_cartesian_desired",
"/dvrk/MTML/position_cartesian_desired",
"/dvrk/footpedals/clutch",
"/dvrk/footpedals/coag",
"/dvrk/MTML/set_wrench_body",
"/dvrk/MTMR/set_wrench_body",
"/ambf/env/user_study_time"]
self._topic_names_str = ""
self._rosbag_filepath = 0
self._rosbag_process = 0
for name in self._topic_names:
self._topic_names_str = self._topic_names_str + ' ' + name
def updatePose(self, data):
self._current_pose = data
def call(self):
if self._rosbag_filepath is 0:
self._active = True
self._start_time = rospy.Time.now()
self._time_pub_thread = threading.Thread(target=self.time_pub_thread_func)
self._time_pub_thread.start()
print("Start Recording ROS Bag")
date_time_str = str(datetime.now()).replace(' ', '_')
self._rosbag_filepath = './user_study_data/' + str(e1.get()) + '_' + date_time_str
command = "rosbag record -O" + ' ' + self._rosbag_filepath + self._topic_names_str
print "Running Command", command
command = shlex.split(command)
self._rosbag_process = subprocess.Popen(command)
pose = self._current_pose.pose
pose.position.x = float(x.get())
pose.position.y = float(y.get())
pose.position.z = float(z.get())
self._goal_pub.publish(pose)
else:
print "Already recording a ROSBAG file, please save that first before starting a new record"
def save(self):
if self._rosbag_filepath is not 0:
# self._active = False
filepath = self._rosbag_filepath
self._rosbag_filepath = 0
node_prefix = "/record"
# Adapted from http://answers.ros.org/question/10714/start-and-stop-rosbag-within-a-python-script/
list_cmd = subprocess.Popen("rosnode list", shell=True, stdout=subprocess.PIPE)
list_output = list_cmd.stdout.read()
retcode = list_cmd.wait()
assert retcode == 0, "List command returned %d" % retcode
for node_name in list_output.split("\n"):
if node_name.startswith(node_prefix):
os.system("rosnode kill " + node_name)
print("Saved As:", filepath, ".bag")
self._active = False
else:
print("You should start recording first before trying to save")
def time_pub_thread_func(self):
while self._active:
self._time_msg = rospy.Time.now() - self._start_time
self._time_pub.publish(self._time_msg)
time.sleep(0.05)
def dvrk_power_on(self):
self._dvrk_on_pub.publish(Empty())
time.sleep(0.1)
def dvrk_power_off(self):
self._dvrk_off_pub.publish(Empty())
time.sleep(0.1)
def dvrk_home(self):
self._dvrk_home_pub.publish(Empty())
time.sleep(0.1)
study = MTM_DRIVER()
master = Tk()
master.title("AMBF USER STUDY 1")
width = 550
height = 600
master.geometry(str(width)+'x'+str(height))
Label(master, text='trial number').grid(row=0)
Label(master, text='X').grid(row=1)
Label(master, text='Y').grid(row=2)
Label(master, text='Z').grid(row=3)
e1 = Entry(master)
e1.grid(row=0, column=1)
x = Entry(master)
x.grid(row=1, column=1)
y = Entry(master)
y.grid(row=2, column=1)
z = Entry(master)
z.grid(row=3, column=1)
# Set Default Value
button_start = Button(master, text="Start Record", bg="green", fg="white", height=8, width=20, command=study.call)
button_stop = Button(master, text="Stop Record (SAVE)", bg="red", fg="white", height=8, width=20, command=study.save)
button_destroy = Button(master, text="Close App", bg="black", fg="white", height=8, width=20, command=master.destroy)
button_on = Button(master, text="DVRK ON", bg="green", fg="white", height=4, width=10, command=study.dvrk_power_on)
button_off = Button(master, text="DVRK OFF", bg="red", fg="white", height=4, width=10, command=study.dvrk_power_off)
button_home = Button(master, text="DVRK HOME", bg="purple", fg="white", height=4, width=10, command=study.dvrk_home)
button_on.grid(row=20, column=1)
button_off.grid(row=40, column=1)
button_home.grid(row=60, column=1)
button_start.grid(row=20, column=2)
button_stop.grid(row=40, column=2)
button_destroy.grid(row=60, column=2)
master.mainloop()
| [
"nagoldfarb@wpi.edu"
] | nagoldfarb@wpi.edu |
7451fcfbc277bfe38be457cb375d98fbb28cbcbd | e6dab5aa1754ff13755a1f74a28a201681ab7e1c | /.parts/lib/django-1.3/tests/regressiontests/i18n/tests.py | 3698b91f712325f4a65128e4ad056252380c8976 | [] | no_license | ronkagan/Euler_1 | 67679203a9510147320f7c6513eefd391630703e | 022633cc298475c4f3fd0c6e2bde4f4728713995 | refs/heads/master | 2021-01-06T20:45:52.901025 | 2014-09-06T22:34:16 | 2014-09-06T22:34:16 | 23,744,842 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 101 | py | /home/action/.parts/packages/googleappengine/1.9.4/lib/django-1.3/tests/regressiontests/i18n/tests.py | [
"ron.y.kagan@gmail.com"
] | ron.y.kagan@gmail.com |
af8d982fa9c11c8549511d906334edf7a27dc55f | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/303/usersdata/284/109078/submittedfiles/minha_bib.py | 225e6592cfde216e2fee7b7f14605137fb56fb8b | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,952 | py |
import random
def solicitaSimboloDoHumano():
letra = 0
while not (letra == 'O' or letra == 'X'):
print('Qual símbolo você deseja utilizar no jogo? (X ou O) ')
letra = input().upper()
if letra == 'X':
return ['X','O']
else:
return ['O','X']
def sorteioPrimeiraJogada():
if random.randrange(2) == 1:
return 'Computador'
else:
return 'Jogador'
def jogadaHumana(tabuleiro,nome):
movimento = 0
while movimento not in '1 2 3 4 5 6 7 8 9'.split() or not vazio(tabuleiro, int(movimento)):
print('Qual a sua jogada, {}?'.format(nome))
movimento = input()
if movimento == '00':
movimento= '7'
elif movimento == '01':
movimento= '8'
elif movimento == '02':
movimento= '9'
elif movimento == '10':
movimento= '4'
elif movimento == '11':
movimento= '5'
elif movimento == '12':
movimento= '6'
elif movimento == '20':
movimento= '1'
elif movimento == '21':
movimento= '2'
elif movimento == '22':
movimento= '3'
return int(movimento)
def jogadaComputador(tabuleiro, letraComputador):
if letraComputador == 'X':
letraJogador = 'O'
else:
letraJogador = 'X'
for i in range(1,10):
copy = mostraTabuleiro(tabuleiro)
if vazio(copy, i):
movimentacao(copy, letraComputador, i)
if verificaVencedor(copy, letraComputador):
return i
for i in range(1, 10):
copy = mostraTabuleiro(tabuleiro)
if vazio(copy, i):
movimentacao(copy, letraJogador, i)
if verificaVencedor(copy, letraJogador):
return i
movimento = movAleatoria(tabuleiro, [1, 3, 7, 9])
if movimento != None:
return movimento
if vazio(tabuleiro, 5):
return 5
return movAleatoria(tabuleiro, [2, 4, 6, 8])
def mostraTabuleiro(tabuleiro):
dupeTabuleiro = []
for i in tabuleiro:
dupeTabuleiro.append(i)
return dupeTabuleiro
def verificaVencedor(tabuleiro, letra):
return ((tabuleiro[7] == letra and tabuleiro[8] == letra and tabuleiro[9] == letra) or
(tabuleiro[4] == letra and tabuleiro[5] == letra and tabuleiro[6] == letra) or
(tabuleiro[1] == letra and tabuleiro[2] == letra and tabuleiro[3] == letra) or
(tabuleiro[7] == letra and tabuleiro[4] == letra and tabuleiro[1] == letra) or
(tabuleiro[8] == letra and tabuleiro[5] == letra and tabuleiro[2] == letra) or
(tabuleiro[9] == letra and tabuleiro[6] == letra and tabuleiro[3] == letra) or
(tabuleiro[7] == letra and tabuleiro[5] == letra and tabuleiro[3] == letra) or
(tabuleiro[9] == letra and tabuleiro[5] == letra and tabuleiro[1] == letra))
def vazio(tabuleiro, movimento):
return tabuleiro[movimento] == ' '
def desenhaTabuleiro(tabuleiro):
print(' ' + tabuleiro[7] + ' | ' + tabuleiro[8] + ' | ' + tabuleiro[9])
print(' ' + tabuleiro[4] + ' | ' + tabuleiro[5] + ' | ' + tabuleiro[6])
print(' ' + tabuleiro[1] + ' | ' + tabuleiro[2] + ' | ' + tabuleiro[3])
def jogarNovamente():
print('Você deseja jogar novamente?(sim ou não)')
return rodando = True
def movimentacao(tabuleiro, letra, movimento):
tabuleiro[movimento] = letra
def movAleatoria(tabuleiro, movimentosList):
movPossiveis = []
for i in movimentosList:
if vazio(tabuleiro, i):
movPossiveis.append(i)
if len(movPossiveis) != 0:
return random.choice(movPossiveis)
else:
return None
def completo(tabuleiro):
    """Return True when no square from 1 to 9 is still blank."""
    return not any(vazio(tabuleiro, casa) for casa in range(1, 10))
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
e8f2a8f39ca3eda6c2fbc091779c75346d7c82a4 | 7bd9be7f25be80791f9220b62025f06170273293 | /front-plugins/mstatuses/src/event.py | 0a8fbb59dc5cf9f9aff2be3263775c0b94367aed | [] | no_license | cerebrohq/cerebro-plugins | ab46b4844adcb12c51d14e21f2c0d8b758b0bb57 | e2e0f97b548ef22957e13d614200027ba89215e0 | refs/heads/master | 2021-11-12T16:25:48.228521 | 2021-10-22T11:25:58 | 2021-10-22T11:25:58 | 143,178,631 | 5 | 3 | null | null | null | null | UTF-8 | Python | false | false | 3,907 | py | # -*- coding: utf-8 -*-
import cerebro
import datetime
from cerebro.aclasses import Statuses
from cerebro.aclasses import Users
from cerebro.aclasses import AbstractMessage
# Plugin configuration: the status names below must exactly match statuses
# defined in the Cerebro project (they are compared via Statuses.DATA_NAME).
report_status_name = 'pending review' # status applied after posting a Report
review_status_name = 'in progress' # status applied after posting a Review
client_review_status_name = 'could be better' # status applied after posting a Client Review
new_task_percent = 5.0 # minimum progress % enforced after a Report message
complete_status_name = 'completed' # name of the "done" status (forces 100% progress)
"""
def change_status(task, to_status):
for status in task.possible_statuses():
if status[Statuses.DATA_NAME] == to_status:
task.set_status(status[Statuses.DATA_ID])
return True
return False
"""
def change_status(task, event, to_status):
    """Queue a status change on `event` to the status named `to_status`.

    Scans the statuses available for `task` and, on the first name match,
    stores that status id on the event. Returns True on success, False when
    no status with that name is available for the task.
    """
    candidatos = task.possible_statuses()
    for candidato in candidatos:
        if candidato[Statuses.DATA_NAME] != to_status:
            continue
        event.set_new_task_status(candidato[Statuses.DATA_ID])
        return True
    return False
def task_messages(task_id):
    """Fetch the full event rows for the latest message of the given task.

    NOTE(review): the first query is LIMIT 1, so `ids` holds at most one
    uid — the plural naming is historical.
    """
    db = cerebro.db.Db()
    rows = db.execute('select uid from "_event_list"(%s, false) order by mtm desc limit 1', task_id)
    ids = {row[0] for row in rows}
    return db.execute('select * from "eventQuery_08"(%s)', ids)
def remove_event(msg):
    """Mark the given message row as removed via the low-level _event_update call."""
    event_id = msg[AbstractMessage.DATA_ID]
    db = cerebro.db.Db()
    db.execute('select "_event_update"(%s, 1::smallint, 0, 0)', event_id)
def before_event(event):
    """Cerebro hook: runs before a new event is committed.

    For Report / Review / Client Review messages, when the author left the
    task status untouched (the event's new status equals the current one),
    switch the task to the status configured for that message type (see the
    *_status_name constants above). A Report additionally bumps the task's
    progress up to at least ``new_task_percent``.
    """
    # change progress on first report
    if event.event_type() == event.EVENT_CREATION_OF_MESSAGE:
        task = cerebro.core.task(event.task_id())
        if event.type() == event.TYPE_REPORT:
            """has_report = False
            for message in task_messages(event.task_id()):
                if message[AbstractMessage.DATA_TYPE] == AbstractMessage.TYPE_REPORT:
                    has_report = True
                    break
            task = cerebro.core.task(event.task_id())"""
            # Only raise progress upward; never lower an existing value.
            if task.progress() < new_task_percent: #not has_report and
                task.set_progress(new_task_percent)
            # Auto-change status only when the user did not pick one
            # themselves (new status still equals the current status).
            if event.new_task_status()[cerebro.aclasses.Statuses.DATA_ID] == task.status()[cerebro.aclasses.Statuses.DATA_ID]:
                change_status(task, event, report_status_name)
        elif event.type() == event.TYPE_REVIEW:
            if event.new_task_status()[cerebro.aclasses.Statuses.DATA_ID] == task.status()[cerebro.aclasses.Statuses.DATA_ID]:
                change_status(task, event, review_status_name)
        elif event.type() == event.TYPE_CLIENT_REVIEW:
            if event.new_task_status()[cerebro.aclasses.Statuses.DATA_ID] == task.status()[cerebro.aclasses.Statuses.DATA_ID]:
                change_status(task, event, client_review_status_name)
def after_event(event):
    """Cerebro hook: runs after an event has been committed.

    On message creation: for tasks that are not completed, the
    auto-generated status-change event trailing a Report / Review / Client
    Review message is removed so the feed stays clean; tasks already in the
    completed status are forced to 100% progress.
    On a bulk status change: every task switched to the completed status is
    forced to 100% progress.

    Fix: removed stray dataset residue ("| [") fused onto the final line,
    which made the module unparsable; also dropped a dead commented-out
    local. No behavioral change otherwise.
    """
    # Message posting
    if event.event_type() == event.EVENT_CREATION_OF_MESSAGE:
        task = cerebro.core.task(event.task_id())
        status_t = task.status()
        if status_t[Statuses.DATA_NAME] != complete_status_name and (event.type() == event.TYPE_REPORT or event.type() == event.TYPE_REVIEW or event.type() == event.TYPE_CLIENT_REVIEW):
            msgs = task_messages(event.task_id())
            if msgs and len(msgs):
                # Last row of the query result is the most recent event.
                msgdel = msgs[len(msgs) - 1]
                # assumes column 1 of the row is the message type — confirm
                # against the "eventQuery_08" result layout.
                if (msgdel[1] == AbstractMessage.TYPE_STATUS_CHANGES):
                    remove_event(msgdel)
        if status_t[Statuses.DATA_NAME] == complete_status_name:
            task.set_progress(100.0)
    # Status change
    elif event.event_type() == event.EVENT_CHANGING_OF_TASKS_STATUS:
        tasks = event.tasks()
        for task in tasks:  # All selected tasks
            status_t = task.status()
            if status_t[Statuses.DATA_NAME] == complete_status_name:
                task.set_progress(100.0)
"41910371+cerebroSupport@users.noreply.github.com"
] | 41910371+cerebroSupport@users.noreply.github.com |
0f92e21e1cee1331ff57006aa1afe38de3ff9f3d | 286769965e3f4a63753d96fb0bda6e1d85b0c569 | /harvest.py | 8ce22843f11462a883170a1151bbb1b4a0230cb2 | [] | no_license | Krysioly/Harvest-Lab | 2cf64829995c72eb798d31313b0567fc77307cee | b912a97b22c5fc2d2996cf7e75e1b3025fbcd13f | refs/heads/master | 2020-03-28T16:23:58.129249 | 2018-09-13T19:51:04 | 2018-09-13T19:51:04 | 148,690,533 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,429 | py | ############
# Part 1 #
############
class MelonType(object):
    """A species of melon at a melon farm."""

    def __init__(self, code, first_harvest, color, is_seedless, is_bestseller,
                 name):
        """Store the reporting data for this species; pairings start empty."""
        self.code = code
        self.first_harvest = first_harvest
        self.color = color
        self.is_seedless = is_seedless
        self.is_bestseller = is_bestseller
        self.name = name
        self.pairings = []

    def add_pairing(self, pairing):
        """Record one food that pairs well with this melon type."""
        self.pairings.append(pairing)

    def update_code(self, new_code):
        """Overwrite the reporting code with `new_code`."""
        self.code = new_code
def make_melon_types():
    """Return the list of current melon types with their food pairings."""
    dados = (
        ('musk', 1998, 'green', True, True, 'Muskmelon', ['mint']),
        ('cas', 2003, 'orange', True, False, 'Casaba', ['mint', 'strawberries']),
        ('cren', 1996, 'green', True, False, 'Crenshaw', ['proscuitto']),
        ('yw', 2013, 'yellow', True, True, 'Yellow Watermelon', ['ice cream']),
    )
    all_melon_types = []
    for code, first, color, seedless, best, name, pairings in dados:
        melon = MelonType(code, first, color, seedless, best, name)
        for pairing in pairings:
            melon.add_pairing(pairing)
        all_melon_types.append(melon)
    return all_melon_types
def print_pairing_info(melon_types):
    """Print each melon type's name followed by one line per food pairing."""
    for tipo in melon_types:
        print("{} pairs with ".format(tipo.name))
        for comida in tipo.pairings:
            print("-- {}".format(comida))
def make_melon_type_lookup(melon_types):
    """Takes a list of MelonTypes and returns a dictionary of melon type by code.

    Fix: the original iterated ``melon_types.items()`` — lists have no
    ``.items()`` (AttributeError) — and never used the melon's ``code`` as
    the key, contradicting its own docstring. Now each melon is keyed by
    its reporting code.
    """
    melon_dictionary = {}
    for melon in melon_types:
        melon_dictionary[melon.code] = melon
    return melon_dictionary
############
# Part 2 #
############
class Melon(object):
    """A melon in a melon harvest."""
    # Fill in the rest
    # Needs __init__ and is_sellable methods
    # TODO(review): exercise stub — presumably stores a MelonType plus
    # per-melon harvest data; confirm the intended fields before implementing.
def make_melons(melon_types):
    """Returns a list of Melon objects."""
    # Fill in the rest
    # TODO(review): exercise stub — expected to build Melon instances from
    # the given melon types; Melon itself is still unimplemented above.
def get_sellability_report(melons):
    """Given a list of melon object, prints whether each one is sellable."""
    # Fill in the rest
    # TODO(review): exercise stub — depends on Melon.is_sellable, which is
    # also still unimplemented.
| [
"no-reply@hackbrightacademy.com"
] | no-reply@hackbrightacademy.com |
de5c9d890930f8e6909373eac0b3df8cf9831877 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/sieve-big-4288.py | af18005dbcc70d2bdff0291cb2217028820a6daa | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,755 | py | # A resizable list of integers
# A growable sequence of integers backed by a plain list
class Vector(object):
    items: [int] = None
    size: int = 0
    def __init__(self:"Vector"):
        self.items = [0]
    # Number of slots currently allocated
    def capacity(self:"Vector") -> int:
        return len(self.items)
    # Grow the backing list by a single slot
    def increase_capacity(self:"Vector") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Store one value after the current last element
    def append(self:"Vector", item: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size = 1 + self.size
    # Append every value of new_items, in order
    def append_all(self:"Vector", new_items: [int]) -> object:
        value:int = 0
        for value in new_items:
            self.append(value)
    # Shift everything after idx left by one and shrink the vector
    def remove_at(self:"Vector", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Value stored at position idx
    def get(self:"Vector", idx: int) -> int:
        return self.items[idx]
    # Number of elements currently stored
    def length(self:"Vector") -> int:
        return self.size
# A resizable list of integers
# NOTE(review): machine-generated widening of Vector for benchmarking; each
# "*2"-suffixed member mirrors the base one and ignores its extra
# arguments, and the items2/size2 fields are never read within this class.
class Vector2(object):
    items: [int] = None
    items2: [int] = None
    size: int = 0
    size2: int = 0
    def __init__(self:"Vector2"):
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector2") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity2(self:"Vector2") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector2", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (item2 is ignored)
    def append2(self:"Vector2", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector2", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (new_items2 is ignored)
    def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector2", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (idx2 is ignored)
    def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector2", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (idx2 is ignored)
    def get2(self:"Vector2", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector2") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length2(self:"Vector2") -> int:
        return self.size
# A resizable list of integers
# NOTE(review): machine-generated widening of Vector for benchmarking; the
# "*2"/"*3"-suffixed members mirror the base ones and ignore their extra
# arguments, and the items2/items3/size2/size3 fields are never read
# within this class.
class Vector3(object):
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    def __init__(self:"Vector3"):
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector3") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity2(self:"Vector3") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity3(self:"Vector3") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector3", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra arguments are ignored)
    def append2(self:"Vector3", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra arguments are ignored)
    def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector3", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra arguments are ignored)
    def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra arguments are ignored)
    def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector3", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra arguments are ignored)
    def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra arguments are ignored)
    def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector3", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra arguments are ignored)
    def get2(self:"Vector3", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra arguments are ignored)
    def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector3") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length2(self:"Vector3") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length3(self:"Vector3") -> int:
        return self.size
# A resizable list of integers
# NOTE(review): machine-generated widening of Vector for benchmarking; the
# numbered members mirror the base ones and ignore their extra arguments,
# and the items2..items4 / size2..size4 fields are never read within this
# class.
class Vector4(object):
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    items4: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    size4: int = 0
    def __init__(self:"Vector4"):
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector4") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity2(self:"Vector4") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity3(self:"Vector4") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity4(self:"Vector4") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity4(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector4", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra arguments are ignored)
    def append2(self:"Vector4", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra arguments are ignored)
    def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra arguments are ignored)
    def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector4", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra arguments are ignored)
    def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra arguments are ignored)
    def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra arguments are ignored)
    def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        item4:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector4", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra arguments are ignored)
    def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra arguments are ignored)
    def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra arguments are ignored)
    def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector4", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra arguments are ignored)
    def get2(self:"Vector4", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra arguments are ignored)
    def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra arguments are ignored)
    def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector4") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length2(self:"Vector4") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length3(self:"Vector4") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length4(self:"Vector4") -> int:
        return self.size
# A resizable list of integers
# Fix: the items5 annotation contained an unexpanded generator placeholder
# ("[$ID]"), which is a syntax error; it is now [int] like its siblings.
# The numbered members mirror the base ones and ignore their extra
# arguments; the items2..items5 / size2..size5 fields are never read.
class Vector5(object):
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    items4: [int] = None
    items5: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    size4: int = 0
    size5: int = 0
    def __init__(self:"Vector5"):
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector5") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity2(self:"Vector5") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity3(self:"Vector5") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity4(self:"Vector5") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity5(self:"Vector5") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity4(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity5(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector5", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra arguments are ignored)
    def append2(self:"Vector5", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra arguments are ignored)
    def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra arguments are ignored)
    def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (extra arguments are ignored)
    def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector5", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra arguments are ignored)
    def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra arguments are ignored)
    def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra arguments are ignored)
    def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        item4:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (extra arguments are ignored)
    def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        item4:int = 0
        item5:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector5", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra arguments are ignored)
    def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra arguments are ignored)
    def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra arguments are ignored)
    def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (extra arguments are ignored)
    def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector5", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra arguments are ignored)
    def get2(self:"Vector5", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra arguments are ignored)
    def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra arguments are ignored)
    def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (extra arguments are ignored)
    def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector5") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length2(self:"Vector5") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length3(self:"Vector5") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length4(self:"Vector5") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length5(self:"Vector5") -> int:
        return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
    doubling_limit:int = 1000
    # Overriding to do fewer resizes: double the backing list while it is
    # small, then revert to one-slot growth once the limit is passed.
    def increase_capacity(self:"DoublingVector") -> int:
        if (self.capacity() > self.doubling_limit // 2):
            # Past the doubling limit: standard one-element growth
            self.items = self.items + [0]
        else:
            # Still at or below the limit: double the allocation
            self.items = self.items + self.items
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
# NOTE(review): generated duplicate of DoublingVector; increase_capacity2
# mirrors increase_capacity and the doubling_limit2 field is never read.
class DoublingVector2(Vector):
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    # Overriding to do fewer resizes
    def increase_capacity(self:"DoublingVector2") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity2(self:"DoublingVector2") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
# NOTE(review): generated duplicate of DoublingVector; the numbered
# increase_capacityN methods mirror the base one and the doubling_limit2/3
# fields are never read.
class DoublingVector3(Vector):
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    # Overriding to do fewer resizes
    def increase_capacity(self:"DoublingVector3") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity2(self:"DoublingVector3") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity3(self:"DoublingVector3") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
# NOTE(review): generated duplicate of DoublingVector; the numbered
# increase_capacityN methods mirror the base one and the doubling_limit2..4
# fields are never read.
class DoublingVector4(Vector):
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    doubling_limit4:int = 1000
    # Overriding to do fewer resizes
    def increase_capacity(self:"DoublingVector4") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity2(self:"DoublingVector4") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity3(self:"DoublingVector4") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity4(self:"DoublingVector4") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
# NOTE(review): generated duplicate of DoublingVector; the numbered
# increase_capacityN methods mirror the base one and the doubling_limit2..5
# fields are never read.
class DoublingVector5(Vector):
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    doubling_limit4:int = 1000
    doubling_limit5:int = 1000
    # Overriding to do fewer resizes
    def increase_capacity(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity2(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity3(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity4(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity5(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# Build a DoublingVector holding the integers of the half-open range [i, j)
def vrange(i:int, j:int) -> Vector:
    result:Vector = None
    result = DoublingVector()
    # Append i, i+1, ..., j-1 in order; empty when i >= j
    while i < j:
        result.append(i)
        i = i + 1
    return result
# Generated duplicate of vrange; i2/j2 and the v2 local are unused.
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
    v:Vector = None
    v2:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
# Generated duplicate of vrange; the extra parameters and locals are unused.
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
# Generated duplicate of vrange; the extra parameters and locals are unused.
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v4:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
# Generated duplicate of vrange; the extra parameters and locals are unused.
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v4:Vector = None
    v5:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
# Sieve of Eratosthenes (not really): trial-division sieve that mutates v
# in place, keeping only elements not divisible by any earlier surviving
# element — the primes, when v holds consecutive integers starting at 2.
def sieve(v:Vector) -> object:
    i:int = 0  # index of the current pivot element
    j:int = 0  # scan index for multiples of the pivot
    k:int = 0  # value of the pivot element
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                # Removal shifts the rest left, so j is NOT advanced here.
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# Generated duplicate of sieve; only v is sieved, the extra parameter and
# the numbered locals are unused.
def sieve2(v:Vector, v2:Vector) -> object:
    i:int = 0
    i2:int = 0
    j:int = 0
    j2:int = 0
    k:int = 0
    k2:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# Generated duplicate of sieve; only v is sieved, the extra parameters and
# the numbered locals are unused.
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
    i:int = 0
    i2:int = 0
    i3:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# Generated duplicate of sieve; only v is sieved, the extra parameters and
# the numbered locals are unused.
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
    i:int = 0
    i2:int = 0
    i3:int = 0
    i4:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    j4:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    k4:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# Same as sieve; extra parameters/locals are unused (benchmark padding).
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
    i:int = 0
    i2:int = 0
    i3:int = 0
    i4:int = 0
    i5:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    j4:int = 0
    j5:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    k4:int = 0
    k5:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# Input parameter
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch
# NOTE(review): only v is sieved and printed; v2..v5 are built and then
# unused — presumably deliberate benchmark workload (TODO confirm).
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print
while i < v.length():
    print(v.get(i))
    i = i + 1
| [
"647530+Virtlink@users.noreply.github.com"
] | 647530+Virtlink@users.noreply.github.com |
33643977282d87b0b7f0d9e84435de680a12464a | ab621c65fc91f5194c4032d68e750efaa5f85682 | /account_budget_activity/models/account_budget_summary.py | c0293bb5ce21aaffd2c726edb10d045d2fc83b59 | [] | no_license | pabi2/pb2_addons | a1ca010002849b125dd89bd3d60a54cd9b9cdeef | e8c21082c187f4639373b29a7a0905d069d770f2 | refs/heads/master | 2021-06-04T19:38:53.048882 | 2020-11-25T03:18:24 | 2020-11-25T03:18:24 | 95,765,121 | 6 | 15 | null | 2022-10-06T04:28:27 | 2017-06-29T10:08:49 | Python | UTF-8 | Python | false | false | 2,378 | py | # -*- coding: utf-8 -*-
from openerp import tools
import openerp.addons.decimal_precision as dp
from openerp import models, fields, api
class BudgetSummary(models.Model):
    """Read-only summary of budget lines, grouped by budget, activity group
    and budget method, with one amount column per fiscal month (Oct..Sep).

    Backed by a SQL view created in init(), not a regular table.
    """
    _name = 'budget.summary'
    _auto = False  # no table is created; see init() below

    budget_id = fields.Many2one(
        'account.budget',
        string="Budget",
    )
    budget_method = fields.Selection(
        [('revenue', 'Revenue'),
         ('expense', 'Expense')],
        string='Budget Method',
    )
    activity_group_id = fields.Many2one(
        'account.activity.group',
        string='Activity Group',
    )
    # Monthly amounts; the fiscal year starts in October (m1 = Oct).
    m1 = fields.Float(
        string='Oct',
    )
    m2 = fields.Float(
        string='Nov',
    )
    m3 = fields.Float(
        string='Dec',
    )
    m4 = fields.Float(
        string='Jan',
    )
    m5 = fields.Float(
        string='Feb',
    )
    m6 = fields.Float(
        string='Mar',
    )
    m7 = fields.Float(
        string='Apr',
    )
    m8 = fields.Float(
        string='May',
    )
    m9 = fields.Float(
        string='Jun',
    )
    m10 = fields.Float(
        string='July',
    )
    m11 = fields.Float(
        string='Aug',
    )
    m12 = fields.Float(
        string='Sep',
    )
    planned_amount = fields.Float(
        string='Planned Amount',
        compute='_compute_planned_amount',
        digits_compute=dp.get_precision('Account'),
    )

    @api.multi
    @api.depends('m1', 'm2', 'm3', 'm4', 'm5', 'm6',
                 'm7', 'm8', 'm9', 'm10', 'm11', 'm12',)
    def _compute_planned_amount(self):
        # Planned amount is the plain sum of the twelve monthly columns.
        for rec in self:
            planned_amount = sum([rec.m1, rec.m2, rec.m3, rec.m4,
                                  rec.m5, rec.m6, rec.m7, rec.m8,
                                  rec.m9, rec.m10, rec.m11, rec.m12
                                  ])
            rec.planned_amount = planned_amount  # from last year

    def init(self, cr):
        # (Re)create the backing SQL view aggregating account_budget_line.
        tools.drop_view_if_exists(cr, self._table)
        cr.execute("""CREATE or REPLACE VIEW %s as (
            select min(l.id) id, budget_id, activity_group_id, l.budget_method,
            sum(m1) m1, sum(m2) m2, sum(m3) m3, sum(m4) m4,
            sum(m5) m5, sum(m6) m6, sum(m7) m7, sum(m8) m8, sum(m9) m9,
            sum(m10) m10, sum(m11) m11, sum(m12) m12
            from account_budget_line l
            group by budget_id, activity_group_id, l.budget_method
            )""" % (self._table, ))
| [
"kittiu@gmail.com"
] | kittiu@gmail.com |
ea9f1d44be60e436552ebd6088b3a92204b02381 | f8d5c4eb0244c4a227a615bc11c4c797760c3bec | /log/REINFORCEMENT_2017-11-23 01:19:24.645036/REINFORCE_cart_pole.py | c9cc30fc771a980cf4d433f5a4d099bed1fa9fbf | [] | no_license | SamPlvs/reinforcement_learning_pytorch | e9b84659f870d938814177f1288fa4a2eb152599 | ffb9e53eeff011c4d3d5933a60c2b65fdbb18e2a | refs/heads/master | 2020-03-23T04:08:51.778325 | 2018-01-16T22:36:48 | 2018-01-16T22:36:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,768 | py | import torch
import gym
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
# import datetime
from REINFORCEAgent import REINFORCEAgent
# import random
# from torch.autograd import Variable
import signal
import utils.util as util
import os, sys, datetime, shutil
import utils.logger as logger
from datetime import datetime
# Interactive-plotting setup: detect an IPython inline backend, import its
# display helper if present, and switch matplotlib to interactive mode.
is_ipython = 'inline' in matplotlib.get_backend()
if is_ipython:
    from IPython import display
plt.ion()
# use_cuda = torch.cuda.is_available()
# FloatTensor = torch.cuda.FloatTensor if use_cuda else torch.FloatTensor
# DoubleTensor = torch.cuda.DoubleTensor if use_cuda else torch.DoubleTensor
# LongTensor = torch.cuda.LongTensor if use_cuda else torch.LongTensor
# ByteTensor = torch.cuda.ByteTensor if use_cuda else torch.ByteTensor
# Tensor = FloatTensor
class GracefulKiller:
    """Turns SIGINT/SIGTERM into a flag so training loops can exit cleanly.

    After construction, poll `kill_now`; it flips to True once either
    signal has been delivered to the process.
    """

    def __init__(self):
        self.kill_now = False
        for sig in (signal.SIGINT, signal.SIGTERM):
            signal.signal(sig, self.exit_gracefully)

    def exit_gracefully(self, signum, frame):
        """Signal handler: record that shutdown was requested."""
        self.kill_now = True
# gym parameters
def init_gym(env_name):
    """Create an unwrapped gym environment and report its dimensionality.

    Returns (env, state_dim, action_dim); for discrete action spaces
    action_dim is the number of actions, otherwise the vector length.
    """
    env = gym.make(env_name).unwrapped
    state_dim = env.observation_space.shape[0]
    is_discrete = len(env.action_space.shape) == 0
    action_dim = env.action_space.n if is_discrete else env.action_space.shape[0]
    return env, state_dim, action_dim
# Globals. BATCH_SIZE and GAMMA appear unused in this script — presumably
# leftovers from a DQN variant (TODO confirm).
BATCH_SIZE = 128
GAMMA = 0.999
steps_done = 0   # global step counter (declared `global` in run_episode)
animate = True   # rendering flag (the env.render() call is commented out below)
def run_episode(env, qf): # on line algorithm
    """Roll out one episode with agent qf, recording (reward, state, action)
    into qf.trajectory per step; returns the episode's total reward.

    NOTE(review): indentation reconstructed — qf.update() is taken to run
    once after the episode (run_policy() also calls qf.update()); verify
    against the original source.
    """
    done = False
    global steps_done
    obs = env.reset()
    # NOTE(review): forces state elements 0 and 3 to 5 right after reset —
    # presumably a deliberate start-state perturbation; TODO confirm.
    obs[0], obs[3] = 5,5
    # steps_done += 1
    # action_new = qf.select_action(obs,steps_done)
    reward = 0
    add_reward = 0
    pending = []  # NOTE(review): unused since the SARSA-style block below was commented out
    while not done:
        # if animate:
        #     env.render()
        # action = action_new
        # obs_new, rewards, done, _ = env.step(action[0,0])
        # reward += rewards
        # steps_done+=1
        # action_new = qf.select_action(obs_new,steps_done)
        # pending.append([obs,action[0,0],rewards, obs_new,action_new[0,0],done])
        # if len(pending)>=6 or done:
        #     qf.update(pending)
        #     pending = []
        # # qf.update(obs,action[0,0],rewards, obs_new,action_new[0,0],done)
        # obs = obs_new
        action = qf.select_action(obs)
        obs_new, reward, done, _ = env.step(action)
        add_reward+=reward
        # NOTE(review): obs is never advanced to obs_new in the live loop,
        # so every action is selected from the initial state — likely a bug.
        qf.trajectory.append({'reward':reward, 'state':obs, 'action':action})
    qf.update()
    return add_reward
def run_policy(env, qf, episodes):
    """Run `episodes` episodes, updating the policy and clearing the stored
    trajectory after each one; returns the mean episode reward.

    NOTE(review): indentation reconstructed — update/clear are taken to run
    inside the loop (per episode), matching their inline comments.
    """
    total_steps = 0  # NOTE(review): unused
    reward = []
    for e in range(episodes):
        reward.append(run_episode(env,qf))
        qf.update() # update the policy net
        qf.clear_trajectory() # clear the old trajectory
    return np.mean(reward)
    # print(np.mean(reward))
    # return reward
def main():
    """Train a REINFORCE agent on CartPole-v0, logging to a timestamped dir.

    Side effects: selects CUDA device 0, redirects sys.stdout to a logger,
    copies this script into the output directory.
    """
    torch.cuda.set_device(0)
    print(torch.cuda.current_device())
    seed_num = 1
    torch.cuda.manual_seed(seed_num)
    # data_dir = '/home/bike/data/mnist/'
    # NOTE(review): hard-coded, user-specific output path.
    out_dir = '/home/becky/Git/reinforcement_learning_pytorch/log/REINFORCEMENT_{}/'.format(datetime.now())
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    # Snapshot this script and tee stdout into the log directory.
    shutil.copyfile(sys.argv[0], out_dir + '/REINFORCE_cart_pole.py')
    sys.stdout = logger.Logger(out_dir)
    env_name = 'CartPole-v0'
    killer = GracefulKiller()
    env, obs_dim, act_dim = init_gym(env_name)
    num_episodes = 300
    rewards = np.zeros(num_episodes)
    QValue = REINFORCEAgent(obs_dim, act_dim, learning_rate=0.0001,reward_decay = 0.99, e_greedy=0.9)
    for i_episode in range(num_episodes):
        rewards[i_episode] = run_policy(env,QValue,episodes=100)
        print("In episode {}, the reward is {}".format(str(i_episode),str(rewards[i_episode])))
        if killer.kill_now:
            # SIGINT/SIGTERM received: save the model and stop training.
            now = "REINFORCE_v1"
            QValue.save_model(str(now))
            break
    print('game over!')
    util.before_exit(model=QValue.model, reward=rewards)
    env.close()
    env.render(close=True)
# Script entry point.
if __name__ == "__main__":
    main()
| [
"kebai0624@gmail.com"
] | kebai0624@gmail.com |
0291a73ff7e829012f8dfc0c270e4538dbf177bd | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p4VQE/R3/benchmark/startQiskit_QC683.py | 68124f75e75ac6a9cc3004e511f0837184c87a77 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,662 | py | # qubit number=3
# total number=15
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
import networkx as nx
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def make_circuit(n:int) -> QuantumCircuit:
    """Build the generated test circuit on an n-qubit register.

    NOTE(review): depends on module-level globals E, gamma, beta and V that
    are only assigned inside the __main__ block below, and addresses qubits
    1-3 directly, so it effectively requires n >= 4 — TODO confirm intent.
    """
    # circuit begin
    input_qubit = QuantumRegister(n,"qc")
    prog = QuantumCircuit(input_qubit)
    prog.h(input_qubit[0]) # number=1
    prog.h(input_qubit[0]) # number=12
    prog.cz(input_qubit[3],input_qubit[0]) # number=13
    prog.h(input_qubit[0]) # number=14
    prog.z(input_qubit[3]) # number=10
    prog.cx(input_qubit[3],input_qubit[0]) # number=11
    prog.z(input_qubit[1]) # number=8
    prog.h(input_qubit[2]) # number=3
    prog.h(input_qubit[3]) # number=4
    for edge in E:
        k = edge[0]
        l = edge[1]
        # NOTE(review): E contains node 0, so k-1/l-1 becomes -1 and wraps
        # to the last qubit via Python negative indexing — verify intended.
        prog.cp(-2 * gamma, input_qubit[k-1], input_qubit[l-1])
        prog.p(gamma, k)
        prog.p(gamma, l)
    # NOTE(review): statement boundaries reconstructed; rx is applied once
    # after the edge loop, across all of V's qubits.
    prog.rx(2 * beta, range(len(V)))
    prog.swap(input_qubit[3],input_qubit[0]) # number=5
    prog.swap(input_qubit[3],input_qubit[0]) # number=6
    # circuit end
    return prog
if __name__ == '__main__':
    # Build the 4-node weighted graph the angle scan is based on.
    n = 4
    V = np.arange(0, n, 1)
    E = [(0, 1, 1.0), (0, 2, 1.0), (1, 2, 1.0), (3, 2, 1.0), (3, 1, 1.0)]
    G = nx.Graph()
    G.add_nodes_from(V)
    G.add_weighted_edges_from(E)
    # Grid-scan (gamma, beta) over [0, pi) and keep the maximiser of F1.
    step_size = 0.1
    a_gamma = np.arange(0, np.pi, step_size)
    a_beta = np.arange(0, np.pi, step_size)
    a_gamma, a_beta = np.meshgrid(a_gamma, a_beta)
    F1 = 3 - (np.sin(2 * a_beta) ** 2 * np.sin(2 * a_gamma) ** 2 - 0.5 * np.sin(4 * a_beta) * np.sin(4 * a_gamma)) * (
        1 + np.cos(4 * a_gamma) ** 2)
    result = np.where(F1 == np.amax(F1))
    a = list(zip(result[0], result[1]))[0]
    gamma = a[0] * step_size
    beta = a[1] * step_size
    # Build, transpile for a fake Yorktown backend, then run on the real
    # ibmq_5_yorktown device and dump counts to CSV.
    prog = make_circuit(4)
    sample_shot =5600
    writefile = open("../data/startQiskit_QC683.csv", "w")
    # prog.draw('mpl', filename=(kernel + '.png'))
    IBMQ.load_account()
    provider = IBMQ.get_provider(hub='ibm-q')
    provider.backends()
    backend = provider.get_backend("ibmq_5_yorktown")
    circuit1 = transpile(prog, FakeYorktown())
    circuit1.measure_all()
    prog = circuit1
    info = execute(prog,backend=backend, shots=sample_shot).result().get_counts()
    print(info, file=writefile)
    print("results end", file=writefile)
    print(circuit1.depth(), file=writefile)
    print(circuit1, file=writefile)
    writefile.close()
| [
"wangjiyuan123@yeah.net"
] | wangjiyuan123@yeah.net |
f7cda2f1394a3ee632aec0355cfbb9862944735b | ec78979fd8479e884ab93d723360744db5152134 | /file-move-task1.py | 82398d6af3c6f1287df01f3e489cb86e09123314 | [] | no_license | xushubo/learn-python | 49c5f4fab1ac0e06c91eaa6bd54159fd661de0b9 | 8cb6f0cc23d37011442a56f1c5a11f99b1179ce6 | refs/heads/master | 2021-01-19T17:00:05.247958 | 2017-09-03T03:22:28 | 2017-09-03T03:22:28 | 101,032,298 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 617 | py | import os
from datetime import *
# Lists every entry of a user-supplied directory with its size and
# modification/creation timestamps (prompts and labels are in Chinese).
print('当前时间:', datetime.today())
dir_file = input('请输入所查路径:')
print('文件名    ','大小    ','修改时间            ', '创建时间')
for x in os.listdir(dir_file):
    full_dir_file = os.path.join(dir_file, x)
    mod_time = os.path.getmtime(full_dir_file)  # file modification time (epoch seconds)
    cha_time = os.path.getctime(full_dir_file)  # file creation/metadata-change time
    mod_date = datetime.fromtimestamp(mod_time)
    cha_date = datetime.fromtimestamp(cha_time)
print(x, os.path.getsize(full_dir_file), mod_date.strftime('%Y-%m-%d %H:%M:%S'), cha_date.strftime('%Y-%m-%d %H:%M:%S')) | [
"tmac523@163.com"
] | tmac523@163.com |
6a824cb6a31c39b015630030f2bb6980b9dfa5d4 | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /DaVinciDev_v38r1p1/InstallArea/x86_64-slc6-gcc49-opt/python/StrippingArchive/Stripping18/StrippingB2MuMuX.py | ec0204f230df93c41cd9608cffccfd8e368d4099 | [] | no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,715 | py | #\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\#
'''
Stripping selections for B -> mu mu gamma, B -> mu mu mu mu and B -> V0 mu mu.
Author: M. Williams
'''
__author__ = ['Mike Williams','Kostas Petridis']
moduleName = 'B2MuMuX'  # base name used for the LineBuilder configuration
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\#
from Gaudi.Configuration import *
from GaudiConfUtils.ConfigurableGenerators import FilterDesktop
from PhysSelPython.Wrappers import Selection, DataOnDemand, MergedSelection
from StrippingConf.StrippingLine import StrippingLine
from StrippingUtils.Utils import LineBuilder
from StandardParticles import StdAllNoPIDsPions, StdAllNoPIDsKaons
from StandardParticles import StdAllVeryLooseMuons, StdAllLooseMuons,\
StdAllNoPIDsMuons
from StandardParticles import StdLooseAllPhotons
from Beauty2Charm_LoKiCuts import LoKiCuts
from GaudiConfUtils.ConfigurableGenerators import CombineParticles
from Configurables import OfflineVertexFitter
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\#
# Default configuration dictionary
config = {
    # Muon track-quality / PID selections (loose = ISMUON required).
    'MuonsLoose': {
        'ISMUON' : True,
        'TRCHI2DOF_MAX' : 5,
        'MIPCHI2DV_MIN' : 4,
        'InAccMuon' : True
        },
    'MuonsNoPid': {
        'ISMUON' : False,
        'TRCHI2DOF_MAX' : 5,
        'MIPCHI2DV_MIN' : 4,
        'InAccMuon' : True
        },
    # Dimuon combination cuts (plain / low-mass / high-mass / corrected-mass).
    'DiMuons': {
        'AMAXDOCA_MAX' : '0.5*mm',
        'ASUMPT_MIN' : '1000*MeV',
        'VCHI2DOF_MAX' : 16,
        'BPVVDCHI2_MIN' : 16,
        'BPVDIRA_MIN' : 0.0
        },
    'DiMuonsLowM': {
        'AMAXDOCA_MAX' : '0.5*mm',
        'ASUMPT_MIN' : '1000*MeV',
        'VCHI2DOF_MAX' : 16,
        'BPVVDCHI2_MIN' : 16,
        'BPVDIRA_MIN' : 0.0
        },
    'DiMuonsHighM': {
        'AMAXDOCA_MAX' : '0.5*mm',
        'ASUMPT_MIN' : '1000*MeV',
        'VCHI2DOF_MAX' : 16,
        'BPVVDCHI2_MIN' : 0,
        'BPVDIRA_MIN' : 0
        },
    'DiMuonsCorrM': {
        'AMAXDOCA_MAX' : '0.3*mm',
        'ASUMPT_MIN' : '2000*MeV',
        'VCHI2DOF_MAX' : 8,
        'BPVVDCHI2_MIN' : 225,
        'BPVIPCHI2_MAX' : 17,
        'BPVDIRA_MIN' : 0.4,
        'CORRM_MIN' : '4800*MeV',
        'CORRM_MAX' : '6500*MeV',
        'PTMU' : '900*MeV'
        },
    'Photons': {
        'PT_MIN' : '1600*MeV',
        'CL_MIN' : '0.25'
        },
    "V0s": { # Cuts for rho, K*, phi
        'MASS_MIN' : {'KST':'700*MeV','RHO':'600*MeV','PHI':'900*MeV'},
        'MASS_MAX' : {'KST':'1100*MeV','RHO':'1000*MeV','PHI':'1100*MeV'},
        'DAUGHTERS' : {'PT_MIN':'150*MeV','P_MIN':'2000*MeV',
                       'MIPCHI2DV_MIN' : 4, 'TRCHI2DOF_MAX' : 4},
        'AMAXDOCA_MAX' : '0.5*mm',
        'VCHI2DOF_MAX' : 16,
        'BPVIPCHI2_MAX' : 16,
        'BPVVDCHI2_MIN' : 25,
        'BPVDIRA_MIN' : 0,
        'ASUMPT_MIN' : '1000*MeV'
        },
    # B-candidate cuts for the 3-body and 4-body combinations.
    "B2X3BODY" : {
        'SUMPT_MIN' : '5000*MeV',
        'VCHI2DOF_MAX' : 10,
        'BPVIPCHI2_MAX' : 20,
        'BPVVDCHI2_MIN' : 25,
        'BPVDIRA_MIN' : 0.9, #0.0
        'MASS_MIN' : {'B':'4300*MeV'},
        'MASS_MAX' : {'B':'6400*MeV'},
        },
    "B2X3BODYLOWM" : {
        'SUMPT_MIN' : '3900*MeV',
        'VCHI2DOF_MAX' : 10,
        'BPVIPCHI2_MAX' : 25,
        'BPVVDCHI2_MIN' : 25,
        'BPVDIRA_MIN' : 0.9, #0.0
        'MASS_MIN' : {'J':'2600*MeV'},
        'MASS_MAX' : {'J':'3600*MeV'},
        },
    "B2X3BODYHIGHM" : {
        'SUMPT_MIN' : '5000*MeV',
        'VCHI2DOF_MAX' : 10,
        'BPVDIRA_MIN' : 0.9,
        'MASS_MIN' : {'Y':'9000*MeV'},
        'MASS_MAX' : {'Y':'10000*MeV'},
        },
    "B2X4BODY" : {
        'SUMPT_MIN' : '5000*MeV',
        'VCHI2DOF_MAX' : 6,
        'BPVIPCHI2_MAX' : 16,
        'BPVVDCHI2_MIN' : 50,
        'BPVDIRA_MIN' : 0.0,
        'MASS_MIN' : {'B':'4600*MeV'},
        'MASS_MAX' : {'B':'6000*MeV'}
        },
    # Line prescales for opposite-sign and same-sign dimuons.
    "Prescales" : {
        'OS' : 1.0,
        'SS' : 0.5
        },
    'GECNTrkMax' : 500  # global event cut: max number of long tracks
    }
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\#
def filterMuons(muons, config):
    """Filter an input muon container on PID and track quality.

    The selection name reflects the PID requirement: 'Loose' when ISMUON
    is demanded, 'NoPid' otherwise.
    """
    sel_name = 'B2MuMuXNoPidMuonFilter'
    cut = ''
    if config['ISMUON']:
        cut += '(ISMUON) & '
        sel_name = sel_name.replace("NoPid", "Loose")
    if config['InAccMuon']:
        cut += '(PPINFO(LHCb.ProtoParticle.InAccMuon,-1) == 1) &'
    cut += LoKiCuts(['TRCHI2DOF', 'MIPCHI2DV'], config).code()
    return Selection(sel_name,
                     Algorithm=FilterDesktop(Code=cut),
                     RequiredSelections=[muons])
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\#
def diMuonBuilder(muons, config, decay, tag, oneLooseMuInDimu):
    """Combine muon pairs into a dimuon Selection for the given decay string."""
    combo = LoKiCuts(['ASUMPT', 'AMAXDOCA'], config).code()
    if oneLooseMuInDimu:
        # require at least one daughter passing ISMUON
        combo += '& ( (ANUM(ISMUON))>0 )'
    if config['BPVVDCHI2_MIN']:
        mother = LoKiCuts(['VCHI2DOF', 'BPVVDCHI2', 'BPVDIRA'], config).code()
    else:
        # no flight-distance requirement configured
        mother = LoKiCuts(['VCHI2DOF', 'BPVDIRA'], config).code()
    combiner = CombineParticles(CombinationCut=combo, MotherCut=mother,
                                DecayDescriptors=[decay])
    return Selection('B2MuMuXDiMuonBuilder' + tag, Algorithm=combiner,
                     RequiredSelections=[muons])
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\#
def filterPhotons(photons, config):
    """Filter photons on transverse momentum and confidence level."""
    cut = LoKiCuts(['PT'], config).code() + ' & (CL>%s)' % config['CL_MIN']
    return Selection('B2MuMuXPhotonFilter',
                     Algorithm=FilterDesktop(Code=cut),
                     RequiredSelections=[photons])
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\#
def makeV0s(name,particles,decay,config):
    ''' Makes all V0 -> X+X- selections.

    The mass window is chosen by substring-matching the selection name
    against the resonance ('Rho'/'Kst'/'Phi').
    '''
    # NOTE(review): dauPiCuts/dauKCuts are built but never passed to
    # CombineParticles (no DaughtersCuts argument) — possibly a dropped cut.
    dauPiCuts = {'pi+' : LoKiCuts(['P','PT','TRCHI2DOF','MIPCHI2DV'],config['DAUGHTERS']).code()}
    dauKCuts = {'K+' : LoKiCuts(['P','PT','TRCHI2DOF','MIPCHI2DV'],config['DAUGHTERS']).code()}
    if 'Rho' in name:
        wm = ['in_range(%s,AM,%s)' % (config['MASS_MIN']['RHO'],config['MASS_MAX']['RHO'])]
    elif 'Kst' in name:
        wm = ['in_range(%s,AM,%s)' % (config['MASS_MIN']['KST'],config['MASS_MAX']['KST'])]
    elif 'Phi' in name:
        wm = ['in_range(%s,AM,%s)' % (config['MASS_MIN']['PHI'],config['MASS_MAX']['PHI'])]
    # NOTE(review): wm is undefined (NameError) if the name matches none of
    # the three resonances above.
    wm = '('+('|'.join(wm))+')'
    comboCuts = [LoKiCuts(['ASUMPT'],config).code(),wm]
    comboCuts.append(LoKiCuts(['AMAXDOCA'],config).code())
    comboCuts = LoKiCuts.combine(comboCuts)
    momCuts = LoKiCuts(['VCHI2DOF','BPVVDCHI2','BPVDIRA'],config).code()
    cp = CombineParticles(CombinationCut=comboCuts,MotherCut=momCuts,
                          DecayDescriptors=[decay])
    return Selection(name+'B2MuMuX',Algorithm=cp,
                     RequiredSelections=particles)
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\#
def makeB2X(name,decay,inputs,config,vertex=True):
    """Combine the inputs into a B candidate inside the configured mass window.

    vertex=False swaps in an OfflineVertexFitter with useResonanceVertex
    disabled (used by the gamma modes below).
    """
    wm = ['in_range(%s,AM,%s)' % (config['MASS_MIN']['B'],
                                  config['MASS_MAX']['B'])]
    wm = '('+('|'.join(wm))+')'
    comboCuts = [LoKiCuts(['SUMPT'],config).code(),wm]
    comboCuts = LoKiCuts.combine(comboCuts)
    momCuts = LoKiCuts(['VCHI2DOF','BPVVDCHI2','BPVIPCHI2','BPVDIRA'],
                       config).code()
    b2x = CombineParticles(DecayDescriptors=decay,CombinationCut=comboCuts,
                           MotherCut=momCuts)
    if not vertex:
        b2x = b2x.configurable(name+'B2MuMuXVertexFitter')
        b2x.addTool(OfflineVertexFitter)
        b2x.VertexFitters.update( { "" : "OfflineVertexFitter"} )
        b2x.OfflineVertexFitter.useResonanceVertex = False
    return Selection(name,Algorithm=b2x,RequiredSelections=inputs)
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\#
def makeY2X(name,decay,inputs,config,vertex=True):
    """Like makeB2X but for the high-mass (Y) window; no flight-distance or
    IP-chi2 mother cuts are applied (consistent with a prompt candidate).
    """
    wm = ['in_range(%s,AM,%s)' % (config['MASS_MIN']['Y'],
                                  config['MASS_MAX']['Y'])]
    wm = '('+('|'.join(wm))+')'
    comboCuts = [LoKiCuts(['SUMPT'],config).code(),wm]
    comboCuts = LoKiCuts.combine(comboCuts)
    momCuts = LoKiCuts(['VCHI2DOF','BPVDIRA'],config).code()
    y2x = CombineParticles(DecayDescriptors=decay,CombinationCut=comboCuts,
                           MotherCut=momCuts)
    if not vertex:
        y2x = y2x.configurable(name+'B2MuMuXVertexFitter')
        y2x.addTool(OfflineVertexFitter)
        y2x.VertexFitters.update( { "" : "OfflineVertexFitter"} )
        y2x.OfflineVertexFitter.useResonanceVertex = False
    return Selection(name,Algorithm=y2x,RequiredSelections=inputs)
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\#
def makeJ2X(name,decay,inputs,config,vertex=True):
    """Like makeB2X but for the low-mass (J) window."""
    wm = ['in_range(%s,AM,%s)' % (config['MASS_MIN']['J'],
                                  config['MASS_MAX']['J'])]
    wm = '('+('|'.join(wm))+')'
    comboCuts = [LoKiCuts(['SUMPT'],config).code(),wm]
    comboCuts = LoKiCuts.combine(comboCuts)
    momCuts = LoKiCuts(['VCHI2DOF','BPVVDCHI2','BPVIPCHI2','BPVDIRA'],
                       config).code()
    j2x = CombineParticles(DecayDescriptors=decay,CombinationCut=comboCuts,
                           MotherCut=momCuts)
    if not vertex:
        j2x = j2x.configurable(name+'B2MuMuXVertexFitter')
        j2x.addTool(OfflineVertexFitter)
        j2x.VertexFitters.update( { "" : "OfflineVertexFitter"} )
        j2x.OfflineVertexFitter.useResonanceVertex = False
    return Selection(name,Algorithm=j2x,RequiredSelections=inputs)
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\#
def makeB2MuMuGammaCORRM(name, input, config):
    """Filter dimuons on a corrected-mass window and a minimum muon PT.

    Note: the parameter `input` (shadowing the builtin) is kept for
    interface compatibility.
    """
    cut = ('(BPVCORRM > %s) & (BPVCORRM < %s)'
           '& (NINGENERATION(PT<%s,1)==0)') % (
        config['CORRM_MIN'], config['CORRM_MAX'], config['PTMU'])
    return Selection(name + 'CORRMFilter',
                     Algorithm=FilterDesktop(Code=cut),
                     RequiredSelections=[input])
#\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\#
class B2MuMuXConf(LineBuilder):
    """LineBuilder assembling all B2MuMuX stripping lines: B->mumugamma
    (plus low/high-mass dimuon variants), B->4mu, B->V0(rho/K*/phi)mumu,
    and their same-sign (ss) control counterparts.
    """
    __configuration_keys__ = ('MuonsLoose','MuonsNoPid','DiMuons','DiMuonsLowM','DiMuonsHighM',
                              'DiMuonsCorrM','Photons','B2X3BODY','B2X3BODYLOWM','B2X3BODYHIGHM',
                              'B2X4BODY','Prescales','V0s','GECNTrkMax')

    def __init__(self, moduleName, config) :
        LineBuilder.__init__(self, moduleName, config)
        muons_loose = filterMuons(StdAllLooseMuons,config['MuonsLoose'])
        muons_noPid = filterMuons(StdAllVeryLooseMuons,config['MuonsNoPid'])
        # os fly and no fly dimuons
        # ('Fly' = KS0 container so the dimuon may be detached; 'NoFly' = J/psi)
        oneLooseMuInDiMu=False
        dimuons_nofly_loose = diMuonBuilder(muons_loose,config['DiMuons'],
                                            'J/psi(1S) -> mu+ mu-','NoFly_loose',
                                            oneLooseMuInDiMu)
        dimuons_nofly_loose_lowm = diMuonBuilder(muons_loose,config['DiMuonsLowM'],
                                                 'J/psi(1S) -> mu+ mu-','NoFly_loose_LowM',
                                                 oneLooseMuInDiMu)
        dimuons_nofly_loose_highm = diMuonBuilder(muons_loose,config['DiMuonsHighM'],
                                                  'J/psi(1S) -> mu+ mu-','NoFly_loose_HighM',
                                                  oneLooseMuInDiMu)
        dimuons_nofly_loose_corrm = diMuonBuilder(muons_loose,config['DiMuonsCorrM'],
                                                  'J/psi(1S) -> mu+ mu-','NoFly_loose_CorrM',
                                                  oneLooseMuInDiMu)
        dimuons_fly_loose = diMuonBuilder(muons_loose,config['DiMuons'],
                                          'KS0 -> mu+ mu-','Fly_loose',
                                          oneLooseMuInDiMu)
        oneLooseMuInDiMu=True
        dimuons_fly_onePid = diMuonBuilder(muons_noPid,config['DiMuons'],
                                           'KS0 -> mu+ mu-','Fly_onePid',
                                           oneLooseMuInDiMu)
        # ss fly and no fly dimuons
        oneLooseMuInDiMu=False
        dimuons_nofly_loose_ss = diMuonBuilder(muons_loose,config['DiMuons'],
                                               '[J/psi(1S) -> mu+ mu+]cc',
                                               'NoFly_loose_ss',
                                               oneLooseMuInDiMu)
        dimuons_fly_loose_ss = diMuonBuilder(muons_loose,config['DiMuons'],
                                             '[KS0 -> mu+ mu+]cc',
                                             'Fly_loose_ss',
                                             oneLooseMuInDiMu)
        # photons
        photons = filterPhotons(StdLooseAllPhotons,config['Photons'])
        # v0s for rho0,Kst,phi
        rhos = makeV0s('Rho2PiPi',[StdAllNoPIDsPions],'rho(770)0 -> pi+ pi-',config['V0s'])
        ksts = makeV0s('Kst2KPi' ,[StdAllNoPIDsPions,StdAllNoPIDsKaons],'[K*(892)0 -> K+ pi-]cc',config['V0s'])
        phis = makeV0s('Phi2KK' ,[StdAllNoPIDsKaons],'phi(1020) -> K+ K-',config['V0s'])
        # form B hadron and stripping lines
        # B to Mu Mu Gamma
        b2mumug = makeB2X('B2MuMuGamma',['B0 -> J/psi(1S) gamma'],
                          [dimuons_nofly_loose,photons],
                          config['B2X3BODY'],False)
        self._makeLine(b2mumug,config)
        # Jpsi (from B) to Mu Mu Gamma
        j2mumug = makeJ2X('B2MuMuGammaLowMass',['B0 -> J/psi(1S) gamma'],
                          [dimuons_nofly_loose_lowm,photons],
                          config['B2X3BODYLOWM'],False)
        self._makeLine(j2mumug,config)
        # Y to Mu Mu Gamma
        y2mumug = makeY2X('B2MuMuGammaHighMass',['B0 -> J/psi(1S) gamma'],
                          [dimuons_nofly_loose_highm,photons],
                          config['B2X3BODYHIGHM'],False)
        self._makeLine(y2mumug,config)
        # B to Mu Mu Mu Mu
        b24mu = makeB2X('B2MuMuMuMu',['B0 -> KS0 KS0'],[dimuons_fly_onePid],
                        config['B2X4BODY'])
        self._makeLine(b24mu,config)
        # B to V0 MuMu
        #rho
        b2rhomumu = makeB2X('B2RhoMuMu_B2MuMuX',['B0 -> KS0 rho(770)0' ],
                            [dimuons_fly_loose,rhos],
                            config['B2X4BODY'])
        self._makeLine(b2rhomumu,config)
        #kst
        b2kstmumu = makeB2X('B2KstMuMu_B2MuMuX',['[B0 -> KS0 K*(892)0]cc' ],
                            [dimuons_fly_loose,ksts],
                            config['B2X4BODY'])
        self._makeLine(b2kstmumu,config)
        #phi
        b2phimumu = makeB2X('B2PhiMuMu_B2MuMuX',['B0 -> KS0 phi(1020)' ],
                            [dimuons_fly_loose,phis],
                            config['B2X4BODY'])
        self._makeLine(b2phimumu,config)
        # SS stripping lines
        b2mumug_ss = makeB2X('B2MuMuGamma_ss',['B0 -> J/psi(1S) gamma'],
                             [dimuons_nofly_loose_ss,photons],
                             config['B2X3BODY'],False)
        self._makeLine(b2mumug_ss,config)
        #rho ss
        b2rhomumu_ss = makeB2X('B2RhoMuMu_B2MuMuX_ss',['B0 -> KS0 rho(770)0' ],
                               [dimuons_fly_loose_ss,rhos],
                               config['B2X4BODY'])
        self._makeLine(b2rhomumu_ss,config)
        # kst ss
        b2kstmumu_ss = makeB2X('B2KstMuMu_B2MuMuX_ss',['[B0 -> KS0 K*(892)0]cc' ],
                               [dimuons_fly_loose_ss,ksts],
                               config['B2X4BODY'])
        self._makeLine(b2kstmumu_ss,config)
        # phi ss
        b2phimumu_ss = makeB2X('B2PhiMuMu_B2MuMuX_ss',['B0 -> KS0 phi(1020)' ],
                               [dimuons_fly_loose_ss,phis],
                               config['B2X4BODY'])
        self._makeLine(b2phimumu_ss,config)
        # Corrected mass stripping lines
        # B to Mu Mu Gamma
        # NOTE(review): built but deliberately not registered (see below).
        b2mumug_mcor = makeB2MuMuGammaCORRM('B2MuMuGamma',dimuons_nofly_loose_corrm,
                                            config['DiMuonsCorrM'])
        # Mar 2012: Removed for stripping 18
        # self._makeLine(b2mumug_mcor,config)

    def _makeLine(self,sel,config):
        """Wrap a Selection into a StrippingLine with GEC filter, HLT
        requirement and the OS/SS prescale, then register it.
        """
        tmpSel = Selection(sel.name()+'FilterALL',
                           Algorithm=FilterDesktop(Code='ALL'),
                           RequiredSelections=[sel])
        name = sel.name()+'Line'
        # Global event cut: limit the number of long tracks.
        # NOTE(review): `filter` shadows the builtin; kept for compatibility.
        filter = {'Code' :
                  "(recSummaryTrack(LHCb.RecSummary.nLongTracks, TrLONG)"\
                  " < %s )" \
                  % config['GECNTrkMax'],
                  'Preambulo' : [ "from LoKiTracks.decorators import *",
                                  'from LoKiCore.functions import *' ]
                  }
        hlt = "HLT_PASS_RE('Hlt2.*Topo.*Decision')"\
              "| HLT_PASS_RE('Hlt2IncPhi.*Decision')"\
              "| HLT_PASS_RE('Hlt2.*Muon.*Decision')"\
              "| HLT_PASS_RE('Hlt2.*Gamma.*Decision')"\
              "| HLT_PASS_RE('Hlt2.*Photon.*Decision')"
        # V0 mumu lines use a narrower HLT requirement.
        # (Bitwise `|` on the `in` results works since they are bools.)
        if ("B2RhoMuMu" in name) | ("B2KstMuMu" in name) | ("B2PhiMuMu" in name) :
            hlt = "HLT_PASS_RE('Hlt2.*Topo.*Decision')"\
                  "| HLT_PASS_RE('Hlt2IncPhi.*Decision')"
        presc = config['Prescales']['OS']
        if "_ss" in name:
            presc = config['Prescales']['SS']
        sline = StrippingLine(name,prescale=presc,selection=tmpSel,checkPV=True,
                              FILTER=filter,HLT=hlt)
        self.registerLine(sline)
| [
"slavomirastefkova@b2pcx39016.desy.de"
] | slavomirastefkova@b2pcx39016.desy.de |
1c61fe33c2cca29b658edc0a181eaf396fa6a621 | d1ddb9e9e75d42986eba239550364cff3d8f5203 | /google-cloud-sdk/lib/third_party/apitools/base/py/base_api.py | c49ff783ef115c77e8093cfe05692efd1643542f | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | bopopescu/searchparty | 8ecd702af0d610a7ad3a8df9c4d448f76f46c450 | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | refs/heads/master | 2022-11-19T14:44:55.421926 | 2017-07-28T14:55:43 | 2017-07-28T14:55:43 | 282,495,798 | 0 | 0 | Apache-2.0 | 2020-07-25T17:48:53 | 2020-07-25T17:48:52 | null | UTF-8 | Python | false | false | 28,771 | py | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base class for api services."""
import base64
import contextlib
import datetime
import logging
import pprint
import six
from six.moves import http_client
from six.moves import urllib
from apitools.base.protorpclite import message_types
from apitools.base.protorpclite import messages
from apitools.base.py import encoding
from apitools.base.py import exceptions
from apitools.base.py import http_wrapper
from apitools.base.py import util
__all__ = [
    'ApiMethodInfo',
    'ApiUploadInfo',
    'BaseApiClient',
    'BaseApiService',
    'NormalizeApiEndpoint',
]

# TODO(user): Remove this once we quiet the spurious logging in
# oauth2client (or drop oauth2client).
logging.getLogger('oauth2client.util').setLevel(logging.ERROR)

# Maximum URL length tolerated for requests — presumably requests whose
# URLs would exceed this are handled differently elsewhere; TODO confirm.
_MAX_URL_LENGTH = 2048
class ApiUploadInfo(messages.Message):

    """Media upload information for a method.

    Fields:
      accept: (repeated) MIME Media Ranges for acceptable media uploads
          to this method.
      max_size: (integer) Maximum size of a media upload, such as 3MB
          or 1TB (converted to an integer).
      resumable_path: Path to use for resumable uploads.
      resumable_multipart: (boolean) Whether or not the resumable endpoint
          supports multipart uploads.
      simple_path: Path to use for simple uploads.
      simple_multipart: (boolean) Whether or not the simple endpoint
          supports multipart uploads.
    """

    accept = messages.StringField(1, repeated=True)
    max_size = messages.IntegerField(2)
    resumable_path = messages.StringField(3)
    resumable_multipart = messages.BooleanField(4)
    simple_path = messages.StringField(5)
    simple_multipart = messages.BooleanField(6)
class ApiMethodInfo(messages.Message):

    """Configuration info for an API method.

    All fields are strings unless noted otherwise.

    Fields:
      relative_path: Relative path for this method.
      flat_path: Expanded version (if any) of relative_path.
      method_id: ID for this method.
      http_method: HTTP verb to use for this method.
      path_params: (repeated) path parameters for this method.
      query_params: (repeated) query parameters for this method.
      ordered_params: (repeated) ordered list of parameters for
          this method.
      description: description of this method.
      request_type_name: name of the request type.
      response_type_name: name of the response type.
      request_field: if not null, the field to pass as the body
          of this POST request. may also be the REQUEST_IS_BODY
          value below to indicate the whole message is the body.
      upload_config: (ApiUploadInfo) Information about the upload
          configuration supported by this method.
      supports_download: (boolean) If True, this method supports
          downloading the request via the `alt=media` query
          parameter.
    """

    relative_path = messages.StringField(1)
    flat_path = messages.StringField(2)
    method_id = messages.StringField(3)
    http_method = messages.StringField(4)
    path_params = messages.StringField(5, repeated=True)
    query_params = messages.StringField(6, repeated=True)
    ordered_params = messages.StringField(7, repeated=True)
    description = messages.StringField(8)
    request_type_name = messages.StringField(9)
    response_type_name = messages.StringField(10)
    request_field = messages.StringField(11, default='')
    upload_config = messages.MessageField(ApiUploadInfo, 12)
    supports_download = messages.BooleanField(13, default=False)
REQUEST_IS_BODY = '<request>'
def _LoadClass(name, messages_module):
if name.startswith('message_types.'):
_, _, classname = name.partition('.')
return getattr(message_types, classname)
elif '.' not in name:
return getattr(messages_module, name)
else:
raise exceptions.GeneratedClientError('Unknown class %s' % name)
def _RequireClassAttrs(obj, attrs):
for attr in attrs:
attr_name = attr.upper()
if not hasattr(obj, '%s' % attr_name) or not getattr(obj, attr_name):
msg = 'No %s specified for object of class %s.' % (
attr_name, type(obj).__name__)
raise exceptions.GeneratedClientError(msg)
def NormalizeApiEndpoint(api_endpoint):
    """Return api_endpoint, guaranteeing a trailing '/'."""
    return api_endpoint if api_endpoint.endswith('/') else api_endpoint + '/'
def _urljoin(base, url): # pylint: disable=invalid-name
"""Custom urljoin replacement supporting : before / in url."""
# In general, it's unsafe to simply join base and url. However, for
# the case of discovery documents, we know:
# * base will never contain params, query, or fragment
# * url will never contain a scheme or net_loc.
# In general, this means we can safely join on /; we just need to
# ensure we end up with precisely one / joining base and url. The
# exception here is the case of media uploads, where url will be an
# absolute url.
if url.startswith('http://') or url.startswith('https://'):
return urllib.parse.urljoin(base, url)
new_base = base if base.endswith('/') else base + '/'
new_url = url[1:] if url.startswith('/') else url
return new_base + new_url
class _UrlBuilder(object):
    """Convenient container for url data."""

    def __init__(self, base_url, relative_path=None, query_params=None):
        pieces = urllib.parse.urlsplit(_urljoin(base_url, relative_path or ''))
        if pieces.fragment:
            raise exceptions.ConfigurationValueError(
                'Unexpected url fragment: %s' % pieces.fragment)
        # parse_qs maps each key to a list of values; explicit overrides
        # supplied by the caller are layered on top of the url's own query.
        self.query_params = urllib.parse.parse_qs(pieces.query or '')
        if query_params is not None:
            self.query_params.update(query_params)
        self.__scheme = pieces.scheme
        self.__netloc = pieces.netloc
        self.relative_path = pieces.path or ''

    @classmethod
    def FromUrl(cls, url):
        """Build a _UrlBuilder by decomposing a complete url."""
        pieces = urllib.parse.urlsplit(url)
        return cls(
            urllib.parse.urlunsplit(
                (pieces.scheme, pieces.netloc, '', None, None)),
            relative_path=pieces.path or '',
            query_params=urllib.parse.parse_qs(pieces.query))

    @property
    def base_url(self):
        """Scheme + netloc with no path, query or fragment."""
        return urllib.parse.urlunsplit(
            (self.__scheme, self.__netloc, '', '', ''))

    @base_url.setter
    def base_url(self, value):
        pieces = urllib.parse.urlsplit(value)
        if pieces.path or pieces.query or pieces.fragment:
            raise exceptions.ConfigurationValueError(
                'Invalid base url: %s' % value)
        self.__scheme = pieces.scheme
        self.__netloc = pieces.netloc

    @property
    def query(self):
        # TODO(user): In the case that some of the query params are
        # non-ASCII, we may silently fail to encode correctly. We should
        # figure out who is responsible for owning the object -> str
        # conversion.
        return urllib.parse.urlencode(self.query_params, True)

    @property
    def url(self):
        if '{' in self.relative_path or '}' in self.relative_path:
            raise exceptions.ConfigurationValueError(
                'Cannot create url with relative path %s' % self.relative_path)
        return urllib.parse.urlunsplit((
            self.__scheme, self.__netloc, self.relative_path, self.query, ''))
def _SkipGetCredentials():
"""Hook for skipping credentials. For internal use."""
return False
class BaseApiClient(object):
    """Base class for client libraries.

    Generated API clients subclass this and override the class constants
    below; __init__ validates that via _RequireClassAttrs.
    """
    MESSAGES_MODULE = None

    # Per-client identity/auth constants, overridden by generated subclasses.
    _API_KEY = ''
    _CLIENT_ID = ''
    _CLIENT_SECRET = ''
    _PACKAGE = ''
    _SCOPES = []
    _USER_AGENT = ''

    def __init__(self, url, credentials=None, get_credentials=True, http=None,
                 model=None, log_request=False, log_response=False,
                 num_retries=5, max_retry_wait=60, credentials_args=None,
                 default_global_params=None, additional_http_headers=None,
                 check_response_func=None, retry_func=None):
        """Create a client for the API rooted at url.

        Args:
          url: Base url of the API; normalized to end with '/'.
          credentials: (optional) Credentials to authorize http with; if
              absent and get_credentials is True, they are fetched.
          get_credentials: Whether to fetch credentials when none are given.
          http: (optional) Http object; a default one is created otherwise.
          model: Deprecated; ignored.
          log_request: Whether to log outgoing requests.
          log_response: Whether to log responses.
          num_retries: Max retries per request; must be non-negative.
          max_retry_wait: Max seconds between retries; must be positive.
          credentials_args: (optional) dict of extra kwargs for fetching
              credentials (see _SetCredentials).
          default_global_params: (optional) StandardQueryParameters message
              applied to every request.
          additional_http_headers: (optional) headers added to every request.
          check_response_func: (optional) hook passed to http_wrapper.
          retry_func: (optional) hook passed to http_wrapper.
        """
        _RequireClassAttrs(self, ('_package', '_scopes', 'messages_module'))
        if default_global_params is not None:
            util.Typecheck(default_global_params, self.params_type)
        self.__default_global_params = default_global_params
        self.log_request = log_request
        self.log_response = log_response
        # Placeholder values only; the property setters invoked just below
        # perform the real validation of the caller-supplied arguments.
        self.__num_retries = 5
        self.__max_retry_wait = 60
        # We let the @property machinery below do our validation.
        self.num_retries = num_retries
        self.max_retry_wait = max_retry_wait
        self._credentials = credentials
        get_credentials = get_credentials and not _SkipGetCredentials()
        if get_credentials and not credentials:
            credentials_args = credentials_args or {}
            self._SetCredentials(**credentials_args)
        self._url = NormalizeApiEndpoint(url)
        self._http = http or http_wrapper.GetHttp()
        # Note that "no credentials" is totally possible.
        if self._credentials is not None:
            self._http = self._credentials.authorize(self._http)
        # TODO(user): Remove this field when we switch to proto2.
        self.__include_fields = None
        self.additional_http_headers = additional_http_headers or {}
        self.check_response_func = check_response_func
        self.retry_func = retry_func
        # TODO(user): Finish deprecating these fields.
        _ = model
        self.__response_type_model = 'proto'

    def _SetCredentials(self, **kwds):
        """Fetch credentials, and set them for this client.

        Note that we can't simply return credentials, since creating them
        may involve side-effecting self.

        Args:
          **kwds: Additional keyword arguments are passed on to GetCredentials.

        Returns:
          None. Sets self._credentials.
        """
        args = {
            'api_key': self._API_KEY,
            'client': self,
            'client_id': self._CLIENT_ID,
            'client_secret': self._CLIENT_SECRET,
            'package_name': self._PACKAGE,
            'scopes': self._SCOPES,
            'user_agent': self._USER_AGENT,
        }
        args.update(kwds)
        # credentials_lib can be expensive to import so do it only if needed.
        from apitools.base.py import credentials_lib
        # TODO(user): It's a bit dangerous to pass this
        # still-half-initialized self into this method, but we might need
        # to set attributes on it associated with our credentials.
        # Consider another way around this (maybe a callback?) and whether
        # or not it's worth it.
        self._credentials = credentials_lib.GetCredentials(**args)

    @classmethod
    def ClientInfo(cls):
        """Return this client's OAuth registration info as a dict."""
        return {
            'client_id': cls._CLIENT_ID,
            'client_secret': cls._CLIENT_SECRET,
            'scope': ' '.join(sorted(util.NormalizeScopes(cls._SCOPES))),
            'user_agent': cls._USER_AGENT,
        }

    @property
    def base_model_class(self):
        # Deprecated; retained for interface compatibility.
        return None

    @property
    def http(self):
        """The (possibly credential-authorized) http object."""
        return self._http

    @property
    def url(self):
        """Normalized base url for the API."""
        return self._url

    @classmethod
    def GetScopes(cls):
        """Return the OAuth scopes this client requests."""
        return cls._SCOPES

    @property
    def params_type(self):
        """The StandardQueryParameters message class for this API."""
        return _LoadClass('StandardQueryParameters', self.MESSAGES_MODULE)

    @property
    def user_agent(self):
        return self._USER_AGENT

    @property
    def _default_global_params(self):
        # Created lazily so subclasses are fully initialized first.
        if self.__default_global_params is None:
            # pylint: disable=not-callable
            self.__default_global_params = self.params_type()
        return self.__default_global_params

    def AddGlobalParam(self, name, value):
        """Set a default global query parameter for all requests."""
        params = self._default_global_params
        setattr(params, name, value)

    @property
    def global_params(self):
        """A copy of the default global params (safe for callers to mutate)."""
        return encoding.CopyProtoMessage(self._default_global_params)

    @contextlib.contextmanager
    def IncludeFields(self, include_fields):
        """Context in which serialized requests include the given fields."""
        self.__include_fields = include_fields
        yield
        self.__include_fields = None

    @property
    def response_type_model(self):
        """Either 'proto' (default) or 'json' (see JsonResponseModel)."""
        return self.__response_type_model

    @contextlib.contextmanager
    def JsonResponseModel(self):
        """In this context, return raw JSON instead of proto."""
        old_model = self.response_type_model
        self.__response_type_model = 'json'
        yield
        self.__response_type_model = old_model

    @property
    def num_retries(self):
        return self.__num_retries

    @num_retries.setter
    def num_retries(self, value):
        util.Typecheck(value, six.integer_types)
        if value < 0:
            raise exceptions.InvalidDataError(
                'Cannot have negative value for num_retries')
        self.__num_retries = value

    @property
    def max_retry_wait(self):
        return self.__max_retry_wait

    @max_retry_wait.setter
    def max_retry_wait(self, value):
        util.Typecheck(value, six.integer_types)
        if value <= 0:
            # Fixed typo in the error message ('postiive' -> 'positive').
            raise exceptions.InvalidDataError(
                'max_retry_wait must be a positive integer')
        self.__max_retry_wait = value

    @contextlib.contextmanager
    def WithRetries(self, num_retries):
        """Context that temporarily overrides the retry count."""
        old_num_retries = self.num_retries
        self.num_retries = num_retries
        yield
        self.num_retries = old_num_retries

    def ProcessRequest(self, method_config, request):
        """Hook for pre-processing of requests."""
        if self.log_request:
            logging.info(
                'Calling method %s with %s: %s', method_config.method_id,
                method_config.request_type_name, request)
        return request

    def ProcessHttpRequest(self, http_request):
        """Hook for pre-processing of http requests."""
        http_request.headers.update(self.additional_http_headers)
        if self.log_request:
            logging.info('Making http %s to %s',
                         http_request.http_method, http_request.url)
            logging.info('Headers: %s', pprint.pformat(http_request.headers))
            if http_request.body:
                # TODO(user): Make this safe to print in the case of
                # non-printable body characters.
                logging.info('Body:\n%s',
                             http_request.loggable_body or http_request.body)
            else:
                logging.info('Body: (none)')
        return http_request

    def ProcessResponse(self, method_config, response):
        """Hook for post-processing of responses."""
        if self.log_response:
            logging.info('Response of type %s: %s',
                         method_config.response_type_name, response)
        return response

    # TODO(user): Decide where these two functions should live.
    def SerializeMessage(self, message):
        """Serialize a proto message to JSON, honoring IncludeFields."""
        return encoding.MessageToJson(
            message, include_fields=self.__include_fields)

    def DeserializeMessage(self, response_type, data):
        """Deserialize the given data as method_config.response_type."""
        try:
            message = encoding.JsonToMessage(response_type, data)
        except (exceptions.InvalidDataFromServerError,
                messages.ValidationError, ValueError) as e:
            raise exceptions.InvalidDataFromServerError(
                'Error decoding response "%s" as type %s: %s' % (
                    data, response_type.__name__, e))
        return message

    def FinalizeTransferUrl(self, url):
        """Modify the url for a given transfer, based on auth and version."""
        url_builder = _UrlBuilder.FromUrl(url)
        if self.global_params.key:
            url_builder.query_params['key'] = self.global_params.key
        return url_builder.url
class BaseApiService(object):
    """Base class for generated API services.

    A service groups the methods for one API collection; generated
    subclasses attach per-method configs via a `method_config` attribute
    on each method (see GetMethodConfig).
    """
    def __init__(self, client):
        # Parent client; per-method config/upload caches are filled lazily.
        self.__client = client
        self._method_configs = {}
        self._upload_configs = {}
    @property
    def _client(self):
        return self.__client
    @property
    def client(self):
        return self.__client
    def GetMethodConfig(self, method):
        """Returns service cached method config for given method."""
        method_config = self._method_configs.get(method)
        if method_config:
            return method_config
        func = getattr(self, method, None)
        if func is None:
            raise KeyError(method)
        method_config = getattr(func, 'method_config', None)
        if method_config is None:
            raise KeyError(method)
        # Cache the constructed config for subsequent lookups.
        self._method_configs[method] = config = method_config()
        return config
    @classmethod
    def GetMethodsList(cls):
        # Names of all methods on this class that carry a method_config.
        return [f.__name__ for f in six.itervalues(cls.__dict__)
                if getattr(f, 'method_config', None)]
    def GetUploadConfig(self, method):
        # Returns None when the method has no upload config registered.
        return self._upload_configs.get(method)
    def GetRequestType(self, method):
        """Return the request message class for the given method name."""
        method_config = self.GetMethodConfig(method)
        return getattr(self.client.MESSAGES_MODULE,
                       method_config.request_type_name)
    def GetResponseType(self, method):
        """Return the response message class for the given method name."""
        method_config = self.GetMethodConfig(method)
        return getattr(self.client.MESSAGES_MODULE,
                       method_config.response_type_name)
    def __CombineGlobalParams(self, global_params, default_params):
        """Combine the given params with the defaults."""
        util.Typecheck(global_params, (type(None), self.__client.params_type))
        result = self.__client.params_type()
        global_params = global_params or self.__client.params_type()
        # Per-call params win over client defaults; unset/empty values are
        # dropped entirely.
        for field in result.all_fields():
            value = global_params.get_assigned_value(field.name)
            if value is None:
                value = default_params.get_assigned_value(field.name)
            if value not in (None, [], ()):
                setattr(result, field.name, value)
        return result
    def __EncodePrettyPrint(self, query_info):
        """Apply custom wire encoding for the prettyPrint/pp flags."""
        # The prettyPrint flag needs custom encoding: it should be encoded
        # as 0 if False, and ignored otherwise (True is the default).
        if not query_info.pop('prettyPrint', True):
            query_info['prettyPrint'] = 0
        # The One Platform equivalent of prettyPrint is pp, which also needs
        # custom encoding.
        if not query_info.pop('pp', True):
            query_info['pp'] = 0
        return query_info
    def __FinalUrlValue(self, value, field):
        """Encode value for the URL, using field to skip encoding for bytes."""
        if isinstance(field, messages.BytesField) and value is not None:
            return base64.urlsafe_b64encode(value)
        elif isinstance(value, six.text_type):
            # NOTE(review): text is encoded and bytes decoded here to bridge
            # py2/py3 str handling in url construction — confirm before
            # changing, downstream urlencode relies on it.
            return value.encode('utf8')
        elif isinstance(value, six.binary_type):
            return value.decode('utf8')
        elif isinstance(value, datetime.datetime):
            return value.isoformat()
        return value
    def __ConstructQueryParams(self, query_params, request, global_params):
        """Construct a dictionary of query parameters for this request."""
        # First, handle the global params.
        global_params = self.__CombineGlobalParams(
            global_params, self.__client.global_params)
        global_param_names = util.MapParamNames(
            [x.name for x in self.__client.params_type.all_fields()],
            self.__client.params_type)
        global_params_type = type(global_params)
        query_info = dict(
            (param,
             self.__FinalUrlValue(getattr(global_params, param),
                                  getattr(global_params_type, param)))
            for param in global_param_names)
        # Next, add the query params.
        query_param_names = util.MapParamNames(query_params, type(request))
        request_type = type(request)
        query_info.update(
            (param,
             self.__FinalUrlValue(getattr(request, param, None),
                                  getattr(request_type, param)))
            for param in query_param_names)
        # Drop unset params, then apply the special-case encodings and the
        # python-name -> wire-name remapping.
        query_info = dict((k, v) for k, v in query_info.items()
                          if v is not None)
        query_info = self.__EncodePrettyPrint(query_info)
        query_info = util.MapRequestParams(query_info, type(request))
        return query_info
    def __ConstructRelativePath(self, method_config, request,
                                relative_path=None):
        """Determine the relative path for request."""
        python_param_names = util.MapParamNames(
            method_config.path_params, type(request))
        params = dict([(param, getattr(request, param, None))
                       for param in python_param_names])
        params = util.MapRequestParams(params, type(request))
        return util.ExpandRelativePath(method_config, params,
                                       relative_path=relative_path)
    def __FinalizeRequest(self, http_request, url_builder):
        """Make any final general adjustments to the request."""
        # Overlong GET urls are converted to POST with the
        # x-http-method-override header, moving the query into the body.
        if (http_request.http_method == 'GET' and
                len(http_request.url) > _MAX_URL_LENGTH):
            http_request.http_method = 'POST'
            http_request.headers['x-http-method-override'] = 'GET'
            http_request.headers[
                'content-type'] = 'application/x-www-form-urlencoded'
            http_request.body = url_builder.query
            url_builder.query_params = {}
        http_request.url = url_builder.url
    def __ProcessHttpResponse(self, method_config, http_response, request):
        """Process the given http response."""
        if http_response.status_code not in (http_client.OK,
                                             http_client.NO_CONTENT):
            raise exceptions.HttpError(
                http_response.info, http_response.content,
                http_response.request_url, method_config, request)
        if http_response.status_code == http_client.NO_CONTENT:
            # TODO(user): Find out why _replace doesn't seem to work
            # here.
            # 204 responses have no body; substitute an empty JSON object so
            # deserialization below still succeeds.
            http_response = http_wrapper.Response(
                info=http_response.info, content='{}',
                request_url=http_response.request_url)
        if self.__client.response_type_model == 'json':
            return http_response.content
        response_type = _LoadClass(method_config.response_type_name,
                                   self.__client.MESSAGES_MODULE)
        return self.__client.DeserializeMessage(
            response_type, http_response.content)
    def __SetBaseHeaders(self, http_request, client):
        """Fill in the basic headers on http_request."""
        # TODO(user): Make the default a little better here, and
        # include the apitools version.
        user_agent = client.user_agent or 'apitools-client/1.0'
        http_request.headers['user-agent'] = user_agent
        http_request.headers['accept'] = 'application/json'
        http_request.headers['accept-encoding'] = 'gzip, deflate'
    def __SetBody(self, http_request, method_config, request, upload):
        """Fill in the body on http_request."""
        if not method_config.request_field:
            return
        request_type = _LoadClass(
            method_config.request_type_name, self.__client.MESSAGES_MODULE)
        if method_config.request_field == REQUEST_IS_BODY:
            body_value = request
            body_type = request_type
        else:
            body_value = getattr(request, method_config.request_field)
            body_field = request_type.field_by_name(
                method_config.request_field)
            util.Typecheck(body_field, messages.MessageField)
            body_type = body_field.type
        # If there was no body provided, we use an empty message of the
        # appropriate type.
        body_value = body_value or body_type()
        if upload and not body_value:
            # We're going to fill in the body later.
            return
        util.Typecheck(body_value, body_type)
        http_request.headers['content-type'] = 'application/json'
        http_request.body = self.__client.SerializeMessage(body_value)
    def PrepareHttpRequest(self, method_config, request, global_params=None,
                           upload=None, upload_config=None, download=None):
        """Prepares an HTTP request to be sent."""
        request_type = _LoadClass(
            method_config.request_type_name, self.__client.MESSAGES_MODULE)
        util.Typecheck(request, request_type)
        request = self.__client.ProcessRequest(method_config, request)
        http_request = http_wrapper.Request(
            http_method=method_config.http_method)
        self.__SetBaseHeaders(http_request, self.__client)
        self.__SetBody(http_request, method_config, request, upload)
        url_builder = _UrlBuilder(
            self.__client.url, relative_path=method_config.relative_path)
        url_builder.query_params = self.__ConstructQueryParams(
            method_config.query_params, request, global_params)
        # It's important that upload and download go before we fill in the
        # relative path, so that they can replace it.
        if upload is not None:
            upload.ConfigureRequest(upload_config, http_request, url_builder)
        if download is not None:
            download.ConfigureRequest(http_request, url_builder)
        url_builder.relative_path = self.__ConstructRelativePath(
            method_config, request, relative_path=url_builder.relative_path)
        self.__FinalizeRequest(http_request, url_builder)
        return self.__client.ProcessHttpRequest(http_request)
    def _RunMethod(self, method_config, request, global_params=None,
                   upload=None, upload_config=None, download=None):
        """Call this method with request."""
        if upload is not None and download is not None:
            # TODO(user): This just involves refactoring the logic
            # below into callbacks that we can pass around; in particular,
            # the order should be that the upload gets the initial request,
            # and then passes its reply to a download if one exists, and
            # then that goes to ProcessResponse and is returned.
            raise exceptions.NotYetImplementedError(
                'Cannot yet use both upload and download at once')
        http_request = self.PrepareHttpRequest(
            method_config, request, global_params, upload, upload_config,
            download)
        # TODO(user): Make num_retries customizable on Transfer
        # objects, and pass in self.__client.num_retries when initializing
        # an upload or download.
        if download is not None:
            # Downloads stream their own data; nothing further to process.
            download.InitializeDownload(http_request, client=self.client)
            return
        http_response = None
        if upload is not None:
            http_response = upload.InitializeUpload(
                http_request, client=self.client)
        if http_response is None:
            # Uploads may use a dedicated http object for byte transfer.
            http = self.__client.http
            if upload and upload.bytes_http:
                http = upload.bytes_http
            opts = {
                'retries': self.__client.num_retries,
                'max_retry_wait': self.__client.max_retry_wait,
            }
            if self.__client.check_response_func:
                opts['check_response_func'] = self.__client.check_response_func
            if self.__client.retry_func:
                opts['retry_func'] = self.__client.retry_func
            http_response = http_wrapper.MakeRequest(
                http, http_request, **opts)
        return self.ProcessHttpResponse(method_config, http_response, request)
    def ProcessHttpResponse(self, method_config, http_response, request=None):
        """Convert an HTTP response to the expected message type."""
        return self.__client.ProcessResponse(
            method_config,
            self.__ProcessHttpResponse(method_config, http_response, request))
| [
"vinvivo@users.noreply.github.com"
] | vinvivo@users.noreply.github.com |
b9eec62397c7f79c955d49c0b46f8fd41ced3917 | e7031386a884ae8ed568d8c219b4e5ef1bb06331 | /processor/migrations/0002_auto_20180913_1628.py | 45d0290b0812897f75230932fc52f99dfa1e0a97 | [] | no_license | ikbolpm/ultrashop-backend | a59c54b8c4d31e009704c3bf0e963085477092cf | 290fa0ecdad40ec817867a019bff2ce82f08d6fe | refs/heads/dev | 2022-11-30T21:49:17.965273 | 2020-09-24T10:16:12 | 2020-09-24T10:16:12 | 147,561,738 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 473 | py | # Generated by Django 2.1.1 on 2018-09-13 11:28
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration (2.1.1): alters the
    # ``integrated_graphics`` field on the ``processor`` model to be an
    # optional free-form CharField.
    dependencies = [
        ('processor', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='processor',
            name='integrated_graphics',
            # help_text is user-facing (Russian: 'For example: ...') and is
            # deliberately left untranslated.
            field=models.CharField(blank=True, help_text='К примеру: Intel® HD Graphics 630', max_length=255, null=True),
        ),
    ]
| [
"ikbolpm@gmail.com"
] | ikbolpm@gmail.com |
ef3543e6c47ae443673e0966415707b5c7a18f6c | 5f2103b1083b088aed3f3be145d01a770465c762 | /162. Find Peak Element.py | d52a25826c9f187eb02642488362b2153d4a0352 | [] | no_license | supersj/LeetCode | 5605c9bcb5ddcaa83625de2ad9e06c3485220019 | 690adf05774a1c500d6c9160223dab7bcc38ccc1 | refs/heads/master | 2021-01-17T17:23:39.585738 | 2017-02-27T15:08:42 | 2017-02-27T15:08:42 | 65,526,089 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 973 | py | # A peak element is an element that is greater than its neighbors.
#
# Given an input array where num[i] ≠ num[i+1], find a peak element and return its index.
#
# The array may contain multiple peaks, in that case return the index to any one of the peaks is fine.
#
# You may imagine that num[-1] = num[n] = -∞.
#
# For example, in array [1, 2, 3, 1], 3 is a peak element and your function should return the index number 2.
#
# click to show spoilers.
#
# Credits:
# Special thanks to @ts for adding this problem and creating all test cases.
#
# Subscribe to see which companies asked this question
class Solution(object):
    def findPeakElement(self, nums):
        """Return the index of any peak element of nums.

        A peak is strictly greater than its neighbors, with nums[-1] and
        nums[len(nums)] treated as -infinity, and nums[i] != nums[i+1].
        Scanning for the first descent is therefore sufficient: if
        nums[i] > nums[i + 1], index i is a peak; if the array is
        strictly increasing, the last element is a peak.

        The previous implementation compared nums[i] against
        nums[lastindex] where lastindex always equaled i, so the test was
        never true and it returned len(nums) -- an out-of-range index.

        :type nums: List[int]
        :rtype: int
        """
        for i in range(len(nums) - 1):
            if nums[i] > nums[i + 1]:
                return i
        # No descent found: the array is strictly increasing.
        return len(nums) - 1
"ml@ml.ml"
] | ml@ml.ml |
db9cd50f3aadca1b7aa8743f04df52bb8500e648 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_ragging.py | caeac9f2b2bb16f84f6163f1ccab0b4105891c5f | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 223 | py |
from xai.brain.wordbase.nouns._rag import _RAG
#calss header
class _RAGGING(_RAG, ):
def __init__(self,):
_RAG.__init__(self)
self.name = "RAGGING"
self.specie = 'nouns'
self.basic = "rag"
self.jsondata = {}
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
0732038e2ed3db4d41b13d62d60565ab5ed657eb | 4ee2ebef215cf879aafdfa44221f52d82775176a | /Inheritance/Lab/2-Single_Inheritance/project/dog.py | 56c8b0f2a4ed21f170821e2ff8e0bb9e5a15f259 | [] | no_license | Avstrian/SoftUni-Python-OOP | d2a9653863cba7bc095e647cd3f0561377f10f6d | 6789f005b311039fd46ef1f55f3eb6fa9313e5a6 | refs/heads/main | 2023-08-01T09:31:38.099842 | 2021-08-24T04:21:38 | 2021-08-24T04:21:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 107 | py | from project.animal import Animal
class Dog(Animal):
    """Animal subclass demonstrating single inheritance."""
    def bark(self):
        # Dog-specific behaviour added on top of the inherited Animal API.
        return "barking..."
"noreply@github.com"
] | Avstrian.noreply@github.com |
242fe20624e3d8037961ba02bdb58f7f241e70f3 | 487487954ce7b34b97a904be4082e5da5cfacec2 | /111 - [Dicionários] Dicionário ao Quadrado.py | 196171116d337553c2a4b528249b5ec6029ee31c | [] | no_license | rifatmondol/Python-Exercises | 62eae905793e4f747a51653fd823fe7aba49a3c3 | 5b5f3fa6bf34408ca9afa035604a79cf19559304 | refs/heads/master | 2022-01-19T02:07:10.940300 | 2018-12-26T18:07:17 | 2018-12-26T18:07:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 356 | py | #111 - Define a function which can print a dictionary where the keys are numbers between 1 and 20
# (both included) and the values are square of keys.
#Hints:
#Use dict[key]=value pattern to put entry into a dictionary.
#Use ** operator to get power of a number.
#Use range() for loops.
# Build the mapping key -> key**2 for keys 1..20 with a dict
# comprehension; insertion order (1..20) matches the loop version, so
# the printed repr is identical.
dic = {i: i ** 2 for i in range(1, 21)}
print(dic)
| [
"astrodelta14@gmail.com"
] | astrodelta14@gmail.com |
8d0ce9b1ef5e3b6c693faf33f8dbac726b2e6695 | d69361c62ba587b9666f39829afdc9ad8ed9e420 | /svm/svm.py | cc339ecafd0e26cb2994ff4a348ca8775d626ce9 | [] | no_license | ienoob/noob_ml | cd7dc184460344e1029631efd34ded0965e2bb48 | 6998d6e4e181e44ad3f0ca7cf971f16638d04ce4 | refs/heads/master | 2021-07-04T10:27:40.006224 | 2020-07-27T15:17:31 | 2020-07-27T15:17:31 | 129,582,132 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 432 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
svm 可以转化为二次规划问题,因此我们使用cvxopt, 优化计算包
hard margin
"""
import numpy as np
from cvxopt import matrix, solvers
# Example linear program solved with cvxopt (a warm-up before the SVM QP):
#     minimize    c^T x  =  2*x1 + x2
#     subject to  A x <= b
# NOTE: cvxopt matrices are column-major — each inner list below is one
# COLUMN of A, not a row.
A = matrix([[-1.0, -1.0, 0.0, 1.0], [1.0, -1.0, -1.0, -2.0]])
b = matrix([1.0, -2.0, 0.0, 4.0])
c = matrix([2.0, 1.0])
sol = solvers.lp(c,A,b)
print(sol['x'])
# Objective value computed manually (c^T x) and as reported by the solver;
# the two printed values should agree.
print(np.dot(sol['x'].T, c))
print(sol['primal objective'])
"jack.li@eisoo.com"
] | jack.li@eisoo.com |
be991b336ec80eefea679e6f1036e7c93a80836d | f4b60f5e49baf60976987946c20a8ebca4880602 | /lib/python2.7/site-packages/acimodel-1.3_2j-py2.7.egg/cobra/modelimpl/qos/dpppoldef.py | 07db39bf27f51318af1f2d27fe217aedf070436a | [] | no_license | cqbomb/qytang_aci | 12e508d54d9f774b537c33563762e694783d6ba8 | a7fab9d6cda7fadcc995672e55c0ef7e7187696e | refs/heads/master | 2022-12-21T13:30:05.240231 | 2018-12-04T01:46:53 | 2018-12-04T01:46:53 | 159,911,666 | 0 | 0 | null | 2022-12-07T23:53:02 | 2018-12-01T05:17:50 | Python | UTF-8 | Python | false | false | 13,954 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2016 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class DppPolDef(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = ClassMeta("cobra.model.qos.DppPolDef")
meta.moClassName = "qosDppPolDef"
meta.rnFormat = "qosdpppold-[%(dppPolDn)s]"
meta.category = MoCategory.REGULAR
meta.label = "Data Plane Policing policy Extra data"
meta.writeAccessMask = 0x81
meta.readAccessMask = 0x81
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = True
meta.childClasses.add("cobra.model.qos.RtToRemoteQosDppPolDef")
meta.childClasses.add("cobra.model.fault.Delegate")
meta.childNamesAndRnPrefix.append(("cobra.model.qos.RtToRemoteQosDppPolDef", "rtfvToRemoteQosDppPolDef-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))
meta.parentClasses.add("cobra.model.qos.DppPolDefCont")
meta.superClasses.add("cobra.model.qos.ADppPol")
meta.superClasses.add("cobra.model.fabric.ProtoPol")
meta.superClasses.add("cobra.model.naming.NamedObject")
meta.superClasses.add("cobra.model.pol.Obj")
meta.superClasses.add("cobra.model.pol.Def")
meta.superClasses.add("cobra.model.fabric.ProtoIfPol")
meta.superClasses.add("cobra.model.fabric.L2IfPol")
meta.rnPrefixes = [
('qosdpppold-', True),
]
prop = PropMeta("str", "adminSt", "adminSt", 23167, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 1
prop.defaultValueStr = "disabled"
prop._addConstant("disabled", "disabled", 1)
prop._addConstant("enabled", "enabled", 0)
meta.props.add("adminSt", prop)
prop = PropMeta("str", "be", "be", 22908, PropCategory.REGULAR)
prop.label = "Excessive Burst"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 549755813760)]
prop.defaultValue = 18446744073709551615
prop.defaultValueStr = "unspecified"
prop._addConstant("unspecified", "unspecified", 18446744073709551615)
meta.props.add("be", prop)
prop = PropMeta("str", "beUnit", "beUnit", 22910, PropCategory.REGULAR)
prop.label = "Excessive Burst Unit"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("giga", "giga-bytes/packets", 3)
prop._addConstant("kilo", "kilo-bytes/packets", 1)
prop._addConstant("mega", "mega-bytes/packets", 2)
prop._addConstant("msec", "milli-seconds", 4)
prop._addConstant("unspecified", "bytes/packets", 0)
prop._addConstant("usec", "micro-seconds", 5)
meta.props.add("beUnit", prop)
prop = PropMeta("str", "burst", "burst", 22724, PropCategory.REGULAR)
prop.label = "Burst"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 549755813760)]
prop.defaultValue = 18446744073709551615
prop.defaultValueStr = "unspecified"
prop._addConstant("unspecified", "unspecified", 18446744073709551615)
meta.props.add("burst", prop)
prop = PropMeta("str", "burstUnit", "burstUnit", 22726, PropCategory.REGULAR)
prop.label = "Burst Unit"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("giga", "giga-bytes/packets", 3)
prop._addConstant("kilo", "kilo-bytes/packets", 1)
prop._addConstant("mega", "mega-bytes/packets", 2)
prop._addConstant("msec", "milli-seconds", 4)
prop._addConstant("unspecified", "bytes/packets", 0)
prop._addConstant("usec", "micro-seconds", 5)
meta.props.add("burstUnit", prop)
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "conformAction", "conformAction", 22911, PropCategory.REGULAR)
prop.label = "Confirm Action"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 1
prop.defaultValueStr = "transmit"
prop._addConstant("drop", "drop", 2)
prop._addConstant("mark", "mark", 3)
prop._addConstant("transmit", "transmit", 1)
meta.props.add("conformAction", prop)
prop = PropMeta("str", "conformMarkCos", "conformMarkCos", 22917, PropCategory.REGULAR)
prop.label = "Conform mark cos"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 6)]
prop.defaultValue = 65535
prop.defaultValueStr = "unspecified"
prop._addConstant("unspecified", "unspecified", 65535)
meta.props.add("conformMarkCos", prop)
prop = PropMeta("str", "conformMarkDscp", "conformMarkDscp", 22914, PropCategory.REGULAR)
prop.label = "Conform mark dscp"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 63)]
prop.defaultValue = 65535
prop.defaultValueStr = "unspecified"
prop._addConstant("unspecified", "unspecified", 65535)
meta.props.add("conformMarkDscp", prop)
prop = PropMeta("str", "descr", "descr", 5579, PropCategory.REGULAR)
prop.label = "Description"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("descr", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "dppPolDn", "dppPolDn", 22727, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
meta.props.add("dppPolDn", prop)
prop = PropMeta("str", "exceedAction", "exceedAction", 22912, PropCategory.REGULAR)
prop.label = "Exceed Action"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 2
prop.defaultValueStr = "drop"
prop._addConstant("drop", "drop", 2)
prop._addConstant("mark", "mark", 3)
prop._addConstant("transmit", "transmit", 1)
meta.props.add("exceedAction", prop)
prop = PropMeta("str", "exceedMarkCos", "exceedMarkCos", 22918, PropCategory.REGULAR)
prop.label = "Exceed mark cos"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 6)]
prop.defaultValue = 65535
prop.defaultValueStr = "unspecified"
prop._addConstant("unspecified", "unspecified", 65535)
meta.props.add("exceedMarkCos", prop)
prop = PropMeta("str", "exceedMarkDscp", "exceedMarkDscp", 22915, PropCategory.REGULAR)
prop.label = "Exceed mark dscp"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 63)]
prop.defaultValue = 65535
prop.defaultValueStr = "unspecified"
prop._addConstant("unspecified", "unspecified", 65535)
meta.props.add("exceedMarkDscp", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "mode", "mode", 22906, PropCategory.REGULAR)
prop.label = "Bit or Packet"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 1
prop.defaultValueStr = "bit"
prop._addConstant("bit", "bit-policer", 1)
prop._addConstant("packet", "packet-policer", 2)
meta.props.add("mode", prop)
prop = PropMeta("str", "name", "name", 4991, PropCategory.REGULAR)
prop.label = "Name"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 64)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("name", prop)
prop = PropMeta("str", "ownerKey", "ownerKey", 15230, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("ownerKey", prop)
prop = PropMeta("str", "ownerTag", "ownerTag", 15231, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 64)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("ownerTag", prop)
prop = PropMeta("str", "pir", "pir", 22907, PropCategory.REGULAR)
prop.label = "Peak Rate"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 4398046510080)]
meta.props.add("pir", prop)
prop = PropMeta("str", "pirUnit", "pirUnit", 22909, PropCategory.REGULAR)
prop.label = "Peak Rate Unit"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("giga", "giga-bits/packets-per-second", 3)
prop._addConstant("kilo", "kilo-bits/packets-per-second", 1)
prop._addConstant("mega", "mega-bits/packets-per-second", 2)
prop._addConstant("unspecified", "bits/packets-per-second", 0)
meta.props.add("pirUnit", prop)
prop = PropMeta("str", "rate", "rate", 22723, PropCategory.REGULAR)
prop.label = "Rate"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 4398046510080)]
meta.props.add("rate", prop)
prop = PropMeta("str", "rateUnit", "rateUnit", 22725, PropCategory.REGULAR)
prop.label = "Rate Unit"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("giga", "giga-bits/packets-per-second", 3)
prop._addConstant("kilo", "kilo-bits/packets-per-second", 1)
prop._addConstant("mega", "mega-bits/packets-per-second", 2)
prop._addConstant("unspecified", "bits/packets-per-second", 0)
meta.props.add("rateUnit", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "type", "type", 22905, PropCategory.REGULAR)
prop.label = "1R2C or 2R3C Policer"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 1
prop.defaultValueStr = "1R2C"
prop._addConstant("1R2C", "1-rate-2-color", 1)
prop._addConstant("2R3C", "2-rate-3-color", 2)
meta.props.add("type", prop)
prop = PropMeta("str", "violateAction", "violateAction", 22913, PropCategory.REGULAR)
prop.label = "Violate Action"
prop.isConfig = True
prop.isAdmin = True
prop.defaultValue = 2
prop.defaultValueStr = "drop"
prop._addConstant("drop", "drop", 2)
prop._addConstant("mark", "mark", 3)
prop._addConstant("transmit", "transmit", 1)
meta.props.add("violateAction", prop)
prop = PropMeta("str", "violateMarkCos", "violateMarkCos", 22919, PropCategory.REGULAR)
prop.label = "Violate mark cos"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 6)]
prop.defaultValue = 65535
prop.defaultValueStr = "unspecified"
prop._addConstant("unspecified", "unspecified", 65535)
meta.props.add("violateMarkCos", prop)
prop = PropMeta("str", "violateMarkDscp", "violateMarkDscp", 22916, PropCategory.REGULAR)
prop.label = "Violate mark dscp"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 63)]
prop.defaultValue = 65535
prop.defaultValueStr = "unspecified"
prop._addConstant("unspecified", "unspecified", 65535)
meta.props.add("violateMarkDscp", prop)
meta.namingProps.append(getattr(meta.props, "dppPolDn"))
getattr(meta.props, "dppPolDn").needDelimiter = True
# Deployment Meta
meta.deploymentQuery = True
meta.deploymentType = "Path"
meta.deploymentQueryPaths.append(DeploymentPathMeta("L2IfPolToEthIf", "Interface", "cobra.model.l1.EthIf"))
def __init__(self, parentMoOrDn, dppPolDn, markDirty=True, **creationProps):
namingVals = [dppPolDn]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"collinsctk@qytang.com"
] | collinsctk@qytang.com |
72fa7896536b8eb0c0e4ac68879b393b1d8eb55e | c3432a248c8a7a43425c0fe1691557c0936ab380 | /21_06/21318_피아노 체조.py | 2ac7ef5cc09f9db10fd4aaedd83e2750eab32bd7 | [] | no_license | Parkyunhwan/BaekJoon | 13cb3af1f45212d7c418ecc4b927f42615b14a74 | 9a882c568f991c9fed3df45277f091626fcc2c94 | refs/heads/master | 2022-12-24T21:47:47.052967 | 2022-12-20T16:16:59 | 2022-12-20T16:16:59 | 232,264,447 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 363 | py | import sys
# BOJ 21318 (piano gymnastics): precompute a prefix count of "drops"
# (positions where a[i-1] > a[i]); each range query is then answered in O(1)
# as the difference of two prefix values.
readline = sys.stdin.readline
n = int(readline())
scores = [0] + [int(tok) for tok in readline().split()]
prefix_drops = [0] * (n + 1)
for idx in range(1, n + 1):
    # Carry the running count forward, adding one whenever a drop occurs.
    prefix_drops[idx] = prefix_drops[idx - 1] + (1 if scores[idx - 1] > scores[idx] else 0)
for _ in range(int(readline())):
    left, right = map(int, readline().split())
    print(prefix_drops[right] - prefix_drops[left])
| [
"pyh8618@gmail.com"
] | pyh8618@gmail.com |
70cef27fcfb994c1a1bfac85041aa6da8f0723c0 | b40a25499e22c5d190a03967d894767274306cdf | /python1809/django/day01/day01/settings.py | ea10f988e583b48c9b90cabdada0da0d5b60cb1c | [] | no_license | 845788173/copy | d47a9f8250e4bdf914acac16881b54f2afefdd9a | 574f147713112afe2233074677ae0c441c2217f5 | refs/heads/master | 2022-12-18T14:20:41.998343 | 2018-12-08T11:01:12 | 2018-12-08T11:01:12 | 160,929,213 | 0 | 0 | null | 2022-12-08T02:27:51 | 2018-12-08T10:41:58 | JavaScript | UTF-8 | Python | false | false | 3,288 | py | """
Django settings for day01 project.
Generated by 'django-admin startproject' using Django 1.11.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '#du75q1&m-u0yd2v1ai$1!$+%s6hxiufdzlokh#-2bh(#wp0pb'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ["*"]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'App',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'day01.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [ os.path.join(BASE_DIR,'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'day01.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'python1804django',
'USER':'root',
'PASSWORD':'7986805',
'PORT':'3306',
'HOST':'localhost'
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
| [
"845788173@qq.com"
] | 845788173@qq.com |
8a872f8344458b89f8e88fb66eedacc0c973bc03 | 96858f02c03fc60a8854ea62ee2b0d0faff52e80 | /mid/auth_/admin.py | 829b7dd3f3d8a4bda9897d6a5defdad313a3567c | [] | no_license | dorenyerbolov/django-mid | e9fe0d8dacef806d4f872f647c79f93c82690cf8 | ffdf076840c3295e6b85cbb5c9cfebd33e872bf1 | refs/heads/main | 2023-03-23T09:50:22.391463 | 2021-03-20T06:24:48 | 2021-03-20T06:24:48 | 349,621,352 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 97 | py | from django.contrib import admin
from auth_.models import Profile
admin.site.register(Profile)
| [
"doren.yerbolov@gmail.com"
] | doren.yerbolov@gmail.com |
b84e4f89e4a5a239ea8d712e0f74b8bef791e17c | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/bob/14a30f4e97714ddc8d4a087e3b3fe203.py | 2df8a4dced86b19360b6f8f44dbe4d2408d9e2e4 | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 1,677 | py | #
# Skeleton file for the Python "Bob" exercise.
#
def hey(what):
    """Respond to ``what`` the way lackadaisical teenager Bob would.

    The original implementation only recognized a fixed list of exact
    example strings; this generalizes it to the rules those examples encode,
    checked in precedence order:

      - silence (empty or whitespace-only input) -> 'Fine. Be that way!'
      - shouting (all cased characters uppercase) -> 'Whoa, chill out!'
        (a shouted question still counts as shouting)
      - a question (ends with '?')               -> 'Sure.'
      - anything else                            -> 'Whatever.'
    """
    # Guard against None and ignore surrounding whitespace.
    message = (what or '').strip()
    if not message:
        return 'Fine. Be that way!'
    # str.isupper() is True only when the text contains at least one cased
    # character and every cased character is uppercase, so digit- or
    # punctuation-only strings (e.g. '1, 2, 3') are not "shouting".
    if message.isupper():
        return 'Whoa, chill out!'
    if message.endswith('?'):
        return 'Sure.'
    return 'Whatever.'
| [
"rrc@berkeley.edu"
] | rrc@berkeley.edu |
5366df88285235633e44287a3950189650d383b1 | 2d9cea7839a900921850f2af1ccafc623b9d53b9 | /websecurityscanner/google/cloud/websecurityscanner_v1alpha/types.py | 594571c6551b0c0557820428c4ec53212cbc344e | [
"Apache-2.0"
] | permissive | smottt/google-cloud-python | cb28e8d59cc36932aa89e838412fe234f6c4498c | 2982dd3d565923509bab210eb45b800ce464fe8a | refs/heads/master | 2020-03-31T21:12:02.209919 | 2018-10-10T18:04:44 | 2018-10-10T18:04:44 | 152,571,541 | 0 | 1 | Apache-2.0 | 2018-10-11T10:10:47 | 2018-10-11T10:10:47 | null | UTF-8 | Python | false | false | 2,175 | py | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import sys
from google.api import http_pb2
from google.protobuf import descriptor_pb2
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
from google.protobuf import timestamp_pb2
from google.api_core.protobuf_helpers import get_messages
from google.cloud.websecurityscanner_v1alpha.proto import crawled_url_pb2
from google.cloud.websecurityscanner_v1alpha.proto import finding_addon_pb2
from google.cloud.websecurityscanner_v1alpha.proto import finding_pb2
from google.cloud.websecurityscanner_v1alpha.proto import (
finding_type_stats_pb2)
from google.cloud.websecurityscanner_v1alpha.proto import scan_config_pb2
from google.cloud.websecurityscanner_v1alpha.proto import scan_run_pb2
from google.cloud.websecurityscanner_v1alpha.proto import (
web_security_scanner_pb2)
_shared_modules = [
    http_pb2,
    descriptor_pb2,
    empty_pb2,
    field_mask_pb2,
    timestamp_pb2,
]
_local_modules = [
    crawled_url_pb2,
    finding_addon_pb2,
    finding_pb2,
    finding_type_stats_pb2,
    scan_config_pb2,
    scan_run_pb2,
    web_security_scanner_pb2,
]
# Re-export every protobuf message class at this module's top level and
# collect the exported names for __all__.
names = []
_this_module = sys.modules[__name__]
for _mod in _shared_modules:
    for _name, _message in get_messages(_mod).items():
        setattr(_this_module, _name, _message)
        names.append(_name)
for _mod in _local_modules:
    for _name, _message in get_messages(_mod).items():
        # Report this package as the message types' home module.
        _message.__module__ = 'google.cloud.websecurityscanner_v1alpha.types'
        setattr(_this_module, _name, _message)
        names.append(_name)
__all__ = tuple(sorted(names))
| [
"noreply@github.com"
] | smottt.noreply@github.com |
691659c96bf52e69564c6ecb6e45aef25c446dad | ab0deb25919bcc71c1314a817097429063009364 | /tests/conftest.py | 5a05b92c97189d95238ceb1c91c9ae4ff89cb1b0 | [
"MIT"
] | permissive | spiritEcosse/panda | babecf3b4c72b3ddf78d967336547f1ce8728f40 | f81284bfd331a3eb24ce6e5a8adbaf9922fa07e2 | refs/heads/master | 2020-07-29T09:45:33.123744 | 2020-02-06T12:24:31 | 2020-02-06T12:24:31 | 201,935,610 | 0 | 0 | MIT | 2019-08-12T14:09:28 | 2019-08-12T13:26:57 | Python | UTF-8 | Python | false | false | 1,461 | py | import os
import warnings
import uuid
import django
location = lambda x: os.path.join(
os.path.dirname(os.path.realpath(__file__)), x)
def pytest_addoption(parser):
parser.addoption(
'--deprecation', choices=['strict', 'log', 'none'], default='log')
def pytest_configure(config):
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tests.settings')
# media = 'media_{}'.format(uuid.uuid4())
# os.environ.setdefault('MEDIA', media)
# media_root = os.path.join(location('public'), media)
# os.mkdir(media_root)
# os.mkdir(os.path.join(media_root, "images"))
os.environ.setdefault('DJANGO_READ_DOT_ENV_FILE', '1')
os.environ.setdefault(
'DATABASE_URL', 'postgres://SUQWOetkbOGJpuXAxliKpZmnyywHdeqm:YZHXsIPZyoBwOUTwAMCLWcJKhYwpwVoeiAhjYMSIqZCBCjAvDBTXUArvSWuhxkgn@postgres:5432/test_{}'.format(uuid.uuid4())
)
deprecation = config.getoption('deprecation')
if deprecation == 'strict':
warnings.simplefilter('error', DeprecationWarning)
warnings.simplefilter('error', PendingDeprecationWarning)
warnings.simplefilter('error', RuntimeWarning)
if deprecation == 'log':
warnings.simplefilter('always', DeprecationWarning)
warnings.simplefilter('always', PendingDeprecationWarning)
warnings.simplefilter('always', RuntimeWarning)
elif deprecation == 'none':
# Deprecation warnings are ignored by default
pass
django.setup()
| [
"shevchenkcoigor@gmail.com"
] | shevchenkcoigor@gmail.com |
2950e13658a7f32171eb003a46a7e20312cf93e7 | c85ec51b920350e095b6fc67435a490352cef1f4 | /goods/migrations/0011_auto_20171123_1905.py | c64ad543bd44f19a496033c50db30be361e4f006 | [
"Apache-2.0"
] | permissive | huachao2017/goodsdl | 195dbc959d424248982b61f2fe2c22dbc4347746 | 3616d53b90696a97a5d56a064e2a14d484b821d7 | refs/heads/master | 2021-06-11T01:55:16.235507 | 2019-09-24T07:49:08 | 2019-09-24T07:49:08 | 109,931,837 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 720 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-23 19:05
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('goods', '0010_actionlog_traintype'),
]
operations = [
migrations.AlterField(
model_name='actionlog',
name='action',
field=models.CharField(choices=[('BT', 'Begin Train'), ('ST', 'Stop Train'), ('EG', 'Export Graph'), ('TT', 'Test Train')], max_length=2),
),
migrations.AlterField(
model_name='image',
name='deviceid',
field=models.CharField(default='0', max_length=20),
),
]
| [
"21006735@qq.com"
] | 21006735@qq.com |
99fa6952d27fe8543059eb0ba864ec39c86bcbd7 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_3094.py | fbce41b739b90bf7443909681aa9ce1029fc56ab | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 167 | py | # How can I get 'urlpatterns = __import__(<string_name>)' to work like a normal import statement?
urlpatterns = __import__(project_urls).whateversubmodule.urlpatterns
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
284762867cc565f2e98d0cd20d8e708a667b0cbd | 8dfb45ef6c142bb25183339e6a13e06a14a86dbe | /kansha/comment/comp.py | 71e725244c85f4652c5b1310caff84edf4185a08 | [] | no_license | blugand/kansha | 04be2581fa0bf9d77898bc26f668e2febb78dac4 | 227d834596bd66b03c2fc78f7f65dee04116668a | refs/heads/master | 2021-01-15T10:35:52.144198 | 2015-11-24T09:29:21 | 2015-11-24T09:29:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,094 | py | # -*- coding:utf-8 -*-
#--
# Copyright (c) 2012-2014 Net-ng.
# All rights reserved.
#
# This software is licensed under the BSD License, as described in
# the file LICENSE.txt, which you should have received as part of
# this distribution.
#--
import datetime
from nagare import component, security
from nagare.database import session
from .models import DataComment
from ..user import usermanager
from ..flow import comp as flow
from .. import notifications, validator
class Comment(flow.FlowElement):
    """A single comment attached to a card."""

    def __init__(self, data):
        """Initialization

        In:
            - ``data`` -- the comment object from the database
        """
        self.db_id = data.id
        self.text = data.comment
        self.creation_date = data.creation_date
        app_user = usermanager.UserManager.get_app_user(data.author.username,
                                                        data=data.author)
        self.author = component.Component(app_user)
        self.comment_label = component.Component(Commentlabel(self))
        # Answering the label switches it into edit mode.
        self.comment_label.on_answer(lambda v: self.comment_label.call(model='edit'))

    def edit_comment(self):
        """Put the comment label into edit mode."""
        self.comment_label.call(model='edit')

    def is_author(self, user):
        """Return True when ``user`` wrote this comment."""
        return user.username == self.author().username

    def set_comment(self, text):
        """Persist a cleaned, non-empty ``text`` as the new comment body.

        ``None`` and whitespace-only values are ignored.
        """
        if text is None:
            return
        body = text.strip()
        if body:
            self.data.comment = validator.clean_text(body)

    @property
    def data(self):
        """Fetch the comment object from the database."""
        return DataComment.get(self.db_id)
class Commentlabel(object):
    """Inline-editable label presenting a comment's text."""

    def __init__(self, parent):
        """Initialization

        In:
            - ``parent`` -- the owning comment component
        """
        self.parent = parent
        self.text = parent.text or u''

    def change_text(self, text):
        """Replace the wrapped comment's text.

        In:
            - ``text`` -- the new comment body (``None`` is ignored)

        Return:
            - the cleaned text actually stored (empty string when input
              was whitespace-only)
        """
        if text is None:
            return
        body = text.strip()
        if not body:
            return body
        body = validator.clean_text(body)
        self.text = body
        self.parent.set_comment(body)
        return body

    def is_author(self, user):
        """Delegate authorship checks to the parent comment."""
        return self.parent.is_author(user)
class Comments(flow.FlowSource):
    """Container component for the comments of a card."""

    def __init__(self, parent, data_comments=()):
        """Initialization

        In:
            - ``parent`` -- the parent card
            - ``data_comments`` -- the card's comment records
        """
        self.parent = parent
        self.comments = [self._create_comment_component(record)
                         for record in data_comments]

    @property
    def flow_elements(self):
        """The comment components of this card."""
        return self.comments

    def _create_comment_component(self, data_comment):
        # A comment component answers itself to request its own deletion.
        return component.Component(Comment(data_comment)).on_answer(self.delete)

    def add(self, v):
        """Create a new comment on the card from the text ``v``.

        ``None`` and whitespace-only input are ignored.
        """
        security.check_permissions('comment', self.parent)
        if v is None:
            return
        stripped = v.strip()
        if not stripped:
            return
        cleaned = validator.clean_text(stripped)
        author = security.get_user()
        record = DataComment(comment=cleaned.strip(),
                             card_id=self.parent.data.id,
                             author=author.data,
                             creation_date=datetime.datetime.utcnow())
        session.add(record)
        session.flush()
        history = {'comment': cleaned.strip(), 'card': self.parent.title().text}
        notifications.add_history(self.parent.column.board.data, self.parent.data,
                                  security.get_user().data, u'card_add_comment',
                                  history)
        # Newest comment goes first.
        self.comments.insert(0, self._create_comment_component(record))

    def delete(self, comp):
        """Remove ``comp`` from the list and delete its database record."""
        self.comments.remove(comp)
        DataComment.get(comp().db_id).delete()
        session.flush()
"romuald.texier-marcade@net-ng.com"
] | romuald.texier-marcade@net-ng.com |
35af09717e2aaf8be07a673fd6762eae2a90d225 | f9505e7e8d9428bea5db3a7a1d41fbae25f0d803 | /school/south_migrations/0008_auto__chg_field_deal_heading__chg_field_event_heading.py | d302a0155431fbfe3e8520eea8673f28ae95bf4c | [] | no_license | SIMSON20/main | ec2d046c0ccbe56048317e5ff6a0254a21383705 | 1f0086547123a9ccc4a2c60a5faa49cb2637ec34 | refs/heads/master | 2021-05-30T11:33:21.356930 | 2015-09-29T23:29:04 | 2015-09-29T23:29:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,034 | py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Deal.heading'
db.alter_column('school_deal', 'heading', self.gf('django.db.models.fields.CharField')(null=True, max_length=200))
# Changing field 'Event.heading'
db.alter_column('school_event', 'heading', self.gf('django.db.models.fields.CharField')(null=True, max_length=200))
def backwards(self, orm):
# Changing field 'Deal.heading'
db.alter_column('school_deal', 'heading', self.gf('django.db.models.fields.CharField')(null=True, max_length=100))
# Changing field 'Event.heading'
db.alter_column('school_event', 'heading', self.gf('django.db.models.fields.CharField')(null=True, max_length=100))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'unique': 'True'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True', 'symmetrical': 'False'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'object_name': 'Permission', 'unique_together': "(('content_type', 'codename'),)"},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'user_set'", 'symmetrical': 'False', 'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'user_set'", 'symmetrical': 'False', 'to': "orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '30', 'unique': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'db_table': "'django_content_type'", 'object_name': 'ContentType', 'unique_together': "(('app_label', 'model'),)"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'main.city': {
'Meta': {'object_name': 'City'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.CharField', [], {'null': 'True', 'blank': 'True', 'max_length': '100'}),
'multi_university': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'state': ('localflavor.us.models.USStateField', [], {'max_length': '2'})
},
'property.amenity': {
'Meta': {'ordering': "['amenity']", 'object_name': 'Amenity'},
'amenity': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'null': 'True', 'blank': 'True', 'max_length': '100'}),
'link': ('django.db.models.fields.CharField', [], {'null': 'True', 'blank': 'True', 'max_length': '100'}),
'special': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'null': 'True', 'blank': 'True', 'max_length': '3'})
},
'property.package': {
'Meta': {'ordering': "['-order']", 'object_name': 'Package'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'price': ('django.db.models.fields.IntegerField', [], {}),
'services': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['property.Service']", 'null': 'True', 'blank': 'True', 'symmetrical': 'False'}),
'similar_property_strength': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'property.property': {
'Meta': {'ordering': "['-top_list', '-sponsored', '-package__order', 'id']", 'object_name': 'Property'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'addr': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'amenities': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['property.Amenity']", 'null': 'True', 'blank': 'True', 'symmetrical': 'False'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'contact_email': ('django.db.models.fields.EmailField', [], {'null': 'True', 'blank': 'True', 'max_length': '75'}),
'contact_first_name': ('django.db.models.fields.CharField', [], {'null': 'True', 'blank': 'True', 'max_length': '50'}),
'contact_last_name': ('django.db.models.fields.CharField', [], {'null': 'True', 'blank': 'True', 'max_length': '50'}),
'contact_phone': ('localflavor.us.models.PhoneNumberField', [], {'null': 'True', 'blank': 'True', 'max_length': '20'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'fee_desc': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'internal': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'lat': ('django.db.models.fields.DecimalField', [], {'blank': 'True', 'null': 'True', 'max_digits': '12', 'decimal_places': '6'}),
'lease_term': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['property.PropertyLeaseTerm']", 'null': 'True', 'blank': 'True', 'symmetrical': 'False'}),
'lease_type': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['property.PropertyLeaseType']", 'null': 'True', 'blank': 'True', 'symmetrical': 'False'}),
'long': ('django.db.models.fields.DecimalField', [], {'blank': 'True', 'null': 'True', 'max_digits': '12', 'decimal_places': '6'}),
'neighborhood': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['school.Neighborhood']", 'null': 'True', 'blank': 'True'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['property.Package']", 'null': 'True', 'blank': 'True'}),
'place_id': ('django.db.models.fields.CharField', [], {'null': 'True', 'blank': 'True', 'max_length': '30'}),
'rating': ('django.db.models.fields.DecimalField', [], {'blank': 'True', 'null': 'True', 'max_digits': '3', 'decimal_places': '1'}),
'school': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['school.School']"}),
'services': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['property.Service']", 'null': 'True', 'blank': 'True', 'symmetrical': 'False'}),
'special': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'sponsored': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'state': ('localflavor.us.models.USStateField', [], {'max_length': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'top_list': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '20', 'default': "'APT'"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'null': 'True', 'to': "orm['auth.User']"}),
'zip_cd': ('django.db.models.fields.CharField', [], {'null': 'True', 'blank': 'True', 'max_length': '15'})
},
'property.propertyleaseterm': {
'Meta': {'ordering': "['order']", 'object_name': 'PropertyLeaseTerm'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lease_term': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'lease_term_short': ('django.db.models.fields.CharField', [], {'null': 'True', 'blank': 'True', 'max_length': '5'}),
'order': ('django.db.models.fields.IntegerField', [], {})
},
'property.propertyleasetype': {
'Meta': {'ordering': "['lease_type']", 'object_name': 'PropertyLeaseType'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lease_type': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
'property.service': {
'Meta': {'object_name': 'Service'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'price': ('django.db.models.fields.IntegerField', [], {}),
'service_type': ('django.db.models.fields.CharField', [], {'max_length': '1', 'default': "'R'"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'school.deal': {
'Meta': {'ordering': "['-sponsored', '-id']", 'object_name': 'Deal'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'create_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'heading': ('django.db.models.fields.CharField', [], {'null': 'True', 'blank': 'True', 'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'null': 'True', 'blank': 'True', 'max_length': '100'}),
'property': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['property.Property']"}),
'school': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['school.School']"}),
'sponsored': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'school.event': {
'Meta': {'ordering': "['-sponsored', '-id']", 'object_name': 'Event'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'create_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'heading': ('django.db.models.fields.CharField', [], {'null': 'True', 'blank': 'True', 'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'null': 'True', 'blank': 'True', 'max_length': '100'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'property': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['property.Property']", 'null': 'True', 'blank': 'True'}),
'school': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['school.School']"}),
'sponsored': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'school.neighborhood': {
'Meta': {'object_name': 'Neighborhood'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lat': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '6'}),
'long': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '6'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'school': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['school.School']"})
},
'school.school': {
'Meta': {'object_name': 'School'},
'city': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.City']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'null': 'True', 'max_length': '100'}),
'lat': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '6'}),
'link': ('django.db.models.fields.CharField', [], {'null': 'True', 'blank': 'True', 'max_length': '100'}),
'long': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '6'}),
'mascot': ('django.db.models.fields.CharField', [], {'null': 'True', 'blank': 'True', 'max_length': '50'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['school'] | [
"awwester@gmail.com"
] | awwester@gmail.com |
e05551940606357497877b7b37864e74b5955d81 | 1577e1cf4e89584a125cffb855ca50a9654c6d55 | /pyobjc/pyobjc/pyobjc-framework-Cocoa-2.5.1/PyObjCTest/test_nsobject_additions.py | 440f0068238912d07f34ae5b24bf1f43a761262d | [
"MIT"
] | permissive | apple-open-source/macos | a4188b5c2ef113d90281d03cd1b14e5ee52ebffb | 2d2b15f13487673de33297e49f00ef94af743a9a | refs/heads/master | 2023-08-01T11:03:26.870408 | 2023-03-27T00:00:00 | 2023-03-27T00:00:00 | 180,595,052 | 124 | 24 | null | 2022-12-27T14:54:09 | 2019-04-10T14:06:23 | null | UTF-8 | Python | false | false | 15,818 | py | from PyObjCTools.TestSupport import *
import Foundation
import sys
if sys.version_info[0] == 2:
from StringIO import StringIO
def _str(v): return v
else:
from io import StringIO
def _str(v):
if isinstance(v, str):
return v
return v.decode('ascii')
class TheadingHelperTestHelper (Foundation.NSObject):
def init(self):
self = super(TheadingHelperTestHelper, self).init()
if self is None:
return None
self.calls = []
return self
def performSelector_onThread_withObject_waitUntilDone_(self,
selector, thread, object, wait):
assert isinstance(selector, bytes)
assert isinstance(thread, Foundation.NSThread)
assert isinstance(wait, bool)
self.calls.append((selector, thread, object, wait))
getattr(self, _str(selector))(object)
def performSelector_onThread_withObject_waitUntilDone_modes_(self,
selector, thread, object, wait, modes):
assert isinstance(selector, bytes)
assert isinstance(thread, Foundation.NSThread)
assert isinstance(wait, bool)
self.calls.append((selector, thread, object, wait, modes))
getattr(self, _str(selector))(object)
def performSelector_withObject_afterDelay_(self,
selector, object, delay):
assert isinstance(selector, bytes)
self.calls.append((selector, object, delay))
getattr(self, _str(selector))(object)
def performSelector_withObject_afterDelay_inModes_(self,
selector, object, delay, modes):
assert isinstance(selector, bytes)
self.calls.append((selector, object, delay, modes))
getattr(self, _str(selector))(object)
def performSelectorInBackground_withObject_(self,
selector, object):
self.calls.append((selector, object))
getattr(self, _str(selector))(object)
def performSelectorOnMainThread_withObject_waitUntilDone_(self,
selector, object, wait):
self.calls.append((selector, object, wait))
getattr(self, _str(selector))(object)
def performSelectorOnMainThread_withObject_waitUntilDone_modes_(self,
selector, object, wait, modes):
self.calls.append((selector, object, wait, modes))
getattr(self, _str(selector))(object)
def sel1_(self, arg):
pass
def sel2_(self, arg):
return arg * 2
def sel3_(self, arg):
return 1/arg
class TestThreadingHelpers (TestCase):
def testAsyncOnThreadNoResult(self):
# pyobjc_performSelector_onThread_withObject_waitUntilDone_
obj = TheadingHelperTestHelper.alloc().init()
thr = Foundation.NSThread.mainThread()
obj.pyobjc_performSelector_onThread_withObject_waitUntilDone_(
b'sel1:', thr, 1, False),
obj.pyobjc_performSelector_onThread_withObject_waitUntilDone_(
b'sel2:', thr, 2, True)
obj.pyobjc_performSelector_onThread_withObject_waitUntilDone_(
b'isEqual:', thr, obj, True)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThread:', thr, (b'sel1:', 1), False),
(b'_pyobjc_performOnThread:', thr, (b'sel2:', 2), True),
(b'_pyobjc_performOnThread:', thr, (b'isEqual:', obj), True),
])
# Raise an exception
orig_stderr = sys.stderr
sys.stderr = StringIO()
try:
obj.calls[:] = []
obj.pyobjc_performSelector_onThread_withObject_waitUntilDone_(
b'sel3:', thr, 0, False)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThread:', thr, (b'sel3:', 0), False),
])
self.assertIn('Traceback', sys.stderr.getvalue())
finally:
sys.stderr = orig_stderr
def testAsyncOnThreadNoResultModes(self):
# pyobjc_performSelector_onThread_withObject_waitUntilDone_modes_
obj = TheadingHelperTestHelper.alloc().init()
thr = Foundation.NSThread.mainThread()
obj.pyobjc_performSelector_onThread_withObject_waitUntilDone_modes_(
b'sel1:', thr, 1, False, 0),
obj.pyobjc_performSelector_onThread_withObject_waitUntilDone_modes_(
b'sel2:', thr, 2, True, 1)
obj.pyobjc_performSelector_onThread_withObject_waitUntilDone_modes_(
b'isEqual:', thr, obj, True, 2)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThread:', thr, (b'sel1:', 1), False, 0),
(b'_pyobjc_performOnThread:', thr, (b'sel2:', 2), True, 1),
(b'_pyobjc_performOnThread:', thr, (b'isEqual:', obj), True, 2),
])
# Raise an exception
orig_stderr = sys.stderr
sys.stderr = StringIO()
try:
obj.calls[:] = []
obj.pyobjc_performSelector_onThread_withObject_waitUntilDone_modes_(
b'sel3:', thr, 0, False, 4)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThread:', thr, (b'sel3:', 0), False, 4),
])
self.assertIn('Traceback', sys.stderr.getvalue())
finally:
sys.stderr = orig_stderr
def testAsyncWithDelayNoResult(self):
# pyobjc_performSelector_withObject_afterDelay_
obj = TheadingHelperTestHelper.alloc().init()
obj.pyobjc_performSelector_withObject_afterDelay_(b'sel1:', 1, 1.0)
obj.pyobjc_performSelector_withObject_afterDelay_(b'sel2:', 2, 4.5)
obj.pyobjc_performSelector_withObject_afterDelay_(b'isEqual:', obj, 8.5)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThread:', (b'sel1:', 1), 1.0),
(b'_pyobjc_performOnThread:', (b'sel2:', 2), 4.5),
(b'_pyobjc_performOnThread:', (b'isEqual:', obj), 8.5),
])
# Raise an exception
orig_stderr = sys.stderr
sys.stderr = StringIO()
try:
obj.calls[:] = []
obj.pyobjc_performSelector_withObject_afterDelay_(
b'sel3:', 0, 0.5)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThread:', (b'sel3:', 0), 0.5),
])
self.assertIn('Traceback', sys.stderr.getvalue())
finally:
sys.stderr = orig_stderr
def testAsyncWithDelayNoResultModes(self):
# pyobjc_performSelector_withObject_afterDelay_inModes_
obj = TheadingHelperTestHelper.alloc().init()
obj.pyobjc_performSelector_withObject_afterDelay_inModes_(b'sel1:', 1, 1.0, 0)
obj.pyobjc_performSelector_withObject_afterDelay_inModes_(b'sel2:', 2, 4.5, 1)
obj.pyobjc_performSelector_withObject_afterDelay_inModes_(b'isEqual:', obj, 8.5, 2)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThread:', (b'sel1:', 1), 1.0, 0),
(b'_pyobjc_performOnThread:', (b'sel2:', 2), 4.5, 1),
(b'_pyobjc_performOnThread:', (b'isEqual:', obj), 8.5, 2),
])
# Raise an exception
orig_stderr = sys.stderr
sys.stderr = StringIO()
try:
obj.calls[:] = []
obj.pyobjc_performSelector_withObject_afterDelay_inModes_(
b'sel3:', 0, 0.5, 3)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThread:', (b'sel3:', 0), 0.5, 3),
])
self.assertIn('Traceback', sys.stderr.getvalue())
finally:
sys.stderr = orig_stderr
def testInBGNoResult(self):
# pyobjc_performSelectorInBackground_withObject_
obj = TheadingHelperTestHelper.alloc().init()
obj.pyobjc_performSelectorInBackground_withObject_(b'sel1:', 1)
obj.pyobjc_performSelectorInBackground_withObject_(b'sel2:', 2)
obj.pyobjc_performSelectorInBackground_withObject_(b'isEqual:', obj)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThread:', (b'sel1:', 1)),
(b'_pyobjc_performOnThread:', (b'sel2:', 2)),
(b'_pyobjc_performOnThread:', (b'isEqual:', obj)),
])
# Raise an exception
orig_stderr = sys.stderr
sys.stderr = StringIO()
try:
obj.calls[:] = []
obj.pyobjc_performSelectorInBackground_withObject_(
b'sel3:', 0)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThread:', (b'sel3:', 0)),
])
self.assertIn('Traceback', sys.stderr.getvalue())
finally:
sys.stderr = orig_stderr
def testOnMtNoResultWait(self):
# pyobjc_performSelectorInBackground_withObject_waitUntilDone_
obj = TheadingHelperTestHelper.alloc().init()
obj.pyobjc_performSelectorOnMainThread_withObject_waitUntilDone_(b'sel1:', 1, True)
obj.pyobjc_performSelectorOnMainThread_withObject_waitUntilDone_(b'sel2:', 2, False)
obj.pyobjc_performSelectorOnMainThread_withObject_waitUntilDone_(b'isEqual:', obj, True)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThread:', (b'sel1:', 1), True),
(b'_pyobjc_performOnThread:', (b'sel2:', 2), False),
(b'_pyobjc_performOnThread:', (b'isEqual:', obj), True),
])
# Raise an exception
orig_stderr = sys.stderr
sys.stderr = StringIO()
try:
obj.calls[:] = []
obj.pyobjc_performSelectorOnMainThread_withObject_waitUntilDone_(
b'sel3:', 0, False)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThread:', (b'sel3:', 0), False),
])
self.assertIn('Traceback', sys.stderr.getvalue())
finally:
sys.stderr = orig_stderr
def testOnMtNoResultWaitModes(self):
# pyobjc_performSelectorInBackground_withObject_waitUntilDone_modes_
obj = TheadingHelperTestHelper.alloc().init()
obj.pyobjc_performSelectorOnMainThread_withObject_waitUntilDone_modes_(b'sel1:', 1, True, 4)
obj.pyobjc_performSelectorOnMainThread_withObject_waitUntilDone_modes_(b'sel2:', 2, False, 5)
obj.pyobjc_performSelectorOnMainThread_withObject_waitUntilDone_modes_(b'isEqual:', obj, True, 6)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThread:', (b'sel1:', 1), True, 4),
(b'_pyobjc_performOnThread:', (b'sel2:', 2), False, 5),
(b'_pyobjc_performOnThread:', (b'isEqual:', obj), True, 6),
])
# Raise an exception
orig_stderr = sys.stderr
sys.stderr = StringIO()
try:
obj.calls[:] = []
obj.pyobjc_performSelectorOnMainThread_withObject_waitUntilDone_modes_(
b'sel3:', 0, False, 7)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThread:', (b'sel3:', 0), False, 7),
])
self.assertIn('Traceback', sys.stderr.getvalue())
finally:
sys.stderr = orig_stderr
def testOnMtWithResult(self):
# pyobjc_performSelectorOnMainThread_withObject_
obj = TheadingHelperTestHelper.alloc().init()
r = obj.pyobjc_performSelectorOnMainThread_withObject_('sel2:', 3)
self.assertEqual(r, 6)
r = obj.pyobjc_performSelectorOnMainThread_withObject_('sel3:', 2.0)
self.assertEqual(r, 0.5)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThreadWithResult:', ('sel2:', 3, [(True, 6)]), True),
(b'_pyobjc_performOnThreadWithResult:', ('sel3:', 2.0, [(True, 0.5)]), True),
])
# Raise an exception
orig_stderr = sys.stderr
sys.stderr = StringIO()
try:
obj.calls[:] = []
self.assertRaises(ZeroDivisionError, obj.pyobjc_performSelectorOnMainThread_withObject_,
b'sel3:', 0)
self.assertEqual(len(obj.calls), 1)
self.assertEqual(obj.calls[0][0], b'_pyobjc_performOnThreadWithResult:')
self.assertEqual(obj.calls[0][1][-1][0][0], False)
self.assertNotIn('Traceback', sys.stderr.getvalue())
finally:
sys.stderr = orig_stderr
def testOnMtWithResultModes(self):
obj = TheadingHelperTestHelper.alloc().init()
r = obj.pyobjc_performSelectorOnMainThread_withObject_modes_('sel2:', 3, 1)
self.assertEqual(r, 6)
r = obj.pyobjc_performSelectorOnMainThread_withObject_modes_('sel3:', 2.0, 2)
self.assertEqual(r, 0.5)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThreadWithResult:', ('sel2:', 3, [(True, 6)]), True, 1),
(b'_pyobjc_performOnThreadWithResult:', ('sel3:', 2.0, [(True, 0.5)]), True, 2),
])
# Raise an exception
orig_stderr = sys.stderr
sys.stderr = StringIO()
try:
obj.calls[:] = []
self.assertRaises(ZeroDivisionError, obj.pyobjc_performSelectorOnMainThread_withObject_modes_,
b'sel3:', 0, 3)
self.assertEqual(len(obj.calls), 1)
self.assertEqual(obj.calls[0][0], b'_pyobjc_performOnThreadWithResult:')
self.assertEqual(obj.calls[0][1][-1][0][0], False)
self.assertNotIn('Traceback', sys.stderr.getvalue())
finally:
sys.stderr = orig_stderr
def testOnThreadWithResult(self):
obj = TheadingHelperTestHelper.alloc().init()
thr = Foundation.NSThread.mainThread()
r = obj.pyobjc_performSelector_onThread_withObject_('sel2:', thr, 3)
self.assertEqual(r, 6)
r = obj.pyobjc_performSelector_onThread_withObject_('sel3:', thr, 2.0)
self.assertEqual(r, 0.5)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThreadWithResult:', thr, ('sel2:', 3, [(True, 6)]), True),
(b'_pyobjc_performOnThreadWithResult:', thr, ('sel3:', 2.0, [(True, 0.5)]), True),
])
# Raise an exception
orig_stderr = sys.stderr
sys.stderr = StringIO()
try:
obj.calls[:] = []
self.assertRaises(ZeroDivisionError, obj.pyobjc_performSelector_onThread_withObject_,
b'sel3:', thr, 0)
self.assertEqual(len(obj.calls), 1)
self.assertEqual(obj.calls[0][0], b'_pyobjc_performOnThreadWithResult:')
self.assertEqual(obj.calls[0][2][-1][0][0], False)
self.assertNotIn('Traceback', sys.stderr.getvalue())
finally:
sys.stderr = orig_stderr
def testOnThreadWithResultModes(self):
obj = TheadingHelperTestHelper.alloc().init()
thr = Foundation.NSThread.mainThread()
r = obj.pyobjc_performSelector_onThread_withObject_modes_('sel2:', thr, 3, 1)
self.assertEqual(r, 6)
r = obj.pyobjc_performSelector_onThread_withObject_modes_('sel3:', thr, 2.0, 2)
self.assertEqual(r, 0.5)
self.assertEqual(obj.calls, [
(b'_pyobjc_performOnThreadWithResult:', thr, ('sel2:', 3, [(True, 6)]), True, 1),
(b'_pyobjc_performOnThreadWithResult:', thr, ('sel3:', 2.0, [(True, 0.5)]), True, 2),
])
# Raise an exception
orig_stderr = sys.stderr
sys.stderr = StringIO()
try:
obj.calls[:] = []
self.assertRaises(ZeroDivisionError, obj.pyobjc_performSelector_onThread_withObject_modes_,
b'sel3:', thr, 0, 3)
self.assertEqual(len(obj.calls), 1)
self.assertEqual(obj.calls[0][0], b'_pyobjc_performOnThreadWithResult:')
self.assertEqual(obj.calls[0][2][-1][0][0], False)
self.assertNotIn('Traceback', sys.stderr.getvalue())
finally:
sys.stderr = orig_stderr
if __name__ == "__main__":
main()
| [
"opensource@apple.com"
] | opensource@apple.com |
807e79a4a73a139395c2af3d8c63d99759a161d1 | e7dd22eb03b914b1be39c0e46799857fac1d3f8a | /tests/log_generator.py | dae9a56cf64769986c40651a02254901a8875048 | [] | no_license | O-Swad/PyLog | f6d774636e059939fd449daaebe5f8e807ccb1cd | 360d86ef13b40622f7fb83eaf0c2dbea77d5dbc5 | refs/heads/main | 2023-06-03T20:38:04.986515 | 2021-06-26T16:41:47 | 2021-06-26T16:41:47 | 374,059,547 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 591 | py | import time
import random
hours = 2
now_time = int(time.time()*1000)
last_time = now_time - 3600 * 1000 * hours # one day of data in milliseconds
hosts = ['everest', 'annapurna', 'lhotse', 'manaslu', 'dhaulagiri']
separator = ' '
counter = 0
with open('test-log-file.txt', 'a') as log_file:
for log_timestamp in range(last_time, now_time):
host_ori, host_dest = random.sample(hosts, 2)
tokens = (str(log_timestamp), host_ori, host_dest)
line = separator.join(tokens) + '\n'
log_file.write(line)
counter += 1
print('Líneas: ' + str(counter))
| [
"="
] | = |
5c1d8320d057b699aa7ea010ec4e33e8dd18a69a | 36c28058a966b777f39d8212ac598da48a9ec04e | /functions.py | f9611c9788718ef2431ebdb872ed792f1546c5b6 | [] | no_license | cjmugs/Python_files | 02ee681baac4a43e5aa6e490fa538a6d67932364 | 5e8d38a9938e631a8ded474708c1fab86cc0784b | refs/heads/master | 2022-12-19T16:37:34.061004 | 2020-09-29T01:43:23 | 2020-09-29T01:43:23 | 299,338,610 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 123 | py | # creating a function that squares a number
#This is how to define a function/ / /X=input
def square(x):
return x * x
| [
"cjmugs@gmail.com"
] | cjmugs@gmail.com |
7ddbc340eb0394d89d0c8a87e6b760b2066eff80 | 65d9e2e2919554af3d84f63006ce7baa9f5b0213 | /oops/class method.py | 82902983f9b732c20f372e10b6324e2014989def | [] | no_license | quintus2020intern/PM_Basic_Python | 538e970f9a416691a229a594ffadfb18f28084a7 | 4891967490f068c7dd2d86df0519d852f066046f | refs/heads/master | 2020-12-11T08:24:09.987816 | 2020-03-30T10:45:03 | 2020-03-30T10:45:03 | 233,799,667 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 458 | py | # class Test:
# count = 0 # static variable
#
# def __init__(self):
# Test.count = Test.count + 1
#
# @classmethod
# def getNoOfObject(cls):
# print("no of object", cls.count)
#
#
# t1 = Test()
# t1.getNoOfObject()
# class Test:
# @classmethod
# def df(cls):
# print(id(cls))
#
# print(id(Test))
# Test.df()
class Test:
def __init__(self):
print( id( self ) )
t1 = Test()
print( id( t1 ) )
| [
"pragayanparamitaguddi111@gmail.com"
] | pragayanparamitaguddi111@gmail.com |
90fae8edfd4aa80e595fc01abed64323bf71279f | fe0017ae33385d7a2857d0aa39fa8861b40c8a88 | /env/lib/python3.8/site-packages/sklearn/cluster/__init__.py | 04fcee55d143f04403e2a52690a0d6b07eb1a638 | [] | no_license | enriquemoncerrat/frasesback | eec60cc7f078f9d24d155713ca8aa86f401c61bf | e2c77f839c77f54e08a2f0930880cf423e66165b | refs/heads/main | 2023-01-03T23:21:05.968846 | 2020-10-18T21:20:27 | 2020-10-18T21:20:27 | 305,198,286 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,474 | py | """
The :mod:`sklearn.cluster` module gathers popular unsupervised clustering
algorithms.
"""
from ._affinity_propagation import affinity_propagation, AffinityPropagation
from ._agglomerative import (ward_tree, AgglomerativeClustering,
linkage_tree, FeatureAgglomeration)
from ._bicluster import SpectralBiclustering, SpectralCoclustering
from ._birch import Birch
from ._dbscan import dbscan, DBSCAN
from ._kmeans import k_means, KMeans, MiniBatchKMeans
from ._mean_shift import (mean_shift, MeanShift,
estimate_bandwidth, get_bin_seeds)
from ._optics import (OPTICS, cluster_optics_dbscan, compute_optics_graph,
cluster_optics_xi)
from ._spectral import spectral_clustering, SpectralClustering
__all__ = ['AffinityPropagation',
'AgglomerativeClustering',
'Birch',
'DBSCAN',
'OPTICS',
'cluster_optics_dbscan',
'cluster_optics_xi',
'compute_optics_graph',
'KMeans',
'FeatureAgglomeration',
'MeanShift',
'MiniBatchKMeans',
'SpectralClustering',
'affinity_propagation',
'dbscan',
'estimate_bandwidth',
'get_bin_seeds',
'k_means',
'linkage_tree',
'mean_shift',
'spectral_clustering',
'ward_tree',
'SpectralBiclustering',
'SpectralCoclustering']
| [
"enriquemoncerrat@gmail.com"
] | enriquemoncerrat@gmail.com |
9e23232fb9f4b9c0dfab6d79764b757d42dd29e1 | 52580a60f1f7b27655a2543b1aa4c92a56053ead | /migrations/versions/824086b2abe3_bookmarks_table.py | 63851c4dfd9eb9032d2371364c2bc96f09fa116f | [] | no_license | nitin-cherian/personal_blog | 4d42a886e527dabecbfc82d30d3125c07d428192 | f3ef365ff0a5d9004c6b5407912aa93af2f0f855 | refs/heads/master | 2021-04-09T15:18:09.855063 | 2018-03-25T11:39:51 | 2018-03-25T11:39:51 | 125,714,136 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,249 | py | """bookmarks table
Revision ID: 824086b2abe3
Revises: 9c5d6731465a
Create Date: 2018-03-25 16:32:09.932871
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '824086b2abe3'
down_revision = '9c5d6731465a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('bookmark',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('post_url', sa.String(length=256), nullable=True),
sa.Column('timestamp', sa.DateTime(), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_bookmark_post_url'), 'bookmark', ['post_url'], unique=True)
op.create_index(op.f('ix_bookmark_timestamp'), 'bookmark', ['timestamp'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_bookmark_timestamp'), table_name='bookmark')
op.drop_index(op.f('ix_bookmark_post_url'), table_name='bookmark')
op.drop_table('bookmark')
# ### end Alembic commands ###
| [
"nitin.cherian@gmail.com"
] | nitin.cherian@gmail.com |
76ca7e8409c7d796eb9fd1e95f6eabb2688c03f6 | f9e50b7c9079c5994b9f9f1b661c6259c3ff7510 | /video_sub/settings.py | 646fdabba6d3920d27a94c5b3564da09fdf1575a | [] | no_license | njokuifeanyigerald/video-subscription | 6598f9ec3a677ec1bcb031aa5499f546fed97bf3 | 5e479f142306f149d0d9aa3169bfeeba0953d6c2 | refs/heads/master | 2022-11-16T07:15:27.780389 | 2020-07-08T12:39:17 | 2020-07-08T12:39:17 | 277,650,980 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,164 | py | """
Django settings for video_sub project.
Generated by 'django-admin startproject' using Django 3.0.7.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'x-7h%*t20+3)9j+c#s(bt)ei!f_byhzxrv=+j5regcn#ts!(^e'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'allauth',
'allauth.account',
'allauth.socialaccount',
'video',
'courses',
# 'allauth',
# 'allauth.account',
# 'allauth.socialaccount',
]
SITE_ID = 1
AUTHENTICATION_BACKENDS = [
# Needed to login by username in Django admin, regardless of `allauth`
'django.contrib.auth.backends.ModelBackend',
# `allauth` specific authentication methods, such as login by e-mail
'allauth.account.auth_backends.AuthenticationBackend',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'video_sub.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'video_sub.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static_root')
]
VENV_PATH = os.path.dirname(BASE_DIR)
STATIC_ROOT = os.path.join(BASE_DIR, 'static/')
MEDIA_ROOT = os.path.join(VENV_PATH, 'media_root')
if DEBUG:
STRIPE_PUBLISHABLE_KEY = 'pk_test_yhAYzCLh0qruLoqF3aKvtNa900HGnFUy7B'
STRIPE_SECRET_KEY = 'sk_test_4fZvDGB3VzQ2OYQOyRuhq9bK00b8BiWAVb'
else:
STRIPE_PUBLISHABLE_KEY = ''
STRIPE_SECRET_KEY = ''
LOGIN_REDIRECT_URL = '/'
ACCOUNT_UNIQUE_EMAIL =True
ACCOUNT_EMAIL_REQUIRED = True
| [
"brainboyrichmond@gmail.com"
] | brainboyrichmond@gmail.com |
08f4d070632ee86bad8a064fde41cbfb19aeb520 | e17e40dbb6ed8caaac5c23de29071b403637f5ae | /transformers_keras/transformer/point_wise_ffn.py | 317f811c72b207e769dbb52cc0c3d75b49867c4c | [
"Apache-2.0"
] | permissive | Linessiex/transformers-keras | cb739075c8daab39d52dc6cd6bafe5e45f8259be | 0bb576db356f575390815dc64840b78b8ecf6227 | refs/heads/master | 2020-11-25T05:58:09.448200 | 2019-09-23T09:13:59 | 2019-09-23T09:13:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 669 | py | import tensorflow as tf
class PointWiseFeedForwardNetwork(tf.keras.Model):
def __init__(self, d_model, dff):
super(PointWiseFeedForwardNetwork, self).__init__(name='ffn')
# self.d_model = d_model
self.dense_1 = tf.keras.layers.Dense(dff, activation='relu')
self.dense_2 = tf.keras.layers.Dense(d_model)
def call(self, x, training=None, mask=None):
x = self.dense_1(x)
return self.dense_2(x)
def compute_output_shape(self, input_shape):
# shapes = tf.shape(input_shape).as_list()
# shapes[-1] = self.d_model
# return tf.TensorShape(shapes)
return self.dense_2.output_shape
| [
"zhouyang.luo@gmail.com"
] | zhouyang.luo@gmail.com |
4415658f3a51da55638037244704972110642216 | bb6bc5929b463c6d8564e0d2045b25402e855041 | /graphutils/graphClusterSampler.py | 4af5dbcd62721ee1fd740f2f85707126c18a34a8 | [] | no_license | xjtuwgt/ContrastiveKGE | e6560fc7c8ddacb1bfd8118fe9c4f1112f234a18 | 15e2f58b4bef527fe49e0b2a817c4b7ab706d03b | refs/heads/main | 2023-07-20T13:04:36.780926 | 2021-09-07T02:17:19 | 2021-09-07T02:17:19 | 348,168,386 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,862 | py | import os
import dgl
import torch
from graphutils.partition_utils import get_partition_list
import numpy as np
class ClusterIter(object):
'''The partition sampler given a DGLGraph and partition number.
The metis is used as the graph partition backend.
'''
def __init__(self, dn, g, psize, batch_size):
"""Initialize the sampler.
Paramters
---------
dn : str
The dataset name.
g : DGLGraph
The full graph of dataset
psize: int
The partition number
batch_size: int
The number of partitions in one batch
"""
self.psize = psize
self.batch_size = batch_size
# cache the partitions of known datasets&partition number
if dn:
fn = os.path.join('./datasets/', dn + '_{}.npy'.format(psize))
if os.path.exists(fn):
self.par_li = np.load(fn, allow_pickle=True)
else:
os.makedirs('./datasets/', exist_ok=True)
self.par_li = get_partition_list(g, psize)
np.save(fn, self.par_li)
else:
self.par_li = get_partition_list(g, psize)
par_list = []
total_nodes = 0
for p in self.par_li:
total_nodes = total_nodes + len(p)
par = torch.Tensor(p)
par_list.append(par)
self.par_list = par_list
print('Partition number = {} over {} nodes on graph with {} nodes'.format(len(par_list), total_nodes, g.num_nodes()))
def __len__(self):
return self.psize
def __getitem__(self, idx):
return self.par_li[idx]
def subgraph_collate_fn(g, batch):
nids = np.concatenate(batch).reshape(-1).astype(np.int64)
g1 = g.subgraph(nids)
g1 = dgl.remove_self_loop(g1)
g1 = dgl.add_self_loop(g1)
return g1 | [
"guangtao.wang@jd.com"
] | guangtao.wang@jd.com |
6a4e0c9277e2d77ad14e7fd67a40cff6e71e563c | b4eb1cd674580ef0136d1d5452cc039b295ecc6c | /tests/test_middleware.py | df774ee26a644c82aa55d405a7c4aca0dcde8b3f | [
"MIT"
] | permissive | jrobichaud/django-groups-cache | e0689c276adc8db93b0babb9245d65aa1896d2bb | cf219753f1e9d8f10437ce11671a246327431598 | refs/heads/master | 2020-04-30T09:31:18.855358 | 2018-01-06T13:57:12 | 2018-01-06T13:57:12 | 176,749,747 | 0 | 0 | null | 2019-03-20T14:18:18 | 2019-03-20T14:18:18 | null | UTF-8 | Python | false | false | 2,242 | py | import django
from django.core.cache import cache
from django.contrib.auth.models import AnonymousUser, User, Group
from django.test import TestCase
from mock import Mock
import mock
from groups_cache.compat import is_authenticated
from groups_cache.middleware import GroupsCacheMiddleware
from groups_cache import signals
class TestMiddleware(TestCase):
def setUp(self):
self.gcm = GroupsCacheMiddleware()
self.request = Mock()
self.user = Mock(id=123, name='bob')
if django.VERSION < (1, 10):
self.user.is_authenticated.return_value = True
else:
self.user.is_authenticated = True
def test_request_should_not_cache_anonymous(self):
self.request.user = Mock()
if django.VERSION < (1, 10):
self.request.user.is_authenticated.return_value = False
else:
self.request.user.is_authenticated = False
self.assertEqual(self.gcm.process_request(self.request), None)
self.assertIsNone(self.request.groups_cache)
cache.clear()
def test_request_should_cache_authenticated_user(self):
self.request.user = self.user
self.user.groups.all.return_value.values_list.return_value = Group.objects.none()
self.assertEqual(self.gcm.process_request(self.request), None)
self.assertIsInstance(self.request.groups_cache, type(Group.objects.none()))
self.assertEqual(len(self.request.groups_cache), 0)
cache.clear()
def test_request_should_cache_one_group(self):
Group.objects.create(name='revelers')
self.user.groups.all.return_value.values_list.return_value = Group.objects.all()
self.request.user = self.user
self.assertEqual(self.gcm.process_request(self.request), None)
self.assertIsInstance(self.request.groups_cache, type(Group.objects.none()))
self.assertEqual(len(self.request.groups_cache), 1)
def test_request_should_hit_cached_one_group(self):
self.request.user = self.user
self.assertEqual(self.gcm.process_request(self.request), None)
self.assertIsInstance(self.request.groups_cache, type(Group.objects.none()))
self.assertEqual(len(self.request.groups_cache), 1)
| [
"castner.rr@gmail.com"
] | castner.rr@gmail.com |
bb042febed8e692192c700d682213ca7c55341eb | 3e7db53009b413d1e9d2631be662487fb1e77117 | /asposetaskscloud/models/timephased_data_type.py | d636d7d383e4b84bb6debe99fdf47c6aa0aed1ca | [
"MIT"
] | permissive | aspose-tasks-cloud/aspose-tasks-cloud-python | d35e318ecc35ee9463fd7602a2e1f4f7dad17e33 | 7b61335d82529020f610bd5ae679244d0cdb83d1 | refs/heads/master | 2022-12-24T23:06:02.336709 | 2022-12-17T12:23:33 | 2022-12-17T12:23:33 | 240,446,905 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,294 | py | # coding: utf-8
# -----------------------------------------------------------------------------------
# <copyright company="Aspose" file="TimephasedDataType.py">
# Copyright (c) 2020 Aspose.Tasks Cloud
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
import pprint
import re # noqa: F401
import six
class TimephasedDataType(object):
"""
"""
"""
allowed enum values
"""
ASSIGNMENTREMAININGWORK = "AssignmentRemainingWork"
ASSIGNMENTACTUALWORK = "AssignmentActualWork"
ASSIGNMENTACTUALOVERTIMEWORK = "AssignmentActualOvertimeWork"
ASSIGNMENTBASELINEWORK = "AssignmentBaselineWork"
ASSIGNMENTBASELINECOST = "AssignmentBaselineCost"
ASSIGNMENTACTUALCOST = "AssignmentActualCost"
RESOURCEBASELINEWORK = "ResourceBaselineWork"
RESOURCEBASELINECOST = "ResourceBaselineCost"
TASKBASELINEWORK = "TaskBaselineWork"
TASKBASELINECOST = "TaskBaselineCost"
TASKPERCENTCOMPLETE = "TaskPercentComplete"
ASSIGNMENTBASELINE1WORK = "AssignmentBaseline1Work"
ASSIGNMENTBASELINE1COST = "AssignmentBaseline1Cost"
TASKBASELINE1WORK = "TaskBaseline1Work"
TASKBASELINE1COST = "TaskBaseline1Cost"
RESOURCEBASELINE1WORK = "ResourceBaseline1Work"
RESOURCEBASELINE1COST = "ResourceBaseline1Cost"
ASSIGNMENTBASELINE2WORK = "AssignmentBaseline2Work"
ASSIGNMENTBASELINE2COST = "AssignmentBaseline2Cost"
TASKBASELINE2WORK = "TaskBaseline2Work"
TASKBASELINE2COST = "TaskBaseline2Cost"
RESOURCEBASELINE2WORK = "ResourceBaseline2Work"
RESOURCEBASELINE2COST = "ResourceBaseline2Cost"
ASSIGNMENTBASELINE3WORK = "AssignmentBaseline3Work"
ASSIGNMENTBASELINE3COST = "AssignmentBaseline3Cost"
TASKBASELINE3WORK = "TaskBaseline3Work"
TASKBASELINE3COST = "TaskBaseline3Cost"
RESOURCEBASELINE3WORK = "ResourceBaseline3Work"
RESOURCEBASELINE3COST = "ResourceBaseline3Cost"
ASSIGNMENTBASELINE4WORK = "AssignmentBaseline4Work"
ASSIGNMENTBASELINE4COST = "AssignmentBaseline4Cost"
TASKBASELINE4WORK = "TaskBaseline4Work"
TASKBASELINE4COST = "TaskBaseline4Cost"
RESOURCEBASELINE4WORK = "ResourceBaseline4Work"
RESOURCEBASELINE4COST = "ResourceBaseline4Cost"
ASSIGNMENTBASELINE5WORK = "AssignmentBaseline5Work"
ASSIGNMENTBASELINE5COST = "AssignmentBaseline5Cost"
TASKBASELINE5WORK = "TaskBaseline5Work"
TASKBASELINE5COST = "TaskBaseline5Cost"
RESOURCEBASELINE5WORK = "ResourceBaseline5Work"
RESOURCEBASELINE5COST = "ResourceBaseline5Cost"
ASSIGNMENTBASELINE6WORK = "AssignmentBaseline6Work"
ASSIGNMENTBASELINE6COST = "AssignmentBaseline6Cost"
TASKBASELINE6WORK = "TaskBaseline6Work"
TASKBASELINE6COST = "TaskBaseline6Cost"
RESOURCEBASELINE6WORK = "ResourceBaseline6Work"
RESOURCEBASELINE6COST = "ResourceBaseline6Cost"
ASSIGNMENTBASELINE7WORK = "AssignmentBaseline7Work"
ASSIGNMENTBASELINE7COST = "AssignmentBaseline7Cost"
TASKBASELINE7WORK = "TaskBaseline7Work"
TASKBASELINE7COST = "TaskBaseline7Cost"
RESOURCEBASELINE7WORK = "ResourceBaseline7Work"
RESOURCEBASELINE7COST = "ResourceBaseline7Cost"
ASSIGNMENTBASELINE8WORK = "AssignmentBaseline8Work"
ASSIGNMENTBASELINE8COST = "AssignmentBaseline8Cost"
TASKBASELINE8WORK = "TaskBaseline8Work"
TASKBASELINE8COST = "TaskBaseline8Cost"
RESOURCEBASELINE8WORK = "ResourceBaseline8Work"
RESOURCEBASELINE8COST = "ResourceBaseline8Cost"
ASSIGNMENTBASELINE9WORK = "AssignmentBaseline9Work"
ASSIGNMENTBASELINE9COST = "AssignmentBaseline9Cost"
TASKBASELINE9WORK = "TaskBaseline9Work"
TASKBASELINE9COST = "TaskBaseline9Cost"
RESOURCEBASELINE9WORK = "ResourceBaseline9Work"
RESOURCEBASELINE9COST = "ResourceBaseline9Cost"
ASSIGNMENTBASELINE10WORK = "AssignmentBaseline10Work"
ASSIGNMENTBASELINE10COST = "AssignmentBaseline10Cost"
TASKBASELINE10WORK = "TaskBaseline10Work"
TASKBASELINE10COST = "TaskBaseline10Cost"
RESOURCEBASELINE10WORK = "ResourceBaseline10Work"
RESOURCEBASELINE10COST = "ResourceBaseline10Cost"
PHYSICALPERCENTCOMPLETE = "PhysicalPercentComplete"
TASKWORK = "TaskWork"
TASKCOST = "TaskCost"
RESOURCEWORK = "ResourceWork"
RESOURCECOST = "ResourceCost"
ASSIGNMENTWORK = "AssignmentWork"
ASSIGNMENTCOST = "AssignmentCost"
UNDEFINED = "Undefined"
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
}
attribute_map = {
}
def __init__(self): # noqa: E501
"""TimephasedDataType - a model defined in Swagger""" # noqa: E501
self.discriminator = None
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, TimephasedDataType):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"ivan.andreychikov@gmail.com"
] | ivan.andreychikov@gmail.com |
817f724a608c79e36228a2d083a46f35bbc6f05e | 6abb92d99ff4218866eafab64390653addbf0d64 | /AtCoder/asa/2020/asa1117/c.py | f70e5d0c728ab867d45e89e016167d61880e2a9b | [] | no_license | Johannyjm/c-pro | 38a7b81aff872b2246e5c63d6e49ef3dfb0789ae | 770f2ac419b31bb0d47c4ee93c717c0c98c1d97d | refs/heads/main | 2023-08-18T01:02:23.761499 | 2023-08-07T15:13:58 | 2023-08-07T15:13:58 | 217,938,272 | 0 | 0 | null | 2023-06-25T15:11:37 | 2019-10-28T00:51:09 | C++ | UTF-8 | Python | false | false | 366 | py | n, m = map(int, input().split())
sc = [list(map(int, input().split())) for _ in range(m)]
for i in range(1000):
num = str(i)
d = len(num)
if(d != n): continue
valid = True
for s, c in sc:
s -= 1
if(num[s] != str(c)):
valid = False
break
if(valid):
print(num)
exit()
print(-1) | [
"meetpastarts@gmail.com"
] | meetpastarts@gmail.com |
0bbf230ea672d04d5387ce3b1400a15eacbd03f4 | 47404873eebf6f042fcdfc488f7f2c87d27be755 | /checkov/version.py | 2be33f00faead58ef9c74921ccdbfa39765e2abd | [
"Apache-2.0"
] | permissive | rodrigoieh/checkov | 234bd96f9de45f2a84c02d41df9baa20c61e46fd | 90323525549fd0344b46db8c9e4952e4511e9efa | refs/heads/master | 2023-03-29T15:17:37.619865 | 2021-04-10T08:20:39 | 2021-04-10T08:28:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19 | py | version = '2.0.28'
| [
"action@github.com"
] | action@github.com |
120e45f76663885b569469fe940f8275dabec8d8 | 31312c4070788c045a35efb29f97da421ca3b1cf | /models/db.py | b89340b82593eea5e256fd3fdc20294991400418 | [
"LicenseRef-scancode-public-domain"
] | permissive | hitesh96db/stopstalk | c530f30335efdc0c8939c640bb1efd9e33565851 | 486b3876a928ba634d245ed9e153e63d54af5820 | refs/heads/master | 2020-12-30T20:56:08.016000 | 2015-10-06T22:22:33 | 2015-10-06T22:22:33 | 43,397,646 | 0 | 0 | null | 2015-09-29T22:15:46 | 2015-09-29T22:15:46 | null | UTF-8 | Python | false | false | 7,246 | py | # -*- coding: utf-8 -*-
#########################################################################
## This scaffolding model makes your app work on Google App Engine too
## File is released under public domain and you can use without limitations
#########################################################################
## if SSL/HTTPS is properly configured and you want all HTTP requests to
## be redirected to HTTPS, uncomment the line below:
# request.requires_https()
## app configuration made easy. Look inside private/appconfig.ini
from gluon.contrib.appconfig import AppConfig
## once in production, remove reload=True to gain full speed
myconf = AppConfig(reload=True)
import custom_layout as custom
if not request.env.web2py_runtime_gae:
## if NOT running on Google App Engine use SQLite or other DB
# db = DAL('mysql://' + current.mysql_user + \
# ':' + current.mysql_password + \
# '@' + current.mysql_server + \
# '/' + current.mysql_dbname)
db = DAL(myconf.take('db.uri'), pool_size=myconf.take('db.pool_size', cast=int), check_reserved=['all'])
else:
## connect to Google BigTable (optional 'google:datastore://namespace')
db = DAL('google:datastore+ndb')
## store sessions and tickets there
session.connect(request, response, db=db)
## or store session in Memcache, Redis, etc.
## from gluon.contrib.memdb import MEMDB
## from google.appengine.api.memcache import Client
## session.connect(request, response, db = MEMDB(Client()))
## by default give a view/generic.extension to all actions from localhost
## none otherwise. a pattern can be 'controller/function.extension'
response.generic_patterns = ['*']
## choose a style for forms
response.formstyle = myconf.take('forms.formstyle') # or 'bootstrap3_stacked' or 'bootstrap2' or other
response.form_label_separator = myconf.take('forms.separator')
## (optional) optimize handling of static files
# response.optimize_css = 'concat,minify,inline'
# response.optimize_js = 'concat,minify,inline'
## (optional) static assets folder versioning
# response.static_version = '0.0.0'
#########################################################################
## Here is sample code if you need for
## - email capabilities
## - authentication (registration, login, logout, ... )
## - authorization (role based authorization)
## - services (xml, csv, json, xmlrpc, jsonrpc, amf, rss)
## - old style crud actions
## (more options discussed in gluon/tools.py)
#########################################################################
from gluon.tools import Auth, Service, PluginManager
from datetime import datetime
auth = Auth(db)
service = Service()
plugins = PluginManager()
initial_date = datetime.strptime("2013-01-01 00:00:00", "%Y-%m-%d %H:%M:%S")
extra_fields = [Field('institute', requires=IS_NOT_EMPTY()),
Field('stopstalk_handle',
requires=[IS_NOT_IN_DB(db,
'auth_user.stopstalk_handle',
error_message=T("Handle taken")),
IS_NOT_IN_DB(db,
'custom_friend.stopstalk_handle',
error_message=T("Handle taken"))]
),
Field('rating',
default=0,
writable=False),
Field('last_retrieved', 'datetime',
default=initial_date,
writable=False)
]
site_handles = []
for site in current.SITES:
site_handles += [Field(site.lower() + "_handle")]
extra_fields += site_handles
auth.settings.extra_fields['auth_user'] = extra_fields
auth.define_tables(username=False, signature=False)
## configure email
mail = auth.settings.mailer
mail.settings.server = current.smtp_server
mail.settings.sender = current.sender_mail
mail.settings.login = current.sender_mail + ":" + current.sender_password
## configure auth policy
auth.settings.registration_requires_verification = True
auth.settings.registration_requires_approval = False
auth.settings.reset_password_requires_verification = True
#########################################################################
## Define your tables below (or better in another model file) for example
##
## >>> db.define_table('mytable',Field('myfield','string'))
##
## Fields can be 'string','text','password','integer','double','boolean'
## 'date','time','datetime','blob','upload', 'reference TABLENAME'
## There is an implicit 'id integer autoincrement' field
## Consult manual for more options, validators, etc.
##
## More API examples for controllers:
##
## >>> db.mytable.insert(myfield='value')
## >>> rows=db(db.mytable.myfield=='value').select(db.mytable.ALL)
## >>> for row in rows: print row.id, row.myfield
#########################################################################
## after defining tables, uncomment below to enable auditing
# auth.enable_record_versioning(db)
custom_friend_fields = [Field("user_id", "reference auth_user"),
Field("first_name", requires=IS_NOT_EMPTY()),
Field("last_name", requires=IS_NOT_EMPTY()),
Field("institute", requires=IS_NOT_EMPTY()),
Field("stopstalk_handle", requires = [IS_NOT_IN_DB(db,
'auth_user.stopstalk_handle',
error_message=T("Handle already exists")),
IS_NOT_IN_DB(db,
'custom_friend.stopstalk_handle',
error_message=T("Handle already exists"))]),
Field("rating",
default=0,
writable=False),
Field("last_retrieved", "datetime",
default=initial_date,
writable=False)]
custom_friend_fields += site_handles
db.define_table("custom_friend",
*custom_friend_fields)
db.define_table("submission",
Field("user_id", "reference auth_user"),
Field("custom_user_id", "reference custom_friend"),
Field("stopstalk_handle"),
Field("site_handle"),
Field("site"),
Field("time_stamp", "datetime"),
Field("problem_name"),
Field("problem_link"),
Field("lang"),
Field("status"),
Field("points"),
)
db.define_table("friend_requests",
Field("from_h", "reference auth_user"),
Field("to_h", "reference auth_user"),
)
db.define_table("friends",
Field("user_id", "reference auth_user"),
Field("friends_list", "text"))
current.db = db
| [
"raj454raj@gmail.com"
] | raj454raj@gmail.com |
c80e6023fed108386a207eb74c98d32d92da4a79 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/adjectives/_swellest.py | c1d8de1b1e9a8d50b6af4e00d336f59e34ce7fc6 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 245 | py |
from xai.brain.wordbase.adjectives._swell import _SWELL
#calss header
class _SWELLEST(_SWELL, ):
def __init__(self,):
_SWELL.__init__(self)
self.name = "SWELLEST"
self.specie = 'adjectives'
self.basic = "swell"
self.jsondata = {}
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
d7431e16f379bab0fbe33e2d1a47595816abb9e5 | 5dff0d0bcb6f48085f934dc4ab28f4e55727282d | /server/instruments/reaper.py | 4eff8967fb514f27e40d335884bff3d7c351c998 | [] | no_license | the-fool/hoku | 4d2304d602fc86400687c68c359799569e55effd | 2f53ecf6ef978ea9d75fa3c77e750ec328065add | refs/heads/master | 2018-10-26T09:14:12.214534 | 2018-08-26T22:49:19 | 2018-08-26T22:49:19 | 103,051,786 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 785 | py | from .base import BaseInstrument
class Reaper(BaseInstrument):
MINI_1_VOL = 9
MINI_1_VERB = 10
MINI_1_DIST = 11
MINI_2_VOL = 15
MINI_2_VERB = 12
MINI_2_DIST = 13
DRUM_VERB = 14
def mini_1_vol(self, value):
self._control(self.MINI_1_VOL, int(value))
def mini_2_vol(self, value):
self._control(self.MINI_2_VOL, int(value))
def mini_1_dist(self, value):
self._control(self.MINI_1_DIST, value)
def mini_2_dist(self, value):
self._control(self.MINI_2_DIST, value)
def mini_1_verb(self, value):
self._control(self.MINI_1_VERB, value)
def mini_2_verb(self, value):
self._control(self.MINI_2_VERB, value)
def drum_verb(self, value):
self._control(self.DRUM_VERB, value)
| [
"sketchbang@gmail.com"
] | sketchbang@gmail.com |
4720ddc731d6c1da9f9078fa6bd36c468dff766e | b0b87924d07101e25fa56754ceaa2f22edc10208 | /workspace/python_study/python_gspark/day4-1.py | f83b2699257b4262d3bbda20f3dba246ce56ecb2 | [] | no_license | SoheeKwak/Python | 2295dd03e5f235315d07355cbe72998f8b86c147 | e1a5f0ecf31e926f2320c5df0e3416306b8ce316 | refs/heads/master | 2020-04-02T13:49:58.367361 | 2018-11-23T09:33:23 | 2018-11-23T09:33:23 | 154,499,204 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,893 | py | from bs4 import BeautifulSoup
html = """
<html><body>
<ul>
<li>
<a href="http://www.naver.com">naver</a>
</li>
<li>
<a href="http://www.daum.net">daum</a>
</li>
</ul>
</body></html>
"""
soup = BeautifulSoup(html, 'html.parser')
link = soup.find("a")
print(link)
link2 = soup.find_all("a")
print(link2)
for i in link2:
# print(i)
myhref = i.attrs['href']
print(myhref)
text = i.string
print(text, "-->", myhref)
from bs4 import BeautifulSoup
html="""
<html><body>
<h1>빅데이터 분석</h1>
<p>데이터 수집</p>
<p>데이터 전처리</p>
<p>데이터 마이닝</p>
</body></html>
"""
soup = BeautifulSoup(html, 'html.parser')
h1 = soup.h1
print(h1)
h1 = soup.body.h1
print(h1)
h1 = soup.html.body.h1
print(h1)
p1 = soup.html.body.p
print(p1.string)
p2 = p1.next_sibling.next_sibling
print(p2)
p3 = p2.next_sibling.next_sibling
print(p3)
from bs4 import BeautifulSoup
import urllib.request as req
url = "http://info.finance.naver.com/marketindex/"
res = req.urlopen(url).read()
soup = BeautifulSoup(res,'html.parser')
p = soup.select_one("#exchangeList > li.on > a.head.usd > div > span.value").string
print("krw/usd=",p)
html = """
<html><body>
<div id = "test">
<h1>빅데이터 분석</h1>
<ul class = "lec">
<li>파이썬</li>
<li>머신러닝</li>
<li>통계분석</li>
</ul>
</div>
</body></html>
"""
soup = BeautifulSoup(html, 'html.parser')
print(soup)
res = soup.select_one("#test > h1").string
print(res)
res = soup.select_one("div#test > ul.lec > li").string
print(res)
res = soup.select_one("li").string
print(res)
res = soup.li
res2 = res.next_sibling.next_sibling
res3 = res2.next_sibling.next_sibling
print(res.string)
print(res2.string)
print(res3.string)
myList = soup.select("div#test > ul.lec > li")
for li in myList:
print(li.string)
test = soup.find(id = "test")
print(test)
| [
"soheekwak728@gmail.com"
] | soheekwak728@gmail.com |
5b359b667dad448b3a80c84e30867c87d641f496 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/pa3/sample/str_get_element-42.py | 890eb44f037c7631688f2cc5967b68b4d12596ca | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 184 | py | x:str = "abc"
a:str = ""
b:str = ""
c:str = ""
def str_get(s:str, $TypedVar) -> str:
return s[i]
a = str_get(x, 0)
b = str_get(x, 1)
c = str_get(x, 2)
print(a)
print(b)
print(c)
| [
"647530+Virtlink@users.noreply.github.com"
] | 647530+Virtlink@users.noreply.github.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.