| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
| cyanobacterium/Cyanos-Planet-Factory | deploy.py | Python | gpl-3.0 | 7,423 | 0.034353 |
#!/usr/bin/python3
import sys, os, shutil
from os import path
from urllib.request import pathname2url
import subprocess
from subprocess import call
import sys
import re
import zipfile
import config
os.chdir(config.root_dir)
SUPPORTED_OPERATING_SYSTEMS = ('windows_x64', 'linux_x64', 'mac')#, 'linux-arm32', 'linux-arm64')
def make_dir(dir_path):
"""
make_dir(dir_path)
creates a directory if it does not already exist, including parent
directories.
dir_path - directory to create
"""
if not path.exists(dir_path):
os.makedirs(dir_path)
def make_parent_dir(file_path):
"""
make_parent_dir(file_path)
Creates the parent directory for the specified filepath if it does not
already exist.
file_path - path to some file
"""
parent_dir = path.dirname(file_path)
if parent_dir == '': # means parent is working directory
return
if not path.isdir(parent_dir):
os.makedirs(parent_dir)
def _del(filepath):
"""
Deletes a file or recursively deletes a directory. Use with caution.
"""
if(path.isdir(filepath)):
for f in os.listdir(filepath):
_del(path.join(filepath,f))
os.rmdir(filepath)
elif(path.exists(filepath)):
os.remove(filepath)
def del_file(filepath):
"""
del_file(filepath):
Deletes a file or recursively deletes a directory. Use with caution.
filepath - path to file or directory to delete
"""
if(path.isdir(filepath)):
for f in os.listdir(filepath):
_del(path.join(filepath,f))
os.rmdir(filepath)
elif(path.exists(filepath)):
os.remove(filepath)
def del_contents(dirpath):
"""
del_contents(dirpath)
Recursively deletes the contents of a directory, but not the directory itself
dirpath - path to directory to clean-out
"""
if(path.isdir(dirpath)):
for f in os.listdir(dirpath):
del_file(path.join(dirpath,f))
def list_files(dirpath):
"""
list_filetree(dirpath)
Returns a list of all files inside a directory (recursive scan)
dirpath - filepath of directory to scan
"""
if(type(dirpath) == str):
dir_list = [dirpath]
else:
dir_list = dirpath
file_list = []
for _dir_ in dir_list:
for base, directories, files in os.walk(_dir_):
for f in files:
file_list.append(path.join(base,f))
return file_list
def safe_quote_string(text):
"""
safe_quote_string(text)
returns the text in quotes, with escapes for any quotes in the text itself
text - input text to quote
returns: text in quotes with escapes
"""
if os.sep != '\\':
text2 = text.replace('\\', '\\\\')
text3 = text2.replace('"', '\\"')
else:
text3 = text.replace('\\', '/')
# windows does not allow " in file names anyway
return '"'+text3+'"'
def copy_tree(file_list, src_root, dest_root):
"""
copy_tree(file_list, src_root, dest_root)
Copies all files to directory dest_root (creating it if necessary),
preserving the folder structure relative to src_root
"""
for f in file_list:
rel_path = path.relpath(f, src_root)
dst_path = path.join(dest_root, rel_path)
make_parent_dir(dst_path)
shutil.copy(f, dst_path)
def zip_dir(dir_path, zip_path):
print('\nzipping %s to %s\n' % (dir_path, zip_path))
with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
# zipf is zipfile handle
for root, dirs, files in os.walk(dir_path):
for file in files:
fname = path.basename(dir_path)
src_file = path.join(root, file)
dst_file = path.join(fname, path.relpath(src_file, dir_path) )
zipf.write(src_file, arcname=dst_file)
# done
# make dirs
make_dir(config.local_cache_dir)
make_dir(config.compile_dir)
make_dir(config.jar_dir)
make_dir(config.deploy_dir)
make_dir(config.deploy_image_dir)
make_dir(config.run_dir)
make_dir(config.src_dir)
make_dir(config.resource_dir)
# clean
del_contents(config.run_dir)
del_contents(config.jar_dir)
del_contents(config.compile_dir)
del_contents(config.deploy_image_dir)
del_contents(config.deploy_dir)
# compile (with jmods)
for release_OS in SUPPORTED_OPERATING_SYSTEMS:
print('\n',release_OS,'\n')
module_src_path = path.join(config.src_dir, config.module_name)
if(release_OS == 'windows_x64'):
#java_home = 'D:\\CCHall\\Documents\\Programming\\OpenJDK_Distros\\windows-x64\\jdk-13.0.1'
jmod_dirs = [path.join('jmods','windows')] #[path.join(java_home,'jmods')] + config.jmod_dirs_windows_x64
elif(release_OS == 'linux_x64'):
#java_home = 'D:\\CCHall\\Documents\\Programming\\OpenJDK_Distros\\linux-x64\\jdk-13.0.1'
jmod_dirs = [path.join('jmods','linux')] #[path.join(java_home,'jmods')] + config.jmod_dirs_linux_x64
elif(release_OS == 'mac'):
#java_home = 'D:\\CCHall\\Documents\\Programming\\OpenJDK_Distros\\osx-x64\\jdk-13.0.1'
jmod_dirs = [path.join('jmods','mac')] #[path.join(java_home,'jmods')] + config.jmod_dirs_mac
else:
print('UNSUPPORTED OS: %s' % release_OS)
arg_file = path.join(config.local_cache_dir, 'javac-args.txt')
command_list = []
command_list += ['-encoding', 'utf8']
command_list += ['-d', config.compile_dir]
command_list += ['--module-source-path', config.src_dir]
command_list += ['--module', config.module_name]
module_paths = jmod_dirs + [f for f in list_files(config.dependency_dirs) if str(f).endswith('.jar')] # a .jmod file is auto-discoverable by --module-path
command_list += ['--module-path', os.pathsep.join(module_paths)]
with open(arg_file, 'w') as fout:
file_content = ' '.join(map(safe_quote_string, command_list))
fout.write(file_content)
print('@%s: %s' % (arg_file, file_content))
call([config.javac_exec, '@'+str(arg_file)], cwd=config.root_dir)
print()
# need to copy resources separately
resource_files = list_files(config.resource_dir)
resource_files += [f for f in list_files(config.src_dir) if str(f).endswith('.java') == False]
copy_tree(
list_files(config.resource_dir),
config.src_dir,
config.compile_dir
)
copy_tree(
[f for f in list_files(module_src_path) if str(f).endswith('.java') == False],
config.src_dir,
config.compile_dir
)
# jlink
arg_file = path.join(config.local_cache_dir, 'jlink-args.txt')
command_list = []
command_list += ['--module-path', os.pathsep.join(module_paths + [config.compile_dir])]
command_list += ['--add-modules', config.module_name]
image_dir = path.join(config.deploy_image_dir, release_OS, config.module_name)
command_list += ['--launcher', 'launch=%s/%s' % (config.module_name, config.main_class)]
command_list += ['--output', image_dir]
with open(arg_file, 'w') as fout:
file_content = ' '.join(map(safe_quote_string, command_list))
fout.write(file_content)
print('@%s: %s' % (arg_file, file_content))
call([config.jlink_exec, '@'+str(arg_file)], cwd=config.root_dir)
# launcher
if release_OS == 'windows_x64':
with open(path.join(image_dir, 'launch_%s.bat' % config.module_title),'w') as fout:
fout.write('"%~dp0\\bin\\launch.bat"\r\n')
if release_OS == 'linux_x64':
with open(path.join(image_dir, 'launch_%s.sh' % config.module_title),'w') as fout:
fout.write('#!/bin/bash\ncd "`dirname "$0"`"\n./bin/launch\n')
if release_OS == 'mac':
with open(path.join(image_dir, 'launch_%s.sh' % config.module_title),'w') as fout:
fout.write('#!/bin/sh\ncd "`dirname "$0"`"\n./bin/launch\n')
# package images
named_dir = path.join(config.deploy_image_dir, release_OS, config.module_title)
zip_file = path.join(config.deploy_image_dir, '%s_%s.zip' % (config.module_title, release_OS))
shutil.move(image_dir, named_dir)
zip_dir(dir_path=named_dir, zip_path=zip_file)
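As an illustration of the quoting helper used when writing the javac/jlink argument files above, here is a minimal sketch of what `safe_quote_string` produces (the inputs are made-up paths, and the output shown assumes a POSIX `os.sep`):

# Hypothetical demonstration of safe_quote_string on a POSIX system (os.sep == '/'),
# with the function defined earlier in this file in scope.
print(safe_quote_string('build/My App.jar'))   # "build/My App.jar"
print(safe_quote_string('say "hello"'))        # "say \"hello\""  (embedded quotes are escaped)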
| kirillmorozov/youbot_control | scripts/client_gui.py | Python | bsd-2-clause | 32,532 | 0.000486 |
#!/usr/bin/python2
# -*- coding: utf-8 -*-
"""Client GUI to control youBot robot."""
import Tkinter as tk
import ttk
import tkMessageBox
import rospyoubot
from math import radians, degrees
class MainApplication(ttk.Frame):
u"""Основное окно приложения."""
def __init__(self, parent, *args, **kwargs):
u"""Конструктор основного окна."""
ttk.Frame.__init__(self, parent, *args, **kwargs)
self.grid(sticky='nswe')
self.columnconfigure(0, weight=1)
# self.columnconfigure(1, weight=1)
self.style = ttk.Style()
self.style.theme_use('clam')
self.notebook = ttk.Notebook(self)
self.notebook.grid(column=0, row=0, sticky='nswe')
self.manual_controls = ControlsPage(self.notebook)
self.notebook.add(self.manual_controls,
text='Ручное управление',
sticky='nswe')
self.automatic_controls = AutomaticControls(self.notebook)
self.notebook.add(self.automatic_controls,
text='Автоматическое управление',
sticky='nswe')
class ControlsPage(ttk.Frame):
u"""Вкладка управления."""
def __init__(self, parent):
u"""Конструктор класса."""
ttk.Frame.__init__(self, parent)
self.columnconfigure(0, weight=0)
self.columnconfigure(1, weight=1)
# Arm joints controls
self.joints_controls = JointsControlsFrame(self)
self.joints_controls.grid(column=1, row=0, sticky='nswe')
# Odometry
self.odometry = OdometryFrame(self)
self.odometry.grid(column=1, row=1, sticky='nswe')
# Base controls
self.base_control = BaseControl(self)
self.base_control.grid(column=1, row=2, sticky='nswe')
# Padding
for child in self.winfo_children():
child.grid_configure(padx=5, pady=5)
class OdometryFrame(ttk.LabelFrame):
u"""Фрейм одометрии."""
def __init__(self, parent):
u"""Инициализация класса."""
ttk.LabelFrame.__init__(self, parent, text='Одометрия:')
self.columnconfigure(0, weight=1)
self.columnconfigure(1, weight=1)
self.odom_x = tk.StringVar()
self.odom_x.set('x')
self.odom_y = tk.StringVar()
self.odom_y.set('y')
self.odom_z = tk.StringVar()
self.odom_z.set('z')
ttk.Label(self, text='X:', width=5, anchor=tk.E).grid(column=0, row=0)
ttk.Label(self,
textvariable=ODOMETRY[0],
width=6,
anchor=tk.W).grid(column=1, row=0)
ttk.Label(self, text='Y:', width=5, anchor=tk.E).grid(column=0, row=1)
ttk.Label(self,
textvariable=ODOMETRY[1],
width=6,
anchor=tk.W).grid(column=1, row=1)
ttk.Label(self, text=u'\u03c6:', width=5, anchor=tk.E).grid(column=0,
row=2)
ttk.Label(self,
textvariable=ODOMETRY[2],
width=6,
anchor=tk.W).grid(column=1, row=2)
for child in self.winfo_children():
child.grid_configure(padx=2, pady=2)
class JointsControlsFrame(ttk.LabelFrame):
u"""Фрейм управления степенями подвижности."""
def __init__(self, parent):
u"""Инициализация класса."""
ttk.LabelFrame.__init__(self, parent, text='Управление манипулятором:')
self.columnconfigure(0, weight=1)
self.columnconfigure(1, weight=1)
self.a1_joint = JointControl(self, 1)
self.a1_joint.grid(row=0, columnspan=2, sticky='nswe')
self.a2_joint = JointControl(self, 2)
self.a2_joint.grid(row=1, columnspan=2, sticky='nswe')
self.a3_joint = JointControl(self, 3)
self.a3_joint.grid(row=2, columnspan=2, sticky='nswe')
self.a4_joint = JointControl(self, 4)
self.a4_joint.grid(row=3, columnspan=2, sticky='nswe')
self.a5_joint = JointControl(self, 5)
self.a5_joint.grid(row=4, columnspan=2, sticky='nswe')
self.gripper = GripperControl(self)
self.gripper.grid(row=5, columnspan=2, sticky='nswe')
self.home_button = ttk.Button(self, text='Домой', width=6)
self.home_button.grid(row=6, column=0, sticky='nswe')
self.home_button.bind('<Button-1>', self.go_home)
self.home_button = ttk.Button(self, text='Свеча', width=6)
self.home_button.grid(row=6, column=1, sticky='nswe')
self.home_button.bind('<Button-1>', self.go_candle)
for child in self.winfo_children():
child.grid_configure(padx=2, pady=2)
def go_home(self, *args):
u"""Отправляет манипулятор в домашнюю позицию."""
R1.arm.set_joints_angles(0.016,
0.04,
-0.072,
0.0432,
2.839)
def go_candle(self, *args):
u"""Приводит манипулятор в положение свечки."""
R1.arm.set_joints_angles(2.9400474018133402,
1.1251030074812907,
-2.5235000069592695,
1.769468876296561,
2.838871440356912)
class JointControl(ttk.Frame):
u"""Фрейм управления отдельной степенью."""
def __init__(self, parent, joint):
u"""Инициализация класса."""
ttk.Frame.__init__(self, parent)
self.columnconfigure(0, weight=1)
self.columnconfigure(1, weight=1)
self.columnconfigure(2, weight=1)
self.columnconfigure(3, weight=1)
self.joint = joint
self.label = 'A{}:'.format(joint)
self.angle = tk.StringVar()
ttk.Label(self, text=self.label, width=6, anchor='e').grid(column=0,
row=0,
sticky=tk.E)
self.minus_button = ttk.Button(self, text='-', width=7)
self.minus_button.grid(column=1, row=0)
self.minus_button.bind('<Button-1>', self.minus_button_press)
self.minus_button.bind('<ButtonRelease-1>', key_released)
self.state_label = ttk.Label(self,
textvariable=ARM_JOINTS_ANGLES[joint-1],
width=5,
anchor=tk.CENTER)
self.state_label.grid(column=2, row=0, sticky='nswe')
self.plus_button = ttk.Button(self, text='+', width=7)
self.plus_button.grid(column=3, row=0)
self.plus_button.bind('<Button-1>', self.plus_button_press)
self.plus_button.bind('<ButtonRelease-1>', key_released)
def plus_button_press(self, *args):
u"""Задаёт скорость оси, при нажатии на кнопку '+'."""
vel = ARM_VELOCITY
arm_velocities = [vel if x == self.joint - 1 else 0 for x in range(5)]
R1.arm.set_joints_velocities(*arm_velocities)
def minus_button_press(self, *args):
u"""Задаёт скорость оси, при нажатии на кнопку '-'."""
vel = -1 * ARM_VELOCITY
arm_velocities = [vel if x == self.joint - 1 else 0 for x in range(5)]
R1.arm.set_joints_velocities(*arm_velocities)
class BaseControl(ttk.LabelFrame):
u"""Фрейм управления движением базы."""
def __init__(self, parent):
u"""Инициализация класса."""
ttk.LabelFrame.__init__(self, parent, text='Управление платформой:')
self.columnconfigure(0, weight=1)
self.columnconfigure(1, weight=1)
self.columnconfigure(2, weight=1)
controls_style = ttk.Style()
controls_style.configure('base.TButton', font=('TkDefaultFont', 20))
# Rotate left
self.rl_button = ttk.Button(self,
text=u'\u21b6',
width=2,
style='base.TButton')
self.rl_button.grid(column=0, row=0, sticky=tk.SE)
self.rl_button.bind('<Button-1>', self.rl_button_press)
self.rl_button.bind
| tdryer/netscramble | netscramble/easing.py | Python | bsd-3-clause | 1,458 | 0.004115 |
"""Animation easing functions using cubic bezier curves."""
def _cubic_bezier_parametric(t, p0, p1, p2, p3):
"""Return (x, y) on cubic bezier curve for t in [0, 1]."""
return tuple([
pow(1 - t, 3) * p0[i] +
3 * pow(1 - t, 2) * t * p1[i] +
3 * (1 - t) * pow(t, 2) * p2[i] +
pow(t, 3) * p3[i]
for i in xrange(2)])
def _cubic_bezier(x, p0, p1, p2, p3, tolerance=0.001, start=0, end=1):
"""Return y for given x on the cubic bezier curve using binary search."""
midpoint = start + (end - start) / 2.0
r_x, r_y = _cubic_bezier_parametric(midpoint, p0, p1, p2, p3)
difference = r_x - x
if abs(difference) < tolerance:
return r_y
elif difference < 0:
return _cubic_bezier(x, p0, p1, p2, p3, start=midpoint, end=end)
else:
return _cubic_bezier(x, p0, p1, p2, p3, start=start, end=midpoint)
def cubic_bezier(x, x1, y1, x2, y2):
"""Return y for given x on cubic bezier curve with given control points.
This is similar to the CSS3 cubic-bezier function. The curve always starts
at (0, 0) and ends at (1, 1). The control points (x1, y1) and (x2, y2)
define the shape of the curve.
"""
return _cubic_bezier(x, (0, 0), (x1, y1), (x2, y2), (1, 1))
# create using http://cubic-bezier.com/
linear = lambda x: cubic_bezier(x, 0, 0, 1, 1)
ease = lambda x: cubic_bezier(x, .25, .1, .25, 1)
elastic_out = lambda x: cubic_bezier(x, .52, 0, .86, 1.26)
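A minimal usage sketch of the curves defined above (the import path `netscramble.easing` mirrors the file's location and is an assumption; Python 2, matching the module's use of `xrange`):

# Sample the standard CSS-style 'ease' curve at a few animation progress values.
from netscramble import easing

for step in range(5):
    x = step / 4.0
    y = easing.ease(x)   # eased progress in [0, 1], found by binary search on the bezier
    print("%.2f -> %.3f" % (x, y))   # rises slowly near 0, accelerates, then levels off near 1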
| Phrozyn/MozDef | tests/alerts/test_nsm_scan_address.py | Python | mpl-2.0 | 4,726 | 0.00127 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
from positive_alert_test_case import PositiveAlertTestCase
from negative_alert_test_case import NegativeAlertTestCase
from alert_test_suite import AlertTestSuite
class TestNSMScanAddress(AlertTestSuite):
alert_filename = "nsm_scan_address"
# This event is the default positive event that will cause the
# alert to trigger
default_event = {
"_source": {
"category": "bro",
"summary": "Scan::Address_Scan source 10.252.25.90 destination unknown port unknown",
"hostname": "your.friendly.nsm.sensor",
"tags": ["bro"],
"source": "notice",
"details": {
"sourceipaddress": "10.99.88.77",
"indicators": "10.99.88.77",
"source": "notice",
"note": "Scan::Address_Scan",
}
}
}
# This alert is the expected result from running this task
default_alert = {
"category": "nsm",
"severity": "NOTICE",
"tags": ['nsm', 'bro', 'addressscan'],
"summary": "Addr
|
ess scan from 10.99.88.77 (mock.mozilla.org)",
'notify_mozdefbot': False
}
test_cases = []
test_cases.append(
PositiveAlertTestCase(
description="Positive test with default event and default alert expected",
events=AlertTestSuite.create_events(default_event, 5),
expected_alert=default_alert
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['utctimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda(date_timedelta={'minutes': 1})
event['_source']['receivedtimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda(date_timedelta={'minutes': 1})
test_cases.append(
PositiveAlertTestCase(
description="Positive test with events a minute earlier",
events=events,
expected_alert=default_alert
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['category'] = 'syslog'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with a different category",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['source'] = 'intel'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with a different details.source",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['details']['note'] = 'Scan::Random_Scan'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with a different scan type (note)",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['details']['note'] = 'Scan::Port_Scan'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with a different scan type (note)",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['details']['sourceipaddress'] = '10.54.65.234'
event['_source']['details']['indicators'] = '1.2.3.4'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with an excluded IP address",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['details']['sourceipaddress'] = '1.2.3.4'
event['_source']['details']['indicators'] = '1.2.3.4'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with an excluded subnet",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 5)
for event in events:
event['_source']['utctimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda({'minutes': 15})
event['_source']['receivedtimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda({'minutes': 15})
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with old timestamp",
events=events,
)
)
| dongguangming/python-phonenumbers | python/phonenumbers/pb2/__init__.py | Python | apache-2.0 | 2,788 | 0.004304 |
"""Translate python-phonenumbers PhoneNumber to/from protobuf PhoneNumber
Examples of use:
>>> import phonenumbers
>>> from phonenumbers.pb2 import phonenumber_pb2, PBToPy, PyToPB
>>> x_py = phonenumbers.PhoneNumber(country_code=44, national_number=7912345678)
>>> print x_py
Country Code: 44 National Number: 7912345678
>>> y_pb = phonenumber_pb2.PhoneNumber()
>>> y_pb.country_code = 44
>>> y_pb.national_number = 7912345678
>>> print str(y_pb).strip()
country_code: 44
national_number: 7912345678
>>> # Check italian_leading_zero default value when not set
>>> y_pb.italian_leading_ze
|
ro
False
>>> y_py = PBToPy(y_pb)
>>> print y_py
Country Code: 44 National Number: 7912345678
>>> x_pb = PyToPB(x_py)
>>> print str(x_pb).strip()
country_code: 44
national_number: 7912345678
>>> x_py == y_py
True
>>> x_pb == y_pb
True
>>> # Explicitly set the field to its default
>>> y_pb.italian_leading_zero = y_pb.italian_leading_zero
>>> x_pb == y_pb
False
"""
from phonenumber_pb2 import PhoneNumber as PhoneNumberPB
from phonenumbers import PhoneNumber
def PBToPy(numpb):
"""Convert phonenumber_pb2.PhoneNumber to phonenumber.PhoneNumber"""
return PhoneNumber(numpb.country_code if numpb.HasField("country_code") else None,
numpb.national_number if numpb.HasField("national_number") else None,
numpb.extension if numpb.HasField("extension") else None,
numpb.italian_leading_zero if numpb.HasField("italian_leading_zero") else None,
numpb.raw_input if numpb.HasField("raw_input") else None,
numpb.country_code_source if numpb.HasField("country_code_source") else None,
numpb.preferred_domestic_carrier_code if numpb.HasField("preferred_domestic_carrier_code") else None)
def PyToPB(numobj):
"""Convert phonenumber.PhoneNumber to phonenumber_pb2.PhoneNumber"""
numpb = PhoneNumberPB()
if numobj.country_code is not None:
numpb.country_code = numobj.country_code
if numobj.national_number is not None:
numpb.national_number = numobj.national_number
if numobj.extension is not None:
numpb.extension = numobj.extension
if numobj.italian_leading_zero is not None:
numpb.italian_leading_zero = numobj.italian_leading_zero
if numobj.raw_input is not None:
numpb.raw_input = numobj.raw_input
if numobj.country_code_source is not None:
numpb.country_code_source = numobj.country_code_source
if numobj.preferred_domestic_carrier_code is not None:
numpb.preferred_domestic_carrier_code = numobj.preferred_domestic_carrier_code
return numpb
__all__ = ['PBToPy', 'PyToPB']
if __name__ == '__main__': # pragma no cover
import doctest
doctest.testmod()
| fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractAhgashonixnoveltranslationWordpressCom.py | Python | bsd-3-clause | 592 | 0.032095 |
def extractAhgashonixnoveltranslationWordpressCom(item):
'''
Parser for 'ahgashonixnoveltranslation.wordpress.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
| manthey/girder | girder/cli/__init__.py | Python | apache-2.0 | 358 | 0 |
from click_plugins import with_plugins
from pkg_resources import iter_entry_points
import click
@with_plugins(iter_entry_points('girder.cli_plugins'))
@click.group(help='Girder: data management platform for the web.',
context_settings=dict(help_option_names=['-h', '--help']))
@click.version_option(message='%(version)s')
def main():
pass
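For context, a short sketch of how a Click group like `main` above can be exercised from a test (the import path `girder.cli` mirrors this file's location and is an assumption here):

# Hypothetical smoke test using Click's built-in test runner.
from click.testing import CliRunner
from girder.cli import main

result = CliRunner().invoke(main, ['--help'])
assert result.exit_code == 0
print(result.output)  # shows the group help: 'Girder: data management platform for the web.'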
| cuckoobox/cuckoo | cuckoo/data/analyzer/android/modules/packages/apk.py | Python | mit | 851 | 0.001175 |
# Copyright (C) 2014-2016 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
# Originally contributed by Check Point Software Technologies, Ltd.
import logging
from lib.api.adb import dump_droidmon_logs, execute_sample, install_sample
from lib.common.abstracts import Package
log = logging.getLogger(__name__)
class Apk(Package):
"""Apk analysis package."""
def __init__(self, options={}):
super(Apk, self).__init__(options)
self.package, self.activity = options.get("apk_entry", ":").split(":")
def start(self, path):
install_sample(path)
execute_sample(self.package, self.activity)
def check(self):
return True
def finish(self):
dump_droidmon_logs(self.package)
return True
| Mangara/ArboralExplorer | lib/Cmpl/pyCmpl/lib/pyCmpl/CmplInstance.py | Python | apache-2.0 | 7,151 | 0.05286 |
#***********************************************************************
# This code is part of pyCMPL
#
# Copyright (C) 2013
# Mike Steglich - Technical University of Applied Sciences
# Wildau, Germany
#
# pyCMPL is a project of the Technical University of
# Applied Sciences Wildau and the Institute for Operations Research
# and Business Management at the Martin Luther University
# Halle-Wittenberg.
# Please visit the project homepage <www.coliop.org>
#
# pyCMPL is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# pyCMPL is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
#**********************************************************************
#!/usr/bin/python
import os
import xml.dom.minidom as dom
import random
import cStringIO
from xml.sax.saxutils import unescape, escape
from CmplException import *
#*************** CmplInstance ***********************************
class CmplInstance(object):
#*********** constructor **********
def __init__(self):
self.__cmplFile = ""
self.__optionsList = []
self.__dataString = ""
self.__cmplDataList = {}
self.__instStr = cStringIO.StringIO()
self.__jobId = ""
self.__cmplName = ""
#*********** end constructor ******
#*********** destructor ***********
def __del__(self):
self.__instStr.close()
#*********** end destructor *******
# getter **************************
@property
def jobId(self):
return self.__jobId
@property
def options(self):
return self.__optionsList
# end getter ***********************
#*********** cmplInstanceStr **********
def cmplInstanceStr(self, cmplFileName, optList, dataString, jobId):
if not os.path.isfile(cmplFileName):
raise CmplException("CMPL file " + cmplFileName + " does not exist." )
self.__cmplFile = cmplFileName
self.__optionsList = optList
self.__jobId = jobId
self.__dataString = dataString
if self.__dataString!="":
self.__cmplDataList.update({ "cmplData---"+os.path.basename(os.path.splitext(self.__cmplFile)[0])+".cdat" : self.__dataString })
try:
f = open(self.__cmplFile, "r")
lines = f.readlines()
f.close()
commentSection=False
lineNr = 0
for line in lines:
line = line.strip()
# besser contains
if line.startswith('/*'):
commentSection = True
continue
# besser contains
if line.startswith('*/'):
commentSection = False
if commentSection == True:
continue
if line.startswith('%data'):
if line.find(':') != -1:
tmpName = line[5:line.find(':')].strip()
else:
tmpName = line[5:].strip()
if tmpName == "":
if self.__dataString!="":
lines[lineNr] = line.replace("%data", "%data cmplData---"+os.path.basename(os.path.splitext(self.__cmplFile)[0])+".cdat ") + "\n"
tmpName="cmplData---"
else:
tmpName=os.path.basename(os.path.splitext(self.__cmplFile)[0])+".cdat"
if not (tmpName in self.__cmplDataList or tmpName=="cmplData---"):
tmpName1=os.path.dirname(os.path.abspath(cmplFileName))+"/"+tmpName
if not os.path.isfile(tmpName1):
raise CmplException("cmplDataFile " + tmpName1 + " does not exist." )
f = open(tmpName1, "r")
dataLines = f.readlines()
f.close()
tmpString = cStringIO.StringIO()
for dline in dataLines:
tmpString.write(dline)
self.__cmplDataList.update({ tmpName : tmpString.getvalue() })
tmpString.close()
lineNr += 1
self.__instStr.write("<?xml version = \"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n")
self.__instStr.write("<CmplInstance version=\"1.0\">\n")
self.__instStr.write("<general>\n")
self.__instStr.write("<name>"+os.path.basename(self.__cmplFile)+"</name>\n")
self.__instStr.write("<jobId>"+self.__jobId+"</jobId>\n")
self.__instStr.write("</general>\n")
if len(self.__optionsList) > 0:
self.__instStr.write("<options>\n")
for opt in self.__optionsList:
self.__instStr.write("<opt>"+opt+"</opt>\n")
self.__instStr.write("</options>\n")
self.__instStr.write("<problemFiles>\n")
self.__instStr.write("<file name=\""+ escape(os.path.basename(self.__cmplFile))+ "\" type=\"cmplMain\">\n")
for line in lines:
self.__instStr.write(escape(line))
#self.__instStr.write(b64encode(line))
#self.__instStr.write("\n")
self.__instStr.write("</file>\n")
for d in self.__cmplDataList:
self.__instStr.write("<file name=\""+
|
d + "\" type=\"cmplData\">\n")
#self.__instStr.write(b64encode(self.__cmplDataList[d]))
self.__instStr.write(escape(self.__cmplDataList[d]))
self.__instStr.write("\n")
self.__instStr.write("</file>\n")
self.__instStr.write("</problemFiles>\n")
self.__instStr.write("</CmplInstance>\n")
except IOError, e:
raise CmplException("IO error : "+e)
return self.__instStr.getvalue()
#*********** end cmplInstanceStr ******
#*********** writeCmplInstance **********
def writeCmplInstance(self, folder, instStr):
if os.path.exists(folder) == False:
raise CmplException("Path <"+self.__cmplServerPath+"> doesn't exist.")
instDom = dom.parseString(instStr)
if instDom.firstChild.nodeName!="CmplInstance":
raise CmplException("Cant't read cmplInstance file - wrong file type!")
for entry in instDom.firstChild.childNodes:
if entry.nodeName == "general":
for entry1 in entry.childNodes:
if entry1.nodeName == "name":
self.__cmplName = unescape(entry1.firstChild.data.strip())
continue
if entry1.nodeName == "jobId":
self.__jobId = entry1.firstChild.data.strip()
continue
if entry.nodeName == "options":
for entry1 in entry.childNodes:
if entry1.nodeName == "opt":
self.__optionsList.append(entry1.firstChild.data.strip())
if entry.nodeName == "problemFiles":
for entry1 in entry.childNodes:
if entry1.nodeName == "file":
tmpName = folder+self.__jobId+os.sep+entry1.getAttribute("name")
#tmpContent = b64decode(entry1.firstChild.data.strip())
tmpContent = unescape(entry1.firstChild.data.strip())
try:
f = open(tmpName, 'w')
f.write(tmpContent)
f.close()
except IOError, e:
raise CmplException("IO error for file " + tmpName + ": "+e)
#*********** end writeCmplInstance *******
#*************** end CmplInstance ******************************
| chrisbay/library.kdhx.org | library/jinja2.py | Python | gpl-3.0 | 1,663 | 0.004811 |
from django.contrib.staticfiles.storage import staticfiles_storage
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from jinja2 import Environment
from albums.models import Album, Artist, RecordLabel
def get_spotify_search_url(term):
return 'https://open.spotify.com/search/results/'+term
def get_entity_url(watson_obj):
content_type = ContentType.objects.get(app_label=watson_obj.content_type.app_label,
model=watson_obj.content_type.model)
obj_class = content_type.model_class()
url = ''
if obj_class == Album:
url = reverse('albums:album-detail', args=[watson_obj.object_id_int])
elif obj_class == Artist:
url = reverse('albums:albums-by-artist', args=[watson_obj.object_id_int])
elif obj_class == RecordLabel:
url = reverse('albums:albums-by-label', args=[watson_obj.object_id_int])
return url
ENTITY_LABELS = {
Album: 'Album',
RecordLabel: 'Label',
Artist: 'Artist',
}
def get_entity_type_label(watson_obj):
content_type = ContentType.objects.get(app_label=watson_obj.content_type.app_label,
model=watson_obj.content_type.model)
obj_class = content_type.model_class()
return ENTITY_LABELS[obj_class]
def environment(**options):
env = Environment(**options)
env.globals.update({
'static': staticfiles_storage.url,
'url': reverse,
'get_spotify_search_url': get_spotify_search_url,
'get_entity_url': get_entity_url,
'get_entity_type_label': get_entity_type_label,
})
return env
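The `environment` factory above is the kind of callable Django's Jinja2 template backend expects; a sketch of the settings entry that would wire it up (the dotted path follows this file's location and is an assumption):

# Hypothetical TEMPLATES entry in the project's settings.py.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.jinja2.Jinja2',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {'environment': 'library.jinja2.environment'},
    },
]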
| evernym/zeno | plenum/test/req_drop/test_req_drop_on_prepare_phase_primary.py | Python | apache-2.0 | 3,979 | 0.001257 |
import pytest
from plenum.test.helper import sdk_send_random_requests
from stp_core.loop.eventually import eventually
from plenum.common.messages.node_messages import Prepare, Commit
from plenum.test.delayers import delay
from plenum.test.propagate.helper import recvdRequest, recvdPropagate, \
sentPropagate, recvdPrepareForInstId, recvdCommitForInstId
from plenum.test.test_node import TestNode
from plenum.test.node_request.helper import sdk_ensure_pool_functional
howlong = 20
initial_ledger_size = 0
@pytest.fixture(scope="module")
def tconf(tconf):
OUTDATED_REQS_CHECK_ENABLED_OLD = tconf.OUTDATED_REQS_CHECK_ENABLED
OUTDATED_REQS_CHECK_INTERVAL_OLD = tconf.OUTDATED_REQS_CHECK_INTERVAL
PROPAGATES_PHASE_REQ_TIMEOUT_OLD = tconf.PROPAGATES_PHASE_REQ_TIMEOUT
ORDERING_PHASE_REQ_TIMEOUT_OLD = tconf.ORDERING_PHASE_REQ_TIMEOUT
tconf.OUTDATED_REQS_CHECK_ENABLED = True
tconf.OUTDATED_REQS_CHECK_INTERVAL = 1
tconf.PROPAGATES_PHASE_REQ_TIMEOUT = 3600
tconf.ORDERING_PHASE_REQ_TIMEOUT = 3
yield tconf
tconf.OUTDATED_REQS_CHECK_ENABLED = OUTDATED_REQS_CHECK_ENABLED_OLD
tconf.OUTDATED_REQS_CHECK_INTERVAL = OUTDATED_REQS_CHECK_INTERVAL_OLD
tconf.PROPAGATES_PHASE_REQ_TIMEOUT = PROPAGATES_PHASE_REQ_TIMEOUT_OLD
tconf.ORDERING_PHASE_REQ_TIMEOUT = ORDERING_PHASE_REQ_TIMEOUT_OLD
@pytest.fixture()
def setup(txnPoolNodeSet, looper, sdk_pool_handle, sdk_wallet_client):
global initial_ledger_size
A, B, C, D = txnPoolNodeSet # type: TestNode
lagged_node = A
frm = [B, C, D]
delay(Prepare, frm=frm, to=lagged_node, howlong=howlong)
delay(Commit, frm=frm, to=lagged_node, howlong=howlong + 3)
initial_ledger_size = txnPoolNodeSet[0].domainLedger.size
request_couple_json = sdk_send_random_requests(
looper, sdk_pool_handle, sdk_wallet_client, 1)
return request_couple_json
def test_req_drop_on_prepare_phase_on_master_primary_and_then_ordered(
tconf, setup, looper, txnPoolNodeSet,
sdk_wallet_client, sdk_pool_handle):
global initial_ledger_size
A, B, C, D = txnPoolNodeSet # type: TestNode
lagged_node = A
def check_propagates():
# Node should have received a request from the client
assert len(recvdRequest(lagged_node)) == 1
# Node should have received a PROPAGATEs
assert len(recvdPropagate(lagged_node)) == 3
# Node should have sent a PROPAGATE
assert len(sentPropagate(lagged_node)) == 1
# Node should have one request in the requests queue
assert len(lagged_node.requests) == 1
timeout = howlong - 2
looper.run(eventually(check_propagates, retryWait=.5, timeout=timeout))
def check_drop():
# Node should have not received Prepares and Commits for master instance
assert len(recvdPrepareForInstId(lagged_node, 0)) == 0
assert len(recvdCommitForInstId(lagged_node, 0)) == 0
# Request object should be dropped by timeout
assert len(lagged_node.requests) == 0
timeout = tconf.ORDERING_PHASE_REQ_TIMEOUT + tconf.OUTDATED_REQS_CHECK_INTERVAL + 1
looper.run(eventually(check_drop, retryWait=.5, timeout=timeout))
for n in txnPoolNodeSet:
n.nodeIbStasher.resetDelays()
def check_prepares_and_commits_received():
# Node should have received all delayed Prepares and Commits for master instance
assert len(recvdPrepareForInstId(lagged_node, 0)) == 3
assert len(recvdCommitForInstId(lagged_node, 0)) == 3
timeout = howlong * 2
looper.run(eventually(check_prepares_and_commits_received, retryWait=.5, timeout=timeout))
def check_ledger_size():
# The request should be eventually ordered
for n in txnPoolNodeSet:
assert n.domainLedger.size - initial_ledger_size == 1
looper.run(eventually(check_ledger_size, retryWait=.5, timeout=timeout))
sdk_ensure_pool_functional(looper, txnPoolNodeSet, sdk_wallet_client, sdk_pool_handle)
| wxgeo/geophar | wxgeometrie/param/modules.py | Python | gpl-2.0 | 3,148 | 0.00446 |
# -*- coding: utf-8 -*-
######################################
#
# Détection des modules
#
######################################
#
# WxGeometrie
# Dynamic geometry, graph plotter, and more for french mathematic teachers.
# Copyright (C) 2005-2013 Nicolas Pourcelot
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
######################################
import os
from runpy import run_path
from .parametres import modules_par_defaut
# Modules a importer
# ----------------
_skip_dir = ('OLD', '__pycache__')
_modules_dir = os.path.normpath(os.path.join(__file__, '..', '..', 'modules'))
def _detecter_modules():
modules = []
descriptions = {}
def err(nom, msg):
print("Warning: %s n'est pas un module valide (%s)." %(nom, msg))
for nom in os.listdir(_modules_dir):
if nom not in _skip_dir and os.path.isdir(os.path.join(_modules_dir, nom)):
description_file = os.path.join(_modules_dir, nom, 'description.py')
if os.path.isfile(description_file):
try:
compile(nom + '=0', '', 'single') # On teste si le nom est valide
try:
d = {}
d = run_path(description_file, d)
if d['description']['groupe'] != "Modules":
# Sert à désactiver les modules en construction.
continue
descriptions[nom] = d['description']
modules.append(nom)
except:
err(nom, "fichier '%s' incorrect" %description_file)
except Exception:
err(nom, "nom de module invalide")
else:
err(nom, "fichier 'description.py' introuvable")
return modules, descriptions
try:
modules, descriptions_modules = _detecter_modules()
except OSError:
print("Warning: impossible de détecter l
|
es modules (répertoire '%s') !" % _modules_dir)
modules = []
descriptions_modules = {}
modules_actifs = dict.fromkeys(modules, False)
for nom in modules_par_defaut:
modules_actifs[nom] = True
def _key(nom):
# les modules activés par défaut apparaissent en premier,
# les autres sont classés par ordre alphabétique.
key = [1000000, nom]
if nom in modules_par_defaut:
key[0] = modules_par_defaut.index(nom)
return key
modules.sort(key = _key)
| setokinto/slack-shogi | app/kifu.py | Python | mit | 255 | 0.011765 |
class Kifu:
def __init__(self):
self.kifu = []
def add(self, from_x, from_y, to_x, to_y, promote, koma):
self.kifu.append((from_x, from_y, to_x, to_y, promote, koma))
def pop(self):
return self.kifu.pop()
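A minimal usage sketch of the move log above (the coordinates and piece name are only illustrative):

# Record one move and then undo it; pop() returns the tuple that add() stored.
kifu = Kifu()
kifu.add(7, 7, 7, 6, False, "fu")
assert kifu.pop() == (7, 7, 7, 6, False, "fu")
assert kifu.kifu == []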
| RodericDay/MiniPNM | unit_tests/test_graphics.py | Python | mit | 974 | 0.016427 |
import numpy as np
import minipnm as mini
def test_scene(N=10):
try:
import vtk
except ImportError:
return
scene = mini.Scene()
network = mini.Cubic([10,10])
# draw a simple wired cubic going from red to white to blue
script = [network.diagonals.data[0]*i for i in range(N)]
wires = mini.graphics.Wires(network.points, network.pairs, script)
scene.add_actors([wires])
# draw some random green popping bubbles
network['x'] += 11
base = np.random.rand(network.order)/4+0.25
radii = [(base+0.5/N*i)%0.5 for i in range(N)]
spheres = mini.graphics.Spheres(network.points, radii, color=(0,1,0), alpha=0.5)
scene.add_actors([spheres])
# draw a tube cross
network['x'] -= 11
network['y'] -= 11
tubes = mini.graphics.Tubes([network.centroid]*2,[(10,0,0),(0,10,0)],[1,1])
scene.add_actors([tubes])
return scene
if __name__ == '__main__':
scene = test_scene(30)
scene.play()
| PythonProgramming/Support-Vector-Machines---Basics-and-Fundamental-Investing-Project | p10.py | Python | mit | 4,949 | 0.018792 |
import pandas as pd
import os
import time
from datetime import datetime
import re
from time import mktime
import matplotlib
import matplotlib.pyplot as plt
from matplotlib import style
style.use("dark_background")
# path = "X:/Backups/intraQuarter" # for Windows with X files :)
# if git clone'ed then use relative path,
# assuming you extracted the downloaded zip into this project's folder:
path = "intraQuarter"
def Key_Stats(gather="Total Debt/Equity (mrq)"):
statspath = path+'/_KeyStats'
stock_list = [x[0] for x in os.walk(statspath)]
df = pd.DataFrame(
columns = [
'Date',
'Unix',
'Ticker',
'DE Ratio',
'Price',
'stock_p_change',
'SP500',
'sp500_p_change',
'Difference',
'Status'
]
)
sp500_df = pd.DataFrame.from_csv("YAHOO-INDEX_GSPC.csv")
ticker_list = []
for each_dir in stock_list[1:25]:
each_file = os.listdir(each_dir)
# ticker = each_dir.split("\\")[1] # Windows only
# ticker = each_dir.split("/")[1] # this didn't work so do this:
ticker = os.path.basename(os.path.normpath(each_dir))
# print(ticker) # uncomment to verify
ticker_list.append(ticker)
starting_stock_value = False
starting_sp500_value = False
if len(each_file) > 0:
for file in each_file:
date_stamp = datetime.strptime(file, '%Y%m%d%H%M%S.html')
unix_time = time.mktime(date_stamp.timetuple())
full_file_path = each_dir + '/' + file
source = open(full_file_path,'r').read()
try:
try:
value = float(source.split(gather+':</td><td class="yfnc_tabledata1">')[1].split('</td>')[0])
except:
value = float(source.split(gather+':</td>\n<td class="yfnc_tabledata1">')[1].split('</td>')[0])
try:
sp500_date = datetime.fromtimestamp(unix_time).strftime('%Y-%m-%d')
row = sp500_df[(sp500_df.index == sp500_date)]
sp500_value = float(row['Adjusted Close'])
except:
sp500_date = datetime.fromtimestamp(unix_time-259200).strftime('%Y-%m-%d')
row = sp500_df[(sp500_df.index == sp500_date)]
sp500_value = float(row['Adjusted Close'])
try:
stock_price = float(source.split('</small><big><b>')[1].split('</b></big>')[0])
except:
try:
stock_price = (source.split('</small><big><b>')[1].split('</b></big>')[0])
#print(stock_price)
stock_price = re.search(r'(\d{1,8}\.\d{1,8})', stock_price)
stock_price = float(stock_price.group(1))
#print(stock_price)
except:
try:
stock_price = (source.split('<span class="time_rtq_ticker">')[1].split('</span>')[0])
#print(stock_price)
stock_price = re.search(r'(\d{1,8}\.\d{1,8})', stock_price)
stock_price = float(stock_price.group(1))
#print(stock_price)
except:
print('wtf stock price lol',ticker,file, value)
time.sleep(5)
if not starting_stock_value:
starting_stock_value = stock_price
if not starting_sp500_value:
starting_sp500_value = sp500_value
stock_p_change = ((stock_price - starting_stock_value) / starting_stock_value) * 100
sp500_p_change = ((sp500_value - starting_sp500_value) / starting_sp500_value) * 100
location = len(df['Date'])
difference = stock_p_change-sp500_p_change
if difference > 0:
status = "outperform"
else:
status = "underperform"
df = df.append({'Date':date_stamp,
'Unix':unix_time,
'Ticker':ticker,
'DE Ratio':value,
'Price':stock_price,
'stock_p_change':stock_p_change,
'SP500':sp500_value,
'sp500_p_change':sp500_p_change,
############################
'Difference':difference,
'Status':status},
ignore_index=True)
except Exception as e:
pass
#print(ticker,e,file, value)
#print(ticker_list)
#print(df)
for each_ticker in ticker_list:
try:
plot_df = df[(df['Ticker'] == each_ticker)]
plot_df = plot_df.set_index(['Date'])
if plot_df['Status'][-1] == 'underperform':
color = 'r'
else:
color = 'g'
plot_df['Difference'].plot(label=each_ticker, color=color)
plt.legend()
except Exception as e:
print(str(e))
plt.show()
save = gather.replace(' ','').replace(')','').replace('(','').replace('/','')+str('.csv')
print(save)
df.to_csv(save)
Key_Stats()
| miguelinux/vbox | src/VBox/ValidationKit/testmanager/webui/wuihlpgraph.py | Python | gpl-2.0 | 4,309 | 0.01787 |
# -*- coding: utf-8 -*-
# $Id: wuihlpgraph.py $
"""
Test Manager Web-UI - Graph Helpers.
"""
__copyright__ = \
"""
Copyright (C) 2012-2015 Oracle Corporation
This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.org. This file is free software;
you can redistribute it and/or modify it under the terms of the GNU
General Public License (GPL) as published by the Free Software
Foundation, in version 2 as it comes in the "COPYING" file of the
VirtualBox OSE distribution. VirtualBox OSE is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
The contents of this file may alternatively be used under the terms
of the Common Development and Distribution License Version 1.0
(CDDL) only, as it comes in the "COPYING.CDDL" file of the
VirtualBox OSE distribution, in which case the provisions of the
CDDL are applicable instead of those of the GPL.
You may elect to license modified versions of this file under the
terms and conditions of either the GPL or the CDDL or both.
"""
__version__ = "$Revision: 108597 $"
class WuiHlpGraphDataTable(object): # pylint: disable=R0903
"""
Data table container.
"""
class Row(object): # pylint: disable=R0903
"""A row."""
def __init__(self, sGroup, aoValues, asValues = None):
self.sName = sGroup;
self.aoValues = aoValues;
if asValues is None:
self.asValues = [str(oVal) for oVal in aoValues];
else:
assert len(asValues) == len(aoValues);
self.asValues = asValues;
def __init__(self, sGroupLable, asMemberLabels):
self.aoTable = [ WuiHlpGraphDataTable.Row(sGroupLable, asMemberLabels), ];
def addRow(self, sGroup, aoValues, asValues = None):
"""Adds a row to the data table."""
self.aoTable.append(WuiHlpGraphDataTable.Row(sGroup, aoValues, asValues));
return True;
def getGroupCount(self):
"""Gets the number of data groups (rows)."""
return len(self.aoTable) - 1;
class WuiHlpGraphDataTableEx(object): # pylint: disable=R0903
"""
Data container for an table/graph with optional error bars on the Y values.
"""
class DataSeries(object): # pylint: disable=R0903
"""
A data series.
The aoXValues, aoYValues and aoYErrorBars are parallel arrays, making a
series of (X,Y,Y-err-above-delta,Y-err-below-delta) points.
The error bars are optional.
"""
def __init__(self, sName, aoXValues, aoYValues, asHtmlTooltips = None, aoYErrorBarBelow = None, aoYErrorBarAbove = None):
self.sName = sName;
self.aoXValues = aoXValues;
self.aoYValues = aoYValues;
self.asHtmlTooltips = asHtmlTooltips;
self.aoYErrorBarBelow = aoYErrorBarBelow;
self.aoYErrorBarAbove = aoYErrorBarAbove;
def __init__(self, sXUnit, sYUnit):
self.sXUnit = sXUnit;
self.sYUnit = sYUnit;
self.aoSeries = [];
def addDataSeries(self, sName, aoXValues, aoYValues, asHtmlTooltips = None, aoYErrorBarBelow = None, aoYErrorBarAbove = None):
"""Adds an data series to the table."""
self.aoSeries.append(WuiHlpGraphDataTableEx.DataSeries(sName, aoXValues, aoYValues, asHtmlTooltips,
aoYErrorBarBelow, aoYErrorBarAbove));
return True;
def getDataSeriesCount(self):
"""Gets the number of data series."""
return len(self.aoSeries);
#
# Dynamically choose implementation.
#
if True: # pylint: disable=W0125
from testmanager.webui import wuihlpgraphgooglechart as GraphImplementation;
else:
try:
import matplotlib; # pylint: disable=W0611,F0401,import-error,wrong-import-order
from testmanager.webui import wuihlpgraphmatplotlib as GraphImplementation;
except:
from testmanager.webui import wuihlpgraphsimple as GraphImplementation;
# pylint: disable=C0103
WuiHlpBarGraph = GraphImplementation.WuiHlpBarGraph;
WuiHlpLineGraph = GraphImplementation.WuiHlpLineGraph;
WuiHlpLineGraphErrorbarY = GraphImplementation.WuiHlpLineGraphErrorbarY;
| a-rank/cassandra-tools | tests/test_cli.py | Python | apache-2.0 | 720 | 0 |
import pytest
from click.testing import CliRunner
from cassandra_tools import cli
@pytest.fixture
def runner():
return CliRunner()
def test_cli(runner):
result = runner.invoke(cli.main)
assert result.exit_code == 0
assert not result.exception
assert result.output.strip() == 'Hello, world.'
def test_cli_with_option(runner):
result = runner.invoke(cli.main, ['--as-cowboy'])
assert not result.exception
assert result.exit_code == 0
assert result.output.strip() == 'Howdy, world.'
def test_cli_with_arg(runner):
result = runner.invoke(cli.main, ['Allan'])
assert result.exit_code == 0
assert not result.exception
assert result.output.strip() == 'Hello, Allan.'
| carolFrohlich/nipype | nipype/interfaces/dipy/simulate.py | Python | bsd-3-clause | 12,156 | 0.000082 |
# -*- coding: utf-8 -*-
"""Change directory to provide relative paths for doctests
>>> import os
>>> filepath = os.path.dirname( os.path.realpath( __file__ ) )
>>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data'))
>>> os.chdir(datadir)
"""
from __future__ import print_function, division, unicode_literals, absolute_import
from multiprocessing import (Pool, cpu_count)
import os.path as op
from builtins import range
import numpy as np
import nibabel as nb
from ... import logging
from ..base import (traits, TraitedSpec, BaseInterfaceInputSpec,
File, InputMultiPath, isdefined)
from .base import DipyBaseInterface
IFLOGGER = logging.getLogger('interface')
class SimulateMultiTensorInputSpec(BaseInterfaceInputSpec):
in_dirs = InputMultiPath(File(exists=True), mandatory=True,
desc='list of fibers (principal directions)')
in_frac = InputMultiPath(File(exists=True), mandatory=True,
desc=('volume fraction of each fiber'))
in_vfms = InputMultiPath(File(exists=True), mandatory=True,
desc=('volume fractions of isotropic '
'compartiments'))
in_mask = File(exists=True, desc='mask to simulate data')
diff_iso = traits.List(
[3000e-6, 960e-6, 680e-6], traits.Float, usedefault=True,
desc='Diffusivity of isotropic compartments')
diff_sf = traits.Tuple(
(1700e-6, 200e-6, 200e-6),
traits.Float, traits.Float, traits.Float, usedefault=True,
desc='Single fiber tensor')
n_proc = traits.Int(0, usedefault=True, desc='number of processes')
baseline = File(exists=True, mandatory=True, desc='baseline T2 signal')
gradients = File(exists=True, desc='gradients file')
in_bvec = File(exists=True, desc='input bvecs file')
in_bval = File(exists=True, desc='input bvals file')
num_dirs = traits.Int(32, usedefault=True,
desc=('number of gradient directions (when table '
'is automatically generated)'))
bvalues = traits.List(traits.Int, value=[1000, 3000], usedefault=True,
desc=('list of b-values (when table '
'is automatically generated)'))
out_file = File('sim_dwi.nii.gz', usedefault=True,
desc='output file with fractions to be simluated')
out_mask = File('sim_msk.nii.gz', usedefault=True,
desc='file with the mask simulated')
out_bvec = File('bvec.sim', usedefault=True, desc='simulated b vectors')
out_bval = File('bval.sim', usedefault=True, desc='simulated b values')
snr = traits.Int(0, usedefault=True, desc='signal-to-noise ratio (dB)')
class SimulateMultiTensorOutputSpec(TraitedSpec):
out_file = File(exists=True, desc='simulated DWIs')
out_mask = File(exists=True, desc='mask file')
out_bvec = File(exists=True, desc='simulated b vectors')
out_bval = File(exists=True, desc='simulated b values')
class SimulateMultiTensor(DipyBaseInterface):
"""
Interface to MultiTensor model simulator in dipy
http://nipy.org/dipy/examples_built/simulate_multi_tensor.html
Example
-------
>>> import nipype.interfaces.dipy as dipy
>>> sim = dipy.SimulateMultiTensor()
>>> sim.inputs.in_dirs = ['fdir00.nii', 'fdir01.nii']
>>> sim.inputs.in_frac = ['ffra00.nii', 'ffra01.nii']
>>> sim.inputs.in_vfms = ['tpm_00.nii.gz', 'tpm_01.nii.gz',
... 'tpm_02.nii.gz']
>>> sim.inputs.baseline = 'b0.nii'
>>> sim.inputs.in_bvec = 'bvecs'
>>> sim.inputs.in_bval = 'bvals'
>>> sim.run() # doctest: +SKIP
"""
input_spec = SimulateMultiTensorInputSpec
output_spec = SimulateMultiTensorOutputSpec
def _run_interface(self, runtime):
from dipy.core.gradients import gradient_table
# Gradient table
if isdefined(self.inputs.in_bval) and isdefined(self.inputs.in_bvec):
# Load the gradient strengths and directions
bvals = np.loadtxt(self.inputs.in_bval)
bvecs = np.loadtxt(self.inputs.in_bvec).T
gtab = gradient_table(bvals, bvecs)
else:
gtab = _generate_gradients(self.inputs.num_dirs,
self.inputs.bvalues)
ndirs = len(gtab.bvals)
np.savetxt(op.abspath(self.inputs.out_bvec), gtab.bvecs.T)
np.savetxt(op.abspath(self.inputs.out_bval), gtab.bvals)
# Load the baseline b0 signal
b0_im = nb.load(self.inputs.baseline)
hdr = b0_im.header
shape = b0_im.shape
aff = b0_im.affine
# Check and load sticks and their volume fractions
nsticks = len(self.inputs.in_dirs)
if len(self.inputs.in_frac) != nsticks:
raise RuntimeError(('Number of sticks and their volume fractions'
' must match.'))
# Volume fractions of isotropic compartments
nballs = len(self.inputs.in_vfms)
vfs = np.squeeze(nb.concat_images(
[nb.load(f) for f in self.inputs.in_vfms]).get_data())
if nballs == 1:
vfs = vfs[..., np.newaxis]
total_vf = np.sum(vfs, axis=3)
# Generate a mask
if isdefined(self.inputs.in_mask):
msk = nb.load(self.inputs.in_mask).get_data()
msk[msk > 0.0] = 1.0
msk[msk < 1.0] = 0.0
else:
msk = np.zeros(shape)
msk[total_vf > 0.0] = 1.0
msk = np.clip(msk, 0.0, 1.0)
nvox = len(msk[msk > 0])
# Fiber fractions
ffsim = nb.concat_images([nb.load(f) for f in self.inputs.in_frac])
ffs = np.nan_to_num(np.squeeze(ffsim.get_data())) # fiber fractions
ffs = np.clip(ffs, 0., 1.)
if nsticks == 1:
ffs = ffs[..., np.newaxis]
for i in range(nsticks):
ffs[..., i] *= msk
total_ff = np.sum(ffs, axis=3)
# Fix incongruencies in fiber fractions
for i in range(1, nsticks):
if np.any(total_ff > 1.0):
errors = np.zeros_like(total_ff)
errors[total_ff > 1.0] = total_ff[total_ff > 1.0] - 1.0
ffs[..., i] -= errors
ffs[ffs < 0.0] = 0.0
total_ff = np.sum(ffs, axis=3)
for i in range(vfs.shape[-1]):
vfs[..., i] -= total_ff
vfs = np.clip(vfs, 0., 1.)
fractions = np.concatenate((ffs, vfs), axis=3)
nb.Nifti1Image(fractions, aff, None).to_filename('fractions.nii.gz')
nb.Nifti1Image(np.sum(fractions, axis=3), aff, None).to_filename(
'total_vf.nii.gz')
mhdr = hdr.copy()
mhdr.set_data_dtype(np.uint8)
mhdr.set_xyzt_units('mm', 'sec')
nb.Nifti1Image(msk, aff, mhdr).to_filename(
op.abspath(self.inputs.out_mask))
# Initialize stack of args
fracs = fractions[msk > 0]
# Stack directions
dirs = None
for i in range(nsticks):
f = self.inputs.in_dirs[i]
fd = np.nan_to_num(nb.load(f).get_data())
w = np.linalg.norm(fd, axis=3)[..., np.newaxis]
w[w < np.finfo(float).eps] = 1.0
fd /= w
if dirs is None:
dirs = fd[msk > 0].copy()
else:
dirs = np.hstack((dirs, fd[msk > 0]))
# Add random directions for isotropic components
for d in range(nballs):
fd = np.random.randn(nvox, 3)
w = np.linalg.norm(fd, axis=1)
fd[w < np.finfo(float).eps, ...] = np.array([1., 0., 0.])
w[w < np.finfo(float).eps] = 1.0
fd /= w[..., np.newaxis]
dirs = np.hstack((dirs, fd))
sf_evals = list(self.inputs.diff_sf)
ba_evals = list(self.inputs.diff_iso)
mevals = [sf_evals] * nsticks + \
[[ba_evals[d]] * 3 for d in range(nballs)]
b0 = b0_im.get_data()[msk > 0]
args = []
for i in range(nvox):
args.append(
{'fractions': fr
| erudit/zenon | eruditorg/core/editor/migrations/0006_auto_20161028_1032.py | Python | gpl-3.0 | 620 | 0.001613 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-10-28 15:32
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('editor', '0005_auto_20160920_1037'),
]
operations = [
migrations.AlterField(
model_name='issuesubmission',
name='contact',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='Personne-ressource'),
),
]
| fxia22/ASM_xf | PythonD/bin/python/drv_xmlproc.py | Python | gpl-2.0 | 11,377 | 0.022062 |
"""
A SAX driver for xmlproc
$Id: drv_xmlproc.py,v 1.9 1999/10/15 07:55:33 larsga Exp $
"""
version="0.95"
from xml.sax import saxlib,saxutils,saxmisc
from xml.parsers.xmlproc import xmlproc
import os
pre_parse_properties={"http://xml.org/sax/properties/namespace-sep":1,
"http://xml.org/sax/handlers/DeclHandler":1,
"http://xml.org/sax/handlers/LexicalHandler":1,
"http://xml.org/sax/handlers/NamespaceHandler":1}
# Todo:
# - must actually use catalog file
# - document interplay between reset and SAX2
# - fix bugs:
# - startDTD must be called
# - do namespace processing, if it is requested
# - support more features and properties
XCATALOG =1
SOCATALOG=2
# --- SAX_XPParser
class SAX_XPParser(saxlib.Parser,xmlproc.Application,xmlproc.DTDConsumer,
xmlproc.ErrorHandler,xmlproc.PubIdResolver):
def __init__(self):
saxlib.Parser.__init__(self)
self.reset()
self.declHandler=saxmisc.DeclHandler()
self.lexicalHandler=saxmisc.LexicalHandler()
self.namespaceHandler=saxmisc.NamespaceHandler()
self.ns_separator=" "
self.locator=1
self.is_parsing=0
self.stop_on_error=1
self.catalog_file=None
self.catalog_type=None
def parse(self,sysID):
self.reset()
try:
self.is_parsing=1
self.parser.parse_resource(sysID)
finally:
self.is_parsing=0
def parseFile(self,file):
self.reset()
try:
self.is_parsing=1
self.parser.read_from(file)
self.parser.flush()
self.parser.parseEnd()
finally:
self.is_parsing=0
def _create_parser(self):
return xmlproc.XMLProcessor()
def setLocale(self, locale):
try:
self.parser.set_error_language(locale)
except KeyError:
raise saxlib.SAXNotSupportedException("Locale '%s' not supported" % locale)
# --- data event methods
def doc_start(self):
if self.locator:
self.doc_handler.setDocumentLocator(self)
self.doc_handler.startDocument()
def doc_end(self):
self.doc_handler.endDocument()
def handle_data(self,data,start,end):
self.doc_handler.characters(data,start,end-start)
def handle_ignorable_data(self,data,start,end):
self.doc_handler.ignorableWhitespace(data,start,end-start)
def handle_pi(self, target, data):
self.doc_handler.processingInstruction(target,data)
def handle_start_tag(self, name, attrs):
self.doc_handler.startElement(name,saxutils.AttributeMap(attrs))
def handle_end_tag(self, name):
self.doc_handler.endElement(name)
def handle_comment(self,content):
self.lexicalHandler.comment(content,0,len(content))
# --- pubid resolution
def resolve_entity_pubid(self,pubid,sysid):
return self.ent_handler.resolveEntity(pubid,sysid)
def resolve_doctype_pubid(self,pubid,sysid):
return self.ent_handler.resolveEntity(pubid,sysid)
# --- error handling
def warning(self,msg):
self.err_handler.warning(saxlib.SAXParseException(msg,None,self))
def error(self,msg):
self.err_handler.error(saxlib.SAXParseException(msg,None,self))
def fatal(self,msg):
self.err_handler.fatalError(saxlib.SAXParseException(msg,None,self))
# --- location handling
def getColumnNumber(self):
return self.parser.get_column()
def getLineNumber(self):
return self.parser.get_line()
def getSystemId(self):
return self.parser.get_current_sysid()
# --- DTD parsing
def new_external_entity(self,name,pubid,sysid,ndata):
if ndata!="":
self.dtd_handler.unparsedEntityDecl(name,pubid,sysid,ndata)
else:
# FIXME: ensure that only first decl is passed on
self.declHandler.externalEntityDecl(name,pubid,sysid)
def new_notation(self,name,pubid,sysid):
self.dtd_handler.notationDecl(name,pubid,sysid)
def dtd_start(self):
self.lexicalHandler.startDTD("","","")
def dtd_end(self):
self.lexicalHandler.endDTD()
def new_general_entity(self,name,val):
# FIXME: ensure that only first decl is passed on
self.declHandler.internalEntityDecl(name,val)
def new_element_type(self,elem_name,elem_cont):
# FIXME: only first
self.declHandler.elementDecl(elem_name,elem_cont)
def new_attribute(self,elem,attr,a_type,a_decl,a_def):
# FIXME: only first
if a_decl=="#DEFAULT": a_decl=None
self.declHandler.attributeDecl(elem,attr,a_type,a_decl,a_def)
# --- entity events
def resolve_entity(self,pubid,sysid):
newsysid=self.ent_handler.resolveEntity(pubid,sysid)
if newsysid==None:
return sysid
else:
return newsysid
# --- EXPERIMENTAL PYTHON SAX EXTENSIONS:
def get_parser_name(self):
return "xmlproc"
def get_parser_version(self):
return xmlproc.version
def get_driver_version(self):
return version
def is_validating(self):
return 0
def is_dtd_reading(self):
return 1
def reset(self):
if hasattr(self, "parser"):
self.parser.deref()
self.parser=self._create_parser()
self.parser.set_application(self)
self.parser.set_dtd_listener(self) # FIXME: Should we always do this?
self.parser.set_error_handler(self)
self.parser.set_pubid_resolver(self)
self.parser.reset()
def feed(self,data):
self.parser.feed(data)
def close(self):
self.parser.close()
self.parser.deref()
# Dereferencing to avoid circular references (grrrr)
self.err_handler = self.dtd_handler = self.doc_handler = None
self.parser = self.locator = self.ent_handler = None
# --- Configurable methods
def getFeature(self, featureId):
if featureId=="http://xml.org/sax/features/use-locator":
return self.locator
elif featureId=="http://xml.org/sax/features/validation":
return 0
elif featureId=="http://garshol.priv.no/sax/stop-on-error":
return self.stop_on_error
elif featureId=="http://garshol.priv.no/sax/use-catalog":
return self.catalog_file
elif featureId=="http://xml.org/sax/features/external-general-entities" or \
             featureId=="http://xml.org/sax/features/external-parameter-entities" or \
             featureId=="http://xml.org/sax/features/namespaces" or \
featureId=="http://xml.org/sax/features/normalize-text":
raise saxlib.SAXNotSupportedException("Feature %s not supported" %
featureId)
else:
raise saxlib.SAXNotRecognizedException("Feature %s not recognized"
% featureId)
def setFeature(self, featureId, state):
if featureId=="http://xml.org/sax/features/use-locator":
self.locator=state
elif featureId=="http://garshol.priv.no/sax/stop-on-error":
self.stop_on_error=state
self.parser.set_data_after_wf_error(state)
elif featureId=="http://garshol.priv.no/sax/use-catalog":
if state:
if os.environ.has_key("XMLXCATALOG"):
self.catalog_file=os.environ["XMLXCATALOG"]
self.catalog_type=XCATALOG
elif os.environ.has_key("XMLSOCATALOG"):
self.catalog_file=os.environ["XMLSOCATALOG"]
self.catalog_type=SOCATALOG
else:
raise saxlib.SAXException("Neither XMLXCATALOG nor "
"XMLSOCATALOG variables set")
elif featureId=="http://xml.org/sax/features/validation" o
|
openego/data_processing
|
dataprocessing/python_scripts/ego_dp_loadarea_peakload.py
|
Python
|
agpl-3.0
| 10,383
| 0.003275
|
"""
Calculates peak load per load area
"""
__copyright__ = "Reiner Lemoine Institut, Flensburg University of Applied Sciences, Centre for Sustainable Energy Systems"
__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
__url__ = "https://github.com/openego/data_processing/blob/master/LICENSE"
__author__ = "gplssm, IlkaCu"
import pandas as pd
from workalendar.europe import Germany
from datetime import time as settime
import time
from sqlalchemy.orm import sessionmaker
from demandlib import bdew as bdew, particular_profiles as profiles
from dataprocessing.tools import io, metadata
from egoio.db_tables.model_draft import EgoDemandLoadareaPeakLoad as orm_peak_load
from oemof.db import tools
from dataprocessing.python_scripts.functions.ego_scenario_log import write_ego_scenario_log
def get_load_areas_table(schema, table, index_col, conn, columns=None):
r"""Retrieve load areas intermediate results table from oedb
"""
# retrieve table with processed input data
load_areas = pd.read_sql_table(table, conn, schema=schema,
index_col=index_col, columns=columns)
return load_areas
def add_sectoral_peak_load(load_areas, **kwargs):
r"""Add peak load per sector based on given annual consumption
"""
# define data year
# TODO: in the future get this from somewhere else
year = 2011
# call demandlib
# TODO: change to use new demandlib
# read standard load profiles
e_slp = bdew.ElecSlp(year, holidays=holidays)
# multiply given annual demand with timeseries
# elec_demand = e_slp.get_profile(load_areas['h0', 'g0', 'l0', 'i0'].to_dict())
elec_demand = e_slp.get_profile(load_areas.to_dict())
# tmp_peak_load = dm.electrical_demand(method='calculate_profile',
# year=year,
# ann_el_demand_per_sector= {
# 'h0':
# load_areas['sector_consumption_residential'],
# 'g0':
# load_areas['sector_consumption_retail'],
# 'i0':
# load_areas['sector_consumption_industrial'],
# 'l0':
# load_areas['sector_consumption_agricultural']}
# ).elec_demand
# hack correct industrial profile into dataframe
# print(load_areas['sector_consumption_industrial'])
# if load_areas['sector_consumption_industrial'] == 0:
# load_areas['sector_consumption_industrial'] = 0.1
# Add the slp for the industrial group
ilp = profiles.IndustrialLoadProfile(e_slp.date_time_index,
holidays=holidays)
# Beginning and end of workday, weekdays and weekend days, and scaling factors
# by default
elec_demand['i0'] = ilp.simple_profile(
load_areas['i0'],
am=settime(6, 0, 0),
pm=settime(22, 0, 0),
profile_factors=
{'week': {'day': 0.8, 'night': 0.6},
'weekend': {'day': 0.6, 'night': 0.6}})
# Resample 15-minute values to hourly values and sum across sectors
elec_demand = elec_demand.resample('H').mean().fillna(0).max().to_frame().T
# demand_industry = eb.IndustrialLoadProfile('simple_industrial_profile',
# **{'annual_demand': load_areas['sector_consumption_industrial'],
# 'year': year,
# 'am': settime(6, 0, 0),
# 'pm': settime(22, 0, 0),
# 'profile_factors':
# {'week': {'day': 0.8, 'night': 0.6},
# 'weekend': {'day': 0.6, 'night': 0.6}}
# })
# ind_demand = demand_industry.profile
# elec_demand['i0'] = ind_demand
peak_load = elec_demand.max(axis=0)
return peak_load
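# A hedged call sketch for add_sectoral_peak_load above (column names follow the
# names_dc mapping used in __main__ below; the numbers are made up):
#
#   row = pd.Series({'h0': 4000.0, 'g0': 1500.0, 'l0': 300.0, 'i0': 2500.0})
#   peak = add_sectoral_peak_load(row.fillna(0))
#
# Note that the module-level `holidays` dict defined in __main__ must exist
# before the call, since the function reads it as a global.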
if __name__ == '__main__':
la_index_col = 'id'
schema = 'model_draft'
table = 'ego_demand_loadarea'
target_table = 'ego_demand_loadarea_peak_load'
year = 2011
db_group = 'oeuser'
cal = Germany()
holidays = dict(cal.holidays(2011))
# get database connection object
conn = io.oedb_session(section='oedb')
Session = sessionmaker(bind=conn)
session = Session()
# retrieve load areas table
columns = [la_index_col,
'sector_consumption_residential',
'sector_consumption_retail',
'sector_consumption_industrial',
'sector_consumption_agricultural']
load_areas = get_load_areas_table(schema, table, la_index_col, conn,
columns=columns)
write_ego_scenario_log(conn=conn,
version='v0.4.5',
io='input',
schema='model_draft',
table=table,
script='ego_dp_loadarea_peakload.py',
entries=len(load_areas))
names_dc = {'sector_consumption_residential': 'h0',
'sector_consumption_retail': 'g0',
'sector_consumption_agricultural': 'l0',
'sector_consumption_industrial': 'i0',}
names_dc2 = {'h0': 'residential',
'g0': 'retail',
'l0': 'agricultural',
'i0': 'industrial'}
# rename columns to demandlib compatible names
load_areas.rename(columns=names_dc, inplace=True)
# # delete old content from table
# del_str = "DROP TABLE IF EXISTS {0}.{1} CASCADE;".format(
# schema, target_table)
# conn.execute(del_str)
# empty table or create
try:
orm_peak_load.__table__.create(conn)
except:
session.query(orm_peak_load).delete()
session.commit()
# Use above function `add_sectoral_peak_load` via apply
# elec_demand = load_areas.fillna(0).apply(
# add_sectoral_peak_load, axis=1, args=())
# read standard load profiles
e_slp = bdew.ElecSlp(year, holidays=holidays)
# Add the slp for the industrial group
ilp = profiles.IndustrialLoadProfile(e_slp.date_time_index,
holidays=holidays)
# counter
ctr = 0
    # iterate over load areas, retrieving sectoral demand for each of them
for it, row in load_areas.iterrows():
row = row.fillna(0)
# multiply given annual demand with timeseries
elec_demand = e_slp.get_profile(row.to_dict())
# Beginning and end of workday, weekdays and weekend days, and scaling factors
# by default
elec_demand['i0'] = ilp.simple_profile(
row['i0'],
am=settime(6, 0, 0),
pm=settime(22, 0, 0),
            profile_factors=
{'week': {'day': 0.8, 'night': 0.6},
'weekend': {'day': 0.6, 'night': 0.6}})
# Resample 15-minute values to hourly values and sum across sectors
elec_demand = elec_demand.resample('H').mean().fillna(0).max().to_frame().T#.max(axis=0)#.to_frame().unstack()#.\
# to_frame(name='peak_load')
elec_demand['id'] = it
        elec_demand.set_index('id', inplace=True)
# rename columns
elec_demand.rename(columns=names_dc2, inplace=True)
# Add data to orm object
peak_load = orm_peak_load(
id=it,
retail=float(elec_demand['retail']),
residential=float(elec_demand['residential']),
industrial=float(elec_demand['industrial']),
agricultural=float(elec_demand['agricultural']))
session.add(peak_load)
# # write results to new database table
# elec_demand.to_sql(target_table,
# conn,
# schema=schema,
# index=True,
# if_exists='fail')
ctr += 1
# commit data to database every 1000 datasets: This is done since pushing every
    # single dataset slows down entire script, single committing in the end sometimes
|
Taka-Coma/graphEmbedding_impls
|
TransH/train.py
|
Python
|
gpl-3.0
| 1,186
| 0.043845
|
# -*- coding: utf-8 -*-
from transH import TransH
import pickle
import numpy as np
import sys
def main():
if len(sys.argv) != 3:
print '[Usage] python train.py train_data validation_data'
exit(0)
train_data, valid_data = sys.argv[1:]
X, E, R = loadData(train_data)
V = loadData(valid_data, E=E, R=R, mode='valid')
# parameters
gamma = 1
k = 50
alpha = 0.1
b = 5000
c = 0.25
transH = TransH(len(E), len(R), gamma, k, alpha, b, c)
    transH.fit(X, validationset=V)
w = open('transH.model', 'w')
pickle.dump((transH, E, R), w)
def loadData(file_path, E=None, R=None, mode='train'):
if mode == 'train':
E, R = {}, {}
e_ind, r_ind = 0, 0
X = []
        f = open(file_path, 'r')
for line in f:
h, r, t = line.strip().split('\t')
if not h in E:
E[h] = e_ind
e_ind += 1
if not t in E:
E[t] = e_ind
e_ind +=1
if not r in R:
R[r] = r_ind
r_ind += 1
X.append((E[h], R[r], E[t]))
f.close()
return np.array(X), E, R
elif mode == 'valid':
X = []
f = open(file_path, 'r')
for line in f:
h, r, t = line.strip().split('\t')
X.append((E[h], R[r], E[t]))
return np.array(X)
if __name__ == "__main__":
main()
|
respawner/peering-manager
|
peering/migrations/0020_auto_20181105_0850.py
|
Python
|
apache-2.0
| 619
| 0
|
# Generated by Django 2.1.3 on 2018-11-05 07:50
from django.db import migrations, models
class Migration(migrations.Migration):
    dependencies = [("peering", "0019_router_netbox_device_id")]
operations = [
migrations.AddField(
model_name="directpeeringsession
|
",
name="last_established_state",
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name="internetexchangepeeringsession",
name="last_established_state",
field=models.DateTimeField(blank=True, null=True),
),
]
|
DedMemez/ODS-August-2017
|
minigame/MazeBase.py
|
Python
|
apache-2.0
| 5,752
| 0.003825
|
# Fuck you Disyer. Stealing my fucking paypal. GET FUCKED: toontown.minigame.MazeBase
from panda3d.core import VBase3
from direct.showbase.RandomNumGen import RandomNumGen
class MazeBase:
def __init__(self, model, mazeData, cellWidth, parent = None):
if parent is None:
parent = render
self.width = mazeData['width']
self.height = mazeData['height']
self.originTX = mazeData['originX']
self.originTY = mazeData['originY']
self.collisionTable = mazeData['collisionTable']
self._initialCellWidth = cellWidth
self.cellWidth = self._initialCellWidth
self.maze = model
self.maze.setPos(0, 0, 0)
self.maze.reparentTo(parent)
self.maze.stash()
return
def destroy(self):
self.maze.removeNode()
del self.maze
def onstage(self):
self.maze.unstash()
def offstage(self):
self.maze.stash()
def setScale(self, xy = 1, z = 1):
self.maze.setScale(VBase3(xy, xy, z))
self.cellWidth = self._initialCellWidth * xy
def isWalkable(self, tX, tY, rejectList = ()):
if tX <= 0 or tY <= 0 or tX >= self.width or tY >= self.height:
return 0
return not self.collisionTable[tY][tX] and not self.collisionTable[tY - 1][tX] and not self.collisionTable[tY][tX - 1] and not self.collisionTable[tY - 1][tX - 1] and (tX, tY) not in rejectList
def tile2world(self, TX, TY):
return [(TX - self.originTX) * self.cellWidth, (TY - self.originTY) * self.cellWidth]
def world2tile(self, x, y):
return [int(x / self.cellWidth + self.originTX), int(y / self.cellWidth + self.originTY)]
def world2tileClipped(self, x, y):
coords = [int(x / self.cellWidth + self.originTX), int(y / self.cellWidth + self.originTY)]
coords[0] = min(max(coords[0], 0), self.width - 1)
coords[1] = min(max(coords[1], 0), self.height - 1)
return coords
def doOrthoCollisions(self, oldPos, newPos):
offset = newPos - oldPos
WALL_OFFSET = 1.0
curX = oldPos[0]
curY = oldPos[1]
curTX, curTY = self.world2tile(curX, curY)
def calcFlushCoord(curTile, newTile, centerTile):
EPSILON = 0.01
if newTile > curTile:
return (newTile - centerTile) * self.cellWidth - EPSILON - WALL_OFFSET
else:
return (curTile - centerTile) * self.cellWidth + WALL_OFFSET
offsetX = offset[0]
offsetY = offset[1]
        WALL_OFFSET_X = WALL_OFFSET
if offsetX < 0:
WALL_OFFSET_X = -WALL_OFFSET_X
WALL_OFFSET_Y = WALL_OFFSET
if offsetY < 0:
WALL_OFFSET_Y = -WALL_OFFSET_Y
newX = curX + offsetX + WALL_OFFSET_X
|
newY = curY
newTX, newTY = self.world2tile(newX, newY)
if newTX != curTX:
if self.collisionTable[newTY][newTX] == 1:
offset.setX(calcFlushCoord(curTX, newTX, self.originTX) - curX)
newX = curX
newY = curY + offsetY + WALL_OFFSET_Y
newTX, newTY = self.world2tile(newX, newY)
if newTY != curTY:
if self.collisionTable[newTY][newTX] == 1:
offset.setY(calcFlushCoord(curTY, newTY, self.originTY) - curY)
offsetX = offset[0]
offsetY = offset[1]
newX = curX + offsetX + WALL_OFFSET_X
newY = curY + offsetY + WALL_OFFSET_Y
newTX, newTY = self.world2tile(newX, newY)
if self.collisionTable[newTY][newTX] == 1:
cX = calcFlushCoord(curTX, newTX, self.originTX)
cY = calcFlushCoord(curTY, newTY, self.originTY)
if abs(cX - curX) < abs(cY - curY):
offset.setX(cX - curX)
else:
offset.setY(cY - curY)
return oldPos + offset
def createRandomSpotsList(self, numSpots, randomNumGen):
randomNumGen = RandomNumGen(randomNumGen)
width = self.width
height = self.height
halfWidth = int(width / 2)
halfHeight = int(height / 2)
quadrants = [(0,
0,
halfWidth - 1,
halfHeight - 1),
(halfWidth,
0,
width - 1,
halfHeight - 1),
(0,
halfHeight,
halfWidth - 1,
height - 1),
(halfWidth,
halfHeight,
width - 1,
height - 1)]
spotsTaken = []
def getEmptySpotInQuadrant(quadrant):
tX = -1
tY = -1
while tX < 0 or not self.isWalkable(tX, tY, spotsTaken):
tX = randomNumGen.randint(quadrant[0], quadrant[2])
tY = randomNumGen.randint(quadrant[1], quadrant[3])
spot = (tX, tY)
spotsTaken.append(spot)
return spot
def getSpotList(length):
randomNumGen.shuffle(quadrants)
l = []
remaining = length
for quadrant in quadrants:
for u in xrange(int(length / 4)):
l.append(getEmptySpotInQuadrant(quadrant))
remaining -= int(length / 4)
for u in xrange(remaining):
quadrant = quadrants[randomNumGen.randint(0, len(quadrants) - 1)]
l.append(getEmptySpotInQuadrant(quadrant))
return l
if type(numSpots) == tuple or type(numSpots) == list:
spots = []
for i in numSpots:
spots.append(getSpotList(i))
return spots
return getSpotList(numSpots)
|
LoaDy588/py_battleship_sim
|
examples.py
|
Python
|
mit
| 1,044
| 0
|
from core import display, field_utils, player
from core import hunt_ai, probabilistic_ai
import time
def game_example():
"""
Simple simulation of Probabilistic AI playing against a dummy.
Displays the game field of dummy.
"""
# create players
dummy = player.Player()
ai = probabilistic_ai.Probabilistic_AI()
# game loop
while not dummy.has_lost():
ai.turn(dummy)
display.display_field(dummy.get_field()) # display game field
time.sleep(0.3)
def cheat_example():
"""
    Simple simulation of Target/Hunt AI cheating against a dummy.
    Displays the game field of dummy.
"""
# create dummy
dummy = player.Player()
# create cheat_list for Hunt AI, create AI
cheat_list = field_utils.generate_cheat_list(dummy.get_field(), 3)
ai = hunt_ai.Hunt_AI(cheat=True, cheat_input=cheat_list)
# game loop
while not dummy.has_lost():
ai.turn(dummy)
display.display_field(dummy.get_field()) # display game field
time.sleep(0.3)
|
lxc/pylxd
|
pylxd/models/operation.py
|
Python
|
apache-2.0
| 3,440
| 0
|
# Copyright (c) 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import warnings
from urllib import parse
from pylxd import exceptions
# Global used to record which warnings have been issued already for unknown
# attributes.
_seen_attribute_warnings = set()
class Operation:
"""An LXD operation.
If the LXD server sends attributes that this version of pylxd is unaware of
then a warning is printed. By default the warning is issued ONCE and then
    suppressed for every subsequent attempted setting. The warnings can be
completely suppressed by setting the environment variable PYLXD_WARNINGS to
'none', or always displayed by setting the PYLXD_WARNINGS variable to
'always'.
"""
__slots__ = [
"_client",
"class",
"created_at",
"description",
"err",
"id",
"location",
"may_cancel",
|
"metadata",
"resources",
"status",
"status_code",
"updated_at",
]
@classmethod
def wait_for_operation(cls, client, operation_id):
"""Get an operation and wait for it to complete."""
operation = cls.get(client, operation_id)
operation.wait()
return cls.get(client, operation.id)
@classmethod
def extract_operation_id(cls, s):
return os.path.split(parse.urlparse(s).path)[-1]
@classmethod
    def get(cls, client, operation_id):
"""Get an operation."""
operation_id = cls.extract_operation_id(operation_id)
response = client.api.operations[operation_id].get()
return cls(_client=client, **response.json()["metadata"])
def __init__(self, **kwargs):
super().__init__()
for key, value in kwargs.items():
try:
setattr(self, key, value)
except AttributeError:
# ignore attributes we don't know about -- prevent breakage
# in the future if new attributes are added.
global _seen_attribute_warnings
env = os.environ.get("PYLXD_WARNINGS", "").lower()
if env != "always" and key in _seen_attribute_warnings:
continue
_seen_attribute_warnings.add(key)
if env == "none":
continue
warnings.warn(
'Attempted to set unknown attribute "{}" '
'on instance of "{}"'.format(key, self.__class__.__name__)
)
pass
def wait(self):
"""Wait for the operation to complete and return."""
response = self._client.api.operations[self.id].wait.get()
try:
if response.json()["metadata"]["status"] == "Failure":
raise exceptions.LXDAPIException(response)
except KeyError:
# Support for legacy LXD
pass
|
allmightyspiff/softlayer-python
|
SoftLayer/CLI/block/replication/disaster_recovery_failover.py
|
Python
|
mit
| 1,954
| 0.005118
|
"""Failover an inaccessible block volume to its available replicant volume."""
# :license: MIT, see LICENSE for more details.
import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
from SoftLayer.CLI import formatting
@click.command(epilog="""Failover an inaccessible block volume to its available replicant volume.
If a volume (with replication) becomes inaccessible due to a disaster event, this method can be used to immediately
failover to an available replica in another location. This method does not allow for failback via API.
After using this method, to failback to the original volume, please open a support ticket.
If you wish to test failover, please use replica-failover.""")
@click.argument('volume-id')
@click.option('--replicant-id', help="ID of the replicant volume")
@environment.pass_env
def cli(env, volume_id, replicant_id):
"""Failover an inaccessible block volume to its available replicant volume."""
block_storage_manager = SoftLayer.BlockStorageManager(env.client)
click.secho("""WARNING : Failover
|
an inacc
|
essible block volume to its available replicant volume."""
"""If a volume (with replication) becomes inaccessible due to a disaster event,"""
"""this method can be used to immediately failover to an available replica in another location."""
"""This method does not allow for failback via the API."""
"""To failback to the original volume after using this method, open a support ticket."""
"""If you wish to test failover, use replica-failover instead.""", fg='red')
if not formatting.confirm('Are you sure you want to continue?'):
raise exceptions.CLIAbort('Aborted.')
block_storage_manager.disaster_recovery_failover_to_replicant(
volume_id,
replicant_id
)
click.echo("Disaster Recovery Failover to replicant is now in progress.")
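# A hedged invocation sketch for the command defined above. The CLI routing is
# not shown in this file; assuming it is registered under the block command
# group with a name matching this module, a call might look like:
#
#   slcli block disaster-recovery-failover 12345678 --replicant-id 87654321
#
# where both IDs are placeholders for a volume and its replicant.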
|
terotic/digihel
|
digi/migrations/0008_auto_20160909_1909.py
|
Python
|
mit
| 3,342
| 0.005087
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-09-09 16:09
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import modelcluster.fields
class Migration(migrations.Migration):
dependencies = [
('wagtaildocs', '0007_merge'),
('wagtailcore', '0029_unicode_slugfield_dj19'),
('digi', '0007_themepage_type'),
]
operations = [
migrations.CreateModel(
name='ProjectLink',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
('link_external', models.URLField(blank=True, verbose_name='External link')),
('title', models.CharField(help_text='Link title', max_length=255)),
('link_document', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtaildocs.Document')),
('link_page', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtailcore.Page')),
('theme', modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='links', to='digi.ProjectPage')),
],
options={
'ordering': ['sort_order'],
'abstract': False,
},
),
migrations.CreateModel(
name='ThemeLink',
fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sort_order', models.IntegerField(blank=True, editable=False, null=True)),
('link_external', models.URLField(blank=True, verbose_name='External link')),
                ('title', models.CharField(help_text='Link title', max_length=255)),
('link_document', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtaildocs.Document')),
('link_page', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='wagtailcore.Page')),
('theme', modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='links', to='digi.ThemePage')),
],
options={
'ordering': ['sort_order'],
'abstract': False,
},
),
migrations.AlterField(
model_name='projectrole',
name='person',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_roles', to='people.Person'),
),
migrations.AlterField(
model_name='projectrole',
name='project',
field=modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='roles', to='digi.ProjectPage'),
),
migrations.AlterField(
model_name='themerole',
name='person',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='theme_roles', to='people.Person'),
),
]
|
CheckiO-Missions/checkio-task-fizz-buzz
|
verification/referee.py
|
Python
|
gpl-2.0
| 2,378
| 0.005046
|
"""
CheckiOReferee is a base referee for checking your code.
arguments:
    tests -- the dict contains tests in the specific structure.
        You can find an example in tests.py.
    cover_code -- is a wrapper for the user function and additional operations performed before data
        is given to the user function. You can use some predefined codes from checkio.referee.cover_codes
    checker -- is a replacement for the default checking of the user function result. If given, then
        instead of a simple "==" the checker function will be used; it returns a tuple with the result
        (false or true) and some additional info (a message).
        You can use some predefined codes from checkio.referee.checkers
    add_allowed_modules -- additional modules which will be allowed for your task.
    add_close_builtins -- builtin names to close off; for example, you can close "eval"
    remove_allowed_modules -- close standard library modules, for example "math"
checkio.referee.checkers
    checkers.float_comparison -- Checking function factory for checking results with float numbers.
        Syntax: checkers.float_comparison(digits) -- where "digits" is the number of significant
        digits after the comma.
checkio.referee.cover_codes
    cover_codes.unwrap_args -- Your "input" from a test can be given as a list. If you want to unwrap it
        before the user function is called, use this function. For example: if your test's input
        is [2, 2] and you use this cover_code, then the user function will be called as checkio(2, 2)
    cover_codes.unwrap_kwargs -- the same as unwrap_args, but unwraps a dict.
"""
from checkio.signals import ON_CONNECT
from checkio import api
from checkio.referees.io import CheckiOReferee
from checkio.referees import cover_codes
from checkio.referees import checkers
from tests import TESTS
api.add_listener(
ON_CONNECT,
CheckiOReferee(
tests=TESTS,
function_name={
"python": "checkio",
"js": "fizzBuzz"
}
# cover_code={
# 'python-27': cover_codes.unwrap_args, # or None
# 'python-3': cover_codes.unwrap_args
# },
# checker=None, # checkers.float.comparison(2)
# add_allowed_modules=[],
# add_close_builtins=[],
# remove_allowed_modules=[]
).on_ready)
|
ScottWales/rose
|
lib/python/rose/suite_engine_procs/cylc.py
|
Python
|
gpl-3.0
| 54,627
| 0.000092
|
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# (C) British Crown Copyright 2012-5 Met Office.
#
# This file is part of Rose, a framework for meteorological suites.
#
# Rose is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rose is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Rose. If not, see <http://www.gnu.org/licenses/>.
#-----------------------------------------------------------------------------
"""Logic specific to the Cylc suite engine."""
import filecmp
from fnmatch import fnmatch
from glob import glob
import os
import pwd
import re
from rose.fs_util import FileSystemEvent
from rose.popen import RosePopenError
from rose.reporter import Event, Reporter
from rose.suite_engine_proc import (
SuiteEngineProcessor, SuiteScanResult,
SuiteEngineGlobalConfCompatError, TaskProps)
import socket
import sqlite3
import tarfile
from tempfile import mkstemp
from time import sleep
from uuid import uuid4
_PORT_FILE = "port-file"
_PORT_SCAN = "port-scan"
class CylcProcessor(SuiteEngineProcessor):
"""Logic specific to the Cylc suite engine."""
CYCLE_ORDERS = {"time_desc": " DESC", "time_asc": " ASC"}
EVENTS = {"submission succeeded": "submit",
"submission failed": "fail(submit)",
"submitting now": "submit-init",
"incrementing submit number": "submit-init",
"started": "init",
"succeeded": "success",
"failed": "fail",
"execution started": "init",
"execution succeeded": "success",
"execution failed": "fail",
"signaled": "fail(%s)"}
EVENT_TIME_INDICES = {
"submit-init": 0, "init": 1, "success": 2, "fail": 2, "fail(%s)": 2}
EVENT_RANKS = {"submit-init": 0, "submit": 1, "fail(submit)": 1, "init": 2,
"success": 3, "fail": 3, "fail(%s)": 4}
JOB_LOGS_DB = "log/rose-job-logs.db"
JOB_ORDERS = {
"time_desc":
"time DESC, task_events.submit_num DESC, name DESC, cycle DESC",
"time_asc":
"time ASC, task_events.submit_num ASC, name ASC, cycle ASC",
"cycle_desc_name_asc":
"cycle DESC, name ASC, task_events.submit_num DESC",
"cycle_desc_name_desc":
"cycle DESC, name DESC, task_events.submit_num DESC",
"cycle_asc_name_asc":
"cycle ASC, name ASC, task_events.submit_num DESC",
"cycle_asc_name_desc":
"cycle ASC, name DESC, task_events.submit_num DESC",
"name_asc_cycle_asc":
"name ASC, cycle ASC, task_events.submit_num DESC",
"name_desc_cycle_asc":
"name DESC, cycle ASC, task_events.submit_num DESC",
"name_asc_cycle_desc":
"name ASC, cy
|
cle DESC, task_events.submit_num DESC",
"name_desc_cycle_desc":
"name DESC, cycle DESC, task_events.submit_num DESC"}
PGREP_CYLC_RUN = r"python.*cylc-(run|restart)( | .+ )%s( |$)"
REASON_KEY_PROC = "process"
    REASON_KEY_FILE = "port-file"
REC_CYCLE_TIME = re.compile(
r"\A[\+\-]?\d+(?:W\d+)?(?:T\d+(?:Z|[+-]\d+)?)?\Z") # Good enough?
REC_SEQ_LOG = re.compile(r"\A(.*\.)(\d+)(\.html)?\Z")
REC_SIGNALLED = re.compile(r"Task\sjob\sscript\sreceived\ssignal\s(\S+)")
SCHEME = "cylc"
STATUSES = {"active": ["ready", "queued", "submitting", "submitted",
"submit-retrying", "running", "retrying"],
"fail": ["submission failed", "failed"],
"success": ["succeeded"]}
SUITE_CONF = "suite.rc"
SUITE_DB = "cylc-suite.db"
SUITE_DIR_REL_ROOT = "cylc-run"
TASK_ID_DELIM = "."
TIMEOUT = 5 # seconds
def __init__(self, *args, **kwargs):
SuiteEngineProcessor.__init__(self, *args, **kwargs)
self.daos = {self.SUITE_DB: {}, self.JOB_LOGS_DB: {}}
# N.B. Should be considered a constant after initialisation
self.state_of = {}
for status, names in self.STATUSES.items():
for name in names:
self.state_of[name] = status
self.host = None
self.user = None
def check_global_conf_compat(self):
"""Raise exception on incompatible Cylc global configuration."""
expected = os.path.join("~", self.SUITE_DIR_REL_ROOT)
expected = os.path.expanduser(expected)
for key in ["[hosts][localhost]run directory",
"[hosts][localhost]work directory"]:
out = self.popen("cylc", "get-global-config", "-i", key)[0]
lines = out.splitlines()
if lines and lines[0] != expected:
raise SuiteEngineGlobalConfCompatError(
self.SCHEME, key, lines[0])
def clean_hook(self, suite_name=None):
"""Run "cylc refresh --unregister" (at end of "rose suite-clean")."""
self.popen.run("cylc", "refresh", "--unregister")
passphrase_dir_root = os.path.expanduser(os.path.join("~", ".cylc"))
for name in os.listdir(passphrase_dir_root):
path = os.path.join(passphrase_dir_root, name)
if os.path.islink(path) and not os.path.exists(path):
self.fs_util.delete(path)
def cmp_suite_conf(self, suite_name, strict_mode=False, debug_mode=False):
"""Parse and compare current "suite.rc" with that in the previous run.
(Re-)register and validate the "suite.rc" file.
Raise RosePopenError on failure.
Return True if "suite.rc.processed" is unmodified c.f. previous run.
Return False otherwise.
"""
suite_dir = self.get_suite_dir(suite_name)
out = self.popen.run("cylc", "get-directory", suite_name)[1]
suite_dir_old = None
if out:
suite_dir_old = out.strip()
suite_passphrase = os.path.join(suite_dir, "passphrase")
self.clean_hook(suite_name)
if suite_dir_old != suite_dir or not os.path.exists(suite_passphrase):
self.popen.run_simple("cylc", "unregister", suite_name)
suite_dir_old = None
if suite_dir_old is None:
self.popen.run_simple("cylc", "register", suite_name, suite_dir)
passphrase_dir = os.path.join("~", ".cylc", suite_name)
passphrase_dir = os.path.expanduser(passphrase_dir)
self.fs_util.symlink(suite_dir, passphrase_dir)
command = ["cylc", "validate", "-v"]
if debug_mode:
command.append("--debug")
if strict_mode:
command.append("--strict")
command.append(suite_name)
suite_rc_processed = os.path.join(suite_dir, "suite.rc.processed")
old_suite_rc_processed = None
if os.path.exists(suite_rc_processed):
f_desc, old_suite_rc_processed = mkstemp(
dir=suite_dir,
prefix="suite.rc.processed.")
os.close(f_desc)
os.rename(suite_rc_processed, old_suite_rc_processed)
try:
self.popen.run_simple(*command, stdout_level=Event.V)
return (old_suite_rc_processed and
filecmp.cmp(old_suite_rc_processed, suite_rc_processed))
finally:
if old_suite_rc_processed:
os.unlink(old_suite_rc_processed)
def gcontrol(self, suite_name, host=None, engine_version=None, args=None):
"""Launch control GUI for a suite_name running at a host."""
if not self.is_suite_registered(suite_name):
raise SuiteNotRegisteredError(suite_name)
if not host:
host = "localhost"
environ = dict(os.environ)
if engine_version:
environ.update({self.get_version_env_name(): engine_version})
fmt = r"nohup cylc gui --host=%s %s %s 1
|
DayGitH/Python-Challenges
|
DailyProgrammer/DP20130510C.py
|
Python
|
mit
| 7,377
| 0.007049
|
"""
[05/10/13] Challenge #123 [Hard] Robot Jousting
https://www.reddit.com/r/dailyprogrammer/comments/1ej32w/051013_challenge_123_hard_robot_jousting/
# [](#HardIcon) *(Hard)*: Robot Jousting
You are an expert in the new and exciting field of *Robot Jousting*! Yes, you read that right: robots that charge one
another to see who wins and who gets destroyed. Your job has been to work on a simulation of the joust matches and
compute *when* there is a collision between the two robots and *which* robot would win (the robot with the higher
velocity), thus preventing the destruction of very expensive hardware.
Let's define the actual behavior of the jousting event and how the robots work: the event takes place in a long
hallway. Robots are placed initially in the center on the far left or far right of the hallway. When robots start, they
choose a given starting angle, and keep moving forward until they hit a wall. Once a robot hits a wall, they stop
movement, and rotate back to the angle in which they came into the wall. Basically robots "reflect" themselves off the
wall at the angle in which they hit it. For every wall-hit event, the robot loses 10% of its speed, thus robots will
slow down over time (but never stop until there is a collision).
[Check out these two images as examples of the described scene](http://imgur.com/a/NSzpY). Note that the actual robot
geometry you want to simulate is a perfect circle, where the radius is 0.25 meters, or 25 centimeters.
# Formal Inputs & Outputs
## Input Description
You will be given three separate lines of information: the first has data describing the hallway that the robots will
joust in, and then the second and third represent information on the left and right robots, respectively.
The first line will contain two integers: how long and wide the hallway is in meters. As an example, given the line "10
2", then you should know that the length of the hallway is 10 meters, while the width is just 2 meters.
The second and third lines also contain two integers: the first is the initial angle the robot will move towards (in
degrees, as a signed number, where degree 0 always points to the center of the hallway, negative points to the left,
and positive points to the right). The second integer is the speed that the robot will initially move at, as defined in
millimeters per second. As an example, given the two lines "45 5" and "-45 2", we know that the left robot will launch
at 45 degrees to its left, and that the second robot will launch 45 degrees to its left (really try to understand the
angle standard we use). The left robot starts with an initial speed of 5 mm/s with the right robot starting at 2 mm/s.
Assume that the robot radius will always be a quarter of a meter (25 centimeters).
## Output Description
Simply print "Left robot wins at X seconds." or "Right robot wins at X seconds." whenever the robots collide: make sure
that the variable X is the number of seconds elapsed since start, and that the winning robot is whichever robot had the
higher velocity. In case the robots never hit each other during a simulation, simply print "No winner found".
# Sample Inputs & Outputs
## Sample Input
10 2
30 5
-10 4
## Sample Output
*Please note that this is FAKE data; I've yet to write my own simulation...*
Left robot wins at 32.5 seconds.
# Challenge Note
Make sure to keep your simulation as precise as possible! Any cool tricks with a focus on precision management will get
bonus awards! This is also a very open-ended challenge question, so feel free to ask question and discuss in the
comments section.
"""
def main():
pass
if __name__ == "__main__":
main()
"""
[05/10/13] Challenge #122 [Hard] Subset Sum Insanity
https://www.reddit.com/r/dailyprogrammer/comments/1e2rcx/051013_challenge_122_hard_subset_sum_insanity/
# [](#HardIcon) *(Hard)*: Subset Sum
The [subset sum](http://en.wikipedia.org/wiki/Subset_sum_problem) problem is a classic computer science challenge:
though it may appear trivial on its surface, there is no known solution that runs in [deterministic polynomial
time](http://en.wikipedia.org/wiki/P_(complexity)) (basically this is an
[NP-complete](http://en.wikipedia.org/wiki/Subset_sum_problem) problem). To make this challenge more "fun" (in the same
way that losing in Dwarf Fortress is "fun"), we will be solving this problem in a three-dimensional matrix and define a
subset as a set of integers that are directly adjacent!
**Don't forget our [previous
week-long](http://www.reddit.com/r/dailyprogrammer/comments/1dk7c7/05213_challenge_121_hard_medal_management/) [Hard]
challenge competition ends today!**
# Formal Inputs & Outputs
## Input Description
You will be given three integers `(U, V, W)` on the first line of data, where each is the length of the matrices'
respective dimensions (meaning U is the number of elements in the X dimension, V is the number of elements in the Y
dimension, and W is the number of elements in the Z dimension). After the initial line of input, you will be given a
series of space-delimited integers that makes up the 3D matrix. Integers are ordered first in the X dimension, then Y,
and then Z ( [the coordinate system is clarified here](http://i.imgur.com/nxChpUZ.png) ).
## Output Description
Simply print all sets of integers that sum to 0, if this set is of directly-adjacent integers (meaning a set that
travels vertically or horizontally, but never diagonally). If there are no such sets, simply print "No subsets sum to
0".
# Sample Inputs & Outputs
## Sample Input
2 2 3
-1 2 3 4 1 3 4 5 4 6 8 10
## Sample Output
-1 1
*Note:* This is set of positions (0, 0, 0), and (0, 0, 1).
# Challenge Input
8 8 8
-7 0 -10 -4 -1 -9 4 3 -9 -1 2 4 -6 3 3 -9 9 0 -7 3 -7 -10 -9 4 -6 1 5 -1 -8 9 1 -9 6 -1 1 -8 -6 -5 -3 5 10 6 -1 2
-2 -7 4 -4 5 2 -10 -8 9 7 7 9 -7 2 2 9 2 6 6 -3 8 -4 -6 0 -2 -8 6 3 8 10 -5 8 8 8 8 0 -1 4 -5 9 -7 -10 1 -7 6 1 -10 8 8
-8 -9 6 -3 -3 -9 1 4 -9 2 5 -2 -10 8 3 3 -1 0 -2 4 -5 -2 8 -8 9 2 7 9 -10 4 9 10 -6 5 -3 -5 5 1 -1 -3 2 3 2 -8 -9 10 4
10 -4 2 -5 0 -4 4 6 -1 9 1 3 -7 6 -3 -3 -9 6 10 8 -3 -5 5 2 6 -1 2 5 10 1 -3 3 -10 6 -6 9 -3 -9 9 -10 6 7 7 10 -6 0 6 8
-10 6 4 -4 -1 7 4 -9 -3 -10 0 -6 7 10 1 -9 1 9 5 7 -2 9 -8 10 -8 -7 0 -10 -7 5 3 2 0 0 -1 10 3 3 -7 8 7 5 9 -7 3 10 7
10 0 -10 10 7 5 6 -6 6 -9 -1 -8 9 -2 8 -7 -6 -8 5 -2 1 -9 -8 2 9 -9 3 3 -8 1 -3 9 1 3 6 -6 9 -2 5 8 2 -6 -9 -9 1 1 -9 5
-4 -9 6 -10 10 -1 8 -2 -6 8 -9 9 0 8 0 4 8 -7 -9 5 -4 0 -9 -8 2 -1 5 -6 -5 5 9 -8 3 8 -3 -1 -10 10 -9 -10 3 -1 1 -1 5
-7 -8 -5 -10 1 7 -3 -6 5 5 2 6 3 -8 9 1 -5 8 5 1 4 -8 7 1 3 -5 10 -9 -2 4 -5 -7 8 8 -8 -7 9 1 6 6 3 4 5 6 -3 -7 2 -2 7
-1 2 2 2 5 10 0 9 6 10 -4 9 7 -10 -9 -6 0 -1 9 -3 -9 -7 0 8 -5 -7 -10 10 4 4 7 3 -5 3 7 6 3 -1 9 -5 4 -9 -8 -2 7 10 -1
-10 -10 -3 4 -7 5 -5 -3 9 7 -3 10 -8 -9 3 9 3 10 -10 -8 6 0 0 8 1 -7 -8 -6 7 8 -1 -4 0 -1 1 -4 4 9 0 1 -6 -5 2 5 -1 2 7
-8 5 -7 7 -7 9 -8 -10 -4 10 6 -1 -4 -5 0 -2 -3 1 -1 -3 4 -4 -6 4 5 7 5 -6 -6 4 -10 -3 -4 -4 -2 6 0 1 2 1 -7
# Challenge Note
Like any challenge of this complexity class, you are somewhat constrained to solving the problem with brute-force (sum
all possible sub-sets). We really want to encourage any and all new ideas, so really go wild and absolutely do whatever
you think could solve this problem quickly!
"""
def main():
pass
if __name__ == "__main__":
main()
|
girving/tensorflow
|
tensorflow/python/keras/optimizer_v2/adagrad.py
|
Python
|
apache-2.0
| 4,793
| 0.003547
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Adagrad optimizer for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.keras.optimizer_v2 import optimizer_v2
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.training import training_ops
class Adagrad(optimizer_v2.OptimizerV2):
"""Adagrad optimizer.
It is recommended to leave the parameters of this optimizer at their default
values.
See this [paper](http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf)
or this
[intro](https://ppasupat.github.io/a9online/uploads/proximal_notes.pdf).
The learning_rate arg below is a hyperparameter, where a hyperparameter is
defined as a scalar Tensor, a regular Python value, or a callable (which
will be evaluated when `apply_gradients` is called) returning a scalar
Tensor or a Python value.
Arguments:
learning_rate: float hyperparameter >= 0. Learning rate.
initial_accumulator_value: A floating point value. Starting value for the
accumulators, must be positive.
name: Optional name prefix for the operations created when applying
gradients. Defaults to 'Adagrad'.
Raises:
ValueError: If the `initial_accumulator_value` is invalid.
"""
def __init__(self,
learning_rate=0.001,
initial_accumulator_value=0.1,
name="Adagrad"):
if initial_accumulator_value <= 0.0:
raise ValueError("initial_accumulator_value must be positive: %s" %
initial_accumulator_value)
super(Adagrad, self).__init__(name)
self._set_hyper("learning_rate", learning_rate)
self._initial_accumulator_value = initial_accumulator_value
def _create_vars(self, var_list, state):
for v in var_list:
dtype = v.dtype.base_dtype
if v.get_shape().is_fully_defined():
init = init_ops.constant_initializer(self._initial_accumulator_value,
dtype=dtype)
else:
def init(v=v, dtype=dtype):
# Use a Tensor instead of initializer if variable does not have
# static shape.
init_constant = gen_array_ops.fill(array_ops.shape(v),
self._initial_accumulator_value)
return math_ops.cast(init_constant, dtype)
      state.create_slot_with_initializer(v, init, v.get_shape(), dtype,
"accumulator")
def _apply_dense(self, grad, var, state):
acc = state.get_slot(var, "accumulator")
return training_ops.apply_adagrad(
var,
acc,
state.get_hyper("learning_rate", var.dtyp
|
e.base_dtype),
grad,
use_locking=self._use_locking)
def _resource_apply_dense(self, grad, var, state):
acc = state.get_slot(var, "accumulator")
return training_ops.resource_apply_adagrad(
var.handle,
acc.handle,
state.get_hyper("learning_rate", var.dtype.base_dtype),
grad,
use_locking=self._use_locking)
def _apply_sparse(self, grad, var, state):
acc = state.get_slot(var, "accumulator")
return training_ops.sparse_apply_adagrad(
var,
acc,
state.get_hyper("learning_rate", var.dtype.base_dtype),
grad.values,
grad.indices,
use_locking=self._use_locking)
def _resource_apply_sparse(self, grad, var, indices, state):
acc = state.get_slot(var, "accumulator")
return training_ops.resource_sparse_apply_adagrad(
var.handle,
acc.handle,
state.get_hyper("learning_rate", var.dtype.base_dtype),
grad,
indices,
use_locking=self._use_locking)
def get_config(self):
config = super(Adagrad, self).get_config()
config.update({
"learning_rate": self._serialize_hyperparameter("learning_rate"),
"initial_accumulator_value": self._initial_accumulator_value
})
return config
|
ucb-sejits/ctree
|
ctree/tools/runner.py
|
Python
|
bsd-2-clause
| 5,410
| 0.002588
|
"""
create specializer projects
basically copies all files and directories from a template.
"""
from __future__ import print_function
import sys
import argparse
import collections
import shutil
import os
import ctree
from ctree.tools.generators.builder import Builder
if sys.version_info >= (3, 0, 0): # python 3
# noinspection PyPep8Naming
import configparser as ConfigParser
else:
# noinspection PyPep8Naming
    import ConfigParser
__author__ = 'chick'
def main(*args):
"""run ctree utility stuff, currently only the project generator"""
if sys.argv:
args = sys.argv[1:]
parser = argparse.ArgumentParser(prog="ctree", description="ctree is a python SEJITS framework")
    parser.add_argument('-sp', '--startproject', help='generate a specializer project')
parser.add_argument(
'-wu', '--wattsupmeter', help="start interactive watts up meter shell", action="store_true"
)
parser.add_argument('-p', '--port', help="/dev name to use for wattsup meter port")
parser.add_argument('-v', '--verbose', help='show more debug than you like', action="store_true")
parser.add_argument('-dc', '--disable_cache', help='disable and delete the persistent cache', action="store_true")
parser.add_argument('-ec', '--enable_cache', help='enable the persistent cache', action="store_true")
parser.add_argument('-cc', '--clear_cache', help='clear the persistent cache', action="store_true")
args = parser.parse_args(args)
if args.startproject:
specializer_name = args.startproject
print("create project specializer %s" % specializer_name)
builder = Builder("create", specializer_name, verbose=args.verbose)
builder.build(None, None)
elif args.wattsupmeter:
from ctree.metrics.watts_up_reader import WattsUpReader
port = args.port if args.port else WattsUpReader.guess_port()
meter = WattsUpReader(port_name=port)
meter.interactive_mode()
elif args.enable_cache:
ctree.CONFIG.set("jit", "CACHE", value="True")
write_success = write_to_config('jit', 'CACHE', True)
if write_success:
print("[SUCCESS] ctree caching enabled.")
elif args.disable_cache:
wipe_cache()
ctree.CONFIG.set("jit", "CACHE", value="False")
write_success = write_to_config('jit', 'CACHE', False)
args.clear_cache = True
if write_success:
print("[SUCCESS] ctree caching disabled.")
elif args.clear_cache:
wipe_cache()
else:
parser.print_usage()
def get_responsible(section, key):
"""
:param section: Section to search for
:param key: key to search for
:return: path of config file responsible for setting
"""
first = ctree.CFG_PATHS[-1]
paths = reversed(ctree.CFG_PATHS)
for path in paths:
config = ConfigParser.ConfigParser()
config.read(path)
if config.has_option(section, key):
return path
return first
def write_to_config(section, key, value):
"""
This method handles writing to the closest config file to the current
project, but does not write to the defaults.cfg file in ctree.
:return: return True if write is successful. False otherwise.
"""
if ctree.CFG_PATHS:
target = get_responsible(section, key)
config = ConfigParser.ConfigParser()
config.read(target)
print(target)
if not config.has_section(section):
config.add_section(section)
config.set(section, key, value)
with open(target, 'w') as configfile:
config.write(configfile)
configfile.close()
return True
else:
print("[FAILURE] No config file detected. Please create a '.ctree.cfg' file in your project directory.")
return False
def wipe_cache():
"""
if path is absolute, just remove the directory
    if the path is relative, recursively look from the current directory down
    for matching paths. This can take a long time.
:return:
"""
cache_name = os.path.expanduser(ctree.CONFIG.get('jit', 'COMPILE_PATH'))
if os.path.isabs(cache_name):
if os.path.exists(cache_name):
result = shutil.rmtree(cache_name)
print("removed cache directory {} {}".format(
cache_name, result if result else ""))
exit(0)
splitted = cache_name.split(os.sep)
while splitted:
first = splitted[0]
if first == '.':
splitted.pop(0)
elif first == '..':
os.chdir('../')
splitted.pop(0)
else:
cache_name = os.sep.join(splitted)
break
wipe_queue = collections.deque([os.path.abspath(p) for p in os.listdir(os.getcwd())])
print("ctree looking for relative cache directories named {}, checking directories under this one".format(
cache_name))
while wipe_queue:
directory = wipe_queue.popleft()
if not os.path.isdir(directory):
continue
if os.path.split(directory)[-1] == cache_name:
shutil.rmtree(directory)
else:
#print("{} ".format(directory))
for sub_item in os.listdir(directory):
wipe_queue.append(os.path.join(directory, sub_item))
print()
if __name__ == '__main__':
main(sys.argv[1:])
|
agry/NGECore2
|
scripts/loot/lootPools/talus/re_junk_aakuan_follower.py
|
Python
|
lgpl-3.0
| 92
| 0.086957
|
def itemNames():
return ['motor','software_module']
def itemChances():
	return [50,50]
|
linglung/ytdl
|
youtube_dl/extractor/kamcord.py
|
Python
|
unlicense
| 2,262
| 0.001326
|
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
int_or_none,
qualities,
)
class KamcordIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?kamcord\.com/v/(?P<id>[^/?#&]+)'
_TEST = {
'url': 'https://www.kamcord.com/v/hNYRduDgWb4',
'md5': 'c3180e8a9cfac2e86e1b88cb8751b54c',
'info_dict': {
'id': 'hNYRduDgWb4',
'ext': 'mp4',
'title': 'Drinking Madness',
'uploader': 'jacksfilms',
'uploader_id': '3044562',
'view_count': int,
'like_count': int,
'comment_count': int,
},
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
video = self._parse_json(
self._search_regex(
r'window\.__props\s*=\s*({.+?});?(?:\n|\s*</script)',
webpage, 'video'),
video_id)['video']
title = video['title']
formats = self._extract_m3u8_formats(
video['play']['hls'], video_id, 'mp4', entry_protocol='m3u8_native')
self._sort_formats(formats)
uploader = video.get('user', {}).get('username')
uploader_id = video.get('user', {}).get('id')
view_count = int_or_none(video.get('viewCount'))
like_count = int_or_none(video.get('heartCount'))
comment_count = int_or_none(video.get('messageCount'))
preference_key = qualities(('small', 'medium', 'large'))
thumbnails = [{
            'url': thumbnail_url,
'id': thumbnail_id,
'preference': preference_key(thumbnail_id),
} for thumbnail_id, thumbnail_url in (video.get('thumbnail') or {}).items()
if isinstance(thumbnail_id, compat_str) and isinstance(thumbnail_url, compat_str)]
return {
'id': video_id,
'title': title,
|
'uploader': uploader,
'uploader_id': uploader_id,
'view_count': view_count,
'like_count': like_count,
'comment_count': comment_count,
'thumbnails': thumbnails,
'formats': formats,
}
|
AMLab-Amsterdam/lie_learn
|
lie_learn/representations/SO3/spherical_harmonics.py
|
Python
|
mit
| 13,535
| 0.005098
|
import numpy as np
from scipy.special import sph_harm, lpmv
try:
from scipy.misc import factorial
except:
from scipy.special import factorial
def sh(l, m, theta, phi, field='real', normalization='quantum', condon_shortley=True):
if field == 'real':
return rsh(l, m, theta, phi, normalization, condon_shortley)
elif field == 'complex':
return csh(l, m, theta, phi, normalization, condon_shortley)
else:
raise ValueError('Unknown field: ' + str(field))
def sh_squared_norm(l, normalization='quantum', normalized_haar=True):
"""
Compute the squared norm of the spherical harmonics.
The squared norm of a function on the sphere is defined as
|f|^2 = int_S^2 |f(x)|^2 dx
where dx is a Haar measure.
:param l: for some normalization conventions, the norm of a spherical harmonic Y^l_m depends on the degree l
:param normalization: normalization convention for the spherical harmonic
:param normalized_haar: whether to use the Haar measure da db sinb or the normalized Haar measure da db sinb / 4pi
:return: the squared norm of the spherical harmonic with respect to given measure
"""
if normalization == 'quantum' or normalization == 'seismology':
# The quantum and seismology spherical harmonics are normalized with respect to the Haar measure
# dmu(theta, phi) = dtheta sin(theta) dphi
sqnorm = 1.
elif normalization == 'geodesy':
# The geodesy spherical harmonics are normalized with respect to the *normalized* Haar measure
# dmu(theta, phi) = dtheta sin(theta) dphi / 4pi
sqnorm = 4 * np.pi
elif normalization == 'nfft':
sqnorm = 4 * np.pi / (2 * l + 1)
else:
raise ValueError('Unknown normalization')
if normalized_haar:
return sqnorm / (4 * np.pi)
else:
return sqnorm
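# A hedged numerical sanity check of the value documented above: approximate
# int_{S^2} |S_l^m|^2 sin(theta) dtheta dphi on a midpoint grid and compare it
# with sh_squared_norm(l, ..., normalized_haar=False). Grid size and (l, m)
# are arbitrary illustrative choices.
def _check_sh_squared_norm(l=2, m=1, n=200, normalization='quantum'):
    theta = (np.arange(n) + 0.5) * np.pi / n
    phi = np.arange(2 * n) * np.pi / n
    tt, pp = np.meshgrid(theta, phi, indexing='ij')
    y = sh(l, m, tt, pp, field='real', normalization=normalization)
    dA = (np.pi / n) * (np.pi / n)
    quadrature = np.sum(np.abs(y) ** 2 * np.sin(tt)) * dA
    return quadrature, sh_squared_norm(l, normalization, normalized_haar=False)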
def block_sh_ph(L_max, theta, phi):
"""
Compute all spherical harmonics up to and including degree L_max, for angles theta and phi.
This function is currently rather hacky, but the method used here is very fast and stable, compared
to builtin scipy functions.
:param L_max:
:param theta:
:param phi:
:return:
"""
from .pinchon_hoggan.pinchon_hoggan import apply_rotation_block, make_c2b
from .irrep_bases import change_of_basis_function
irreps = np.arange(L_max + 1)
ls = [[ls] * (2 * ls + 1) for ls in irreps]
ls = np.array([ll for sublist in ls for ll in sublist]) # 0, 1, 1, 1, 2, 2, 2, 2, 2, ...
ms = [list(range(-ls, ls + 1)) for ls in irreps]
ms = np.array([mm for sublist in ms for mm in sublist]) # 0, -1, 0, 1, -2, -1, 0, 1, 2, ...
# Get a vector Y that selects the 0-frequency component from each irrep in the centered basis
# If D is a Wigner D matrix, then D Y is the center column of D, which is equal to the spherical harmonics.
Y = (ms == 0).astype(float)
# Change to / from the block basis (since the rotation code works in that basis)
c2b = change_of_basis_function(irreps,
frm=('real', 'quantum', 'centered', 'cs'),
to=('real', 'quantum', 'block', 'cs'))
b2c = change_of_basis_function(irreps,
frm=('real', 'quantum', 'block', 'cs'),
to=('real', 'quantum', 'centered', 'cs'))
Yb = c2b(Y)
# Rotate Yb:
c2b = make_c2b(irreps)
import os
J_block = np.load(os.path.join(os.path.dirname(__file__), 'pinchon_hoggan', 'J_block_0-278.npy'), allow_pickle=True)
J_block = list(J_block[irreps])
g = np.zeros((theta.size, 3))
g[:, 0] = phi
g[:, 1] = theta
TYb = apply_rotation_block(g=g, X=Yb[np.newaxis, :],
irreps=irreps, c2b=c2b,
J_block=J_block, l_max=np.max(irreps))
print(Yb.shape, TYb.shape)
# Change back to centered basis
TYc = b2c(TYb.T).T # b2c doesn't work properly for matrices, so do a transpose hack
print(TYc.shape)
# Somehow, the SH obtained so far are equal to real, nfft, cs spherical harmonics
# Change to real quantum centered cs
c = change_of_basis_function(irreps,
frm=('real', 'nfft', 'centered', 'cs'),
to=('real', 'quantum', 'centered', 'cs'))
TYc2 = c(TYc)
print(TYc2.shape)
return TYc2
def rsh(l, m, theta, phi, normalization='quantum', condon_shortley=True):
"""
Compute the real spherical harmonic (RSH) S_l^m(theta, phi).
The RSH are obtained from Complex Spherical Harmonics (CSH) as follows:
if m < 0:
S_l^m = i / sqrt(2) * (Y_l^m - (-1)^m Y_l^{-m})
if m == 0:
S_l^m = Y_l^0
if m > 0:
S_l^m = 1 / sqrt(2) * (Y_l^{-m} + (-1)^m Y_l^m)
(see [1])
Various normalizations for the CSH exist, see the CSH() function. Since the CSH->RSH change of basis is unitary,
the orthogonality and normalization properties of the RSH are the same as those of the CSH from which they were
    obtained. Furthermore, the operation of changing normalization and that of changing field
    (complex->real or vice-versa) commute, because the ratio c_m of normalization constants is always the same for
    m and -m (to see that this implies commutativity, substitute Y_l^m * c_m for Y_l^m in the above formula).
Pinchon & Hoggan [2] define a different change of basis for CSH -> RSH, but they also use an unusual definition
of CSH. To obtain RSH as defined by Pinchon-Hoggan, use this function with normalization='quantum'.
References:
[1] http://en.wikipedia.org/wiki/Spherical_harmonics#Real_form
[2] Rotation matrices for real spherical harmonics: general rotations of atomic orbitals in space-fixed axes.
    :param l: non-negative integer; the degree of the CSH.
:param m: integer, -l <= m <= l; the order of the CSH.
:param theta: the colatitude / polar angle,
ranging from 0 (North Pole, (X,Y,Z)=(0,0,1)) to pi (South Pole, (X,Y,Z)=(0,0,-1)).
:param phi: the longitude / azimuthal angle, ranging from 0 to 2 pi.
:param normalization: how to normalize the RSH:
'seismology', 'quantum', 'geodesy'.
        these are immediately passed to the CSH functions, and since the change of basis
        from CSH to RSH is unitary, the orthogonality and normalization properties are unchanged.
:return: the value of the real spherical harmonic S^l_m(theta, phi)
"""
l, m, theta, phi = np.broadcast_arrays(l, m, theta, phi)
    # Get the CSH for m and -m, using Condon-Shortley phase (regardless of whether CS is requested or not)
# The reason is that the code that changes from CSH to RSH assumes CS phase.
a = csh(l=l, m=m, theta=theta, phi=phi, normalization=normalization, condon_shortley=True)
b = csh(l=l, m=-m, theta=theta, phi=phi, normalization=normalization, condon_shortley=True)
#if m > 0:
# y = np.array((b + ((-1.)**m) * a).real / np.sqrt(2.))
#elif m < 0:
# y = np.array((1j * a - 1j * ((-1.)**(-m)) * b).real / np.sqrt(2.))
#else:
# # For m == 0, the complex spherical harmonics are already real
# y = np.array(a.real)
y = ((m > 0) * np.array((b + ((-1.)**m) * a).real / np.sqrt(2.))
+ (m < 0) * np.array((1j * a - 1j * ((-1.)**(-m)) * b).real / np.sqrt(2.))
+ (m == 0) * np.array(a.real))
if condon_shortley:
return y
else:
# Cancel the CS phase of y (i.e. multiply by -1 when m is both odd and greater than 0)
return y * ((-1.) ** (m * (m > 0)))
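# Illustrative check -- a minimal sketch, not part of the original module.
# For m > 0 the docstring above gives S_l^m = (Y_l^{-m} + (-1)^m Y_l^m) / sqrt(2);
# this spot-checks that identity numerically for one arbitrary (l, m, theta, phi).
def _example_rsh_from_csh(l=2, m=1, theta=0.7, phi=1.1):
    y_pos = csh(l, m, theta, phi, normalization='quantum', condon_shortley=True)
    y_neg = csh(l, -m, theta, phi, normalization='quantum', condon_shortley=True)
    expected = (y_neg + ((-1.) ** m) * y_pos).real / np.sqrt(2.)
    return np.allclose(rsh(l, m, theta, phi, normalization='quantum', condon_shortley=True), expected)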
def csh(l, m, theta, phi, normalization='quantum', condon_shortley=True):
"""
Compute Complex Spherical Harmonics (CSH) Y_l^m(theta, phi).
Unlike the scipy.special.sph_harm function, we use the common convention that
theta is the polar angle (0 to pi) and phi is the azimuthal angle (0 to 2pi).
The spherical harmonic 'backbone' is:
Y_l^m(theta, phi) = P_l^m(cos(theta)) exp(i m phi)
where P_l^m is the associated Legendre function as defined in the scipy library (scipy.special.sph_harm).
Various normalization factor
|
PyCQA/pylint
|
tests/functional/ext/typing/typing_consider_using_alias_without_future.py
|
Python
|
gpl-2.0
| 2,165
| 0.004157
|
"""Test pylint.extension.typing - consider-using-alias
'py-version' needs to be set to '3.7' or '3.8' and 'runtime-typing=no'.
"""
# pylint: disable=missing-docstring,invalid-name,unused-argument,line-too-long,unsubscriptable-object
import collections
import collections.abc
import typing
from collections.abc import Awaitable
from dataclasses import dataclass
from typing import Dict, List, Set, Union, TypedDict, Callable, Tuple, Type
var1: typing.Dict[str, int] # [consider-using-alias]
var2: List[int]  # [consider-using-alias]
var3: collections.abc.Iterable[int]
var4: typing.OrderedDict[str, int] # [consider-using-alias]
var5: typing.Awaitable[None] # [consider-using-alias]
var6: typing.Iterable[int] # [consider-using-alias]
var7: typing.Hashable # [consider-using-alias]
var8: typing.ContextManager[str] # [consider-using-alias]
var9: typing.Pattern[str] # [consider-using-alias]
var10: typing.re.Match[str] # [consider-using-alias]
var11: list[int]
var12: collections.abc
var13: Awaitable[None]
var14: collections.defaultdict[str, str]
Alias1 = Set[int]
Alias2 = Dict[int, List[int]]
Alias3 = Union[int, typing.List[str]]
Alias4 = List # [consider-using-alias]
var21: Type[object] # [consider-using-alias]
var22: Tuple[str] # [consider-using-alias]
var23: Callable[..., str] # [consider-using-alias]
var31: type[object]
var32: tuple[str]
var33: collections.abc.Callable[..., str]
def func1(arg1: List[int], /, *args: List[int], arg2: set[int], **kwargs: Dict[str, int]) -> typing.Tuple[int]:
# -1:[consider-using-alias,consider-using-alias,consider-using-alias,consider-using-alias]
pass
def func2(arg1: list[int]) -> tuple[int, int]:
pass
class CustomIntList(typing.List[int]):
pass
cast_variable = [1, 2, 3]
cast_variable = typing.cast(List[int], cast_variable)
(lambda x: 2)(List[int])
class CustomNamedTuple(typing.NamedTuple):
my_var: List[int] # [consider-using-alias]
CustomTypedDict1 = TypedDict("CustomTypedDict1", my_var=List[int])
class CustomTypedDict2(TypedDict):
my_var: List[int] # [consider-using-alias]
@dataclass
class CustomDataClass:
my_var: List[int] # [consider-using-alias]
|
EMSTrack/WebServerAndClient
|
login/models.py
|
Python
|
bsd-3-clause
| 17,047
| 0.002053
|
import logging
from enum import Enum
from django.contrib.auth.models import Group
from django.contrib.auth.models import User
from django.contrib.gis.db import models
from django.core.exceptions import PermissionDenied
from django.core.validators import MinValueValidator
from django.template.defaulttags import register
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from ambulance.models import AmbulanceStatus
from emstrack.util import make_choices
from login.mixins import ClearPermissionCacheMixin
from login.permissions import get_permissions
logger = logging.getLogger(__name__)
# filters
@register.filter
def get_client_status(key):
return ClientStatus[key].value
@register.filter
def get_client_activity(key):
return ClientActivity[key].value
@register.filter
def is_dispatcher(user):
return user.is_superuser or user.is_staff or user.userprofile.is_dispatcher
class UserProfile(ClearPermissionCacheMixin,
models.Model):
user = models.OneToOneField(User,
on_delete=models.CASCADE,
verbose_name=_('user'))
is_dispatcher = models.BooleanField(_('is_dispatcher'), default=False)
def get_absolute_url(self):
return reverse('login:detail-user', kwargs={'pk': self.user.id})
def __str__(self):
return '{}'.format(self.user)
# GroupProfile
class GroupProfile(ClearPermissionCacheMixin,
models.Model):
group = models.OneToOneField(Group,
on_delete=models.CASCADE,
verbose_name=_('group'))
description = models.CharField(_('description'), max_length=100, blank=True)
priority = models.PositiveIntegerField(_('priority'), validators=[MinValueValidator(1)], default=10)
def get_absolute_url(self):
return reverse('login:detail-group', kwargs={'pk': self.group.id})
def __str__(self):
return '{}: description = {}'.format(self.group, self.description)
class Meta:
indexes = [models.Index(fields=['priority'])]
# Group Ambulance and Hospital Permissions
class Permission(models.Model):
can_read = models.BooleanField(_('can_read'), default=True)
can_write = models.BooleanField(_('can_write'), default=False)
class Meta:
abstract = True
class UserAmbulancePermission(ClearPermissionCacheMixin,
Permission):
user = models.ForeignKey(User,
on_delete=models.CASCADE,
verbose_name=_('user'))
ambulance = models.ForeignKey('ambulance.Ambulance',
on_delete=models.CASCADE,
verbose_name=_('ambulance'))
class Meta:
unique_together = ('user', 'ambulance')
def __str__(self):
return '{}/{}(id={}): read[{}] write[{}]'.format(self.user,
self.ambulance.identifier,
self.ambulance.id,
self.can_read,
self.can_write)
class UserHospitalPermission(ClearPermissionCacheMixin,
Permission):
user = models.ForeignKey(User,
on_delete=models.CASCADE,
verbose_name=_('user'))
hospital = models.ForeignKey('hospital.Hospital',
on_delete=models.CASCADE,
verbose_name=_('hospital'))
class Meta:
unique_together = ('user', 'hospital')
def __str__(self):
return '{}/{}(id={}): read[{}] write[{}]'.format(self.user,
self.hospital.name,
self.hospital.id,
self.can_read,
self.can_write)
class GroupAmbulancePermission(ClearPermissionCacheMixin,
Permission):
group = models.ForeignKey(Group,
on_delete=models.CASCADE,
verbose_name=_('group'))
ambulance = models.ForeignKey('ambulance.Ambulance',
on_delete=models.CASCADE,
verbose_name=_('ambulance'))
class Meta:
unique_together = ('group', 'ambulance')
def __str__(self):
return '{}/{}(id={}): read[{}] write[{}]'.format(self.group,
self.ambulance.identifier,
self.ambulance.id,
self.can_read,
self.can_write)
class GroupHospitalPermission(ClearPermissionCacheMixin,
Permission):
group = models.ForeignKey(Group,
                              on_delete=models.CASCADE,
                              verbose_name=_('group'))
hospital = models.ForeignKey('hospital.Hospital',
on_delete=models.CASCADE,
verbose_name=_('hospital'))
class Meta:
        unique_together = ('group', 'hospital')
def __str__(self):
return '{}/{}(id={}): read[{}] write[{}]'.format(self.group,
self.hospital.name,
self.hospital.id,
self.can_read,
self.can_write)
# TemporaryPassword
class TemporaryPassword(models.Model):
user = models.OneToOneField(User,
on_delete=models.CASCADE,
verbose_name=_('user'))
password = models.CharField(_('password'), max_length=254)
created_on = models.DateTimeField(_('created_on'), auto_now=True)
def __str__(self):
return '"{}" (created on: {})'.format(self.password, self.created_on)
# Client status
class ClientStatus(Enum):
O = _('online')
F = _('offline')
D = _('disconnected')
R = _('reconnected')
# Client information
class Client(models.Model):
# NOTE: This shouldn't be needed but django was giving me a hard time
# id = models.AutoField(_('id'), primary_key=True)
# WARNING: mqtt client_id's can be up to 65536 bytes!
client_id = models.CharField(_('client_id'), max_length=254, unique=True)
user = models.ForeignKey(User,
on_delete=models.CASCADE,
verbose_name=_('user'))
status = models.CharField(_('status'), max_length=1,
choices=make_choices(ClientStatus))
ambulance = models.OneToOneField('ambulance.Ambulance',
on_delete=models.CASCADE,
blank=True, null=True,
verbose_name=_('ambulance'))
hospital = models.OneToOneField('hospital.Hospital',
on_delete=models.CASCADE,
blank=True, null=True,
verbose_name=_('hospital'))
updated_on = models.DateTimeField(_('updated_on'), auto_now=True)
# default value for _loaded_values
_loaded_values = None
def __str__(self):
return '{}[{},{}](ambulance={},hospital={})'.format(self.client_id, self.status,
self.user, self.ambulance, self.hospital)
def get_absolute_url(self):
return reverse('login:detail-client', kwargs={'pk': self.id})
@classmethod
def from_db(cls, db, field_names, values):
# call super
instance = super(Client, cls).from_db(db, field_names, v
|
heiths/allura
|
Allura/allura/tests/unit/test_mixins.py
|
Python
|
apache-2.0
| 3,087
| 0
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from mock import Mock
from allura.model import VotableArtifact
class TestVotableArtifact(object):
def setUp(self):
self.user1 = Mock()
self.user1.username = 'test-user'
self.user2 = Mock()
self.user2.username = 'user2'
def test_vote_up(self):
vote = VotableArtifact()
vote.vote_up(self.user1)
assert vote.votes_up == 1
assert vote.votes_up_users == [self.user1.username]
vote.vote_up(self.user2)
assert vote.votes_up == 2
assert vote.votes_up_users == [self.user1.username,
self.user2.username]
vote.vote_up(self.user1) # unvote user1
assert vote.votes_up == 1
assert vote.votes_up_users == [self.user2.username]
assert vote.votes_down == 0, 'vote_down must be 0 if we voted up only'
assert len(vote.votes_down_users) == 0
def test_vote_down(self):
vote = VotableArtifact()
vote.vote_down(self.user1)
assert vote.votes_down == 1
assert vote.votes_down_users == [self.user1.username]
vote.vote_down(self.user2)
assert vote.votes_down == 2
        assert vote.votes_down_users == [self.user1.username,
self.user2.username]
vote.vote_down(self.user1) # unvote user1
assert vote.votes_down == 1
        assert vote.votes_down_users == [self.user2.username]
assert vote.votes_up == 0, 'vote_up must be 0 if we voted down only'
assert len(vote.votes_up_users) == 0
def test_change_vote(self):
vote = VotableArtifact()
vote.vote_up(self.user1)
vote.vote_down(self.user1)
assert vote.votes_down == 1
assert vote.votes_down_users == [self.user1.username]
assert vote.votes_up == 0
assert len(vote.votes_up_users) == 0
def test_json(self):
vote = VotableArtifact()
assert vote.__json__() == {'votes_up': 0, 'votes_down': 0}
vote.vote_down(self.user1)
assert vote.__json__() == {'votes_up': 0, 'votes_down': 1}
vote.vote_up(self.user2)
assert vote.__json__() == {'votes_up': 1, 'votes_down': 1}
|
zhangpf/vbox
|
src/VBox/ValidationKit/tests/installation/tdGuestOsInstTest1.py
|
Python
|
gpl-2.0
| 20,047
| 0.012471
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# $Id$
"""
VirtualBox Validation Kit - Guest OS installation tests.
"""
__copyright__ = \
"""
Copyright (C) 2010-2014 Oracle Corporation
This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.org. This file is free software;
you can redistribute it and/or modify it under the terms of the GNU
General Public License (GPL) as published by the Free Software
Foundation, in version 2 as it comes in the "COPYING" file of the
VirtualBox OSE distribution. VirtualBox OSE is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
The contents of this file may alternatively be used under the terms
of the Common Development and Distribution License Version 1.0
(CDDL) only, as it comes in the "COPYING.CDDL" file of the
VirtualBox OSE distribution, in which case the provisions of the
CDDL are applicable instead of those of the GPL.
You may elect to license modified versions of this file under the
terms and conditions of either the GPL or the CDDL or both.
"""
__version__ = "$Revision$"
# Standard Python imports.
import os
import sys
# Only the main script needs to modify the path.
try: __file__
except: __file__ = sys.argv[0]
g_ksValidationKitDir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(g_ksValidationKitDir)
# Validation Kit imports.
from testdriver import vbox;
from testdriver import base;
from testdriver import reporter;
from testdriver import vboxcon;
from testdriver import vboxtestvms;
class InstallTestVm(vboxtestvms.TestVm):
""" Installation test VM. """
## @name The primary controller, to which the disk will be attached.
ksSataController = 'SATA Controller'
ksIdeController = 'IDE Controller'
## @name VM option flags (OR together).
## @{
kf32Bit = 0x01;
kf64Bit = 0x02;
kfReqIoApic = 0x10;
kfReqIoApicSmp = 0x20;
kfReqPae = 0x40;
kfIdeIrqDelay = 0x80;
kfUbuntuNewAmdBug = 0x80;
## @}
## IRQ delay extra data config
|
for win2k VMs.
kasIdeIrqDelay = [ 'VBoxInternal/Devices/piix3ide/0/Config/IRQDelay:1', ];
## Install ISO path relative to the testrsrc root.
ksIsoPathBase = os.path.join('4.2', 'isos');
def __init__(self, oSet, sVmName, sKind, sInstallIso, sHdCtrlNm, cGbHdd, fFlags):
vboxtestvms.TestVm.__init__(self, oSet, sVmName, sKind = sKind, sHddControllerType = sHdCtrlNm, fUseParavirtProvider=True); # pylint: disable=C0301
        self.sDvdImage = os.path.join(self.ksIsoPathBase, sInstallIso);
self.cGbHdd = cGbHdd;
self.fInstVmFlags = fFlags;
if fFlags & self.kfReqPae:
self.fPae = True;
if fFlags & (self.kfReqIoApic | self.kfReqIoApicSmp):
self.fIoApic = True;
# Tweaks
self.iOptRamAdjust = 0;
self.asExtraData = [];
if fFlags & self.kfIdeIrqDelay:
self.asExtraData = self.kasIdeIrqDelay;
def detatchAndDeleteHd(self, oTestDrv):
"""
Detaches and deletes the HD.
Returns success indicator, error info logged.
"""
fRc = False;
oVM = oTestDrv.getVmByName(self.sVmName);
if oVM is not None:
oSession = oTestDrv.openSession(oVM);
if oSession is not None:
(fRc, oHd) = oSession.detachHd(self.sHddControllerType, iPort = 0, iDevice = 0);
if fRc is True and oHd is not None:
fRc = oSession.saveSettings();
fRc = fRc and oTestDrv.oVBox.deleteHdByMedium(oHd);
fRc = fRc and oSession.saveSettings(); # Necessary for media reg?
fRc = oSession.close() and fRc;
return fRc;
def getReconfiguredVm(self, oTestDrv, cCpus, sVirtMode, sParavirtMode=None):
#
# Do the standard reconfig in the base class first, it'll figure out
# if we can run the VM as requested.
#
(fRc, oVM) = vboxtestvms.TestVm.getReconfiguredVm(self, oTestDrv, cCpus, sVirtMode, sParavirtMode);
#
# Make sure there is no HD from the previous run attached nor taking
# up storage on the host.
#
if fRc is True:
fRc = self.detatchAndDeleteHd(oTestDrv);
#
# Check for ubuntu installer vs. AMD host CPU.
#
if fRc is True and (self.fInstVmFlags & self.kfUbuntuNewAmdBug):
if self.isHostCpuAffectedByUbuntuNewAmdBug(oTestDrv):
return (None, None); # (skip)
#
# Make adjustments to the default config, and adding a fresh HD.
#
if fRc is True:
oSession = oTestDrv.openSession(oVM);
if oSession is not None:
if self.sHddControllerType == self.ksSataController:
fRc = fRc and oSession.setStorageControllerType(vboxcon.StorageControllerType_IntelAhci,
self.sHddControllerType)
fRc = fRc and oSession.setStorageControllerPortCount(self.sHddControllerType, 1)
try:
sHddPath = os.path.join(os.path.dirname(oVM.settingsFilePath),
'%s-%s-%s.vdi' % (self.sVmName, sVirtMode, cCpus,));
except:
reporter.errorXcpt();
sHddPath = None;
fRc = False;
fRc = fRc and oSession.createAndAttachHd(sHddPath,
cb = self.cGbHdd * 1024*1024*1024,
sController = self.sHddControllerType,
iPort = 0,
fImmutable = False);
# Set proper boot order
fRc = fRc and oSession.setBootOrder(1, vboxcon.DeviceType_HardDisk)
fRc = fRc and oSession.setBootOrder(2, vboxcon.DeviceType_DVD)
# Adjust memory if requested.
if self.iOptRamAdjust != 0:
fRc = fRc and oSession.setRamSize(oSession.o.machine.memorySize + self.iOptRamAdjust);
# Set extra data
for sExtraData in self.asExtraData:
try:
sKey, sValue = sExtraData.split(':')
except ValueError:
raise base.InvalidOption('Invalid extradata specified: %s' % sExtraData)
reporter.log('Set extradata: %s => %s' % (sKey, sValue))
fRc = fRc and oSession.setExtraData(sKey, sValue)
# Other variations?
# Save the settings.
fRc = fRc and oSession.saveSettings()
fRc = oSession.close() and fRc;
else:
fRc = False;
if fRc is not True:
oVM = None;
# Done.
return (fRc, oVM)
def isHostCpuAffectedByUbuntuNewAmdBug(self, oTestDrv):
"""
Checks if the host OS is affected by older ubuntu installers being very
picky about which families of AMD CPUs it would run on.
The installer checks for family 15, later 16, later 20, and in 11.10
they remove the family check for AMD CPUs.
"""
if not oTestDrv.isHostCpuAmd():
return False;
try:
(uMaxExt, _, _, _) = oTestDrv.oVBox.host.getProcessorCPUIDLeaf(0, 0x80000000, 0);
(uFamilyModel, _, _, _) = oTestDrv.oVBox.host.getProcessorCPUIDLeaf(0, 0x80000001, 0);
except:
reporter.logXcpt();
return False;
if uMaxExt < 0x80000001 or uMaxExt > 0x8000ffff:
return False;
uFamily = (uFamilyModel >> 8) & 0xf
if uFamily == 0xf:
uFamily = ((uFamilyModel >> 20) & 0x7f) + 0xf;
## @todo Break this down into which old ubuntu release supports exactly
|
assafnativ/NativDebugging
|
src/Win32/MemReaderBaseWin.py
|
Python
|
gpl-3.0
| 16,875
| 0.006815
|
#
# MemoryReaderBaseWin.py
#
# MemoryReader - Remote process memory inspection python module
# https://github.com/assafnativ/NativDebugging.git
# Nativ.Assaf@gmail.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
from builtins import bytes
from ..MemReaderBase import *
from .Win32Structs import *
from .Win32Utilities import *
from ..Utilities import printIfVerbose, integer_types
try:
import distorm3
IS_DISASSEMBLER_FOUND = True
except ImportError as e:
IS_DISASSEMBLER_FOUND = False
class MemReaderBaseWin( MemReaderBase ):
def __init__(self, *argv, **argm):
self._ENDIANITY = '<' # Intel is always Little-endian
MemReaderBase.__init__(self, *argv, **argm)
def enumModulesAddresses(self):
ALLOCATION_GRANULARITY = 0x1000
mem_basic_info = MEMORY_BASIC_INFORMATION()
addr = ALLOCATION_GRANULARITY
while True:
if 0x7fffffffffff < addr:
break
queryResult = VirtualQueryEx(self._process, c_void_p(addr), byref(mem_basic_info), sizeof(mem_basic_info))
assert(queryResult == sizeof(mem_basic_info))
if 0xfff == (mem_basic_info.RegionSize & 0xfff):
# This is the last mem region in user space
break
if (mem_basic_info.BaseAddress + mem_basic_info.RegionSize) < addr:
# Overflow in address, we are probably done
break
else:
new_addr = mem_basic_info.BaseAddress + mem_basic_info.RegionSize
assert(addr < new_addr)
addr = new_addr
if (
(mem_basic_info.State != win32con.MEM_COMMIT) or
(win32con.PAGE_NOACCESS == (mem_basic_info.Protect & 0xff)) or
(0 != (mem_basic_info.Protect & win32con.PAGE_GUARD)) ):
# Not a module
continue
module_addr = mem_basic_info.BaseAddress
if b'MZ' != self.readMemory(module_addr, 2):
# Not an MZ, not a module
continue
lfanew = self.readUInt32(module_addr + 0x3c)
if (mem_basic_info.RegionSize - 4) < lfanew:
# Invalid MZ header
continue
nt_header_addr = module_addr + lfanew
if b'PE\x00\x00' != self.readMemory(nt_header_addr, 4):
# Invalid PE
continue
yield module_addr
def getPEBAddress(self):
processInfo = PROCESS_BASIC_INFORMATION()
NtQueryInformationProcess(self._process, win32con.PROCESS_BASIC_INFORMATION, byref(processInfo), sizeof(processInfo), None)
return processInfo.PebBaseAddress
def imageBaseAddressFromPeb(self, peb):
return self.readAddr(peb + (self.getPointerSize() * 2))
def enumModules( self, isVerbose=False ):
"""
        Return a list of tuples containing information about the modules loaded in memory, in the form of
(Address, module_name, module_size)
"""
modules = c_ARRAY( c_void_p, 0x10000 )(0)
bytes_written = c_uint32(0)
res = EnumProcessModules( self._process, byref(modules), sizeof(modules), byref(bytes_written))
if 0 == res and 0 == bytes_written.value:
# If this function is called from a 32-bit application running on WOW64, it can only enumerate the modules
# of a 32-bit process. If the process is a 64-bit process, this function fails and the last error code is
# ERROR_PARTIAL_COPY (299).
# Or process not started yet
raise Exception("Not supported")
num_modules = bytes_written.value // sizeof(c_void_p(0))
printIfVerbose("Found %d modules" % num_modules, isVerbose)
for module_iter in range(num_modules):
module_name = c_wchar_p('a' * 0x1000)
GetModuleBaseName(self._process, modules[module_iter], len(module_name.value))
module_name = module_name.value
module_info = MODULEINFO(0)
GetModuleInformation( self._process, modules[module_iter], byref(module_info), sizeof(module_info) )
module_base = module_info.lpBaseOfDll
if module_base != module_info.lpBaseOfDll:
printIfVerbose("This is strange, found inconsistency module address (0x{0:x} 0x{1:x})".format(module_base, module_info.lpBaseOfDll), isVerbose)
module_size = module_info.SizeOfImage
printIfVerbose("Module: (0x{0:x}) {1!s} of size (0x{2:x})".format(module_base, module_name, module_size), isVerbose)
yield (module_base, module_name, module_size)
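    # Illustrative usage -- a minimal sketch, not part of the original class; it only
    # shows how the (base, name, size) tuples yielded above can be consumed.
    def _exampleLargestModule(self, isVerbose=False):
        """Return the (base, name, size) tuple of the largest loaded module, or None."""
        modules = list(self.enumModules(isVerbose))
        if not modules:
            return None
        return max(modules, key=lambda module: module[2])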
def findModule( self, target_module, isVerbose=False ):
target_module = target_module.lower()
for base, name, moduleSize in self.enumModules(isVerbose):
if target_module in name.lower():
return base
raise Exception("Can't find module {0!s}".format(target_module))
def getModulePath( self, base ):
if isinstance(base, str):
base = self.findModule(base)
file_name = c_ARRAY(c_char, 10000)(b'\x00')
file_name_size = c_uint32(0)
GetModuleFileName(base, byref(file_name), byref(file_name_size))
return file_name.raw.replace(b'\x00\x00', b'').decode('utf16')
def _getSomePEInfo( self, module_base ):
pe = module_base + self.readUInt32( module_base + win32con.PE_POINTER_OFFSET )
first_section = self.readUInt16( pe + win32con.PE_SIZEOF_OF_OPTIONAL_HEADER_OFFSET) + win32con.PE_SIZEOF_NT_HEADER
num_sections = self.readUInt16( pe + win32con.PE_NUM_OF_SECTIONS_OFFSET )
isPePlus = (0x20b == self.readUInt16(pe + win32con.PE_OPTIONAL_HEADER_TYPE) )
return (pe, first_section, num_sections, isPePlus)
def getAllSections( self, module_base, isVerbose=False ):
pe, first_section, num_sections, isPePlus = self._getSomePEInfo( module_base )
bytes_read = c_uint32(0)
result = []
for sections_iter in range(num_sections):
if isVerbose:
print(hex(pe + first_section + (sections_iter * win32con.IMAGE_SIZEOF_SECTION_HEADER)))
section_name = self.readMemory( \
pe + first_section + (sections_iter * win32con.IMAGE_SIZEOF_SECTION_HEADER), \
win32con.PE_SECTION_NAME_SIZE )
section_name = section_name.replace(b'\x00', b'')
section_base = self.readUInt32( \
            pe + first_section + (sections_iter * win32con.IMAGE_SIZEOF_SECTION_HEADER) + win32con.PE_SECTION_VOFFSET_OFFSET )
section_size = self.readUInt32( \
pe + first_section + (sections_iter * win32con.IMAGE_SIZEOF_SECTION_HEADER) + win32con.PE_SECTION_SIZE_OF_RAW_DATA_OFFSET )
result.append( (section_name, section_base, section_size) )
if isVerbose:
print("Section: {0:s} @0x{1:x} of 0x{2:x} bytes".format(section_name, section_base, section_s
|
ize))
return result
def findSection( self, module_base, target_section, isVerbose=False ):
target_section = target_section.lower()
for section in self.getAllSections( module_base, isVerbose ):
if section[0].lower() == target_section:
return section
return ('',0,0)
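    # Illustrative usage -- a minimal sketch, not part of the original class;
    # 'kernel32.dll' and b'.text' are only example arguments, any loaded module
    # and section name would do.
    def _exampleCodeSection(self, isVerbose=False):
        """Combine findModule and findSection to get the virtual address and size of a module's code section."""
        module_base = self.findModule('kernel32.dll', isVerbose)
        name, section_rva, section_size = self.findSection(module_base, b'.text', isVerbose)
        if not name:
            return None
        return (module_base + section_rva, section_size)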
def getProcAddress( self, dllName, procName ):
""
|
omkartest123/django-causecode
|
causecode/causecode/project/migrations/0003_remove_product_code.py
|
Python
|
gpl-3.0
| 392
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-08-14 10:52
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('project', '0002_auto_20170814_1039'),
]
operations = [
migrations.RemoveField(
model_name='product',
name='code',
),
]
|
sonali0901/zulip
|
zerver/migrations/0030_realm_org_type.py
|
Python
|
apache-2.0
| 409
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('zerver', '0029_realm_subdomain'),
]
operations = [
migrations.AddField(
model_name='realm',
name='org_type',
field=models.PositiveSmallIntegerField(default=1),
),
]
|
channelcat/sanic
|
tests/test_worker.py
|
Python
|
mit
| 5,289
| 0
|
import asyncio
import json
import shlex
import subprocess
import time
import urllib.request
from unittest import mock
import pytest
from sanic_testing.testing import ASGI_PORT as PORT
from sanic.app import Sanic
from sanic.worker import GunicornWorker
@pytest.fixture(scope="module")
def gunicorn_worker():
command = (
"gunicorn "
f"--bind 127.0.0.1:{PORT} "
"--worker-class sanic.worker.GunicornWorker "
"examples.simple_server:app"
)
worker = subprocess.Popen(shlex.split(command))
time.sleep(3)
yield
worker.kill()
@pytest.fixture(scope="module")
def gunicorn_worker_with_access_logs():
command = (
"gunicorn "
f"--bind 127.0.0.1:{PORT + 1} "
"--worker-class sanic.worker.GunicornWorker "
"examples.simple_server:app"
)
worker = subprocess.Popen(shlex.split(command), stdout=subprocess.PIPE)
time.sleep(2)
return worker
@pytest.fixture(scope="module")
def gunicorn_worker_with_env_var():
command = (
'env SANIC_ACCESS_LOG="False" '
"gunicorn "
f"--bind 127.0.0.1:{PORT + 2} "
"--worker-class sanic.worker.GunicornWorker "
"--log-level info "
"examples.simple_server:app"
)
worker = subprocess.Popen(shlex.split(command), stdout=subprocess.PIPE)
time.sleep(2)
return worker
def test_gunicorn_worker(gunicorn_worker):
with urllib.request.urlopen(f"http://localhost:{PORT}/") as f:
res = json.loads(f.read(100).decode())
assert res["test"]
def test_gunicorn_worker_no_logs(gunicorn_worker_with_env_var):
"""
if SANIC_ACCESS_LOG was set to False do not show access logs
"""
    with urllib.request.urlopen(f"http://localhost:{PORT + 2}/") as _:
gunicorn_worker_with_env_var.kill()
assert not gunicorn_worker_with_env_var.stdout.read()
def test_gunicorn_worker_with_logs(gunicorn_worker_with_access_logs):
"""
default - show access logs
"""
with urllib.request.urlopen(f"http://localhost:{PORT + 1}/") as _:
gunicorn_worker_with_access_logs.kill()
assert (
b"(sanic.access)[INFO][127.0.0.1"
in gunicorn_worker_with_access_logs.stdout.read()
)
class GunicornTestWorker(GunicornWorker):
def __init__(self):
self.app = mock.Mock()
self.app.callable = Sanic("test_gunicorn_worker")
self.servers = {}
self.exit_code = 0
self.cfg = mock.Mock()
self.notify = mock.Mock()
@pytest.fixture
def worker():
return GunicornTestWorker()
def test_worker_init_process(worker):
with mock.patch("sanic.worker.asyncio") as mock_asyncio:
try:
worker.init_process()
except TypeError:
pass
assert mock_asyncio.get_event_loop.return_value.close.called
assert mock_asyncio.new_event_loop.called
assert mock_asyncio.set_event_loop.called
def test_worker_init_signals(worker):
worker.loop = mock.Mock()
worker.init_signals()
assert worker.loop.add_signal_handler.called
def test_handle_abort(worker):
with mock.patch("sanic.worker.sys") as mock_sys:
worker.handle_abort(object(), object())
assert not worker.alive
assert worker.exit_code == 1
mock_sys.exit.assert_called_with(1)
def test_handle_quit(worker):
worker.handle_quit(object(), object())
assert not worker.alive
assert worker.exit_code == 0
async def _a_noop(*a, **kw):
pass
def test_run_max_requests_exceeded(worker):
loop = asyncio.new_event_loop()
worker.ppid = 1
worker.alive = True
sock = mock.Mock()
sock.cfg_addr = ("localhost", 8080)
worker.sockets = [sock]
worker.wsgi = mock.Mock()
worker.connections = set()
worker.log = mock.Mock()
worker.loop = loop
worker.servers = {
"server1": {"requests_count": 14},
"server2": {"requests_count": 15},
}
worker.max_requests = 10
worker._run = mock.Mock(wraps=_a_noop)
# exceeding request count
_runner = asyncio.ensure_future(worker._check_alive(), loop=loop)
loop.run_until_complete(_runner)
assert not worker.alive
worker.notify.assert_called_with()
worker.log.info.assert_called_with(
"Max requests exceeded, shutting " "down: %s", worker
)
def test_worker_close(worker):
loop = asyncio.new_event_loop()
asyncio.sleep = mock.Mock(wraps=_a_noop)
worker.ppid = 1
worker.pid = 2
worker.cfg.graceful_timeout = 1.0
worker.signal = mock.Mock()
worker.signal.stopped = False
worker.wsgi = mock.Mock()
conn = mock.Mock()
conn.websocket = mock.Mock()
conn.websocket.close_connection = mock.Mock(wraps=_a_noop)
worker.connections = set([conn])
worker.log = mock.Mock()
worker.loop = loop
server = mock.Mock()
server.close = mock.Mock(wraps=lambda *a, **kw: None)
server.wait_closed = mock.Mock(wraps=_a_noop)
worker.servers = {server: {"requests_count": 14}}
worker.max_requests = 10
# close worker
_close = asyncio.ensure_future(worker.close(), loop=loop)
loop.run_until_complete(_close)
assert worker.signal.stopped
assert conn.websocket.close_connection.called
assert len(worker.servers) == 0
|
saymedia/python-danga-gearman
|
setup.py
|
Python
|
mit
| 699
| 0.02289
|
#!/usr/bin/env python
from distutils.core import setup
from dangagearman import __version__ as version
setup(
name = 'danga-gearman',
version = version,
description = 'Client for the Danga (Perl) Gearman implementation',
    author = 'Samuel Stauffer',
author_email = 'samuel@descolada.com',
url = 'http://github.com/saymedia/python-danga-gearman/tree/master',
packages = ['dangagearman'],
classifiers = [
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
timdiels/0install
|
zeroinstall/cmd/whatchanged.py
|
Python
|
lgpl-2.1
| 2,814
| 0.033404
|
"""
The B{0install whatchanged} command-line interface.
"""
# Copyright (C) 2012, Thomas Leonard
# See the README file for details, or visit http://0install.net.
from __future__ import print_function
import os
from zeroinstall import _, SafeException
from zeroinstall.cmd import UsageError
syntax = "APP-NAME"
def add_options(parser):
parser.add_option("", "--full", help=_("show diff of the XML"), action='store_true')
def handle(config, options, args):
if len(args) != 1:
raise UsageError()
name = args[0]
	app = config.app_mgr.lookup_app(name, missing_ok = False)
history = app.get_history()
if not history:
raise SafeException(_("Invalid application: no selections found! Try '0install destroy {name}'").format(name = name))
import time
	last_checked = app.get_last_checked()
if last_checked is not None:
print(_("Last checked : {date}").format(date = time.ctime(last_checked)))
last_attempt = app.get_last_check_attempt()
if last_attempt is not None:
print(_("Last attempted update: {date}").format(date = time.ctime(last_attempt)))
print(_("Last update : {date}").format(date = history[0]))
current_sels = app.get_selections(snapshot_date = history[0])
if len(history) == 1:
print(_("No previous history to compare against."))
print(_("Use '0install select {name}' to see the current selections.").format(name = name))
return
print(_("Previous update : {date}").format(date = history[1]))
def get_selections_path(date):
return os.path.join(app.path, 'selections-{date}.xml'.format(date = date))
print()
if options.full:
import difflib, sys
def load_lines(date):
with open(get_selections_path(date), 'r') as stream:
return stream.readlines()
old_lines = load_lines(history[1])
new_lines = load_lines(history[0])
for line in difflib.unified_diff(old_lines, new_lines, fromfile = history[1], tofile = history[0]):
sys.stdout.write(line)
else:
changes = show_changes(app.get_selections(snapshot_date = history[1]).selections, current_sels.selections)
if not changes:
print(_("No changes to versions (use --full to see all changes)."))
print()
print(_("To run using the previous selections, use:"))
print("0install run {path}".format(path = get_selections_path(history[1])))
def show_changes(old_selections, new_selections):
changes = False
for iface, old_sel in old_selections.items():
new_sel = new_selections.get(iface, None)
if new_sel is None:
print(_("No longer used: %s") % iface)
changes = True
elif old_sel.version != new_sel.version:
print(_("%s: %s -> %s") % (iface, old_sel.version, new_sel.version))
changes = True
for iface, new_sel in new_selections.items():
if iface not in old_selections:
print(_("%s: new -> %s") % (iface, new_sel.version))
changes = True
return changes
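# Illustrative usage -- a minimal sketch, not part of the original command;
# _FakeSel stands in for a real selection object (it only needs a `version`
# attribute here) and the interface URIs are made up.
class _FakeSel(object):
    def __init__(self, version):
        self.version = version
def _example_show_changes():
    old = {'http://example.com/prog.xml': _FakeSel('1.0')}
    new = {'http://example.com/prog.xml': _FakeSel('1.1'),
           'http://example.com/lib.xml': _FakeSel('0.3')}
    return show_changes(old, new)  # prints the upgrade and the new interface, returns True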
|
OKFNat/offenewahlen-nrw17
|
src/offenewahlen_api/wsgi.py
|
Python
|
mit
| 701
| 0.001427
|
"""
WSGI config for offenewahlen_nrw17 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "offenewahlen_api.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
# Fix django closing connection to MemCachier after every request (#11331)
from django.core.cache.backends.memcached import BaseMemcachedCache
BaseMemcachedCache.close = lambda self, **kwargs: None
|
RGood/praw
|
tests/integration/models/test_comment_forest.py
|
Python
|
bsd-2-clause
| 3,621
| 0
|
"""Test praw.models.comment_forest."""
from praw.models import Submission
from .. import IntegrationTest
class TestCommentForest(IntegrationTest):
def setup(self):
        super(TestCommentForest, self).setup()
        # Responses do not decode well on travis so manually re-enable gzip.
        self.reddit._core._requestor._http.headers['Accept-Encoding'] = 'gzip'
def test_replace__all(self):
with self.recorder.use_cassette(
'TestCommentForest.test_replace__all',
match_requests_on=['uri', 'method', 'body']):
submission = Submission(self.reddit, '3hahrw')
before_count = len(submission.comments.list())
skipped = submission.comments.replace_more(None, threshold=0)
assert len(skipped) == 0
assert all(x.submission == submission for x in
submission.comments.list())
assert before_count < len(submission.comments.list())
def test_replace__all_large(self):
with self.recorder.use_cassette(
'TestCommentForest.test_replace__all_large',
match_requests_on=['uri', 'method', 'body']):
submission = Submission(self.reddit, 'n49rw')
skipped = submission.comments.replace_more(None, threshold=0)
assert len(skipped) == 0
assert len(submission.comments.list()) > 1000
assert len(submission.comments.list()) == \
len(submission.comments._comments_by_id)
def test_replace__all_with_comment_limit(self):
with self.recorder.use_cassette(
'TestCommentForest.test_replace__all_with_comment_limit',
match_requests_on=['uri', 'method', 'body']):
submission = Submission(self.reddit, '3hahrw')
submission.comment_limit = 10
skipped = submission.comments.replace_more(None, threshold=0)
assert len(skipped) == 0
assert len(submission.comments.list()) >= 500
def test_replace__skip_at_limit(self):
with self.recorder.use_cassette(
'TestCommentForest.test_replace__skip_at_limit',
match_requests_on=['uri', 'method', 'body']):
submission = Submission(self.reddit, '3hahrw')
skipped = submission.comments.replace_more(1)
assert len(skipped) == 17
def test_replace__skip_below_threshold(self):
with self.recorder.use_cassette(
'TestCommentForest.test_replace__skip_below_threshold',
match_requests_on=['uri', 'method', 'body']):
submission = Submission(self.reddit, '3hahrw')
before_count = len(submission.comments.list())
skipped = submission.comments.replace_more(16, 5)
assert len(skipped) == 13
assert all(x.count < 5 for x in skipped)
assert all(x.submission == submission for x in skipped)
assert before_count < len(submission.comments.list())
def test_replace__skip_all(self):
with self.recorder.use_cassette(
'TestCommentForest.test_replace__skip_all',
match_requests_on=['uri', 'method', 'body']):
submission = Submission(self.reddit, '3hahrw')
before_count = len(submission.comments.list())
skipped = submission.comments.replace_more(limit=0)
assert len(skipped) == 18
assert all(x.submission == submission for x in skipped)
after_count = len(submission.comments.list())
assert before_count == after_count + len(skipped)
|
LABETE/TestYourProject
|
config/urls.py
|
Python
|
bsd-3-clause
| 1,501
| 0.000666
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
urlpatterns = [
url(r'^$', TemplateView.as_view(
template_name='pages/home.html'), name="home"),
url(r'^about/$',
TemplateView.as_view(template_name='pages/about.html'), name="about"),
# Django Admin
#url(r'^admin/', include(admin.site.urls)),
# User management
    url(r'^users/', include("TestYourProject.users.urls", namespace="users")),
url(r'^accounts/', include('allauth.urls')),
# Your stuff: custom urls includes go here
url(r'^api-auth/', include(
'rest_framework.urls', namespace='rest_framework')),
url(r'^api/', include('core.api', namespace='api')),
url(r'^rest-auth/', include('rest_auth.urls')),
url(r'^rest-auth/registration', include('rest_auth.registration.urls')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
url(r'^400/$', 'django.views.defaults.bad_request'),
url(r'^403/$', 'django.views.defaults.permission_denied'),
url(r'^404/$', 'django.views.defaults.page_not_found'),
url(r'^500/$', 'django.views.defaults.server_error'),
]
|
jiobert/python
|
Quintana_Jerrod/Assignments/f+sql_projects/full_friends/mysqlconnection.py
|
Python
|
mit
| 2,239
| 0.005806
|
""" import the necessary modules """
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.sql import text
# Create a class that will give us an object that we can use to connect to a database
class MySQLConnection(object):
def __init__(self, app, db):
config = {
'host': 'localhost',
'database': 'friends', # we got db as an argument
# my note: The database name above is the only db from the original copy of this document that changes
'user': 'root',
'password': '',
# password is blank because I never set it
'port': '3306' # change the port to match the port your SQL server is running on
}
# this will use the above values to generate the path to connect to your sql database
DATABASE_URI = "mysql://{}:{}@127.0.0.1:{}/{}".format(config['user'], config['password'], config['port'], config['database'])
app.config['SQLALCHEMY_DATABASE_URI'] = DATABASE_URI
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
# establish the connection to database
self.db = SQLAlchemy(app)
# this is the method we will use to query the database
def query_db(self, query, data=None):
result = self.db.session.execute(text(query), data)
if query[0:6].lower() == 'select':
# if the query was a select
# convert the result to a list of dictionaries
list_result = [dict(r) for r in result]
# return the results as a list of dictionaries
return list_result
elif query[0:6].lower() == 'insert':
            # if the query was an insert, return the id of the
# commit changes
            self.db.session.commit()
# row that was inserted
return result.lastrowid
else:
# if the query was an update or delete, return nothing and commit changes
self.db.session.commit()
# This is the module method to be called by the user in server.py. Make sure to provide the db name!
# My note: best I can tell, these two db's don't change, only the middle one
def MySQLConnector(app, db):
return MySQLConnection(app, db)
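# Illustrative usage -- a minimal sketch, not part of the original module;
# `app` is a Flask application object, and the `friends` table with
# first_name / last_name columns is assumed, mirroring the config above.
def _example_queries(app):
    mysql = MySQLConnector(app, 'friends')
    # SELECT queries come back as a list of dictionaries
    all_friends = mysql.query_db("SELECT * FROM friends")
    # INSERT queries commit and return the id of the new row; named parameters go in `data`
    new_id = mysql.query_db(
        "INSERT INTO friends (first_name, last_name) VALUES (:first, :last)",
        {'first': 'Ada', 'last': 'Lovelace'})
    return all_friends, new_id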
|
frasertweedale/drill
|
py/test_trie.py
|
Python
|
mit
| 1,273
| 0
|
import random
import unittest
from . import trie
class RWayTrieCase(unittest.TestCase):
def test_stores_values(self):
        xs = list(range(4096))  # materialize so shuffle() can work in place (Python 3 compatible)
random.shuffle(xs)
t = trie.RWayTrie()
for i in xs:
t.put(str(i), i)
for i in xs:
self.assertEqual(t.get(str(i)), i)
def test_raises_KeyError_if_key_not_in_tree(self):
t = trie.RWayTrie()
for i in range(15):
t.put(str(i), i)
with self.assertRaises(KeyError):
            t.get('16')
t = trie.RWayTrie()
t.put('asdf', 1)
with self.assertRaises(KeyError):
t.get('a')
class TernarySearchTrieCase(unittest.TestCase):
def test_stores_values(self):
        xs = list(range(4096))  # materialize so shuffle() can work in place (Python 3 compatible)
random.shuffle(xs)
t = trie.TernarySearchTrie()
for x in xs:
t.put(str(x), x)
for x in xs:
self.assertEqual(t.get(str(x)), x)
def test_raises_KeyError_if_key_not_in_tree(self):
t = trie.TernarySearchTrie()
for i in range(15):
t.put(str(i), i)
with self.assertRaises(KeyError):
t.get('16')
        t = trie.TernarySearchTrie()
t.put('asdf', 1)
with self.assertRaises(KeyError):
t.get('a')
|
zygmuntz/kaggle-advertised-salaries
|
split.py
|
Python
|
mit
| 825
| 0.069091
|
'''
split a file into two randomly, line by line.
Usage: split.py <input file> <output file 1> <output file 2> [<probability of writing to the first file>]'
'''
import csv
import sys
import random
try:
P = float( sys.argv[4] )
except IndexError:
P = 0.9
print "P = %s" % ( P )
input_file = sys.argv[1]
output_file1 = sys.argv[2]
output_file2 = sys.argv[3]
i = open( input_file )
o1 = open( output_file1, 'wb' )
o2 = open( output_file2, 'wb' )
reader = csv.reader( i )
writer1 = csv.writer( o1 )
writer2 = csv.writer( o2 )
#headers = reader.next()
#writer1.writerow( headers )
#writer2.writerow( headers )
counter = 0
for line in reader:
r = random.random()
if r > P:
writer2.writerow( line )
else:
writer1.writerow( line )
counter += 1
    if counter % 100000 == 0:
print counter
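# Example invocation (a sketch; the file names below are hypothetical):
#   python split.py data.csv train.csv holdout.csv 0.8
# which writes each input line to train.csv with probability 0.8 and to
# holdout.csv otherwise.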
|
ubiquitypress/rua
|
src/api/views.py
|
Python
|
gpl-2.0
| 1,221
| 0
|
import json
from django.http import HttpResponse
from django.utils.encoding import smart_text
from rest_framework import viewsets, permissions
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAuthenticated
from rest_framework.renderers import JSONRenderer
from api import serializers
from core.models import Book
class JSONResponse(HttpResponse):
""" An HttpResponse that renders its content into JSON. """
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs['content_type'] = 'application/json'
super(JSONResponse, self).__init__(content, **kwargs)
@api_view(['GET'])
@permission_classes((permissions.AllowAny,))
def index(request):
response_dict = {
'Message': 'Welcome to the API',
'Version': '1.0',
'API Endpoints':
[],
}
json_content = smart_text(json.dumps(response_dict))
return HttpResponse(json_content, content_type="application/json")
class JuraBookViewSet(viewsets.ModelViewSet):
permission_classes = IsAuthenticated,
queryset = Book.objects.all().order_by('id')
serializer_class = serializers.JuraBookSerializer
|
sergiooramas/tartarus
|
src/train.py
|
Python
|
mit
| 27,751
| 0.008937
|
from __future__ import print_function
import argparse
from collections import OrderedDict
import json
import os
import logging
from keras.callbacks import EarlyStopping
from sklearn.preprocessing import normalize
from sklearn.metrics import roc_curve, auc, roc_auc_score, precision_score, recall_score, f1_score, accuracy_score, average_precision_score
from scipy.sparse import csr_matrix
from keras.utils.io_utils import HDF5Matrix
#from keras.utils.visualize_util import plot
from keras.optimizers import SGD, Adam
from sklearn.metrics import r2_score
import numpy as np
import theano.tensor as tt
import pandas as pd
import random
import common
import models
from predict import obtain_predictions
from eval import do_eval
import h5py
class Config(object):
"""Configuration for the training process."""
def __init__(self, params, normalize=False, whiten=True):
self.model_id = common.get_next_model_id()
self.norm = normalize
self.whiten = whiten
self.x_path = '%s_%sx%s' % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
self.y_path = '%s_%s_%s' % (params['dataset']['fact'],params['dataset']['dim'],params['dataset']['dataset'])
self.dataset_settings = params['dataset']
self.training_params = params['training']
self.model_arch = params['cnn']
self.predicting_params = params['predicting']
def get_dict(self):
object_dict = self.__dict__
first_key = "model_id"
conf_dict = OrderedDict({first_key: object_dict[first_key]})
conf_dict.update(object_dict)
return conf_dict
def _squared_magnitude(x):
return tt.sqr(x).sum(axis=-1)
def _magnitude(x):
return tt.sqrt(tt.maximum(_squared_magnitude(x), np.finfo(x.dtype).tiny))
def cosine(x, y):
return tt.clip((1 - (x * y).sum(axis=-1) /
(_magnitude(x) * _magnitude(y))) / 2, 0, 1)
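# A worked example of the loss above (a sketch, not part of the original code):
# for unit vectors x = (1, 0) and y = (0, 1) the dot product is 0, so the value is
# (1 - 0) / 2 = 0.5; for y = x it is 0 and for y = -x it is 1. In other words this
# is a cosine distance rescaled to [0, 1] and clipped for numerical safety.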
def load_sparse_csr(filename):
loader = np.load(filename)
return csr_matrix(( loader['data'], loader['indices'], loader['indptr']),
shape = loader['shape'])
def build_model(config):
"""Builds the cnn."""
params = config.model_arch
get_model = getattr(models, 'get_model_'+str(params['architecture']))
model = get_model(params)
#model = model_kenun.build_convnet_model(params)
# Learning setup
t_params = config.training_params
sgd = SGD(lr=t_params["learning_rate"], decay=t_params["decay"],
momentum=t_params["momentum"], nesterov=t_params["nesterov"])
adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08)
optimizer = eval(t_params['optimizer'])
metrics = ['mean_squared_error']
if config.model_arch["final_activation"] == 'softmax':
metrics.append('categorical_accuracy')
if t_params['loss_func'] == 'cosine':
loss_func = eval(t_params['loss_func'])
else:
loss_func = t_params['loss_func']
model.compile(loss=loss_func, optimizer=optimizer,metrics=metrics)
return model
def load_data_preprocesed(params, X_path, Y_path, dataset, val_percent, test_percent, n_samples, with_metadata=False, only_metadata=False, metadata_source='rovi'):
    factors = np.load(common.DATASETS_DIR+'/y_train_'+Y_path+'.npy') # WATCH OUT: remove S
index_factors = open(common.DATASETS_DIR+'/items_index_train_'+dataset+'.tsv').read().splitlines()
if not only_metadata:
all_X = np.load(common.TRAINDATA_DIR+'/X_train_'+X_path+'.npy')
index_train = open(common.TRAINDATA_DIR+'/index_train_%s.tsv' % (X_path)).read().splitlines()
all_Y = np.zeros((len(index_train),factors.shape[1]))
index_factors_inv = dict()
for i,item in enumerate(index_factors):
index_factors_inv[item] = i
for i,item in enumerate(index_train):
all_Y[i,:] = factors[index_factors_inv[item]]
else:
all_Y = factors
if with_metadata:
if 'w2v' in metadata_source:
all_X_meta = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (metadata_source,dataset))[:,:int(params['cnn']['sequence_length'])]
elif 'model' in metadata_source or not params['dataset']['sparse']:
all_X_meta = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (metadata_source,dataset))
else:
all_X_meta = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' % (metadata_source,dataset)).todense()
all_X_in_meta = all_X = all_X_meta
print(all_X.shape)
print(all_Y.shape)
if n_samples != 'all':
n_samples = int(n_samples)
all_X = all_X[:n_samples]
all_Y = all_Y[:n_samples]
if with_metadata:
all_X_in_meta = all_X_in_meta[:n_samples]
if params['training']['normalize_y'] == True:
normalize(all_Y,copy=False)
if params['training']["val_from_file"]:
Y_val = np.load(common.DATASETS_DIR+'/y_val_'+Y_path+'.npy')
        Y_test = np.load(common.DATASETS_DIR+'/y_test_'+Y_path+'.npy') #!!! WATCH OUT: remove S from trainS
if params['dataset']['sparse']:
X_val = load_sparse_csr(common.TRAINDATA_DIR+'/X_val_%s_%s.npz' % (metadata_source,dataset)).todense()
X_test = load_sparse_csr(common.TRAINDATA_DIR+'/X_test_%s_%s.npz' % (metadata_source,dataset)).todense()
else:
X_val = np.load(common.TRAINDATA_DIR+'/X_val_%s_%s.npy' % (metadata_source,dataset))
            X_test = np.load(common.TRAINDATA_DIR+'/X_test_%s_%s.npy' % (metadata_source,dataset))
X_train = all_X
Y_train = all_Y
else:
N = all_Y.shape[0]
train_percent = 1 - val_percent - test_percent
N_train = int(train_percent * N)
N_val = int(val_percent * N)
logging.debug("
|
Training data points: %d" % N_train)
logging.debug("Validation data points: %d" % N_val)
logging.debug("Test data points: %d" % (N - N_train - N_val))
if not only_metadata:
# Slice data
X_train = all_X[:N_train]
X_val = all_X[N_train:N_train + N_val]
X_test = all_X[N_train + N_val:]
Y_train = all_Y[:N_train]
Y_val = all_Y[N_train:N_train + N_val]
Y_test = all_Y[N_train + N_val:]
if with_metadata:
if only_metadata:
X_train = all_X_in_meta[:N_train]
X_val = all_X_in_meta[N_train:N_train + N_val]
X_test = all_X_in_meta[N_train + N_val:]
else:
X_train = [X_train,all_X_in_meta[:N_train]]
X_val = [X_val,all_X_in_meta[N_train:N_train + N_val]]
X_test = [X_test,all_X_in_meta[N_train + N_val:]]
return X_train, Y_train, X_val, Y_val, X_test, Y_test
def load_data_hf5(params,val_percent, test_percent):
hdf5_file = common.PATCHES_DIR+"/patches_train_%s_%s.hdf5" % (params['dataset']['dataset'],params['dataset']['window'])
f = h5py.File(hdf5_file,"r")
N = f["targets"].shape[0]
f.close()
train_percent = 1 - val_percent - test_percent
N_train = int(train_percent * N)
N_val = int(val_percent * N)
X_train = HDF5Matrix(hdf5_file, 'features', start=0, end=N_train)
Y_train = HDF5Matrix(hdf5_file, 'targets', start=0, end=N_train)
X_val = HDF5Matrix(hdf5_file, 'features', start=N_train, end=N_train+N_val)
Y_val = HDF5Matrix(hdf5_file, 'targets', start=N_train, end=N_train+N_val)
X_test = HDF5Matrix(hdf5_file, 'features', start=N_train+N_val, end=N)
Y_test = HDF5Matrix(hdf5_file, 'targets', start=N_train+N_val, end=N)
return X_train, Y_train, X_val, Y_val, X_test, Y_test, N_train
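# A quick sketch of the slicing arithmetic above (not part of the original code):
# with N = 1000 patches, val_percent = 0.1 and test_percent = 0.1, train_percent
# becomes 0.8, so rows 0..799 are served as training data, 800..899 as validation
# and 900..999 as test, all lazily through HDF5Matrix.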
def load_data_hf5_memory(params,val_percent, test_percent, y_path, id2gt, X_meta = None, val_from_file = False):
if val_from_file:
hdf5_file = common.PATCHES_DIR+"/patches_train_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
f = h5py.File(hdf5_file,"r")
index_train = f["index"][:]
index_train = np.delete(index_train, np.where(index_train == ""))
N_train = index_train.shape[0]
val_hdf5_file = common.PATCHES_DIR+"/pat
|
hamish2014/optTune
|
docs/conf.py
|
Python
|
gpl-3.0
| 7,028
| 0.006545
|
# -*- coding: utf-8 -*-
#
# optTune documentation build configuration file, created by
# sphinx-quickstart on Wed Jan 11 12:14:27 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc']
autoclass_content = 'both'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'optTune'
copyright = u'2014, Antoine Dymond'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'optTunedoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'optTune.tex', u'optTune Documentation',
u'Antoine Dymond', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'opttune', u'optTune Documentation',
[u'Antoine Dymond'], 1)
]
|
Stanford-Online/edx-platform
|
lms/djangoapps/certificates/queue.py
|
Python
|
agpl-3.0
| 22,442
| 0.001203
|
"""Interface for adding certificate generation tasks to the XQueue. """
import json
import logging
import random
from uuid import uuid4
import lxml.html
from django.conf import settings
from django.urls import reverse
from django.test.client import RequestFactory
from lxml.etree import ParserError, XMLSyntaxError
from requests.auth import HTTPBasicAuth
from capa.xqueue_interface import XQueueInterface, make_hashkey, make_xheader
from lms.djangoapps.certificates.models import CertificateStatuses as status
from lms.djangoapps.certificates.models import (
CertificateWhitelist,
ExampleCertificate,
GeneratedCertificate,
certificate_status_for_student
)
from course_modes.models import CourseMode
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
from lms.djangoapps.verify_student.services import IDVerificationService
from student.models import CourseEnrollment, UserProfile
from xmodule.modulestore.django import modulestore
LOGGER = logging.getLogger(__name__)
class XQueueAddToQueueError(Exception):
"""An error occurred when adding a certificate task to the queue. """
def __init__(self, error_code, error_msg):
self.error_code = error_code
self.error_msg = error_msg
super(XQueueAddToQueueError, self).__init__(unicode(self))
def __unicode__(self):
return (
u"Could not add certificate to the XQueue. "
u"The error code was '{code}' and the message was '{msg}'."
).format(
code=self.error_code,
msg=self.error_msg
)
class XQueueCertInterface(object):
"""
XQueueCertificateInterface provides an
interface to the xqueue server for
managing student certificates.
Instantiating an object will create a new
connection to the queue server.
See models.py for valid state transitions,
summary of methods:
add_cert: Add a new certificate. Puts a single
request on the queue for the student/course.
Once the certificate is generated a post
will be made to the update_certificate
view which will save the certificate
download URL.
regen_cert: Regenerate an existing certificate.
For a user that already has a certificate
this will delete the existing one and
generate a new cert.
del_cert: Delete an existing certificate
For a user that already has a certificate
this will delete his cert.
"""
def __init__(self, request=None):
# Get basic auth (username/password) for
# xqueue connection if it's in the settings
if settings.XQUEUE_INTERFACE.get('basic_auth') is not None:
requests_auth = HTTPBasicAuth(
*settings.XQUEUE_INTERFACE['basic_auth'])
else:
requests_auth = None
if request is None:
factory = RequestFactory()
self.request = factory.get('/')
else:
self.request = request
self.xqueue_interface = XQueueInterface(
settings.XQUEUE_INTERFACE['url'],
settings.XQUEUE_INTERFACE['django_auth'],
requests_auth,
)
self.whitelist = CertificateWhitelist.objects.all()
self.restricted = UserProfile.objects.filter(allow_certificate=False)
self.use_https = True
def regen_cert(
self,
student,
course_id,
course=None,
designation=None,
forced_grade=None,
template_file=None,
generate_pdf=True,
):
"""(Re-)Make certificate for a particular student in a particular course
Arguments:
student - User.object
course_id - courseenrollment.course_id (string)
WARNING: this command will leave the old certificate, if one exists,
laying around in AWS taking up space. If this is a problem,
take pains to clean up storage before running this command.
Change the certificate status to unavailable (if it exists) and request
grading. Passing grades will put a certificate request on the queue.
Return the certificate.
"""
# TODO: when del_cert is implemented and plumbed through certificates
# repo also, do a deletion followed by a creation r/t a simple
# recreation. XXX: this leaves orphan cert files laying around in
# AWS. See note in the docstring too.
try:
certificate = GeneratedCertificate.eligible_certificates.get(user=student, course_id=course_id)
LOGGER.info(
(
u"Found an existing certificate entry for student %s "
u"in course '%s' "
u"with status '%s' while regenerating certificates. "
),
student.id,
unicode(course_id),
certificate.status
)
certificate.status = status.unavailable
certificate.save()
LOGGER.info(
(
u"The certificate status for student %s "
u"in course '%s' has been changed to '%s'."
),
student.id,
unicode(course_id),
certificate.status
)
except GeneratedCertificate.DoesNotExist:
pass
return self.add_cert(
student,
course_id,
course=course,
designation=designation,
forced_grade=forced_grade,
template_file=template_file,
generate_pdf=generate_pdf
)
def del_cert(self, student, course_id):
"""
Arguments:
student - User.object
course_id - courseenrollment.course_id (string)
Removes certificate for a student, will change
the certificate status to 'deleting'.
Certificate must be in the 'error' or 'downloadable' state
otherwise it will return the current state
"""
raise NotImplementedError
# pylint: disable=too-many-statements
def add_cert(self, student, course_id, course=None, forced_grade=None, template_file=None, designation=None, generate_pdf=True):
"""
Request a new certificate for a student.
Arguments:
student - User.object
course_id - courseenrollment.course_id (CourseKey)
forced_grade - a string indicating a grade parameter to pass with
the certificate request. If this is given, grading
will be skipped.
generate_pdf - Boolean should a message be sent in queue to generate certificate PDF
Will change the certificate status to 'generating' or
`downloadable` in case of web view certificates.
The course must not be a CCX.
Certificate must be in the 'unavailable', 'error',
'deleted' or 'generating' state.
        If a student has a passing grade or is in the whitelist
table for the course a request will be made for a new cert.
If a student has allow_certificate set to False in the
userprofile table the status will change to 'restricted'
If a student does not have a passing grade the status
will change to status.notpassing
Returns the newly created certificate instance
"""
if hasattr(course_id, 'ccx'):
LOGGER.warning(
|
(
u"Cannot create certificate generation task for user %s "
u"in the course '%s'; "
u"certificates are not allowed for CCX courses."
),
student.id,
unicode(course_id)
)
return None
valid_statuses = [
status.generating,
status.unavailable,
status.deleted,
status.error,
status.notpas
|
PegasusWang/pyhome
|
crawler/morningstar/morningstar.py
|
Python
|
mit
| 2,629
| 0.001193
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""晨星基金评级数据,用来买基金作为参考"""
import _env
import copy
import heapq
import requests
from operator import itemgetter
from tornado.escape import utf8
from six import print_
from bs4 import BeautifulSoup
from web_util import get
def parse_html(html):
html = utf8(html)
soup = BeautifulSoup(html)
tb = soup.find('table', class_="fr_tablecontent")
tr_li = tb.find_all('tr')
for tr in tr_li:
td_li = tr.find_all('td')
for td in td_li:
cxt = ''.join(td.get_text().split()) # remove space
print_(cxt, end=' ')
print_('\n')
def parse_txt(filepath):
with open(filepath) as f:
lines = f.readlines()
for line in lines:
d = line.strip().split()
if d:
assert len(d) == 14
print_(d[0], end='\n')
def main():
with open('./stock_fund.html') as f:
parse_html(f.read())
#parse_txt('./txt')
def fetch_parse():
url = 'http://cn.morningstar.com/handler/fundranking.ashx?date=2016-04-08&fund=&category=mix_radical&rating=&company=&cust=&sort=Return2Year&direction=desc&tabindex=1&pageindex=1&pagesize=10000&randomid=0.043611296370827723'
html = get(url).text
parse_html(html)
def choose(filepath_txt, sort_index_list=[10, 8, 7]):
"""
0 序号
1 基金代码
2 基金名称
3 今年以来
4 一周
5 一个月
6 三个月
7 六个月
8 一年
9 两年
10 三年
11 五年
12 十年
13 设立以来
"""
with open(filepath_txt, 'r') as f:
lines = f.readlines()
fund_array = [] # 2 demonsional array of fund_info
for line in lines:
info_list = line.strip().split()
if info_list:
for index, value in enumerate(info_list):
if index >= 3:
try:
value = float(value)
except ValueError:
value = 0.0
info_list[index] = value
fund_array.append(info_list)
num = 100
for sort_index in sort_index_list:
|
fund_array.sort(key=itemgetter(sort_index), reverse=True)
fund_array = fund_array[0:num]
        num //= 2  # integer division keeps the slice bound an int on Python 3
    fund_str_array = [' '.join([str(i) for i in l]) for l in fund_array]
res = '\n'.join(fund_str_array)
with open('res', 'w') as f:
f.write(res)
if __name__ == '__main__':
choose('./log')
|
galeone/pgnet
|
inputs/pascifar.py
|
Python
|
mpl-2.0
| 3,128
| 0.003517
|
#Copyright (C) 2016 Paolo Galeone <nessuno@nerdz.eu>
#
#This Source Code Form is subject to the terms of the Mozilla Public
#License, v. 2.0. If a copy of the MPL was not distributed with this
#file, you can obtain one at http://mozilla.org/MPL/2.0/.
#Exhibit B is not attached; this software is compatible with the
#licenses expressed under Section 1.12 of the MPL v2.
"""Generate the input from the PASCIFAR dataset"""
import os
import tensorflow as tf
from . import image_processing
# The depth of the example
INPUT_DEPTH = 3
INPUT_SIDE = 32
# Global constants describing the PASCIFAR data set.
NUM_CLASSES = 17
NUM_EXAMPLES = 42000
def read_pascifar(pascifar_path, queue):
""" Reads and parses files from the queue.
Args:
pascifar_path: a constant string tensor representing the path of the PASCIFAR dataset
queue: A queue of strings in the format: file, label
Returns:
|
image_path: a tf.string tensor. The absolute path of the image in the dataset
label: a int64 tensor with the label
"""
# Reader for text lines
reader = tf.TextLineReader(skip_header_lines=1)
# read a record from the queue
_, row = reader.read(queue)
# file,width,height,label
record_defaults = [[""], [0]]
image_path, label = tf.decode_csv(row, record_defaults, field_delim=",")
image_path = pascifar_path + tf.constant("/") + image_path
label = tf.cast(label, tf.int64)
return image_path, label
def test(pascifar_path,
batch_size,
input_side,
csv_path=os.path.abspath(os.getcwd())):
"""Returns a batch of images from the test dataset.
Args:
pascifar_path: path of the test dataset
batch_size: Number of images per batch.
input_side: resize images to shape [input_side, input_side, 3]
csv_path: path (into the test dataset usually) where to find the list of file to read.
specify the filename and the path here, eg:
~/data/PASCAL_2012/test/VOCdevkit/VOC2012/ImageSets/Main/test.txt
Returns:
images: Images. 4D tensor of [batch_size, input_side, input_side, DEPTH] size.
filenames: file names. [batch_size] tensor with the fileneme read. (without extension)
"""
pascifar_path = tf.constant(
os.path.abspath(os.path.expanduser(pascifar_path)).rstrip("/") + "/")
# read every line in the file, only once
queue = tf.train.string_input_producer(
[csv_path], num_epochs=1, shuffle=False, name="pascifar_queue")
image_path, label = read_pascifar(pascifar_path, queue)
# read, resize, scale between [-1,1]
image = image_processing.eval_image(
image_path, input_side, image_type="png")
# create a batch of images & filenames
# (using a queue runner, that extracts image from the queue)
images, labels = tf.train.batch(
[image, label],
batch_size,
shapes=[[input_side, input_side, INPUT_DEPTH], []],
num_threads=1,
capacity=20000,
enqueue_many=False,
name="pascifar_inputs")
return images, labels
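# Sketch of how the batch returned by test() is meant to be consumed with the TF1
# queue-runner API used above (the dataset path, CSV name and batch size are
# illustrative, not part of this module):
#     images, labels = test("~/data/PASCIFAR", batch_size=64, input_side=INPUT_SIDE,
#                           csv_path="~/data/PASCIFAR/ImageSets/test.txt")
#     with tf.Session() as sess:
#         sess.run(tf.local_variables_initializer())  # needed because num_epochs=1
#         coord = tf.train.Coordinator()
#         threads = tf.train.start_queue_runners(sess=sess, coord=coord)
#         batch_images, batch_labels = sess.run([images, labels])
#         coord.request_stop()
#         coord.join(threads)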
|
fifengine/fifengine-demos
|
pychan_demo/styling.py
|
Python
|
lgpl-2.1
| 3,586
| 0.044897
|
# -*- coding: utf-8 -*-
# ####################################################################
# Copyright (C) 2005-2013 by the FIFE team
# http://www.fifengine.net
# This file is part of FIFE.
#
# FIFE is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ####################################################################
from builtins import str
from fife import fife, fifechan
from fife.extensions import pychan
from pychan_demo import PyChanExample
STYLES= {
'new default': {
'default' : {
'border_size': 2,
'margins': (0,0),
'base_color' : fifechan.Color(128,128,128),
'foreground_color' : fifechan.Color(255,255,255),
'background_color' : fifechan.Color(55,55,55),
'font' : 'samanata_small'
},
'Button' : {
'border_size': 2,
'margins' : (20,5),
'min_size' : (100,20),
'font' : 'samanata_small'
},
'CheckBox' : {
|
'border_size': 0,
'background_color' : fifechan.Color(0,0,0,0),
},
'RadioButton' : {
'border_size': 0,
'background_color' : fifechan.Color(0,0,0,0),
},
'Label' : {
'border_size': 0,
'font' : 'samanata_small'
},
'ListBox' : {
'border_size': 0,
'font' : 'samanata_small'
},
'Window' : {
    'border_size': 1,
'margins': (10,10),
'opaque' : False,
'titlebar_height' : 30,
'background_image' : 'gui/backgrounds/background.png',
'font' : 'samanata_large'
},
'TextBox' : {
'font' : 'samanata_small'
},
('Container','HBox','VBox') : {
'border_size': 0,
'background_image' : 'gui/backgrounds/background.png',
'opaque' : False
}
},
'greenzone' : {
'default' : {
'base_color': fifechan.Color(80,200,80) ,
'background_color': fifechan.Color(200,250,200),
},
'Window' : {
'titlebar_height' : 30,
},
'ListBox' : {
'font' : 'samanata_large'
}
}
}
class StylingExample(PyChanExample):
def __init__(self):
super(StylingExample,self).__init__('gui/styling.xml')
self.styles = ['default'] + list(STYLES.keys())
for name,style in list(STYLES.items()):
pychan.manager.addStyle(name,style)
pychan.loadFonts("fonts/samanata.xml")
def start(self):
self.styledCredits = pychan.loadXML('gui/all_widgets.xml')
self.styledCredits.distributeInitialData({
'demoList' : [x for x in dir(pychan)],
'demoText' : pychan.__doc__
})
self.widget = pychan.loadXML(self.xmlFile)
self.widget.mapEvents({
'testStyle' : self.testStyle,
'closeButton':self.stop,
})
self.widget.distributeInitialData({
'styleList' : self.styles
})
self.widget.position_technique = 'right-20:center'
self.styledCredits.position_technique = 'left+20:center'
self.widget.show()
self.styledCredits.show()
def stop(self):
super(StylingExample,self).stop()
if self.styledCredits:
self.styledCredits.hide()
self.styledCredits = None
def testStyle(self):
style = self.styles[self.widget.collectData('styleList')]
self.styledCredits.stylize(style)
self.styledCredits.show()
|
calvinchengx/O-Kay-Blog-wih-Kay-0.10.0
|
kay/auth/backends/googleaccount.py
|
Python
|
bsd-3-clause
| 2,423
| 0.011969
|
# -*- coding: utf-8 -*-
"""
Kay authentication backend using google account.
:Copyright: (c) 2009 Accense Technology, Inc.
Takashi Matsuo <tmatsuo@candit.jp>,
All rights reserved.
:license: BSD, see LICENSE for more details.
"""
from google.appengine.ext import db
from google.appengine.api import users
from werkzeug.utils import import_string
from kay.exceptions import ImproperlyConfigured
from kay.conf import settings
from kay.auth.models import AnonymousUser
class GoogleBackend(object):
def get_user(self, request):
try:
dot = settings.AUTH_USER_MODEL.rfind('.')
auth_module = settings.AUTH_USER_MODEL[:dot]
import sys
try:
del(sys.modules[auth_module])
except KeyError:
pass
auth_model_class = import_string(settings.AUTH_USER_MODEL)
except (ImportError, AttributeError), e:
raise ImproperlyConfigured, \
'Failed to import %s: "%s".' % (settings.AUTH_USER_MODEL, e)
user = users.get_current_user()
if user:
key_name = '_%s' % user.user_id()
email = user.email()
is_current_user_admin = users.is_current_user_admin()
def txn():
        entity = auth_model_class.get_by_key_name(key_name)
if entity is None:
entity = auth_model_class(
key_name=key_name,
email=email,
is_admin=is_current_user_admin,
)
entity.put()
else:
          update_user = False
if entity.is_admin != is_current_user_admin:
entity.is_admin = is_current_user_admin
update_user = True
if entity.email != email:
entity.email = email
update_user = True
if update_user:
entity.put()
return entity
return db.run_in_transaction(txn)
else:
return AnonymousUser()
def create_login_url(self, url, **kwargs):
return users.create_login_url(url, **kwargs)
def create_logout_url(self, url, **kwargs):
return users.create_logout_url(url, **kwargs)
def login(self, request, user_name, password):
return
def test_login(self, client, email='', is_admin=''):
import os
os.environ['USER_EMAIL'] = email
os.environ['USER_IS_ADMIN'] = '1' if is_admin else ''
def test_logout(self, client):
import os
os.environ['USER_EMAIL'] = ''
os.environ['USER_IS_ADMIN'] = ''
|
RaitoBezarius/mangaki
|
mangaki/mangaki/migrations/0014_auto_20150624_0003.py
|
Python
|
agpl-3.0
| 2,006
| 0.001496
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('mangaki', '0013_auto_20150616_0919'),
]
operations = [
migrations.AddField(
model_name='profile',
name='nsfw_ok',
field=models.BooleanField(default=False),
preserve_default=True,
),
migrations.AlterField(
model_name='manga',
name='manga_type',
field=models.TextField(blank=True, max_length=16, choices=[('seinen', 'Seinen'), ('shonen', 'Shonen'), ('shojo', 'Shojo'), ('yaoi', 'Yaoi'), ('sonyun-manhwa', 'Sonyun-Manhwa'), ('kodomo', 'Kodomo'), ('ecchi-hentai', 'Ecchi-Hentai'), ('global-manga', 'Global-Manga'), ('manhua', 'Manhua'), ('josei', 'Josei'), ('sunjung-sunjeong', 'Sunjung-Sunjeong'), ('chungnyun', 'Chungnyun'), ('yuri', 'Yuri'), ('dojinshi-parodie', 'Dojinshi-Parodie'), ('manhwa', 'Manhwa'), ('yonkoma', 'Yonkoma')]),
preserve_default=True,
),
migrations.AlterField(
model_name='rating',
name='choice',
field=models.CharField(max_length=8, choices=[('favorite', 'Mon favori !'), ('like', "J'aime"), ('dislike', "Je n'aime pas"), ('neutral', 'Neutre'), ('willsee', 'Je veux voir'), ('wontsee', 'Je ne veux pas voir')]),
preserve_default=True,
),
migrations.AlterField(
model_name='suggestion',
            name='problem',
            field=models.CharField(verbose_name='Partie concernée', max_length=8, choices=[('title', "Le titre n'est pas le bon"), ('poster', 'Le poster ne convient pas'), ('synopsis', 'Le synopsis comporte des erreurs'), ('author', "L'auteur n'est pas le bon"), ('composer', "Le compositeur n'est pas le bon"), ('double', 'Ceci est un doublon'), ('nsfw', "L'oeuvre est NSFW"), ('n_nsfw', "L'oeuvre n'est pas NSFW")]),
preserve_default=True,
),
]
|
jolyonb/edx-platform
|
cms/djangoapps/contentstore/management/commands/delete_course.py
|
Python
|
agpl-3.0
| 3,575
| 0.004755
|
from __future__ import print_function
from six import text_type
from django.core.management.base import BaseCommand, CommandError
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from contentstore.utils import delete_course
from xmodule.contentstore.django import contentstore
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from .prompt import query_yes_no
class Command(BaseCommand):
"""
Delete a MongoDB backed course
Example usage:
$ ./manage.py cms delete_course 'course-v1:edX+DemoX+Demo_Course' --settings=devstack
$ ./manage.py cms delete_course 'course-v1:edX+DemoX+Demo_Course' --keep-instructors --settings=devstack
        $ ./manage.py cms delete_course 'course-v1:edX+DemoX+Demo_Course' --remove-assets --settings=devstack
Note:
The keep-instructors option is useful for resolving issues that arise when a course run's ID is duplicated
in a case-insensitive manner. MongoDB is case-sensitive, but MySQL is case-insensitive. This results in
        course-v1:edX+DemoX+1t2017 being treated differently in MongoDB from course-v1:edX+DemoX+1T2017 (capital 'T').
If you need to remove a duplicate that has resulted from casing issues, use the --keep-instructors flag
to ensure that permissions for the remaining course run are not deleted.
Use the remove-assets option to ensure all assets are deleted. This is especially relevant to users of the
split Mongo modulestore.
"""
help = 'Delete a MongoDB backed course'
def add_arguments(self, parser):
parser.add_argument(
'course_key',
help='ID of the course to delete.',
)
parser.add_argument(
'--keep-instructors',
action='store_true',
default=False,
help='Do not remove permissions of users and groups for course',
)
parser.add_argument(
'--remove-assets',
action='store_true',
help='Remove all assets associated with the course. '
'Be careful! These assets may be associated with another course',
)
def handle(self, *args, **options):
try:
# a course key may have unicode chars in it
try:
course_key = text_type(options['course_key'], 'utf8')
# May already be decoded to unicode if coming in through tests, this is ok.
except TypeError:
course_key = text_type(options['course_key'])
course_key = CourseKey.from_string(course_key)
except InvalidKeyError:
raise CommandError(u'Invalid course_key: {}'.format(options['course_key']))
if not modulestore().get_course(course_key):
raise CommandError(u'Course not found: {}'.format(options['course_key']))
print(u'Preparing to delete course %s from module store....' % options['course_key'])
if query_yes_no(u'Are you sure you want to delete course {}?'.format(course_key), default='no'):
if query_yes_no(u'Are you sure? This action cannot be undone!', default='no'):
delete_course(course_key, ModuleStoreEnum.UserID.mgmt_command, options['keep_instructors'])
if options['remove_assets']:
contentstore().delete_all_course_assets(course_key)
                    print(u'Deleted assets for course {}'.format(course_key))
print(u'Deleted course {}'.format(course_key))
|
utcoupe/coupe18
|
ros_ws/src/processing_belt_interpreter/src/belt_interpreter_node.py
|
Python
|
gpl-3.0
| 9,252
| 0.002702
|
#!/usr/bin/env python
import rospy
from belt_parser import BeltParser
import tf
import tf2_ros
import math
import copy
from memory_definitions.srv import GetDefinition
from processing_belt_interpreter.msg import *
from drivers_ard_others.msg import BeltRange
from geometry_msgs.msg import Pose2D, TransformStamped, PointStamped
from ai_game_manager import StatusServices
from dynamic_reconfigure.server import Server
from processing_belt_interpreter.cfg import BeltInterpreterConfig
from multiprocessing import Lock
class BeltInterpreter(object):
def __init__(self):
super(BeltInterpreter, self).__init__()
rospy.init_node("belt_interpreter")
rospy.loginfo("Belt interpreter is initializing...")
# template for the sensor frame id, with '{}' being the sensor id
self.SENSOR_FRAME_ID = "belt_{}"
self.DEF_FILE = "processing/belt.xml"
self.TOPIC = "/processing/belt_interpreter/rects"
self.SENSORS_TOPIC = "/drivers/ard_others/belt_ranges"
self.PUB_RATE = rospy.Rate(10)
self.RECT_SCALE_WIDTH = 1.0
self.RECT_SCALE_HEIGHT = 1.0
self.WATCHDOG_PERIOD_BELT = rospy.Duration(0.015)
self.WATCHDOG_PERIOD_TERA = rospy.Duration(0.05)
self.PREVIOUS_DATA_SIZE = 2
filepath = self.fetch_definition()
self._belt_parser = BeltParser(filepath)
self._pub = rospy.Publisher(self.TOPIC, BeltRects, queue_size=1)
self._broadcaster = tf2_ros.StaticTransformBroadcaster()
self.pub_static_transforms()
self._sensors_sub = rospy.Subscriber(self.SENSORS_TOPIC, BeltRange,
self.callback)
self.syn_param_srv = Server(BeltInterpreterConfig, self.dyn_param_cb)
self._mutex = Lock()
self._watchdog = rospy.Timer(self.WATCHDOG_PERIOD_TERA, self.publish, oneshot=True)
self._current_rects = {}
self._current_statuses = {}
self._data_to_process = []
self._previous_rects = []
self._previous_statuses = []
self._same_bad_value_counter = {s: 0 for s in self._belt_parser.Sensors.keys()}
self._last_bad_value = {s: 0 for s in self._belt_parser.Sensors.keys()}
rospy.loginfo("Belt interpreter is ready. Listening for sensor data on '{}'.".format(self.SENSORS_TOPIC)) # TODO duplicate log with status_services.ready()
# Tell ai/game_manager the node initialized successfuly.
StatusServices("processing", "belt_interpreter").ready(True)
rospy.spin()
def dyn_param_cb(self, config, level):
self.RECT_SCALE_HEIGHT = config["RECT_SCALE_HEIGHT"]
self.RECT_SCALE_WIDTH = config["RECT_SCALE_WIDTH"]
rospy.loginfo("Set rect scale to (%f, %f)" % (self.RECT_SCALE_WIDTH, self.RECT_SCALE_HEIGHT))
return config
def publish(self, event):
with self._mutex:
if self._current_rects.keys() == ["sensor_tera1"] or not self._current_rects:
if self._watchdog:
self._watchdog.shutdown()
self._watchdog = rospy.Timer(self.WATCHDOG_PERIOD_TERA, self.publish, oneshot=True)
if len(self._current_rects) > 0:
self._previous_rects.append(copy.deepcopy(self._current_rects))
self._previous_statuses.append(copy.deepcopy(self._current_statuses))
if(len(self._previous_rects) > self.PREVIOUS_DATA_SIZE):
self._previous_rects.pop(0)
if (len(self._previous_statuses) > self.PREVIOUS_DATA_SIZE):
self._previous_statuses.pop(0)
self._pub.publish(self._current_rects.values())
self._current_rects.clear()
self._current_statuses.clear()
def process_range(self, data):
if data.sensor_id not in self._belt_parser.Sensors.keys():
rospy.logerr("Received data from belt sensor '{}' but no such sensor is defined"
.format(data.sensor_id))
return
with self._mutex:
params = self._belt_parser.Params[self._belt_parser.Sensors[data.sensor_id]["type"]]
if data.range > params["max_range"] or data.range <= 0:
self._current_statuses.update({data.sensor_id: False})
if data.range == self._last_bad_value[data.sensor_id]:
self._same_bad_value_counter[data.sensor_id] += 1
else:
self._same_bad_value_counter[data.sensor_id] = 0
self._last_bad_value[data.sensor_id] = data.range
if self._same_bad_value_counter[data.sensor_id] > 100:
rospy.logwarn_throttle(1, "Sensor %s might be disconnected !" % data.sensor_id)
# If we published this sensor most of the time and its bad, publish the last one we got
l = [data.sensor_id in d and d[data.sensor_id] for d in self._previous_statuses]
if sum(l) > math.ceil((self.PREVIOUS_DATA_SIZE + 1) / 2):
for d in reversed(self._previous_rects):
if data.sensor_id in d:
rospy.logdebug('Got bad data for sensor %s but publishing the last good data' % data.sensor_id)
r = d[data.sensor_id]
r.header.stamp = rospy.Time.now()
self._current_rects.update({data.sensor_id: d[data.sensor_id]})
return
return
self._same_bad_value_counter[data.sensor_id] = 0
if params["scale_responsive"]:
width = self.get_rect_width(data.range, params) * self.RECT_SCALE_WIDTH
height = self.get_rect_height(data.range, params) * self.RECT_SCALE_HEIGHT
else:
|
width = self.get_rect_width(data.range, params)
height = self.get_rect_height(data.range, params)
rect = RectangleStamped()
rect.header.frame_id = self.SENSOR_FRAME_ID.format(data.sensor_id)
rect.header.stamp = rospy.Time.now()
            rect.x = self.get_rect_x(data.range, params)
rect.y = 0
rect.w = width
rect.h = height
rect.a = 0
self._current_rects.update({data.sensor_id: rect})
self._current_statuses.update({data.sensor_id: True})
def get_rect_width(self, r, params):
prec = r * params["precision"]
angle = params["angle"]
x_far = r + prec
x_close = math.cos(angle / 2) * (r - prec)
# called width because along x axis, but it is the smaller side
width = abs(x_far - x_close)
return width
def get_rect_height(self, r, params):
prec = r * params["precision"]
angle = params["angle"]
return abs(2 * math.sin(angle / 2) * (r + prec))
def get_rect_x(self, r, params):
prec = r * params["precision"]
angle = params["angle"]
x_far = r + prec
x_close = math.cos(angle / 2) * (r - prec)
return (x_far + x_close) / 2
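    # Worked example with illustrative numbers: for range r=1.0, precision=0.05 and
    # angle=0.5 rad, prec=0.05, x_far=1.05 and x_close=cos(0.25)*0.95~=0.920, so the
    # rectangle is ~0.130 wide, 2*sin(0.25)*1.05~=0.520 high and centred at x~=0.985
    # in the sensor frame.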
def callback(self, data):
publish_now = False
if data.sensor_id in self._current_rects and data.sensor_id != 'sensor_tera1':
publish_now = True
self.process_range(data)
if data.sensor_id != 'sensor_tera1' and not publish_now:
if self._watchdog:
self._watchdog.shutdown()
self._watchdog = rospy.Timer(self.WATCHDOG_PERIOD_BELT, self.publish, oneshot=True)
elif publish_now:
self.publish(None)
def pub_static_transforms(self):
tr_list = []
for id, s in self._belt_parser.Sensors.items():
tr = TransformStamped()
tr.header.stamp = rospy.Time.now()
tr.header.frame_id = "robot"
tr.child_frame_id = self.SENSOR_FRAME_ID.format(id)
tr.transform.translation.x = s["x"]
tr.transform.translation.y = s["y"]
tr.transform.translation.z = 0
quat = tf.transformations.quaternion_from_euler
|
zeraien/odb_shared_django
|
http_shortcuts.py
|
Python
|
mit
| 2,641
| 0.006437
|
from past.builtins import basestring
import os.path
import simplejson as json
from django.shortcuts import render as django_render
from django.http import HttpResponseRedirect, HttpResponse, HttpResponsePermanentRedirect
from django.utils.decorators import available_attrs
from functools import wraps
def render(request, template, context = {}, ignore_ajax = False, obj=None, content_type=None, status=None, using=None):
if request.is_ajax() and not ignore_ajax:
basename = os.path.basename(template)
if not basename.startswith("_"):
dirname = os.path.dirname(template)
template = "%s/_%s"%(dirname,basename)
response = django_render(request=request, template_name=template, context=context)
else:
response = django_render(request,
template_name=template,
context=context,
content_type=content_type,
status=status,
using=using)
return response
def permanent_redirect(view_func):
@wraps(view_func, assigned=available_attrs(view_func))
def wrapper(request, *args, **kw):
to = view_func(request, *args, **kw)
if isinstance(to, basestring):
return HttpResponsePermanentRedirect(to)
else:
return to
return wrapper
def redirect(view_func):
@wraps(view_func, assigned=available_attrs(view_func))
def wrapper(request, *args, **kw):
to = view_func(request, *args, **kw)
if isinstance(to, basestring):
return HttpResponseRedirect(to)
else:
return to
return wrapper
def render_json(view_func):
@wraps(view_func, assigned=available_attrs(view_func))
def wrapper(request, *args, **kwargs):
_json = view_func(request, *args, **kwargs)
if not isinstance(_json, str) and not isinstance(_json, dict) and not isinstance(_json, list) and not isinstance(_json, tuple):
return _json
return HttpResponse(json.dumps(_json), content_type="application/json")
return wrapper
def render_to(template_name, ignore_ajax=False):
def renderer(func):
@wraps(func, assigned=available_attrs(func))
def wrapper(request, *args, **kw):
output = func(request, *args, **kw)
if not isinstance(output, dict):
return output
output['request'] = request
return render(request, template=template_name, context=output, ignore_ajax=ignore_ajax)
return wrapper
return renderer
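# Example views wired to the decorators above; the template path and payloads are
# hypothetical and only illustrate the calling convention.
@render_to("example/snippet.html")
def example_page(request):
    # An AJAX request for this view is rendered with "example/_snippet.html" instead.
    return {"message": "hello"}

@render_json
def example_api(request):
    return {"ok": True}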
|
3299/visioninabox
|
helpers/generateCalibration.py
|
Python
|
mit
| 1,843
| 0.004883
|
#!/usr/bin/env python
# Thank you to https://goo.gl/NDyw63
# Imports
import os
import json
from glob import glob
import numpy as np
import cv2
class GenerateCalibration(object):
def __init__(self, directory, saveFilename):
self.directory = directory
self.saveFilename = saveFilename
def run(self):
img_names = glob(os.path.abspath(self.directory) + '/*')
square_size = 1.0
pattern_size = (9, 6)
|
pattern_points = np.zeros((np.prod(pattern_size), 3), np.float32)
pattern_points[:, :2] = np.indices(pattern_size).T.reshape(-1, 2)
pattern_points *= square_size
obj_points = []
img_points = []
h, w = 0, 0
img_names_undistort = []
for fn in img_names:
print('processing %s... ' % fn, end='')
img = cv2.imread(fn, 0)
if img is None:
print("Failed to load", fn)
continue
h, w = img.shape[:2]
found, corners = cv2.findChessboardCorners(img, pattern_size)
if found:
term = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_COUNT, 30, 0.1)
cv2.cornerSubPix(img, corners, (5, 5), (-1, -1), term)
img_points.append(corners.reshape(-1, 2))
obj_points.append(pattern_points)
if (len(obj_points) == 0): # no images had the chessboard in them
return False
# calculate camera distortion
rms, camera_matrix, dist_coefs, rvecs, tvecs = cv2.calibrateCamera(obj_points, img_points, (w, h), None, None)
# Dump to JSON file
with open(self.saveFilename, 'w') as f:
json.dump({'matrix': camera_matrix.tolist(), 'distortion': dist_coefs.ravel().tolist(), 'rms': rms}, f, sort_keys = True, indent = 2)
return True
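# Expected invocation of the helper above; the image directory and output filename
# are placeholders.
if __name__ == '__main__':
    calibrator = GenerateCalibration('./calibration_images', './camera_calibration.json')
    if calibrator.run():
        print('Calibration written to ./camera_calibration.json')
    else:
        print('No chessboard corners were found in the supplied images')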
|
CasherWest/django-post_office
|
post_office/migrations/0003_auto_20150608_1115.py
|
Python
|
mit
| 699
| 0.002861
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('post_office', '0002_auto_20150204_1403'),
]
operations = [
migrations.AddField(
model_name='emailbackend',
name='email',
            field=models.EmailField(default='hosting@zweipunktnull.de', max_length=254),
preserve_default=False,
),
migrations.AlterField(
model_name='email',
name='backend',
field=models.ForeignKey(related_name='emails', blank=True, to='post_office.EmailBackend', null=True),
|
),
]
|
henrythasler/TileGenerator
|
py3_render.py
|
Python
|
gpl-2.0
| 18,299
| 0.014427
|
#!/usr/bin/env python
"""
Python script to generate map tiles with mapnik using metatiles and multiprocessing/threading for improved performance
(c) Henry Thasler
based on other scripts from http://svn.openstreetmap.org/applications/rendering/mapnik/
"""
from math import pi, cos, sin, log, exp, atan, floor, ceil, sqrt
from subprocess import call
from datetime import datetime, timedelta
import sys, os
import sqlite3 as sqlite
from queue import Queue
import multiprocessing
import threading
import argparse
import mapnik
MULTIPROCESSING = True # True = multiprocessing; False = treading
DEG_TO_RAD = pi / 180.0
RAD_TO_DEG = 180.0 / pi
# Map defines
TILE_SIZE = 256
# amount of pixels the metatile is increased on each edge
BUF_SIZE = 1024
# Default number of rendering threads to spawn, should be roughly equal to number of CPU cores available
NUM_THREADS = 1
def limit (a):
if a < -0.9999:
return -0.9999
if a > 0.9999:
return 0.9999
return a
class GoogleProjection:
def __init__(self, levels = 18):
self.Bc = []
self.Cc = []
self.zc = []
self.Ac = []
c = 256
for d in range(0, levels + 1):
e = c / 2
self.Bc.append(c / 360.0)
self.Cc.append(0.5 * c / pi)
self.zc.append((e, e))
self.Ac.append(c)
c *= 2
def fromLLtoPixel(self, ll, zoom):
d = self.zc[zoom]
e = round(d[0] + ll[0] * self.Bc[zoom])
f = limit(sin(DEG_TO_RAD * ll[1]))
g = round(d[1] + 0.5 * log((1.0 + f) / (1.0 - f)) * -self.Cc[zoom])
return (e, g)
def fromPixelToLL(self, px, zoom):
e = self.zc[zoom]
f = (px[0] - e[0]) / self.Bc[zoom]
g = (px[1] - e[1]) / -self.Cc[zoom]
h = RAD_TO_DEG * (2.0 * atan(exp(g)) - 0.5 * pi)
return (f, h)
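    # Sanity check: at zoom 0 the world is one 256px tile, so
    # fromLLtoPixel((0.0, 0.0), 0) -> (128, 128) and fromPixelToLL((128, 128), 0)
    # maps back to (0.0, 0.0); every further zoom level doubles the pixel extent.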
class MinimalProgressBar:
def __init__(self, maxValue, width = 50):
self.maxValue = maxValue
self.width = width
self.startTime = datetime.now()
def setMax(self, maxValue):
self.maxValue = maxValue
def update(self, value):
percentage = float(value) / self.maxValue
dots = '.' * int(percentage * self.width)
spaces = ' ' * (self.width - len(dots))
delta = datetime.now() - self.startTime
elapsed = float(delta.microseconds + delta.seconds * 1000000 + delta.days * 24 * 60 * 60 * 1000000) / 1000000
eta = int(elapsed / max(percentage, 0.01) - elapsed)
hms = "{:02}:{:02}:{:02}".format(eta // 3600, (eta // 60) % 60, eta % 60)
sys.stdout.write("\r[{}] {:6.2%} eta {}".format(dots + spaces, percentage, hms))
sys.stdout.flush()
class FileWriter:
def __init__(self, tile_dir):
self.tile_dir = tile_dir
if not self.tile_dir.endswith('/'):
self.tile_dir = self.tile_dir + '/'
if not os.path.isdir(self.tile_dir):
os.mkdir(self.tile_dir)
def __str__(self):
return "FileWriter({0})".format(self.tile_dir)
def write_poly(self, poly):
pass
def tile_uri(self, x, y, z):
return '{0}{1:d}/{2:d}/{3:d}.png'.format(self.tile_dir, int(z), int(x), int(y))
def exists(self, x, y, z):
return os.path.isfile(self.tile_uri(x, y, z))
def write(self, x, y, z, imagestring):
uri = self.tile_uri(x, y, z)
try:
os.makedirs(os.path.dirname(uri))
except OSError:
pass
fh = open(uri, "wb")
fh.write(imagestring)
fh.close()
# image.save(uri, 'png256')
def commit(self):
pass
def need_image(self):
return True
def multithreading(self):
return True
def close(self):
pass
class SQLiteDBWriter:
def __init__(self, database):
self.database = database
try:
self.db = sqlite.connect(self.database)
self.cur = self.db.cursor()
self.cur.execute('CREATE TABLE IF NOT EXISTS tiles (x int, y int, z int, s int, image blob, PRIMARY KEY (x, y, z, s))')
self.cur.execute('CREATE TABLE IF NOT EXISTS info (minzoom TEXT, maxzoom TEXT)')
self.cur.execute('CREATE TABLE IF NOT EXISTS android_metadata (locale TEXT)')
self.cur.execute('CREATE INDEX IF NOT EXISTS IND on tiles(x, y, z, s)')
except sqlite.Error as e:
print("SQLiteDBWriter Error %s:" % e.args[0])
def __str__(self):
return "SQLiteDBWriter({0})".format(self.database)
def write_poly(self, poly):
pass
def tile_uri(self, x, y, z):
pass
def exists(self, x, y, z):
pass
def write(self, x, y, z, imagestring):
if self.db:
try:
self.cur.execute("INSERT OR REPLACE INTO tiles(image, x, y, z, s) VALUES (?, ?, ?, ?, ?)", (sqlite.Binary(imagestring), x, y, 17 - z, 0) )
except sqlite.Error as e:
print("SQLiteDBWriter Error %s:" % e.args[0])
def commit(self):
if self.db:
self.db.commit()
def need_image(self):
return True
def multithreading(self):
return False
def close(self):
if self.db:
self.cur.close()
self.db.close()
class Command:
write, commit, sum = range(3)
class WriterThread:
def __init__(self, options, q, lock):
self.q = q
self.lock = lock
self.options = options
self.tilecounter = {'sum': 0, 'count': 0}
def loop(self):
if self.options.tiledir:
tiledir = self.options.tiledir
if not tiledir.endswith('/'):
tiledir = tiledir + '/'
self.writer = FileWriter(tiledir)
elif self.options.sqlitedb:
self.writer = SQLiteDBWriter(self.options.sqlitedb)
else:
self.writer = FileWriter(os.getcwd() + '/tiles')
consoleWidth = int(os.popen('stty size', 'r').read().split()[1])
self.progressBar = MinimalProgressBar(0, consoleWidth - 25)
while True:
#Fetch a tile from the queue and save it
item = self.q.get()
if (item == None):
self.writer.commit()
self.writer.close()
self.q.task_done()
break
else:
(cmd, x, y, z, image) = item
if cmd == Command.write:
self.writer.write(x, y, z, image)
self.tilecounter['count'] += 1
self.progressBar.update(self.tilecounter['count'])
elif cmd == Command.commit:
self.writer.commit()
self.progressBar.update(self.tilecounter['count'])
elif cmd == Command.sum:
self.tilecounter['sum'] = x
self.progressBar.setMax(self.tilecounter['sum'])
self.progressBar.update(self.tilecounter['count'])
self.q.task_done()
class RenderThread:
def __init__(self, writer, mapfile, q, lock, maxZoom):
self.writer = writer
self.q = q
self.mapfile = mapfile
self.lock = lock
        self.m = mapnik.Map(TILE_SIZE, TILE_SIZE)
# Load style XML
mapnik.load_map(self.m, mapfile, True)
        # Obtain <Map> projection
self.prj = mapnik.Projection(self.m.srs)
# Projects between tile pixel co-ordinates and LatLong (EPSG:4326)
self.tileproj = GoogleProjection(maxZoom)
def render_tile(self, z, scale, p0, p1, metawidth, metaheight, debug):
# Calculate pixel positions of bottom-left & top-right
# p0 = (x * 256, (y + 1) * 256)
# p1 = ((x + 1) * 256, y * 256)
# Convert to LatLong (EPSG:4326)
l0 = self.tileproj.fromPixelToLL(p0, z)
l1 = self.tileproj.fromPixelToLL(p1, z)
# Convert to map projection (e.g. mercator co-ords EPSG:900913)
c0 = self.prj.forward(mapnik.Coord(l0[0], l0[1]))
c1 = self.prj.forward(mapnik.Coord(l1[0], l1[1]))
# Bounding box for the tile
bbox = mapnik.Box2d(c0.x, c0.y, c1.x, c1.y)
self.m.resize(metawidth * TILE_SIZE, metaheight * TILE_SIZE)
self.m.zoom_to_box(bbox)
|
maxwward/SCOPEBak
|
askbot/migrations/0017_add_group__moderators.py
|
Python
|
gpl-3.0
| 25,692
| 0.008446
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.contrib.auth.models import Group
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
moderators = Group(name = 'askbot_moderators')
moderators.save()
def backwards(self, orm):
"Write your backwards methods here."
try:
moderators = Group.objects.get(name = 'askbot_moderators')
moderators.delete()
except Group.DoesNotExist:
pass
models = {
'askbot.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'to': "orm['auth.User']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.anonymousproblem': {
'Meta': {'object_name': 'AnonymousProblem'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'exercise': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_problems'", 'to': "orm['askbot.Exercise']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'askbot.anonymousexercise': {
'Meta': {'object_name': 'AnonymousExercise'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'askbot.problem': {
'Meta': {'object_name': 'Problem', 'db_table': "u'problem'"},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'problems'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_problems'", 'null': 'True', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_problems'", 'null': 'True', 'to': "orm['auth.User']"}),
            'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
|
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_problems'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'exercise': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'problems'", 'to': "orm['askbot.Exercise']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.problemrevision': {
'Meta': {'object_name': 'ProblemRevision', 'db_table': "u'problem_revision'"},
'problem': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Problem']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'problemrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'askbot.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.Badge']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'askbot.badge': {
'Meta': {'unique_together': "(('name', '
|
santisiri/popego
|
envs/ALPHA-POPEGO/lib/python2.5/site-packages/SQLAlchemy-0.4.3-py2.5.egg/sqlalchemy/schema.py
|
Python
|
bsd-3-clause
| 64,162
| 0.001901
|
# schema.py
# Copyright (C) 2005, 2006, 2007, 2008 Michael Bayer mike_mp@zzzcomputing.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""The schema module provides the building blocks for database metadata.
Each element within this module describes a database entity
which can be created and dropped, or is otherwise part of such an entity.
Examples include tables, columns, sequences, and indexes.
All entities are subclasses of [sqlalchemy.schema#SchemaItem], and as
defined in this module they are intended to be agnostic of any
vendor-specific constructs.
A collection of entities are grouped into a unit called [sqlalchemy.schema#MetaData].
MetaData serves as a logical grouping of schema elements, and can also
be associated with an actual database connection such that operations
involving the contained elements can contact the database as needed.
Two of the elements here also build upon their "syntactic" counterparts,
which are defined in [sqlalchemy.sql.expression#], specifically [sqlalchemy.schema#Table]
and [sqlalchemy.schema#Column]. Since these objects are part of the
SQL expression language, they are usable as components in SQL expressions.
"""
import re, inspect
from sqlalchemy import types, exceptions, util, databases
from sqlalchemy.sql import expression, visitors
URL = None
__all__ = ['SchemaItem', 'Table', 'Column', 'ForeignKey', 'Sequence', 'Index',
'ForeignKeyConstraint', 'PrimaryKeyConstraint', 'CheckConstraint',
'UniqueConstraint', 'DefaultGenerator', 'Constraint', 'MetaData',
'ThreadLocalMetaData', 'SchemaVisitor', 'PassiveDefault',
'ColumnDefault', 'DDL']
class SchemaItem(object):
"""Base class for items that define a database schema."""
__metaclass__ = expression._FigureVisitName
def _init_items(self, *args):
"""Initialize the list of child items for this SchemaItem."""
for item in args:
if item is not None:
item._set_parent(self)
def _set_parent(self, parent):
"""Associate with this SchemaItem's parent object."""
raise NotImplementedError()
def get_children(self, **kwargs):
"""used to allow SchemaVisitor access"""
return []
def __repr__(self):
return "%s()" % self.__class__.__name__
def bind(self):
"""Return the connectable associated with this SchemaItem."""
m = self.metadata
return m and m.bind or None
bind = property(bind)
def info(self):
try:
return self._info
except AttributeError:
self._info = {}
return self._info
info = property(info)
def _get_table_key(name, schema):
if schema is None:
return name
else:
return schema + "." + name
class _TableSingleton(expression._FigureVisitName):
"""A metaclass used by the ``Table`` object to provide singleton behavior."""
def __call__(self, name, metadata, *args, **kwargs):
schema = kwargs.get('schema', None)
useexisting = kwargs.pop('useexisting', False)
mustexist = kwargs.pop('mustexist', False)
key = _get_table_key(name, schema)
try:
table = metadata.tables[key]
if not useexisting and table._cant_override(*args, **kwargs):
raise exceptions.InvalidRequestError("Table '%s' is already defined for this MetaData instance. Specify 'useexisting=True' to redefine options and columns on an existing Table object." % key)
else:
table._init_existing(*args, **kwargs)
return table
except KeyError:
if mustexist:
raise exceptions.InvalidRequestError("Table '%s' not defined" % (key))
try:
return type.__call__(self, name, metadata, *args, **kwargs)
except:
if key in metadata.tables:
del metadata.tables[key]
raise
class Table(SchemaItem, expression.TableClause):
"""Represent a relational database table."""
__metaclass__ = _TableSingleton
ddl_events = ('before-create', 'after-create', 'before-drop', 'after-drop')
def __init__(self, name, metadata, *args, **kwargs):
"""Construct a Table.
Table objects can be constructed directly. Arguments
are:
name
The name of this table, exactly as it appears, or will
appear, in the database.
This property, along with the *schema*, indicates the
*singleton identity* of this table.
Further tables constructed with the same name/schema
combination will return the same Table instance.
\*args
Should contain a listing of the Column objects for this table.
\**kwargs
kwargs include:
schema
The *schema name* for this table, which is
required if the table resides in a schema other than the
default selected schema for the engine's database
connection. Defaults to ``None``.
autoload
Defaults to False: the Columns for this table should be
reflected from the database. Usually there will be no
Column objects in the constructor if this property is set.
autoload_with
if autoload==True, this is an optional Engine or Connection
instance to be used for the table reflection. If ``None``,
the underlying MetaData's bound connectable will be used.
include_columns
A list of strings indicating a subset of columns to be
loaded via the ``autoload`` operation; table columns who
aren't present in this list will not be represented on the resulting
``Table`` object. Defaults to ``None`` which indicates all
columns should be reflected.
info
Defaults to {}: A space to store application specific data;
this must be a dictionary.
mustexist
Defaults to False: indicates that this Table must already
have been defined elsewhere in the application, else an
exception is raised.
useexisting
Defaults to False: indicates that if this Table was
already defined elsewhere in the application, disregard
the rest of the constructor arguments.
owner
Defaults to None: optional owning user of this table.
useful for databases such as Oracle to aid in table
reflection.
quote
Defaults to False: indicates that the Table identifier
must be properly escaped and quoted before being sent to
the database. This flag overrides all other quoting
behavior.
quote_schema
Defaults to False: indicates that the Namespace identifier
must be properly escaped and quoted before being sent to
the database. This flag overrides all other quoting
behavior.
"""
super(Table, self).__init__(name)
self.metadata = metadata
self.schema = kwargs.pop('schema', None)
self.owner = kwargs.pop('owner', None)
self.indexes = util.Set()
self.constraints = util.Set()
self._columns = expression.ColumnCollection()
self.primary_key = PrimaryKeyConstraint()
self._foreign_keys = util.OrderedSet()
self.ddl_listeners = util.defaultdict(list)
self.kwargs = {}
if self.schema is not None:
self.fullname = "%s.%s" % (self.schema, self.name)
else:
self.fullname = self.name
autoload = kwargs.pop('autoload', False)
autoload_with = kwargs.pop('autoload_with', None)
include_columns = kwargs.pop('include_columns', None)
self._set_parent(metadata)
# load column definitions from the database if 'autoload' is defined
# we do it after the table is in the singleton dictionary to support
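# --- Illustrative usage sketch (not part of the original schema module) ---
# A hedged example of the constructor arguments documented in the Table
# docstring above. The 'users' table, its columns, and the MetaData/Column/
# Integer/String names are assumptions about this library's usual public API,
# not an authoritative spelling of it.
#
#     metadata = MetaData()
#     users = Table('users', metadata,
#                   Column('id', Integer, primary_key=True),
#                   Column('name', String(50)))
#     # A second call with the same name/metadata returns the same instance
#     # (the singleton behavior implemented by _TableSingleton above); pass
#     # useexisting=True to redefine options and columns on that instance.
#     same_table = Table('users', metadata, useexisting=True)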
ajkannan/Classics-Research | Utilities/TermFrequencyInverseDocumentFrequency.py | Python | mit | 1,955 | 0.032225
from Text import Text
from pprint import pprint
import numpy as np
class TermFrequencyInverseDocumentFrequency(object):
"""Build normalized term-frequency profiles for a corpus of Text objects and score query texts against them (a simple TF-IDF-style similarity)."""
def __init__(self):
super(TermFrequencyInverseDocumentFrequency, self).__init__()
self.corpus = []
self.corpus_frequencies = {}
def add_text_to_corpus(self, text):
text_word_list = text.list
text_frequencies = self.calculate_normalized_frequencies(text_word_list)
self.corpus.append((text.name, text_frequencies))
def calculate_normalized_frequencies(self, text_word_list, add_text = True):
text_frequencies = {}
length = float(len(text_word_list))
for word in text_word_list:
text_frequencies[word] = text_frequencies.get(word, 0.0) + 1.0
if add_text:
self.corpus_frequencies[word] = self.corpus_frequencies.get(word, 0.0) + 1.0
for word in text_frequencies.keys():
text_frequencies[word] /= length
return text_frequencies
def calculate_similarity_scores(self, text):
query_text_frequencies = self.calculate_normalized_frequencies(text.list, add_text = False)
similarities = []
for document in self.corpus:
similarity_score = 0.0
document_frequencies = document[1]
for word in query_text_frequencies.keys():
if word in document_frequencies.keys():
similarity_score += (query_text_frequencies[word] / self.corpus_frequencies[word]) + (
document_frequencies[word] / self.corpus_frequencies[word]
)
similarities.append((document[0], similarity_score))
return similarities
def calculate_features_for_corpus(self):
features = np.zeros((len(self.corpus), len(self.corpus_frequencies.keys())))
for i, document in enumerate(self.corpus):
for j, word in enumerate(self.corpus_frequencies.keys()):
if word in document[1].keys():
features[i, j] = document[1][word]
else:
features[i, j] = 0.0
return features, self.corpus_frequencies.keys()
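# --- Illustrative demo (not part of the original file) ---
# A minimal, hedged sketch of how the class above is used. Real callers pass
# Text objects (exposing .name and .list); lightweight stand-ins are used
# here so the sketch stays self-contained, and the word lists are arbitrary.
if __name__ == '__main__':
    class _FakeText(object):
        def __init__(self, name, words):
            self.name = name
            self.list = words

    tfidf = TermFrequencyInverseDocumentFrequency()
    tfidf.add_text_to_corpus(_FakeText('doc_a', ['arma', 'virumque', 'cano']))
    tfidf.add_text_to_corpus(_FakeText('doc_b', ['cano', 'troiae', 'qui']))
    # Rank the corpus documents against a query text by similarity score.
    scores = tfidf.calculate_similarity_scores(_FakeText('query', ['cano', 'arma']))
    pprint(sorted(scores, key=lambda pair: pair[1], reverse=True))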
rdhyee/osf.io | admin_tests/meetings/test_forms.py | Python | apache-2.0 | 2,864 | 0
from nose import tools as nt
from tests.base import AdminTestCase
from tests.factories import AuthUserFactory
from tests.test_conferences import ConferenceFactory
from admin.meetings.forms import MeetingForm, MultiEmailField
data = dict(
edit='False',
endpoint='short',
name='Much longer',
info_url='www.something.com',
logo_url='osf.io/eg634',
active='True',
admins='zzz@email.org',
public_projects='True',
poster='True',
talk='True',
submission1='poster',
submission2='talk',
submission1_plural='posters',
submission2_plural='talks',
meeting_title_type='Of course',
add_submission='No more',
mail_subject='Awesome',
mail_message_body='Nothings',
mail_attachment='Again',
homepage_link_text='Need to add to tests',
)
class TestMultiEmailField(AdminTestCase):
def test_to_python_nothing(self):
field = MultiEmailField()
res = field.to_python('')
nt.assert_equal(res, [])
def test_to_python_one(self):
field = MultiEmailField()
res = field.to_python('aaa@email.org')
nt.assert_equal(res, ['aaa@email.org'])
def test_to_python_more(self):
field = MultiEmailField()
res = field.to_python('aaa@email.org, bbb@email.org, ccc@email.org')
nt.assert_equal(res,
['aaa@email.org', 'bbb@email.org', 'ccc@email.org'])
class TestMeetingForm(AdminTestCase):
def setUp(self):
super(TestMeetingForm, self).setUp()
self.user = AuthUserFactory()
def test_clean_admins_raise(self):
form = MeetingForm(data=data)
nt.assert_false(form.is_valid())
nt.assert_in('admins', form.errors)
nt.assert_in('zzz@email.org', form.errors['admins'][0])
nt.assert_in('does not have an OSF account', form.errors['admins'][0])
def test_clean_admins_okay(self):
mod_data = dict(data)
mod_data.update({'admins': self.user.emails[0]})
form = MeetingForm(data=mod_data)
nt.assert_true(form.is_valid())
def test_clean_endpoint_raise_not_exist(self):
mod_data = dict(data)
mod_data.update({'admins': self.user.emails[0], 'edit': 'True'})
form = MeetingForm(data=mod_data)
nt.assert_in('endpoint', form.errors)
nt.assert_equal('Meeting not found with this endpoint to update',
form.errors['endpoint'][0])
def test_clean_endpoint_raise_exists(self):
conf = ConferenceFactory()
mod_data = dict(data)
mod_data.update({'admins': self.user.emails[0],
'endpoint': conf.endpoint})
form = MeetingForm(data=mod_data)
nt.assert_in('endpoint', form.errors)
nt.assert_equal('A meeting with this endpoint exists already.',
form.errors['endpoint'][0])
RedhawkSDR/integration-gnuhawk | components/quadrature_demod_cf/tests/test_quadrature_demod_cf.py | Python | gpl-3.0 | 4,545 | 0.006601
#!/usr/bin/env python
#
# This file is protected by Copyright. Please refer to the COPYRIGHT file
# distributed with this source distribution.
#
# This file is part of GNUHAWK.
#
# GNUHAWK is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# GNUHAWK is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along with
# this program. If not, see http://www.gnu.org/licenses/.
#
import unittest
import ossie.utils.testing
import os
from omniORB import any
class ComponentTests(ossie.utils.testing.ScaComponentTestCase):
"""Test for all component implementations in quadrature_demod_cf"""
def testScaBasicBehavior(self):
#######################################################################
# Launch the component with the default execparams
execparams = self.getPropertySet(kinds=("execparam",), modes=("readwrite", "writeonly"), includeNil=False)
execparams = dict([(x.id, any.from_any(x.value)) for x in execparams])
self.launch(execparams)
#######################################################################
# Verify the basic state of the component
self.assertNotEqual(self.comp, None)
self.assertEqual(self.comp.ref._non_existent(), False)
self.assertEqual(self.comp.ref._is_a("IDL:CF/Resource:1.0"), True)
self.assertEqual(self.spd.get_id(), self.comp.ref._get_identifier())
#######################################################################
# Simulate regular component startup
# Verify that neither initialize nor configure throws errors
self.comp.initialize()
configureProps = self.getPropertySet(kinds=("configure",), modes=("readwrite", "writeonly"), includeNil=False)
self.comp.configure(configureProps)
#######################################################################
# Validate that query returns all expected parameters
# Query of '[]' should return the following set of properties
expectedProps = []
expectedProps.extend(self.getPropertySet(kinds=("configure", "execparam"), modes=("readwrite", "readonly"), includeNil=True))
expectedProps.extend(self.getPropertySet(kinds=("allocate",), action="external", includeNil=True))
props = self.comp.query([])
props = dict((x.id, any.from_any(x.value)) for x in props)
# Query may return more than expected, but not less
for expectedProp in expectedProps:
self.assertEquals(props.has_key(expectedProp.id), True)
#######################################################################
# Verify that all expected ports are available
for port in self.scd.get_componentfeatures().get_ports().get_uses():
port_obj = self.comp.getPort(str(port.get_usesname()))
self.assertNotEqual(port_obj, None)
self.assertEqual(port_obj._non_existent(), False)
self.assertEqual(port_obj._is_a("IDL:CF/Port:1.0"), True)
for port in self.scd.get_componentfeatures().get_ports().get_provides():
port_obj = self.comp.getPort(str(port.get_providesname()))
self.assertNotEqual(port_obj, None)
self.assertEqual(port_obj._non_existent(), False)
self.assertEqual(port_obj._is_a(port.get_repid()), True)
#######################################################################
# Make sure start and stop can be called without throwing exceptions
self.comp.start()
self.comp.stop()
#######################################################################
# Simulate regular component shutdown
self.comp.releaseObject()
# TODO Add additional tests here
#
# See:
# ossie.utils.bulkio.bulkio_helpers,
# ossie.utils.bluefile.bluefile_helpers
# for modules that will assist with testing components with BULKIO ports
if __name__ == "__main__":
ossie.utils.testing.main("../quadrature_demod_cf.spd.xml") # By default tests all implementations
BaiduPS/tera | src/sdk/python/TeraSdk.py | Python | bsd-3-clause | 39,743 | 0
# -*- coding: utf-8 -*-
"""
Tera Python SDK. It needs a libtera_c.so
TODO(taocipian) __init__.py
"""
from ctypes import CFUNCTYPE, POINTER
from ctypes import byref, cdll, string_at
from ctypes import c_bool, c_char_p, c_void_p
from ctypes import c_uint32, c_int32, c_int64, c_ubyte, c_uint64
class Status(object):
""" status code """
# C++ tera.h ErrorCode
OK = 0
NotFound = 1
BadParam = 2
System = 3
Timeout = 4
Busy = 5
NoQuota = 6
NoAuth = 7
Unknown = 8
NotImpl = 9
reason_list_ = ["ok", "not found", "bad parameter",
"unknown error", "request timeout", "busy",
"no quota", "operation not permitted", "unknown error",
"not implemented"]
def __init__(self, c):
""" init """
self.c_ = c
if c < 0 or c > len(Status.reason_list_) - 1:
self.reason_ = "bad status code"
else:
self.reason_ = Status.reason_list_[c]
def GetReasonString(self):
"""
Returns:
(string) status string
"""
return Status.reason_list_[self.c_]
def GetReasonNumber(self):
"""
Returns:
(long) status code
"""
return self.c_
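# --- Illustrative note (not part of the original SDK) ---
# A hedged example of how the Status wrapper above maps C error codes to
# readable strings:
#
#     s = Status(Status.NotFound)
#     s.GetReasonNumber()   # -> 1
#     s.GetReasonString()   # -> "not found"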
class ScanDescriptor(object):
""" Descriptor of a scan operation.
Scans all data in the range [start_key, end_key); by default each cell returns only its latest version.
"""
def __init__(self, start_key):
"""
Args:
start_key(string): start position of the scan; the scan result includes start_key
"""
self.desc = lib.tera_scan_descriptor(start_key,
c_uint64(len(start_key)))
def Destroy(self):
"""
Destroy this scan_descriptor and release the underlying resources; this object must not be used afterwards.
"""
lib.tera_scan_descriptor_destroy(self.desc)
def SetEnd(self, end_key):
"""
If this function is not called, end_key is treated as "infinity".
Args:
end_key(string): end position of the scan; the scan result does not include end_key
"""
lib.tera_scan_descriptor_set_end(self.desc, end_key,
c_uint64(len(end_key)))
def SetMaxVersions(self, versions):
"""
If this function is not called, by default each cell is scanned for its latest version only.
Args:
versions(long): maximum number of versions of a single cell that the scan may return
"""
lib.tera_scan_descriptor_set_max_versions(self.desc, versions)
def SetBufferSize(self, buffer_size):
"""
The server accumulates the data it reads into a buffer and returns a batch once the buffer grows to at most buffer_size;
it may also return before the buffer is full because of a timeout or because the scan reached its end. Default value: 64 * 1024.
This option has a very noticeable impact on scan performance;
our tests show that 1024*1024 (1MB) performs well in many scenarios,
so tuning it for your own workload is recommended.
Args:
buffer_size: size of the scan buffer, in bytes
"""
lib.tera_scan_descriptor_set_buffer_size(self.desc, buffer_size)
def SetPackInterval(self, interval):
"""
Set the timeout of a scan operation, in ms.
The server returns results to the client as soon as possible once the scan has run for roughly interval milliseconds.
Args:
interval(long): timeout of a single scan, in ms
"""
lib.tera_scan_descriptor_set_pack_interval(self.desc, interval)
def AddColumn(self, cf, qu):
"""
Select a specific Column (ColumnFamily + Qualifier) for the scan; other Columns are filtered out and not returned to the client.
Args:
cf(string): name of the wanted ColumnFamily
qu(string): name of the wanted Qualifier
"""
lib.tera_scan_descriptor_add_column(self.desc, cf,
qu, c_uint64(len(qu)))
def AddColumnFamily(self, cf):
"""
Same as AddColumn, but selects an entire ColumnFamily.
Args:
cf(string): name of the wanted ColumnFamily
"""
lib.tera_scan_descriptor_add_column_family(self.desc, cf)
def SetTimeRange(self, start, end):
"""
Set the time range of the versions to return.
Note for users of the C++ interface: the order of the start and end parameters there is the opposite of here!
Args:
start(long): start timestamp (inclusive),
Epoch (00:00:00 UTC, January 1, 1970), measured in us
end(long): end timestamp (inclusive),
Epoch (00:00:00 UTC, January 1, 1970), measured in us
"""
lib.tera_scan_descriptor_set_time_range(self.desc, start, end)
class ResultStream(object):
""" Output stream returned by a scan operation.
"""
def __init__(self, stream):
""" init """
self.stream = stream
def Destroy(self):
"""
Destroy this result_stream and release the underlying resources; this object must not be used afterwards.
"""
lib.tera_result_stream_destroy(self.stream)
def Done(self):
""" Whether this stream has been fully read.
Returns:
(bool) Returns true if the stream has been fully read, false otherwise.
"""
err = c_char_p()
return lib.tera_result_stream_done(self.stream, byref(err))
def Next(self):
""" Advance to the next cell.
"""
lib.tera_result_stream_next(self.stream)
def RowName(self):
"""
Returns:
(string) the Rowkey of the current cell
"""
value = POINTER(c_ubyte)()
vallen = c_uint64()
lib.tera_result_stream_row_name(self.stream,
byref(value), byref(vallen))
return copy_string_to_user(value, long(vallen.value))
def Family(self):
"""
Returns:
(string) the ColumnFamily of the current cell
"""
value = POINTER(c_ubyte)()
vallen = c_uint64()
lib.tera_result_stream_family(self.stream, byref(value), byref(vallen))
return copy_string_to_user(value, long(vallen.value))
def Qualifier(self):
"""
Returns:
(string) the Qualifier of the current cell
"""
value = POINTER(c_ubyte)()
vallen = c_uint64()
lib.tera_result_stream_qualifier(self.stream,
byref(value), byref(vallen))
return copy_string_to_user(value, long(vallen.value))
def ColumnName(self):
"""
Returns:
(string) the ColumnName of the current cell (i.e. ColumnFamily:Qualifier)
"""
value = POINTER(c_ubyte)()
vallen = c_uint64()
lib.tera_result_stream_column_name(self.stream,
byref(value), byref(vallen))
return copy_string_to_user(value, long(vallen.value))
def Value(self):
"""
Returns:
(string) the value of the current cell
"""
value = POINTER(c_ubyte)()
vallen = c_uint64()
lib.tera_result_stream_value(self.stream, byref(value), byref(vallen))
return copy_string_to_user(value, long(vallen.value))
def ValueInt64(self):
"""
Returns:
(long) the current cell is an int64 counter; returns the numeric value of that counter.
Calling this method on a cell that is not an int64 counter is undefined behavior.
"""
return lib.tera_result_stream_value_int64(self.stream)
def Timestamp(self):
"""
Returns:
(long) the timestamp of the current cell,
Epoch (00:00:00 UTC, January 1, 1970), measured in us
"""
return lib.tera_result_stream_timestamp(self.stream)
class Client(object):
""" Access a tera cluster through a Client object.
Usage advice: one Client per cluster is enough; create multiple Clients only if you need to access multiple clusters.
"""
def __init__(self, conf_path, log_prefix):
"""
Raises:
TeraSdkException: failed to create the Client object
"""
err = c_char_p()
self.client = lib.tera_client_open(conf_path, log_prefix, byref(err))
if self.client is None:
raise TeraSdkException("open client failed:" + str(err.value))
def Close(self):
"""
Destroy this client and release the underlying resources; this object must not be used afterwards.
"""
lib.tera_client_close(self.client)
def OpenTable(self, name):
""" Open the table named <name>.
Args:
name(string): table name
Returns:
(Table) pointer to the opened Table
Raises:
TeraSdkException: an error occurred while opening the table
"""
err = c_char_p()
table_ptr = lib.tera_table_open(self.client, name, byref(err))
if table_ptr is None:
raise TeraSdkException("open table failed:" + err.value)
return Table(table_ptr)
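# --- Illustrative usage sketch (not part of the original SDK) ---
# A hedged end-to-end sketch of the classes above: open a Client, open a
# Table, scan a key range and walk the ResultStream. It assumes libtera_c.so
# is loadable, that the flag file path and table name below exist, and that
# the Table class defined later in this SDK exposes a Scan(ScanDescriptor)
# method returning a ResultStream.
#
#     client = Client("./tera.flag", "pysdk")
#     table = client.OpenTable("example_table")
#     desc = ScanDescriptor("row_start")
#     desc.SetEnd("row_end")
#     stream = table.Scan(desc)      # assumed Table method, not shown here
#     while not stream.Done():
#         print(stream.RowName(), stream.ColumnName(), stream.Value())
#         stream.Next()
#     stream.Destroy()
#     desc.Destroy()
#     client.Close()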
MUTATION_CALLBACK = CFUNCTYPE(None, c_void_p)
class RowMutation(object):
""" Mutations applied to a single row.
None of the RowMutation operations (such as Put/DeleteColumn)
take effect until Table.ApplyMutation() is called.
"""
def __init__(self, mutation):
""" init """
self.mutation = mutation
def PutKV(self, value, ttl):
""" Write (modify) the value to <value>.
Args:
value(string): cell
codemasteroy/py-viitenumero | setup.py | Python | gpl-3.0 | 547 | 0.014625
#!/usr/bin/env python
from distutils.core import setup
setup(
name='py-viitenumero',
version='1.0',
description='Python module for generating Finnish national payment reference number',
author='Mohanjith Sudirikku Hannadige',
author_email='moha@codemaster.fi',
url='http://www.codemaster.fi/python/maksu/',
download_url = 'https://github.com/codemasteroy/py-viitenumero/tarball/1.0',
packages=[ 'maksu' ],
keywords=[ 'payments', 'creditor reference', 'finland', 'suomi' ]
)
balanced/status.balancedpayments.com | situation/settings.py | Python | mit | 2,611 | 0
# Notice:
# If you are running this in production environment, generate
# these for your app at https://dev.twitter.com/apps/new
TWITTER = {
'AUTH': {
'consumer_key': 'XXXX',
'consumer_secret': 'XXXX',
'token': 'XXXX',
'token_secret': 'XXXX',
}
}
# We're pulling data from graphite to calculate the uptime. Each service has a
# list of counters that it uses to help calculate the % of successful / failed
# requests.
UPTIME = {
'root_uri': 'http://graphite.balancedpayments.com/render/?',
'username': 'username',
'password': 'password',
'services': {
'DASH': {
'OK_TARGETS': [
'stats_counts.status.dashboard.2xx',
'stats_counts.status.dashboard.3xx',
'stats_counts.status.dashboard.4xx',
],
'ERROR_TARGETS': [
'stats_counts.status.dashboard.5xx',
'stats_counts.status.dashboard.timeout',
]
},
'JS': {
'OK_TARGETS': [
'stats_counts.status.balanced-js.2xx',
'stats_counts.status.balanced-js.3xx',
'stats_counts.status.balanced-js.4xx',
],
'ERROR_TARGETS': [
'stats_counts.status.balanced-js.5xx',
'stats_counts.status.balanced-js.timeout',
]
},
'API': {
'OK_TARGETS': [
'stats_counts.status.balanced-api.2xx',
'stats_counts.status.balanced-api.3xx',
'stats_counts.status.balanced-api.4xx',
],
'ERROR_TARGETS': [
'stats_counts.status.balanced-api.5xx',
'stats_counts.status.balanced-api.timeout',
]
}
}
}
# The e-mail address to send notifications from
EMAIL = {
'sender': 'Balanced Status <noreply@balancedpayments.com>'
}
LIBRATO_UPTIME = {
'root_uri': 'https://metrics-api.librato.com/v1/metrics/',
'username': 'FIXME',
'password': 'FIXME',
'services': {
'API': {
'SOURCE': '*bapi-live*',
'TOTAL_TARGETS': [
'AWS.ELB.RequestCount',
],
'ERROR_TARGETS': [
'AWS.ELB.HTTPCode_Backend_5XX',
'AWS.ELB.HTTPCode_ELB_5XX',
]
},
}
}
# TWILIO API credentials
TWILIO = {
'account_sid': 'XXXX',
'auth_token': 'XXXX',
'from_number': 'XXXX'
}
DEBUG = True
# Currently DASHBOARD does not send out notifications
NOTIFY_SERVICES = ['API', 'JS']
square/pants | tests/python/pants_test/tasks/test_ensime_integration.py | Python | apache-2.0 | 2,018 | 0.011893
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (nested_scopes, generators, division, absolute_import, with_statement,
print_function, unicode_literals)
import os
from pants.util.contextutil import temporary_dir
from pants_test.pants_run_integration_test import PantsRunIntegrationTest
class EnsimeIntegrationTest(PantsRunIntegrationTest):
def _ensime_test(self, specs, project_dir = os.path.join('.pants.d', 'tmp-ensime', 'project'),
project_name='project'):
"""Helper method that tests ensime generation on the input spec list."""
if not os.path.exists(project_dir):
os.makedirs(project_dir)
with temporary_dir(root_dir=project_dir) as path:
pants_run = self.run_pants(['goal', 'ensime',] + specs + ['--ensime-project-dir={dir}'.format(dir=path), ])
self.assertEquals(pants_run.returncode, self.PANTS_SUCCESS_CODE,
"goal ensime expected success, got {0}\n"
"got stderr:\n{1}\n"
"got stdout:\n{2}\n".format(pants_run.returncode,
pants_run.stderr_data,
pants_run.stdout_data))
# TODO: Actually validate the contents of the project files, rather than just
# checking if they exist.
expected_files = ('.ensime',)
workdir = os.path.join(path, project_name)
self.assertTrue(os.path.exists(workdir),
'Failed to find project_dir at {dir}.'.format(dir=workdir))
self.assertTrue(all(os.path.exists(os.path.join(workdir, name))
for name in expected_files), 'Failed to find one of the ensime project files at {dir}'.format(dir=path))
# Testing Ensime integration on a sample project
def test_ensime_on_all_examples(self):
self._ensime_test(['examples/src/scala/com/pants/example::'])
marlengit/electrum198 | lib/__init__.py | Python | gpl-3.0 | 692 | 0.001445
from version import ELECTRUM_VERSION
from util import format_satoshis, print_msg, print_json, print_error, set_verbosity
from wallet import WalletSynchronizer, WalletStorage
from wallet import Wallet
from verifier import TxVerifier
from network import Network, DEFAULT_SERVERS, DEFAULT_PORTS, pick_random_server
from interface import Interface
from simple_config import SimpleConfig
import bitcoin
import account
import transaction
from transaction import Transaction
from plugins import BasePlugin
from mnemonic import mn_encode as mnemonic_encode
from mnemonic import mn_decode as mnemonic_decode
from commands import Commands, known_commands
from daemon import NetworkProxy, NetworkServer
danche354/Sequence-Labeling | ner_BIOES/evaluate-senna-hash-2-pos-chunk-128-64-rmsprop5.py | Python | mit | 3,163 | 0.008536
'''
evaluate result
'''
from keras.models import load_model
from keras.utils import np_utils
import numpy as np
import os
import sys
# add path
sys.path.append('../')
sys.path.append('../tools')
from tools import conf
from tools import load_data
from tools import prepare
# input sentence dimensions
step_length = conf.ner_step_length
pos_length = conf.ner_pos_length
chunk_length = conf.ner_chunk_length
# gazetteer_length = conf.gazetteer_length
IOB = conf.ner_BIOES_decode
data = sys.argv[1]
best_epoch = sys.argv[2]
if data=="dev":
test_data = load_data.load_ner(dataset='eng.testa', form='BIOES')
elif data == "test":
test_data = load_data.load_ner(dataset='eng.testb', form='BIOES')
tokens = [len(x[0]) for x in test_data]
print(sum(tokens))
print('%s shape:'%data, len(test_data))
model_name = os.path.basename(__file__)[9:-3]
folder_path = './model/%s'%model_name
model_path = '%s/model_epoch_%s.h5'%(folder_path, best_epoch)
result = open('%s/predict.txt'%folder_path, 'w')
def convert(chunktags):
# convert BIOES to BIO
for p, q in enumerate(chunktags):
if q.startswith("E-"):
chunktags[p] = "I-" + q[2:]
elif q.startswith("S-"):
if p==0:
chunktags[p] = "I-" + q[2:]
elif q[2:]==chunktags[p-1][2:]:
chunktags[p] = "B-" + q[2:]
elif q[2:]!=chunktags[p-1][2:]:
chunktags[p] = "I-" + q[2:]
elif q.startswith("B-"):
if p==0:
chunktags[p] = "I-" + q[2:]
else:
if q[2:]!=chunktags[p-1][2:]:
chunktags[p] = "I-" + q[2:]
return chunktags
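# --- Illustrative note (not part of the original script) ---
# A hedged example of what convert() above produces: it rewrites BIOES chunk
# tags into BIO-style tags for the conlleval script, mutating the list in
# place (earlier positions are already converted when later ones are read).
#
#     convert(['B-PER', 'E-PER', 'S-LOC', 'O'])
#     # -> ['I-PER', 'I-PER', 'I-LOC', 'O']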
print('loading model...')
model = load_model(model_path)
print('loading model finished.')
for each in test_data:
embed_index, hash_index, pos, chunk, label, length, sentence = prepare.prepare_ner(batch=[each], gram='bi', form='BIOES')
pos = np.array([(np.concatenate([np_utils.to_categorical(p, pos_length), np.zeros((step_length-length[l], pos_length))])) for l,p in enumerate(pos)])
chunk = np.array([(np.concatenate([np_utils.to_categorical(c, chunk_length), np.zeros((step_length-length[l], chunk_length))])) for l,c in enumerate(chunk)])
prob = model.predict_on_batch([embed_index, hash_index, pos, chunk])
for i, l in enumerate(length):
predict_label = np_utils.categorical_probas_to_classes(prob[i])
chunktags = [IOB[j] for j in predict_label][:l]
word_pos_chunk = list(zip(*each))
# convert
word_pos_chunk = list(zip(*word_pos_chunk))
word_pos_chunk = [list(x) for x in word_pos_chunk]
# if data == "test":
# word_pos_chunk[3] = convert(word_pos_chunk[3])
word_pos_chunk = list(zip(*word_pos_chunk))
#convert
# if data == "test":
# chunktags = convert(chunktags)
# chunktags = prepare.gazetteer_lookup(each[0], chunktags, data)
for ind, chunktag in enumerate(chunktags):
result.write(' '.join(word_pos_chunk[ind])+' '+chunktag+'\n')
result.write('\n')
result.close()
print('epoch %s predict over !'%best_epoch)
os.system('../tools/conlleval < %s/predict.txt'%folder_path)
projectcalico/calico-neutron | neutron/db/l3_agentschedulers_db.py | Python | apache-2.0 | 22,584 | 0.000221
# Copyright (c) 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import random
import time
from oslo.config import cfg
from oslo.db import exception as db_exc
from oslo import messaging
from oslo.utils import timeutils
import sqlalchemy as sa
from sqlalchemy import func
from sqlalchemy import or_
from sqlalchemy import orm
from sqlalchemy.orm import exc
from sqlalchemy.orm import joinedload
from sqlalchemy import sql
from neutron.common import constants
from neutron.common import utils as n_utils
from neutron import context as n_ctx
from neutron.db import agents_db
from neutron.db import agentschedulers_db
from neutron.db import l3_attrs_db
from neutron.db import model_base
from neutron.extensions import l3agentscheduler
from neutron.i18n import _LE, _LI, _LW
from neutron import manager
from neutron.openstack.common import log as logging
from neutron.openstack.common import loopingcall
LOG = logging.getLogger(__name__)
L3_AGENTS_SCHEDULER_OPTS = [
cfg.StrOpt('router_scheduler_driver',
default='neutron.scheduler.l3_agent_scheduler.ChanceScheduler',
help=_('Driver to use for scheduling '
'router to a default L3 agent')),
cfg.BoolOpt('router_auto_schedule', default=True,
help=_('Allow auto scheduling of routers to L3 agent.')),
cfg.BoolOpt('allow_automatic_l3agent_failover', default=False,
help=_('Automatically reschedule routers from offline L3 '
'agents to online L3 agents.')),
]
cfg.CONF.register_opts(L3_AGENTS_SCHEDULER_OPTS)
class RouterL3AgentBinding(model_base.BASEV2):
"""Represents binding between neutron routers and L3 agents."""
router_id = sa.Column(sa.String(36),
sa.ForeignKey("routers.id", ondelete='CASCADE'),
primary_key=True)
l3_agent = orm.relation(agents_db.Agent)
l3_agent_id = sa.Column(sa.String(36),
sa.ForeignKey("agents.id", ondelete='CASCADE'),
primary_key=True)
class L3AgentSchedulerDbMixin(l3agentscheduler.L3AgentSchedulerPluginBase,
agentschedulers_db.AgentSchedulerDbMixin):
"""Mixin class to add l3 agent scheduler extension to plugins
using the l3 agent for routing.
"""
router_scheduler = None
def start_periodic_agent_status_check(self):
if not cfg.CONF.allow_automatic_l3agent_failover:
LOG.info(_LI("Skipping periodic L3 agent status check because "
"automatic router rescheduling is disabled."))
return
self.periodic_agent_loop = loopingcall.FixedIntervalLoopingCall(
self.reschedule_routers_from_down_agents)
interval = max(cfg.CONF.agent_down_time / 2, 1)
# add random initial delay to allow agents to check in after the
# neutron server first starts. random to offset multiple servers
self.periodic_agent_loop.start(interval=interval,
initial_delay=random.randint(interval, interval * 2))
def reschedule_routers_from_down_agents(self):
"""Reschedule routers from down l3 agents if admin state is up."""
# give agents extra time to handle transient failures
agent_dead_limit = cfg.CONF.agent_down_time * 2
# check for an abrupt clock change since last check. if a change is
# detected, sleep for a while to let the agents check in.
tdelta = timeutils.utcnow() - getattr(self, '_clock_jump_canary',
timeutils.utcnow())
if timeutils.total_seconds(tdelta) > cfg.CONF.agent_down_time:
LOG.warn(_LW("Time since last L3 agent reschedule check has "
"exceeded the interval between checks. Waiting "
"before check to allow agents to send a heartbeat "
"in case there was a clock adjustment."))
time.sleep(agent_dead_limit)
self._clock_jump_canary = timeutils.utcnow()
context = n_ctx.get_admin_context()
cutoff = timeutils.utcnow() - datetime.timedelta(
seconds=agent_dead_limit)
down_bindings = (
context.session.query(RouterL3AgentBinding).
join(agents_db.Agent).
filter(agents_db.Agent.heartbeat_timestamp < cutoff,
agents_db.Agent.admin_state_up).
outerjoin(l3_attrs_db.RouterExtraAttributes,
l3_attrs_db.RouterExtraAttributes.router_id ==
RouterL3AgentBinding.router_id).
filter(sa.or_(l3_attrs_db.RouterExtraAttributes.ha == sql.false(),
l3_attrs_db.RouterExtraAttributes.ha == sql.null())))
try:
for binding in down_bindings:
LOG.warn(_LW(
"Rescheduling router %(router)s from agent %(agent)s "
"because the agent did not report to the server in "
"the last %(dead_time)s seconds."),
{'router': binding.router_id,
'agent': binding.l3_agent_id,
'dead_time': agent_dead_limit})
try:
self.reschedule_router(context, binding.router_id)
except (l3agentscheduler.RouterReschedulingFailed,
messaging.RemoteError):
# Catch individual router rescheduling errors here
# so one broken one doesn't stop the iteration.
LOG.exception(_LE("Failed to reschedule router %s"),
binding.router_id)
except db_exc.DBError:
# Catch DB errors here so a transient DB connectivity issue
# doesn't stop the loopingcall.
LOG.exception(_LE("Exception encountered during router "
"rescheduling."))
def validate_agent_router_combination(self, context, agent, router):
"""Validate if the router can be correctly assigned to the agent.
:raises: RouterL3AgentMismatch if attempting to assign DVR router
to legacy agent, or centralized router to compute's L3 agents.
:raises: InvalidL3Agent if attempting to assign router to an
unsuitable agent (disabled, type != L3, incompatible configuration)
:raises: DVRL3CannotAssignToDvrAgent if attempting to assign DVR
router from one DVR Agent to another.
"""
is_distributed = router.get('distributed')
agent_conf = self.get_configuration_dict(agent)
agent_mode = agent_conf.get('agent_mode', 'legacy')
router_type = ('distributed' if is_distributed else 'centralized')
is_agent_router_types_incompatible = (
agent_mode == 'dvr' and not is_distributed
or agent_mode == 'legacy' and is_distributed
)
if is_agent_router_types_incompatible:
raise l3agentscheduler.RouterL3AgentMismatch(
router_type=router_type, router_id=router['id'],
agent_mode=agent_mode, agent_id=agent['id'])
if agent_mode == 'dvr' and is_distributed:
raise l3agentscheduler.DVRL3CannotAssignToDvrAgent(
router_type=router_type, router_id=router['id'],
agent_id=agent['id'])
is_wrong_type_or_unsuitable_agent = (
agent['agent_type'] != constants.AGENT_TYPE_L3 or
not agent['admin_state_up'] or
not self.get_l3_agent_candid
happz/ducky | ducky/cpu/instructions.py | Python | mit | 78,868 | 0.019247
import ctypes
import enum
import logging
import sys
from six import add_metaclass, iteritems, string_types
from six.moves import range
from functools import partial
from collections import OrderedDict
from .registers import Registers, REGISTER_NAMES
from ..mm import u32_t, i32_t, UINT16_FMT, UINT32_FMT
from ..util import LoggingCapable
from ..errors import EncodingLargeValueError, UnalignedJumpTargetError, InvalidOpcodeError, DivideByZeroError, InvalidInstructionSetError, OperandMismatchError, PrivilegedInstructionError
def UINT20_FMT(i):
return '0x%05X' % (i & 0xFFFFF)
def encoding_to_u32(inst):
return ctypes.cast(ctypes.byref(inst), ctypes.POINTER(u32_t)).contents.value
def IE_OPCODE():
return ('opcode', u32_t, 6)
def IE_FLAG(n):
return (n, u32_t, 1)
def IE_REG(n):
return (n, u32_t, 5)
def IE_IMM(n, l):
return (n, u32_t, l)
class Encoding(ctypes.LittleEndianStructure):
@staticmethod
def sign_extend_immediate(logger, inst, sign_mask, ext_mask):
logger.debug('sign_extend_immediate: inst=%s, sign_mask=%s, ext_mask=%s', inst, UINT32_FMT(sign_mask), UINT32_FMT(ext_mask))
if __debug__:
u = u32_t(ext_mask | inst.immediate) if inst.immediate & sign_mask else u32_t(inst.immediate)
logger.debug(' result=%s', UINT32_FMT(u))
return u.value
else:
i = inst.immediate
return ((ext_mask | i) & 0xFFFFFFFF) if i & sign_mask else i
@staticmethod
def repr(inst, fields):
d = OrderedDict()
fields.insert(0, ('opcode', '%02d'))
for field, fmt in fields:
d[field] = fmt % getattr(inst, field)
if hasattr(inst, 'refers_to'):
d['refers_to'] = str(getattr(inst, 'refers_to'))
return '<%s: %s>' % (inst.__class__.__name__, ', '.join(['%s=%s' % (k, v) for k, v in iteritems(d)]))
class EncodingR(ctypes.LittleEndianStructure):
_pack_ = 0
_fields_ = [
IE_OPCODE(), # 0
IE_REG('reg1'), # 6
IE_REG('reg2'), # 11
IE_FLAG('immediate_flag'), # 16
IE_IMM('immediate', 15), # 17
]
@staticmethod
def fill_reloc_slot(inst, slot):
logging.getLogger().debug('fill_reloc_slot: inst=%s, slot=%s', inst, slot)
slot.patch_offset = 17
slot.patch_size = 15
@staticmethod
def sign_extend_immediate(logger, inst):
return Encoding.sign_extend_immediate(logger, inst, 0x4000, 0xFFFF8000)
def __repr__(self):
return Encoding.repr(self, [('reg1', '%02d'), ('reg2', '%02d'), ('immediate_flag', '%d'), ('immediate', '0x%04X')])
class EncodingC(ctypes.LittleEndianStructure):
_pack_ = 0
_fields_ = [
IE_OPCODE(), # 0
IE_REG('reg'), # 6
IE_IMM('flag', 3), # 11
IE_FLAG('value'), # 14
IE_FLAG('immediate_flag'), # 15
IE_IMM('immediate', 16) # 16
]
@staticmethod
def fill_reloc_slot(inst, slot):
logging.getLogger().debug('fill_reloc_slot: inst=%s, slot=%s', inst, slot)
slot.patch_offset = 16
slot.patch_size = 16
@staticmethod
def sign_extend_immediate(logger, inst):
return Encoding.sign_extend_immediate(logger, inst, 0x8000, 0xFFFF0000)
def __repr__(self):
return Encoding.repr(self, [('reg', '%02d'), ('flag', '%02d'), ('value', '%d'), ('immediate_flag', '%d'), ('immediate', '0x%04X')])
class EncodingS(ctypes.LittleEndianStructure):
_pack_ = 0
_fields_ = [
IE_OPCODE(), # 0
IE_REG('reg1'), # 6
IE_REG('reg2'), # 11
IE_IMM('flag', 3), # 16
IE_FLAG('value'), # 19
IE_FLAG('immediate_flag'), # 20
IE_IMM('immediate', 11) # 21
]
@staticmethod
def fill_reloc_slot(inst, slot):
logging.getLogger().debug('fill_reloc_slot: inst=%s, slot=%s', inst, slot)
slot.patch_offset = 21
slot.patch_size = 11
@staticmethod
def sign_extend_immediate(logger, inst):
return Encoding.sign_extend_immediate(logger, inst, 0x400, 0xFFFFF800)
def __repr__(self):
return Encoding.repr(self, [('reg1', '%02d'), ('reg2', '%02d'), ('flag', '%02d'), ('value', '%d'), ('immediate_flag', '%d'), ('immediate', '0x%04X')])
class EncodingI(ctypes.LittleEndianStructure):
_pack_ = 0
_fields_ = [
IE_OPCODE(), # 0
IE_REG('reg'), # 6
IE_FLAG('immediate_flag'), # 11
IE_IMM('immediate', 20), # 12
]
@staticmethod
def fill_reloc_slot(inst, slot):
logging.getLogger().debug('fill_reloc_slot: inst=%s, slot=%s', inst, slot)
slot.patch_offset = 12
slot.patch_size = 20
@staticmethod
def sign_extend_immediate(logger, inst):
return Encoding.sign_extend_immediate(logger, inst, 0x80000, 0xFFF00000)
def __repr__(self):
return Encoding.repr(self, [('reg', '%02d'), ('immediate_flag', '%d'), ('immediate', '0x%04X')])
class EncodingA(ctypes.LittleEndianStructure):
_pack_ = 0
_fields_ = [
IE_OPCODE(), # 0
IE_REG('reg1'), # 6
IE_REG('reg2'), # 11
IE_REG('reg3') # 16
]
def __repr__(self):
return Encoding.repr(self, [('reg1', '%02d'), ('reg2', '%02d'), ('reg3', '%02d')])
class EncodingContext(LoggingCapable, object):
def __init__(self, logger):
super(EncodingContext, self).__init__(logger)
if hasattr(sys, 'pypy_version_info'):
self.u32_to_encoding = self._u32_to_encoding_pypy
else:
self.u32_to_encoding = self._u32_to_encoding_python
def _u32_to_encoding_pypy(self, u, encoding):
class _Cast(ctypes.Union):
_pack_ = 0
_fields_ = [
('overall', u32_t),
('encoding', encoding)
]
caster = _Cast()
caster.overall = u32_t(u).value
return caster.encoding
def _u32_to_encoding_python(self, u, encoding):
u = u32_t(u)
e = encoding()
ctypes.cast(ctypes.byref(e), ctypes.POINTER(encoding))[0] = ctypes.cast(ctypes.byref(u), ctypes.POINTER(encoding)).contents
return e
def encode(self, inst, field, size, value, raise_on_large_value = False):
self.DEBUG('encode: inst=%s, field=%s, size=%s, value=%s, raise_on_large_value=%s', inst, field, size, value, raise_on_large_value)
setattr(inst, field, value)
self.DEBUG('encode: inst=%s', inst)
if value >= 2 ** size:
e = buffer.get_error(EncodingLargeValueError, 'inst=%s, field=%s, size=%s, value=%s' % (inst, field, size, UINT32_FMT(value)))
if raise_on_large_value is True:
raise e
e.log(self.WARN)
def decode(self, instr_set, inst, core = None):
self.DEBUG('%s.decode: inst=%s, core=%s', self.__class__.__name__, inst, core)
opcode = inst & 0x3F
if opcode not in instr_set.opcode_desc_map:
raise InvalidOpcodeError(opcode, core = core)
return self.u32_to_encoding(inst, instr_set.opcode_encoding_map[opcode]), instr_set.opcode_desc_map[opcode], opcode
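# --- Illustrative sketch (not part of the original module) ---
# A hedged round trip through the helpers above: build an EncodingR
# instruction word field by field, flatten it to a raw u32, then decode the
# raw word back into the same bitfields. The field values are arbitrary.
#
#     import logging
#     ctx = EncodingContext(logging.getLogger())
#     inst = EncodingR()
#     ctx.encode(inst, 'opcode', 6, 0x01)
#     ctx.encode(inst, 'reg1', 5, 2)
#     ctx.encode(inst, 'reg2', 5, 3)
#     ctx.encode(inst, 'immediate_flag', 1, 1)
#     ctx.encode(inst, 'immediate', 15, 0x1F)
#     raw = encoding_to_u32(inst)              # packed 32-bit instruction word
#     back = ctx.u32_to_encoding(raw, EncodingR)
#     assert back.reg1 == 2 and back.immediate == 0x1F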
class Descriptor(object):
mnemonic = None
opcode = None
operands = None
# this is a default encoding, and by the way it silences Codacy's warning
encoding = EncodingR
relative_address = False
inst_aligned = False
def __init__(self, instruction_set):
super(Descriptor, self).__init__()
self.instruction_set = instruction_set
self.instruction_set.instructions.append(self)
self._expand_operands()
def _expand_operands(self):
if isinstance(self.__class__.operands, list):
return
self.__class__.operands = [ot.strip() for ot in self.operands.split(',')] if self.operands is not None else []
self.operands = self.__class__.operands
#
# Execution
#
@staticmethod
def jit(core, inst):
return None
@staticmethod
def execute(core, inst):
raise NotImplementedError('%s does not implement execute method' % inst.opcode)
#
# Encoding
#
@staticmethod
def assemble_operands(ctx, inst, operands):
pass
@staticmethod
def fill_reloc_slot(inst, slot):
inst.fill_reloc_slot(inst, slot)
@staticmethod
def disassemble_operands(logger, inst):
return []
@classmethod
def disassemble_mnemonic(cls, inst):
return cls.mnemonic
@staticmethod
def _match_operand_type(allowed, operand):
from ..asm.ast import Regist
quanta413/Population-Evolution-Project-Source-Code | populationevolution/stenciledsum.py | Python | bsd-2-clause | 6,254 | 0.00048
# -*- coding: utf-8 -*-
"""
Spyder Editor
This is a temporary script file.
"""
import numpy as np
def subarray_multislice(array_ndim, fixed_axes, indices):
'''
Return tuple of slices that if indexed into an array with given dimensions
will return subarray with the axes in axes fixed at given indices
'''
indices = np.array(indices)
colon = slice(None, None, None)
multislice = ()
for i in range(array_ndim):
if i in fixed_axes:
multislice = multislice + \
(indices[np.where(fixed_axes == i)[0][0]],)
else:
multislice = multislice + (colon,)
return multislice
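# --- Illustrative note (not part of the original module) ---
# A hedged example of the helper above: for a 3-D array, fixing axes 0 and 2
# at indices 1 and 4 yields an index tuple equivalent to arr[1, :, 4].
#
#     subarray_multislice(3, np.array([0, 2]), [1, 4])
#     # -> (1, slice(None, None, None), 4)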
def subarray_view(array, fixed_axes, indices, checks=True):
'''
Return view of subarray of input array with fixed_axes at
corresponding indices.'''
if checks:
# Coerce the inputs into flat numpy arrays to allow for easy handling
# of a variety of input types
fixed_axes = np.atleast_1d(np.array(fixed_axes)).flatten()
indices = np.atleast_1d(np.array(indices)).flatten()
check_axes_access(fixed_axes, array.ndim)
convert_axes_to_positive(fixed_axes, array.ndim)
if fixed_axes.shape != indices.shape:
raise ValueError('axes and indices must have matching shapes or'
' both be integers')
return array[subarray_multislice(array.ndim, fixed_axes, indices)]
def subrange_view(array, starts, ends, steps=None, checks=True):
'''
Return view of array with each axes indexed between starts and ends.
'''
if checks:
# Coerce the inputs into flat numpy arrays to allow for easy handling
# of a variety of input types
starts = np.atleast_1d(np.array(starts)).flatten()
ends = np.atleast_1d(np.array(ends)).flatten()
if steps is not None:
steps = np.atleast_1d(np.array(steps)).flatten()
# Check number of array axes matches up with starts and ends
if (array.ndim != starts.size) or (array.ndim != ends.size):
raise ValueError('the size of starts and ends must equal the '
'number of array dimensions')
multislice = ()
# If steps is None, default to step size of 1
if steps is None:
for i in range(array.ndim):
multislice = multislice + (slice(starts[i], ends[i], 1),)
else:
for i in range(array.ndim):
multislice = multislice + (slice(starts[i], ends[i], steps[i]),)
return array[multislice]
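# --- Illustrative note (not part of the original module) ---
# A hedged example of subrange_view() above: slicing a 4x4 array between the
# given starts and ends returns a view equivalent to a[1:3, 0:2].
#
#     a = np.arange(16).reshape(4, 4)
#     subrange_view(a, [1, 0], [3, 2])   # same elements as a[1:3, 0:2]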
def check_axes_access(axes, array_ndim):
if np.max(axes) >= array_ndim or np.min(axes) < -array_ndim:
raise IndexError('too many indices for array')
# regular numpy scheme: convert a negative index to the positive index it corresponds to
def convert_axes_to_positive(axes, array_ndim):
for index, element in enumerate(axes):
if element < 0:
axes[index] = element + array_ndim
def correct_stencil_shape(array_ndim, axes, summed_axes_shape):
return np.hstack([np.array(summed_axes_shape),
np.array(array_ndim - len(axes))])
def check_stencil_shape(array_ndim, axes, summed_axes_shape, stencil):
if not np.all(np.array(stencil.shape) ==
correct_stencil_shape(array_ndim, axes, summed_axes_shape)):
raise ValueError('The shape of the stencil must match the big'
' array and axes appropriately')
def stenciled_sum(array, summed_axes, stencil):
summed_axes = np.atleast_1d(np.array(summed_axes))
summed_axes_shape = np.array(array.shape)[summed_axes]
fixed_stencil_summer = fixedStencilSum(array.ndim, summed_axes,
summed_axes_shape, stencil)
return fixed_stencil_summer.stenciled_sum(array)
class fixedStencilSum(object):
def __init__(self, array_ndim, axes_summed_over, summed_axes_shape,
stencil):
axes = np.atleast_1d(np.array(axes_summed_over)).flatten()
# check that inputs are compatible
check_axes_access(axes, array_ndim)
convert_axes_to_positive(axes, array_ndim)
check_stencil_shape(array_ndim, axes, summed_axes_shape, stencil)
# handle a trivial case where we sum the entire array into one number
if array_ndim == len(axes):
self.stenciled_sum = np.sum
self.array_ndim = array_ndim
self.axes = axes
self.not_axes = [i for i in range(array_ndim) if i not in axes]
self.summed_axes_shape = summed_axes_shape
# left zero the stencil, ablsa is a tuple of indices into
# "all but last stencil axis"
ablsa = tuple(range(stencil.ndim-1))
stencil = stencil - np.amin(stencil, axis=ablsa)
self.stencil = stencil
self.input_expand = np.amax(stencil, axis=ablsa) - \
np.amin(stencil, axis=ablsa)
self.stencil_loop_indices = [i for i in np.ndindex(stencil.shape[:-1])]
self.multislices = [subarray_multislice(self.array_ndim, self.axes,
indices) for indices in
self.stencil_loop_indices]
def stenciled_sum(self, big_array):
subarray_shape = np.array(big_array.shape)[self.not_axes]
return_array_shape = subarray_shape + self.input_expand
return_array = np.zeros(return_array_shape, dtype=big_array.dtype)
for indices, multislice in zip(self.stencil_loop_indices,
self.multislices):
starts = self.stencil[indices]
ends = self.stencil[indices] + subarray_shape
chunk_to_increase = subrange_view(return_array, starts, ends,
checks=False)
chunk_to_increase[:] += big_array[multislice]
return return_array
def __eq__(self, other):
if isinstance(other, fixedStencilSum):
is_equal = True
for attribute in self.__dict__:
is_equal = (np.all(np.equal(getattr(self, attribute),
getattr(other, attribute)))) and is_equal
return is_equal
else:
return NotImplemented
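# --- Illustrative demo (not part of the original file) ---
# A hedged sketch of the module-level stenciled_sum() helper above: sum a 2-D
# array over axis 0 while the stencil shifts each summed row by its own
# offset along the remaining axis, producing a staggered ("stenciled") sum.
if __name__ == '__main__':
    arr = np.array([[1, 2, 3],
                    [10, 20, 30]])
    # One offset vector per index along the summed axis: row 0 lands at
    # offset 0 in the output, row 1 at offset 1.
    stencil = np.array([[0],
                        [1]])
    print(stenciled_sum(arr, summed_axes=0, stencil=stencil))
    # Expected output: [ 1 12 23 30]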
xbmcmegapack/plugin.video.megapack.dev | resources/lib/favourites_manager.py | Python | gpl-3.0 | 2,043 | 0.000979
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This file is part of XBMC Mega Pack Addon.
Copyright (C) 2014 Wolverine (xbmcmegapack@gmail.com)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see http://www.gnu.org/licenses/gpl-3.0.html
"""
import os
import stat
import sys
import copy
import xbmc
import config
class Favourites_manager():
""" Class that manages this specific menu context."""
def __init__(self):
"""Private Properties."""
self.f = xbmc.translatePath(config.ADDON_SPECIAL_PATH) + \
config.PLUGIN_NAME + config.PLUGIN_MY_FAVOURITES_FILE
self.dictionary = {}
if not os.stat(self.f)[stat.ST_SIZE] == 0: # if not no favourites
self.dictionary = copy.deepcopy(eval(open(self.f, 'r').read()))
def add(self, favourite):
"""Add favourite to dictionary."""
self.dictionary.update(favourite)
f = open(self.f, 'w')
f.write(str(self.dictionary))
f.close()
def delete(self, key):
"""Delete favourite from dictionary."""
del self.dictionary[key]
f = open(self.f, 'w')
f.write(str(self.dictionary))
f.close()
def delete_all(self):
"""Delete all favourites from dictionary."""
f = open(self.f, 'w')
f.flush()
f.close()
if __name__ == "__main__":
context_menu_command = str(sys.argv[1])
exec context_menu_command # See config.CONTEXT_MENU_FAVOURITES_COMMANDS
optiflows/nyuki | nyuki/workflow/db/storage.py | Python | apache-2.0 | 8,557 | 0
import logging
import os
from copy import deepcopy
from motor.motor_asyncio import AsyncIOMotorClient
from pymongo.errors import ServerSelectionTimeoutError
from .triggers import TriggerCollection
from .data_processing import DataProcessingCollection
from .metadata import MetadataCollection
from .workflow_templates import WorkflowTemplatesCollection, TemplateState
from .task_templates import TaskTemplatesCollection
from .workflow_instances import WorkflowInstancesCollection
from .task_instances import TaskInstancesCollection
log = logging.getLogger(__name__)
class MongoStorage:
def __init__(self):
self._client = None
self._db = None
self._validate_on_start = False
# Collections
self._workflow_templates = None
self._task_templates = None
self._workflow_metadata = None
self._workflow_instances = None
self._task_instances = None
self.regexes = None
self.lookups = None
self.triggers = None
def configure(self, host, database, validate_on_start=True, **kwargs):
log.info("Setting up mongo storage with host '%s'", host)
self._client = AsyncIOMotorClient(host, **kwargs)
self._validate_on_start = validate_on_start
self._db_name = database
async def index(self):
"""
Try to connect to mongo and index all the collections.
"""
if self._db_name not in await self._client.list_database_names():
# If the old mongo DB does not exist, use the new tenant format
self._db_name = f"{os.environ['TENANT_ID']}#{self._db_name}"
log.info("Selected database '%s'", self._db_name)
self._db = self._client[self._db_name]
# Collections
self._workflow_templates = WorkflowTemplatesCollection(self._db)
self._task_templates = TaskTemplatesCollection(self._db)
self._workflow_metadata = MetadataCollection(self._db)
self._workflow_instances = WorkflowInstancesCollection(self._db)
self._task_instances = TaskInstancesCollection(self._db)
self.regexes = DataProcessingCollection(self._db, 'regexes')
self.lookups = DataProcessingCollection(self._db, 'lookups')
self.triggers = TriggerCollection(self._db)
log.info('Trying to connect to Mongo...')
while True:
try:
await self._workflow_templates.index()
await self._task_templates.index()
await self._workflow_metadata.index()
await self._workflow_instances.index()
await self._task_instances.index()
await self.regexes.index()
await self.lookups.index()
await self.triggers.index()
except ServerSelectionTimeoutError as exc:
log.error('Could not connect to Mongo - %s', exc)
else:
log.info('Successfully connected to Mongo')
break
if self._validate_on_start is True:
collections = await self._db.list_collection_names()
log.info('Validating %s collections', len(collections))
for collection in collections:
await self._db.validate_collection(collection)
log.info('Validated collection %s', collection)
# Templates
async def update_workflow_metadata(self, tid, metadata):
"""
Update and return
"""
return await self._workflow_metadata.update(tid, metadata)
async def upsert_draft(self, template):
"""
Update a template's draft and all its associated tasks.
"""
metadata = await self._workflow_metadata.get_one(template['id'])
if not metadata:
metadata = {
'workflow_template_id': template['id'],
'title': template.pop('title'),
'tags': template.pop('tags', []),
}
await self._workflow_metadata.insert(metadata)
# Force set of values 'version' and 'draft'.
template['version'] = await self._workflow_templates.get_last_version(
template['id']
) + 1
template['state'] = TemplateState.DRAFT.value
# Split and insert tasks.
tasks = template.pop('tasks')
await self._task_templates.insert_many(deepcopy(tasks), template)
# Insert template without tasks.
await self._workflow_templates.insert_draft(template)
template['tasks'] = tasks
template.update({'title': metadata['title'], 'tags': metadata['tags']})
return template
async def publish_draft(self, template_id):
"""
Publish a draft into an 'active' state, and archive the old active.
"""
await self._workflow_templates.publish_draft(template_id)
log.info('Draft for template %s published', template_id[:8])
async def get_for_topic(self, topic):
"""
Return all the templates listening on a particular topic.
This does not append the metadata.
"""
templates = await self._workflow_templates.get_for_topic(topic)
if templates:
log.info(
'Fetched %s templates for event from "%s"',
len(templates), topic,
)
for template in templates:
template['tasks'] = await self._task_templates.get(
template['id'], template['version']
)
return templates
async def get_templates(self, template_id=None, full=False):
"""
Return all active/draft templates
Limited to a small set of fields if 'full' is False.
TODO: Pagination.
"""
templates = await self._workflow_templates.get(template_id, full)
for template in templates:
metadata = await self._workflow_metadata.get_one(template['id'])
template.update(metadata)
if full is True:
template['tasks'] = await self._task_templates.get(
template['id'], template['version']
)
return templates
async def get_template(self, tid, draft=False, version=None):
"""
Return the active template.
"""
template = await self._workflow_templates.get_one(
tid,
draft=draft,
version=int(version) if version else None,
)
if not template:
return
metadata = await self._workflow_metadata.get_one(tid)
template.update(metadata)
template['tasks'] = await self._task_templates.get(
template['id'], template['version']
)
return template
async def delete_template(self, tid, draft=False):
"""
Delete a whole template or only its draft.
"""
await self._workflow_templates.delete(tid, draft)
if draft is False:
await self._task_templates.delete_many(tid)
await self._workflow_metadata.delete(tid)
await self.triggers.delete(tid)
# Instances
async def insert_instance(self, instance):
"""
Insert a static workflow instance and all its tasks.
"""
task_instances = []
for task in instance['template'].pop('tasks'):
task['workflow_instance_id'] = instance['id']
task_instances.append(task)
await self._task_instances.insert_many(task_instances)
await self._workflow_instances.insert(instance)
# History
async def get_history(self, **kwargs):
"""
Return paginated workflow history.
"""
count, workflows = await self._workflow_instances.get(**kwargs)
if kwargs.get('full') is True:
for workflow in workflows:
workflow['template']['tasks'] = await self._task_instances.get(
workflow['id'], True
)
return count, workflows
async def get_instance(self, instance_id, full=False):
workflow = await self._workflow_instances.get_one(instance_id, full)
if not workflow:
return
mikekestemont/PyStyl | pystyl/clustering/distance.py | Python | bsd-3-clause | 1,971 | 0.006596
# Hierarchical Agglomerative Cluster Analysis
#
# Copyright (C) 2013 Folgert Karsdorp
# Author: Folgert Karsdorp <fbkarsdorp@gmail.com>
# URL: <https://github.com/fbkarsdorp/HAC-python>
# For licence information, see LICENCE.TXT
import numpy
from numpy import dot, sqrt
def binarize_vector(u):
return u > 0
def cosine_distance(u, v, binary=False):
"""Return the cosine distance between two vectors."""
if binary:
return cosine_distance_binary(u, v)
return 1.0 - dot(u, v) / (sqrt(dot(u, u)) * sqrt(dot(v, v)))
def cosine_distance_binary(u, v):
u = binarize_vector(u)
v = binarize_vector(v)
return (1.0 * (u * v).sum()) / numpy.sqrt((u.sum() * v.sum()))
def euclidean_distance(u, v):
"""Return the euclidean distance between two vectors."""
diff = u - v
return sqrt(dot(diff, diff))
def cityblock_distance(u, v):
"""Return the Manhattan/City Block distance between two vectors."""
return abs(u-v).sum()
def canberra_distance(u, v):
"""Return the canberra distance between two vectors."""
return numpy.sum(abs(u-v) / abs(u+v))
def correlation(u, v):
"""Return the correlation distance between two vectors."""
u_var = u - u.mean()
v_var = v - v.mean()
return 1.0 - dot(u_var, v_var) / (sqrt(dot(u_var, u_var)) *
sqrt(dot(v_var, v_var)))
def dice(u, v):
"""Return the dice coefficient between two vectors."""
u = u > 0
v = v > 0
return (2.0 * (u * v).sum()) / (u.sum() + v.sum())
def jaccard_distance(u, v):
"""return jaccard distance"""
u = numpy.asarray(u)
v = numpy.asarray(v)
return (numpy.double(numpy.bitwise_and((u != v),
numpy.bitwise_or(u != 0, v != 0)).sum())
/ numpy.double(numpy.bitwise_or(u != 0, v != 0).sum()))
def jaccard(u, v):
"""Return the Jaccard coefficient between two vectors."""
u = u > 0
v = v > 0
return (1.0 * (u * v).sum()) / (u + v).sum()
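# --- Illustrative demo (not part of the original file) ---
# A hedged sketch exercising a few of the distance functions above on two
# small numpy vectors; the numbers are arbitrary.
if __name__ == '__main__':
    u = numpy.array([1.0, 0.0, 2.0])
    v = numpy.array([1.0, 1.0, 2.0])
    print('cosine    ', cosine_distance(u, v))
    print('euclidean ', euclidean_distance(u, v))
    print('cityblock ', cityblock_distance(u, v))
    print('jaccard   ', jaccard(u, v))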
dsqmoore/0install | zeroinstall/injector/trust.py | Python | lgpl-2.1 | 9,078 | 0.031725
"""
Records who we trust to sign feeds.
Trust is divided up into domains, so that it is possible to trust a key
in some cases and not others.
@var trust_db: Singleton trust database instance.
"""
# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.
from zeroinstall import _, SafeException, logger
import os
from zeroinstall import support
from zeroinstall.support import basedir, tasks
from .namespaces import config_site, config_prog, XMLNS_TRUST
KEY_INFO_TIMEOUT = 10 # Maximum time to wait for response from key-info-server
class TrustDB(object):
"""A database of trusted keys.
@ivar keys: maps trusted key fingerprints to a set of domains for which it is trusted
@type keys: {str: set(str)}
@ivar watchers: callbacks invoked by L{notify}
@see: L{trust_db} - the singleton instance of this class"""
__slots__ = ['keys', 'watchers', '_dry_run']
def __init__(self):
self.keys = None
self.watchers = []
self._dry_run = False
def is_trusted(self, fingerprint, domain = None):
self.ensure_uptodate()
domains = self.keys.get(fingerprint, None)
if not domains: return False # Unknown key
if domain is None:
return True # Deprecated
return domain in domains or '*' in domains
def get_trust_domains(self, fingerprint):
"""Return the set of domains in which this key is trusted.
If the list includes '*' then the key is trusted everywhere.
@since: 0.27
"""
self.ensure_uptodate()
return self.keys.get(fingerprint, set())
def get_keys_for_domain(self, domain):
"""Return the set of keys trusted for this domain.
@since: 0.27"""
self.ensure_uptodate()
return set([fp for fp in self.keys
if domain in self.keys[fp]])
def trust_key(self, fingerprint, domain = '*'):
"""Add key to the list of trusted fingerprints.
@param fingerprint: base 16 fingerprint without any spaces
@type fingerprint: str
@param domain: domain in which key is to be trusted
@type domain: str
@note: call L{notify} after trusting one or more new keys"""
if self.is_trusted(fingerprint, domain): return
if self._dry_run:
print(_("[dry-run] would trust key {key} for {domain}").format(key = fingerprint, domain = domain))
int(fingerprint, 16) # Ensure fingerprint is valid
if fingerprint not in self.keys:
self.keys[fingerprint] = set()
#if domain == '*':
# warn("Calling trust_key() without a domain is deprecated")
self.keys[fingerprint].add(domain)
self.save()
def untrust_key(self, key, domain = '*'):
if self._dry_run:
print(_("[dry-run] would untrust key {key} for {domain}").format(key = key, domain = domain))
self.ensure_uptodate()
self.keys[key].remove(domain)
if not self.keys[key]:
# No more domains for this key
del self.keys[key]
self.save()
def save(self):
d = basedir.save_config_path(config_site, config_prog)
db_file = os.path.join(d, 'trustdb.xml')
if self._dry_run:
print(_("[dry-run] would update trust database {file}").format(file = db_file))
return
from xml.dom import minidom
import tempfile
doc = minidom.Document()
root = doc.createElementNS(XMLNS_TRUST, 'trusted-keys')
root.setAttribute('xmlns', XMLNS_TRUST)
doc.appendChild(root)
for fingerprint in self.keys:
keyelem = doc.createElementNS(XMLNS_TRUST, 'key')
root.appendChild(keyelem)
keyelem.setAttribute('fingerprint', fingerprint)
for domain in self.keys[fingerprint]:
domainelem = doc.createElementNS(XMLNS_TRUST, 'domain')
domainelem.setAttribute('value', domain)
keyelem.appendChild(domainelem)
with tempfile.NamedTemporaryFile(dir = d, prefix = 'trust-', delete = False, mode = 'wt') as tmp:
doc.writexml(tmp, indent = "", addindent = " ", newl = "\n", encoding = 'utf-8')
support.portable_rename(tmp.name, db_file)
def notify(self):
"""Call all watcher callbacks.
This should be called after trusting or untrusting one or more new keys.
@since: 0.25"""
for w in self.watchers: w()
def ensure_uptodate(self):
if self._dry_run:
if self.keys is None: self.keys = {}
return
from xml.dom import minidom
# This is a bit inefficient... (could cache things)
self.keys = {}
trust = basedir.load_first_config(config_site, config_prog, 'trustdb.xml')
if trust:
keys = minidom.parse(trust).documentElement
for key in keys.getElementsByTagNameNS(XMLNS_TRUST, 'key'):
domains = set()
self.keys[key.getAttribute('fingerprint')] = domains
for domain in key.getElementsByTagNameNS(XMLNS_TRUST, 'domain'):
domains.add(domain.getAttribute('value'))
else:
# Convert old database to XML format
trust = basedir.load_first_config(config_site, config_prog, 'trust')
if trust:
#print "Loading trust from", trust_db
with open(trust, 'rt') as stream:
for key in stream:
if key:
self.keys[key] = set(['*'])
def domain_from_url(url):
"""Extract the trust domain for a URL.
@param url: the feed's URL
@type url: str
@return: the trust domain
@rtype: str
@since: 0.27
@raise SafeException: the URL can't be parsed"""
try:
import urlparse
except ImportError:
from urllib import parse as urlparse # Python 3
if os.path.isabs(url):
raise SafeException(_("Can't get domain from a local path: '%s'") % url)
domain = urlparse.urlparse(url)[1]
if domain and domain != '*':
return domain
raise SafeException(_("Can't extract domain from URL '%s'") % url)
trust_db = TrustDB()
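# --- usage sketch (added for illustration; not part of the original module) ---
# domain_from_url() is a pure helper; the TrustDB methods read (and, for
# trust_key/untrust_key, write) the per-user trustdb.xml. The fingerprint used
# below is purely hypothetical.
def _trust_usage_demo():
    domain = domain_from_url('http://example.com/feeds/hello.xml')
    assert domain == 'example.com'
    return trust_db.is_trusted('0123456789ABCDEF', domain)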
class TrustMgr(object):
"""A TrustMgr handles the process of deciding whether to trust new keys
(contacting the key information server, prompting the user, accepting automatically, etc)
@since: 0.53"""
__slots__ = ['config', '_current_confirm']
def __init__(self, config):
self.config = config
self._current_confirm = None  # (a lock to prevent asking the user multiple questions at once)
@tasks.async
def confirm_keys(self, pending):
"""We don't trust any of the signatures yet. Collect information about them and add the keys to the
trusted list, possibly after confirming with the user (via config.handler).
Updates the L{trust} database, and then calls L{trust.TrustDB.notify}.
@since: 0.53
@arg pending: an object holding details of the updated feed
@type pending: L{PendingFeed}
@return: A blocker that triggers when the user has chosen, or None if already done.
@rtype: None | L{Blocker}"""
assert pending.sigs
from zeroinstall.injector import gpg
valid_sigs = [s for s in pending.sigs if isinstance(s, gpg.ValidSig)]
if not valid_sigs:
def format_sig(sig):
msg = str(sig)
if sig.messages:
msg += "\nMessages from GPG:\n" + sig.messages
return msg
raise SafeException(_('No valid signatures found on "%(url)s". Signatures:%(signatures)s') %
{'url': pending.url, 'signatures': ''.join(['\n- ' + format_sig(s) for s in pending.sigs])})
# Start downloading information about the keys...
fetcher = self.config.fetcher
kfs = {}
for sig in valid_sigs:
kfs[sig] = fetcher.fetch_key_info(sig.fingerprint)
# Wait up to KEY_INFO_TIMEOUT seconds for key information to arrive. Avoids having the dialog
# box update while the user is looking at it, and may allow it to be skipped completely in some
# cases.
timeout = tasks.TimeoutBlocker(KEY_INFO_TIMEOUT, "key info timeout")
while True:
key_info_blockers = [sig_info.blocker for sig_info in kfs.values() if sig_info.blocker is not None]
if not key_info_blockers:
break
logger.info("Waiting for response from key-info server: %s", key_info_blockers)
yield [timeout] + key_info_blockers
if timeout.happened:
logger.info("Timeout waiting for key info response")
break
# If we're already confirming something else, wait for that to finish...
while self._current_confirm is not None:
logger.info("Waiting for previous key confirmations to finish")
yield self._current_confirm
domain = domain_from_url(pending.url)
if self.config.auto_approve_keys:
existing_feed = self.config.iface_cache.get_feed(pending.url)
if not existing_feed:
changes = False
trust_db._dry_run = self.config.handler.dry_run
for sig, kf in kfs.items():
|
ActiveState/code
|
recipes/Python/577283_Decorator_expose_local_variables_functiafter/recipe-577283.py
|
Python
|
mit
| 3,276
| 0.004579
|
import new
import byteplay as bp
import inspect
def persistent_locals(f):
"""Function decorator to expose local variables after execution.
Modify the function such that, at the exit of the function
(regular exit or exceptions), the local dictionary is copied to a
read-only function property 'locals'.
This decorator wraps the function in a callable object, and
modifies its bytecode by adding an external try...finally
statement equivalent to the following:
def f(self, *args, **kwargs):
try:
... old code ...
finally:
self._locals = locals().copy()
del self._locals['self']
"""
# ### disassemble f
f_code = bp.Code.from_code(f.func_code)
# ### use bytecode injection to add try...finally statement around code
finally_label = bp.Label()
# try:
code_before = (bp.SETUP_FINALLY, finally_label)
# [original code here]
# finally:
code_after = [(finally_label, None),
# self._locals = locals().copy()
(bp.LOAD_GLOBAL, 'locals'),
(bp.CALL_FUNCTION, 0),
(bp.LOAD_ATTR, 'copy'),
(bp.CALL_FUNCTION, 0),
(bp.LOAD_FAST, 'self'),
(bp.STORE_ATTR, '_locals'),
# del self._locals['self']
(bp.LOAD_FAST, 'self'),
(bp.LOAD_ATTR, '_locals'),
(bp.LOAD_CONST, 'self'),
(bp.DELETE_SUBSCR, None),
(bp.END_FINALLY, None),
(bp.LOAD_CONST, None),
(bp.RETURN_VALUE, None)]
f_code.code.insert(0, code_before)
f_code.code.extend(code_after)
# ### re-assemble
f_code.args = ('self',) + f_code.args
func = new.function(f_code.to_code(), f.func_globals, f.func_name,
f.func_defaults, f.func_closure)
return PersistentLocalsFunction(func)
_docpostfix = """
This function has been decorated with the 'persistent_locals'
decorator. You can access the dictionary of the variables in the inner
scope of the function via the 'locals' attribute.
For more information about the original function, query the self._func
attribute.
"""
class PersistentLocalsFunction(object):
"""Wrapper class for the 'persistent_locals' decorator.
Refer to the docstring of instances for help about the wrapped
function.
"""
def __init__(self, func):
self._locals = {}
# make function an instance method
self._func = new.instancemethod(func, self, PersistentLocalsFunction)
# create nice-looking doc string for the class
signature = inspect.getargspec(func)
signature[0].pop(0) # remove 'self' argument
signature = inspect.formatargspec(*signature)
docprefix = func.func_name + signature
default_doc = '<no docstring>'
self.__doc__ = (docprefix + '\n\n' + (func.__doc__ or default_doc)
+ _docpostfix)
def __call__(self, *args, **kwargs):
return self._func(*args, **kwargs)
@property
def locals(self):
return self._locals
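# --- usage sketch (added for illustration; not part of the original recipe) ---
# Requires Python 2 plus the byteplay package, like the decorator above.
@persistent_locals
def _example(x):
    y = x + 1
    z = y * 2
    return z
# _example(3) returns 8 as usual; afterwards the inner scope is exposed:
# _example.locals == {'x': 3, 'y': 4, 'z': 8}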
|
viewworld/django-auth-iam
|
setup.py
|
Python
|
gpl-3.0
| 1,017
| 0.001967
|
#!/usr/bin/env python
import os
from setuptools import setup
from distutils.cmd import Command
import django_auth_iam
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
setup(
name='django-auth-iam',
version=django_auth_iam.__version__,
description='Django authentication backend using Amazon IAM',
long_description=read('README.rst'),
url='https://github.com/viewworld/django-auth-iam/',
author='Michael Budde',
author_email='mb@viewworld.dk',
license='GPL v3',
packages=['django_auth_iam'],
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Programming Language :: Python',
'Topic :: Security',
'Topic :: System :: Systems Administration :: Authentication/Directory',
],
keywords=['django', 'amazon', 'authentication', 'auth'],
install_requires=['boto', 'PyCrypto', 'py_bcrypt'],
)
|
Karosuo/Linux_tools
|
xls_handlers/xls_sum_venv/lib/python3.6/site-packages/pip/_internal/exceptions.py
|
Python
|
gpl-3.0
| 9,145
| 0
|
"""Exceptions used throughout package"""
from __future__ import absolute_import
from itertools import chain, groupby, repeat
from pip._vendor.six import iteritems
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Optional # noqa: F401
from pip._internal.req.req_install import InstallRequirement # noqa: F401
class PipError(Exception):
"""Base pip exception"""
class ConfigurationError(PipError):
"""General exception in configuration"""
class InstallationError(PipError):
"""General exception during installation"""
class UninstallationError(PipError):
"""General exception during uninstallation"""
class DistributionNotFound(InstallationError):
"""Raised when a distribution cannot be found to satisfy a requirement"""
class RequirementsFileParseError(InstallationError):
"""Raised when a general error occurs parsing a requirements file line."""
class BestVersionAlreadyInstalled(PipError):
"""Raised when the most up-to-date version of a package is already
installed."""
class BadCommand(PipError):
"""Raised when virtualenv or a command is not found"""
class CommandError(PipError):
"""Raised when there is an error in command-line arguments"""
class PreviousBuildDirError(PipError):
"""Raised when there's a previous conflicting build directory"""
class InvalidWheelFilename(InstallationError):
"""Invalid wheel filename."""
class UnsupportedWheel(InstallationError):
"""Unsupported wheel."""
class HashErrors(InstallationError):
"""Multiple HashError instances rolled into one for reporting"""
def __init__(self):
self.errors = []
def append(self, error):
self.errors.append(error)
def __str__(self):
lines = []
self.errors.sort(key=lambda e: e.order)
for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
lines.append(cls.head)
lines.extend(e.body() for e in errors_of_cls)
if lines:
return '\n'.join(lines)
def __nonzero__(self):
return bool(self.errors)
def __bool__(self):
return self.__nonzero__()
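# --- illustration (added; not part of pip) ---
# A self-contained sketch of the sort-then-groupby pattern used by
# HashErrors.__str__ above: errors are sorted by a key and grouped by class so
# each class contributes one heading followed by its members.
def _groupby_demo():
    from itertools import groupby
    errors = [('B', 'b1'), ('A', 'a1'), ('B', 'b2')]
    errors.sort(key=lambda e: e[0])
    lines = []
    for cls, errs in groupby(errors, lambda e: e[0]):
        lines.append('heading for %s' % cls)
        lines.extend('  %s' % e[1] for e in errs)
    return '\n'.join(lines)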
class HashError(InstallationError):
"""
A failure to verify a package against known-good hashes
:cvar order: An int sorting hash exception classes by difficulty of
recovery (lower being harder), so the user doesn't bother fretting
about unpinned packages when he has deeper issues, like VCS
dependencies, to deal with. Also keeps error reports in a
deterministic order.
:cvar head: A section heading for display above potentially many
exceptions of this kind
:ivar req: The InstallRequirement that triggered this error. This is
pasted on after the exception is instantiated, because it's not
typically available earlier.
"""
req = None # type: Optional[InstallRequirement]
head = ''
def body(self):
"""Return a summary of me for display under the heading.
This default implementation simply prints a description of the
triggering requirement.
:param req: The InstallRequirement that provoked this error, with
populate_link() having already been called
"""
return ' %s' % self._requirement_name()
def __str__(self):
return '%s\n%s' % (self.head, self.body())
def _requirement_name(self):
"""Return a description of the requirement that triggered me.
This default implementation returns long description of the req, with
line numbers
"""
return str(self.req) if self.req else 'unknown package'
class VcsHashUnsupported(HashError):
"""A hash was provided for a version-control-system-based requirement, but
we don't have a method for hashing those."""
order = 0
head = ("Can't verify hashes for these requirements because we don't "
"have a way to hash version control repositories:")
class DirectoryUrlHashUnsupported(HashError):
"""A hash was provided for a version-control-system-based requirement, but
we don't have a method for hashing those."""
order = 1
head = ("Can't verify hashes for these file:// requirements because they "
"point to directories:")
class HashMissing(HashError):
"""A hash was needed for a requirement but is absent."""
order = 2
head = ('Hashes are required in --require-hashes mode, but they are '
'missing from some requirements. Here is a list of those '
'requirements along with the hashes their downloaded archives '
'actually had. Add lines like these to your requirements files to '
'prevent tampering. (If you did not enable --require-hashes '
'manually, note that it turns on automatically when any package '
'has a hash.)')
def __init__(self, gotten_hash):
"""
:param gotten_hash: The hash of the (possibly malicious) archive we
just downloaded
"""
self.gotten_hash = gotten_hash
def body(self):
# Dodge circular import.
from pip._internal.utils.hashes import FAVORITE_HASH
package = None
if self.req:
# In the case of URL-based requirements, display the original URL
# seen in the requirements file rather than the package name,
# so the output can be directly copied into the requirements file.
package = (self.req.original_link if self.req.original_link
# In case someone feeds something downright stupid
# to InstallRequirement's constructor.
else getattr(self.req, 'req', None))
return ' %s --hash=%s:%s' % (package or 'unknown package',
FAVORITE_HASH,
self.gotten_hash)
class HashUnpinned(HashError):
"""A requirement had a hash specified but was not pinned to a specific
version."""
order = 3
head = ('In --require-hashes mode, all requirements must have their '
'versions pinned with ==. These do not:')
class HashMismatch(HashError):
"""
Distribution file hash values don't match.
:ivar package_name: The name of the package that triggered the hash
mismatch. Feel free to write to this after the exception is raised to
improve its error message.
"""
order = 4
head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
'FILE. If you have updated the package versions, please update '
'the hashes. Otherwise, examine the package contents carefully; '
'someone may have tampered with them.')
def __init__(self, allowed, gots):
"""
:param allowed: A dict of algorithm names pointing to lists of allowed
hex digests
:param gots: A dict of algorithm names pointing to hashes we
actually got from the files under suspicion
"""
self.allowed = allowed
self.gots = gots
def body(self):
return ' %s:\n%s' % (self._requirement_name(),
self._hash_comparison())
def _hash_comparison(self):
"""
Return a comparison of actual and expected hash values.
Example::
Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
or 123451234512345123451234512345123451234512345
Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
"""
def hash_then_or(hash_name):
# For now, all the decent hashes have 6-char names, so we can get
# away with hard-coding space literals.
return chain([hash_name], repeat(' or'))
lines = []
for hash_name, expecteds in iteritems(self.allowed):
prefix = hash_then_or(hash_name)
lines.extend((' Expected %s %s' % (next(prefix), e))
for e in expecteds)
lines.append('
|
Shinoby1992/xstream
|
sites/hdfilme_tv.py
|
Python
|
gpl-3.0
| 13,459
| 0.006638
|
# -*- coding: utf-8 -*-
from resources.lib.gui.gui import cGui
from resources.lib.gui.guiElement import cGuiElement
from resources.lib.handler.requestHandler import cRequestHandler
from resources.lib.parser import cParser
from resources.lib import logger
from resources.lib.handler.ParameterHandler import ParameterHandler
from resources.lib.util import cUtil
import re, json
# Plugin properties
SITE_IDENTIFIER = 'hdfilme_tv'
SITE_NAME = 'HDfilme'
SITE_ICON = 'hdfilme.png'
# Base URLs
URL_MAIN = 'http://hdfilme.tv/'
URL_MOVIES = URL_MAIN + 'movie-movies?'
URL_SHOWS = URL_MAIN + 'movie-series?'
URL_SEARCH = URL_MAIN + 'movie/search?key="%s"'
# Parameters for sorting
URL_PARMS_ORDER_ID = 'order_f=id'
URL_PARMS_ORDER_ID_ASC = URL_PARMS_ORDER_ID +'&order_d=asc'
URL_PARMS_ORDER_NAME = 'order_f=name'
URL_PARMS_ORDER_NAME_ASC = URL_PARMS_ORDER_NAME +'&order_d=asc'
QUALITY_ENUM = {'240':0, '360':1, '480':2, '720':3, '1080':4}
def load():
# Log entry
logger.info("Load %s" % SITE_NAME)
# Create the GUI element
oGui = cGui()
# Create the ParameterHandler
params = ParameterHandler()
# Create the entries
params.setParam('sUrl', URL_MOVIES)
oGui.addFolder(cGuiElement('Filme', SITE_IDENTIFIER, 'showContentMenu'), params)
params.setParam('sUrl', URL_SHOWS)
oGui.addFolder(cGuiElement('Serien', SITE_IDENTIFIER, 'showContentMenu'), params)
oGui.addFolder(cGuiElement('Suche', SITE_IDENTIFIER, 'showSearch'))
# Finish the list
oGui.setEndOfDirectory()
def showContentMenu():
# Create the GUI element
oGui = cGui()
# Create the ParameterHandler
params = ParameterHandler()
# Determine the base URL (movies or series)
baseURL = params.getValue('sUrl')
# Create the entries
params.setParam('sUrl', baseURL + URL_PARMS_ORDER_ID)
oGui.addFolder(cGuiElement('Neu hinzugefügt', SITE_IDENTIFIER, 'showEntries'), params)
params.setParam('sUrl', baseURL + URL_PARMS_ORDER_NAME_ASC)
oGui.addFolder(cGuiElement('Alphabetisch', SITE_IDENTIFIER, 'showEntries'), params)
params.setParam('sUrl', baseURL + URL_PARMS_ORDER_NAME_ASC)
oGui.addFolder(cGuiElement('Genre',SITE_IDENTIFIER,'showGenreList'), params)
# Finish the list
oGui.setEndOfDirectory()
def showGenreList():
# Create the GUI element
oGui = cGui()
# Create the ParameterHandler
params = ParameterHandler()
# Get the URL from the ParameterHandler
entryUrl = params.getValue('sUrl')
# Load the movie page
sHtmlContent = cRequestHandler(entryUrl).request()
# Load the genre select element
pattern = '<select[^>]*name="cat"[^>]*>(.*?)</select[>].*?'
# Parse with the regex
aResult = cParser().parse(sHtmlContent, pattern)
# Nothing found? => bail out
if not aResult[0]:
return
# Filter for genres
pattern = '<option[^>]*value="(\d[^ ]*)"[^>]*>(.*?)</option[>].*?'
# Parse with the regex
aResult = cParser().parse(aResult[1][0], pattern)
# Nothing found? => bail out
if not aResult[0]:
return
# Loop over all genres and build the list
for sID,sGenre in aResult[1]:
params.setParam('sUrl',entryUrl + '&cat=' + sID)
oGui.addFolder(cGuiElement(sGenre.strip(), SITE_IDENTIFIER, 'showEntries'), params)
# Finish the list
oGui.setEndOfDirectory()
def showEntries(entryUrl = False, sGui = False):
# Create the GUI element if necessary
oGui = sGui if sGui else cGui()
# Create the ParameterHandler
params = ParameterHandler()
# Determine the URL if it was not passed in
if not entryUrl: entryUrl = params.getValue('sUrl')
# Determine the current page and adjust the URL if necessary
iPage = int(params.getValue('page'))
oRequest = cRequestHandler(entryUrl + '&per_page=' + str(iPage * 50) if iPage > 0 else entryUrl)
# Fetch the data
sHtmlContent = oRequest.request()
# Filter out the main section
pattern = '<ul class="products row">(.*?)</ul>'
aResult = cParser().parse(sHtmlContent, pattern)
# Leave the function if no data could be retrieved
if not aResult[0] or not aResult[1][0]:
if not sGui: oGui.showInfo('xStream','Es wurde kein Eintrag gefunden')
return
# Set the content that should be parsed
sMainContent = aResult[1][0]
# Extract the URL
pattern = '<div[^>]*class="box-product clearfix"[^>]*>\s*?'
pattern += '<a[^>]*href="([^"]*)"[^>]*>.*?'
# Extract the thumbnail
pattern += '<img[^>]*src="([^"]*)"[^>]*>.*?'
# Check for episode entries
pattern += '(?:<div[^>]*class="episode"[^>]*>([^"]*)</div>.*?)?'
# Extract the name
pattern += '<div[^>]*class="popover-title"[^>]*>.*?'
pattern += '<span[^>]*class="name"[^>]*>([^<>]*)</span>.*?'
# Extract the description
pattern += '<div[^>]*class="popover-content"[^>]*>\s*<p[^>]*>([^<>]*)</p>'
# Parse the HTML
aResult = cParser().parse(sMainContent, pattern)
# No entries found? => bail out
if not aResult[0]:
if not sGui: oGui.showInfo('xStream','Es wurde kein Eintrag gefunden')
return
# Determine the list size
total = len (aResult[1])
# Loop over all results
for sUrl, sThumbnail, sEpisodeNrs, sName, sDesc in aResult[1]:
# For movies, split the year off the title
aYear = re.compile("(.*?)\((\d*)\)").findall(sName)
iYear = False
for name, year in aYear:
sName = name
iYear = year
break
# check whether the entry is a series/season
isTvshow = True if sEpisodeNrs else False
# Create the list entry
oGuiElement = cGuiElement(sName, SITE_IDENTIFIER, 'showHosters')
# Adjust the title for series
res = re.search('(.*?)\s(?:staf+el|s)\s*(\d+)', sName,re.I)
if res:
oGuiElement.setTVShowTitle(res.group(1))
oGuiElement.setTitle('%s - Staffel %s' % (res.group(1),int(res.group(2))))
params.setParam('sSeason', int(res.group(2)))
elif not res and isTvshow:
oGuiElement.setTVShowTitle(sName)
oGuiElement.setTitle('%s - Staffel %s' % (sName,"1"))
params.setParam('sSeason', "1")
# Adjust the thumbnail and description for display
sThumbnail = sThumbnail.replace('_thumb', '')
sDesc = cUtil().unescape(sDesc.decode('utf-8')).encode('utf-8').strip()
# Add the year if available
if iYear:
oGuiElement.setYear(iYear)
# Set the properties and add the list entry
oGuiElement.setThumbnail(sThumbnail)
oGuiElement.setMediaType('tvshow' if isTvshow else 'movie')
oGuiElement.setDescription(sDesc)
params.setParam('entryUrl', sUrl)
params.setParam('sName', sName)
params.setParam('sThumbnail', sThumbnail)
params.setParam('isTvshow', isTvshow)
oGui.addFolder(oGuiElement, params, isTvshow, total)
# Pattern to determine the current page
pattern = '<ul[^>]*class="pagination[^>]*>.*'
pattern += '<li[^>]*class="active"[^>]*><a>(\d*)</a>.*</ul>'
# Parse the page
aResult = cParser().parse(sHtmlContent, pattern)
# If a result was found, add the "Next Page" entry
if aResult[0] and aResult[1][0]:
params.setParam('page', int(aResult[1][0]))
oGui.addNextPage(SITE_IDENTIFIER, 'showEntries', params)
# Finish the list and set the view
if not sGui:
oGui.setView('tvshows' if URL_SHOWS in entryUrl else 'movies')
oGui.setEndOfDirectory()
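# --- illustration (added; not part of the original plugin) ---
# Standalone demo of the season-detection regex used in showEntries() above,
# using only the standard re module.
def _season_regex_demo():
    import re
    res = re.search(r'(.*?)\s(?:staf+el|s)\s*(\d+)', 'Breaking Bad Staffel 2', re.I)
    return res.group(1), int(res.group(2))  # -> ('Breaking Bad', 2)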
def showHosters():
# Create the ParameterHandler
params = ParameterHandler()
# Adjust the URL to get the streams instead of the info page
entryUrl = params.getValue('entryUrl').replace("-info","-stream")
# Fetch the page
sHtmlContent = cRequestHandler(entryUrl).request()
# Prüfen ob Episoden gefu
|
PSU-OIT-ARC/django-arcutils
|
arcutils/tests/test_settings.py
|
Python
|
mit
| 2,723
| 0.000367
|
from django.test import override_settings, SimpleTestCase
from arcutils.settings import NO_DEFAULT, PrefixedSettings, get_setting
@override_settings(ARC={
'a': 'a',
'b': [0, 1],
'c': [{'c': 'c'}],
'd': 'd',
})
class TestGetSettings(SimpleTestCase):
def get_setting(self, key, default=NO_DEFAULT):
return get_setting(key, default=default)
def test_can_traverse_into_dict(self):
self.assertEqual(self.get_setting('ARC.a'), 'a')
def test_can_traverse_into_dict_then_list(self):
self.assertEqual(self.get_setting('ARC.b.0'), 0)
def test_can_traverse_into_list_then_dict(self):
self.assertEqual(self.get_setting('ARC.c.0.c'), 'c')
def test_returns_default_for_non_existent_root(self):
default = object()
self.assertIs(self.get_setting('NOPE', default), default)
def test_returns_default_for_non_existent_nested_setting(self):
default = object()
self.assertIs(self.get_setting('ARC.nope', default), default)
def test_raises_when_not_found_and_no_default(self):
self.assertRaises(KeyError, self.get_setting, 'NOPE')
def test_can_traverse_into_string_setting(self):
self.assertEqual(self.get_setting('ARC.d.0'), 'd')
def test_bad_index_causes_type_error(self):
self.assertRaises(TypeError, self.get_setting, 'ARC.b.nope')
@override_settings(CAS={
'extra': 'extra',
'overridden': 'overridden',
})
class TestGetPrefixedSettings(SimpleTestCase):
def setUp(self):
super().setUp()
defaults = {
'base_url': 'http://example.com/cas/',
'parent': {
'child': 'child',
},
'overridden': 'default',
}
self.settings = PrefixedSettings('CAS', defaults)
def test_get_from_defaults(self):
self.assertEqual(self.settings.get('base_url'), 'http://example.com/cas/')
def test_get_nested_from_defaults(self):
self.assertEqual(self.settings.get('parent.child'), 'child')
def test_get_from_project_settings(self):
self.assertEqual(self.settings.get('extra'), 'extra')
def test_get_setting_overridden_in_project_settings(self):
self.assertEqual(self.settings.get('overridden'), 'overridden')
def test_defaults_trump_passed_default(self):
self.assertEqual(
self.settings.get('base_url', 'http://example.com/other/'),
'http://example.com/cas/')
def test_passed_default_does_not_trump_project_setting(self):
self.assertEqual(self.settings.get('extra', 'default'), 'extra')
def test_get_default_for_nonexistent(self):
self.assertEqual(self.settings.get('pants', 'jeans'), 'jeans')
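# --- illustration (added; not part of the original tests) ---
# A rough sketch of the dotted-path traversal these tests exercise; arcutils'
# real get_setting() also handles Django settings, defaults, and error cases.
def _traverse(settings, key):
    value = settings
    for segment in key.split('.'):
        if isinstance(value, (list, tuple, str)):
            segment = int(segment)  # digit segments index into sequences
        value = value[segment]
    return value
# _traverse({'ARC': {'b': [0, 1]}}, 'ARC.b.0') -> 0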
|
ktan2020/legacy-automation
|
win/Lib/test/test_dictviews.py
|
Python
|
mit
| 6,667
| 0.00045
|
import unittest
from test import test_support
class DictSetTest(unittest.TestCase):
def test_constructors_not_callable(self):
kt = type({}.viewkeys())
self.assertRaises(TypeError, kt, {})
self.assertRaises(TypeError, kt)
it = type({}.viewitems())
self.assertRaises(TypeError, it, {})
self.assertRaises(TypeError, it)
vt = type({}.viewvalues())
self.assertRaises(TypeError, vt, {})
self.assertRaises(TypeError, vt)
def test_dict_keys(self):
d = {1: 10, "a": "ABC"}
keys = d.viewkeys()
self.assertEqual(len(keys), 2)
self.assertEqual(set(keys), set([1, "a"]))
self.assertEqual(keys, set([1, "a"]))
self.assertNotEqual(keys, set([1, "a", "b"]))
self.assertNotEqual(keys, set([1, "b"]))
self.assertNotEqual(keys, set([1]))
self.assertNotEqual(keys, 42)
self.assertIn(1, keys)
self.assertIn("a", keys)
self.assertNotIn(10, keys)
self.assertNotIn("Z", keys)
self.assertEqual(d.viewkeys(), d.viewkeys())
e = {1: 11, "a": "def"}
self.assertEqual(d.viewkeys(), e.viewkeys())
del e["a"]
self.assertNotEqual(d.viewkeys(), e.viewkeys())
def test_dict_items(self):
d = {1: 10, "a": "ABC"}
items = d.viewitems()
self.assertEqual(len(items), 2)
self.assertEqual(set(items), set([(1, 10), ("a", "ABC")]))
self.assertEqual(items, set([(1, 10), ("a", "ABC")]))
self.assertNotEqual(items, set([(1, 10), ("a", "ABC"), "junk"]))
self.assertNotEqual(items, set([(1, 10), ("a", "def")]))
self.assertNotEqual(items, set([(1, 10)]))
self.assertNotEqual(items, 42)
self.assertIn((1, 10), items)
self.assertIn(("a", "ABC"), items)
self.assertNotIn((1, 11), items)
self.assertNotIn(1, items)
self.assertNotIn((), items)
self.assertNotIn((1,), items)
self.assertNotIn((1, 2, 3), items)
self.assertEqual(d.viewitems(), d.viewitems())
e = d.copy()
self.assertEqual(d.viewitems(), e.viewitems())
e["a"] = "def"
self.assertNotEqual(d.viewitems(), e.viewitems())
def test_dict_mixed_keys_items(self):
d = {(1, 1): 11, (2, 2): 22}
e = {1: 1, 2: 2}
self.assertEqual(d.viewkeys(), e.viewitems())
self.assertNotEqual(d.viewitems(), e.viewkeys())
def test_dict_values(self):
d = {1: 10, "a": "ABC"}
values = d.viewvalues()
self.assertEqual(set(values), set([10, "ABC"]))
self.assertEqual(len(values), 2)
def test_dict_repr(self):
d = {1: 10, "a": "ABC"}
self.assertIsInstance(repr(d), str)
r = repr(d.viewitems())
self.assertIsInstance(r, str)
self.assertTrue(r == "dict_items([('a', 'ABC'), (1, 10)])" or
r == "dict_items([(1, 10), ('a', 'ABC')])")
r = repr(d.viewkeys())
self.assertIsInstance(r, str)
self.assertTrue(r == "dict_keys(['a', 1])" or
r == "dict_keys([1, 'a'])")
r = repr(d.viewvalues())
self.assertIsInstance(r, str)
self.assertTrue(r == "dict_values(['ABC', 10])" or
r == "dict_values([10, 'ABC'])")
def test_keys_set_operations(self):
d1 = {'a': 1, 'b': 2}
d2 = {'b': 3, 'c': 2}
d3 = {'d': 4, 'e': 5}
self.assertEqual(d1.viewkeys() & d1.viewkeys(), {'a', 'b'})
self.assertEqual(d1.viewkeys() & d2.viewkeys(), {'b'})
self.assertEqual(d1.viewkeys() & d3.viewkeys(), set())
self.assertEqual(d1.viewkeys() & set(d1.viewkeys()), {'a', 'b'})
self.assertEqual(d1.viewkeys() & set(d2.viewkeys()), {'b'})
self.assertEqual(d1.viewkeys() & set(d3.viewkeys()), set())
self.assertEqual(d1.viewkeys() | d1.viewkeys(), {'a', 'b'})
self.assertEqual(d1.viewkeys() | d2.viewkeys(), {'a', 'b', 'c'})
self.assertEqual(d1.viewkeys() | d3.viewkeys(), {'a', 'b', 'd', 'e'})
self.assertEqual(d1.viewkeys() | set(d1.viewkeys()), {'a', 'b'})
self.assertEqual(d1.viewkeys() | set(d2.viewkeys()), {'a', 'b', 'c'})
self.assertEqual(d1.viewkeys() | set(d3.viewkeys()),
{'a', 'b', 'd', 'e'})
self.assertEqual(d1.viewkeys() ^ d1.viewkeys(), set())
self.assertEqual(d1.viewkeys() ^ d2.viewkeys(), {'a', 'c'})
self.assertEqual(d1.viewkeys() ^ d3.viewkeys(), {'a', 'b', 'd', 'e'})
self.assertEqual(d1.viewkeys() ^ set(d1.viewkeys()), set())
self.assertEqual(d1.viewkeys() ^ set(d2.viewkeys()), {'a', 'c'})
self.assertEqual(d1.viewkeys() ^ set(d3.viewkeys()),
{'a', 'b', 'd', 'e'})
def test_items_set_operations(self):
d1 = {'a': 1, 'b': 2}
d2 = {'a': 2, 'b': 2}
d3 = {'d': 4, 'e': 5}
self.assertEqual(
d1.viewitems() & d1.viewitems(), {('a', 1), ('b', 2)})
self.assertEqual(d1.viewitems() & d2.viewitems(), {('b', 2)})
self.assertEqual(d1.viewitems() & d3.viewitems(), set())
self.assertEqual(d1.viewitems() & set(d1.viewitems()),
{('a', 1), ('b', 2)})
self.assertEqual(d1.viewitems() & set(d2.viewitems()), {('b', 2)})
self.assertEqual(d1.viewitems() & set(d3.viewitems()), set())
self.assertEqual(d1.viewitems() | d1.viewitems(),
{('a', 1), ('b', 2)})
self.assertEqual(d1.viewitems() | d2.viewitems(),
{('a', 1), ('a', 2), ('b', 2)})
self.assertEqual(d1.viewitems() | d3.viewitems(),
{('a', 1), ('b', 2), ('d', 4), ('e', 5)})
self.assertEqual(d1.viewitems() | set(d1.viewitems()),
{('a', 1), ('b', 2)})
self.assertEqual(d1.viewitems() | set(d2.viewitems()),
{('a', 1), ('a', 2), ('b', 2)})
self.assertEqual(d1.viewitems() | set(d3.viewitems()),
{('a', 1), ('b', 2), ('d', 4), ('e', 5)})
self.assertEqual(d1.viewitems() ^ d1.viewitems(), set())
self.assertEqual(d1.viewitems() ^ d2.viewitems(),
{('a', 1), ('a', 2)})
self.assertEqual(d1.viewitems() ^ d3.viewitems(),
{('a', 1), ('b', 2), ('d', 4), ('e', 5)})
def test_main():
test_support.run_unittest(DictSetTest)
if __name__ == "__main__":
test_main()
|
prppedro/mercurium
|
plugins/multipurpose.py
|
Python
|
mit
| 3,764
| 0.003208
|
# The ugliest plugin in the world, but it is only for one specific chat.
# Here we have commands that are too small to deserve their own module.
# stats was initially implemented here and later moved out [Tadeu, 23/Aug]
# Most of it is a quick port of https://github.com/lucasberti/telegrao/blob/master/plugins/taup.lua
# This plugin is, in a way, an aberration, because it needs to be processed
# last and usually breaks when someone disables/enables a command through
# !plugin [cycrano]... Ideally I would get rid of this plugin, but it does have
# its uses, so maybe I will have to find a way for multipurpose to be invoked
# directly by reborn instead of being listed in the config... [Tadeu, 23/Aug]
from api import send_message, send_audio_id, send_sticker
from random import randint
import requests
import re
import plugins.stats as stats
import plugins.ed as ed
def on_msg_received(msg, matches):
chat = msg["chat"]["id"]
text = msg["text"]
stats.do_statistics(msg)
#ed.run_ed(msg) -> The API went down the drain. TODO: fix it
# We need to keep a log of every message for /xet and /wordcloud
with open("data/log.txt", "a", encoding='utf-8') as f:
f.write(text + "\n")
# /ip
pattern = re.compile("^[!/]ip(?:@PintaoBot)?$")
match = pattern.search(text)
if match:
# The original version returned a hardcoded IP
# This one connects to an external IP-checking service
ipregex = re.compile("(\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3})")
url = "http://checkip.dyndns.com/"
saida = ""
try:
req = requests.get(url)
body = str(req.content)
ipmatch = ipregex.search(body)
if ipmatch:
saida = ipmatch[0]
else:
saida = "Sei lá, deu algum outro pau aqui... "
except Exception as e:
saida = str(e)
send_message(chat, saida)
# /mps
pattern = re.compile("^[!/]mps(?:@PintaoBot)?$")
match = pattern.search(text)
if match:
send_message(chat, "ok to calculando aki q esistem " + str(randint(500, 10000)) + "/s por segundo de SUPER MAEMES NESNTE CHAT1")
# @todos
pattern = re.compile("(?:@todos|@todomundo)")
match = pattern.search(text)
if match:
send_message(chat, "@berti @beaea @getulhao @rauzao @xisteaga @axasdas @Garzarella")
# TODO: build this list dynamically and, perhaps, per plugin
# calma
pattern = re.compile("^calma$")
match = pattern.search(text)
if match:
send_message(chat, "ok esto mais calmo obrigada")
# are you there
pattern = re.compile("^Ping$")
match = pattern.search(text)
if match:
send_message(chat, "Pong")
# rau
pattern = re.compile("^rau$")
match = pattern.search(text)
if match:
send_message(chat, "meu pau no seu cu")
# Contribution from Humberto
pattern = re.compile("^!+$")
match = pattern.search(text)
if match:
send_audio_id(chat, "CQADAQADFQAD4CAoRdcd4TJB2ecNAg")
# /on
pattern = re.compile("^[!/]on(?:@MercuriumBot)?$")
match = pattern.search(text)
if match:
send_sticker(chat, "CAADAQADYQEAAsptnwnj5ix5ioNoNwI")
# /off
pattern = re.compile("^[!/]off(?:@MercuriumBot)?$")
match = pattern.search(text)
if match:
send_sticker(chat, "CAADAQADYgEAAsptnwlEQdht940XBgI")
# /on
pattern = re.compile("^[Ee]stou on$")
match = pattern.search(text)
if match:
send_sticker(chat, "CAADAQADYQEAAsptnwnj5ix5ioNoNwI")
|
Azure/azure-sdk-for-python
|
sdk/communication/azure-communication-identity/tests/testcase.py
|
Python
|
mit
| 2,935
| 0.006814
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import os
from azure.communication.identity._shared.utils import parse_connection_str
from _shared.testcase import CommunicationTestCase
from msal import PublicClientApplication
class CommunicationIdentityTestCase(CommunicationTestCase):
def __init__(self, method_name, *args, **kwargs):
super(CommunicationIdentityTestCase, self).__init__(method_name, *args, **kwargs)
def setUp(self):
super(CommunicationIdentityTestCase, self).setUp()
if self.is_playback():
self.connection_str = "endpoint=https://sanitized/;accesskey=fake==="
self.m365_app_id = "sanitized"
self.m365_aad_authority = "sanitized"
self.m365_aad_tenant = "sanitized"
self.m365_scope = "sanitized"
self.msal_username = "sanitized"
self.msal_password = "sanitized"
self.expired_teams_token = "sanitized"
self.skip_get_token_for_teams_user_tests = "false"
else:
self.connection_str = os.getenv('COMMUNICATION_LIVETEST_DYNAMIC_CONNECTION_STRING')
self.m365_app_id = os.getenv('COMMUNICATION_M365_APP_ID')
self.m365_aad_authority = os.getenv('COMMUNICATION_M365_AAD_AUTHORITY')
self.m365_aad_tenant = os.getenv('COMMUNICATION_M365_AAD_TENANT')
self.m365_scope = os.getenv('COMMUNICATION_M365_SCOPE')
self.msal_username = os.getenv('COMMUNICATION_MSAL_USERNAME')
self.msal_password = os.getenv('COMMUNICATION_MSAL_PASSWORD')
self.expired_teams_token = os.getenv('COMMUNICATION_EXPIRED_TEAMS_TOKEN')
endpoint, _ = parse_connection_str(self.connection_str)
self._resource_name = endpoint.split(".")[0]
self.scrubber.register_name_pair(self._resource_name, "sanitized")
self.skip_get_token_for_teams_user_tests = os.getenv('SKIP_INT_IDENTITY_EXCHANGE_TOKEN_TEST')
def generate_teams_user_aad_token(self):
if self.is_playback():
teams_user_aad_token = "sanitized"
else:
msal_app = PublicClientApplication(
client_id=self.m365_app_id,
authority="{}/{}".format(self.m365_aad_authority, self.m365_aad_tenant))
result = msal_app.acquire_token_by_username_password(username=self.msal_username, password=self.msal_password, scopes=[self.m365_scope])
teams_user_aad_token = result["access_token"]
return teams_user_aad_token
def skip_get_token_for_teams_user_test(self):
return str(self.skip_get_token_for_teams_user_tests).lower() == 'true'
|
e-gob/plataforma-kioscos-autoatencion
|
scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/windows/win_msg.py
|
Python
|
bsd-3-clause
| 3,581
| 0.001955
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Jon Hawkesworth (@jhawkesworth) <figs@unity.demon.co.uk>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_msg
version_added: "2.3"
short_description: Sends a message to logged in users on Windows hosts.
description:
- Wraps the msg.exe command in order to send messages to Windows hosts.
options:
to:
description:
- Who to send the message to. Can be a username, sessionname or sessionid.
default: '*'
display_seconds:
description:
- How long to wait for receiver to acknowledge message, in seconds.
default: 10
wait:
description:
- Whether to wait for users to respond. Module will only wait for the number of seconds specified in display_seconds or 10 seconds if not specified.
However, if I(wait) is true, the message is sent to each logged on user in turn, waiting for the user to either press 'ok' or for
the timeout to elapse before moving on to the next user.
type: bool
default: 'no'
msg:
description:
- The text of the message to be displayed.
- The message must be less than 256 characters.
default: Hello world!
author:
- Jon Hawkesworth (@jhawkesworth)
notes:
- This module must run on a windows host, so ensure your play targets windows
hosts, or delegates to a windows host.
- Messages are only sent to the local host where the module is run.
- The module does not support sending to users listed in a file.
- Setting wait to true can result in long run times on systems with many logged in users.
'''
EXAMPLES = r'''
- name: Warn logged in users of impending upgrade
win_msg:
display_seconds: 60
msg: Automated upgrade about to start. Please save your work and log off before {{ deployment_start_time }}
'''
RETURN = r'''
msg:
description: Text of the message that was sent.
returned: changed
type: string
sample: Automated upgrade about to start. Please save your work and log off before 22 July 2016 18:00:00
display_seconds:
description: Value of display_seconds module parameter.
returned: success
type: string
sample: 10
rc:
description: The return code of the API call
returned: always
type: int
sample: 0
runtime_seconds:
description: How long the module took to run on the remote windows host.
returned: success
type: string
sample: 22 July 2016 17:45:51
sent_localtime:
description: local time from windows host when the message was sent.
returned: success
type: string
sample: 22 July 2016 17:45:51
wait:
description: Value of wait module parameter.
returned: success
type: boolean
sample: false
'''
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/contourcarpet/line/_smoothing.py
|
Python
|
mit
| 505
| 0.00198
|
import _plotly_utils.basevalidators
class SmoothingValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self, plotly_name="smoothing", parent_name="contourcarpet.line", **kwargs
):
super(SmoothingValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
max=kwargs.pop("max", 1.3),
min=kwargs.pop("min", 0),
**kwargs
)
|
sserrot/champion_relationships
|
venv/Lib/site-packages/nbconvert/preprocessors/sanitize.py
|
Python
|
mit
| 4,070
| 0
|
"""
NBConvert Preprocessor for sanitizing HTML rendering of notebooks.
"""
from bleach import (
ALLOWED_ATTRIBUTES,
ALLOWED_STYLES,
ALLOWED_TAGS,
clean,
)
from traitlets import (
Any,
Bool,
List,
Set,
Unicode,
)
from .base import Preprocessor
class SanitizeHTML(Preprocessor):
# Bleach config.
attributes = Any(
config=True,
default_value=ALLOWED_ATTRIBUTES,
help="Allowed HTML tag attributes",
)
tags = List(
Unicode(),
config=True,
default_value=ALLOWED_TAGS,
help="List of HTML tags to allow",
)
styles = List(
Unicode(),
config=True,
default_value=ALLOWED_STYLES,
help="Allowed CSS styles if <style> tag is whitelisted"
)
strip = Bool(
config=True,
default_value=False,
help="If True, remove unsafe markup entirely instead of escaping"
)
strip_comments = Bool(
config=True,
default_value=True,
help="If True, strip comments from escaped HTML",
)
# Display data config.
safe_output_keys = Set(
config=True,
default_value={
'metadata', # Not a mimetype per-se, but expected and safe.
'text/plain',
'text/latex',
'application/json',
'image/png',
'image/jpeg',
},
help="Cell output mimetypes to render without modification",
)
sanitized_output_types = Set(
config=True,
default_value={
'text/html',
'text/markdown',
},
help="Cell output types to display after escaping with Bleach.",
)
def preprocess_cell(self, cell, resources, cell_index):
"""
Sanitize potentially-dangerous contents of the cell.
Cell Types:
raw:
Sanitize literal HTML
markdown:
Sanitize literal HTML
code:
Sanitize outputs that could result in code execution
"""
if cell.cell_type == 'raw':
# Sanitize all raw cells anyway.
# Only ones with the text/html mimetype should be emitted
# but erring on the side of safety maybe.
cell.source = self.sanitize_html_tags(cell.source)
return cell, resources
elif cell.cell_type == 'markdown':
cell.source = self.sanitize_html_tags(cell.source)
return cell, resources
elif cell.cell_type == 'code':
cell.outputs = self.sanitize_code_outputs(cell.outputs)
return cell, resources
def sanitize_code_outputs(self, outputs):
"""
Sanitize code cell outputs.
Removes 'text/javascript' fields from display_data outputs, and
runs `sanitize_html_tags` over 'text/html'.
"""
for output in outputs:
# These are always ascii, so nothing to escape.
if output['output_type'] in ('stream', 'error'):
continue
data = output.data
to_remove = []
for key in data:
if key in self.safe_output_keys:
continue
elif key in self.sanitized_output_types:
self.log.info("Sanitizing %s" % key)
data[key] = self.sanitize_html_tags(data[key])
else:
# Mark key for removal. (Python doesn't allow deletion of
# keys from a dict during iteration)
to_remove.append(key)
for key in to_remove:
self.log.info("Removing %s" % key)
del data[key]
return outputs
def sanitize_html_tags(self, html_str):
"""
Sanitize a string containing raw HTML tags.
"""
return clean(
html_str,
tags=self.tags,
attributes=self.attributes,
styles=self.styles,
strip=self.strip,
strip_comments=self.strip_comments,
)
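# --- usage sketch (added; not part of the original preprocessor) ---
# What sanitize_html_tags() amounts to for a single string when the default
# Bleach whitelists are used (the traits above just make them configurable).
def _bleach_demo():
    from bleach import clean
    dirty = '<script>alert(1)</script><b>bold stays</b>'
    # <script> is escaped (or removed entirely with strip=True); <b> is allowed.
    return clean(dirty)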
|
edwardsnj/rmidb2
|
rmidb2/sosecpwhashprovider.py
|
Python
|
mit
| 317
| 0.015773
|
from turbogears.identity.soprovider import *
from secpwhash import check_password
class SoSecPWHashIdentityProvider(SqlObjectIdentityProvider):
def validate_password(self, user, user_name, password):
# print >>sys.stderr, user, user.password, user_name, password
return check_password(user.password,password)
|
igormartire/esii
|
chess/ui/ui.py
|
Python
|
mit
| 20,909
| 0
|
import os
import time
import pygame
from pygame.locals import *
from chess.core.models import Coordinate, Color, Piece, Player
from chess.core.utils import WHITE_PIECES
from chess.core.query import (destinations,
is_check_for_player,
is_checkmate_for_player,
is_stalemate_for_player,
is_impossible_checkmate)
from chess.core.coloring import color_board
from chess.core.moving import move
from chess.ai.minimax import Minimax
from chess.core.game import Game
from chess.ai.state import State
SCREEN_TITLE = 'Chess'
SCREEN_WIDTH = 640
SCREEN_HEIGHT = 740
BOARD_SIZE = 640
CELL_BORDER = 3
IMAGES_FOLDER_PATH = 'assets'
BUILD = False
class UI:
def __init__(self):
pygame.init()
self.font = pygame.font.SysFont("monospace", 50)
self.screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
pygame.display.set_caption(SCREEN_TITLE)
self.sprites = {
"WHITE_PAWN_IMAGE": self.load_png('white-pawn.png'),
"WHITE_BISHOP_IMAGE": self.load_png('white-bishop.png'),
"WHITE_KING_IMAGE": self.load_png('white-king.png'),
"WHITE_KNIGHT_IMAGE": self.load_png('white-knight.png'),
"WHITE_QUEEN_IMAGE": self.load_png('white-queen.png'),
"WHITE_ROOK_IMAGE
|
": self.load_png('white-rook.png'),
"BLACK_PAWN_IMAGE": self.load_png('black-pawn.png'),
"BLACK_BISHOP_IMAGE": self.load_png('black-bishop.png'),
"BLACK_KING_IMAGE": self.load_png('black-king.png'),
"BLACK_KNIGHT_IMAGE": self.load_png('black-knight.png'),
"BLACK_QUEEN_IMAGE": self.load_png('black-queen.png'),
"BLACK_ROO
|
K_IMAGE": self.load_png('black-rook.png')
}
self.assets = {
'title': self.load_png('title.png'),
'logo_small': self.load_png('logo_small.png'),
'bg': self.load_png('bg.png'),
}
self.__displayed_text = self.font.render("", 1, (255, 255, 255))
self.game_difficulty = 1
def display_text(self, text, color=(255, 255, 255)):
self.__displayed_text = self.font.render(text, 1, color)
def animate(self, board, move_diff):
B = Color.BLACK
W = Color.WHITE
colored_board = [[W, B, W, B, W, B, W, B],
[B, W, B, W, B, W, B, W],
[W, B, W, B, W, B, W, B],
[B, W, B, W, B, W, B, W],
[W, B, W, B, W, B, W, B],
[B, W, B, W, B, W, B, W],
[W, B, W, B, W, B, W, B],
[B, W, B, W, B, W, B, W]]
dH = move_diff[1].row - move_diff[0].row
dL = move_diff[1].column - move_diff[0].column
for i in range(21):
t = i / 20.0
self.screen.fill((0, 0, 0))
board_surface = pygame.Surface((BOARD_SIZE, BOARD_SIZE)).convert()
chess_pieces = []
num_of_cells = len(board)
cell_size = (BOARD_SIZE / num_of_cells)
for row in range(num_of_cells):
for col in range(num_of_cells):
cell_rect = (col * cell_size, row * cell_size,
cell_size - CELL_BORDER,
cell_size - CELL_BORDER)
if row == move_diff[1].row and col == move_diff[1].column:
piece_cell_rect = (
(move_diff[0].column + t * dL) * cell_size +
board_position()[0],
(move_diff[0].row + t * dH) * cell_size +
board_position()[1],
cell_size - CELL_BORDER, cell_size - CELL_BORDER)
else:
piece_cell_rect = (
col * cell_size + board_position()[0],
row * cell_size + board_position()[1],
cell_size - CELL_BORDER,
cell_size - CELL_BORDER)
cell_color_rgb = colored_board[row][col].rgb
board_surface.fill(cell_color_rgb, cell_rect)
cell_value = board[row][col]
chess_piece = self.create_chess_piece(
cell_value, cell_size, piece_cell_rect)
if chess_piece is not None:
chess_pieces.append(chess_piece)
self.screen.blit(board_surface, board_position())
for chess_piece in chess_pieces:
self.screen.blit(chess_piece.image, chess_piece.rect)
text_rect = self.__displayed_text.get_rect(
center=(SCREEN_WIDTH / 2, 50))
self.screen.blit(self.__displayed_text, text_rect)
pygame.display.update()
time.sleep(0.01)
def refresh(self, chess_board, colored_board):
self.screen.fill((0, 0, 0))
board_surface, chess_pieces = self.setup_board(
chess_board, colored_board)
self.screen.blit(board_surface, board_position())
for chess_piece in chess_pieces:
self.screen.blit(chess_piece.image, chess_piece.rect)
text_rect = self.__displayed_text.get_rect(
center=(SCREEN_WIDTH / 2, 50))
self.screen.blit(self.__displayed_text, text_rect)
pygame.display.update()
def create_chess_piece(self, piece, cell_size, cell_rect):
if piece == Piece.NONE:
piece_image = None
else:
piece_image = self.sprites[piece.name + '_IMAGE']
chess_piece = None
if piece_image is not None:
chess_piece_image = pygame.transform.scale(
piece_image, (int(cell_size), int(cell_size)))
chess_piece = ChessPiece(chess_piece_image, cell_rect, piece)
return chess_piece
def setup_board(self, board, color_board):
board_surface = pygame.Surface((BOARD_SIZE, BOARD_SIZE)).convert()
chess_pieces = []
num_of_cells = len(board)
cell_size = (BOARD_SIZE / num_of_cells)
for row in range(num_of_cells):
for col in range(num_of_cells):
cell_rect = (
col * cell_size,
row * cell_size,
cell_size - CELL_BORDER,
cell_size - CELL_BORDER)
cell_color_rgb = color_board[row][col].rgb
board_surface.fill(cell_color_rgb, cell_rect)
cell_value = board[row][col]
piece_rect = (
col * cell_size + board_position()[0],
row * cell_size + board_position()[1],
cell_size - CELL_BORDER,
cell_size - CELL_BORDER)
chess_piece = self.create_chess_piece(
cell_value, cell_size, piece_rect)
if chess_piece is not None:
chess_pieces.append(chess_piece)
return board_surface, chess_pieces
def load_png(self, file_name):
if BUILD:
image = pygame.image.load(file_name)
else:
image = pygame.image.load(
os.path.join(IMAGES_FOLDER_PATH, file_name))
if image.get_alpha() is None:
image = image.convert()
else:
image = image.convert_alpha()
return image
class ChessPiece(pygame.sprite.Sprite):
def __init__(self, image_surface, rect, symbol=''):
pygame.sprite.Sprite.__init__(self)
self.image = image_surface
self.rect = pygame.Rect(rect)
self.symbol = symbol
def was_clicked(self, click_position):
return self.rect.collidepoint(click_position)
def __str__(self):
return self.symbol
def get_coordinates_by_position(position, board):
num_of_cells = len(board)
cell_size = BOARD_SIZE / num_of_cells
for row in range(num_of_cells):
for col in range(num_of_cells):
cell = (col * cell_size + board_
|
bgroveben/python3_machine_learning_projects
|
oreilly_GANs_for_beginners/oreilly_GANs_for_beginners/introduction_to_ml_with_python/mglearn/mglearn/plot_nn_graphs.py
|
Python
|
mit
| 3,510
| 0.001709
|
def plot_logistic_regression_graph():
import graphviz
lr_graph = graphviz.Digraph(node_attr={'shape': 'circle', 'fixedsize': 'True'},
graph_attr={'rankdir': 'LR', 'splines': 'line'})
inputs = graphviz.Digraph(node_attr={'shape': 'circle'}, name="cluster_0")
output = graphviz.Digraph(node_attr={'shape': 'circle'}, name="cluster_2")
for i in range(4):
inputs.node("x[%d]" % i, labelloc="c")
inputs.body.append('label = "inputs"')
inputs.body.append('color = "white"')
lr_graph.subgraph(inputs)
output.body.append('label = "output"')
output.body.append('color = "white"')
output.node("y")
lr_graph.subgraph(output)
for i in range(4):
lr_graph.edge("x[%d]" % i, "y", label="w[%d]" % i)
return lr_graph
def plot_single_hidden_layer_graph():
import graphviz
nn_graph = graphviz.Digraph(node_attr={'shape': 'circle', 'fixedsize': 'True'},
graph_attr={'rankdir': 'LR', 'splines': 'line'})
inputs = graphviz.Digraph(node_attr={'shape': 'circle'}, name="cluster_0")
hidden = graphviz.Digraph(node_attr={'shape': 'circle'}, name="cluster_1")
output = graphviz.Digraph(node_attr={'shape': 'circle'}, name="cluster_2")
for i in range(4):
inputs.node("x[%d]" % i)
inputs.body.append('label = "inputs"')
inputs.body.append('color = "white"')
hidden.body.append('label = "hidden layer"')
hidden.body.append('color = "white"')
for i in range(3):
hidden.node("h%d" % i, label="h[%d]" % i)
output.node("y")
output.body.append('label = "output"')
output.body.append('color = "white"')
nn_graph.subgraph(inputs)
nn_graph.subgraph(hidden)
nn_graph.subgraph(output)
for i in range(4):
for j in range(3):
nn_graph.edge("x[%d]" % i, "h%d" % j)
for i in range(3):
nn_graph.edge("h%d" % i, "y")
return nn_graph
def plot_two_hidden_layer_graph():
import graphviz
nn_graph = graphviz.Digraph(node_attr={'shape': 'circle', 'fixedsize': 'True'},
graph_attr={'rankdir': 'LR', 'splines': 'line'})
inputs = graphviz.Digraph(node_attr={'shape': 'circle'}, name="cluster_0")
hidden = graphviz.Digraph(node_attr={'shape': 'circle'}, name="cluster_1")
hidden2 = graphviz.Digraph(node_attr={'shape': 'circle'}, name="cluster_2")
output = graphviz.Digraph(node_attr={'shape': 'circle'}, name="cluster_3")
for i in range(4):
inputs.node("x[%d]" % i)
inputs.body.append('label = "inputs"')
inputs.body.append('color = "white"')
for i in range(3):
hidden.node("h1[%d]" % i)
for i in range(3):
hidden2.node("h2[%d]" % i)
hidden.body.append('label = "hidden layer 1"')
hidden.body.append('color = "white"')
hidden2.body.append('label = "hidden layer 2"')
hidden2.body.append('color = "white"')
output.node("y")
output.body.append('label = "output"')
output.body.append('color = "white"')
nn_graph.subgraph(inputs)
nn_graph.subgraph(hidden)
nn_graph.subgraph(hidden2)
nn_graph.subgraph(output)
for i in range(4):
for j in range(3):
nn_graph.edge("x[%d]" % i, "h1[%d]" % j, label="")
for i in range(3):
for j in range(3):
nn_graph.edge("h1[%d]" % i, "h2[%d]" % j, label="")
for i in range(3):
nn_graph.edge("h2[%d]" % i, "y", label="")
return nn_graph
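# --- usage sketch (added; not part of the original module) ---
# The helpers above only build graphviz.Digraph objects; writing an image out
# additionally needs the Graphviz binaries on PATH.
def _render_demo():
    graph = plot_two_hidden_layer_graph()
    # graph.render('two_hidden_layer_graph') would write a PDF next to the
    # script; in a notebook, simply returning the Digraph displays it inline.
    return graph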
|
plamut/ggrc-core
|
src/ggrc/models/notification.py
|
Python
|
apache-2.0
| 1,986
| 0.012085
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""GGRC notification SQLAlchemy layer data model extensions."""
from sqlalchemy.orm import backref
from ggrc import db
from ggrc.models.mixins import Base
from ggrc.models import utils
class NotificationConfig(Base, db.Model):
__tablename__ = 'notification_configs'
name = db.Column(db.String, nullable=True)
enable_flag = db.Column(db.Boolean)
notif_type = db.Column(db.String)
person_id = db.Column(db.Integer, db.ForeignKey('people.id'), nullable=False)
person = db.relationship(
'Person',
backref=backref('notification_configs', cascade='all, delete-orphan'))
_publish_attrs = [
'person_id',
'notif_type',
'enable_flag',
]
VALID_TYPES = [
'Email_Now',
'Email_Digest',
'Calendar',
]
class NotificationType(Base, db.Model):
__tablename__ = 'notification_types'
name = db.Column(db.String, nullable=False)
description = db.Column(db.String, nullable=True)
advance_notice = db.Column(db.DateTime, nullable=True)
template = db.Column(db.String, nullable=True)
instant = db.Column(db.Boolean, nullable=False, default=False)
class Notification(Base, db.Model):
__tablename__ = 'notifications'
object_id = db.Column(db.Integer, nullable=False)
object_type = db.Column(db.String, nullable=False)
send_on = db.Column(db.DateTime, nullable=False)
sent_at = db.Column(db.DateTime, nullable=True)
custom_message = db.Column(db.Text, nullable=True)
force_notifications = db.Column(db.Boolean, default=False, nullable=False)
notification_type_id = db.Column(
db.Integer, db.ForeignKey('notification_types.id'), nullable=False)
notification_type = db.relationship(
'NotificationType', foreign_keys='Notification.notification_type_id')
object = utils.PolymorphicRelationship("object_id", "object_type",
"{}_notifiable")
|