text stringlengths 4 1.02M | meta dict |
|---|---|
from collections import deque
import time
class Node(object):
    """A graph vertex with BFS bookkeeping (visit state and timestamps).

    Visit states: 0 = undiscovered, 1 = discovered (queued),
    2 = fully processed.
    """

    def __init__(self, name):
        self.name = name
        self._visited = 0
        # Timestamps recorded when the node is discovered (visited -> 1)
        # and finished (visited -> 2); None until the transition happens.
        self.discovery_time = None
        self.finishing_time = None

    def neighbors(self, adjacency_list):
        """Return this node's neighbor list from the given graph dict."""
        return adjacency_list[self]

    @property
    def visited(self):
        return self._visited

    @visited.setter
    def visited(self, value):
        # Record timestamps as a side effect of the state transition.
        # Bug fix: time.clock() was removed in Python 3.8 -- prefer
        # time.perf_counter() (3.3+) and fall back to time.clock only on
        # legacy runtimes that still provide it.
        timer = getattr(time, 'perf_counter', None) or time.clock
        if value == 1:
            self.discovery_time = timer()
        elif value == 2:
            self.finishing_time = timer()
        self._visited = value

    def __str__(self):
        return str(self.name)

    def __repr__(self):
        return str(self.name)
def breadth_first_search(Graph, Nodes):
    """Run BFS over the whole graph, restarting from every component.

    All nodes are reset to undiscovered first, then each node that is
    still undiscovered when reached seeds a fresh BFS traversal.
    """
    for node in Nodes:
        node.visited = 0
    for start in Nodes:
        if start.visited != 0:
            continue
        breadth_first_search_visit(Graph, start)
def breadth_first_search_visit(Graph, node):
    """BFS from ``node``: mark every reachable node discovered, then finished.

    Bug fix: the original only set ``visited = 2`` on the *start* node, so
    every other reachable node kept ``finishing_time = None`` and the final
    sort-by-finishing-time was meaningless (a TypeError on Python 3).
    Each dequeued node is now marked finished once its edges are explored.
    Also uses the idiomatic ``while queue:`` emptiness test instead of
    catching IndexError from ``popleft``.
    """
    node.visited = 1
    queue = deque([node])
    while queue:
        u = queue.popleft()
        for neighbor in u.neighbors(Graph):
            if neighbor.visited == 0:
                neighbor.visited = 1
                queue.append(neighbor)
        # All of u's edges have been examined: u is finished.
        u.visited = 2
# Demo: build a 10-node directed graph from an index-based edge table,
# run BFS over every component, then print the nodes ordered by their
# finishing timestamps.
Nodes = [Node(i) for i in range(10)]
_edges = {
    0: (5, 3),
    1: (8, 3),
    2: (5,),
    3: (9, 8),
    4: (5, 2),
    5: (9,),
    6: (9,),
    7: (5, 2, 6),
    8: (9, 4),
    9: (0, 1),
}
Graph = {Nodes[src]: [Nodes[dst] for dst in dsts]
         for src, dsts in _edges.items()}
breadth_first_search(Graph, Nodes)
for node in sorted(Nodes, key=lambda n: n.finishing_time):
    print('\t {}'.format(node))
| {
"content_hash": "9c01a27552435a2bc89af3cb8285ed57",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 62,
"avg_line_length": 23.68831168831169,
"alnum_prop": 0.5570175438596491,
"repo_name": "AbhiAgarwal/prep",
"id": "cc5330fc10faae7c3d5efb9a0f5af9880a6d9d14",
"size": "1824",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/bfs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "19389"
},
{
"name": "Python",
"bytes": "82385"
}
],
"symlink_target": ""
} |
import datetime
import os
import numpy as np
import pandas as pd
from decouple import config
from WindAdapter.enums import Header
from WindAdapter.enums import OutputFormat
# Location of the factor data dictionary CSV; overridable via environment
# or settings file through python-decouple's config().
DATA_DICT_PATH = config('DATA_DICT_PATH', default='data_dict.csv')
# Whether DATA_DICT_PATH is absolute (otherwise resolved relative to this module).
DATA_DICT_PATH_TYPE_ABS = config('DATA_DICT_PATH_TYPE_ABS', default=False, cast=bool)
# Level names for the multi-index produced by convert_2_multi_index.
# NOTE(review): the default contains a space after the comma, so splitting on
# ',' yields a second level named ' secID' (leading space) -- confirm intended.
INDEX_NAME = config('MULTI_INDEX_NAMES', default='date, secID')
# Column name for the single factor column of the output DataFrame.
COL_NAME = config('DF_COL_NAME', default='factor')
class WindQueryHelper:
    """Helper around the Wind data API: loads a factor "data dictionary"
    (a gbk-encoded CSV) and turns factor names into query parameters.
    """

    def __init__(self, data_dict_path=DATA_DICT_PATH, path_type_abs=DATA_DICT_PATH_TYPE_ABS):
        """Load the data dictionary CSV.

        :param data_dict_path: path to the CSV; resolved relative to this
            module's directory unless ``path_type_abs`` is True
        :param path_type_abs: treat ``data_dict_path`` as an absolute path
        :raises ValueError: if the CSV cannot be parsed
        """
        try:
            if not path_type_abs:
                # Resolve relative to the package directory, not the CWD.
                current_dir = os.path.dirname(os.path.abspath(__file__))
                path = os.path.join(current_dir, data_dict_path)
                self.data_dict_path = path
            else:
                self.data_dict_path = data_dict_path
            # NOTE(review): a missing file raises IOError/OSError, which is
            # NOT converted here -- only parse errors become ValueError.
            self._data_dict = pd.read_csv(self.data_dict_path, index_col=0, encoding='gbk')
        except ValueError:
            raise ValueError('data_dict fails to load')

    @property
    def data_dict(self):
        """The raw data dictionary as a pandas DataFrame."""
        return self._data_dict

    @staticmethod
    def _split_params(params):
        """Split one factor row into (main, extra) parameter Series."""
        main_params = params[[Header.API, Header.EXPLANATION, Header.INDICATOR]]
        extra_params = params.drop([Header.API, Header.EXPLANATION, Header.INDICATOR, Header.TYPE])
        # Defaults: no tenor, monthly frequency.
        extra_params[Header.TENOR.value] = np.nan
        extra_params[Header.FREQ.value] = 'M'
        return main_params, extra_params

    def get_query_params(self, factor_name=None):
        """Return (main_params, extra_params) for ``factor_name``.

        Lookup is case-insensitive (the index is lower-cased in place).

        :raises ValueError: if the factor name is unknown or None
        """
        try:
            self.data_dict.index = self.data_dict.index.str.lower()
            factor_params = self.data_dict.loc[factor_name.lower()]
        # Bug fix: was a bare ``except:``, which also swallowed SystemExit
        # and KeyboardInterrupt.  KeyError covers an unknown factor label,
        # AttributeError covers factor_name being None.
        except (KeyError, AttributeError):
            raise ValueError(
                'WindQueryHelper.get_query_params: failed to find params for factor {0}, check factor name spelling'.format(factor_name))
        main_params, extra_params = WindQueryHelper._split_params(factor_params)
        # Prefix the API name so it can be resolved as a w.* method.
        main_params[Header.API] = 'w.' + main_params[Header.API]
        return main_params, extra_params

    @staticmethod
    def convert_2_multi_index(df):
        """Stack a pivot-style frame into a (date, secID)-indexed frame."""
        df = df.copy()
        df = df.stack()
        df = pd.DataFrame(df)
        df.index.names = INDEX_NAME.split(',')
        df.columns = [COL_NAME]
        return df

    @staticmethod
    def reformat_wind_data(raw_data, date, output_data_format=OutputFormat.PITVOT_TABLE_DF):
        """Wrap one day's raw Wind response into a DataFrame.

        Rows are the (single) query date, columns the security codes; the
        result is optionally stacked into a multi-index frame.
        """
        ret = pd.DataFrame(data=raw_data.Data,
                           columns=raw_data.Codes,
                           index=[date.strftime('%Y-%m-%d')])
        if output_data_format == OutputFormat.MULTI_INDEX_DF:
            ret = WindQueryHelper.convert_2_multi_index(ret)
        return ret

    @staticmethod
    def latest_report_date(date):
        """Return the latest financial report period-end available at ``date``.

        Months are mapped to quarter-end dates; Jan-Apr falls back to the
        previous year's Sep 30.
        """
        month = date.month
        if month <= 4:
            date = datetime.datetime(date.year - 1, 9, 30)
        elif month <= 8:
            date = datetime.datetime(date.year, 3, 31)
        elif month <= 11:
            date = datetime.datetime(date.year, 6, 30)
        else:
            date = datetime.datetime(date.year, 9, 30)
        return date
| {
"content_hash": "b3eaa614daaa15f7f2cad7f8d62fcf12",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 137,
"avg_line_length": 36.94047619047619,
"alnum_prop": 0.6142442797292942,
"repo_name": "RoxanneYang/TestCase",
"id": "579658ba5bd0436d73b2e1e942ad546a44336374",
"size": "3128",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "WindAdapter/helper.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "41320"
},
{
"name": "Python",
"bytes": "29964"
}
],
"symlink_target": ""
} |
import os
import tempfile
from robotide.lib.robot.errors import DataError
from robotide.lib.robot.output import LOGGER
from robotide.lib.robot.utils import abspath, find_file, get_error_details, NormalizedDict
from .variables import Variables
class VariableScopes(object):
    """Stack of variable scopes: global -> suite(s) -> test -> keyword(s).

    ``current`` is always the innermost scope.  Entering a suite/test/
    keyword pushes a copy of an outer scope; leaving pops it.
    ``_variables_set`` tracks variables created with the Set Global/Suite/
    Test/Keyword Variable keywords so they can be re-applied to newly
    created scopes.
    """

    def __init__(self, settings):
        self._global = GlobalVariables(settings)
        self._suite = None
        self._test = None
        self._scopes = [self._global]
        self._variables_set = SetVariables()

    @property
    def current(self):
        """The innermost active scope."""
        return self._scopes[-1]

    @property
    def _all_scopes(self):
        # Innermost scope first.
        return reversed(self._scopes)

    @property
    def _scopes_until_suite(self):
        # Scopes from the innermost down to and including the current suite.
        for scope in self._all_scopes:
            yield scope
            if scope is self._suite:
                break

    @property
    def _scopes_until_test(self):
        # Scopes from the innermost down to and including the current test.
        for scope in self._scopes_until_suite:
            yield scope
            if scope is self._test:
                break

    def start_suite(self):
        """Push a new suite scope seeded from the global scope."""
        self._suite = self._global.copy()
        self._scopes.append(self._suite)
        self._variables_set.start_suite()
        # Re-apply variables set by parent suites for child suites.
        self._variables_set.update(self._suite)

    def end_suite(self):
        self._scopes.pop()
        # Index 0 is the global scope; anything above it is a suite scope.
        self._suite = self._scopes[-1] if len(self._scopes) > 1 else None
        self._variables_set.end_suite()

    def start_test(self):
        """Push a new test scope seeded from the current suite scope."""
        self._test = self._suite.copy()
        self._scopes.append(self._test)
        self._variables_set.start_test()

    def end_test(self):
        self._scopes.pop()
        self._test = None
        self._variables_set.end_test()

    def start_keyword(self):
        """Push a new keyword scope.

        NOTE(review): the keyword scope is seeded from the *suite* scope,
        not from ``current``; test/keyword-level variables reach it via
        ``_variables_set.update`` -- confirm this is intentional.
        """
        kw = self._suite.copy()
        self._variables_set.start_keyword()
        self._variables_set.update(kw)
        self._scopes.append(kw)

    def end_keyword(self):
        self._scopes.pop()
        self._variables_set.end_keyword()

    def __getitem__(self, name):
        # Item access always targets the innermost scope.
        return self.current[name]

    def __setitem__(self, name, value):
        self.current[name] = value

    def __contains__(self, name):
        return name in self.current

    def replace_list(self, items, replace_until=None, ignore_errors=False):
        # Variable substitution is delegated to the innermost scope.
        return self.current.replace_list(items, replace_until, ignore_errors)

    def replace_scalar(self, items, ignore_errors=False):
        return self.current.replace_scalar(items, ignore_errors)

    def replace_string(self, string, ignore_errors=False):
        return self.current.replace_string(string, ignore_errors=ignore_errors)

    def set_from_file(self, path, args, overwrite=False):
        """Import a variable file into every scope down to the suite."""
        variables = None
        for scope in self._scopes_until_suite:
            if variables is None:
                # The first (innermost) scope parses the file ...
                variables = scope.set_from_file(path, args, overwrite)
            else:
                # ... outer scopes reuse the already-parsed variables.
                scope.set_from_file(variables, overwrite=overwrite)

    def set_from_variable_table(self, variables, overwrite=False):
        for scope in self._scopes_until_suite:
            scope.set_from_variable_table(variables, overwrite)

    def resolve_delayed(self):
        for scope in self._scopes_until_suite:
            scope.resolve_delayed()

    def set_global(self, name, value):
        """Set a variable into every scope (Set Global Variable)."""
        for scope in self._all_scopes:
            name, value = self._set_global_suite_or_test(scope, name, value)
        self._variables_set.set_global(name, value)

    def _set_global_suite_or_test(self, scope, name, value):
        scope[name] = value
        # Avoid creating new list/dict objects in different scopes.
        if name[0] != '$':
            name = '$' + name[1:]
            value = scope[name]
        return name, value

    def set_suite(self, name, value, top=False, children=False):
        """Set a variable into the suite scope(s) (Set Suite Variable)."""
        if top:
            # Only into the top-level suite scope (index 1, above globals).
            self._scopes[1][name] = value
            return
        for scope in self._scopes_until_suite:
            name, value = self._set_global_suite_or_test(scope, name, value)
        if children:
            # Also make the variable visible to child suites.
            self._variables_set.set_suite(name, value)

    def set_test(self, name, value):
        """Set a variable into the test scope (Set Test Variable)."""
        if self._test is None:
            raise DataError('Cannot set test variable when no test is started.')
        for scope in self._scopes_until_test:
            name, value = self._set_global_suite_or_test(scope, name, value)
        self._variables_set.set_test(name, value)

    def set_keyword(self, name, value):
        """Set a variable into the current keyword scope."""
        self.current[name] = value
        self._variables_set.set_keyword(name, value)

    def as_dict(self, decoration=True):
        return self.current.as_dict(decoration=decoration)
class GlobalVariables(Variables):
    """Global variables: command line variables and variable files plus
    the automatic built-in variables."""

    def __init__(self, settings):
        Variables.__init__(self)
        self._set_cli_variables(settings)
        self._set_built_in_variables(settings)

    def _set_cli_variables(self, settings):
        # Variable files given on the command line.
        for path, args in settings.variable_files:
            try:
                path = find_file(path, file_type='Variable file')
                self.set_from_file(path, args)
            # Bug fix: was a bare ``except:``, which also trapped
            # SystemExit and KeyboardInterrupt.  Importing a variable file
            # is deliberately best-effort: log the error and continue.
            except Exception:
                msg, details = get_error_details()
                LOGGER.error(msg)
                LOGGER.info(details)
        # Individual NAME:value variables given on the command line.
        for varstr in settings.variables:
            try:
                name, value = varstr.split(':', 1)
            except ValueError:
                # No colon: the variable gets an empty string value.
                name, value = varstr, ''
            self['${%s}' % name] = value

    def _set_built_in_variables(self, settings):
        # Built-in variables available in every scope.
        for name, value in [('${TEMPDIR}', abspath(tempfile.gettempdir())),
                            ('${EXECDIR}', abspath('.')),
                            ('${/}', os.sep),
                            ('${:}', os.pathsep),
                            ('${\\n}', os.linesep),
                            ('${SPACE}', ' '),
                            ('${True}', True),
                            ('${False}', False),
                            ('${None}', None),
                            ('${null}', None),
                            ('${OUTPUT_DIR}', settings.output_directory),
                            ('${OUTPUT_FILE}', settings.output or 'NONE'),
                            ('${REPORT_FILE}', settings.report or 'NONE'),
                            ('${LOG_FILE}', settings.log or 'NONE'),
                            ('${DEBUG_FILE}', settings.debug_file or 'NONE'),
                            ('${LOG_LEVEL}', settings.log_level),
                            ('${PREV_TEST_NAME}', ''),
                            ('${PREV_TEST_STATUS}', ''),
                            ('${PREV_TEST_MESSAGE}', '')]:
            self[name] = value
class SetVariables(object):
    """Bookkeeping for variables created via the Set Global/Suite/Test/
    Keyword Variable keywords, mirrored as a stack of dictionaries that
    parallels the real variable scopes."""

    def __init__(self):
        self._suite = None
        self._test = None
        self._scopes = []

    def start_suite(self):
        if self._scopes:
            # Child suite: inherit everything tracked for the parent.
            self._suite = self._scopes[-1].copy()
        else:
            # Top-level suite: start from an empty, underscore-insensitive dict.
            self._suite = NormalizedDict(ignore='_')
        self._scopes.append(self._suite)

    def end_suite(self):
        self._scopes.pop()
        if self._scopes:
            self._suite = self._scopes[-1]
        else:
            self._suite = None

    def start_test(self):
        self._test = self._scopes[-1].copy()
        self._scopes.append(self._test)

    def end_test(self):
        self._test = None
        self._scopes.pop()

    def start_keyword(self):
        innermost = self._scopes[-1]
        self._scopes.append(innermost.copy())

    def end_keyword(self):
        self._scopes.pop()

    def set_global(self, name, value):
        # A globally-set variable overrides any previously tracked value,
        # so drop stale entries from every tracked scope.
        for scope in self._scopes:
            if name in scope:
                scope.pop(name)

    def set_suite(self, name, value):
        self._suite[name] = value

    def set_test(self, name, value):
        # Propagate into every scope from the innermost down to the test.
        for scope in reversed(self._scopes):
            scope[name] = value
            if scope is self._test:
                return

    def set_keyword(self, name, value):
        self._scopes[-1][name] = value

    def update(self, variables):
        # Re-apply everything tracked in the innermost scope.
        for key, val in self._scopes[-1].items():
            variables[key] = val
| {
"content_hash": "c3ddd298e8beb4fc44f8304e393f34da",
"timestamp": "",
"source": "github",
"line_count": 241,
"max_line_length": 90,
"avg_line_length": 32.81327800829875,
"alnum_prop": 0.5464087000505817,
"repo_name": "robotframework/RIDE",
"id": "d44f8fe0723ac9daa5ee49f270cd82bf362ce5bc",
"size": "8552",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/robotide/lib/robot/variables/scopes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "31131"
},
{
"name": "HTML",
"bytes": "96342"
},
{
"name": "JavaScript",
"bytes": "42656"
},
{
"name": "Python",
"bytes": "3703410"
},
{
"name": "RobotFramework",
"bytes": "378004"
},
{
"name": "Shell",
"bytes": "1873"
}
],
"symlink_target": ""
} |
"""
This package contains persistent stores
"""
| {
"content_hash": "4e43bb26626f19d75de90ee3ae816454",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 39,
"avg_line_length": 16,
"alnum_prop": 0.7291666666666666,
"repo_name": "tcpcloud/openvstorage",
"id": "42fccd9e2ab4af81517e022a3835e770ac340b56",
"size": "629",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ovs/extensions/storage/persistent/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "11498"
},
{
"name": "DTrace",
"bytes": "215"
},
{
"name": "HTML",
"bytes": "208883"
},
{
"name": "JavaScript",
"bytes": "818191"
},
{
"name": "Makefile",
"bytes": "1335"
},
{
"name": "Python",
"bytes": "1849659"
},
{
"name": "Shell",
"bytes": "12612"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import url, patterns
from django.contrib.auth.views import login, logout
from forms import AuthenticationForm
from nose.tools import eq_
# Legacy-style URL configuration (django.conf.urls.defaults / patterns()):
# string view names below are resolved against the 'person.views' prefix.
urlpatterns = patterns('person.views',
    # Authentication views from django.contrib.auth with a custom login form.
    url(r'^signin/$', login,
        {'authentication_form': AuthenticationForm}, name='login'),
    url(r'^signout/$', logout, {"next_page": "/"}, name='logout'),
    url('^browserid-login/', 'browserid_login', name='browserid_login'),
    # dashboard
    url(r'dashboard/$', 'dashboard', name='person_dashboard'),
    # disabled
    # Browsers for the user's disabled ("private") add-ons and libraries,
    # with and without a page number ('type': 'a' = add-ons, 'l' = libraries).
    url(r'^private_addons/$', 'dashboard_browser',
        {'type': 'a', 'disabled': True}, name='person_disabled_addons'),
    url(r'^private_libraries/$', 'dashboard_browser',
        {'type': 'l', 'disabled': True}, name='person_disabled_libraries'),
    url(r'^private_addons/(?P<page_number>\d+)/$', 'dashboard_browser',
        {'type': 'a', 'disabled': True}, name='person_disabled_addons_page'),
    url(r'^private_libraries/(?P<page_number>\d+)/$', 'dashboard_browser',
        {'type': 'l', 'disabled': True},
        name='person_disabled_libraries_page'),
    # packages
    url(r'^addons/$',
        'dashboard_browser', {'type': 'a'}, name='person_addons'),
    url(r'^libraries/$',
        'dashboard_browser', {'type': 'l'}, name='person_libraries'),
    url(r'^addons/(?P<page_number>\d+)/$',
        'dashboard_browser', {'type': 'a'}, name='person_addons_page'),
    url(r'^libraries/(?P<page_number>\d+)/$',
        'dashboard_browser', {'type': 'l'}, name='person_libraries_page'),
    # public profile
    # Catch-all pattern: must stay last so it does not shadow the routes above.
    url(u'^(?P<username>.*)/$', 'public_profile',
        name='person_public_profile'),
)
| {
"content_hash": "cd9efcbcdbe93003bb664f7122b5300c",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 77,
"avg_line_length": 39.54761904761905,
"alnum_prop": 0.6104756170981337,
"repo_name": "mozilla/FlightDeck",
"id": "f6e7b8c3a825ddd789e1409f192e4cf78d033cf2",
"size": "1661",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/person/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "74700"
},
{
"name": "JavaScript",
"bytes": "968196"
},
{
"name": "Python",
"bytes": "673855"
},
{
"name": "Shell",
"bytes": "2315"
},
{
"name": "TeX",
"bytes": "13223"
}
],
"symlink_target": ""
} |
"""Real Time Plotting of planning and control"""
import math
import sys
import threading
import gflags
import matplotlib.pyplot as plt
import numpy as np
import rospy
import common.proto_utils as proto_utils
from item import Item
from modules.localization.proto.localization_pb2 import LocalizationEstimate
from modules.canbus.proto.chassis_pb2 import Chassis
from modules.planning.proto.planning_pb2 import ADCTrajectory
from stitem import Stitem
from xyitem import Xyitem
VehicleLength = 2.85
HistLine2display = 2 #The number of lines to display
MaxSteerAngle = 470 #Maximum Steering Angle
SteerRatio = 16
WindowSize = 80
FLAGS = gflags.FLAGS
gflags.DEFINE_boolean('show_heading', False,
'Show heading instead of acceleration')
gflags.DEFINE_boolean('show_st_graph', False, 'Show st graph')
class Plotter(object):
    """Collects planning/localization/chassis ROS messages and feeds them
    to the four plot Items (or four ST-graph items) for real-time display."""

    def __init__(self, ax1, ax2, ax3, ax4, stgraph):
        # The four Item/Stitem plot wrappers, indexed 0-3.
        self.ax = [ax1, ax2, ax3, ax4]
        self.updategraph = False
        self.planningavailable = False
        self.closed = False
        # Latest chassis-derived state, written by callback_chassis.
        self.carspeed = 0.0
        self.steer_angle = 0.0
        self.autodrive = False
        self.carcurvature = 0.0
        # True when plotting ST graphs instead of trajectory/speed/etc.
        self.stgraph = stgraph
        # Serializes Item access between ROS callbacks and the draw loop.
        self.lock = threading.Lock()

    def callback_planning(self, data):
        """New Planning Trajectory"""
        if self.stgraph:
            # Flatten the ST-graph debug data into parallel lists.
            st_s, st_t, polygons_s, polygons_t = proto_utils.flatten(
                data.debug.planning_data.st_graph,
                ['speed_profile.s',
                 'speed_profile.t',
                 'boundary.point.s',
                 'boundary.point.t'])
            with self.lock:
                for i in range(len(st_s)):
                    self.ax[i].new_planning(st_t[i], st_s[i],
                                            polygons_t[i], polygons_s[i])
        else:
            if len(data.trajectory_point) == 0:
                # Bug fix: was the Python-2-only statement 'print data',
                # which breaks parsing under Python 3; print(x) with a
                # single argument behaves identically on both versions.
                print(data)
                return
            x, y, speed, theta, kappa, acc, relative_time = np.array(
                proto_utils.flatten(data.trajectory_point,
                                    ['path_point.x',
                                     'path_point.y',
                                     'v',
                                     'path_point.theta',
                                     'path_point.kappa',
                                     'a',
                                     'relative_time']))
            # Trajectory times are relative to the message timestamp.
            relative_time += data.header.timestamp_sec
            with self.lock:
                self.ax[0].new_planning(relative_time, x, y)
                self.ax[1].new_planning(relative_time, speed)
                if self.ax[2].title == "Curvature":
                    self.ax[2].new_planning(relative_time, kappa)
                if self.ax[3].title == "Heading":
                    self.ax[3].new_planning(relative_time, theta)
                else:
                    self.ax[3].new_planning(relative_time, acc)

    def callback_chassis(self, data):
        """New chassis status (speed, steering percentage, driving mode)."""
        if self.stgraph:
            return
        self.carspeed = data.speed_mps
        # Convert steering percentage to a road-wheel angle in degrees.
        self.steer_angle = \
            data.steering_percentage / 100 * MaxSteerAngle / SteerRatio
        self.autodrive = (data.driving_mode == Chassis.COMPLETE_AUTO_DRIVE)
        # Bicycle-model curvature from the wheel angle and wheelbase.
        self.carcurvature = math.tan(
            math.radians(self.steer_angle)) / VehicleLength

    def callback_localization(self, data):
        """New localization pose"""
        if self.stgraph:
            return
        carheading = data.pose.heading
        carx = data.pose.position.x
        cary = data.pose.position.y
        cartime = data.header.timestamp_sec
        with self.lock:
            self.ax[0].new_carstatus(cartime, carx, cary, carheading,
                                     self.steer_angle, self.autodrive)
            self.ax[1].new_carstatus(cartime, self.carspeed, self.autodrive)
            self.ax[2].new_carstatus(cartime, self.carcurvature, self.autodrive)
            if self.ax[3].title == "Heading":
                self.ax[3].new_carstatus(cartime, carheading, self.autodrive)
            else:
                # NOTE(review): uses the y component of the vehicle-frame
                # linear acceleration -- confirm the axis convention.
                acc = data.pose.linear_acceleration_vrf.y
                self.ax[3].new_carstatus(cartime, acc, self.autodrive)

    def press(self, event):
        """Keyboard events during plotting"""
        if event.key == 'q' or event.key == 'Q':
            plt.close('all')
            self.closed = True
        if event.key == 'x' or event.key == 'X':
            self.updategraph = True
        if event.key == 'a' or event.key == 'A':
            fig = plt.gcf()
            fig.gca().autoscale()
            fig.canvas.draw()
        if event.key == 'n' or event.key == 'N':
            with self.lock:
                for ax in self.ax:
                    ax.reset()
            self.updategraph = True
def main(argv):
    """Main function"""
    # Let gflags consume its own flags; remaining argv entries are returned.
    argv = FLAGS(argv)
    print """
Keyboard Shortcut:
[q]: Quit Tool
[s]: Save Figure
[a]: Auto-adjust x, y axis to display entire plot
[x]: Update Figure to Display last few Planning Trajectory instead of all
[h][r]: Go back Home, Display all Planning Trajectory
[f]: Toggle Full Screen
[n]: Reset all Plots
[b]: Unsubscribe Topics
Legend Description:
Red Line: Current Planning Trajectory
Blue Line: Past Car Status History
Green Line: Past Planning Target History at every Car Status Frame
Cyan Dashed Line: Past Planning Trajectory Frames
"""
    # Register as a ROS node so we can subscribe to topics.
    rospy.init_node('realtime_plot', anonymous=True)
    fig = plt.figure()
    if not FLAGS.show_st_graph:
        # Standard 2x2 layout: trajectory, speed, curvature, accel/heading.
        ax1 = plt.subplot(2, 2, 1)
        item1 = Xyitem(ax1, WindowSize, VehicleLength, "Trajectory", "X [m]",
                       "Y [m]")
        ax2 = plt.subplot(2, 2, 2)
        item2 = Item(ax2, "Speed", "Time [sec]", "Speed [m/s]", 0, 30)
        ax3 = plt.subplot(2, 2, 3, sharex=ax2)
        item3 = Item(ax3, "Curvature", "Time [sec]", "Curvature [m-1]", -0.2,
                     0.2)
        ax4 = plt.subplot(2, 2, 4, sharex=ax2)
        # The fourth plot shows acceleration or heading, per --show_heading.
        if not FLAGS.show_heading:
            item4 = Item(ax4, "Acceleration", "Time [sec]",
                         "Acceleration [m/sec^2]", -5, 5)
        else:
            item4 = Item(ax4, "Heading", "Time [sec]", "Heading [radian]", -4,
                         4)
    else:
        # ST-graph layout: four independent s-t plots.
        ax1 = plt.subplot(2, 2, 1)
        item1 = Stitem(ax1, "ST Graph", "Time [sec]", "S [m]")
        ax2 = plt.subplot(2, 2, 2)
        item2 = Stitem(ax2, "ST Graph", "Time [sec]", "S [m]")
        ax3 = plt.subplot(2, 2, 3)
        item3 = Stitem(ax3, "ST Graph", "Time [sec]", "S [m]")
        ax4 = plt.subplot(2, 2, 4)
        item4 = Stitem(ax4, "ST Graph", "Time [sec]", "S [m]")
    plt.tight_layout(pad=0.20)
    plt.ion()
    plt.show()
    # Wire keyboard handling and the ROS subscriptions to the plotter.
    plotter = Plotter(item1, item2, item3, item4, FLAGS.show_st_graph)
    fig.canvas.mpl_connect('key_press_event', plotter.press)
    planning_sub = rospy.Subscriber(
        '/apollo/planning',
        ADCTrajectory,
        plotter.callback_planning,
        queue_size=3)
    if not FLAGS.show_st_graph:
        localization_sub = rospy.Subscriber(
            '/apollo/localization/pose',
            LocalizationEstimate,
            plotter.callback_localization,
            queue_size=3)
        chassis_sub = rospy.Subscriber(
            '/apollo/canbus/chassis',
            Chassis,
            plotter.callback_chassis,
            queue_size=3)
    # Redraw at ~10 Hz until ROS shuts down, using blitting for speed:
    # repaint the axes backgrounds, draw the lines under the lock, then
    # blit each axes region and flush GUI events.
    rate = rospy.Rate(10)
    while not rospy.is_shutdown():
        ax1.draw_artist(ax1.patch)
        ax2.draw_artist(ax2.patch)
        ax3.draw_artist(ax3.patch)
        ax4.draw_artist(ax4.patch)
        with plotter.lock:
            item1.draw_lines()
            item2.draw_lines()
            item3.draw_lines()
            item4.draw_lines()
        fig.canvas.blit(ax1.bbox)
        fig.canvas.blit(ax2.bbox)
        fig.canvas.blit(ax3.bbox)
        fig.canvas.blit(ax4.bbox)
        fig.canvas.flush_events()
        rate.sleep()
# Script entry point: parse flags and run the real-time plotter.
if __name__ == '__main__':
    main(sys.argv)
| {
"content_hash": "4d6504a7c815aa8ffca7a4c8dbfbf48e",
"timestamp": "",
"source": "github",
"line_count": 250,
"max_line_length": 81,
"avg_line_length": 32.516,
"alnum_prop": 0.540533891007504,
"repo_name": "fy2462/apollo",
"id": "3ab6846cdb50de2db6f0e75f85778a4be8d213ec",
"size": "8912",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "modules/tools/realtime_plot/realtime_plot.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "3386"
},
{
"name": "C++",
"bytes": "4761775"
},
{
"name": "CMake",
"bytes": "58901"
},
{
"name": "CSS",
"bytes": "20492"
},
{
"name": "HTML",
"bytes": "236"
},
{
"name": "JavaScript",
"bytes": "147786"
},
{
"name": "Python",
"bytes": "640744"
},
{
"name": "Shell",
"bytes": "103381"
},
{
"name": "Smarty",
"bytes": "54938"
}
],
"symlink_target": ""
} |
from celery import Celery
# Shared Celery settings module at the package root.
from .. import celery_config

# Module-level Celery application, named after this module and configured
# entirely from the celery_config module.
celery = Celery(__name__, config_source=celery_config)
| {
"content_hash": "ce878f125ab420a9bd0a201cf3e050e9",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 54,
"avg_line_length": 22.4,
"alnum_prop": 0.75,
"repo_name": "steinitzu/aptfinder",
"id": "a91509d66ac7d5d6362fb23d5f6f7841c4ee76c5",
"size": "112",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aptfinder/background/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "12854"
},
{
"name": "HTML",
"bytes": "11812"
},
{
"name": "JavaScript",
"bytes": "598986"
},
{
"name": "Python",
"bytes": "21418"
}
],
"symlink_target": ""
} |
"""Downloads SVGs into a specified directory."""
from __future__ import print_function
import optparse
import os
import urllib
PARENT_DIR = os.path.dirname(os.path.realpath(__file__))
def download_files(input_file, output_dir, prefix, keep_common_prefix):
    """Download every URL listed in *input_file* into *output_dir*.

    Each downloaded file name gets *prefix* prepended.  When
    *keep_common_prefix* is true, the part of each URL after the common
    prefix of all URLs is preserved as a directory hierarchy under
    *output_dir*; otherwise everything lands directly in *output_dir*.
    """
    with open(input_file, 'r') as handle:
        urls = handle.readlines()
    common_prefix = os.path.commonprefix(urls) if keep_common_prefix else ''
    for raw_line in urls:
        file_url = raw_line.strip()
        if keep_common_prefix:
            # Keep the URL's unique tail as a relative directory path.
            relative = file_url.replace(common_prefix, '')
            dest_dir = os.path.join(output_dir, os.path.dirname(relative))
        else:
            dest_dir = output_dir
        dest_file = os.path.join(dest_dir, prefix + os.path.basename(file_url))
        if not os.path.exists(dest_dir):
            os.makedirs(dest_dir)
        print('Downloading %s to %s' % (file_url, dest_file))
        urllib.urlretrieve(file_url, dest_file)
# Command line entry point: parse options and run the downloader.
if '__main__' == __name__:
    option_parser = optparse.OptionParser()
    option_parser.add_option(
        '-i', '--input_file',
        help='Path to the text file containing URLs. Each line should contain a '
        'single URL.',
        default=os.path.join(PARENT_DIR, 'svgs.txt'))
    option_parser.add_option(
        '-o', '--output_dir',
        help='The output dir where downloaded SVGs and images will be stored in.')
    option_parser.add_option(
        '-p', '--prefix',
        help='The prefix which downloaded files will begin with.',
        default='')
    option_parser.add_option(
        '-k', '--keep_common_prefix',
        help='Preserve everything in the URL after the common prefix as directory '
        'hierarchy.',
        action='store_true', default=False)
    options, unused_args = option_parser.parse_args()
    # --output_dir is the only required option; everything else defaults.
    if not options.output_dir:
        raise Exception('Must specify --output_dir')
    download_files(options.input_file, options.output_dir,
                   options.prefix, options.keep_common_prefix)
| {
"content_hash": "1cb67d0fa6b3b9cc2eda4a857ae5bd9e",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 81,
"avg_line_length": 31.06451612903226,
"alnum_prop": 0.6438213914849429,
"repo_name": "google/skia",
"id": "88c6a7fcbf53a59f0514ab9c0a11c8d57ddb92ac",
"size": "2115",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "tools/svg/svg_downloader.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "1277381"
},
{
"name": "Batchfile",
"bytes": "17474"
},
{
"name": "C",
"bytes": "6724920"
},
{
"name": "C#",
"bytes": "4683"
},
{
"name": "C++",
"bytes": "28759992"
},
{
"name": "CMake",
"bytes": "2850"
},
{
"name": "Cuda",
"bytes": "944096"
},
{
"name": "Dockerfile",
"bytes": "7142"
},
{
"name": "GLSL",
"bytes": "65328"
},
{
"name": "Go",
"bytes": "108521"
},
{
"name": "HTML",
"bytes": "1274414"
},
{
"name": "Java",
"bytes": "165376"
},
{
"name": "JavaScript",
"bytes": "110447"
},
{
"name": "Lex",
"bytes": "2458"
},
{
"name": "Lua",
"bytes": "70982"
},
{
"name": "Makefile",
"bytes": "10499"
},
{
"name": "Objective-C",
"bytes": "55140"
},
{
"name": "Objective-C++",
"bytes": "161861"
},
{
"name": "PHP",
"bytes": "128097"
},
{
"name": "Python",
"bytes": "1028767"
},
{
"name": "Shell",
"bytes": "63875"
}
],
"symlink_target": ""
} |
import sys, socket, time
import asyncoro
def client_recv(conn, coro=None):
    """asyncoro coroutine: print messages received on *conn* until EOF.

    *coro* is supplied by asyncoro when the coroutine is scheduled.
    """
    conn = asyncoro.AsyncSocket(conn)
    while True:
        line = yield conn.recv_msg()
        # recv_msg yields a falsy value when the peer closes the connection.
        if not line:
            break
        print(line.decode())
# Script entry point: connect to the chat server, receive concurrently via
# an asyncoro coroutine, and send lines typed on stdin.
if __name__ == '__main__':
    asyncoro.logger.setLevel(asyncoro.Logger.DEBUG)
    # host name or IP address of server is arg1
    if len(sys.argv) > 1:
        host = sys.argv[1]
    else:
        host = ''
    # port used by server is arg2
    if len(sys.argv) > 2:
        port = int(sys.argv[2])
    else:
        port = 1234
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((host, port))
    # Receiving happens concurrently in the client_recv coroutine.
    asyncoro.Coro(client_recv, sock)
    # wrap it with asyncoro's synchronous socket so 'send_msg' can be
    # used
    conn = asyncoro.AsyncSocket(sock, blocking=True)
    # Python 2/3 portability for reading from stdin.
    if sys.version_info.major > 2:
        read_input = input
    else:
        read_input = raw_input
    # Read lines from the user and send them until 'quit'/'exit' or EOF.
    while True:
        try:
            line = read_input().strip()
            if line.lower() in ('quit', 'exit'):
                break
            if not line:
                continue
        except:
            # EOF (Ctrl-D) or interrupt ends the input loop.
            break
        conn.send_msg(line.encode())
    # Half-close the socket: tells the server we are done sending.
    conn.shutdown(socket.SHUT_WR)
| {
"content_hash": "0b250cbca3552bf7f2d6175e5b87a2df",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 69,
"avg_line_length": 26.170212765957448,
"alnum_prop": 0.5609756097560976,
"repo_name": "pgiri/asyncoro",
"id": "52d5e9faab6f589f4c54fa6a51b1af4f70db6f8c",
"size": "1449",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/chat_sock_client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7772"
},
{
"name": "HTML",
"bytes": "59972"
},
{
"name": "JavaScript",
"bytes": "3644"
},
{
"name": "Python",
"bytes": "835833"
}
],
"symlink_target": ""
} |
"""
Constants specific to the SQL storage portion of the ORM.
"""
from django.utils.regex_helper import _lazy_re_compile
# Size of each "chunk" for get_iterator calls.
# Larger values are slightly faster at the expense of more storage space.
GET_ITERATOR_CHUNK_SIZE = 100

# How many results to expect from a cursor.execute call.
MULTI = 'multi'
SINGLE = 'single'
CURSOR = 'cursor'
NO_RESULTS = 'no results'

# Valid ordering expression: '?' or an optionally signed dotted-name path
# (e.g. '-created' or 'author.name') -- anchored at the end only.
ORDER_PATTERN = _lazy_re_compile(r'\?|[-+]?[.\w]+$')
# Maps a sort direction to (itself, its opposite) so orderings can be inverted.
ORDER_DIR = {
    'ASC': ('ASC', 'DESC'),
    'DESC': ('DESC', 'ASC'),
}

# SQL join types.
INNER = 'INNER JOIN'
LOUTER = 'LEFT OUTER JOIN'
| {
"content_hash": "91fabda20616e2d4c314dbf0afa71b4e",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 73,
"avg_line_length": 23.77777777777778,
"alnum_prop": 0.6744548286604362,
"repo_name": "georgemarshall/django",
"id": "1ff44252c56220d659faaf82e83e5c35b97842b8",
"size": "642",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "django/db/models/sql/constants.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "53023"
},
{
"name": "HTML",
"bytes": "172977"
},
{
"name": "JavaScript",
"bytes": "448123"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "12112373"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
} |
'''Test module to thest the xmllib module.
Sjoerd Mullender
'''
# Well-formed test document exercising the XML prolog, comments,
# processing instructions and an internal DTD subset.
testdoc = """\
<?xml version="1.0" encoding="UTF-8" standalone='yes' ?>
<!-- comments aren't allowed before the <?xml?> tag,
but they are allowed before the <!DOCTYPE> tag -->
<?processing instructions are allowed in the same places as comments ?>
<!DOCTYPE greeting [
<!ELEMENT greeting (#PCDATA)>
]>
<greeting>Hello, world!</greeting>
"""

# Minimal document declaring a default namespace plus one attribute.
nsdoc = "<foo xmlns='URI' attr='val'/>"
from test import test_support
import unittest
# Silence Py3k warning
xmllib = test_support.import_module('xmllib', deprecated=True)
class XMLParserTestCase(unittest.TestCase):
    """Smoke tests for the deprecated xmllib.XMLParser."""

    def test_simple(self):
        """Feeding a well-formed document one character at a time parses."""
        parser = xmllib.XMLParser()
        for c in testdoc:
            parser.feed(c)
        parser.close()

    def test_default_namespace(self):
        """The default namespace applies to elements but not attributes."""
        class H(xmllib.XMLParser):
            def unknown_starttag(self, name, attr):
                self.name, self.attr = name, attr
        h = H()
        h.feed(nsdoc)
        h.close()
        # The default namespace applies to elements...
        # (assertEquals is a deprecated alias; use assertEqual.)
        self.assertEqual(h.name, "URI foo")
        # but not to attributes
        self.assertEqual(h.attr, {'attr': 'val'})
def test_main():
    # Run the test case through the CPython regrtest helper.
    test_support.run_unittest(XMLParserTestCase)

if __name__ == "__main__":
    test_main()
| {
"content_hash": "c4365616938213edfd52cdd0460f10a7",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 71,
"avg_line_length": 27.0625,
"alnum_prop": 0.6297151655119323,
"repo_name": "DecipherOne/Troglodyte",
"id": "68b883a0e50f31403353b35220bdf3493dd7b13b",
"size": "1299",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "Trog Build Dependencies/Python26/Lib/test/test_xmllib.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "586396"
},
{
"name": "C++",
"bytes": "697696"
},
{
"name": "CSS",
"bytes": "837"
},
{
"name": "Python",
"bytes": "14516232"
},
{
"name": "Shell",
"bytes": "127"
},
{
"name": "Visual Basic",
"bytes": "481"
}
],
"symlink_target": ""
} |
"""
Digital Water - your micro water simulator. By Tom Viner
Explanation and see running:
https://www.youtube.com/watch?v=OBTUjoc46Pk
"""
import microbit
# define some constants
# Dimensions of the micro:bit's 5x5 LED matrix.
DISPLAY_WIDTH = 5
DISPLAY_HEIGHT = 5
# LED brightness range (0 = off, 9 = full).
MIN_BRIGHTNESS = 0
MEDIUM_BRIGHTNESS = 4
MAX_BRIGHTNESS = 9
# this is how the accelerometer values 1g of gravity
# (raw accelerometer reading corresponding to one g)
ONE_G = 1024
# Some maths functions to help us
def clamp(minimum, n, maximum):
    """Constrain n to the closed interval [minimum, maximum]."""
    if n < minimum:
        return minimum
    if n > maximum:
        return maximum
    return n
def rescale(src_scale, dest_scale, x):
    """Linearly map x from one number scale onto another.

    Each scale is a (start, end) pair; x's fractional position along
    src_scale is applied to dest_scale.  Handy for converting raw sensor
    readings into LED pixel brightnesses.

    >>> rescale((0, 5), (0, 100), 4)
    80.0
    """
    src_start, src_end = src_scale
    dest_start, dest_end = dest_scale
    # Fractional position of x along the source scale.
    fraction = float(x - src_start) / (src_end - src_start)
    return fraction * (dest_end - dest_start) + dest_start
# Helpers for controling the display
def light(brightness, filter):
    """Set every pixel accepted by *filter* to the given brightness.

    *filter* is called with (column, row) and should return a truthy
    value for pixels that must be lit; brightness is rounded and clamped
    into the displayable 0-9 range first.
    """
    level = clamp(MIN_BRIGHTNESS, round(brightness), MAX_BRIGHTNESS)
    for x in range(DISPLAY_WIDTH):
        for y in range(DISPLAY_HEIGHT):
            if filter(x, y):
                microbit.display.set_pixel(x, y, level)
def fade_display():
    """Dim every pixel by one brightness step (floored at MIN_BRIGHTNESS).

    Repeated calls make previously drawn pixels fade away gradually.
    """
    for x in range(DISPLAY_WIDTH):
        for y in range(DISPLAY_HEIGHT):
            current = microbit.display.get_pixel(x, y)
            dimmed = clamp(MIN_BRIGHTNESS, current - 1, MAX_BRIGHTNESS)
            microbit.display.set_pixel(x, y, dimmed)
def paint_water():
    """Paint the water level implied by the accelerometer orientation."""
    # current orientation forces along each axis
    x_force, y_force, z_force = microbit.accelerometer.get_values()
    # X tilt (button A up vs button B up) becomes a turn factor from -2 to 2
    turn = rescale((-ONE_G, ONE_G), (-2, 2), x_force)
    # Z (face up vs face down) becomes a spill factor from -3 to 3,
    # letting the water cover the whole display when the board lies flat
    spill = rescale((ONE_G, -ONE_G), (-3, 3), z_force)

    def is_water(col, row):
        """Return True when this pixel should be below the water line."""
        if y_force < 0:
            # upside down: mirror the row axis (rows are 0-indexed,
            # hence the - 1)
            row = DISPLAY_HEIGHT - 1 - row
        # rows count down from the top, so light everything below the
        # water line (board held upright). The formula is y = m*x + c,
        # with the "- 2" terms centring the line on the display.
        return row - 2 > -turn * (col - 2) - spill

    # the water "dilutes" (dims) when spread across the whole display
    level = rescale(
        (0, ONE_G),
        (MAX_BRIGHTNESS, MEDIUM_BRIGHTNESS),
        abs(z_force)
    )
    # light every pixel the filter accepts at the computed brightness
    light(level, is_water)
# loop forever painting watery pixels, sleeping and then fading as each pixel
# washes away into the night
while True:
    paint_water()
    # ~10 frames per second
    microbit.sleep(100)
    # fade all pixels by one brightness level
    fade_display()
| {
"content_hash": "177c6a94e1c55f75e189c1868277dbdd",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 80,
"avg_line_length": 32.91596638655462,
"alnum_prop": 0.6520296145008936,
"repo_name": "tomviner/micro-bit-examples",
"id": "75a6879bc820e5aed210b6ce5cca01f82d7b8c2c",
"size": "3917",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "digital_water.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "20868"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings

# Make the Django settings module resolvable before anything below touches
# Django; setdefault leaves an externally supplied value untouched.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dunning_cruncher.settings')

# Celery application for the dunning_cruncher project; AMQP serves as both
# the message broker and the result backend.
app = Celery('dunning_cruncher',
             broker='amqp://',
             backend='amqp://',
             include=['dunning_cruncher.tasks']
             )

# Read CELERY_* settings from the Django settings module.
app.config_from_object('django.conf:settings')
# Look for a tasks module in every installed Django app (lazy so that
# INSTALLED_APPS is only evaluated once settings are configured).
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)

app.conf.update(
    CELERY_TASK_RESULT_EXPIRES=3600,  # discard task results after one hour
)

if __name__ == '__main__':
    app.start()
| {
"content_hash": "de165988f9c510afc993b7e5a342f963",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 76,
"avg_line_length": 22.5,
"alnum_prop": 0.674074074074074,
"repo_name": "MattBlack85/dunning-cruncher",
"id": "3bd40a246978a5f14d84a32e861fe741dfa1b3e0",
"size": "540",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dunning_cruncher/celery.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "6658"
},
{
"name": "Python",
"bytes": "71048"
}
],
"symlink_target": ""
} |
import requests
import sys

# Usage: validate_atlas.py <username> <password> [<query> <expected_count>]
username = sys.argv[1]
password = sys.argv[2]

# Defaults match the sample entities loaded by the init action.
query = 'John'
expected_entities_count = 3
if len(sys.argv) > 3:
    query = sys.argv[3]
    expected_entities_count = int(sys.argv[4])


def _basic_search():
    """Run an authenticated Atlas basic search for ``query``.

    Redirects are not followed, so a standby instance's 302 response can
    be asserted on directly.
    """
    return requests.get(
        'http://localhost:21000/api/atlas/v2/search/basic',
        params={'query': query},
        auth=(username, password),
        allow_redirects=False,
    )


status_response = requests.get('http://localhost:21000/api/atlas/admin/status')
# requests exposes the body as bytes via .content; decode it so the string
# comparisons below work on Python 3 as well as Python 2 (on Python 3,
# bytes == str is always False, which made both branches unreachable).
status = status_response.content.decode('utf-8')

if status == "ACTIVE":
    # the active instance answers searches directly
    response = _basic_search()
    assert response.status_code == 200
    response_dict = response.json()
    entities = response_dict.get('entities', [])
    assert len(entities) == expected_entities_count
elif status == "PASSIVE":
    # a passive (standby) HA instance redirects to the active one
    response = _basic_search()
    assert response.status_code == 302
| {
"content_hash": "5af260c21d92136b5ffc6151b0a74f94",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 79,
"avg_line_length": 29.606060606060606,
"alnum_prop": 0.6519959058341863,
"repo_name": "GoogleCloudDataproc/initialization-actions",
"id": "40499fe1bec3e840b4c3af082d0db5a4703e9b17",
"size": "977",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "atlas/validate_atlas.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "616"
},
{
"name": "Python",
"bytes": "143738"
},
{
"name": "R",
"bytes": "86"
},
{
"name": "Scala",
"bytes": "2116"
},
{
"name": "Shell",
"bytes": "349642"
},
{
"name": "Starlark",
"bytes": "16664"
}
],
"symlink_target": ""
} |
"""UEs Handerler."""
from empower.datatypes.etheraddress import EtherAddress
from empower.restserver.apihandlers import EmpowerAPIHandlerAdminUsers
from empower.main import RUNTIME
import empower.logger
LOG = empower.logger.get_logger()
class UEHandler(EmpowerAPIHandlerAdminUsers):
    """UE handler. Used to view UEs in a VBS (controller-wide)."""

    HANDLERS = [r"/api/v1/vbses/([a-zA-Z0-9:]*)/ues/?",
                r"/api/v1/vbses/([a-zA-Z0-9:]*)/ues/([a-zA-Z0-9]*)/?"]

    def get(self, *args, **kwargs):
        """ Get all UEs or just the specified one.

        Args:
            vbs_id: the vbs identifier
            ue_id: the UE identifier (radio network temporary identifier)

        Example URLs:
            GET /api/v1/vbses/11:22:33:44:55:66/ues
            GET /api/v1/vbses/11:22:33:44:55:66/ues/f93b
        """
        try:
            if len(args) > 2 or len(args) < 1:
                raise ValueError("Invalid URL")
            vbs_id = EtherAddress(args[0])
            if len(args) == 1:
                # no UE id given: return every UE known to this VBS
                self.write_as_json(RUNTIME.vbses[vbs_id].ues.values())
            else:
                # NOTE(review): the example URL uses a hex RNTI ('f93b'),
                # but int() here parses base 10 -- confirm whether
                # int(args[1], 16) was intended.
                ue_id = int(args[1])
                self.write_as_json(RUNTIME.vbses[vbs_id].ues[ue_id])
        except KeyError as ex:
            # unknown VBS or UE
            self.send_error(404, message=ex)
            # send_error() finishes the response; do not touch it afterwards
            return
        except ValueError as ex:
            # malformed URL, address or identifier
            self.send_error(400, message=ex)
            return
        self.set_status(200, None)
| {
"content_hash": "5ec891b4b3eee31e02dd4a60306f3b5e",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 70,
"avg_line_length": 29.72340425531915,
"alnum_prop": 0.5755189692197566,
"repo_name": "LokiNetworks/empower-runtime",
"id": "64cdfc57834aa6dab9ef0725b202c1fda34b51c6",
"size": "2006",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "empower/vbsp/uehandler.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "101256"
},
{
"name": "HTML",
"bytes": "61335"
},
{
"name": "JavaScript",
"bytes": "5861837"
},
{
"name": "Python",
"bytes": "637231"
}
],
"symlink_target": ""
} |
'''
'''
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
Test.Summary = '''
'''

# the cookie_remap plugin must be built and installed for this test to run
Test.SkipUnless(Condition.PluginExists('cookie_remap.so'))
Test.ContinueOnFail = True
Test.testName = "cookie_remap: cookie in bucket or not"

# Define default ATS
ts = Test.MakeATSProcess("ts")

# First server is run during first test and
# second server is run during second test
server = Test.MakeOriginServer("server", ip='127.0.0.10')
request_header = {"headers": "GET /cookiematches HTTP/1.1\r\nHost: www.example.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
# expected response from the origin server
response_header = {"headers": "HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
# add response to the server dictionary
server.addResponse("sessionfile.log", request_header, response_header)

server2 = Test.MakeOriginServer("server2", ip='127.0.0.11')
request_header2 = {"headers": "GET /cookiedoesntmatch HTTP/1.1\r\nHost: www.example.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
# expected response from the origin server
response_header2 = {"headers": "HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
# add response to the server dictionary
server2.addResponse("sessionfile.log", request_header2, response_header2)

# Setup the remap configuration
config_path = os.path.join(Test.TestDirectory, "configs/bucketconfig.txt")
with open(config_path, 'r') as config_file:
    config1 = config_file.read()

ts.Disk.records_config.update({
    'proxy.config.diags.debug.enabled': 1,
    'proxy.config.diags.debug.tags': 'cookie_remap.*|http.*|dns.*',
})

# substitute the live origin-server ports into the plugin config template
config1 = config1.replace("$PORT", str(server.Variables.Port))
config1 = config1.replace("$ALTPORT", str(server2.Variables.Port))
ts.Disk.File(ts.Variables.CONFIGDIR + "/bucketconfig.txt", exists=False, id="config1")
ts.Disk.config1.WriteOn(config1)

ts.Disk.remap_config.AddLine(
    'map http://www.example.com/magic http://shouldnothit.com @plugin=cookie_remap.so @pparam=config/bucketconfig.txt'
)

# Cookie value in bucket
tr = Test.AddTestRun("cookie value in bucket")
tr.Processes.Default.Command = '''
curl \
--proxy 127.0.0.1:{0} \
"http://www.example.com/magic" \
-H"Cookie: fpbeta=333" \
-H "Proxy-Connection: keep-alive" \
--verbose \
'''.format(ts.Variables.port)
tr.Processes.Default.ReturnCode = 0
# time delay as proxy.config.http.wait_for_cache could be broken
tr.Processes.Default.StartBefore(server, ready=When.PortOpen(server.Variables.Port))
tr.Processes.Default.StartBefore(Test.Processes.ts)
tr.StillRunningAfter = ts
server.Streams.All = "gold/matchcookie.gold"

# cookie value not in bucket
tr = Test.AddTestRun("cooke value not in bucket")
tr.Processes.Default.Command = '''
curl \
--proxy 127.0.0.1:{0} \
"http://www.example.com/magic" \
-H"Cookie: fpbeta=etc" \
-H "Proxy-Connection: keep-alive" \
--verbose \
'''.format(ts.Variables.port)
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.StartBefore(server2, ready=When.PortOpen(server2.Variables.Port))
tr.StillRunningAfter = ts
server2.Streams.All = "gold/wontmatchcookie.gold"
| {
"content_hash": "84f1f3a8940ceb0220f4ad2f82f44a1b",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 140,
"avg_line_length": 38.386138613861384,
"alnum_prop": 0.7363941191643023,
"repo_name": "davidbz/trafficserver",
"id": "25254ec49ab67ee77d3263d74039f1ca1fc2b674",
"size": "3877",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/gold_tests/pluginTest/cookie_remap/bucketcookie.test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1332466"
},
{
"name": "C++",
"bytes": "14185272"
},
{
"name": "CMake",
"bytes": "18489"
},
{
"name": "Dockerfile",
"bytes": "3324"
},
{
"name": "Java",
"bytes": "9881"
},
{
"name": "Lua",
"bytes": "46851"
},
{
"name": "M4",
"bytes": "192615"
},
{
"name": "Makefile",
"bytes": "210210"
},
{
"name": "Objective-C",
"bytes": "15182"
},
{
"name": "Perl",
"bytes": "119201"
},
{
"name": "Python",
"bytes": "731661"
},
{
"name": "Shell",
"bytes": "126015"
},
{
"name": "TSQL",
"bytes": "5188"
},
{
"name": "Vim script",
"bytes": "192"
}
],
"symlink_target": ""
} |
# Packaging metadata for pygmi, wmii's Python interaction library.
# NOTE(review): distutils is deprecated (PEP 632) and removed in
# Python 3.12; consider migrating to setuptools -- confirm against the
# project's supported Python versions first.
from distutils.core import setup

setup(name='pygmi',
      version='0.2',
      description='Python wmii interaction library',
      author='Kris Maglione',
      author_email='maglione.k@gmail.com',
      url='http://wmii.suckless.org',
      packages=['pygmi'],
      license='MIT',
      )
| {
"content_hash": "8e6ccb19f235d399b1d88468dba28d43",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 52,
"avg_line_length": 24.5,
"alnum_prop": 0.6156462585034014,
"repo_name": "bwhmather/wmii",
"id": "556d6b60484c53f203ed383167264d6042e4a4ca",
"size": "317",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "alternative_wmiircs/python/pygmi.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "625296"
},
{
"name": "C++",
"bytes": "562"
},
{
"name": "Groff",
"bytes": "37598"
},
{
"name": "Limbo",
"bytes": "2797"
},
{
"name": "Makefile",
"bytes": "21310"
},
{
"name": "Objective-C",
"bytes": "112"
},
{
"name": "PostScript",
"bytes": "740"
},
{
"name": "Python",
"bytes": "91498"
},
{
"name": "Ruby",
"bytes": "15205"
},
{
"name": "Shell",
"bytes": "44363"
}
],
"symlink_target": ""
} |
import hmac
import random
from base64 import b64encode, b64decode
from ..util import hash, bytes, XOR
from ..sasl import Mechanism, register_mechanism
from ..exceptions import SASLCancelled
def parse_challenge(challenge):
    """Parse a comma-separated SCRAM challenge of key=value pairs.

    Returns a dict mapping each key (bytes) to its value (bytes). Values
    may themselves contain '=' characters, so only the first '=' splits.
    """
    parsed = {}
    for chunk in challenge.split(b','):
        key, value = chunk.split(b'=', 1)
        parsed[key] = value
    return parsed
class SCRAM_HMAC(Mechanism):
    """SASL SCRAM-* client mechanism (RFC 5802) built on HMAC.

    Handles both the plain mechanisms (e.g. SCRAM-SHA-1) and the
    channel-binding variants carrying a '-PLUS' suffix.
    """

    def __init__(self, sasl, name):
        """Select the hash named by the mechanism and vet security policy.

        Raises SASLCancelled when the hash is unknown, or when the
        security policy refuses SCRAM without encryption.
        """
        super(SCRAM_HMAC, self).__init__(sasl, name, 0)
        self._cb = False
        if name[-5:] == '-PLUS':
            # channel-binding variant; strip the suffix to find the hash
            name = name[:-5]
            self._cb = True
        # name[6:] strips the 'SCRAM-' prefix; hash() is the package's own
        # hash-factory helper from ..util, not the builtin
        self.hash = hash(name[6:])
        if self.hash is None:
            raise SASLCancelled(self.sasl, self)
        if not self.sasl.tls_active():
            if not self.sasl.sec_query(self, '-ENCRYPTION, SCRAM'):
                raise SASLCancelled(self.sasl, self)
        self._step = 0
        self._rspauth = False

    def HMAC(self, key, msg):
        """Return the raw HMAC digest of msg under key using self.hash."""
        return hmac.HMAC(key=key, msg=msg, digestmod=self.hash).digest()

    def Hi(self, text, salt, iterations):
        """Iterated, salted hash Hi() from RFC 5802 (PBKDF2-like)."""
        text = bytes(text)
        # U1 := HMAC(text, salt + INT(1))
        ui_1 = self.HMAC(text, salt + b'\0\0\0\01')
        ui = ui_1
        # Hi is the XOR of U1 .. U_iterations
        for i in range(iterations - 1):
            ui_1 = self.HMAC(text, ui_1)
            ui = XOR(ui, ui_1)
        return ui

    def H(self, text):
        """Return the plain digest of text under the selected hash."""
        return self.hash(text).digest()

    def prep(self):
        # scrub the plaintext password once it is no longer needed
        if 'password' in self.values:
            del self.values['password']

    def process(self, challenge=None):
        """Dispatch the challenge to the handler for the current round."""
        steps = {
            0: self.process_one,
            1: self.process_two,
            2: self.process_three
        }
        return steps[self._step](challenge)

    def process_one(self, challenge):
        """Build and return the SCRAM client-first-message."""
        vitals = ['username']
        if 'SaltedPassword' not in self.values:
            vitals.append('password')
        if 'Iterations' not in self.values:
            vitals.append('password')
        self.check_values(vitals)
        username = bytes(self.values['username'])
        self._step = 1
        # NOTE(review): random.random() is not a CSPRNG, so this client
        # nonce is predictable -- consider a secure source; confirm
        # upstream intent.
        self._cnonce = bytes(('%s' % random.random())[2:])
        # start of the auth message used later for the client signature
        self._soup = b'n=' + username + b',r=' + self._cnonce
        self._gs2header = b''
        # NOTE(review): 'p=tls-unique' (channel binding in use) is emitted
        # when TLS is NOT active, which looks inverted relative to RFC 5802
        # gs2 semantics -- confirm against the server behaviour.
        if not self.sasl.tls_active():
            if self._cb:
                self._gs2header = b'p=tls-unique,,'
            else:
                self._gs2header = b'y,,'
        else:
            self._gs2header = b'n,,'
        return self._gs2header + self._soup

    def process_two(self, challenge):
        """Consume server-first-message; return client-final-message."""
        data = parse_challenge(challenge)
        self._step = 2
        self._soup += b',' + challenge + b','
        self._nonce = data[b'r']
        self._salt = b64decode(data[b's'])
        self._iter = int(data[b'i'])
        # the server nonce must start with our client nonce
        if self._nonce[:len(self._cnonce)] != self._cnonce:
            raise SASLCancelled(self.sasl, self)
        cbdata = self.sasl.tls_active()
        c = self._gs2header
        if not cbdata and self._cb:
            # NOTE(review): c += None raises TypeError here; presumably the
            # channel-binding data should be appended instead -- confirm
            # upstream before relying on the -PLUS variants.
            c += None
        r = b'c=' + b64encode(c).replace(b'\n', b'')
        r += b',r=' + self._nonce
        self._soup += r
        # invalidate any cached SaltedPassword if salt/iterations changed
        if 'Iterations' in self.values:
            if self.values['Iterations'] != self._iter:
                if 'SaltedPassword' in self.values:
                    del self.values['SaltedPassword']
        if 'Salt' in self.values:
            if self.values['Salt'] != self._salt:
                if 'SaltedPassword' in self.values:
                    del self.values['SaltedPassword']
        self.values['Iterations'] = self._iter
        self.values['Salt'] = self._salt
        if 'SaltedPassword' not in self.values:
            # derive and cache the salted password from the plaintext one
            self.check_values(['password'])
            password = bytes(self.values['password'])
            salted_pass = self.Hi(password, self._salt, self._iter)
            self.values['SaltedPassword'] = salted_pass
        salted_pass = self.values['SaltedPassword']
        # ClientProof := ClientKey XOR HMAC(H(ClientKey), AuthMessage)
        client_key = self.HMAC(salted_pass, b'Client Key')
        stored_key = self.H(client_key)
        client_sig = self.HMAC(stored_key, self._soup)
        client_proof = XOR(client_key, client_sig)
        r += b',p=' + b64encode(client_proof).replace(b'\n', b'')
        # precompute the expected server signature for process_three
        server_key = self.HMAC(self.values['SaltedPassword'], b'Server Key')
        self.server_sig = self.HMAC(server_key, self._soup)
        return r

    def process_three(self, challenge=None):
        """Verify the server-final-message's server signature."""
        data = parse_challenge(challenge)
        if b64decode(data[b'v']) == self.server_sig:
            self._rspauth = True

    def okay(self):
        """Return True once the server has authenticated itself to us."""
        return self._rspauth

    def get_user(self):
        # authorization identity is simply the configured username
        return self.values['username']
# Register at two priorities so the channel-binding '-PLUS' variants (70)
# are preferred over the plain SCRAM mechanisms (60).
register_mechanism('SCRAM-', 60, SCRAM_HMAC)
register_mechanism('SCRAM-', 70, SCRAM_HMAC, extra='-PLUS')
| {
"content_hash": "7982f3d5bac4309f0d9e13909a14eeb8",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 78,
"avg_line_length": 28.222857142857144,
"alnum_prop": 0.5241951812107715,
"repo_name": "nicfit/vexmpp",
"id": "991c4ce5c3b62331973c3431601fe41a2210922a",
"size": "4939",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vexmpp/suelta/mechanisms/scram_hmac.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "7924"
},
{
"name": "Python",
"bytes": "502492"
}
],
"symlink_target": ""
} |
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
from thrift.transport import TTransport
all_structs = []
# NOTE: Apache Thrift compiler-generated code -- regenerate from the IDL
# rather than editing by hand.
class PropValueUnion(object):
    """
    Attributes:
     - intValue
     - longValue
     - stringValue
     - doubleValue
     - flag
     - lString
     - unionMStringString
    """

    def __init__(self, intValue=None, longValue=None, stringValue=None, doubleValue=None, flag=None, lString=None, unionMStringString=None,):
        self.intValue = intValue
        self.longValue = longValue
        self.stringValue = stringValue
        self.doubleValue = doubleValue
        self.flag = flag
        self.lString = lString
        self.unionMStringString = unionMStringString

    def read(self, iprot):
        # use the C fast-path decoder when the protocol/transport support it
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.intValue = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I64:
                    self.longValue = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.stringValue = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.DOUBLE:
                    self.doubleValue = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.BOOL:
                    self.flag = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.LIST:
                    self.lString = []
                    (_etype3, _size0) = iprot.readListBegin()
                    for _i4 in range(_size0):
                        _elem5 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        self.lString.append(_elem5)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                if ftype == TType.MAP:
                    self.unionMStringString = {}
                    (_ktype7, _vtype8, _size6) = iprot.readMapBegin()
                    for _i10 in range(_size6):
                        _key11 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        _val12 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        self.unionMStringString[_key11] = _val12
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # unknown field id: skip for forward compatibility
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # use the C fast-path encoder when available
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('PropValueUnion')
        if self.intValue is not None:
            oprot.writeFieldBegin('intValue', TType.I32, 1)
            oprot.writeI32(self.intValue)
            oprot.writeFieldEnd()
        if self.longValue is not None:
            oprot.writeFieldBegin('longValue', TType.I64, 2)
            oprot.writeI64(self.longValue)
            oprot.writeFieldEnd()
        if self.stringValue is not None:
            oprot.writeFieldBegin('stringValue', TType.STRING, 3)
            oprot.writeString(self.stringValue.encode('utf-8') if sys.version_info[0] == 2 else self.stringValue)
            oprot.writeFieldEnd()
        if self.doubleValue is not None:
            oprot.writeFieldBegin('doubleValue', TType.DOUBLE, 4)
            oprot.writeDouble(self.doubleValue)
            oprot.writeFieldEnd()
        if self.flag is not None:
            oprot.writeFieldBegin('flag', TType.BOOL, 5)
            oprot.writeBool(self.flag)
            oprot.writeFieldEnd()
        if self.lString is not None:
            oprot.writeFieldBegin('lString', TType.LIST, 6)
            oprot.writeListBegin(TType.STRING, len(self.lString))
            for iter13 in self.lString:
                oprot.writeString(iter13.encode('utf-8') if sys.version_info[0] == 2 else iter13)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.unionMStringString is not None:
            oprot.writeFieldBegin('unionMStringString', TType.MAP, 7)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.unionMStringString))
            for kiter14, viter15 in self.unionMStringString.items():
                oprot.writeString(kiter14.encode('utf-8') if sys.version_info[0] == 2 else kiter14)
                oprot.writeString(viter15.encode('utf-8') if sys.version_info[0] == 2 else viter15)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# NOTE: Apache Thrift compiler-generated code -- regenerate from the IDL
# rather than editing by hand.
class IntString(object):
    """
    Attributes:
     - myint
     - myString
     - underscore_int
    """

    def __init__(self, myint=None, myString=None, underscore_int=None,):
        self.myint = myint
        self.myString = myString
        self.underscore_int = underscore_int

    def read(self, iprot):
        # use the C fast-path decoder when the protocol/transport support it
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.myint = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.myString = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.underscore_int = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                # unknown field id: skip for forward compatibility
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # use the C fast-path encoder when available
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('IntString')
        if self.myint is not None:
            oprot.writeFieldBegin('myint', TType.I32, 1)
            oprot.writeI32(self.myint)
            oprot.writeFieldEnd()
        if self.myString is not None:
            oprot.writeFieldBegin('myString', TType.STRING, 2)
            oprot.writeString(self.myString.encode('utf-8') if sys.version_info[0] == 2 else self.myString)
            oprot.writeFieldEnd()
        if self.underscore_int is not None:
            oprot.writeFieldBegin('underscore_int', TType.I32, 3)
            oprot.writeI32(self.underscore_int)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# NOTE: Apache Thrift compiler-generated code -- regenerate from the IDL
# rather than editing by hand.
class Complex(object):
    """
    Attributes:
     - aint
     - aString
     - lint
     - lString
     - lintString
     - mStringString
     - attributes
     - unionField1
     - unionField2
     - unionField3
    """

    def __init__(self, aint=None, aString=None, lint=None, lString=None, lintString=None, mStringString=None, attributes=None, unionField1=None, unionField2=None, unionField3=None,):
        self.aint = aint
        self.aString = aString
        self.lint = lint
        self.lString = lString
        self.lintString = lintString
        self.mStringString = mStringString
        self.attributes = attributes
        self.unionField1 = unionField1
        self.unionField2 = unionField2
        self.unionField3 = unionField3

    def read(self, iprot):
        # use the C fast-path decoder when the protocol/transport support it
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.aint = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.aString = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.LIST:
                    self.lint = []
                    (_etype19, _size16) = iprot.readListBegin()
                    for _i20 in range(_size16):
                        _elem21 = iprot.readI32()
                        self.lint.append(_elem21)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.LIST:
                    self.lString = []
                    (_etype25, _size22) = iprot.readListBegin()
                    for _i26 in range(_size22):
                        _elem27 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        self.lString.append(_elem27)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.LIST:
                    self.lintString = []
                    (_etype31, _size28) = iprot.readListBegin()
                    for _i32 in range(_size28):
                        _elem33 = IntString()
                        _elem33.read(iprot)
                        self.lintString.append(_elem33)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.MAP:
                    self.mStringString = {}
                    (_ktype35, _vtype36, _size34) = iprot.readMapBegin()
                    for _i38 in range(_size34):
                        _key39 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        _val40 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        self.mStringString[_key39] = _val40
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                # map<string, map<string, map<string, PropValueUnion>>>
                if ftype == TType.MAP:
                    self.attributes = {}
                    (_ktype42, _vtype43, _size41) = iprot.readMapBegin()
                    for _i45 in range(_size41):
                        _key46 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                        _val47 = {}
                        (_ktype49, _vtype50, _size48) = iprot.readMapBegin()
                        for _i52 in range(_size48):
                            _key53 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                            _val54 = {}
                            (_ktype56, _vtype57, _size55) = iprot.readMapBegin()
                            for _i59 in range(_size55):
                                _key60 = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                                _val61 = PropValueUnion()
                                _val61.read(iprot)
                                _val54[_key60] = _val61
                            iprot.readMapEnd()
                            _val47[_key53] = _val54
                        iprot.readMapEnd()
                        self.attributes[_key46] = _val47
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 8:
                if ftype == TType.STRUCT:
                    self.unionField1 = PropValueUnion()
                    self.unionField1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 9:
                if ftype == TType.STRUCT:
                    self.unionField2 = PropValueUnion()
                    self.unionField2.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 10:
                if ftype == TType.STRUCT:
                    self.unionField3 = PropValueUnion()
                    self.unionField3.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # unknown field id: skip for forward compatibility
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # use the C fast-path encoder when available
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('Complex')
        if self.aint is not None:
            oprot.writeFieldBegin('aint', TType.I32, 1)
            oprot.writeI32(self.aint)
            oprot.writeFieldEnd()
        if self.aString is not None:
            oprot.writeFieldBegin('aString', TType.STRING, 2)
            oprot.writeString(self.aString.encode('utf-8') if sys.version_info[0] == 2 else self.aString)
            oprot.writeFieldEnd()
        if self.lint is not None:
            oprot.writeFieldBegin('lint', TType.LIST, 3)
            oprot.writeListBegin(TType.I32, len(self.lint))
            for iter62 in self.lint:
                oprot.writeI32(iter62)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.lString is not None:
            oprot.writeFieldBegin('lString', TType.LIST, 4)
            oprot.writeListBegin(TType.STRING, len(self.lString))
            for iter63 in self.lString:
                oprot.writeString(iter63.encode('utf-8') if sys.version_info[0] == 2 else iter63)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.lintString is not None:
            oprot.writeFieldBegin('lintString', TType.LIST, 5)
            oprot.writeListBegin(TType.STRUCT, len(self.lintString))
            for iter64 in self.lintString:
                iter64.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.mStringString is not None:
            oprot.writeFieldBegin('mStringString', TType.MAP, 6)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.mStringString))
            for kiter65, viter66 in self.mStringString.items():
                oprot.writeString(kiter65.encode('utf-8') if sys.version_info[0] == 2 else kiter65)
                oprot.writeString(viter66.encode('utf-8') if sys.version_info[0] == 2 else viter66)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.attributes is not None:
            # triply-nested map mirroring the read() structure above
            oprot.writeFieldBegin('attributes', TType.MAP, 7)
            oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.attributes))
            for kiter67, viter68 in self.attributes.items():
                oprot.writeString(kiter67.encode('utf-8') if sys.version_info[0] == 2 else kiter67)
                oprot.writeMapBegin(TType.STRING, TType.MAP, len(viter68))
                for kiter69, viter70 in viter68.items():
                    oprot.writeString(kiter69.encode('utf-8') if sys.version_info[0] == 2 else kiter69)
                    oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(viter70))
                    for kiter71, viter72 in viter70.items():
                        oprot.writeString(kiter71.encode('utf-8') if sys.version_info[0] == 2 else kiter71)
                        viter72.write(oprot)
                    oprot.writeMapEnd()
                oprot.writeMapEnd()
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.unionField1 is not None:
            oprot.writeFieldBegin('unionField1', TType.STRUCT, 8)
            self.unionField1.write(oprot)
            oprot.writeFieldEnd()
        if self.unionField2 is not None:
            oprot.writeFieldBegin('unionField2', TType.STRUCT, 9)
            self.unionField2.write(oprot)
            oprot.writeFieldEnd()
        if self.unionField3 is not None:
            oprot.writeFieldBegin('unionField3', TType.STRUCT, 10)
            self.unionField3.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# NOTE: Apache Thrift compiler-generated code -- regenerate from the IDL
# rather than editing by hand.
class SetIntString(object):
    """
    Attributes:
     - sIntString
     - aString
    """

    def __init__(self, sIntString=None, aString=None,):
        self.sIntString = sIntString
        self.aString = aString

    def read(self, iprot):
        # use the C fast-path decoder when the protocol/transport support it
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.SET:
                    self.sIntString = set()
                    (_etype76, _size73) = iprot.readSetBegin()
                    for _i77 in range(_size73):
                        _elem78 = IntString()
                        _elem78.read(iprot)
                        self.sIntString.add(_elem78)
                    iprot.readSetEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.aString = iprot.readString().decode('utf-8', errors='replace') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # unknown field id: skip for forward compatibility
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # use the C fast-path encoder when available
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SetIntString')
        if self.sIntString is not None:
            oprot.writeFieldBegin('sIntString', TType.SET, 1)
            oprot.writeSetBegin(TType.STRUCT, len(self.sIntString))
            for iter79 in self.sIntString:
                iter79.write(oprot)
            oprot.writeSetEnd()
            oprot.writeFieldEnd()
        if self.aString is not None:
            oprot.writeFieldBegin('aString', TType.STRING, 2)
            oprot.writeString(self.aString.encode('utf-8') if sys.version_info[0] == 2 else self.aString)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# Attach the generated thrift_spec tables and register each struct so that
# fix_spec() can resolve forward references between them.
#
# Each non-None spec entry appears to be a tuple of
#   (field id, wire type, field name, container/struct type args, default)
# with None placeholders so the tuple can be indexed directly by field id
# -- TODO confirm against the Thrift Python generator documentation.
all_structs.append(PropValueUnion)
PropValueUnion.thrift_spec = (
    None,  # 0
    (1, TType.I32, 'intValue', None, None, ),  # 1
    (2, TType.I64, 'longValue', None, None, ),  # 2
    (3, TType.STRING, 'stringValue', 'UTF8', None, ),  # 3
    (4, TType.DOUBLE, 'doubleValue', None, None, ),  # 4
    (5, TType.BOOL, 'flag', None, None, ),  # 5
    (6, TType.LIST, 'lString', (TType.STRING, 'UTF8', False), None, ),  # 6
    (7, TType.MAP, 'unionMStringString', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 7
)
all_structs.append(IntString)
IntString.thrift_spec = (
    None,  # 0
    (1, TType.I32, 'myint', None, None, ),  # 1
    (2, TType.STRING, 'myString', 'UTF8', None, ),  # 2
    (3, TType.I32, 'underscore_int', None, None, ),  # 3
)
all_structs.append(Complex)
Complex.thrift_spec = (
    None,  # 0
    (1, TType.I32, 'aint', None, None, ),  # 1
    (2, TType.STRING, 'aString', 'UTF8', None, ),  # 2
    (3, TType.LIST, 'lint', (TType.I32, None, False), None, ),  # 3
    (4, TType.LIST, 'lString', (TType.STRING, 'UTF8', False), None, ),  # 4
    (5, TType.LIST, 'lintString', (TType.STRUCT, [IntString, None], False), None, ),  # 5
    (6, TType.MAP, 'mStringString', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ),  # 6
    # Deeply nested map: string -> map(string -> map(string -> PropValueUnion)).
    (7, TType.MAP, 'attributes', (TType.STRING, 'UTF8', TType.MAP, (TType.STRING, 'UTF8', TType.MAP, (TType.STRING, 'UTF8', TType.STRUCT, [PropValueUnion, None], False), False), False), None, ),  # 7
    (8, TType.STRUCT, 'unionField1', [PropValueUnion, None], None, ),  # 8
    (9, TType.STRUCT, 'unionField2', [PropValueUnion, None], None, ),  # 9
    (10, TType.STRUCT, 'unionField3', [PropValueUnion, None], None, ),  # 10
)
all_structs.append(SetIntString)
SetIntString.thrift_spec = (
    None,  # 0
    (1, TType.SET, 'sIntString', (TType.STRUCT, [IntString, None], False), None, ),  # 1
    (2, TType.STRING, 'aString', 'UTF8', None, ),  # 2
)
fix_spec(all_structs)
# The registration list is only needed during spec fix-up; drop it so it is
# not part of the module's public surface.
del all_structs
| {
"content_hash": "93adc0304efcf9f65ddf219aeaa1c0dc",
"timestamp": "",
"source": "github",
"line_count": 570,
"max_line_length": 199,
"avg_line_length": 41.80877192982456,
"alnum_prop": 0.5328353824849985,
"repo_name": "jcamachor/hive",
"id": "e8c169c01fcb48339ecff8065758e3213f18eceb",
"size": "23973",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "serde/src/gen/thrift/gen-py/complex/ttypes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "54376"
},
{
"name": "Batchfile",
"bytes": "845"
},
{
"name": "C",
"bytes": "28218"
},
{
"name": "C++",
"bytes": "45308"
},
{
"name": "CSS",
"bytes": "5157"
},
{
"name": "GAP",
"bytes": "179697"
},
{
"name": "HTML",
"bytes": "58711"
},
{
"name": "HiveQL",
"bytes": "7606577"
},
{
"name": "Java",
"bytes": "53149057"
},
{
"name": "JavaScript",
"bytes": "43855"
},
{
"name": "M4",
"bytes": "2276"
},
{
"name": "PHP",
"bytes": "148097"
},
{
"name": "PLSQL",
"bytes": "5261"
},
{
"name": "PLpgSQL",
"bytes": "302587"
},
{
"name": "Perl",
"bytes": "319842"
},
{
"name": "PigLatin",
"bytes": "12333"
},
{
"name": "Python",
"bytes": "408662"
},
{
"name": "Roff",
"bytes": "5379"
},
{
"name": "SQLPL",
"bytes": "409"
},
{
"name": "Shell",
"bytes": "299497"
},
{
"name": "TSQL",
"bytes": "2560286"
},
{
"name": "Thrift",
"bytes": "144733"
},
{
"name": "XSLT",
"bytes": "20199"
},
{
"name": "q",
"bytes": "320552"
}
],
"symlink_target": ""
} |
"""Training helper that checkpoints models and creates session."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import threading
import time
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.platform import gfile
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import saver as saver_mod
class SessionManager(object):
  """Training helper that restores from checkpoint and creates session.

  This class is a small wrapper that takes care of session creation and
  checkpoint recovery. It also provides functions that to facilitate
  coordination among multiple training threads or processes.

  * Checkpointing trained variables as the training progresses.
  * Initializing variables on startup, restoring them from the most recent
    checkpoint after a crash, or wait for checkpoints to become available.

  ### Usage:

  ```python
  with tf.Graph().as_default():
    ...add operations to the graph...
    # Create a SessionManager that will checkpoint the model in '/tmp/mydir'.
    sm = SessionManager()
    sess = sm.prepare_session(master, init_op, saver, checkpoint_dir)
    # Use the session to train the graph.
    while True:
      sess.run(<my_train_op>)
  ```

  `prepare_session()` initializes or restores a model. It requires `init_op`
  and `saver` as an argument.

  A second process could wait for the model to be ready by doing the following:

  ```python
  with tf.Graph().as_default():
    ...add operations to the graph...
    # Create a SessionManager that will wait for the model to become ready.
    sm = SessionManager()
    sess = sm.wait_for_session(master)
    # Use the session to train the graph.
    while True:
      sess.run(<my_train_op>)
  ```

  `wait_for_session()` waits for a model to be initialized by other processes.
  """

  # Protects _TENSORFLOW_LAUNCHED
  _launch_lock = threading.Lock()

  # True if we have already launched the tensorflow in-process server.
  # NOTE(review): neither attribute is referenced anywhere in this class --
  # they look like leftovers from an earlier in-process-server feature.
  _TENSORFLOW_LAUNCHED = False

  def __init__(self, local_init_op=None, ready_op=None,
               graph=None, recovery_wait_secs=30):
    """Creates a SessionManager.

    The `local_init_op` is an `Operation` that is run always after a new session
    was created. If `None`, this step is skipped.

    The `ready_op` is an `Operation` used to check if the model is ready. The
    model is considered ready if that operation returns an empty string tensor.
    If the operation returns non empty string tensor, the elements are
    concatenated and used to indicate to the user why the model is not ready.
    If `ready_op` is `None`, the model is not checked for readiness.

    `recovery_wait_secs` is the number of seconds between checks that
    the model is ready. It is used by processes to wait for a model to
    be initialized or restored. Defaults to 30 seconds.

    Args:
      local_init_op: An `Operation` run immediately after session creation.
        Usually used to initialize tables and local variables.
      ready_op: An `Operation` to check if the model is initialized.
      graph: The `Graph` that the model will use.
      recovery_wait_secs: Seconds between checks for the model to be ready.
    """
    # Sets default values of arguments.
    if graph is None:
      graph = ops.get_default_graph()
    self._local_init_op = local_init_op
    self._ready_op = ready_op
    self._graph = graph
    self._recovery_wait_secs = recovery_wait_secs
    # Master target string; set lazily by prepare/recover/wait_for_session.
    self._target = None

  def prepare_session(self, master, init_op=None, saver=None,
                      checkpoint_dir=None, wait_for_checkpoint=False,
                      max_wait_secs=7200, config=None, init_feed_dict=None,
                      init_fn=None):
    """Creates a `Session`. Makes sure the model is ready to be used.

    Creates a `Session` on 'master'. If a `saver` object is passed in, and
    `checkpoint_dir` points to a directory containing valid checkpoint
    files, then it will try to recover the model from checkpoint. If
    no checkpoint files are available, and `wait_for_checkpoint` is
    `True`, then the process would check every `recovery_wait_secs`,
    up to `max_wait_secs`, for recovery to succeed.

    If the model cannot be recovered successfully then it is initialized by
    either running the provided `init_op`, or calling the provided `init_fn`.
    It is an error if the model cannot be recovered and neither an `init_op`
    or an `init_fn` are passed.

    This is a convenient function for the following, with a few error checks
    added:

    ```python
    sess, initialized = self.recover_session(master)
    if not initialized:
      if init_op:
        sess.run(init_op, feed_dict=init_feed_dict)
      if init_fn:
        init_fn(sess)
    return sess
    ```

    Args:
      master: `String` representation of the TensorFlow master to use.
      init_op: Optional `Operation` used to initialize the model.
      saver: A `Saver` object used to restore a model.
      checkpoint_dir: Path to the checkpoint files.
      wait_for_checkpoint: Whether to wait for checkpoint to become available.
      max_wait_secs: Maximum time to wait for checkpoints to become available.
      config: Optional `ConfigProto` proto used to configure the session.
      init_feed_dict: Optional dictionary that maps `Tensor` objects to feed
        values. This feed dictionary is passed to the session `run()` call when
        running the init op.
      init_fn: Optional callable used to initialize the model. Called after the
        optional `init_op` is called. The callable must accept one argument,
        the session being initialized.

    Returns:
      A `Session` object that can be used to drive the model.

    Raises:
      RuntimeError: If the model cannot be initialized or recovered.
    """
    sess, initialized = self.recover_session(
        master, saver, checkpoint_dir=checkpoint_dir,
        wait_for_checkpoint=wait_for_checkpoint,
        max_wait_secs=max_wait_secs, config=config)
    if not initialized:
      if not init_op and not init_fn:
        raise RuntimeError("Model is not initialized and no init_op or "
                           "init_fn was given")
      if init_op:
        sess.run(init_op, feed_dict=init_feed_dict)
      if init_fn:
        init_fn(sess)
      # Verify that the init path actually produced a ready model.
      # NOTE(review): if only init_fn was given, `init_op.name` below would
      # raise AttributeError on None -- pre-existing issue, left as-is.
      not_ready = self._model_not_ready(sess)
      if not_ready:
        raise RuntimeError("Init operations did not make model ready. "
                           "Init op: %s, init fn: %s, error: %s"
                           % (init_op.name, init_fn, not_ready))
    return sess

  def recover_session(self, master, saver=None, checkpoint_dir=None,
                      wait_for_checkpoint=False, max_wait_secs=7200,
                      config=None):
    """Creates a `Session`, recovering if possible.

    Creates a new session on 'master'. If the session is not initialized
    and can be recovered from a checkpoint, recover it.

    Args:
      master: `String` representation of the TensorFlow master to use.
      saver: A `Saver` object used to restore a model.
      checkpoint_dir: Path to the checkpoint files.
      wait_for_checkpoint: Whether to wait for checkpoint to become available.
      max_wait_secs: Maximum time to wait for checkpoints to become available.
      config: Optional `ConfigProto` proto used to configure the session.

    Returns:
      A pair (sess, initialized) where 'initialized' is `True` if
      the session could be recovered, `False` otherwise.
    """
    self._target = master
    sess = session.Session(self._target, graph=self._graph, config=config)
    if self._local_init_op:
      sess.run([self._local_init_op])
    # If either saver or checkpoint_dir is not specified, cannot restore. Just
    # return.
    if not saver or not checkpoint_dir:
      not_ready = self._model_not_ready(sess)
      return sess, not_ready is None
    # Waits up until max_wait_secs for checkpoint to become available.
    wait_time = 0
    ckpt = saver_mod.get_checkpoint_state(checkpoint_dir)
    while not ckpt or not ckpt.model_checkpoint_path:
      if wait_for_checkpoint and wait_time < max_wait_secs:
        logging.info("Waiting for checkpoint to be available.")
        time.sleep(self._recovery_wait_secs)
        wait_time += self._recovery_wait_secs
        ckpt = saver_mod.get_checkpoint_state(checkpoint_dir)
      else:
        # No checkpoint found (or we ran out of patience): hand back the raw,
        # unrestored session.
        return sess, False
    # Loads the checkpoint and verifies that it makes the model ready.
    saver.restore(sess, ckpt.model_checkpoint_path)
    # Rebuild the saver's notion of recent checkpoints from the files that
    # actually exist on disk, with their modification times.
    last_checkpoints = []
    for fname in ckpt.all_model_checkpoint_paths:
      fnames = gfile.Glob(fname)
      if fnames:
        mtime = gfile.Stat(fnames[0]).mtime
        last_checkpoints.append((fname, mtime))
    saver.set_last_checkpoints_with_time(last_checkpoints)
    not_ready = self._model_not_ready(sess)
    if not_ready:
      logging.info("Restoring model from %s did not make model ready: %s",
                   ckpt.model_checkpoint_path, not_ready)
      return sess, False
    else:
      logging.info("Restored model from %s", ckpt.model_checkpoint_path)
      return sess, True

  def wait_for_session(self, master, config=None, max_wait_secs=float("Inf")):
    """Creates a new `Session` and waits for model to be ready.

    Creates a new `Session` on 'master'. Waits for the model to be
    initialized or recovered from a checkpoint. It's expected that
    another thread or process will make the model ready, and that this
    is intended to be used by threads/processes that participate in a
    distributed training configuration where a different thread/process
    is responsible for initializing or recovering the model being trained.

    NB: The amount of time this method waits for the session is bounded
    by max_wait_secs. By default, this function will wait indefinitely.

    Args:
      master: `String` representation of the TensorFlow master to use.
      config: Optional ConfigProto proto used to configure the session.
      max_wait_secs: Maximum time to wait for the session to become available.

    Returns:
      A `Session`. May be None if the operation exceeds the timeout
      specified by config.operation_timeout_in_ms.

    Raises:
      tf.DeadlineExceededError: if the session is not available after
        max_wait_secs.
    """
    self._target = master
    if max_wait_secs is None:
      max_wait_secs = float("Inf")
    timer = _CountDownTimer(max_wait_secs)
    while True:
      sess = session.Session(self._target, graph=self._graph, config=config)
      if self._local_init_op:
        sess.run([self._local_init_op])
      not_ready = self._model_not_ready(sess)
      if not not_ready:
        return sess
      # Not ready yet: discard this session before sleeping and retrying.
      self._safe_close(sess)
      # Do we have enough time left to try again?
      # NOTE(review): despite the "_ms_" in the name, this value is in
      # seconds (both operands are second-valued).
      remaining_ms_after_wait = (
          timer.secs_remaining() - self._recovery_wait_secs)
      if remaining_ms_after_wait < 0:
        raise errors.DeadlineExceededError(
            None, None,
            "Session was not ready after waiting %d secs." % (max_wait_secs,))
      logging.info("Waiting for model to be ready: %s", not_ready)
      time.sleep(self._recovery_wait_secs)

  def _safe_close(self, sess):
    """Closes a session without raising an exception.

    Just like sess.close() but ignores exceptions.

    Args:
      sess: A `Session`.
    """
    # pylint: disable=broad-except
    try:
      sess.close()
    except Exception:
      # Intentionally not logging to avoid user complaints that
      # they get cryptic errors. We really do not care that Close
      # fails.
      pass
    # pylint: enable=broad-except

  def _model_not_ready(self, sess):
    """Checks if the model is ready or not.

    Args:
      sess: A `Session`.

    Returns:
      `None` if the model is ready, a `String` with the reason why it is not
      ready otherwise.
    """
    if self._ready_op is None:
      # No readiness check configured: treat the model as always ready.
      return None
    else:
      try:
        ready_value = sess.run(self._ready_op)
        # The model is considered ready if ready_op returns an empty 1-D tensor.
        # Also compare to `None` and dtype being int32 for backward
        # compatibility.
        if (ready_value is None or ready_value.dtype == np.int32 or
            ready_value.size == 0):
          return None
        else:
          # TODO(sherrym): If a custom ready_op returns other types of tensor,
          # or strings other than variable names, this message could be
          # confusing.
          non_initialized_varnames = ", ".join(
              [i.decode("utf-8") for i in ready_value])
          return "Variables not initialized: " + non_initialized_varnames
      except errors.FailedPreconditionError as e:
        # An "uninitialized" FailedPrecondition is the expected signal for a
        # not-yet-ready model; anything else is unexpected and re-raised.
        if "uninitialized" not in str(e):
          logging.warning("Model not ready raised: %s", str(e))
          raise e
        return str(e)
class _CountDownTimer(object):
  """Tracks how much of a fixed duration (in seconds) is still left."""

  def __init__(self, duration_secs):
    """Starts the countdown of `duration_secs` seconds at construction time."""
    self._start_time_secs = time.time()
    self._duration_secs = duration_secs

  def secs_remaining(self):
    """Returns the seconds left before expiry, clamped at zero."""
    elapsed = time.time() - self._start_time_secs
    return max(0, self._duration_secs - elapsed)
| {
"content_hash": "47004efac5bc277c206310a1fc6f2719",
"timestamp": "",
"source": "github",
"line_count": 347,
"max_line_length": 80,
"avg_line_length": 38.4178674351585,
"alnum_prop": 0.6708423974195484,
"repo_name": "EvenStrangest/tensorflow",
"id": "d34e86e896029f81a4e68afc76a2552e82508e50",
"size": "14020",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tensorflow/python/training/session_manager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "156263"
},
{
"name": "C++",
"bytes": "9372687"
},
{
"name": "CMake",
"bytes": "29372"
},
{
"name": "CSS",
"bytes": "1297"
},
{
"name": "HTML",
"bytes": "784316"
},
{
"name": "Java",
"bytes": "39229"
},
{
"name": "JavaScript",
"bytes": "10875"
},
{
"name": "Jupyter Notebook",
"bytes": "1533241"
},
{
"name": "Makefile",
"bytes": "11364"
},
{
"name": "Objective-C",
"bytes": "5332"
},
{
"name": "Objective-C++",
"bytes": "45585"
},
{
"name": "Protocol Buffer",
"bytes": "112557"
},
{
"name": "Python",
"bytes": "6949434"
},
{
"name": "Shell",
"bytes": "196466"
},
{
"name": "TypeScript",
"bytes": "411503"
}
],
"symlink_target": ""
} |
import os
from unittest import TestCase
class CommandLineTest(TestCase):
    """Smoke tests for the dacsspace command line entry point."""

    def test_command_line(self):
        """Ensures command line interface does not contain typos."""
        status = os.system('dacsspace --help')
        self.assertEqual(0, status)
| {
"content_hash": "66ba40db01ed5c94396ac182b111c42f",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 68,
"avg_line_length": 26.2,
"alnum_prop": 0.6793893129770993,
"repo_name": "RockefellerArchiveCenter/DACSspace",
"id": "4c563583256bb550789539def362640fdbfeb6fd",
"size": "262",
"binary": false,
"copies": "1",
"ref": "refs/heads/base",
"path": "tests/test_commandline.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23982"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from ..models import FormQuestion, Page
from .page_admin import PageAdmin
from .question_admin import FormQuestionAdmin
# Register the wizard-builder models with the Django admin site, wiring each
# model to its customized ModelAdmin class imported above.
admin.site.register(Page, PageAdmin)
admin.site.register(FormQuestion, FormQuestionAdmin)
| {
"content_hash": "1d7c994b184895fc3a9a86e733178cda",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 52,
"avg_line_length": 30.625,
"alnum_prop": 0.8326530612244898,
"repo_name": "SexualHealthInnovations/django-wizard-builder",
"id": "9d7464ef1df437e65010f808668ea7b8d0d45947",
"size": "245",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "wizard_builder/admin/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "6885"
},
{
"name": "Makefile",
"bytes": "2525"
},
{
"name": "Python",
"bytes": "99398"
}
],
"symlink_target": ""
} |
"""Tests for `tf.data.experimental.copy_to_device()`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.data.experimental.ops import prefetching_ops
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.data.util import structure
from tensorflow.python.framework import combinations
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
from tensorflow.python.util import compat as util_compat
# TODO(b/117581999): add eager coverage when supported.
class CopyToDeviceTest(test_base.DatasetTestBase, parameterized.TestCase):
@combinations.generate(test_base.graph_only_combinations())
def testCopyToDevice(self):
  """Elements copied from CPU:0 to CPU:1 keep their values and structure."""
  host_dataset = dataset_ops.Dataset.range(10)
  device_dataset = host_dataset.apply(
      prefetching_ops.copy_to_device("/cpu:1"))

  with ops.device("/cpu:1"):
    iterator = dataset_ops.make_one_shot_iterator(device_dataset)
    next_element = iterator.get_next()

  # The copied dataset must advertise the same element structure as the host.
  self.assertTrue(
      structure.are_compatible(
          dataset_ops.get_structure(host_dataset),
          dataset_ops.get_structure(device_dataset)))
  self.assertEqual(dtypes.int64, next_element.dtype)
  self.assertEqual([], next_element.shape)

  # Two CPU devices so that "/cpu:1" exists in the test session.
  worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
  with self.test_session(config=worker_config):
    for i in range(10):
      self.assertEqual(i, self.evaluate(next_element))
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyToDeviceInt32(self):
  """int32 vector elements survive the copy to "/cpu:1" with dtype/shape intact."""
  host_dataset = dataset_ops.Dataset.from_tensors([0, 1, 2, 3])
  device_dataset = host_dataset.apply(
      prefetching_ops.copy_to_device("/cpu:1"))

  with ops.device("/cpu:1"):
    iterator = dataset_ops.make_one_shot_iterator(device_dataset)
    next_element = iterator.get_next()

  self.assertTrue(
      structure.are_compatible(
          dataset_ops.get_structure(host_dataset),
          dataset_ops.get_structure(device_dataset)))
  self.assertEqual(dtypes.int32, next_element.dtype)
  self.assertEqual((4,), next_element.shape)

  worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
  with self.test_session(config=worker_config):
    self.assertAllEqual([0, 1, 2, 3], self.evaluate(next_element))
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyToSameDevice(self):
  """copy_to_device is a no-op-like pass-through when source == destination."""
  host_dataset = dataset_ops.Dataset.range(10)
  device_dataset = host_dataset.apply(
      prefetching_ops.copy_to_device("/cpu:0"))

  with ops.device("/cpu:0"):
    iterator = dataset_ops.make_one_shot_iterator(device_dataset)
    next_element = iterator.get_next()

  self.assertTrue(
      structure.are_compatible(
          dataset_ops.get_structure(host_dataset),
          dataset_ops.get_structure(device_dataset)))
  self.assertEqual(dtypes.int64, next_element.dtype)
  self.assertEqual([], next_element.shape)

  worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
  with self.test_session(config=worker_config):
    for i in range(10):
      self.assertEqual(i, self.evaluate(next_element))
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyToDeviceWithPrefetch(self):
  """Same as testCopyToDevice but with a prefetch buffer after the copy."""
  host_dataset = dataset_ops.Dataset.range(10)
  device_dataset = host_dataset.apply(
      prefetching_ops.copy_to_device("/cpu:1")).prefetch(1)

  with ops.device("/cpu:1"):
    iterator = dataset_ops.make_one_shot_iterator(device_dataset)
    next_element = iterator.get_next()

  self.assertTrue(
      structure.are_compatible(
          dataset_ops.get_structure(host_dataset),
          dataset_ops.get_structure(device_dataset)))
  self.assertEqual(dtypes.int64, next_element.dtype)
  self.assertEqual([], next_element.shape)

  worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
  with self.test_session(config=worker_config):
    for i in range(10):
      self.assertEqual(i, self.evaluate(next_element))
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyDictToDevice(self):
  """Dict-structured elements keep their keys and values across the copy."""
  host_dataset = dataset_ops.Dataset.range(10).map(lambda x: {"a": x})
  device_dataset = host_dataset.apply(
      prefetching_ops.copy_to_device("/cpu:1"))

  with ops.device("/cpu:1"):
    iterator = dataset_ops.make_one_shot_iterator(device_dataset)
    next_element = iterator.get_next()

  self.assertTrue(
      structure.are_compatible(
          dataset_ops.get_structure(host_dataset),
          dataset_ops.get_structure(device_dataset)))
  self.assertEqual(dtypes.int64, next_element["a"].dtype)
  self.assertEqual([], next_element["a"].shape)

  worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
  with self.test_session(config=worker_config):
    for i in range(10):
      self.assertEqual({"a": i}, self.evaluate(next_element))
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyDictToDeviceWithPrefetch(self):
  """Dict-structured elements with a prefetch buffer after the copy."""
  host_dataset = dataset_ops.Dataset.range(10).map(lambda x: {"a": x})
  device_dataset = host_dataset.apply(
      prefetching_ops.copy_to_device("/cpu:1")).prefetch(1)

  with ops.device("/cpu:1"):
    iterator = dataset_ops.make_one_shot_iterator(device_dataset)
    next_element = iterator.get_next()

  self.assertTrue(
      structure.are_compatible(
          dataset_ops.get_structure(host_dataset),
          dataset_ops.get_structure(device_dataset)))
  self.assertEqual(dtypes.int64, next_element["a"].dtype)
  self.assertEqual([], next_element["a"].shape)

  worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
  with self.test_session(config=worker_config):
    for i in range(10):
      self.assertEqual({"a": i}, self.evaluate(next_element))
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopySparseTensorsToDevice(self):
  """Sparse elements keep indices/values/dense_shape across the copy."""

  def make_tensor(i):
    # 2x2 sparse tensor with a single entry [i] at position (0, 0).
    return sparse_tensor.SparseTensorValue(
        indices=[[0, 0]], values=(i * [1]), dense_shape=[2, 2])

  host_dataset = dataset_ops.Dataset.range(10).map(make_tensor)
  device_dataset = host_dataset.apply(
      prefetching_ops.copy_to_device("/cpu:1"))

  with ops.device("/cpu:1"):
    iterator = dataset_ops.make_one_shot_iterator(device_dataset)
    next_element = iterator.get_next()

  self.assertTrue(
      structure.are_compatible(
          dataset_ops.get_structure(host_dataset),
          dataset_ops.get_structure(device_dataset)))
  self.assertEqual(dtypes.int64, next_element.dtype)

  worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
  with self.test_session(config=worker_config):
    for i in range(10):
      actual = self.evaluate(next_element)
      self.assertAllEqual([i], actual.values)
      self.assertAllEqual([[0, 0]], actual.indices)
      self.assertAllEqual([2, 2], actual.dense_shape)
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopySparseTensorsToDeviceWithPrefetch(self):
  """Sparse elements with a prefetch buffer after the copy."""

  def make_tensor(i):
    # 2x2 sparse tensor with a single entry [i] at position (0, 0).
    return sparse_tensor.SparseTensorValue(
        indices=[[0, 0]], values=(i * [1]), dense_shape=[2, 2])

  host_dataset = dataset_ops.Dataset.range(10).map(make_tensor)
  device_dataset = host_dataset.apply(
      prefetching_ops.copy_to_device("/cpu:1")).prefetch(1)

  with ops.device("/cpu:1"):
    iterator = dataset_ops.make_one_shot_iterator(device_dataset)
    next_element = iterator.get_next()

  self.assertTrue(
      structure.are_compatible(
          dataset_ops.get_structure(host_dataset),
          dataset_ops.get_structure(device_dataset)))
  self.assertEqual(dtypes.int64, next_element.dtype)

  worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
  with self.test_session(config=worker_config):
    for i in range(10):
      actual = self.evaluate(next_element)
      self.assertAllEqual([i], actual.values)
      self.assertAllEqual([[0, 0]], actual.indices)
      self.assertAllEqual([2, 2], actual.dense_shape)
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyToDeviceGpu(self):
  """Elements copy correctly from the host to "/gpu:0" (skipped without a GPU)."""
  if not test_util.is_gpu_available():
    self.skipTest("No GPU available")

  host_dataset = dataset_ops.Dataset.range(10)
  device_dataset = host_dataset.apply(
      prefetching_ops.copy_to_device("/gpu:0"))

  with ops.device("/gpu:0"):
    iterator = dataset_ops.make_initializable_iterator(device_dataset)
    next_element = iterator.get_next()

  # allow_soft_placement=False ensures the ops really run on the GPU.
  with self.cached_session(
      config=config_pb2.ConfigProto(allow_soft_placement=False)):
    self.evaluate(iterator.initializer)
    for i in range(10):
      self.assertEqual(i, self.evaluate(next_element))
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyToDeviceGpuWithPrefetch(self):
  """GPU copy with a prefetch buffer on the device (skipped without a GPU)."""
  if not test_util.is_gpu_available():
    self.skipTest("No GPU available")

  host_dataset = dataset_ops.Dataset.range(10)
  device_dataset = host_dataset.apply(
      prefetching_ops.copy_to_device("/gpu:0")).prefetch(1)

  with ops.device("/gpu:0"):
    iterator = dataset_ops.make_initializable_iterator(device_dataset)
    next_element = iterator.get_next()

  with self.cached_session(
      config=config_pb2.ConfigProto(allow_soft_placement=False)):
    self.evaluate(iterator.initializer)
    for i in range(10):
      self.assertEqual(i, self.evaluate(next_element))
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyToDeviceGpuWithMap(self):
  """map_on_gpu applies a map function to elements already copied to the GPU."""
  if not test_util.is_gpu_available():
    self.skipTest("No GPU available")

  def generator():
    # Mixed-type elements: int, float, and string per item.
    for i in range(10):
      yield i, float(i), str(i)

  host_dataset = dataset_ops.Dataset.from_generator(
      generator, output_types=(dtypes.int32, dtypes.float32, dtypes.string))
  device_dataset = host_dataset.apply(
      prefetching_ops.copy_to_device("/gpu:0"))

  def gpu_map_func(x, y, z):
    # Squares the numeric components; the string passes through unchanged.
    return math_ops.square(x), math_ops.square(y), z

  device_dataset = device_dataset.apply(
      prefetching_ops.map_on_gpu(gpu_map_func))
  options = dataset_ops.Options()
  options.experimental_optimization.autotune = False
  device_dataset = device_dataset.with_options(options)

  with ops.device("/gpu:0"):
    iterator = dataset_ops.make_initializable_iterator(device_dataset)
    next_element = iterator.get_next()

  with self.cached_session(
      config=config_pb2.ConfigProto(allow_soft_placement=False)):
    self.evaluate(iterator.initializer)
    for i in range(10):
      x, y, z = self.evaluate(next_element)
      self.assertEqual(i**2, x)
      self.assertEqual(float(i**2), y)
      self.assertEqual(util_compat.as_bytes(str(i)), z)
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyToDeviceGpuInt32(self):
  """int32 vectors copy to the GPU intact (skipped without a GPU)."""
  if not test_util.is_gpu_available():
    self.skipTest("No GPU available")

  host_dataset = dataset_ops.Dataset.from_tensors([0, 1, 2, 3])
  device_dataset = host_dataset.apply(
      prefetching_ops.copy_to_device("/gpu:0"))

  with ops.device("/gpu:0"):
    iterator = dataset_ops.make_initializable_iterator(device_dataset)
    next_element = iterator.get_next()

  with self.cached_session(
      config=config_pb2.ConfigProto(allow_soft_placement=False)):
    self.evaluate(iterator.initializer)
    self.assertAllEqual([0, 1, 2, 3], self.evaluate(next_element))
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyToDeviceGpuInt32AndPrefetch(self):
  """int32 vectors copy to the GPU with a prefetch buffer after the copy."""
  if not test_util.is_gpu_available():
    self.skipTest("No GPU available")

  host_dataset = dataset_ops.Dataset.from_tensors([0, 1, 2, 3])
  device_dataset = host_dataset.apply(
      prefetching_ops.copy_to_device("/gpu:0")).prefetch(1)

  with ops.device("/gpu:0"):
    iterator = dataset_ops.make_initializable_iterator(device_dataset)
    next_element = iterator.get_next()

  with self.cached_session(
      config=config_pb2.ConfigProto(allow_soft_placement=False)):
    self.evaluate(iterator.initializer)
    self.assertAllEqual([0, 1, 2, 3], self.evaluate(next_element))
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyToDeviceGpuStrings(self):
  """String tensors copy to the GPU intact (skipped without a GPU)."""
  if not test_util.is_gpu_available():
    self.skipTest("No GPU available")

  host_dataset = dataset_ops.Dataset.from_tensors(["a", "b", "c"])
  device_dataset = host_dataset.apply(
      prefetching_ops.copy_to_device("/gpu:0"))

  with ops.device("/gpu:0"):
    iterator = dataset_ops.make_initializable_iterator(device_dataset)
    next_element = iterator.get_next()

  with self.cached_session(
      config=config_pb2.ConfigProto(allow_soft_placement=False)):
    self.evaluate(iterator.initializer)
    self.assertAllEqual([b"a", b"b", b"c"], self.evaluate(next_element))
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyToDeviceGpuStringsAndPrefetch(self):
  """String tensors copy to the GPU with a prefetch buffer after the copy.

  Bug fix: despite its name, this test built the exact same pipeline as
  testCopyToDeviceGpuStrings -- the `.prefetch(1)` call was missing, so the
  prefetch-after-copy path was never exercised for strings. Every other
  *AndPrefetch/*WithPrefetch sibling in this file appends `.prefetch(1)`
  after `copy_to_device`; this test now does the same.
  """
  if not test_util.is_gpu_available():
    self.skipTest("No GPU available")

  host_dataset = dataset_ops.Dataset.from_tensors(["a", "b", "c"])
  # Prefetch on the destination device, matching the test's name and the
  # other prefetch variants in this file.
  device_dataset = host_dataset.apply(
      prefetching_ops.copy_to_device("/gpu:0")).prefetch(1)

  with ops.device("/gpu:0"):
    iterator = dataset_ops.make_initializable_iterator(device_dataset)
    next_element = iterator.get_next()

  with self.cached_session(
      config=config_pb2.ConfigProto(allow_soft_placement=False)):
    self.evaluate(iterator.initializer)
    self.assertAllEqual([b"a", b"b", b"c"], self.evaluate(next_element))
    with self.assertRaises(errors.OutOfRangeError):
      self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyToDevicePingPongCPUGPU(self):
    """Copies a dataset CPU -> GPU -> CPU and checks elements round-trip."""
    if not test_util.is_gpu_available():
        self.skipTest("No GPU available")
    host_dataset = dataset_ops.Dataset.range(10)
    # First hop: CPU -> GPU, with an explicit source device.
    device_dataset = host_dataset.apply(
        prefetching_ops.copy_to_device("/gpu:0", source_device="/cpu:0"))
    # Second hop: back from GPU -> CPU.
    back_to_cpu_dataset = device_dataset.apply(
        prefetching_ops.copy_to_device("/cpu:0", source_device="/gpu:0"))
    with ops.device("/cpu:0"):
        iterator = dataset_ops.make_initializable_iterator(back_to_cpu_dataset)
        next_element = iterator.get_next()
    # allow_soft_placement=False: fail loudly on placement fallback.
    with self.cached_session(
            config=config_pb2.ConfigProto(allow_soft_placement=False)):
        self.evaluate(iterator.initializer)
        for i in range(10):
            self.assertEqual(i, self.evaluate(next_element))
        with self.assertRaises(errors.OutOfRangeError):
            self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyToDeviceWithReInit(self):
    """Re-initializing an iterator over a copied dataset restarts it."""
    host_dataset = dataset_ops.Dataset.range(10)
    device_dataset = host_dataset.apply(
        prefetching_ops.copy_to_device("/cpu:1"))
    with ops.device("/cpu:1"):
        iterator = dataset_ops.make_initializable_iterator(device_dataset)
        next_element = iterator.get_next()
    # The copy must not change the element structure, dtype or shape.
    self.assertTrue(
        structure.are_compatible(
            dataset_ops.get_structure(host_dataset),
            dataset_ops.get_structure(device_dataset)))
    self.assertEqual(dtypes.int64, next_element.dtype)
    self.assertEqual([], next_element.shape)
    # Two CPU devices so that "/cpu:1" exists.
    worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
    with self.test_session(config=worker_config):
        self.evaluate(iterator.initializer)
        # Consume half, re-initialize: iteration must restart from zero.
        for i in range(5):
            self.assertEqual(i, self.evaluate(next_element))
        self.evaluate(iterator.initializer)
        for i in range(10):
            self.assertEqual(i, self.evaluate(next_element))
        with self.assertRaises(errors.OutOfRangeError):
            self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyToDeviceWithReInitAndPrefetch(self):
    """Same as testCopyToDeviceWithReInit but with a trailing prefetch(1)."""
    host_dataset = dataset_ops.Dataset.range(10)
    device_dataset = host_dataset.apply(
        prefetching_ops.copy_to_device("/cpu:1")).prefetch(1)
    with ops.device("/cpu:1"):
        iterator = dataset_ops.make_initializable_iterator(device_dataset)
        next_element = iterator.get_next()
    # The copy + prefetch must not change element structure/dtype/shape.
    self.assertTrue(
        structure.are_compatible(
            dataset_ops.get_structure(host_dataset),
            dataset_ops.get_structure(device_dataset)))
    self.assertEqual(dtypes.int64, next_element.dtype)
    self.assertEqual([], next_element.shape)
    # Two CPU devices so that "/cpu:1" exists.
    worker_config = config_pb2.ConfigProto(device_count={"CPU": 2})
    with self.test_session(config=worker_config):
        self.evaluate(iterator.initializer)
        # Consume half, re-initialize: iteration must restart from zero
        # even with an element buffered by prefetch.
        for i in range(5):
            self.assertEqual(i, self.evaluate(next_element))
        self.evaluate(iterator.initializer)
        for i in range(10):
            self.assertEqual(i, self.evaluate(next_element))
        with self.assertRaises(errors.OutOfRangeError):
            self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyToDeviceGpuWithReInit(self):
    """Re-initializing an iterator over a GPU-copied dataset restarts it."""
    if not test_util.is_gpu_available():
        self.skipTest("No GPU available")
    host_dataset = dataset_ops.Dataset.range(10)
    device_dataset = host_dataset.apply(
        prefetching_ops.copy_to_device("/gpu:0"))
    with ops.device("/gpu:0"):
        iterator = dataset_ops.make_initializable_iterator(device_dataset)
        next_element = iterator.get_next()
    # allow_soft_placement=False: fail loudly on placement fallback.
    with self.cached_session(
            config=config_pb2.ConfigProto(allow_soft_placement=False)):
        self.evaluate(iterator.initializer)
        # Consume half, re-initialize: iteration must restart from zero.
        for i in range(5):
            self.assertEqual(i, self.evaluate(next_element))
        self.evaluate(iterator.initializer)
        for i in range(10):
            self.assertEqual(i, self.evaluate(next_element))
        with self.assertRaises(errors.OutOfRangeError):
            self.evaluate(next_element)
@combinations.generate(test_base.graph_only_combinations())
def testCopyToDeviceGpuWithReInitAndPrefetch(self):
    """Re-initializing a prefetching GPU iterator restarts the sequence."""
    if not test_util.is_gpu_available():
        self.skipTest("No GPU available")

    pipeline = dataset_ops.Dataset.range(10).apply(
        prefetching_ops.copy_to_device("/gpu:0")).prefetch(1)

    with ops.device("/gpu:0"):
        it = dataset_ops.make_initializable_iterator(pipeline)
        nxt = it.get_next()

    # Hard placement: fail instead of silently falling back to the CPU.
    session_config = config_pb2.ConfigProto(allow_soft_placement=False)
    with self.cached_session(config=session_config):
        # Consume half, re-initialize, then consume the full range.
        self.evaluate(it.initializer)
        for expected in range(5):
            self.assertEqual(expected, self.evaluate(nxt))
        self.evaluate(it.initializer)
        for expected in range(10):
            self.assertEqual(expected, self.evaluate(nxt))
        with self.assertRaises(errors.OutOfRangeError):
            self.evaluate(nxt)
@combinations.generate(test_base.graph_only_combinations())
def testIteratorGetNextAsOptionalOnGPU(self):
    """get_next_as_optional semantics for an iterator placed on the GPU."""
    if not test_util.is_gpu_available():
        self.skipTest("No GPU available")
    host_dataset = dataset_ops.Dataset.range(3)
    device_dataset = host_dataset.apply(
        prefetching_ops.copy_to_device("/gpu:0"))
    with ops.device("/gpu:0"):
        iterator = dataset_ops.make_initializable_iterator(device_dataset)
        next_elem = iterator_ops.get_next_as_optional(iterator)
        elem_has_value_t = next_elem.has_value()
        elem_value_t = next_elem.get_value()
    with self.cached_session(
            config=config_pb2.ConfigProto(allow_soft_placement=False)):
        # Before initializing the iterator, evaluating the optional fails with
        # a FailedPreconditionError.
        with self.assertRaises(errors.FailedPreconditionError):
            self.evaluate(elem_has_value_t)
        with self.assertRaises(errors.FailedPreconditionError):
            self.evaluate(elem_value_t)
        # For each element of the dataset, assert that the optional evaluates to
        # the expected value.
        self.evaluate(iterator.initializer)
        for i in range(3):
            elem_has_value, elem_value = self.evaluate(
                [elem_has_value_t, elem_value_t])
            self.assertTrue(elem_has_value)
            self.assertEqual(i, elem_value)
        # After exhausting the iterator, `next_elem.has_value()` will evaluate to
        # false, and attempting to get the value will fail.
        for _ in range(2):
            self.assertFalse(self.evaluate(elem_has_value_t))
            with self.assertRaises(errors.InvalidArgumentError):
                self.evaluate(elem_value_t)
# Standard TensorFlow test entry point.
if __name__ == "__main__":
    test.main()
| {
"content_hash": "e9b67ebff7397b8fdd1102c9b2d23ac3",
"timestamp": "",
"source": "github",
"line_count": 573,
"max_line_length": 79,
"avg_line_length": 38.767888307155324,
"alnum_prop": 0.6903754389123976,
"repo_name": "renyi533/tensorflow",
"id": "b325474daabbb9a65e9e515f7c6fcedc90c5c783",
"size": "22903",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tensorflow/python/data/experimental/kernel_tests/copy_to_device_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "31572"
},
{
"name": "Batchfile",
"bytes": "55269"
},
{
"name": "C",
"bytes": "903309"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "82507951"
},
{
"name": "CMake",
"bytes": "6967"
},
{
"name": "Dockerfile",
"bytes": "113964"
},
{
"name": "Go",
"bytes": "1871425"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "988219"
},
{
"name": "Jupyter Notebook",
"bytes": "550861"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "2073744"
},
{
"name": "Makefile",
"bytes": "66796"
},
{
"name": "Objective-C",
"bytes": "116558"
},
{
"name": "Objective-C++",
"bytes": "319021"
},
{
"name": "PHP",
"bytes": "4236"
},
{
"name": "Pascal",
"bytes": "318"
},
{
"name": "Pawn",
"bytes": "20422"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "37811412"
},
{
"name": "RobotFramework",
"bytes": "1779"
},
{
"name": "Roff",
"bytes": "2705"
},
{
"name": "Ruby",
"bytes": "7464"
},
{
"name": "SWIG",
"bytes": "6846"
},
{
"name": "Shell",
"bytes": "696058"
},
{
"name": "Smarty",
"bytes": "35725"
},
{
"name": "Starlark",
"bytes": "3655758"
},
{
"name": "Swift",
"bytes": "62814"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
} |
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils package configuration.

    Registers the sub-packages shipped with numpy.distutils and the data
    files needed at build time, then generates the package's __config__.py.
    """
    from numpy.distutils.misc_util import Configuration
    config = Configuration('distutils', parent_package, top_path)
    config.add_subpackage('command')
    config.add_subpackage('fcompiler')
    config.add_subpackage('tests')
    # site.cfg is shipped as an example/override for library locations.
    config.add_data_files('site.cfg')
    # C shim used for gfortran / MSVC 2003 interoperability on Windows.
    config.add_data_files('mingw/gfortran_vs2003_hack.c')
    config.make_config_py()
    return config
# Allow building this sub-package standalone with numpy.distutils.
if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(configuration=configuration)
| {
"content_hash": "acb4c16c34987c261f2e29054327fb4e",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 65,
"avg_line_length": 38.714285714285715,
"alnum_prop": 0.7047970479704797,
"repo_name": "abalkin/numpy",
"id": "88cd1a16083beb8269bff52a73af9481b0bc5ab7",
"size": "565",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "numpy/distutils/setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "7712217"
},
{
"name": "C++",
"bytes": "27311"
},
{
"name": "Fortran",
"bytes": "5803"
},
{
"name": "Makefile",
"bytes": "2574"
},
{
"name": "Python",
"bytes": "6209890"
}
],
"symlink_target": ""
} |
# Periodicity smoke test: hourly frequency ('H'), cycle length 400,
# horizon 15.
from tests.periodicities import period_test

period_test.buildModel((15, 'H', 400))
| {
"content_hash": "3a1d4197e230a98853d1ec9482a49682",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 45,
"avg_line_length": 20.5,
"alnum_prop": 0.7073170731707317,
"repo_name": "antoinecarme/pyaf",
"id": "04c65a022eae726f758fa4fae3658682dfc9378a",
"size": "82",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/periodicities/Hour/Cycle_Hour_400_H_15.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
} |
"""
This module defines built-in evaluation functions for regression applications
"""
from __future__ import absolute_import, division, print_function
import numpy as np
import pandas as pd
from niftynet.evaluation.base_evaluations import BaseEvaluation
class BaseRegressionEvaluation(BaseEvaluation):
    """ Interface for scalar regression metrics """

    def layer_op(self, subject_id, data):
        # The result column is named after the concrete subclass
        # (mse / rmse / mae below).
        metric_name = self.__class__.__name__
        # 'inferred' holds the regression output, 'output' the reference.
        metric_value = self.metric(data['inferred'], data['output'])
        # One-row frame indexed by subject_id so that per-subject results
        # can be concatenated downstream.
        pdf = pd.DataFrame.from_records([{'subject_id': subject_id,
                                          metric_name: metric_value}],
                                        ('subject_id',))
        return [pdf]

    def metric(self, reg, ref):
        """
        Computes a scalar value for the metric
        :param reg: np.array with inferred regression
        :param ref: np array with the reference output
        :return: scalar metric value
        """
        raise NotImplementedError
#pylint: disable=invalid-name
class mse(BaseRegressionEvaluation):
    """ Computes mean squared error """

    def metric(self, reg, ref):
        # Mean of squared element-wise differences.
        diff = reg - ref
        return np.mean(diff * diff)
class rmse(BaseRegressionEvaluation):
    """ Computes root mean squared error """

    def metric(self, reg, ref):
        # Square root of the mean squared difference.
        diff = reg - ref
        return np.sqrt(np.mean(diff * diff))
class mae(BaseRegressionEvaluation):
    """ Computes mean absolute error """

    def metric(self, reg, ref):
        # |a - b| is symmetric, so the operand order does not matter.
        return np.mean(np.abs(reg - ref))
| {
"content_hash": "0194a3fa206b530d9d3ca790b30c473a",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 77,
"avg_line_length": 31.163265306122447,
"alnum_prop": 0.6339227242960053,
"repo_name": "NifTK/NiftyNet",
"id": "a236e09e624e4c31b85a3d57b47209c43a8575bf",
"size": "1551",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "niftynet/evaluation/regression_evaluations.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "381956"
},
{
"name": "C++",
"bytes": "182582"
},
{
"name": "CMake",
"bytes": "3500"
},
{
"name": "Cuda",
"bytes": "69664"
},
{
"name": "Python",
"bytes": "2340002"
},
{
"name": "Shell",
"bytes": "1792"
}
],
"symlink_target": ""
} |
import paddle
import paddle.fluid as fluid
import numpy as np
import unittest
from op_test import OpTest
def test_static_layer(
    place, input_np, label_np, reduction='mean', weight_np=None
):
    """Runs paddle.nn.BCELoss (layer API) in a static-graph program.

    Builds placeholders matching the numpy inputs, executes the program on
    *place*, and returns the loss as a numpy array.
    """
    prog = paddle.static.Program()
    startup_prog = paddle.static.Program()
    with paddle.static.program_guard(prog, startup_prog):
        input = paddle.fluid.data(
            name='input', shape=input_np.shape, dtype='float64'
        )
        label = paddle.fluid.data(
            name='label', shape=label_np.shape, dtype='float64'
        )
        if weight_np is not None:
            # Declare the optional weight placeholder only when given.
            weight = paddle.fluid.data(
                name='weight', shape=weight_np.shape, dtype='float64'
            )
            bce_loss = paddle.nn.loss.BCELoss(
                weight=weight, reduction=reduction
            )
        else:
            bce_loss = paddle.nn.loss.BCELoss(reduction=reduction)
        res = bce_loss(input, label)
        exe = paddle.static.Executor(place)
        # Feed "weight" only when its placeholder exists in the program.
        (static_result,) = exe.run(
            prog,
            feed={"input": input_np, "label": label_np}
            if weight_np is None
            else {"input": input_np, "label": label_np, "weight": weight_np},
            fetch_list=[res],
        )
    return static_result
def test_static_functional(
    place, input_np, label_np, reduction='mean', weight_np=None
):
    """Runs paddle.nn.functional.binary_cross_entropy in a static graph.

    Mirrors test_static_layer but exercises the functional API; returns the
    loss as a numpy array.
    """
    prog = paddle.static.Program()
    startup_prog = paddle.static.Program()
    with paddle.static.program_guard(prog, startup_prog):
        input = paddle.fluid.data(
            name='input', shape=input_np.shape, dtype='float64'
        )
        label = paddle.fluid.data(
            name='label', shape=label_np.shape, dtype='float64'
        )
        if weight_np is not None:
            # Declare the optional weight placeholder only when given.
            weight = paddle.fluid.data(
                name='weight', shape=weight_np.shape, dtype='float64'
            )
            res = paddle.nn.functional.binary_cross_entropy(
                input, label, weight=weight, reduction=reduction
            )
        else:
            res = paddle.nn.functional.binary_cross_entropy(
                input, label, reduction=reduction
            )
        exe = paddle.static.Executor(place)
        # Feed "weight" only when its placeholder exists in the program.
        (static_result,) = exe.run(
            prog,
            feed={"input": input_np, "label": label_np}
            if weight_np is None
            else {"input": input_np, "label": label_np, "weight": weight_np},
            fetch_list=[res],
        )
    return static_result
def test_dygraph_layer(
    place, input_np, label_np, reduction='mean', weight_np=None
):
    """Runs paddle.nn.BCELoss eagerly and returns the result as numpy."""
    paddle.disable_static()
    if weight_np is None:
        layer = paddle.nn.loss.BCELoss(reduction=reduction)
    else:
        layer = paddle.nn.loss.BCELoss(
            weight=paddle.to_tensor(weight_np), reduction=reduction)
    loss = layer(paddle.to_tensor(input_np), paddle.to_tensor(label_np))
    result = loss.numpy()
    # Restore static mode for the surrounding test harness.
    paddle.enable_static()
    return result
def test_dygraph_functional(
    place, input_np, label_np, reduction='mean', weight_np=None
):
    """Runs the functional BCE API eagerly and returns the result as numpy."""
    paddle.disable_static()
    tensor_in = paddle.to_tensor(input_np)
    tensor_lab = paddle.to_tensor(label_np)
    if weight_np is None:
        loss = paddle.nn.functional.binary_cross_entropy(
            tensor_in, tensor_lab, reduction=reduction
        )
    else:
        loss = paddle.nn.functional.binary_cross_entropy(
            tensor_in, tensor_lab,
            weight=paddle.to_tensor(weight_np), reduction=reduction
        )
    result = loss.numpy()
    # Restore static mode for the surrounding test harness.
    paddle.enable_static()
    return result
def calc_bceloss(input_np, label_np, reduction='mean', weight_np=None):
    """NumPy reference for binary cross entropy.

    Elementwise -(y*log(x) + (1-y)*log(1-x)), optionally multiplied by
    *weight_np*, then reduced by 'mean', 'sum', or returned unreduced
    for 'none' (or any other value, matching the original fall-through).
    """
    elementwise = -1 * (
        label_np * np.log(input_np)
        + (1.0 - label_np) * np.log(1.0 - input_np)
    )
    if weight_np is not None:
        elementwise = weight_np * elementwise
    if reduction == 'mean':
        return np.mean(elementwise)
    if reduction == 'sum':
        return np.sum(elementwise)
    return elementwise
class TestBCELoss(unittest.TestCase):
    """Cross-checks BCELoss layer/functional, static/dygraph, vs numpy."""

    def test_BCELoss(self):
        # Inputs in (0.1, 0.8) keep log() well away from the 0/1 poles.
        input_np = np.random.uniform(0.1, 0.8, size=(20, 30)).astype(np.float64)
        label_np = np.random.randint(0, 2, size=(20, 30)).astype(np.float64)
        places = [fluid.CPUPlace()]
        if fluid.core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        reductions = ['sum', 'mean', 'none']
        for place in places:
            for reduction in reductions:
                static_result = test_static_layer(
                    place, input_np, label_np, reduction
                )
                dy_result = test_dygraph_layer(
                    place, input_np, label_np, reduction
                )
                expected = calc_bceloss(input_np, label_np, reduction)
                # All pairings must agree: static vs numpy, static vs
                # dygraph, dygraph vs numpy.
                np.testing.assert_allclose(static_result, expected, rtol=1e-05)
                np.testing.assert_allclose(static_result, dy_result, rtol=1e-05)
                np.testing.assert_allclose(dy_result, expected, rtol=1e-05)
                static_functional = test_static_functional(
                    place, input_np, label_np, reduction
                )
                dy_functional = test_dygraph_functional(
                    place, input_np, label_np, reduction
                )
                np.testing.assert_allclose(
                    static_functional, expected, rtol=1e-05
                )
                np.testing.assert_allclose(
                    static_functional, dy_functional, rtol=1e-05
                )
                np.testing.assert_allclose(dy_functional, expected, rtol=1e-05)

    def test_BCELoss_weight(self):
        input_np = np.random.uniform(0.1, 0.8, size=(2, 3, 4, 10)).astype(
            np.float64
        )
        label_np = np.random.randint(0, 2, size=(2, 3, 4, 10)).astype(
            np.float64
        )
        # Weight omits the leading batch dimension and is broadcast over it.
        weight_np = np.random.random(size=(3, 4, 10)).astype(np.float64)
        place = (
            fluid.CUDAPlace(0)
            if fluid.core.is_compiled_with_cuda()
            else fluid.CPUPlace()
        )
        for reduction in ['sum', 'mean', 'none']:
            static_result = test_static_layer(
                place, input_np, label_np, reduction, weight_np=weight_np
            )
            dy_result = test_dygraph_layer(
                place, input_np, label_np, reduction, weight_np=weight_np
            )
            expected = calc_bceloss(
                input_np, label_np, reduction, weight_np=weight_np
            )
            np.testing.assert_allclose(static_result, expected, rtol=1e-05)
            np.testing.assert_allclose(static_result, dy_result, rtol=1e-05)
            np.testing.assert_allclose(dy_result, expected, rtol=1e-05)
            static_functional = test_static_functional(
                place, input_np, label_np, reduction, weight_np=weight_np
            )
            dy_functional = test_dygraph_functional(
                place, input_np, label_np, reduction, weight_np=weight_np
            )
            np.testing.assert_allclose(static_functional, expected, rtol=1e-05)
            np.testing.assert_allclose(
                static_functional, dy_functional, rtol=1e-05
            )
            np.testing.assert_allclose(dy_functional, expected, rtol=1e-05)

    def test_BCELoss_error(self):
        # Unsupported reduction values must raise ValueError in both APIs.
        paddle.disable_static()
        self.assertRaises(
            ValueError, paddle.nn.loss.BCELoss, reduction="unsupport reduction"
        )
        input = paddle.to_tensor([[0.1, 0.3]], dtype='float32')
        label = paddle.to_tensor([[0.0, 1.0]], dtype='float32')
        self.assertRaises(
            ValueError,
            paddle.nn.functional.binary_cross_entropy,
            input=input,
            label=label,
            reduction="unsupport reduction",
        )
        paddle.enable_static()
def bce_loss(input, label):
    """Elementwise binary cross entropy: -(y*log(x) + (1-y)*log(1-x))."""
    positive_term = label * np.log(input)
    negative_term = (1.0 - label) * np.log(1.0 - input)
    return -(positive_term + negative_term)
class TestBceLossOp(OpTest):
    """Operator-level test of the bce_loss op against the numpy reference."""

    def setUp(self):
        # Subclasses customize the input shape via init_test_case.
        self.init_test_case()
        self.op_type = "bce_loss"
        # Inputs bounded away from 0/1 keep log() finite.
        input_np = np.random.uniform(0.1, 0.8, self.shape).astype("float64")
        label_np = np.random.randint(0, 2, self.shape).astype("float64")
        output_np = bce_loss(input_np, label_np)
        self.inputs = {'X': input_np, 'Label': label_np}
        self.outputs = {'Out': output_np}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        # Gradient is checked w.r.t. X only; labels are not differentiated.
        self.check_grad(['X'], 'Out')

    def init_test_case(self):
        self.shape = [10, 10]
class TestBceLossOpCase1(TestBceLossOp):
    """bce_loss op with a 4-D input shape.

    Bug fix: this case previously subclassed OpTest directly and spelled
    the hook ``init_test_cast`` (typo), so TestBceLossOp.setUp never ran
    and the intended shape was never exercised. It now inherits
    TestBceLossOp and overrides ``init_test_case``.
    """

    def init_test_case(self):
        self.shape = [2, 3, 4, 5]
class TestBceLossOpCase2(TestBceLossOp):
    """bce_loss op with a 3-D input shape.

    Bug fix: previously subclassed OpTest directly with a typo'd
    ``init_test_cast`` hook, so the shape override was never used; it now
    inherits TestBceLossOp and overrides ``init_test_case``.
    """

    def init_test_case(self):
        self.shape = [2, 3, 20]
if __name__ == "__main__":
    # OpTest-based cases require static-graph mode.
    paddle.enable_static()
    unittest.main()
| {
"content_hash": "a618c3b5e97195f0759ba2454be8640f",
"timestamp": "",
"source": "github",
"line_count": 263,
"max_line_length": 80,
"avg_line_length": 34.56273764258555,
"alnum_prop": 0.5614961496149615,
"repo_name": "luotao1/Paddle",
"id": "c77196c1d0d94ef1ad8ecd4e4b414b30997a89d3",
"size": "9701",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/test_bce_loss.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "58544"
},
{
"name": "C",
"bytes": "210300"
},
{
"name": "C++",
"bytes": "36771446"
},
{
"name": "CMake",
"bytes": "903079"
},
{
"name": "Cuda",
"bytes": "5200715"
},
{
"name": "Dockerfile",
"bytes": "4361"
},
{
"name": "Go",
"bytes": "49796"
},
{
"name": "Java",
"bytes": "16630"
},
{
"name": "Jinja",
"bytes": "23852"
},
{
"name": "MLIR",
"bytes": "39982"
},
{
"name": "Python",
"bytes": "36248258"
},
{
"name": "R",
"bytes": "1332"
},
{
"name": "Shell",
"bytes": "553175"
}
],
"symlink_target": ""
} |
from __future__ import print_function, absolute_import, division
import sys
import numpy
from numba import config, unittest_support as unittest
from numba.npyufunc.ufuncbuilder import UFuncBuilder, GUFuncBuilder
from numba import vectorize, guvectorize
from numba.npyufunc import PyUFunc_One
from numba.tests import support
def add(a, b):
    """An addition"""
    # NB: the docstring text is asserted by TestUfuncBuilding; keep it.
    total = a + b
    return total
def equals(a, b):
    """Equality comparison kernel for the bool-returning ufunc test."""
    result = a == b
    return result
def mul(a, b):
    """A multiplication"""
    # NB: the docstring text is relied upon as ufunc metadata; keep it.
    product = a * b
    return product
def guadd(a, b, c):
    """A generalized addition"""
    # Elementwise 2-D sum written into the preallocated output `c`.
    # (Kept as explicit loops: this kernel is compiled by numba.)
    rows, cols = c.shape
    for row in range(rows):
        for col in range(cols):
            c[row, col] = a[row, col] + b[row, col]
# Minimal plain Python class; instantiating it inside a kernel forces
# numba to compile in object mode (see guadd_obj).
class Dummy: pass
def guadd_obj(a, b, c):
    """Elementwise 2-D addition that deliberately requires objectmode."""
    Dummy()  # to force object mode
    x, y = c.shape
    for i in range(x):
        for j in range(y):
            c[i, j] = a[i, j] + b[i, j]
class MyException(Exception):
    """Raised by guerror to test exception propagation from a gufunc."""
    pass
def guerror(a, b, c):
    # Always raises; used to verify that exceptions escape a gufunc
    # running in (forced) object mode.
    raise MyException
class TestUfuncBuilding(unittest.TestCase):
    """Tests for UFuncBuilder: multiple signatures, dtypes, objectmode."""

    def test_basic_ufunc(self):
        ufb = UFuncBuilder(add)
        # Both signatures must compile in nopython mode.
        cres = ufb.add("int32(int32, int32)")
        self.assertFalse(cres.objectmode)
        cres = ufb.add("int64(int64, int64)")
        self.assertFalse(cres.objectmode)
        ufunc = ufb.build_ufunc()

        def check(a):
            b = ufunc(a, a)
            self.assertTrue(numpy.all(a + a == b))
            self.assertEqual(b.dtype, a.dtype)

        a = numpy.arange(12, dtype='int32')
        check(a)
        # Non-contiguous dimension
        a = a[::2]
        check(a)
        a = a.reshape((2, 3))
        check(a)
        # Metadata: the ufunc inherits the Python function's name and doc.
        self.assertEqual(ufunc.__name__, "add")
        self.assertIn("An addition", ufunc.__doc__)

    def test_ufunc_struct(self):
        # complex64 exercises a non-scalar element type -- presumably the
        # struct-passing code path (hence the name); confirm.
        ufb = UFuncBuilder(add)
        cres = ufb.add("complex64(complex64, complex64)")
        self.assertFalse(cres.objectmode)
        ufunc = ufb.build_ufunc()

        def check(a):
            b = ufunc(a, a)
            self.assertTrue(numpy.all(a + a == b))
            self.assertEqual(b.dtype, a.dtype)

        a = numpy.arange(12, dtype='complex64') + 1j
        check(a)
        # Non-contiguous dimension
        a = a[::2]
        check(a)
        a = a.reshape((2, 3))
        check(a)

    def test_ufunc_forceobj(self):
        # forceobj must yield an objectmode compile result, and the ufunc
        # must still compute correct values.
        ufb = UFuncBuilder(add, targetoptions={'forceobj': True})
        cres = ufb.add("int32(int32, int32)")
        self.assertTrue(cres.objectmode)
        ufunc = ufb.build_ufunc()
        a = numpy.arange(10, dtype='int32')
        b = ufunc(a, a)
        self.assertTrue(numpy.all(a + a == b))
class TestUfuncBuildingJitDisabled(TestUfuncBuilding):
    """Re-runs TestUfuncBuilding with config.DISABLE_JIT toggled.

    NOTE(review): setUp forces DISABLE_JIT to False, which looks
    inconsistent with the class name ("JitDisabled"); confirm whether the
    intended value is True.
    """

    def setUp(self):
        # Save and restore the global flag so other tests are unaffected.
        self.old_disable_jit = config.DISABLE_JIT
        config.DISABLE_JIT = False

    def tearDown(self):
        config.DISABLE_JIT = self.old_disable_jit
class TestGUfuncBuilding(unittest.TestCase):
    """Tests for GUFuncBuilder (generalized ufuncs)."""

    def test_basic_gufunc(self):
        gufb = GUFuncBuilder(guadd, "(x, y),(x, y)->(x, y)")
        cres = gufb.add("void(int32[:,:], int32[:,:], int32[:,:])")
        self.assertFalse(cres.objectmode)
        ufunc = gufb.build_ufunc()
        a = numpy.arange(10, dtype="int32").reshape(2, 5)
        b = ufunc(a, a)
        self.assertTrue(numpy.all(a + a == b))
        self.assertEqual(b.dtype, numpy.dtype('int32'))
        # Metadata: the gufunc inherits the kernel's name and docstring.
        self.assertEqual(ufunc.__name__, "guadd")
        self.assertIn("A generalized addition", ufunc.__doc__)

    def test_gufunc_struct(self):
        # complex64 exercises a non-scalar element type.
        gufb = GUFuncBuilder(guadd, "(x, y),(x, y)->(x, y)")
        cres = gufb.add("void(complex64[:,:], complex64[:,:], complex64[:,:])")
        self.assertFalse(cres.objectmode)
        ufunc = gufb.build_ufunc()
        a = numpy.arange(10, dtype="complex64").reshape(2, 5) + 1j
        b = ufunc(a, a)
        self.assertTrue(numpy.all(a + a == b))

    def test_gufunc_struct_forceobj(self):
        # Same as above but forcing objectmode compilation.
        gufb = GUFuncBuilder(guadd, "(x, y),(x, y)->(x, y)",
                             targetoptions=dict(forceobj=True))
        cres = gufb.add("void(complex64[:,:], complex64[:,:], complex64[:,"
                        ":])")
        self.assertTrue(cres.objectmode)
        ufunc = gufb.build_ufunc()
        a = numpy.arange(10, dtype="complex64").reshape(2, 5) + 1j
        b = ufunc(a, a)
        self.assertTrue(numpy.all(a + a == b))
        self.assertEqual(b.dtype, numpy.dtype('complex64'))
class TestGUfuncBuildingJitDisabled(TestGUfuncBuilding):
    """Re-runs TestGUfuncBuilding with config.DISABLE_JIT toggled.

    NOTE(review): setUp forces DISABLE_JIT to False, which looks
    inconsistent with the class name; confirm whether True was intended.
    """

    def setUp(self):
        # Save and restore the global flag so other tests are unaffected.
        self.old_disable_jit = config.DISABLE_JIT
        config.DISABLE_JIT = False

    def tearDown(self):
        config.DISABLE_JIT = self.old_disable_jit
class TestVectorizeDecor(unittest.TestCase):
    """End-to-end tests for the @vectorize and @guvectorize decorators.

    Bug fix: the supported-identities probe previously compared numpy
    version strings lexicographically (``numpy.__version__ >= '1.7'``),
    which is wrong for two-digit minors such as '1.10' or '1.26' (they
    compare *smaller* than '1.7' as strings), so the "reorderable"
    identity was silently skipped on modern numpy. The comparison is now
    numeric on (major, minor).
    """

    _supported_identities = [0, 1, None]
    # Numeric comparison; plain string comparison mis-orders '1.10' < '1.7'.
    if tuple(int(p) for p in numpy.__version__.split('.')[:2]) >= (1, 7):
        _supported_identities.append("reorderable")

    def test_vectorize(self):
        ufunc = vectorize(['int32(int32, int32)'])(add)
        a = numpy.arange(10, dtype='int32')
        b = ufunc(a, a)
        self.assertTrue(numpy.all(a + a == b))
        self.assertEqual(b.dtype, numpy.dtype('int32'))

    def test_vectorize_objmode(self):
        # forceobj compilation must still produce correct results.
        ufunc = vectorize(['int32(int32, int32)'], forceobj=True)(add)
        a = numpy.arange(10, dtype='int32')
        b = ufunc(a, a)
        self.assertTrue(numpy.all(a + a == b))
        self.assertEqual(b.dtype, numpy.dtype('int32'))

    def test_vectorize_bool_return(self):
        ufunc = vectorize(['bool_(int32, int32)'])(equals)
        a = numpy.arange(10, dtype='int32')
        r = ufunc(a, a)
        self.assertTrue(numpy.all(r))
        self.assertEqual(r.dtype, numpy.dtype('bool_'))

    def test_vectorize_identity(self):
        sig = 'int32(int32, int32)'
        for identity in self._supported_identities:
            ufunc = vectorize([sig], identity=identity)(add)
            # "reorderable" is accepted but surfaces as identity None.
            expected = None if identity == 'reorderable' else identity
            self.assertEqual(ufunc.identity, expected)
        # Default value is None
        ufunc = vectorize([sig])(add)
        self.assertIs(ufunc.identity, None)
        # Invalid values
        with self.assertRaises(ValueError):
            vectorize([sig], identity='none')(add)
        with self.assertRaises(ValueError):
            vectorize([sig], identity=2)(add)

    def test_vectorize_no_args(self):
        # Lazily-compiled (no explicit signature) vectorize.
        a = numpy.linspace(0, 1, 10)
        b = numpy.linspace(1, 2, 10)
        ufunc = vectorize(add)
        self.assertTrue(numpy.all(ufunc(a, b) == (a + b)))
        # The ufunc also supports an explicit output argument.
        ufunc2 = vectorize(add)
        c = numpy.empty(10)
        ufunc2(a, b, c)
        self.assertTrue(numpy.all(c == (a + b)))

    def test_vectorize_only_kws(self):
        a = numpy.linspace(0, 1, 10)
        b = numpy.linspace(1, 2, 10)
        ufunc = vectorize(identity=PyUFunc_One, nopython=True)(mul)
        self.assertTrue(numpy.all(ufunc(a, b) == (a * b)))

    def test_guvectorize(self):
        ufunc = guvectorize(['(int32[:,:], int32[:,:], int32[:,:])'],
                            "(x,y),(x,y)->(x,y)")(guadd)
        a = numpy.arange(10, dtype='int32').reshape(2, 5)
        b = ufunc(a, a)
        self.assertTrue(numpy.all(a + a == b))
        self.assertEqual(b.dtype, numpy.dtype('int32'))

    def test_guvectorize_no_output(self):
        # All-input layout: the caller supplies the output array.
        ufunc = guvectorize(['(int32[:,:], int32[:,:], int32[:,:])'],
                            "(x,y),(x,y),(x,y)")(guadd)
        a = numpy.arange(10, dtype='int32').reshape(2, 5)
        out = numpy.zeros_like(a)
        ufunc(a, a, out)
        self.assertTrue(numpy.all(a + a == out))

    def test_guvectorize_objectmode(self):
        ufunc = guvectorize(['(int32[:,:], int32[:,:], int32[:,:])'],
                            "(x,y),(x,y)->(x,y)")(guadd_obj)
        a = numpy.arange(10, dtype='int32').reshape(2, 5)
        b = ufunc(a, a)
        self.assertTrue(numpy.all(a + a == b))

    def test_guvectorize_error_in_objectmode(self):
        # Exceptions raised inside the kernel must propagate to the caller.
        ufunc = guvectorize(['(int32[:,:], int32[:,:], int32[:,:])'],
                            "(x,y),(x,y)->(x,y)", forceobj=True)(guerror)
        a = numpy.arange(10, dtype='int32').reshape(2, 5)
        with self.assertRaises(MyException):
            ufunc(a, a)

    def test_guvectorize_identity(self):
        args = (['(int32[:,:], int32[:,:], int32[:,:])'], "(x,y),(x,y)->(x,y)")
        for identity in self._supported_identities:
            ufunc = guvectorize(*args, identity=identity)(guadd)
            expected = None if identity == 'reorderable' else identity
            self.assertEqual(ufunc.identity, expected)
        # Default value is None
        ufunc = guvectorize(*args)(guadd)
        self.assertIs(ufunc.identity, None)
        # Invalid values
        with self.assertRaises(ValueError):
            guvectorize(*args, identity='none')(add)
        with self.assertRaises(ValueError):
            guvectorize(*args, identity=2)(add)

    def test_guvectorize_invalid_layout(self):
        sigs = ['(int32[:,:], int32[:,:], int32[:,:])']
        # Syntax error
        with self.assertRaises(ValueError) as raises:
            guvectorize(sigs, ")-:")(guadd)
        self.assertIn("bad token in signature", str(raises.exception))
        # Output shape can't be inferred from inputs
        with self.assertRaises(NameError) as raises:
            guvectorize(sigs, "(x,y),(x,y)->(x,z,v)")(guadd)
        self.assertEqual(str(raises.exception),
                         "undefined output symbols: v,z")
        # Arrow but no outputs
        with self.assertRaises(ValueError) as raises:
            guvectorize(sigs, "(x,y),(x,y),(x,y)->")(guadd)
        # (error message depends on Numpy version)
class TestVectorizeDecorJitDisabled(TestVectorizeDecor):
    """Re-runs TestVectorizeDecor with config.DISABLE_JIT toggled.

    NOTE(review): setUp forces DISABLE_JIT to False, which looks
    inconsistent with the class name; confirm whether True was intended.
    """

    def setUp(self):
        # Save and restore the global flag so other tests are unaffected.
        self.old_disable_jit = config.DISABLE_JIT
        config.DISABLE_JIT = False

    def tearDown(self):
        config.DISABLE_JIT = self.old_disable_jit
# Standard unittest entry point.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "378e20252d4ab890bed47ba79fc0f688",
"timestamp": "",
"source": "github",
"line_count": 304,
"max_line_length": 79,
"avg_line_length": 32.38157894736842,
"alnum_prop": 0.568772856562373,
"repo_name": "pitrou/numba",
"id": "4749ff103ef8ed7ef5f20addb7fcce7215df5317",
"size": "9844",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "numba/tests/npyufunc/test_ufuncbuilding.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "2212"
},
{
"name": "C",
"bytes": "241911"
},
{
"name": "C++",
"bytes": "17024"
},
{
"name": "Cuda",
"bytes": "214"
},
{
"name": "HTML",
"bytes": "98846"
},
{
"name": "PowerShell",
"bytes": "3153"
},
{
"name": "Python",
"bytes": "3236740"
},
{
"name": "Shell",
"bytes": "120"
}
],
"symlink_target": ""
} |
import fileinput
import subprocess
import os
import urllib2
import json
def transmit(matrix):
    """POST *matrix* as a JSON body to the local print endpoint."""
    request = urllib2.Request('http://localhost:8000/print')
    request.add_header('Content-Type', 'application/json')
    payload = json.dumps(matrix)
    return urllib2.urlopen(request, payload)
# Read characters from stdin (or the files named on the command line),
# render each one to a bitmap via the font8X8 converter script, and send
# the bitmap to the local print endpoint.
for line in fileinput.input():
    for char in line.rstrip():
        # Bug fix: pass the character as a separate argv element instead
        # of interpolating it into a shell command line. The previous
        # shell=True form broke on (and was injectable through)
        # characters such as quotes, backticks or '$'.
        rawMatrix = subprocess.check_output(
            [os.getcwd() + '/font8X8/converter.py', char]
        ).rstrip()
        # Materialize the rows as lists so the payload stays
        # JSON-serializable on Python 3, where map() is lazy.
        jsonMatrix = [
            [int(digit, 10) for digit in row]
            for row in rawMatrix.split('\n')
        ]
        transmit(jsonMatrix)
| {
"content_hash": "c66011ca51ff41c5897169bb3fe366b6",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 72,
"avg_line_length": 27.708333333333332,
"alnum_prop": 0.5729323308270676,
"repo_name": "anuragpeshne/LEcho",
"id": "1b583413cb7f867b738f57b8ad135ae5df859008",
"size": "684",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "LEcho.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "684"
}
],
"symlink_target": ""
} |
"""
@name: Modules/House/Family/insteon/insteon_button.py
@author: D. Brian Kimmel
@contact: D.BrianKimmel@gmail.com
@copyright: (c) 2019-2020 by D. Brian Kimmel
@note: Created on Aug 18, 2019
@license: MIT License
@summary:
We receive and handle events from Insteon button devices here.
"""
__updated__ = '2020-02-18'
# Import system type stuff
# Import PyMh files
from Modules.Core import logging_pyh as Logger
LOG = Logger.getLogger('PyHouse.Insteon_Button ')
class LocalConfig:
    """Placeholder for this module's config handling; no behavior yet."""
class Discovery:
    """Placeholder for insteon button device discovery; no behavior yet."""
class ButtonActions:
    """Actions performed against Insteon button devices."""

    def initial_button_load(self, p_controller_obj=None, p_obj=None):
        """Query an Insteon button device for its initial state.

        Bug fix: the original body referenced ``p_obj`` and
        ``p_controller_obj`` without defining them, so every call raised
        NameError. They are now parameters; the None defaults keep the
        old no-argument call signature working (logging instead of
        crashing).

        NOTE(review): ``_get_engine_version`` / ``_get_id_request`` /
        ``_get_one_device_status`` are not defined in this module;
        presumably they are provided by a protocol mixin -- confirm.
        """
        if p_controller_obj is None or p_obj is None:
            LOG.warning('initial_button_load called without a controller/device; nothing to do.')
            return
        if p_obj.Family.Name.lower() == 'insteon':
            self._get_engine_version(p_controller_obj, p_obj)
            self._get_id_request(p_controller_obj, p_obj)
            self._get_one_device_status(p_controller_obj, p_obj)
        else:
            LOG.warning('Skipping "{}" "{}" device "{}"'.format(p_obj.DeviceType, p_obj.DeviceSubType, p_obj.Name))
# ## END DBK
| {
"content_hash": "6a5f85e14880fe3d9a9edc0ea6b1a602",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 115,
"avg_line_length": 20.571428571428573,
"alnum_prop": 0.5952380952380952,
"repo_name": "DBrianKimmel/PyHouse",
"id": "bf6a92f2c90db86c22d391d2bb2140df4922523b",
"size": "1008",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "Project/src/Modules/House/Family/Insteon/insteon_button.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "114778"
},
{
"name": "HTML",
"bytes": "15398"
},
{
"name": "JavaScript",
"bytes": "220171"
},
{
"name": "Python",
"bytes": "1491784"
},
{
"name": "Shell",
"bytes": "2131"
}
],
"symlink_target": ""
} |
import sys

# Copy standard input to standard output unchanged.  writelines() consumes
# the file object lazily, line by line, exactly like the explicit loop.
sys.stdout.writelines(sys.stdin)
| {
"content_hash": "a3864b1ec1e685788c5a5f0d6561a1d0",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 26,
"avg_line_length": 12.6,
"alnum_prop": 0.6984126984126984,
"repo_name": "jvanbrug/alanaldavista",
"id": "f13dd00a0dcf2cb4e9f43692a80fa42556b00641",
"size": "85",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data/process_map.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3672584"
}
],
"symlink_target": ""
} |
# Cascade Mask R-CNN (R-50, FPN, 1x COCO schedule) with deformable
# convolutions in the backbone; everything else is inherited from _base_.
_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py'
model = dict(
    backbone=dict(
        # deform_groups=1: a single offset group per deformable conv;
        # fallback_on_stride=False: keep DCN even for strided convolutions.
        dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False),
        # Enable DCN in the last three of the four backbone stages only.
        stage_with_dcn=(False, True, True, True)))
| {
"content_hash": "8d31e2a21e2d0a48beb515dd82ebb136",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 72,
"avg_line_length": 44.2,
"alnum_prop": 0.6425339366515838,
"repo_name": "open-mmlab/mmdetection",
"id": "3b3683af235f46df36d8793e52c2b9c52e0defeb",
"size": "221",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2540"
},
{
"name": "Python",
"bytes": "4811377"
},
{
"name": "Shell",
"bytes": "47911"
}
],
"symlink_target": ""
} |
"""
Set up the plot figures, axes, and items to be done for each frame.
This module is imported by the plotting routines and then the
function setplot is called to set the plot parameters.
"""
from pyclaw.geotools import topotools
from pyclaw.data import Data
#--------------------------
def setplot(plotdata):
    """
    Specify what is to be plotted at each frame.
    Input:  plotdata, an instance of pyclaw.plotters.data.ClawPlotData.
    Output: a modified version of plotdata.
    """

    from pyclaw.plotters import colormaps, geoplot
    from numpy import linspace

    plotdata.clearfigures()  # clear any old figures,axes,items data

    # To plot gauge locations on pcolor or contour plot, use this as
    # an afteraxis function:
    def addgauges(current_data):
        from pyclaw.plotters import gaugetools
        gaugetools.plot_gauge_locations(current_data.plotdata, \
            gaugenos='all', format_string='ko', add_labels=True)

    #-----------------------------------------
    # Figure for pcolor plot (hidden by default; the imshow figure is shown)
    #-----------------------------------------
    plotfigure = plotdata.new_plotfigure(name='pcolor', figno=0)
    plotfigure.show = False

    # Set up for axes in this figure:
    plotaxes = plotfigure.new_plotaxes('pcolor')
    plotaxes.title = 'Surface'
    plotaxes.scaled = True

    def fixup(current_data):
        import pylab
        addgauges(current_data)
        t = current_data.t
        t = t / 3600.  # hours
        pylab.title('Surface at %4.2f hours' % t, fontsize=20)
        pylab.xticks(fontsize=15)
        pylab.yticks(fontsize=15)
    plotaxes.afteraxes = fixup

    # Water
    plotitem = plotaxes.new_plotitem(plot_type='2d_pcolor')
    #plotitem.plot_var = geoplot.surface
    plotitem.plot_var = geoplot.surface_or_depth
    plotitem.pcolor_cmap = geoplot.tsunami_colormap
    plotitem.pcolor_cmin = -0.2
    plotitem.pcolor_cmax = 0.2
    plotitem.add_colorbar = True
    plotitem.amr_gridlines_show = [0,0,0]
    plotitem.gridedges_show = 1

    # Land
    plotitem = plotaxes.new_plotitem(plot_type='2d_pcolor')
    plotitem.plot_var = geoplot.land
    plotitem.pcolor_cmap = geoplot.land_colors
    plotitem.pcolor_cmin = 0.0
    plotitem.pcolor_cmax = 100.0
    plotitem.add_colorbar = False
    plotitem.amr_gridlines_show = [1,1,0]
    plotitem.gridedges_show = 1
    plotaxes.xlimits = [-120,-60]
    plotaxes.ylimits = [-60,0]

    # add contour lines of bathy if desired:
    plotitem = plotaxes.new_plotitem(plot_type='2d_contour')
    plotitem.show = False
    plotitem.plot_var = geoplot.topo
    plotitem.contour_levels = linspace(-2000,0,5)
    plotitem.amr_contour_colors = ['y']  # color on each level
    plotitem.kwargs = {'linestyles':'solid','linewidths':2}
    plotitem.amr_contour_show = [1,0,0]
    plotitem.gridlines_show = 0
    plotitem.gridedges_show = 0

    #-----------------------------------------
    # Figure for imshow plot
    #-----------------------------------------
    plotfigure = plotdata.new_plotfigure(name='imshow', figno=1)

    # Set up for axes in this figure:
    plotaxes = plotfigure.new_plotaxes('imshow')
    plotaxes.title = 'Surface'
    plotaxes.scaled = True

    def fixup(current_data):
        import pylab
        addgauges(current_data)
        t = current_data.t
        t = t / 3600.  # hours
        pylab.title('Surface at %4.2f hours' % t, fontsize=20)
        pylab.xticks(fontsize=15)
        pylab.yticks(fontsize=15)
    plotaxes.afteraxes = fixup

    # Water
    plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')
    #plotitem.plot_var = geoplot.surface
    plotitem.plot_var = geoplot.surface_or_depth
    plotitem.imshow_cmap = geoplot.tsunami_colormap
    plotitem.imshow_cmin = -0.2
    plotitem.imshow_cmax = 0.2
    plotitem.add_colorbar = True
    plotitem.amr_gridlines_show = [0,0,0]
    plotitem.gridedges_show = 1

    # Land
    plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')
    plotitem.plot_var = geoplot.land
    plotitem.imshow_cmap = geoplot.land_colors
    plotitem.imshow_cmin = 0.0
    plotitem.imshow_cmax = 100.0
    plotitem.add_colorbar = False
    plotitem.amr_gridlines_show = [0,0,0]
    plotitem.gridedges_show = 1
    plotaxes.xlimits = [-120,-60]
    plotaxes.ylimits = [-60,0]

    # add contour lines of bathy if desired:
    plotitem = plotaxes.new_plotitem(plot_type='2d_contour')
    plotitem.show = False
    plotitem.plot_var = geoplot.topo
    plotitem.contour_levels = linspace(-2000,0,5)
    plotitem.amr_contour_colors = ['y']  # color on each level
    plotitem.kwargs = {'linestyles':'solid','linewidths':2}
    plotitem.amr_contour_show = [1,0,0]
    plotitem.gridlines_show = 0
    plotitem.gridedges_show = 0

    #-----------------------------------------
    # Figures for gauges
    #-----------------------------------------
    plotfigure = plotdata.new_plotfigure(name='Surface & topo', figno=300, \
                    type='each_gauge')
    plotfigure.clf_each_gauge = True

    # Set up for axes in this figure:
    plotaxes = plotfigure.new_plotaxes()
    plotaxes.xlimits = 'auto'
    plotaxes.ylimits = 'auto'
    plotaxes.title = 'Surface'

    # Plot surface as blue curve:
    plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
    plotitem.plot_var = 3
    plotitem.plotstyle = 'b-'

    # Plot topo as green curve:
    plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
    plotitem.show = False

    def gaugetopo(current_data):
        q = current_data.q
        h = q[:,0]
        eta = q[:,3]
        topo = eta - h
        return topo
    plotitem.plot_var = gaugetopo
    plotitem.plotstyle = 'g-'

    def add_zeroline(current_data):
        from pylab import plot, legend, xticks, floor
        t = current_data.t
        #legend(('surface','topography'),loc='lower left')
        plot(t, 0*t, 'k')
        # Fix: floor() returns a float, but range() needs an int (this is a
        # TypeError on Python 3 and only a deprecation on Python 2).
        n = int(floor(t.max()/3600.) + 2)
        xticks([3600*i for i in range(n)])
    plotaxes.afteraxes = add_zeroline

    #-----------------------------------------

    # Parameters used only when creating html and/or latex hardcopy
    # e.g., via pyclaw.plotters.frametools.printframes:

    plotdata.printfigs = True                # print figures
    plotdata.print_format = 'png'            # file format
    plotdata.print_framenos = 'all'          # list of frames to print
    plotdata.print_gaugenos = 'all'          # list of gauges to print
    plotdata.print_fignos = 'all'            # list of figures to print
    plotdata.html = True                     # create html files of plots?
    plotdata.html_homelink = '../README.html'   # pointer for top of index
    plotdata.latex = True                    # create latex file of plots?
    plotdata.latex_figsperline = 2           # layout of plots
    plotdata.latex_framesperline = 1         # layout of plots
    plotdata.latex_makepdf = False           # also run pdflatex?

    return plotdata
| {
"content_hash": "22eeace244d97efd23782b3a3d426238",
"timestamp": "",
"source": "github",
"line_count": 213,
"max_line_length": 76,
"avg_line_length": 32.751173708920184,
"alnum_prop": 0.6092316513761468,
"repo_name": "clawpack/clawpack-4.x",
"id": "3b82894ad275c32529ec6add99d1450248ea0e9b",
"size": "6977",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "apps/tsunami/chile2010/setplot_imshow.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Fortran",
"bytes": "1413468"
},
{
"name": "HTML",
"bytes": "1032"
},
{
"name": "Limbo",
"bytes": "135"
},
{
"name": "M",
"bytes": "123"
},
{
"name": "Makefile",
"bytes": "153571"
},
{
"name": "Matlab",
"bytes": "311883"
},
{
"name": "Objective-C",
"bytes": "36"
},
{
"name": "Python",
"bytes": "1242190"
},
{
"name": "Shell",
"bytes": "1579"
}
],
"symlink_target": ""
} |
"""
Example Script for the Python Geographic Visualizer (GeoVis)
https://github.com/karimbahgat/geovis
"""
#importing geovis from temporary location
TEMP_GEOVIS_FOLDER = r"C:\Users\BIGKIMO\Documents\GitHub\geovis"
import sys
sys.path.append(TEMP_GEOVIS_FOLDER)
import geovis
############
#SETUP
############
#set rendering options
geovis.SetRenderingOptions(renderer="PIL", numpyspeed=False, reducevectors=False)
#create map
geovis.SetMapBackground(geovis.Color("blue", brightness=0.9))
geovis.SetMapZoom(x2x=[-120,40],y2y=[-60,20])
newmap = geovis.NewMap()
############
#LOAD AND SYMBOLIZE LAYERS
############
countrylayer = geovis.Layer(filepath=r"D:\Test Data\necountries\necountries.shp", fillcolor=geovis.Color("yellow",brightness=0.8))
pointlayer = geovis.Layer(filepath=r"D:\Test Data\GTD_Georef\gtd_georef.shp", symbolizer="square")
pointlayer.AddClassification(symboltype="fillcolor", valuefield="nwound", symbolrange=[geovis.Color("white"),geovis.Color("red", intensity=0.9, brightness=0.9),geovis.Color("red", intensity=0.9, brightness=0.5)], classifytype="natural breaks", nrclasses=3)
pointlayer.AddClassification(symboltype="fillsize", valuefield="nwound", symbolrange=[0.3,2.8], classifytype="natural breaks", nrclasses=3)
############
#RENDER TO MAP
############
#add layers to map
newmap.AddToMap(countrylayer)
newmap.AddToMap(pointlayer)
#add legend
newmap.AddLegend(pointlayer, upperleft=(0.5,0.7), bottomright=(0.9,0.9))
#view map
newmap.ViewMap()
| {
"content_hash": "223ea86c0fa5b50f5820252c8ba65e01",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 256,
"avg_line_length": 35.951219512195124,
"alnum_prop": 0.737449118046133,
"repo_name": "karimbahgat/GeoVis",
"id": "6e1114882e179cd6e287d044869c9d5c28d221a4",
"size": "1474",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/pointmap_example.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "268217"
}
],
"symlink_target": ""
} |
import abc
import json
import math
from collections.abc import Mapping, Sequence
import pprint
import numpy as np
import hail as hl
from hail import genetics
from hail.expr.nat import NatBase, NatLiteral
from .type_parsing import type_grammar, type_node_visitor
from hail.genetics.reference_genome import reference_genome_type
from hail.typecheck import typecheck, typecheck_method, oneof, transformed
from hail.utils.java import escape_parsable
__all__ = [
'dtype',
'HailType',
'hail_type',
'is_container',
'is_compound',
'is_numeric',
'is_primitive',
'types_match',
'tint',
'tint32',
'tint64',
'tfloat',
'tfloat32',
'tfloat64',
'tstr',
'tbool',
'tarray',
'tstream',
'tndarray',
'tset',
'tdict',
'tstruct',
'tunion',
'ttuple',
'tinterval',
'tlocus',
'tcall',
'tvoid',
'tvariable',
'hts_entry_schema',
]
def summary_type(t):
    """One-line, human-readable summary of a Hail type.

    Container types are described recursively; structs and tuples are
    summarized by field count; anything else falls back to ``str(t)``.
    """
    if isinstance(t, hl.tdict):
        return f'dict<{summary_type(t.key_type)}, {summary_type(t.value_type)}>'
    if isinstance(t, hl.tset):
        return f'set<{summary_type(t.element_type)}>'
    if isinstance(t, hl.tarray):
        return f'array<{summary_type(t.element_type)}>'
    if isinstance(t, hl.tstruct):
        return f'struct with {len(t)} fields'
    if isinstance(t, hl.ttuple):
        return f'tuple with {len(t)} fields'
    if isinstance(t, hl.tinterval):
        return f'interval<{summary_type(t.point_type)}>'
    return str(t)
def dtype(type_str):
    r"""Parse a type from its string representation.

    Examples
    --------
    >>> hl.dtype('int')
    dtype('int32')

    >>> hl.dtype('float')
    dtype('float64')

    >>> hl.dtype('array<int32>')
    dtype('array<int32>')

    >>> hl.dtype('dict<str, bool>')
    dtype('dict<str, bool>')

    >>> hl.dtype('struct{a: int32, `field with spaces`: int64}')
    dtype('struct{a: int32, `field with spaces`: int64}')

    Notes
    -----
    This function is able to reverse ``str(t)`` on a :class:`.HailType`.

    The grammar is defined as follows:

    .. code-block:: text

        type = _ (array / set / dict / struct / union / tuple / interval / int64 / int32 / float32 / float64 / bool / str / call / str / locus) _
        int64 = "int64" / "tint64"
        int32 = "int32" / "tint32" / "int" / "tint"
        float32 = "float32" / "tfloat32"
        float64 = "float64" / "tfloat64" / "tfloat" / "float"
        bool = "tbool" / "bool"
        call = "tcall" / "call"
        str = "tstr" / "str"
        locus = ("tlocus" / "locus") _ "[" identifier "]"
        array = ("tarray" / "array") _ "<" type ">"
        array = ("tstream" / "stream") _ "<" type ">"
        ndarray = ("tndarray" / "ndarray") _ "<" type, identifier ">"
        set = ("tset" / "set") _ "<" type ">"
        dict = ("tdict" / "dict") _ "<" type "," type ">"
        struct = ("tstruct" / "struct") _ "{" (fields / _) "}"
        union = ("tunion" / "union") _ "{" (fields / _) "}"
        tuple = ("ttuple" / "tuple") _ "(" ((type ("," type)*) / _) ")"
        fields = field ("," field)*
        field = identifier ":" type
        interval = ("tinterval" / "interval") _ "<" type ">"
        identifier = _ (simple_identifier / escaped_identifier) _
        simple_identifier = ~"\w+"
        escaped_identifier = ~"`([^`\\\\]|\\\\.)*`"
        _ = ~"\s*"

    Parameters
    ----------
    type_str : :class:`str`
        String representation of type.

    Returns
    -------
    :class:`.HailType`
    """
    # Parse with the PEG grammar and build the HailType in one pass.
    return type_node_visitor.visit(type_grammar.parse(type_str))
class HailTypeContext(object):
    """Tracks the reference genomes a type (transitively) refers to.

    Used to serialize the reference-genome configurations alongside a type
    (see ``_to_json_context``).
    """

    def __init__(self, references=None):
        """
        Parameters
        ----------
        references : set of str, optional
            Names of the reference genomes this context carries.  Defaults
            to an empty set.  (Fix: the original used a mutable default
            argument ``references=set()``.)
        """
        self.references = references if references is not None else set()
        # Cache for _to_json_context.  Fix: the original never initialized
        # this attribute, so the first _to_json_context() call raised
        # AttributeError.
        self._json = None

    @property
    def is_empty(self):
        """True when no reference genomes are tracked."""
        return len(self.references) == 0

    def _to_json_context(self):
        # Lazily build and cache the JSON-able description of every tracked
        # reference genome.
        if self._json is None:
            self._json = {
                'reference_genomes':
                    {r: hl.get_reference(r)._config for r in self.references}
            }
        return self._json

    @classmethod
    def union(cls, *types):
        """Union of the contexts of `types`, ignoring empty contexts."""
        ctxs = [t.get_context() for t in types if not t.get_context().is_empty]
        if len(ctxs) == 0:
            return _empty_context
        if len(ctxs) == 1:
            return ctxs[0]
        refs = ctxs[0].references.union(*[ctx.references for ctx in ctxs[1:]])
        return HailTypeContext(refs)
# Shared singleton returned for types that reference no genomes.
_empty_context = HailTypeContext()
class HailType(object):
    """
    Hail type superclass.

    Concrete subclasses implement ``_eq``, ``__str__``, ``_parsable_string``,
    ``_typecheck_one_level``, and the unification hooks (``unify``, ``subst``,
    ``clear``).
    """

    def __init__(self):
        super(HailType, self).__init__()
        # Lazily-computed HailTypeContext; see get_context().
        self._context = None

    def __repr__(self):
        # Render as a dtype(...) expression; single quotes in the type's
        # string form are escaped so the result stays well-formed.
        s = str(self).replace("'", "\\'")
        return "dtype('{}')".format(s)

    @abc.abstractmethod
    def _eq(self, other):
        # Subclass-specific equality; __eq__ guarantees `other` is a HailType.
        return

    def __eq__(self, other):
        return isinstance(other, HailType) and self._eq(other)

    @abc.abstractmethod
    def __str__(self):
        return

    def __hash__(self):
        # FIXME this is a bit weird
        # Hashing the string form keeps __hash__ consistent with __eq__ only
        # as long as __str__ uniquely identifies the type.
        return 43 + hash(str(self))

    def pretty(self, indent=0, increment=4):
        """Returns a prettily formatted string representation of the type.

        Parameters
        ----------
        indent : :obj:`int`
            Spaces to indent.

        Returns
        -------
        :class:`str`
        """
        b = []
        b.append(' ' * indent)
        self._pretty(b, indent, increment)
        return ''.join(b)

    def _pretty(self, b, indent, increment):
        # Default: append the one-line form.  Container types override this
        # to recurse into their element types.
        b.append(str(self))

    @abc.abstractmethod
    def _parsable_string(self):
        # String form understood by the backend parser (e.g. "Int32").
        pass

    def typecheck(self, value):
        """Check that `value` matches a type.

        Parameters
        ----------
        value
            Value to check.

        Raises
        ------
        :obj:`TypeError`
        """
        def check(t, obj):
            t._typecheck_one_level(obj)
            # Always recurse into children (see _traverse's contract).
            return True
        self._traverse(value, check)

    @abc.abstractmethod
    def _typecheck_one_level(self, annotation):
        # Validate only the outermost layer of `annotation`; children are
        # handled by _traverse/typecheck.
        pass

    def _to_json(self, x):
        converted = self._convert_to_json_na(x)
        return json.dumps(converted)

    def _convert_to_json_na(self, x):
        # The _na variants pass missing values (None) through unchanged.
        if x is None:
            return x
        else:
            return self._convert_to_json(x)

    def _convert_to_json(self, x):
        # Default: value is already JSON-compatible; subclasses override.
        return x

    def _from_json(self, s):
        x = json.loads(s)
        return self._convert_from_json_na(x)

    def _convert_from_json_na(self, x):
        # The _na variants pass missing values (None) through unchanged.
        if x is None:
            return x
        else:
            return self._convert_from_json(x)

    def _convert_from_json(self, x):
        # Default: JSON value maps directly to the Python value.
        return x

    def _traverse(self, obj, f):
        """Traverse a nested type and object.

        Parameters
        ----------
        obj : Any
        f : Callable[[HailType, Any], bool]
            Function to evaluate on the type and object. Traverse children if
            the function returns ``True``.
        """
        f(self, obj)

    @abc.abstractmethod
    def unify(self, t):
        # Unification hook: attempt to match this (possibly variable-bearing)
        # type against concrete type `t`; returns a bool in subclasses.
        raise NotImplementedError

    @abc.abstractmethod
    def subst(self):
        # Substitute any bound type variables, returning a concrete type.
        raise NotImplementedError

    @abc.abstractmethod
    def clear(self):
        # Reset any type-variable bindings.
        raise NotImplementedError

    def _get_context(self):
        return _empty_context

    def get_context(self):
        # Cache the context: types are immutable once constructed.
        if self._context is None:
            self._context = self._get_context()
        return self._context
# typecheck spec: accepts either a HailType instance or a string, which is
# coerced to a HailType via dtype().
hail_type = oneof(HailType, transformed((str, dtype)))
class _tvoid(HailType):
    """Hail's void type: carries no values; not parameterized."""

    def __init__(self):
        super().__init__()

    def __str__(self):
        return "void"

    def _eq(self, other):
        # All _tvoid instances are interchangeable.
        return isinstance(other, _tvoid)

    def _parsable_string(self):
        return "Void"

    def unify(self, t):
        return t == tvoid

    def subst(self):
        # No type variables to substitute.
        return self

    def clear(self):
        # No type variables to clear.
        pass
class _tint32(HailType):
    """Hail type for signed 32-bit integers.

    Values range over :math:`[-2^{31}, 2^{31} - 1]` and are represented in
    Python as :obj:`int`.
    """

    def __init__(self):
        super().__init__()

    def _typecheck_one_level(self, annotation):
        # None encodes a missing value and always passes.
        if annotation is None:
            return
        if not isinstance(annotation, int):
            raise TypeError("type 'tint32' expected Python 'int', but found type '%s'" % type(annotation))
        if not self.min_value <= annotation <= self.max_value:
            raise TypeError(f"Value out of range for 32-bit integer: "
                            f"expected [{self.min_value}, {self.max_value}], found {annotation}")

    def __str__(self):
        return "int32"

    def _eq(self, other):
        return isinstance(other, _tint32)

    def _parsable_string(self):
        return "Int32"

    @property
    def min_value(self):
        """Smallest representable value, -2**31."""
        return -(1 << 31)

    @property
    def max_value(self):
        """Largest representable value, 2**31 - 1."""
        return (1 << 31) - 1

    def unify(self, t):
        return t == tint32

    def subst(self):
        return self

    def clear(self):
        pass

    def to_numpy(self):
        return np.int32
class _tint64(HailType):
    """Hail type for signed 64-bit integers.

    Values range over :math:`[-2^{63}, 2^{63} - 1]` and are represented in
    Python as :obj:`int`.
    """

    def __init__(self):
        super().__init__()

    def _typecheck_one_level(self, annotation):
        # None encodes a missing value and always passes.
        if annotation is None:
            return
        if not isinstance(annotation, int):
            raise TypeError("type 'int64' expected Python 'int', but found type '%s'" % type(annotation))
        if not self.min_value <= annotation <= self.max_value:
            raise TypeError(f"Value out of range for 64-bit integer: "
                            f"expected [{self.min_value}, {self.max_value}], found {annotation}")

    def __str__(self):
        return "int64"

    def _eq(self, other):
        return isinstance(other, _tint64)

    def _parsable_string(self):
        return "Int64"

    @property
    def min_value(self):
        """Smallest representable value, -2**63."""
        return -(1 << 63)

    @property
    def max_value(self):
        """Largest representable value, 2**63 - 1."""
        return (1 << 63) - 1

    def unify(self, t):
        return t == tint64

    def subst(self):
        return self

    def clear(self):
        pass

    def to_numpy(self):
        return np.int64
class _tfloat32(HailType):
    """Hail type for 32-bit floating point numbers.

    In Python, these are represented as :obj:`float`.
    """

    def __init__(self):
        super().__init__()

    def _typecheck_one_level(self, annotation):
        # ints are accepted and treated as floats.
        if annotation is not None and not isinstance(annotation, (float, int)):
            raise TypeError("type 'float32' expected Python 'float', but found type '%s'" % type(annotation))

    def __str__(self):
        return "float32"

    def _eq(self, other):
        return isinstance(other, _tfloat32)

    def _parsable_string(self):
        return "Float32"

    def _convert_from_json(self, x):
        # float() also parses the string encodings produced for non-finite
        # values by _convert_to_json ('nan', 'inf', '-inf').
        return float(x)

    def _convert_to_json(self, x):
        # Non-finite floats are not valid JSON numbers; emit them as strings.
        return x if math.isfinite(x) else str(x)

    def unify(self, t):
        return t == tfloat32

    def subst(self):
        return self

    def clear(self):
        pass

    def to_numpy(self):
        return np.float32
class _tfloat64(HailType):
    """Hail type for 64-bit floating point numbers.

    In Python, these are represented as :obj:`float`.
    """

    def __init__(self):
        super().__init__()

    def _typecheck_one_level(self, annotation):
        # ints are accepted and treated as floats.
        if annotation is not None and not isinstance(annotation, (float, int)):
            raise TypeError("type 'float64' expected Python 'float', but found type '%s'" % type(annotation))

    def __str__(self):
        return "float64"

    def _eq(self, other):
        return isinstance(other, _tfloat64)

    def _parsable_string(self):
        return "Float64"

    def _convert_from_json(self, x):
        # float() also parses the string encodings produced for non-finite
        # values by _convert_to_json ('nan', 'inf', '-inf').
        return float(x)

    def _convert_to_json(self, x):
        # Non-finite floats are not valid JSON numbers; emit them as strings.
        return x if math.isfinite(x) else str(x)

    def unify(self, t):
        return t == tfloat64

    def subst(self):
        return self

    def clear(self):
        pass

    def to_numpy(self):
        return np.float64
class _tstr(HailType):
    """Hail type for text strings.

    In Python, these are represented as :obj:`str`.
    """

    def __init__(self):
        super(_tstr, self).__init__()

    def _typecheck_one_level(self, annotation):
        # Fix: the original guard was `if annotation and ...`, which silently
        # accepted any falsy non-string (0, False, [], ...).  Match the other
        # primitive types and only skip the check for None (missing).
        if annotation is not None and not isinstance(annotation, str):
            raise TypeError("type 'str' expected Python 'str', but found type '%s'" % type(annotation))

    def __str__(self):
        return "str"

    def _eq(self, other):
        return isinstance(other, _tstr)

    def _parsable_string(self):
        return "String"

    def unify(self, t):
        return t == tstr

    def subst(self):
        return self

    def clear(self):
        pass
class _tbool(HailType):
    """Hail type for Boolean (``True`` or ``False``) values.

    In Python, these are represented as :obj:`bool`.
    """

    def __init__(self):
        super(_tbool, self).__init__()

    def _typecheck_one_level(self, annotation):
        if annotation is not None and not isinstance(annotation, bool):
            raise TypeError("type 'bool' expected Python 'bool', but found type '%s'" % type(annotation))

    def __str__(self):
        return "bool"

    def _eq(self, other):
        return isinstance(other, _tbool)

    def _parsable_string(self):
        return "Boolean"

    def unify(self, t):
        return t == tbool

    def subst(self):
        return self

    def clear(self):
        pass

    def to_numpy(self):
        # Fix: `np.bool` was a deprecated alias for the builtin bool and was
        # removed in NumPy 1.24.  `np.bool_` is the actual NumPy scalar type,
        # consistent with the np.int32/np.float64/... returned by the other
        # types' to_numpy().
        return np.bool_
class tndarray(HailType):
    """Hail type for n-dimensional arrays.

    .. include:: _templates/experimental.rst

    In Python, these are represented as NumPy :obj:`numpy.ndarray`.

    Notes
    -----
    NDArrays contain elements of only one type, which is parameterized by
    `element_type`.

    Parameters
    ----------
    element_type : :class:`.HailType`
        Element type of array.
    ndim : int32
        Number of dimensions.

    See Also
    --------
    :class:`.NDArrayExpression`, :obj:`.nd.array`
    """

    @typecheck_method(element_type=hail_type, ndim=oneof(NatBase, int))
    def __init__(self, element_type, ndim):
        self._element_type = element_type
        # A plain int is wrapped in a NatLiteral; a NatBase is kept as-is so
        # the number of dimensions can participate in unification.
        self._ndim = NatLiteral(ndim) if isinstance(ndim, int) else ndim
        super(tndarray, self).__init__()

    @property
    def element_type(self):
        """NDArray element type.

        Returns
        -------
        :class:`.HailType`
            Element type.
        """
        return self._element_type

    @property
    def ndim(self):
        """NDArray number of dimensions.

        Returns
        -------
        :obj:`int`
            Number of dimensions.
        """
        assert isinstance(self._ndim, NatLiteral), "tndarray must be realized with a concrete number of dimensions"
        return self._ndim.n

    def _traverse(self, obj, f):
        if f(self, obj):
            # 'zerosize_ok' lets nditer visit empty arrays without raising.
            for elt in np.nditer(obj, ['zerosize_ok']):
                self.element_type._traverse(elt.item(), f)

    def _typecheck_one_level(self, annotation):
        if annotation is not None and not isinstance(annotation, np.ndarray):
            raise TypeError("type 'ndarray' expected Python 'numpy.ndarray', but found type '%s'" % type(annotation))

    def __str__(self):
        return "ndarray<{}, {}>".format(self.element_type, self.ndim)

    def _eq(self, other):
        # Fix: compare ndim as well.  __hash__ is derived from str(self),
        # which includes ndim, so ignoring ndim here let two types compare
        # equal while hashing differently -- violating the __eq__/__hash__
        # contract.
        return isinstance(other, tndarray) and \
            self.element_type == other.element_type and \
            self.ndim == other.ndim

    def _pretty(self, b, indent, increment):
        b.append('ndarray<')
        self._element_type._pretty(b, indent, increment)
        b.append(', ')
        b.append(str(self.ndim))
        b.append('>')

    def _parsable_string(self):
        return f'NDArray[{self._element_type._parsable_string()},{self.ndim}]'

    def _convert_from_json(self, x):
        if is_numeric(self._element_type):
            np_type = self.element_type.to_numpy()
            return np.ndarray(shape=x['shape'], buffer=np.array(x['data'], dtype=np_type), dtype=np_type)
        else:
            raise TypeError("Hail cannot currently return ndarrays of non-numeric or boolean type.")

    def _convert_to_json(self, x):
        # Row-major flattening; shape + flat data are enough to reconstruct
        # the array (see _convert_from_json).  The original also computed
        # byte strides here but never used them -- dead code removed.
        return {
            "shape": x.shape,
            "data": x.flatten("C").tolist()
        }

    def clear(self):
        self._element_type.clear()
        self._ndim.clear()

    def unify(self, t):
        return isinstance(t, tndarray) and \
            self._element_type.unify(t._element_type) and \
            self._ndim.unify(t._ndim)

    def subst(self):
        return tndarray(self._element_type.subst(), self._ndim.subst())

    def _get_context(self):
        return self.element_type.get_context()
class tarray(HailType):
    """Hail type for variable-length arrays of elements.

    In Python, these are represented as :obj:`list`.

    Notes
    -----
    Arrays contain elements of only one type, which is parameterized by
    `element_type`.

    Parameters
    ----------
    element_type : :class:`.HailType`
        Element type of array.

    See Also
    --------
    :class:`.ArrayExpression`, :class:`.CollectionExpression`,
    :func:`~hail.expr.functions.array`, :ref:`sec-collection-functions`
    """

    @typecheck_method(element_type=hail_type)
    def __init__(self, element_type):
        self._element_type = element_type
        super(tarray, self).__init__()

    @property
    def element_type(self):
        """Array element type.

        Returns
        -------
        :class:`.HailType`
            Element type.
        """
        return self._element_type

    def _traverse(self, obj, f):
        # Visit the array itself, then each element, recursively.
        if f(self, obj):
            for elt in obj:
                self.element_type._traverse(elt, f)

    def _typecheck_one_level(self, annotation):
        if annotation is not None:
            # NOTE(review): str is also a Sequence, so a bare string passes
            # this level of checking -- confirm that leniency is intended.
            if not isinstance(annotation, Sequence):
                raise TypeError("type 'array' expected Python 'list', but found type '%s'" % type(annotation))

    def __str__(self):
        return "array<{}>".format(self.element_type)

    def _eq(self, other):
        return isinstance(other, tarray) and self.element_type == other.element_type

    def _pretty(self, b, indent, increment):
        b.append('array<')
        self.element_type._pretty(b, indent, increment)
        b.append('>')

    def _parsable_string(self):
        return "Array[" + self.element_type._parsable_string() + "]"

    def _convert_from_json(self, x):
        # The _na variants preserve missing (None) elements.
        return [self.element_type._convert_from_json_na(elt) for elt in x]

    def _convert_to_json(self, x):
        return [self.element_type._convert_to_json_na(elt) for elt in x]

    def _propagate_jtypes(self, jtype):
        # NOTE(review): _add_jtype is not defined in this module; this looks
        # like a legacy JVM-bridge hook -- confirm whether it is still called.
        self._element_type._add_jtype(jtype.elementType())

    def unify(self, t):
        return isinstance(t, tarray) and self.element_type.unify(t.element_type)

    def subst(self):
        return tarray(self.element_type.subst())

    def clear(self):
        self.element_type.clear()

    def _get_context(self):
        return self.element_type.get_context()
class tstream(HailType):
    """Hail type for a stream of values.

    Streams are not realizable as Python values: ``_typecheck_one_level``
    rejects every annotation.  JSON conversion round-trips through lists.

    Parameters
    ----------
    element_type : :class:`.HailType`
        Element type of the stream.
    """

    @typecheck_method(element_type=hail_type)
    def __init__(self, element_type):
        super().__init__()
        self._element_type = element_type

    @property
    def element_type(self):
        """Stream element type."""
        return self._element_type

    def _traverse(self, obj, f):
        # Visit the stream itself, then each element, recursively.
        if f(self, obj):
            for item in obj:
                self.element_type._traverse(item, f)

    def _typecheck_one_level(self, annotation):
        raise TypeError("type 'stream' is not realizable in Python")

    def __str__(self):
        return f'stream<{self.element_type}>'

    def _eq(self, other):
        return isinstance(other, tstream) and self.element_type == other.element_type

    def _pretty(self, b, indent, increment):
        b.append('stream<')
        self.element_type._pretty(b, indent, increment)
        b.append('>')

    def _parsable_string(self):
        return f'Stream[{self.element_type._parsable_string()}]'

    def _convert_from_json(self, x):
        # The _na variants preserve missing (None) elements.
        return [self.element_type._convert_from_json_na(item) for item in x]

    def _convert_to_json(self, x):
        return [self.element_type._convert_to_json_na(item) for item in x]

    def _propagate_jtypes(self, jtype):
        self._element_type._add_jtype(jtype.elementType())

    def unify(self, t):
        return isinstance(t, tstream) and self.element_type.unify(t.element_type)

    def subst(self):
        return tstream(self.element_type.subst())

    def clear(self):
        self.element_type.clear()

    def _get_context(self):
        return self.element_type.get_context()
class tset(HailType):
    """Hail type for collections of distinct elements.

    In Python, these are represented as :obj:`set`.

    Notes
    -----
    Sets contain elements of only one type, which is parameterized by
    `element_type`.

    Parameters
    ----------
    element_type : :class:`.HailType`
        Element type of set.

    See Also
    --------
    :class:`.SetExpression`, :class:`.CollectionExpression`,
    :func:`.set`, :ref:`sec-collection-functions`
    """

    @typecheck_method(element_type=hail_type)
    def __init__(self, element_type):
        self._element_type = element_type
        super(tset, self).__init__()

    @property
    def element_type(self):
        """Set element type.

        Returns
        -------
        :class:`.HailType`
            Element type.
        """
        return self._element_type

    def _traverse(self, obj, f):
        # Visit the set itself, then each element, recursively.
        if f(self, obj):
            for elt in obj:
                self.element_type._traverse(elt, f)

    def _typecheck_one_level(self, annotation):
        if annotation is not None:
            if not isinstance(annotation, set):
                raise TypeError("type 'set' expected Python 'set', but found type '%s'" % type(annotation))

    def __str__(self):
        return "set<{}>".format(self.element_type)

    def _eq(self, other):
        return isinstance(other, tset) and self.element_type == other.element_type

    def _pretty(self, b, indent, increment):
        b.append('set<')
        self.element_type._pretty(b, indent, increment)
        b.append('>')

    def _parsable_string(self):
        return "Set[" + self.element_type._parsable_string() + "]"

    def _convert_from_json(self, x):
        # The _na variants preserve missing (None) elements.
        return {self.element_type._convert_from_json_na(elt) for elt in x}

    def _convert_to_json(self, x):
        # JSON has no set type; serialize as a list.
        return [self.element_type._convert_to_json_na(elt) for elt in x]

    def _propagate_jtypes(self, jtype):
        # NOTE(review): _add_jtype is not defined in this module; this looks
        # like a legacy JVM-bridge hook -- confirm whether it is still called.
        self._element_type._add_jtype(jtype.elementType())

    def unify(self, t):
        return isinstance(t, tset) and self.element_type.unify(t.element_type)

    def subst(self):
        return tset(self.element_type.subst())

    def clear(self):
        self.element_type.clear()

    def _get_context(self):
        return self.element_type.get_context()
class tdict(HailType):
    """Hail type for key-value maps.

    In Python, these are represented as :obj:`dict`.

    Notes
    -----
    Dicts parameterize the type of both their keys and values with
    `key_type` and `value_type`.

    Parameters
    ----------
    key_type: :class:`.HailType`
        Key type.
    value_type: :class:`.HailType`
        Value type.

    See Also
    --------
    :class:`.DictExpression`, :func:`.dict`, :ref:`sec-collection-functions`
    """

    @typecheck_method(key_type=hail_type, value_type=hail_type)
    def __init__(self, key_type, value_type):
        self._key_type = key_type
        self._value_type = value_type
        super(tdict, self).__init__()

    @property
    def key_type(self):
        """Dict key type.

        Returns
        -------
        :class:`.HailType`
            Key type.
        """
        return self._key_type

    @property
    def value_type(self):
        """Dict value type.

        Returns
        -------
        :class:`.HailType`
            Value type.
        """
        return self._value_type

    @property
    def element_type(self):
        # A dict's "element" is its key/value pair, modeled as a struct.
        return tstruct(key=self._key_type, value=self._value_type)

    def _traverse(self, obj, f):
        # Visit the dict itself, then every key and value, recursively.
        if f(self, obj):
            for k, v in obj.items():
                self.key_type._traverse(k, f)
                self.value_type._traverse(v, f)

    def _typecheck_one_level(self, annotation):
        if annotation is not None:
            if not isinstance(annotation, dict):
                raise TypeError("type 'dict' expected Python 'dict', but found type '%s'" % type(annotation))

    def __str__(self):
        return "dict<{}, {}>".format(self.key_type, self.value_type)

    def _eq(self, other):
        return isinstance(other, tdict) and self.key_type == other.key_type and self.value_type == other.value_type

    def _pretty(self, b, indent, increment):
        b.append('dict<')
        self.key_type._pretty(b, indent, increment)
        b.append(', ')
        self.value_type._pretty(b, indent, increment)
        b.append('>')

    def _parsable_string(self):
        return "Dict[{},{}]".format(self.key_type._parsable_string(), self.value_type._parsable_string())

    def _convert_from_json(self, x):
        # Serialized as a list of {'key': ..., 'value': ...} records.
        return {self.key_type._convert_from_json_na(elt['key']): self.value_type._convert_from_json_na(elt['value']) for
                elt in x}

    def _convert_to_json(self, x):
        # Fix: use the _na variants (as _convert_from_json and the other
        # container types do) so that missing (None) keys/values survive
        # the round-trip instead of crashing the per-type converter.
        return [{'key': self.key_type._convert_to_json_na(k),
                 'value': self.value_type._convert_to_json_na(v)} for k, v in x.items()]

    def _propagate_jtypes(self, jtype):
        # NOTE(review): _add_jtype is not defined in this module; this looks
        # like a legacy JVM-bridge hook -- confirm whether it is still called.
        self._key_type._add_jtype(jtype.keyType())
        self._value_type._add_jtype(jtype.valueType())

    def unify(self, t):
        return (isinstance(t, tdict)
                and self.key_type.unify(t.key_type)
                and self.value_type.unify(t.value_type))

    def subst(self):
        return tdict(self._key_type.subst(), self._value_type.subst())

    def clear(self):
        self.key_type.clear()
        self.value_type.clear()

    def _get_context(self):
        return HailTypeContext.union(self.key_type, self.value_type)
class tstruct(HailType, Mapping):
    """Hail type for structured groups of heterogeneous fields.

    In Python, these are represented as :class:`.Struct`.

    A struct is an ordered mapping from unique field names to field types and
    is Hail's primary mechanism for composing types into nested structures:
    the rows, columns, entries and globals of a :class:`.Table` or
    :class:`.MatrixTable` are all structs, and joins produce nested structs
    of the indexed table's non-key fields.

    Parameters
    ----------
    field_types : keyword args of :class:`.HailType`
        Fields.

    See Also
    --------
    :class:`.StructExpression`, :class:`.Struct`
    """

    @typecheck_method(field_types=hail_type)
    def __init__(self, **field_types):
        self._field_types = field_types
        # Keyword-argument order is preserved, fixing the field order.
        self._fields = tuple(field_types)
        super(tstruct, self).__init__()

    @property
    def types(self):
        """Struct field types.

        Returns
        -------
        :obj:`tuple` of :class:`.HailType`
        """
        return tuple(self._field_types.values())

    @property
    def fields(self):
        """Struct field names.

        Returns
        -------
        :obj:`tuple` of :class:`str`
            Tuple of struct field names.
        """
        return self._fields

    def _traverse(self, obj, f):
        # Visit the struct itself first; recurse into fields on request.
        if not f(self, obj):
            return
        for name, value in obj.items():
            self[name]._traverse(value, f)

    def _typecheck_one_level(self, annotation):
        if not annotation:
            return
        if not isinstance(annotation, Mapping):
            raise TypeError("type 'struct' expected type Mapping (e.g. dict or hail.utils.Struct), but found '%s'" %
                            type(annotation))
        known = set(self)
        for name in annotation:
            if name not in known:
                raise TypeError("type '%s' expected fields '%s', but found fields '%s'" %
                                (self, list(self), list(annotation)))

    @typecheck_method(item=oneof(int, str))
    def __getitem__(self, item):
        # Integer indices address fields positionally.
        if isinstance(item, int):
            item = self._fields[item]
        return self._field_types[item]

    def __iter__(self):
        return iter(self._field_types)

    def __len__(self):
        return len(self._fields)

    def __str__(self):
        inner = ', '.join('{}: {}'.format(escape_parsable(name), str(typ))
                          for name, typ in self.items())
        return "struct{{{}}}".format(inner)

    def _eq(self, other):
        if not isinstance(other, tstruct):
            return False
        if self._fields != other._fields:
            return False
        return all(self[name] == other[name] for name in self._fields)

    def _pretty(self, b, indent, increment):
        if not self._fields:
            b.append('struct {}')
            return
        pre_indent = indent
        indent += increment
        b.append('struct {')
        for idx, (name, typ) in enumerate(self.items()):
            if idx > 0:
                b.append(', ')
            b.append('\n')
            b.append(' ' * indent)
            b.append('{}: '.format(escape_parsable(name)))
            typ._pretty(b, indent, increment)
        b.append('\n')
        b.append(' ' * pre_indent)
        b.append('}')

    def _parsable_string(self):
        inner = ','.join('{}:{}'.format(escape_parsable(name), typ._parsable_string())
                         for name, typ in self.items())
        return "Struct{{{}}}".format(inner)

    def _convert_from_json(self, x):
        from hail.utils import Struct
        kwargs = {name: typ._convert_from_json_na(x.get(name))
                  for name, typ in self.items()}
        return Struct(**kwargs)

    def _convert_to_json(self, x):
        return {name: typ._convert_to_json_na(x[name]) for name, typ in self.items()}

    def _is_prefix_of(self, other):
        # NOTE: compares only the leading field *types*, not field names.
        if not isinstance(other, tstruct):
            return False
        if len(self._fields) > len(other._fields):
            return False
        return all(a == b for a, b in zip(self._field_types.values(),
                                          other._field_types.values()))

    def _concat(self, other):
        merged = dict(self._field_types)
        merged.update(other._field_types)
        return tstruct(**merged)

    def _insert(self, path, t):
        # Insert type `t` at the nested field path, creating intermediate
        # structs as needed.
        if not path:
            return t
        key = path[0]
        existing = self.get(key)
        if not (existing and isinstance(existing, tstruct)):
            existing = tstruct()
        return self._insert_fields(**{key: existing._insert(path[1:], t)})

    def _insert_field(self, field, typ):
        return self._insert_fields(**{field: typ})

    def _insert_fields(self, **new_fields):
        # Existing fields keep their positions; re-specified names are
        # overwritten in place and brand-new names are appended.
        merged = dict(self._field_types)
        merged.update(new_fields)
        return tstruct(**merged)

    def _drop_fields(self, fields):
        return tstruct(**{name: typ for name, typ in self.items() if name not in fields})

    def _select_fields(self, fields):
        return tstruct(**{name: self[name] for name in fields})

    def _index_path(self, path):
        # Follow a sequence of field names into nested structs.
        typ = self
        for name in path:
            typ = typ[name]
        return typ

    def _rename(self, map):
        # `map` sends old field names to new ones; unmentioned fields keep
        # their names. Renaming two fields to the same name is an error.
        taken = {}
        renamed = {}
        for old_name, typ in self.items():
            new_name = map.get(old_name, old_name)
            if new_name in taken:
                raise ValueError(
                    "Cannot rename two fields to the same name: attempted to rename {} and {} both to {}".format(
                        repr(taken[new_name]), repr(old_name), repr(new_name)))
            taken[new_name] = old_name
            renamed[new_name] = typ
        return tstruct(**renamed)

    def unify(self, t):
        # Structs unify field-by-field: same names in the same order, with
        # pairwise-unifiable types.
        if not (isinstance(t, tstruct) and len(self) == len(t)):
            return False
        return all(f1 == f2 and t1.unify(t2)
                   for (f1, t1), (f2, t2) in zip(self.items(), t.items()))

    def subst(self):
        return tstruct(**{name: typ.subst() for name, typ in self.items()})

    def clear(self):
        for typ in self.values():
            typ.clear()

    def _get_context(self):
        return HailTypeContext.union(*self.values())
class tunion(HailType, Mapping):
    """Tagged union type: a value of this type holds exactly one of several
    heterogeneous, named cases.

    Parameters
    ----------
    case_types : keyword args of :class:`.HailType`
        The union cases.
    """

    @typecheck_method(case_types=hail_type)
    def __init__(self, **case_types):
        super(tunion, self).__init__()
        self._case_types = case_types
        # Keyword-argument order is preserved, fixing the case order.
        self._cases = tuple(case_types)

    @property
    def cases(self):
        """Return union case names.

        Returns
        -------
        :obj:`tuple` of :class:`str`
            Tuple of union case names
        """
        return self._cases

    @typecheck_method(item=oneof(int, str))
    def __getitem__(self, item):
        # Integer indices address cases positionally.
        if isinstance(item, int):
            item = self._cases[item]
        return self._case_types[item]

    def __iter__(self):
        return iter(self._case_types)

    def __len__(self):
        return len(self._cases)

    def __str__(self):
        inner = ', '.join('{}: {}'.format(escape_parsable(name), str(typ))
                          for name, typ in self.items())
        return "union{{{}}}".format(inner)

    def _eq(self, other):
        if not isinstance(other, tunion):
            return False
        if self._cases != other._cases:
            return False
        return all(self[c] == other[c] for c in self._cases)

    def _pretty(self, b, indent, increment):
        if not self._cases:
            b.append('union {}')
            return
        pre_indent = indent
        indent += increment
        b.append('union {')
        for idx, (name, typ) in enumerate(self.items()):
            if idx > 0:
                b.append(', ')
            b.append('\n')
            b.append(' ' * indent)
            b.append('{}: '.format(escape_parsable(name)))
            typ._pretty(b, indent, increment)
        b.append('\n')
        b.append(' ' * pre_indent)
        b.append('}')

    def _parsable_string(self):
        inner = ','.join('{}:{}'.format(escape_parsable(name), typ._parsable_string())
                         for name, typ in self.items())
        return "Union{{{}}}".format(inner)

    def unify(self, t):
        # Unions unify case-by-case: same names, same order, unifiable types.
        if not (isinstance(t, tunion) and len(self) == len(t)):
            return False
        return all(c1 == c2 and t1.unify(t2)
                   for (c1, t1), (c2, t2) in zip(self.items(), t.items()))

    def subst(self):
        return tunion(**{name: typ.subst() for name, typ in self.items()})

    def clear(self):
        for typ in self.values():
            typ.clear()

    def _get_context(self):
        return HailTypeContext.union(*self.values())
class ttuple(HailType, Sequence):
    """Hail type for tuples.

    In Python, these are represented as :obj:`tuple`.

    Parameters
    ----------
    types: varargs of :class:`.HailType`
        Element types.

    See Also
    --------
    :class:`.TupleExpression`
    """

    @typecheck_method(types=hail_type)
    def __init__(self, *types):
        self._types = types
        super(ttuple, self).__init__()

    @property
    def types(self):
        """Tuple element types.

        Returns
        -------
        :obj:`tuple` of :class:`.HailType`
        """
        return self._types

    def _traverse(self, obj, f):
        # Visit the tuple itself first; recurse element-wise on request.
        if not f(self, obj):
            return
        for typ, element in zip(self.types, obj):
            typ._traverse(element, f)

    def _typecheck_one_level(self, annotation):
        if not annotation:
            return
        if not isinstance(annotation, tuple):
            raise TypeError("type 'tuple' expected Python tuple, but found '%s'" %
                            type(annotation))
        if len(annotation) != len(self.types):
            raise TypeError("%s expected tuple of size '%i', but found '%s'" %
                            (self, len(self.types), annotation))

    @typecheck_method(item=int)
    def __getitem__(self, item):
        return self._types[item]

    def __iter__(self):
        return iter(self._types)

    def __len__(self):
        return len(self._types)

    def __str__(self):
        return "tuple({})".format(", ".join(str(typ) for typ in self.types))

    def _eq(self, other):
        if not isinstance(other, ttuple):
            return False
        if len(self.types) != len(other.types):
            return False
        return all(a == b for a, b in zip(self.types, other.types))

    def _pretty(self, b, indent, increment):
        pre_indent = indent
        indent += increment
        b.append('tuple (')
        for idx, typ in enumerate(self.types):
            if idx > 0:
                b.append(', ')
            b.append('\n')
            b.append(' ' * indent)
            typ._pretty(b, indent, increment)
        b.append('\n')
        b.append(' ' * pre_indent)
        b.append(')')

    def _parsable_string(self):
        return "Tuple[{}]".format(",".join(typ._parsable_string() for typ in self.types))

    def _convert_from_json(self, x):
        # JSON tuples arrive as lists; convert element-wise.
        return tuple(typ._convert_from_json_na(v) for typ, v in zip(self.types, x))

    def _convert_to_json(self, x):
        return [typ._convert_to_json_na(v) for typ, v in zip(self.types, x)]

    def unify(self, t):
        if not (isinstance(t, ttuple) and len(self.types) == len(t.types)):
            return False
        return all(a.unify(b) for a, b in zip(self.types, t.types))

    def subst(self):
        return ttuple(*(typ.subst() for typ in self.types))

    def clear(self):
        for typ in self.types:
            typ.clear()

    def _get_context(self):
        return HailTypeContext.union(*self.types)
class _tcall(HailType):
    """Hail type for a diploid genotype.

    In Python, these are represented by :class:`.Call`.
    """

    def __init__(self):
        super(_tcall, self).__init__()

    def _typecheck_one_level(self, annotation):
        if annotation is not None and not isinstance(annotation, genetics.Call):
            raise TypeError("type 'call' expected Python hail.genetics.Call, but found %s'" %
                            type(annotation))

    def __str__(self):
        return "call"

    def _eq(self, other):
        return isinstance(other, _tcall)

    def _parsable_string(self):
        return "Call"

    def _convert_from_json(self, x):
        # String encodings handled here: '-' (no alleles), '|-' (phased,
        # no alleles), '|k' (phased haploid), 'k' (unphased haploid),
        # and 'a/b' or 'a|b' (diploid, unphased/phased).
        if x == '-':
            return hl.Call([])
        if x == '|-':
            return hl.Call([], phased=True)
        if x[0] == '|':
            return hl.Call([int(x[1:])], phased=True)
        # Locate the first allele separator, if any.
        sep = -1
        for idx, ch in enumerate(x):
            if ch in '|/':
                sep = idx
                break
        if sep == -1:
            return hl.Call([int(x)])
        return hl.Call([int(x[:sep]), int(x[sep + 1:])], phased=(x[sep] == '|'))

    def _convert_to_json(self, x):
        return str(x)

    def unify(self, t):
        return t == tcall

    def subst(self):
        return self

    def clear(self):
        pass
class tlocus(HailType):
    """Hail type for a genomic coordinate with a contig and a position.

    In Python, these are represented by :class:`.Locus`.

    Parameters
    ----------
    reference_genome: :class:`.ReferenceGenome` or :class:`str`
        Reference genome to use.

    See Also
    --------
    :class:`.LocusExpression`, :func:`.locus`, :func:`.parse_locus`,
    :class:`.Locus`
    """

    @typecheck_method(reference_genome=reference_genome_type)
    def __init__(self, reference_genome='default'):
        self._rg = reference_genome
        super(tlocus, self).__init__()

    def _typecheck_one_level(self, annotation):
        if annotation is None:
            return
        if not isinstance(annotation, genetics.Locus):
            raise TypeError("type '{}' expected Python hail.genetics.Locus, but found '{}'"
                            .format(self, type(annotation)))
        if not self.reference_genome == annotation.reference_genome:
            raise TypeError("type '{}' encountered Locus with reference genome {}"
                            .format(self, repr(annotation.reference_genome)))

    def __str__(self):
        return "locus<{}>".format(escape_parsable(str(self.reference_genome)))

    def _parsable_string(self):
        return "Locus({})".format(escape_parsable(str(self.reference_genome)))

    def _eq(self, other):
        return isinstance(other, tlocus) and self.reference_genome == other.reference_genome

    @property
    def reference_genome(self):
        """Reference genome.

        Returns
        -------
        :class:`.ReferenceGenome`
            Reference genome.
        """
        # Resolved lazily on first access when constructed with None.
        if self._rg is None:
            self._rg = hl.default_reference()
        return self._rg

    def _pretty(self, b, indent, increment):
        b.append('locus<{}>'.format(escape_parsable(self.reference_genome.name)))

    def _convert_from_json(self, x):
        return genetics.Locus(x['contig'], x['position'],
                              reference_genome=self.reference_genome)

    def _convert_to_json(self, x):
        return {'contig': x.contig, 'position': x.position}

    def unify(self, t):
        return isinstance(t, tlocus) and self.reference_genome == t.reference_genome

    def subst(self):
        return self

    def clear(self):
        pass

    def _get_context(self):
        return HailTypeContext(references={self.reference_genome.name})
class tinterval(HailType):
    """Hail type for intervals of ordered values.

    In Python, these are represented by :class:`.Interval`.

    Parameters
    ----------
    point_type: :class:`.HailType`
        Interval point type.

    See Also
    --------
    :class:`.IntervalExpression`, :class:`.Interval`, :func:`.interval`,
    :func:`.parse_locus_interval`
    """

    @typecheck_method(point_type=hail_type)
    def __init__(self, point_type):
        self._point_type = point_type
        super(tinterval, self).__init__()

    @property
    def point_type(self):
        """Interval point type.

        Returns
        -------
        :class:`.HailType`
            Interval point type.
        """
        return self._point_type

    def _traverse(self, obj, f):
        # Visit the interval itself first; recurse into endpoints on request.
        if not f(self, obj):
            return
        self.point_type._traverse(obj.start, f)
        self.point_type._traverse(obj.end, f)

    def _typecheck_one_level(self, annotation):
        from hail.utils import Interval
        if annotation is None:
            return
        if not isinstance(annotation, Interval):
            raise TypeError("type '{}' expected Python hail.utils.Interval, but found {}"
                            .format(self, type(annotation)))
        if annotation.point_type != self.point_type:
            raise TypeError("type '{}' encountered Interval with point type {}"
                            .format(self, repr(annotation.point_type)))

    def __str__(self):
        return "interval<{}>".format(str(self.point_type))

    def _eq(self, other):
        return isinstance(other, tinterval) and self.point_type == other.point_type

    def _pretty(self, b, indent, increment):
        b.append('interval<')
        self.point_type._pretty(b, indent, increment)
        b.append('>')

    def _parsable_string(self):
        return "Interval[{}]".format(self.point_type._parsable_string())

    def _convert_from_json(self, x):
        from hail.utils import Interval
        return Interval(self.point_type._convert_from_json_na(x['start']),
                        self.point_type._convert_from_json_na(x['end']),
                        x['includeStart'],
                        x['includeEnd'])

    def _convert_to_json(self, x):
        return {'start': self.point_type._convert_to_json_na(x.start),
                'end': self.point_type._convert_to_json_na(x.end),
                'includeStart': x.includes_start,
                'includeEnd': x.includes_end}

    def unify(self, t):
        return isinstance(t, tinterval) and self.point_type.unify(t.point_type)

    def subst(self):
        return tinterval(self.point_type.subst())

    def clear(self):
        self.point_type.clear()

    def _get_context(self):
        return self.point_type.get_context()
class Box(object):
    """A mutable cell used during type unification.

    Named boxes are shared: :meth:`from_name` returns the same ``Box`` for
    equal names, so type variables with the same name resolve together.
    """

    # Registry of shared, named boxes.
    named_boxes = {}

    @staticmethod
    def from_name(name):
        # Return the shared box for `name`, creating it on first use.
        try:
            return Box.named_boxes[name]
        except KeyError:
            box = Box()
            Box.named_boxes[name] = box
            return box

    def __init__(self):
        pass

    def unify(self, v):
        # The first binding always succeeds; later bindings must match it.
        if not hasattr(self, 'value'):
            self.value = v
            return True
        return self.value == v

    def clear(self):
        # Forget the binding, if any.
        if hasattr(self, 'value'):
            del self.value

    def get(self):
        assert hasattr(self, 'value')
        return self.value
tvoid = _tvoid()
tint32 = _tint32()
"""Hail type for signed 32-bit integers.
Their values can range from :math:`-2^{31}` to :math:`2^{31} - 1`
(approximately 2.15 billion).
In Python, these are represented as :obj:`int`.
See Also
--------
:class:`.Int32Expression`, :func:`.int`, :func:`.int32`
"""
tint64 = _tint64()
"""Hail type for signed 64-bit integers.
Their values can range from :math:`-2^{63}` to :math:`2^{63} - 1`.
In Python, these are represented as :obj:`int`.
See Also
--------
:class:`.Int64Expression`, :func:`.int64`
"""
tint = tint32
"""Alias for :py:data:`.tint32`."""
tfloat32 = _tfloat32()
"""Hail type for 32-bit floating point numbers.
In Python, these are represented as :obj:`float`.
See Also
--------
:class:`.Float32Expression`, :func:`.float64`
"""
tfloat64 = _tfloat64()
"""Hail type for 64-bit floating point numbers.
In Python, these are represented as :obj:`float`.
See Also
--------
:class:`.Float64Expression`, :func:`.float`, :func:`.float64`
"""
tfloat = tfloat64
"""Alias for :py:data:`.tfloat64`."""
tstr = _tstr()
"""Hail type for text strings.
In Python, these are represented as strings.
See Also
--------
:class:`.StringExpression`, :func:`.str`
"""
tbool = _tbool()
"""Hail type for Boolean (``True`` or ``False``) values.
In Python, these are represented as :obj:`bool`.
See Also
--------
:class:`.BooleanExpression`, :func:`.bool`
"""
tcall = _tcall()
"""Hail type for a diploid genotype.
In Python, these are represented by :class:`.Call`.
See Also
--------
:class:`.CallExpression`, :class:`.Call`, :func:`.call`, :func:`.parse_call`,
:func:`.unphased_diploid_gt_index_call`
"""
hts_entry_schema = tstruct(GT=tcall, AD=tarray(tint32), DP=tint32, GQ=tint32, PL=tarray(tint32))
_numeric_types = {_tbool, _tint32, _tint64, _tfloat32, _tfloat64}
_primitive_types = _numeric_types.union({_tstr})
_interned_types = _primitive_types.union({_tcall})
@typecheck(t=HailType)
def is_numeric(t) -> bool:
    """Return ``True`` if `t` is a numeric (including boolean) Hail type."""
    return type(t) in _numeric_types
@typecheck(t=HailType)
def is_primitive(t) -> bool:
    """Return ``True`` if `t` is a primitive Hail type (numeric or string)."""
    return type(t) in _primitive_types
@typecheck(t=HailType)
def is_container(t) -> bool:
    """Return ``True`` for container types: array, set, and dict."""
    return isinstance(t, (tarray, tset, tdict))
@typecheck(t=HailType)
def is_compound(t) -> bool:
    """Return ``True`` for compound types: containers plus struct, union,
    tuple, and ndarray."""
    return is_container(t) or isinstance(t, (tstruct, tunion, ttuple, tndarray))
def types_match(left, right) -> bool:
    """Return ``True`` when `left` and `right` have equal length and the
    elements at each position have equal ``dtype`` attributes."""
    if len(left) != len(right):
        return False
    return all(a.dtype == b.dtype for a, b in zip(left, right))
def from_numpy(np_dtype):
    """Convert a NumPy dtype (or scalar type) to the corresponding Hail type.

    Supports int32, int64, float32, float64, and bool.

    Raises
    ------
    ValueError
        If `np_dtype` has no Hail equivalent.
    """
    if np_dtype == np.int32:
        return tint32
    elif np_dtype == np.int64:
        return tint64
    elif np_dtype == np.float32:
        return tfloat32
    elif np_dtype == np.float64:
        return tfloat64
    elif np_dtype == np.bool_:
        # BUGFIX: `np.bool` (a deprecated alias for the builtin `bool`) was
        # removed in NumPy 1.24 and raised AttributeError here. `np.bool_`
        # is the real NumPy boolean scalar type and also compares equal to
        # dtype('bool').
        return tbool
    else:
        raise ValueError(f"numpy type {np_dtype} could not be converted to a hail type.")
class tvariable(HailType):
    """A type variable: unifies with any concrete type satisfying its
    optional named condition. Variables with equal names share one
    :class:`Box`, so they must all resolve to the same concrete type."""

    # Maps condition names to predicates over concrete types.
    _cond_map = {
        'numeric': is_numeric,
        'int32': lambda x: x == tint32,
        'int64': lambda x: x == tint64,
        'float32': lambda x: x == tfloat32,
        'float64': lambda x: x == tfloat64,
        'locus': lambda x: isinstance(x, tlocus),
        'struct': lambda x: isinstance(x, tstruct),
        'union': lambda x: isinstance(x, tunion),
        'tuple': lambda x: isinstance(x, ttuple)
    }

    def __init__(self, name, cond):
        self.name = name
        self.cond = cond
        # Only look up a predicate when a condition name was given.
        self.condf = tvariable._cond_map[cond] if cond else None
        self.box = Box.from_name(name)

    def unify(self, t):
        # The condition (if any) must hold, and the shared box must accept t.
        if self.condf is not None and not self.condf(t):
            return False
        return self.box.unify(t)

    def clear(self):
        self.box.clear()

    def subst(self):
        return self.box.get()

    def __str__(self):
        # Rendered as '?name' or '?name:cond'.
        if self.cond:
            return '?{}:{}'.format(self.name, self.cond)
        return '?{}'.format(self.name)
# Keep a handle on the stock printer so non-HailType objects still use it.
_old_printer = pprint.PrettyPrinter


class TypePrettyPrinter(pprint.PrettyPrinter):
    """A ``pprint.PrettyPrinter`` that renders :class:`.HailType` values via
    their own multi-line ``pretty`` form instead of the generic repr.

    NOTE(review): overrides the private ``_format`` hook of
    ``pprint.PrettyPrinter``; this relies on CPython internals.
    """

    def _format(self, object, stream, indent, allowance, context, level):
        if not isinstance(object, HailType):
            # Delegate everything else to the original implementation.
            return _old_printer._format(self, object, stream, indent, allowance, context, level)
        stream.write(object.pretty(self._indent_per_level))


pprint.PrettyPrinter = TypePrettyPrinter  # monkey-patch pprint so hail types print nicely
| {
"content_hash": "528d13da62def9d20661bbedb75f4310",
"timestamp": "",
"source": "github",
"line_count": 1863,
"max_line_length": 145,
"avg_line_length": 27.208803005904453,
"alnum_prop": 0.5616689682383113,
"repo_name": "danking/hail",
"id": "7ea46b6de366897c200fda0eda6772349b7d5bdb",
"size": "50690",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "hail/python/hail/expr/types.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "7729"
},
{
"name": "C",
"bytes": "289"
},
{
"name": "C++",
"bytes": "171899"
},
{
"name": "CSS",
"bytes": "29124"
},
{
"name": "Dockerfile",
"bytes": "13073"
},
{
"name": "Emacs Lisp",
"bytes": "252"
},
{
"name": "HTML",
"bytes": "151709"
},
{
"name": "Java",
"bytes": "32302"
},
{
"name": "JavaScript",
"bytes": "3309"
},
{
"name": "Jupyter Notebook",
"bytes": "162395"
},
{
"name": "Makefile",
"bytes": "73914"
},
{
"name": "Python",
"bytes": "4149266"
},
{
"name": "R",
"bytes": "3038"
},
{
"name": "SCSS",
"bytes": "9075"
},
{
"name": "Scala",
"bytes": "4426573"
},
{
"name": "Shell",
"bytes": "49103"
},
{
"name": "TeX",
"bytes": "7125"
},
{
"name": "XSLT",
"bytes": "5748"
}
],
"symlink_target": ""
} |
"""This script helps to generate code coverage report.
It uses Clang Source-based Code Coverage -
https://clang.llvm.org/docs/SourceBasedCodeCoverage.html
In order to generate code coverage report, you need to first add
"use_clang_coverage=true" and "is_component_build=false" GN flags to args.gn
file in your build output directory (e.g. out/coverage).
* Example usage:
gn gen out/coverage \\
--args="use_clang_coverage=true is_component_build=false\\
is_debug=false dcheck_always_on=true"
gclient runhooks
vpython3 tools/code_coverage/coverage.py crypto_unittests url_unittests \\
-b out/coverage -o out/report -c 'out/coverage/crypto_unittests' \\
-c 'out/coverage/url_unittests --gtest_filter=URLParser.PathURL' \\
-f url/ -f crypto/
The command above builds crypto_unittests and url_unittests targets and then
runs them with specified command line arguments. For url_unittests, it only
runs the test URLParser.PathURL. The coverage report is filtered to include
only files and sub-directories under url/ and crypto/ directories.
If you want to run tests that try to draw to the screen but don't have a
display connected, you can run tests in headless mode with xvfb.
* Sample flow for running a test target with xvfb (e.g. unit_tests):
vpython3 tools/code_coverage/coverage.py unit_tests -b out/coverage \\
-o out/report -c 'python testing/xvfb.py out/coverage/unit_tests'
If you are building a fuzz target, you need to add "use_libfuzzer=true" GN
flag as well.
* Sample workflow for a fuzz target (e.g. pdfium_fuzzer):
vpython3 tools/code_coverage/coverage.py pdfium_fuzzer \\
-b out/coverage -o out/report \\
-c 'out/coverage/pdfium_fuzzer -runs=0 <corpus_dir>' \\
-f third_party/pdfium
where:
<corpus_dir> - directory containing samples files for this format.
To learn more about generating code coverage reports for fuzz targets, see
https://chromium.googlesource.com/chromium/src/+/main/testing/libfuzzer/efficient_fuzzer.md#Code-Coverage
* Sample workflow for running Blink web tests:
vpython3 tools/code_coverage/coverage.py blink_tests \\
-wt -b out/coverage -o out/report -f third_party/blink
If you need to pass arguments to run_web_tests.py, use
-wt='arguments to run_web_tests.py e.g. test directories'
For more options, please refer to tools/code_coverage/coverage.py -h.
For an overview of how code coverage works in Chromium, please refer to
https://chromium.googlesource.com/chromium/src/+/main/docs/testing/code_coverage.md
"""
from __future__ import print_function
import sys
import argparse
import json
import logging
import multiprocessing
import os
import platform
import re
import shlex
import shutil
import subprocess
from urllib.request import urlopen
sys.path.append(
os.path.join(
os.path.dirname(__file__), os.path.pardir, os.path.pardir,
'third_party'))
from collections import defaultdict
import coverage_utils
# Absolute path to the code coverage tools binary. These paths can be
# overwritten by user specified coverage tool paths.
# Absolute path to the root of the checkout.
SRC_ROOT_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                             os.path.pardir, os.path.pardir)
LLVM_BIN_DIR = os.path.join(
    os.path.join(SRC_ROOT_PATH, 'third_party', 'llvm-build', 'Release+Asserts'),
    'bin')
LLVM_COV_PATH = os.path.join(LLVM_BIN_DIR, 'llvm-cov')
LLVM_PROFDATA_PATH = os.path.join(LLVM_BIN_DIR, 'llvm-profdata')

# Build directory, the value is parsed from command line arguments.
BUILD_DIR = None

# Output directory for generated artifacts, the value is parsed from command
# line arguments.
OUTPUT_DIR = None

# Name of the file extension for profraw data files.
PROFRAW_FILE_EXTENSION = 'profraw'

# Name of the final profdata file, and this file needs to be passed to
# "llvm-cov" command in order to call "llvm-cov show" to inspect the
# line-by-line coverage of specific files.
PROFDATA_FILE_NAME = os.extsep.join(['coverage', 'profdata'])

# Name of the file with summary information generated by llvm-cov export.
SUMMARY_FILE_NAME = os.extsep.join(['summary', 'json'])

# Name of the coverage file in lcov format generated by llvm-cov export.
LCOV_FILE_NAME = os.extsep.join(['coverage', 'lcov'])

# Build arg required for generating code coverage data.
CLANG_COVERAGE_BUILD_ARG = 'use_clang_coverage'

# Name of the directory holding per-test output logs.
LOGS_DIR_NAME = 'logs'

# Used to extract a mapping between directories and components.
COMPONENT_MAPPING_URL = (
    'https://storage.googleapis.com/chromium-owners/component_map.json')

# Caches the results returned by _GetBuildArgs, don't use this variable
# directly, call _GetBuildArgs instead.
_BUILD_ARGS = None

# Retry failed merges.
MERGE_RETRIES = 3

# Message to guide user to file a bug when everything else fails.
FILE_BUG_MESSAGE = (
    'If it persists, please file a bug with the command you used, git revision '
    'and args.gn config here: '
    'https://bugs.chromium.org/p/chromium/issues/entry?'
    'components=Infra%3ETest%3ECodeCoverage')

# String to replace with actual llvm profile path.
LLVM_PROFILE_FILE_PATH_SUBSTITUTION = '<llvm_profile_file_path>'
def _ConfigureLLVMCoverageTools(args):
  """Configures llvm coverage tools.

  Points LLVM_COV_PATH and LLVM_PROFDATA_PATH either at a user-supplied
  tools directory or at the coverage tools fetched by
  tools/clang/scripts/update.py.
  """
  # `global` is function-wide in Python, so declaring once up front covers
  # both the assignment and the '.exe' suffixing below.
  global LLVM_COV_PATH
  global LLVM_PROFDATA_PATH
  if args.coverage_tools_dir:
    llvm_bin_dir = coverage_utils.GetFullPath(args.coverage_tools_dir)
    LLVM_COV_PATH = os.path.join(llvm_bin_dir, 'llvm-cov')
    LLVM_PROFDATA_PATH = os.path.join(llvm_bin_dir, 'llvm-profdata')
  else:
    # Fetch/refresh the coverage tools package.
    subprocess.check_call([
        sys.executable, 'tools/clang/scripts/update.py', '--package',
        'coverage_tools'
    ])

  if coverage_utils.GetHostPlatform() == 'win':
    LLVM_COV_PATH += '.exe'
    LLVM_PROFDATA_PATH += '.exe'

  coverage_tools_exist = (
      os.path.exists(LLVM_COV_PATH) and os.path.exists(LLVM_PROFDATA_PATH))
  assert coverage_tools_exist, ('Cannot find coverage tools, please make sure '
                                'both \'%s\' and \'%s\' exist.') % (
                                    LLVM_COV_PATH, LLVM_PROFDATA_PATH)
def _GetPathWithLLVMSymbolizerDir():
  """Add llvm-symbolizer directory to path for symbolized stacks."""
  path = os.getenv('PATH')
  # Only append when the LLVM bin dir is not already a PATH entry.
  if LLVM_BIN_DIR in path.split(os.pathsep):
    return path
  return os.pathsep.join([path, LLVM_BIN_DIR])
def _GetTargetOS():
  """Returns the target os specified in args.gn file.

  Returns an empty string if target_os is not specified.
  """
  # dict.get performs a single lookup instead of membership test + index.
  return _GetBuildArgs().get('target_os', '')
def _IsAndroid():
  """Returns true if the target_os specified in args.gn file is android"""
  target_os = _GetTargetOS()
  return target_os == 'android'
def _IsIOS():
  """Returns true if the target_os specified in args.gn file is ios"""
  target_os = _GetTargetOS()
  return target_os == 'ios'
def _GeneratePerFileLineByLineCoverageInFormat(binary_paths, profdata_file_path,
                                               filters, ignore_filename_regex,
                                               output_format):
  """Generates per file line-by-line coverage in html or text using
  'llvm-cov show'.

  For a file with absolute path /a/b/x.cc, a html/txt report is generated as:
  OUTPUT_DIR/coverage/a/b/x.cc.[html|txt]. For html format, an index html file
  is also generated as: OUTPUT_DIR/index.html.

  Args:
    binary_paths: A list of paths to the instrumented binaries.
    profdata_file_path: A path to the profdata file.
    filters: A list of directories and files to get coverage for.
    ignore_filename_regex: A regular expression for skipping source code files
                           with certain file paths.
    output_format: The output format of generated report files.
  """
  logging.debug('Generating per file line by line coverage reports using '
                '"llvm-cov show" command.')
  # llvm-cov show [options] -instr-profile PROFILE BIN [-object BIN,...]
  # [[-object BIN]] [SOURCES]
  # The first object file is a positional argument; any further ones are
  # passed via -object=.
  cmd = [
      LLVM_COV_PATH, 'show', '-format={}'.format(output_format),
      '-compilation-dir={}'.format(BUILD_DIR),
      '-output-dir={}'.format(OUTPUT_DIR),
      '-instr-profile={}'.format(profdata_file_path), binary_paths[0]
  ]
  for binary_path in binary_paths[1:]:
    cmd.append('-object=' + binary_path)
  _AddArchArgumentForIOSIfNeeded(cmd, len(binary_paths))
  if coverage_utils.GetHostPlatform() in ('linux', 'mac'):
    # Demangle C++ symbols in the report.
    cmd.extend(['-Xdemangler', 'c++filt', '-Xdemangler', '-n'])
  cmd.extend(filters)
  if ignore_filename_regex:
    cmd.append('-ignore-filename-regex=%s' % ignore_filename_regex)

  subprocess.check_call(cmd)
  logging.debug('Finished running "llvm-cov show" command.')
def _GeneratePerFileLineByLineCoverageInLcov(binary_paths, profdata_file_path,
                                             filters, ignore_filename_regex):
  """Generates per file line-by-line coverage using "llvm-cov export".

  Args:
    binary_paths: A list of paths to the instrumented binaries.
    profdata_file_path: A path to the profdata file.
    filters: A list of directories and files to get coverage for.
    ignore_filename_regex: A regular expression for skipping source code files
                           with certain file paths.
  """
  logging.debug('Generating per file line by line coverage reports using '
                '"llvm-cov export" command.')
  # Warn about missing binaries but keep going; llvm-cov will surface the
  # hard failure if the path is actually required.
  for binary_path in binary_paths:
    if not os.path.exists(binary_path):
      logging.error("Binary %s does not exist", binary_path)
  cmd = [
      LLVM_COV_PATH, 'export', '-format=lcov',
      '-instr-profile=' + profdata_file_path, binary_paths[0]
  ]
  for binary_path in binary_paths[1:]:
    cmd.append('-object=' + binary_path)
  _AddArchArgumentForIOSIfNeeded(cmd, len(binary_paths))
  cmd.extend(filters)
  if ignore_filename_regex:
    cmd.append('-ignore-filename-regex=%s' % ignore_filename_regex)
  # Write output on the disk to be used by code coverage bot.
  with open(_GetLcovFilePath(), 'w') as f:
    subprocess.check_call(cmd, stdout=f)

  logging.debug('Finished running "llvm-cov export" command.')
def _GetLogsDirectoryPath():
  """Path to the logs directory."""
  report_root = coverage_utils.GetCoverageReportRootDirPath(OUTPUT_DIR)
  return os.path.join(report_root, LOGS_DIR_NAME)
def _GetProfdataFilePath():
  """Path to the resulting .profdata file."""
  report_root = coverage_utils.GetCoverageReportRootDirPath(OUTPUT_DIR)
  return os.path.join(report_root, PROFDATA_FILE_NAME)
def _GetSummaryFilePath():
  """The JSON file that contains coverage summary written by llvm-cov export."""
  report_root = coverage_utils.GetCoverageReportRootDirPath(OUTPUT_DIR)
  return os.path.join(report_root, SUMMARY_FILE_NAME)
def _GetLcovFilePath():
  """The LCOV file that contains coverage data written by llvm-cov export."""
  report_root = coverage_utils.GetCoverageReportRootDirPath(OUTPUT_DIR)
  return os.path.join(report_root, LCOV_FILE_NAME)
def _CreateCoverageProfileDataForTargets(targets, commands, jobs_count=None):
  """Builds and runs target to generate the coverage profile data.

  Args:
    targets: A list of targets to build with coverage instrumentation.
    commands: A list of commands used to run the targets.
    jobs_count: Number of jobs to run in parallel for building. If None, a
                default value is derived based on CPUs availability.

  Returns:
    A relative path to the generated profdata file.
  """
  _BuildTargets(targets, jobs_count)
  per_target_profdata = _GetTargetProfDataPathsByExecutingCommands(
      targets, commands)
  merged_profdata = (
      _CreateCoverageProfileDataFromTargetProfDataFiles(per_target_profdata))

  # The per-target files are intermediates; drop them once merged.
  for path in per_target_profdata:
    os.remove(path)

  return merged_profdata
def _BuildTargets(targets, jobs_count):
  """Builds target with Clang coverage instrumentation.

  This function requires current working directory to be the root of checkout.

  Args:
    targets: A list of targets to build with coverage instrumentation.
    jobs_count: Number of jobs to run in parallel for compilation. If None, a
                default value is derived based on CPUs availability.
  """
  logging.info('Building %s.', str(targets))
  is_win = coverage_utils.GetHostPlatform() == 'win'
  autoninja = 'autoninja.bat' if is_win else 'autoninja'

  cmd = [autoninja, '-C', BUILD_DIR]
  if jobs_count is not None:
    cmd.append('-j' + str(jobs_count))
  cmd.extend(targets)

  subprocess.check_call(cmd)
  logging.debug('Finished building %s.', str(targets))
def _GetTargetProfDataPathsByExecutingCommands(targets, commands):
  """Runs commands and returns the relative paths to the profdata files.

  Each target's command is run (with retries), its profraw output is merged
  into a per-target profdata file, and the profraw intermediates are deleted.

  Args:
    targets: A list of targets built with coverage instrumentation.
    commands: A list of commands used to run the targets, parallel to
        |targets|.

  Returns:
    A list of relative paths to the generated per-target profdata files.
  """
  logging.debug('Executing the test commands.')
  # Remove existing profraw data files so stale data from a previous run can
  # never be merged into this run's profile.
  report_root_dir = coverage_utils.GetCoverageReportRootDirPath(OUTPUT_DIR)
  for file_or_dir in os.listdir(report_root_dir):
    if file_or_dir.endswith(PROFRAW_FILE_EXTENSION):
      os.remove(os.path.join(report_root_dir, file_or_dir))
  # Ensure that logs directory exists.
  if not os.path.exists(_GetLogsDirectoryPath()):
    os.makedirs(_GetLogsDirectoryPath())
  profdata_file_paths = []
  # Run all test targets to generate profraw data files.
  for target, command in zip(targets, commands):
    output_file_name = os.extsep.join([target + '_output', 'log'])
    output_file_path = os.path.join(_GetLogsDirectoryPath(), output_file_name)
    profdata_file_path = None
    # Retry loop: a merge failure discards the profraw files and re-runs the
    # command from scratch, up to MERGE_RETRIES attempts.
    for _ in range(MERGE_RETRIES):
      logging.info('Running command: "%s", the output is redirected to "%s".',
                   command, output_file_path)
      if _IsIOSCommand(command):
        # On iOS platform, due to lack of write permissions, profraw files are
        # generated outside of the OUTPUT_DIR, and the exact paths are contained
        # in the output of the command execution.
        output = _ExecuteIOSCommand(command, output_file_path)
      else:
        # On other platforms, profraw files are generated inside the OUTPUT_DIR.
        output = _ExecuteCommand(target, command, output_file_path)
      # Collect the profraw files produced by this run; discovery is
      # platform-specific (parsed from output on iOS, walked from the
      # coverage dir on Android, globbed from the report root elsewhere).
      profraw_file_paths = []
      if _IsIOS():
        profraw_file_paths = [_GetProfrawDataFileByParsingOutput(output)]
      elif _IsAndroid():
        android_coverage_dir = os.path.join(BUILD_DIR, 'coverage')
        for r, _, files in os.walk(android_coverage_dir):
          for f in files:
            if f.endswith(PROFRAW_FILE_EXTENSION):
              profraw_file_paths.append(os.path.join(r, f))
      else:
        for file_or_dir in os.listdir(report_root_dir):
          if file_or_dir.endswith(PROFRAW_FILE_EXTENSION):
            profraw_file_paths.append(
                os.path.join(report_root_dir, file_or_dir))
      assert profraw_file_paths, (
          'Running target "%s" failed to generate any profraw data file, '
          'please make sure the binary exists, is properly instrumented and '
          'does not crash. %s' % (target, FILE_BUG_MESSAGE))
      assert isinstance(profraw_file_paths, list), (
          'Variable \'profraw_file_paths\' is expected to be of type \'list\', '
          'but it is a %s. %s' % (type(profraw_file_paths), FILE_BUG_MESSAGE))
      try:
        profdata_file_path = _CreateTargetProfDataFileFromProfRawFiles(
            target, profraw_file_paths)
        break
      except Exception:
        logging.info('Retrying...')
      finally:
        # Remove profraw files now so that they are not used in next iteration.
        for profraw_file_path in profraw_file_paths:
          os.remove(profraw_file_path)
    assert profdata_file_path, (
        'Failed to merge target "%s" profraw files after %d retries. %s' %
        (target, MERGE_RETRIES, FILE_BUG_MESSAGE))
    profdata_file_paths.append(profdata_file_path)
  logging.debug('Finished executing the test commands.')
  return profdata_file_paths
def _GetEnvironmentVars(profraw_file_path):
  """Returns a copy of os.environ adjusted for a coverage-instrumented run.

  LLVM_PROFILE_FILE tells the instrumented binary where to write its profraw
  data; PATH gains the llvm-symbolizer directory.
  """
  env = dict(os.environ)
  env['LLVM_PROFILE_FILE'] = profraw_file_path
  env['PATH'] = _GetPathWithLLVMSymbolizerDir()
  return env
def _SplitCommand(command):
  """Splits a command string into argv parts in a platform-specific way."""
  # NOTE(review): plain whitespace split on Windows — presumably because
  # shlex's POSIX rules would mangle backslash paths; confirm if changing.
  if coverage_utils.GetHostPlatform() != 'win':
    return shlex.split(command)
  return command.split()
def _ExecuteCommand(target, command, output_file_path):
  """Runs a single command and generates a profraw data file.

  Args:
    target: Name of the test target; used to name the profraw file and to
        detect fuzzer targets.
    command: The command line to run. Any LLVM_PROFILE_FILE_PATH_SUBSTITUTION
        occurrence is replaced with the expected profraw file path.
    output_file_path: File that stdout/stderr of the command is redirected to.

  Returns:
    The captured command output (contents of |output_file_path|).
  """
  # Per Clang "Source-based Code Coverage" doc:
  #
  # "%p" expands out to the process ID. It's not used by this scripts due to:
  # 1) If a target program spawns too many processess, it may exhaust all disk
  #    space available. For example, unit_tests writes thousands of .profraw
  #    files each of size 1GB+.
  # 2) If a target binary uses shared libraries, coverage profile data for them
  #    will be missing, resulting in incomplete coverage reports.
  #
  # "%Nm" expands out to the instrumented binary's signature. When this pattern
  # is specified, the runtime creates a pool of N raw profiles which are used
  # for on-line profile merging. The runtime takes care of selecting a raw
  # profile from the pool, locking it, and updating it before the program exits.
  # N must be between 1 and 9. The merge pool specifier can only occur once per
  # filename pattern.
  #
  # "%1m" is used when tests run in single process, such as fuzz targets.
  #
  # For other cases, "%4m" is chosen as it creates some level of parallelism,
  # but it's not too big to consume too much computing resource or disk space.
  profile_pattern_string = '%1m' if _IsFuzzerTarget(target) else '%4m'
  expected_profraw_file_name = os.extsep.join(
      [target, profile_pattern_string, PROFRAW_FILE_EXTENSION])
  expected_profraw_file_path = os.path.join(
      coverage_utils.GetCoverageReportRootDirPath(OUTPUT_DIR),
      expected_profraw_file_name)
  command = command.replace(LLVM_PROFILE_FILE_PATH_SUBSTITUTION,
                            expected_profraw_file_path)
  try:
    # Some fuzz targets or tests may write into stderr, redirect it as well.
    with open(output_file_path, 'wb') as output_file_handle:
      subprocess.check_call(_SplitCommand(command),
                            stdout=output_file_handle,
                            stderr=subprocess.STDOUT,
                            env=_GetEnvironmentVars(expected_profraw_file_path))
  except subprocess.CalledProcessError:
    # A non-zero exit is deliberately non-fatal here: the profraw data may
    # still have been written and remains usable.
    logging.warning('Command: "%s" exited with non-zero return code.', command)
  # Fix: read the log through a context manager instead of leaking the file
  # handle (the original did `return open(...).read()`).
  with open(output_file_path, 'rb') as output_file_handle:
    return output_file_handle.read()
def _IsFuzzerTarget(target):
  """Returns true if the target is a fuzzer target.

  A target counts as a fuzzer when the build sets use_libfuzzer=true and the
  target name ends with '_fuzzer'.
  """
  build_args = _GetBuildArgs()
  # dict.get replaces the original double lookup (`in` test plus indexing).
  use_libfuzzer = build_args.get('use_libfuzzer') == 'true'
  return use_libfuzzer and target.endswith('_fuzzer')
def _ExecuteIOSCommand(command, output_file_path):
  """Runs a single iOS command and generates a profraw data file.

  iOS application doesn't have write access to folders outside of the app, so
  it's impossible to instruct the app to flush the profraw data file to the
  desired location. The profraw data file will be generated somewhere within
  the application's Documents folder, and the full path can be obtained by
  parsing the output.

  Args:
    command: The iossim command line to run.
    output_file_path: File that stdout/stderr of the command is redirected to.

  Returns:
    The captured command output (contents of |output_file_path|).
  """
  assert _IsIOSCommand(command)
  # After running tests, iossim generates a profraw data file, it won't be
  # needed anyway, so dump it into the OUTPUT_DIR to avoid polluting the
  # checkout.
  iossim_profraw_file_path = os.path.join(
      OUTPUT_DIR, os.extsep.join(['iossim', PROFRAW_FILE_EXTENSION]))
  command = command.replace(LLVM_PROFILE_FILE_PATH_SUBSTITUTION,
                            iossim_profraw_file_path)
  try:
    with open(output_file_path, 'wb') as output_file_handle:
      subprocess.check_call(_SplitCommand(command),
                            stdout=output_file_handle,
                            stderr=subprocess.STDOUT,
                            env=_GetEnvironmentVars(iossim_profraw_file_path))
  except subprocess.CalledProcessError:
    # iossim emits non-zero return code even if tests run successfully, so
    # ignore the return code.
    pass
  # Fix: read the log through a context manager instead of leaking the file
  # handle (the original did `return open(...).read()`).
  with open(output_file_path, 'rb') as output_file_handle:
    return output_file_handle.read()
def _GetProfrawDataFileByParsingOutput(output):
  """Returns the path to the profraw data file obtained by parsing the output.

  The output of running the test target has no format, but it is guaranteed to
  have a single line containing the path to the generated profraw data file.

  NOTE: This should only be called when target os is iOS.
  """
  assert _IsIOS()
  # The output comes from a file opened in binary mode and may therefore be
  # bytes; decode before text matching. (The original joined a bytes object
  # with str separators, which raises on Python 3.)
  if isinstance(output, bytes):
    output = output.decode('utf-8', 'replace')
  # Fix: raw string so `\.` is a literal-dot regex escape rather than an
  # invalid string escape.
  profraw_file_pattern = re.compile(r'.*Coverage data at (.*coverage\.profraw).')
  for line in output.splitlines():
    result = profraw_file_pattern.match(line)
    if result:
      return result.group(1)
  assert False, ('No profraw data file was generated, did you call '
                 'coverage_util::ConfigureCoverageReportPath() in test setup? '
                 'Please refer to base/test/test_support_ios.mm for example.')
def _CreateCoverageProfileDataFromTargetProfDataFiles(profdata_file_paths):
  """Merges per-target profdata files into one coverage profdata file.

  Args:
    profdata_file_paths: A list of relative paths to the profdata data files
        that are to be merged.

  Returns:
    A relative path to the merged coverage profdata file.

  Raises:
    CalledProcessError: An error occurred merging profdata files.
  """
  logging.info('Creating the coverage profile data file.')
  logging.debug('Merging target profraw files to create target profdata file.')
  merged_path = _GetProfdataFilePath()
  merge_cmd = [LLVM_PROFDATA_PATH, 'merge', '-o', merged_path, '-sparse=true']
  merge_cmd.extend(profdata_file_paths)
  try:
    merge_output = subprocess.check_output(merge_cmd)
    logging.debug('Merge output: %s', merge_output)
  except subprocess.CalledProcessError as error:
    logging.error(
        'Failed to merge target profdata files to create coverage profdata. %s',
        FILE_BUG_MESSAGE)
    raise error
  logging.debug('Finished merging target profdata files.')
  logging.info('Code coverage profile data is created as: "%s".',
               merged_path)
  return merged_path
def _CreateTargetProfDataFileFromProfRawFiles(target, profraw_file_paths):
  """Merges one target's profraw files into its per-target profdata file.

  Args:
    target: Name of the target; determines the output profdata file name.
    profraw_file_paths: A list of relative paths to the profraw data files
        that are to be merged.

  Returns:
    A relative path to the merged target profdata file.

  Raises:
    CalledProcessError: An error occurred merging profraw files.
  """
  logging.info('Creating target profile data file.')
  logging.debug('Merging target profraw files to create target profdata file.')
  target_profdata_path = os.path.join(OUTPUT_DIR, '%s.profdata' % target)
  merge_cmd = [
      LLVM_PROFDATA_PATH, 'merge', '-o', target_profdata_path, '-sparse=true'
  ]
  merge_cmd.extend(profraw_file_paths)
  try:
    merge_output = subprocess.check_output(merge_cmd)
    logging.debug('Merge output: %s', merge_output)
  except subprocess.CalledProcessError as error:
    logging.error(
        'Failed to merge target profraw files to create target profdata.')
    raise error
  logging.debug('Finished merging target profraw files.')
  logging.info('Target "%s" profile data is created as: "%s".', target,
               target_profdata_path)
  return target_profdata_path
def _GeneratePerFileCoverageSummary(binary_paths, profdata_file_path, filters,
                                    ignore_filename_regex):
  """Generates per file coverage summary using "llvm-cov export" command.

  Args:
    binary_paths: Instrumented binaries to export coverage for; the first is
        passed positionally, the rest via repeated -object flags.
    profdata_file_path: Merged .profdata file to read counts from.
    filters: Source paths appended to restrict the report.
    ignore_filename_regex: Optional regex of file paths to exclude.

  Returns:
    The raw output of "llvm-cov export -summary-only"; it is also written to
    the summary file for the code coverage bot.
  """
  # llvm-cov export [options] -instr-profile PROFILE BIN [-object BIN,...]
  # [[-object BIN]] [SOURCES].
  # NOTE: For object files, the first one is specified as a positional argument,
  # and the rest are specified as keyword argument.
  logging.debug('Generating per-file code coverage summary using "llvm-cov '
                'export -summary-only" command.')
  for path in binary_paths:
    if not os.path.exists(path):
      # NOTE(review): a missing binary is only logged here, not fatal —
      # confirm that llvm-cov failing later is the intended handling.
      logging.error("Binary %s does not exist", path)
  subprocess_cmd = [
      LLVM_COV_PATH, 'export', '-summary-only',
      '-compilation-dir={}'.format(BUILD_DIR),
      '-instr-profile=' + profdata_file_path, binary_paths[0]
  ]
  subprocess_cmd.extend(
      ['-object=' + binary_path for binary_path in binary_paths[1:]])
  _AddArchArgumentForIOSIfNeeded(subprocess_cmd, len(binary_paths))
  subprocess_cmd.extend(filters)
  if ignore_filename_regex:
    subprocess_cmd.append('-ignore-filename-regex=%s' % ignore_filename_regex)
  export_output = subprocess.check_output(subprocess_cmd)
  # Write output on the disk to be used by code coverage bot.
  with open(_GetSummaryFilePath(), 'wb') as f:
    f.write(export_output)
  return export_output
def _AddArchArgumentForIOSIfNeeded(cmd_list, num_archs):
  """Appends -arch arguments to the command list if it's ios platform.

  iOS binaries are universal binaries, and require specifying the architecture
  to use, and one architecture needs to be specified for each binary.
  """
  if not _IsIOS():
    return
  for _ in range(num_archs):
    cmd_list.append('-arch=x86_64')
def _GetBinaryPath(command):
  """Returns a relative path to the binary to be run by the command.

  Currently, following types of commands are supported (e.g. url_unittests):
  1. Run test binary direcly: "out/coverage/url_unittests <arguments>"
  2. Use xvfb.
    2.1. "python testing/xvfb.py out/coverage/url_unittests <arguments>"
    2.2. "testing/xvfb.py out/coverage/url_unittests <arguments>"
  3. Use iossim to run tests on iOS platform, please refer to testing/iossim.mm
    for its usage.
    3.1. "out/Coverage-iphonesimulator/iossim
          <iossim_arguments> -c <app_arguments>
          out/Coverage-iphonesimulator/url_unittests.app"

  Args:
    command: A command used to run a target.

  Returns:
    A relative path to the binary.
  """
  xvfb_script_name = os.extsep.join(['xvfb', 'py'])
  command_parts = _SplitCommand(command)
  # Case 2.1: "python testing/xvfb.py <binary> ..." — binary is the 3rd part.
  if os.path.basename(command_parts[0]) == 'python':
    assert os.path.basename(command_parts[1]) == xvfb_script_name, (
        'This tool doesn\'t understand the command: "%s".' % command)
    return command_parts[2]
  # Case 2.2: "testing/xvfb.py <binary> ..." — binary is the 2nd part.
  if os.path.basename(command_parts[0]) == xvfb_script_name:
    return command_parts[1]
  if _IsIOSCommand(command):
    # For a given application bundle, the binary resides in the bundle and has
    # the same name with the application without the .app extension.
    app_path = command_parts[1].rstrip(os.path.sep)
    app_name = os.path.splitext(os.path.basename(app_path))[0]
    return os.path.join(app_path, app_name)
  # Case 1: direct invocation; on Windows, normalize to the .exe name.
  if coverage_utils.GetHostPlatform() == 'win' \
      and not command_parts[0].endswith('.exe'):
    return command_parts[0] + '.exe'
  return command_parts[0]
def _IsIOSCommand(command):
  """Returns true if command is used to run tests on iOS platform."""
  executable = _SplitCommand(command)[0]
  return os.path.basename(executable) == 'iossim'
def _VerifyTargetExecutablesAreInBuildDirectory(commands):
  """Verifies that the target executables specified in the commands are inside
  the given build directory."""
  build_dir_prefix = BUILD_DIR + os.sep
  for command in commands:
    binary_path = _GetBinaryPath(command)
    absolute_path = coverage_utils.GetFullPath(binary_path)
    assert absolute_path.startswith(build_dir_prefix), (
        'Target executable "%s" in command: "%s" is outside of '
        'the given build directory: "%s".' % (binary_path, command, BUILD_DIR))
def _ValidateBuildingWithClangCoverage():
  """Asserts that targets are built with Clang coverage enabled."""
  build_args = _GetBuildArgs()
  # A single assert covers both "arg missing" and "arg not 'true'".
  assert build_args.get(CLANG_COVERAGE_BUILD_ARG) == 'true', (
      '\'{} = true\' is required in args.gn.').format(CLANG_COVERAGE_BUILD_ARG)
def _ValidateCurrentPlatformIsSupported():
  """Asserts that this script supports running on the current platform."""
  target_os = _GetTargetOS()
  if target_os:
    current_platform = target_os
  else:
    current_platform = coverage_utils.GetHostPlatform()
  supported_platforms = ['android', 'chromeos', 'ios', 'linux', 'mac', 'win']
  # Fix: the original message concatenated to "onlysupported" (the implicit
  # string concatenation was missing a space).
  assert current_platform in supported_platforms, (
      'Coverage is only supported on %s' % supported_platforms)
def _GetBuildArgs():
  """Parses the args.gn file and returns the build args as a dictionary.

  The result is cached in the module-level _BUILD_ARGS on first call.

  Returns:
    A dict mapping build-arg names to their (unquoted) string values.
  """
  global _BUILD_ARGS
  if _BUILD_ARGS is not None:
    return _BUILD_ARGS
  _BUILD_ARGS = {}
  build_args_path = os.path.join(BUILD_DIR, 'args.gn')
  assert os.path.exists(build_args_path), ('"%s" is not a build directory, '
                                           'missing args.gn file.' % BUILD_DIR)
  with open(build_args_path) as build_args_file:
    for build_arg_line in build_args_file:
      build_arg_without_comments = build_arg_line.split('#')[0]
      # Fix: split on the first '=' only, so args whose value itself contains
      # '=' (e.g. extra flags) are parsed instead of being silently skipped.
      key_value_pair = build_arg_without_comments.split('=', 1)
      if len(key_value_pair) != 2:
        continue
      key = key_value_pair[0].strip()
      # Values are wrapped within a pair of double-quotes, so remove the
      # leading and trailing double-quotes.
      value = key_value_pair[1].strip().strip('"')
      _BUILD_ARGS[key] = value
  return _BUILD_ARGS
def _VerifyPathsAndReturnAbsolutes(paths):
  """Verifies that every path in |paths| exists and returns absolute versions.

  Args:
    paths: A list of files or directories, relative to the source root.

  Returns:
    The corresponding list of absolute paths, in the same order.
  """
  resolved = []
  for relative_path in paths:
    candidate = os.path.join(SRC_ROOT_PATH, relative_path)
    assert os.path.exists(candidate), (
        'Path: "%s" doesn\'t exist.' % relative_path)
    resolved.append(candidate)
  return resolved
def _GetBinaryPathsFromTargets(targets, build_dir):
  """Return binary paths from target names, skipping missing binaries."""
  # TODO(crbug.com/899974): Derive output binary from target build definitions
  # rather than assuming that it is always the same name.
  binary_paths = []
  for target in targets:
    candidate = os.path.join(build_dir, target)
    if coverage_utils.GetHostPlatform() == 'win':
      candidate += '.exe'
    if not os.path.exists(candidate):
      logging.warning(
          'Target binary "%s" not found in build directory, skipping.',
          os.path.basename(candidate))
      continue
    binary_paths.append(candidate)
  return binary_paths
def _GetCommandForWebTests(arguments):
  """Return command to run for blink web tests.

  Args:
    arguments: Extra arguments forwarded to run_web_tests.py; a whitespace-only
        string (the argparse const for bare -wt) appends nothing.

  Returns:
    The full command line as a single space-joined string.
  """
  cpu_count = multiprocessing.cpu_count()
  if sys.platform == 'win32':
    # TODO(crbug.com/1190269) - we can't use more than 56
    # cores on Windows or Python3 may hang.
    cpu_count = min(cpu_count, 56)
  # Use half the cores (at least one) for the test children.
  cpu_count = max(1, cpu_count // 2)
  command_list = [
      'python', 'testing/xvfb.py', 'python',
      'third_party/blink/tools/run_web_tests.py',
      '--additional-driver-flag=--no-sandbox',
      '--additional-env-var=LLVM_PROFILE_FILE=%s' %
      LLVM_PROFILE_FILE_PATH_SUBSTITUTION,
      '--child-processes=%d' % cpu_count, '--disable-breakpad',
      '--no-show-results', '--skip-failing-tests',
      '--target=%s' % os.path.basename(BUILD_DIR), '--timeout-ms=30000'
  ]
  if arguments.strip():
    command_list.append(arguments)
  return ' '.join(command_list)
def _GetBinaryPathsForAndroid(targets):
  """Return binary paths used when running android tests."""
  # TODO(crbug.com/899974): Implement approach that doesn't assume .so file is
  # based on the target's name.
  found_libraries = set()
  for target in targets:
    candidate = os.path.join(BUILD_DIR, 'lib.unstripped',
                             'lib%s__library.so' % target)
    if os.path.exists(candidate):
      found_libraries.add(candidate)
  return list(found_libraries)
def _GetBinaryPathForWebTests():
  """Return binary path used to run blink web tests."""
  # Per-platform path of the content_shell binary, relative to BUILD_DIR.
  relative_parts_by_platform = {
      'win': ('content_shell.exe',),
      'linux': ('content_shell',),
      'mac': ('Content Shell.app', 'Contents', 'MacOS', 'Content Shell'),
  }
  host_platform = coverage_utils.GetHostPlatform()
  assert host_platform in relative_parts_by_platform, (
      'This platform is not supported for web tests.')
  return os.path.join(BUILD_DIR, *relative_parts_by_platform[host_platform])
def _SetupOutputDir():
  """Recreates |OUTPUT_DIR| from scratch, including its platform sub-dir."""
  if os.path.exists(OUTPUT_DIR):
    shutil.rmtree(OUTPUT_DIR)
  # makedirs creates OUTPUT_DIR and the platform sub-directory in one call.
  os.makedirs(coverage_utils.GetCoverageReportRootDirPath(OUTPUT_DIR))
def _SetMacXcodePath():
  """Set DEVELOPER_DIR to the path to hermetic Xcode.app on Mac OS X."""
  if sys.platform != 'darwin':
    return
  hermetic_xcode_path = os.path.join(SRC_ROOT_PATH, 'build', 'mac_files',
                                     'Xcode.app')
  if os.path.exists(hermetic_xcode_path):
    os.environ['DEVELOPER_DIR'] = hermetic_xcode_path
def _ParseCommandArguments():
  """Adds and parses relevant arguments for tool commands.

  Returns:
    An argparse.Namespace holding the parsed arguments.
  """
  arg_parser = argparse.ArgumentParser()
  arg_parser.usage = __doc__
  arg_parser.add_argument(
      '-b',
      '--build-dir',
      type=str,
      required=True,
      help='The build directory, the path needs to be relative to the root of '
      'the checkout.')
  arg_parser.add_argument(
      '-o',
      '--output-dir',
      type=str,
      required=True,
      help='Output directory for generated artifacts.')
  arg_parser.add_argument(
      '-c',
      '--command',
      action='append',
      required=False,
      help='Commands used to run test targets, one test target needs one and '
      'only one command, when specifying commands, one should assume the '
      'current working directory is the root of the checkout. This option is '
      'incompatible with -p/--profdata-file option.')
  # const=' ' makes a bare "-wt" (no value) truthy while adding no extra
  # arguments to run_web_tests.py.
  arg_parser.add_argument(
      '-wt',
      '--web-tests',
      nargs='?',
      type=str,
      const=' ',
      required=False,
      help='Run blink web tests. Support passing arguments to run_web_tests.py')
  arg_parser.add_argument(
      '-p',
      '--profdata-file',
      type=str,
      required=False,
      help='Path to profdata file to use for generating code coverage reports. '
      'This can be useful if you generated the profdata file seperately in '
      'your own test harness. This option is ignored if run command(s) are '
      'already provided above using -c/--command option.')
  arg_parser.add_argument(
      '-f',
      '--filters',
      action='append',
      required=False,
      help='Directories or files to get code coverage for, and all files under '
      'the directories are included recursively.')
  arg_parser.add_argument(
      '-i',
      '--ignore-filename-regex',
      type=str,
      help='Skip source code files with file paths that match the given '
      'regular expression. For example, use -i=\'.*/out/.*|.*/third_party/.*\' '
      'to exclude files in third_party/ and out/ folders from the report.')
  arg_parser.add_argument(
      '--no-file-view',
      action='store_true',
      help='Don\'t generate the file view in the coverage report. When there '
      'are large number of html files, the file view becomes heavy and may '
      'cause the browser to freeze, and this argument comes handy.')
  arg_parser.add_argument(
      '--no-component-view',
      action='store_true',
      help='Don\'t generate the component view in the coverage report.')
  arg_parser.add_argument(
      '--coverage-tools-dir',
      type=str,
      help='Path of the directory where LLVM coverage tools (llvm-cov, '
      'llvm-profdata) exist. This should be only needed if you are testing '
      'against a custom built clang revision. Otherwise, we pick coverage '
      'tools automatically from your current source checkout.')
  arg_parser.add_argument(
      '-j',
      '--jobs',
      type=int,
      default=None,
      help='Run N jobs to build in parallel. If not specified, a default value '
      'will be derived based on CPUs and goma availability. Please refer to '
      '\'autoninja -h\' for more details.')
  arg_parser.add_argument(
      '--format',
      type=str,
      default='html',
      help='Output format of the "llvm-cov show/export" command. The '
      'supported formats are "text", "html" and "lcov".')
  arg_parser.add_argument(
      '-v',
      '--verbose',
      action='store_true',
      help='Prints additional output for diagnostics.')
  arg_parser.add_argument(
      '-l', '--log_file', type=str, help='Redirects logs to a file.')
  arg_parser.add_argument(
      'targets',
      nargs='+',
      help='The names of the test targets to run. If multiple run commands are '
      'specified using the -c/--command option, then the order of targets and '
      'commands must match, otherwise coverage generation will fail.')
  args = arg_parser.parse_args()
  return args
def Main():
  """Execute tool commands.

  Orchestrates the full pipeline: parse args, configure tools/logging, build
  and run targets (or accept an existing profdata file), then generate the
  requested coverage report format.
  """
  # Change directory to source root to aid in relative paths calculations.
  os.chdir(SRC_ROOT_PATH)
  # Setup coverage binaries even when script is called with empty params. This
  # is used by coverage bot for initial setup.
  if len(sys.argv) == 1:
    subprocess.check_call([
        sys.executable, 'tools/clang/scripts/update.py', '--package',
        'coverage_tools'
    ])
    print(__doc__)
    return
  args = _ParseCommandArguments()
  coverage_utils.ConfigureLogging(verbose=args.verbose, log_file=args.log_file)
  _ConfigureLLVMCoverageTools(args)
  # BUILD_DIR and OUTPUT_DIR are module-level globals read by most helpers;
  # they must be set before any of the steps below run.
  global BUILD_DIR
  BUILD_DIR = coverage_utils.GetFullPath(args.build_dir)
  global OUTPUT_DIR
  OUTPUT_DIR = coverage_utils.GetFullPath(args.output_dir)
  assert args.web_tests or args.command or args.profdata_file, (
      'Need to either provide commands to run using -c/--command option OR '
      'provide prof-data file as input using -p/--profdata-file option OR '
      'run web tests using -wt/--run-web-tests.')
  assert not args.command or (len(args.targets) == len(args.command)), (
      'Number of targets must be equal to the number of test commands.')
  assert os.path.exists(BUILD_DIR), (
      'Build directory: "%s" doesn\'t exist. '
      'Please run "gn gen" to generate.' % BUILD_DIR)
  _ValidateCurrentPlatformIsSupported()
  _ValidateBuildingWithClangCoverage()
  absolute_filter_paths = []
  if args.filters:
    absolute_filter_paths = _VerifyPathsAndReturnAbsolutes(args.filters)
  _SetupOutputDir()
  # Get .profdata file and list of binary paths.
  if args.web_tests:
    commands = [_GetCommandForWebTests(args.web_tests)]
    profdata_file_path = _CreateCoverageProfileDataForTargets(
        args.targets, commands, args.jobs)
    binary_paths = [_GetBinaryPathForWebTests()]
  elif args.command:
    for i in range(len(args.command)):
      assert not 'run_web_tests.py' in args.command[i], (
          'run_web_tests.py is not supported via --command argument. '
          'Please use --run-web-tests argument instead.')
    # A list of commands are provided. Run them to generate profdata file, and
    # create a list of binary paths from parsing commands.
    _VerifyTargetExecutablesAreInBuildDirectory(args.command)
    profdata_file_path = _CreateCoverageProfileDataForTargets(
        args.targets, args.command, args.jobs)
    binary_paths = [_GetBinaryPath(command) for command in args.command]
  else:
    # An input prof-data file is already provided. Just calculate binary paths.
    profdata_file_path = args.profdata_file
    binary_paths = _GetBinaryPathsFromTargets(args.targets, args.build_dir)
  # If the checkout uses the hermetic xcode binaries, then otool must be
  # directly invoked. The indirection via /usr/bin/otool won't work unless
  # there's an actual system install of Xcode.
  otool_path = None
  if sys.platform == 'darwin':
    hermetic_otool_path = os.path.join(
        SRC_ROOT_PATH, 'build', 'mac_files', 'xcode_binaries', 'Contents',
        'Developer', 'Toolchains', 'XcodeDefault.xctoolchain', 'usr', 'bin',
        'otool')
    if os.path.exists(hermetic_otool_path):
      otool_path = hermetic_otool_path
  # On Android the instrumented code lives in .so libraries; elsewhere the
  # binaries' shared-library dependencies are added to the export set.
  if _IsAndroid():
    binary_paths = _GetBinaryPathsForAndroid(args.targets)
  elif sys.platform.startswith('linux') or sys.platform.startswith('darwin'):
    binary_paths.extend(
        coverage_utils.GetSharedLibraries(binary_paths, BUILD_DIR, otool_path))
  assert args.format in ['html', 'lcov', 'text'], (
      '%s is not a valid output format for "llvm-cov show/export". Only '
      '"text", "html" and "lcov" formats are supported.' % (args.format))
  logging.info('Generating code coverage report in %s (this can take a while '
               'depending on size of target!).' % (args.format))
  per_file_summary_data = _GeneratePerFileCoverageSummary(
      binary_paths, profdata_file_path, absolute_filter_paths,
      args.ignore_filename_regex)
  # lcov output skips the per-file line-by-line report and post-processing.
  if args.format == 'lcov':
    _GeneratePerFileLineByLineCoverageInLcov(
        binary_paths, profdata_file_path, absolute_filter_paths,
        args.ignore_filename_regex)
    return
  _GeneratePerFileLineByLineCoverageInFormat(
      binary_paths, profdata_file_path, absolute_filter_paths,
      args.ignore_filename_regex, args.format)
  component_mappings = None
  if not args.no_component_view:
    component_mappings = json.load(urlopen(COMPONENT_MAPPING_URL))
  # Call prepare here.
  processor = coverage_utils.CoverageReportPostProcessor(
      OUTPUT_DIR,
      SRC_ROOT_PATH,
      per_file_summary_data,
      no_component_view=args.no_component_view,
      no_file_view=args.no_file_view,
      component_mappings=component_mappings)
  if args.format == 'html':
    processor.PrepareHtmlReport()
# Script entry point; the return value of Main() (None on success) becomes the
# process exit status via sys.exit.
if __name__ == '__main__':
  sys.exit(Main())
| {
"content_hash": "ccba0c04b6e581f09a9e41a45c681bef",
"timestamp": "",
"source": "github",
"line_count": 1160,
"max_line_length": 107,
"avg_line_length": 36.88620689655173,
"alnum_prop": 0.6840936711227447,
"repo_name": "chromium/chromium",
"id": "826496ac40806a6252908c384f65b3a4c7b7de35",
"size": "42952",
"binary": false,
"copies": "7",
"ref": "refs/heads/main",
"path": "tools/code_coverage/coverage.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""
This module exists only for schema compatibility for existing databases. As
soon as Axiom supports removing types from a store, this module can be deleted.
"""
from axiom.item import Item
from axiom import attributes
class HyperbolaPublicPage(Item):
    """
    Needed for schema compatibility only.
    """
    # Retained solely so existing Axiom stores containing this type keep
    # loading; see the module docstring.
    installedOn = attributes.reference()
| {
"content_hash": "2c35815f15a0fb3564f62bf600241a45",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 79,
"avg_line_length": 27.307692307692307,
"alnum_prop": 0.7492957746478873,
"repo_name": "twisted/hyperbola",
"id": "59df026c0d94e9e87467002fa087b05e91bdf882",
"size": "355",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hyperbola/publicpage.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2571"
},
{
"name": "JavaScript",
"bytes": "12787"
},
{
"name": "Python",
"bytes": "105403"
}
],
"symlink_target": ""
} |
import motionSensor
import soundPlayer
import logging
import sys
import getopt
import glob
import random
import ConfigParser
import datetime
import time
# ==============================================================================
# Globals
# ------------------------------------------------------------------------------
gDebugMode = False  # --debug: enables DEBUG-level console logging in init().
gUserTrigger = False  # --userTrigger: passes inputPin=-1 to motionSensor.init.
gLogName = "motionSound"  # Shared logger name, also passed to the sub-modules.
gLogFile = "motionSound.log"  # INFO-level file log destination.
gConfigFile = "motionSound.cfg"  # Read/written by readConfig()/writeConfig().
gLogger = logging.getLogger(gLogName)
# ==============================================================================
# parseArgs
# Parse the command line arguments
# ------------------------------------------------------------------------------
def parseArgs():
    """Parses sys.argv into the module-level flag globals.

    Recognised options:
      -d / --debug        set gDebugMode (DEBUG-level console logging)
      -u / --userTrigger  set gUserTrigger (manual trigger instead of GPIO pin)

    Exits the process with a non-zero status on an unrecognised option.
    """
    global gDebugMode
    global gUserTrigger
    try:
        # Get the list of options provided, and there args
        opts, args = getopt.getopt(sys.argv[1:], "du", ["debug", "userTrigger"])
    except getopt.GetoptError as e:
        # Fix: report the error on stderr and exit non-zero (the original
        # printed to stdout and exited 0, hiding the failure from callers).
        sys.stderr.write("Error parsing args: %s\n" % (e,))
        sys.exit(1)
    # Loop through and react to each option
    for opt, arg in opts:
        if opt in ("-d", "--debug"):
            gDebugMode = True
        elif opt in ("-u", "--userTrigger"):
            gUserTrigger = True
# ==============================================================================
# init
# Initialise self and all sub systems
# ------------------------------------------------------------------------------
def init():
    """Parses args, configures logging, and initialises the sub-modules.

    Must be called once before the main loop; mutates the module logger and
    the flag globals via parseArgs().
    """
    global gLogger
    parseArgs()
    # Initialise console logger
    consoleHandle = logging.StreamHandler()
    lvl = logging.INFO
    if gDebugMode:
        lvl = logging.DEBUG
    formatString = "%(levelname)-8s %(asctime)s %(funcName)s:%(lineno)s: %(message)s"
    formatter = logging.Formatter(formatString)
    consoleHandle.setLevel(lvl)
    consoleHandle.setFormatter(formatter)
    gLogger.setLevel(lvl)
    gLogger.addHandler(consoleHandle)
    # Initialise the file logging; the file always stays at INFO even when the
    # console is in debug mode.
    fileHandle = logging.FileHandler(gLogFile)
    fileHandle.setLevel(logging.INFO)
    fileHandle.setFormatter(formatter)
    gLogger.addHandler(fileHandle)
    gLogger.debug("Initialising")
    config = readConfig()
    inputChannel = config.getint("gpio", "inputpin")
    # -1 switches motionSensor into user-triggered mode (no GPIO pin).
    if gUserTrigger:
        inputChannel = -1
    motionSensor.init(inputPin=inputChannel, logName=gLogName)
    soundPlayer.init(logName=gLogName)
# ==============================================================================
# cleanup
# Perform any necessary cleanup before exitting
# ------------------------------------------------------------------------------
def cleanup():
    """Releases the motion-sensor resources before exit."""
    gLogger.debug("Cleaning up")
    motionSensor.cleanup()
# ==============================================================================
# readConfig
# Read the config file
# ------------------------------------------------------------------------------
def readConfig():
    """Loads and returns the application config from gConfigFile."""
    parser = ConfigParser.RawConfigParser()
    parser.read(gConfigFile)
    return parser
# ==============================================================================
# writeConfig
# Write a given config to the file
# ------------------------------------------------------------------------------
def writeConfig(config):
    """Persists the given config object to gConfigFile."""
    with open(gConfigFile, "wb") as handle:
        config.write(handle)
# ==============================================================================
# playRandomFile
# Choose a random file from our directory and play it
# ------------------------------------------------------------------------------
def playRandomFile():
    """Plays a random .ogg from the cwd, avoiding recently played files.

    The recently-played list is persisted in the config file between runs,
    '/'-separated, capped at recentlyplayed.maxlistlength entries.
    """
    config = readConfig()
    # Build a list of ogg files in this directory
    filenames = glob.glob("*.ogg")
    # Choose a random file from this list
    gLogger.debug("Choosing random file")
    filename = random.choice(filenames)
    # Make sure this file isn't in the list of recently played files
    recentlyPlayed = config.get("recentlyplayed", "list").split("/")
    # Only re-roll when a never-recently-played file exists, otherwise the
    # while loop below could never terminate.
    if len(recentlyPlayed) < len(filenames):
        while filename in recentlyPlayed:
            gLogger.debug("%s has been played recently, choosing another file" % (filename))
            filename = random.choice(filenames)
    # Add this file to the list of recently played ones
    recentlyPlayed.append(filename)
    # Only keep up to a max number of recently played files
    maxListLength = config.getint("recentlyplayed", "maxlistlength")
    recentlyPlayed = recentlyPlayed[-maxListLength:]
    # Write the updated list back to the config file
    recentlyPlayedStr = "/".join(recentlyPlayed)
    config.set("recentlyplayed", "list", recentlyPlayedStr)
    writeConfig(config)
    # Play this file
    soundTimeout = config.getint("timeouts", "soundtimeout")
    soundPlayer.play(filename, timeout=soundTimeout)
# ==============================================================================
# shouldPlaySound
# Decides whether a file should be played
# ------------------------------------------------------------------------------
def shouldPlaySound():
    """Decide whether a sound should be played right now.

    Checks the global disable flag, the allowed hour window, and a
    percentage chance that grows on every miss and resets on a hit.
    Side effect: writes the updated chance back to the config file.
    """
    config = readConfig()
    # Sound can be disabled outright in the config file.
    if config.getboolean("misc", "sounddisabled"):
        gLogger.info("Sound is disabled in the config file")
        return False
    # Only allow sound inside the configured hour window.
    hour = datetime.datetime.now().hour
    if hour < config.getint("allowedtimes", "earliest"):
        gLogger.info("Too early to play sound")
        return False
    if hour >= config.getint("allowedtimes", "latest"):
        gLogger.info("Too late to play sound")
        return False
    # Roll 1-100 against the current chance percentage.
    currentChance = config.getint("chance", "current")
    roll = random.randint(1, 100)
    willPlay = roll <= currentChance
    if willPlay:
        # Hit: reset the chance back to its initial value.
        currentChance = config.getint("chance", "initial")
    else:
        gLogger.debug("Generated %d, must be %d or lower" % (roll, currentChance))
        # Miss: bump the chance, capped at the configured maximum.
        increment = config.getint("chance", "increment")
        maxChance = config.getint("chance", "max")
        currentChance = min(currentChance + increment, maxChance)
        gLogger.info("Chance to play is now %d%%" % (currentChance))
    # Persist the new chance for the next decision.
    config.set("chance", "current", currentChance)
    writeConfig(config)
    return willPlay
# ==============================================================================
# main
# ------------------------------------------------------------------------------
if __name__ == "__main__":
    init()
    # Catch keyboard interrupts
    try:
        # Main loop: block until the motion sensor fires, maybe play a
        # sound, then sleep for a configurable back-off so one motion
        # event does not retrigger immediately.
        while True:
            motionSensor.waitForMotion()
            if shouldPlaySound():
                playRandomFile()
            config = readConfig()
            secondsToWait = config.getint("timeouts", "secondstowaitaftersound")
            gLogger.debug("Waiting %d seconds" % (secondsToWait))
            time.sleep(secondsToWait)
    except KeyboardInterrupt:
        # Ctrl-C is the expected shutdown path.
        gLogger.debug("Keyboard interrupt caught")
        cleanup()
| {
"content_hash": "cafc061fedec55b6de2faf88a7e47076",
"timestamp": "",
"source": "github",
"line_count": 211,
"max_line_length": 86,
"avg_line_length": 33.03791469194313,
"alnum_prop": 0.5696456749390332,
"repo_name": "sizlo/RPiFun",
"id": "3b5047c8b30a1aa6bca3b142b0c199c7496f55bc",
"size": "7190",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MotionSound/motionSound.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "322"
},
{
"name": "Python",
"bytes": "28895"
}
],
"symlink_target": ""
} |
__author__ = 'kjoseph'
import itertools, codecs, sys
from multiprocessing import Pool
from casostwitter.general_utils import tab_stringify_newline
from casostwitter.Tweet import Tweet
from collections import defaultdict, Counter
import ujson as json
from datetime import datetime
top_dir = "/usr3/kjoseph/final_minerva_data/"
# Study window bounds for tweet timestamps.
# NOTE: use plain decimal literals here -- leading zeros (01, 00) are
# Python 2 octal syntax and a SyntaxError under Python 3.
EARLIEST_DATE = datetime(2012, 1, 31, 23, 59)
LATEST_DATE = datetime(2013, 1, 1, 0, 0)
def gen_term_network(args):
    """Collect user ids from one shard of tweet JSON.

    args: (i, data_dir, output_dir) tuple -- shard index plus input and
    output directories. Writes every user id seen to <output_dir><i>_u.txt
    and returns the number of distinct users whose tweets fall inside the
    [EARLIEST_DATE, LATEST_DATE] study window.
    """
    i, data_dir, output_dir = args
    data_file = data_dir + str(i) + ".json"
    all_users = set()
    user_count = set()
    for line in codecs.open(data_file):
        try:
            tweet_json = json.loads(line)
            tweet = Tweet(tweet_json)
            # Fall back to screen_name when the numeric id is missing.
            uid = tweet.user['id'] if tweet.user['id'] is not None else tweet.user['screen_name']
            if tweet.user['id'] is not None:
                all_users.add(uid)
        except Exception:
            # BUG FIX: skip malformed lines entirely. The original fell
            # through here and reused `tweet`/`uid` from the previous
            # iteration (raising NameError when the very first line failed).
            print('failed tweet')
            continue
        if tweet.created_at is None or tweet.created_at < EARLIEST_DATE or tweet.created_at > LATEST_DATE:
            continue
        user_count.add(uid)
    output_file = codecs.open(output_dir + str(i) + "_u.txt", "w", "utf8")
    for u in all_users:
        output_file.write(tab_stringify_newline([u]))
    output_file.close()
    print(i)
    return len(user_count)
CPU_COUNT = 80
pool = Pool(processes=CPU_COUNT)
# First pass: process the 5000 "all_good_tweets" shards, writing per-shard
# user lists to user_out_good/ and collecting per-shard user counts.
results = pool.map(gen_term_network,
                   itertools.izip(range(5000),
                                  itertools.repeat(top_dir+"all_good_tweets/"),
                                  itertools.repeat(top_dir+"user_out_good/")))
out_fil = codecs.open("full_user_count.tsv", "w", "utf8")
for r in results:
    out_fil.write(str(r)+"\n")
# Second pass: same computation over the "tweet_tracker_split" shards,
# appended to the same output file.
results = pool.map(gen_term_network,
                   itertools.izip(range(5000),
                                  itertools.repeat(top_dir+"tweet_tracker_split/"),
                                  itertools.repeat(top_dir+"user_out_tt/")))
for r in results:
    out_fil.write(str(r)+"\n")
out_fil.close()
| {
"content_hash": "21cc1130253a6f5889fd61d3e6002b5e",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 106,
"avg_line_length": 30,
"alnum_prop": 0.5941176470588235,
"repo_name": "kennyjoseph/minerva_relig_paper",
"id": "cbfa3cc51a1fc014dca7e4931d0778d049d3ace4",
"size": "2040",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/casos_total_n_users.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10476"
},
{
"name": "R",
"bytes": "26716"
},
{
"name": "TeX",
"bytes": "409302"
}
],
"symlink_target": ""
} |
'''Functional tests using WebTest.'''
import httplib as http
import unittest
import re
import mock
from nose.tools import * # noqa (PEP8 asserts)
from framework.mongo.utils import to_mongo_key
from framework.auth import exceptions as auth_exc
from framework.auth.core import Auth
from tests.base import OsfTestCase, fake
from tests.factories import (UserFactory, AuthUserFactory, ProjectFactory,
WatchConfigFactory, ApiKeyFactory,
NodeFactory, NodeWikiFactory, RegistrationFactory,
UnregUserFactory, UnconfirmedUserFactory,
PrivateLinkFactory)
from tests.test_features import requires_piwik
from tests.test_addons import assert_urls_equal
from website import settings, language
from website.addons.twofactor.tests import _valid_code
from website.security import random_string
from website.project.metadata.schemas import OSF_META_SCHEMAS
from website.project.model import ensure_schemas
from website.util import web_url_for, api_url_for
class TestDisabledUser(OsfTestCase):
    """Responses for an account flagged as disabled."""

    def setUp(self):
        super(TestDisabledUser, self).setUp()
        self.user = UserFactory()
        self.user.set_password('Korben Dallas')
        self.user.is_disabled = True
        self.user.save()

    def test_profile_disabled_returns_410(self):
        # A disabled account's profile responds 410 Gone.  (The method was
        # previously named ..._returns_401, contradicting the assertion.)
        res = self.app.get(self.user.url, expect_errors=True)
        assert_equal(res.status_code, 410)
class TestAnUnregisteredUser(OsfTestCase):
    """Behaviour of the site for visitors who are not logged in."""

    def test_cant_see_profile_if_not_logged_in(self):
        # An anonymous request for the profile page is redirected...
        response = self.app.get(web_url_for('profile_view'))
        assert_equal(response.status_code, 302)
        # ...and following the redirect yields a 401 with a login prompt.
        response = response.follow(expect_errors=True)
        assert_equal(response.status_code, 401)
        assert_in('You must log in to access this resource', response)
class TestTwoFactor(OsfTestCase):
    """Login flow for a user with the two-factor addon enabled and confirmed."""

    @mock.patch('website.addons.twofactor.models.push_status_message')
    def setUp(self, mock_push_message):
        super(TestTwoFactor, self).setUp()
        self.user = UserFactory()
        self.user.set_password('science')
        self.user.save()
        # Enable and confirm the two-factor addon for this user.
        self.user.add_addon('twofactor')
        self.user_settings = self.user.get_addon('twofactor')
        self.user_settings.is_confirmed = True
        self.user_settings.save()

    def test_user_with_two_factor_redirected_to_two_factor_page(self):
        # Goes to log in page
        res = self.app.get(web_url_for('auth_login'))
        # Fills in log in form with correct username/password
        form = res.forms['logInForm']
        form['username'] = self.user.username
        form['password'] = 'science'
        # Submits; a 2FA user lands on the two-factor page, not the dashboard.
        res = form.submit()
        res = res.follow()
        assert_equal(web_url_for('two_factor'), res.request.path)
        assert_equal(res.status_code, 200)

    def test_user_with_2fa_failure(self):
        # Goes to log in page
        res = self.app.get(web_url_for('auth_login'))
        # Fills in log in form with correct username/password
        form = res.forms['logInForm']
        form['username'] = self.user.username
        form['password'] = 'science'
        # Submits
        res = form.submit()
        res = res.follow()
        # Fills in 2FA form with incorrect two factor code
        form = res.forms['twoFactorSignInForm']
        form['twoFactorCode'] = 0000000
        # Submits; wrong code keeps the user on the 2FA page with a 401.
        res = form.submit(expect_errors=True)
        assert_equal(web_url_for('two_factor'), res.request.path)
        assert_equal(res.status_code, 401)

    def test_user_with_2fa_success(self):
        # Goes to log in page
        res = self.app.get(web_url_for('auth_login'))
        # Fills in log in form with correct username/password
        form = res.forms['logInForm']
        form['username'] = self.user.username
        form['password'] = 'science'
        # Submits
        res = form.submit()
        res = res.follow()
        # Fills in 2FA form with a correct (freshly generated) TOTP code
        form = res.forms['twoFactorSignInForm']
        form['twoFactorCode'] = _valid_code(self.user_settings.totp_secret)
        res = form.submit()
        res.follow()
        # A valid code redirects to the dashboard.
        assert_urls_equal(web_url_for('dashboard'), res.location)
        assert_equal(res.status_code, 302)
class TestAUser(OsfTestCase):
    """Site behaviour as seen by a registered user (login, dashboard,
    project permissions, wiki, profile, password reset)."""

    def setUp(self):
        super(TestAUser, self).setUp()
        self.user = UserFactory()
        self.user.set_password('science')
        # Add an API key for quicker authentication
        api_key = ApiKeyFactory()
        self.user.api_keys.append(api_key)
        self.user.save()
        self.auth = ('test', api_key._primary_key)

    def _login(self, username, password):
        '''Log in a user via the login page and return the resulting page.'''
        res = self.app.get(web_url_for('auth_login')).maybe_follow()
        # Fills out login info
        form = res.forms['logInForm']  # Get the form from its ID
        form['username'] = username
        form['password'] = password
        # submits
        res = form.submit().maybe_follow()
        return res

    def test_can_see_profile_url(self):
        res = self.app.get(self.user.url).maybe_follow()
        assert_in(self.user.url, res)

    def test_can_see_homepage(self):
        # Goes to homepage
        res = self.app.get('/').maybe_follow()  # Redirects
        assert_equal(res.status_code, 200)

    def test_can_log_in(self):
        # Log in and out
        self._login(self.user.username, 'science')
        self.app.get('/logout/')
        # Goes to home page
        res = self.app.get('/').maybe_follow()
        # Fills out login info (navbar sign-in form this time)
        form = res.forms['signInForm']  # Get the form from its ID
        form['username'] = self.user.username
        form['password'] = 'science'
        # submits
        res = form.submit().maybe_follow()
        # Sees dashboard with projects and watched projects
        assert_in('Projects', res)
        assert_in('Watchlist', res)

    def test_sees_flash_message_on_bad_login(self):
        # Goes to log in page
        res = self.app.get('/account/').maybe_follow()
        # Fills the form with incorrect password
        form = res.forms['logInForm']
        form['username'] = self.user.username
        form['password'] = 'thisiswrong'
        # Submits
        res = form.submit()
        # Sees a flash message
        assert_in('Log-in failed', res)

    @mock.patch('website.addons.twofactor.models.push_status_message')
    def test_user_with_two_factor_redirected_to_two_factor_page(self, mock_push_message):
        self.user.add_addon('twofactor')
        self.user_settings = self.user.get_addon('twofactor')
        self.user_settings.is_confirmed = True
        self.user_settings.save()
        # Goes to log in page
        res = self.app.get(web_url_for('auth_login'))
        # Fills the form with correct password
        form = res.forms['logInForm']
        form['username'] = self.user.username
        form['password'] = 'science'
        # Submits
        res = form.submit()
        res = res.follow()
        assert_equal(web_url_for('two_factor'), res.request.path)
        assert_equal(res.status_code, 200)

    @mock.patch('website.addons.twofactor.models.push_status_message')
    def test_user_with_two_factor_redirected_to_two_factor_page_from_navbar_login(self, mock_push_message):
        self.user.add_addon('twofactor')
        self.user_settings = self.user.get_addon('twofactor')
        self.user_settings.is_confirmed = True
        self.user_settings.save()
        # Goes to log in page
        res = self.app.get(web_url_for('auth_login'))  # TODO(hrybacki): Is there an actual landing page route?
        # Fills in the navbar sign-in form with correct password
        form = res.forms['signInForm']
        form['username'] = self.user.username
        form['password'] = 'science'
        # Submits
        res = form.submit()
        res = res.follow()
        assert_equal(web_url_for('two_factor'), res.request.path)
        assert_equal(res.status_code, 200)

    @mock.patch('website.addons.twofactor.models.push_status_message')
    def test_access_resource_before_two_factor_authorization(self, mock_push_message):
        # User attempts to access resource after login page but before two factor authentication
        self.user.add_addon('twofactor')
        self.user_settings = self.user.get_addon('twofactor')
        self.user_settings.is_confirmed = True
        self.user_settings.save()
        # Goes to log in page
        res = self.app.get(web_url_for('auth_login'))
        # Fills the form with correct password
        form = res.forms['logInForm']
        form['username'] = self.user.username
        form['password'] = 'science'
        # Submits (does NOT complete the 2FA step)
        form.submit()
        # User attempts to access a protected resource
        res = self.app.get(web_url_for('dashboard'))
        assert_equal(res.status_code, 302)
        assert_in(web_url_for('auth_login'), res.location)
        res = res.follow(expect_errors=True)
        assert_equal(res.status_code, 401)

    @mock.patch('website.addons.twofactor.models.push_status_message')
    def test_is_redirected_to_dashboard_after_two_factor_login(self, mock_push_message):
        # Full login including the two-factor step lands on the dashboard.
        self.user.add_addon('twofactor')
        self.user_settings = self.user.get_addon('twofactor')
        self.user_settings.is_confirmed = True
        self.user_settings.save()
        # Goes to log in page
        res = self.app.get(web_url_for('auth_login'))
        # Fills the form with correct password
        form = res.forms['logInForm']
        form['username'] = self.user.username
        form['password'] = 'science'
        # Submits
        res = form.submit()
        res = res.follow()
        # Fills the form with the correct 2FA code
        form = res.forms['twoFactorSignInForm']
        form['twoFactorCode'] = _valid_code(self.user_settings.totp_secret)
        # Submits
        res = form.submit()
        res = res.follow()
        assert_equal(res.status_code, 200)
        assert_equal(res.request.path, web_url_for('dashboard'))

    def test_is_redirected_to_dashboard_already_logged_in_at_login_page(self):
        res = self._login(self.user.username, 'science')
        res = self.app.get('/login/').follow()
        assert_equal(res.request.path, '/dashboard/')

    def test_sees_projects_in_her_dashboard(self):
        # the user already has a project
        project = ProjectFactory(creator=self.user)
        project.add_contributor(self.user)
        project.save()
        # Goes to homepage, already logged in
        res = self._login(self.user.username, 'science')
        res = self.app.get('/').maybe_follow()
        # Clicks Dashboard link in navbar
        res = res.click('Dashboard', index=0)
        assert_in('Projects', res)  # Projects heading
        # The project title is listed
        # TODO: (bgeiger) figure out how to make this assertion work with hgrid view
        #assert_in(project.title, res)

    def test_does_not_see_osffiles_in_user_addon_settings(self):
        res = self._login(self.user.username, 'science')
        res = self.app.get('/settings/addons/', auth=self.auth, auto_follow=True)
        assert_not_in('OSF Storage', res)

    def test_sees_osffiles_in_project_addon_settings(self):
        project = ProjectFactory(creator=self.user)
        project.add_contributor(
            self.user,
            permissions=['read', 'write', 'admin'],
            save=True)
        res = self.app.get('/{0}/settings/'.format(project._primary_key), auth=self.auth, auto_follow=True)
        assert_in('OSF Storage', res)

    @unittest.skip("Can't test this, since logs are dynamically loaded")
    def test_sees_log_events_on_watched_projects(self):
        # Another user has a public project
        u2 = UserFactory(username='bono@u2.com', fullname='Bono')
        key = ApiKeyFactory()
        u2.api_keys.append(key)
        u2.save()
        project = ProjectFactory(creator=u2, is_public=True)
        project.add_contributor(u2)
        auth = Auth(user=u2, api_key=key)
        project.save()
        # User watches the project
        watch_config = WatchConfigFactory(node=project)
        self.user.watch(watch_config)
        self.user.save()
        # Goes to her dashboard, already logged in
        res = self.app.get('/dashboard/', auth=self.auth, auto_follow=True)
        # Sees logs for the watched project
        assert_in('Watched Projects', res)  # Watched Projects header
        # The log action is in the feed
        assert_in(project.title, res)

    def test_sees_correct_title_home_page(self):
        # User goes to homepage
        res = self.app.get('/', auto_follow=True)
        title = res.html.title.string
        # page title is correct
        assert_equal('OSF | Home', title)

    def test_sees_correct_title_on_dashboard(self):
        # User goes to dashboard
        res = self.app.get('/dashboard/', auth=self.auth, auto_follow=True)
        title = res.html.title.string
        assert_equal('OSF | Dashboard', title)

    def test_can_see_make_public_button_if_admin(self):
        # User is an admin contributor on a project
        project = ProjectFactory()
        project.add_contributor(
            self.user,
            permissions=['read', 'write', 'admin'],
            save=True)
        # User goes to the project page
        res = self.app.get(project.url, auth=self.auth).maybe_follow()
        assert_in('Make Public', res)

    def test_cant_see_make_public_button_if_not_admin(self):
        # User is a non-admin contributor on a project
        project = ProjectFactory()
        project.add_contributor(
            self.user,
            permissions=['read', 'write'],
            save=True)
        # User goes to the project page
        res = self.app.get(project.url, auth=self.auth).maybe_follow()
        assert_not_in('Make Public', res)

    def test_can_see_make_private_button_if_admin(self):
        # User is an admin contributor on a public project
        project = ProjectFactory(is_public=True)
        project.add_contributor(
            self.user,
            permissions=['read', 'write', 'admin'],
            save=True)
        # User goes to the project page
        res = self.app.get(project.url, auth=self.auth).maybe_follow()
        assert_in('Make Private', res)

    def test_cant_see_make_private_button_if_not_admin(self):
        # User is a non-admin contributor on a public project
        project = ProjectFactory(is_public=True)
        project.add_contributor(
            self.user,
            permissions=['read', 'write'],
            save=True)
        # User goes to the project page
        res = self.app.get(project.url, auth=self.auth).maybe_follow()
        assert_not_in('Make Private', res)

    def test_sees_logs_on_a_project(self):
        project = ProjectFactory(is_public=True)
        # User goes to the project's page
        res = self.app.get(project.url, auth=self.auth).maybe_follow()
        # Can see log event
        assert_in('created', res)

    def test_no_wiki_content_message(self):
        project = ProjectFactory(creator=self.user)
        # Goes to project's wiki, where there is no content
        res = self.app.get('/{0}/wiki/home/'.format(project._primary_key), auth=self.auth)
        # Sees a message indicating no content
        assert_in('No wiki content', res)

    def test_wiki_content(self):
        project = ProjectFactory(creator=self.user)
        wiki_page = 'home'
        wiki_content = 'Kittens'
        NodeWikiFactory(user=self.user, node=project, content=wiki_content, page_name=wiki_page)
        res = self.app.get('/{0}/wiki/{1}/'.format(
            project._primary_key,
            wiki_page,
        ), auth=self.auth)
        assert_not_in('No wiki content', res)
        assert_in(wiki_content, res)

    def test_wiki_page_name_non_ascii(self):
        project = ProjectFactory(creator=self.user)
        non_ascii = to_mongo_key('WöRlÐé')
        self.app.get('/{0}/wiki/{1}/'.format(
            project._primary_key,
            non_ascii
        ), auth=self.auth, expect_errors=True)
        project.update_node_wiki(non_ascii, 'new content', Auth(self.user))
        assert_in(non_ascii, project.wiki_pages_current)

    def test_noncontributor_cannot_see_wiki_if_no_content(self):
        user2 = UserFactory()
        # user2 creates a public project and adds no wiki content
        project = ProjectFactory(creator=user2, is_public=True)
        # self navigates to project
        res = self.app.get(project.url).maybe_follow()
        # Should not see wiki widget (since non-contributor and no content)
        assert_not_in('No wiki content', res)

    def test_wiki_does_not_exist(self):
        project = ProjectFactory(creator=self.user)
        res = self.app.get('/{0}/wiki/{1}/'.format(
            project._primary_key,
            'not a real page yet',
        ), auth=self.auth, expect_errors=True)
        assert_in('No wiki content', res)

    def test_sees_own_profile(self):
        res = self.app.get('/profile/', auth=self.auth)
        td1 = res.html.find('td', text=re.compile(r'Public(.*?)Profile'))
        td2 = td1.find_next_sibling('td')
        assert_equal(td2.text, self.user.display_absolute_url)

    def test_sees_another_profile(self):
        user2 = UserFactory()
        res = self.app.get(user2.url, auth=self.auth)
        td1 = res.html.find('td', text=re.compile(r'Public(.*?)Profile'))
        td2 = td1.find_next_sibling('td')
        assert_equal(td2.text, user2.display_absolute_url)

    # Regression test for https://github.com/CenterForOpenScience/osf.io/issues/1320
    @mock.patch('framework.auth.views.mails.send_mail')
    def test_can_reset_password(self, mock_send_mail):
        # A registered user
        user = UserFactory()
        # goes to the login page
        url = web_url_for('_forgot_password')
        res = self.app.get(url)
        # and fills out forgot password form
        form = res.forms['forgotPasswordForm']
        form['forgot_password-email'] = user.username
        # submits
        res = form.submit()
        # mail was sent
        # NOTE(review): missing parentheses -- this attribute access does
        # not actually assert anything; should be assert_called().
        mock_send_mail.assert_called
        # gets 200 response
        assert_equal(res.status_code, 200)
        # URL is /forgotpassword
        assert_equal(res.request.path, web_url_for('forgot_password'))
class TestRegistrations(OsfTestCase):
    """Views for a registration and the project it was created from."""

    def setUp(self):
        super(TestRegistrations, self).setUp()
        ensure_schemas()
        self.user = UserFactory()
        # Add an API key for quicker authentication
        api_key = ApiKeyFactory()
        self.user.api_keys.append(api_key)
        self.user.save()
        self.auth = ('test', api_key._primary_key)
        self.original = ProjectFactory(creator=self.user, is_public=True)
        # A registration
        self.project = RegistrationFactory(
            creator=self.user,
            project=self.original,
            user=self.user,
        )

    def test_can_see_contributor(self):
        # Goes to project's page
        res = self.app.get(self.project.url, auth=self.auth).maybe_follow()
        # 'Sharing' (the contributors view) is in the project navigation bar
        subnav = res.html.select('#projectSubnav')[0]
        assert_in('Sharing', subnav.text)

    def test_sees_registration_templates(self):
        # Browse to original project
        res = self.app.get(
            '{}register/'.format(self.original.url),
            auth=self.auth
        ).maybe_follow()
        # Find registration options
        options = res.html.find(
            'select', id='select-registration-template'
        ).find_all('option')
        # Should see number of options equal to number of registration
        # templates, plus one for 'Select...'
        assert_equal(
            len(options),
            len(OSF_META_SCHEMAS) + 1
        )
        # First option should have empty value
        assert_equal(options[0].get('value'), '')
        # All registration templates should be listed in <option>
        option_values = [
            option.get('value')
            for option in options[1:]
        ]
        for schema in OSF_META_SCHEMAS:
            assert_in(
                schema['name'],
                option_values
            )

    def test_registration_nav_not_seen(self):
        # Goes to project's page
        res = self.app.get(self.project.url, auth=self.auth).maybe_follow()
        # Registrations is not in the project navigation bar
        subnav = res.html.select('#projectSubnav')[0]
        assert_not_in('Registrations', subnav.text)

    def test_settings_nav_not_seen(self):
        # Goes to project's page
        res = self.app.get(self.project.url, auth=self.auth).maybe_follow()
        # Settings is not in the project navigation bar
        subnav = res.html.select('#projectSubnav')[0]
        assert_not_in('Settings', subnav.text)
class TestComponents(OsfTestCase):
    """Component (child node) views and per-role permissions."""

    def setUp(self):
        super(TestComponents, self).setUp()
        self.user = AuthUserFactory()
        self.consolidate_auth = Auth(user=self.user)
        self.project = ProjectFactory(creator=self.user)
        self.project.add_contributor(contributor=self.user, auth=self.consolidate_auth)
        # A non-project component
        self.component = NodeFactory(
            category='hypothesis',
            creator=self.user,
            project=self.project,
        )
        self.component.save()
        # Toggle privacy to generate some log entries on the component.
        self.component.set_privacy('public', self.consolidate_auth)
        self.component.set_privacy('private', self.consolidate_auth)
        self.project.save()
        self.project_url = self.project.web_url_for('view_project')

    def test_can_create_component_from_a_project(self):
        res = self.app.get(self.project.url, auth=self.user.auth).maybe_follow()
        assert_in('Add Component', res)

    def test_cannot_create_component_from_a_component(self):
        res = self.app.get(self.component.url, auth=self.user.auth).maybe_follow()
        assert_not_in('Add Component', res)

    def test_sees_parent(self):
        res = self.app.get(self.component.url, auth=self.user.auth).maybe_follow()
        parent_title = res.html.find_all('h2', class_='node-parent-title')
        assert_equal(len(parent_title), 1)
        assert_in(self.project.title, parent_title[0].text)

    def test_delete_project(self):
        res = self.app.get(
            self.component.url + 'settings/',
            auth=self.user.auth
        ).maybe_follow()
        assert_in(
            'Delete {0}'.format(self.component.project_or_component),
            res
        )

    def test_cant_delete_project_if_not_admin(self):
        non_admin = AuthUserFactory()
        self.component.add_contributor(
            non_admin,
            permissions=['read', 'write'],
            auth=self.consolidate_auth,
            save=True,
        )
        res = self.app.get(
            self.component.url + 'settings/',
            auth=non_admin.auth
        ).maybe_follow()
        assert_not_in(
            'Delete {0}'.format(self.component.project_or_component),
            res
        )

    def test_can_configure_comments_if_admin(self):
        res = self.app.get(
            self.component.url + 'settings/',
            auth=self.user.auth,
        ).maybe_follow()
        assert_in('Configure Commenting', res)

    def test_cant_configure_comments_if_not_admin(self):
        non_admin = AuthUserFactory()
        self.component.add_contributor(
            non_admin,
            permissions=['read', 'write'],
            auth=self.consolidate_auth,
            save=True,
        )
        res = self.app.get(
            self.component.url + 'settings/',
            auth=non_admin.auth
        ).maybe_follow()
        assert_not_in('Configure commenting', res)

    def test_components_shouldnt_have_component_list(self):
        res = self.app.get(self.component.url, auth=self.user.auth)
        assert_not_in('Components', res)

    def test_do_not_show_registration_button(self):
        # No registrations on the component
        url = self.component.web_url_for('node_registrations')
        res = self.app.get(url, auth=self.user.auth)
        # New registration button is hidden
        assert_not_in('New Registration', res)
class TestPrivateLinkView(OsfTestCase):
    """Viewing a private project through an anonymous view-only link."""

    def setUp(self):
        super(TestPrivateLinkView, self).setUp()
        self.user = AuthUserFactory()  # Is NOT a contributor
        self.project = ProjectFactory(is_public=False)
        self.link = PrivateLinkFactory(anonymous=True)
        self.link.nodes.append(self.project)
        self.link.save()
        self.project_url = self.project.web_url_for('view_project')

    def test_anonymous_link_hide_contributor(self):
        # Anonymous links mask contributor names.
        res = self.app.get(self.project_url, {'view_only': self.link.key})
        assert_in("Anonymous Contributors", res.body)
        assert_not_in(self.user.fullname, res)

    def test_anonymous_link_hides_citations(self):
        res = self.app.get(self.project_url, {'view_only': self.link.key})
        assert_not_in('Citation:', res)

    def test_no_warning_for_read_only_user_with_valid_link(self):
        # A read-only contributor using a valid link sees no warning banner.
        link2 = PrivateLinkFactory(anonymous=False)
        link2.nodes.append(self.project)
        link2.save()
        self.project.add_contributor(
            self.user,
            permissions=['read'],
            save=True,
        )
        res = self.app.get(self.project_url, {'view_only': link2.key},
                           auth=self.user.auth)
        assert_not_in(
            "is being viewed through a private, view-only link. "
            "Anyone with the link can view this project. Keep "
            "the link safe.",
            res.body
        )

    def test_no_warning_for_read_only_user_with_invalid_link(self):
        self.project.add_contributor(
            self.user,
            permissions=['read'],
            save=True,
        )
        res = self.app.get(self.project_url, {'view_only': "not_valid"},
                           auth=self.user.auth)
        assert_not_in(
            "is being viewed through a private, view-only link. "
            "Anyone with the link can view this project. Keep "
            "the link safe.",
            res.body
        )
class TestMergingAccounts(OsfTestCase):
    """Merging a duplicate account into a primary account."""

    def setUp(self):
        super(TestMergingAccounts, self).setUp()
        self.user = UserFactory.build()
        self.user.set_password('science')
        self.user.save()
        self.dupe = UserFactory.build()
        self.dupe.set_password('example')
        self.dupe.save()

    def _login(self, username, password):
        '''Log in a user via the login page.'''
        res = self.app.get(web_url_for('auth_login')).maybe_follow()
        # Fills out login info.  BUG FIX: this helper previously ignored its
        # arguments and always used self.user.username / 'science'.
        form = res.forms['logInForm']
        form['username'] = username
        form['password'] = password
        # submits
        res = form.submit().maybe_follow()
        return res

    @unittest.skip('Disabled for now')
    def test_can_merge_accounts(self):
        res = self._login(self.user.username, 'science')
        # Goes to settings
        res = self.app.get('/settings/').maybe_follow()
        # Clicks merge link
        res = res.click('Merge with duplicate account')
        # Fills out form
        form = res.forms['mergeAccountsForm']
        form['merged_username'] = self.dupe.username
        form['merged_password'] = 'example'
        form['user_password'] = 'science'
        # Submits
        res = form.submit().maybe_follow()
        # Back at the settings page
        assert_equal(res.request.path, '/settings/')
        # Sees a flash message
        assert_in(
            'Successfully merged {0} with this account'.format(
                self.dupe.username
            ),
            res
        )
        # User is merged in database
        self.dupe.reload()
        assert_true(self.dupe.is_merged)

    def test_sees_error_message_when_merged_password_is_wrong(self):
        # User logs in
        res = self._login(self.user.username, 'science')
        res = self.app.get('/user/merge/')
        # Fills out form with a wrong password for the duplicate account
        form = res.forms['mergeAccountsForm']
        form['merged_username'] = self.dupe.username
        form['merged_password'] = 'WRONG'
        form['user_password'] = 'science'
        # Submits
        res = form.submit().maybe_follow()
        # Sees flash message
        assert_in(
            'Could not find that user. Please check the username and '
            'password.',
            res
        )

    @unittest.skip('Disabled for now')
    def test_sees_error_message_when_own_password_is_wrong(self):
        # User logs in
        res = self._login(self.user.username, 'science')
        # Goes to settings
        res = self.app.get('/settings/').maybe_follow()
        # Clicks merge link
        res = res.click('Merge with duplicate account')
        # Fills out form with a wrong password for the primary account
        form = res.forms['mergeAccountsForm']
        form['merged_username'] = self.dupe.username
        form['merged_password'] = 'example'
        form['user_password'] = 'BAD'
        # Submits
        res = form.submit().maybe_follow()
        # Sees flash message
        assert_in(
            'Could not authenticate. Please check your username and password.',
            res
        )

    def test_merged_user_is_not_shown_as_a_contributor(self):
        project = ProjectFactory(is_public=True)
        # Both the master and dupe are contributors
        project.add_contributor(self.dupe, log=False)
        project.add_contributor(self.user, log=False)
        project.save()
        # At the project page, both are listed as contributors
        res = self.app.get(project.url).maybe_follow()
        assert_in(self.user.fullname, res)
        assert_in(self.dupe.fullname, res)
        # The accounts are merged
        self.user.merge_user(self.dupe)
        self.user.save()
        # Now only the master user is shown at the project page
        res = self.app.get(project.url).maybe_follow()
        assert_in(self.user.fullname, res)
        assert_true(self.dupe.is_merged)
        assert_not_in(self.dupe.fullname, res)

    def test_merged_user_has_alert_message_on_profile(self):
        # Master merges dupe
        self.user.merge_user(self.dupe)
        self.user.save()
        # At the dupe user's profile there is an alert message at the top
        # indicating that the user is merged
        res = self.app.get('/profile/{0}/'.format(self.dupe._primary_key)).maybe_follow()
        assert_in('This account has been merged', res)
# FIXME: These affect search in development environment. So need to migrate solr after running.
# # Remove this side effect.
@unittest.skipIf(not settings.SEARCH_ENGINE, 'Skipping because search is disabled')
class TestSearching(OsfTestCase):

    '''Test searching using the search bar. NOTE: These may affect the
    Solr database. May need to migrate after running these.
    '''

    def setUp(self):
        super(TestSearching, self).setUp()
        # Start each test from an empty search index.
        import website.search.search as search
        search.delete_all()
        self.user = UserFactory()
        # Add an API key for quicker authentication
        api_key = ApiKeyFactory()
        self.user.api_keys.append(api_key)
        self.user.save()
        self.auth = ('test', api_key._primary_key)

    @unittest.skip(reason='¯\_(ツ)_/¯ knockout.')
    def test_a_user_from_home_page(self):
        user = UserFactory()
        # Goes to home page
        res = self.app.get('/').maybe_follow()
        # Fills search form
        form = res.forms['searchBar']
        form['q'] = user.fullname
        res = form.submit().maybe_follow()
        # The username shows as a search result
        assert_in(user.fullname, res)

    @unittest.skip(reason='¯\_(ツ)_/¯ knockout.')
    def test_a_public_project_from_home_page(self):
        project = ProjectFactory(title='Foobar Project', is_public=True)
        # Searches a part of the name
        res = self.app.get('/').maybe_follow()
        project.reload()
        form = res.forms['searchBar']
        form['q'] = 'Foobar'
        res = form.submit().maybe_follow()
        # A link to the project is shown as a result
        assert_in('Foobar Project', res)

    @unittest.skip(reason='¯\_(ツ)_/¯ knockout.')
    def test_a_public_component_from_home_page(self):
        component = NodeFactory(title='Foobar Component', is_public=True)
        # Searches a part of the name
        res = self.app.get('/').maybe_follow()
        component.reload()
        form = res.forms['searchBar']
        form['q'] = 'Foobar'
        res = form.submit().maybe_follow()
        # A link to the component is shown as a result
        assert_in('Foobar Component', res)
class TestShortUrls(OsfTestCase):
    """Short URLs should render the same page as their canonical deep URLs."""

    def setUp(self):
        super(TestShortUrls, self).setUp()
        self.user = UserFactory()
        # Add an API key for quicker authentication
        api_key = ApiKeyFactory()
        self.user.api_keys.append(api_key)
        self.user.save()
        self.auth = ('test', api_key._primary_key)
        self.consolidate_auth = Auth(user=self.user, api_key=api_key)
        self.project = ProjectFactory(creator=self.user)
        # A non-project component
        self.component = NodeFactory(category='hypothesis', creator=self.user)
        self.project.nodes.append(self.component)
        self.component.save()
        # Hack: Add some logs to component; should be unnecessary pending
        # improvements to factories from @rliebz
        self.component.set_privacy('public', auth=self.consolidate_auth)
        self.component.set_privacy('private', auth=self.consolidate_auth)
        self.wiki = NodeWikiFactory(user=self.user, node=self.component)

    def _url_to_body(self, url):
        # Fetch `url` authenticated, follow redirects, and return the
        # normalized page body for comparison.
        return self.app.get(
            url,
            auth=self.auth
        ).maybe_follow(
            auth=self.auth,
        ).normal_body

    def test_profile_url(self):
        res1 = self.app.get('/{}/'.format(self.user._primary_key)).maybe_follow()
        res2 = self.app.get('/profile/{}/'.format(self.user._primary_key)).maybe_follow()
        assert_equal(
            res1.normal_body,
            res2.normal_body
        )

    def test_project_url(self):
        assert_equal(
            self._url_to_body(self.project.deep_url),
            self._url_to_body(self.project.url),
        )

    def test_component_url(self):
        assert_equal(
            self._url_to_body(self.component.deep_url),
            self._url_to_body(self.component.url),
        )

    def test_wiki_url(self):
        assert_equal(
            self._url_to_body(self.wiki.deep_url),
            self._url_to_body(self.wiki.url),
        )
@requires_piwik
class TestPiwik(OsfTestCase):
    """Checks the embedded Piwik statistics page and its auth tokens."""

    def setUp(self):
        super(TestPiwik, self).setUp()
        # users[0] creates the project, users[1] is a contributor,
        # users[2] is unrelated to the project.
        self.users = [AuthUserFactory() for _ in range(3)]
        self.consolidate_auth = Auth(user=self.users[0])
        self.project = ProjectFactory(creator=self.users[0], is_public=True)
        self.project.add_contributor(contributor=self.users[1])
        self.project.save()

    def test_contains_iframe_and_src(self):
        """The statistics page embeds Piwik in an iframe."""
        stats_url = '/{0}/statistics/'.format(self.project._primary_key)
        page = self.app.get(stats_url, auth=self.users[0].auth).maybe_follow()
        assert_in('iframe', page)
        assert_in('src', page)
        assert_in(settings.PIWIK_HOST, page)

    def test_anonymous_no_token(self):
        """A non-contributor gets the anonymous Piwik token."""
        stats_url = '/{0}/statistics/'.format(self.project._primary_key)
        page = self.app.get(stats_url, auth=self.users[2].auth).maybe_follow()
        assert_in('token_auth=anonymous', page)

    def test_contributor_token(self):
        """A contributor's own Piwik token is embedded in the page."""
        stats_url = '/{0}/statistics/'.format(self.project._primary_key)
        page = self.app.get(stats_url, auth=self.users[1].auth).maybe_follow()
        assert_in(self.users[1].piwik_token, page)

    def test_no_user_token(self):
        """An unauthenticated visitor gets the anonymous token."""
        stats_url = '/{0}/statistics/'.format(self.project._primary_key)
        page = self.app.get(stats_url).maybe_follow()
        assert_in('token_auth=anonymous', page)

    def test_private_alert(self):
        """Private projects show a notice instead of statistics."""
        self.project.set_privacy('private', auth=self.consolidate_auth)
        self.project.save()
        stats_url = '/{0}/statistics/'.format(self.project._primary_key)
        body = self.app.get(
            stats_url, auth=self.users[0].auth).maybe_follow().normal_body
        assert_in(
            'Usage statistics are collected only for public resources.',
            body
        )
class TestClaiming(OsfTestCase):
    """Tests for unregistered contributors claiming their accounts."""

    def setUp(self):
        super(TestClaiming, self).setUp()
        self.referrer = AuthUserFactory()
        self.project = ProjectFactory(creator=self.referrer, is_public=True)

    def test_correct_name_shows_in_contributor_list(self):
        """The project-specific name (not the original profile name) is
        shown in the contributor list.
        """
        name1, email = fake.name(), fake.email()
        UnregUserFactory(fullname=name1, email=email)
        name2, email = fake.name(), fake.email()
        # Added with different name
        self.project.add_unregistered_contributor(fullname=name2,
            email=email, auth=Auth(self.referrer))
        self.project.save()
        res = self.app.get(self.project.url, auth=self.referrer.auth)
        # Correct name is shown
        assert_in(name2, res)
        assert_not_in(name1, res)

    def test_user_can_set_password_on_claim_page(self):
        """Visiting a claim URL lets the unregistered user set a password
        and lands them on the settings page.
        """
        name, email = fake.name(), fake.email()
        new_user = self.project.add_unregistered_contributor(
            email=email,
            fullname=name,
            auth=Auth(self.referrer)
        )
        self.project.save()
        claim_url = new_user.get_claim_url(self.project._primary_key)
        res = self.app.get(claim_url)
        self.project.reload()
        assert_in('Set Password', res)
        form = res.forms['setPasswordForm']
        #form['username'] = new_user.username #Removed as long as E-mail can't be updated.
        form['password'] = 'killerqueen'
        form['password2'] = 'killerqueen'
        res = form.submit().maybe_follow()
        new_user.reload()
        # at settings page
        assert_equal(res.request.path, '/settings/')
        assert_in('Welcome to the OSF', res)

    def test_sees_is_redirected_if_user_already_logged_in(self):
        """A logged-in user hitting someone else's claim URL gets a 302."""
        name, email = fake.name(), fake.email()
        new_user = self.project.add_unregistered_contributor(
            email=email,
            fullname=name,
            auth=Auth(self.referrer)
        )
        self.project.save()
        existing = AuthUserFactory()
        claim_url = new_user.get_claim_url(self.project._primary_key)
        # a user is already logged in
        res = self.app.get(claim_url, auth=existing.auth, expect_errors=True)
        assert_equal(res.status_code, 302)

    def test_unregistered_users_names_are_project_specific(self):
        """Each project shows the name under which the unregistered
        contributor was added to that project.
        """
        name1, name2, email = fake.name(), fake.name(), fake.email()
        project2 = ProjectFactory(creator=self.referrer)
        # different projects use different names for the same unreg contributor
        self.project.add_unregistered_contributor(
            email=email,
            fullname=name1,
            auth=Auth(self.referrer)
        )
        self.project.save()
        project2.add_unregistered_contributor(
            email=email,
            fullname=name2,
            auth=Auth(self.referrer)
        )
        project2.save()
        self.app.authenticate(*self.referrer.auth)
        # Each project displays a different name in the contributor list
        res = self.app.get(self.project.url)
        assert_in(name1, res)
        res2 = self.app.get(project2.url)
        assert_in(name2, res2)

    def test_cannot_go_to_claim_url_after_setting_password(self):
        """A claim URL is single-use: it responds 400 once claimed."""
        name, email = fake.name(), fake.email()
        new_user = self.project.add_unregistered_contributor(
            email=email,
            fullname=name,
            auth=Auth(self.referrer)
        )
        self.project.save()
        # Goes to claim url and successfully claims account
        claim_url = new_user.get_claim_url(self.project._primary_key)
        res = self.app.get(claim_url)
        self.project.reload()
        assert_in('Set Password', res)
        form = res.forms['setPasswordForm']
        #form['username'] = new_user.username #Removed as long as the E-mail can't be changed
        form['password'] = 'killerqueen'
        form['password2'] = 'killerqueen'
        res = form.submit().maybe_follow()
        # logs out
        res = self.app.get('/logout/').maybe_follow()
        # tries to go to claim url again
        res = self.app.get(claim_url, expect_errors=True)
        assert_equal(res.status_code, 400)
        assert_in('already been claimed', res)

    @unittest.skip("as long as E-mails cannot be changed")
    def test_cannot_set_email_to_a_user_that_already_exists(self):
        """Claiming with an email belonging to a registered user fails."""
        reg_user = UserFactory()
        name, email = fake.name(), fake.email()
        new_user = self.project.add_unregistered_contributor(
            email=email,
            fullname=name,
            auth=Auth(self.referrer)
        )
        self.project.save()
        # Goes to claim url and successfully claims account
        claim_url = new_user.get_claim_url(self.project._primary_key)
        res = self.app.get(claim_url)
        self.project.reload()
        assert_in('Set Password', res)
        form = res.forms['setPasswordForm']
        # Fills out an email that is the username of another user
        form['username'] = reg_user.username
        form['password'] = 'killerqueen'
        form['password2'] = 'killerqueen'
        res = form.submit().maybe_follow(expect_errors=True)
        assert_in(
            language.ALREADY_REGISTERED.format(email=reg_user.username),
            res
        )

    def test_correct_display_name_is_shown_at_claim_page(self):
        """The claim page shows the project-specific display name."""
        original_name = fake.name()
        unreg = UnregUserFactory(fullname=original_name)
        different_name = fake.name()
        new_user = self.project.add_unregistered_contributor(
            email=unreg.username,
            fullname=different_name,
            auth=Auth(self.referrer),
        )
        self.project.save()
        claim_url = new_user.get_claim_url(self.project._primary_key)
        res = self.app.get(claim_url)
        # Correct name (different_name) should be on page
        assert_in(different_name, res)
class TestConfirmingEmail(OsfTestCase):
    """Tests for email confirmation, resending, and email-update auth."""

    def setUp(self):
        super(TestConfirmingEmail, self).setUp()
        self.user = UnconfirmedUserFactory()
        self.confirmation_url = self.user.get_confirmation_url(
            self.user.username,
            external=False,
        )
        self.confirmation_token = self.user.get_confirmation_token(
            self.user.username
        )

    def test_cannot_remove_another_user_email(self):
        """Updating another user's email list responds 403."""
        user1 = AuthUserFactory()
        user2 = AuthUserFactory()
        url = api_url_for('update_user')
        header = {'id': user1.username, 'emails': [{'address': user1.username}]}
        res = self.app.put_json(url, header, auth=user2.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_cannnot_make_primary_email_for_another_user(self):
        """Changing another user's primary email responds 403."""
        user1 = AuthUserFactory()
        user2 = AuthUserFactory()
        email = 'test@cos.io'
        user1.emails.append(email)
        user1.save()
        url = api_url_for('update_user')
        header = {'id': user1.username,
                  'emails': [{'address': user1.username, 'primary': False, 'confirmed': True},
                             {'address': email, 'primary': True, 'confirmed': True}
                             ]}
        res = self.app.put_json(url, header, auth=user2.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_cannnot_add_email_for_another_user(self):
        """Adding an email to another user's account responds 403."""
        user1 = AuthUserFactory()
        user2 = AuthUserFactory()
        email = 'test@cos.io'
        url = api_url_for('update_user')
        header = {'id': user1.username,
                  'emails': [{'address': user1.username, 'primary': True, 'confirmed': True},
                             {'address': email, 'primary': False, 'confirmed': False}
                             ]}
        res = self.app.put_json(url, header, auth=user2.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_redirects_to_settings(self):
        """Following a confirmation link lands on the settings page."""
        res = self.app.get(self.confirmation_url).follow()
        assert_equal(
            res.request.path,
            '/settings/',
            'redirected to settings page'
        )
        assert_in('Welcome to the OSF!', res, 'shows flash message')
        assert_in('Please update the following settings.', res)

    def test_error_page_if_confirm_link_is_used(self):
        """A spent confirmation link yields an invalid-token 400 page."""
        self.user.confirm_email(self.confirmation_token)
        self.user.save()
        res = self.app.get(self.confirmation_url, expect_errors=True)
        assert_in(auth_exc.InvalidTokenError.message_short, res)
        assert_equal(res.status_code, http.BAD_REQUEST)

    def test_flash_message_does_not_break_page_if_email_unconfirmed(self):
        """Logging in before confirming shows the 'unconfirmed' flash."""
        # set a password for user
        self.user.set_password('bicycle')
        self.user.save()
        # Goes to log in page
        res = self.app.get(web_url_for('auth_login')).maybe_follow()
        # Fills the form with correct password
        form = res.forms['logInForm']
        form['username'] = self.user.username
        form['password'] = 'bicycle'
        res = form.submit().maybe_follow()
        assert_in(language.UNCONFIRMED, res, 'shows flash message')

    @mock.patch('framework.auth.views.send_confirm_email')
    def test_resend_form(self, send_confirm_email):
        """Submitting the resend form triggers a confirmation email."""
        res = self.app.get('/resend/')
        form = res.forms['resendForm']
        form['email'] = self.user.username
        res = form.submit()
        assert_true(send_confirm_email.called)
        assert_in('Resent email to', res)

    def test_resend_form_does_nothing_if_not_in_db(self):
        """Resending for an unknown address stays on the resend page."""
        res = self.app.get('/resend/')
        form = res.forms['resendForm']
        form['email'] = 'nowheretobefound@foo.com'
        res = form.submit()
        assert_equal(res.request.path, '/resend/')

    def test_resend_form_shows_alert_if_email_already_confirmed(self):
        """Resending for an already-confirmed address shows an alert."""
        user = UnconfirmedUserFactory()
        url = user.get_confirmation_url(user.username, external=False)
        # User confirms their email address
        self.app.get(url).maybe_follow()
        # tries to resend confirmation
        res = self.app.get('/resend/')
        form = res.forms['resendForm']
        form['email'] = user.username
        res = form.submit()
        # Sees alert message
        assert_in('already been confirmed', res)
class TestClaimingAsARegisteredUser(OsfTestCase):
    """Tests for claiming an unregistered contribution while a registered
    account exists (or another user is logged in).
    """

    def setUp(self):
        super(TestClaimingAsARegisteredUser, self).setUp()
        self.referrer = AuthUserFactory()
        self.project = ProjectFactory(creator=self.referrer, is_public=True)
        name, email = fake.name(), fake.email()
        self.user = self.project.add_unregistered_contributor(
            fullname=name,
            email=email,
            auth=Auth(user=self.referrer)
        )
        self.project.save()

    @mock.patch('website.project.views.contributor.session')
    def test_user_can_log_in_with_a_different_account(self, mock_session):
        """A user can sign out of the wrong account mid-claim, sign in to
        the right one, and finish claiming the contribution.
        """
        # Assume that the unregistered user data is already stored in the session
        mock_session.data = {
            'unreg_user': {
                'uid': self.user._primary_key,
                'pid': self.project._primary_key,
                'token': self.user.get_unclaimed_record(
                    self.project._primary_key)['token']
            }
        }
        right_user = AuthUserFactory.build(fullname="Right User")
        right_user.set_password('science')
        right_user.save()
        # User goes to the claim page, but a different user (lab_user) is logged in
        lab_user = AuthUserFactory(fullname="Lab Comp")
        url = self.user.get_claim_url(self.project._primary_key)
        res = self.app.get(url, auth=lab_user.auth).follow(auth=lab_user.auth)
        # verify that the "Claim Account" form is returned
        assert_in('Claim Contributor', res.body)
        # Clicks "I am not Lab Comp"
        # Taken to login/register page
        res2 = res.click(linkid='signOutLink', auth=lab_user.auth)
        # Fills in log in form
        form = res2.forms['logInForm']
        form['username'] = right_user.username
        form['password'] = 'science'
        # submits
        res3 = form.submit().follow()
        # Back at claim contributor page
        assert_in('Claim Contributor', res3)
        # Verifies their password
        form = res3.forms['claimContributorForm']
        form['password'] = 'science'
        form.submit()
        self.project.reload()
        right_user.reload()
        self.user.reload()
        # user is now a contributor to self.project
        assert_in(right_user._primary_key, self.project.contributors)
        # lab user is not a contributor
        assert_not_in(lab_user._primary_key, self.project.contributors)

    def test_claim_user_registered_with_correct_password(self):
        """A registered user re-enters their password to absorb the
        unregistered contribution into their own account.
        """
        reg_user = AuthUserFactory()
        reg_user.set_password('killerqueen')
        reg_user.save()
        url = self.user.get_claim_url(self.project._primary_key)
        # Follow to password re-enter page
        res = self.app.get(url, auth=reg_user.auth).follow(auth=reg_user.auth)
        # verify that the "Claim Account" form is returned
        assert_in('Claim Contributor', res.body)
        form = res.forms['claimContributorForm']
        form['password'] = 'killerqueen'
        res = form.submit(auth=reg_user.auth).follow(auth=reg_user.auth)
        self.project.reload()
        self.user.reload()
        # user is now a contributor to the project
        assert_in(reg_user._primary_key, self.project.contributors)
        # the unregistered user (self.user) is removed as a contributor, and their
        assert_not_in(self.user._primary_key, self.project.contributors)
        # unclaimed record for the project has been deleted
        assert_not_in(self.project._primary_key, self.user.unclaimed_records)
class TestExplorePublicActivity(OsfTestCase):
    """The public activity feed lists public projects and registrations,
    and omits private projects.
    """

    def setUp(self):
        super(TestExplorePublicActivity, self).setUp()
        self.project = ProjectFactory(is_public=True)
        self.registration = RegistrationFactory(project=self.project)
        self.private_project = ProjectFactory(title="Test private project")

    def test_newest_public_project_and_registrations_show_in_explore_activity(self):
        activity_url = self.project.web_url_for('activity')
        page = self.app.get(activity_url)
        # The public project and its registration appear, with dates...
        for expected in (self.project.title,
                         self.project.date_created.date(),
                         self.registration.title,
                         self.registration.registered_date.date()):
            assert_in(str(expected), page)
        # ...but the private project does not.
        assert_not_in(str(self.private_project.title), page)
class TestForgotAndResetPasswordViews(OsfTestCase):
    """Tests for the password-reset page reached via a verification key."""

    def setUp(self):
        super(TestForgotAndResetPasswordViews, self).setUp()
        self.user = AuthUserFactory()
        self.key = random_string(20)
        # Manually assign the verification key that the reset link embeds
        self.user.verification_key = self.key
        self.user.save()
        self.url = web_url_for('reset_password', verification_key=self.key)

    def test_reset_password_view_returns_200(self):
        response = self.app.get(self.url)
        assert_equal(response.status_code, 200)

    def test_can_reset_password_if_form_success(self):
        # Submit the reset form with a matching password pair
        response = self.app.get(self.url)
        form = response.forms['resetPasswordForm']
        form['password'] = 'newpassword'
        form['password2'] = 'newpassword'
        response = form.submit()
        # The stored password now matches the submitted one
        self.user.reload()
        assert_true(self.user.check_password('newpassword'))

    def test_reset_password_logs_out_user(self):
        another_user = AuthUserFactory()
        # Visit the reset link while a different user is logged in
        response = self.app.get(self.url, auth=another_user.auth)
        assert_equal(response.status_code, 200)
        # We check if another_user is logged in by checking if
        # their full name appears on the page (it should be in the navbar).
        # Yes, this is brittle.
        assert_not_in(another_user.fullname, response)
        # make sure the form is on the page
        assert_true(response.forms['resetPasswordForm'])
# Allow running this test module directly with ``python <module>``.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "0a55407dbc0c7cbcfbf69f10bcf0bb22",
"timestamp": "",
"source": "github",
"line_count": 1385,
"max_line_length": 111,
"avg_line_length": 38.15595667870036,
"alnum_prop": 0.6100556333497332,
"repo_name": "lamdnhan/osf.io",
"id": "13c48a404482da808446ff683a51e5bbf4c74063",
"size": "52907",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/webtest_tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "79328"
},
{
"name": "HTML",
"bytes": "34188"
},
{
"name": "JavaScript",
"bytes": "904722"
},
{
"name": "Mako",
"bytes": "450508"
},
{
"name": "Python",
"bytes": "2585814"
},
{
"name": "Shell",
"bytes": "234"
}
],
"symlink_target": ""
} |
"""
Common code for isolate starters
:author: Thomas Calmant
:license: Apache Software License 2.0
:version: 1.1.0
..
Copyright 2014 isandlaTech
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Standard library
import os
try:
# Python 3
# pylint: disable=F0401,E0611
from urllib.parse import quote
except ImportError:
# Python 2
# pylint: disable=F0401,E0611
from urllib import quote
# Pelix framework
from pelix.ipopo.decorators import Requires, Validate, Invalidate
# COHORTE modules
import cohorte
import cohorte.forker
import cohorte.utils
# ------------------------------------------------------------------------------
# Documentation strings format
__docformat__ = "restructuredtext en"
# Version
__version_info__ = (1, 1, 0)
__version__ = ".".join(str(x) for x in __version_info__)
# ------------------------------------------------------------------------------
@Requires('_watcher', cohorte.forker.SERVICE_WATCHER)
class CommonStarter(object):
    """
    Common code for isolate starters: keeps track of the processes started
    for each isolate UID and provides basic life-cycle operations.
    """
    def __init__(self):
        """
        Sets up members
        """
        # Isolate watcher (injected by iPOPO)
        self._watcher = None
        # Bundle context
        self._context = None
        # OS-specific utility methods
        self._utils = None
        # Isolate UID -> process information
        self._isolates = {}

    @Validate
    def _validate(self, context):
        """
        Component validated
        """
        # Store the bundle context
        self._context = context
        # Get OS utility methods
        self._utils = cohorte.utils.get_os_utils()

    @Invalidate
    def _invalidate(self, context):
        """
        Component invalidated
        """
        self._context = None
        self._utils = None

    def uids(self):
        """
        Returns the list of UIDs of the isolates started by this component

        :return: A list of isolate UIDs
        """
        return list(self._isolates.keys())

    def ping(self, uid):
        """
        Pings the isolate with the given UID

        :param uid: The UID of an isolate
        :return: True if the isolate process is still running
        :raise KeyError: Unknown UID
        """
        return self._utils.is_process_running(self._isolates[uid].pid)

    def kill(self, uid):
        """
        Kills the given isolate

        :param uid: The UID of an isolate
        :raise KeyError: Unknown UID
        :raise OSError: Error killing the process
        """
        # Forget the isolate first: it is considered gone even if the
        # termination call below fails
        process = self._isolates.pop(uid)
        if process.poll() is None:
            # Process is still running: terminate it
            process.terminate()

    def stop(self, uid):
        """
        Stops the given isolate

        :param uid: The UID of an isolate
        """
        self.terminate(uid)

    def terminate(self, uid):
        """
        Softly terminates the given isolate, ignoring OS-level errors

        :param uid: The UID of an isolate
        :raise KeyError: Unknown UID
        """
        try:
            self.kill(uid)
        except OSError:
            # Ignore errors
            pass

    @staticmethod
    def normalize_environment(environment):
        """
        Ensures that the environment dictionary only contains strings.

        :param environment: The environment dictionary (modified in-place)
        :return: The environment dictionary
        """
        for key in environment:
            value = environment[key]
            if value is None:
                environment[key] = ''
            elif not isinstance(value, str):
                environment[key] = str(value)
        return environment

    def setup_environment(self, configuration):
        """
        Sets up an environment dictionary. Uses the 'environment' entry from
        the configuration dictionary.

        :param configuration: An isolate configuration
        :return: A new environment dictionary
        """
        # Start from a copy of the current process environment
        env = os.environ.copy()

        # Use configuration environment variables
        config_env = configuration.get('environment')
        if config_env:
            env.update(config_env)

        # Add Cohorte variables
        # ... directories
        env[cohorte.ENV_HOME] = self._context.get_property(cohorte.PROP_HOME)
        env[cohorte.ENV_BASE] = self._context.get_property(cohorte.PROP_BASE)

        # ... isolate
        env[cohorte.ENV_UID] = configuration['uid']
        env[cohorte.ENV_NAME] = configuration['name']

        # ... node
        env[cohorte.ENV_NODE_UID] = configuration['node_uid']
        env[cohorte.ENV_NODE_NAME] = configuration['node_name']

        # Normalize environment
        self.normalize_environment(env)
        return env

    def prepare_working_directory(self, configuration):
        """
        Prepares the working directory for the given isolate configuration.
        Uses the 'working_directory' configuration entry, if present, or
        creates a new folder in the base directory.

        :param configuration: An isolate configuration
        :return: A valid configuration directory
        """
        # The working directory can be specified in the configuration
        working_dir = configuration.get('working_directory')
        if working_dir:
            # Ensure the whole path is created.
            # FIX: dropped the Python-3-only ``exist_ok=True`` keyword, which
            # broke the Python 2 support this module otherwise maintains (see
            # the urllib import fallback at the top of the file); it was also
            # redundant behind the exists() guard.
            if not os.path.exists(working_dir):
                os.makedirs(working_dir)
            return working_dir

        # Prepare a specific working directory
        uid = configuration['uid']
        name = configuration['name']

        # Get the base directory
        base = self._context.get_property(cohorte.PROP_BASE)

        # Escape the name, as it is used as a folder name
        name = quote(name)

        # Compute the parent path: $BASE/var/<name>
        path = os.path.join(base, 'var', name)

        # Compute the instance index: 1 + the highest index found among
        # existing "<index>-<uid>" sub-folders, or 0 if there are none
        index = 0
        if os.path.exists(path):
            max_index = 0
            for entry in os.listdir(path):
                if os.path.isdir(os.path.join(path, entry)):
                    try:
                        dir_index = int(entry[:entry.index('-')])
                    except ValueError:
                        # No '-' in the name, or not an integer prefix
                        continue
                    max_index = max(max_index, dir_index)
            index = max_index + 1

        # Final folder: $BASE/var/<name>/<index>-<uid>
        path = os.path.join(path, '{index:03d}-{uid}'
                            .format(index=index, uid=uid))

        # Ensure the whole path is created
        if not os.path.exists(path):
            os.makedirs(path)
        return path
| {
"content_hash": "8544b24e7b2faa3716007b1ad33377f7",
"timestamp": "",
"source": "github",
"line_count": 254,
"max_line_length": 80,
"avg_line_length": 28.637795275590552,
"alnum_prop": 0.5643387407203739,
"repo_name": "isandlaTech/cohorte-devtools",
"id": "17b94a4bf3b20e139856e4d8f351aaaa469dc01b",
"size": "7328",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "org.cohorte.eclipse.runner.basic/files/test/cohorte/forker/starters/common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "151318"
},
{
"name": "HTML",
"bytes": "113064"
},
{
"name": "Java",
"bytes": "172793"
},
{
"name": "JavaScript",
"bytes": "2165497"
},
{
"name": "Python",
"bytes": "13926564"
},
{
"name": "Shell",
"bytes": "1490"
}
],
"symlink_target": ""
} |
import json
from datetime import datetime
from trove.common import cfg
from trove.common import exception
from trove.common import utils
from trove.common.exception import ModelNotFoundError
from trove.datastore import models as dstore_models
from trove.db import get_db_api
from trove.db import models as dbmodels
from trove.openstack.common import log as logging
from trove.common.i18n import _
from trove.taskmanager import api as task_api
# Module-level configuration and logger handles.
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class Configurations(object):
    """Helpers for listing configuration groups with pagination."""

    DEFAULT_LIMIT = CONF.configurations_page_size

    @staticmethod
    def load(context):
        """Load a page of non-deleted configuration groups.

        Admins see every group; other users only see groups belonging to
        their own tenant.

        :param context: the request context (must not be None)
        :return: a (configurations, next_page_marker) tuple
        :raise TypeError: if context is None
        """
        if context is None:
            raise TypeError("Argument context not defined.")
        # NOTE: an ``elif id is None`` guard was removed here: ``id`` was
        # the builtin function (never None), so the branch was unreachable
        # dead code.
        if context.is_admin:
            db_info = DBConfiguration.find_all(deleted=False)
            if db_info.count() == 0:
                LOG.debug("No configurations found for admin user")
        else:
            db_info = DBConfiguration.find_all(tenant_id=context.tenant,
                                               deleted=False)
            if db_info.count() == 0:
                LOG.debug("No configurations found for tenant %s"
                          % context.tenant)

        # Clamp the requested page size to the configured maximum
        limit = int(context.limit or Configurations.DEFAULT_LIMIT)
        if limit > Configurations.DEFAULT_LIMIT:
            limit = Configurations.DEFAULT_LIMIT

        data_view = DBConfiguration.find_by_pagination('configurations',
                                                       db_info,
                                                       "foo",
                                                       limit=limit,
                                                       marker=context.marker)
        next_marker = data_view.next_page_marker
        return data_view.collection, next_marker
class Configuration(object):
    """Business-logic wrapper around a configuration group and its items."""

    def __init__(self, context, configuration_id):
        self.context = context
        self.configuration_id = configuration_id

    @property
    def instances(self):
        # NOTE(review): this property returns itself -- ``self.instances``
        # resolves back to this property, so any access recurses until
        # RecursionError. Looks like dead or broken code; confirm intent
        # before relying on it.
        return self.instances

    @property
    def items(self):
        # NOTE(review): same self-referential recursion as ``instances``.
        return self.items

    @staticmethod
    def create(name, description, tenant_id, datastore, datastore_version):
        """Persist a new configuration group row.

        NOTE(review): the ``datastore`` argument is accepted but unused;
        only ``datastore_version`` is persisted.
        """
        configurationGroup = DBConfiguration.create(
            name=name,
            description=description,
            tenant_id=tenant_id,
            datastore_version_id=datastore_version)
        return configurationGroup

    @staticmethod
    def create_items(cfg_id, values):
        """Persist one DBConfigurationParameter row per key/value pair."""
        LOG.debug("Saving configuration values for %s - "
                  "values: %s" % (cfg_id, values))
        config_items = []
        # Python 2 dict API (iteritems)
        for key, val in values.iteritems():
            config_item = DBConfigurationParameter.create(
                configuration_id=cfg_id,
                configuration_key=key,
                configuration_value=val)
            config_items.append(config_item)
        return config_items

    @staticmethod
    def delete(context, group):
        """Soft-delete a configuration group and all of its items."""
        deleted_at = datetime.utcnow()
        Configuration.remove_all_items(context, group.id, deleted_at)
        group.deleted = True
        group.deleted_at = deleted_at
        group.save()

    @staticmethod
    def remove_all_items(context, id, deleted_at):
        """Soft-delete every parameter belonging to configuration ``id``."""
        items = DBConfigurationParameter.find_all(configuration_id=id,
                                                  deleted=False).all()
        LOG.debug("Removing all configuration values for %s" % id)
        for item in items:
            item.deleted = True
            item.deleted_at = deleted_at
            item.save()

    @staticmethod
    def load_configuration_datastore_version(context, id):
        """Return the DatastoreVersion tied to configuration ``id``."""
        config = Configuration.load(context, id)
        datastore_version = dstore_models.DatastoreVersion.load_by_uuid(
            config.datastore_version_id)
        return datastore_version

    @staticmethod
    def load(context, id):
        """Load a configuration group, scoped to the caller's tenant
        unless the caller is an admin.

        :raise ModelNotFoundError: if no matching group exists
        """
        try:
            if context.is_admin:
                return DBConfiguration.find_by(id=id, deleted=False)
            else:
                return DBConfiguration.find_by(id=id,
                                               tenant_id=context.tenant,
                                               deleted=False)
        except ModelNotFoundError:
            msg = _("Configuration group with ID %s could not be found.") % id
            raise ModelNotFoundError(msg)

    @staticmethod
    def find_parameter_details(name, detail_list):
        """Return the parameter rule whose ``name`` matches, else None."""
        for item in detail_list:
            if item.name == name:
                return item
        return None

    @staticmethod
    def load_items(context, id):
        """Load the configuration's items, coercing each value to the type
        declared by the datastore's parameter rules.

        Items without a matching rule keep their raw value.
        """
        datastore_v = Configuration.load_configuration_datastore_version(
            context,
            id)
        config_items = DBConfigurationParameter.find_all(
            configuration_id=id, deleted=False).all()
        detail_list = DatastoreConfigurationParameters.load_parameters(
            datastore_v.id)
        for item in config_items:
            rule = Configuration.find_parameter_details(
                str(item.configuration_key), detail_list)
            if not rule:
                continue
            if rule.data_type == 'boolean':
                # Stored as "0"/"1": convert through int first
                item.configuration_value = bool(int(item.configuration_value))
            elif rule.data_type == 'integer':
                item.configuration_value = int(item.configuration_value)
            else:
                item.configuration_value = str(item.configuration_value)
        return config_items

    def get_configuration_overrides(self):
        """Gets the overrides dictionary to apply to an instance."""
        overrides = {}
        if self.configuration_id:
            config_items = Configuration.load_items(self.context,
                                                    id=self.configuration_id)
            for i in config_items:
                overrides[i.configuration_key] = i.configuration_value
        return overrides

    def does_configuration_need_restart(self):
        """Return True if any item's rule flags ``restart_required``.

        :raise exception.NotFound: if an item has no matching rule
        """
        datastore_v = Configuration.load_configuration_datastore_version(
            self.context,
            self.configuration_id)
        config_items = Configuration.load_items(self.context,
                                                id=self.configuration_id)
        LOG.debug("config_items: %s" % config_items)
        detail_list = DatastoreConfigurationParameters.load_parameters(
            datastore_v.id, show_deleted=True)
        for i in config_items:
            LOG.debug("config item: %s" % i)
            details = Configuration.find_parameter_details(
                i.configuration_key, detail_list)
            LOG.debug("parameter details: %s" % details)
            if not details:
                raise exception.NotFound(uuid=i.configuration_key)
            if bool(details.restart_required):
                return True
        return False

    @staticmethod
    def save(context, configuration, configuration_items, instances):
        """Persist the configuration and its items, then push the resulting
        overrides to every attached instance via the task manager.
        """
        DBConfiguration.save(configuration)
        for item in configuration_items:
            item["deleted_at"] = None
            DBConfigurationParameter.save(item)
        items = Configuration.load_items(context, configuration.id)
        for instance in instances:
            LOG.debug("Configuration %s being applied to "
                      "instance: %s" % (configuration.id, instance.id))
            overrides = {}
            for i in items:
                overrides[i.configuration_key] = i.configuration_value
            task_api.API(context).update_overrides(instance.id, overrides)
class DBConfiguration(dbmodels.DatabaseModelBase):
    """Database model for a configuration group."""

    _data_fields = ['name', 'description', 'tenant_id', 'datastore_version_id',
                    'deleted', 'deleted_at', 'created', 'updated']

    @property
    def datastore(self):
        """The Datastore that owns this configuration group."""
        version = dstore_models.DatastoreVersion.load_by_uuid(
            self.datastore_version_id)
        return dstore_models.Datastore.load(version.datastore_id)

    @property
    def datastore_version(self):
        """The DatastoreVersion this configuration group targets."""
        return dstore_models.DatastoreVersion.load_by_uuid(
            self.datastore_version_id)
#rds-start
class DBDefaultConfigurationParameters(dbmodels.DatabaseModelBase):
    """Model for storing the default configuration parameters on a datastore."""
    # Only ``id`` is generated automatically.
    _auto_generated_attrs = ['id']
    # Columns persisted for each default parameter definition.
    _data_fields = [
        'name',
        'default_value',
        'max_size',
        'min_size',
        'enumeration',
        'description',
        'data_type',
        'restart_required',
        'version',
        'is_show',
        'datastore_version_id',
    ]
    _table_name = "db_default_configuration_parameters"
    # Rows are soft-deleted rather than removed.
    preserve_on_delete = True
#rds-end
class DBConfigurationParameter(dbmodels.DatabaseModelBase):
    """A single key/value item belonging to a configuration group."""

    _data_fields = ['configuration_id', 'configuration_key',
                    'configuration_value', 'deleted',
                    'deleted_at']

    def __hash__(self):
        # Hash on the key alone; equivalent to calling __hash__ directly.
        # (No matching __eq__ is defined, so equality stays identity-based.)
        return hash(self.configuration_key)
class DBDatastoreConfigurationParameters(dbmodels.DatabaseModelBase):
    """Model for storing the configuration parameters on a datastore."""
    # Only ``id`` is generated automatically.
    _auto_generated_attrs = ['id']
    # Columns persisted for each parameter rule.
    _data_fields = [
        'name',
        'datastore_version_id',
        'restart_required',
        'max_size',
        'min_size',
        'data_type',
        'deleted',
        'deleted_at',
    ]
    _table_name = "datastore_configuration_parameters"
    # Rows are soft-deleted rather than removed.
    preserve_on_delete = True
class DatastoreConfigurationParameters(object):
    """CRUD helpers for datastore-version configuration parameter rules."""

    def __init__(self, db_info):
        self.db_info = db_info

    @staticmethod
    def create(**kwargs):
        """Create a configuration parameter for a datastore version.

        If a soft-deleted parameter with the same name exists it is
        resurrected and updated; if a live one exists, an error is raised;
        otherwise a new row is created.

        :raise exception.ConfigurationParameterAlreadyExists: if a live
            parameter with this name exists for the datastore version
        """
        ds_v_id = kwargs.get('datastore_version_id')
        config_param_name = kwargs.get('name')
        try:
            param = DatastoreConfigurationParameters.load_parameter_by_name(
                ds_v_id,
                config_param_name,
                show_deleted=True)
            if param.deleted == 1:
                # Resurrect the soft-deleted parameter with the new values
                param.restart_required = kwargs.get('restart_required')
                param.data_type = kwargs.get('data_type')
                param.max_size = kwargs.get('max_size')
                param.min_size = kwargs.get('min_size')
                param.deleted = 0
                param.save()
                return param
            else:
                raise exception.ConfigurationParameterAlreadyExists(
                    parameter_name=config_param_name,
                    datastore_version=ds_v_id)
        except exception.NotFound:
            # No parameter with that name yet: fall through and create it
            pass
        config_param = DBDatastoreConfigurationParameters.create(
            **kwargs)
        return config_param

    @staticmethod
    def delete(version_id, config_param_name):
        """Soft-delete the named parameter of a datastore version."""
        config_param = DatastoreConfigurationParameters.load_parameter_by_name(
            version_id, config_param_name)
        config_param.deleted = True
        config_param.deleted_at = datetime.utcnow()
        config_param.save()

    #rds-start
    @classmethod
    def load_default_parameters(cls, version):
        """Return all default parameter rows matching ``version``.

        :raise exception.NotFound: if the lookup fails
        """
        try:
            return DBDefaultConfigurationParameters.find_all(
                version=version)
        except exception.NotFound:
            # BUG FIX: previously raised with the undefined name
            # ``datastore_version_id``, producing a NameError instead of
            # the intended NotFound.
            raise exception.NotFound(uuid=version)
    #rds-end

    @classmethod
    def load_parameters(cls, datastore_version_id, show_deleted=False):
        """Return all parameter rules for a datastore version."""
        try:
            if show_deleted:
                return DBDatastoreConfigurationParameters.find_all(
                    datastore_version_id=datastore_version_id
                )
            else:
                return DBDatastoreConfigurationParameters.find_all(
                    datastore_version_id=datastore_version_id,
                    deleted=False
                )
        except exception.NotFound:
            raise exception.NotFound(uuid=datastore_version_id)

    @classmethod
    def load_parameter(cls, config_id, show_deleted=False):
        """Return a single parameter rule by primary key."""
        try:
            if show_deleted:
                return DBDatastoreConfigurationParameters.find_by(
                    id=config_id
                )
            else:
                return DBDatastoreConfigurationParameters.find_by(
                    id=config_id, deleted=False
                )
        except exception.NotFound:
            raise exception.NotFound(uuid=config_id)

    @classmethod
    def load_parameter_by_name(cls, datastore_version_id, config_param_name,
                               show_deleted=False):
        """Return a parameter rule by datastore version and name."""
        try:
            if show_deleted:
                return DBDatastoreConfigurationParameters.find_by(
                    datastore_version_id=datastore_version_id,
                    name=config_param_name
                )
            else:
                return DBDatastoreConfigurationParameters.find_by(
                    datastore_version_id=datastore_version_id,
                    name=config_param_name,
                    deleted=False
                )
        except exception.NotFound:
            raise exception.NotFound(uuid=config_param_name)
def create_or_update_datastore_configuration_parameter(name,
                                                       datastore_version_id,
                                                       restart_required,
                                                       data_type,
                                                       max_size,
                                                       min_size):
    """Update the named parameter if it already exists, otherwise create it.

    Soft-deleted rows are included in the lookup, so re-running this for a
    deleted parameter updates the existing row rather than creating a
    duplicate.
    """
    get_db_api().configure_db(CONF)
    datastore_version = dstore_models.DatastoreVersion.load_by_uuid(
        datastore_version_id)
    try:
        existing = DatastoreConfigurationParameters.load_parameter_by_name(
            datastore_version_id, name, show_deleted=True)
    except exception.NotFound:
        # No row yet (not even a soft-deleted one): create it from scratch.
        new_param = DBDatastoreConfigurationParameters(
            id=utils.generate_uuid(),
            name=name,
            datastore_version_id=datastore_version.id,
            restart_required=restart_required,
            data_type=data_type,
            max_size=max_size,
            min_size=min_size,
            deleted=False,
        )
        get_db_api().save(new_param)
    else:
        # Row exists: refresh its mutable attributes in place.
        existing.restart_required = restart_required
        existing.data_type = data_type
        existing.max_size = max_size
        existing.min_size = min_size
        get_db_api().save(existing)
def load_datastore_configuration_parameters(datastore,
                                            datastore_version,
                                            config_file):
    """Bulk-load parameter definitions for a datastore version from JSON.

    `config_file` must contain a top-level "configuration-parameters" list;
    each entry is created or updated via
    create_or_update_datastore_configuration_parameter().
    """
    get_db_api().configure_db(CONF)
    ds, ds_version = dstore_models.get_datastore_version(
        type=datastore, version=datastore_version, return_inactive=True)
    with open(config_file) as config_handle:
        definitions = json.load(config_handle)
    for entry in definitions['configuration-parameters']:
        create_or_update_datastore_configuration_parameter(
            entry['name'],
            ds_version.id,
            entry['restart_required'],
            entry['type'],
            entry.get('max'),
            entry.get('min'),
        )
def persisted_models():
    """Map registry keys to the DB model classes persisted by this module."""
    return dict(
        configurations=DBConfiguration,
        configuration_parameters=DBConfigurationParameter,
        datastore_configuration_parameters=DBDatastoreConfigurationParameters,
        db_default_configuration_parameters=DBDefaultConfigurationParameters,  # rds
    )
| {
"content_hash": "1880a9c3074b1171a90bd4a4b44c3a11",
"timestamp": "",
"source": "github",
"line_count": 435,
"max_line_length": 89,
"avg_line_length": 36.34712643678161,
"alnum_prop": 0.5764973752450825,
"repo_name": "daizhengy/RDS",
"id": "e79d33ca8baf5b2c14fb1e3b04aece6b19721735",
"size": "16436",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "trove/configuration/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "88"
},
{
"name": "CSS",
"bytes": "21914"
},
{
"name": "JavaScript",
"bytes": "60526"
},
{
"name": "Python",
"bytes": "2795151"
},
{
"name": "Shell",
"bytes": "4771"
},
{
"name": "XSLT",
"bytes": "50542"
}
],
"symlink_target": ""
} |
from my_lib.imagelib import expand2square
from PIL import Image
# Load the rectangular source image.
im = Image.open('data/src/astronaut_rect.bmp')
# Pad to a square with a black (0, 0, 0) background and save as JPEG.
im_new = expand2square(im, (0, 0, 0))
im_new.save('data/dst/astronaut_expand_square.jpg', quality=95)
# Pad to a square again, then downscale to a 150x150 thumbnail.
# NOTE(review): this result is never saved in this snippet -- confirm intentional.
im_new = expand2square(im, (0, 0, 0)).resize((150, 150))
| {
"content_hash": "bba120bac6708b98896a99b53f0aac7a",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 63,
"avg_line_length": 25.357142857142858,
"alnum_prop": 0.7014084507042253,
"repo_name": "nkmk/python-snippets",
"id": "c297a2fba54477aac1b739050e8ffd981e1f51c8",
"size": "355",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "notebook/pillow_expand_to_square.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "5734214"
},
{
"name": "Python",
"bytes": "1619882"
},
{
"name": "Shell",
"bytes": "12097"
}
],
"symlink_target": ""
} |
import json
import urllib
import urllib2
import sys
import datetime
from b_bot import BBot
from rand_str import RandomString
# URL templates for CMR queries. The first %s is the environment prefix
# ("", "uat." or "sit."); the remaining %s slots are filled per query
# (see BCmrLookup.find). The first and last templates are kept for reference.
#cmrCollectionsByPage = "https://cmr.earthdata.nasa.gov/search/collections.umm-json?%s=%s&page_size=%s&pretty=true"
cmrCollections = "https://cmr.%searthdata.nasa.gov/search/collections?%s=%s&pretty=true"
cmrConcepts = "https://cmr.%searthdata.nasa.gov/search/concepts/%s?pretty=true"
#cmrConceptRevisions = "https://cmr.earthdata.nasa.gov/search/concepts/%s/%s"
class BCmrLookup(BBot):
def __init__(self):
BBot.__init__(self)
self.responses = RandomString([
"I found a record in CMR matching that id:"
, "That reminds me of someting the CMR server told me the other day:"
, "Looks like you were talking about a CMR ID so I went a head and looked them up for you:"
, "You mean you don't know the full URL?"
])
self.multiple_responses = RandomString([
"I found records in CMR matching that id:"
, "That reminds me of someting the CMR server told me the other day:"
, "Looks like you were talking about some CMR IDs so I went a head and looked them up for you:"
, "You mean you don't know the full URLs?"
])
def cmrLookup (self, url):
found = False
data = None
try:
#print url
response = urllib2.urlopen(url, timeout=2)
response.addheaders = [('User-agent', 'GCMD-Bot')]
data = response.read()
hit = response.info().getheader('CMR-Hits')
if hit is None or 0<int(hit):
# we got a 200 and there is no hit value, or the value is > 0
found = True
except urllib2.URLError, e:
found = False
data = None
except urllib2.HTTPError, e:
print e.code
found = False
data = None
return found, data
def find(self, cmd, data, found):
global cmrCollections
global cmrConcepts
text = found.group(1)
msg = []
found = False
for env in ["", "uat.", "sit."]:
''' text should be an id '''
url = cmrConcepts % (env, text)
found, data = self.cmrLookup(url)
if found:
msg.append(url)
if cmd is not "cmr_all":
break
url = cmrCollections % (env, "entry_id", text)
found, data = self.cmrLookup(url)
if found:
msg.append(url)
if cmd is not "cmr_all":
break
url = cmrCollections % (env, "short_name", text)
found, data = self.cmrLookup(url)
if found:
msg.append(url)
if cmd is not "cmr_all":
break
return msg
def action(self, cmd, data, found):
msg = None
urls = self.find(cmd, data, found)
if 0<len(urls):
text_urls = "\n".join(urls)
r = self.multiple_responses if cmd=="cmr_all" else self.responses
msg = "%s\n%s" % (r.pick(), text_urls)
return msg
def main(argv):
    # Smoke test: run the lookup action against a canned Slack-style event.
    # NOTE(review): find() calls found.group(1), so passing the plain string
    # "msut2" here likely raises AttributeError -- confirm intended usage.
    cl = BCmrLookup()
    print cl.action(None, {u'text': u'testing find: msut2', u'ts': u'1462474491.000084', u'user': u'U13SD9KSN', u'team': u'T13S7BSJD', u'type': u'message', u'channel': u'C14FTCSKV'}, "msut2")
if __name__ == "__main__": main(sys.argv[1:])
| {
"content_hash": "40c4d4836439e9e2212c83bbda1f600d",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 191,
"avg_line_length": 36.33673469387755,
"alnum_prop": 0.5397360292052794,
"repo_name": "jceaser/gcmd_bot",
"id": "0f6b7a647545fa43f4976772fee225fd7bd51ca9",
"size": "3561",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/python/bots/b_cmr_lookup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "45655"
},
{
"name": "Shell",
"bytes": "65"
}
],
"symlink_target": ""
} |
import os
from setuptools import setup, find_packages
from federation import __version__
description = 'Python library to abstract social web federation protocols like ActivityPub and Diaspora.'
def get_long_description():
    """Return the package long description read from docs/introduction.rst."""
    path = os.path.join(os.path.dirname(__file__), "docs", "introduction.rst")
    # Close the file deterministically instead of leaking the handle to the
    # garbage collector (the original used a bare open(...).read()).
    with open(path) as description_file:
        return description_file.read()
# Package metadata and distribution configuration for the `federation`
# library (setuptools).
setup(
    name='federation',
    version=__version__,
    description=description,
    long_description=get_long_description(),
    author='Jason Robinson',
    author_email='mail@jasonrobinson.me',
    maintainer='Jason Robinson',
    maintainer_email='mail@jasonrobinson.me',
    url='https://git.feneas.org/jaywink/federation',
    download_url='https://pypi.org/project/federation/',
    packages=find_packages(),
    license="BSD 3-clause",
    # Runtime dependencies with minimum-version pins.
    install_requires=[
        "attrs",
        "bleach>3.0",
        "commonmark",
        "cryptography",
        "cssselect>=0.9.2",
        "dirty-validators>=0.3.0",
        "lxml>=3.4.0",
        "iteration_utilities",
        "jsonschema>=2.0.0",
        "pycryptodome>=3.4.10",
        "python-dateutil>=2.4.0",
        "python-slugify>=5.0.0",
        "python-xrd>=0.1",
        "pytz",
        "PyYAML",
        "requests>=2.8.0",
        "requests-http-signature-jaywink>=0.1.0.dev0",
    ],
    include_package_data=True,
    # PyPI trove classifiers (Python 3.6+ only per the classifiers below).
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: Implementation :: CPython',
        'Topic :: Communications',
        'Topic :: Internet',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    keywords='federation diaspora activitypub matrix protocols federate fediverse social',
)
| {
"content_hash": "b14b8114656a92aa79143e1455ef42d2",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 105,
"avg_line_length": 32.03125,
"alnum_prop": 0.6107317073170732,
"repo_name": "jaywink/social-federation",
"id": "7635666f07cf2f596e5d2cdcce56dfef70e5ad55",
"size": "2072",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "2934"
},
{
"name": "Python",
"bytes": "124166"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the four portlet models.
    # Each model gets an implicit auto PK plus a blank-allowed title.
    dependencies = [
    ]
    operations = [
        # Shows a limited, optionally tag-filtered list of content items.
        migrations.CreateModel(
            name='ContentPortlet',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=100, verbose_name='Title', blank=True)),
                ('limit', models.PositiveSmallIntegerField(default=5)),
                ('tags', models.CharField(max_length=100, blank=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        # Renders a navigation tree between start_level and expand_level.
        migrations.CreateModel(
            name='NavigationPortlet',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=100, verbose_name='Title', blank=True)),
                ('start_level', models.PositiveSmallIntegerField(default=1)),
                ('expand_level', models.PositiveSmallIntegerField(default=0)),
            ],
            options={
                'abstract': False,
            },
        ),
        # Shows random, optionally tag-filtered content items.
        migrations.CreateModel(
            name='RandomPortlet',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=100, verbose_name='Title', blank=True)),
                ('limit', models.PositiveSmallIntegerField(default=1)),
                ('tags', models.CharField(max_length=100, blank=True)),
            ],
            options={
                'abstract': False,
            },
        ),
        # Free-form text block.
        migrations.CreateModel(
            name='TextPortlet',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=100, verbose_name='Title', blank=True)),
                ('text', models.TextField(verbose_name='Text', blank=True)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| {
"content_hash": "42c4db7c4db55bf33db1b52efef27a56",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 114,
"avg_line_length": 38.67796610169491,
"alnum_prop": 0.5297984224364592,
"repo_name": "diefenbach/lfc-portlets",
"id": "42e8c64f5ce0cfd9efec46ea8992340a3e636364",
"size": "2306",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lfc_portlets/migrations/0001_initial.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "8924"
}
],
"symlink_target": ""
} |
import sys
from wmr import common
from itertools import imap
# Driver stub (Python 2): parse each stdin line with the shared WMR helper
# and feed the resulting argument tuple to `mapper`. `mapper` is not defined
# in this file -- presumably the job script defining it is concatenated with
# this stub at run time; confirm against the job runner.
for pair in imap(common.parse_input, sys.stdin):
    mapper(*pair)
| {
"content_hash": "4fbc159dc5d08073cc569b40b23b208f",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 48,
"avg_line_length": 21.5,
"alnum_prop": 0.7596899224806202,
"repo_name": "benjamin-guillet/wmr-backend",
"id": "f065614ade6e5ff8f05cbf0f5ea2f7b0b3838d41",
"size": "711",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lang-support/python2/mapper-suffix.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "8515"
},
{
"name": "C#",
"bytes": "5669"
},
{
"name": "C++",
"bytes": "18098"
},
{
"name": "Java",
"bytes": "534200"
},
{
"name": "JavaScript",
"bytes": "5344"
},
{
"name": "Python",
"bytes": "8954"
},
{
"name": "Scheme",
"bytes": "13027"
},
{
"name": "Shell",
"bytes": "10491"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from yambopy import *
from qepy import *
from schedulerpy import *
from functools import partial
import multiprocessing
import argparse
import sys
# Calculation parameters shared by all the functions below.
prefix = "bn"  # Quantum Espresso prefix for the boron nitride run
yambo = "yambo"  # executable names
p2y = 'p2y'
pw = 'pw.x'
layer_separations = [10,15,20,25,30,35,40]  # interlayer distances (bohr) to scan
scf_kpoints = [ 9, 9,1]  # k-point grids for the scf / nscf steps
nscf_kpoints = [12,12,1]
nbands = 20  # bands in the nscf run and the static screening
ecutwf = 50  # wavefunction cutoff (Ry per QE's ecutwfc)
scheduler = Scheduler.factory
# create the quantum espresso input file
def get_inputfile():
    """Build the baseline Quantum Espresso input for hexagonal BN.

    Callers override celldm(3) (the interlayer distance) and the
    calculation type before writing the file.
    """
    pw_input = PwIn()
    # Atomic basis (crystal coordinates) and pseudopotentials.
    pw_input.atoms = [['N', [0.0, 0.0, 0.5]],
                      ['B', [1. / 3, 2. / 3, 0.5]]]
    pw_input.atypes = {'B': [10.811, "B.pbe-mt_fhi.UPF"],
                       'N': [14.0067, "N.pbe-mt_fhi.UPF"]}
    # Control namelist.
    pw_input.control['prefix'] = "'%s'" % prefix
    pw_input.control['wf_collect'] = '.true.'
    pw_input.control['pseudo_dir'] = "'../../../pseudos/'"
    # System namelist: hexagonal cell (ibrav=4), a = 4.7 bohr; the default
    # c/a makes c = 14 bohr (scf/nscf overwrite celldm(3) per separation).
    pw_input.system['celldm(1)'] = 4.7
    pw_input.system['celldm(3)'] = 14 / pw_input.system['celldm(1)']
    pw_input.system['ecutwfc'] = ecutwf
    pw_input.system['occupations'] = "'fixed'"
    pw_input.system['nat'] = 2
    pw_input.system['ntyp'] = 2
    pw_input.system['ibrav'] = 4
    pw_input.kpoints = scf_kpoints
    pw_input.electrons['conv_thr'] = 1e-10
    return pw_input
#run the self consistent calculation
def scf(layer_separation, folder='scf'):
    """Write the scf input file for the given interlayer distance (bohr)."""
    if not os.path.isdir(folder):
        os.makedirs(folder)
    pw_input = get_inputfile()
    pw_input.system['celldm(3)'] = layer_separation / pw_input.system['celldm(1)']
    pw_input.control['calculation'] = "'scf'"
    pw_input.write('%s/%s.scf' % (folder, prefix))
#run the non-self consistent calculation
def nscf(layer_separation, folder='nscf'):
    """Write the nscf input file for the given interlayer distance (bohr)."""
    if not os.path.isdir(folder):
        os.makedirs(folder)
    pw_input = get_inputfile()
    pw_input.control['calculation'] = "'nscf'"
    pw_input.electrons['diago_full_acc'] = ".true."
    pw_input.electrons['conv_thr'] = 1e-8
    pw_input.system['nbnd'] = nbands
    pw_input.system['force_symmorphic'] = ".true."
    pw_input.system['celldm(3)'] = layer_separation / pw_input.system['celldm(1)']
    pw_input.kpoints = nscf_kpoints
    pw_input.write('%s/%s.nscf' % (folder, prefix))
def database(shell,output_folder,nscf_folder='nscf'):
    # Queue (not run) the commands that convert the QE output into a yambo
    # SAVE database (p2y + yambo) and move it into output_folder.
    # Nothing is queued if a SAVE database is already present.
    # NOTE(review): 'done!' is printed when the commands are queued, before
    # shell.run() actually executes them.
    if not os.path.isdir('%s/SAVE'%output_folder):
        print('preparing yambo database...')
        shell.add_command('mkdir -p %s'%nscf_folder)
        shell.add_command('pushd %s/%s.save; %s; %s'%(nscf_folder,prefix,p2y,yambo))
        shell.add_command('popd')
        shell.add_command('mv %s/%s.save/SAVE %s'%(nscf_folder,prefix,output_folder))
        print('done!')
def run_job(layer_separation,nthreads=1,work_folder='bse_cutoff',cut=False):
    """
    Given a layer separation run the calculation
    1. scf calculation with QE
    2. nscf calculation
    3. BSE with yambo

    A 'done' stamp file marks completed jobs so re-runs are skipped.
    `nthreads` is the number of MPI tasks per pw.x/yambo invocation;
    `cut` enables the coulomb cutoff in the yambo input.
    """
    #check if the calculation exists
    done_stamp = '%s/%d/done'%(work_folder,layer_separation)
    print(done_stamp)
    if os.path.isfile(done_stamp):
        return
    print("layer separation: %d bohr cutoff:"%layer_separation, cut)
    root_folder = "%s/%d"%(work_folder,layer_separation)
    shell = scheduler()
    if not os.path.isdir(root_folder):
        shell.add_command( 'mkdir -p %s'%root_folder )
    # 1. run the ground state calculation
    print("scf cycle")
    print("kpoints",scf_kpoints)
    scf(layer_separation,folder="%s/scf"%root_folder)
    shell.add_command("pushd %s/scf; mpirun -np %d %s < %s.scf > scf.log"%(root_folder,nthreads,pw,prefix))
    shell.add_command("popd")
    # 2. run the non self consistent calculation
    # (reuses the scf charge density by copying the .save folder)
    print("nscf cycle")
    print("kpoints",nscf_kpoints)
    src ='%s/scf/%s.save'%(root_folder,prefix)
    dst ='%s/nscf/%s.save'%(root_folder,prefix)
    nscf(layer_separation,folder="%s/nscf"%root_folder)
    shell.add_command('cp -r %s %s'%(src,dst) )
    shell.add_command("pushd %s/nscf; mpirun -np %d %s < %s.nscf > nscf.log"%(root_folder,nthreads,pw,prefix))
    shell.add_command('popd')
    # generate the database
    database(shell,'%s'%root_folder,nscf_folder="%s/nscf"%root_folder)
    shell.run()
    #wait for execution
    # 3. calculate the absorption spectra
    # yambo runfile: static screening + SEX kernel + BSE diagonalization
    y = YamboIn('mpirun -np %d yambo -r -b -o b -k sex -y d -V all'%nthreads,folder=root_folder)
    if cut:
        # box coulomb cutoff along z, slightly smaller than the separation
        y['CUTGeo'] = 'box z'
        y['CUTBox'] = [0,0,layer_separation-2]
        y['RandQpts'] = 1000000
        y['RandGvec'] = [1,'Ry']
    y['FFTGvecs'] = [20,'Ry']
    y['NGsBlkXs'] = [1,'Ry'] #local field effects
    y['BndsRnXs'] = [1,nbands] #number of bands for static screening
    y['KfnQP_E'] = [2.91355133,1.0,1.0] #scissor operator
    y['BSEBands'] = [4,5] #number of bands in BSE kernel
    y['BEnRange'] = [[4.0,8.0],'eV'] #energy range to plot optical absorption
    y['BEnSteps'] = 500 #energy steps in the range
    y.write('%s/yambo_run.in'%root_folder)
    shell = scheduler()
    shell.add_command('cd %s; %s -F yambo_run.in -J %d'%(root_folder,yambo,layer_separation))
    shell.add_command('touch done')
    shell.run()
def run(mpthreads=1,nthreads=1,work_folder='bse_cutoff',cut=True):
    """Run run_job for every layer separation.

    mpthreads: number of worker processes (jobs run concurrently).
    nthreads:  MPI tasks passed to pw.x/yambo inside each job.
    work_folder: root folder for all calculations.
    cut: whether to apply the coulomb cutoff.
    """
    if mpthreads > 1:
        # Bug fix: the pool size is the number of concurrent jobs, so it
        # must be mpthreads -- the original passed nthreads here, ignoring
        # the requested -mp value.
        pool = multiprocessing.Pool(mpthreads)
        job = partial(run_job, nthreads=nthreads, work_folder=work_folder, cut=cut)
        try:
            # reversed list because of load imbalance
            pool.map(job, reversed(layer_separations))
            pool.close()
        except KeyboardInterrupt:
            print("Caught KeyboardInterrupt, terminating workers")
            pool.terminate()
        # join after close()/terminate() so workers are reaped on both the
        # success and the interrupt path (the original only joined on
        # interrupt, leaking workers on normal completion).
        pool.join()
    else:
        for layer_separation in layer_separations:
            run_job(layer_separation, nthreads=nthreads, work_folder=work_folder, cut=cut)
def plot(work_folder,filename,cut):
    """Gather the BSE results of every layer separation and plot them.

    Saves the figure to `filename` (defaults to <work_folder>.pdf) and
    shows it interactively. `plt` and pack_files_in_folder come from the
    star imports at the top of the file.
    """
    ax = plt.gca()
    for layer_separation in layer_separations:
        root_folder = "%s/%d"%(work_folder,layer_separation)
        #gather the results
        pack_files_in_folder(root_folder)
    #plot the results
    ya = YamboAnalyser(work_folder)
    print(ya)
    ax = ya.plot_bse('eps',ax=ax)
    if cut: title = "with coulomb cutoff"
    else: title = "without coulomb cutoff"
    plt.title(title)
    if filename is None: filename = "%s.pdf"%work_folder
    plt.savefig(filename)
    plt.show()
if __name__ == "__main__":
    #parse options
    # NOTE(review): typos in the user-facing help strings below ('colomb',
    # 'theads') are left as-is here; fix in a code change if desired.
    parser = argparse.ArgumentParser(description='Convergence test of the colomb cutoff')
    parser.add_argument('-r' ,'--run', action="store_true", help='Run the calculation')
    parser.add_argument('-c' ,'--cut', action="store_true", help='Use coulomb cutoff')
    parser.add_argument('-p' ,'--plot', action="store_true", help='Run the analysis')
    parser.add_argument('-f' ,'--plotfile', help='name of the plot file', default=None)
    parser.add_argument('-t' ,'--nthreads', help='threads for yambo', default=1, type=int)
    parser.add_argument('-mp' ,'--mpthreads', help='theads using python multiprocessing module', default=1, type=int)
    args = parser.parse_args()
    print("yambo using %d threads"%args.nthreads)
    print("multiprocessing using %d threads"%args.mpthreads)
    # With no arguments at all, show usage and exit.
    if len(sys.argv)==1:
        parser.print_help()
        sys.exit(1)
    cut = args.cut
    #choose work_folder
    if cut:
        work_folder = "bse_cutoff_cut"
    else:
        work_folder = "bse_cutoff"
    if args.run:
        run(args.mpthreads,args.nthreads,work_folder,cut)
    if args.plot:
        plot(work_folder,args.plotfile,cut)
| {
"content_hash": "f2da3d98ac769a1a446e352da4f87e64",
"timestamp": "",
"source": "github",
"line_count": 212,
"max_line_length": 117,
"avg_line_length": 34.5188679245283,
"alnum_prop": 0.6197048373872642,
"repo_name": "palful/yambopy",
"id": "76c84ea3b9ab90945cbb0ca4c0f4496ce2ed03fd",
"size": "7445",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tutorial/bn/bse_cutoff.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "486227"
},
{
"name": "Shell",
"bytes": "1303"
}
],
"symlink_target": ""
} |
"""Test simple functions (i.e. no pointers involved)"""
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
import time
# Wall-clock reference used by the timer/idle callbacks below.
start = time.time()
# GLUT window handle; assigned in the __main__ block.
window = None
def drawText( value, x,y, windowHeight, windowWidth, step = 18 ):
    """Draw the given text at given 2D position in window

    Temporarily switches to an orthographic projection sized to the window,
    draws each character as a GLUT bitmap glyph, then restores the saved
    projection matrix.
    NOTE(review): `lines` is never incremented, so '\n' repositions to the
    same row; `step` is unused (the literal 18 is used instead); the
    glOrtho call uses windowHeight for x and windowWidth for y -- confirm
    the parameter order is intentional.
    """
    glMatrixMode(GL_PROJECTION);
    # For some reason the GL_PROJECTION_MATRIX is overflowing with a single push!
    # glPushMatrix()
    matrix = glGetDouble( GL_PROJECTION_MATRIX )
    glLoadIdentity();
    glOrtho(0.0, windowHeight or 32, 0.0, windowWidth or 32, -1.0, 1.0)
    glMatrixMode(GL_MODELVIEW);
    glPushMatrix();
    glLoadIdentity();
    glRasterPos2i(x, y);
    lines = 0
    for character in value:
        if character == '\n':
            glRasterPos2i(x, y-(lines*18))
        else:
            glutBitmapCharacter(GLUT_BITMAP_HELVETICA_18, ord(character));
    glPopMatrix();
    glMatrixMode(GL_PROJECTION);
    # For some reason the GL_PROJECTION_MATRIX is overflowing with a single push!
    # glPopMatrix();
    glLoadMatrixd( matrix ) # should have un-decorated alias for this...
    glMatrixMode(GL_MODELVIEW);
def display():
    # Redraw callback: clear to a blue shade that cycles once per second,
    # draw the 'hello' label and a small teapot, then swap buffers.
    glutSetWindow(window);
    glClearColor (0.0, 0.0, (time.time()%1.0)/1.0, 0.0)
    glClear (GL_COLOR_BUFFER_BIT)
    drawText( 'hello', 20,20, size[0],size[1] )
    #glutBitmapCharacter( GLUT_BITMAP_8_BY_13, ord('a'))
    glutSolidTeapot( .2 )
    glFlush ()
    glutSwapBuffers()
# Current window size in pixels; updated by reshape() and idle().
size = (250,250)
def reshape( *args ):
    # Resize callback: record the new (width, height), reset the viewport
    # and redraw immediately.
    global size
    size = args
    glViewport( *( (0,0)+args) )
    display()
def ontimer( *args ):
    # Timer callback: log the elapsed time and re-arm itself for another
    # 1000 ms (with a different user value, 24, than the initial 23).
    print 'timer', args, '@time', time.time()-start
    glutTimerFunc( 1000, ontimer, 24 )
def idle():
    # Idle callback: for the first 10 seconds, shrink the width and grow
    # the height (first 5 s) then reverse, clamping both to >= 1 pixel;
    # afterwards destroy the window and exit the process.
    delta = time.time()-start
    if delta < 10:
        global size
        x,y = size
        if delta < 5:
            change = +1
        else:
            change = -1
        x = x-change
        y = y+change
        if x < 1:
            x = 1
        if y < 1:
            y = 1
        glutReshapeWindow( x, y )
        size = (x,y)
        glutSetWindow(window)
        glutPostRedisplay()
    else:
        glutDestroyWindow( window )
        print 'window destroyed'
        import sys
        sys.exit( 0 )
def printFunction( name ):
    # Build a GLUT event callback that just prints the event name and its
    # arguments (used to trace mouse/keyboard/visibility events below).
    def onevent( *args ):
        print '%s -> %s'%(name, ", ".join( [str(a) for a in args ]))
    return onevent
if __name__ == "__main__":
    # Create the window, register every callback (most of them tracing
    # stubs from printFunction), then hand control to the GLUT main loop.
    import sys
    newArgv = glutInit(sys.argv)
    print 'newArguments', newArgv
    glutInitDisplayMode( GLUT_DOUBLE | GLUT_RGB )
    glutInitWindowSize(250, 250)
    glutInitWindowPosition(100, 100)
    window = glutCreateWindow("hello")
    print 'window', repr(window)
    glutDisplayFunc(display)
    glutReshapeFunc(reshape)
    glutMouseFunc(printFunction( 'Mouse' ))
    glutEntryFunc(printFunction( 'Entry' ))
    glutKeyboardFunc( printFunction( 'Keyboard' ))
    glutKeyboardUpFunc( printFunction( 'KeyboardUp' ))
    glutMotionFunc( printFunction( 'Motion' ))
    glutPassiveMotionFunc( printFunction( 'PassiveMotion' ))
    glutVisibilityFunc( printFunction( 'Visibility' ))
    glutWindowStatusFunc( printFunction( 'WindowStatus' ))
    glutSpecialFunc( printFunction( 'Special' ))
    glutSpecialUpFunc( printFunction( 'SpecialUp' ))
    glutTimerFunc( 1000, ontimer, 23 )
    glutIdleFunc( idle )
    glutMainLoop()
"content_hash": "c2f91bd33642e1519f0ee8608e76e66b",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 81,
"avg_line_length": 28.81896551724138,
"alnum_prop": 0.6212982351181573,
"repo_name": "frederica07/Dragon_Programming_Process",
"id": "bfb81a82ca1eddb294cbef476702ff998a5130e8",
"size": "3343",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PyOpenGL-3.0.2/tests/test_glutwindow.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "1548"
},
{
"name": "Python",
"bytes": "2558317"
}
],
"symlink_target": ""
} |
from functools import lru_cache
from django import template
from django.conf import settings
from django.forms.formsets import BaseFormSet
from django.template.loader import get_template
from crispy_forms.helper import FormHelper
from crispy_forms.utils import TEMPLATE_PACK, get_template_pack
register = template.Library()
# We import the filters, so they are available when doing load crispy_forms_tags
from crispy_forms.templatetags.crispy_forms_filters import * # NOQA: F403,F401, E402 isort:skip
class ForLoopSimulator:
    """
    Simulates a forloop tag, precisely::

        {% for form in formset.forms %}

    If `{% crispy %}` is rendering a formset with a helper, We inject a `ForLoopSimulator` object
    in the context as `forloop` so that formset forms can do things like::

        Fieldset("Item {{ forloop.counter }}", [...])
        HTML("{% if forloop.first %}First form text{% endif %}"
    """

    def __init__(self, formset):
        self.len_values = len(formset.forms)
        # Shortcuts for current loop iteration number.
        self.counter = 1
        self.counter0 = 0
        # Reverse counter iteration numbers.
        self.revcounter = self.len_values
        self.revcounter0 = self.len_values - 1
        # Boolean values designating first and last times through loop.
        self.first = True
        self.last = self.len_values == 1

    def iterate(self):
        """
        Updates values as if we had iterated over the for
        """
        self.counter += 1
        self.counter0 += 1
        self.revcounter -= 1
        self.revcounter0 -= 1
        self.first = False
        # Bug fix: `last` must mirror Django's forloop.last -- True exactly
        # on the final iteration, i.e. when no items remain after this one
        # (revcounter0 == 0). The original compared revcounter0 against
        # len_values - 1, which is False on the last iteration of any
        # formset with more than one form.
        self.last = self.revcounter0 == 0
class BasicNode(template.Node):
    """
    Basic Node object that we can rely on for Node objects in normal
    template tags. I created this because most of the tags we'll be using
    will need both the form object and the helper string. This handles
    both the form object and parses out the helper string into attributes
    that templates can easily handle.
    """

    def __init__(self, form, helper, template_pack=None):
        # `form` and `helper` are the raw template-variable names; they are
        # only resolved against a context in get_render().
        self.form = form
        if helper is not None:
            self.helper = helper
        else:
            self.helper = None
        self.template_pack = template_pack or get_template_pack()

    def get_render(self, context):
        """
        Returns a `Context` object with all the necessary stuff for rendering the form

        :param context: `django.template.Context` variable holding the context for the node

        `self.form` and `self.helper` are resolved into real Python objects resolving them
        from the `context`. The `actual_form` can be a form or a formset. If it's a formset
        `is_formset` is set to True. If the helper has a layout we use it, for rendering the
        form or the formset's forms.

        NOTE(review): this method also mutates self.template_pack and
        self.actual_helper during render.
        """
        # Nodes are not thread safe in multithreaded environments
        # https://docs.djangoproject.com/en/dev/howto/custom-template-tags/#thread-safety-considerations
        if self not in context.render_context:
            context.render_context[self] = (
                template.Variable(self.form),
                template.Variable(self.helper) if self.helper else None,
            )
        form, helper = context.render_context[self]
        actual_form = form.resolve(context)
        if self.helper is not None:
            helper = helper.resolve(context)
        else:
            # If the user names the helper within the form `helper` (standard), we use it
            # This allows us to have simplified tag syntax: {% crispy form %}
            helper = FormHelper() if not hasattr(actual_form, "helper") else actual_form.helper
        # use template_pack from helper, if defined
        try:
            if helper.template_pack:
                self.template_pack = helper.template_pack
        except AttributeError:
            pass
        self.actual_helper = helper
        # We get the response dictionary
        is_formset = isinstance(actual_form, BaseFormSet)
        response_dict = self.get_response_dict(helper, context, is_formset)
        node_context = context.__copy__()
        node_context.update({"is_bound": actual_form.is_bound})
        node_context.update(response_dict)
        final_context = node_context.__copy__()
        # If we have a helper's layout we use it, for the form or the formset's forms
        if helper and helper.layout:
            if not is_formset:
                actual_form.form_html = helper.render_layout(
                    actual_form, node_context, template_pack=self.template_pack
                )
            else:
                # Formset: render each member form's layout, exposing a
                # simulated `forloop` variable to the layout templates.
                forloop = ForLoopSimulator(actual_form)
                helper.render_hidden_fields = True
                for form in actual_form:
                    node_context.update({"forloop": forloop})
                    node_context.update({"formset_form": form})
                    form.form_html = helper.render_layout(form, node_context, template_pack=self.template_pack)
                    forloop.iterate()
        if is_formset:
            final_context["formset"] = actual_form
        else:
            final_context["form"] = actual_form
        return final_context

    def get_response_dict(self, helper, context, is_formset):
        """
        Returns a dictionary with all the parameters necessary to render the form/formset in a template.

        :param context: `django.template.Context` for the node

        :param is_formset: Boolean value. If set to True, indicates we are working with a formset.
        """
        if not isinstance(helper, FormHelper):
            raise TypeError("helper object provided to {% crispy %} tag must be a crispy.helper.FormHelper object.")
        attrs = helper.get_attributes(template_pack=self.template_pack)
        form_type = "form"
        if is_formset:
            form_type = "formset"
        # We take form/formset parameters from attrs if they are set, otherwise we use defaults
        response_dict = {
            "%s_action" % form_type: attrs["attrs"].get("action", ""),
            "%s_attrs" % form_type: attrs.get("attrs", ""),
            "%s_class" % form_type: attrs["attrs"].get("class", ""),
            "%s_id" % form_type: attrs["attrs"].get("id", ""),
            "%s_method" % form_type: attrs.get("form_method", "post"),
            "%s_style" % form_type: attrs.get("form_style", None),
            "%s_tag" % form_type: attrs.get("form_tag", True),
            "disable_csrf": attrs.get("disable_csrf", False),
            "error_text_inline": attrs.get("error_text_inline", True),
            "field_class": attrs.get("field_class", ""),
            "field_template": attrs.get("field_template", ""),
            "flat_attrs": attrs.get("flat_attrs", ""),
            "form_error_title": attrs.get("form_error_title", None),
            "form_show_errors": attrs.get("form_show_errors", True),
            "form_show_labels": attrs.get("form_show_labels", True),
            "formset_error_title": attrs.get("formset_error_title", None),
            "help_text_inline": attrs.get("help_text_inline", False),
            "html5_required": attrs.get("html5_required", False),
            "include_media": attrs.get("include_media", True),
            "inputs": attrs.get("inputs", []),
            "is_formset": is_formset,
            "label_class": attrs.get("label_class", ""),
            "template_pack": self.template_pack,
        }
        # Handles custom attributes added to helpers
        for attribute_name, value in attrs.items():
            if attribute_name not in response_dict:
                response_dict[attribute_name] = value
        if "csrf_token" in context:
            response_dict["csrf_token"] = context["csrf_token"]
        return response_dict
@lru_cache()
def whole_uni_formset_template(template_pack=TEMPLATE_PACK):
    """Load (and memoize per pack) the top-level formset wrapper template."""
    template_name = "%s/whole_uni_formset.html" % template_pack
    return get_template(template_name)
@lru_cache()
def whole_uni_form_template(template_pack=TEMPLATE_PACK):
    """Load (and memoize per pack) the top-level form wrapper template."""
    template_name = "%s/whole_uni_form.html" % template_pack
    return get_template(template_name)
class CrispyFormNode(BasicNode):
    """Template node behind the {% crispy %} tag."""

    def render(self, context):
        """Render the form/formset with the helper's template, if any,
        otherwise with the default whole-form/formset template."""
        flat_context = self.get_render(context).flatten()
        custom_template = None
        if self.actual_helper is not None:
            custom_template = getattr(self.actual_helper, "template", None)
        if custom_template:
            chosen = get_template(custom_template)
        elif flat_context["is_formset"]:
            chosen = whole_uni_formset_template(self.template_pack)
        else:
            chosen = whole_uni_form_template(self.template_pack)
        return chosen.render(flat_context)
# {% crispy %} tag
@register.tag(name="crispy")
def do_uni_form(parser, token):
    """
    You need to pass in at least the form/formset object, and can also pass in the
    optional `crispy_forms.helpers.FormHelper` object.

    helper (optional): A `crispy_forms.helper.FormHelper` object.

    Usage::

        {% load crispy_tags %}
        {% crispy form form.helper %}

    You can also provide the template pack as the third argument::

        {% crispy form form.helper 'bootstrap' %}

    If the `FormHelper` attribute is named `helper` you can simply do::

        {% crispy form %}
        {% crispy form 'bootstrap' %}
    """
    # token.pop(1) consumes the arguments positionally: after popping the
    # form, the next token (if any) is either the helper or a quoted pack.
    token = token.split_contents()
    form = token.pop(1)
    helper = None
    template_pack = "'%s'" % get_template_pack()
    # {% crispy form helper %}
    try:
        helper = token.pop(1)
    except IndexError:
        pass
    # {% crispy form helper 'bootstrap' %}
    try:
        template_pack = token.pop(1)
    except IndexError:
        pass
    # {% crispy form 'bootstrap' %}
    # A quoted second argument is a template pack, not a helper.
    if helper is not None and isinstance(helper, str) and ("'" in helper or '"' in helper):
        template_pack = helper
        helper = None
    if template_pack is not None:
        # Strip the surrounding quotes and validate against the allowed packs.
        template_pack = template_pack[1:-1]
        ALLOWED_TEMPLATE_PACKS = getattr(
            settings, "CRISPY_ALLOWED_TEMPLATE_PACKS", ("bootstrap", "uni_form", "bootstrap3", "bootstrap4")
        )
        if template_pack not in ALLOWED_TEMPLATE_PACKS:
            raise template.TemplateSyntaxError(
                "crispy tag's template_pack argument should be in %s" % str(ALLOWED_TEMPLATE_PACKS)
            )
    return CrispyFormNode(form, helper, template_pack=template_pack)
| {
"content_hash": "c9f059388fa9735606aee312eb8f36dc",
"timestamp": "",
"source": "github",
"line_count": 272,
"max_line_length": 116,
"avg_line_length": 38.1764705882353,
"alnum_prop": 0.612673343605547,
"repo_name": "maraujop/django-crispy-forms",
"id": "e5060232f6b8f9ad4348f8f47d008247bd842c37",
"size": "10384",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "crispy_forms/templatetags/crispy_forms_tags.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "65611"
},
{
"name": "Makefile",
"bytes": "205"
},
{
"name": "Python",
"bytes": "171005"
}
],
"symlink_target": ""
} |
from mezzanine import template
from mezzanine.generic.fields import RatingField
from mezzanine.generic.forms import RatingForm
register = template.Library()
@register.inclusion_tag("generic/includes/rating.html", takes_context=True)
def rating_for(context, obj):
    """
    Populate the template context with the object being rated and its
    rating form, plus aggregate rating data when the model defines a
    ``RatingField``.
    """
    context["rating_obj"] = obj
    context["rating_form"] = RatingForm(obj)
    # Locate the first RatingField declared on the model (if any) and expose
    # its denormalised aggregate attributes to the template.
    rating_field = next(
        (f for f in obj._meta.many_to_many if isinstance(f, RatingField)),
        None,
    )
    if rating_field is not None:
        context["rating_average"] = getattr(obj, "%s_average" % rating_field.name)
        context["rating_count"] = getattr(obj, "%s_count" % rating_field.name)
    return context
| {
"content_hash": "8b689a6b307dc9fbf48b7a75073d5009",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 79,
"avg_line_length": 34.5,
"alnum_prop": 0.6877470355731226,
"repo_name": "guibernardino/mezzanine",
"id": "ad10d578322d48cb85205df50cd1b192874bca6b",
"size": "760",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "mezzanine/generic/templatetags/rating_tags.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "22201"
},
{
"name": "JavaScript",
"bytes": "61430"
},
{
"name": "Python",
"bytes": "832496"
}
],
"symlink_target": ""
} |
"""
Pytest fixtures
"""
import os
import pytest
import icomoon
class FixturesSettingsTestMixin(object):
    """
    A mixin containing settings about the application. This is almost all
    about useful paths which may be used in tests.

    Attributes:
        application_path (str): Absolute path to the application directory.
        package_path (str): Absolute path to the package directory.
        tests_dir (str): Directory name which includes tests.
        tests_path (str): Absolute path to the tests directory.
        fixtures_dir (str): Directory name which includes tests datas.
        fixtures_path (str): Absolute path to the tests datas.
    """
    def __init__(self):
        # Base fixture datas directory
        self.application_path = os.path.abspath(
            os.path.dirname(icomoon.__file__)
        )
        # The package root is the parent of the application directory. Reuse
        # the path computed above instead of resolving icomoon.__file__ a
        # second time (the original duplicated that work).
        self.package_path = os.path.normpath(
            os.path.join(self.application_path, "..")
        )
        self.tests_dir = "tests"
        self.tests_path = os.path.join(
            self.package_path,
            self.tests_dir,
        )
        self.fixtures_dir = "data_fixtures"
        self.fixtures_path = os.path.join(
            self.tests_path,
            self.fixtures_dir
        )

    def format(self, content):
        """
        Format given string to include some values related to this application.

        Arguments:
            content (str): Content string to format with possible values.

        Returns:
            str: Given string formatted with possible values.
        """
        return content.format(
            HOMEDIR=os.path.expanduser("~"),
            PACKAGE=self.package_path,
            APPLICATION=self.application_path,
            TESTS=self.tests_path,
            FIXTURES=self.fixtures_path,
            VERSION=icomoon.__version__,
        )
@pytest.fixture(scope="session")
def temp_builds_dir(tmpdir_factory):
    """
    Session-scoped fixture providing a temporary "builds" directory where
    tests can create throwaway content.
    """
    return tmpdir_factory.mktemp("builds")
@pytest.fixture(scope="module")
def testsettings():
    """
    Build and return the test settings object.

    Example:
        Use it from any test function: ::

            def test_foo(testsettings):
                print(testsettings.package_path)
                print(testsettings.format("foo: {VERSION}"))
    """
    settings = FixturesSettingsTestMixin()
    return settings
| {
"content_hash": "f74b96709d5880adc4b5a1f0b75a95bc",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 79,
"avg_line_length": 27.934065934065934,
"alnum_prop": 0.5869394177812746,
"repo_name": "sveetch/django-icomoon",
"id": "f54bcec184eedb8db3d9d5e6d3fecd4856839255",
"size": "2542",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/conftest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "442"
},
{
"name": "HTML",
"bytes": "2185"
},
{
"name": "Makefile",
"bytes": "1631"
},
{
"name": "Python",
"bytes": "31800"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations
import utilities.fields
class Migration(migrations.Migration):
    """Alter ``Device.name`` to a nullable, unique CharField with max_length=64."""

    dependencies = [
        ('dcim', '0031_regions'),
    ]

    operations = [
        migrations.AlterField(
            model_name='device',
            name='name',
            # NullableCharField allows blank input while still enforcing
            # uniqueness on non-null values.
            field=utilities.fields.NullableCharField(blank=True, max_length=64, null=True, unique=True),
        ),
    ]
| {
"content_hash": "8b028ea8816a3c57102228d272d52482",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 104,
"avg_line_length": 22.736842105263158,
"alnum_prop": 0.6203703703703703,
"repo_name": "Alphalink/netbox",
"id": "e11e75bab3a154eef29f446745c879189c5918aa",
"size": "505",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "netbox/dcim/migrations/0032_device_increase_name_length.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "167396"
},
{
"name": "HTML",
"bytes": "399345"
},
{
"name": "JavaScript",
"bytes": "13295"
},
{
"name": "Python",
"bytes": "937982"
},
{
"name": "Shell",
"bytes": "2973"
}
],
"symlink_target": ""
} |
"""This module exposes utility functions and classes for working with pandas objects."""
# third-party libraries
import pandas
__author__ = 'Amanda Birmingham'
__maintainer__ = "Amanda Birmingham"
__email__ = "abirmingham@ucsd.edu"
__status__ = "prototype"
def add_series_to_dataframe(dataframe, series, header):
    """Add the given values to the dataframe as a new column, in place.

    Args:
        dataframe (pandas.DataFrame): Dataframe that receives the new column;
            it is modified in place.
        series (array-like, dict, or scalar value): Values for the new column.
        header (str): Name used for the new column.
    """
    # Wrap the raw values in a Series aligned to the dataframe's index so the
    # assignment never introduces NaNs from index mismatches.
    aligned_column = pandas.Series(series, index=dataframe.index)
    dataframe.loc[:, header] = aligned_column
"content_hash": "a1e2a7280ad9b88519613ede1541e315",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 102,
"avg_line_length": 38.05,
"alnum_prop": 0.7069645203679369,
"repo_name": "ucsd-ccbb/jupyter-genomics",
"id": "bfe6d0825a1bc0d963c2256cdf7f86f7d2a063cb",
"size": "761",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/crispr/ccbbucsd/utilities/pandas_utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "254329"
},
{
"name": "Java",
"bytes": "278021"
},
{
"name": "Jupyter Notebook",
"bytes": "19771596"
},
{
"name": "Perl",
"bytes": "14052"
},
{
"name": "Python",
"bytes": "428899"
},
{
"name": "R",
"bytes": "6817"
},
{
"name": "Shell",
"bytes": "37476"
}
],
"symlink_target": ""
} |
import pytest
from pathlib import Path
from dstools.pipeline import DAG
from dstools.pipeline.tasks import PythonCallable
from dstools.pipeline.products import File
class MyException(Exception):
    """Sentinel error raised by ``fn_w_exception`` to test error propagation."""
    pass
def fn(product, a):
    """Minimal task body: materialize ``product`` with fixed text content."""
    destination = Path(str(product))
    destination.write_text('things')
def fn_w_exception(product):
    """Task body that always fails by raising the sentinel exception."""
    raise MyException()
def test_params_are_accesible_after_init():
    """Params passed at construction must be readable on the task object."""
    dag = DAG()
    task = PythonCallable(fn, File('file.txt'), dag, 'callable',
                          params=dict(a=1))
    assert task.params == dict(a=1)
def test_upstream_and_me_are_added():
    """Rendering the DAG must inject the product into the task params."""
    dag = DAG()
    task = PythonCallable(fn, File('file.txt'), dag, 'callable',
                          params=dict(a=1))
    dag.render()

    rendered = task.params.copy()
    rendered['product'] = str(rendered['product'])
    assert rendered == dict(a=1, product='file.txt')
def test_can_execute_python_callable(tmp_directory):
    """A DAG holding a single PythonCallable task must build successfully."""
    dag = DAG()
    PythonCallable(fn, File('file.txt'), dag, 'callable', params=dict(a=1))
    assert dag.build()
def test_exceptions_are_raised_with_serial_executor():
    """An error raised inside a task callable must escape dag.build()."""
    dag = DAG()
    PythonCallable(fn_w_exception, File('file.txt'), dag, 'callable')

    with pytest.raises(MyException):
        dag.build()
| {
"content_hash": "6dc5d3d14a6d5996e04eba720aae9f66",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 61,
"avg_line_length": 23.84313725490196,
"alnum_prop": 0.6241776315789473,
"repo_name": "edublancas/python-ds-tools",
"id": "f3fa2cfa43986a918493a102fd4c1ed14490cee2",
"size": "1216",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/pipeline/test_python_callable.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11675"
}
],
"symlink_target": ""
} |
""" Defines a representation of a graph in Graphviz"s dot language """
#------------------------------------------------------------------------------
# Imports:
#------------------------------------------------------------------------------
import uuid
from enthought.traits.ui.api \
import View, Group, VGroup, HGroup, Item, Tabbed, Label
from enthought.traits.ui.api import TableEditor, InstanceEditor, ListEditor
from enthought.traits.ui.table_column import ObjectColumn
from enthought.traits.ui.extras.checkbox_column import CheckboxColumn
from enthought.traits.ui.table_filter import \
EvalFilterTemplate, MenuFilterTemplate, RuleFilterTemplate, RuleTableFilter
from enthought.naming.unique_name import make_unique_name
from enthought.pyface.image_resource import ImageResource
from enthought.enable.component_editor import ComponentEditor
import godot
#------------------------------------------------------------------------------
# Images:
#------------------------------------------------------------------------------
frame_icon = ImageResource("dot")
#------------------------------------------------------------------------------
# Node factory function:
#------------------------------------------------------------------------------
def node_factory(**row_factory_kw):
    """ Give new nodes a unique ID. """
    if "__table_editor__" not in row_factory_kw:
        # Stand-alone creation: fall back to a short random identifier.
        return godot.node.Node(uuid.uuid4().hex[:6])
    # Created from the table editor: derive a name unique among the graph's
    # existing node IDs.
    editor = row_factory_kw.pop("__table_editor__")
    existing_ids = [node.ID for node in editor.object.nodes]
    return godot.node.Node(make_unique_name("n", existing_ids))
#------------------------------------------------------------------------------
# Node table editor:
#------------------------------------------------------------------------------
# Table editor used to edit the graph's nodes in a Traits UI view.
node_table_editor = TableEditor(
    columns=[
        ObjectColumn(name="ID"),
        ObjectColumn(name="label"),
        ObjectColumn(name="shape"),
        ObjectColumn(name="fixedsize"),
        ObjectColumn(name="width"),
        ObjectColumn(name="height"),
        ObjectColumn(name="pos"),
        ObjectColumn(name="style"),
        ObjectColumn(name="_draw_")
    ],
    other_columns = [ # not initially displayed
        ObjectColumn(name="sides")
    ],
    show_toolbar=True, deletable=True,
    filters=[EvalFilterTemplate, MenuFilterTemplate, RuleFilterTemplate],
    search=RuleTableFilter(),
    # New rows are created through node_factory, which receives the table
    # editor itself through the "__table_editor__" keyword placeholder below.
    row_factory=node_factory,
    row_factory_kw={"__table_editor__": ""}
)
#------------------------------------------------------------------------------
# Edge factory function:
#------------------------------------------------------------------------------
def edge_factory(**row_factory_kw):
    """ Give new edges a unique ID.

    Builds a new Edge for the edge table editor. Tail and head default to
    the graph's existing nodes when available; otherwise fresh placeholder
    nodes with unique IDs are created. Returns None when no table editor is
    supplied in ``row_factory_kw``.
    """
    if "__table_editor__" not in row_factory_kw:
        return None
    graph = row_factory_kw["__table_editor__"].object
    nodes = graph.nodes
    IDs = [v.ID for v in nodes]
    if len(nodes) == 0:
        # No nodes yet: create two distinct placeholders. Include the tail's
        # freshly generated ID when naming the head so the two nodes never
        # end up with the same name (the previous code gave both the same
        # generated ID).
        tail_node = godot.Node(ID=make_unique_name("n", IDs))
        head_node = godot.Node(ID=make_unique_name("n", IDs + [tail_node.ID]))
    elif len(nodes) == 1:
        tail_node = nodes[0]
        head_node = godot.Node(ID=make_unique_name("n", IDs))
    else:
        tail_node = nodes[0]
        head_node = nodes[1]
    return godot.edge.Edge(tail_node, head_node, _nodes=nodes)
#------------------------------------------------------------------------------
# Edge table editor:
#------------------------------------------------------------------------------
# Table editor used to edit the graph's edges in a Traits UI view. Tail and
# head cells are presented via an InstanceEditor over the edge's "_nodes"
# list, displaying each node by its ID.
edge_table_editor = TableEditor(
    columns=[
        ObjectColumn(name="tail_node", label="From",
            editor=InstanceEditor(name="_nodes", editable=False),
            format_func=lambda obj: obj.ID),
        ObjectColumn(name="head_node", label="To",
            editor=InstanceEditor(name="_nodes", editable=False),
            format_func=lambda obj: obj.ID),
        ObjectColumn(name="label"),
        ObjectColumn(name="style"),
        ObjectColumn(name="arrowsize"),
        ObjectColumn(name="weight"),
        ObjectColumn(name="len"),
        ObjectColumn(name="headlabel"),
        ObjectColumn(name="arrowhead"),
        ObjectColumn(name="taillabel"),
        ObjectColumn(name="arrowtail"),
        ObjectColumn(name="_draw_")
    ],
    other_columns = [ # not initially displayed
        ObjectColumn(name="color"),
        ObjectColumn(name="lp"),
        ObjectColumn(name="pos"),
        ObjectColumn(name="dir"),
        ObjectColumn(name="minlen"),
        ObjectColumn(name="colorscheme"),
        ObjectColumn(name="constraint"),
        ObjectColumn(name="decorate"),
        ObjectColumn(name="showboxes"),
        ObjectColumn(name="ltail"),
        ObjectColumn(name="lhead"),
    ],
    show_toolbar=True, deletable=True,
    filters=[EvalFilterTemplate, MenuFilterTemplate, RuleFilterTemplate],
    search=RuleTableFilter(),
    # New rows come from edge_factory; the table editor instance is injected
    # through the "__table_editor__" placeholder keyword.
    row_factory=edge_factory,
    row_factory_kw={"__table_editor__": ""}
)
#------------------------------------------------------------------------------
# Items:
#------------------------------------------------------------------------------
# Reusable Traits UI items shared by the views defined below.
view_port_item = Item(name="vp", editor=ComponentEditor(height=80),
                      show_label=False, id=".viewport")
arrange_item = Item("arrange", show_label=False)
redraw_item = Item("redraw", show_label=False)
nodes_item = Item(name="nodes", editor=node_table_editor, show_label=False)
edges_item = Item(name="edges", editor=edge_table_editor, show_label=False)
#------------------------------------------------------------------------------
# Groups:
#------------------------------------------------------------------------------
# Notebook group showing one closable tab per subgraph.
subgraphs_notebook_group = Group(
    Item( "subgraphs@",
          id = ".subgraphs_nb",
          show_label = False,
          editor = ListEditor( use_notebook = True,
                               deletable = True,
                               export = 'DockShellWindow',
                               page_name = '.ID' )
    ),
    label="Subgraphs", id = ".subgraphs"
)

# Notebook group showing one closable tab per cluster.
clusters_notebook_group = Group(
    Item( "clusters@",
          id = ".clusters_nb",
          show_label = False,
          editor = ListEditor( use_notebook = True,
                               deletable = True,
                               export = "DockShellWindow",
                               page_name = ".ID" )
    ),
    label = "Clusters", id = ".clusters"
)

# FIXME: For want of a better word.
# Graph-level visual attributes: colours, fonts, labels and layers.
appearance_group = Group(
    ["bgcolor", "colorscheme"],
    Group(
        ["charset", "fontcolor", "fontname", "fontnames", "fontpath",
         "fontsize"], label="Font", show_border=True
    ),
    Group(
        ["label", "labelloc", "labeljust", "lp", "nojustify"],
        label="Label", show_border=True
    ),
    Group(["layers", "layersep"], label="Layer", show_border=True),
    label="Appearance"
)

# Attributes controlling the overall graph layout.
layout_group = Group(
    ["center", "dim", "normalize", "outputorder", "overlap", "pack",
     "packmode", "pad", "rankdir", "ranksep", "ratio", "root",
     "voro_margin"],
    label="Layout"
)

# Tuning knobs for the layout algorithms.
algorithm_group = Group(
    ["epsilon", "levelsgap", "maxiter", "mclimit", "mode",
     "model", "mosek", "nslimit", "nslimit1", "remincross",
     "searchsize"],
    label="Algorithm"
)

# Attributes applying to the graph's child clusters, nodes and edges.
children_group = Group(
    Group(
        ["clusterrank", "compound"], label="Cluster",
        show_border=True
    ),
    Group(
        ["Damping", "defaultdist", "mindist", "nodesep", "quantum",
         "sep", "start"], label="Node", show_border=True
    ),
    Group(
        ["concentrate", "diredgeconstraints", "esep", "K",
         "ordering", "splines"], label="Edge", show_border=True
    ),
    label="Children"
)

# Output and rendering attributes (page geometry, DPI, misc metadata).
output_group = Group(
    ["dpi", "landscape", "margin", "pagedir", "resolution",
     "rotate", "showboxes", "size", "stylesheet"],
    Group(
        ["comment", "target", "URL"], label="Misc",
        show_border=True
    ),
    label="Output"
)
#------------------------------------------------------------------------------
# Views:
#------------------------------------------------------------------------------
# Minimal view: just the drawing canvas.
graph_view = View(
    view_port_item, id="godot.graph",
    buttons=["OK", "Cancel", "Help"],
    resizable=True, icon=frame_icon
)

# Main view: canvas on top, attribute/editor tabs underneath, separated by a
# draggable splitter.
tabbed_view = View(
    VGroup(
        Group(view_port_item, HGroup(arrange_item, redraw_item)),
        Tabbed(
            Group(nodes_item, label="Nodes"),
            Group(edges_item, label="Edges"),
            subgraphs_notebook_group,
            clusters_notebook_group,
            appearance_group, layout_group,
            algorithm_group, children_group,
            output_group,
            dock="tab"
        ), layout="split", id=".splitter"
    ),
    id="godot.graph.tabbed_view",
    buttons=["OK", "Cancel", "Help"],
    resizable=True, icon=frame_icon
)

nodes_view = View(nodes_item, title="Nodes", icon=frame_icon,
                  buttons=["OK", "Cancel", "Undo"])

edges_view = View(edges_item, title="Edges", icon=frame_icon,
                  buttons=["OK", "Cancel", "Undo"])

# Stand-alone dialog exposing every Graphviz graph attribute group.
attr_view = View(
    Tabbed(appearance_group, layout_group, algorithm_group,
           children_group, output_group), dock="tab",
    id="godot.graph.attr_view", title="Dot Attributes",
    buttons=["OK", "Cancel", "Help"],
    resizable=True, icon=frame_icon
)

license_label = \
"""
Copyright (c) 2009 Richard W. Lincoln
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""

contact_label = """
http://github.com/rwl/godot
"""

# "About" dialog with the license and contribution details.
about_view = View(
    Group(Label(license_label), label="License"),
    Group(Label(contact_label), label="Contribute"),
#    Group(Label(credits_label), label="Credits"),
    title="About", buttons=["OK"],
    icon=frame_icon
)
# EOF -------------------------------------------------------------------------
| {
"content_hash": "b576ec401202cf0a3882be0d715c3c6f",
"timestamp": "",
"source": "github",
"line_count": 319,
"max_line_length": 79,
"avg_line_length": 34.658307210031346,
"alnum_prop": 0.5349131693198264,
"repo_name": "rwl/godot",
"id": "cf9ae73fecbe5a1db601024278183660d0e8b67a",
"size": "12330",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "godot/ui/graph_view.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "412228"
}
],
"symlink_target": ""
} |
import unittest
from unittest import skip
class InitializationTests(unittest.TestCase):
def test_import_ingestion(self):
"""
Check import 'ingestion' as a whole package
"""
try:
import ingestion
except ImportError:
self.fail("Was not able to import ingestion")
@skip("Test not written yet")
def test_import_other_stuff(self):
"""
Another test of imports
"""
pass
| {
"content_hash": "3b7e6ba515a03bed63bd24f50697f237",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 57,
"avg_line_length": 23.7,
"alnum_prop": 0.5886075949367089,
"repo_name": "georgetown-analytics/housing-risk",
"id": "4b57a2bc09f3cdf5d6ec2478b99b6cb84226caa6",
"size": "641",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "code/tests/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2575"
},
{
"name": "HTML",
"bytes": "860130"
},
{
"name": "JavaScript",
"bytes": "8476"
},
{
"name": "Python",
"bytes": "72609"
}
],
"symlink_target": ""
} |
from collections.abc import Iterable
from numbers import Real, Integral
from warnings import warn
import numpy as np
import openmc.checkvalue as cv
from openmc.stats import Tabular, Univariate, Discrete, Mixture, \
Uniform, Legendre
from .function import INTERPOLATION_SCHEME
from .angle_energy import AngleEnergy
from .data import EV_PER_MEV
from .endf import get_list_record, get_tab2_record
class CorrelatedAngleEnergy(AngleEnergy):
    """Correlated angle-energy distribution

    Parameters
    ----------
    breakpoints : Iterable of int
        Breakpoints defining interpolation regions
    interpolation : Iterable of int
        Interpolation codes
    energy : Iterable of float
        Incoming energies at which distributions exist
    energy_out : Iterable of openmc.stats.Univariate
        Distribution of outgoing energies corresponding to each incoming energy
    mu : Iterable of Iterable of openmc.stats.Univariate
        Distribution of scattering cosine for each incoming/outgoing energy

    Attributes
    ----------
    breakpoints : Iterable of int
        Breakpoints defining interpolation regions
    interpolation : Iterable of int
        Interpolation codes
    energy : Iterable of float
        Incoming energies at which distributions exist
    energy_out : Iterable of openmc.stats.Univariate
        Distribution of outgoing energies corresponding to each incoming energy
    mu : Iterable of Iterable of openmc.stats.Univariate
        Distribution of scattering cosine for each incoming/outgoing energy

    """

    def __init__(self, breakpoints, interpolation, energy, energy_out, mu):
        super().__init__()
        self.breakpoints = breakpoints
        self.interpolation = interpolation
        self.energy = energy
        self.energy_out = energy_out
        self.mu = mu

    @property
    def breakpoints(self):
        return self._breakpoints

    @property
    def interpolation(self):
        return self._interpolation

    @property
    def energy(self):
        return self._energy

    @property
    def energy_out(self):
        return self._energy_out

    @property
    def mu(self):
        return self._mu

    @breakpoints.setter
    def breakpoints(self, breakpoints):
        cv.check_type('correlated angle-energy breakpoints', breakpoints,
                      Iterable, Integral)
        self._breakpoints = breakpoints

    @interpolation.setter
    def interpolation(self, interpolation):
        cv.check_type('correlated angle-energy interpolation', interpolation,
                      Iterable, Integral)
        self._interpolation = interpolation

    @energy.setter
    def energy(self, energy):
        cv.check_type('correlated angle-energy incoming energy', energy,
                      Iterable, Real)
        self._energy = energy

    @energy_out.setter
    def energy_out(self, energy_out):
        cv.check_type('correlated angle-energy outgoing energy', energy_out,
                      Iterable, Univariate)
        self._energy_out = energy_out

    @mu.setter
    def mu(self, mu):
        # Expect a 2-level nesting: one list of cosine distributions per
        # incoming energy.
        cv.check_iterable_type('correlated angle-energy outgoing cosine',
                               mu, Univariate, 2, 2)
        self._mu = mu

    def to_hdf5(self, group):
        """Write distribution to an HDF5 group

        Parameters
        ----------
        group : h5py.Group
            HDF5 group to write to

        """
        group.attrs['type'] = np.string_('correlated')

        dset = group.create_dataset('energy', data=self.energy)
        dset.attrs['interpolation'] = np.vstack((self.breakpoints,
                                                 self.interpolation))

        # Determine total number of (E,p) pairs and create array.
        # Rows of eout: outgoing energy, PDF, CDF, mu interpolation flag
        # (0=discrete, 1=histogram, 2=lin-lin), offset into the 'mu' dataset.
        n_tuple = sum(len(d.x) for d in self.energy_out)
        eout = np.empty((5, n_tuple))

        # Make sure all mu data is tabular
        mu_tabular = []
        for i, mu_i in enumerate(self.mu):
            mu_tabular.append([mu_ij if isinstance(mu_ij, (Tabular, Discrete)) else
                               mu_ij.to_tabular() for mu_ij in mu_i])

        # Determine total number of (mu,p) points and create array
        n_tuple = sum(sum(len(mu_ij.x) for mu_ij in mu_i)
                      for mu_i in mu_tabular)
        mu = np.empty((3, n_tuple))

        # Create array for offsets
        offsets = np.empty(len(self.energy_out), dtype=int)
        interpolation = np.empty(len(self.energy_out), dtype=int)
        n_discrete_lines = np.empty(len(self.energy_out), dtype=int)
        offset_e = 0
        offset_mu = 0

        # Populate offsets and eout array
        for i, d in enumerate(self.energy_out):
            n = len(d)
            offsets[i] = offset_e

            if isinstance(d, Mixture):
                # Discrete lines are stored first, then the continuous part.
                discrete, continuous = d.distribution
                n_discrete_lines[i] = m = len(discrete)
                interpolation[i] = 1 if continuous.interpolation == 'histogram' else 2
                eout[0, offset_e:offset_e+m] = discrete.x
                eout[1, offset_e:offset_e+m] = discrete.p
                eout[2, offset_e:offset_e+m] = discrete.c
                eout[0, offset_e+m:offset_e+n] = continuous.x
                eout[1, offset_e+m:offset_e+n] = continuous.p
                eout[2, offset_e+m:offset_e+n] = continuous.c
            else:
                if isinstance(d, Tabular):
                    n_discrete_lines[i] = 0
                    interpolation[i] = 1 if d.interpolation == 'histogram' else 2
                elif isinstance(d, Discrete):
                    n_discrete_lines[i] = n
                    interpolation[i] = 1
                eout[0, offset_e:offset_e+n] = d.x
                eout[1, offset_e:offset_e+n] = d.p
                eout[2, offset_e:offset_e+n] = d.c

            # Flatten the cosine distributions for this incoming energy into
            # the shared mu array, recording per-pair flag and offset.
            for j, mu_ij in enumerate(mu_tabular[i]):
                if isinstance(mu_ij, Discrete):
                    eout[3, offset_e+j] = 0
                else:
                    eout[3, offset_e+j] = 1 if mu_ij.interpolation == 'histogram' else 2
                eout[4, offset_e+j] = offset_mu
                n_mu = len(mu_ij)
                mu[0, offset_mu:offset_mu+n_mu] = mu_ij.x
                mu[1, offset_mu:offset_mu+n_mu] = mu_ij.p
                mu[2, offset_mu:offset_mu+n_mu] = mu_ij.c
                offset_mu += n_mu

            offset_e += n

        # Create dataset for outgoing energy distributions
        dset = group.create_dataset('energy_out', data=eout)

        # Write interpolation on outgoing energy as attribute
        dset.attrs['offsets'] = offsets
        dset.attrs['interpolation'] = interpolation
        dset.attrs['n_discrete_lines'] = n_discrete_lines

        # Create dataset for outgoing angle distributions
        group.create_dataset('mu', data=mu)

    @classmethod
    def from_hdf5(cls, group):
        """Generate correlated angle-energy distribution from HDF5 data

        Parameters
        ----------
        group : h5py.Group
            HDF5 group to read from

        Returns
        -------
        openmc.data.CorrelatedAngleEnergy
            Correlated angle-energy distribution

        """
        # Layout mirrors what to_hdf5 writes: an 'energy' dataset with
        # interpolation metadata, a flattened 'energy_out' array and a
        # flattened 'mu' array addressed by per-pair offsets.
        interp_data = group['energy'].attrs['interpolation']
        energy_breakpoints = interp_data[0, :]
        energy_interpolation = interp_data[1, :]
        energy = group['energy'][()]

        offsets = group['energy_out'].attrs['offsets']
        interpolation = group['energy_out'].attrs['interpolation']
        n_discrete_lines = group['energy_out'].attrs['n_discrete_lines']
        dset_eout = group['energy_out'][()]
        energy_out = []

        dset_mu = group['mu'][()]
        mu = []

        n_energy = len(energy)
        for i in range(n_energy):
            # Determine length of outgoing energy distribution and number of
            # discrete lines
            offset_e = offsets[i]
            if i < n_energy - 1:
                n = offsets[i+1] - offset_e
            else:
                n = dset_eout.shape[1] - offset_e
            m = n_discrete_lines[i]

            # Create discrete distribution if lines are present
            if m > 0:
                x = dset_eout[0, offset_e:offset_e+m]
                p = dset_eout[1, offset_e:offset_e+m]
                eout_discrete = Discrete(x, p)
                eout_discrete.c = dset_eout[2, offset_e:offset_e+m]
                # Last CDF value of the discrete block is the total discrete
                # probability, used as the mixture weight below.
                p_discrete = eout_discrete.c[-1]

            # Create continuous distribution
            if m < n:
                interp = INTERPOLATION_SCHEME[interpolation[i]]
                x = dset_eout[0, offset_e+m:offset_e+n]
                p = dset_eout[1, offset_e+m:offset_e+n]
                eout_continuous = Tabular(x, p, interp, ignore_negative=True)
                eout_continuous.c = dset_eout[2, offset_e+m:offset_e+n]

            # If both continuous and discrete are present, create a mixture
            # distribution
            if m == 0:
                eout_i = eout_continuous
            elif m == n:
                eout_i = eout_discrete
            else:
                eout_i = Mixture([p_discrete, 1. - p_discrete],
                                 [eout_discrete, eout_continuous])

            # Read angular distributions
            mu_i = []
            for j in range(n):
                # Determine interpolation scheme
                interp_code = int(dset_eout[3, offsets[i] + j])

                # Determine offset and length
                offset_mu = int(dset_eout[4, offsets[i] + j])
                if offsets[i] + j < dset_eout.shape[1] - 1:
                    n_mu = int(dset_eout[4, offsets[i] + j + 1]) - offset_mu
                else:
                    n_mu = dset_mu.shape[1] - offset_mu

                # Get data
                x = dset_mu[0, offset_mu:offset_mu+n_mu]
                p = dset_mu[1, offset_mu:offset_mu+n_mu]
                c = dset_mu[2, offset_mu:offset_mu+n_mu]
                if interp_code == 0:
                    mu_ij = Discrete(x, p)
                else:
                    mu_ij = Tabular(x, p, INTERPOLATION_SCHEME[interp_code],
                                    ignore_negative=True)
                mu_ij.c = c

                mu_i.append(mu_ij)
                # NOTE(review): offset_mu is re-read from dset_eout at the top
                # of each iteration, so this increment appears to have no
                # effect -- confirm before removing.
                offset_mu += n_mu

            energy_out.append(eout_i)
            mu.append(mu_i)

        return cls(energy_breakpoints, energy_interpolation,
                   energy, energy_out, mu)

    @classmethod
    def from_ace(cls, ace, idx, ldis):
        """Generate correlated angle-energy distribution from ACE data

        Parameters
        ----------
        ace : openmc.data.ace.Table
            ACE table to read from
        idx : int
            Index in XSS array of the start of the energy distribution data
            (LDIS + LOCC - 1)
        ldis : int
            Index in XSS array of the start of the energy distribution block
            (e.g. JXS[11])

        Returns
        -------
        openmc.data.CorrelatedAngleEnergy
            Correlated angle-energy distribution

        """
        # Read number of interpolation regions and incoming energies
        n_regions = int(ace.xss[idx])
        n_energy_in = int(ace.xss[idx + 1 + 2*n_regions])

        # Get interpolation information
        idx += 1
        if n_regions > 0:
            breakpoints = ace.xss[idx:idx + n_regions].astype(int)
            interpolation = ace.xss[idx + n_regions:idx + 2*n_regions].astype(int)
        else:
            # No explicit regions: default to linear-linear over the table.
            breakpoints = np.array([n_energy_in])
            interpolation = np.array([2])

        # Incoming energies at which distributions exist
        idx += 2*n_regions + 1
        energy = ace.xss[idx:idx + n_energy_in]*EV_PER_MEV

        # Location of distributions
        idx += n_energy_in
        loc_dist = ace.xss[idx:idx + n_energy_in].astype(int)

        # Initialize list of distributions
        energy_out = []
        mu = []

        # Read each outgoing energy distribution
        for i in range(n_energy_in):
            idx = ldis + loc_dist[i] - 1

            # intt = interpolation scheme (1=hist, 2=lin-lin)
            INTTp = int(ace.xss[idx])
            intt = INTTp % 10
            n_discrete_lines = (INTTp - intt)//10
            if intt not in (1, 2):
                warn("Interpolation scheme for continuous tabular distribution "
                     "is not histogram or linear-linear.")
                intt = 2

            # Secondary energy distribution
            n_energy_out = int(ace.xss[idx + 1])
            data = ace.xss[idx + 2:idx + 2 + 4*n_energy_out].copy()
            data.shape = (4, n_energy_out)
            data[0,:] *= EV_PER_MEV

            # Create continuous distribution
            eout_continuous = Tabular(data[0][n_discrete_lines:],
                                      data[1][n_discrete_lines:]/EV_PER_MEV,
                                      INTERPOLATION_SCHEME[intt],
                                      ignore_negative=True)
            eout_continuous.c = data[2][n_discrete_lines:]
            if np.any(data[1][n_discrete_lines:] < 0.0):
                warn("Correlated angle-energy distribution has negative "
                     "probabilities.")

            # If discrete lines are present, create a mixture distribution
            if n_discrete_lines > 0:
                eout_discrete = Discrete(data[0][:n_discrete_lines],
                                         data[1][:n_discrete_lines])
                eout_discrete.c = data[2][:n_discrete_lines]
                if n_discrete_lines == n_energy_out:
                    eout_i = eout_discrete
                else:
                    p_discrete = min(sum(eout_discrete.p), 1.0)
                    eout_i = Mixture([p_discrete, 1. - p_discrete],
                                     [eout_discrete, eout_continuous])
            else:
                eout_i = eout_continuous
            energy_out.append(eout_i)

            # Fourth row holds the locator of each angular distribution.
            lc = data[3].astype(int)

            # Secondary angular distributions
            mu_i = []
            for j in range(n_energy_out):
                if lc[j] > 0:
                    idx = ldis + abs(lc[j]) - 1

                    intt = int(ace.xss[idx])
                    n_cosine = int(ace.xss[idx + 1])
                    data = ace.xss[idx + 2:idx + 2 + 3*n_cosine]
                    data.shape = (3, n_cosine)

                    mu_ij = Tabular(data[0], data[1], INTERPOLATION_SCHEME[intt])
                    mu_ij.c = data[2]
                else:
                    # Isotropic distribution
                    mu_ij = Uniform(-1., 1.)

                mu_i.append(mu_ij)

            # Add cosine distributions for this incoming energy to list
            mu.append(mu_i)

        return cls(breakpoints, interpolation, energy, energy_out, mu)

    @classmethod
    def from_endf(cls, file_obj):
        """Generate correlated angle-energy distribution from an ENDF evaluation

        Parameters
        ----------
        file_obj : file-like object
            ENDF file positioned at the start of a section for a correlated
            angle-energy distribution

        Returns
        -------
        openmc.data.CorrelatedAngleEnergy
            Correlated angle-energy distribution

        """
        params, tab2 = get_tab2_record(file_obj)
        lep = params[3]
        ne = params[5]
        energy = np.zeros(ne)
        n_discrete_energies = np.zeros(ne, dtype=int)
        energy_out = []
        mu = []
        for i in range(ne):
            items, values = get_list_record(file_obj)
            energy[i] = items[1]
            n_discrete_energies[i] = items[2]
            # TODO: separate out discrete lines
            n_angle = items[3]
            n_energy_out = items[5]
            values = np.asarray(values)
            values.shape = (n_energy_out, n_angle + 2)

            # Outgoing energy distribution at the i-th incoming energy
            eout_i = values[:,0]
            eout_p_i = values[:,1]
            energy_out_i = Tabular(eout_i, eout_p_i, INTERPOLATION_SCHEME[lep],
                                   ignore_negative=True)
            energy_out.append(energy_out_i)

            # Legendre coefficients used for angular distributions
            mu_i = []
            for j in range(n_energy_out):
                mu_i.append(Legendre(values[j,1:]))
            mu.append(mu_i)

        return cls(tab2.breakpoints, tab2.interpolation, energy,
                   energy_out, mu)
| {
"content_hash": "7309991aed09f77e7943b948ae1a7dec",
"timestamp": "",
"source": "github",
"line_count": 460,
"max_line_length": 88,
"avg_line_length": 35.93260869565217,
"alnum_prop": 0.537237582430879,
"repo_name": "wbinventor/openmc",
"id": "b760f2e2fdae8e8bf0d27c9d140c80ea8c4bc397",
"size": "16529",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "openmc/data/correlated.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9675"
},
{
"name": "C++",
"bytes": "1289928"
},
{
"name": "CMake",
"bytes": "11264"
},
{
"name": "Dockerfile",
"bytes": "1427"
},
{
"name": "Python",
"bytes": "2653785"
},
{
"name": "Shell",
"bytes": "2519"
}
],
"symlink_target": ""
} |
from OpenstackConnection import *
from TestConnection import *
from Exceptions import *
| {
"content_hash": "fdc0c8f416662240e7b4a6a5d045bf6c",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 33,
"avg_line_length": 29.333333333333332,
"alnum_prop": 0.8295454545454546,
"repo_name": "LAL/openstack-lease-it",
"id": "be8b218f5d11c77428b0b78535c96aa841ba9c75",
"size": "133",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "openstack_lease_it/lease_it/backend/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "211313"
},
{
"name": "HTML",
"bytes": "9967"
},
{
"name": "JavaScript",
"bytes": "357543"
},
{
"name": "Python",
"bytes": "57792"
},
{
"name": "Shell",
"bytes": "101"
}
],
"symlink_target": ""
} |
import configuration
import logging
from google.appengine.api import users, memcache
from google.appengine.ext import db, ereporter
from google.appengine.ext.webapp.util import run_wsgi_app, login_required
from tornado.wsgi import WSGIApplication
from utils import BaseRequestHandler, SessionRequestHandler, send_mail_once, STATIC_PAGE_CACHE_TIMEOUT
from models import Profile, User
from countries import COUNTRY_ISO_ALPHA_TABLE, COUNTRIES_LIST
####################### Import from version 1 ########################
from models import BLOG_YEAR_LIST, MONTH_LIST,User,Person,PersonPhone, MILS_YEAR_LIST, GENDER_CHOICES, T_SHIRT_SIZES_TUPLE_MAP
######################################################################
try:
import json
except ImportError:
from django.utils import simplejson as json
# Module-level logging setup: verbose DEBUG output, plus App Engine's
# ereporter handler so logged exceptions are captured for later review.
logging.basicConfig(level=logging.DEBUG)
ereporter.register_logger()
class UsersHandler(BaseRequestHandler):
    """Admin dashboard listing every user with moderation controls.

    Renders ``adminusers.html`` with three result sets (all, approved,
    deleted users) plus their counts and the lookup tables the edit form
    needs (years, genders, t-shirt sizes).
    """

    def get(self):
        # NOTE(review): removed the dead sample-entity construction that was
        # here (User/Person objects built but whose put() calls were all
        # commented out) — it persisted nothing and obscured the handler.

        # Getting total, approved and deleted users.
        total_users = User.all()
        approved_users = db.GqlQuery(
            "SELECT * FROM User WHERE is_active=True AND is_deleted = False")
        deleted_users = db.GqlQuery(
            "SELECT * FROM User WHERE is_active=False AND is_deleted = True")
        self.render('adminusers.html',
            user_count=total_users.count(),
            approved_user_count=approved_users.count(),
            deleted_user_count=deleted_users.count(),
            total_user=total_users,
            approved_user=approved_users,
            deleted_user=deleted_users,
            mils_year_list=MILS_YEAR_LIST,
            gender_choices=GENDER_CHOICES,
            t_shirt_sizes=T_SHIRT_SIZES_TUPLE_MAP,
            page_name='users',
            login_url='/login',
        )
class ApproveUsersHandler(BaseRequestHandler):
    """Marks the user identified by *key* as active, then returns to /admin."""

    def get(self, key):
        # Fetch the entity, flip the activation flag, and persist it.
        account = User.get(key)
        account.is_active = True
        account.put()
        self.redirect('/admin')
class UnapproveUsersHandler(BaseRequestHandler):
    """Clears the active flag on the user identified by *key*."""

    def get(self, key):
        # Fetch the entity, clear the activation flag, and persist it.
        account = User.get(key)
        account.is_active = False
        account.put()
        self.redirect('/admin')
class DeleteUsersHandler(BaseRequestHandler):
    """Soft-deletes the user identified by *key* (sets is_deleted)."""

    def get(self, key):
        # Soft delete: the entity is flagged, not removed from the datastore.
        account = User.get(key)
        account.is_deleted = True
        account.put()
        self.redirect('/admin')
class UndeleteUsersHandler(BaseRequestHandler):
    """Restores a soft-deleted user (clears is_deleted)."""

    def get(self, key):
        # Undo a soft delete by clearing the flag and saving.
        account = User.get(key)
        account.is_deleted = False
        account.put()
        self.redirect('/admin')
class StarUsersHandler(BaseRequestHandler):
    """Stars the user identified by *key*."""

    def get(self, key):
        # Fetch, set the star flag, persist, then bounce to the dashboard.
        account = User.get(key)
        account.is_starred = True
        account.put()
        self.redirect('/admin')
class UnstarUsersHandler(BaseRequestHandler):
    """Removes the star from the user identified by *key*."""

    def get(self, key):
        # Fetch, clear the star flag, persist, then bounce to the dashboard.
        account = User.get(key)
        account.is_starred = False
        account.put()
        self.redirect('/admin')
class EditUsersHandler(BaseRequestHandler):
    """Applies the admin edit form to a User and its associated Person.

    Reads every field from the POSTed form, updates the user's contact
    details, the person's profile fields, and any phone numbers keyed by
    their datastore key, then saves everything and redirects to /admin.
    """

    def post(self, key):
        user = User.get(key)
        # Each user has a singleton Person profile; edit that entity too.
        person = Person.get(user.people_singleton[0].key())
        user.email = self.get_argument('email')
        user.corporate_email = self.get_argument('corporate_email')
        person.first_name = self.get_argument('first_name')
        person.last_name = self.get_argument('last_name')
        person.designation = self.get_argument('designation')
        person.company = self.get_argument('company')
        person.graduation_year = int(self.get_argument('graduation_year'))
        person.gender = self.get_argument('gender')
        person.t_shirt_size = self.get_argument('t_shirt_size')
        # Bug fix: previously the flag was only ever set to True when the
        # form said 'yes'; submitting 'no' left a stale True value behind.
        person.is_student = self.get_argument('is_student') == 'yes'
        phone_count = int(self.get_argument('phone_count'))
        if phone_count:
            # Phone inputs are named after each PersonPhone's datastore key.
            phones = []
            for phone in person.phones:
                phone.number = self.get_argument(str(phone.key()))
                phones.append(phone)
            db.put(phones)
        user.put()
        person.put()
        self.redirect('/admin')
class ArticlesHandler(BaseRequestHandler):
    """Renders the admin articles page."""

    def get(self):
        # Build the template context first, then render in a single call.
        context = dict(
            page_name='articles',
            page_description='Add, remove, update articles and publish them.',
            user_count=User.all().count(),
            approved_user_count=User.get_approved_user_count(),
            deleted_user_count=User.get_deleted_user_count(),
        )
        self.render('adminusers.html', **context)
class BooksHandler(BaseRequestHandler):
    """Renders the admin books page."""

    def get(self):
        # Build the template context first, then render in a single call.
        context = dict(
            page_name='books',
            page_description='Add or remove books.',
            user_count=User.all().count(),
            approved_user_count=User.get_approved_user_count(),
            deleted_user_count=User.get_deleted_user_count(),
        )
        self.render('adminusers.html', **context)
class AnnouncementsHandler(BaseRequestHandler):
    """Renders the admin announcements page."""

    def get(self):
        # Build the template context first, then render in a single call.
        context = dict(
            page_name='announcements',
            page_description='Create new announcements to send to everyone in the list of users.',
            user_count=User.all().count(),
            approved_user_count=User.get_approved_user_count(),
            deleted_user_count=User.get_deleted_user_count(),
        )
        self.render('adminusers.html', **context)
class MailHandler(BaseRequestHandler):
    """Renders the admin mail page."""

    def get(self):
        # Consistency fix: every sibling handler (articles/books/
        # announcements) passes user_count to this same template; it was
        # missing here, leaving the template without that value.
        self.render('adminusers.html',
            page_name='mails',
            page_description='Send mail to people',
            user_count=User.all().count(),
            approved_user_count=User.get_approved_user_count(),
            deleted_user_count=User.get_deleted_user_count(),
        )
class LogoutHandler(BaseRequestHandler):
    """Signs the current Google user out and sends them back to /admin."""

    def get(self):
        # Only bounce through the logout URL when someone is signed in;
        # anonymous visitors fall through and get the default response.
        if users.get_current_user() is not None:
            self.redirect(users.create_logout_url('/admin'))
# Tornado/WSGI application settings.  XSRF protection is currently disabled.
settings = {
    'debug': configuration.DEBUG,
    #'xsrf_cookies': True,
    'template_path': configuration.TEMPLATE_PATH,
}

# URL routing table: dashboard pages plus one endpoint per moderation
# action, each capturing the target user's datastore key.
urls = (
    (r'/admin/?', UsersHandler),
    (r'/admin/users/?', UsersHandler),
    (r'/admin/logout/?', LogoutHandler),
    (r'/admin/approve/(.*)', ApproveUsersHandler),
    (r'/admin/unapprove/(.*)', UnapproveUsersHandler),
    (r'/admin/delete/(.*)', DeleteUsersHandler),
    (r'/admin/undelete/(.*)', UndeleteUsersHandler),
    (r'/admin/star/(.*)', StarUsersHandler),
    (r'/admin/unstar/(.*)', UnstarUsersHandler),
    (r'/admin/edit/(.*)', EditUsersHandler),
    (r'/admin/articles/?', ArticlesHandler),
    (r'/admin/books/?', BooksHandler),
    (r'/admin/announcements/?', AnnouncementsHandler),
    (r'/admin/mails/?', MailHandler),
)

# The WSGI application object served by App Engine.
application = WSGIApplication(urls, **settings)
def main():
    """Entry point: run the WSGI app behind a datastore-caching shim."""
    from gaefy.db.datastore_cache import DatastoreCachingShim
    # Install a caching layer in front of the datastore API, serve the
    # request, then remove it again so repeated invocations start clean.
    DatastoreCachingShim.Install()
    run_wsgi_app(application)
    DatastoreCachingShim.Uninstall()


if __name__ == '__main__':
    main()
| {
"content_hash": "79cd323a50e704f24942d801fc82fdc3",
"timestamp": "",
"source": "github",
"line_count": 226,
"max_line_length": 126,
"avg_line_length": 35.02212389380531,
"alnum_prop": 0.5677826910928616,
"repo_name": "yesudeep/old-milsalumni",
"id": "921b872bedf9fb4730d48ab4970ce2b40534dcf6",
"size": "9098",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "233390"
},
{
"name": "Python",
"bytes": "1501280"
},
{
"name": "Ruby",
"bytes": "293"
}
],
"symlink_target": ""
} |
import sys
from passslot import __version__
from setuptools import setup
# To install the passslot-python-sdk library, open a Terminal shell, then run this
# file by typing:
#
# python setup.py install
#
# You need to have the setuptools module installed. Try reading the setuptools
# documentation: http://pypi.python.org/pypi/setuptools

with open('README.md') as f:
    readme = f.read()

# Renamed from ``license`` so the built-in ``license()`` helper is not
# shadowed at module scope.
with open('LICENSE.txt') as f:
    license_text = f.read()

install_requires = ['requests >= 2.1']

if sys.version_info < (3, 2):
    # This is required for SNI support in python < 3.2
    install_requires.append('pyOpenSSL >= 0.13')
    install_requires.append('ndg-httpsclient')
    install_requires.append('pyasn1')

setup(
    name="passslot",
    version=__version__,
    description="PassSlot Python SDK",
    author="PassSlot",
    author_email="dev@passslot.com",
    url="http://github.com/passslot/passslot-python-sdk/",
    py_modules=['passslot'],
    keywords=["passslot", "passbook"],
    install_requires=install_requires,
    license=license_text,
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: Communications",
    ],
    long_description=readme,
)
"content_hash": "696dd553eab1ce4fbc6e673830f87088",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 82,
"avg_line_length": 32.86274509803921,
"alnum_prop": 0.6390214797136038,
"repo_name": "passslot/passslot-python-sdk",
"id": "d0ceaf3070adf4ae0ab4331f979234cb9d54cab9",
"size": "1676",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "9110"
}
],
"symlink_target": ""
} |
import unittest
import pathlib
import json
from mycroft.dialog import MustacheDialogRenderer
class DialogTest(unittest.TestCase):
    """Tests for MustacheDialogRenderer template loading and rendering."""

    def setUp(self):
        self.stache = MustacheDialogRenderer()
        self.topdir = pathlib.Path(__file__).parent

    @staticmethod
    def _load_context(file):
        """Return the JSON context dict stored next to *file* (.context.json)."""
        # Fix: the context file was previously opened via a chained
        # .open(...) whose handle was never closed; the with-block closes it.
        with file.with_suffix('.context.json').open('r', encoding='utf-8') as f:
            return json.load(f)

    def test_general_dialog(self):
        """ Test the loading and filling of valid simple mustache dialogs """
        template_path = self.topdir.joinpath('./mustache_templates')
        for file in template_path.iterdir():
            if file.suffix == '.dialog':
                self.stache.load_template_file(file.name, str(file.absolute()))
                context = self._load_context(file)
                # read_text() opens and closes the handle itself, unlike the
                # old .open(...).read() chain which leaked it.
                self.assertEqual(
                    self.stache.render(file.name, context),
                    file.with_suffix('.result').read_text(encoding='utf-8'))

    def test_unknown_dialog(self):
        """ Test for returned file name literals in case of unkown dialog """
        self.assertEqual(
            self.stache.render("unknown.template"), "unknown template")

    def test_multiple_dialog(self):
        """
        Test the loading and filling of valid mustache dialogs
        where a dialog file contains multiple text versions
        """
        template_path = self.topdir.joinpath('./mustache_templates_multiple')
        for file in template_path.iterdir():
            if file.suffix == '.dialog':
                self.stache.load_template_file(file.name, str(file.absolute()))
                context = self._load_context(file)
                results = [
                    line.strip()
                    for line in file.with_suffix('.result').read_text(
                        encoding='utf-8').splitlines()
                ]
                # Try all lines
                for index, line in enumerate(results):
                    self.assertEqual(
                        self.stache.render(
                            file.name, index=index, context=context),
                        line.strip())
                # Test random index function
                # (bad test because non-deterministic?)
                self.assertIn(
                    self.stache.render(file.name, context=context), results)
if __name__ == "__main__":
unittest.main()
| {
"content_hash": "fb4fb4eec199109d035027189670a875",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 79,
"avg_line_length": 40.49180327868852,
"alnum_prop": 0.5323886639676113,
"repo_name": "linuxipho/mycroft-core",
"id": "3cd9ab5f7d58277d7795550892f9d32ff9b76964",
"size": "3076",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "test/unittests/dialog/test_dialog.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1316535"
},
{
"name": "Shell",
"bytes": "69724"
}
],
"symlink_target": ""
} |
import unittest
import google
import mock
from google.cloud.bigtable import Client
from google.cloud.bigtable.instance import Instance
from mock import PropertyMock
from airflow import AirflowException
from airflow.providers.google.cloud.hooks.bigtable import BigtableHook
from tests.providers.google.cloud.utils.base_gcp_mock import (
GCP_PROJECT_ID_HOOK_UNIT_TEST, mock_base_gcp_hook_default_project_id,
mock_base_gcp_hook_no_default_project_id,
)
CBT_INSTANCE = 'instance'
CBT_CLUSTER = 'cluster'
CBT_ZONE = 'zone'
CBT_TABLE = 'table'
class TestBigtableHookNoDefaultProjectId(unittest.TestCase):
    """BigtableHook tests for a connection with NO default GCP project.

    Every operation must either receive an explicit ``project_id`` or raise
    an AirflowException; the Bigtable client itself is always mocked.
    """

    def setUp(self):
        # Patch the base hook's __init__ so no real credentials are resolved.
        with mock.patch('airflow.providers.google.cloud.hooks.base.CloudBaseHook.__init__',
                        new=mock_base_gcp_hook_no_default_project_id):
            self.bigtable_hook_no_default_project_id = BigtableHook(gcp_conn_id='test')

    @mock.patch(
        "airflow.providers.google.cloud.hooks.bigtable.BigtableHook.client_info",
        new_callable=mock.PropertyMock
    )
    @mock.patch("airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_credentials")
    @mock.patch("airflow.providers.google.cloud.hooks.bigtable.Client")
    def test_bigtable_client_creation(self, mock_client, mock_get_creds, mock_client_info):
        """_get_client builds an admin Client and caches it on the hook."""
        result = self.bigtable_hook_no_default_project_id._get_client(GCP_PROJECT_ID_HOOK_UNIT_TEST)
        mock_client.assert_called_once_with(
            project=GCP_PROJECT_ID_HOOK_UNIT_TEST,
            credentials=mock_get_creds.return_value,
            client_info=mock_client_info.return_value,
            admin=True
        )
        self.assertEqual(mock_client.return_value, result)
        self.assertEqual(self.bigtable_hook_no_default_project_id._client, result)

    @mock.patch(
        'airflow.providers.google.cloud.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_get_instance_missing_project_id(self, get_client, mock_project_id):
        """get_instance without a project id raises before touching the client."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        instance_exists_method.return_value = True
        with self.assertRaises(AirflowException) as cm:
            self.bigtable_hook_no_default_project_id.get_instance(instance_id=CBT_INSTANCE)
        instance_exists_method.assert_not_called()
        instance_method.assert_not_called()
        err = cm.exception
        self.assertIn("The project id must be passed", str(err))

    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_get_instance_overridden_project_id(self, get_client):
        """get_instance succeeds when the project id is passed explicitly."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        instance_exists_method.return_value = True
        res = self.bigtable_hook_no_default_project_id.get_instance(
            project_id=GCP_PROJECT_ID_HOOK_UNIT_TEST,
            instance_id=CBT_INSTANCE)
        instance_method.assert_called_once_with('instance')
        instance_exists_method.assert_called_once_with()
        get_client.assert_called_once_with(project_id='example-project')
        self.assertIsNotNone(res)

    @mock.patch(
        'airflow.providers.google.cloud.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_delete_instance_missing_project_id(self, get_client, mock_project_id):
        """delete_instance without a project id raises and deletes nothing."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        delete_method = instance_method.return_value.delete
        instance_exists_method.return_value = True
        with self.assertRaises(AirflowException) as cm:
            self.bigtable_hook_no_default_project_id.delete_instance(instance_id=CBT_INSTANCE)
        instance_exists_method.assert_not_called()
        instance_method.assert_not_called()
        delete_method.assert_not_called()
        err = cm.exception
        self.assertIn("The project id must be passed", str(err))

    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_delete_instance_overridden_project_id(self, get_client):
        """delete_instance works when the project id is passed explicitly."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        instance_exists_method.return_value = True
        delete_method = instance_method.return_value.delete
        res = self.bigtable_hook_no_default_project_id.delete_instance(
            project_id=GCP_PROJECT_ID_HOOK_UNIT_TEST, instance_id=CBT_INSTANCE)
        instance_method.assert_called_once_with('instance')
        instance_exists_method.assert_called_once_with()
        delete_method.assert_called_once_with()
        get_client.assert_called_once_with(project_id='example-project')
        self.assertIsNone(res)

    @mock.patch(
        'airflow.providers.google.cloud.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch('google.cloud.bigtable.instance.Instance.create')
    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_create_instance_missing_project_id(self, get_client, instance_create, mock_project_id):
        """create_instance without a project id raises and creates nothing."""
        operation = mock.Mock()
        operation.result_return_value = Instance(instance_id=CBT_INSTANCE, client=get_client)
        instance_create.return_value = operation
        with self.assertRaises(AirflowException) as cm:
            self.bigtable_hook_no_default_project_id.create_instance(
                instance_id=CBT_INSTANCE,
                main_cluster_id=CBT_CLUSTER,
                main_cluster_zone=CBT_ZONE)
        get_client.assert_not_called()
        instance_create.assert_not_called()
        err = cm.exception
        self.assertIn("The project id must be passed", str(err))

    @mock.patch('google.cloud.bigtable.instance.Instance.create')
    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_create_instance_overridden_project_id(self, get_client, instance_create):
        """create_instance works when the project id is passed explicitly."""
        operation = mock.Mock()
        operation.result_return_value = Instance(instance_id=CBT_INSTANCE, client=get_client)
        instance_create.return_value = operation
        res = self.bigtable_hook_no_default_project_id.create_instance(
            project_id=GCP_PROJECT_ID_HOOK_UNIT_TEST,
            instance_id=CBT_INSTANCE,
            main_cluster_id=CBT_CLUSTER,
            main_cluster_zone=CBT_ZONE)
        get_client.assert_called_once_with(project_id='example-project')
        instance_create.assert_called_once_with(clusters=mock.ANY)
        self.assertEqual(res.instance_id, 'instance')

    @mock.patch(
        'airflow.providers.google.cloud.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=None
    )
    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_delete_table_missing_project_id(self, get_client, mock_project_id):
        """delete_table without a project id raises and deletes nothing."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        table_delete_method = instance_method.return_value.table.return_value.delete
        instance_exists_method.return_value = True
        with self.assertRaises(AirflowException) as cm:
            self.bigtable_hook_no_default_project_id.delete_table(
                instance_id=CBT_INSTANCE,
                table_id=CBT_TABLE)
        get_client.assert_not_called()
        instance_exists_method.assert_not_called()
        table_delete_method.assert_not_called()
        err = cm.exception
        self.assertIn("The project id must be passed", str(err))

    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_delete_table_overridden_project_id(self, get_client):
        """delete_table works when the project id is passed explicitly."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        table_delete_method = instance_method.return_value.table.return_value.delete
        instance_exists_method.return_value = True
        self.bigtable_hook_no_default_project_id.delete_table(
            project_id=GCP_PROJECT_ID_HOOK_UNIT_TEST,
            instance_id=CBT_INSTANCE,
            table_id=CBT_TABLE)
        get_client.assert_called_once_with(project_id='example-project')
        instance_exists_method.assert_called_once_with()
        table_delete_method.assert_called_once_with()
class TestBigtableHookDefaultProjectId(unittest.TestCase):
    """BigtableHook tests for a connection WITH a default GCP project.

    Operations fall back to the connection's project when none is passed,
    and an explicit ``project_id`` argument overrides that default.
    """

    def setUp(self):
        # Patch the base hook's __init__ so no real credentials are resolved.
        with mock.patch('airflow.providers.google.cloud.hooks.base.CloudBaseHook.__init__',
                        new=mock_base_gcp_hook_default_project_id):
            self.bigtable_hook_default_project_id = BigtableHook(gcp_conn_id='test')

    @mock.patch(
        "airflow.providers.google.cloud.hooks.bigtable.BigtableHook.client_info",
        new_callable=mock.PropertyMock
    )
    @mock.patch("airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_credentials")
    @mock.patch("airflow.providers.google.cloud.hooks.bigtable.Client")
    def test_bigtable_client_creation(self, mock_client, mock_get_creds, mock_client_info):
        """_get_client builds an admin Client and caches it on the hook."""
        result = self.bigtable_hook_default_project_id._get_client(GCP_PROJECT_ID_HOOK_UNIT_TEST)
        mock_client.assert_called_once_with(
            project=GCP_PROJECT_ID_HOOK_UNIT_TEST,
            credentials=mock_get_creds.return_value,
            client_info=mock_client_info.return_value,
            admin=True
        )
        self.assertEqual(mock_client.return_value, result)
        self.assertEqual(self.bigtable_hook_default_project_id._client, result)

    @mock.patch(
        'airflow.providers.google.cloud.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_get_instance(self, get_client, mock_project_id):
        """get_instance uses the connection's default project."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        instance_exists_method.return_value = True
        res = self.bigtable_hook_default_project_id.get_instance(
            instance_id=CBT_INSTANCE)
        instance_method.assert_called_once_with('instance')
        instance_exists_method.assert_called_once_with()
        get_client.assert_called_once_with(project_id='example-project')
        self.assertIsNotNone(res)

    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_get_instance_overridden_project_id(self, get_client):
        """An explicit project_id overrides the connection default."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        instance_exists_method.return_value = True
        res = self.bigtable_hook_default_project_id.get_instance(
            project_id='new-project',
            instance_id=CBT_INSTANCE)
        instance_method.assert_called_once_with('instance')
        instance_exists_method.assert_called_once_with()
        get_client.assert_called_once_with(project_id='new-project')
        self.assertIsNotNone(res)

    @mock.patch(
        'airflow.providers.google.cloud.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_get_instance_no_instance(self, get_client, mock_project_id):
        """get_instance returns None when the instance does not exist."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        instance_exists_method.return_value = False
        res = self.bigtable_hook_default_project_id.get_instance(
            instance_id=CBT_INSTANCE)
        instance_method.assert_called_once_with('instance')
        instance_exists_method.assert_called_once_with()
        get_client.assert_called_once_with(project_id='example-project')
        self.assertIsNone(res)

    @mock.patch(
        'airflow.providers.google.cloud.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_delete_instance(self, get_client, mock_project_id):
        """delete_instance deletes an existing instance under the default project."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        instance_exists_method.return_value = True
        delete_method = instance_method.return_value.delete
        res = self.bigtable_hook_default_project_id.delete_instance(
            instance_id=CBT_INSTANCE)
        instance_method.assert_called_once_with('instance')
        instance_exists_method.assert_called_once_with()
        delete_method.assert_called_once_with()
        get_client.assert_called_once_with(project_id='example-project')
        self.assertIsNone(res)

    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_delete_instance_overridden_project_id(self, get_client):
        """delete_instance honors an explicitly passed project_id."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        instance_exists_method.return_value = True
        delete_method = instance_method.return_value.delete
        res = self.bigtable_hook_default_project_id.delete_instance(
            project_id='new-project', instance_id=CBT_INSTANCE)
        instance_method.assert_called_once_with('instance')
        instance_exists_method.assert_called_once_with()
        delete_method.assert_called_once_with()
        get_client.assert_called_once_with(project_id='new-project')
        self.assertIsNone(res)

    @mock.patch(
        'airflow.providers.google.cloud.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_delete_instance_no_instance(self, get_client, mock_project_id):
        """delete_instance is a no-op when the instance does not exist."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        instance_exists_method.return_value = False
        delete_method = instance_method.return_value.delete
        self.bigtable_hook_default_project_id.delete_instance(
            instance_id=CBT_INSTANCE)
        instance_method.assert_called_once_with('instance')
        instance_exists_method.assert_called_once_with()
        delete_method.assert_not_called()
        get_client.assert_called_once_with(project_id='example-project')

    @mock.patch(
        'airflow.providers.google.cloud.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch('google.cloud.bigtable.instance.Instance.create')
    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_create_instance(self, get_client, instance_create, mock_project_id):
        """create_instance creates an instance under the default project."""
        operation = mock.Mock()
        operation.result_return_value = Instance(instance_id=CBT_INSTANCE, client=get_client)
        instance_create.return_value = operation
        res = self.bigtable_hook_default_project_id.create_instance(
            instance_id=CBT_INSTANCE,
            main_cluster_id=CBT_CLUSTER,
            main_cluster_zone=CBT_ZONE)
        get_client.assert_called_once_with(project_id='example-project')
        instance_create.assert_called_once_with(clusters=mock.ANY)
        self.assertEqual(res.instance_id, 'instance')

    @mock.patch('google.cloud.bigtable.instance.Instance.create')
    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_create_instance_overridden_project_id(self, get_client, instance_create):
        """create_instance honors an explicitly passed project_id."""
        operation = mock.Mock()
        operation.result_return_value = Instance(instance_id=CBT_INSTANCE, client=get_client)
        instance_create.return_value = operation
        res = self.bigtable_hook_default_project_id.create_instance(
            project_id='new-project',
            instance_id=CBT_INSTANCE,
            main_cluster_id=CBT_CLUSTER,
            main_cluster_zone=CBT_ZONE)
        get_client.assert_called_once_with(project_id='new-project')
        instance_create.assert_called_once_with(clusters=mock.ANY)
        self.assertEqual(res.instance_id, 'instance')

    @mock.patch(
        'airflow.providers.google.cloud.hooks.base.CloudBaseHook.project_id',
        new_callable=PropertyMock,
        return_value=GCP_PROJECT_ID_HOOK_UNIT_TEST
    )
    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_delete_table(self, get_client, mock_project_id):
        """delete_table deletes a table under the default project."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        table_delete_method = instance_method.return_value.table.return_value.delete
        instance_exists_method.return_value = True
        self.bigtable_hook_default_project_id.delete_table(
            instance_id=CBT_INSTANCE,
            table_id=CBT_TABLE)
        get_client.assert_called_once_with(project_id='example-project')
        instance_exists_method.assert_called_once_with()
        table_delete_method.assert_called_once_with()

    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_delete_table_overridden_project_id(self, get_client):
        """delete_table honors an explicitly passed project_id."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        table_delete_method = instance_method.return_value.table.return_value.delete
        instance_exists_method.return_value = True
        self.bigtable_hook_default_project_id.delete_table(
            project_id='new-project',
            instance_id=CBT_INSTANCE,
            table_id=CBT_TABLE)
        get_client.assert_called_once_with(project_id='new-project')
        instance_exists_method.assert_called_once_with()
        table_delete_method.assert_called_once_with()

    @mock.patch('google.cloud.bigtable.table.Table.create')
    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_create_table(self, get_client, create):
        """create_table operates on a passed Instance without a new client."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        instance_exists_method.return_value = True
        client = mock.Mock(Client)
        instance = google.cloud.bigtable.instance.Instance(
            instance_id=CBT_INSTANCE,
            client=client)
        self.bigtable_hook_default_project_id.create_table(
            instance=instance,
            table_id=CBT_TABLE)
        get_client.assert_not_called()
        create.assert_called_once_with([], {})

    @mock.patch('google.cloud.bigtable.cluster.Cluster.update')
    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_update_cluster(self, get_client, update):
        """update_cluster operates on a passed Instance without a new client."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        instance_exists_method.return_value = True
        client = mock.Mock(Client)
        instance = google.cloud.bigtable.instance.Instance(
            instance_id=CBT_INSTANCE,
            client=client)
        self.bigtable_hook_default_project_id.update_cluster(
            instance=instance,
            cluster_id=CBT_CLUSTER,
            nodes=4)
        get_client.assert_not_called()
        update.assert_called_once_with()

    @mock.patch('google.cloud.bigtable.table.Table.list_column_families')
    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_list_column_families(self, get_client, list_column_families):
        """get_column_families_for_table delegates to Table.list_column_families."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        instance_exists_method.return_value = True
        client = mock.Mock(Client)
        get_client.return_value = client
        instance = google.cloud.bigtable.instance.Instance(
            instance_id=CBT_INSTANCE,
            client=client)
        self.bigtable_hook_default_project_id.get_column_families_for_table(
            instance=instance, table_id=CBT_TABLE)
        get_client.assert_not_called()
        list_column_families.assert_called_once_with()

    @mock.patch('google.cloud.bigtable.table.Table.get_cluster_states')
    @mock.patch('airflow.providers.google.cloud.hooks.bigtable.BigtableHook._get_client')
    def test_get_cluster_states(self, get_client, get_cluster_states):
        """get_cluster_states_for_table delegates to Table.get_cluster_states."""
        instance_method = get_client.return_value.instance
        instance_exists_method = instance_method.return_value.exists
        instance_exists_method.return_value = True
        client = mock.Mock(Client)
        instance = google.cloud.bigtable.instance.Instance(
            instance_id=CBT_INSTANCE,
            client=client)
        self.bigtable_hook_default_project_id.get_cluster_states_for_table(
            instance=instance, table_id=CBT_TABLE)
        get_client.assert_not_called()
        get_cluster_states.assert_called_once_with()
| {
"content_hash": "5b4a8091752d8824bc887e1714bba711",
"timestamp": "",
"source": "github",
"line_count": 431,
"max_line_length": 100,
"avg_line_length": 50.54988399071926,
"alnum_prop": 0.6936246385459219,
"repo_name": "mtagle/airflow",
"id": "dc0ba6e7c72eeaec5d15e2f7268ecd6488602303",
"size": "22575",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/providers/google/cloud/hooks/test_bigtable.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13715"
},
{
"name": "Dockerfile",
"bytes": "17280"
},
{
"name": "HTML",
"bytes": "148492"
},
{
"name": "JavaScript",
"bytes": "25360"
},
{
"name": "Jupyter Notebook",
"bytes": "2933"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "10006634"
},
{
"name": "Shell",
"bytes": "217011"
},
{
"name": "TSQL",
"bytes": "879"
}
],
"symlink_target": ""
} |
import os
import sys
if __name__ == "__main__":
    # Standard Django entry point: point Django at this project's settings
    # module, then dispatch to the management command named in sys.argv.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "csinfo.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    execute_from_command_line(sys.argv)
| {
"content_hash": "e74d3d4937cd36200f732dfd7c453ac8",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 77,
"avg_line_length": 37.23809523809524,
"alnum_prop": 0.6202046035805626,
"repo_name": "hcrudolph/ciphersuite.info",
"id": "8b20de284874c40831b6223951ca0d01137f301c",
"size": "804",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "322"
},
{
"name": "HTML",
"bytes": "38689"
},
{
"name": "JavaScript",
"bytes": "397"
},
{
"name": "Python",
"bytes": "78519"
},
{
"name": "SCSS",
"bytes": "245831"
},
{
"name": "Shell",
"bytes": "336"
}
],
"symlink_target": ""
} |
import json
from collections import abc
def load_profiles(strings):
    """Yield json profiles parsed from file names or raw json strings.

    Parameters
    ----------
    strings : [str]
        A list of strings that are file names or json, and represent either a
        single profile or a list of profiles.

    Returns
    -------
    prof_gen : (prof)
        A generator of json profiles.
    """
    for source in strings:
        # Prefer interpreting the string as a file path; if no such file
        # exists, fall back to parsing the string itself as json.
        try:
            with open(source) as handle:
                parsed = json.load(handle)
        except FileNotFoundError:
            parsed = json.loads(source)
        # A mapping is a single profile; anything else is a sequence of them.
        if isinstance(parsed, abc.Mapping):
            yield parsed
        else:
            yield from parsed
| {
"content_hash": "451f4a96e15cf3cc9ed47e6615497627",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 77,
"avg_line_length": 25.838709677419356,
"alnum_prop": 0.5580524344569289,
"repo_name": "yackj/GameAnalysis",
"id": "889c5cf246c6950b96dc330514101be8f4565358",
"size": "801",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gameanalysis/scriptutils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "2002"
},
{
"name": "Python",
"bytes": "456660"
}
],
"symlink_target": ""
} |
import pkg_resources
import argparse
import logging
import textwrap
import os
import sys
import megdc
# Banner used in the CLI --help description; embeds the installed version.
__header__ = textwrap.dedent("""
Megam System megdc-%s
Full documentation can be found at: http://docs.megam.io
""" % megdc.__version__)
def log_flags(args, logger=None):
    """Log every public attribute of *args*, one per line.

    Attribute names starting with an underscore are skipped.  Falls back to
    the module-level LOG when no logger is supplied.
    """
    log = logger or LOG
    log.info('megdc options:')
    for name, value in vars(args).items():
        if not name.startswith('_'):
            log.info(' %-30s: %s' % (name, value))
def install():
    """Placeholder handler for the 'install' sub-command."""
    # Function-call form of print: with a single argument it behaves
    # identically on Python 2, and it also parses on Python 3 (the
    # statement form `print "..."` is a SyntaxError there).
    print("install called")
def get_parser():
    """Build the top-level argparse parser for megdc.

    Sub-commands are discovered dynamically from the 'megdc.cli'
    entry-point group, so installed plugins can register their own
    commands without this module knowing about them.
    """
    parser = argparse.ArgumentParser(
        prog='megdc',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description='Datacenter ready in minutes \n\n%s' % __header__,
    )
    parser.add_argument(
        '--version',
        action='version',
        version='%s' % megdc.__version__,
        help='the current installed version of megdc',
    )
    # create the top-level parser
    subpar = parser.add_subparsers(
        title='commands',
        metavar='COMMAND',
        help='description',
    )
    entry_p = {}
    for ep in pkg_resources.iter_entry_points(group='megdc.cli', name=None):
        # dict.has_key() was removed in Python 3; the `in` membership test
        # is equivalent and works on both Python 2 and 3.
        if ep.dist not in entry_p:
            entry_p[ep.dist] = {}
        # NOTE(review): this stores command names next to dist keys in the
        # same dict -- looks intentional, but worth confirming.
        entry_p.update({ep.name: ep.load()})
        subpar.add_parser(
            ep.name,
            description=ep.load().__doc__,
            help=ep.load().__doc__,
        )
    return parser
#@catches((KeyboardInterrupt, RuntimeError, exc.DeployError,), handle_all=True)
def _main(args=None, namespace=None):
    """Parse command-line arguments and dispatch to the selected command."""
    parser = get_parser()
    # No sub-command on the command line: show usage and stop.
    if len(sys.argv) < 2:
        parser.print_help()
        sys.exit()
    parsed = parser.parse_args(args=args, namespace=namespace)
    return parsed.func(parsed)
def main(args=None, namespace=None):
    """Public entry point: run _main, then quietly close stdout/stderr.

    Closing the standard streams is skipped when the MEGAM_TEST
    environment variable is set, so test harnesses keep their captured
    streams usable.
    """
    try:
        _main(args=args, namespace=namespace)
    finally:
        # This block is crucial to avoid having issues with
        # Python spitting non-sense thread exceptions. We have already
        # handled what we could, so close stderr and stdout.
        if not os.environ.get('MEGAM_TEST'):
            try:
                sys.stdout.close()
            except:
                pass
            try:
                sys.stderr.close()
            except:
                pass
| {
"content_hash": "1759b7654d636a00630270695535f1dd",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 79,
"avg_line_length": 23.86868686868687,
"alnum_prop": 0.5670757511637748,
"repo_name": "vijaykanthm28/megdc.py",
"id": "bfee43f4f6757cfaf534c81c0f19b95d0d9d96c8",
"size": "2363",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "megdc/cli.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "6747"
},
{
"name": "Shell",
"bytes": "3680"
}
],
"symlink_target": ""
} |
"""This pipeline is intended to make the classification of MRSI relative
quantification modality features."""
from __future__ import division
import os
import numpy as np
from sklearn.externals import joblib
from sklearn.preprocessing import label_binarize
from sklearn.ensemble import RandomForestClassifier
from protoclass.data_management import GTModality
# NOTE(review): flat Python 2 script (print statements) executed top-to-bottom.
# It loads per-patient MRSI feature arrays, runs leave-one-patient-out
# cross-validation with a random forest, and pickles the per-fold results.
# Define the path where the patients are stored
path_patients = '/data/prostate/experiments'
# Define the path where the features have been extracted
path_features = '/data/prostate/extraction/mp-mri-prostate'
# Define a list of the path where the feature are kept
mrsi_features = ['mrsi-rel-qua']
# Define the extension of each features
ext_features = ['_rq_mrsi.npy']
# Define the path of the ground for the prostate
path_gt = ['GT_inv/prostate', 'GT_inv/pz', 'GT_inv/cg', 'GT_inv/cap']
# Define the label of the ground-truth which will be provided
label_gt = ['prostate', 'pz', 'cg', 'cap']
# Generate the different path to be later treated
path_patients_list_gt = []
# Create the generator
id_patient_list = [name for name in os.listdir(path_patients)
                   if os.path.isdir(os.path.join(path_patients, name))]
# Sort the list of patient
id_patient_list = sorted(id_patient_list)
for id_patient in id_patient_list:
    # Append for the GT data - Note that we need a list of gt path
    path_patients_list_gt.append([os.path.join(path_patients, id_patient, gt)
                                  for gt in path_gt])
# Load all the data once. Splitting into training and testing will be done at
# the cross-validation time
data = []
label = []
for idx_pat in range(len(id_patient_list)):
    print 'Read patient {}'.format(id_patient_list[idx_pat])
    # For each patient we need to load the different feature
    patient_data = []
    for idx_feat in range(len(mrsi_features)):
        # Create the path to the patient file
        filename_feature = (id_patient_list[idx_pat].lower().replace(' ', '_') +
                            ext_features[idx_feat])
        path_data = os.path.join(path_features, mrsi_features[idx_feat],
                                 filename_feature)
        single_feature_data = np.load(path_data)
        # Check if this is only one dimension data
        if len(single_feature_data.shape) == 1:
            single_feature_data = np.atleast_2d(single_feature_data).T
        patient_data.append(single_feature_data)
    # Concatenate the data in a single array
    patient_data = np.concatenate(patient_data, axis=1)
    print 'Read the MRSI data for the current patient ...'
    # Create the corresponding ground-truth
    gt_mod = GTModality()
    gt_mod.read_data_from_path(label_gt,
                               path_patients_list_gt[idx_pat])
    print 'Read the GT data for the current patient ...'
    # Concatenate the training data
    data.append(patient_data)
    # Extract the corresponding ground-truth for the testing data
    # Get the index corresponding to the ground-truth
    roi_prostate = gt_mod.extract_gt_data('prostate', output_type='index')
    # Get the label of the gt only for the prostate ROI
    gt_cap = gt_mod.extract_gt_data('cap', output_type='data')
    label.append(gt_cap[roi_prostate])
    print 'Data and label extracted for the current patient ...'
result_cv = []
# Go for LOPO cross-validation
for idx_lopo_cv in range(len(id_patient_list)):
    # Display some information about the LOPO-CV
    print 'Round #{} of the LOPO-CV'.format(idx_lopo_cv + 1)
    # Get the testing data
    testing_data = data[idx_lopo_cv]
    testing_label = label_binarize(label[idx_lopo_cv], [0, 255])
    print 'Create the testing set ...'
    # Create the training data and label
    training_data = [arr for idx_arr, arr in enumerate(data)
                     if idx_arr != idx_lopo_cv]
    training_label = [arr for idx_arr, arr in enumerate(label)
                      if idx_arr != idx_lopo_cv]
    # Concatenate the data
    training_data = np.vstack(training_data)
    training_label = label_binarize(np.hstack(training_label).astype(int),
                                    [0, 255])
    print 'Create the training set ...'
    # Perform the classification for the current cv and the
    # given configuration
    crf = RandomForestClassifier(n_estimators=100, n_jobs=-1)
    pred_prob = crf.fit(training_data, np.ravel(training_label)).predict_proba(
        testing_data)
    result_cv.append([pred_prob, crf.classes_])
# Save the information
path_store = '/data/prostate/results/mp-mri-prostate/exp-1/mrsi-citrate-choline-fit-ratio'
if not os.path.exists(path_store):
    os.makedirs(path_store)
joblib.dump(result_cv, os.path.join(path_store,
                                    'results.pkl'))
| {
"content_hash": "4fd54fb9989cba1f22933fd06bea18c8",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 90,
"avg_line_length": 40.313559322033896,
"alnum_prop": 0.669539625814589,
"repo_name": "I2Cvb/mp-mri-prostate",
"id": "06d084377bda165379ff1db05b459ba6787d6ca2",
"size": "4757",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pipeline/feature-classification/exp-1/mrsi/pipeline_classifier_mrsi_citrate_choline_fit_ratio.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "54402"
},
{
"name": "CMake",
"bytes": "3709"
},
{
"name": "Makefile",
"bytes": "68"
},
{
"name": "Python",
"bytes": "422305"
},
{
"name": "Shell",
"bytes": "3153"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import datetime
import os
from decimal import Decimal
from unittest import skipUnless
import warnings
from django import forms
from django.core.exceptions import FieldError
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.validators import ValidationError
from django.db import connection
from django.db.models.query import EmptyQuerySet
from django.forms.models import model_to_dict
from django.utils._os import upath
from django.test import TestCase
from django.utils import six
from .models import (Article, ArticleStatus, BetterWriter, BigInt, Book,
Category, CommaSeparatedInteger, CustomFieldForExclusionModel, DerivedBook,
DerivedPost, ExplicitPK, FlexibleDatePost, ImprovedArticle,
ImprovedArticleWithParentLink, Inventory, Post, Price,
Product, TextFile, Writer, WriterProfile, Colour, ColourfulItem,
ArticleStatusNote, DateTimePost, CustomErrorMessage, test_images)
# Image-based forms are declared only when test_images is truthy --
# presumably set by the models module when an imaging library is
# available (ImageField needs one); confirm in .models.
if test_images:
    class ImageFileForm(forms.ModelForm):
        class Meta:
            model = ImageFile
            fields = '__all__'
    class OptionalImageFileForm(forms.ModelForm):
        class Meta:
            model = OptionalImageFile
            fields = '__all__'
# Plain ModelForm fixtures (default fields) used throughout the tests below.
class ProductForm(forms.ModelForm):
    class Meta:
        model = Product
        fields = '__all__'
class PriceForm(forms.ModelForm):
    class Meta:
        model = Price
        fields = '__all__'
class BookForm(forms.ModelForm):
    class Meta:
        model = Book
        fields = '__all__'
class DerivedBookForm(forms.ModelForm):
    class Meta:
        model = DerivedBook
        fields = '__all__'
class ExplicitPKForm(forms.ModelForm):
    class Meta:
        model = ExplicitPK
        fields = ('key', 'desc',)
class PostForm(forms.ModelForm):
    class Meta:
        model = Post
        fields = '__all__'
class DateTimePostForm(forms.ModelForm):
    class Meta:
        model = DateTimePost
        fields = '__all__'
class DerivedPostForm(forms.ModelForm):
    class Meta:
        model = DerivedPost
        fields = '__all__'
class CustomWriterForm(forms.ModelForm):
    # Overrides the model field with an optional form field.
    name = forms.CharField(required=False)
    class Meta:
        model = Writer
        fields = '__all__'
class FlexDatePostForm(forms.ModelForm):
    class Meta:
        model = FlexibleDatePost
        fields = '__all__'
class BaseCategoryForm(forms.ModelForm):
    class Meta:
        model = Category
        fields = '__all__'
# Article/Writer/Inventory form fixtures; the "Partial" variants restrict
# Meta.fields to a subset of the model's fields.
class ArticleForm(forms.ModelForm):
    class Meta:
        model = Article
        fields = '__all__'
class PartialArticleForm(forms.ModelForm):
    class Meta:
        model = Article
        fields = ('headline', 'pub_date')
class RoykoForm(forms.ModelForm):
    class Meta:
        model = Writer
        fields = '__all__'
class TestArticleForm(forms.ModelForm):
    class Meta:
        model = Article
        fields = '__all__'
class PartialArticleFormWithSlug(forms.ModelForm):
    class Meta:
        model = Article
        fields = ('headline', 'slug', 'pub_date')
class ArticleStatusForm(forms.ModelForm):
    class Meta:
        model = ArticleStatus
        fields = '__all__'
class InventoryForm(forms.ModelForm):
    class Meta:
        model = Inventory
        fields = '__all__'
class SelectInventoryForm(forms.Form):
    # Regular (non-model) form; items are matched by barcode, not pk.
    items = forms.ModelMultipleChoiceField(Inventory.objects.all(), to_field_name='barcode')
class CustomFieldForExclusionForm(forms.ModelForm):
    class Meta:
        model = CustomFieldForExclusionModel
        fields = ['name', 'markup']
class ShortCategory(forms.ModelForm):
    # Form fields deliberately shorter than the model's max_lengths.
    name = forms.CharField(max_length=5)
    slug = forms.CharField(max_length=5)
    url = forms.CharField(max_length=3)
    class Meta:
        model = Category
        fields = '__all__'
# Remaining form fixtures: inheritance, Media, widgets and custom messages.
class ImprovedArticleForm(forms.ModelForm):
    class Meta:
        model = ImprovedArticle
        fields = '__all__'
class ImprovedArticleWithParentLinkForm(forms.ModelForm):
    class Meta:
        model = ImprovedArticleWithParentLink
        fields = '__all__'
class BetterWriterForm(forms.ModelForm):
    class Meta:
        model = BetterWriter
        fields = '__all__'
class WriterProfileForm(forms.ModelForm):
    class Meta:
        model = WriterProfile
        fields = '__all__'
class TextFileForm(forms.ModelForm):
    class Meta:
        model = TextFile
        fields = '__all__'
class BigIntForm(forms.ModelForm):
    class Meta:
        model = BigInt
        fields = '__all__'
class ModelFormWithMedia(forms.ModelForm):
    # Declares form Media assets alongside the model form.
    class Media:
        js = ('/some/form/javascript',)
        css = {
            'all': ('/some/form/css',)
        }
    class Meta:
        model = TextFile
        fields = '__all__'
class CommaSeparatedIntegerForm(forms.ModelForm):
    class Meta:
        model = CommaSeparatedInteger
        fields = '__all__'
class PriceFormWithoutQuantity(forms.ModelForm):
    class Meta:
        model = Price
        exclude = ('quantity',)
class ColourfulItemForm(forms.ModelForm):
    class Meta:
        model = ColourfulItem
        fields = '__all__'
# model forms for testing work on #9321:
class StatusNoteForm(forms.ModelForm):
    class Meta:
        model = ArticleStatusNote
        fields = '__all__'
class StatusNoteCBM2mForm(forms.ModelForm):
    class Meta:
        model = ArticleStatusNote
        fields = '__all__'
        widgets = {'status': forms.CheckboxSelectMultiple}
class CustomErrorMessageForm(forms.ModelForm):
    # Field-level error message should take precedence over the model's.
    name1 = forms.CharField(error_messages={'invalid': 'Form custom error message.'})
    class Meta:
        fields = '__all__'
        model = CustomErrorMessage
class ModelFormBaseTest(TestCase):
    """Tests for ModelForm class construction: how Meta.fields/Meta.exclude
    shape base_fields, how declared form fields override model fields, and
    how Meta is inherited through subclassing."""
    def test_base_form(self):
        self.assertEqual(list(BaseCategoryForm.base_fields),
                         ['name', 'slug', 'url'])
    def test_missing_fields_attribute(self):
        # Defining a ModelForm without fields/exclude is deprecated but must
        # still produce a form with all model fields.
        with warnings.catch_warnings(record=True):
            warnings.simplefilter("always", DeprecationWarning)
            class MissingFieldsForm(forms.ModelForm):
                class Meta:
                    model = Category
        # There is some internal state in warnings module which means that
        # if a warning has been seen already, the catch_warnings won't
        # have recorded it. The following line therefore will not work reliably:
        # self.assertEqual(w[0].category, DeprecationWarning)
        # Until end of the deprecation cycle, should still create the
        # form as before:
        self.assertEqual(list(MissingFieldsForm.base_fields),
                         ['name', 'slug', 'url'])
    def test_extra_fields(self):
        # Extra declared fields are appended after the model's fields.
        class ExtraFields(BaseCategoryForm):
            some_extra_field = forms.BooleanField()
        self.assertEqual(list(ExtraFields.base_fields),
                         ['name', 'slug', 'url', 'some_extra_field'])
    def test_override_field(self):
        class WriterForm(forms.ModelForm):
            book = forms.CharField(required=False)
            class Meta:
                model = Writer
                fields = '__all__'
        wf = WriterForm({'name': 'Richard Lockridge'})
        self.assertTrue(wf.is_valid())
    def test_replace_field(self):
        class ReplaceField(forms.ModelForm):
            url = forms.BooleanField()
            class Meta:
                model = Category
                fields = '__all__'
        self.assertIsInstance(ReplaceField.base_fields['url'],
                              forms.fields.BooleanField)
    def test_replace_field_variant_2(self):
        # Should have the same result as before,
        # but 'fields' attribute specified differently
        class ReplaceField(forms.ModelForm):
            url = forms.BooleanField()
            class Meta:
                model = Category
                fields = ['url']
        self.assertIsInstance(ReplaceField.base_fields['url'],
                              forms.fields.BooleanField)
    def test_replace_field_variant_3(self):
        # Should have the same result as before,
        # but 'fields' attribute specified differently
        class ReplaceField(forms.ModelForm):
            url = forms.BooleanField()
            class Meta:
                model = Category
                fields = [] # url will still appear, since it is explicit above
        self.assertIsInstance(ReplaceField.base_fields['url'],
                              forms.fields.BooleanField)
    def test_limit_nonexistent_field(self):
        expected_msg = 'Unknown field(s) (nonexistent) specified for Category'
        with self.assertRaisesMessage(FieldError, expected_msg):
            class InvalidCategoryForm(forms.ModelForm):
                class Meta:
                    model = Category
                    fields = ['nonexistent']
    def test_limit_fields_with_string(self):
        expected_msg = "CategoryForm.Meta.fields cannot be a string. Did you mean to type: ('url',)?"
        with self.assertRaisesMessage(TypeError, expected_msg):
            class CategoryForm(forms.ModelForm):
                class Meta:
                    model = Category
                    fields = ('url') # note the missing comma
    def test_exclude_fields(self):
        class ExcludeFields(forms.ModelForm):
            class Meta:
                model = Category
                exclude = ['url']
        self.assertEqual(list(ExcludeFields.base_fields),
                         ['name', 'slug'])
    def test_exclude_nonexistent_field(self):
        # Unlike Meta.fields, excluding an unknown name is silently ignored.
        class ExcludeFields(forms.ModelForm):
            class Meta:
                model = Category
                exclude = ['nonexistent']
        self.assertEqual(list(ExcludeFields.base_fields),
                         ['name', 'slug', 'url'])
    def test_exclude_fields_with_string(self):
        expected_msg = "CategoryForm.Meta.exclude cannot be a string. Did you mean to type: ('url',)?"
        with self.assertRaisesMessage(TypeError, expected_msg):
            class CategoryForm(forms.ModelForm):
                class Meta:
                    model = Category
                    exclude = ('url') # note the missing comma
    def test_confused_form(self):
        class ConfusedForm(forms.ModelForm):
            """ Using 'fields' *and* 'exclude'. Not sure why you'd want to do
            this, but uh, "be liberal in what you accept" and all.
            """
            class Meta:
                model = Category
                fields = ['name', 'url']
                exclude = ['url']
        self.assertEqual(list(ConfusedForm.base_fields),
                         ['name'])
    def test_mixmodel_form(self):
        class MixModelForm(BaseCategoryForm):
            """ Don't allow more than one 'model' definition in the
            inheritance hierarchy. Technically, it would generate a valid
            form, but the fact that the resulting save method won't deal with
            multiple objects is likely to trip up people not familiar with the
            mechanics.
            """
            class Meta:
                model = Article
                fields = '__all__'
        # MixModelForm is now an Article-related thing, because MixModelForm.Meta
        # overrides BaseCategoryForm.Meta.
        self.assertEqual(
            list(MixModelForm.base_fields),
            ['headline', 'slug', 'pub_date', 'writer', 'article', 'categories', 'status']
        )
    def test_article_form(self):
        self.assertEqual(
            list(ArticleForm.base_fields),
            ['headline', 'slug', 'pub_date', 'writer', 'article', 'categories', 'status']
        )
    def test_bad_form(self):
        # First class with a Meta class wins...
        class BadForm(ArticleForm, BaseCategoryForm):
            pass
        self.assertEqual(
            list(BadForm.base_fields),
            ['headline', 'slug', 'pub_date', 'writer', 'article', 'categories', 'status']
        )
    def test_invalid_meta_model(self):
        class InvalidModelForm(forms.ModelForm):
            class Meta:
                pass  # no model
        # Can't create new form
        with self.assertRaises(ValueError):
            InvalidModelForm()
        # Even if you provide a model instance
        with self.assertRaises(ValueError):
            InvalidModelForm(instance=Category)
    def test_subcategory_form(self):
        class SubCategoryForm(BaseCategoryForm):
            """ Subclassing without specifying a Meta on the class will use
            the parent's Meta (or the first parent in the MRO if there are
            multiple parent classes).
            """
            pass
        self.assertEqual(list(SubCategoryForm.base_fields),
                         ['name', 'slug', 'url'])
    def test_subclassmeta_form(self):
        class SomeCategoryForm(forms.ModelForm):
            checkbox = forms.BooleanField()
            class Meta:
                model = Category
                fields = '__all__'
        class SubclassMeta(SomeCategoryForm):
            """ We can also subclass the Meta inner class to change the fields
            list.
            """
            class Meta(SomeCategoryForm.Meta):
                exclude = ['url']
        self.assertHTMLEqual(
            str(SubclassMeta()),
            """<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="20" /></td></tr>
<tr><th><label for="id_slug">Slug:</label></th><td><input id="id_slug" type="text" name="slug" maxlength="20" /></td></tr>
<tr><th><label for="id_checkbox">Checkbox:</label></th><td><input type="checkbox" name="checkbox" id="id_checkbox" /></td></tr>"""
        )
    def test_orderfields_form(self):
        # Meta.fields order dictates rendering order.
        class OrderFields(forms.ModelForm):
            class Meta:
                model = Category
                fields = ['url', 'name']
        self.assertEqual(list(OrderFields.base_fields),
                         ['url', 'name'])
        self.assertHTMLEqual(
            str(OrderFields()),
            """<tr><th><label for="id_url">The URL:</label></th><td><input id="id_url" type="text" name="url" maxlength="40" /></td></tr>
<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="20" /></td></tr>"""
        )
    def test_orderfields2_form(self):
        class OrderFields2(forms.ModelForm):
            class Meta:
                model = Category
                fields = ['slug', 'url', 'name']
                exclude = ['url']
        self.assertEqual(list(OrderFields2.base_fields),
                         ['slug', 'name'])
# Fixture exercising the Meta-level widgets/labels/help_texts/error_messages
# overrides.  ("Trough" is a historical typo for "Through"; the name is kept
# because the tests below reference it.)
class FieldOverridesTroughFormMetaForm(forms.ModelForm):
    class Meta:
        model = Category
        fields = ['name', 'url', 'slug']
        widgets = {
            'name': forms.Textarea,
            'url': forms.TextInput(attrs={'class': 'url'})
        }
        labels = {
            'name': 'Title',
        }
        help_texts = {
            'slug': 'Watch out! Letters, numbers, underscores and hyphens only.',
        }
        error_messages = {
            'slug': {
                'invalid': (
                    "Didn't you read the help text? "
                    "We said letters, numbers, underscores and hyphens only!"
                )
            }
        }
class TestFieldOverridesTroughFormMeta(TestCase):
    """Verifies that the Meta-level widgets/labels/help_texts/error_messages
    declared on FieldOverridesTroughFormMetaForm take effect on render and
    validation."""
    def test_widget_overrides(self):
        form = FieldOverridesTroughFormMetaForm()
        self.assertHTMLEqual(
            str(form['name']),
            '<textarea id="id_name" rows="10" cols="40" name="name"></textarea>',
        )
        self.assertHTMLEqual(
            str(form['url']),
            '<input id="id_url" type="text" class="url" name="url" maxlength="40" />',
        )
        self.assertHTMLEqual(
            str(form['slug']),
            '<input id="id_slug" type="text" name="slug" maxlength="20" />',
        )
    def test_label_overrides(self):
        form = FieldOverridesTroughFormMetaForm()
        self.assertHTMLEqual(
            str(form['name'].label_tag()),
            '<label for="id_name">Title:</label>',
        )
        self.assertHTMLEqual(
            str(form['url'].label_tag()),
            '<label for="id_url">The URL:</label>',
        )
        self.assertHTMLEqual(
            str(form['slug'].label_tag()),
            '<label for="id_slug">Slug:</label>',
        )
    def test_help_text_overrides(self):
        form = FieldOverridesTroughFormMetaForm()
        self.assertEqual(
            form['slug'].help_text,
            'Watch out! Letters, numbers, underscores and hyphens only.',
        )
    def test_error_messages_overrides(self):
        form = FieldOverridesTroughFormMetaForm(data={
            'name': 'Category',
            'url': '/category/',
            'slug': '!%#*@',
        })
        form.full_clean()
        error = [
            "Didn't you read the help text? "
            "We said letters, numbers, underscores and hyphens only!",
        ]
        self.assertEqual(form.errors, {'slug': error})
class IncompleteCategoryFormWithFields(forms.ModelForm):
    """
    A form that replaces the model's url field with a custom one. This should
    prevent the model field's validation from being called.
    """
    # Optional form-level 'url' overrides the model field's requirements.
    url = forms.CharField(required=False)
    class Meta:
        fields = ('name', 'slug')
        model = Category
class IncompleteCategoryFormWithExclude(forms.ModelForm):
    """
    A form that replaces the model's url field with a custom one. This should
    prevent the model field's validation from being called.
    """
    # Same as above, but the model field is removed via Meta.exclude.
    url = forms.CharField(required=False)
    class Meta:
        exclude = ['url']
        model = Category
class ValidationTest(TestCase):
    """Checks that replacing a model field with a custom form field skips the
    model field's own validation, and that required=False on a declared form
    field overrides the model's blank=False."""
    def test_validates_with_replaced_field_not_specified(self):
        form = IncompleteCategoryFormWithFields(data={'name': 'some name', 'slug': 'some-slug'})
        # Use unittest assertions rather than bare `assert`: plain asserts
        # are stripped under `python -O`, and assertTrue matches the style
        # of the rest of this module.
        self.assertTrue(form.is_valid())
    def test_validates_with_replaced_field_excluded(self):
        form = IncompleteCategoryFormWithExclude(data={'name': 'some name', 'slug': 'some-slug'})
        self.assertTrue(form.is_valid())
    def test_notrequired_overrides_notblank(self):
        form = CustomWriterForm({})
        self.assertTrue(form.is_valid())
# unique/unique_together validation
class UniqueTest(TestCase):
    """ModelForm validation of unique, unique_together and unique_for_* model
    constraints, including behaviour through (abstract and concrete) model
    inheritance and for nullable values."""
    def setUp(self):
        self.writer = Writer.objects.create(name='Mike Royko')
    def test_simple_unique(self):
        form = ProductForm({'slug': 'teddy-bear-blue'})
        self.assertTrue(form.is_valid())
        obj = form.save()
        form = ProductForm({'slug': 'teddy-bear-blue'})
        self.assertEqual(len(form.errors), 1)
        self.assertEqual(form.errors['slug'], ['Product with this Slug already exists.'])
        # Editing the existing instance must not trip its own uniqueness.
        form = ProductForm({'slug': 'teddy-bear-blue'}, instance=obj)
        self.assertTrue(form.is_valid())
    def test_unique_together(self):
        """ModelForm test of unique_together constraint"""
        form = PriceForm({'price': '6.00', 'quantity': '1'})
        self.assertTrue(form.is_valid())
        form.save()
        form = PriceForm({'price': '6.00', 'quantity': '1'})
        self.assertFalse(form.is_valid())
        self.assertEqual(len(form.errors), 1)
        self.assertEqual(form.errors['__all__'], ['Price with this Price and Quantity already exists.'])
    def test_unique_null(self):
        # Rows where part of the unique_together is NULL never collide.
        title = 'I May Be Wrong But I Doubt It'
        form = BookForm({'title': title, 'author': self.writer.pk})
        self.assertTrue(form.is_valid())
        form.save()
        form = BookForm({'title': title, 'author': self.writer.pk})
        self.assertFalse(form.is_valid())
        self.assertEqual(len(form.errors), 1)
        self.assertEqual(form.errors['__all__'], ['Book with this Title and Author already exists.'])
        form = BookForm({'title': title})
        self.assertTrue(form.is_valid())
        form.save()
        form = BookForm({'title': title})
        self.assertTrue(form.is_valid())
    def test_inherited_unique(self):
        title = 'Boss'
        Book.objects.create(title=title, author=self.writer, special_id=1)
        form = DerivedBookForm({'title': 'Other', 'author': self.writer.pk, 'special_id': '1', 'isbn': '12345'})
        self.assertFalse(form.is_valid())
        self.assertEqual(len(form.errors), 1)
        self.assertEqual(form.errors['special_id'], ['Book with this Special id already exists.'])
    def test_inherited_unique_together(self):
        title = 'Boss'
        form = BookForm({'title': title, 'author': self.writer.pk})
        self.assertTrue(form.is_valid())
        form.save()
        form = DerivedBookForm({'title': title, 'author': self.writer.pk, 'isbn': '12345'})
        self.assertFalse(form.is_valid())
        self.assertEqual(len(form.errors), 1)
        self.assertEqual(form.errors['__all__'], ['Book with this Title and Author already exists.'])
    def test_abstract_inherited_unique(self):
        title = 'Boss'
        isbn = '12345'
        DerivedBook.objects.create(title=title, author=self.writer, isbn=isbn)
        form = DerivedBookForm({'title': 'Other', 'author': self.writer.pk, 'isbn': isbn})
        self.assertFalse(form.is_valid())
        self.assertEqual(len(form.errors), 1)
        self.assertEqual(form.errors['isbn'], ['Derived book with this Isbn already exists.'])
    def test_abstract_inherited_unique_together(self):
        title = 'Boss'
        isbn = '12345'
        DerivedBook.objects.create(title=title, author=self.writer, isbn=isbn)
        form = DerivedBookForm({
            'title': 'Other',
            'author': self.writer.pk,
            'isbn': '9876',
            'suffix1': '0',
            'suffix2': '0'
        })
        self.assertFalse(form.is_valid())
        self.assertEqual(len(form.errors), 1)
        self.assertEqual(form.errors['__all__'],
                         ['Derived book with this Suffix1 and Suffix2 already exists.'])
    def test_explicitpk_unspecified(self):
        """Test for primary_key being in the form and failing validation."""
        form = ExplicitPKForm({'key': '', 'desc': ''})
        self.assertFalse(form.is_valid())
    def test_explicitpk_unique(self):
        """Ensure keys and blank character strings are tested for uniqueness."""
        form = ExplicitPKForm({'key': 'key1', 'desc': ''})
        self.assertTrue(form.is_valid())
        form.save()
        form = ExplicitPKForm({'key': 'key1', 'desc': ''})
        self.assertFalse(form.is_valid())
        self.assertEqual(len(form.errors), 3)
        self.assertEqual(form.errors['__all__'], ['Explicit pk with this Key and Desc already exists.'])
        self.assertEqual(form.errors['desc'], ['Explicit pk with this Desc already exists.'])
        self.assertEqual(form.errors['key'], ['Explicit pk with this Key already exists.'])
    def test_unique_for_date(self):
        p = Post.objects.create(title="Django 1.0 is released",
            slug="Django 1.0", subtitle="Finally", posted=datetime.date(2008, 9, 3))
        form = PostForm({'title': "Django 1.0 is released", 'posted': '2008-09-03'})
        self.assertFalse(form.is_valid())
        self.assertEqual(len(form.errors), 1)
        self.assertEqual(form.errors['title'], ['Title must be unique for Posted date.'])
        form = PostForm({'title': "Work on Django 1.1 begins", 'posted': '2008-09-03'})
        self.assertTrue(form.is_valid())
        form = PostForm({'title': "Django 1.0 is released", 'posted': '2008-09-04'})
        self.assertTrue(form.is_valid())
        form = PostForm({'slug': "Django 1.0", 'posted': '2008-01-01'})
        self.assertFalse(form.is_valid())
        self.assertEqual(len(form.errors), 1)
        self.assertEqual(form.errors['slug'], ['Slug must be unique for Posted year.'])
        form = PostForm({'subtitle': "Finally", 'posted': '2008-09-30'})
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors['subtitle'], ['Subtitle must be unique for Posted month.'])
        form = PostForm({'subtitle': "Finally", "title": "Django 1.0 is released",
                "slug": "Django 1.0", 'posted': '2008-09-03'}, instance=p)
        self.assertTrue(form.is_valid())
        form = PostForm({'title': "Django 1.0 is released"})
        self.assertFalse(form.is_valid())
        self.assertEqual(len(form.errors), 1)
        self.assertEqual(form.errors['posted'], ['This field is required.'])
    def test_unique_for_date_in_exclude(self):
        """If the date for unique_for_* constraints is excluded from the
        ModelForm (in this case 'posted' has editable=False, then the
        constraint should be ignored."""
        DateTimePost.objects.create(title="Django 1.0 is released",
            slug="Django 1.0", subtitle="Finally",
            posted=datetime.datetime(2008, 9, 3, 10, 10, 1))
        # 'title' has unique_for_date='posted'
        form = DateTimePostForm({'title': "Django 1.0 is released", 'posted': '2008-09-03'})
        self.assertTrue(form.is_valid())
        # 'slug' has unique_for_year='posted'
        form = DateTimePostForm({'slug': "Django 1.0", 'posted': '2008-01-01'})
        self.assertTrue(form.is_valid())
        # 'subtitle' has unique_for_month='posted'
        form = DateTimePostForm({'subtitle': "Finally", 'posted': '2008-09-30'})
        self.assertTrue(form.is_valid())
    def test_inherited_unique_for_date(self):
        p = Post.objects.create(title="Django 1.0 is released",
            slug="Django 1.0", subtitle="Finally", posted=datetime.date(2008, 9, 3))
        form = DerivedPostForm({'title': "Django 1.0 is released", 'posted': '2008-09-03'})
        self.assertFalse(form.is_valid())
        self.assertEqual(len(form.errors), 1)
        self.assertEqual(form.errors['title'], ['Title must be unique for Posted date.'])
        form = DerivedPostForm({'title': "Work on Django 1.1 begins", 'posted': '2008-09-03'})
        self.assertTrue(form.is_valid())
        form = DerivedPostForm({'title': "Django 1.0 is released", 'posted': '2008-09-04'})
        self.assertTrue(form.is_valid())
        form = DerivedPostForm({'slug': "Django 1.0", 'posted': '2008-01-01'})
        self.assertFalse(form.is_valid())
        self.assertEqual(len(form.errors), 1)
        self.assertEqual(form.errors['slug'], ['Slug must be unique for Posted year.'])
        form = DerivedPostForm({'subtitle': "Finally", 'posted': '2008-09-30'})
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors['subtitle'], ['Subtitle must be unique for Posted month.'])
        form = DerivedPostForm({'subtitle': "Finally", "title": "Django 1.0 is released",
                "slug": "Django 1.0", 'posted': '2008-09-03'}, instance=p)
        self.assertTrue(form.is_valid())
    def test_unique_for_date_with_nullable_date(self):
        # A NULL 'posted' disables every unique_for_* check.
        p = FlexibleDatePost.objects.create(title="Django 1.0 is released",
            slug="Django 1.0", subtitle="Finally", posted=datetime.date(2008, 9, 3))
        form = FlexDatePostForm({'title': "Django 1.0 is released"})
        self.assertTrue(form.is_valid())
        form = FlexDatePostForm({'slug': "Django 1.0"})
        self.assertTrue(form.is_valid())
        form = FlexDatePostForm({'subtitle': "Finally"})
        self.assertTrue(form.is_valid())
        form = FlexDatePostForm({'subtitle': "Finally", "title": "Django 1.0 is released",
                "slug": "Django 1.0"}, instance=p)
        self.assertTrue(form.is_valid())
class ModelToDictTests(TestCase):
    """
    Tests for forms.models.model_to_dict
    """
    def test_model_to_dict_many_to_many(self):
        categories = [
            Category(name='TestName1', slug='TestName1', url='url1'),
            Category(name='TestName2', slug='TestName2', url='url2'),
            Category(name='TestName3', slug='TestName3', url='url3')
        ]
        for c in categories:
            c.save()
        writer = Writer(name='Test writer')
        writer.save()
        art = Article(
            headline='Test article',
            slug='test-article',
            pub_date=datetime.date(1988, 1, 4),
            writer=writer,
            article='Hello.'
        )
        art.save()
        for c in categories:
            art.categories.add(c)
        art.save()
        # One query: fetching the m2m values for the 'categories' field.
        with self.assertNumQueries(1):
            d = model_to_dict(art)
        # Ensure all many-to-many categories appear in model_to_dict
        for c in categories:
            self.assertIn(c.pk, d['categories'])
        # Ensure many-to-many relation appears as a list
        self.assertIsInstance(d['categories'], list)
    def test_reuse_prefetched(self):
        # model_to_dict should not hit the database if it can reuse
        # the data populated by prefetch_related.
        categories = [
            Category(name='TestName1', slug='TestName1', url='url1'),
            Category(name='TestName2', slug='TestName2', url='url2'),
            Category(name='TestName3', slug='TestName3', url='url3')
        ]
        for c in categories:
            c.save()
        writer = Writer(name='Test writer')
        writer.save()
        art = Article(
            headline='Test article',
            slug='test-article',
            pub_date=datetime.date(1988, 1, 4),
            writer=writer,
            article='Hello.'
        )
        art.save()
        for c in categories:
            art.categories.add(c)
        art = Article.objects.prefetch_related('categories').get(pk=art.pk)
        with self.assertNumQueries(0):
            d = model_to_dict(art)
        # Ensure all many-to-many categories appear in model_to_dict
        for c in categories:
            self.assertIn(c.pk, d['categories'])
        # Ensure many-to-many relation appears as a list
        self.assertIsInstance(d['categories'], list)
class OldFormForXTests(TestCase):
def test_base_form(self):
    """A bare ModelForm renders each model field with the expected
    widget, label and auto-generated id in table and <ul> output."""
    self.assertEqual(Category.objects.count(), 0)
    f = BaseCategoryForm()
    expected_rows = """<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="20" /></td></tr>
<tr><th><label for="id_slug">Slug:</label></th><td><input id="id_slug" type="text" name="slug" maxlength="20" /></td></tr>
<tr><th><label for="id_url">The URL:</label></th><td><input id="id_url" type="text" name="url" maxlength="40" /></td></tr>"""
    self.assertHTMLEqual(str(f), expected_rows)
    expected_items = """<li><label for="id_name">Name:</label> <input id="id_name" type="text" name="name" maxlength="20" /></li>
<li><label for="id_slug">Slug:</label> <input id="id_slug" type="text" name="slug" maxlength="20" /></li>
<li><label for="id_url">The URL:</label> <input id="id_url" type="text" name="url" maxlength="40" /></li>"""
    self.assertHTMLEqual(str(f.as_ul()), expected_items)
    # A single bound field renders just its widget.
    self.assertHTMLEqual(
        str(f["name"]),
        """<input id="id_name" type="text" name="name" maxlength="20" />""")
def test_auto_id(self):
    """With auto_id=False neither id attributes nor <label> tags are
    emitted in the rendered output."""
    form = BaseCategoryForm(auto_id=False)
    expected = """<li>Name: <input type="text" name="name" maxlength="20" /></li>
<li>Slug: <input type="text" name="slug" maxlength="20" /></li>
<li>The URL: <input type="text" name="url" maxlength="40" /></li>"""
    self.assertHTMLEqual(str(form.as_ul()), expected)
def test_with_data(self):
    """End-to-end exercise of ModelForm behaviour against live data:
    validation and save(), save(commit=False) plus save_m2m(), rendering
    with instances and initial data, and the runtime querysets behind
    ModelChoiceField / ModelMultipleChoiceField.
    """
    self.assertEqual(Category.objects.count(), 0)
    f = BaseCategoryForm({'name': 'Entertainment',
        'slug': 'entertainment',
        'url': 'entertainment'})
    self.assertTrue(f.is_valid())
    self.assertEqual(f.cleaned_data['name'], 'Entertainment')
    self.assertEqual(f.cleaned_data['slug'], 'entertainment')
    self.assertEqual(f.cleaned_data['url'], 'entertainment')
    c1 = f.save()
    # Testing whether the same object is returned from the
    # ORM... not the fastest way...
    self.assertEqual(c1, Category.objects.all()[0])
    self.assertEqual(c1.name, "Entertainment")
    self.assertEqual(Category.objects.count(), 1)
    f = BaseCategoryForm({'name': "It's a test",
        'slug': 'its-test',
        'url': 'test'})
    self.assertTrue(f.is_valid())
    self.assertEqual(f.cleaned_data['name'], "It's a test")
    self.assertEqual(f.cleaned_data['slug'], 'its-test')
    self.assertEqual(f.cleaned_data['url'], 'test')
    c2 = f.save()
    # Testing whether the same object is returned from the
    # ORM... not the fastest way...
    self.assertEqual(c2, Category.objects.get(pk=c2.pk))
    self.assertEqual(c2.name, "It's a test")
    self.assertEqual(Category.objects.count(), 2)
    # If you call save() with commit=False, then it will return an object that
    # hasn't yet been saved to the database. In this case, it's up to you to call
    # save() on the resulting model instance.
    f = BaseCategoryForm({'name': 'Third test', 'slug': 'third-test', 'url': 'third'})
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(f.cleaned_data['url'], 'third')
    self.assertEqual(f.cleaned_data['name'], 'Third test')
    self.assertEqual(f.cleaned_data['slug'], 'third-test')
    c3 = f.save(commit=False)
    self.assertEqual(c3.name, "Third test")
    self.assertEqual(Category.objects.count(), 2)
    c3.save()
    self.assertEqual(Category.objects.count(), 3)
    # If you call save() with invalid data, you'll get a ValueError.
    f = BaseCategoryForm({'name': '', 'slug': 'not a slug!', 'url': 'foo'})
    self.assertEqual(f.errors['name'], ['This field is required.'])
    self.assertEqual(f.errors['slug'], ["Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens."])
    self.assertEqual(f.cleaned_data, {'url': 'foo'})
    with self.assertRaises(ValueError):
        f.save()
    f = BaseCategoryForm({'name': '', 'slug': '', 'url': 'foo'})
    with self.assertRaises(ValueError):
        f.save()
    # Create a couple of Writers.
    w_royko = Writer(name='Mike Royko')
    w_royko.save()
    w_woodward = Writer(name='Bob Woodward')
    w_woodward.save()
    # ManyToManyFields are represented by a MultipleChoiceField, ForeignKeys and any
    # fields with the 'choices' attribute are represented by a ChoiceField.
    f = ArticleForm(auto_id=False)
    self.assertHTMLEqual(six.text_type(f), '''<tr><th>Headline:</th><td><input type="text" name="headline" maxlength="50" /></td></tr>
<tr><th>Slug:</th><td><input type="text" name="slug" maxlength="50" /></td></tr>
<tr><th>Pub date:</th><td><input type="text" name="pub_date" /></td></tr>
<tr><th>Writer:</th><td><select name="writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Mike Royko</option>
</select></td></tr>
<tr><th>Article:</th><td><textarea rows="10" cols="40" name="article"></textarea></td></tr>
<tr><th>Categories:</th><td><select multiple="multiple" name="categories">
<option value="%s">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third test</option>
</select><br /><span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></td></tr>
<tr><th>Status:</th><td><select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></td></tr>''' % (w_woodward.pk, w_royko.pk, c1.pk, c2.pk, c3.pk))
    # You can restrict a form to a subset of the complete list of fields
    # by providing a 'fields' argument. If you try to save a
    # model created with such a form, you need to ensure that the fields
    # that are _not_ on the form have default values, or are allowed to have
    # a value of None. If a field isn't specified on a form, the object created
    # from the form can't provide a value for that field!
    f = PartialArticleForm(auto_id=False)
    self.assertHTMLEqual(six.text_type(f), '''<tr><th>Headline:</th><td><input type="text" name="headline" maxlength="50" /></td></tr>
<tr><th>Pub date:</th><td><input type="text" name="pub_date" /></td></tr>''')
    # When the ModelForm is passed an instance, that instance's current values are
    # inserted as 'initial' data in each Field.
    w = Writer.objects.get(name='Mike Royko')
    f = RoykoForm(auto_id=False, instance=w)
    self.assertHTMLEqual(six.text_type(f), '''<tr><th>Name:</th><td><input type="text" name="name" value="Mike Royko" maxlength="50" /><br /><span class="helptext">Use both first and last names.</span></td></tr>''')
    art = Article(
        headline='Test article',
        slug='test-article',
        pub_date=datetime.date(1988, 1, 4),
        writer=w,
        article='Hello.'
    )
    art.save()
    art_id_1 = art.id
    self.assertEqual(art_id_1 is not None, True)
    f = TestArticleForm(auto_id=False, instance=art)
    self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="Test article" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="test-article" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>
<li>Writer: <select name="writer">
<option value="">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s" selected="selected">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article">Hello.</textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third test</option>
</select> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (w_woodward.pk, w_royko.pk, c1.pk, c2.pk, c3.pk))
    # Saving a bound form with an instance updates that same row.
    f = TestArticleForm({
        'headline': 'Test headline',
        'slug': 'test-headline',
        'pub_date': '1984-02-06',
        'writer': six.text_type(w_royko.pk),
        'article': 'Hello.'
    }, instance=art)
    self.assertEqual(f.errors, {})
    self.assertEqual(f.is_valid(), True)
    test_art = f.save()
    self.assertEqual(test_art.id == art_id_1, True)
    test_art = Article.objects.get(id=art_id_1)
    self.assertEqual(test_art.headline, 'Test headline')
    # You can create a form over a subset of the available fields
    # by specifying a 'fields' argument to form_for_instance.
    f = PartialArticleFormWithSlug({
        'headline': 'New headline',
        'slug': 'new-headline',
        'pub_date': '1988-01-04'
    }, auto_id=False, instance=art)
    self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="New headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="new-headline" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>''')
    self.assertEqual(f.is_valid(), True)
    new_art = f.save()
    self.assertEqual(new_art.id == art_id_1, True)
    new_art = Article.objects.get(id=art_id_1)
    self.assertEqual(new_art.headline, 'New headline')
    # Add some categories and test the many-to-many form output.
    self.assertQuerysetEqual(new_art.categories.all(), [])
    new_art.categories.add(Category.objects.get(name='Entertainment'))
    self.assertQuerysetEqual(new_art.categories.all(), ["Entertainment"])
    f = TestArticleForm(auto_id=False, instance=new_art)
    self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="New headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="new-headline" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>
<li>Writer: <select name="writer">
<option value="">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s" selected="selected">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article">Hello.</textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s" selected="selected">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third test</option>
</select> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (w_woodward.pk, w_royko.pk, c1.pk, c2.pk, c3.pk))
    # Initial values can be provided for model forms
    f = TestArticleForm(
        auto_id=False,
        initial={
            'headline': 'Your headline here',
            'categories': [str(c1.id), str(c2.id)]
        })
    self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="Your headline here" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
<li>Writer: <select name="writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article"></textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s" selected="selected">Entertainment</option>
<option value="%s" selected="selected">It's a test</option>
<option value="%s">Third test</option>
</select> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (w_woodward.pk, w_royko.pk, c1.pk, c2.pk, c3.pk))
    # Submitting category data replaces the article's m2m set.
    f = TestArticleForm({
        'headline': 'New headline',
        'slug': 'new-headline',
        'pub_date': '1988-01-04',
        'writer': six.text_type(w_royko.pk),
        'article': 'Hello.',
        'categories': [six.text_type(c1.id), six.text_type(c2.id)]
    }, instance=new_art)
    new_art = f.save()
    self.assertEqual(new_art.id == art_id_1, True)
    new_art = Article.objects.get(id=art_id_1)
    self.assertQuerysetEqual(new_art.categories.order_by('name'),
        ["Entertainment", "It's a test"])
    # Now, submit form data with no categories. This deletes the existing categories.
    f = TestArticleForm({'headline': 'New headline', 'slug': 'new-headline', 'pub_date': '1988-01-04',
        'writer': six.text_type(w_royko.pk), 'article': 'Hello.'}, instance=new_art)
    new_art = f.save()
    self.assertEqual(new_art.id == art_id_1, True)
    new_art = Article.objects.get(id=art_id_1)
    self.assertQuerysetEqual(new_art.categories.all(), [])
    # Create a new article, with categories, via the form.
    f = ArticleForm({'headline': 'The walrus was Paul', 'slug': 'walrus-was-paul', 'pub_date': '1967-11-01',
        'writer': six.text_type(w_royko.pk), 'article': 'Test.', 'categories': [six.text_type(c1.id), six.text_type(c2.id)]})
    new_art = f.save()
    art_id_2 = new_art.id
    self.assertEqual(art_id_2 not in (None, art_id_1), True)
    new_art = Article.objects.get(id=art_id_2)
    self.assertQuerysetEqual(new_art.categories.order_by('name'), ["Entertainment", "It's a test"])
    # Create a new article, with no categories, via the form.
    f = ArticleForm({'headline': 'The walrus was Paul', 'slug': 'walrus-was-paul', 'pub_date': '1967-11-01',
        'writer': six.text_type(w_royko.pk), 'article': 'Test.'})
    new_art = f.save()
    art_id_3 = new_art.id
    self.assertEqual(art_id_3 not in (None, art_id_1, art_id_2), True)
    new_art = Article.objects.get(id=art_id_3)
    self.assertQuerysetEqual(new_art.categories.all(), [])
    # Create a new article, with categories, via the form, but use commit=False.
    # The m2m data won't be saved until save_m2m() is invoked on the form.
    f = ArticleForm({'headline': 'The walrus was Paul', 'slug': 'walrus-was-paul', 'pub_date': '1967-11-01',
        'writer': six.text_type(w_royko.pk), 'article': 'Test.', 'categories': [six.text_type(c1.id), six.text_type(c2.id)]})
    new_art = f.save(commit=False)
    # Manually save the instance
    new_art.save()
    art_id_4 = new_art.id
    self.assertEqual(art_id_4 not in (None, art_id_1, art_id_2, art_id_3), True)
    # The instance doesn't have m2m data yet
    new_art = Article.objects.get(id=art_id_4)
    self.assertQuerysetEqual(new_art.categories.all(), [])
    # Save the m2m data on the form
    f.save_m2m()
    self.assertQuerysetEqual(new_art.categories.order_by('name'), ["Entertainment", "It's a test"])
    # Here, we define a custom ModelForm. Because it happens to have the same fields as
    # the Category model, we can just call the form's save() to apply its changes to an
    # existing Category instance.
    cat = Category.objects.get(name='Third test')
    self.assertEqual(cat.name, "Third test")
    self.assertEqual(cat.id == c3.id, True)
    form = ShortCategory({'name': 'Third', 'slug': 'third', 'url': '3rd'}, instance=cat)
    self.assertEqual(form.save().name, 'Third')
    self.assertEqual(Category.objects.get(id=c3.id).name, 'Third')
    # Here, we demonstrate that choices for a ForeignKey ChoiceField are determined
    # at runtime, based on the data in the database when the form is displayed, not
    # the data in the database when the form is instantiated.
    f = ArticleForm(auto_id=False)
    self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
<li>Writer: <select name="writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article"></textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third</option>
</select> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (w_woodward.pk, w_royko.pk, c1.pk, c2.pk, c3.pk))
    c4 = Category.objects.create(name='Fourth', url='4th')
    self.assertEqual(c4.name, 'Fourth')
    w_bernstein = Writer.objects.create(name='Carl Bernstein')
    self.assertEqual(w_bernstein.name, 'Carl Bernstein')
    # The same (already-instantiated) form now shows the new rows.
    self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
<li>Writer: <select name="writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Carl Bernstein</option>
<option value="%s">Mike Royko</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article"></textarea></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="%s">Entertainment</option>
<option value="%s">It's a test</option>
<option value="%s">Third</option>
<option value="%s">Fourth</option>
</select> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>''' % (w_woodward.pk, w_bernstein.pk, w_royko.pk, c1.pk, c2.pk, c3.pk, c4.pk))
    # ModelChoiceField ############################################################
    f = forms.ModelChoiceField(Category.objects.all())
    self.assertEqual(list(f.choices), [
        ('', '---------'),
        (c1.pk, 'Entertainment'),
        (c2.pk, "It's a test"),
        (c3.pk, 'Third'),
        (c4.pk, 'Fourth')])
    self.assertEqual(5, len(f.choices))
    with self.assertRaises(ValidationError):
        f.clean('')
    with self.assertRaises(ValidationError):
        f.clean(None)
    with self.assertRaises(ValidationError):
        f.clean(0)
    self.assertEqual(f.clean(c3.id).name, 'Third')
    self.assertEqual(f.clean(c2.id).name, "It's a test")
    # Add a Category object *after* the ModelChoiceField has already been
    # instantiated. This proves clean() checks the database during clean() rather
    # than caching it at time of instantiation.
    c5 = Category.objects.create(name='Fifth', url='5th')
    self.assertEqual(c5.name, 'Fifth')
    self.assertEqual(f.clean(c5.id).name, 'Fifth')
    # Delete a Category object *after* the ModelChoiceField has already been
    # instantiated. This proves clean() checks the database during clean() rather
    # than caching it at time of instantiation.
    Category.objects.get(url='5th').delete()
    with self.assertRaises(ValidationError):
        f.clean(c5.id)
    f = forms.ModelChoiceField(Category.objects.filter(pk=c1.id), required=False)
    self.assertEqual(f.clean(''), None)
    f.clean('')
    self.assertEqual(f.clean(str(c1.id)).name, "Entertainment")
    with self.assertRaises(ValidationError):
        f.clean('100')
    # queryset can be changed after the field is created.
    f.queryset = Category.objects.exclude(name='Fourth')
    self.assertEqual(list(f.choices), [
        ('', '---------'),
        (c1.pk, 'Entertainment'),
        (c2.pk, "It's a test"),
        (c3.pk, 'Third')])
    self.assertEqual(f.clean(c3.id).name, 'Third')
    with self.assertRaises(ValidationError):
        f.clean(c4.id)
    # check that we can safely iterate choices repeatedly
    gen_one = list(f.choices)
    gen_two = f.choices
    self.assertEqual(gen_one[2], (c2.pk, "It's a test"))
    self.assertEqual(list(gen_two), [
        ('', '---------'),
        (c1.pk, 'Entertainment'),
        (c2.pk, "It's a test"),
        (c3.pk, 'Third')])
    # check that we can override the label_from_instance method to print custom labels (#4620)
    f.queryset = Category.objects.all()
    f.label_from_instance = lambda obj: "category " + str(obj)
    self.assertEqual(list(f.choices), [
        ('', '---------'),
        (c1.pk, 'category Entertainment'),
        (c2.pk, "category It's a test"),
        (c3.pk, 'category Third'),
        (c4.pk, 'category Fourth')])
    # ModelMultipleChoiceField ####################################################
    f = forms.ModelMultipleChoiceField(Category.objects.all())
    self.assertEqual(list(f.choices), [
        (c1.pk, 'Entertainment'),
        (c2.pk, "It's a test"),
        (c3.pk, 'Third'),
        (c4.pk, 'Fourth')])
    with self.assertRaises(ValidationError):
        f.clean(None)
    with self.assertRaises(ValidationError):
        f.clean([])
    self.assertQuerysetEqual(f.clean([c1.id]), ["Entertainment"])
    self.assertQuerysetEqual(f.clean([c2.id]), ["It's a test"])
    self.assertQuerysetEqual(f.clean([str(c1.id)]), ["Entertainment"])
    self.assertQuerysetEqual(f.clean([str(c1.id), str(c2.id)]), ["Entertainment", "It's a test"],
        ordered=False)
    self.assertQuerysetEqual(f.clean([c1.id, str(c2.id)]), ["Entertainment", "It's a test"],
        ordered=False)
    self.assertQuerysetEqual(f.clean((c1.id, str(c2.id))), ["Entertainment", "It's a test"],
        ordered=False)
    with self.assertRaises(ValidationError):
        f.clean(['100'])
    with self.assertRaises(ValidationError):
        f.clean('hello')
    with self.assertRaises(ValidationError):
        f.clean(['fail'])
    # Add a Category object *after* the ModelMultipleChoiceField has already been
    # instantiated. This proves clean() checks the database during clean() rather
    # than caching it at time of instantiation.
    # Note, we are using an id of 1006 here since tests that run before
    # this may create categories with primary keys up to 6. Use
    # a number that will not conflict.
    c6 = Category.objects.create(id=1006, name='Sixth', url='6th')
    self.assertEqual(c6.name, 'Sixth')
    self.assertQuerysetEqual(f.clean([c6.id]), ["Sixth"])
    # Delete a Category object *after* the ModelMultipleChoiceField has already been
    # instantiated. This proves clean() checks the database during clean() rather
    # than caching it at time of instantiation.
    Category.objects.get(url='6th').delete()
    with self.assertRaises(ValidationError):
        f.clean([c6.id])
    f = forms.ModelMultipleChoiceField(Category.objects.all(), required=False)
    self.assertIsInstance(f.clean([]), EmptyQuerySet)
    self.assertIsInstance(f.clean(()), EmptyQuerySet)
    with self.assertRaises(ValidationError):
        f.clean(['0'])
    with self.assertRaises(ValidationError):
        f.clean([str(c3.id), '0'])
    with self.assertRaises(ValidationError):
        f.clean([str(c1.id), '0'])
    # queryset can be changed after the field is created.
    f.queryset = Category.objects.exclude(name='Fourth')
    self.assertEqual(list(f.choices), [
        (c1.pk, 'Entertainment'),
        (c2.pk, "It's a test"),
        (c3.pk, 'Third')])
    self.assertQuerysetEqual(f.clean([c3.id]), ["Third"])
    with self.assertRaises(ValidationError):
        f.clean([c4.id])
    with self.assertRaises(ValidationError):
        f.clean([str(c3.id), str(c4.id)])
    # label_from_instance can be overridden here too.
    f.queryset = Category.objects.all()
    f.label_from_instance = lambda obj: "multicategory " + str(obj)
    self.assertEqual(list(f.choices), [
        (c1.pk, 'multicategory Entertainment'),
        (c2.pk, "multicategory It's a test"),
        (c3.pk, 'multicategory Third'),
        (c4.pk, 'multicategory Fourth')])
    # OneToOneField ###############################################################
    self.assertEqual(list(ImprovedArticleForm.base_fields), ['article'])
    self.assertEqual(list(ImprovedArticleWithParentLinkForm.base_fields), [])
    bw = BetterWriter(name='Joe Better', score=10)
    bw.save()
    self.assertEqual(sorted(model_to_dict(bw)),
        ['id', 'name', 'score', 'writer_ptr'])
    form = BetterWriterForm({'name': 'Some Name', 'score': 12})
    self.assertEqual(form.is_valid(), True)
    bw2 = form.save()
    bw2.delete()
    form = WriterProfileForm()
    self.assertHTMLEqual(form.as_p(), '''<p><label for="id_writer">Writer:</label> <select name="writer" id="id_writer">
<option value="" selected="selected">---------</option>
<option value="%s">Bob Woodward</option>
<option value="%s">Carl Bernstein</option>
<option value="%s">Joe Better</option>
<option value="%s">Mike Royko</option>
</select></p>
<p><label for="id_age">Age:</label> <input type="number" name="age" id="id_age" min="0" /></p>''' % (w_woodward.pk, w_bernstein.pk, bw.pk, w_royko.pk))
    data = {
        'writer': six.text_type(w_woodward.pk),
        'age': '65',
    }
    form = WriterProfileForm(data)
    instance = form.save()
    self.assertEqual(six.text_type(instance), 'Bob Woodward is 65')
    form = WriterProfileForm(instance=instance)
    self.assertHTMLEqual(form.as_p(), '''<p><label for="id_writer">Writer:</label> <select name="writer" id="id_writer">
<option value="">---------</option>
<option value="%s" selected="selected">Bob Woodward</option>
<option value="%s">Carl Bernstein</option>
<option value="%s">Joe Better</option>
<option value="%s">Mike Royko</option>
</select></p>
<p><label for="id_age">Age:</label> <input type="number" name="age" value="65" id="id_age" min="0" /></p>''' % (w_woodward.pk, w_bernstein.pk, bw.pk, w_royko.pk))
def test_file_field(self):
    """FileField handling on a ModelForm: missing/empty uploads, a
    successful upload, max_length enforcement inherited from the model,
    editing without re-uploading, and the non-required case.
    """
    # Test conditions when files is either not given or empty.
    f = TextFileForm(data={'description': 'Assistance'})
    self.assertEqual(f.is_valid(), False)
    f = TextFileForm(data={'description': 'Assistance'}, files={})
    self.assertEqual(f.is_valid(), False)
    # Upload a file and ensure it all works as expected.
    f = TextFileForm(
        data={'description': 'Assistance'},
        files={'file': SimpleUploadedFile('test1.txt', b'hello world')})
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(type(f.cleaned_data['file']), SimpleUploadedFile)
    instance = f.save()
    self.assertEqual(instance.file.name, 'tests/test1.txt')
    instance.file.delete()
    # Re-uploading after deleting the stored file works the same way.
    f = TextFileForm(
        data={'description': 'Assistance'},
        files={'file': SimpleUploadedFile('test1.txt', b'hello world')})
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(type(f.cleaned_data['file']), SimpleUploadedFile)
    instance = f.save()
    self.assertEqual(instance.file.name, 'tests/test1.txt')
    # Check if the max_length attribute has been inherited from the model.
    f = TextFileForm(
        data={'description': 'Assistance'},
        files={'file': SimpleUploadedFile('test-maxlength.txt', b'hello world')})
    self.assertEqual(f.is_valid(), False)
    # Edit an instance that already has the file defined in the model. This will not
    # save the file again, but leave it exactly as it is.
    f = TextFileForm(
        data={'description': 'Assistance'},
        instance=instance)
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(f.cleaned_data['file'].name, 'tests/test1.txt')
    instance = f.save()
    self.assertEqual(instance.file.name, 'tests/test1.txt')
    # Delete the current file since this is not done by Django.
    instance.file.delete()
    # Override the file by uploading a new one.
    f = TextFileForm(
        data={'description': 'Assistance'},
        files={'file': SimpleUploadedFile('test2.txt', b'hello world')}, instance=instance)
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.file.name, 'tests/test2.txt')
    # Delete the current file since this is not done by Django.
    instance.file.delete()
    f = TextFileForm(
        data={'description': 'Assistance'},
        files={'file': SimpleUploadedFile('test2.txt', b'hello world')})
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.file.name, 'tests/test2.txt')
    # Delete the current file since this is not done by Django.
    instance.file.delete()
    instance.delete()
    # Test the non-required FileField
    f = TextFileForm(data={'description': 'Assistance'})
    f.fields['file'].required = False
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.file.name, '')
    f = TextFileForm(
        data={'description': 'Assistance'},
        files={'file': SimpleUploadedFile('test3.txt', b'hello world')}, instance=instance)
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.file.name, 'tests/test3.txt')
    # Instance can be edited w/out re-uploading the file and existing file should be preserved.
    f = TextFileForm(
        data={'description': 'New Description'},
        instance=instance)
    f.fields['file'].required = False
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.description, 'New Description')
    self.assertEqual(instance.file.name, 'tests/test3.txt')
    # Delete the current file since this is not done by Django.
    instance.file.delete()
    instance.delete()
    f = TextFileForm(
        data={'description': 'Assistance'},
        files={'file': SimpleUploadedFile('test3.txt', b'hello world')})
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.file.name, 'tests/test3.txt')
    # Delete the current file since this is not done by Django.
    instance.file.delete()
    instance.delete()
def test_big_integer_field(self):
    """BigIntegerField enforces the signed 64-bit bounds, with a
    validation error message naming the violated limit."""
    cases = (
        ('-9223372036854775808', True, None),
        ('-9223372036854775809', False,
         {'biggie': ['Ensure this value is greater than or equal to -9223372036854775808.']}),
        ('9223372036854775807', True, None),
        ('9223372036854775808', False,
         {'biggie': ['Ensure this value is less than or equal to 9223372036854775807.']}),
    )
    for value, valid, expected_errors in cases:
        form = BigIntForm({'biggie': value})
        self.assertEqual(form.is_valid(), valid)
        if expected_errors is not None:
            self.assertEqual(form.errors, expected_errors)
@skipUnless(test_images, "Pillow/PIL not installed")
def test_image_field(self):
    """Exercise ImageField handling on ModelForms: creation, editing
    without re-upload, overriding with a new upload, the non-required
    case, and a callable ``upload_to``.

    ImageField and FileField are nearly identical, but they differ
    slightly when it comes to validation. This specifically tests that
    #6302 is fixed for both file fields and image fields.
    """
    with open(os.path.join(os.path.dirname(upath(__file__)), "test.png"), 'rb') as fp:
        image_data = fp.read()
    with open(os.path.join(os.path.dirname(upath(__file__)), "test2.png"), 'rb') as fp:
        image_data2 = fp.read()
    f = ImageFileForm(
        data={'description': 'An image'},
        files={'image': SimpleUploadedFile('test.png', image_data)})
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(type(f.cleaned_data['image']), SimpleUploadedFile)
    instance = f.save()
    self.assertEqual(instance.image.name, 'tests/test.png')
    # Width/height dimension fields are populated from the uploaded image.
    self.assertEqual(instance.width, 16)
    self.assertEqual(instance.height, 16)
    # Delete the current file since this is not done by Django, but don't save
    # because the dimension fields are not null=True.
    instance.image.delete(save=False)
    f = ImageFileForm(
        data={'description': 'An image'},
        files={'image': SimpleUploadedFile('test.png', image_data)})
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(type(f.cleaned_data['image']), SimpleUploadedFile)
    instance = f.save()
    self.assertEqual(instance.image.name, 'tests/test.png')
    self.assertEqual(instance.width, 16)
    self.assertEqual(instance.height, 16)
    # Edit an instance that already has the (required) image defined in the model. This will not
    # save the image again, but leave it exactly as it is.
    f = ImageFileForm(data={'description': 'Look, it changed'}, instance=instance)
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(f.cleaned_data['image'].name, 'tests/test.png')
    instance = f.save()
    self.assertEqual(instance.image.name, 'tests/test.png')
    self.assertEqual(instance.height, 16)
    self.assertEqual(instance.width, 16)
    # Delete the current file since this is not done by Django, but don't save
    # because the dimension fields are not null=True.
    instance.image.delete(save=False)
    # Override the file by uploading a new one.
    f = ImageFileForm(
        data={'description': 'Changed it'},
        files={'image': SimpleUploadedFile('test2.png', image_data2)}, instance=instance)
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.image.name, 'tests/test2.png')
    # test2.png has different dimensions, so width/height must be refreshed.
    self.assertEqual(instance.height, 32)
    self.assertEqual(instance.width, 48)
    # Delete the current file since this is not done by Django, but don't save
    # because the dimension fields are not null=True.
    instance.image.delete(save=False)
    instance.delete()
    f = ImageFileForm(
        data={'description': 'Changed it'},
        files={'image': SimpleUploadedFile('test2.png', image_data2)})
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.image.name, 'tests/test2.png')
    self.assertEqual(instance.height, 32)
    self.assertEqual(instance.width, 48)
    # Delete the current file since this is not done by Django, but don't save
    # because the dimension fields are not null=True.
    instance.image.delete(save=False)
    instance.delete()
    # Test the non-required ImageField
    # Note: In Oracle, we expect a null ImageField to return '' instead of
    # None.
    if connection.features.interprets_empty_strings_as_nulls:
        expected_null_imagefield_repr = ''
    else:
        expected_null_imagefield_repr = None
    f = OptionalImageFileForm(data={'description': 'Test'})
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.image.name, expected_null_imagefield_repr)
    self.assertEqual(instance.width, None)
    self.assertEqual(instance.height, None)
    f = OptionalImageFileForm(
        data={'description': 'And a final one'},
        files={'image': SimpleUploadedFile('test3.png', image_data)}, instance=instance)
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.image.name, 'tests/test3.png')
    self.assertEqual(instance.width, 16)
    self.assertEqual(instance.height, 16)
    # Editing the instance without re-uploading the image should not affect the image or its width/height properties
    f = OptionalImageFileForm(
        data={'description': 'New Description'},
        instance=instance)
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.description, 'New Description')
    self.assertEqual(instance.image.name, 'tests/test3.png')
    self.assertEqual(instance.width, 16)
    self.assertEqual(instance.height, 16)
    # Delete the current file since this is not done by Django.
    instance.image.delete()
    instance.delete()
    f = OptionalImageFileForm(
        data={'description': 'And a final one'},
        files={'image': SimpleUploadedFile('test4.png', image_data2)}
    )
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    self.assertEqual(instance.image.name, 'tests/test4.png')
    self.assertEqual(instance.width, 48)
    self.assertEqual(instance.height, 32)
    instance.delete()
    # Test callable upload_to behavior that's dependent on the value of another field in the model
    f = ImageFileForm(
        data={'description': 'And a final one', 'path': 'foo'},
        files={'image': SimpleUploadedFile('test4.png', image_data)})
    self.assertEqual(f.is_valid(), True)
    instance = f.save()
    # The callable upload_to used the 'path' field value as the directory.
    self.assertEqual(instance.image.name, 'foo/test4.png')
    instance.delete()
def test_media_on_modelform(self):
    """Custom media declared on a ModelForm is rendered, plus assorted
    field-validation checks.

    NOTE(review): the CommaSeparatedIntegerForm, PriceFormWithoutQuantity
    and choice-field checks below look like they originally lived in
    separate test methods; in particular the bare nested ``class Meta``
    appears to have lost its enclosing form class during extraction —
    confirm against the upstream Django source.
    """
    # Similar to a regular Form class you can define custom media to be used on
    # the ModelForm.
    f = ModelFormWithMedia()
    self.assertHTMLEqual(six.text_type(f.media), '''<link href="/some/form/css" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/some/form/javascript"></script>''')
    # CommaSeparatedIntegerField: only digits and commas are accepted.
    f = CommaSeparatedIntegerForm({'field': '1,2,3'})
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(f.cleaned_data, {'field': '1,2,3'})
    f = CommaSeparatedIntegerForm({'field': '1a,2'})
    self.assertEqual(f.errors, {'field': ['Enter only digits separated by commas.']})
    f = CommaSeparatedIntegerForm({'field': ',,,,'})
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(f.cleaned_data, {'field': ',,,,'})
    f = CommaSeparatedIntegerForm({'field': '1.2'})
    self.assertEqual(f.errors, {'field': ['Enter only digits separated by commas.']})
    f = CommaSeparatedIntegerForm({'field': '1,a,2'})
    self.assertEqual(f.errors, {'field': ['Enter only digits separated by commas.']})
    f = CommaSeparatedIntegerForm({'field': '1,,2'})
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(f.cleaned_data, {'field': '1,,2'})
    f = CommaSeparatedIntegerForm({'field': '1'})
    self.assertEqual(f.is_valid(), True)
    self.assertEqual(f.cleaned_data, {'field': '1'})
    # This Price instance generated by this form is not valid because the quantity
    # field is required, but the form is valid because the field is excluded from
    # the form. This is for backwards compatibility.
    form = PriceFormWithoutQuantity({'price': '6.00'})
    self.assertEqual(form.is_valid(), True)
    price = form.save(commit=False)
    with self.assertRaises(ValidationError):
        price.full_clean()
    # The form should not validate fields that it doesn't contain even if they are
    # specified using 'fields', not 'exclude'.
    class Meta:
        model = Price
        fields = ('price',)
    form = PriceFormWithoutQuantity({'price': '6.00'})
    self.assertEqual(form.is_valid(), True)
    # The form should still have an instance of a model that is not complete and
    # not saved into a DB yet.
    self.assertEqual(form.instance.price, Decimal('6.00'))
    self.assertEqual(form.instance.quantity is None, True)
    self.assertEqual(form.instance.pk is None, True)
    # Choices on CharField and IntegerField
    f = ArticleForm()
    with self.assertRaises(ValidationError):
        f.fields['status'].clean('42')
    f = ArticleStatusForm()
    with self.assertRaises(ValidationError):
        f.fields['status'].clean('z')
def test_foreignkeys_which_use_to_field(self):
    """ModelChoiceField and ModelMultipleChoiceField honour
    ``to_field_name`` (here the ``barcode`` field) when building choices,
    rendering, and cleaning submitted values."""
    apple = Inventory.objects.create(barcode=86, name='Apple')
    Inventory.objects.create(barcode=22, name='Pear')
    core = Inventory.objects.create(barcode=87, name='Core', parent=apple)
    field = forms.ModelChoiceField(Inventory.objects.all(), to_field_name='barcode')
    # Choice values are the barcodes, not the primary keys.
    self.assertEqual(tuple(field.choices), (
        ('', '---------'),
        (86, 'Apple'),
        (87, 'Core'),
        (22, 'Pear')))
    form = InventoryForm(instance=core)
    self.assertHTMLEqual(six.text_type(form['parent']), '''<select name="parent" id="id_parent">
<option value="">---------</option>
<option value="86" selected="selected">Apple</option>
<option value="87">Core</option>
<option value="22">Pear</option>
</select>''')
    # Submitting the barcode resolves to the matching related object.
    data = model_to_dict(core)
    data['parent'] = '22'
    form = InventoryForm(data=data, instance=core)
    core = form.save()
    self.assertEqual(core.parent.name, 'Pear')
    # NOTE(review): the CategoryForm checks below look unrelated to
    # to_field handling; possibly merged from another test during
    # extraction — confirm against upstream.
    class CategoryForm(forms.ModelForm):
        description = forms.CharField()
        class Meta:
            model = Category
            fields = ['description', 'url']
    self.assertEqual(list(CategoryForm.base_fields),
                     ['description', 'url'])
    self.assertHTMLEqual(six.text_type(CategoryForm()), '''<tr><th><label for="id_description">Description:</label></th><td><input type="text" name="description" id="id_description" /></td></tr>
<tr><th><label for="id_url">The URL:</label></th><td><input id="id_url" type="text" name="url" maxlength="40" /></td></tr>''')
    # to_field_name should also work on ModelMultipleChoiceField ##################
    field = forms.ModelMultipleChoiceField(Inventory.objects.all(), to_field_name='barcode')
    self.assertEqual(tuple(field.choices), ((86, 'Apple'), (87, 'Core'), (22, 'Pear')))
    self.assertQuerysetEqual(field.clean([86]), ['Apple'])
    form = SelectInventoryForm({'items': [87, 22]})
    self.assertEqual(form.is_valid(), True)
    self.assertEqual(len(form.cleaned_data), 1)
    self.assertQuerysetEqual(form.cleaned_data['items'], ['Core', 'Pear'])
def test_model_field_that_returns_none_to_exclude_itself_with_explicit_fields(self):
    """A model field whose ``formfield()`` returns None excludes itself
    from the form even when listed explicitly in Meta.fields."""
    # Only 'name' survives; the custom field contributed no form field.
    self.assertEqual(list(CustomFieldForExclusionForm.base_fields),
                     ['name'])
    self.assertHTMLEqual(six.text_type(CustomFieldForExclusionForm()),
                         '''<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="10" /></td></tr>''')
def test_iterable_model_m2m(self):
    """A ManyToManyField renders its related objects as <option> elements
    in a multiple-select widget."""
    colour = Colour.objects.create(name='Blue')
    form = ColourfulItemForm()
    # Raise the diff limit so the full HTML comparison is reported.
    self.maxDiff = 1024
    self.assertHTMLEqual(
        form.as_p(),
        """<p><label for="id_name">Name:</label> <input id="id_name" type="text" name="name" maxlength="50" /></p>
<p><label for="id_colours">Colours:</label> <select multiple="multiple" name="colours" id="id_colours">
<option value="%(blue_pk)s">Blue</option>
</select> <span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span></p>"""
        % {'blue_pk': colour.pk})
def test_custom_error_messages(self):
    """Custom error messages defined on the form and on the model are
    each rendered for their respective fields."""
    data = {'name1': '@#$!!**@#$', 'name2': '@#$!!**@#$'}
    errors = CustomErrorMessageForm(data).errors
    # Form-level error_messages wins for name1...
    self.assertHTMLEqual(
        str(errors['name1']),
        '<ul class="errorlist"><li>Form custom error message.</li></ul>'
    )
    # ...while name2 falls back to the model-level message.
    self.assertHTMLEqual(
        str(errors['name2']),
        '<ul class="errorlist"><li>Model custom error message.</li></ul>'
    )
def test_model_clean_error_messages(self):
    """Errors raised from ``Model.clean()`` surface on the bound field's
    error list."""
    data = {'name1': 'FORBIDDEN_VALUE', 'name2': 'ABC'}
    errors = CustomErrorMessageForm(data).errors
    self.assertHTMLEqual(
        str(errors['name1']),
        '<ul class="errorlist"><li>Model.clean() error messages.</li></ul>'
    )
class M2mHelpTextTest(TestCase):
    """Tests for ticket #9321."""

    def test_multiple_widgets(self):
        """Help text of different widgets for ManyToManyFields model fields"""
        # The "hold down Control" hint should only appear for widgets that
        # actually need Control/Command-click selection.
        dreaded_help_text = '<span class="helptext"> Hold down "Control", or "Command" on a Mac, to select more than one.</span>'
        # Default widget (SelectMultiple):
        std_form = StatusNoteForm()
        self.assertInHTML(dreaded_help_text, std_form.as_p())
        # Overridden widget (CheckboxSelectMultiple, a subclass of
        # SelectMultiple but with a UI that doesn't involve Control/Command
        # keystrokes to extend selection):
        form = StatusNoteCBM2mForm()
        html = form.as_p()
        self.assertInHTML('<ul id="id_status">', html)
        # count=0 asserts the hint is absent for the checkbox widget.
        self.assertInHTML(dreaded_help_text, html, count=0)
class ModelFormInheritanceTests(TestCase):
    """Field resolution when ModelForms participate in (multiple)
    inheritance with plain Forms and mixins."""

    def test_form_subclass_inheritance(self):
        """Fields declared on a plain Form base class are inherited by a
        ModelForm subclass alongside the model-derived fields."""
        class Form(forms.Form):
            age = forms.IntegerField()

        class ModelForm(forms.ModelForm, Form):
            class Meta:
                model = Writer
                fields = '__all__'

        self.assertEqual(list(ModelForm().fields.keys()), ['name', 'age'])

    def test_field_shadowing(self):
        """Setting an inherited field name to ``None`` (via a mixin or the
        class body) removes that field, following the MRO."""
        class ModelForm(forms.ModelForm):
            class Meta:
                model = Writer
                fields = '__all__'

        class Mixin(object):
            age = None

        class Form(forms.Form):
            age = forms.IntegerField()

        class Form2(forms.Form):
            foo = forms.IntegerField()

        self.assertEqual(list(ModelForm().fields.keys()), ['name'])
        # Mixin.age = None shadows the 'age' field contributed by Form.
        self.assertEqual(list(type(str('NewForm'), (Mixin, Form), {})().fields.keys()), [])
        self.assertEqual(list(type(str('NewForm'), (Form2, Mixin, Form), {})().fields.keys()), ['foo'])
        self.assertEqual(list(type(str('NewForm'), (Mixin, ModelForm, Form), {})().fields.keys()), ['name'])
        self.assertEqual(list(type(str('NewForm'), (ModelForm, Mixin, Form), {})().fields.keys()), ['name'])
        self.assertEqual(list(type(str('NewForm'), (ModelForm, Form, Mixin), {})().fields.keys()), ['name', 'age'])
        # 'age': None in the class body itself also removes the field.
        self.assertEqual(list(type(str('NewForm'), (ModelForm, Form), {'age': None})().fields.keys()), ['name'])
| {
"content_hash": "2015f137442ce3149d18180124edc4d5",
"timestamp": "",
"source": "github",
"line_count": 1879,
"max_line_length": 219,
"avg_line_length": 41.68653539116551,
"alnum_prop": 0.6016290262865606,
"repo_name": "yceruto/django",
"id": "bf7b3d296aa1ff1f10e9070016938791aab6995f",
"size": "78329",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tests/model_forms/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "51177"
},
{
"name": "JavaScript",
"bytes": "102290"
},
{
"name": "Python",
"bytes": "9171078"
},
{
"name": "Shell",
"bytes": "12137"
}
],
"symlink_target": ""
} |
import tornado.web
from .handlers.ping import PingHandler
from .handlers.run import RunHandler
def get_application(**settings):
    """Build the Tornado application with this service's URL routes.

    Any keyword arguments are forwarded to ``tornado.web.Application``
    as application settings.
    """
    routes = [
        (r"/ping", PingHandler),
        (r"/api/v1/run", RunHandler),
    ]
    return tornado.web.Application(routes, **settings)
| {
"content_hash": "a7fae2876e9d895ef1985e488d8aba3f",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 38,
"avg_line_length": 23.454545454545453,
"alnum_prop": 0.6782945736434108,
"repo_name": "asimihsan/vocalsalad",
"id": "d782b0f2acb1ad40843a1d277b0847695213e87f",
"size": "258",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vocalsalad/app.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Puppet",
"bytes": "2922"
},
{
"name": "Python",
"bytes": "6512"
},
{
"name": "Shell",
"bytes": "1390"
}
],
"symlink_target": ""
} |
"""Create hybrid foil coefficient dataset from Sheldahl and Jacobs data."""
from __future__ import division, print_function
import numpy as np
import pandas as pd
from scipy.stats import linregress
# Default CACTUS 0021 data has zero for all moment coeffs
zero_cms = True
# Whether or not to calculate lift slopes
calc_lift_slopes = False
# Whether or not to calculate LB DS model critical lift coefficients
calc_lb_crit_cls = False
# Whether or not to calculate BV DS model stall angles
calc_bv_stall_angles = False
# Offset for BV DS model stall angles versus default (deg)
bv_stall_angle_offset = 1.5
header = \
"""Title: NACA0021
Thickness to Chord Ratio: 0.21
Zero Lift AOA (deg): 0.0
Reverse Camber Direction: 0"""
subheader = \
"""Reynolds Number: {re}
BV Dyn. Stall Model - Positive Stall AOA (deg): {bv_pos_stall_angle}
BV Dyn. Stall Model - Negative Stall AOA (deg): {bv_nev_stall_angle}
LB Dyn. Stall Model - Lift Coeff. Slope at Zero Lift AOA (per radian): {lb_lift_coeff_slope}
LB Dyn. Stall Model - Positive Critical Lift Coeff.: {lb_pos_crit_cl}
LB Dyn. Stall Model - Negative Critical Lift Coeff.: {lb_neg_crit_cl}
AOA (deg) CL CD Cm25"""
re_list = ["8.3e4", "1.6e5", "3.8e5"]
# Default parameters from CACTUS input data (all but lift slopes are replaced)
bv_stall_angles = {"8.3e4": 4.0, "1.6e5": 5.0, "3.8e5": 5.0}
lb_lift_slopes = {"8.3e4": 5.277, "1.6e5": 5.371, "3.8e5": 6.303}
lb_crit_cls = {"8.3e4": 0.829, "1.6e5": 1.031, "3.8e5": 1.32}
# Manually add to BV stall angles
for re in bv_stall_angles:
bv_stall_angles[re] += bv_stall_angle_offset
def calc_lift_slope(df, alpha_max=9.0):
    """Calculate lift coefficient slope per unit radian using a linear
    regression.

    Parameters
    ----------
    df : pandas.DataFrame
        Must contain ``alpha_deg`` (angle of attack, degrees) and ``cl``
        (lift coefficient) columns.
    alpha_max : float
        Upper bound (degrees) of the assumed-linear region to fit.

    Returns
    -------
    float
        Slope of ``cl`` versus angle of attack in radians.
    """
    # Restrict the fit to the linear (pre-stall) region 0 <= alpha <= alpha_max.
    # Fix: the original filtered in two steps and then assigned a new column
    # on the filtered slice, which both mutates data and triggers pandas'
    # chained-assignment (SettingWithCopy) warning. Compute the radian
    # abscissa as a standalone array instead.
    sel = df[(df.alpha_deg >= 0) & (df.alpha_deg <= alpha_max)]
    alpha_rad = np.deg2rad(sel.alpha_deg)
    # Only the slope of the regression is needed.
    slope = linregress(alpha_rad, sel.cl)[0]
    return slope
def detect_ss_angle(df, threshold=0.03):
    """Detect static stall angle from input DataFrame by finding where the
    change in `cd` per degree `alpha_deg` reaches `threshold`.

    Should be run on the Sheldahl data since Jacobs does not contain `cd`.

    Parameters
    ----------
    df : pandas.DataFrame
        Must contain ``alpha_deg`` and ``cd`` columns.
    threshold : float
        Minimum d(cd)/d(alpha) (per degree) that marks the drag rise.

    Returns
    -------
    Angle of attack (degrees) at the start of the first interval whose
    drag slope meets the threshold, searched within 2 < alpha < 40 deg.

    Raises
    ------
    IndexError
        If the threshold is never reached in the search window.
    """
    # Fix: the original assigned df["alpha"] on the object passed in,
    # mutating the caller's DataFrame as a side effect. Filter without
    # mutating, and work on plain arrays.
    sel = df[(df.alpha_deg > 2) & (df.alpha_deg < 40)]
    alpha = sel.alpha_deg.values
    dcd_dalpha = np.diff(sel.cd.values) / np.diff(alpha)
    # Index of the first interval with a steep enough drag rise.
    i = np.where(dcd_dalpha >= threshold)[0][0]
    return alpha[i]
def calc_crit_cl(df, re, fcrit=0.7, alpha1_fraction=0.87):
    """Calculate critical lift coefficient for Leishman--Beddoes model.

    Code from turbinesFoam:

        CN1_ = CNAlpha_*alpha1_*pow((1.0 + sqrt(f))/2.0, 2);

    Technically this returns the critical normal force coefficient.

    Parameters
    ----------
    df : pandas.DataFrame
        Sheldahl coefficient data with ``alpha_deg`` and ``cd`` columns
        (consumed by ``detect_ss_angle``).
    re : str
        Reynolds number key into the module-level ``lb_lift_slopes`` dict.
    fcrit : float
        Critical separation point value ``f``.
    alpha1_fraction : float
        Fraction of the detected static stall angle used as ``alpha1``.
    """
    # Fix: the original also wrote an unused ``alpha_rad`` column into the
    # caller's DataFrame; that dead side effect has been removed.
    # Breakpoint angle alpha1 (radians) as a fraction of the static stall
    # angle detected from the drag rise.
    alpha1 = np.deg2rad(alpha1_fraction*detect_ss_angle(df))
    # Use existing lift slopes
    cn_alpha = lb_lift_slopes[re]
    cn1 = cn_alpha*alpha1*((1.0 + np.sqrt(fcrit))/2.0)**2
    return cn1
# Create empty dictionary for DataFrames
dfs = {}
# Load Jacobs data and mirror about zero angle of attack
# (Jacobs only tabulates non-negative alpha; cl is odd in alpha for a
# symmetric foil, so negate both to fill in the negative side).
for re in re_list:
    df = pd.read_csv("config/foildata/NACA_0021_Jacobs_{}.csv".format(re))
    df = df[df.alpha >= 0.0]
    alpha = np.append(-np.flipud(df.alpha), df.alpha)
    cl = np.append(-np.flipud(df.cl), df.cl)
    df = pd.DataFrame()
    df["alpha_deg"] = alpha
    df["cl"] = cl
    dfs[re] = df
# Fill in Jacobs C_d and C_m data from Sheldahl, interpolating to the Jacobs
# AoAs
for re in re_list:
    df = dfs[re]
    df_sh = pd.read_csv("config/foildata/NACA_0021_Sheldahl_{}.csv".format(re))
    df["cd"] = np.interp(df.alpha_deg, df_sh.alpha_deg, df_sh.cd)
    df["cm"] = np.interp(df.alpha_deg, df_sh.alpha_deg, df_sh.cm)
    # Replace all Sheldahl data with Jacobs in its AoA range
    # (keep Sheldahl rows only outside the Jacobs AoA coverage).
    df_sh_save_pos = df_sh[df_sh.alpha_deg > df.alpha_deg.max()]
    df_sh_save_neg = df_sh[df_sh.alpha_deg < df.alpha_deg.min()]
    df = df_sh_save_neg.append(df, ignore_index=True)
    df = df.append(df_sh_save_pos, ignore_index=True)
    dfs[re] = df
    # Calculate lift slope
    if calc_lift_slopes:
        lb_lift_slopes[re] = calc_lift_slope(df)
    # Calculate critical normal force coefficients and use as critical `cl`
    if calc_lb_crit_cls:
        lb_crit_cls[re] = calc_crit_cl(df_sh, re)
    # Detect static stall angles for BV model
    if calc_bv_stall_angles:
        bv_stall_angles[re] = detect_ss_angle(df_sh)
# Write final text file in correct format:
# file header, then one subheader + tab-separated coefficient table per
# Reynolds number.
txt = header + "\n\n"
for re in re_list:
    txt += subheader.format(re=re, bv_pos_stall_angle=bv_stall_angles[re],
                            bv_nev_stall_angle=bv_stall_angles[re],
                            lb_lift_coeff_slope=lb_lift_slopes[re],
                            lb_pos_crit_cl=lb_crit_cls[re],
                            lb_neg_crit_cl=lb_crit_cls[re]) + "\n"
    df = dfs[re]
    if zero_cms:
        # CACTUS default 0021 data carries zero moment coefficients.
        df.cm *= 0.0
    for alpha_deg, cl, cd, cm in zip(df.alpha_deg, df.cl, df.cd, df.cm):
        txt += str(alpha_deg) + "\t" + str(cl) + "\t" + str(cd) + "\t" + str(cm)
        txt += "\n"
    txt += "\n"
with open("config/foildata/NACA_0021_Jacobs.dat", "w") as f:
    f.write(txt)
| {
"content_hash": "15fc18110a9910600d92c493d8b5180b",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 92,
"avg_line_length": 35.26712328767123,
"alnum_prop": 0.6407069333851233,
"repo_name": "UNH-CORE/RM2-CACTUS",
"id": "76a0ad9a6d792c16b86d251049c7131cf557690e",
"size": "5171",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "scripts/jacobs-data.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "272"
},
{
"name": "Matlab",
"bytes": "2506"
},
{
"name": "Python",
"bytes": "39425"
},
{
"name": "Shell",
"bytes": "910"
}
],
"symlink_target": ""
} |
from django.core.exceptions import ObjectDoesNotExist
from djblets.util.decorators import augment_method_from
from djblets.webapi.decorators import (webapi_login_required,
webapi_response_errors,
webapi_request_fields)
from djblets.webapi.errors import (DOES_NOT_EXIST, INVALID_FORM_DATA,
NOT_LOGGED_IN, PERMISSION_DENIED)
from djblets.webapi.fields import IntFieldType
from reviewboard.reviews.models import Screenshot
from reviewboard.webapi.decorators import webapi_check_local_site
from reviewboard.webapi.resources import resources
from reviewboard.webapi.resources.base_screenshot_comment import \
BaseScreenshotCommentResource
class ReviewScreenshotCommentResource(BaseScreenshotCommentResource):
    """Provides information on screenshot comments made on a review.

    If the review is a draft, then comments can be added, deleted, or
    changed on this list. However, if the review is already published,
    then no changes can be made.
    """
    allowed_methods = ('GET', 'POST', 'PUT', 'DELETE')
    policy_id = 'review_screenshot_comment'
    model_parent_key = 'review'
    uri_template_name_plural = None

    def get_queryset(self, request, review_id, *args, **kwargs):
        # Restrict the base queryset to comments belonging to the review
        # identified in the URL.
        q = super(ReviewScreenshotCommentResource, self).get_queryset(
            request, *args, **kwargs)
        return q.filter(review=review_id)

    @webapi_check_local_site
    @webapi_login_required
    @webapi_request_fields(
        required=dict({
            'screenshot_id': {
                'type': IntFieldType,
                'description': 'The ID of the screenshot being commented on.',
            },
            'x': {
                'type': IntFieldType,
                'description': 'The X location for the comment.',
            },
            'y': {
                'type': IntFieldType,
                'description': 'The Y location for the comment.',
            },
            'w': {
                'type': IntFieldType,
                'description': 'The width of the comment region.',
            },
            'h': {
                'type': IntFieldType,
                'description': 'The height of the comment region.',
            },
        }, **BaseScreenshotCommentResource.REQUIRED_CREATE_FIELDS),
        optional=BaseScreenshotCommentResource.OPTIONAL_CREATE_FIELDS,
        allow_unknown=True,
    )
    def create(self, request, screenshot_id, *args, **kwargs):
        """Creates a screenshot comment on a review.

        This will create a new comment on a screenshot as part of a review.
        The comment contains text and dimensions for the area being commented
        on.

        Extra data can be stored for later lookup. See
        :ref:`webapi2.0-extra-data` for more information.
        """
        try:
            review_request = \
                resources.review_request.get_object(request, *args, **kwargs)
            review = resources.review.get_object(request, *args, **kwargs)
        except ObjectDoesNotExist:
            return DOES_NOT_EXIST

        # Only users allowed to modify the review may comment through it.
        if not resources.review.has_modify_permissions(request, review):
            return self.get_no_access_error(request)

        try:
            # The screenshot must belong to the same review request.
            screenshot = Screenshot.objects.get(pk=screenshot_id,
                                                review_request=review_request)
        except ObjectDoesNotExist:
            return INVALID_FORM_DATA, {
                'fields': {
                    'screenshot_id': ['This is not a valid screenshot ID'],
                }
            }

        return self.create_comment(
            review=review,
            comments_m2m=review.screenshot_comments,
            screenshot=screenshot,
            fields=('screenshot', 'x', 'y', 'w', 'h'),
            **kwargs)

    @webapi_check_local_site
    @webapi_login_required
    @webapi_response_errors(DOES_NOT_EXIST, NOT_LOGGED_IN, PERMISSION_DENIED)
    @webapi_request_fields(
        optional=dict({
            'x': {
                'type': IntFieldType,
                'description': 'The X location for the comment.',
            },
            'y': {
                'type': IntFieldType,
                'description': 'The Y location for the comment.',
            },
            'w': {
                'type': IntFieldType,
                'description': 'The width of the comment region.',
            },
            'h': {
                'type': IntFieldType,
                'description': 'The height of the comment region.',
            },
        }, **BaseScreenshotCommentResource.OPTIONAL_UPDATE_FIELDS),
        allow_unknown=True
    )
    def update(self, request, *args, **kwargs):
        """Updates a screenshot comment.

        This can update the text or region of an existing comment. It
        can only be done for comments that are part of a draft review.

        Extra data can be stored for later lookup. See
        :ref:`webapi2.0-extra-data` for more information.
        """
        try:
            resources.review_request.get_object(request, *args, **kwargs)
            review = resources.review.get_object(request, *args, **kwargs)
            screenshot_comment = self.get_object(request, *args, **kwargs)
        except ObjectDoesNotExist:
            return DOES_NOT_EXIST

        return self.update_comment(request=request,
                                   review=review,
                                   comment=screenshot_comment,
                                   update_fields=('x', 'y', 'w', 'h'),
                                   **kwargs)

    @webapi_check_local_site
    @augment_method_from(BaseScreenshotCommentResource)
    def delete(self, *args, **kwargs):
        """Deletes the comment.

        This will remove the comment from the review. This cannot be undone.
        Only comments on draft reviews can be deleted. Attempting to delete
        a published comment will return a Permission Denied error.

        Instead of a payload response on success, this will return :http:`204`.
        """
        # Behavior is inherited from the base resource via
        # augment_method_from; this override only supplies documentation.
        pass

    @webapi_check_local_site
    @augment_method_from(BaseScreenshotCommentResource)
    def get_list(self, *args, **kwargs):
        """Returns the list of screenshot comments made on a review."""
        # Behavior inherited from the base resource; docstring only.
        pass


# Singleton instance registered with the web API.
review_screenshot_comment_resource = ReviewScreenshotCommentResource()
| {
"content_hash": "6e6490fde56e06310c26126b2ee4b1cd",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 79,
"avg_line_length": 38.626506024096386,
"alnum_prop": 0.5828134747348721,
"repo_name": "reviewboard/reviewboard",
"id": "4c9097a471f41629ebdd9369e6ad8ee686f930c6",
"size": "6412",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "reviewboard/webapi/resources/review_screenshot_comment.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "10167"
},
{
"name": "Dockerfile",
"bytes": "7721"
},
{
"name": "HTML",
"bytes": "226489"
},
{
"name": "JavaScript",
"bytes": "3991608"
},
{
"name": "Less",
"bytes": "438017"
},
{
"name": "Python",
"bytes": "9186415"
},
{
"name": "Shell",
"bytes": "3855"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration: create the initial tables for the 'pa' app,
        including the M2M join tables for Participant.user, User.groups and
        User.user_permissions."""
        # Adding model 'ReportingPeriod'
        db.create_table(u'pa_reportingperiod', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=120)),
            ('start_date', self.gf('django.db.models.fields.DateTimeField')()),
            ('end_date', self.gf('django.db.models.fields.DateTimeField')()),
            ('slots_per_hour', self.gf('django.db.models.fields.IntegerField')()),
        ))
        db.send_create_signal(u'pa', ['ReportingPeriod'])

        # Adding model 'Category'
        db.create_table(u'pa_category', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('reporting_period', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['pa.ReportingPeriod'])),
            ('description', self.gf('django.db.models.fields.CharField')(max_length=200)),
            ('grouping', self.gf('django.db.models.fields.CharField')(default='d', max_length=15)),
        ))
        db.send_create_signal(u'pa', ['Category'])

        # Adding model 'Activity'
        db.create_table(u'pa_activity', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['pa.Category'])),
            ('description', self.gf('django.db.models.fields.CharField')(max_length=200)),
        ))
        db.send_create_signal(u'pa', ['Activity'])

        # Adding model 'ActivityEntry'
        db.create_table(u'pa_activityentry', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('day', self.gf('django.db.models.fields.CharField')(max_length=10)),
            ('hour', self.gf('django.db.models.fields.IntegerField')()),
            ('slot', self.gf('django.db.models.fields.IntegerField')()),
            ('activity', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['pa.Activity'])),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['pa.User'])),
        ))
        db.send_create_signal(u'pa', ['ActivityEntry'])

        # Adding model 'Profession'
        db.create_table(u'pa_profession', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=60)),
        ))
        db.send_create_signal(u'pa', ['Profession'])

        # Adding model 'Participant'
        db.create_table(u'pa_participant', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('reporting_period', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['pa.ReportingPeriod'])),
        ))
        db.send_create_signal(u'pa', ['Participant'])

        # Adding M2M table for field user on 'Participant'
        db.create_table(u'pa_participant_user', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('participant', models.ForeignKey(orm[u'pa.participant'], null=False)),
            ('user', models.ForeignKey(orm[u'pa.user'], null=False))
        ))
        db.create_unique(u'pa_participant_user', ['participant_id', 'user_id'])

        # Adding model 'User' (custom user model mirroring Django's
        # AbstractUser fields plus an optional Profession FK).
        db.create_table(u'pa_user', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('password', self.gf('django.db.models.fields.CharField')(max_length=128)),
            ('last_login', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
            ('is_superuser', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('username', self.gf('django.db.models.fields.CharField')(unique=True, max_length=30)),
            ('first_name', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
            ('last_name', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
            ('email', self.gf('django.db.models.fields.EmailField')(max_length=75, blank=True)),
            ('is_staff', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('is_active', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('date_joined', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
            ('profession', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['pa.Profession'], null=True, blank=True)),
        ))
        db.send_create_signal(u'pa', ['User'])

        # Adding M2M table for field groups on 'User'
        db.create_table(u'pa_user_groups', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('user', models.ForeignKey(orm[u'pa.user'], null=False)),
            ('group', models.ForeignKey(orm[u'auth.group'], null=False))
        ))
        db.create_unique(u'pa_user_groups', ['user_id', 'group_id'])

        # Adding M2M table for field user_permissions on 'User'
        db.create_table(u'pa_user_user_permissions', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('user', models.ForeignKey(orm[u'pa.user'], null=False)),
            ('permission', models.ForeignKey(orm[u'auth.permission'], null=False))
        ))
        db.create_unique(u'pa_user_user_permissions', ['user_id', 'permission_id'])
def backwards(self, orm):
    """Reverse the initial migration by dropping every table it created.

    Tables are removed in the same grouping they were created in; the
    M2M join tables are dropped alongside their owning model's table.
    """
    # Deleting model 'ReportingPeriod'
    db.delete_table(u'pa_reportingperiod')
    # Deleting model 'Category'
    db.delete_table(u'pa_category')
    # Deleting model 'Activity'
    db.delete_table(u'pa_activity')
    # Deleting model 'ActivityEntry'
    db.delete_table(u'pa_activityentry')
    # Deleting model 'Profession'
    db.delete_table(u'pa_profession')
    # Deleting model 'Participant'
    db.delete_table(u'pa_participant')
    # Removing M2M table for field user on 'Participant'
    db.delete_table('pa_participant_user')
    # Deleting model 'User'
    db.delete_table(u'pa_user')
    # Removing M2M table for field groups on 'User'
    db.delete_table('pa_user_groups')
    # Removing M2M table for field user_permissions on 'User'
    db.delete_table('pa_user_user_permissions')
# Frozen ORM state captured by South when this migration was generated.
# Do not edit by hand: South compares this snapshot against later
# migrations to compute schema changes.
models = {
    u'auth.group': {
        'Meta': {'object_name': 'Group'},
        u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
        'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
    },
    u'auth.permission': {
        'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
        'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
        u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
    },
    u'contenttypes.contenttype': {
        'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
        'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
    },
    u'pa.activity': {
        'Meta': {'object_name': 'Activity'},
        'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['pa.Category']"}),
        'description': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
        u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
    },
    u'pa.activityentry': {
        'Meta': {'object_name': 'ActivityEntry'},
        'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['pa.Activity']"}),
        'day': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
        'hour': ('django.db.models.fields.IntegerField', [], {}),
        u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'slot': ('django.db.models.fields.IntegerField', [], {}),
        'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['pa.User']"})
    },
    u'pa.category': {
        'Meta': {'object_name': 'Category'},
        'description': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
        'grouping': ('django.db.models.fields.CharField', [], {'default': "'d'", 'max_length': '15'}),
        u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'reporting_period': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['pa.ReportingPeriod']"})
    },
    u'pa.participant': {
        'Meta': {'object_name': 'Participant'},
        u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'reporting_period': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['pa.ReportingPeriod']"}),
        'user': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['pa.User']", 'symmetrical': 'False'})
    },
    u'pa.profession': {
        'Meta': {'object_name': 'Profession'},
        u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'name': ('django.db.models.fields.CharField', [], {'max_length': '60'})
    },
    u'pa.reportingperiod': {
        'Meta': {'object_name': 'ReportingPeriod'},
        'end_date': ('django.db.models.fields.DateTimeField', [], {}),
        u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'name': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
        'slots_per_hour': ('django.db.models.fields.IntegerField', [], {}),
        'start_date': ('django.db.models.fields.DateTimeField', [], {})
    },
    u'pa.user': {
        'Meta': {'object_name': 'User'},
        'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
        'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
        'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
        'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
        u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
        'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
        'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
        'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
        'profession': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['pa.Profession']", 'null': 'True', 'blank': 'True'}),
        'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
        'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
    }
}
complete_apps = ['pa'] | {
"content_hash": "3ae4115014b416cbb49d12bea5ef0f72",
"timestamp": "",
"source": "github",
"line_count": 217,
"max_line_length": 187,
"avg_line_length": 57.91244239631337,
"alnum_prop": 0.5756345985517626,
"repo_name": "Mathew/psychoanalysis",
"id": "696b8d5235b1a8395663cfa5a23fbb4d6a44abda",
"size": "12591",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "psychoanalysis/apps/pa/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "108499"
},
{
"name": "Python",
"bytes": "56205"
},
{
"name": "Ruby",
"bytes": "52320"
},
{
"name": "Shell",
"bytes": "311"
}
],
"symlink_target": ""
} |
import bs4
import config
import re
import requests
import sqlite3
from datetime import datetime
from uuid import UUID, uuid4
# Path of the SQLite database file shared by the Users and Chat classes.
PATH = "chat.sqlite"
class Show(object):
    """A single show/concert entry parsed from a listing string.

    The string must begin with a ``DD.MM.YYYY`` date token, e.g.
    ``"24.12.2017 Some venue"``.
    """

    def __init__(self, show_str):
        # Keep the cleaned-up original text for display.
        self.show_str = show_str.strip()
        date_token = self.show_str.split()[0]
        self.date = datetime.strptime(date_token, '%d.%m.%Y')

    def is_upcoming(self):
        """Return True when the show happens today or later."""
        today = datetime.today().date()
        return self.date.date() >= today

    def __str__(self):
        return self.show_str
class Tweet(object):
    """Latest tweet of the configured guest account.

    Scrapes the mobile Twitter profile page at construction time, so
    creating a Tweet performs a network request.
    """

    def __init__(self):
        url = "https://mobile.twitter.com/{0}"
        profile_url = url.format(config.GUEST_TWITTER_NAME)
        markup = requests.get(profile_url).text
        document = bs4.BeautifulSoup(markup, 'lxml')
        self.text = document.find('div', 'tweet-text').div.text.strip()

    def is_new(self):
        """Return True when the tweet changed since the last call.

        Compares against (and then overwrites) the text cached in the
        ``.guest`` file; opened with "a+" so the file is created on
        first use.
        """
        with open(".guest", "a+") as cache:
            cache.seek(0)
            previous = cache.read()
            cache.seek(0)
            cache.truncate()
            cache.write(self.text)
        return previous != self.text
class SMS(object):
    """Send a text message to a user via the smsout.de HTTP gateway.

    ``url``/``query`` are kept unchanged for backward compatibility with
    any external readers; :meth:`send` now hands the parameters to
    ``requests`` so they are URL-encoded properly.
    """

    url = "https://www.smsout.de/client/sendsms.php"
    query = "?Username={0}&Password={1}&SMSTo={2}&SMSType=V1&SMSText={3}"

    def __init__(self, sender, recipient, text):
        self.sender = sender        # User whose username prefixes the SMS text
        self.recipient = recipient  # User; must provide a phone ``number``
        self.text = text

    def send(self):
        """Deliver the SMS; returns the gateway's ``requests`` response."""
        sms_text = "[{0}] {1}".format(self.sender.username, self.text)
        user, password = config.sms_config
        # Build the query via requests' ``params`` instead of string
        # concatenation: the previous implementation broke on message
        # text containing URL-special characters (spaces, '&', '='),
        # and its debug ``print(url)`` leaked the gateway password to
        # stdout.
        return requests.get(self.url, params={
            'Username': user,
            'Password': password,
            'SMSTo': self.recipient.number,
            'SMSType': 'V1',
            'SMSText': sms_text,
        })
class Users(object):
    """Repository of chat users backed by the SQLite database at PATH."""

    # Class-level cache; (re)filled from the database on first use and
    # after each successful insert.
    users = []

    def __init__(self):
        self._connection = sqlite3.connect(PATH)
        self._initialize_database()
        self.users = self.users or self.all()

    def _initialize_database(self):
        """Create the users table if it does not exist yet."""
        sql = """create table if not exists users (
            id integer primary key not null,
            name text not null,
            user_id text not null unique,
            color text not null,
            number text
        )"""
        self._execute(sql)
        self._connection.commit()

    def all(self):
        """Load every stored row and hydrate it into a User object."""
        rows = self._execute("""select * from users""")
        return [User(row[1], row[3], row[4], UUID(row[2])) for row in rows]

    def find_by_name(self, name):
        """Return the user with the given name, or None."""
        return next((u for u in self.users if u.username == name), None)

    def find_by_user_id(self, uuid_str):
        """Return the user whose id matches ``uuid_str``, or None."""
        return next(
            (u for u in self.users if str(u.user_id) == uuid_str), None)

    def exists(self, user):
        return self.find_by_user_id(user.user_id) is not None

    def insert(self, user):
        """Persist ``user``; False when the user_id is already taken."""
        sql = """insert into users
            (name, user_id, color, number)
            values (?, ?, ?, ?)
            """
        row = (user.username, str(user.user_id), user.color, user.number)
        try:
            self._execute(sql, row)
            self._connection.commit()
        except sqlite3.IntegrityError:
            return False
        # update users after insert
        self.users = self.all()
        return True

    def guest(self):
        """Return the guest user, creating it on first access."""
        guest_name = config.GUEST_NAME_SHORT
        guest = self.find_by_name(guest_name)
        if guest is None:
            guest = User(guest_name, 'orange', '', uuid4())
            self.insert(guest)
        return guest

    def alfabot(self):
        return self.find_by_name("alfabot")

    def _execute(self, sql, params=()):
        return self._connection.cursor().execute(sql, params)
class User(object):
    """A chat participant identified by a UUID."""

    def __init__(self, username, color, number, uuid):
        self.username = username
        self.color = color      # display color used by the chat frontend
        self.number = number    # phone number for SMS delivery (may be '')
        self.user_id = uuid

    def exists(self):
        """True when a user with this user_id is already persisted."""
        return Users().exists(self)

    def save(self):
        """Insert this user; False when the user_id is already taken."""
        return Users().insert(self)

    def __str__(self):
        return str(self.user_id)

    def __repr__(self):
        return str(self)

    def __eq__(self, other):
        # NOTE: returns ``other`` itself (falsy) when other is None/falsy,
        # mirroring the original short-circuit semantics.
        return other and (self.user_id == other.user_id)
class Message:
    """A chat message; private when addressed as "@username ...", and
    optionally restricted to a list of users via ``visible_to``."""

    # Matches http(s) URLs so html_text() can turn them into links.
    pattern = re.compile(r"(https?:\/\/[^\s()]+)")
    repl = r'<a href="\g<1>" target="_blank">\g<1></a>'

    def __init__(self, message_text, user, visible_to=None, pk=-1):
        self.user = user
        self.text = message_text
        self.visible_to = visible_to or []
        self.is_private = Message.is_private(self.text)
        self.pk = pk

    def html_text(self):
        """Return the message text with URLs wrapped in anchor tags."""
        return re.sub(self.pattern, self.repl, self.text)

    @staticmethod
    def is_private(message):
        """True when the message addresses an existing user ("@name ...").

        The database-backed user lookup only happens when the text
        actually starts with "@". The previous implementation evaluated
        both operands eagerly, which hit the database for every message
        and raised IndexError for an empty message text
        ("".split()[0]).
        """
        if not message.startswith("@"):
            return False
        recipient_name = message.split()[0][1:]
        return bool(Users().find_by_name(recipient_name))

    def __str__(self):
        return "[{0}] {1} (visible to {2})".format(
            self.user.username,
            self.text, ",".join(map(str, self.visible_to)))

    def is_visible_to(self, user):
        """Public messages are visible to everyone; restricted ones only
        to the users listed in ``visible_to``."""
        return (not self.visible_to) or (user in self.visible_to)

    def to_json(self):
        return """{{"text":"{0}",
            "pk":"{1}",
            "user":"{2}",
            "color":"{3}",
            "private":{4}}}""".format(
            self.text, self.pk, self.user.username,
            self.user.color, str(self.is_private).lower())
class Chat(object):
    """Message store backed by the SQLite database at PATH.

    Usable as a context manager; leaving the ``with`` block commits and
    closes the connection.
    """

    def __init__(self):
        self._connection = sqlite3.connect(PATH)
        self._initialize_database()

    def close(self):
        """Commit pending writes and close the connection."""
        self._connection.commit()
        self._connection.close()

    def _execute(self, sql, params=()):
        return self._connection.cursor().execute(sql, params)

    def _initialize_database(self):
        """Create the chat table if it does not exist yet."""
        sql = """create table if not exists chat (
            id integer primary key not null,
            message text not null,
            user_id text not null,
            visible_to text not null,
            timestamp datetime default current_timestamp
        )"""
        self._execute(sql)
        self._connection.commit()

    def write(self, message):
        """Persist a message; falsy messages are silently ignored."""
        if not message:
            return
        sql = """insert into chat (message, user_id, visible_to)
            values (?, ?, ?)"""
        visible_to = ",".join(map(str, message.visible_to))
        self._execute(sql, (message.text, str(
            message.user.user_id), visible_to))
        self._connection.commit()

    def read(self, limit=-1):
        """Return the ``limit`` newest messages in chronological order.

        A negative limit (the default) returns all messages — SQLite
        treats a negative LIMIT as "no limit".
        """
        sql = """select message, user_id, visible_to, id
            from chat
            order by timestamp desc
            limit (?)"""
        result = self._execute(sql, (limit,)).fetchall()
        return [self._to_message(r) for r in result][::-1]

    def read_latest(self, pk):
        """Return all messages newer than primary key ``pk`` in
        chronological order."""
        sql = """select message, user_id, visible_to, id
            from chat
            where id > (?)
            order by timestamp desc
            """
        result = self._execute(sql, (pk,)).fetchall()
        return [self._to_message(r) for r in result][::-1]

    def remove_bot_messages_for(self, user):
        """Delete alfabot messages whose visibility matches ``user``."""
        sql = """delete from chat
            where user_id = (?) and visible_to like (?);
            """
        alfabot_id = str(Users().alfabot().user_id)
        user_id = str(user.user_id)
        self._execute(sql, (alfabot_id, user_id,))

    def delete_latest_message_of(self, user):
        """Delete the newest message written by ``user``.

        Standard SQLite builds are compiled without
        SQLITE_ENABLE_UPDATE_DELETE_LIMIT, so the previous
        ``DELETE ... ORDER BY ... LIMIT 1`` raised OperationalError;
        select the target row id in a subquery instead (with ``id`` as
        tie-breaker for equal timestamps).
        """
        sql = """delete from chat where id = (
                select id from chat
                where user_id = (?)
                order by timestamp desc, id desc
                limit 1
            )"""
        self._execute(sql, (str(user.user_id),))

    def _to_message(self, record):
        """Hydrate a (message, user_id, visible_to, id) row into Message."""
        message_text = record[0]
        user = Users().find_by_user_id(record[1])
        pk = record[3]
        if not record[2]:
            visible_to = []
        else:
            visible_to = [Users().find_by_user_id(
                uid) for uid in record[2].split(",")]
        return Message(message_text, user, visible_to, pk)

    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        self.close()
| {
"content_hash": "345c77f634a4814d5f821888e8e92e5c",
"timestamp": "",
"source": "github",
"line_count": 292,
"max_line_length": 77,
"avg_line_length": 27.328767123287673,
"alnum_prop": 0.5453634085213033,
"repo_name": "suspectpart/alfachat",
"id": "f321fb36a3d1bb1936e508bbf5affa5ddd2eb986",
"size": "7980",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1067"
},
{
"name": "HTML",
"bytes": "4349"
},
{
"name": "Python",
"bytes": "18899"
}
],
"symlink_target": ""
} |
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class cspolicy_cspolicylabel_binding(base_resource):
    """Binding class showing the cspolicylabel that can be bound to
    cspolicy.
    """

    def __init__(self):
        self._domain = ""
        self._url = ""
        self._priority = 0
        self._hits = 0
        self._labeltype = ""
        self._labelname = ""
        self._policyname = ""
        self.___count = 0

    @property
    def policyname(self):
        """Name of the content switching policy to display. If omitted,
        details of all the policies are displayed. Minimum length = 1.
        """
        return self._policyname

    @policyname.setter
    def policyname(self, policyname):
        self._policyname = policyname

    @property
    def domain(self):
        """The domain name. The string value can range to 63 characters.
        Minimum length = 1.
        """
        return self._domain

    @domain.setter
    def domain(self, domain):
        self._domain = domain

    @property
    def priority(self):
        """Priority of the bound policy (read-only)."""
        return self._priority

    @property
    def labelname(self):
        """Name of the label invoked (read-only)."""
        return self._labelname

    @property
    def hits(self):
        """Total number of hits (read-only)."""
        return self._hits

    @property
    def url(self):
        """URL string matched against the request URL; may contain a
        wildcard. Format: [[prefix] [*]] [.suffix]. Length 1-208.
        """
        return self._url

    @property
    def labeltype(self):
        """The invocation type. Possible values: reqvserver, resvserver,
        policylabel.
        """
        return self._labeltype

    def _get_nitro_response(self, service, response):
        """Convert a nitro response into resource objects; raises
        nitro_exception for error responses."""
        result = service.payload_formatter.string_to_resource(
            cspolicy_cspolicylabel_binding_response, response,
            self.__class__.__name__)
        if result.errorcode != 0:
            # Error code 444 means the session is no longer valid.
            if result.errorcode == 444:
                service.clear_session(self)
            if result.severity:
                if result.severity == "ERROR":
                    raise nitro_exception(
                        result.errorcode, str(result.message),
                        str(result.severity))
            else:
                raise nitro_exception(
                    result.errorcode, str(result.message),
                    str(result.severity))
        return result.cspolicy_cspolicylabel_binding

    def _get_object_name(self):
        """Return the value of the object identifier argument."""
        if self.policyname:
            return str(self.policyname)
        return None

    @classmethod
    def get(cls, service, policyname):
        """Fetch cspolicy_cspolicylabel_binding resources for the given
        policy."""
        resource = cspolicy_cspolicylabel_binding()
        resource.policyname = policyname
        return resource.get_resources(service)

    @classmethod
    def get_filtered(cls, service, policyname, filter_):
        """Fetch a filtered set of cspolicy_cspolicylabel_binding
        resources. Filter string should be in JSON format, e.g.
        "port:80,servicetype:HTTP".
        """
        resource = cspolicy_cspolicylabel_binding()
        resource.policyname = policyname
        option_ = options()
        option_.filter = filter_
        return resource.getfiltered(service, option_)

    @classmethod
    def count(cls, service, policyname):
        """Count cspolicy_cspolicylabel_binding resources configured on
        the NetScaler."""
        resource = cspolicy_cspolicylabel_binding()
        resource.policyname = policyname
        option_ = options()
        option_.count = True
        response = resource.get_resources(service, option_)
        if response:
            return response[0].__dict__['___count']
        return 0

    @classmethod
    def count_filtered(cls, service, policyname, filter_):
        """Count the filtered set of cspolicy_cspolicylabel_binding
        resources. Filter string should be in JSON format, e.g.
        "port:80,servicetype:HTTP".
        """
        resource = cspolicy_cspolicylabel_binding()
        resource.policyname = policyname
        option_ = options()
        option_.count = True
        option_.filter = filter_
        response = resource.getfiltered(service, option_)
        if response:
            return response[0].__dict__['___count']
        return 0

    class Labeltype:
        reqvserver = "reqvserver"
        resvserver = "resvserver"
        policylabel = "policylabel"
class cspolicy_cspolicylabel_binding_response(base_response):
    """Response envelope for cspolicy_cspolicylabel_binding API calls."""

    def __init__(self, length=1):
        self.errorcode = 0
        self.message = ""
        self.severity = ""
        self.sessionid = ""
        # Pre-allocate ``length`` empty binding objects for the payload
        # formatter to fill in.
        self.cspolicy_cspolicylabel_binding = [
            cspolicy_cspolicylabel_binding() for _ in range(length)]
| {
"content_hash": "d8020e077061305d8e05e9e7964d059f",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 219,
"avg_line_length": 28.068292682926828,
"alnum_prop": 0.6958637469586375,
"repo_name": "mahabs/nitro",
"id": "4b79104da37498b8b3a326ff8c89df6d7f52dbfa",
"size": "6368",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nssrc/com/citrix/netscaler/nitro/resource/config/cs/cspolicy_cspolicylabel_binding.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "498"
},
{
"name": "Python",
"bytes": "10647176"
}
],
"symlink_target": ""
} |
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.contrib.auth import get_user_model
from helpdesk.settings import DEFAULT_USER_SETTINGS
def pickle_settings(data):
    """Serialize ``data`` the way UserSettings stores it: pickled and
    then base64-encoded (pickling as defined at this migration's
    creation time).
    """
    import cPickle
    from helpdesk.lib import b64encode
    pickled = cPickle.dumps(data)
    return b64encode(pickled)
# https://docs.djangoproject.com/en/1.7/topics/migrations/#data-migrations
def populate_usersettings(orm):
    """Create a UserSettings entry for each existing user.

    This will only happen once (at install time, or at upgrade) when the
    UserSettings model doesn't already exist.
    """
    user_model = get_user_model()
    # Use the historical (frozen) versions of the models from ``orm``.
    User = orm["{0}.{1}".format(user_model._meta.app_label,
                                user_model._meta.model_name)]
    UserSettings = orm["helpdesk.UserSettings"]
    settings_pickled = pickle_settings(DEFAULT_USER_SETTINGS)
    for user in User.objects.all():
        try:
            UserSettings.objects.get(user=user)
        except UserSettings.DoesNotExist:
            UserSettings.objects.create(
                user=user, settings_pickled=settings_pickled)
class Migration(DataMigration):
    """Data migration: give every existing user a UserSettings row."""

    def forwards(self, orm):
        # Create default settings for users that don't have any yet.
        populate_usersettings(orm)

    def backwards(self, orm):
        # Nothing to undo; leaving the created UserSettings rows in
        # place is harmless.
        pass

    # Frozen ORM state captured by South; do not edit by hand.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'helpdesk.usersettings': {
            'Meta': {'object_name': 'UserSettings'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'settings_pickled': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'})
        }
    }

    complete_apps = ['helpdesk']
    symmetrical = True
| {
"content_hash": "d97625949ec6abf67190fda7bef3225b",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 195,
"avg_line_length": 56.157303370786515,
"alnum_prop": 0.5916366546618648,
"repo_name": "harrisonfeng/django-helpdesk",
"id": "8b0514b283e76ed117259ba00cb8275744291bb5",
"size": "5022",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "helpdesk/south_migrations/0011_populate_usersettings.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "5926"
},
{
"name": "HTML",
"bytes": "108212"
},
{
"name": "JavaScript",
"bytes": "42249"
},
{
"name": "Python",
"bytes": "457805"
},
{
"name": "Shell",
"bytes": "708"
}
],
"symlink_target": ""
} |
from django.contrib import messages
from django.conf import settings
from django.core.urlresolvers import reverse, NoReverseMatch
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response
from django.views.decorators.csrf import csrf_exempt
from django.template import RequestContext
from cccheckout import settings as c_settings
from cccheckout.models import RequestResponse
from cccheckout.payments.stripe.api import StripeAPI
@csrf_exempt
def mock_api(request):
    """A mock of the Stripe charge API, used for testing only.

    Raises Http404 unless ``STRIPE_MODE`` is 'TEST'; the payload and
    status code returned are selected via ``STRIPE_OUTCOME``.

    Fixes over the previous version: the view parameter was misleadingly
    named ``self`` (Django passes the request positionally, so renaming
    it is call-compatible), and the final return used the long-removed
    ``mimetype=`` keyword while every other branch used ``content_type=``.
    """
    if c_settings.STRIPE_MODE != 'TEST':
        raise Http404()
    # All error outcomes share the same payload; only the status differs.
    error_statuses = {
        'APIERROR': 500,
        'CARDERROR': 402,
        'AUTHENTICATIONERROR': 401,
        'INVALIDREQUEST_404': 404,
        'INVALIDREQUEST_400': 400,
    }
    if c_settings.STRIPE_OUTCOME in error_statuses:
        response = """{"error": {"message": "test error"}}"""
        return HttpResponse(response,
            status=error_statuses[c_settings.STRIPE_OUTCOME],
            content_type='application/json')
    # if it was marked as unpaid
    if c_settings.STRIPE_OUTCOME == 'UNPAID':
        response = """{ "livemode": false,
            "failure_message": null,
            "object": "charge",
            "paid": false,
            "currency": "usd",
            "fee": 0,
            "customer": "cus_RTwBTDaZN2rvjW",
            "refunded": false,
            "card": {
                "address_line1_check": null,
                "type": "Visa",
                "exp_year": 2023,
                "address_zip": null,
                "fingerprint": "kEgDzHaae99yUzmC",
                "address_line1": null,
                "country": "US",
                "last4": "0027",
                "object": "card",
                "address_line2": null,
                "cvc_check": "pass",
                "address_country": null,
                "name": null,
                "address_zip_check": null,
                "id": "cc_ZG7XL95fRse7Rj",
                "address_state": null,
                "exp_month": 1
            },
            "invoice": null,
            "description": "Recurring billing",
            "id": "ch_Ke93RwOz28j0Eq",
            "amount_refunded": 0,
            "created": 1337061730,
            "amount": 63,
            "disputed": false }"""
    if c_settings.STRIPE_OUTCOME == 'PASS':
        response = """{ "livemode": false,
            "failure_message": null,
            "object": "charge",
            "paid": true,
            "currency": "usd",
            "fee": 0,
            "customer": "cus_RTwBTDaZN2rvjW",
            "refunded": false,
            "card": {
                "address_line1_check": null,
                "type": "Visa",
                "exp_year": 2023,
                "address_zip": null,
                "fingerprint": "kEgDzHaae99yUzmC",
                "address_line1": null,
                "country": "US",
                "last4": "0027",
                "object": "card",
                "address_line2": null,
                "cvc_check": "pass",
                "address_country": null,
                "name": null,
                "address_zip_check": null,
                "id": "cc_ZG7XL95fRse7Rj",
                "address_state": null,
                "exp_month": 1
            },
            "invoice": null,
            "description": "Recurring billing",
            "id": "ch_Ke93RwOz28j0Eq",
            "amount_refunded": 0,
            "created": 1337061730,
            "amount": 63,
            "disputed": false }"""
    # NOTE(review): any STRIPE_OUTCOME other than the ones above leaves
    # ``response`` unbound and raises NameError, exactly as the original
    # code did — it signals a misconfigured test rather than a real case.
    return HttpResponse(response, content_type='application/json')
def card(request):
    """Collect a Stripe card payment for the checkout stored in the session.

    On POST, charges the card via the Stripe API; on success redirects to
    CCCHECKOUT_SUCCESS_URL, otherwise re-renders the card form with an
    error message. GET just renders the form.
    """
    # get the checkout (assumes an earlier view placed it in the session;
    # a missing key raises KeyError — TODO confirm that is intended)
    checkout = request.session['cccheckout']
    if request.method == 'POST':
        try:
            stripeToken = request.POST['stripeToken']
        except KeyError:
            messages.error(request,
                'You must enable JavaScript to pay with your card')
            return HttpResponseRedirect(reverse('stripe:card'))
        # make the api
        api = StripeAPI(
            checkout=checkout,
            api_key=c_settings.STRIPE[c_settings.STRIPE_MODE]['PRIVATE_KEY'])
        # make the params
        try:
            description = checkout.customer.email
        except AttributeError:
            description = checkout
        # Stripe expects the amount in cents. Round before converting:
        # plain int(total * 100) truncates and drops a cent for totals
        # such as 19.99 (19.99 * 100 == 1998.999... in binary floats).
        amount_in_cents = int(round(checkout.total_price() * 100))
        params = {
            'amount': amount_in_cents,
            'currency': 'usd',
            'card': stripeToken,
            'description': description}
        # make the api call
        response = api.charge_create(**params)
        # if the response was paid set the checkout to paid
        checkout.paid = response['paid']
        checkout.save()
        if checkout.paid:
            # return the http response redirect
            try:
                url = reverse(c_settings.CCCHECKOUT_SUCCESS_URL)
            except NoReverseMatch:
                url = c_settings.CCCHECKOUT_SUCCESS_URL
            return HttpResponseRedirect(url)
        # not paid, add a message
        messages.error(request,
            'Your payment could not be taken, please try again')
    return render_to_response('cccheckout/payments/stripe/card.html', {},
        context_instance=RequestContext(request))
| {
"content_hash": "a8acad6ca1fcb800c8f00f64d7b3ab40",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 81,
"avg_line_length": 41.376543209876544,
"alnum_prop": 0.48023273161271074,
"repo_name": "designcc/django-cccheckout",
"id": "50e04f9ef995a4cba05420af14e4c3bc28e1ccbd",
"size": "6703",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cccheckout/payments/stripe/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "7093"
},
{
"name": "Python",
"bytes": "230676"
}
],
"symlink_target": ""
} |
""" Automatic file """
from app.logic.commandrepo.controllers import CommandController
| {
"content_hash": "5d8f748ec7588ad61c6c6f5b2086bfb0",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 63,
"avg_line_length": 29.333333333333332,
"alnum_prop": 0.7954545454545454,
"repo_name": "imvu/bluesteel",
"id": "71e6d2c73c9d0e497eaa69bdcb5f9ce220c3cfe6",
"size": "88",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/logic/commandrepo/controllers/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16828"
},
{
"name": "HTML",
"bytes": "119014"
},
{
"name": "JavaScript",
"bytes": "36015"
},
{
"name": "Python",
"bytes": "1220104"
}
],
"symlink_target": ""
} |
"""Pickle related utilities. Perhaps this should be called 'can'."""
__docformat__ = "restructuredtext en"
#-------------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
import copy
import logging
import sys
from types import FunctionType
try:
import cPickle as pickle
except ImportError:
import pickle
try:
import numpy
except:
numpy = None
import codeutil # This registers a hook when it's imported
import py3compat
from importstring import import_item
from IPython.config import Application
if py3compat.PY3:
buffer = memoryview
class_type = type
else:
from types import ClassType
class_type = (type, ClassType)
#-------------------------------------------------------------------------------
# Classes
#-------------------------------------------------------------------------------
class CannedObject(object):
    """Wrapper that makes an arbitrary object safe to pickle.

    Selected attributes are canned on the way in and uncanned again by
    :meth:`get_object`; an optional hook may post-process the restored
    object.
    """

    def __init__(self, obj, keys=[], hook=None):
        """Can an object for safe pickling.

        Parameters
        ----------
        obj :
            The object to be canned.
        keys : list, optional
            Attribute names that will be explicitly canned / uncanned.
        hook : callable, optional
            Extra callable doing additional processing of the uncanned
            object; large data may be offloaded into the buffers list,
            used for zero-copy transfers.
        """
        self.keys = keys
        self.obj = copy.copy(obj)
        self.hook = can(hook)
        for key in keys:
            canned_value = can(getattr(obj, key))
            setattr(self.obj, key, canned_value)
        self.buffers = []

    def get_object(self, g=None):
        """Restore and return the original object, uncanning the
        attributes listed in ``self.keys`` against namespace ``g``."""
        g = {} if g is None else g
        obj = self.obj
        for key in self.keys:
            setattr(obj, key, uncan(getattr(obj, key), g))
        if self.hook:
            self.hook = uncan(self.hook, g)
            self.hook(obj, g)
        return self.obj
class Reference(CannedObject):
    """Wrap a remote object by name; it is resolved with eval() on uncan."""
    def __init__(self, name):
        # Only a string name makes sense for an eval-based lookup.
        if not isinstance(name, basestring):
            raise TypeError("illegal name: %r"%name)
        self.name = name
        self.buffers = []
    def __repr__(self):
        return "<Reference: %r>"%self.name
    def get_object(self, g=None):
        # Evaluate the stored name in the supplied namespace (or an empty one).
        return eval(self.name, {} if g is None else g)
class CannedFunction(CannedObject):
    """Canned wrapper for a plain Python function (Python 2 attributes).

    Stores the code object, canned defaults, and origin module so the
    function can be rebuilt with FunctionType on the other side.
    """
    def __init__(self, f):
        self._check_type(f)
        # func_code / func_defaults are the Python 2 names (py3: __code__ /
        # __defaults__), so this class is Python-2 specific as written.
        self.code = f.func_code
        if f.func_defaults:
            self.defaults = [ can(fd) for fd in f.func_defaults ]
        else:
            self.defaults = None
        self.module = f.__module__ or '__main__'
        self.__name__ = f.__name__
        self.buffers = []
    def _check_type(self, obj):
        # Only genuine functions (not methods/callables) can be canned here.
        assert isinstance(obj, FunctionType), "Not a function type"
    def get_object(self, g=None):
        # try to load function back into its module:
        if not self.module.startswith('__'):
            # NOTE: rebinding g to the module's namespace means a caller-
            # supplied g is ignored for functions from real modules.
            __import__(self.module)
            g = sys.modules[self.module].__dict__
        if g is None:
            g = {}
        if self.defaults:
            defaults = tuple(uncan(cfd, g) for cfd in self.defaults)
        else:
            defaults = None
        # Rebuild the function from its code object in namespace g.
        newFunc = FunctionType(self.code, g, self.__name__, defaults)
        return newFunc
class CannedClass(CannedObject):
    """Canned wrapper for a class: name, canned class dict, and canned bases."""
    def __init__(self, cls):
        self._check_type(cls)
        self.name = cls.__name__
        # Old-style (py2 classic) classes have no usable mro().
        self.old_style = not isinstance(cls, type)
        self._canned_dict = {}
        for k,v in cls.__dict__.items():
            # __weakref__/__dict__ descriptors cannot be transplanted.
            if k not in ('__weakref__', '__dict__'):
                self._canned_dict[k] = can(v)
        if self.old_style:
            # NOTE(review): old-style classes lose their bases entirely here
            # (parents becomes empty) — presumably acceptable for __main__
            # interactive classes; confirm.
            mro = []
        else:
            mro = cls.mro()
        # mro[0] is cls itself; can everything above it.
        self.parents = [ can(c) for c in mro[1:] ]
        self.buffers = []
    def _check_type(self, obj):
        assert isinstance(obj, class_type), "Not a class type"
    def get_object(self, g=None):
        # Rebuild the class via the 3-arg type() constructor.
        parents = tuple(uncan(p, g) for p in self.parents)
        return type(self.name, parents, uncan_dict(self._canned_dict, g=g))
class CannedArray(CannedObject):
    """Canned wrapper for a numpy ndarray using zero-copy buffers."""
    def __init__(self, obj):
        self.shape = obj.shape
        # Structured dtypes need the full descr; plain dtypes just the str.
        self.dtype = obj.dtype.descr if obj.dtype.fields else obj.dtype.str
        # NOTE(review): sum(shape) == 0 is true for 0-d scalars and for
        # shapes like (0,), but not e.g. (0, 5) — presumably intentional
        # as a "nothing to buffer" test; confirm.
        if sum(obj.shape) == 0:
            # just pickle it
            self.buffers = [pickle.dumps(obj, -1)]
        else:
            # ensure contiguous
            obj = numpy.ascontiguousarray(obj, dtype=None)
            self.buffers = [buffer(obj)]
    def get_object(self, g=None):
        data = self.buffers[0]
        if sum(self.shape) == 0:
            # no shape, we just pickled it
            return pickle.loads(data)
        else:
            # Reconstruct a view over the transferred buffer (no copy).
            return numpy.frombuffer(data, dtype=self.dtype).reshape(self.shape)
class CannedBytes(CannedObject):
    """Canned wrapper for a bytes payload carried in the buffers list."""
    wrap = bytes
    def __init__(self, obj):
        # The raw payload rides in buffers so transports can send it zero-copy.
        self.buffers = [obj]
    def get_object(self, g=None):
        # Re-wrap the (possibly transformed) buffer in the target type.
        return self.wrap(self.buffers[0])
class CannedBuffer(CannedBytes):
    # BUG FIX: this was declared with ``def`` instead of ``class``, making
    # CannedBuffer a function that takes CannedBytes as a parameter and
    # returns None — so the ``buffer: CannedBuffer`` entry in can_map
    # silently canned every buffer to None.  It must be a CannedBytes
    # subclass that re-wraps the payload as buffer/memoryview.
    wrap = buffer
#-------------------------------------------------------------------------------
# Functions
#-------------------------------------------------------------------------------
def _logger():
    """Return the running Application's logger, or a configured root logger.

    the root logger will be used if no Application is running
    """
    if Application.initialized():
        return Application.instance().log
    root = logging.getLogger()
    if not root.handlers:
        # No handlers configured yet: install the basic one so log
        # records are not silently dropped.
        logging.basicConfig()
    return root
def _import_mapping(mapping, original=None):
    """import any string-keys in a type mapping

    String keys (dotted import paths) are replaced in-place by the class
    they name; keys that fail to import are removed.  ``original`` is the
    pristine mapping, used to suppress error logging for built-in entries.
    """
    log = _logger()
    log.debug("Importing canning map")
    # NOTE(review): this mutates `mapping` while iterating mapping.items();
    # that is safe on Python 2 (items() returns a list) but would raise
    # RuntimeError on Python 3 — consistent with the py2-only idioms
    # (basestring, iteritems) used throughout this file.
    for key,value in mapping.items():
        if isinstance(key, basestring):
            try:
                cls = import_item(key)
            except Exception:
                if original and key not in original:
                    # only message on user-added classes
                    log.error("cannning class not importable: %r", key, exc_info=True)
                # drop the unimportable entry either way
                mapping.pop(key)
            else:
                # replace the string key with the imported class object
                mapping[cls] = mapping.pop(key)
def istype(obj, check):
    """like isinstance(obj, check), but strict

    This won't catch subclasses: only an exact type match counts.
    ``check`` may be a single type or a tuple of types.
    """
    if isinstance(check, tuple):
        return any(type(obj) is cls for cls in check)
    return type(obj) is check
def can(obj):
    """prepare an object for pickling

    Walks can_map looking for an exact-type match; objects with no
    matching canner are returned unchanged.
    """
    import_needed = False
    for cls,canner in can_map.iteritems():
        # A string key means the map still holds unresolved import paths.
        if isinstance(cls, basestring):
            import_needed = True
            break
        elif istype(obj, cls):
            return canner(obj)
    if import_needed:
        # perform can_map imports, then try again
        # this will usually only happen once
        _import_mapping(can_map, _original_can_map)
        return can(obj)
    return obj
def can_class(obj):
    """Can interactively-defined (__main__) classes; pass all others through."""
    interactive = isinstance(obj, class_type) and obj.__module__ == '__main__'
    return CannedClass(obj) if interactive else obj
def can_dict(obj):
    """can the *values* of a dict

    Keys are left as-is; only exact dicts are processed (subclasses
    pass through untouched, per istype's strict match).
    """
    if istype(obj, dict):
        newobj = {}
        for k, v in obj.iteritems():
            newobj[k] = can(v)
        return newobj
    else:
        return obj
# container types whose elements get canned/uncanned element-wise
sequence_types = (list, tuple, set)
def can_sequence(obj):
    """Can each element of a list/tuple/set, preserving the container type."""
    if not istype(obj, sequence_types):
        return obj
    container = type(obj)
    return container(can(item) for item in obj)
def uncan(obj, g=None):
    """invert canning

    Walks uncan_map for an isinstance match (note: *not* strict, unlike
    can()); unmatched objects are returned unchanged.  g is the namespace
    dict passed through to uncanners.
    """
    import_needed = False
    for cls,uncanner in uncan_map.iteritems():
        # A string key means the map still holds unresolved import paths.
        if isinstance(cls, basestring):
            import_needed = True
            break
        elif isinstance(obj, cls):
            return uncanner(obj, g)
    if import_needed:
        # perform uncan_map imports, then try again
        # this will usually only happen once
        _import_mapping(uncan_map, _original_uncan_map)
        return uncan(obj, g)
    return obj
def uncan_dict(obj, g=None):
    """Inverse of can_dict: uncan the values of an exact dict."""
    if istype(obj, dict):
        newobj = {}
        for k, v in obj.iteritems():
            newobj[k] = uncan(v,g)
        return newobj
    else:
        return obj
def uncan_sequence(obj, g=None):
    """Inverse of can_sequence: uncan every element of a list/tuple/set."""
    if not istype(obj, sequence_types):
        return obj
    container = type(obj)
    return container(uncan(item, g) for item in obj)
def _uncan_dependent_hook(dep, g=None):
    # Post-uncan hook: re-check the dependent's dependency on arrival.
    dep.check_dependency()
def can_dependent(obj):
    """Can an IPython.parallel dependent, canning its f/df attributes."""
    return CannedObject(obj, keys=('f', 'df'), hook=_uncan_dependent_hook)
#-------------------------------------------------------------------------------
# API dictionaries
#-------------------------------------------------------------------------------
# These dicts can be extended for custom serialization of new objects
can_map = {
    # Keys are exact types (or dotted import strings, resolved lazily by
    # _import_mapping); values are canner callables / CannedObject classes.
    'IPython.parallel.dependent' : can_dependent,
    'numpy.ndarray' : CannedArray,
    FunctionType : CannedFunction,
    bytes : CannedBytes,
    buffer : CannedBuffer,
    class_type : can_class,
}
uncan_map = {
    # Base case: any CannedObject knows how to restore itself.
    CannedObject : lambda obj, g: obj.get_object(g),
}
# for use in _import_mapping:
_original_can_map = can_map.copy()
_original_uncan_map = uncan_map.copy()
| {
"content_hash": "0913ad883ddd8215695768d2bc407fd0",
"timestamp": "",
"source": "github",
"line_count": 352,
"max_line_length": 86,
"avg_line_length": 28.039772727272727,
"alnum_prop": 0.5252279635258359,
"repo_name": "marcoantoniooliveira/labweb",
"id": "04ac8827f519a1acaab0fbe2e5a0253157811c1c",
"size": "9889",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oscar/lib/python2.7/site-packages/IPython/utils/pickleutil.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "5939"
},
{
"name": "CSS",
"bytes": "1534157"
},
{
"name": "CoffeeScript",
"bytes": "21"
},
{
"name": "JavaScript",
"bytes": "2968822"
},
{
"name": "LiveScript",
"bytes": "6103"
},
{
"name": "Puppet",
"bytes": "3507"
},
{
"name": "Python",
"bytes": "30402832"
},
{
"name": "Shell",
"bytes": "10782"
},
{
"name": "TeX",
"bytes": "56626"
},
{
"name": "XSLT",
"bytes": "49764"
}
],
"symlink_target": ""
} |
from django.db import models
# import django.contrib.auth.models as auth
from django.contrib.auth.models import User
# from django.conf import settings
class UserProfile(models.Model):
    """Per-user profile; a 1:1 extension of Django's built-in User."""
    user = models.OneToOneField(User)
    def __str__(self):
        # Display as the underlying auth username.
        return self.user.get_username()
class Task(models.Model):
    """A task that users can subscribe to and eventually close."""
    id = models.AutoField(primary_key=True)
    title = models.CharField(max_length = 100)
    description = models.TextField(default = '')
    pub_date = models.DateTimeField('date published')
    # Reverse accessor: profile.tasks gives a user's subscribed tasks.
    subscribers = models.ManyToManyField(UserProfile, related_name='tasks')
    closed = models.BooleanField(default=False)
    def __str__(self):
        return self.title
class Comment(models.Model):
    """A user's comment on a task."""
    id = models.AutoField(primary_key=True)
    text = models.TextField(default = '')
    task = models.ForeignKey(Task)
    # NOTE(review): default='' on a ForeignKey is unusual (FKs store ids);
    # presumably a leftover — confirm intended default behavior.
    user = models.ForeignKey(UserProfile, default = '')
| {
"content_hash": "2a7eb0a7a12ef9ae8a27d733749f27e8",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 72,
"avg_line_length": 31.074074074074073,
"alnum_prop": 0.7508939213349225,
"repo_name": "jailuthra/taskzilla",
"id": "1e6028b97409f179a0fde2231acf66e7f0034041",
"size": "839",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "taskzilla/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "38"
},
{
"name": "HTML",
"bytes": "7846"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Python",
"bytes": "16124"
}
],
"symlink_target": ""
} |
'''Tests for youtube playlist'''
from unittest.mock import patch
from chineurs import youtube_playlist
@patch('chineurs.youtube_playlist.requests', autospec=True)
def test_insert_videos(requests):
    '''Make HTTP calls to the youtube API'''
    # Stub the POST and check insert_video returns its result untouched.
    requests.post.return_value = 5
    assert youtube_playlist.insert_video({}, 'playlist', 'vid') == 5
    # Verify the exact endpoint, headers and payload sent to YouTube.
    requests.post.assert_called_once_with(
        'https://www.googleapis.com/youtube/v3/playlistItems?part=snippet',
        headers={},
        json={
            'snippet': {
                'playlistId': 'playlist',
                'position': 0,
                'resourceId': {
                    'kind': 'youtube#video',
                    'videoId': 'vid'
                }
            }
        })
@patch('chineurs.youtube_playlist.requests.get', autospec=True)
def test_get_playlists(requests_get):
    '''Get playlists from YouTube api'''
    # Fake the YouTube playlists API response shape (items/id/snippet.title).
    requests_get.return_value.json.return_value = {
        'items': [
            {
                'id': 'id1',
                'snippet': {
                    'title': 'name1'
                }
            },
            {
                'id': 'id2',
                'snippet': {
                    'title': 'name2'
                }
            }
        ]
    }
    # get_playlists should flatten each item into {'id', 'name'}.
    assert youtube_playlist.get_playlists({}) == [
        {
            'id': 'id1',
            'name': 'name1'
        },
        {
            'id': 'id2',
            'name': 'name2'
        }
    ]
| {
"content_hash": "f576fb72388dffc57fb49cac95e8d486",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 75,
"avg_line_length": 25.517241379310345,
"alnum_prop": 0.4560810810810811,
"repo_name": "jroitgrund/chineurs",
"id": "d12dbc629f95e89a7e2ddc5df92d4ada17d0bdb7",
"size": "1480",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tests/test_youtube_playlist.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "21"
},
{
"name": "HTML",
"bytes": "914"
},
{
"name": "JavaScript",
"bytes": "13316"
},
{
"name": "Makefile",
"bytes": "382"
},
{
"name": "Python",
"bytes": "45873"
},
{
"name": "Shell",
"bytes": "1193"
}
],
"symlink_target": ""
} |
import os
import pathlib
import sys
# Meson dist script: rewrite prog.c inside the dist staging tree by
# replacing argv[1] with argv[2].  Meson sets MESON_DIST_ROOT for us.
if len(sys.argv) < 3:
    sys.exit('usage: replacer.py <pattern> <replacement>')
source_root = pathlib.Path(os.environ['MESON_DIST_ROOT'])
modfile = source_root / 'prog.c'
contents = modfile.read_text()
contents = contents.replace(sys.argv[1], sys.argv[2])
modfile.write_text(contents)
| {
"content_hash": "f12436165b0944a7e64aefd24e1f7306",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 58,
"avg_line_length": 23.285714285714285,
"alnum_prop": 0.7177914110429447,
"repo_name": "mesonbuild/meson",
"id": "96ccdcc8bf2c308b38f86271815c340b01229705",
"size": "350",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "test cases/unit/35 dist script/replacer.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "5960"
},
{
"name": "Batchfile",
"bytes": "1499"
},
{
"name": "C",
"bytes": "204306"
},
{
"name": "C#",
"bytes": "1130"
},
{
"name": "C++",
"bytes": "59193"
},
{
"name": "CMake",
"bytes": "38429"
},
{
"name": "Cuda",
"bytes": "10592"
},
{
"name": "Cython",
"bytes": "1921"
},
{
"name": "D",
"bytes": "7840"
},
{
"name": "Fortran",
"bytes": "12248"
},
{
"name": "Genie",
"bytes": "476"
},
{
"name": "HTML",
"bytes": "897"
},
{
"name": "Inno Setup",
"bytes": "354"
},
{
"name": "Java",
"bytes": "3768"
},
{
"name": "JavaScript",
"bytes": "150"
},
{
"name": "LLVM",
"bytes": "75"
},
{
"name": "Lex",
"bytes": "219"
},
{
"name": "Limbo",
"bytes": "28"
},
{
"name": "Meson",
"bytes": "601347"
},
{
"name": "Objective-C",
"bytes": "686"
},
{
"name": "Objective-C++",
"bytes": "378"
},
{
"name": "PowerShell",
"bytes": "4728"
},
{
"name": "Python",
"bytes": "4125854"
},
{
"name": "Roff",
"bytes": "625"
},
{
"name": "Rust",
"bytes": "4039"
},
{
"name": "Shell",
"bytes": "12539"
},
{
"name": "Swift",
"bytes": "1152"
},
{
"name": "Vala",
"bytes": "10033"
},
{
"name": "Verilog",
"bytes": "696"
},
{
"name": "Vim Script",
"bytes": "10684"
},
{
"name": "Yacc",
"bytes": "103"
}
],
"symlink_target": ""
} |
import logging
import pkg_resources
import six
from novaclient import exceptions
from novaclient import utils
logger = logging.getLogger(__name__)
_discovered_plugins = {}
def discover_auth_systems():
    """Discover the available auth-systems.

    This won't take into account the old style auth-systems.

    Populates the module-level _discovered_plugins registry from the
    'openstack.client.auth_plugin' entry-point group.
    """
    ep_name = 'openstack.client.auth_plugin'
    for ep in pkg_resources.iter_entry_points(ep_name):
        try:
            auth_plugin = ep.load()
        except (ImportError, pkg_resources.UnknownExtra, AttributeError) as e:
            # A broken plugin is skipped, not fatal; details at debug level.
            logger.debug("ERROR: Cannot load auth plugin %s" % ep.name)
            logger.debug(e, exc_info=1)
        else:
            _discovered_plugins[ep.name] = auth_plugin
def load_auth_system_opts(parser):
    """Load options needed by the available auth-systems into a parser.

    This function will try to populate the parser with options from the
    available plugins.  Each plugin that defines add_opts gets its own
    argument group named after it.
    """
    for name, auth_plugin in six.iteritems(_discovered_plugins):
        add_opts_fn = getattr(auth_plugin, "add_opts", None)
        if add_opts_fn:
            group = parser.add_argument_group("Auth-system '%s' options" %
                                              name)
            add_opts_fn(group)
def load_plugin(auth_system):
    """Instantiate the auth plugin registered under ``auth_system``.

    Falls back to a DeprecatedAuthPlugin shim for old-style auth systems
    that never registered under the new entry-point group.
    """
    plugin_cls = _discovered_plugins.get(auth_system)
    if plugin_cls is not None:
        return plugin_cls()
    # NOTE(aloga): If we arrive here, the plugin will be an old-style one,
    # so we have to create a fake AuthPlugin for it.
    return DeprecatedAuthPlugin(auth_system)
class BaseAuthPlugin(object):
    """Base class for authentication plugins.

    An authentication plugin needs to override at least the authenticate
    method to be a valid plugin.
    """
    def __init__(self):
        # Parsed auth options, filled in by parse_opts in subclasses.
        self.opts = {}
    def get_auth_url(self):
        """Return the auth url for the plugin (if any)."""
        return None
    @staticmethod
    def add_opts(parser):
        """Populate and return the parser with the options for this plugin.

        If the plugin does not need any options, it should return the same
        parser untouched.
        """
        return parser
    def parse_opts(self, args):
        """Parse the actual auth-system options if any.

        This method is expected to populate the attribute self.opts with a
        dict containing the options and values needed to make authentication.
        If the dict is empty, the client should assume that it needs the same
        options as the 'keystone' auth system (i.e. os_username and
        os_password).

        Returns the self.opts dict.
        """
        return self.opts
    def authenticate(self, cls, auth_url):
        """Authenticate using plugin defined method."""
        # NOTE(review): the base class never sets self.auth_system, so
        # calling this unoverridden raises AttributeError before the
        # intended AuthSystemNotFound — subclasses presumably set it;
        # confirm.
        raise exceptions.AuthSystemNotFound(self.auth_system)
class DeprecatedAuthPlugin(object):
    """Class to mimic the AuthPlugin class for deprecated auth systems.

    Old auth systems only define two entry points: openstack.client.auth_url
    and openstack.client.authenticate. This class will load those entry points
    into a class similar to a valid AuthPlugin.
    """
    def __init__(self, auth_system):
        self.auth_system = auth_system
        # Default authenticate; shadowed below if the legacy entry point
        # exists.  (self.auth_system is captured via closure.)
        def authenticate(cls, auth_url):
            raise exceptions.AuthSystemNotFound(self.auth_system)
        self.opts = {}
        self.get_auth_url = lambda: None
        self.authenticate = authenticate
        self._load_endpoints()
    def _load_endpoints(self):
        # Replace the stub callables with the legacy entry points, if found.
        ep_name = 'openstack.client.auth_url'
        fn = utils._load_entry_point(ep_name, name=self.auth_system)
        if fn:
            self.get_auth_url = fn
        ep_name = 'openstack.client.authenticate'
        fn = utils._load_entry_point(ep_name, name=self.auth_system)
        if fn:
            self.authenticate = fn
    def parse_opts(self, args):
        # Legacy plugins expose no options; always empty.
        return self.opts
| {
"content_hash": "ff401d2ab6a3a7db81666ff003b92bff",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 78,
"avg_line_length": 30.519685039370078,
"alnum_prop": 0.6439628482972136,
"repo_name": "neumerance/deploy",
"id": "843489788435b195e5a1762d2455452ee07c475f",
"size": "4564",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": ".venv/lib/python2.7/site-packages/novaclient/auth_plugin.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "49399"
},
{
"name": "CSS",
"bytes": "769836"
},
{
"name": "CoffeeScript",
"bytes": "21"
},
{
"name": "Erlang",
"bytes": "31042"
},
{
"name": "JavaScript",
"bytes": "642626"
},
{
"name": "PHP",
"bytes": "3858"
},
{
"name": "Perl",
"bytes": "386749"
},
{
"name": "Python",
"bytes": "23358678"
},
{
"name": "Racket",
"bytes": "28441"
},
{
"name": "Ruby",
"bytes": "453"
},
{
"name": "Shell",
"bytes": "29414"
},
{
"name": "XSLT",
"bytes": "152770"
}
],
"symlink_target": ""
} |
from console import Console
class GUI:
    """Namespace of console-UI helpers: screen managers and dialogs."""
    class Managers:
        class Console:
            # Clear the console and let the screen render itself.
            def show(self, screen):
                Console.Output.clear()
                screen.draw(self)
            # Render a dialog without clearing first.
            def show_dialog(self, screen):
                screen.draw(self)
    class Dialogs:
        class OptionsBox:
            """Numbered-options dialog; blocks until a valid index is chosen."""
            # NOTE(review): extras=[] is a shared mutable default — it is
            # only read here, but still fragile; confirm before changing.
            def __init__(self, title, options, extras=[]):
                self.title = title
                self.options = options
                self.extras = extras
                # -1 means "nothing chosen yet".
                self.chosen_option_index = -1
            def draw(self, menu):
                Console.Output.header(self.title)
                for index, option in enumerate(self.options):
                    text = option
                    # Append the matching extra (e.g. a price/count) if any.
                    if len(self.extras) > index:
                        text = str(option) + str(self.extras[index])
                    Console.PrettyPrint.key_value(index, text)
                # Loop until the user types an index inside the list.
                # NOTE(review): negative indices (e.g. -1) pass this check
                # via Python's negative indexing — presumably unintended;
                # confirm.
                while True:
                    print()
                    choose_option = Console.Input.safe_int("Chosen a option: ")
                    try:
                        if not self.options[choose_option] is None:
                            self.chosen_option_index = choose_option
                            break
                    except Exception:
                        print("Invalid Input - Option is not in range")
            def get_chosen_index(self):
                return self.chosen_option_index
            def get_chosen(self):
                return self.options[self.chosen_option_index]
# Shared console manager instance used by the rest of the app.
GLOBAL_MENU = GUI.Managers.Console()
| {
"content_hash": "1e02867abbb8b067a11c2faef233d3a5",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 79,
"avg_line_length": 32.583333333333336,
"alnum_prop": 0.4840153452685422,
"repo_name": "FetzenRndy/Creative",
"id": "396e13b9bfeb3c6845f1c82aca2d58a6695aac6e",
"size": "1564",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "projects/Bratwurst/source/gui.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "AutoHotkey",
"bytes": "161"
},
{
"name": "C#",
"bytes": "83121"
},
{
"name": "Java",
"bytes": "9489"
},
{
"name": "JavaScript",
"bytes": "733"
},
{
"name": "Python",
"bytes": "9034"
},
{
"name": "TypeScript",
"bytes": "11698"
}
],
"symlink_target": ""
} |
"""Translation string lookup helpers."""
import logging
from typing import Any, Dict, Iterable, Optional
from homeassistant import config_entries
from homeassistant.loader import async_get_integration, bind_hass
from homeassistant.util.json import load_json
from .typing import HomeAssistantType
_LOGGER = logging.getLogger(__name__)
TRANSLATION_STRING_CACHE = 'translation_string_cache'
def recursive_flatten(prefix: Any, data: Dict) -> Dict[str, Any]:
    """Flatten nested dicts into dotted keys: {'a': {'b': 1}} -> {'a.b': 1}."""
    flat = {}  # type: Dict[str, Any]
    for key, value in data.items():
        full_key = '{}{}'.format(prefix, key)
        if isinstance(value, dict):
            # Recurse with the extended prefix for nested mappings.
            flat.update(recursive_flatten(full_key + '.', value))
        else:
            flat[full_key] = value
    return flat
def flatten(data: Dict) -> Dict[str, Any]:
    """Return a flattened representation of dict data.

    Convenience wrapper around recursive_flatten with an empty prefix.
    """
    return recursive_flatten('', data)
async def component_translation_file(hass: HomeAssistantType, component: str,
                                     language: str) -> Optional[str]:
    """Return the translation json file location for a component.

    For component:
     - components/hue/.translations/nl.json

    For platform:
     - components/hue/.translations/light.nl.json

    If component is just a single file, will return None.
    """
    parts = component.split('.')
    domain = parts[-1]
    # 'light.hue'-style names have exactly two parts: platform.domain.
    is_platform = len(parts) == 2

    integration = await async_get_integration(hass, domain)
    assert integration is not None, domain

    if is_platform:
        filename = "{}.{}.json".format(parts[0], language)
        return str(integration.file_path / '.translations' / filename)

    # If it's a component that is just one file, we don't support translations
    # Example custom_components/my_component.py
    if integration.file_path.name != domain:
        return None

    filename = '{}.json'.format(language)
    return str(integration.file_path / '.translations' / filename)
def load_translations_files(translation_files: Dict[str, str]) \
        -> Dict[str, Dict[str, Any]]:
    """Load and parse translation.json files.

    Maps component name -> parsed JSON dict.  Runs synchronously; callers
    schedule it off the event loop.
    """
    loaded = {}
    for component, translation_file in translation_files.items():
        loaded_json = load_json(translation_file)
        assert isinstance(loaded_json, dict)
        loaded[component] = loaded_json

    return loaded
def build_resources(translation_cache: Dict[str, Dict[str, Any]],
                    components: Iterable[str]) -> Dict[str, Dict[str, Any]]:
    """Merge cached translations per domain for the requested components."""
    resources = {}  # type: Dict[str, Dict[str, Any]]

    for component in components:
        # 'light.hue' belongs to domain 'light'; a bare name is its own
        # domain (split on a name without '.' returns the name itself).
        domain = component.split('.', 1)[0]
        domain_resources = resources.setdefault(domain, {})

        # All platforms of a domain are merged together, since clients
        # cannot determine which platform an entity belongs to.
        domain_resources.update(translation_cache[component])

    return resources
@bind_hass
async def async_get_component_resources(hass: HomeAssistantType,
                                        language: str) -> Dict[str, Any]:
    """Return translation resources for all components.

    Results are cached per language in hass.data; only components whose
    translations are not yet cached hit the filesystem.
    """
    if TRANSLATION_STRING_CACHE not in hass.data:
        hass.data[TRANSLATION_STRING_CACHE] = {}
    if language not in hass.data[TRANSLATION_STRING_CACHE]:
        hass.data[TRANSLATION_STRING_CACHE][language] = {}
    translation_cache = hass.data[TRANSLATION_STRING_CACHE][language]

    # Get the set of components (loaded components + config-flow domains).
    components = hass.config.components | set(config_entries.FLOWS)

    # Calculate the missing components
    missing_components = components - set(translation_cache)
    missing_files = {}
    for component in missing_components:
        path = await component_translation_file(hass, component, language)

        # No translation available
        if path is None:
            translation_cache[component] = {}
        else:
            missing_files[component] = path

    # Load missing files
    if missing_files:
        # File I/O happens in the executor, not on the event loop.
        load_translations_job = hass.async_add_job(
            load_translations_files, missing_files)
        assert load_translations_job is not None
        loaded_translations = await load_translations_job

        # Update cache
        translation_cache.update(loaded_translations)

    resources = build_resources(translation_cache, components)

    # Return the component translations resources under the 'component'
    # translation namespace
    return flatten({'component': resources})
@bind_hass
async def async_get_translations(hass: HomeAssistantType,
                                 language: str) -> Dict[str, Any]:
    """Return all backend translations.

    For non-English languages the English strings are merged underneath,
    so missing keys fall back to English.
    """
    resources = await async_get_component_resources(hass, language)
    if language != 'en':
        # Fetch the English resources, as a fallback for missing keys
        base_resources = await async_get_component_resources(hass, 'en')
        # Requested language wins; English fills the gaps.
        resources = {**base_resources, **resources}

    return resources
| {
"content_hash": "3dfc9b3f2cee158d67a95bd9a8e326ae",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 78,
"avg_line_length": 35.23489932885906,
"alnum_prop": 0.6582857142857143,
"repo_name": "auduny/home-assistant",
"id": "4f655e692f74dcd2c7374c94449ff72f013d15dd",
"size": "5250",
"binary": false,
"copies": "4",
"ref": "refs/heads/dev",
"path": "homeassistant/helpers/translation.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1081"
},
{
"name": "HCL",
"bytes": "407"
},
{
"name": "Python",
"bytes": "15129018"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17609"
}
],
"symlink_target": ""
} |
from plotly.graph_objs import Layout
| {
"content_hash": "459d286594a9a2ae79d248917c1e05f1",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 36,
"avg_line_length": 37,
"alnum_prop": 0.8378378378378378,
"repo_name": "plotly/python-api",
"id": "058b60b807ddad489df88882a1a46dcf430ce19a",
"size": "37",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/graph_objs/layout/template/_layout.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
} |
from itertools import islice
import time
from django.conf import settings
from django.db import connection
from django.template.loader import render_to_string
class Frelic(object):
    """Collects per-request timing/count metrics and renders them as
    Google Analytics custom-metric JS.

    Lifecycle per request: construct at request start, count_templates as
    a template-rendered signal handler, load_metrics at response time,
    ga_code to emit the tracking snippet.
    """
    def __init__(self):
        self.start_time = time.time()
        self.template_num = 0
        # Remember how many queries were already logged so we only count
        # queries issued during this request.
        self.sentinel = len(connection.queries)
    def set_view_name(self, view_func):
        # e.g. "myapp.views.index" — used to label every metric.
        self.view_name = ".".join((view_func.__module__, view_func.__name__))
    def count_templates(self, sender, **kwargs):
        # Signal receiver: one template rendered.
        self.template_num += 1
    def load_metrics(self):
        """Compute timings/counts from the request so far."""
        self.timings = []
        self.counts = []
        # Milliseconds since construction.
        total_time = int((time.time() - self.start_time) * 1000)
        self.add_timing("Total View Time", total_time)
        self.sql_query_num = len(connection.queries) - self.sentinel
        self.add_count('Rendered Templates', self.template_num)
        self.add_count('SQL Queries', self.sql_query_num)
        sql_time = 0.0
        for query in islice(connection.queries, self.sentinel, None):
            # Django stores query time in seconds under 'time'.
            query_time = float(query.get('time', 0)) * 1000
            if query_time == 0:
                # django-debug-toolbar monkeypatches the connection
                # cursor wrapper and adds extra information in each
                # item in connection.queries. The query time is stored
                # under the key "duration" rather than "time" and is
                # in milliseconds, not seconds.
                query_time = query.get('duration', 0)
            sql_time += query_time
        self.add_timing("SQL Query Time", sql_time)
    def add_timing(self, name, millisec):
        # (category, name, value, label) tuples consumed by the template.
        self.timings.append(('Frelic', name, millisec, self.view_name))
    def add_count(self, name, count):
        self.counts.append(('Frelic', name, count, self.view_name))
    def ga_code(self):
        """Render the GA snippet with the gathered metrics."""
        context = {}
        context['google_analytics_id'] = settings.GOOGLE_ANALYTICS_ID
        context['timings'] = self.timings
        context['counts'] = self.counts
        return render_to_string('frelic/ga_code.html', context)
| {
"content_hash": "29916d47791b0b3e2e310af11486b8fa",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 77,
"avg_line_length": 33.67741935483871,
"alnum_prop": 0.6101532567049809,
"repo_name": "glasslion/django-frelic",
"id": "211b47a286f686e9da6c2d0e6fd74c37f199ee16",
"size": "2088",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "frelic/core.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6912"
}
],
"symlink_target": ""
} |
"""
Django settings for homeinventory project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control — rotate it and
# load from the environment before any production deployment.
SECRET_KEY = '%4nip+#(zqtb-*ky13!=zw%arh4f_dkuyec*3s@o3e7seesazb'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'inventory',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'homeinventory.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'homeinventory.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'CET'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, "static"),
)
# NOTE(review): MEDIA_ROOT points at the project root itself — uploads
# would land next to the source tree; confirm this is intentional.
MEDIA_ROOT = (BASE_DIR)
MEDIA_URL = '/media/'
| {
"content_hash": "f786fbbfa2c78eaeb01a8bbf07724ece",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 91,
"avg_line_length": 25.124031007751938,
"alnum_prop": 0.6862079605060166,
"repo_name": "Drabant/homeinventory",
"id": "fa849c6a4e57498afd012eb1b2f30f2ab30ef8fa",
"size": "3241",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "homeinventory/settings.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "7070"
}
],
"symlink_target": ""
} |
import sys
import os
import re
import sendmail
import config
# Report build results for one host: mail the devs if the log files are
# missing, if stderr contains an "Exception:" marker, or on success
# (attaching both logs in the latter two cases).
out = sys.argv[1]       # path to captured standard output
err = sys.argv[2]       # path to captured standard error
hostname = sys.argv[3]  # machine the build ran on
if not os.path.exists(out) or not os.path.exists(err):
    sendmail.send(config.DEVS,
                  "Failed to read results from '%s'" % hostname,
                  "Failed to read files: '%s' '%s'" % (out, err),
                  [])
    exit(0)
pat = re.compile("Exception:")
# BUG FIX: the original opened the file without a context manager and
# called exit(0) inside the loop, leaking the handle on the failure path;
# it also used enumerate() for an index it never read.
with open(err, "rt") as f:
    for line in f:
        if pat.search(line):
            sendmail.send(config.DEVS,
                          "Failed to build z3 at '%s'" % hostname,
                          "See attached files for standard output and standard error",
                          [out, err])
            # SystemExit propagates through the with-block, closing f first.
            exit(0)
sendmail.send(config.DEVS,
              "Z3 was built at '%s'" % hostname,
              "Command was successfully executed",
              [out, err])
| {
"content_hash": "ae23376f1b78fece1127a7c565baa4c9",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 82,
"avg_line_length": 27.375,
"alnum_prop": 0.5388127853881278,
"repo_name": "dstaple/z3test",
"id": "9e99629766c4c00c6105a3ca8a8472906343fb0b",
"size": "1058",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "scripts/send_results.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "21980"
},
{
"name": "Batchfile",
"bytes": "4009"
},
{
"name": "C#",
"bytes": "758062"
},
{
"name": "C++",
"bytes": "9623"
},
{
"name": "CSS",
"bytes": "3966"
},
{
"name": "Python",
"bytes": "131312"
},
{
"name": "SMT",
"bytes": "6844416"
},
{
"name": "Shell",
"bytes": "2179"
}
],
"symlink_target": ""
} |
import socket
import threading
import time
from org.apache.flink.api.common.functions import FlatMapFunction, ReduceFunction
from org.apache.flink.api.java.functions import KeySelector
from org.apache.flink.streaming.api.windowing.time.Time import milliseconds
from org.apache.flink.streaming.util.serialization import SerializationSchema
from utils import constants
from utils import utils
from utils.python_test_base import TestBase
class Tokenizer(FlatMapFunction):
    """Map each input value to a (1, value) pair for counting."""
    def flatMap(self, value, collector):
        collector.collect((1, value))
class Sum(ReduceFunction):
    """Sum the counts of (count, value) pairs, keeping the first value."""
    def reduce(self, input1, input2):
        count1, val1 = input1
        count2, val2 = input2
        # val2 is intentionally dropped: within a key group both values
        # are equal, so the first one is representative.
        return (count1 + count2, val1)
class Selector(KeySelector):
    """Key each (count, value) record by its value component."""

    def getKey(self, input):
        return input[1]
class ToStringSchema(SerializationSchema):
    """Render a (count, value) record as text terminated with '|'."""

    def serialize(self, value):
        count, word = value[0], value[1]
        return "{}, {}|".format(count, word)
class SocketStringReader(threading.Thread):
    """Background thread that accepts a single TCP client and prints the
    '|'-delimited tokens it receives until the peer closes the connection."""

    def __init__(self, host, port, expected_num_values):
        threading.Thread.__init__(self)
        self._host = host
        self._port = port
        self._expected_num_values = expected_num_values

    def run(self):
        server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        server.bind((self._host, self._port))
        server.listen(5)
        client, _address = server.accept()
        while True:
            payload = client.recv(1024)
            if not payload:
                # Empty read: the peer closed the socket.
                break
            # Tokens are '|'-terminated, so the last split entry is dropped.
            for token in payload.split('|')[:-1]:
                print(token)
        print("*** Done receiving ***")
        client.close()
        server.close()
class Main(TestBase):
    """Job driver: streams a fixed collection through a keyed, windowed
    count-reduce and writes the result to a local socket sink."""

    def __init__(self):
        super(Main, self).__init__()

    def run(self):
        port = utils.gen_free_port()
        # Start the sink-side reader before the job begins emitting.
        SocketStringReader('', port, constants.NUM_ITERATIONS_IN_TEST).start()
        time.sleep(0.5)
        # Alternate two distinct words so the key selector has two groups.
        words = ["aa" if idx % 2 == 0 else "bbb" for idx in range(constants.NUM_ITERATIONS_IN_TEST)]
        env = self._get_execution_environment()
        stream = env.from_collection(words)
        stream = stream.flat_map(Tokenizer())
        stream = stream.key_by(Selector())
        stream = stream.time_window(milliseconds(50))
        stream = stream.reduce(Sum())
        stream.write_to_socket('localhost', port, ToStringSchema())
        result = env.execute("MyJob", True)
        print("Job completed, job_id={}".format(result.jobID))
def main():
    # Script entry point: build the streaming job and run it to completion.
    Main().run()
if __name__ == '__main__':
    main()
| {
"content_hash": "723b195c62d1e20686eb54a408d443f3",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 103,
"avg_line_length": 28.280898876404493,
"alnum_prop": 0.6249503377036154,
"repo_name": "zohar-mizrahi/flink",
"id": "aa322ce4675915036587149d1daf2c3ef0be595b",
"size": "3475",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flink-libraries/flink-streaming-python/src/test/python/org/apache/flink/streaming/python/api/test_write_to_socket.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4792"
},
{
"name": "CSS",
"bytes": "18100"
},
{
"name": "CoffeeScript",
"bytes": "89007"
},
{
"name": "HTML",
"bytes": "86524"
},
{
"name": "Java",
"bytes": "31605216"
},
{
"name": "JavaScript",
"bytes": "8267"
},
{
"name": "Python",
"bytes": "240673"
},
{
"name": "Scala",
"bytes": "5925253"
},
{
"name": "Shell",
"bytes": "93241"
}
],
"symlink_target": ""
} |
import contextlib
import functools
import mock
from django.http import HttpRequest
from nose import SkipTest
from nose.tools import assert_equal, assert_not_equal
from framework.auth import Auth
from website.archiver import ARCHIVER_SUCCESS
from website.archiver import listeners as archiver_listeners
def requires_module(module):
    """Decorator factory: skip the wrapped test when *module* cannot be imported."""
    def decorator(func):
        @functools.wraps(func)
        def inner(*args, **kwargs):
            try:
                __import__(module)
            except ImportError:
                # Missing optional dependency: mark the test as skipped.
                raise SkipTest()
            return func(*args, **kwargs)
        return inner
    return decorator
def assert_logs(log_action, node_key, index=-1):
    """Decorator asserting that running the test adds a log to ``self.<node_key>``.

    :param str log_action: expected NodeLog action of the new entry
    :param str node_key: attribute name on the test case holding the Node
    :param int index: position in ``node.logs`` of the entry to inspect
    """
    def outer_wrapper(func):
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            node = getattr(self, node_key)
            before = node.logs[-1]
            func(self, *args, **kwargs)
            node.reload()
            after = node.logs[index]
            # A fresh entry must exist and carry the expected action.
            assert_not_equal(before._id, after._id)
            assert_equal(after.action, log_action)
            node.save()
        return wrapper
    return outer_wrapper
def assert_not_logs(log_action, node_key, index=-1):
    """Decorator asserting the test does NOT add a ``log_action`` entry to
    ``self.<node_key>``'s logs (the newest log stays unchanged)."""
    def outer_wrapper(func):
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            node = getattr(self, node_key)
            before = node.logs[-1]
            func(self, *args, **kwargs)
            node.reload()
            after = node.logs[index]
            # The newest entry must not be the forbidden action and must be
            # the same entry as before the test body ran.
            assert_not_equal(after.action, log_action)
            assert_equal(before._id, after._id)
            node.save()
        return wrapper
    return outer_wrapper
@contextlib.contextmanager
def mock_archive(project, schema=None, auth=None, template=None, data=None, parent=None,
                 autocomplete=True, autoapprove=False):
    """ A context manager for registrations. When you want to call Node#register_node in
    a test but do not want to deal with any of this side effects of archiver, this
    helper allows for creating a registration in a safe fashion.
    :param bool autocomplete: automatically finish archival?
    :param bool autoapprove: automatically approve registration approval?
    Example use:
    project = ProjectFactory()
    with mock_archive(project) as registration:
        assert_true(registration.is_registration)
        assert_true(registration.archiving)
        assert_true(registration.is_pending_registration)
    with mock_archive(project, autocomplete=True) as registration:
        assert_true(registration.is_registration)
        assert_false(registration.archiving)
        assert_true(registration.is_pending_registration)
    with mock_archive(project, autocomplete=True, autoapprove=True) as registration:
        assert_true(registration.is_registration)
        assert_false(registration.archiving)
        assert_false(registration.is_pending_registration)
    TODO: extend this decorator to check log param correctness?
    """
    schema = schema or None  # NOTE(review): no-op, kept for symmetry with the other defaults
    auth = auth or Auth(project.creator)
    template = template or ''
    data = data or ''
    # Patch task enqueueing so registration does not kick off real celery work.
    with mock.patch('framework.tasks.handlers.enqueue_task'):
        registration = project.register_node(schema, auth, template, data, parent)
        registration.root.require_approval(project.creator)
        if autocomplete:
            # Mark the root archive job as already finished successfully.
            root_job = registration.root.archive_job
            root_job.status = ARCHIVER_SUCCESS
            root_job.sent = False
            root_job.done = True
            root_job.save()
            sanction = registration.root.sanction
            # contextlib.nested is Python 2 only; 'ask' is stubbed so no
            # approval emails go out when the archive callback fires.
            with contextlib.nested(
                mock.patch.object(root_job, 'archive_tree_finished', mock.Mock(return_value=True)),
                mock.patch.object(sanction, 'ask')
            ):
                archiver_listeners.archive_callback(registration)
        if autoapprove:
            # Short-circuit the approval workflow straight to completion.
            sanction = registration.root.sanction
            sanction._on_complete(project.creator)
        yield registration
def make_drf_request(*args, **kwargs):
    """Build a DRF ``Request`` wrapping a minimal Django ``HttpRequest``."""
    from rest_framework.request import Request
    django_request = HttpRequest()
    # DRF only requires these META entries to be present; the actual
    # values are irrelevant for the tests.
    django_request.META['SERVER_NAME'] = 'localhost'
    django_request.META['SERVER_PORT'] = 8000
    # A DRF Request wraps a Django HttpRequest.
    return Request(django_request, *args, **kwargs)
| {
"content_hash": "f6cf9015201664f9046827349db34e6f",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 95,
"avg_line_length": 36.651162790697676,
"alnum_prop": 0.6592639593908629,
"repo_name": "njantrania/osf.io",
"id": "fdc02f49a56410d97a35dc1ee4546dc20be41a6c",
"size": "4728",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "tests/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "119424"
},
{
"name": "HTML",
"bytes": "31299"
},
{
"name": "JavaScript",
"bytes": "1175450"
},
{
"name": "Mako",
"bytes": "537851"
},
{
"name": "Python",
"bytes": "3844872"
},
{
"name": "Shell",
"bytes": "1927"
}
],
"symlink_target": ""
} |
import unittest
import six
if six.PY2:
from mock import Mock
else:
from unittest.mock import Mock
from application.caches.cache_manager import CacheManager
class TestCacheManager(unittest.TestCase):
    """Unit tests for CacheManager's layered add/get behaviour."""

    def setUp(self):
        self.cache_manager = CacheManager()

    def tearDown(self):
        pass

    def test_addElementSingleCache(self):
        backend = Mock()
        backend.get = Mock(return_value="test.value")
        self.cache_manager.add_cache(backend)
        self.cache_manager.add("test.key", "test.value")
        # A single registered cache serves the lookup directly.
        self.assertEqual(self.cache_manager.get("test.key"), "test.value")
        backend.get.assert_called_with("test.key")

    def test_addElementMultiCache(self):
        first = Mock()
        second = Mock()
        # The first layer misses, so the manager must fall through to the second.
        first.get = Mock(return_value=None)
        second.get = Mock(return_value="test.value")
        self.cache_manager.add_cache(first)
        self.cache_manager.add_cache(second)
        self.cache_manager.add("test.key", "test.value")
        self.assertEqual(self.cache_manager.get("test.key"), "test.value")
        first.get.assert_called_with("test.key")
        second.get.assert_called_with("test.key")
if __name__ == "__main__":
unittest.main() | {
"content_hash": "d70f51c7a6b70801ec61ffc3e7be3c50",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 68,
"avg_line_length": 23.148936170212767,
"alnum_prop": 0.7196691176470589,
"repo_name": "andrew749/andrew749.github.io",
"id": "795d03aa8476acdf3b6bec1ac7ca8f934e295aae",
"size": "1088",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "application/caches/test/test_cache_manager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "19138"
},
{
"name": "Dockerfile",
"bytes": "493"
},
{
"name": "HTML",
"bytes": "20181"
},
{
"name": "JavaScript",
"bytes": "8467"
},
{
"name": "Makefile",
"bytes": "811"
},
{
"name": "Python",
"bytes": "20212"
},
{
"name": "Shell",
"bytes": "153"
}
],
"symlink_target": ""
} |
"""The Tornado web server and tools."""
from __future__ import absolute_import, division, print_function, with_statement
# version is a human-readable version number.
# version_info is a four-tuple for programmatic comparison. The first
# three numbers are the components of the version number. The fourth
# is zero for an official release, positive for a development branch,
# or negative for a release candidate or beta (after the base version
# number has been incremented)
version = "3.0.1"
version_info = (3, 0, 1, 0)  # keep in sync with `version` above
| {
"content_hash": "e9e8277c91d932df32c5f09e3a5b94aa",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 80,
"avg_line_length": 40.53846153846154,
"alnum_prop": 0.7533206831119544,
"repo_name": "alkaitz/starloot",
"id": "c188a1b5feb7de2e0d16b50e2aafe1eca62b2e1c",
"size": "1124",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/gameengine/webSocketServer/lib/tornado-3.0.1/tornado/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "230"
},
{
"name": "CSS",
"bytes": "8912"
},
{
"name": "HTML",
"bytes": "20083"
},
{
"name": "Java",
"bytes": "3637"
},
{
"name": "JavaScript",
"bytes": "257697"
},
{
"name": "Python",
"bytes": "1807036"
},
{
"name": "Shell",
"bytes": "725"
}
],
"symlink_target": ""
} |
def test_password(con):
"""Called by GitHub Actions with auth method password.
We just need to check that we can get a connection.
"""
pass
| {
"content_hash": "5565079bc66e9a5e75e3bc20f6d1d516",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 58,
"avg_line_length": 31.2,
"alnum_prop": 0.6730769230769231,
"repo_name": "tlocke/pg8000",
"id": "ada7edb007b7032b31582de84784d369575c9c37",
"size": "156",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "test/native/auth/test_password.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "331262"
}
],
"symlink_target": ""
} |
from setuptools import setup

version = '1.0.0'

# Read the pinned dependencies. Fixes: the file handle was never closed, and
# splitting on '\n' left a trailing empty-string entry in install_requires.
with open('requirements.txt') as req_file:
    required = [line.strip() for line in req_file if line.strip()]

setup(
    name='thunder-regression',
    version=version,
    description='algorithms for mass univariate regression',
    author='jwittenbach',
    author_email='the.freeman.lab@gmail.com',
    url='https://github.com/freeman-lab/thunder-regression',
    packages=['regression'],
    install_requires=required,
    long_description='See ' + 'https://github.com/freeman-lab/thunder-regression',
    license='MIT'
)
| {
"content_hash": "203b4fa75d9fd7555987e676e03b7ce8",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 82,
"avg_line_length": 28.944444444444443,
"alnum_prop": 0.6967370441458733,
"repo_name": "thunder-project/thunder-regression",
"id": "59fad9222b6a5a37ab6669c4619079cb2f845430",
"size": "544",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11532"
}
],
"symlink_target": ""
} |
import os
from flexmock import flexmock
from orator.migrations import Migrator
from orator.commands.migrations import MigrateCommand
from orator import DatabaseManager
from .. import OratorCommandTestCase
class MigrateCommandTestCase(OratorCommandTestCase):
    """Tests for the ``migrate`` command's interaction with the Migrator.

    The five tests previously duplicated identical flexmock setup; the shared
    expectations are factored into the private helpers below.
    """

    def _mock_migrator(self, connection=None, pretend=False, repository_exists=True):
        """Stub DatabaseManager/Migrator with the expectations every test shares.

        :param connection: value expected to be passed to ``set_connection``
        :param bool pretend: expected value of the ``run`` pretend flag
        :param bool repository_exists: what ``repository_exists`` should report
        """
        resolver = flexmock(DatabaseManager)
        resolver.should_receive("connection").and_return(None)

        migrator_mock = flexmock(Migrator)
        migrator_mock.should_receive("set_connection").once().with_args(connection)
        migrator_mock.should_receive("run").once().with_args(
            os.path.join(os.getcwd(), "migrations"), pretend
        )
        migrator_mock.should_receive("get_notes").and_return([])
        migrator_mock.should_receive("repository_exists").once().and_return(
            repository_exists
        )

    def _mock_command(self, confirm=True):
        """Build a MigrateCommand with its config lookup (and optionally the
        interactive confirmation) stubbed out."""
        command = flexmock(MigrateCommand())
        command.should_receive("_get_config").and_return({})
        if confirm:
            command.should_receive("confirm").and_return(True)
        return command

    def test_basic_migrations_call_migrator_with_proper_arguments(self):
        self._mock_migrator()
        command = self._mock_command()
        self.run_command(command)

    def test_migration_repository_create_when_necessary(self):
        self._mock_migrator(repository_exists=False)
        command = self._mock_command()
        # A missing repository must trigger the install sub-command.
        command.should_receive("call").once().with_args(
            "migrate:install", [("--config", None)]
        )
        self.run_command(command)

    def test_migration_can_be_pretended(self):
        self._mock_migrator(pretend=True)
        command = self._mock_command()
        self.run_command(command, [("--pretend", True)])

    def test_migration_database_can_be_set(self):
        self._mock_migrator(connection="foo", repository_exists=False)
        command = self._mock_command()
        command.should_receive("call").once().with_args(
            "migrate:install", [("--database", "foo"), ("--config", None)]
        )
        self.run_command(command, [("--database", "foo")])

    def test_migration_can_be_forced(self):
        # --force skips the interactive confirmation entirely.
        self._mock_migrator()
        command = self._mock_command(confirm=False)
        self.run_command(command, [("--force", True)])
| {
"content_hash": "f95c86edb8aaba07a7cd37a134f30a17",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 82,
"avg_line_length": 42.320388349514566,
"alnum_prop": 0.6526726313374627,
"repo_name": "sdispater/orator",
"id": "4a69058a88f75df817b98e562b420da2f6596354",
"size": "4384",
"binary": false,
"copies": "1",
"ref": "refs/heads/0.9",
"path": "tests/commands/migrations/test_migrate_command.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2170"
},
{
"name": "Python",
"bytes": "1013569"
}
],
"symlink_target": ""
} |
import setuptools
# setup() is called with no arguments, so all package metadata presumably
# lives in setup.cfg / pyproject.toml — confirm against the repository layout.
if __name__ == "__main__":
    setuptools.setup()
| {
"content_hash": "57756fd6809f17aaf1cf8b2a7402fbd5",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 26,
"avg_line_length": 17.25,
"alnum_prop": 0.5942028985507246,
"repo_name": "Featuretools/featuretools",
"id": "dbe9716a3302dfe8faa31a7daa63502e0d3d9907",
"size": "91",
"binary": false,
"copies": "4",
"ref": "refs/heads/latest-dep-update-03d11f0",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "3340"
},
{
"name": "Makefile",
"bytes": "736"
},
{
"name": "Python",
"bytes": "921333"
},
{
"name": "Shell",
"bytes": "511"
}
],
"symlink_target": ""
} |
from django.urls import reverse
from accounts.factories import UserFactory
from functional_tests.base import FunctionalTest
class UsersTest(FunctionalTest):
    """Browser-level functional tests covering login and password change."""

    def test_unauthenticated_can_login(self):
        self.user = UserFactory.create(username='ugo')
        # Keep the raw password around: set_password() replaces the stored
        # value with its hash.
        self.password = self.user.password
        self.user.set_password(self.user.password)
        self.user.save()
        # Ugo goes to main page
        self.browser.get(self.live_server_url)
        # and sees a login button.
        login_link = self.browser.find_element_by_link_text('Login')
        # He clicks on it
        login_link.click()
        # and now he sees the login form.
        self.wait_for(
            lambda: self.browser.find_element_by_id('id_username')
        )
        # He fills in his credentials
        self.browser.find_element_by_id('id_username').send_keys(self.user.username)
        self.browser.find_element_by_id('id_password').send_keys(self.password)
        # and presses the only button.
        self.browser.find_element_by_css_selector('input[type="submit"]').click()
        # He is redirected back to the main page, where the logout button
        # and the "logged in as" text are shown.
        self.wait_to_be_logged_in()
        # fix: the expected text was an f-string with no placeholder.
        self.assertEqual(
            self.browser.find_element_by_css_selector('nav .navbar-text').text,
            'Logged in as ugo')

    def test_authenticated_can_change_password(self):
        # Ugo is a logged-in user. fix: the helper's return value was bound
        # to an unused variable; only the session-creating side effect matters.
        self.create_pre_authenticated_session('ugo')
        # Ugo goes to the password change page
        self.browser.get(self.live_server_url + reverse('password_change'))
        # fills in the old password and the new one twice
        self.browser.find_element_by_id('id_old_password').send_keys('ugo666')
        self.browser.find_element_by_id('id_new_password1').send_keys('QAZwsx123')
        self.browser.find_element_by_id('id_new_password2').send_keys('QAZwsx123')
        # and presses the save button.
        self.browser.find_element_by_css_selector('input[type="submit"]').click()
        # He is redirected to the main page
        self.wait_for(lambda: self.assertIn('/', self.browser.current_url))
        # logs out
        self.browser.find_element_by_link_text('Log out').click()
        self.wait_to_be_logged_out()
        # and can log back in with the new password.
        self.browser.get(self.live_server_url + reverse('login'))
        self.browser.find_element_by_id('id_username').send_keys('ugo')
        self.browser.find_element_by_id('id_password').send_keys('QAZwsx123')
        self.browser.find_element_by_css_selector('input[type="submit"]').click()
        self.wait_to_be_logged_in()
"content_hash": "5e55a57f354b135e6a7802601eef8532",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 92,
"avg_line_length": 46.293103448275865,
"alnum_prop": 0.6454376163873371,
"repo_name": "asyler/betleague",
"id": "8f30906b19df16ecf10b9eb8630cd78e17ca8252",
"size": "2685",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "functional_tests/test_users.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2543"
},
{
"name": "HTML",
"bytes": "14185"
},
{
"name": "JavaScript",
"bytes": "879"
},
{
"name": "Python",
"bytes": "74891"
}
],
"symlink_target": ""
} |
"""solar URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
    url(r'^device/', include('device.urls')),    # delegate /device/* to the device app
    url(r'^account/', include('account.urls')),  # delegate /account/* to the account app
    url(r'^wechat/', include('wechat.urls')),    # delegate /wechat/* to the wechat app
    url(r'^admin/', admin.site.urls),            # built-in Django admin site
]
| {
"content_hash": "b23e2816ff5bcfc594a2b707b5deb4fb",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 79,
"avg_line_length": 37.916666666666664,
"alnum_prop": 0.6835164835164835,
"repo_name": "mageelen/Forest",
"id": "15ab8ae6409b7373574d036d85c8f90def4a4f2f",
"size": "910",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "solar/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "603908"
},
{
"name": "HTML",
"bytes": "1733463"
},
{
"name": "JavaScript",
"bytes": "2893745"
},
{
"name": "PHP",
"bytes": "1684"
},
{
"name": "Python",
"bytes": "37694"
}
],
"symlink_target": ""
} |
import argparse
import datetime
import httplib
import json
import random
import sys

import dateutil.parser
# Command-line interface: every option is required, so the script fails fast
# when a parameter is missing.
arg_parser = argparse.ArgumentParser(description='Generate URL for load testing')
arg_parser.add_argument('--number-of-urls', required=True, dest='n', type=int, metavar='n', help='Number of url to generate')
arg_parser.add_argument('--atlas-url', required=True, dest='atlas_url', metavar='atlas_url', help='Atlas host')
arg_parser.add_argument('--target-host', required=True, dest='target_host', metavar='target_host', help='Target host')
arg_parser.add_argument('--api-key', required=True, dest='api_key', metavar='api_key', help='Atlas API key')
arg_parser.add_argument('--num-channels-source', required=True, type=int, dest='num_channels_source', metavar='num_channels_source', help='Number of channels to choose from')
arg_parser.add_argument('--num-channels', required=True, type=int, dest='num_channels', metavar='num_channels', help='Number of channels to use in request')
arg_parser.add_argument('--platform', required=True, dest='platform', metavar='platform', help='platform')
arg_parser.add_argument('--source', required=True, metavar='source', help='source of the schedules to bootstrap')
arg_parser.add_argument('--start-date', required=True, metavar='start_date', help='Start date')
arg_parser.add_argument('--end-date', required=True, metavar='end_date', help='Start date')
args = arg_parser.parse_args()
# Normalise the date bounds from strings into datetime objects.
args.start_date = dateutil.parser.parse(args.start_date)
args.end_date = dateutil.parser.parse(args.end_date)
class Atlas:
    """Minimal HTTP client for the Atlas API that returns parsed JSON."""

    def __init__(self, host, port):
        self.host = host
        self.port = port

    def get(self, resource):
        """GET *resource* and return ``(request_string, decoded_json)``.

        Prints a diagnostic and terminates the process on any non-200
        status or on a response body that is not valid JSON.
        """
        conn = httplib.HTTPConnection(self.host, self.port)
        request = "GET http://%s:%s%s" % (self.host, self.port, resource)
        conn.request('GET', resource)
        resp = conn.getresponse()
        if resp.status != 200:
            if resp.status == 400:
                print("request failed for %s: %s" % (resource, resp.reason))
            if resp.status == 404:
                print("resource %s doesn't appear to exist" % (resource))
            if resp.status >= 500:
                print("problem with %s? %s %s" % (self.host, resp.status, resp.reason))
            # Drain and close the connection before bailing out.
            resp.read()
            conn.close()
            sys.exit()  # fix: `sys` was used here but never imported
        body = resp.read()
        conn.close()  # fix: the success path previously leaked the connection
        try:
            response = json.loads(body)
        except Exception as e:
            print("couldn't decode response to %s: %s" % (request, e))
            print(body)
            sys.exit()
        return (request, response)
# Fetch the channel-group ("platform") definition, including its channels.
atlas = Atlas(args.atlas_url, 80)
req, platform = atlas.get("/4/channel_groups/%s.json?key=%s&annotations=channels" % (args.platform, args.api_key))
def get_days(start, end):
    """Return every date from *start* to *end*, inclusive, one day apart."""
    days = []
    current = start
    while current <= end:
        days.append(current)
        current += datetime.timedelta(1)
    return days
# Pool of candidate channel ids, capped at --num-channels-source.
channels = map((lambda c: c['channel']['id']),platform['channel_group']['channels'][:args.num_channels_source])
days = get_days(args.start_date, args.end_date)
# Emit one schedule-request URL per line, each with a random channel sample
# and a random single-day window.
for x in range(0, args.n):
    channels_string = ",".join(random.sample(channels, args.num_channels))
    day = random.choice(days)
    print "/4/schedules.json?id=%s&annotations=channel,content_detail&from=%s&to=%s&key=%s&source=%s" % (
        # args.target_host,
        channels_string,
        day.isoformat(),
        (day + datetime.timedelta(1)).isoformat(),
        args.api_key,
        args.source
    )
| {
"content_hash": "ac48bdb128b9b95f753d0bbf6762d38c",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 174,
"avg_line_length": 40.811764705882354,
"alnum_prop": 0.6434130873450562,
"repo_name": "atlasapi/atlas-deer",
"id": "13970788c9a5882bb1e24569bec0d2d6b84d23fc",
"size": "3768",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "atlas-api/src/main/python/generate-load-test-urls.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Closure Templates",
"bytes": "1449"
},
{
"name": "Dockerfile",
"bytes": "14849"
},
{
"name": "Java",
"bytes": "5000455"
},
{
"name": "Python",
"bytes": "33836"
}
],
"symlink_target": ""
} |
# Package version and default AMQP connection constants.
__version__ = '0.0.19'
DEFAULT_PORT = 5672
AMQP_EXCHANGE = "amqppy"
AMQP_BROKER = "amqp://localhost:{}//".format(DEFAULT_PORT)
# Re-export the public API at package level.
from .utils import create_url, parse_url
from .consumer import Worker
from .publisher import Topic, Rpc
import logging
try:
    # not available in python 2.6
    from logging import NullHandler
except ImportError:
    # Fallback handler that silently discards every record.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(NullHandler())
class BrokenConnection(Exception):
    """Raised when the connection to the AMQP broker fails."""
class RpcRemoteException(Exception):
    """Raised by ``publisher.Rpc.request()`` when the remote reply fails."""
class ResponseTimeout(Exception):
    """Raised by ``publisher.Rpc.request()`` when the remote reply exceeds
    its allowed execution time (the timeout)."""
class PublishNotRouted(Exception):
    """Raised by ``publisher.Rpc.request()`` or ``publisher.Topic.publish()``
    when no consumer is listening for those topics or RPC requests, so the
    message cannot be routed."""
class ExclusiveQueue(Exception):
    """Raised by ``consumer.Worker.add_topic()`` / ``add_request()`` when
    consuming with ``exclusive=True`` from a queue that already has a
    consumer attached."""
class ExchangeNotFound(Exception):
    """Raised when the requested AMQP exchange does not exist."""
class AbortConsume(Exception):
    """Raised from a Topic or RPC reply callback to tell amqppy not to ACK
    the message being consumed. Requires ``no_ack=False`` in
    ``consumer.Worker.add_topic()`` / ``add_request()``."""
class DeadLetterMessage(Exception):
    """Raised from a Topic or RPC reply callback to route the message being
    consumed to the dead-letter queue. See https://www.rabbitmq.com/dlx.html."""
| {
"content_hash": "6e9a87297be1ebbeb641e2377675f266",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 132,
"avg_line_length": 27.698795180722893,
"alnum_prop": 0.6763810352327099,
"repo_name": "marceljanerfont/amqppy",
"id": "a477f5538e317b4c1a326d840e484327b8e37abe",
"size": "2326",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "amqppy/__init__.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "48097"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.