| column | type | values |
|---|---|---|
| repo_name | string | lengths 5 to 100 |
| path | string | lengths 4 to 231 |
| language | string | 1 class |
| license | string | 15 classes |
| size | int64 | 6 to 947k |
| score | float64 | 0 to 0.34 |
| prefix | string | lengths 0 to 8.16k |
| middle | string | lengths 3 to 512 |
| suffix | string | lengths 0 to 8.17k |
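Each record below lists the row metadata on one line (repo_name | path | language | license | size | score), followed by the file content. The prefix, middle, and suffix columns form a fill-in-the-middle split of that file, so concatenating the three spans in order reproduces it. A minimal sketch of that reassembly follows, assuming each row is available as a plain Python dict keyed by the column names above; the sample values are hypothetical placeholders, not taken from the data.

```python
# Minimal sketch: rebuild full file text from a fill-in-the-middle row.
# Assumes a row is a dict keyed by the schema columns above; the sample
# values below are hypothetical placeholders, not taken from the dataset.

def reassemble(row: dict) -> str:
    """Concatenate prefix, middle, and suffix into the original file text."""
    return row["prefix"] + row["middle"] + row["suffix"]


if __name__ == "__main__":
    sample_row = {
        "repo_name": "example/repo",   # hypothetical
        "path": "pkg/module.py",       # hypothetical
        "language": "Python",
        "license": "mit",
        "size": 42,
        "score": 0.0,
        "prefix": "def add(a, b):\n    ",
        "middle": "return a + b",
        "suffix": "\n",
    }
    print(reassemble(sample_row))
```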
| mdeff/ntds_2017 | projects/reports/terrorist_attacks/project/visualization.py | Python | mit | 8,333 | 0.022081 |
"""
Visualization module.
"""
import numpy as np
from matplotlib import animation
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
from mpl_toolkits.axes_grid1.inset_locator import zoomed_inset_axes
from mpl_toolkits.axes_grid1.inset_locator import mark_inset
from pca import create_handles
import warnings
warnings.filterwarnings('ignore')
def get_temp_markers(year, attacks):
"""
Gives all the information about the markers needed for the
year passed in argument.
"""
data_given_year = attacks[attacks['Year'] == year].reset_index()
num_markers = data_given_year.shape[0]
markers = np.zeros(num_markers, dtype=[('Longitude', float, 1),
('Latitude', float, 1),
('Size', float, 1),
('Color', float, 1)])
killed = data_given_year['Killed']
_MIN, _MAX, _MEDIAN = killed.min(), killed.max(), killed.median()
markers['Longitude'] = data_given_year['Longitude']
markers['Latitude'] = data_given_year['Latitude']
markers['Size'] = 10* np.abs(killed - _MEDIAN) + 1
markers['Color'] = (killed - _MIN)/(_MAX - _MIN)
return markers, _MAX
def world_view(attacks):
"""
Creates an animation where we see the evolution of the worldwide terrorist attacks
among the available years.
"""
fig = plt.figure(figsize=(10, 10))
cmap = plt.get_cmap('inferno')
# create the map
map = Basemap(projection='cyl')
map.drawmapboundary()
map.fillcontinents(color='lightgray', zorder=0)
# define the frame values (as 1993 is not contained in the database
# we have to remove it, otherwise we will have an empty frame)
frames = np.append(np.arange(1970, 1993), np.arange(1994, 2017))
# create the plot structure
temp_markers, _MAX = get_temp_markers(frames[0], attacks)
xs, ys = map(temp_markers['Longitude'], temp_markers['Latitude'])
scat = map.scatter(xs, ys, s=temp_markers['Size'], c=temp_markers['Color'], cmap=cmap, marker='o',
alpha=0.3, zorder=10)
year_text = plt.text(-170, 80, str(frames[0]),fontsize=15)
cbar = map.colorbar(scat, location='bottom')
cbar.set_label('number of killed people 0.0 = min [0] 1.0 = max [{}]' .format(_MAX))
plt.title('Activity of terrorism attacks from 1970 to 2016')
plt.savefig('world_view.pdf', bbox_inches='tight')
plt.show()
def update(year):
"""
Updates the content of each frame during the animation for
the year passed in argument.
"""
# retrieve necessary information from the markers
temp_markers, _MAX = get_temp_markers(year, attacks)
# update the map content
xs, ys = map(temp_markers['Longitude'], temp_markers['Latitude'])
scat.set_offsets(np.hstack((xs[:,np.newaxis], ys[:, np.newaxis])))
scat.set_color(cmap(temp_markers['Color']))
scat.set_sizes(temp_markers['Size'])
year_text.set_text(str(year))
cbar.set_label('number of killed people 0.0 = min [0] 1.0 = max [{}]' .format(_MAX))
return scat,
# create animation
ani = animation.FuncAnimation(fig, update, interval=1000, frames=frames, blit=True)
ani.save('visualization.mp4', writer = 'ffmpeg', fps=1, bitrate=-1)
plt.show()
def get_group_markers(attacks, group):
"""
Gives all the information about the markers for the
group passed in argument.
"""
data_given_group = attacks[attacks['Group'] == group]
num_markers = data_given_group.shape[0]
markers = np.zeros(num_markers, dtype=[('Longitude', float, 1),
('Latitude', float, 1),
('Size', float, 1),
('Color', float, 1)])
killed = data_given_group['Killed']
_MIN, _MAX, _MEDIAN = killed.min(), killed.max(), killed.median()
markers['Longitude'] = data_given_group['Longitude']
markers['Latitude'] = data_given_group['Latitude']
markers['Size'] = 10* np.abs(killed - _MEDIAN) + 1
markers['Color'] = (killed - _MIN)/(_MAX - _MIN)
return markers, _MAX
def zoom_taliban_intensity(attacks):
"""
Zooms in the particular location of the attacks perpetrated by the Taliban group
showing the intensity of the attacks.
"""
fig = plt.figure(figsize=(15,15))
ax = fig.add_subplot(111)
cmap = plt.get_cmap('inferno')
plt.title('Intensity of attacks perpetrated by the Taliban group\n')
# create the map
map = Basemap(projection='cyl',lat_0=0, lon_0=0)
map.drawmapboundary()
map.fillcontinents(color='lightgray', zorder=0)
# create the plot structure
temp_markers, _MAX = get_group_markers(attacks, 'Taliban')
xs, ys = map(temp_markers['Longitude'], temp_markers['Latitude'])
scat = map.scatter(xs, ys, s=temp_markers['Size'], c=temp_markers['Color'], cmap=cmap, marker='o',
alpha=0.3, zorder=10)
axins = zoomed_inset_axes(ax, 9, loc=2)
axins.set_xlim(25, 40)
axins.set_ylim(60, 75)
plt.xticks(visible=False)
plt.yticks(visible=False)
map2 = Basemap(llcrnrlon=55,llcrnrlat=25,urcrnrlon=75,urcrnrlat=40, ax=axins)
map2.drawmapboundary()
map2.fillcontinents(color='lightgray', zorder=0)
map2.drawcoastlines()
map2.drawcountries()
map2.scatter(xs, ys, s=temp_markers['Size']/5., c=cmap(temp_markers['Color']), alpha=0.5)
mark_inset(ax, axins, loc1=2, loc2=4, fc="none", ec="0.5")
plt.savefig('taliban_zoom_intensity.pdf', bbox_inches='tight')
plt.show()
def get_group_attack_types_markers(attacks, group):
"""
Gives the description of the attack types about the markers for the
group passed in argument.
"""
data_given_year = attacks[attacks['Group'] == group]
list_attack_type_unique = data_given_year['Attack_type'].unique().tolist()
list_attack_type = data_given_year['Attack_type'].tolist()
# assign each attack to the corresponding color
colors_attack_type = plt.cm.tab20(list(range(1,len(list_attack_type_unique)+1)))
label_color_dict_attack_type = dict(zip(list_attack_type_unique, colors_attack_type))
cvec_attack_type = [label_color_dict_attack_type[label] for label in list_attack_type]
num_markers = data_given_year.shape[0]
markers = np.zeros(num_markers, dtype=[('Longitude', float, 1),
('Latitude', float, 1),
('Size', float, 1),
('Color', float, 4)])
killed = data_given_year['Killed']
_MIN, _MAX, _MEDIAN = killed.min(), killed.max(), killed.median()
markers['Longitude'] = data_given_year['Longitude']
markers['Latitude'] = data_given_year['Latitude']
markers['Size'] = 100
markers['Color'] = np.array(cvec_attack_type)
return markers, label_color_dict_attack_type
def zoom_taliban_attack_types(attacks):
"""
Zooms in the particular location of the attacks perpetrated by the Taliban group
showing the different attack types.
"""
group = 'Taliban'
fig = plt.figure(figsize=(15,15))
ax = fig.add_subplot(111)
cmap = plt.get_cmap('inferno')
plt.title('Attack types perpetrated by the Taliban group\n')
# create the map
map = Basemap(projection='cyl',lat_0=0, lon_0=0)
map.drawmapboundary()
map.fillcontinents(color='lightgray', zorder=0)
# create the plot structure
temp_markers, _MAX = get_group_markers(attacks, group)
xs, ys = map(temp_markers['Longitude'], temp_markers['Latitude'])
scat = map.scatter(xs, ys, s=temp_markers['Size'], c=temp_markers['Color'], cmap=cmap, marker='o',
alpha=0.5, zorder=10)
axins = zoomed_inset_axes(ax, 9, loc=2)
axins.set_xlim(25, 40)
axins.set_ylim(60, 75)
plt.xticks(visible=False)
plt.yticks(visible=False)
map2 = Basemap(llcrnrlon=55,llcrnrlat=25,urcrnrlon=75,urcrnrlat=40, ax=axins)
map2.drawmapboundary()
map2.fillcontinents(color='lightgray', zorder=0)
map2.drawcoastlines()
map2.drawcountries()
temp_markers, label_color_dict_attack_type = get_group_attack_types_markers(attacks, group)
map2.scatter(xs, ys, s=temp_markers['Size']/5., c=temp_markers['Color'], alpha=0.5)
mark_inset(ax, axins, loc1=2, loc2=4, fc="none", ec="0.5")
handles = create_handles(label_color_dict_attack_type, ax)
labels = [h.get_label() for h in handles]
ax.legend(loc='
| bitmaintech/p2pool | config.py | Python | gpl-3.0 | 545 | 0 |
TestNet = False
Address = "1MjeEv3WDgycrEaaNeSESrWvRfkU6s81TX"
workerEndpoint = "3333"
DonationPercentage = 0.0
Upnp = True
BitcoindConfigPath = "/opt/bitcoin/bitcoindata/bitcoin.conf"
WORKER_STATUS_REFRESH_TIME = 10
dbService = {}
workerStatus = {}
NodeService = {
'authentication': 'http://127.0.0.1:8080/service/node/authentication.htm'
}
DbOptions = {
'type': 'sql',
'engine': 'mysql',
'dbopts': {
'host': '127.0.0.1',
'db': 'antpooldb',
'user': 'antpool',
'password': 'antpool',
}
}
| dmpayton/django-flanker | tests/settings.py | Python | mit | 609 | 0 |
DEBUG = False
TEMPLATE_DEBUG = DEBUG
TIME_ZONE = 'UTC'
LANGUAGE_CODE = 'en-US'
SITE_ID = 1
USE_L10N = True
USE_TZ = True
SECRET_KEY = 'local'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'django_flanker',
'tests',
)
| wgwoods/anaconda | translation-canary/translation_canary/translated/test_markup.py | Python | gpl-2.0 | 2,842 | 0.002463 |
# Check translations of pango markup
#
# This will look for translatable strings that appear to contain markup and
# check that the markup in the translation matches.
#
# Copyright (C) 2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): David Shea <dshea@redhat.com>
try:
import polib
except ImportError:
print("You need to install the python-polib package to read translations")
raise
from pocketlint.pangocheck import is_markup, markup_match
import xml.etree.ElementTree as ET
def test_markup(mofile):
mo = polib.mofile(mofile)
for entry in mo.translated_entries():
if is_markup(entry.msgid):
# If this is a plural, check each of the plural translations
if entry.msgid_plural:
xlations = entry.msgstr_plural
else:
xlations = {None: entry.msgstr}
for plural_id, msgstr in xlations.items():
# Check if the markup is valid at all
try:
# pylint: disable=unescaped-markup
ET.fromstring('<markup>%s</markup>' % msgstr)
except ET.ParseError:
if entry.msgid_plural:
raise AssertionError("Invalid markup translation for %d translation of msgid %s" %
(plural_id, entry.msgid))
else:
raise AssertionError("Invalid markup translation for msgid %s" % entry.msgid)
# Check if the markup has the same number and kind of tags
if not markup_match(entry.msgid, msgstr):
if entry.msgid_plural:
raise AssertionError("Markup does not match for %d translation of msgid %s" %
(plural_id, entry.msgid))
else:
raise AssertionError("Markup does not match for msgid %s" % entry.msgid)
| nickmckay/LiPD-utilities | Python/lipd/retreive_dataset.py | Python | gpl-2.0 | 12,806 | 0.003983 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 30 09:25:09 2018
@author: deborahkhider
Script to batch download LiPD files from the wiki after query
"""
# 1. Query the wiki (note this is taken directly from the Jupyter Notebook on
# GitHub) The query can be changed but doesn't matter in the grand scheme.
import json
import requests
import sys
import urllib.request
import os
# %% 1.1 Query terms
# By archive
archiveType = ["marine sediment", "Marine Sediment"]
# By variable
proxyObsType = ["Mg/Ca", "Mg Ca"]
infVarType = ["Sea Surface Temperature"]
# By sensor
sensorGenus = ["Globigerinoides"]
sensorSpecies = ["ruber"]
# By interpretation
interpName = ["temperature", "Temperature"]
interpDetail = ["sea surface"]
# By Age
ageUnits = ["yr BP"]
ageBound = [3000, 6000] # Must enter minimum and maximum age search
ageBoundType = ["entirely"] # Other values include "any", "entire"
recordLength = [1500]
# By resolution
# Make sure the resolution makes sense with the age units
# Will look for records with a max resolution of number entered
resolution = [100]
# By location
# Enter latitude boundaries below.
# If searching for entire latitude band, leave blank.
# Otherwise, enter both lower and upper bounds!!!!
# Enter south latitude as negative numbers
lat = [-30, 30]
# Enter Longitude boundaries below
# If searching for entire longitude band, leave blank
# Otherwise, enter both lower and upper bounds!!!!
# Enter west longitude as negative numbers
lon = [100, 160]
# Enter altitude boundaries below
# If not searching for specific altitude, leave blank
# Otherwise, enter both lower and upper bounds!!!!
# Enter depth in the ocean as negative numbers
# All altitudes on the wiki are in m!
alt = [-10000, 0]
# %% 1.2 Make sure everything makes sense
# Make sure that all conditions are met
if len(ageBound) == 1:
sys.exit("You need to provide a minimum and maximum boundary.")
if ageBound and not ageUnits:
sys.exit("When providing age limits, you must also enter the units")
if recordLength and not ageUnits:
sys.exit("When providing a record length, you must also enter the units")
if ageBound and ageBound[0] > ageBound[1]:
ageBound = [ageBound[1], ageBound[0]]
if not ageBoundType:
print("No ageBoundType selected, running the query as 'any'")
ageBoundType = ["any"]
if len(ageBoundType) > 1:
sys.exit("Only one search possible at a time.")
while ageBoundType != "any" and ageBoundType != "entirely" and ageBoundType != "entire":
print("ageBoundType is not recognized")
ageBoundType = input("Please enter either 'any', 'entirely', or 'entire': ")
if recordLength and ageBound and recordLength[0] > (ageBound[1] - ageBound[0]):
sys.exit("The required recordLength is greater than the provided age bounds")
if len(resolution) > 1:
sys.exit("You can only search for a maximum resolution one at a time.")
if len(lat) == 1:
sys.exit("Please enter a lower AND upper boundary for the latitude search")
if lat and lat[1] < lat[0]:
lat = [lat[1], lat[0]]
if len(lon) == 1:
sys.exit("Please enter a lower AND upper boundary for the longitude search")
if lon and lon[1] < lon[0]:
lon = [lon[1], lon[0]]
if len(alt) == 1:
sys.exit("Please enter a lower AND upper boundary for the altitude search")
if alt and alt[1] < alt[0]:
alt = [alt[1], alt[0]]
# %% 1.3 Query
url = "http://wiki.linked.earth/store/ds/query"
query = """PREFIX core: <http://linked.earth/ontology#>
PREFIX wiki: <http://wiki.linked.earth/Special:URIResolver/>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
SELECT distinct ?dataset
WHERE {
"""
### Look for data field
dataQ = ""
if archiveType or proxyObsType or infVarType or sensorGenus or sensorSpecies or interpName or interpDetail or ageUnits or ageBound or recordLength or resolution:
dataQ = "?dataset core:includesChronData|core:includesPaleoData ?data."
### Look for variable
## measuredVariable
measuredVarQ = ""
if proxyObsType or archiveType or sensorGenus or sensorSpecies or interpName or interpDetail or resolution:
measuredVarQ = "?data core:foundInMeasurementTable / core:includesVariable ?v."
## InferredVar
inferredVarQ = ""
if infVarType or interpName or interpDetail or resolution:
inferredVarQ = "?data core:foundInMeasurementTable / core:includesVariable ?v1."
### Archive Query
archiveTypeQ = ""
if len(archiveType) > 0:
# add values for the archiveType
query += "VALUES ?a {"
for item in archiveType:
query += "\"" + item + "\" "
query += "}\n"
# Create the query
archiveTypeQ = """
#Archive Type query
{
?dataset wiki:Property-3AArchiveType ?a.
}UNION
{
?p core:proxyArchiveType / rdfs:label ?a.
}"""
### ProxyObservationQuery
proxyObsTypeQ = ""
if len(proxyObsType) > 0:
# add values for the proxyObservationType
query += "VALUES ?b {"
for item in proxyObsType:
query += "\"" + item + "\""
query += "}\n"
# Create the query
proxyObsTypeQ = "?v core:proxyObservationType/rdfs:label ?b."
### InferredVariableQuery
infVarTypeQ = ""
if len(infVarType) > 0:
query += "VALUES ?c {"
for item in infVarType:
query += "\"" + item + "\""
query += "}\n"
# create the query
infVarTypeQ = """
?v1 core:inferredVariableType ?t.
?t rdfs:label ?c.
"""
### ProxySensorQuery
sensorQ = ""
if len(sensorGenus) > 0 or len(sensorSpecies) > 0:
sensorQ = """
?p core:proxySensorType ?sensor.
"""
## Genus query
genusQ = ""
if len(sensorGenus) > 0:
query += "VALUES ?genus {"
for item in sensorGenus:
query += "\"" + item + "\""
query += "}\n"
# create the query
genusQ = "?sensor core:sensorGenus ?genus."
## Species query
speciesQ = ""
if len(sensorSpecies) > 0:
query += "VALUES ?species {"
for item in sensorSpecies:
query += "\"" + item + "\""
query += "}\n"
# Create the query
speciesQ = "?sensor core:sensorSpecies ?species."
### Proxy system query
proxySystemQ = ""
if len(archiveType) > 0 or len(sensorGenus) > 0 or len(sensorSpecies) > 0:
proxySystemQ = "?v ?proxySystem ?p."
### Deal with interpretation
## Make sure there is an interpretation to begin with
interpQ = ""
if len(interpName) > 0 or len(interpDetail) > 0:
interpQ = """
{?v1 core:interpretedAs ?interpretation}
UNION
{?v core:interpretedAs ?interpretation}
"""
## Name
interpNameQ = ""
if len(interpName) > 0:
query += "VALUES ?intName {"
for item in interpName:
query += "\"" + item + "\""
query += "}\n"
# Create the query
interpNameQ = "?interpretation core:name ?intName."
## detail
interpDetailQ = ""
if len(interpDetail) > 0:
query += "VALUES ?intDetail {"
for item in interpDetail:
query += "\"" + item + "\""
query += "}\n"
# Create the query
interpDetailQ = "?interpretation core:detail ?intDetail."
### Age
## Units
ageUnitsQ = ""
if len(ageUnits) > 0:
query += "VALUES ?units {"
for item in ageUnits:
query += "\"" + item + "\""
query += "}\n"
query += """VALUES ?ageOrYear{"Age" "Year"}\n"""
# create the query
ageUnitsQ = """
?data core:foundInMeasurementTable / core:includesVariable ?v2.
?v2 core:inferredVariableType ?aoy.
?aoy rdfs:label ?ageOrYear.
?v2 core:hasUnits ?units .
"""
## Minimum and maximum
ageQ = ""
if ageBoundType[0] == "entirely":
if len(ageBound) > 0 and len(recordLength) > 0:
ageQ = """
?v2 core:hasMinValue ?e1.
?v2 core:hasMaxValue ?e2.
filter(?e1<=""" + str(ageBound[0]) + """&& ?e2>=""" + str(ageBound[1]) + """ && abs(?e1-?e2)>=""" + str(
recordLength[0]) + """).
"""
elif len(ageBound) > 0 and len(recordLength) == 0:
ageQ = """
?v2 core:hasMinValue ?e1.
?v2 core:hasMaxValue ?e2.
filter(?e1<=""" + str(ageBound[0]) + """&& ?e2>=""" + str(ageBound[1]) + """).
"""
elif ageBoundType[0] == "entire":
if len(ageBound) > 0 and len(recordLength) > 0:
ageQ = """
?v2 core:hasMinValue ?e1.
?v2 core:hasMaxValue ?e2.
filter(?e1>=""" + str(ageBound[0]) + """&& ?e2<=""" + str(ageBound[1]) + """ && abs(?e1-?e2)>=""" + str(
| Kjir/papyon | papyon/gnet/proxy/SOCKS4.py | Python | gpl-2.0 | 4,827 | 0.002486 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Johann Prieur <johann.prieur@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
from abstract import AbstractProxy
from papyon.gnet.io import TCPClient
from papyon.gnet.constants import *
from papyon.gnet.parser import DelimiterParser
import gobject
import struct
__all__ = ['SOCKS4Proxy']
class SOCKS4Proxy(AbstractProxy):
PROTOCOL_VERSION = 4
CONNECT_COMMAND = 1
"""Proxy class used to communicate with SOCKS4 proxies."""
def __init__(self, client, proxy_infos):
assert(proxy_infos.type == 'socks4'), \
"SOCKS4Proxy expects a socks4 proxy description"
# TODO : implement version 4a of the protocol to allow proxy-side name resolution
assert(client.domain == AF_INET), \
"SOCKS4 CONNECT
|
only handles INET address family"
assert(client.type == SOCK_STR
|
EAM), \
"SOCKS4 CONNECT only handles SOCK_STREAM"
assert(client.status == IoStatus.CLOSED), \
"SOCKS4Proxy expects a closed client"
AbstractProxy.__init__(self, client, proxy_infos)
self._transport = TCPClient(self._proxy.host, self._proxy.port)
self._transport.connect("notify::status", self._on_transport_status)
self._transport.connect("error", self._on_transport_error)
self._delimiter_parser = DelimiterParser(self._transport)
self._delimiter_parser.delimiter = 8
self._delimiter_parser.connect("received", self._on_proxy_response)
# Opening state methods
def _pre_open(self, io_object=None):
AbstractProxy._pre_open(self)
def _post_open(self):
AbstractProxy._post_open(self)
host = self._client.get_property("host")
port = self._client.get_property("port")
user = self._proxy.user
proxy_protocol = struct.pack('!BBH', SOCKS4Proxy.PROTOCOL_VERSION,
SOCKS4Proxy.CONNECT_COMMAND, port)
for part in host.split('.'):
proxy_protocol += struct.pack('B', int(part))
proxy_protocol += user
proxy_protocol += struct.pack('B', 0)
self._transport.send(proxy_protocol)
# Public API
def open(self):
"""Open the connection."""
if not self._configure():
return
self._pre_open()
try:
self._transport.open()
except:
pass
def close(self):
"""Close the connection."""
self._client._proxy_closed()
def send(self, buffer, callback=None, *args):
self._client.send(buffer, callback, *args)
# Callbacks
def _on_transport_status(self, transport, param):
if transport.status == IoStatus.OPEN:
self._post_open()
elif transport.status == IoStatus.OPENING:
self._client._proxy_opening(self._transport._transport)
self._status = transport.status
else:
self._status = transport.status
def _on_transport_error(self, transport, error_code):
if error_code == IoError.CONNECTION_FAILED:
error_code = IoError.PROXY_CONNECTION_FAILED
self.close()
self.emit("error", error_code)
def _on_proxy_response(self, parser, response):
version, response_code = struct.unpack('BB', response[0:2])
assert(version == 0)
if self.status == IoStatus.OPENING:
if response_code == 90:
del self._delimiter_parser
self._transport._watch_remove() # HACK: ok this is ugly !
self._client._proxy_open()
elif response_code == 91:
self.close()
self.emit("error", IoError.PROXY_CONNECTION_FAILED)
elif response_code == 92:
self.close()
self.emit("error", IoError.PROXY_AUTHENTICATION_REQUIRED)
elif response_code == 93:
self.close()
self.emit("error", IoError.PROXY_AUTHENTICATION_REQUIRED)
else:
raise NotImplementedError("Unknow Proxy response code")
return False
gobject.type_register(SOCKS4Proxy)
| don-github/edx-platform | lms/djangoapps/certificates/views/webview.py | Python | agpl-3.0 | 23,455 | 0.00469 |
"""
Certificate HTML webview.
"""
from datetime import datetime
from uuid import uuid4
import logging
import urllib
from django.conf import settings
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.template import RequestContext
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from courseware.courses import course_image_url
from edxmako.shortcuts import render_to_response
from edxmako.template import Template
from eventtracking import tracker
from microsite_configuration import microsite
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from student.models import LinkedInAddToProfileConfiguration
from util import organizations_helpers as organization_api
from xmodule.modulestore.django import modulestore
from certificates.api import (
get_active_web_certificate,
get_certificate_url,
emit_certificate_event,
has_html_certificates_enabled,
get_certificate_template
)
from certificates.models import (
GeneratedCertificate,
CertificateHtmlViewConfiguration,
CertificateSocialNetworks,
BadgeAssertion
)
log = logging.getLogger(__name__)
class CourseDoesNotExist(Exception):
"""
This exception is raised in the case where None is returned from the modulestore
"""
pass
def get_certificate_description(mode, certificate_type, platform_name):
"""
:return certificate_type_description on the basis of current mode
"""
certificate_type_description = None
if mode == 'honor':
# Translators: This text describes the 'Honor' course certificate type.
certificate_type_description = _("An {cert_type} Certificate signifies that an {platform_name} "
"learner has agreed to abide by {platform_name}'s honor code and "
"completed all of the required tasks for this course under its "
"guidelines.").format(cert_type=certificate_type,
platform_name=platform_name)
elif mode == 'verified':
# Translators: This text describes the 'ID Verified' course certificate type, which is a higher level of
# verification offered by edX. This type of verification is useful for professional education/certifications
certificate_type_description = _("An {cert_type} Certificate signifies that an {platform_name} "
"learner has agreed to abide by {platform_name}'s honor code and "
"completed all of the required tasks for this course under its "
"guidelines, as well as having their photo ID checked to verify "
"their identity.").format(cert_type=certificate_type,
platform_name=platform_name)
elif mode == 'xseries':
# Translators: This text describes the 'XSeries' course certificate type. An XSeries is a collection of
# courses related to each other in a meaningful way, such as a specific topic or theme, or even an organization
certificate_type_description = _("An {cert_type} Certificate demonstrates a high level of "
"achievement in a program of study, and includes verification of "
"the student's identity.").format(cert_type=certificate_type)
return certificate_type_description
# pylint: disable=bad-continuation
# pylint: disable=too-many-statements
def _update_certificate_context(context, course, user, user_certificate):
"""
Build up the certificate web view context using the provided values
(Helper method to keep the view clean)
"""
# Populate dynamic output values using the course/certificate data loaded above
user_fullname = user.profile.name
platform_name = microsite.get_value("platform_name", settings.PLATFORM_NAME)
certificate_type = context.get('certificate_type')
partner_short_name = course.org
partner_long_name = None
organizations = organization_api.get_course_organizations(course_id=course.id)
if organizations:
#TODO Need to add support for multiple organizations, Currently we are interested in the first one.
organization = organizations[0]
partner_long_name = organization.get('name', partner_long_name)
partner_short_name = organization.get('short_name', partner_short_name)
context['organization_long_name'] = partner_long_name
context['organization_short_name'] = partner_short_name
context['organization_logo'] = organization.get('logo', None)
context['username'] = user.username
context['course_mode'] = user_certificate.mode
context['accomplishment_user_id'] = user.id
context['accomplishment_copy_name'] = user_fullname
context['accomplishment_copy_username'] = user.username
context['accomplishment_copy_course_org'] = partner_short_name
course_title_from_cert = context['certificate_data'].get('course_title', '')
accomplishment_copy_course_name = course_title_from_cert if course_title_from_cert else course.display_name
context['accomplishment_copy_course_name'] = accomplishment_copy_course_name
share_settings = settings.FEATURES.get('SOCIAL_SHARING_SETTINGS', {})
context['facebook_share_enabled'] = share_settings.get('CERTIFICATE_FACEBOOK', False)
context['facebook_app_id'] = getattr(settings, "FACEBOOK_APP_ID", None)
context['facebook_share_text'] = share_settings.get(
'CERTIFICATE_FACEBOOK_TEXT',
_("I completed the {course_title} course on {platform_name}.").format(
course_title=accomplishment_copy_course_name,
platform_name=platform_name
)
)
context['twitter_share_enabled'] = share_settings.get('CERTIFICATE_TWITTER', False)
context['twitter_share_text'] = share_settings.get(
'CERTIFICATE_TWITTER_TEXT',
_("I completed a course on {platform_name}. Take a look at my certificate.").format(
platform_name=platform_name
)
)
context['course_number'] = course.number
try:
badge = BadgeAssertion.objects.get(user=user, course_id=course.location.course_key)
except BadgeAssertion.DoesNotExist:
badge = None
context['badge'] = badge
# Override the defaults with any mode-specific static values
context['certificate_id_number'] = user_certificate.verify_uuid
context['certificate_verify_url'] = "{prefix}{uuid}{suffix}".format(
prefix=context.get('certificate_verify_url_prefix'),
uuid=user_certificate.verify_uuid,
suffix=context.get('certificate_verify_url_suffix')
)
# Translators: The format of the date includes the full name of the month
context['certificate_date_issued'] = _('{month} {day}, {year}').format(
month=user_certificate.modified_date.strftime("%B"),
day=user_certificate.modified_date.day,
year=user_certificate.modified_date.year
)
if partner_long_name:
context['accomplishment_copy_course_description'] = _('a course of study offered by {partner_short_name}, an '
'online learning initiative of {partner_long_name} '
'through {platform_name}.').format(
partner_short_name=partner_short_name,
partner_long_name=partner_long_name,
platform_name=platform_name
)
else:
context['accomplishment_copy_course_description'] = _('a course of study offered by {partner_short_name}, '
'through {platform_name}.').format(
partner_short_name=partner_short_name,
platform_name=platform_name
)
# Translators: Accomplishments describe the awards/certifications obtained by students on this
| edx/edx-platform | common/djangoapps/third_party_auth/apps.py | Python | agpl-3.0 | 919 | 0.003264 |
# lint-amnesty, pylint: disable=missing-module-docstring
from django.apps import AppConfig
from django.conf import settings
class ThirdPartyAuthConfig(AppConfig): # lint-amnesty, pylint: disable=missing-class-docstring
name = 'common.djangoapps.third_party_auth'
verbose_name = "Third-party authentication"
def ready(self):
# To override the settings before loading social_django.
if settings.FEATURES.get('ENABLE_THIRD_PARTY_AUTH', False):
self._enable_third_party_auth()
def _enable_third_party_auth(self):
"""
Enable the use of third_party_auth, which allows users to sign in to edX
using other identity providers. For configuration details, see
common/djangoapps/third_party_auth/settings.py.
"""
from common.djangoapps.third_party_auth import settings as auth_settings
auth_settings.apply_settings(settings)
| goinnn/deldichoalhecho | ddah_web/views.py | Python | gpl-3.0 | 3,903 | 0.000769 |
from django.template.response import TemplateResponse
from ddah_web.models import DDAHInstanceWeb, DdahFlatPage
from django.views.generic.detail import DetailView
from pystache import Renderer
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
import markdown
from django.utils.safestring import mark_safe
class MoustacheTemplateResponseBase(TemplateResponse):
def __init__(self, request, template, context=None, content_type=None, status=None,
charset=None, using=None, context_object_name='instance',
ddah_template=None
):
super(MoustacheTemplateResponseBase, self).__init__(request,
template,
context,
content_type,
status,
charset,
using
)
self.context_object_name = context_object_name
def get_instance(self):
return self.context_data[self.context_object_name]
def get_the_data(self):
raise NotImplementedError("Subclasses should implement this!")
def get_template(self):
raise NotImplementedError("Subclasses should implement this!")
def get_partials(self):
template = self.get_template()
return {
"head": template.head,
"header": template.header,
"style": template.style,
"footer": template.footer,
}
def get_content(self):
return self.get_template().content
@property
def rendered_content(self):
renderer = Renderer(partials=self.get_partials())
return renderer.render(self.get_content(), self.get_the_data())
class MoustacheTemplateResponse(MoustacheTemplateResponseBase):
def get_the_data(self):
instance = self.get_instance()
return instance.get_as_bunch()
def get_template(self):
return self.get_instance().template
class MoustacheFlatPageTemplateResponse(MoustacheTemplateResponseBase):
def get_content(self):
return self.get_template().flat_page_content
def get_the_data(self):
flatpage = self.get_instance()
data = flatpage.instance.get_as_bunch()
del data.summary
del data.categories
data.page_title = flatpage.title
data.page_content = mark_safe(markdown.markdown(flatpage.content, ['markdown.extensions.extra']))
data.enable_comments = flatpage.enable_comments
return data
def get_template(self):
return self.get_instance().instance.template
class DDAHInstanceWebView(DetailView):
response_class = MoustacheTemplateResponse
model = DDAHInstanceWeb
context_object_name = 'instance'
def get_object(self):
return self.model.objects.get(id=self.request.instance.id)
def get_slug_field(self):
return 'label'
class DDAHInstanceWebJSONView(DetailView):
model = DDAHInstanceWeb
context_object_name = 'instance'
def get_object(self):
return self.model.objects.get(id=self.request.instance.id)
def get_slug_field(self):
return 'label'
def render_to_response(self, context, **response_kwargs):
response_data = self.object.to_json()
return HttpResponse(response_data, content_type="application/json")
class FlatPageView(DetailView):
model = DdahFlatPage
response_class = MoustacheFlatPageTemplateResponse
context_object_name = 'instance'
def get_object(self):
return get_object_or_404(DdahFlatPage, url=self.kwargs['url'], instance__id=self.request.instance.id)
| fasaas/owne-coursera | documentation/Crafting-Quality-Code/Doctest/vowels.py | Python | apache-2.0 | 653 | 0 |
def collect_vowels(s):
""" (str) -> str
Return the vowels (a, e, i, o, and u) from s.
>>> collect_vowels('Happy Anniversary!')
'aAiea'
>>> collect_vowels('xyz')
''
"""
vowels = ''
for char in s:
if char in 'aeiouAEIOU':
vowels = vowels + char
return vowels
def count_vowels(s):
""" (str) -> int
Return the number of vowels (a, e, i, o, and u) in s.
>>> count_vowels('Happy Anniversary!')
5
>>> count_vowels('xyz')
0
"""
num_vowels = 0
for char in s:
if char in 'aeiouAEIOU':
num_vowels = num_vowels + 1
return num_vowels
| pygeo/pycmbs | pycmbs/benchmarking/models/mpi_esm.py | Python | mit | 41,720 | 0.004938 |
# -*- coding: utf-8 -*-
"""
This file is part of pyCMBS.
(c) 2012- Alexander Loew
For COPYING and LICENSE details, please refer to the LICENSE file
"""
from cdo import Cdo
from pycmbs.data import Data
import tempfile as tempfile
import copy
import glob
import os
import sys
import numpy as np
from pycmbs.benchmarking import preprocessor
from pycmbs.benchmarking.utils import get_T63_landseamask, get_temporary_directory
from pycmbs.benchmarking.models.model_basic import *
class JSBACH_BOT(Model):
def __init__(self, filename, dic_variables, experiment, name='', shift_lon=False, **kwargs):
super(JSBACH_BOT, self).__init__(filename, dic_variables, name=name, **kwargs)
self.experiment = experiment
self.shift_lon = shift_lon
self.type = 'JSBACH_BOT'
self._unique_name = self._get_unique_name()
def _get_unique_name(self):
"""
get unique name from model and experiment
@return: string with unique combination of models and experiment
"""
return self.name.replace(' ', '') + '-' + self.experiment.replace(' ', '')
def get_albedo_data(self, interval='season'):
"""
get albedo data for JSBACH
returns Data object
"""
if interval != 'season':
raise ValueError('Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry')
v = 'var176'
filename = self.data_dir + 'data/model1/' + self.experiment + '_echam6_BOT_mm_1979-2006_albedo_yseasmean.nc'
ls_mask = get_T63_landseamask(self.shift_lon)
albedo = Data(filename, v, read=True,
label='MPI-ESM albedo ' + self.experiment, unit='-', lat_name='lat', lon_name='lon',
shift_lon=self.shift_lon,
mask=ls_mask.data.data)
return albedo
def get_tree_fraction(self, interval='season'):
"""
todo implement this for data from a real run !!!
"""
if interval != 'season':
raise ValueError('Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry')
ls_mask = get_T63_landseamask(self.shift_lon)
filename = '/home/m300028/shared/dev/svn/trstools-0.0.1/lib/python/pyCMBS/framework/external/vegetation_benchmarking/VEGETATION_COVER_BENCHMARKING/example/historical_r1i1p1-LR_1850-2005_forest_shrub.nc'
v = 'var12'
tree = Data(filename, v, read=True,
label='MPI-ESM tree fraction ' + self.experiment, unit='-', lat_name='lat', lon_name='lon',
shift_lon=self.shift_lon,
mask=ls_mask.data.data, start_time=pl.num2date(pl.datestr2num('2001-01-01')), stop_time=pl.num2date(pl.datestr2num('2001-12-31')))
return tree
def get_grass_fraction(self, interval='season'):
"""
todo implement this for data from a real run !!!
"""
if interval != 'season':
raise ValueError('Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry')
ls_mask = get_T63_landseamask(self.shift_lon)
filename = '/home/m300028/shared/dev/svn/trstools-0.0.1/lib/python/pyCMBS/framework/external/vegetation_benchmarking/VEGETATION_COVER_BENCHMARKING/example/historical_r1i1p1-LR_1850-2005_grass_crop_pasture_2001.nc'
v = 'var12'
grass = Data(filename, v, read=True,
label='MPI-ESM tree fraction ' + self.experiment, unit='-', lat_name='lat', lon_name='lon',
#shift_lon=shift_lon,
mask=ls_mask.data.data, start_time=pl.num2date(pl.datestr2num('2001-01-01')), stop_time=pl.num2date(pl.datestr2num('2001-12-31')), squeeze=True)
return grass
def get_surface_shortwave_radiation_down(self, interval='season'):
"""
get surface shortwave incoming radiation data for JSBACH
returns Data object
"""
if interval != 'season':
raise ValueError('Other temporal sampling than SEASON not supported yet for JSBACH BOT files, sorry')
v = 'var176'
y1 = '1979-01-01'
y2 = '2006-12-31'
rawfilename = self.data_dir + 'data/model/' + self.experiment + '_echam6_BOT_mm_1979-2006_srads.nc'
if not os.path.exists(rawfilename):
return None
#--- read data
cdo = pyCDO(rawfilename, y1, y2)
if interval == 'season':
seasfile = cdo.seasmean()
del cdo
print 'seasfile: ', seasfile
cdo = pyCDO(seasfile, y1, y2)
filename = cdo.yseasmean()
else:
raise ValueError('Invalid interval option %s ' % interval)
#--- read land-sea mask
ls_mask = get_T63_landseamask(self.shift_lon)
#--- read SIS data
sis = Data(filename, v, read=True,
label='MPI-ESM SIS ' + self.experiment, unit='-', lat_name='lat', lon_name='lon',
#shift_lon=shift_lon,
mask=ls_mask.data.data)
return sis
def get_rainfall_data(self, interval='season'):
"""
get rainfall data for JSBACH
returns Data object
"""
if interval == 'season':
pass
else:
raise ValueError('Invalid value for interval: %s' % interval)
#/// PREPROCESSING: seasonal means ///
s_start_time = str(self.start_time)[0:10]
s_stop_time = str(self.stop_time)[0:10]
filename1 = self.data_dir + self.experiment + '_echam6_BOT_mm_1980_sel.nc'
tmp = pyCDO(filename1, s_start_time, s_stop_time).seldate()
tmp1 = pyCDO(tmp, s_start_time, s_stop_time).seasmean()
filename = pyCDO(tmp1, s_start_time, s_stop_time).yseasmean()
#/// READ DATA ///
#1) land / sea mask
ls_mask = get_T63_landseamask(self.shift_lon)
#2) precipitation data
try:
v = 'var4'
rain = Data(filename, v, read=True, scale_factor=86400.,
label='MPI-ESM ' + self.experiment, unit='mm/day', lat_name='lat', lon_name='lon',
shift_lon=self.shift_lon,
mask=ls_mask.data.data)
except:
v = 'var142'
rain = Data(filename, v, read=True, scale_factor=86400.,
label='MPI-ESM ' + self.experiment, unit='mm/day', lat_name='lat', lon_name='lon',
shift_lon=self.shift_lon,
mask=ls_mask.data.data)
return rain
class JSBACH_RAW2(Model):
"""
Class for RAW JSBACH model output
works on the real raw output
"""
#def __init__(self, filename, dic_variables, experiment, name='', shift_lon=False, model_dict=None, input_format='grb', raw_outdata='outdata/jsbach/', **kwargs):
def __init__(self, filename, dic_variables, experiment, name='', shift_lon=False, input_format='grb', raw_outdata='outdata/jsbach/', **kwargs):
"""
The assignment of certain variables to different input streams is done in the routine
get_jsbach_data_generic()
Parameters
----------
input_format : str
specifies file format of input data
['nc','grb']
"""
super(JSBACH_RAW2, self).__init__(filename, dic_variables, name=name, **kwargs)
self.experiment = experiment
self.shift_lon = shift_lon
#self.get_data()
self.type = 'JSBACH_RAW2'
self.input_format = input_format
assert self.input_format in ['nc', 'grb']
self.raw_outdata = raw_outdata
self._unique_name = self._get_unique_name()
# do preprocessing of streams (only needed once!) ---
self.files = {}
self._preproc_streams()
#~ self.model_dict = copy.deepcopy(model_dict)
self.model = 'JSBACH'
def _get_filenames_jsbach_stream(self):
return self.data_dir + self.raw_outdata + self.experiment + '_jsbach_main_mm_*.' + self.input_format
def _get_filenames_veg_stream(self):
return self.data_dir + self.raw_outdata + self.ex
| Ghalko/waterbutler | waterbutler/providers/figshare/provider.py | Python | apache-2.0 | 13,916 | 0.001653 |
import http
import json
import asyncio
import aiohttp
import oauthlib.oauth1
from waterbutler.core import streams
from waterbutler.core import provider
from waterbutler.core import exceptions
from waterbutler.core.path import WaterButlerPath
from waterbutler.providers.figshare import metadata
from waterbutler.providers.figshare import settings
from waterbutler.providers.figshare import utils as figshare_utils
class FigshareProvider:
def __new__(cls, auth, credentials, settings):
if settings['container_type'] == 'project':
return FigshareProjectProvider(auth, credentials, dict(settings, project_id=settings['container_id']))
if settings['container_type'] in ('article', 'fileset'):
return FigshareArticleProvider(auth, credentials, dict(settings, article_id=settings['container_id']))
raise exceptions.ProviderError('Invalid "container_type" {0}'.format(settings['container_type']))
class BaseFigshareProvider(provider.BaseProvider):
NAME = 'figshare'
BASE_URL = settings.BASE_URL
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.client = oauthlib.oauth1.Client(
self.credentials['client_token'],
client_secret=self.credentials['client_secret'],
resource_owner_key=self.credentials['owner_token'],
resource_owner_secret=self.credentials['owner_secret'],
)
@asyncio.coroutine
def make_request(self, method, uri, *args, **kwargs):
signed_uri, signed_headers, _ = self.client.sign(uri, method)
signed_headers.update(kwargs.pop('headers', {}))
kwargs['headers'] = signed_headers
return (yield from super().make_request(method, signed_uri, *args, **kwargs))
@asyncio.coroutine
def web_view(self, path, **kwargs):
if path._is_folder:
raise exceptions.WebViewError('Web view links are not supported for figshare filesets.', code=400)
data = yield from self.metadata(path)
segments = ('articles', data.name, str(data.extra['articleId']))
return provider.build_url(settings.VIEW_URL, *segments)
@asyncio.coroutine
def revalidate_path(self, base, path, folder=False):
wbpath = base
assert base.is_dir
path = path.strip('/')
for entry in (yield from self.metadata(base)):
if entry.name == path:
# base, when referring to a file, may have an article id as well
# This handles that case so the resulting path is actually correct
names, ids = map(lambda x: getattr(entry, x).strip('/').split('/'), ('materialized_path', 'path'))
while names and ids:
wbpath = wbpath.child(names.pop(0), _id=ids.pop(0))
wbpath._is_folder = entry.kind == 'folder'
return wbpath
return base.child(path, folder=False)
class FigshareProjectProvider(BaseFigshareProvider):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.project_id = self.settings['project_id']
@asyncio.coroutine
def validate_path(self, path, **kwargs):
split = path.rstrip('/').split('/')[1:]
wbpath = WaterButlerPath('/', _ids=(self.settings['project_id'], ), folder=True)
if split:
name_or_id = split.pop(0)
try:
article = yield from self._assert_contains_article(name_or_id)
except ValueError:
return wbpath.child(name_or_id, folder=False)
except exceptions.ProviderError as e:
if e.code not in (404, 401):
raise
return wbpath.child(name_or_id, folder=False)
wbpath = wbpath.child(article['title'], article['id'], folder=True)
if split:
provider = yield from self._make_article_provider(article['id'], check_parent=False)
try:
return (yield from provider.validate_path('/'.join([''] + split), parent=wbpath))
except exceptions.ProviderError as e:
if e.code not in (404, 401):
raise
return wbpath.child(split.pop(0), folder=False)
return wbpath
@asyncio.coroutine
def _assert_contains_article(self, article_id):
articles_json = yield from self._list_articles()
try:
return next(
each for each in articles_json
if each['id'] == int(article_id)
)
except StopIteration:
raise exceptions.ProviderError(
'Article {0} not found'.format(article_id),
code=http.client.NOT_FOUND,
)
@asyncio.coroutine
def _make_article_provider(self, article_id, check_parent=True):
article_id = str(article_id)
if check_parent:
yield from self._assert_contains_article(article_id)
settings = {'article_id': article_id}
return FigshareArticleProvider(self.auth, self.credentials, settings, child=True)
@asyncio.coroutine
def _get_project_metadata(self):
response = yield from self.make_request(
'GET',
self.build_url('projects', self.project_id),
expects=(200, ),
)
data = yield from response.json()
return data
return metadata.FigshareProjectMetadata(data)
@asyncio.coroutine
def _list_articles(self):
response = yield from self.make_request(
'GET',
self.build_url('projects', self.project_id, 'articles'),
expects=(200, ),
)
return (yield from response.json())
@asyncio.coroutine
def _get_article_metadata(self, article_id):
provider = yield from self._make_article_provider(article_id, check_parent=False)
return (yield from provider.about())
@asyncio.coroutine
def _project_metadata_contents(self):
articles_json = yield from self._list_articles()
contents = yield from asyncio.gather(*[
self._get_article_metadata(each['id'])
for each in articles_json
])
return [each for each in contents if each]
@asyncio.coroutine
def _create_article(self, name):
response = yield from self.make_request(
'POST',
self.build_url('articles'),
data=json.dumps({
'title': name,
'defined_type': 'dataset',
}),
headers={'Content-Type': 'application/json'},
expects=(200, ),
)
return (yield from response.json())
@asyncio.coroutine
def download(self, path, **kwargs):
if path.identifier is None:
raise exceptions.NotFoundError(str(path))
provider = yield from self._make_article_provider(path.parts[1].identifier)
return (yield from provider.download(path, **kwargs))
@asyncio.coroutine
def upload(self, stream, path, **kwargs):
if not path.parent.is_root:
provider = yield from self._make_article_provider(path.parent.identifier)
else:
article_json = yield from self._create_article(path.name)
provider = yield from self._make_article_provider(article_json['article_id'], check_parent=False)
yield from provider._add_to_project(self.project_id)
return (yield from provider.upload(stream, path, **kwargs))
@asyncio.coroutine
def delete(self, path, **kwargs):
provider = yield from self._make_article_provider(path.parts[1].identifier)
if len(path.parts) == 3:
yield from provider.delete(path, **kwargs)
else:
yield from provider._remove_from_project(self.project_id)
@asyncio.coroutine
def metadata(self, path, **kwargs):
if path.is_root:
return (yield from self._project_metadata_contents())
if path.identifier is None:
raise exceptions.NotFoundError(str(path))
provider = yield from self._make_article_provider(path.parts[1].identifier)
return (yield
| waseem18/bedrock | bin/update/deploy_base.py | Python | mpl-2.0 | 5,260 | 0.00019 |
"""
Deployment for Bedrock in production.
Requires commander (https://github.com/oremj/commander) which is installed on
the systems that need it.
"""
import os
import random
import re
import urllib
import urllib2
from commander.deploy import commands, task, hostgroups
import commander_settings as settings
NEW_RELIC_API_KEY = getattr(settings, 'NEW_RELIC_API_KEY', None)
NEW_RELIC_APP_ID = getattr(settings, 'NEW_RELIC_APP_ID', None)
NEW_RELIC_URL = 'https://rpm.newrelic.com/deployments.xml'
GITHUB_URL = 'https://github.com/mozilla/bedrock/compare/{oldrev}...{newrev}'
def management_cmd(ctx, cmd):
"""Run a Django management command correctly."""
with ctx.lcd(settings.SRC_DIR):
ctx.local('LANG=en_US.UTF-8 python2.6 manage.py ' + cmd)
@task
def reload_crond(ctx):
ctx.local("killall -SIGHUP crond")
@task
def update_code(ctx, tag):
with ctx.lcd(settings.SRC_DIR):
ctx.local("git fetch --all")
ctx.local("git checkout -f %s" % tag)
ctx.local("git submodule sync")
ctx.local("git submodule update --init --recursive")
@task
def update_locales(ctx):
with ctx.lcd(os.path.join(settings.SRC_DIR, 'locale')):
ctx.local("svn up")
@task
def update_assets(ctx):
management_cmd(ctx, 'compress_assets')
management_cmd(ctx, 'update_product_details')
management_cmd(ctx, 'update_externalfiles')
@task
def update_revision_file(ctx):
with ctx.lcd(settings.SRC_DIR):
ctx.local("mv media/revision.txt media/prev-revision.txt")
ctx.local("git rev-parse HEAD > media/revision.txt")
@task
def database(ctx):
management_cmd(ctx, 'syncdb --migrate --noinput')
@task
def checkin_changes(ctx):
ctx.local(settings.DEPLOY_SCRIPT)
@hostgroups(settings.WEB_HOSTGROUP, remote_kwargs={'ssh_key': settings.SSH_KEY})
def deploy_app(ctx):
ctx.remote(settings.REMOTE_UPDATE_SCRIPT)
# ctx.remote("/bin/touch %s" % settings.REMOTE_WSGI)
ctx.remote("service httpd graceful")
@task
def update_info(ctx):
with ctx.lcd(settings.SRC_DIR):
ctx.local("date")
ctx.local("git branch")
ctx.local("git log -3")
ctx.local("git status")
ctx.local("git submodule status")
with ctx.lcd("locale"):
ctx.local("svn info")
ctx.local("svn status")
management_cmd(ctx, 'migrate --list')
@task
def ping_newrelic(ctx):
if NEW_RELIC_API_KEY and NEW_RELIC_APP_ID:
with ctx.lcd(settings.SRC_DIR):
oldrev = ctx.local('cat media/prev-revision.txt').out.strip()
newrev = ctx.local('cat media/revision.txt').out.strip()
log_cmd = 'git log --oneline {0}..{1}'.format(oldrev, newrev)
changelog = ctx.local(log_cmd).out.strip()
print 'Post deployment to New Relic'
desc = generate_desc(oldrev, newrev, changelog)
if changelog:
github_url = GITHUB_URL.format(oldrev=oldrev, newrev=newrev)
changelog = '{0}\n\n{1}'.format(changelog, github_url)
data = urllib.urlencode({
'deployment[description]': desc,
'deployment[revision]': newrev,
'deployment[app_id]': NEW_RELIC_APP_ID,
'deployment[changelog]': changelog,
})
headers = {'x-api-key': NEW_RELIC_API_KEY}
try:
request = urllib2.Request(NEW_RELIC_URL, data, headers)
urllib2.urlopen(request)
except urllib.URLError as exp:
print 'Error notifying New Relic: {0}'.format(exp)
@task
def pre_update(ctx, ref=settings.UPDATE_REF):
commands['update_code'](ref)
commands['update_info']()
@task
def update(ctx):
commands['database']()
commands['update_assets']()
commands['update_locales']()
commands['update_revision_file']()
commands['reload_crond']()
@task
def deploy(ctx):
commands['checkin_changes']()
commands['deploy_app']()
commands['ping_newrelic']()
@task
def update_bedrock(ctx, tag):
"""Do typical bedrock update"""
commands['pre_update'](tag)
commands['update']()
# utility functions #
# shamelessly stolen from https://github.com/mythmon/chief-james/
def get_random_desc():
return random.choice([
'No bugfixes--must be adding infinite loops.',
'No bugfixes--must be rot13ing function names for code security.',
'No bugfixes--must be demonstrating our elite push technology.',
'No bugfixes--must be testing james.',
])
def extract_bugs(changelog):
"""Takes output from git log --oneline and extracts bug numbers"""
bug_regexp = re.compile(r'\bbug (\d+)\b', re.I)
bugs = set()
for line in changelog:
for bug in bug_regexp.findall(line):
bugs.add(bug)
return sorted(list(bugs))
def generate_desc(from_commit, to_commit, changelog):
"""Figures out a good description based on what we're pushing out."""
if from_commit.startswith(to_commit):
desc = 'Pushing {0} again'.format(to_commit)
else:
bugs = extract_bugs(changelog.split('\n'))
if bugs:
bugs = ['bug #{0}'.format(bug) for bug in bugs]
desc = 'Fixing: {0}'.format(', '.join(bugs))
else:
desc = get_random_desc()
return desc
| mrmuxl/keops | keops/middleware/__init__.py | Python | agpl-3.0 | 85 | 0.011765 |
from .db import SingleDBMiddleware, MultiDBMiddleware, get_db, get_user, get_request
| arthurfurlan/django-shortim | src/shortim/migrations/0002_auto__add_field_shorturl_collect_date__add_field_shorturl_title__add_f.py | Python | gpl-3.0 | 2,839 | 0.007045 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'ShortURL.collect_date'
db.add_column('shortim_shorturl', 'collect_date', self.gf('django.db.models.fields.DateTimeField')(default=None, null=True, blank=True), keep_default=False)
# Adding field 'ShortURL.title'
db.add_column('shortim_shorturl', 'title', self.gf('django.db.models.fields.CharField')(default=None, max_length=255, null=True, blank=True), keep_default=False)
# Adding field 'ShortURL.mime'
db.add_column('shortim_shorturl', 'mime', self.gf('django.db.models.fields.CharField')(default=None, max_length=100, null=True, blank=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'ShortURL.collect_date'
db.delete_column('shortim_shorturl', 'collect_date')
# Deleting field 'ShortURL.title'
db.delete_column('shortim_shorturl', 'title')
# Deleting field 'ShortURL.mime'
db.delete_column('shortim_shorturl', 'mime')
models = {
'shortim.shorturl': {
'Meta': {'ordering': "['-id']", 'object_name': 'ShortURL'},
'canonical_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'collect_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mime': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'remote_user': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'title': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'db_index': 'True'})
},
'shortim.shorturlhit': {
'Meta': {'ordering': "['-date']", 'object_name': 'ShortURLHit'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'remote_user': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'shorturl': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'hits'", 'to': "orm['shortim.ShortURL']"})
}
}
complete_apps = ['shortim']
| HybridF5/jacket | jacket/tests/compute/unit/virt/hyperv/test_livemigrationops.py | Python | apache-2.0 | 5,845 | 0 |
# Copyright 2014 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from os_win import exceptions as os_win_exc
from oslo_config import cfg
from jacket.tests.compute.unit import fake_instance
from jacket.tests.compute.unit.virt.hyperv import test_base
from jacket.compute.virt.hyperv import livemigrationops
CONF = cfg.CONF
class LiveMigrationOpsTestCase(test_base.HyperVBaseTestCase):
"""Unit tests for the Hyper-V LiveMigrationOps class."""
def setUp(self):
super(LiveMigrationOpsTestCase, self).setUp()
self.context = 'fake_context'
self._livemigrops = livemigrationops.LiveMigrationOps()
self._livemigrops._livemigrutils = mock.MagicMock()
self._livemigrops._pathutils = mock.MagicMock()
@mock.patch('compute.virt.hyperv.vmops.VMOps.copy_vm_console_logs')
@mock.patch('compute.virt.hyperv.vmops.VMOps.copy_vm_dvd_disks')
def _test_live_migration(self, mock_get_vm_dvd_paths,
mock_copy_logs, side_effect):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_post = mock.MagicMock()
mock_recover = mock.MagicMock()
fake_dest = mock.sentinel.DESTINATION
self._livemigrops._livemigrutils.live_migrate_vm.side_effect = [
side_effect]
if side_effect is os_win_exc.HyperVException:
self.assertRaises(os_win_exc.HyperVException,
self._livemigrops.live_migration,
self.context, mock_instance, fake_dest,
mock_post, mock_recover, False, None)
mock_recover.assert_called_once_with(self.context, mock_instance,
fake_dest, False)
else:
self._livemigrops.live_migration(context=self.context,
instance_ref=mock_instance,
dest=fake_dest,
post_method=mock_post,
recover_method=mock_recover)
mock_copy_logs.assert_called_once_with(mock_instance.name,
fake_dest)
mock_live_migr = self._livemigrops._livemigrutils.live_migrate_vm
mock_live_migr.assert_called_once_with(mock_instance.name,
fake_dest)
mock_post.assert_called_once_with(self.context, mock_instance,
fake_dest, False)
def test_live_migration(self):
self._test_live_migration(side_effect=None)
def test_live_migration_exception(self):
self._test_live_migration(side_effect=os_win_exc.HyperVException)
@mock.patch('compute.virt.hyperv.volumeops.VolumeOps'
'.ebs_root_in_block_devices')
@mock.patch('compute.virt.hyperv.imagecache.ImageCache.get_cached_image')
@mock.patch('compute.virt.hyperv.volumeops.VolumeOps'
'.initialize_volumes_connection')
def test_pre_live_migration(self, mock_initialize_connection,
mock_get_cached_image,
mock_ebs_root_in_block_devices):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_instance.image_ref = "fake_image_ref"
mock_ebs_root_in_block_devices.return_value = None
CONF.set_override('use_cow_images', True)
self._livemigrops.pre_live_migration(
self.context, mock_instance,
block_device_info=mock.sentinel.BLOCK_INFO,
network_info=mock.sentinel.NET_INFO)
check_config = (
self._livemigrops._livemigrutils.check_live_migration_config)
check_config.assert_called_once_with()
mock_ebs_root_in_block_devices.assert_called_once_with(
mock.sentinel.BLOCK_INFO)
mock_get_cached_image.assert_called_once_with(self.context,
mock_instance)
mock_initialize_connection.assert_called_once_with(
mock.sentinel.BLOCK_INFO)
@mock.patch('compute.virt.hyperv.volumeops.VolumeOps.disconnect_volumes')
def test_post_live_migration(self, mock_disconnect_volumes):
self._livemigrops.post_live_migration(
self.context, mock.sentinel.instance,
mock.sentinel.block_device_info)
        mock_disconnect_volumes.assert_called_once_with(
mock.sentinel.block_device_info)
self._livemigrops._pathutils.get_instance_dir.assert_called_once_with(
mock.sentinel.instance.name, create_dir=False, remove_dir=True)
@mock.patch('compute.virt.hyperv.vmops.VMOps.log_vm_serial_output')
def test_post_live_migration_at_destination(self, mock_log_vm):
mock_instance = fake_instance.fake_instance_obj(self.context)
self._livemigrops.post_live_migration_at_destination(
self.context, mock_instance, network_info=mock.sentinel.NET_INFO,
block_migration=mock.sentinel.BLOCK_INFO)
mock_log_vm.assert_called_once_with(mock_instance.name,
mock_instance.uuid)
|
mesosphere/marathon
|
tests/performance/apps.py
|
Python
|
apache-2.0
| 445
| 0.011236
|
import requests
import json
def generate_apps():
apps = [{'id': '/app-{}'.format(i), 'cmd': 'sleep 3600', 'cpus': 0.1, 'mem': 32, 'instances': 0} for i in range(1000)]
    groups = {'id': '/', 'groups': [], 'apps': apps}
return groups
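# Worked example (added for illustration, not in the original script): the first
# generated entry is {'id': '/app-0', 'cmd': 'sleep 3600', 'cpus': 0.1, 'mem': 32,
# 'instances': 0}, and generate_apps() wraps all 1000 such suspended apps in the
# root group {'id': '/', 'groups': [], 'apps': [...]} that main() PUTs to Marathon.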
def main():
apps = generate_apps()
r = requests.put("http://localhost:8080/v2/groups?force=true", json=apps)
print(r.text)
r.raise_for_status()
if __name__ == "__main__":
main()
|
bygreencn/DIGITS
|
plugins/data/imageGradients/digitsDataPluginImageGradients/data.py
|
Python
|
bsd-3-clause
| 3,492
| 0.001145
|
# Copyright (c) 2016, NVIDIA CORPORATION. All rights reserved.
from __future__ import absolute_import
from digits.utils import subclass, override, constants
from digits.extensions.data.interface import DataIngestionInterface
from .forms import DatasetForm, InferenceForm
import numpy as np
import os
TEMPLATE = "templates/template.html"
INFERENCE_TEMPLATE = "templates/inference_template.html"
@subclass
class DataIngestion(DataIngestionInterface):
"""
A data ingestion extension for an image gradient dataset
"""
def __init__(self, is_inference_db=False, **kwargs):
super(DataIngestion, self).__init__(**kwargs)
self.userdata['is_inference_db'] = is_inference_db
# Used to calculate the gradients later
self.yy, self.xx = np.mgrid[:self.image_height,
:self.image_width].astype('float')
@override
def encode_entry(self, entry):
xslope, yslope = entry
label = np.array([xslope, yslope])
a = xslope * 255 / self.image_width
b = yslope * 255 / self.image_height
image = a * (self.xx - self.image_width/2) + b * (self.yy - self.image_height/2) + 127.5
image = image.astype('uint8')
# convert to 3D tensors
image = image[np.newaxis, ...]
label = label[np.newaxis, np.newaxis, ...]
return image, label
@staticmethod
@override
def get_category():
return "Images"
@staticmethod
@override
def get_id():
return "image-gradients"
@staticmethod
@override
def get_dataset_form():
return DatasetForm()
@staticmethod
@override
def get_dataset_template(form):
"""
parameters:
- form: form returned by get_dataset_form(). This may be populated
with values if the job was cloned
return:
- (template, context) tuple
- template is a Jinja template to use for rendering dataset creation
options
- context is a dictionary of context variables to use for rendering
the form
"""
extension_dir = os.path.dirname(os.path.abspath(__file__))
template = open(os.path.join(extension_dir, TEMPLATE), "r").read()
context = {'form': form}
        return (template, context)
@override
def get_inference_form(self):
return InferenceForm()
@staticmethod
@override
def get_inference_template(form):
extension_dir = os.path.dirname(os.path.abspath(__file__))
template = open(os.path.join(extension_dir, INFERENCE_TEMPLATE), "r").read()
context = {'form': form}
return (template, context)
@staticmethod
    @override
def get_title():
return "Gradients"
@override
def itemize_entries(self, stage):
count = 0
if self.userdata['is_inference_db']:
if stage == constants.TEST_DB:
if self.test_image_count:
count = self.test_image_count
else:
return [(self.gradient_x, self.gradient_y)]
else:
if stage == constants.TRAIN_DB:
count = self.train_image_count
elif stage == constants.VAL_DB:
count = self.val_image_count
elif stage == constants.TEST_DB:
count = self.test_image_count
return [np.random.random_sample(2) - 0.5 for i in xrange(count)] if count > 0 else []
|
rezoo/chainer
|
tests/chainer_tests/iterators_tests/test_iterator_compatibility.py
|
Python
|
mit
| 2,915
| 0
|
from __future__ import division
import unittest
import itertools
import numpy
from chainer import iterators
from chainer import serializer
from chainer import testing
class DummySerializer(serializer.Serializer):
def __init__(self, target):
super(DummySerializer, self).__init__()
self.target = target
def __getitem__(self, key):
raise NotImplementedError
def __call__(self, key, value):
self.target[key] = value
return self.target[key]
class DummyDeserializer(serializer.Deserializer):
def __init__(self, target):
super(DummyDeserializer, self).__init__()
self.target = target
def __getitem__(self, key):
raise NotImplementedError
def __call__(self, key, value):
if value is None:
value = self.target[key]
        elif isinstance(value, numpy.ndarray):
numpy.copyto(value, self.target[key])
else:
value = type(value)(numpy.asarray(self.target[key]))
return value
@testing.parameterize(*testing.product({
'n_prefetch': [1, 2],
    'shared_mem': [None, 1000000],
}))
class TestIteratorCompatibility(unittest.TestCase):
def setUp(self):
self.n_processes = 2
self.options = {'n_processes': self.n_processes,
'n_prefetch': self.n_prefetch,
'shared_mem': self.shared_mem}
    def test_iterator_compatibility(self):
dataset = [1, 2, 3, 4, 5, 6]
iters = (
lambda: iterators.SerialIterator(dataset, 2),
lambda: iterators.MultiprocessIterator(dataset, 2, **self.options),
)
for it_before, it_after in itertools.permutations(iters, 2):
it = it_before()
self.assertEqual(it.epoch, 0)
self.assertAlmostEqual(it.epoch_detail, 0 / 6)
batch1 = it.next()
self.assertEqual(len(batch1), 2)
self.assertIsInstance(batch1, list)
self.assertFalse(it.is_new_epoch)
self.assertAlmostEqual(it.epoch_detail, 2 / 6)
batch2 = it.next()
self.assertEqual(len(batch2), 2)
self.assertIsInstance(batch2, list)
self.assertFalse(it.is_new_epoch)
self.assertAlmostEqual(it.epoch_detail, 4 / 6)
target = dict()
it.serialize(DummySerializer(target))
it = it_after()
it.serialize(DummyDeserializer(target))
self.assertFalse(it.is_new_epoch)
self.assertAlmostEqual(it.epoch_detail, 4 / 6)
batch3 = it.next()
self.assertEqual(len(batch3), 2)
self.assertIsInstance(batch3, list)
self.assertTrue(it.is_new_epoch)
self.assertEqual(sorted(batch1 + batch2 + batch3), dataset)
self.assertAlmostEqual(it.epoch_detail, 6 / 6)
testing.run_module(__name__, __file__)
|
bhallen/pyparadigms
|
hypothesize.py
|
Python
|
bsd-3-clause
| 15,896
| 0.006102
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
## Based on learner.js (by Blake Allen and Michael Becker)
import itertools
import collections
from collections import defaultdict
import pdb
import phoment
class Change(object):
def __init__(self, change_type, position, input_material, output_material):
self.change_type = change_type
self.position = position
self.input_material = input_material
self.output_material = output_material
def __repr__(self):
# needs aesthetic improvement
return '{0} {1} to {2} at {3}'.format(self.change_type, self.input_material, self.output_material, self.position)
def __str__(self):
return self.__repr__()
class Sublexicon(object):
"""Starts off as a hypothesis; will grow and compete with others, potentially becoming a sublexicon of the final grammar
"""
def __init__(self, changes, associated_forms):
self.changes = tuple(sorted(changes, key=lambda c:str(c)))
self.associated_forms = associated_forms
self.constraint_names = None
self.weights = None
self.megatableau = None
self.relative_size = 0.0
def __repr__(self):
# needs aesthetic improvement
example_count = min(5, len(self.associated_forms))
return str(self.changes)
def __str__(self):
return self.__repr__()
def create_and_reduce_hypotheses(alignments, pre_reduction_cutoff, orientation='product'):
unfiltered_hypotheses = []
all_pairs = []
for alignment in alignments:
base = linearize_word([column['elem1'] for column in alignment['alignment']])
derivative = linearize_word([column['elem2'] for column in alignment['alignment']])
basic_changes = find_basic_changes(alignment['alignment'])
grouped_changes = group_changes(basic_changes)
possibilities_for_all_changes = [create_change_possibilities(c, base) for c in grouped_changes]
product = list(itertools.product(*possibilities_for_all_changes))
for cp in product:
unfiltered_hypotheses.append(Sublexicon(cp, [{'base':base, 'derivative':derivative, 'probability':alignment['probability'], 'lexeme':alignment['lexeme']}]))
all_pairs.append({'base':base, 'derivative':derivative, 'probability':alignment['probability'], 'lexeme':alignment['lexeme']})
combined_hypotheses = combine_identical_hypotheses(unfiltered_hypotheses)
combined_hypotheses.sort(key=lambda h: len(h.associated_forms))
combined_hypotheses.reverse()
if pre_reduction_cutoff:
combined_hypotheses = [h for h in combined_hypotheses if len(h.associated_forms) >= pre_reduction_cutoff]
print('Hypotheses ready for reduction. Pre-reduction hypothesis count: {}'.format(str(len(combined_hypotheses))))
reduced_hypotheses = reduce_hypotheses(combined_hypotheses, all_pairs, orientation)
sublexicon_sizes = [sum([af['probability'] for af in h.associated_forms]) for h in reduced_hypotheses]
size_total = sum(sublexicon_sizes)
for h, size in zip(reduced_hypotheses, sublexicon_sizes):
h.relative_size = size / size_total
h.total_probability = sum([af['probability'] for af in h.associated_forms])
return reduced_hypotheses
def find_basic_changes(alignment):
"""Find the differences between the aligned base and derivative.
Return differences as Changes with positive indices as positions.
"""
changes = []
surface_i = 0
for column in alignment:
if column['elem1'] != column['elem2']:
if column['elem1'] == None:
changes.append(Change('insert', surface_i*2, [column['elem1']], [column['elem2']]))
# surface_i does not increment
elif column['elem2'] == None:
changes.append(Change('delete', surface_i*2+1, [column['elem1']], [column['elem2']]))
surface_i += 1
else:
changes.append(Change('mutate', surface_i*2+1, [column['elem1']], [column['elem2']]))
surface_i += 1
else:
surface_i += 1
return changes
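# Worked example (added for illustration, not part of the original source):
# aligning base "k a t" with derivative "k a t s" gives three matching columns
# followed by one insertion column (None, 's'), so find_basic_changes returns a
# single 'insert' Change at position 6 -- even positions index the slots
# between/around segments, odd positions index the segments themselves.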
def create_change_possibilities(change, base, side='both'):
"""Given a change with segments as input and output and a positive index as position,
return a list of changes with different positions/inputs/outputs.
"""
change_possibilities = []
if side in ['left', 'both']:
change_possibilities.append(change)
if side in ['right', 'both']:
noned_base = add_nones(base)
new_change = Change(change.change_type, -(len(noned_base)-change.position), change.input_material, change.output_material)
change_possibilities.append(new_change)
return change_possibilities
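# Illustrative example (not from the original file): for base "k a t" the
# None-padded form has length 7, so a change anchored at position 6 counted from
# the left also gets a right-anchored variant at position -(7 - 6) = -1.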
def group_changes(changes):
"""Consolidate same-position insertions and deletions into single changes.
"""
insertions = [c for c in changes if c.change_type == 'insert']
deletions = [c for c in changes if c.change_type == 'delete']
mutations = [c for c in changes if c.change_type == 'mutate']
inserted_locations = [ins.position for ins in insertions]
grouped_insertions = []
for i, ins in enumerate(insertions):
if i > 0:
if ins.position == insertions[i-1].position:
grouped_insertions[-1].output_material += ins.output_material
continue
grouped_insertions.append(ins)
grouped_deletions = []
for i, dlt in enumerate(deletions):
if i > 0:
if dlt.position == deletions[i-1].position+2 and dlt.position-1 not in inserted_locations:
grouped_deletions[-1].input_material += dlt.input_material
continue
grouped_deletions.append(dlt)
return sorted(grouped_insertions + grouped_deletions + mutations, key=lambda x: x.position)
def combine_identical_hypotheses(hypotheses):
"""Combine hypotheses with the same Change objects, yielding hypotheses with associated assoc_forms
that are the superset of component hypotheses.
"""
temp_dict = defaultdict(list)
for h in hypotheses:
temp_dict[str(h.changes)].append(h)
grouped_hypotheses = []
for gh in temp_dict:
assoc_forms = [h.associated_forms[0] for h in temp_dict[gh]]
grouped_hypotheses.append(Sublexicon(temp_dict[gh][0].changes, assoc_forms))
return grouped_hypotheses
def add_nones(word):
"""Change word into a list and add None at its beginning, end, and between every other pair of elements. Works whether the word is a str or a list.
"""
def yield_it(word_string):
yield None
it = iter(word_string)
yield next(it)
for x in it:
yield None
yield x
yield None
if isinstance(word, str):
return list(yield_it(word.split(' ')))
else:
return list(yield_it(word))
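# Worked example (illustrative): add_nones('k a t') and add_nones(['k', 'a', 't'])
# both return [None, 'k', None, 'a', None, 't', None], i.e. a slot before, after
# and between every segment for the change positions above to index into.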
def apply_change(current_base, current_derivative, change, orientation):
"""Use the given set of changes to derive a new form from the base word.
May be only one intermediate step in the application of multiple
changes associated with a single hypothesis/sublexicon.
"""
change_position = make_index_positive(current_base, change.position)
changed_base = current_base[:]
changed_derivative = current_derivative[:]
if change.change_type == 'insert':
changed_base[change_position] = [None for s in change.output_material]
changed_derivative[change_position] = change.output_material
if change.change_type == 'delete':
for i, s in enumerate(change.input_material):
            if orientation == 'source' and current_base[change_position+(i*2)] != s:
raise Exception('Deletion incompatible with base: no {} to delete.'.format(s))
            changed_derivative[change_position+(i*2)] = None
if change.change_type == 'mutate':
for i, s in enumerate(change.output_material):
if orientation == 'source' and current_base[change_position+(i*2)] != chang
|
portfoliome/foil
|
tests/test_logger.py
|
Python
|
mit
| 936
| 0
|
import json
import unittest
from logging import INFO, LogRecord
from foil.logger import JSONFormatter
class TestLogFormatter(unittest.TestCase):
def test_json_formatter(self):
name = 'name'
line = 42
module = 'some_module'
func = 'some_function'
msg = {'content': 'sample log'}
log_record = LogRecord(
name, INFO, module, line, msg, None, None, func=func
)
formatter = JSONFormatter()
log_result = formatter.format(log_record)
result = json.loads(log_result)
# check some of the fields to ensure json formatted correctly
self.assertEqual(name, result['name'])
self.assertEqual(line, result['lineNumber'])
self.assertEqual(func, result['functionName'])
self.assertEqual(module, result['module'])
self.assertEqual('INFO', result['level'])
self.assertEqual(msg, result['message'])
|
bjpop/complexo_pipeline
|
src/pipeline.py
|
Python
|
bsd-3-clause
| 6,819
| 0.002346
|
'''
Build the pipeline workflow by plumbing the stages together.
'''
from ruffus import Pipeline, suffix, formatter, add_inputs, output_from
from stages import Stages
def make_pipeline(state):
'''Build the pipeline by constructing stages and connecting them together'''
# Build an empty pipeline
pipeline = Pipeline(name='complexo')
# Get a list of paths to all the FASTQ files
fastq_files = state.config.get_option('fastqs')
# Stages are dependent on the state
stages = Stages(state)
# The original FASTQ files
# This is a dummy stage. It is useful because it makes a node in the
# pipeline graph, and gives the pipeline an obvious starting point.
pipeline.originate(
task_func=stages.original_fastqs,
name='original_fastqs',
output=fastq_files)
# Align paired end reads in FASTQ to the reference producing a BAM file
pipeline.transform(
task_func=stages.align_bwa,
name='align_bwa',
input=output_from('original_fastqs'),
# Match the R1 (read 1) FASTQ file and grab the path and sample name.
# This will be the first input to the stage.
# We assume the sample name may consist of only alphanumeric
# characters.
filter=formatter('.+/(?P<sample>[a-zA-Z0-9]+)_R1.fastq.gz'),
# Add one more inputs to the stage:
# 1. The corresponding R2 FASTQ file
add_inputs=add_inputs('{path[0]}/{sample[0]}_R2.fastq.gz'),
# Add an "extra" argument to the state (beyond the inputs and outputs)
# which is the sample name. This is needed within the stage for finding out
# sample specific configuration options
extras=['{sample[0]}'],
# The output file name is the sample name with a .bam extension.
output='{path[0]}/{sample[0]}.bam')
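    # Worked example of the filter/add_inputs pattern above (illustrative
    # filenames, not from the original config): for an input
    # /data/sample27_R1.fastq.gz, {path[0]} is /data and {sample[0]} is sample27,
    # so the added second input is /data/sample27_R2.fastq.gz and the output is
    # /data/sample27.bam.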
# Sort the BAM file using Picard
pipeline.transform(
task_func=stages.sort_bam_picard,
name='sort_bam_picard',
input=output_from('align_bwa'),
filter=suffix('.bam'),
output='.sort.bam')
# Mark duplicates in the BAM file using Picard
pipeline.transform(
task_func=stages.mark_duplicates_picard,
name='mark_duplicates_picard',
input=output_from('sort_bam_picard'),
filter=suffix('.sort.bam'),
# XXX should make metricsup an extra output?
output=['.sort.dedup.bam', '.metricsdup'])
# Generate chromosome intervals using GATK
pipeline.transform(
task_func=stages.chrom_intervals_gatk,
name='chrom_intervals_gatk',
input=output_from('mark_duplicates_picard'),
filter=suffix('.sort.dedup.bam'),
output='.chr.intervals')
# Local realignment using GATK
(pipeline.transform(
task_func=stages.local_realignment_gatk,
name='local_realignment_gatk',
input=output_from('chrom_intervals_gatk'),
filter=formatter('.+/(?P<sample>[a-zA-Z0-9]+).chr.intervals'),
add_inputs=add_inputs('{path[0]}/{sample[0]}.sort.dedup.bam'),
output='{path[0]}/{sample[0]}.sort.dedup.realn.bam')
.follows('mark_duplicates_picard'))
# Base recalibration using GATK
pipeline.transform(
task_func=stages.base_recalibration_gatk,
name='base_recalibration_gatk',
input=output_from('local_realignment_gatk'),
filter=suffix('.sort.dedup.realn.bam'),
output=['.recal_data.csv', '.count_cov.log'])
# Print reads using GATK
(pipeline.transform(
task_func=stages.print_reads_gatk,
name='print_reads_gatk',
input=output_from('base_recalibration_gatk'),
filter=formatter('.+/(?P<sample>[a-zA-Z0-9]+).recal_data.csv'),
add_inputs=add_inputs('{path[0]}/{sample[0]}.sort.dedup.realn.bam'),
output='{path[0]}/{sample[0]}.sort.dedup.realn.recal.bam')
.follows('local_realignment_gatk'))
# Call variants using GATK
pipeline.transform(
task_func=stages.call_variants_gatk,
name='call_variants_gatk',
input=output_from('print_reads_gatk'),
filter=suffix('.sort.dedup.realn.recal.bam'),
output='.raw.snps.indels.g.vcf')
# Combine G.VCF files for all samples using GATK
pipeline.merge(
task_func=stages.combine_gvcf_gatk,
name='combine_gvcf_gatk',
input=output_from('call_variants_gatk'),
output='PCExomes.mergegvcf.vcf')
# Genotype G.VCF files using GATK
pipeline.transform(
task_func=stages.genotype_gvcf_gatk,
name='genotype_gvcf_gatk',
input=output_from('combine_gvcf_gatk'),
filter=suffix('.mergegvcf.vcf'),
output='.genotyped.vcf')
# SNP recalibration using GATK
pipeline.transform(
task_func=stages.snp_recalibrate_gatk,
name='snp_recalibrate_gatk',
input=output_from('genotype_gvcf_gatk'),
filter=suffix('.genotyped.vcf'),
output=['.snp_recal', '.snp_tranches', '.snp_plots.R'])
# INDEL recalibration using GATK
pipeline.transform(
task_func=stages.indel_recalibrate_gatk,
name='indel_recalibrate_gatk',
input=output_from('genotype_gvcf_gatk'),
filter=suffix('.genotyped.vcf'),
output=['.indel_recal', '.indel_tranches', '.indel_plots.R'])
# Apply SNP recalibration using GATK
(pipeline.transform(
task_func=stages.apply_snp_recalibrate_gatk,
name='apply_snp_recalibrate_gatk',
input=output_from('genotype_gvcf_gatk'),
filter=suffix('.genotyped.vcf'),
add_inputs=add_inputs(['PCExomes.snp_recal', 'PCExomes.snp_tranches']),
output='.recal_SNP.vcf')
.follows('snp_recalibrate_gatk'))
# Apply INDEL recalibration using GATK
(pipeline.transform(
task_func=stages.apply_indel_recalibrate_gatk,
name='apply_indel_recalibrate_gatk',
input=output_from('genotype_gvcf_gatk'),
filter=suffix('.genotyped.vcf'),
add_inputs=add_inputs(['PCExomes.indel_recal', 'PCExomes.indel_tranches']),
output='.recal_INDEL.vcf')
    .follows('indel_recalibrate_gatk'))
# Combine variants using GATK
(pipeline.transform(
task_func=stages.combine_variants_gatk,
name='combine_variants_gatk',
input=output_from('apply_snp_recalibrate_gatk'),
filter=suffix('.recal_SNP.vcf'),
add_inputs=add_inputs(['PCExomes.recal_INDEL.vcf']),
output='.combined.vcf')
.follows('apply_indel_recalibrate_gatk'))
# Select variants using GATK
pipeline.transform(
task_func=stages.select_variants_gatk,
name='select_variants_gatk',
input=output_from('combine_variants_gatk'),
filter=suffix('.combined.vcf'),
output='.selected.vcf')
return pipeline
|
Chrisplus/HeyoDict
|
Dict.py
|
Python
|
gpl-2.0
| 2,929
| 0.034141
|
#! /user/bin/python
#! -*- coding: utf-8 -*-
import sys
import urllib, urllib2
import json
"""
Reversion HeyooDic
Transfer from unofficial API to official API
Chrisplus
2014-6
"""
# Key and name
url = "http://fanyi.youdao.com/openapi.do?%s"
keyFrom = "SunnyArtStudio"
key = "1884243682"
dataType = "data"
docType = "json"
version = "1.1"
queryWord = ""
# dic or translate
only = ""
core_command_quit = ".exit"
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
def main(argv):
if len(argv) == 1:
queryword = argv[0]
sendRequest(queryword, None)
elif len(argv) == 2:
if argv[0] == "-d":
only = "dict"
queryword = argv[1]
sendRequest(queryword, only)
elif argv[0] == "-t":
only = "tanslate"
queryword = argv[1]
sendRequest(queryword, only)
else:
printUsage()
else:
printUsage()
def printUsage():
print "Dic.py -d or -t word"
def sendRequest(keyword, selection):
# build parameters
if keyword is None or not keyword:
return -1
param = {}
param.update({'keyfrom' : keyFrom})
param.update({'key' : key})
param.update({'type': dataType})
param.update({'doctype': docType})
param.update({'version': version})
if selection is not None:
param.update({'only': selection})
param.update({'q': keyword})
# build url and send request
requestUrl = url % urllib.urlencode(param)
# print requestUrl
try:
content = urllib2.urlopen(requestUrl).read()
except:
print bcolors.WARNING + "Network Error"+bcolors.ENDC
return -1
parseContent(content)
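# Worked example (illustrative): for keyword 'hello' with no -d/-t flag, the
# request URL becomes
# http://fanyi.youdao.com/openapi.do?keyfrom=SunnyArtStudio&key=1884243682&type=data&doctype=json&version=1.1&q=hello
# (parameter order may differ, since the param dict's ordering is not guaranteed).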
def parseContent(content):
try:
jsonData = json.loads(content)
except ValueError:
print "Invalid Json Content"
return -1
# handle error code
errorCode = jsonData['errorCode']
if errorCode == 0:
# successful
showResult(jsonData)
elif errorCode == 20:
print "Too many words"
elif errorCode == 30:
print "I cannot do it"
elif errorCode == 40:
print "I have no idea"
elif errorCode == 50:
print "Invalid key"
elif errorCode == 60:
print "No results"
#finish
def showResult(jsondata):
#First extract useful fields
words = jsondata['query']
phonetic = jsondata['basic'].get('us-phonetic','')
explains = jsondata['basic']['explains']
    web_explains = jsondata['web']
#Then show word and its phonetic
basic_meaning = words + bcolors.HEADER + " [" + phonetic + "]" + bcolors.ENDC
#Then show the explainations from dict
print '======== ' + basic_meaning + ' ========'
for ex in explains:
print ' ' + bcolors.OKGREEN + ex + bcolors.ENDC + ' '
print '======== ' + 'more ref' + ' ========'
for web in web_explains:
print '------ ' + web['key'] + ' ------'
for exp in web['value']:
            print '    ' + bcolors.OKBLUE + exp + bcolors.ENDC + ' '
if __name__ == "__main__":
raw = ""
while True:
raw = raw_input('=> ')
if raw == core_command_quit:
break;
else:
main(['-d',raw])
|
RedHatInsights/insights-core
|
insights/parsers/tests/test_docker_inspect.py
|
Python
|
apache-2.0
| 10,303
| 0.002038
|
import pytest
import doctest
from insights.parsers import docker_inspect, SkipException
from insights.tests import context_wrap
DOCKER_CONTAINER_INSPECT = """
[
{
"Id": "97d7cd1a5d8fd7730e83bb61ecbc993742438e966ac5c11910776b5d53f4ae07",
"Created": "2016-06-23T05:12:25.433469799Z",
"Path": "/bin/bash",
"Args": [],
"State": {
"Status": "running",
"Running": true,
"Paused": false,
"Restarting": false,
"OOMKilled": false,
"Dead": false,
"Pid": 15096,
"ExitCode": 0,
"Error": "",
"StartedAt": "2016-06-23T05:37:56.925378831Z",
"FinishedAt": "2016-06-23T05:33:02.012653984Z"
},
"Image": "882ab98aae5394aebe91fe6d8a4297fa0387c3cfd421b2d892bddf218ac373b2",
"ResolvConfPath": "/var/lib/docker/containers/97d7cd1a5d8fd7730e83bb61ecbc993742438e966ac5c11910776b5d53f4ae07/resolv.conf",
"HostnamePath": "/var/lib/docker/containers/97d7cd1a5d8fd7730e83bb61ecbc993742438e966ac5c11910776b5d53f4ae07/hostname",
"HostsPath": "/var/lib/docker/containers/97d7cd1a5d8fd7730e83bb61ecbc993742438e966ac5c11910776b5d53f4ae07/hosts",
"LogPath": "/var/lib/docker/containers/97d7cd1a5d8fd7730e83bb61ecbc993742438e966ac5c11910776b5d53f4ae07/97d7cd1a5d8fd7730e83bb61ecbc993742438e966ac5c11910776b5d53f4ae07-json.log",
"Name": "/hehe2",
"RestartCount": 0,
"Driver": "devicemapper",
"ExecDriver": "native-0.2",
"MountLabel": "system_u:object_r:svirt_sandbox_file_t:s0:c429,c690",
"ProcessLabel": "system_u:system_r:svirt_lxc_net_t:s0:c429,c690",
"AppArmorProfile": "",
"ExecIDs": null,
"HostConfig": {
"Binds": null,
"ContainerIDFile": "",
"LxcConf": [],
"Memory": 0,
"MemoryReservation": 0,
"MemorySwap": 0,
"KernelMemory": 0,
"CpuShares": 0,
"CpuPeriod": 0,
"CpusetCpus": "",
"CpusetMems": "",
"CpuQuota": 0,
"BlkioWeight": 0,
"OomKillDisable": false,
"MemorySwappiness": -1,
"Privileged": false,
"PortBindings": {},
"Links": null,
"PublishAllPorts": false,
"Dns": [],
"DnsOptions": [],
"DnsSearch": [],
"ExtraHosts": null,
"VolumesFrom": null,
"Devices": [],
"NetworkMode": "default",
"IpcMode": "",
"PidMode": "",
"UTSMode": "",
"CapAdd": null,
"CapDrop": null,
"GroupAdd": null,
"RestartPolicy": {
"Name": "no",
"MaximumRetryCount": 0
},
"SecurityOpt": null,
"ReadonlyRootfs": false,
"Ulimits": null,
"Sysctls": {},
"LogConfig": {
"Type": "json-file",
"Config": {
"max-file": "7",
"max-size": "10m"
}
},
"CgroupParent": "",
"ConsoleSize": [
0,
0
],
"VolumeDriver": "",
"ShmSize": 67108864
},
"GraphDriver": {
"Name": "devicemapper",
"Data": {
"DeviceId": "433",
"DeviceName"
|
: "docker-253:0-71431059-97d7cd1a5d8fd7730e83bb61ecbc993742438e966ac5c11910776b5d53f4ae07",
"DeviceSize": "107374182400"
}
},
"Mounts": [],
"Config": {
"Hostname": "97d7cd1a5d8f",
"Domainname": "",
"User": "root",
"AttachStdi
|
n": true,
"AttachStdout": true,
"AttachStderr": true,
"Tty": true,
"OpenStdin": true,
"StdinOnce": true,
"Env": [
"container=docker",
"PKGM=yum",
"PATH=/usr/local/bin:/usr/bin:/bin:/usr/local/sbin:/usr/sbin"
],
"Cmd": [
"/bin/bash"
],
"Image": "rhel7_imagemagick",
"Volumes": null,
"WorkingDir": "",
"Entrypoint": null,
"OnBuild": null,
"Labels": {
"Architecture": "x86_64",
"Authoritative_Registry": "registry.access.redhat.com",
"BZComponent": "rhel-server-docker",
"Build_Host": "rcm-img03.build.eng.bos.redhat.com",
"Name": "rhel7/rhel",
"Release": "61",
"Vendor": "Red Hat, Inc.",
"Version": "7.2",
"io.projectatomic.Temporary": "true"
},
"StopSignal": "SIGTERM"
},
"NetworkSettings": {
"Bridge": "",
"SandboxID": "f1cce5397340364aff043879ff5bd7e2ce2fcc5b81cfb7fe1833ce7b57eb6cf8",
"HairpinMode": false,
"LinkLocalIPv6Address": "",
"LinkLocalIPv6PrefixLen": 0,
"Ports": {},
"SandboxKey": "/var/run/docker/netns/f1cce5397340",
"SecondaryIPAddresses": null,
"SecondaryIPv6Addresses": null,
"EndpointID": "59be4c94b2a1346eb0ec16472bc132e071d18733fd956c34b3b1defff9bba389",
"Gateway": "172.17.0.1",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"IPAddress": "172.17.0.2",
"IPPrefixLen": 16,
"IPv6Gateway": "",
"MacAddress": "02:42:ac:11:00:02",
"Networks": {
"bridge": {
"EndpointID": "59be4c94b2a1346eb0ec16472bc132e071d18733fd956c34b3b1defff9bba389",
"Gateway": "172.17.0.1",
"IPAddress": "172.17.0.2",
"IPPrefixLen": 16,
"IPv6Gateway": "",
"GlobalIPv6Address": "",
"GlobalIPv6PrefixLen": 0,
"MacAddress": "02:42:ac:11:00:02"
}
}
}
}
]
""".splitlines()
DOCKER_IMAGE_INSPECT = """
[
{
"Id": "882ab98aae5394aebe91fe6d8a4297fa0387c3cfd421b2d892bddf218ac373b2",
"RepoTags": [
"rhel7_imagemagick:latest"
],
"RepoDigests": [],
"Parent": "34c167d900afb820ecab622a214ce3207af80ec755c0dcb6165b425087ddbc3a",
"Comment": "",
"Created": "2016-06-23T03:39:15.068803433Z",
"Container": "65410bf8809af52d2074c882917ea0651b119a91f460c1037bc99d4d5976532a",
"ContainerConfig": {
"Hostname": "cf3092658f01",
"Domainname": "",
"User": "root",
"AttachStdin": false,
"AttachStdout": false,
"AttachStderr": false,
"Tty": false,
"OpenStdin": false,
"StdinOnce": false,
"Env": [
"container=docker",
"PKGM=yum",
"PATH=/usr/local/bin:/usr/bin:/bin:/usr/local/sbin:/usr/sbin"
],
"Cmd": [
"/bin/sh",
"-c",
"yum install -y ImageMagick-6.7.8.9-10.el7"
],
"Image": "34c167d900afb820ecab622a214ce3207af80ec755c0dcb6165b425087ddbc3a",
"Volumes": null,
"WorkingDir": "",
"Entrypoint": null,
"OnBuild": [],
"Labels": {
"Architecture": "x86_64",
"Authoritative_Registry": "registry.access.redhat.com",
"BZComponent": "rhel-server-docker",
"Build_Host": "rcm-img03.build.eng.bos.redhat.com",
"Name": "rhel7/rhel",
"Release": "61",
"Vendor": "Red Hat, Inc.",
"Version": "7.2"
}
},
"DockerVersion": "1.9.1",
"Author": "",
"Config": {
"Hostname": "cf3092658f01",
"Domainname": "",
"User": "root",
"AttachStdin": false,
"AttachStdout": false,
"AttachStderr": false,
"Tty": false,
"OpenStdin": false,
"StdinOnce": false,
"Env": [
"container=docker",
"PKGM=yum",
"PATH=/usr/local/bin:/usr/bin:/bin:/usr/local/sbin:/usr/sbin"
],
"Cmd": [
"/usr/bin/bash"
],
"Image": "34c167d900afb820ecab622a214ce3207af80ec755c0dcb6165b425087ddbc3a",
"Volumes": null,
"WorkingDir": "",
"Entrypoint": null,
"OnBuild": [],
"Labels": {
"Architecture": "x86_64",
"Authoritative_Registry": "registry.access.redhat.com",
"BZComponent": "rhel-server-docker",
"Build_Host": "rcm-img03.build.eng.bos.redhat.com",
"Name": "rhel7/rhel",
"Relea
|
srome/jacksearch
|
search.py
|
Python
|
apache-2.0
| 922
| 0.007592
|
# Copyright (C) 2016 Scott Rome. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import glob
class Searcher:
ALLOWED_FILE_TYPES = ['*.*pg','*.png','*.tif*']
@staticmethod
def search_from_dir(base_dir):
files = []
for file_type in Searcher.ALLOWED_FILE_TYPES:
            files.extend(glob.glob('%s/**/%s' % (base_dir, file_type), recursive=True))
return files
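# Minimal usage sketch (added for illustration; the directory name below is an
# assumption, not part of the original module). Recursive '**' globbing requires
# Python 3.5+.
if __name__ == '__main__':
    for path in Searcher.search_from_dir('./images'):
        print(path)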
|
rbuffat/pyidf
|
tests/test_coilheatingdxvariablespeed.py
|
Python
|
apache-2.0
| 38,214
| 0.005548
|
import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.coils import CoilHeatingDxVariableSpeed
log = logging.getLogger(__name__)
class TestCoilHeatingDxVariableSpeed(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_coilheatingdxvariablespeed(self):
pyidf.validation_level = ValidationLevel.error
obj = CoilHeatingDxVariableSpeed()
# alpha
var_name = "Name"
obj.name = var_name
# node
var_indoor_air_inlet_node_name = "node|Indoor Air Inlet Node Name"
obj.indoor_air_inlet_node_name = var_indoor_air_inlet_node_name
# node
var_indoor_air_outlet_node_name = "node|Indoor Air Outlet Node Name"
obj.indoor_air_outlet_node_name = var_indoor_air_outlet_node_name
# integer
var_number_of_speeds = 5
obj.number_of_speeds = var_number_of_speeds
# integer
var_nominal_speed_level = 5
obj.nominal_speed_level = var_nominal_speed_level
# real
var_rated_heating_capacity_at_selected_nominal_speed_level = 6.6
obj.rated_heating_capacity_at_selected_nominal_speed_level = var_rated_heating_capacity_at_selected_nominal_speed_level
# real
var_rated_air_flow_rate_at_selected_nominal_speed_level = 7.7
        obj.rated_air_flow_rate_at_selected_nominal_speed_level = var_rated_air_flow_rate_at_selected_nominal_speed_level
# object-list
var_energy_part_load_fraction_curve_name = "object-list|Energy Part Load Fraction Curve Name"
obj.energy_part_load_fraction_curve_name = var_energy_part_load_fraction_curve_name
# object-list
var_defrost_energy_input_ratio_function_of_temperature_curve_name = "object-list|Defrost Energy Input Ratio Function of Temperature Curve Name"
obj.defrost_energy_input_ratio_function_of_temperature_curve_name = var_defrost_energy_input_ratio_function_of_temperature_curve_name
# real
var_minimum_outdoor_drybulb_temperature_for_compressor_operation = -50.0
obj.minimum_outdoor_drybulb_temperature_for_compressor_operation = var_minimum_outdoor_drybulb_temperature_for_compressor_operation
# real
var_outdoor_drybulb_temperature_to_turn_on_compressor = 11.11
obj.outdoor_drybulb_temperature_to_turn_on_compressor = var_outdoor_drybulb_temperature_to_turn_on_compressor
# real
var_maximum_outdoor_drybulb_temperature_for_defrost_operation = 3.61
obj.maximum_outdoor_drybulb_temperature_for_defrost_operation = var_maximum_outdoor_drybulb_temperature_for_defrost_operation
# real
var_crankcase_heater_capacity = 0.0
obj.crankcase_heater_capacity = var_crankcase_heater_capacity
# real
var_maximum_outdoor_drybulb_temperature_for_crankcase_heater_operation = 0.0
obj.maximum_outdoor_drybulb_temperature_for_crankcase_heater_operation = var_maximum_outdoor_drybulb_temperature_for_crankcase_heater_operation
# alpha
var_defrost_strategy = "ReverseCycle"
obj.defrost_strategy = var_defrost_strategy
# alpha
var_defrost_control = "Timed"
obj.defrost_control = var_defrost_control
# real
var_defrost_time_period_fraction = 0.0
obj.defrost_time_period_fraction = var_defrost_time_period_fraction
# real
var_resistive_defrost_heater_capacity = 0.0
obj.resistive_defrost_heater_capacity = var_resistive_defrost_heater_capacity
# real
var_speed_1_reference_unit_gross_rated_heating_capacity = 0.0
obj.speed_1_reference_unit_gross_rated_heating_capacity = var_speed_1_reference_unit_gross_rated_heating_capacity
# real
var_speed_1_reference_unit_gross_rated_heating_cop = 0.0
obj.speed_1_reference_unit_gross_rated_heating_cop = var_speed_1_reference_unit_gross_rated_heating_cop
# real
var_speed_1_reference_unit_rated_air_flow_rate = 0.0
obj.speed_1_reference_unit_rated_air_flow_rate = var_speed_1_reference_unit_rated_air_flow_rate
# object-list
var_speed_1_heating_capacity_function_of_temperature_curve_name = "object-list|Speed 1 Heating Capacity Function of Temperature Curve Name"
obj.speed_1_heating_capacity_function_of_temperature_curve_name = var_speed_1_heating_capacity_function_of_temperature_curve_name
# object-list
var_speed_1_total_heating_capacity_function_of_air_flow_fraction_curve_name = "object-list|Speed 1 Total Heating Capacity Function of Air Flow Fraction Curve Name"
obj.speed_1_total_heating_capacity_function_of_air_flow_fraction_curve_name = var_speed_1_total_heating_capacity_function_of_air_flow_fraction_curve_name
# object-list
var_speed_1_energy_input_ratio_function_of_temperature_curve_name = "object-list|Speed 1 Energy Input Ratio Function of Temperature Curve Name"
obj.speed_1_energy_input_ratio_function_of_temperature_curve_name = var_speed_1_energy_input_ratio_function_of_temperature_curve_name
# object-list
var_speed_1_energy_input_ratio_function_of_air_flow_fraction_curve_name = "object-list|Speed 1 Energy Input Ratio Function of Air Flow Fraction Curve Name"
obj.speed_1_energy_input_ratio_function_of_air_flow_fraction_curve_name = var_speed_1_energy_input_ratio_function_of_air_flow_fraction_curve_name
# real
var_speed_2_reference_unit_gross_rated_heating_capacity = 0.0
obj.speed_2_reference_unit_gross_rated_heating_capacity = var_speed_2_reference_unit_gross_rated_heating_capacity
# real
var_speed_2_reference_unit_gross_rated_heating_cop = 0.0
obj.speed_2_reference_unit_gross_rated_heating_cop = var_speed_2_reference_unit_gross_rated_heating_cop
# real
var_speed_2_reference_unit_rated_air_flow_rate = 0.0
obj.speed_2_reference_unit_rated_air_flow_rate = var_speed_2_reference_unit_rated_air_flow_rate
# object-list
var_speed_2_heating_capacity_function_of_temperature_curve_name = "object-list|Speed 2 Heating Capacity Function of Temperature Curve Name"
obj.speed_2_heating_capacity_function_of_temperature_curve_name = var_speed_2_heating_capacity_function_of_temperature_curve_name
# object-list
var_speed_2_total_heating_capacity_function_of_air_flow_fraction_curve_name = "object-list|Speed 2 Total Heating Capacity Function of Air Flow Fraction Curve Name"
obj.speed_2_total_heating_capacity_function_of_air_flow_fraction_curve_name = var_speed_2_total_heating_capacity_function_of_air_flow_fraction_curve_name
# object-list
var_speed_2_energy_input_ratio_function_of_temperature_curve_name = "object-list|Speed 2 Energy Input Ratio Function of Temperature Curve Name"
obj.speed_2_energy_input_ratio_function_of_temperature_curve_name = var_speed_2_energy_input_ratio_function_of_temperature_curve_name
# object-list
var_speed_2_energy_input_ratio_function_of_air_flow_fraction_curve_name = "object-list|Speed 2 Energy Input Ratio Function of Air Flow Fraction Curve Name"
obj.speed_2_energy_input_ratio_function_of_air_flow_fraction_curve_name = var_speed_2_energy_input_ratio_function_of_air_flow_fraction_curve_name
# real
var_speed_3_reference_unit_gross_rated_heating_capacity = 0.0
obj.speed_3_reference_unit_gross_rated_heating_capacity = var_speed_3_reference_unit_gross_rated_heating_capacity
# real
var_speed_3_reference_unit_gross_rated_heating_cop = 0.0
obj.speed_3_reference_unit_gross_rated_heating_cop = var_speed_3_reference_unit_gross_rated_heating_cop
# real
var_speed_3_reference_unit_rated_air_flow_rate = 0.0
obj.speed_3_reference_unit_rated_air_flow_rate = var_speed_3_reference_unit_rated_air_flow_rate
# object-list
var_speed_3_heating_capacity_function_of_temperature_curve_name = "object-list|Speed 3 Heating
|
jakereps/qiime-workshops
|
config/urls/callback.py
|
Python
|
bsd-3-clause
| 890
| 0
|
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2018, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from django.conf import settings
from django.conf.urls import include, url
from django.views import defaults as default_views
urlpatterns = [
url(r'^', include('payments.urls.callback')),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
        url(r'^__debug__/', include(debug_toolbar.urls)),
url(r'^400/$', default_views.bad_request),
url(r'^403/$', default_views.permission_denied),
url(r'^404/$', default_views.page_not_found),
        url(r'^500/$', default_views.server_error),
]
|
jaswal72/hacker-rank
|
Python/Math/Polar_Coordinates.py
|
Python
|
mit
| 69
| 0.014493
|
import cmath
n = complex(input())
print(abs(n))
print(cmath.phase(n))
|
robofab-developers/fontParts
|
Lib/fontParts/base/component.py
|
Python
|
mit
| 11,222
| 0.000089
|
from fontTools.misc import transform
from fontParts.base import normalizers
from fontParts.base.errors import FontPartsError
from fontParts.base.base import (
BaseObject,
TransformationMixin,
InterpolationMixin,
PointPositionMixin,
SelectionMixin,
IdentifierMixin,
dynamicProperty,
reference
)
from fontParts.base.compatibility import ComponentCompatibilityReporter
from fontParts.base.deprecated import DeprecatedComponent, RemovedComponent
class BaseComponent(
BaseObject,
TransformationMixin,
PointPositionMixin,
InterpolationMixin,
SelectionMixin,
IdentifierMixin,
DeprecatedComponent,
RemovedComponent
):
copyAttributes = (
"baseGlyph",
"transformation"
)
def _reprContents(self):
contents = [
"baseGlyph='%s'" % self.baseGlyph,
"offset='({x}, {y})'".format(x=self.offset[0], y=self.offset[1]),
]
if self.glyph is not None:
contents.append("in glyph")
contents += self.glyph._reprContents()
return contents
# -------
# Parents
# -------
# Glyph
_glyph = None
glyph = dynamicProperty("glyph", "The component's parent glyph.")
def _get_glyph(self):
if self._glyph is None:
return None
return self._glyph()
def _set_glyph(self, glyph):
if self._glyph is not None:
raise AssertionError("glyph for component already set")
if glyph is not None:
glyph = reference(glyph)
self._glyph = glyph
# Layer
layer = dynamicProperty("layer", "The component's parent layer.")
def _get_layer(self):
if self._glyph is None:
return None
return self.glyph.layer
# Font
font = dynamicProperty("font", "The component's parent font.")
def _get_font(self):
if self._glyph is None:
return None
return self.glyph.font
# ----------
# Attributes
# ----------
# baseGlyph
baseGlyph = dynamicProperty("base_baseGlyph",
"The glyph the component references.")
def _get_base_baseGlyph(self):
value = self._get_baseGlyph()
# if the component does not belong to a layer,
# it is allowed to have None as its baseGlyph
if value is None and self.layer is None:
pass
else:
value = normalizers.normalizeGlyphName(value)
return value
def _set_base_baseGlyph(self, value):
value = normalizers.normalizeGlyphName(value)
self._set_baseGlyph(value)
def _get_baseGlyph(self):
"""
Subclasses must override this method.
"""
self.raiseNotImplementedError()
def _set_baseGlyph(self, value):
"""
Subclasses must override this method.
"""
self.raiseNotImplementedError()
# transformation
transformation = dynamicProperty("base_transformation",
"The component's transformation matrix.")
def _get_base_transformation(self):
value = self._get_transformation()
value = normalizers.normalizeTransformationMatrix(value)
return value
def _set_base_transformation(self, value):
value = normalizers.normalizeTransformationMatrix(value)
self._set_transformation(value)
def _get_transformation(self):
"""
Subclasses must override this method.
"""
self.raiseNotImplementedError()
def _set_transformation(self, value):
"""
Subclasses must override this method.
"""
self.raiseNotImplementedError()
# offset
offset = dynamicProperty("base_offset", "The component's offset.")
def _get_base_offset(self):
value = self._get_offset()
value = normalizers.normalizeTransformationOffset(value)
return value
def _set_base_offset(self, value):
value = normalizers.normalizeTransformationOffset(value)
self._set_offset(value)
def _get_offset(self):
"""
Subclasses may override this method.
"""
sx, sxy, syx, sy, ox, oy = self.transformation
return ox, oy
def _set_offset(self, value):
"""
Subclasses may override this method.
"""
sx, sxy, syx, sy, ox, oy = self.transformation
ox, oy = value
self.transformation = sx, sxy, syx, sy, ox, oy
# scale
scale = dynamicProperty("base_scale", "The component's scale.")
def _get_base_scale(self):
value = self._get_scale()
value = normalizers.normalizeComponentScale(value)
return value
def _set_base_scale(self, value):
value = normalizers.normalizeComponentScale(value)
self._set_scale(value)
def _get_scale(self):
"""
Subclasses may override this method.
"""
sx, sxy, syx, sy, ox, oy = self.transformation
return sx, sy
def _set_scale(self, value):
"""
Subclasses may override this method.
"""
sx, sxy, syx, sy, ox, oy = self.transformation
sx, sy = value
self.transformation = sx, sxy, syx, sy, ox, oy
# --------------
# Identification
# --------------
# index
index = dynamicProperty("base_index",
("The index of the component within the "
"ordered list of the parent glyph's components."))
def _get_base_index(self):
glyph = self.glyph
if glyph is None:
return None
value = self._get_index()
value = normalizers.normalizeIndex(value)
return value
def _set_base_index(self, value):
glyph = self.glyph
if glyph is None:
raise FontPartsError("The component does not belong to a glyph.")
value = normalizers.normalizeIndex(value)
componentCount = len(glyph.components)
if value < 0:
value = -(value % componentCount)
if value >= componentCount:
value = componentCount
self._set_index(value)
def _get_index(self):
"""
Subclasses may override this method.
"""
glyph = self.glyph
return glyph.components.index(self)
def _set_index(self, value):
"""
Subclasses must override this method.
"""
self.raiseNotImplementedError()
# ----
# Pens
# ----
def draw(self, pen):
"""
Draw the component with the given Pen.
"""
self._draw(pen)
def _draw(self, pen, **kwargs):
"""
Subclasses may override this method.
"""
from fontTools.ufoLib.pointPen import PointToSegmentPen
adapter = PointToSegmentPen(pen)
self.drawPoints(adapter)
def drawPoints(self, pen):
"""
Draw the contour with the given PointPen.
"""
self._drawPoints(pen)
def _drawPoints(self, pen, **kwargs):
"""
Subclasses may override this method.
"""
# The try: ... except TypeError: ...
        # handles backwards compatibility with
# point pens that have not been upgraded
# to point pen protocol 2.
try:
pen.addComponent(self.baseGlyph, self.transformation,
identifier=self.identifier, **kwargs)
except TypeError:
            pen.addComponent(self.baseGlyph, self.transformation, **kwargs)
# --------------
# Transformation
# --------------
def _transformBy(self, matrix, **kwargs):
"""
Subclasses may override this method.
"""
t = transform.Transform(*matrix)
transformation = t.transform(self.transformation)
self.transformation = tuple(transformation)
# -------------
# Normalization
# -------------
def round(self):
"""
Round offset coordinates.
|
r-singh/Test2
|
webapp_project/website/migrations/0003_initial.py
|
Python
|
mit
| 351
| 0.005698
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
pass
def backwards(self, orm):
pass
models = {
}
complete_apps = ['website']
|
yesudeep/tisshrmlr
|
app/jinja2/tests/test_debug.py
|
Python
|
mit
| 1,268
| 0
|
# -*- coding: utf-8 -*-
"""
Test debug interface
~~~~~~~~~~~~~~~~~~~~
Tests the traceback rewriter.
:copyright: (c) 2009 by the Jinja Team.
:license: BSD.
"""
from jinja2 import Environment
from test_loaders import filesystem_loader
env = Environment(loader=filesystem_loader)
def test_runtime_error():
'''
>>> tmpl = env.get_template('broken.html')
>>> tmpl.render(fail=lambda: 1 / 0)
Traceback (most recent call last):
File "loaderres/templates/broken.html", line 2, in top-level template code
{{ fail() }}
File "<doctest test_runtime_error[1]>", line 1, in <lambda>
tmpl.render(fail=lambda: 1 / 0)
ZeroDivisionError: integer division or modulo by zero
'''
def test_syntax_error():
'''
>>> tmpl = env.get_template('syntaxerror.html')
Traceback (most recent call last):
...
TemplateSyntaxError: unknown tag 'endif'
File "loaderres/templates
|
\\syntaxerror.html", line 4
{% endif %}
'''
def test_regular_syntax_error():
'''
>>> from jinja2.exceptions import TemplateSyntaxE
|
rror
>>> raise TemplateSyntaxError('wtf', 42)
Traceback (most recent call last):
...
File "<doctest test_regular_syntax_error[1]>", line 1, in <module>
raise TemplateSyntaxError('wtf', 42)
TemplateSyntaxError: wtf
line 42
'''
|
astrobin/astrobin
|
astrobin/auth.py
|
Python
|
agpl-3.0
| 1,134
| 0.002646
|
from annoying.functions import get_object_or_None
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from django.db.models import Q
# Class that permits authentication using email or username, with both case-sensitive and case-insensitive matches.
class CustomBackend(ModelBackend):
    def authenticate(self, request, username=None, password=None, **kwargs):
UserModel = get_user_model()
        case_sensitive = UserModel.objects.filter(Q(username__exact=username) | Q(email__iexact=username)).distinct()
case_insensitive = UserModel.objects.filter(Q(username__iexact=username) | Q(email__iexact=username)).distinct()
user = None
if case_sensitive.exists():
user = case_sensitive.first()
elif case_insensitive.exists():
count = case_insensitive.count()
if count == 1:
user = case_insensitive.first()
if user and user.check_password(password):
return user
return None
def get_user(self, user_id):
return get_object_or_None(get_user_model(), pk=user_id)
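# Illustrative note (not part of the original file): a backend like this is
# enabled through Django's AUTHENTICATION_BACKENDS setting; the dotted path
# below is an assumption based on this module living at astrobin/auth.py.
#
#   AUTHENTICATION_BACKENDS = ['astrobin.auth.CustomBackend']
#
# django.contrib.auth.authenticate() then calls authenticate() on each listed
# backend in order until one returns a user.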
|
patrickbeeson/diy-trainer
|
diytrainer/diytrainer/preview_urls.py
|
Python
|
mit
| 761
| 0.001314
|
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.conf import settings
from django.views.generic import TemplateView
from views import RobotsView
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='home.html')),
    url(r'^robots\.txt$', RobotsView.as_view()),
    url(r'^', include('guides.urls')),
)
# Uncomment the next line to serve media files in dev.
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
import debug_toolbar
urlpatterns += patterns('',
url(r'^__debug__/', include(debug_toolbar.urls)),
)
|
jasonge27/picasso
|
python-package/pycasso/libpath.py
|
Python
|
gpl-3.0
| 1,608
| 0.003731
|
# coding: utf-8
"""Find the path to picasso dynamic library files."""
import os
import platform
import sys
class PicassoLibraryNotFound(Exception):
"""Error thrown by when picasso is not found"""
pass
def find_lib_path():
"""Find the path to picasso dynamic library files.
:return: List of all found library path to picasso
:rtype: list(string)
"""
curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
dll_path = [os.path.join(curr_path, './lib/')]
if sys.platform == 'win32':
dll_path = [os.path.join(p, 'picasso.dll') for p in dll_path] \
+[os.path.join(p, 'libpicasso.so') for p in dll_path]
elif sys.platform.startswith('linux'):
dll_path = [os.path.join(p, 'libpicasso.so') for p in dll_path]
elif sys.platform == 'darwin':
        dll_path = [os.path.join(p, 'libpicasso.so') for p in dll_path] \
+[os.path.join(p, 'libpicasso.dylib') for p in dll_path]
lib_path = [p for p in dll_path if os.path.exists(p) and os.path.isfile(p)]
if not lib_path:
print('Library file does not exist. Need to be updated!')
return lib_path
    # From github issues, most of installation errors come from machines w/o compilers
if not lib_path and not os.environ.get('PICASSO_BUILD_DOC', False):
raise PicassoLibraryNotFound(
'Cannot find Picasso Library in the candidate path, ' +
'did you install compilers and make the project in root path?\n'
'List of candidates:\n' + ('\n'.join(dll_path)))
return lib_path
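# Hypothetical usage sketch (not in the original module): callers would
# typically hand the first discovered path to a dynamic loader such as ctypes.
if __name__ == '__main__':
    found = find_lib_path()
    print(found[0] if found else 'libpicasso not found')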
|
bartdag/recodoc2
|
recodoc2/apps/doc/admin.py
|
Python
|
bsd-3-clause
| 2,534
| 0.001184
|
from __future__ import unicode_literals
from django.contrib import admin
from django.contrib.contenttypes import generic
from codebase.models import SingleCodeReference, CodeSnippet
from doc.models import Document, Page, Section, DocDiff, SectionChanger,\
LinkChange
class SingleCodeReferenceInline(generic.GenericTabularInline):
fields = ('content', 'kind_hint', 'index',
'snippet', 'first_link', 'project')
readonly_fields = ('first_link',)
raw_id_fields = ('snippet',)
model = SingleCodeReference
ct_field = 'local_content_type'
ct_fk_field = 'local_object_id'
extra = 0
class CodeSnippetInline(generic.GenericTabularInline):
fields = ('language', 'project', 'snippet_text')
model = CodeSnippet
ct_field = 'local_content_type'
ct_fk_field = 'local_object_id'
extra = 0
ordering = ('index',)
class SectionAdmin(admin.ModelAdmin):
ordering = ('title',)
list_filter = ('page__document', 'page')
list_display = ('pk', 'page', 'title', 'parent')
list_display_links = ('title',)
search_fields = ['title', 'id']
inlines = [SingleCodeReferenceInline, CodeSnippetInline]
class SectionInline(admin.StackedInline):
model = Section
extra = 0
ordering = ('title',)
class PageAdmin(admin.ModelAdmin):
inlines = [SectionInline]
ordering = ('title',)
list_display = ('document', 'title')
list_filter = ('document',)
list_display_links = ('title',)
search_fields = ['title']
class PageInline(admin.StackedInline):
model = Page
extra = 0
ordering = ('title',)
class DocumentAdmin(admin.ModelAdmin):
inlines = [PageInline]
class SectionChangerInline(admin.StackedInline):
model = SectionChanger
fields = ('section_from', 'section_to', 'words_from', 'words_to',
'change')
readonly_fields = ('section_from', 'section_to')
extra = 0
ordering = ('section_from',)
class DocDiffAdmin(admin.ModelAdmin):
    readonly_fields = ('removed_pages', 'added_pages', 'removed_sections',
'added_sections')
inlines = [SectionChangerInline]
class LinkChangeAdmin(admin.ModelAdmin):
    readonly_fields = ('link_from', 'link_to')
    list_filter = ('diff', 'from_matched_section')
    list_display = ('link_from', 'link_to', 'diff')
admin.site.register(Document, DocumentAdmin)
admin.site.register(Page, PageAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(DocDiff, DocDiffAdmin)
admin.site.register(LinkChange, LinkChangeAdmin)
|
santidltp/viprcommand
|
ViPRCommand/bin/CLIInputs.py
|
Python
|
mit
| 1,681
| 0.009518
|
"""
Copyright EMC Corporation 2015.
Distributed under the MIT License.
(See accompanying file LICENSE or copy at http://opensource.org/licenses/MIT)
"""
""" Class to store parsed WADL and XSD data. """
class CLIInputs:
wadl_context = dict()
xsd_elements_dict = dict()
unknown_xsd_elements_dict = dict()
name_type_dict = dict()
class ActionParams:
def __init__(self):
self.query_params = dict()
self.method_name = None
class XSDElement:
def __init__(self, name=None, type=None, min_occurs='0', max_occurs='1', base=None, ref=None):
self.name = name
self.type = type
self.min_occurs = min_occurs
self.max_occurs = max_occurs
self.base = base
self.ref = ref
self.children = list()
self.query_params = list()
def __str__(self):
return 'name: %s type: %s base: %s ref: %s children: %s' %(self.name, self.type, self.base, self.ref, self.children)
def __repr__(self):
return 'name: %s type: %s base: %s ref: %s children: %s' %(self.name, self.type, self.base, self.ref, self.children)
class ChildXSDElement:
    def __init__(self, name=None, type=None, min_occurs=0, max_occurs=1, base=None, ref=None):
self.name = name
self.type = type
self.min_occurs = min_occurs
self.max_occurs = max_occurs
self.base = base
self.ref = ref
self.children = None
def __str__(self):
return 'name: %s type: %s base: %s ref: %s' %(self.name, self.type, self.base, self.ref)
def __repr__(self):
return 'name: %s type: %s base: %s ref: %s' %(self.name, self.type, self.base, self.ref)
|
scjrobertson/xRange
|
kalman/sensor_array.py
|
Python
|
gpl-3.0
| 2,075
| 0.004337
|
"""
Module containing the SensorArray class which
models an array of FMCW radars.
@author: scj robertson
@since: 03/04/16
"""
import numpy as np
C = 3e8
class SensorArray(object):
'''
Class for representing an array of identical FMCW radars. For viable multilateration
four or more sensors must always be specified at distinct locations.
Parameters
----------
sensor_locations : (N, 3) ndarray
The locations of the sensors in 3D space. N >= 4 and at least
four of these locations must be distinct.
f_c : scalar
The carrier frequency of the radar sensor.
del_r : scalar
The range resolution (m)
r_max : scalar
The maximum range (m) the radar can detect objects.
Determines range bin width, not the Nyquist frequency for range.
del_v : scalar
The velocity resolution (m/s)
v_max : scalar
The maximum velocity (m/s) the radar can detect.
Determines velocity bin width, not the Nyquist frequnecy for velocity.
Methods
----------
output
Returns range-Doppler maps for each sensor for a given collection of targets.
Raises
----------
ValueError
If the are less than four distinct sensor locations.
'''
    def __init__(self, sensor_locations, f_c, del_r, r_max, del_v, v_max):
self.K, _ = sensor_locations.shape
        if (self.K < 4):
            raise ValueError('There must be at least 4 distinct sensor locations')
self.f_c = f_c
self.del_r = del_r
self.r_max = r_max
self.del_v = del_v
self.v_max = v_max
self.B = C/(2*del_r)
self.T = C/(4*f_c*v_max)
        self.M = int((4*self.B*r_max)/C)
        self.N = int(C/(2*del_v*self.T*f_c))
'''
Determines a collection of range-Doppler maps for each sensor given
a simulated trajectory
'''
def output(self, targets):
I, J, _ = targets.shape
dt = np.linspace(0, self.T, self.M)
rd_map = np.zeros((self.K, self.M, self.N))
return rd_map
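# --- Illustrative usage sketch, not part of the original module ---
# Shows how a SensorArray might be constructed and queried; the sensor
# geometry and radar parameters below are made-up example values.
if __name__ == '__main__':
    example_locations = np.array([[0.0, 0.0, 0.0],
                                  [100.0, 0.0, 0.0],
                                  [0.0, 100.0, 0.0],
                                  [0.0, 0.0, 100.0]])
    sensors = SensorArray(example_locations, f_c=77e9, del_r=0.5,
                          r_max=200.0, del_v=0.25, v_max=30.0)
    # One target over one time step; the last axis packs position/velocity.
    example_targets = np.zeros((1, 1, 6))
    rd_maps = sensors.output(example_targets)
    print(rd_maps.shape)  # (K, M, N): one range-Doppler map per sensor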
|
openego/oeplatform
|
dataedit/migrations/0009_tablerevision_path.py
|
Python
|
agpl-3.0
| 488
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-04 15:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("dataedit", "0008_auto_2
|
0170504_1651")]
operations = [
        migrations.AddField(
model_name="tablerevision",
name="path",
field=models.CharField(default="", max_length=100),
preserve_default=False,
)
]
|
ohaut/ohaut-core
|
ohaut/config.py
|
Python
|
gpl-3.0
| 1,064
| 0.00188
|
from oslo_config import cfg
OPTS = [
cfg.StrOpt('openhab_config_dir',
default='/opt/openhab/config',
               help='The openhab configuration path'),
cfg.StrOpt('mqtt_id',
default='mosquitto',
help='The mqtt id inside openhab config'
'to connect the items to'),
]
MQTT_OPTS = [
cfg.HostnameOpt('server',
default='localhost',
help='MQTT server address'),
cfg.IntOpt('port',
default=1883,
help='MQTT server port'),
cfg.StrOpt('user',
default=None,
help='MQTT connection username'),
cfg.StrOpt('password',
default=None,
               help='MQTT connection password')]
_conf = None
def get():
"""Load the configuration and return the CONF object."""
global _conf
if _conf is None:
cfg.CONF.register_opts(OPTS)
cfg.CONF.register_opts(MQTT_OPTS, group='mqtt')
cfg.CONF(project='ohaut')
_conf = cfg.CONF
return _conf
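# --- Illustrative usage sketch, not part of the original module ---
# Reads back the options registered above; attribute names follow the
# OPTS and MQTT_OPTS definitions, values fall back to their defaults.
if __name__ == '__main__':
    conf = get()
    print(conf.openhab_config_dir)  # default: /opt/openhab/config
    print(conf.mqtt.server)         # default: localhost
    print(conf.mqtt.port)           # default: 1883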
|
lcostantino/healing-os
|
external/ceilometer/compute/virt/libvirt/inspector.py
|
Python
|
apache-2.0
| 8,169
| 0
|
#
# Copyright 2012 Red Hat, Inc
#
# Author: Eoghan Glynn <eglynn@redhat.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of Inspector abstraction for libvirt."""
from lxml import etree
from oslo.config import cfg
import six
from ceilometer.compute.virt import inspector as virt_inspector
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common import log as logging
libvirt = None
LOG = logging.getLogger(__name__)
libvirt_opts = [
cfg.StrOpt('libvirt_type',
default='kvm',
help='Libvirt domain type (valid options are: '
'kvm, lxc, qemu, uml, xen).'),
cfg.StrOpt('libvirt_uri',
default='',
help='Override the default libvirt URI '
'(which is dependent on libvirt_type).'),
]
CONF = cfg.CONF
CONF.register_opts(libvirt_opts)
VIR_DOMAIN_NOSTATE = 0
VIR_DOMAIN_RUNNING = 1
VIR_DOMAIN_BLOCKED = 2
VIR_DOMAIN_PAUSED = 3
VIR_DOMAIN_SHUTDOWN = 4
VIR_DOMAIN_SHUTOFF = 5
VIR_DOMAIN_CRASHED = 6
VIR_DOMAIN_PMSUSPENDED = 7
LIBVIRT_POWER_STATE = {
VIR_DOMAIN_NOSTATE: virt_inspector.NOSTATE,
VIR_DOMAIN_RUNNING: virt_inspector.RUNNING,
# NOTE(maoy): The DOMAIN_BLOCKED state is only valid in Xen.
# It means that the VM is running and the vCPU is idle. So,
# we map it to RUNNING
VIR_DOMAIN_BLOCKED: virt_inspector.RUNNING,
VIR_DOMAIN_PAUSED: virt_inspector.PAUSED,
# NOTE(maoy): The libvirt API doc says that DOMAIN_SHUTDOWN
# means the domain is being shut down. So technically the domain
# is still running. SHUTOFF is the real powered off state.
# But we will map both to SHUTDOWN anyway.
# http://libvirt.org/html/libvirt-libvirt.html
VIR_DOMAIN_SHUTDOWN: virt_inspector.SHUTDOWN,
VIR_DOMAIN_SHUTOFF: virt_inspector.SHUTDOWN,
VIR_DOMAIN_CRASHED: virt_inspector.CRASHED,
VIR_DOMAIN_PMSUSPENDED: virt_inspector.SUSPENDED,
}
class LibvirtInspector(virt_inspector.Inspector):
per_type_uris = dict(uml='uml:///system', xen='xen:///', lxc='lxc:///')
def __init__(self):
self.uri = self._get_uri()
self.connection = None
def _get_uri(self):
return CONF.libvirt_uri or self.per_type_uris.get(CONF.libvirt_type,
'qemu:///system')
def _get_connection(self):
if not self.connection or not self._test_connection():
global libvirt
if libvirt is None:
libvirt = __import__('libvirt')
LOG.debug(_('Connecting to libvirt: %s'), self.uri)
self.connection = libvirt.openReadOnly(self.uri)
return self.connection
def _test_connection(self):
try:
self.connection.getCapabilities()
return True
except libvirt.libvirtError as e:
if (e.get_error_code() == libvirt.VIR_ERR_SYSTEM_ERROR and
e.get_error_domain() in (libvirt.VIR_FROM_REMOTE,
libvirt.VIR_FROM_RPC)):
LOG.debug(_('Connection to libvirt broke'))
return False
raise
def _lookup_by_name(self, instance_name):
try:
return self._get_connection().lookupByName(instance_name)
except Exception as ex:
if not libvirt or not isinstance(ex, libvirt.libvirtError):
raise virt_inspector.InspectorException(six.text_type(ex))
error_code = ex.get_error_code()
msg = ("Error from libvirt while looking up %(instance_name)s: "
"[Error Code %(error_code)s] "
"%(ex)s" % {'instance_name': instance_name,
'error_code': error_code,
'ex': ex})
raise virt_inspector.InstanceNotFoundException(msg)
def inspect_instances(self):
if self._get_connection().numOfDomains() > 0:
for domain_id in self._get_connection().listDomainsID():
try:
# We skip domains with ID 0 (hypervisors).
if domain_id != 0:
domain = self._get_connection().lookupByID(domain_id)
yield virt_inspector.Instance(name=domain.name(),
UUID=domain.UUIDString())
except libvirt.libvirtError:
# Instance was deleted while listing... ignore it
pass
def inspect_cpus(self, instance_name):
domain = self._lookup_by_name(instance_name)
dom_info = domain.info()
return virt_inspector.CPUStats(number=dom_info[3], time=dom_info[4])
def inspect_state(self, instance_name):
domain = self._lookup_by_name(instance_name)
dom_info = domain.info()
return virt_inspector.InstanceState(
state=LIBVIRT_POWER_STATE[dom_info[0]])
def inspect_vnics(self, instance_name):
domain = self._lookup_by_name(instance_name)
state = domain.info()[0]
if state == libvirt.VIR_DOMAIN_SHUTOFF:
LOG.warn(_('Failed to inspect vnics of %(instance_name)s, '
'domain is in state of SHUTOFF'),
{'instance_name': instance_name})
return
tree = etree.fromstring(domain.XMLDesc(0))
for iface in tree.findall('devices/interface'):
target = iface.find('target')
if target is not None:
name = target.get('dev')
else:
continue
mac = iface.find('mac')
if mac is not None:
mac_address = mac.get('address')
else:
continue
fref = iface.find('filterref')
if fref is not None:
fref = fref.get('filter')
params = dict((p.get('name').lower(), p.get('value'))
for p in iface.findall('filterref/parameter'))
interface = virt_inspector.Interface(name=name, mac=mac_address,
fref=fref, parameters=params)
dom_stats = domain.interfaceStats(name)
stats = virt_inspector.InterfaceStats(rx_bytes=dom_stats[0],
rx_packets=dom_stats[1],
tx_bytes=dom_stats[4],
tx_packets=dom_stats[5])
yield (interface, stats)
    def inspect_disks(self, instance_name):
domain = self._lookup_by_name(instance_name)
state = domain.info()[0]
if state == libvirt.VIR_DOMAIN_SHUTOFF:
            LOG.warn(_('Failed to inspect disks of %(instance_name)s, '
'domain is in state of SHUTOFF'),
{'instance_name': instance_name})
return
tree = etree.fromstring(domain.XMLDesc(0))
for device in filter(
bool,
[target.get("dev")
for target in tree.findall('devices/disk/target')]):
disk = virt_inspector.Disk(device=device)
block_stats = domain.blockStats(device)
stats = virt_inspector.DiskStats(read_requests=block_stats[0],
read_bytes=block_stats[1],
write_requests=block_stats[2],
write_bytes=block_stats[3],
errors=block_stats[4])
yield (disk, stats)
|
todaychi/hue
|
desktop/core/ext-py/thrift-0.9.1/src/transport/TSocket.py
|
Python
|
apache-2.0
| 6,194
| 0.008718
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import errno
import os
import socket
import sys
from TTransport import *
class TSocketBase(TTransportBase):
def _resolveAddr(self):
if self._unix_socket is not None:
return [(socket.AF_UNIX, socket.SOCK_STREAM, None, None,
self._unix_socket)]
else:
return socket.getaddrinfo(self.host,
self.port,
self._socket_family,
socket.SOCK_STREAM,
0,
socket.AI_PASSIVE | socket.AI_ADDRCONFIG)
def close(self):
if self.handle:
      self.handle.close()
self.handle = None
class TSocket(TSocketBase):
"""Socket implementation of TTransport base."""
def __init__(self, host='localhost', port=9090, unix_socket=None, socket_family=socket.AF_UNSPEC):
"""Initialize a TSocket
@param host(str) The host to connect to.
@param port(int) The (TCP) port to connect to.
@param unix_socket(str) The filename of a unix socket to connect to.
(host and port will be ignored.)
@param socket_family(int) The socket family to use with this socket.
"""
self.host = host
self.port = port
self.handle = None
self._unix_socket = unix_socket
self._timeout = None
self._socket_family = socket_family
def setHandle(self, h):
self.handle = h
def isOpen(self):
return self.handle is not None
def setTimeout(self, ms):
if ms is None:
self._timeout = None
else:
self._timeout = ms / 1000.0
if self.handle is not None:
self.handle.settimeout(self._timeout)
def open(self):
try:
res0 = self._resolveAddr()
for res in res0:
self.handle = socket.socket(res[0], res[1])
# check and turn on TCP Keepalive
sockprops = self.handle.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE)
if (sockprops == 0):
sockprops = self.handle.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
self.handle.settimeout(self._timeout)
try:
self.handle.connect(res[4])
except socket.error, e:
if res is not res0[-1]:
continue
else:
raise e
break
except socket.error, e:
if self._unix_socket:
message = 'Could not connect to socket %s' % self._unix_socket
else:
message = 'Could not connect to %s:%d' % (self.host, self.port)
raise TTransportException(type=TTransportException.NOT_OPEN,
message=message)
def read(self, sz):
try:
buff = self.handle.recv(sz)
except socket.error, e:
if (e.args[0] == errno.ECONNRESET and
(sys.platform == 'darwin' or sys.platform.startswith('freebsd'))):
# freebsd and Mach don't follow POSIX semantic of recv
# and fail with ECONNRESET if peer performed shutdown.
# See corresponding comment and code in TSocket::read()
# in lib/cpp/src/transport/TSocket.cpp.
self.close()
# Trigger the check to raise the END_OF_FILE exception below.
buff = ''
else:
raise
if len(buff) == 0:
raise TTransportException(type=TTransportException.END_OF_FILE,
message='TSocket read 0 bytes')
return buff
def write(self, buff):
if not self.handle:
raise TTransportException(type=TTransportException.NOT_OPEN,
message='Transport not open')
sent = 0
have = len(buff)
while sent < have:
plus = self.handle.send(buff)
if plus == 0:
raise TTransportException(type=TTransportException.END_OF_FILE,
message='TSocket sent 0 bytes')
sent += plus
buff = buff[plus:]
def flush(self):
pass
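# --- Illustrative usage sketch, not part of the original module ---
# Opens a client TSocket against a hypothetical Thrift server; the host,
# port, timeout and payload below are example values only.
def _exampleClientRoundTrip(host='localhost', port=9090):
  sock = TSocket(host=host, port=port)
  sock.setTimeout(5000)  # milliseconds
  sock.open()
  sock.write('ping')
  sock.close()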
class TServerSocket(TSocketBase, TServerTransportBase):
"""Socket implementation of TServerTransport base."""
def __init__(self, host=None, port=9090, unix_socket=None, socket_family=socket.AF_UNSPEC):
self.host = host
self.port = port
self._unix_socket = unix_socket
self._socket_family = socket_family
self.handle = None
def listen(self):
res0 = self._resolveAddr()
socket_family = self._socket_family == socket.AF_UNSPEC and socket.AF_INET6 or self._socket_family
for res in res0:
if res[0] is socket_family or res is res0[-1]:
break
# We need remove the old unix socket if the file exists and
# nobody is listening on it.
if self._unix_socket:
tmp = socket.socket(res[0], res[1])
try:
tmp.connect(res[4])
except socket.error, err:
eno, message = err.args
if eno == errno.ECONNREFUSED:
os.unlink(res[4])
self.handle = socket.socket(res[0], res[1])
# check and turn on TCP Keepalive
sockprops = self.handle.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE)
if (sockprops == 0):
sockprops = self.handle.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
self.handle.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if hasattr(self.handle, 'settimeout'):
self.handle.settimeout(None)
self.handle.bind(res[4])
self.handle.listen(128)
def accept(self):
client, addr = self.handle.accept()
result = TSocket()
result.setHandle(client)
return result
|
alexcasgarcia/GCJ
|
2008/1A/MinimumScalarProduct/MinScalarProduct.py
|
Python
|
mit
| 1,138
| 0.02812
|
def MinScalarProduct(vector1,vector2,case):
vector1.sort(reverse=False)
vector2.sort(reverse=True)
scalarProduct=0
i=0
while i<len(vector1):
scalarProduct+=vector1[i]*vector2[i]
i+=1
return "Case #"+str(case)+": "+str(scalarProduct)
|
+"\n"
def readTestFile(inputFile,outputFile):
r = open(outputFile, 'w')
with open(inputFile) as f:
i=0
n=1
vector1=[]
vector2=[]
for line in f:
if i==0:
NumberOfRecords=int(line)
else:
if (i+2)%3==0:
vectorLength=int(line.strip('\n'))
else:
textInput=line.strip('\n')
stringList=textInput.split()
integerList=[int(x) for x in stringList]
if (i+1)%3==0:
vector1=integerList
else:
vector2=integerList
r.write(MinScalarProduct(vector1,vector2,i/3))
n+=1
i+=1
readTestFile('in/minscalarproduct.in','out/minscalarproduct.out')
|
javierj/kobudo-katas
|
Kata-RestConsumer/DjangoGIndexDemo/gindex/gindex_logic/gindex.py
|
Python
|
apache-2.0
| 1,609
| 0.002486
|
__author__ = 'Javier'
import urllib.request
import json
class Repo(object):
def __init__(self, fork, stars, watchers):
self._fork = int(fork)
self._stars = int(stars)
self._watchers = int(watchers)
@property
def forks(self):
return self._fork
@property
def stars(self):
return self._stars
@property
def watchers(self):
return self._watchers
class GIndexCalculator(object):
def calc(self, repo_info):
        return (repo_info.forks * 3) + repo_info.stars + repo_info.watchers
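# --- Illustrative check, not part of the original module ---
# A repository with 2 forks, 10 stars and 5 watchers scores 2*3 + 10 + 5 = 21;
# the Repo counts below are made-up example values.
def _example_gindex():
    return GIndexCalculator().calc(Repo(2, 10, 5))  # -> 21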
class RepositoryService(object):
def get_repos_from(self, user):
url = "https://api.github.com/users/"+user+"/repos"
connection = urllib.request.urlopen(url)
result_raw = connection.read().decode('utf-8')
repos = json.loads(result_raw)
return repos
def find_repo(self, repos, repo_name):
for repo in repos:
if repo['name'] == repo_name:
return repo
return None
def get_repo(self, user, repo_name):
repos = self.get_repos_from(user)
repo = self.find_repo(repos, repo_name)
print(repo)
return Repo(repo['forks_count'], repo['stargazers_count'], repo['watchers_count'])
class GIndexPresenter(object):
def __init__(self, view, service):
self.view = view
self.service = service
def show_gindex(self, user, repo_name):
repo_info = self.service.get_repo(user, repo_name)
calculator = GIndexCalculator()
gindex = calculator.calc(repo_info)
self.view.show(gindex)
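# --- Illustrative wiring sketch, not part of the original module ---
# Any object with a show() method can act as the view; the GitHub user and
# repository names below are placeholders.
class _PrintView(object):
    def show(self, gindex):
        print(gindex)
def _example_show(user='octocat', repo_name='Hello-World'):
    GIndexPresenter(_PrintView(), RepositoryService()).show_gindex(user, repo_name)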
|
Nukesor/Pueuew
|
pueue/client/factories.py
|
Python
|
mit
| 2,761
| 0.001811
|
import pickle
from pueue.client.socket import connect_socket, receive_data, process_response
def command_factory(command):
"""A factory which returns functions for direct daemon communication.
This factory will create a function which sends a payload to the daemon
and returns the unpickled object which is returned by the daemon.
Args:
command (string): The type of payload this should be. This determines
as what kind of instruction this will be interpreted by the daemon.
Returns:
function: The created function.
"""
def communicate(body={}, root_dir=None):
"""Communicate with the daemon.
This function sends a payload to the daemon and returns the unpickled
object sent by the daemon.
Args:
body (dir): Any other arguments that should be put into the payload.
root_dir (str): The root directory in which we expect the daemon.
We need this to connect to the daemons socket.
Returns:
function: The returned payload.
"""
client = connect_socket(root_dir)
body['mode'] = command
# Delete the func entry we use to call the correct function with argparse
# as functions can't be pickled and this shouldn't be send to the daemon.
if 'func' in body:
del body['func']
data_string = pickle.dumps(body, -1)
client.send(data_string)
# Receive message, unpickle and return it
response = receive_data(client)
return response
return communicate
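# --- Illustrative usage sketch, not part of the original module ---
# The 'status' mode string and the default root_dir are assumptions made for
# this example; a pueue daemon must be listening under root_dir for the call
# to return anything.
def _example_status_query(root_dir='/tmp'):
    get_status = command_factory('status')
    return get_status(body={}, root_dir=root_dir)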
def print_command_factory(command):
"""A factory which returns functions for direct daemon communication.
This factory will create a function which sends a payload to the daemon
and prints the response of the daemon. If the daemon sends a
`response['status'] == 'error'`, the pueue client will exit with `1`.
Args:
        command (string): The type of payload this should be. This determines
            as what kind of instruction this will be interpreted by the daemon.
Returns:
function: The created function.
"""
def communicate(body={}, root_dir=None):
client = connect_socket(root_dir)
body['mode'] = command
# Delete the func entry we use to call the correct function with argparse
# as functions can't be pickled and this shouldn't be send to the daemon.
if 'func' in body:
del body['func']
data_string = pickle.dumps(body, -1)
client.send(data_string)
# Receive message and print it. Exit with 1, if an error has been sent.
response = receive_data(client)
process_response(response)
return communicate
|
sciCloud/OLiMS
|
fields/file_field.py
|
Python
|
agpl-3.0
| 449
| 0.01559
|
from openerp import fields
from fields_utils import direct_mapper
class FileField(fields.Binary):
# type = 'binary' will auto inherit from the base class of Binary
def __bika_2_odoo_attrs_mapping(self):
direct_mapper(self, 'description', 'help')
def _setup_regular_base(self, model):
super(FileField, self)._setup_regular_base(model)
self.__bika_2_odoo_attrs_mapping()
pass
|
meteoswiss-mdr/precipattractor
|
pyscripts/radar_extrapolation.py
|
Python
|
gpl-3.0
| 15,346
| 0.015379
|
#!/usr/bin/env python
from __future__ import division
from __future__ import print_function
# General libraries
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import sys
import argparse
import datetime
import getpass
import os
import time
# OpenCV
import cv2
# Precip Attractor libraries
import time_tools_attractor as ti
import io_tools_attractor as io
import data_tools_attractor as dt
import stat_tools_attractor as st
# optical flow libraries
import optical_flow as of
# advection libraries
import adv2d
print(adv2d.__doc__)
import maple_ree
print(maple_ree.__doc__)
####################################
###### RADAR EXTRAPOLATION IN PYTHON
####################################
######## Default parameters
noData = -999.0
timeAccumMin = 5
domainSize = [512,512] #512
resKm = 1
rainThreshold = 0.08
######## Folder paths
usrName = getpass.getuser()
usrName = "lforesti"
inBaseDir = '/scratch/' + usrName + '/data/' # directory to read from
outBaseDir = '/store/msrad/radar/precip_attractor_' + usrName + '/data/'
######## Parse arguments from command line
parser = argparse.ArgumentParser(description='')
parser.add_argument('-start', default='201505151600', type=str,help='Start date of forecast YYYYMMDDHHmmSS.')
parser.add_argument('-leadtime', default=60, type=int,help='')
parser.add_argument('-stack', default=15, type=int,help='')
parser.add_argument('-product', default='AQC', type=str,help='Which radar rainfall product to use (AQC, CPC, etc).')
parser.add_argument('-frameRate', default=0.5, type=float,help='')
parser.add_argument('-adv', default='maple', type=str,help='')
args = parser.parse_args()
advectionScheme = args.adv
frameRate = args.frameRate
product = args.product
leadtime = args.leadtime
timewindow = np.max((5,args.stack))
if (int(args.start) < 198001010000) or (int(args.start) > 203001010000):
print('Invalid -start time arguments.')
sys.exit(1)
else:
timeStartStr = args.start
######## Get dattime from timestamp
timeStart = ti.timestring2datetime(timeStartStr)
timeAccumMinStr = '%05i' % timeAccumMin
timeAccum24hStr = '%05i' % (24*60)
######## GIS stuff
# Limits of CCS4 domain
Xmin = 255000
Xmax = 965000
Ymin = -160000
Ymax = 480000
allXcoords = np.arange(Xmin,Xmax+resKm*1000,resKm*1000)
allYcoords = np.arange(Ymin,Ymax+resKm*1000,resKm*1000)
# Shapefile filename
fileNameShapefile = "/users/" + usr
|
Name + "/pyscripts/shapefiles/CHE_adm0.shp"
proj4stringWGS84 = "+proj=longlat +ellps=WGS84 +datum=WGS84"
proj4stringCH = "+proj=somerc +lat_0=46.95240555555556 +lon_0=7.439583333333333 \
+k_0=1 +x_0=600000 +y_0=200000 +ellps=bessel +towgs84=674.374,15.056,405.346,0,0,0,0 +units=m +no_defs"
######## Colormaps
color_list, clevs = dt.get_colorlist('MeteoSwiss') #'STEPS' or 'MeteoSwiss'
clevsStr = []
for i in range(0,len(clevs)):
if (clevs[i] < 10) and (clevs[i] >= 1):
clevsStr.append(str('%.1f' % clevs[i]))
elif (clevs[i] < 1):
clevsStr.append(str('%.2f' % clevs[i]))
else:
clevsStr.append(str('%i' % clevs[i]))
cmap = colors.ListedColormap(color_list)
norm = colors.BoundaryNorm(clevs, cmap.N)
######## Loop over files to get two consecutive images
nrValidFields = 0
rainfallStack = np.zeros((2,domainSize[0],domainSize[1]))
nStacks = np.max((1,np.round(timewindow/timeAccumMin))).astype(int) + 1 # includes present obs
# number of leadtimes
net = np.round(leadtime/timeAccumMin).astype(int)
# leadtimes + number of observations
nt = net + nStacks
# initialise variables
zStack = []
tStack = []
xStack = []
yStack = []
uStack = []
vStack = []
tic = time.clock()
for i in range(nStacks-1,-1*net-1,-1):
######## Load radar images
timeLocal = timeStart - datetime.timedelta(seconds=i*60*timeAccumMin)
print(timeLocal)
year, yearStr, julianDay, julianDayStr = ti.parse_datetime(timeLocal)
hour = timeLocal.hour
minute = timeLocal.minute
# Create filename for input
hourminStr = ('%02i' % hour) + ('%02i' % minute)
radarOperWildCard = '?'
subDir = str(year) + '/' + yearStr + julianDayStr + '/'
inDir = inBaseDir + subDir
fileNameWildCard = inDir + product + yearStr + julianDayStr + hourminStr + radarOperWildCard + '_' + timeAccumMinStr + '*.gif'
# Get filename matching regular expression
fileName = io.get_filename_matching_regexpr(fileNameWildCard)
# Get data quality from fileName
dataQuality = io.get_quality_fromfilename(fileName)
# Check if file exists
isFile = os.path.isfile(fileName)
if (isFile == False):
print('File: ', fileNameWildCard, ' not found.')
else:
# Reading GIF file
try:
# Open GIF image
rain8bit, nrRows, nrCols = io.open_gif_image(fileName)
# Get GIF image metadata
alb, doe, mle, ppm, wei = io.get_gif_radar_operation(fileName)
# Generate lookup table
lut = dt.get_rainfall_lookuptable(noData)
# Replace 8bit values with rain rates
rainrate = lut[rain8bit]
if product == 'AQC': # AQC is given in millimiters!!!
rainrate[rainrate != noData] = rainrate[rainrate != noData]*(60/5)
# Get coordinates of reduced domain
extent = dt.get_reduced_extent(rainrate.shape[1], rainrate.shape[0], domainSize[1], domainSize[0])
Xmin = allXcoords[extent[0]]
Ymin = allYcoords[extent[1]]
Xmax = allXcoords[extent[2]]
Ymax = allYcoords[extent[3]]
subXcoords = np.arange(Xmin,Xmax,resKm*1000)
subYcoords = np.arange(Ymin,Ymax,resKm*1000)
# Select 512x512 domain in the middle
rainrate = dt.extract_middle_domain(rainrate, domainSize[1], domainSize[0])
rain8bit = dt.extract_middle_domain(rain8bit, domainSize[1], domainSize[0])
# rainrate = rainrate[150:350,50:250]
# rain8bit = rain8bit[150:350,50:250]
# Create mask radar composite
mask = np.ones(rainrate.shape)
mask[rainrate != noData] = np.nan
mask[rainrate == noData] = 1
# Compute WAR
war = st.compute_war(rainrate,rainThreshold, noData)
except IOError:
print('File ', fileName, ' not readable')
war = -1
if (war >= 0.01 or i < 0):
# -999 to nan
rainrate[rainrate < 0] = np.nan
rainratePlot = np.copy(rainrate)
# Set lowest rain thresholds
rainThreshold = 0.08
condition = rainrate < rainThreshold
rainrate[condition] = rainThreshold
# Compute corresponding reflectivity
A = 316.0
b = 1.5
dBZ,mindBZ,_ = dt.rainrate2reflectivity(rainrate,A,b)
dBZ[condition] = 0
dBZ[dBZ==-999] = 0
rainfieldZeros = dBZ.copy()
# nan with zeros
rainfieldZeros[np.isnan(rainfieldZeros)] = 0
# remove small noise with a morphological operator (opening)
rainfieldZeros = of.morphological_opening(rainfieldZeros, thr=rainThreshold, n=5)
# scale values between 0 and 255
rainfieldZeros *= 255.0/rainfieldZeros.max()
# Move rainfall field down the stack
nrValidFields = nrValidFields + 1
rainfallStack[1,:,:] = rainfallStack[0,:]
rainfallStack[0,:,:] = rainfieldZeros
# Stack image for plotting
zStack.append(rainratePlot)
tStack.append(timeLocal)
########### Compute optical flow on these two images
if (nrValidFi
|
gsingers/rtfmbot
|
src/python/run.py
|
Python
|
mit
| 2,559
| 0.004299
|
import ConfigParser
import sys, traceback
from slackclient import SlackClient
from chatterbot import ChatBot
import os
from os import listdir
from os.path import isfile, join
from chatterbot.trainers import ChatterBotCorpusTrainer
config = ConfigParser.SafeConfigParser({"host": "searchhub.lucidworks.com", "port":80})
config.read('config.cfg')
token = config.get("Slack", "token") # found at https://api.slack.com/web#authentication
channel_str = config.get("Slack", "channels")
channel_names = []
if channel_str:
#print (channel_str)
channels = channel_str.split(",")
for channel in channels:
#print channel
channel_names.append(channel)
storage = config.get("Chatterbot", "storage_dir")
if not os.path.exists(storage):
os.makedirs(storage)
bot_name = config.get("Slack", "bot_name")
print "Starting Slack"
sc = SlackClient(token)
print "Starting Chatterbot"
chatbot = ChatBot(bot_name, storage_adapter="chatterbot.adapters.storage.JsonDatabaseAdapter",
logic_adapters=[
"chatterbot.adapters.logic.MathematicalEvaluation",
"chatterbot.adapters.logic.TimeLogicAdapter",
"chatterbot.adapters.logic.ClosestMeaningAdapter",
"adapters.SearchHubLogicAdapter"
],
searchhub_host=config.get("SearchHub", "host"),
searchhub_port=config.get("SearchHub", "port"),
input_adapter="adapters.SlackPythonInputAdapter",
output_adapter="adapters.SlackPythonOutputAdapter",
database=storage + "/database.json",
slack_client=sc,
slack_channels=channel_names,
slack_output_channel=config.get("Slack", "output_channel"),
slack_bot_name=bot_name
)
chatbot.set_trainer(ChatterBotCorpusTrainer)
training_dir = "training"
files = [f for f in listdir(training_dir) if isfile(join(training_dir, f)) and f.endswith(".json") and f.find("example.json") == -1]
for file in files:
print "Training on " + file
    chatbot.train("training." + file.replace(".json", ""))
# Train based on english greetings corpus
chatbot.train("chatterbot.corpus.english")
# Train based on the english conversations corpus
#chatbot.train("chatterbot.corpus.english.conversations")
print "Starting Chatbot"
while True:
try:
bot_input = chatbot.get_response(None)
    except Exception:
        print "Exception"
        traceback.print_exc()
|
breathe-free/breathe-see
|
example_publisher/__main__.py
|
Python
|
gpl-3.0
| 10,411
| 0.0073
|
#!/usr/bin/env python
import socket
import sys
import os
import time
import random
import csv
import json
import random
from sentence_generator import make_sentence
from copy import deepcopy
from subprocess import check_output
# csv file columns are timestamp, pressure, CO2, ...
SAMPLE_DATA_DIR = os.path.join(os.path.dirname(__file__), "sample_data")
SAMPLE_DATA = os.path.join(SAMPLE_DATA_DIR, "1427199271-sample-breathing.csv")
SOCKET_PATH = '/tmp/lucidity.socket'
TIME_WARP = float(os.environ.get('TIME_WARP', 1.0))
MAX_LINES_AT_ONCE = int(os.environ.get('MAX_LINES_AT_ONCE', 1))
EMIT_RANDOM_MSGS = bool(os.environ.get('GIBBERISH', False))
class SocketNotFound(Exception):
pass
# Read in data from the example csv file
datapoints = []
with open(SAMPLE_DATA, 'rb') as csvfile:
datareader = csv.reader(csvfile)
for row in datareader:
datapoints.append([float(x) for x in row])
# Try and connect to socket. If any error, print out error and output to stdout instead.
try:
# Make sure the socket exists
if not os.path.exists(SOCKET_PATH):
raise SocketNotFound("No socket at %s" % SOCKET_PATH)
# Create a UDS socket
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.setblocking(0) # important - don't block on reads
sock.connect(SOCKET_PATH)
output = sock.sendall
except (SocketNotFound, socket.error), msg:
print >>sys.stderr, "Error connecting to %s.\n\n%s." % (SOCKET_PATH, msg)
sys.exit(1)
def receive(the_socket):
# Act as an iterator. Sometimes >1 message will have accumulated on the
# socket by the time we come to read it.
# Yield either None (if nothing received, buffer empty) or json decode line by line.
rbuffer = ''
while True:
try:
incoming = the_socket.recv(1024)
rbuffer += incoming
except socket.error:
# nothing to read
yield None
continue
while rbuffer.find("\n") != -1:
line, rbuffer = rbuffer.split("\n", 1)
try:
yield json.loads(line)
except ValueError, e:
print >>sys.stderr, str(e)
print >>sys.stderr, line
def enum(**enums):
return type('Enum', (), enums)
STATES = enum(
INITIALISING = "initialising",
WAITING = "waiting",
CALIBRATING = "calibrating",
ANALYSING = "analysing",
COLLECTING = "collecting",
)
ACTIVE_STATES = [ STATES.CALIBRATING, STATES.ANALYSING, STATES.COLLECTING ]
DEFAULT_SETTINGS = {
"calibration_time": 3,
"sample_collection_time": 3,
"collection_control": "c",
"auto_triggers": True,
"blank_capture": False,
"total_breath": False,
"collection_rate": 4,
"collection_limit": 5,
"filename": "",
"capture_window": {
"start": {
"percent": 85,
"gradient": "rising"
},
"end": {
"percent": 15,
"gradient": "falling"
},
}
}
class Publisher:
def __init__(self):
self.lines_buffered = 0
self.index = 0
self.buffer = ""
self.state = None
# get own version
self.version = check_output(['git','describe','--tags'])
self.change_state(STATES.INITIALISING)
self.user_settings = {
"calibration_time": 5,
"sample_collection_time": 2,
"collection_control": "p",
"auto_triggers": False,
"blank_capture": False,
"total_breath": False,
"collection_rate": 2,
"collection_limit": 7,
"filename": "myfile",
"capture_window": {
"start": {
"percent": 62,
"gradient": "rising"
},
"end": {
"percent": 9,
"gradient": "falling"
},
}
}
self.settings = deepcopy(DEFAULT_SETTINGS)
self.set_completion(0,0)
def set_completion(self, by_volume, by_time):
self.collection_completion = {
"volume": min(100, by_volume),
"time": min(100, by_time),
}
self.emit(
collection_completion = self.collection_completion,
)
def change_state(self, new_state, message=None, severity=None):
if self.state != new_state:
message = "State changed to %s." % new_state
severity = "info"
self.state = new_state
self.emit(message=message, severity="info")
self.set_completion(0, 0)
def emit(self, **kwargs):
h = {
"state": self.state,
"version": self.version,
"is_simulation": True # DON'T include this member in a real publisher's messages
}
for key,val in kwargs.iteritems():
h[key] = val
output(json.dumps(h) + "\n")
def run(self):
# Wait a while to simulate initialisation
self.change_state(STATES.INITIALISING)
time.sleep(3.0 / TIME_WARP)
self.change_state(STATES.WAITING)
# Loop until user hits Ctrl+C
while True:
try:
# read from sock
received = receive(sock).next()
been_nudged = False
if received is not None and 'command' in received:
# act on information received
print "Received: %s" % received
do_what = received['command']
if do_what == "stop":
self.change_state(STATES.WAITING)
elif do_what == "start":
self.change_state(STATES.CALIBRATING)
self.emit(message="Using settings: " + json.dumps(received['settings']), severity="info", results_dir=SAMPLE_DATA_DIR)
self.emit(message="Got timestamp: " + json.dumps(received['timestamp']), severity="info")
elif do_what == "request_state":
self.emit()
elif do_what == "request_settings_current":
self.emit(settings=self.settings, results_dir=SAMPLE_DATA_DIR)
elif do_what == "apply_settings_default":
self.settings = deepcopy(DEFAULT_SETTINGS)
self.emit(settings=self.settings, message="Loaded default settings.", severity="info")
elif do_what == "apply_settings_user":
self.settings = deepcopy(self.user_settings)
self.emit(settings=self.settings, message="Loaded user settings.", severity="info")
elif do_what == "save_settings":
self.user_settings = received['settings']
self.settings = deepcopy(self.user_settings)
self.emit(settings=self.settings, message="Saved user settings.", severity="info")
elif do_what == "nudge":
been_nudged = True
# While running...
if self.state in ACTIVE_STATES:
# ...cycle through active states to simulate instrument doing things
if been_nudged:
current = ACTIVE_STATES.index(self.state)
next = current + 1
if next >= len(ACTIVE_STATES):
self.change_state(STATES.WAITING)
else:
self.change_state(ACTIVE_STATES[next])
# Emit incrementing completion data during simulated collection
if self.state == STATES.COLLECTING:
self.set_completion(
|
rbramwell/pulp
|
server/pulp/server/event/http.py
|
Python
|
gpl-2.0
| 2,330
| 0.001717
|
"""
Forwards events to a HTTP call. The configuration used by this notifier
is as follows:
url
Full URL to contact with the event data. A POST request will be made to this
URL with the contents of the events in the body.
Eventually this should be enhanced to support authentication credentials as well.
"""
import base64
import httplib
import logging
import threading
from pulp.server.compat import json, json_util
TYPE_ID = 'http'
_logger = logging.getLogger(__name__)
def handle_event(notifier_config, event):
# fire the actual http push function off in a separate thread to keep
# pulp from blocking or deadlocking due to the tasking subsystem
data = event.data()
_logger.info(data)
body = json.dumps(data, default=json_util.default)
thread = threading.Thread(target=_send_post, args=[notifier_config, body])
thread.setDaemon(True)
thread.start()
def _send_post(notifier_config, body):
# Basic headers
headers = {'Accept': 'application/json',
'Content-Type': 'application/json'}
# Parse the URL for the pieces we need
if 'url' not in notifier_config or not notifier_config['url']:
_logger.warn('HTTP notifier configured without a URL; cannot fire event')
return
url = notifier_config['url']
try:
scheme, empty, server, path = url.split('/', 3)
except ValueError:
_logger.warn('Improperly configured post_sync_url: %(u)s' % {'u': url})
return
    connection = _create_connection(scheme, server)
    # Process authentication
if 'username' in notifier_config and 'password' in notifier_config:
raw = ':'.join((notifier_config['username'], notifier_config['password']))
encoded = base64.encodestring(raw)[:-1]
headers['Authorization'] = 'Basic ' + encoded
connection.request('POST', '/' + path, body=body, headers=headers)
response = connection.getresponse()
if response.status != httplib.OK:
error_msg = response.read()
_logger.warn('Error response from HTTP notifier: %(e)s' % {'e': error_msg})
connection.close()
def _create_connection(scheme, server):
if scheme.startswith('https'):
connection = httplib.HTTPSConnection(server)
else:
connection = httplib.HTTPConnection(server)
return connection
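# --- Illustrative usage sketch, not part of the original module ---
# Builds a minimal notifier_config and pushes a hand-written JSON body through
# _send_post; the URL and credentials below are made-up example values.
def _example_fire(url='http://localhost:8080/listener'):
    notifier_config = {'url': url, 'username': 'admin', 'password': 'secret'}
    body = json.dumps({'event_type': 'demo', 'payload': {}})
    _send_post(notifier_config, body)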
|
tonysyu/deli
|
deli/stylus/rect_stylus.py
|
Python
|
bsd-3-clause
| 543
| 0
|
from enable.api import ColorTrait
from .base_patch_stylus import BasePatchStylus
class RectangleStylus(BasePatchStylus):
""" A Flyweight object for drawing filled rectangles.
"""
edge_color = ColorTrait('black')
fill_color = ColorTrait('yellow')
def draw(self, gc, rect):
with gc:
gc.set_stroke_color(self.edge_color_)
            if self.fill_color != 'none':
gc.set_fill_color(self.fill_color_)
gc.fill_path()
gc.draw_rect([int(a) for a in rect])
|
mwv/babycircle
|
visualization/colors.py
|
Python
|
gpl-3.0
| 1,039
| 0.006737
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
""" Convenience functions for gen
|
erating distinct colors.
Usage:
>>> generate_colors(4)
[(1.0, 0.0, 0.0), (0.5, 1.0, 0.0), (0.0, 1.0, 1.0), (0.5, 0.0, 1.0)]
"""
from __future__ import division
__author__ = 'Maarten Versteegh'
import math
def _hsv_to_rgb(h,f):
"""Convert a color specified by h-value and f-value to rgb triple
"""
v = 1.0
p = 0.0
if h == 0:
return v, f, p
elif h == 1:
return 1-f, v, p
elif h == 2:
return p, v, f
elif h == 3:
return p, 1-f, v
elif h == 4:
return f, p, v
elif h == 5:
return v, p, 1-f
def generate_colors(n):
"""Generate n distinct colors as rgb triples
Arguments:
n : int
number of colors to generate
Returns:
List of rgb triples
"""
hues = [360/n*i for i in range(n)]
hs = [(math.floor(hue/60) % 6) for hue in hues]
fs = [(hue/60 - math.floor(hue / 60)) for hue in hues]
return [_hsv_to_rgb(h,f) for h,f in zip(hs,fs)]
|
argriffing/numpy
|
numpy/ma/tests/test_core.py
|
Python
|
bsd-3-clause
| 167,699
| 0.00102
|
# pylint: disable-msg=W0401,W0511,W0611,W0612,W0614,R0201,E1102
"""Tests suite for MaskedArray & subclassing.
:author: Pierre Gerard-Marchant
:contact: pierregm_at_uga_dot_edu
"""
from __future__ import division, absolute_import, print_function
__author__ = "Pierre GF Gerard-Marchant"
import warnings
import pickle
import operator
import itertools
from functools import reduce
import numpy as np
import numpy.ma.core
import numpy.core.fromnumeric as fromnumeric
import numpy.core.umath as umath
from numpy.testing import TestCase, run_module_suite, assert_raises
from numpy import ndarray
from numpy.compat import asbytes, asbytes_nested
from numpy.ma.testutils import (
assert_, assert_array_equal, assert_equal, assert_almost_equal,
assert_equal_records, fail_if_equal, assert_not_equal,
assert_mask_equal,
)
from numpy.ma.core import (
MAError, MaskError, MaskType, MaskedArray, abs, absolute, add, all,
allclose, allequal, alltrue, angle, anom, arange, arccos, arccosh, arctan2,
arcsin, arctan, argsort, array, asarray, choose, concatenate,
conjugate, cos, cosh, count, default_fill_value, diag, divide, empty,
empty_like, equal, exp, flatten_mask, filled, fix_invalid,
flatten_structured_array, fromflex, getmask, getmaskarray, greater,
greater_equal, identity, inner, isMaskedArray, less, less_equal, log,
log10, make_mask, make_mask_descr, mask_or, masked, masked_array,
masked_equal, masked_greater, masked_greater_equal, masked_inside,
masked_less, masked_less_equal, masked_not_equal, masked_outside,
masked_print_option, masked_values, masked_where, max, maximum,
maximum_fill_value, min, minimum, minimum_fill_value, mod, multiply,
mvoid, nomask, not_equal, ones, outer, power, product, put, putmask,
ravel, repeat, reshape, resize, shape, sin, sinh, sometrue, sort, sqrt,
subtract, sum, take, tan, tanh, transpose, where, zeros,
)
pi = np.pi
class TestMaskedArray(TestCase):
# Base test class for MaskedArrays.
def setUp(self):
# Base data definition.
x = np.array([1., 1., 1., -2., pi/2.0, 4., 5., -10., 10., 1., 2., 3.])
y = np.array([5., 0., 3., 2., -1., -4., 0., -10., 10., 1., 0., 3.])
a10 = 10.
m1 = [1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]
m2 = [0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1]
xm = masked_array(x, mask=m1)
ym = masked_array(y, mask=m2)
z = np.array([-.5, 0., .5, .8])
zm = masked_array(z, mask=[0, 1, 0, 0])
xf = np.where(m1, 1e+20, x)
xm.set_fill_value(1e+20)
self.d = (x, y, a10, m1, m2, xm, ym, z, zm, xf)
def test_basicattributes(self):
# Tests some basic array attributes.
a = array([1, 3, 2])
b = array([1, 3, 2], mask=[1, 0, 1])
assert_equal(a.ndim, 1)
assert_equal(b.ndim, 1)
assert_equal(a.size, 3)
assert_equal(b.size, 3)
assert_equal(a.shape, (3,))
assert_equal(b.shape, (3,))
def test_basic0d(self):
# Checks masking a scalar
x = masked_array(0)
assert_equal(str(x), '0')
x = masked_array(0, mask=True)
assert_equal(str(x), str(masked_print_option))
x = masked_array(0, mask=False)
assert_equal(str(x), '0')
x = array(0, mask=1)
self.assertTrue(x.filled().dtype is x._data.dtype)
def test_basic1d(self):
# Test of basic array creation and properties in 1 dimension.
(x, y, a10, m1, m2, xm, ym, z, zm, xf) = self.d
self.assertTrue(not isMaskedArray(x))
self.assertTrue(isMaskedArray(xm))
self.assertTrue((xm - ym).filled(0).any())
fail_if_equal(xm.mask.astype(int), ym.mask.astype(int))
s = x.shape
assert_equal(np.shape(xm), s)
assert_equal(xm.shape, s)
assert_equal(xm.dtype, x.dtype)
assert_equal(zm.dtype, z.dtype)
assert_equal(xm.size, reduce(lambda x, y:x * y, s))
assert_equal(count(xm), len(m1) - reduce(lambda x, y:x + y, m1))
assert_array_equal(xm, xf)
assert_array_equal(filled(xm, 1.e20), xf)
assert_array_equal(x, xm)
def test_basic2d(self):
# Test of basic array creation and properties in 2 dimensions.
(x, y, a10, m1, m2, xm, ym, z, zm, xf) = self.d
for s in [(4, 3), (6, 2)]:
x.shape = s
y.shape = s
xm.shape = s
ym.shape = s
xf.shape = s
self.assertTrue(not isMaskedArray(x))
self.assertTrue(isMaskedArray(xm))
assert_equal(shape(xm), s)
assert_equal(xm.shape, s)
assert_equal(xm.size, reduce(lambda x, y:x * y, s))
assert_equal(count(xm), len(m1) - reduce(lambda x, y:x + y, m1))
assert_equal(xm, xf)
assert_equal(filled(xm, 1.e20), xf)
assert_equal(x, xm)
def test_concatenate_basic(self):
# Tests concatenations.
(x, y, a10, m1, m2, xm, ym, z, zm, xf) = self.d
# basic concatenation
assert_equal(np.concatenate((x, y)), concatenate((xm, ym)))
assert_equal(np.concatenate((x, y)), concatenate((x, y)))
assert_equal(np.concatenate((x, y)), concatenate((xm, y)))
assert_equal(np.concatenate((x, y, x)), concatenate((x, ym, x)))
def test_concatenate_alongaxis(self):
# Tests concatenations.
(x, y, a10, m1, m2, xm, ym, z, zm, xf) = self.d
# Concatenation along an axis
s = (3, 4)
x.shape = y.shape = xm.shape = ym.shape = s
assert_equal(xm.mask, np.reshape(m1, s))
assert_equal(ym.mask, np.reshape(m2, s))
xmym = concatenate((xm, ym), 1)
assert_equal(np.concatenate((x, y), 1), xmym)
assert_equal(np.concatenate((xm.mask, ym.mask), 1), xmym._mask)
x = zeros(2)
y = array(ones(2), mask=[False, True])
z = concatenate((x, y))
assert_array_equal(z, [0, 0, 1, 1])
assert_array_equal(z.mask, [False, False, False, True])
z = concatenate((y, x))
assert_array_equal(z, [1, 1, 0, 0])
assert_array_equal(z.mask, [False, True, False, False])
def test_concatenate_flexible(self):
# Tests the concatenation on flexible arrays.
data = masked_array(list(zip(np.random.rand(10),
np.arange(10))),
dtype=[('a', float), ('b', int)])
test = concatenate([data[:5], data[5:]])
assert_equal_records(test, data)
def test_creation_ndmin(self):
# Check the use of ndmin
x = array([1, 2, 3], mask=[1, 0, 0], ndmin=2)
assert_equal(x.shape, (1, 3))
assert_equal(x._data, [[1, 2, 3]])
assert_equal(x._mask, [[1, 0, 0]])
def test_creation_ndmin_from_maskedarray(self):
# Make sure we're not losing the original mask w/ ndmin
x = array([1, 2, 3])
x[-1] = masked
xx = array(x, ndmin=2, dtype=float)
assert_equal(x.shape, x._mask.shape)
assert_equal(xx.shape, xx._mask.shape)
def test_creation_maskcreation(self):
# Tests how masks are initialized at the creation of Maskedarrays.
data = arange(24, dtype=float)
data[[3, 6, 15]] = masked
        dma_1 = MaskedArray(data)
assert_equal(dma_1.mask, data.mask)
dma_2 = MaskedArray(dma_1)
assert_equal(dma_2.mask, dma_1.mask)
dma_3 = MaskedArray(dma_1, mask=[1, 0, 0, 0] * 6)
fail_if_equal(dma_3.mask, dma_1.mask)
x = array([1, 2, 3], mask=True)
assert_equal(x._mask, [True, True, True])
x = array([1, 2, 3], mask=False)
assert_equal(x._mask, [False, False, False])
y = array([1, 2, 3], mask=x._mask, copy=False)
        assert_(np.may_share_memory(x.mask, y.mask))
y = array([1, 2, 3], mask=x._mask, copy=True)
assert_(not np.may_share_memory(x.mask, y.mask))
def test_creation_with_list_of_maskedarrays(self):
# Tests creating a masked array from a list of masked arrays.
x = array(np.arange(5), mask=[1, 0, 0, 0, 0])
data = array((x, x[::-1]))
assert_e
|
mephizzle/wagtail
|
wagtail/wagtailcore/management/commands/fixtree.py
|
Python
|
bsd-3-clause
| 5,041
| 0.004761
|
import operator
import functools
from optparse import make_option
from django.core.management.base import BaseCommand
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.db.models import Q
from django.utils import six
from wagtail.wagtailcore.models import Page
class Command(BaseCommand):
help = "Checks for data integrity errors on the page tree, and fixes them where possible."
base_options = (
make_option('--noinput', action='store_false', dest='interactive', default=True,
help='If provided, any fixes requiring user interaction will be skipped.'
),
)
option_list = BaseCommand.option_list + base_options
def numberlist_to_string(self, numberlist):
# Converts a list of numbers into a string
# Doesn't put "L" after longs
return '[' + ', '.join(map(str, numberlist)) + ']'
def handle(self, **options):
any_problems_fixed = False
for page in Page.objects.all():
try:
page.specific
except ObjectDoesNotExist:
self.stdout.write("Page %d (%s) is missing a subclass record; deleting." % (page.id, page.title))
any_problems_fixed = True
page.delete()
(bad_alpha, bad_path, orphans, bad_depth, bad_numchild) = Page.find_problems()
if bad_depth:
self.stdout.write("In
|
correct depth value found for pages: %s" % self.numberlist_to_string(bad_depth))
if bad_numchild:
self.stdout.write("Incorrect numchild value found for pages: %s" % self.numberlist_to_string(bad_numchild))
if bad_depth or bad_numchild:
Page.fix_tree(destructive=False)
any_problems_fixed = True
if orphans:
            # The 'orphans' list as returned by treebeard only includes pages that are
# missing an immediate parent; descendants of orphans are not included.
# Deleting only the *actual* orphans is a bit silly (since it'll just create
# more orphans), so generate a queryset that contains descendants as well.
orphan_paths = Page.objects.filter(id__in=orphans).values_list('path', flat=True)
filter_conditions = []
for path in orphan_paths:
filter_conditions.append(Q(path__startswith=path))
# combine filter_conditions into a single ORed condition
final_filter = functools.reduce(operator.or_, filter_conditions)
# build a queryset of all pages to be removed; this must be a vanilla Django
# queryset rather than a treebeard MP_NodeQuerySet, so that we bypass treebeard's
# custom delete() logic that would trip up on the very same corruption that we're
# trying to fix here.
pages_to_delete = models.query.QuerySet(Page).filter(final_filter)
self.stdout.write("Orphaned pages found:")
for page in pages_to_delete:
self.stdout.write("ID %d: %s" % (page.id, page.title))
self.stdout.write('')
if options.get('interactive', True):
yes_or_no = six.moves.input("Delete these pages? [y/N] ")
delete_orphans = yes_or_no.lower().startswith('y')
self.stdout.write('')
else:
# Running tests, check for the "delete_orphans" option
delete_orphans = options.get('delete_orphans', False)
if delete_orphans:
deletion_count = len(pages_to_delete)
pages_to_delete.delete()
self.stdout.write(
"%d orphaned page%s deleted." % (deletion_count, "s" if deletion_count != 1 else "")
)
any_problems_fixed = True
if any_problems_fixed:
# re-run find_problems to see if any new ones have surfaced
(bad_alpha, bad_path, orphans, bad_depth, bad_numchild) = Page.find_problems()
if any((bad_alpha, bad_path, orphans, bad_depth, bad_numchild)):
self.stdout.write("Remaining problems (cannot fix automatically):")
if bad_alpha:
self.stdout.write("Invalid characters found in path for pages: %s" % self.numberlist_to_string(bad_alpha))
if bad_path:
self.stdout.write("Invalid path length found for pages: %s" % self.numberlist_to_string(bad_path))
if orphans:
self.stdout.write("Orphaned pages found: %s" % self.numberlist_to_string(orphans))
if bad_depth:
self.stdout.write("Incorrect depth value found for pages: %s" % self.numberlist_to_string(bad_depth))
if bad_numchild:
self.stdout.write("Incorrect numchild value found for pages: %s" % self.numberlist_to_string(bad_numchild))
elif any_problems_fixed:
self.stdout.write("All problems fixed.")
else:
self.stdout.write("No problems found.")
|
AASHE/hub
|
hub/apps/content/management/commands/import_conf_presentations_2020.py
|
Python
|
mit
| 5,266
| 0.001709
|
import csv
import os
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from django.utils import timezone
from hub.apps.content.models import Author
from hub.apps.content.types.presentations import Presentation
from hub.apps.metadata.models import (
Organization,
SustainabilityTopic,
ConferenceName,
PresentationType,
AcademicDiscipline,
InstitutionalOffice,
)
from hub.imports.utils import create_file_from_path
User = get_user_model()
class Command(BaseCommand):
help = "One-time import of Conference Presentation data for 2020"
def handle(self, *args, **options):
FILES_PATH = "/Volumes/GoogleDrive/My Drive/GCSHE 2020 Conference Materials"
with open(
"{}/{}".format(os.path.dirname(__file__), "GCSHE2020Presentations.csv"),
"rb",
) as csvfile:
reader = csv.DictReader(csvfile)
submitter_monika = User.objects.get(email="monika.urbanski@aashe.org")
conference_name = ConferenceName.objects.get(name="AASHE")
for row in reader:
title = row["Presentation_Title"].strip()
description = row["Description"].strip()
presentation_type = PresentationType.objects.get(
name=row["Presentation_Type"].strip()
)
month, day, year = row["Presentation_Date"].split("/")
presentation = Presentation.objects.create(
title=title,
description=description,
conf_name=conference_name,
presentation_type=presentation_type,
date_created="{}-{}-{}".format(2020, month, day),
published=timezone.now(),
status="published",
submitted_by=submitter_monika,
)
#
# Academic Disciplines
#
disc = row["Academic_Discipline_1"].strip()
if disc:
academic_discipline = AcademicDiscipline.objects.get(name=disc)
presentation.disciplines.add(academic_discipline)
#
# Institutional office
#
for idx in (1, 2):
office_dept = row["Office_Dept_{}".format(idx)].strip()
if office_dept:
office_dept = InstitutionalOffice.objects.get(name=office_dept)
presentation.institutions.add(office_dept)
#
# Organizations
# Org_1_id
for idx in (1, 2, 3, 4, 5, 6):
org_id = row["Org_{}_id".format(idx)].strip()
if org_id:
try:
org = Organization.objects.get(membersuite_id=org_id)
presentation.organizations.add(org)
except Organization.DoesNotExist:
print "Org {} not found for {}".format(org_id, title)
#
# Topics
#
for idx in (1, 2, 3):
topic = row["Topic_{}".format(idx)].strip()
if topic:
topic = SustainabilityTopic.objects.get(name=topic)
presentation.topics.add(topic)
#
# Tags
#
tags_token = row["Tags"]
tags = [tag.strip() for tag in tags_token.split(",")]
for tag in tags:
presentation.keywords.add(tag)
#
# Authors
#
for idx in (1, 2, 3, 4, 5, 6, 7, 8):
author_name = row["Author{}_Name".format(idx)].strip()
if author_name:
author_title = row["Position/Title _{}".format(idx)]
                        org_id = row["Author_Org_{}_id".format(idx)]
org = None
if org_id:
try:
org = Organization.objects.get(membersuite_id=org_id)
except Organization.DoesNotExist:
print "Org {} not found for Author {} for {}".format(
org_id, author_name, title
)
Author.objects.create(
ct=presentation,
name=author_name,
title=author_title,
organization=org,
)
#
# Files
#
for idx in (1, 2, 3, 4):
file_title = row["File{}_Title".format(idx)].strip()
if file_title:
create_file_from_path(
parent=presentation,
files_dir=FILES_PATH,
path=file_title,
upload=False,
)
# 5192
|
OiNutter/rivets
|
test/test_scss.py
|
Python
|
mit
| 6,467
| 0.032473
|
import sys
sys.path.insert(0,'../')
if sys.version_info[:2] == (2,6):
import unittest2 as unittest
else:
import unittest
import os
import lean
import shutil
import datetime
import time
from rivets_test import RivetsTest
import rivets
CACHE_PATH = os.path.relpath("../../.sass-cache", __file__)
COMPASS_PATH = os.path.join(RivetsTest.FIXTURE_ROOT, 'compass')
SASS_PATH = os.path.join(RivetsTest.FIXTURE_ROOT, 'sass')
class ScssTemplate(lean.ScssTemplate):
def sass_options(self):
scss.LOAD_PATHS += ',%s,%s' % (COMPASS_PATH,SASS_PATH)
options = self._options
options.update({
'filename':self.eval_file(),
'line':self._line,
'syntax':'scss',
'compress':0,
'load_paths':[COMPASS_PATH,SASS_PATH]
})
return options
class TestLeanScss(RivetsTest):
def setUp(self):
def get_scss():
global scss
import scss
self.silence_warnings(callback=get_scss)
def tearDown(self):
if os.path.exists(CACHE_PATH):
shutil.rmtree(CACHE_PATH)
assert not os.path.exists(CACHE_PATH)
def silence_warnings(self,callback):
return callback()
def render(self,path):
path = self.fixture_path(path)
def process():
return ScssTemplate(path).render()
return self.silence_warnings(callback=process)
def testProcessVariables(self):
''' Test process variables '''
example_css = '''.content-navigation {
border-color: #3bbfce;
color: rgb(17.184%, 63.477%, 68.737%);
}
.border {
padding: 8px;
margin: 8px;
border-color: #3bbfce;
}
'''
		self.assertEqual(self.render('sass/variables.scss'),example_css)
def testProcessNesting(self):
''' Test process nesting '''
example_css = '''table.hl {
margin: 2em 0;
}
table.hl td.ln {
text-align: right;
}
li {
font-family: serif;
font-weight: bold;
font-size: 1.2em;
}
'''
		self.assertEqual(self.render('sass/nesting.scss'),example_css)
def testImportScssPartialFromScss(self):
''' Test @import scss partial from scss '''
example_css = '''#navbar li {
border-top-radius: 10px;
-moz-border-radius-top: 10px;
-webkit-border-top-radius: 10px;
}
#footer {
border-top-radius: 5px;
-moz-border-radius-top: 5px;
-webkit-border-top-radius: 5px;
}
#sidebar {
border-left-radius: 8px;
-moz-border-radius-left: 8px;
-webkit-border-left-radius: 8px;
}
'''
self.assertEqual(self.render('sass/import_partial.scss'),example_css)
def testImportPrefersPartialOverFullName(self):
''' Test @import prefers partial over fullname '''
filename = self.fixture_path('sass/test.scss')
partial = self.fixture_path("sass/_partial.scss")
other = self.fixture_path("sass/partial.scss")
def do_test():
f = open(filename,'w')
f.write("@import 'partial';")
f.close()
f = open(partial,'w')
f.write(".partial { background: #ff0000; };")
f.close()
f = open(other,'w')
f.write(".partial { background: #0000ff; };")
f.close()
self.assertEqual(
".partial {\n background: #ff0000;\n}\n\n",
self.render(filename)
)
self.sandbox(filename,partial,other,callback=do_test)
def testImportCSSFileFromLoadPath(self):
''' Test @import css file from load path '''
self.assertEqual(
'',
self.render("sass/import_load_path.scss")
)
@unittest.skip('Skipping relative imports until supported by pyScss')
def testImportRelativeFile(self):
''' Test @import relative file'''
example_css = '''#navbar li {
border-top-radius: 10px;
-moz-border-radius-top: 10px;
-webkit-border-top-radius: 10px;
}
#footer {
border-top-radius: 5px;
-moz-border-radius-top: 5px;
-webkit-border-top-radius: 5px;
}
#sidebar {
border-left-radius: 8px;
-moz-border-radius-left: 8px;
-webkit-border-left-radius: 8px;
}
'''
self.assertEqual(
self.render('sass/shared/relative.scss'),
example_css
)
@unittest.skip('Skipping relative imports until supported by pyScss')
def testImportRelativeNestedFile(self):
''' Test import relative nested file '''
example_css = '''body {
background: #666666;
}
'''
self.assertEqual(self.render('sass/relative.scss'),example_css)
def testModifyFileCausesItToRecompile(self):
''' Test modify file causes it to recompile '''
filename = self.fixture_path('sass/test.scss')
def do_test():
f = open(filename,'w')
f.write("body { background: red; };")
f.close()
self.assertEqual("body {\n background: #ff0000;\n}\n\n",self.render(filename))
f = open(filename,'w')
f.write("body { background: blue; };")
f.close()
new_time = time.mktime((datetime.datetime.now()+datetime.timedelta(seconds=1)).timetuple())
os.utime(filename,(new_time,new_time))
self.assertEqual("body {\n background: #0000ff;\n}\n\n",self.render(filename))
self.sandbox(filename,callback=do_test)
@unittest.skip('Skipping until python scss can support custom importers')
def testModifyPartialCausesItToRecompile(self):
''' Test modify partial causes it to recompile '''
filename = self.fixture_path('sass/test.scss')
partial = self.fixture_path('sass/_partial.scss')
def do_test():
f = open(filename,'w')
f.write("@import 'partial'")
f.close()
f = open(partial,'w')
f.write("body { background: red; };")
f.close()
self.assertEqual("body {\n background: #ff0000;\n}\n\n",self.render(filename))
f = open(partial,'w')
f.write("body { background: blue; };")
f.close()
new_time = time.mktime((datetime.datetime.now()+datetime.timedelta(seconds=1)).timetuple())
os.utime(partial,(new_time,new_time))
self.assertEqual("body {\n background: #0000ff;\n}\n\n",self.render(filename))
self.sandbox(filename,partial,callback=do_test)
def testReferenceImportedVariable(self):
''' Test reference @import'd variable '''
self.assertEqual(
self.render('sass/links.scss'),
'a:link {\n color: "#ff0000";\n}\n\n'
)
def testImportReferenceVariable(self):
''' Test @import reference variable '''
self.assertEqual(
self.render('sass/main.scss'),
'#header {\n color: #0000ff;\n}\n\n'
)
class TestRivetsSass(TestLeanScss):
def setUp(self):
super(TestRivetsSass,self).setUp()
self.env = rivets.Environment('.')
self.env.cache = {}
self.env.append_path(
self.fixture_path('.'),
self.fixture_path('compass')
)
def teardown(self):
assert not os.path.exists(CACHE_PATH)
def render(self,path):
path = self.fixture_path(path)
return self.silence_warnings(callback=lambda :str(self.env[path]))
if __name__ == '__main__':
unittest.main()
|
lepture/oauthlib
|
tests/oauth2/rfc6749/clients/test_legacy_application.py
|
Python
|
bsd-3-clause
| 2,383
| 0.002098
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from mock import patch
from oauthlib.oauth2 import LegacyApplicationClient
from ....unittest import TestCase
@patch('time.time', new=lambda: 1000)
class LegacyApplicationClientTest(TestCase):
client_id = "someclientid"
scope = ["/profile"]
kwargs = {
"some": "providers",
"require": "extra arguments"
}
username = "foo"
password = "bar"
body = "not=empty"
body_up = "not=empty&grant_type=password&username=%s&password=%s" % (username, password)
body_kwargs = body_up + "&some=providers&require=extra+arguments"
token_json = ('{ "access_token":"2YotnFZFEjr1zCsicMWpAA",'
' "token_type":"example",'
' "expires_in":3600,'
' "scope":"/profile",'
' "refresh_token":"tGzv3JOkF0XG5Qx2TlKWIA",'
' "example_parameter":"example_value"}')
token = {
"access_token": "2YotnFZFEjr1zCsicMWpAA",
"token_type": "example",
"expires_in": 3600,
"expires_at": 4600,
"scope": scope,
"refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",
"example_parameter": "example_value"
}
def test_request_body(self):
client = LegacyApplicationClient(self.client_id)
# Basic, no extra arguments
body = client.prepare_request_body(self.username, self.password,
body=self.body)
self.assertFormBodyEqual(body, self.body_up)
# With extra parameters
body = client.prepare_request_body(self.username, self.password,
body=self.body, **self.kwargs)
self.assertFormBodyEqual(body, self.body_kwargs)
def test_parse_token_response(self):
client = LegacyApplicationClient(self.client_id)
# Parse token response
response = client.parse_request_body_response(self.token_json, scope=self.scope)
self.assertEqual(response, self.token)
self.assertEqual(client.access_token, response.get("access_token"))
self.assertEqual(client.refresh_token, response.get("refresh_token"))
self.assertEqual(client.token_type, response.get("token_type"))
# Mismatching scope
self.assertRaises(Warning, client.parse_request_body_response, self.token_json, scope="invalid")
|
timkrentz/SunTracker
|
IMU/VTK-6.2.0/IO/MINC/Testing/Python/TestMNITagPoints.py
|
Python
|
mit
| 3,826
| 0.000261
|
#!/usr/bin/env python
import os
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Test label reading from an MNI tag file
#
# The current directory must be writeable.
#
try:
fname = "mni-tagtest.tag"
channel = open(fname, "wb")
channel.close()
# create some random points in a sphere
#
sphere1 = vtk.vtkPointSource()
sphere1.SetNumberOfPoints(13)
xform = vtk.vtkTransform()
xform.RotateWXYZ(20, 1, 0, 0)
xformFilter = vtk.vtkTransformFilter()
xformFilter.SetTransform(xform)
xformFilter.SetInputConnection(sphere1.GetOutputPort())
labels = vtk.vtkStringArray()
labels.InsertNextValue("0")
labels.InsertNextValue("1")
labels.InsertNextValue("2")
labels.InsertNextValue("3")
labels.InsertNextValue("Halifax")
labels.InsertNextValue("Toronto")
labels.InsertNextValue("Vancouver")
labels.InsertNextValue("Larry")
labels.InsertNextValue("Bob")
labels.InsertNextValue("Jackie")
labels.InsertNextValue("10")
labels.InsertNextValue("11")
labels.InsertNextValue("12")
weights = vtk.vtkDoubleArray()
weights.InsertNextValue(1.0)
weights.InsertNextValue(1.1)
weights.InsertNextValue(1.2)
weights.InsertNextValue(1.3)
weights.InsertNextValue(1.4)
weights.InsertNextValue(1.5)
weights.InsertNextValue(1.6)
weights.InsertNextValue(1.7)
weights.InsertNextValue(1.8)
weights.InsertNextValue(1.9)
weights.InsertNextValue(0.9)
weights.InsertNextValue(0.8)
weights.InsertNextValue(0.7)
writer = vtk.vtkMNITagPointWriter()
writer.SetFileName(fname)
writer.SetInputConnection(sphere1.GetOutputPort())
writer.SetInputConnection(1, xformFilter.GetOutputPort())
writer.SetLabelText(labels)
writer.SetWeights(weights)
writer.SetComments("Volume 1: sphere points\nVolume 2: transformed points")
writer.Write()
reader = vtk.vtkMNITagPointReader()
reader.CanReadFile(fname)
reader.SetFileName(fname)
textProp = vtk.vtkTextProperty()
textProp.SetFontSize(12)
textProp.SetColor(1.0, 1.0, 0.5)
labelHier = vtk.vtkPointSetToLabelHierarchy()
labelHier.SetInputConnection(reader.GetOutputPort())
labelHier.SetTextProperty(textProp)
labelHier.SetLabelArrayName("LabelText")
labelHier.SetMaximumDepth(15)
labelHier.SetTargetLabelCount(12)
labelMapper = vtk.vtkLabelPlacementMapper()
labelMapper.SetInputConnection(labelHier.GetOutputPort())
labelMapper.UseDepthBufferOff()
labelMapper.SetShapeToRect()
labelMapper.SetStyleToOutline()
labelActor = vtk.vtkActor2D()
labelActor.SetMapper(labelMapper)
glyphSource = vtk.vtkSphereSource()
glyphSource.SetRadius(0.01)
glyph = vtk.vtkGlyph3D()
glyph.SetSourceConnection(glyphSource.GetOutputPort())
glyph.SetInputConnection(reader.GetOutputPort())
mapper = vtk.vtkDataSetMapper()
mapper.SetInputConnection(glyph.GetOutputPort())
actor = vtk.vtkActor()
actor.SetMapper(mapper)
# Create rendering stuff
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.SetMultiSamples(0)
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Add the actors to the renderer, set the background and size
#
ren1.AddViewProp(actor)
ren1.AddViewProp(labelActor)
ren1.SetBackground(0, 0, 0)
renWin.SetSize(300, 300)
renWin.Render()
try:
os.remove(fname)
except OSError:
pass
# render the image
#
# iren.Start()
except IOError:
print "Unable to test the writer/reader."
|
hiryou/pandora_extractor
|
src/PandoraExtractor.py
|
Python
|
mit
| 232
| 0.017241
|
__author__="longuyen"
__date__ ="$8-Feb-2013 3:29:44 AM$"
from app.Welcome import Welcome
from app.FlowControl import FlowControl
if __name__ == '__main__':
Welcome.disclaimer()
control = FlowControl()
control.start()
|
HybridF5/tempest_debug
|
tempest/services/orchestration/json/orchestration_client.py
|
Python
|
apache-2.0
| 16,844
| 0
|
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import time
from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib
from tempest import exceptions
from tempest.lib.common import rest_client
from tempest.lib import exceptions as lib_exc
class OrchestrationClient(rest_client.RestClient):
def list_stacks(self, params=None):
"""Lists all stacks for a user."""
uri = 'stacks'
if params:
uri += '?%s' % urllib.urlencode(params)
resp, body = self.get(uri)
self.expected_success(200, resp.status)
body = json.loads(body)
return rest_client.ResponseBody(resp, body)
def create_stack(self, name, disable_rollback=True, parameters=None,
timeout_mins=60, template=None, template_url=None,
environment=None, files=None):
if parameters is None:
parameters = {}
headers, body = self._prepare_update_create(
name,
disable_rollback,
parameters,
timeout_mins,
template,
template_url,
environment,
files)
uri = 'stacks'
resp, body = self.post(uri, headers=headers, body=body)
self.expected_success(201, resp.status)
body = json.loads(body)
return rest_client.ResponseBody(resp, body)
def update_stack(self, stack_identifier, name, disable_rollback=True,
parameters=None, timeout_mins=60, template=None,
template_url=None, environment=None, files=None):
if parameters is None:
parameters = {}
headers, body = self._prepare_update_create(
name,
disable_rollback,
parameters,
timeout_mins,
template,
template_url,
environment)
uri = "stacks/%s" % stack_identifier
resp, body = self.put(uri, headers=headers, body=body)
self.expected_success(202, resp.status)
return rest_client.ResponseBody(resp, body)
def _prepare_update_create(self, name, disable_rollback=True,
parameters=None, timeout_mins=60,
template=None, template_url=None,
environment=None, files=None):
if parameters is None:
parameters = {}
post_body = {
"stack_name": name,
"disable_rollback": disable_rollback,
"parameters": parameters,
"timeout_mins": timeout_mins,
"template": "HeatTemplateFormatVersion: '2012-12-12'\n",
"environment": environment,
"files": files
}
if template:
post_body['template'] = template
if template_url:
post_body['template_url'] = template_url
body = json.dumps(post_body)
# Password must be provided on stack create so that heat
# can perform future operations on behalf of the user
headers = self.get_headers()
headers['X-Auth-Key'] = self.password
headers['X-Auth-User'] = self.user
return headers, body
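# Hedged illustration (not part of the original client): with the defaults
# above and no template/template_url given, the helper returns a JSON body
# shaped roughly like
#   {"stack_name": <name>, "disable_rollback": true, "parameters": {},
#    "timeout_mins": 60, "template": "HeatTemplateFormatVersion: '2012-12-12'\n",
#    "environment": null, "files": null}
# plus headers carrying X-Auth-Key / X-Auth-User for later Heat operations.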
def show_stack(self, stack_identifier):
"""Returns the details of a single stack."""
url = "stacks/%s" % stack_identifier
resp, body = self.get(url)
self.expected_success(200, resp.status)
body = json.loads(body)
return rest_client.ResponseBody(resp, body)
def suspend_stack(self, stack_identifier):
"""Suspend a stack."""
url = 'stacks/%s/actions' % stack_identifier
body = {'suspend': None}
resp, body = self.post(url, json.dumps(body))
self.expected_success(200, resp.status)
return rest_client.ResponseBody(resp)
def resume_stack(self, stack_identifier):
"""Resume a stack."""
url = 'stacks/%s/actions' % stack_identifier
body = {'resume': None}
resp, body = self.post(url, json.dumps(body))
self.expected_success(200, resp.status)
return rest_client.ResponseBody(resp)
def list_resources(self, stack_identifier):
"""Returns the details of a single resource."""
url = "stacks/%s/resources" % stack_identifier
resp, body = self.get(url)
self.expected_success(200, resp.status)
body = json.loads(body)
return rest_client.ResponseBody(resp, body)
def show_resource(self, stack_identifier, resource_name):
"""Returns the details of a single resource."""
url = "stacks/%s/resources/%s" % (stack_identifier, resource_name)
resp, body = self.get(url)
self.expected_success(200, resp.status)
body = json.loads(body)
return rest_client.ResponseBody(resp, body)
def delete_stack(self, stack_identifier):
"""Deletes the specified Stack."""
resp, _ = self.delete("stacks/%s" % str(stack_identifier))
self.expected_success(204, resp.status)
return rest_client.ResponseBody(resp)
def wait_for_stack_status(self, stack_identifier, status,
failure_pattern='^.*_FAILED$'):
"""Waits for a Stack to reach a given status."""
start = int(time.time())
fail_regexp = re.compile(failure_pattern)
while True:
try:
body = self.show_stack(stack_identifier)['stack']
except lib_exc.NotFound:
if status == 'DELETE_COMPLETE':
return
stack_name = body['stack_name']
stack_status = body['stack_status']
if stack_status == status:
return body
if fail_regexp.search(stack_status):
raise exceptions.StackBuildErrorException(
stack_identifier=stack_identifier,
stack_status=stack_status,
stack_status_reason=body['stack_status_reason'])
if int(time.time()) - start >= self.build_timeout:
message = ('Stack %s failed to reach %s status (current: %s) '
'within the required time (%s s).' %
(stack_name, status, stack_status,
self.build_timeout))
raise exceptions.TimeoutException(message)
time.sleep(self.build_interval)
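# Hedged usage sketch (not in the original file); the stack identifier and
# target status below are assumptions for illustration only:
#   client.wait_for_stack_status('demo-stack/1234', 'CREATE_COMPLETE')
# The loop above polls show_stack() every build_interval seconds, raises
# StackBuildErrorException when the status matches failure_pattern, and
# raises TimeoutException once build_timeout seconds have elapsed.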
def show_resource_metadata(self, stack_identifier, resource_name):
"""Returns the resource's metadata."""
url = ('stacks/{stack_identifier}/resources/{resource_name}'
'/metadata'.format(**locals()))
resp, body = self.get(url)
self.expected_success(200, resp.status)
body = json.loads(body)
return rest_client.ResponseBody(resp, body)
def list_events(self, stack_identifier):
"""Returns list of all events for a stack."""
url = 'stacks/{stack_identifier}/events'.format(**locals())
resp, body = self.get(url)
self.expected_success(200, resp.status)
body = json.loads(body)
return rest_client.ResponseBody(resp, body)
def list_resource_events(self, stack_identifier, resource_name):
"""Returns list of all events for a resource from stack."""
url = ('stacks/{stack_identifier}/resources/{resource_name}'
'/events'.format(**locals()))
resp, body = self.get(url)
self.expected_success(200, resp.status)
body = json.loads(body)
return rest_client.ResponseBody(resp, body)
def s
|
yquant/gn-standalone
|
src/build/find_depot_tools.py
|
Python
|
apache-2.0
| 2,084
| 0.013436
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Small utility function to find depot_tools and add it to the python path.
Will throw an ImportError exception if depot_tools can't be found since it
imports breakpad.
This can also be used as a standalone script to print out the depot_tools
directory location.
"""
import os
import sys
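# Hedged usage sketch (not part of the original script): callers typically
# import the module and extend sys.path, or run it standalone, e.g.
#   import find_depot_tools
#   find_depot_tools.add_depot_tools_to_path()
# or, from a shell:
#   python find_depot_tools.py   # prints the depot_tools location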
def IsRealDepotTools(path):
return os.path.isfile(os.path.join(path, 'gclient.py'))
def add_depot_tools_to_path():
"""Search for depot_tools and add it to sys.path."""
# First search for depot_tools_lite for gn standalone build
script_dir = os.path.dirname(os.path.realpath(__file__))
gn_standalone_root = os.path.dirname(os.path.dirname(script_dir))
depot_tools_lite_dir = os.path.join(gn_standalone_root,
'tools')
if IsRealDepotTools(depot_tools_lite_dir):
return depot_tools_lite_dir
# Next, look if depot_tools is already in PYTHONPATH.
for i in sys.path:
if i.rstrip(os.sep).endswith('depot_tools') and IsRealDepotTools(i):
return i
# Then look if depot_tools is in PATH, common case.
for i in os.environ['PATH'].split(os.pathsep):
if IsRealDepotTools(i):
sys.path.append(i.rstrip(os.sep))
return i
# Rare case, it's not even in PATH, look upward up to root.
root_dir = os.path.dirname(os.path.abspath(__file__))
previous_dir = os.path.abspath(__file__)
while root_dir and root_dir != previous_dir:
i = os.path.join(root_dir, 'depot_tools')
if IsRealDepotTools(i):
sys.path.append(i)
return i
previous_dir = root_dir
root_dir = os.path.dirname(root_dir)
print >> sys.stderr, 'Failed to find depot_tools'
return None
DEPOT_TOOLS_PATH = add_depot_tools_to_path()
# pylint: disable=W0611
if os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN') != '2':
import breakpad
def main():
if DEPOT_TOOLS_PATH is None:
return 1
print DEPOT_TOOLS_PATH
return 0
if __name__ == '__main__':
sys.exit(main())
|
nishad-jobsglobal/odoo-marriot
|
openerp/addons/tapplicant_webcam/__openerp__.py
|
Python
|
agpl-3.0
| 1,685
| 0
|
# -*- coding:utf-8 -*-
#
#
# Copyright (C) 2013 Michael Telahun Makonnen <mmakonnen@gmail.com>.
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{
'name': 'Capture picture with webcam',
'version': '1.0',
'category': 'Generic Modules/Human Resources',
'description': """
TApplicant WebCam
=================
Capture employee pictures with an attached web cam.
""",
'author': "Michael Telahun Makonnen <mmakonnen@gmail.com>,"
"Odoo Community Association (OCA)",
'website': 'http://miketelahun.wordpress.com',
'license': 'AGPL-3',
'depends': [
'hr',
'web',
'trip'
],
'js': [
'static/src/js/jquery.webcam.js',
'static/src/js/tapplicant_webcam.js',
],
'css': [
'static/src/css/tapplicant_webcam.css',
],
'qweb': [
'static/src/xml/tapplicant_webcam.xml',
],
'data': [
'tapplicant_webcam_data.xml',
'tapplicant_webcam_view.xml',
],
'installable': True,
'active': False,
}
|
ericholscher/pinax
|
pinax/apps/projects/management.py
|
Python
|
mit
| 922
| 0.005423
|
from django.conf import settings
from django.db.models import signals
from django.utils.translation import ugettext_noop as _
if "notification" in settings.INSTALLED_APPS:
from notification import models as notification
def create_notice_types(app, created_models, verbosity, **kwargs):
notification.create_notice_type("projects_new_member", _("New Project Member"), _("a project you are a member of has a new member"), default=1)
notification.create_notice_type("projects_created_new_member", _("New Member Of Project You Created"), _("a project you created has a new member"),
default=2)
notification.create_notice_type("projects_new_project", _("New Project Created"), _("a new project has been created"), default=1)
signals.post_syncdb.connect(create_notice_types, sender=notification)
else:
print "Skipping creation of NoticeTypes as notification app not found"
|
apache/incubator-allura
|
Allura/allura/model/repo_refresh.py
|
Python
|
apache-2.0
| 23,508
| 0.000468
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from itertools import chain
from cPickle import dumps
from collections import OrderedDict
import bson
import tg
import jinja2
from pylons import tmpl_context as c, app_globals as g
from ming.base import Object
from ming.orm import mapper, session, ThreadLocalORMSession
from allura.lib import utils
from allura.lib import helpers as h
from allura.model.repo import CommitDoc, TreeDoc, TreesDoc, DiffInfoDoc
from allura.model.repo import CommitRunDoc
from allura.model.repo import Commit, Tree, LastCommit, ModelCache
from allura.model.index import ArtifactReferenceDoc, ShortlinkDoc
from allura.model.auth import User
from allura.model.timeline import TransientActor
log = logging.getLogger(__name__)
QSIZE = 100
def refresh_repo(repo, all_commits=False, notify=True, new_clone=False):
all_commit_ids = commit_ids = list(repo.all_commit_ids())
if not commit_ids:
# the repo is empty, no need to continue
return
new_commit_ids = unknown_commit_ids(commit_ids)
stats_log = h.log_action(log, 'commit')
for ci in new_commit_ids:
stats_log.info(
'',
meta=dict(
module='scm-%s' % repo.repo_id,
read='0'))
if not all_commits:
# Skip commits that are already in the DB
commit_ids = new_commit_ids
log.info('Refreshing %d commits on %s', len(commit_ids), repo.full_fs_path)
# Refresh commits
seen = set()
for i, oid in enumerate(commit_ids):
repo.refresh_commit_info(oid, seen, not all_commits)
if (i + 1) % 100 == 0:
log.info('Refresh commit info %d: %s', (i + 1), oid)
refresh_commit_repos(all_commit_ids, repo)
# Refresh child references
for i, oid in enumerate(commit_ids):
ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
refresh_children(ci)
if (i + 1) % 100 == 0:
log.info('Refresh child info %d for parents of %s',
(i + 1), ci._id)
if repo._refresh_precompute:
# Refresh commit runs
commit_run_ids = commit_ids
# Check if the CommitRuns for the repo are in a good state by checking for
# a CommitRunDoc that contains the last known commit. If there isn't one,
# the CommitRuns for this repo are in a bad state - rebuild them
# entirely.
if commit_run_ids != all_commit_ids:
last_commit = last_known_commit_id(all_commit_ids, new_commit_ids)
log.info('Last known commit id: %s', last_commit)
if not CommitRunDoc.m.find(dict(commit_ids=last_commit)).count():
log.info('CommitRun incomplete, rebuilding with all commits')
commit_run_ids = all_commit_ids
log.info('Starting CommitRunBuilder for %s', repo.full_fs_path)
rb = CommitRunBuilder(commit_run_ids)
rb.run()
rb.cleanup()
log.info('Finished CommitRunBuilder for %s', repo.full_fs_path)
# Refresh trees
# Like diffs below, pre-computing trees for some SCMs is too expensive,
# so we skip it here, then do it on-demand later.
if repo._refresh_precompute:
cache = {}
for i, oid in enumerate(commit_ids):
ci = CommitDoc.m.find(dict(_id=oid), validate=False).next()
cache = refresh_commit_trees(ci, cache)
if (i + 1) % 100 == 0:
log.info('Refresh commit trees %d: %s', (i + 1), ci._id)
# Compute diffs
cache = {}
# For some SCMs, we don't want to pre-compute the diffs because that
# would be too expensive, so we skip them here and do them on-demand
# with caching.
if repo._refresh_precompute:
for i, oid in enumerate(commit_ids):
cid = CommitDoc.m.find(dict(_id=oid), validate=False).next()
ci = mapper(Commit).create(cid, dict(instrument=False))
ci.set_context(repo)
compute_diffs(repo._id, cache, ci)
if (i + 1) % 100 == 0:
log.info('Compute diffs %d: %s', (i + 1), ci._id)
if repo._refresh_precompute:
model_cache = ModelCache()
lcid_cache = {}
for i, oid in enumerate(reversed(commit_ids)):
ci = model_cache.get(Commit, dict(_id=oid))
ci.set_context(repo)
compute_lcds(ci, model_cache, lcid_cache)
ThreadLocalORMSession.flush_all()
if (i + 1) % 100 == 0:
log.info('Compute last commit info %d: %s', (i + 1), ci._id)
if not all_commits and not new_clone:
for commit in commit_ids:
new = repo.commit(commit)
user = User.by_email_address(new.committed.email)
if user is None:
user = User.by_username(new.committed.name)
if user is not None:
g.statsUpdater.newCommit(new, repo.app_config.project, user)
actor = user or TransientActor(
activity_name=new.committed.name or new.committed.email)
g.director.create_activity(actor, 'committed', new,
related_nodes=[repo.app_config.project],
tags=['commit', repo.tool.lower()])
log.info('Refresh complete for %s', repo.full_fs_path)
g.post_event('repo_refreshed', len(commit_ids),
all_commits, new_clone)
# Send notifications
if notify:
send_notifications(repo, commit_ids)
def refresh_commit_trees(ci, cache):
'''Refresh the list of trees included within a commit'''
if ci.tree_id is None:
return cache
trees_doc = TreesDoc(dict(
_id=ci._id,
tree_ids=list(trees(ci.tree_id, cache))))
trees_doc.m.save(safe=False)
new_cache = dict(
(oid, cache[oid])
for oid in trees_doc.tree_ids)
return new_cache
def refresh_commit_repos(all_commit_ids, repo):
'''Refresh the list of repositories within which a set of commits are
contained'''
for oids in utils.chunked_iter(all_commit_ids, QSIZE):
for ci in CommitDoc.m.find(dict(
_id={'$in': list(oids)},
repo_ids={'$ne': repo._id})):
oid = ci._id
ci.repo_ids.append(repo._id)
index_id = 'allura.model.repo.Commit#' + oid
ref = ArtifactReferenceDoc(dict(
_id=index_id,
artifact_reference=dict(
cls=bson.Binary(dumps(Commit)),
project_id=repo.app.config.project_id,
app_config_id=repo.app.config._id,
artifact_id=oid),
references=[]))
link0 = ShortlinkDoc(dict(
_id=bson.ObjectId(),
ref_id=index_id,
project_id=repo.app.config.project_id,
app_config_id=repo.app.config._id,
link=repo.shorthand_for_commit(oid)[1:-1],
url=repo.url_for_commit(oid)))
# Always create a link for the full commit ID
link1 = ShortlinkDoc(dict(
_id=bson.ObjectId(),
ref_id=index_id,
project_id=repo.app.config.project_id,
app_config_id=repo.app.config._id,
link=oid,
url=repo.url_for_commit(oid)))
|
klahnakoski/intermittents
|
pyLibrary/testing/fuzzytestcase.py
|
Python
|
mpl-2.0
| 5,448
| 0.006057
|
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from collections import Mapping
import unittest
from pyLibrary import dot
from pyLibrary.debugs.logs import Log
from pyLibrary.dot import coalesce, Dict, literal_field
from pyLibrary.maths import Math
from pyLibrary.dot import wrap
from pyLibrary.strings import expand_template
class FuzzyTestCase(unittest.TestCase):
"""
COMPARE STRUCTURE AND NUMBERS!
ONLY THE ATTRIBUTES IN THE expected STRUCTURE ARE TESTED TO EXIST
EXTRA ATTRIBUTES ARE IGNORED.
NUMBERS ARE MATCHED BY ...
* places (UP TO GIVEN SIGNIFICANT DIGITS)
* digits (UP TO GIVEN DECIMAL PLACES, WITH NEGATIVE MEANING LEFT-OF-UNITS)
* delta (MAXIMUM ABSOLUTE DIFFERENCE FROM expected)
"""
def __init__(self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
self.default_places=15
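# Hedged illustration (not part of the original class): the keyword argument
# picks the comparison mode described in the class docstring above; the
# values are assumptions for the example only.
#   self.assertAlmostEqual(actual, expected, places=6)   # significant digits
#   self.assertAlmostEqual(actual, expected, digits=2)   # decimal places (negative = left of units)
#   self.assertAlmostEqual(actual, expected, delta=0.5)  # maximum absolute difference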
def set_default_places(self, places):
"""
WHEN COMPARING float, HOW MANY DIGITS ARE SIGNIFICANT BY DEFAULT
"""
self.default_places=places
def assertAlmostEqual(self, test_value, expected, msg=None, digits=None, places=None, delta=None):
if delta or digits:
assertAlmostEqual(test_value, expected, msg=msg, digits=digits, places=places, delta=delta)
else:
assertAlmostEqual(test_value, expected, msg=msg, digits=digits, places=coalesce(places, self.default_places), delta=delta)
def assertEqual(self, test_value, expected, msg=None, digits=None, places=None, delta=None):
self.assertAlmostEqual(test_value, expected, msg=msg, digits=digits, places=places, delta=delta)
def zipall(*args):
"""
LOOP THROUGH LONGEST OF THE LISTS, None-FILL THE REMAINDER
"""
iters = [a.__iter__() for a in args]
def _next(_iter):
try:
return False, _iter.next()
except:
return True, None
while True:
output = zip(*(_next(a) for a in iters))
if all(output[0]):
return
else:
yield output[1]
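# Hedged doctest-style sketch (not in the original source) of the
# None-padding behaviour that zipall's docstring describes:
#   list(zipall([1, 2, 3], ['a']))  ->  [(1, 'a'), (2, None), (3, None)]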
def assertAlmostEqual(test, expected, digits=None, places=None, msg=None, delta=None):
show_detail=True
try:
if test==None and expected==None:
return
elif isinstance(expected, Mapping):
for k, v2 in expected.items():
if isinstance(k, basestring):
v1 = dot.get_attr(test, literal_field(k))
else:
show_detail=False
v1 = test[k]
assertAlmostEqual(v1, v2, msg=msg, digits=digits, places=places, delta=delta)
elif isinstance(test, set) and isinstance(expected, set):
if test != expected:
Log.error("Sets do not match")
elif hasattr(test, "__iter__") and hasattr(expected, "__iter__"):
for a, b in zipall(test, expected):
assertAlmostEqual(a, b, msg=msg, digits=digits, places=places, delta=delta)
else:
assertAlmostEqualValue(test, expected, msg=msg, digits=digits, places=places, delta=delta)
except Exception, e:
Log.error(
"{{test|json}} does not match expected {{expected|json}}",
test=test if show_detail else "[can not show]",
expected=expected if show_detail else "[can not show]",
cause=e
)
def assertAlmostEqualValue(test, expected, digits=None, places=None, msg=None, delta=None):
"""
Snagged from unittest/case.py, then modified (Aug2014)
"""
if test == expected:
# shortcut
return
if not Math.is_number(expected):
# SOME SPECIAL CASES, EXPECTING EMPTY CONTAINERS IS THE SAME AS EXPECTING NULL
if isinstance(expected, list) and len(expected)==0 and test == None:
return
if isinstance(expected, Mapping) and not expected.keys() and test == None:
return
if test != expected:
raise AssertionError(expand_template("{{test}} != {{expected}}", locals()))
return
num_param = 0
if digits != None:
num_param += 1
if places != None:
num_param += 1
if delta != None:
num_param += 1
if num_param>1:
raise TypeError("specify only one of digits, places or delta")
if digits is not None:
try:
diff = Math.log10(abs(test-expected))
if diff < digits:
return
except Exception, e:
pass
standardMsg = expand_template("{{test}} != {{expected}} within {{digits}} decimal places", locals())
elif delta is not None:
if abs(test - expected) <= delta:
return
standardMsg = expand_template("{{test}} != {{expected}} within {{delta}} delta", locals())
else:
if places is None:
places = 15
try:
diff = Math.log10(abs(test-expected))
if diff < Math.ceiling(Math.log10(abs(test)))-places:
return
except Exception, e:
pass
standardMsg = expand_template("{{test|json}} != {{expected|json}} within {{places}} places", locals())
raise AssertionError(coalesce(msg, "") + ": (" + standardMsg + ")")
|
diegojromerolopez/djanban
|
src/djanban/apps/recurrent_cards/migrations/0002_auto_20170602_1726.py
|
Python
|
mit
| 2,903
| 0.0031
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-06-02 15:26
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('boards', '0071_auto_20170530_1711'),
('recurrent_cards', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='weeklyrecurrentcard',
name='deadline',
),
migrations.RemoveField(
model_name='weeklyrecurrentcard',
name='move_on_deadline_to_list',
),
migrations.AddField(
model_name='weeklyrecurrentcard',
name='move_to_list_when_day_ends',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='moved_recurrent_cards', to='boards.List', verbose_name='Automatically move the card to this list when the day ends'),
),
migrations.AlterField(
model_name='recurrentcard',
name='estimated_time',
field=models.DecimalField(blank=True, decimal_places=2, default=None, help_text='Estimated time that will be spent in this card', max_digits=10, null=True, verbose_name='Estimated spent time of this recurrent card'),
),
migrations.AlterField(
model_name='weeklyrecurrentcard',
name='create_on_fridays',
field=models.BooleanField(default=False, verbose_name='Create card on fridays'),
),
migrations.AlterField(
model_name='weeklyrecurrentcard',
name='create_on_mondays',
field=models.BooleanField(default=False, verbose_name='Create card on mondays'),
),
migrations.AlterField(
model_name='weeklyrecurrentcard',
name='create_on_saturdays',
field=models.BooleanField(default=False, verbose_name='Create card on saturdays'),
),
migrations.AlterField(
model_name='weeklyrecurrentcard',
name='create_on_sundays',
field=models.BooleanField(default=False, verbose_name='Create card on sundays'),
),
migrations.AlterField(
model_name='weeklyrecurrentcard',
name='create_on_thursdays',
field=models.BooleanField(default=False, verbose_name='Create card on thursdays'),
),
migrations.AlterField(
model_name='weeklyrecurrentcard',
name='create_on_tuesdays',
field=models.BooleanField(default=False, verbose_name='Create card on tuesdays'),
),
migrations.AlterField(
model_name='weeklyrecurrentcard',
name='create_on_wednesdays',
field=models.BooleanField(default=False, verbose_name='Create card on wednesdays'),
),
]
|
vaishnavsm/spardha17
|
spardha/spardha/wsgi.py
|
Python
|
gpl-3.0
| 392
| 0
|
"""
WSGI config for spardha project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "spardha.settings")
application = get_wsgi_application()
|
Valeureux/wezer-exchange
|
__unreviewed__/project_assignment/__openerp__.py
|
Python
|
agpl-3.0
| 1,833
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Buron and Valeureux Copyright Valeureux.org
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Project Assignment',
'version': '1.0',
'category': 'Project',
'author': 'Yannick Buron and Valeureux',
'license': 'AGPL-3',
'description': """
Project Assignment
==================
Automatically reassign task to specified partner depending on stage
-------------------------------------------------------------------
* For each stage, the partner can be specified in stage,
then in project and finally task itself
* We use partner instead of user for more flexibility
* Use base inherit config
""",
'website': 'http://www.wezer.org',
'depends': [
'base_recursive_model',
'project',
],
'data': [
'security/ir.model.access.csv',
'project_assignment_view.xml'
],
'demo': ['data/project_assignment_demo.xml'],
'test': ['tests/project_assignment_test.yml'],
'installable': True,
}
|
sujoykroy/motion-picture
|
editor/MotionPicture/commons/camera3d.py
|
Python
|
gpl-3.0
| 18,923
| 0.006394
|
import numpy, cairo, math
from scipy import ndimage
from .object3d import Object3d
from .point3d import Point3d
from .polygon3d import Polygon3d
from .draw_utils import *
from .colors import hsv_to_rgb, rgb_to_hsv
def surface2array(surface):
data = surface.get_data()
if not data:
return None
rgb_array = 0+numpy.frombuffer(surface.get_data(), numpy.uint8)
rgb_array.shape = (surface.get_height(), surface.get_width(), 4)
#rgb_array = rgb_array[:,:,[2,1,0,3]]
#rgb_array = rgb_array[:,:, :3]
return rgb_array
class Camera3d(Object3d):
def __init__(self, viewer=(0,0,0)):
super(Camera3d, self).__init__()
self.viewer = Point3d.create_if_needed(viewer)
self.sorted_items = []
self.mat_params = None
self.hit_alpha = 0
self.convolve_kernel = 0
self.hsv_coef = None
def project_point_values(self, point_values):
point_values = self.forward_transform_point_values(point_values)
return self.viewer_point_values(point_values)
def viewer_point_values(self, point_values):
if self.viewer.get_z() != 0:
ratio = self.viewer.get_z()/point_values[:, 2]
x_values = (ratio*point_values[:,0]) - self.viewer.get_x()
y_values = (ratio*point_values[:,1]) - self.viewer.get_y()
return numpy.stack((x_values, y_values), axis=1)
else:
return point_values[:, [0,1]]
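# Hedged numeric sketch (not in the original source) of the perspective
# divide performed above: with viewer=(0, 0, 10), a transformed point at
# (x, y, z)=(2, 4, 20) gives ratio = 10/20 = 0.5 and projects to
# (0.5*2 - 0, 0.5*4 - 0) = (1, 2); a viewer z of 0 simply drops z.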
def reverse_project_point_value(self, point_value, z_depth):
real_point_value = Point3d(x=point_value[0], y=point_value[1], z=z_depth)
if self.viewer.get_z() != 0:
ratio = z_depth/self.viewer.get_z()
real_point_value.values[0] = (point_value[0] + self.viewer.get_x())/ratio
real_point_value.values[1] = (point_value[1] + self.viewer.get_y())/ratio
real_point_value.values = self.reverse_transform_point_values(real_point_value.values)
return real_point_value.values
def sort_items(self, items=None):
polygons = []
if items is None:
polygons.extend(Polygon3d.Items)
else:
for item in items:
if not hasattr(item, "polygons"):
continue
polygons.extend(item.polygons)
self.sorted_items = sorted(polygons, key=self.z_depth_sort_key)
self.poly_face_params = None
for item in self.sorted_items:
params = numpy.array([item.plane_params_normalized[self]])
if self.poly_face_params is None:
self.poly_face_params = params
else:
self.poly_face_params = numpy.concatenate(
(self.poly_face_params, params), axis=0)
def z_depth_sort_key(self, ob):
return ob.z_depths[self]
def get_image_canvas(self, left, top, width, height, border_color=None, border_width=None, scale=.5):
left = math.floor(left)
top = math.floor(top)
width = int(width)
height = int(height)
if border_width>0:
border_width = max(border_width*scale, 1)
min_depth = -100000
canvas_width = int(width*scale)
canvas_height = int(height*scale)
pixel_count = canvas_width*canvas_height
canvas_surf = cairo.ImageSurface(cairo.FORMAT_ARGB32, canvas_width, canvas_height)
canvas_surf_array = surface2array(canvas_surf)
canvas_z_depths =numpy.repeat(min_depth, pixel_count)
canvas_z_depths = canvas_z_depths.astype("f").reshape(canvas_height, canvas_width)
obj_pad = max(border_width*4, 0)
for object_3d in self.sorted_items:
if object_3d.border_width:
pad = max(obj_pad, object_3d.border_width*2)
else:
pad = obj_pad
brect = object_3d.bounding_rect[self]
bleft, btop = int(math.ceil(brect[0][0])), int(math.ceil(brect[0][1]))
bright, bbottom = int(math.ceil(brect[1][0])), int(math.ceil(brect[1][1]))
if bleft>left+width or bright<left or \
btop>top+height or bbottom<top:
continue
bleft -= pad
bright += pad
btop -= pad
bbottom += pad
sleft = max(left, bleft)
stop = max(top, btop)
sright = min(left+width, bright)
sbottom = min(top+height, bbottom)
#if sleft>=sright or stop>=sbottom:
# continue
sw = int(math.ceil(sright-sleft))
sh = int(math.ceil(sbottom-stop))
if sw<=0 or sh<=0:
continue
poly_canvas_width = int(sw*scale)
poly_canvas_height = int(sh*scale)
cleft = int((sleft-left)*scale)
cright = min(int((sright-left)*scale), canvas_width)
ctop = int((stop-top)*scale)
cbottom = int((sbottom-top)*scale)
if (ctop-cbottom!=poly_canvas_height):
cbottom=poly_canvas_height+ctop
if cbottom>canvas_height:
cbottom = canvas_height
ctop = cbottom-poly_canvas_height
if (cright-cleft!=poly_canvas_width):
cright=poly_canvas_width+cleft
if cright>canvas_width:
cright = canvas_width
cleft = cright-poly_canvas_width
#print "poly_canvas_height", poly_canvas_height, "poly_canvas_width", poly_canvas_width
#print "cbottom-ctop", cbottom-ctop, "cright-cleft", cright-cleft
#print "canvas_width, canvas_height", canvas_width, canvas_height
#print "cbottom, ctop", cbottom, ctop, "cright, cleft", cright, cleft
poly_surf = cairo.ImageSurface(cairo.FORMAT_ARGB32, poly_canvas_width, poly_canvas_height)
poly_ctx = cairo.Context(poly_surf)
poly_ctx.scale(scale, scale)
set_default_line_style(poly_ctx)
poly_ctx.rectangle(0, 0, sw, sh)
poly_ctx.set_source_rgba(1, 0, 0, 0)
poly_ctx.fill()
poly_ctx.translate(-bleft, -btop)
poly_ctx.translate(-(sleft-bleft), -(stop-btop))
object_3d.draw(poly_ctx, self, border_color=border_color, border_width=border_width)
surfacearray = surface2array(poly_surf)
if surfacearray is None:
continue
area_cond = (surfacearray[:, :, 3]<=self.hit_alpha)
xs = numpy.linspace(sleft, sright, poly_canvas_width)
xcount = len(xs)
ys = numpy.linspace(stop, sbottom, poly_canvas_height)
ycount = len(ys)
xs, ys = numpy.meshgrid(xs, ys)
coords = numpy.vstack((xs.flatten(), ys.flatten()))
coords = coords.T#.reshape((ycount, xcount, 2))
coords.shape = (xcount*ycount, 2)
vz = self.viewer.get_z()
if vz == 0:
coords_depths = numpy.matmul(object_3d.plane_params_normalized[self],
numpy.concatenate((coords.T, [numpy.ones(coords.shape[0])]), axis=0))
else:
vx = self.viewer.get_x()
vy = self.viewer.get_y()
pp = object_3d.plane_params_normalized[self]
coords_depths = pp[2]*vz/(-pp[0]*(coords[:, 0]+vx)-pp[1]*(coords[:, 1]+vy)+vz)
coords_depths.shape = (ycount, xcount)
coords_depths.shape = (ycount, xcount)
blank_depths = numpy.repeat(min_depth+1, ycount*xcount)
blank_depths.shape = coords_depths.shape
coords_depths = numpy.where(area_cond, blank_depths, coords_depths)
pre_depths = canvas_z_depths[ctop:cbottom, cleft:cright]
pre_depths.shape = (cbottom-ctop, cright-cleft)
depths_cond = pre_depths<coords_depths#highier depths come at top
new_depths = numpy.where(depths_cond, coords_depths, pre_depths)
canvas_z_depths[ctop:cbottom, cleft:cright] = new_depths
pre_colors = canvas_surf_array[ctop:cbottom, cleft:cright, :]
pre_colors.shape = (cbottom-ctop, cright-cleft, 4)
|
geektoni/Influenza-Like-Illness-Predictor
|
data_analysis/filter_news.py
|
Python
|
mit
| 2,317
| 0.036254
|
"""Generate year files with news counts
Usage:
filter_news.py <directory> <output> <lang>
Options:
-h, --help
"""
from docopt import docopt
from os import listdir
from os.path import isfile, join, getsize
import datetime
from tqdm import *
import pandas as pd
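# Hedged invocation example (not in the original script); the directory,
# output file and language code are assumptions for illustration only:
#   python filter_news.py ./news_csvs weekly_counts.csv it
# Each CSV in <directory> is expected to be named like
# <prefix>_<month>_<day>_<year>[_...] and to contain a 'lang_detected' column.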
def find_index(id, lis):
for i in range(0, len(lis)):
if id == lis[i]:
return i
return -1
if __name__ == "__main__":
# Parse the command line
args = docopt(__doc__)
# Array with the weeks we are considering
weeks = [42,43,44,45,46,47,48,49,50,51,52,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15]
# Final count dictionary
news_count = {}
# Get only the files in the directory which have a dimension greater than zero
onlyfiles = [f for f in listdir(args["<directory>"])
if isfile(join(args["<directory>"], f)) and getsize(join(args["<directory>"], f))>0]
if (len(onlyfiles) == 0):
print("No file with size greater the zero found! Exiting.")
exit(-1)
# Loop over all the files and parse them
for file in tqdm(onlyfiles):
# Split the filename and get the day/month/year
file_name = file.split("_")
day = file_name[2]
month = file_name[1]
year = file_name[3]
# Compute the week number
week_number = datetime.date(int(year), int(month), int(day)).isocalendar()[1]
# Read and parse the file only if it is in the week range we are considering
if week_number in weeks:
# Read the file
file = pd.read_csv(args["<directory>"]+"/"+file)
# Count how many news items we have, considering only those in the requested language
total_news = file[file.lang_detected == args["<lang>"]].count()
# If that year column is still empty, create it and set it to zero
if news_count.get(year, []) == []:
news_count[year] = [0 for x in range(0, len(weeks))]
# Increment the week count
news_count[year][find_index(week_number, weeks)] += int(total_news["lang_detected"])
# Generate the index for the future dataframe
df_index = []
# Add a zero in front of number less than 10
for i in weeks:
if i < 10:
number = "0" + str(i)
else:
number = str(i)
df_index.append(number)
# Generate the dataframe
final_df = pd.DataFrame(news_count)
final_df.set_index([df_index])
# Print the dataframe to show the result
print(final_df)
# Save it to file
final_df.to_csv(args["<output>"], index_label="Week")
|
LaoZhongGu/kbengine
|
kbe/src/lib/python/Lib/test/test_capi.py
|
Python
|
lgpl-3.0
| 9,478
| 0.010762
|
# Run the _testcapi module tests (tests for the Python/C API): by defn,
# these are all functions _testcapi exports whose name begins with 'test_'.
from __future__ import with_statement
import os
import pickle
import random
import subprocess
import sys
import time
import unittest
from test import support
try:
import _posixsubprocess
except ImportError:
_posixsubprocess = None
try:
import _thread
import threading
except ImportError:
_thread = None
threading = None
import _testcapi
def testfunction(self):
"""some doc"""
return self
class InstanceMethod:
id = _testcapi.instancemethod(id)
testfunction = _testcapi.instancemethod(testfunction)
class CAPITest(unittest.TestCase):
def test_instancemethod(self):
inst = InstanceMethod()
self.assertEqual(id(inst), inst.id())
self.assertTrue(inst.testfunction() is inst)
self.assertEqual(inst.testfunction.__doc__, testfunction.__doc__)
self.assertEqual(InstanceMethod.testfunction.__doc__, testfunction.__doc__)
InstanceMethod.testfunction.attribute = "test"
self.assertEqual(testfunction.attribute, "test")
self.assertRaises(AttributeError, setattr, inst.testfunction, "attribute", "test")
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_no_FatalError_infinite_loop(self):
with support.suppress_crash_popup():
p = subprocess.Popen([sys.executable, "-c",
'import _testcapi;'
'_testcapi.crash_no_current_thread()'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(out, err) = p.communicate()
self.assertEqual(out, b'')
# This used to cause an infinite loop.
self.assertEqual(err.rstrip(),
b'Fatal Python error:'
b' PyThreadState_Get: no current thread')
def test_memoryview_from_NULL_pointer(self):
self.assertRaises(ValueError, _testcapi.make_memoryview_from_NULL_pointer)
@unittest.skipUnless(_posixsubprocess, '_posixsubprocess required for this test.')
def test_seq_bytes_to_charp_array(self):
# Issue #15732: crash in _PySequence_BytesToCharpArray()
class Z(object):
def __len__(self):
return 1
self.assertRaises(TypeError, _posixsubprocess.fork_exec,
1,Z(),3,[1, 2],5,6,7,8,9,10,11,12,13,14,15,16,17)
# Issue #15736: overflow in _PySequence_BytesToCharpArray()
class Z(object):
def __len__(self):
return sys.maxsize
def __getitem__(self, i):
return b'x'
self.assertRaises(MemoryError, _posixsubprocess.fork_exec,
1,Z(),3,[1, 2],5,6,7,8,9,10,11,12,13,14,15,16,17)
@unittest.skipUnless(_posixsubprocess, '_posixsubprocess required for this test.')
def test_subprocess_fork_exec(self):
class Z(object):
def __len__(self):
return 1
# Issue #15738: crash in subprocess_fork_exec()
self.assertRaises(TypeError, _posixsubprocess.fork_exec,
Z(),[b'1'],3,[1, 2],5,6,7,8,9,10,11,12,13,14,15,16,17)
@unittest.skipUnless(threading, 'Threading required for this test.')
class TestPendingCalls(unittest.TestCase):
def pendingcalls_submit(self, l, n):
def callback():
#this function can be interrupted by thread switching so let's
#use an atomic operation
l.append(None)
for i in range(n):
time.sleep(random.random()*0.02) #0.01 secs on average
#try submitting callback until successful.
#rely on regular interrupt to flush queue if we are
#unsuccessful.
while True:
if _testcapi._pending_threadfunc(callback):
break;
def pendingcalls_wait(self, l, n, context = None):
#now, stick around until l[0] has grown to 10
count = 0;
while len(l) != n:
#this busy loop is where we expect to be interrupted to
#run our callbacks. Note that callbacks are only run on the
#main thread
if False and support.verbose:
print("(%i)"%(len(l),),)
for i in range(1000):
a = i*i
if context and not context.event.is_set():
continue
count += 1
self.assertTrue(count < 10000,
"timeout waiting for %i callbacks, got %i"%(n, len(l)))
if False and support.verbose:
print("(%i)"%(len(l),))
def test_pendingcalls_threaded(self):
#do every callback on a separate thread
n = 32 #total callbacks
threads = []
class foo(object):pass
context = foo()
context.l = []
context.n = 2 #submits per thread
context.nThreads = n // context.n
context.nFinished = 0
context.lock = threading.Lock()
context.event = threading.Event()
for i in range(context.nThreads):
t = threading.Thread(target=self.pendingcalls_thread, args = (context,))
t.start()
threads.append(t)
self.pendingcalls_wait(context.l, n, context)
for t in threads:
t.join()
def pendingcalls_thread(self, context):
try:
self.pendingcalls_submit(context.l, context.n)
finally:
with context.lock:
context.nFinished += 1
nFinished = context.nFinished
if False and support.verbose:
print("finished threads: ", nFinished)
if nFinished == context.nThreads:
context.event.set()
def test_pendingcalls_non_threaded(self):
#again, just using the main thread, likely they will all be dispatched at
#once. It is ok to ask for too many, because we loop until we find a slot.
#the loop can be interrupted to dispatch.
#there are only 32 dispatch slots, so we go for twice that!
l = []
n = 64
self.pendingcalls_submit(l, n)
self.pendingcalls_wait(l, n)
def test_subinterps(self):
import builtins
r, w = os.pipe()
code = """if 1:
import sys, builtins, pickle
with open({:d}, "wb") as f:
pickle.dump(id(sys.modules), f)
pickle.dump(id(builtins), f)
""".format(w)
with open(r, "rb") as f:
ret = _testcapi.run_in_subinterp(code)
self.assertEqual(ret, 0)
self.assertNotEqual(pickle.load(f), id(sys.modules))
self.assertNotEqual(pickle.load(f), id(builtins))
# Bug #6012
class Test6012(unittest.TestCase):
def test(self):
self.assertEqual(_testcapi.argparsing("Hello", "World"), 1)
class EmbeddingTest(unittest.TestCase):
@unittest.skipIf(
sys.platform.startswith('win'),
"test doesn't work under Windows")
def test_subinterps(self):
# XXX only tested under Unix checkouts
basepath = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
oldcwd = os.getcwd()
# This is needed otherwise we get a fatal error:
# "Py_Initialize: Unable to get the locale encoding
# LookupError: no codec search functions registered: can't find encoding"
os.chdir(basepath)
try:
exe = os.path.join(basepath, "Modules", "_testembed")
if not os.path.exists(exe):
self.skipTest("%r doesn't exist" % exe)
p = subprocess.Popen([exe],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(out, err) = p.communicate()
self.assertEqual(p.returncode, 0,
"bad returncode %d, stderr is %r" %
(p.returncode, err))
if support.verbose:
print()
|
MediaKraken/mkarchive
|
pipeline-deploy-os-server-ubuntu.py
|
Python
|
gpl-2.0
| 5,822
| 0.010649
|
'''
Copyright (C) 2016 Quinn D Granfor <spootdev@gmail.com>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
version 2, as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License version 2 for more details.
You should have received a copy of the GNU General Public License
version 2 along with this program; if not, write to the Free
Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA.
'''
from __future__ import absolute_import, division, print_function, unicode_literals
import logging # pylint: disable=W0611
import time
import sys
sys.path.append('.')
sys.path.append('../MediaKraken-PyLint') # for jenkins server
sys.path.append('../MediaKraken-PyLint/build_code/jenkins/')
import pipeline_packages_list
from common import common_network_ssh
from common import common_network_vm_proxmox
###
# Will be used to deploy ubuntu server
###
JENKINS_BUILD_VIM_LXC = 103
JENKINS_BUILD_VIM_LNX_IP = '10.0.0.90'
JENKINS_DEPLOY_VIM_LXC = 108
JENKINS_DEPLOY_VIM_LNX_IP = '10.0.0.101'
# create prox class instance to use
PROX_CONNECTION = common_network_vm_proxmox.CommonNetworkProxMox('10.0.0.190', 'root@pam',\
'jenkinsbuild')
# check status of ubuntu build vm
if PROX_CONNECTION.com_net_prox_node_lxc_status('pve',\
JENKINS_BUILD_VIM_LXC)['data']['status'] == 'stopped':
# start up the vm
PROX_CONNECTION.com_net_prox_node_lxc_start('pve', JENKINS_BUILD_VIM_LXC)
time.sleep(120) # wait two minutes for box to boot
# check status of ubuntu deploy vm
if PROX_CONNECTION.com_net_prox_node_lxc_status('pve',\
JENKINS_DEPLOY_VIM_LXC)['data']['status'] == 'stopped':
# start up the vm
PROX_CONNECTION.com_net_prox_node_lxc_start('pve', JENKINS_DEPLOY_VIM_LXC)
time.sleep(120) # wait two minutes for box to boot
# connect to server via ssh
SSH_DEPLOY = common_network_ssh.CommonNetworkSSH(JENKINS_DEPLOY_VIM_LNX_IP,\
'metaman', 'metaman')
SSH_BUILD = common_network_ssh.CommonNetworkSSH(JENKINS_BUILD_VIM_LNX_IP,\
'metaman', 'metaman')
# TODO rollback snap to base?
# setup directories needed for app
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken')
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken/backups')
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken/bin')
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken/cache')
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken/conf')
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken/key')
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken/log')
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken/passwordmeter')
SSH_DEPLOY.com_net_ssh_run_command('mkdir mediakraken/passwordmeter/res')
SSH_DEPLOY.com_net_ssh_run_command('cd mediakraken')
# install servers deps
# way too many deps, so install ffmpeg to stomp over with compiled version
SSH_DEPLOY.com_net_ssh_run_sudo_command('sudo apt-get -y install postgresql ffmpeg'\
' libva-drm1 libva-x11-1 libsmbclient nfs-common nginx redis-server'\
' cifs-utils')
# libhdhomerun
# scp ffmpeg
SSH_BUILD.com_net_ssh_run_sudo_command('sudo sshpass -p \'metaman\''\
' scp -o StrictHostKeyChecking=no /home/metaman/bin/ff*'\
' metaman@%s:/home/metaman/.' % JENKINS_DEPLOY_VIM_LNX_IP)
SSH_DEPLOY.com_net_ssh_run_sudo_command('sudo mv /home/metaman/ff* /usr/bin/.')
SSH_DEPLOY.com_net_ssh_run_sudo_command('sudo ldconfig')
# prep files to scp
SSH_BUILD.com_net_ssh_run_command('mkdir /home/metaman/dist/xfer')
SSH_BUILD.com_net_ssh_run_command('rm -Rf /home/metaman/dist/xfer/*')
# move all programs
for app_to_build in pipeline_packages_list.PIPELINE_APP_LIST:
SSH_BUILD.com_net_ssh_run_command('rsync -r /home/metaman/dist/' + app_to_build\
+ '/ /home/metaman/dist/xfer/.')
# scp actual programs
SSH_BUILD.com_net_ssh_run_sudo_command('sudo sshpass -p \'metaman\''\
' scp -r -o StrictHostKeyChecking=no /home/metaman/dist/xfer/*'\
' metaman@%s:/home/metaman/mediakraken/.' % JENKINS_DEPLOY_VIM_LNX_IP)
# scp the password common
SSH_BUILD.com_net_ssh_run_sudo_command('sudo sshpass -p \'metaman\''\
' scp -r -o StrictHostKeyChecking=no /home/metaman/MediaKraken_Submodules/passwordmeter/'\
'passwordmeter/res/common.txt'\
' metaman@%s:/home/metaman/mediakraken/passwordmeter/res/.' % JENKINS_DEPLOY_VIM_LNX_IP)
# copy over config files
SSH_BUILD.com_net_ssh_run_sudo_command('sudo sshpass -p \'metaman\''\
' scp -o StrictHostKeyChecking=no /home/metaman/MediaKraken_Deployment/'\
'MediaKraken.ini metaman@%s:/home/metaman/mediakraken/.' % JENKINS_DEPLOY_VIM_LNX_IP)
# copy postgresl user file
SSH_BUILD.com_net_ssh_run_sudo_command('sudo sshpass -p \'metaman\''\
' scp -o StrictHostKeyChecking=no /home/metaman/MediaKraken_Deployment/'\
'build_code/jenkins/pipeline-deploy-os/pipeline-deploy-os-server-pgsql-user-ubuntu.sh'\
' metaman@%s:/home/metaman/mediakraken/.' % JENKINS_DEPLOY_VIM_LNX_IP)
# create the postgresql user
SSH_DEPLOY.com_net_ssh_run_sudo_command('sudo /home/metaman/mediakraken/'\
'pipeline-deploy-os-server-pgsql-user-ubuntu.sh')
# remove user create script
SSH_DEPLOY.com_net_ssh_run_command('rm /home/metaman/mediakraken/'\
'pipeline-deploy-os-server-pgsql-user-ubuntu.sh')
# copy ffmpeg and libs
SSH_BUILD.com_net_ssh_run_sudo_command('sudo sshpass -p \'metaman\''\
' scp -o StrictHostKeyChecking=no /home/metaman/bin/*'\
' metaman@%s:/home/metaman/mediakraken/bin/.' % JENKINS_DEPLOY_VIM_LNX_IP)
SSH_DEPLOY.com_net_ssh_close()
SSH_BUILD.com_net_ssh_close()
|
turekj/iDK
|
tasks/exchange_file_remote_task.py
|
Python
|
gpl-2.0
| 413
| 0.01937
|
import core.task
import urllib2
class ExchangeFileWithRemoteTask(core.task.Task):
def execute_task(self, parameters=None):
self._check_mandatory_parameters(['path', 'remote_path'], parameters)
path =
|
parameters['path']
remote_path = parameters['remote_path']
wit
|
h open(path, 'w+') as file_handle:
response = urllib2.urlopen(remote_path)
contents = response.read()
file_handle.write(contents)
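# Minimal usage sketch (path and URL are illustrative; assumes core.task.Task supplies
# the _check_mandatory_parameters helper used above):
#   ExchangeFileWithRemoteTask().execute_task(
#       {'path': '/tmp/page.html', 'remote_path': 'http://example.com/'})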
|
drpngx/tensorflow
|
tensorflow/contrib/distributions/python/ops/vector_diffeomixture.py
|
Python
|
apache-2.0
| 44,484
| 0.004226
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The VectorDiffeomixture distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.distributions.python.ops import distribution_util
from tensorflow.contrib.distributions.python.ops.bijectors.affine_linear_operator import AffineLinearOperator
from tensorflow.contrib.distributions.python.ops.bijectors.softmax_centered import SoftmaxCentered
from tensorflow.contrib.linalg.python.ops import linear_operator_addition as linop_add_lib
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops.distributions import categorical as categorical_lib
from tensorflow.python.ops.distributions import distribution as distribution_lib
from tensorflow.python.ops.distributions import normal as normal_lib
from tensorflow.python.ops.linalg import linear_operator_diag as linop_diag_lib
from tensorflow.python.ops.linalg import linear_operator_full_matrix as linop_full_lib
from tensorflow.python.ops.linalg import linear_operator_identity as linop_identity_lib
from tensorflow.python.ops.linalg import linear_operator_lower_triangular as linop_tril_lib
from tensorflow.python.util import deprecation
__all__ = [
"VectorDiffeomixture",
"quadrature_scheme_softmaxnormal_gauss_hermite",
"quadrature_scheme_softmaxnormal_quantiles",
]
@deprecation.deprecated(
"2018-10-01",
"The TensorFlow Distributions library has moved to "
"TensorFlow Probability "
"(https://github.com/tensorflow/probability). You "
"should update all references to use `tfp.distributions` "
"instead of `tf.contrib.distributions`.",
warn_once=True)
def quadrature_scheme_softmaxnormal_gauss_hermite(
normal_loc, normal_scale, quadrature_size,
validate_args=False, name=None):
"""Use Gauss-Hermite quadrature to form quadrature on `K - 1` simplex.
A `SoftmaxNormal` random variable `Y` may be generated via
```
Y = SoftmaxCentered(X),
X = Normal(normal_loc, normal_scale)
```
Note: for a given `quadrature_size`, this method is generally less accurate
than `quadrature_scheme_softmaxnormal_quantiles`.
Args:
normal_loc: `float`-like `Tensor` with shape `[b1, ..., bB, K-1]`, B>=0.
The location parameter of the Normal used to construct the SoftmaxNormal.
normal_scale: `float`-like `Tensor`. Broadcastable with `normal_loc`.
The scale parameter of the Normal used to construct the SoftmaxNormal.
quadrature_size: Python `int` scalar representing the number of quadrature
points.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
name: Python `str` name prefixed to Ops created by this class.
Returns:
grid: Shape `[b1, ..., bB, K, quadrature_size]` `Tensor` representing the
convex combination of affine parameters for `K` components.
`grid[..., :, n]` is the `n`-th grid point, living in the `K - 1` simplex.
    probs: Shape `[b1, ..., bB, K, quadrature_size]` `Tensor` representing the
      quadrature probabilities associated with each grid point.
"""
with ops.name_scope(name, "quadrature_scheme_softmaxnormal_gauss_hermite",
[normal_loc, normal_scale]):
normal_loc = ops.convert_to_tensor(normal_loc, name="normal_loc")
dt = normal_loc.dtype.base_dtype
normal_scale = ops.convert_to_tensor(
normal_scale, dtype=dt, name="normal_scale")
normal_scale = maybe_check_quadrature_param(
normal_scale, "normal_scale", validate_args)
grid, probs = np.polynomial.hermite.hermgauss(deg=quadrature_size)
grid = grid.astype(dt.dtype.as_numpy_dtype)
probs = probs.astype(dt.dtype.as_numpy_dtype)
probs /= np.linalg.norm(probs, ord=1, keepdims=True)
probs = ops.convert_to_tensor(probs
|
, name="probs", dtype=dt)
grid = softmax(
-distribution_util.pad(
(normal_loc[..., array_ops.newaxis] +
np.sqrt(2.) * normal_scale[..., array_ops.newaxis] * grid),
axis=-2,
front=True),
axis=-2) # shape: [B, components, deg]
return grid, probs
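# Hedged usage sketch for the function above (shapes are illustrative; evaluating the
# returned tensors assumes eager mode or a session):
#   grid, probs = quadrature_scheme_softmaxnormal_gauss_hermite(
#       normal_loc=array_ops.zeros([3]),   # K - 1 = 3, i.e. K = 4 components
#       normal_scale=array_ops.ones([3]),
#       quadrature_size=8)
#   # grid has shape [..., K, quadrature_size]; probs is L1-normalized to sum to 1.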
@deprecation.deprecated(
"2018-10-01",
"The TensorFlow Distributions library has moved to "
"TensorFlow Probability "
"(https://github.com/tensorflow/probability). Yo
|
u "
"should update all references to use `tfp.distributions` "
"instead of `tf.contrib.distributions`.",
warn_once=True)
def quadrature_scheme_softmaxnormal_quantiles(
normal_loc, normal_scale, quadrature_size,
validate_args=False, name=None):
"""Use SoftmaxNormal quantiles to form quadrature on `K - 1` simplex.
A `SoftmaxNormal` random variable `Y` may be generated via
```
Y = SoftmaxCentered(X),
X = Normal(normal_loc, normal_scale)
```
Args:
normal_loc: `float`-like `Tensor` with shape `[b1, ..., bB, K-1]`, B>=0.
The location parameter of the Normal used to construct the SoftmaxNormal.
normal_scale: `float`-like `Tensor`. Broadcastable with `normal_loc`.
The scale parameter of the Normal used to construct the SoftmaxNormal.
quadrature_size: Python `int` scalar representing the number of quadrature
points.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
name: Python `str` name prefixed to Ops created by this class.
Returns:
grid: Shape `[b1, ..., bB, K, quadrature_size]` `Tensor` representing the
convex combination of affine parameters for `K` components.
`grid[..., :, n]` is the `n`-th grid point, living in the `K - 1` simplex.
    probs: Shape `[b1, ..., bB, K, quadrature_size]` `Tensor` representing the
      quadrature probabilities associated with each grid point.
"""
with ops.name_scope(name, "softmax_normal_grid_and_probs",
[normal_loc, normal_scale]):
normal_loc = ops.convert_to_tensor(normal_loc, name="normal_loc")
dt = normal_loc.dtype.base_dtype
normal_scale = ops.convert_to_tensor(
normal_scale, dtype=dt, name="normal_scale")
normal_scale = maybe_check_quadrature_param(
normal_scale, "normal_scale", validate_args)
dist = normal_lib.Normal(loc=normal_loc, scale=normal_scale)
def _get_batch_ndims():
"""Helper to get dist.batch_shape.ndims, statically if possible."""
ndims = dist.batch_shape.ndims
if ndims is None:
ndims = array_ops.shape(dist.batch_shape_tensor())[0]
return ndims
batch_ndims = _get_batch_ndims()
def _get_final_shape(qs):
"""Helper to build `TensorShape`."""
bs = dist.batch_shape.with_rank_at_least(1)
num_components = bs[-1].value
if num_components is not None:
num_components += 1
tail = tensor_shape.TensorShape([num_components, qs])
return bs[:-1].concatenate(tail)
def _compute
|
jamespcole/home-assistant
|
script/gen_requirements_all.py
|
Python
|
apache-2.0
| 10,045
| 0
|
#!/usr/bin/env python3
"""Generate an updated requirements_all.txt."""
import importlib
import os
import pkgutil
import re
import sys
import fnmatch
COMMENT_REQUIREMENTS = (
'Adafruit-DHT',
'Adafruit_BBIO',
'avion',
'beacontools',
'blinkt',
'bluepy',
'bme680',
'credstash',
'decora',
'envirophat',
'evdev',
'face_recognition',
'fritzconnection',
'i2csense',
'opencv-python',
'py_noaa',
'VL53L1X2',
'pybluez',
'pycups',
'PySwitchbot',
'pySwitchmate',
'python-eq3bt',
'python-lirc',
'pyuserinput',
'raspihats',
'rpi-rf',
'RPi.GPIO',
'smbus-cffi',
)
TEST_REQUIREMENTS = (
'aioambient',
'aioautomatic',
'aiobotocore',
'aiohttp_cors',
'aiohue',
'aiounifi',
'apns2',
'av',
'axis',
'caldav',
'coinmarketcap',
'defusedxml',
'dsmr_parser',
'eebrightbox',
'emulated_roku',
'ephem',
'evohomeclient',
'feedparser-homeassistant',
'foobot_async',
'geojson_client',
'georss_client',
'gTTS-token',
'ha-ffmpeg',
'hangups',
'HAP-python',
'hass-nabucasa',
'haversine',
'hbmqtt',
'hdate',
'holidays',
'home-assistant-frontend',
'homekit[IP]',
'homematicip',
'influxdb',
'jsonpath',
'libpurecoollink',
'libsoundtouch',
'luftdaten',
'mbddns',
'mficlient',
'numpy',
'paho-mqtt',
'pexpect',
'pilight',
'pmsensor',
'prometheus_client',
'pushbullet.py',
'py-canary',
'pyblackbird',
'pydeconz',
'pydispatcher',
'pyhomematic',
'pylitejet',
'pymonoprice',
'pynx584',
'pyopenuv',
'pyotp',
'pyps4-homeassistant',
'pysmartapp',
'pysmartthings',
'pysonos',
'pyqwikswitch',
'PyRMVtransport',
'PyTransportNSW',
'pyspcwebgw',
'python-forecastio',
'python-nest',
'python_awair',
'pytradfri[async]',
'pyunifi',
'pyupnp-async',
'pywebpush',
'pyHS100',
'PyNaCl',
'regenmaschine',
'restrictedpython',
'rflink',
'ring_doorbell',
'rxv',
'simplisafe-python',
'sleepyq',
'smhi-pkg',
'somecomfort',
'sqlalchemy',
'srpenergy',
'statsd',
'toonapilib',
'uvcclient',
'vsure',
'warrant',
'pythonwhois',
'wakeonlan',
'vultr',
'YesssSMS',
'ruamel.yaml',
'zigpy-homeassistant',
'bellows-homeassistant',
)
IGNORE_PACKAGES = (
'homeassistant.components.hangouts.hangups_utils',
'homeassistant.components.cloud.client',
'homeassistant.components.homekit.*',
'homeassistant.components.recorder.models',
)
IGNORE_PIN = ('colorlog>2.1,<3', 'keyring>=9.3,<10.0', 'urllib3')
IGNORE_REQ = (
'colorama<=1', # Windows only requirement in check_config
)
URL_PIN = ('https://developers.home-assistant.io/docs/'
'creating_platform_code_review.html#1-requirements')
CONSTRAINT_PATH = os.path.join(os.path.dirname(__file__),
'../homeassistant/package_constraints.txt')
CONSTRAINT_BASE = """
pycryptodome>=3.6.6
# Breaks Python 3.6 and is not needed for our supported Python versions
enum34==1000000000.0.0
# This is a old unmaintained library and is replaced with pycryptodome
pycrypto==1000000000.0.0
# Contains code to modify Home Assistant to work around our rules
python-systemair-savecair==1000000000.0.0
# Newer version causes pylint to take forever
# https://github.com/timothycrosley/isort/issues/848
isort==4.3.4
"""
def explore_module(package, explore_children):
"""Explore the modules."""
module = importlib.import_module(package)
found = []
if not hasattr(module, '__path__'):
return found
for _, name, _ in pkgutil.iter_modules(module.__path__, package + '.'):
found.append(name)
if explore_children:
found.extend(explore_module(name, False))
return found
def core_requirements():
"""Gather core requirements out of setup.py."""
with open('setup.py') as inp:
reqs_raw = re.search(
r'REQUIRES = \[(.*?)\]', inp.read(), re.S).group(1)
return re.findall(r"'(.*?)'", reqs_raw)
def comment_requirement(req):
"""Comment out requirement. Some don't install on all systems."""
return any(ign in req for ign in COMMENT_REQUIREMENTS)
def gather_modules():
"""Collect the information."""
reqs = {}
errors = []
for package in sorted(
explore_module('homeassistant.components', True) +
explore_module('homeassistant.scripts', True) +
explore_module('homeassistant.auth', True)):
try:
module = importlib.import_module(package)
except ImportError as err:
for pattern in IGNORE_PACKAGES:
if fnmatch.fnmatch(package, pattern):
break
else:
print("{}: {}".format(package.replace('.', '/') + '.py', err))
errors.append(package)
continue
if not getattr(module, 'REQUIREMENTS', None):
continue
for req in module.REQUIREMENTS:
if req in IGNORE_REQ:
continue
if '://' in req and 'pyharmony' not in req:
errors.append(
"{}[Only pypi dependencies are allowed: {}]".format(
package, req))
if req.partition('==')[1] == '' and req not in IGNORE_PIN:
errors.append(
"{}[Please pin requirement {}, see {}]".format(
package, req, URL_PIN))
reqs.setdefault(req, []).append(package)
for key in reqs:
reqs[key] = sorted(reqs[key],
key=lambda name: (len(name.split('.')), name))
if errors:
print("******* ERROR")
print("Errors while importing: ", ', '.join(errors))
print("Make sure you import 3rd party libraries inside methods.")
return None
return reqs
def generate_requirements_list(reqs):
"""Generate a pip file based on requirements."""
output = []
for pkg, requirements in sorted(reqs.items(), key=lambda item: item[0]):
for req in sorted(requirements,
key=lambda name: (len(name.split('.')), name)):
output.append('\n# {}'.format(req))
if comment_requirement(pkg):
output.append('\n# {}\n'.format(pkg))
else:
output.append('\n{}\n'.format(pkg))
return ''.join(output)
def requirements_all_output(reqs):
"""Generate output for requirements_all."""
output = []
output.append('# Home Assistant core')
output.append('\n')
output.append('\n'.join(core_requirements()))
output.append('\n')
output.append(generate_requirements_list(reqs))
return ''.join(output)
def requirements_test_output(reqs):
"""Generate output for test_requirements."""
output = []
output.append('# Home Assistant test')
output.append('\n')
with open('requirements_test.txt') as test_file:
output.append(test_file.read())
output.append('\n')
filtered = {key: value for key, value in reqs.items()
if any(
re.search(r'(^|#){}($|[=><])'.format(re.escape(ign)),
key) is not None for ign in TEST_REQUIREMENTS)}
output.append(generate_requirements_list(filtered))
return ''.join(output)
def gather_constraints():
"""Construct output for constraint file."""
return '\n'.join(core_requirements() + [''])
def write_requirements_file(data):
|
"""Write the modules to the requirements_all.txt."""
with open('requirements_all.txt', 'w+', newline="\n") as req_file:
req_file.write(data)
def write_test_requirements_file(data):
"""Write the modules to the requirements_test_all.txt."""
with open('requirements_test_all.txt', 'w+', newline="\n") as req_file:
req_file.write(data)
def write_constraints_file(data):
"""Write constraints to a file."""
with open(CONSTRAINT_PATH, 'w+'
|
, newline="\n") as req_file:
req_file.write(data
|
Azure/azure-sdk-for-python
|
sdk/servicebus/azure-mgmt-servicebus/azure/mgmt/servicebus/v2021_01_01_preview/aio/operations/_topics_operations.py
|
Python
|
mit
| 39,140
| 0.004778
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class TopicsOperations:
"""TopicsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.servicebus.v2021_01_01_preview.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list_authorization_rules(
self,
resource_group_name: str,
namespace_name: str,
topic_name: str,
**kwargs: Any
) -> AsyncIterable["_models.SBAuthorizationRuleListResult"]:
"""Gets authorization rules for a topic.
:param resource_group_name: Name of the Resource group within the Azure subscription.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param topic_name: The topic name.
:type topic_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SBAuthorizationRuleListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.servicebus.v2021_01_01_preview.models.SBAuthorizationRuleListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SBAuthorizationRuleListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-01-01-preview"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_authorization_rules.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'topicName': self._serialize.url("topic_name", topic_name, 'str', min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('SBAuthorizationRuleListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_authorization_rules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus/namespaces/{namespaceName}/topics/{topicName}/authorizationRules'} # type: ignore
async def create_or_update_authorization_rule(
self,
resource_group_name: str,
namespace_name: str,
topic_name: str,
authorization_rule_name: str,
parameters: "_models.SBAuthorizationRule",
**kwargs: Any
) -> "_models.SBAuthorizationRule":
"""Creates an authorization rule for the specified topic.
:param resource_group_name: Name of the Resource group within the Azure subscription.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param topic_name: The topic name.
:type topic_name: str
:param authorization_rule_name: The authorization rule name.
:type authorization_rule_name: str
:param parameters: The shared access authorization rule.
:type parameters: ~azure.mgmt.servicebus.v2021_01_01_preview.models.SBAuthorizationRule
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SBAuthorizationRule, or the result of cls(response)
:rtype: ~azure
|
.mgmt.servicebus.v2021_01_01_preview.models.SBAuthorizationRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SBAuthorizationRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2
|
021-01-01-preview"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update_authorization_rule.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'topicName': self._serialize
|
Tocknicsu/nctuoj_contest
|
backend/utils/form.py
|
Python
|
apache-2.0
| 3,213
| 0.008403
|
from dateutil import parser
from datetime import datetime
def form_validation(form, schema):
err = _form_validation(form, schema)
return (400, err) if err else None
def _form_validation(form, schema):
'''
schema:
[{
### require
'name': <str> # +<str> means require, default is optional
### optional
'type': <class>
'non_empty': <bool> # for str, list
'except': <list>
'range': <tuple> # t[0] <= value <= t[1]
'len_range': <tuple> # t[0] <= len(value) <= t[1]
'check_dict': <dict> # for dict
'xss': <bool> # xss filter, default is False
...
}]
int
str
list
set
dict
datetime
'''
key = list(form.keys())
for item in key:
exist = False
for x in schema:
if x['name'] == item or (x['name'][0] == '+' and x['name'][1:] == item):
exist = True
if not exist:
del form[item]
for item in schema:
require = True if item['name'][0] == '+' else False
name = item['name'] = item['name'][1:] if require else item['name']
### check require
if require and (name not in form or form[name] is None):
return '%s not in form' % name
if not require and (name not in form or form[name] is None):
form[name] = None
continue
## check non_empty
if 'non_empty' in item and item['non_empty']:
if form[name] == item['type']() or form[name] is None:
return 'value of %s: "%s" should not be empty value' % (name, str(form[name]))
### check value type
if 'type' in item:
if not isinstance(form[name], item['type']):
if item['type'] == datetime:
try: form[name] = parser.parse(form[name])
except Exception as e: return name + str(e)
elif item['type'] == bool:
if form[name] in ["True", "true", "1", 1]:
form[name] = True
else:
form[name] = False
e
|
lse:
try: form[name] = item['type'](form[name])
except Exception as e: return name + str(e)
### check except
if 'except' in item:
if form[name] in item['except']:
return 'value of %s: "%s" in except list' % (name, str(form[name]))
### check range
if 'range' in item:
|
if not (item['range'][0] <= form[name] <= item['range'][1]):
return 'value of %s: "%s" not in range %s' % (name, str(form[name]), str(item['range']))
### check len_range
if 'len_range' in item:
if not (item['len_range'][0] <= len(form[name]) <= item['len_range'][1]):
return 'value of %s: "%s" not in len_range %s' % (name, str(form[name]), str(item['len_range']))
### check check_dict
if 'check_dict' in item:
err = form_validation(form[name], item['check_dict'])
if err: return err
return None
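# Hedged usage sketch of the schema format documented above (field names are
# illustrative, not from the original project):
if __name__ == '__main__':
    example_schema = [
        {'name': '+title', 'type': str, 'non_empty': True, 'len_range': (1, 64)},
        {'name': 'score', 'type': int, 'range': (0, 100)},
    ]
    example_form = {'title': 'hello', 'score': '42'}
    # prints None when validation passes, or (400, <error message>) on failure
    print(form_validation(example_form, example_schema))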
|
FlaminMad/RPiProcessRig
|
RPiProcessRig/src/yamlImport.py
|
Python
|
mit
| 521
| 0.003839
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@author: Alexander David Leech
@date: 03/06/2016
@rev: 1
@lang: Python 2.
|
7
@deps: YAML
@desc: Class to use as an interface to import YAML files
"""
import yaml
class yamlImport():
@staticmethod
def importYAML(pathToFile):
try:
with open(pathToFile, "r") as f:
config = yaml.load(f)
except IOError:
print("Failed to read " + pathToFile)
raise SystemExit()
return
|
config
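# Usage sketch (the path is illustrative):
#   settings = yamlImport.importYAML("config/settings.yaml")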
|
AEDA-Solutions/matweb
|
backend/Database/Models/Prereq.py
|
Python
|
mit
| 654
| 0.055046
|
from Da
|
tabase.Controllers.Disciplina import Disciplina
clas
|
s Prereq(object):
def __init__(self,dados=None):
if dados is not None:
self.id = dados ['id']
self.grupo = dados ['grupo']
self.id_disc_pre = dados ['id_disc_pre']
def getId(self):
return self.id
def setGrupo(self,grupo):
self.grupo = grupo
def getGrupo(self):
return self.grupo
def setId_disc_pre(self,disc_pre):
self.id_disc_pre = (Disciplina().pegarDisciplina('nome = %s',(disc_pre,))).getId()
def getId_disc_pre(self):
return self.id_disc_pre
def getDisc_pre(self):
return (Disciplina().pegarDisciplina('id = %s',(self.id_disc_pre,))).getNome()
|
anderson7ru/bienestarues
|
enfermeriaapp/views.py
|
Python
|
mit
| 4,242
| 0.016973
|
from django.shortcuts import render
from enfermeriaapp.models import Cola_Consulta, Cola_Enfermeria
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from django.utils import timezone
import time
from django.contrib import messages
from django.contrib.auth.decorators import login_required
import datetime
from django.db import connection
import json
from datospersonalesapp.models import Paciente
from nuevoingresoapp.models import Expediente_Provisional
from enfermeriaapp.forms import ColaEnfermeriaForm
# View that adds a new patient to the queue for taking vital signs
@login_required(login_url='logins')
def cola_enfermeria_nuevo(request,pk):
info = ""
pacientes=Paciente.objects.filter(estadoExpediente='A').order_by('facultadE')
cursor = connection.cursor()
cursor.execute('SELECT distinct(p.facultadE_id), f.nombreFacultad FROM datospersonalesapp_paciente as p, datospersonalesapp_facultad as f WHERE p.facultadE_id = f.codigoFacultad ORDER BY f.nombreFacultad')
auxL = cursor.fetchall()
if request.method == "GET":
data = {'idPaciente':Paciente.objects.filter(codigoPaciente = pk)
|
}
form = ColaEnfermeriaForm(data)
existe = Cola_Enfermeria.objects.filter(idPaciente = pk)
if existe:
info="El paciente ya existe en la cola"
else:
if form.is_valid():
|
expediente = form.save(commit=False)
            expediente.hora = time.strftime("%H:%M:%S") # 24-hour format
            expediente.save()
            info = "Datos Guardados Exitosamente"
return render(request,"datospersonales/paciente_list.html",{'personalpaciente':pacientes,'datoFacult':auxL,'informacion':info})
else:
form=ColaEnfermeriaForm()
info = "Ocurrio un error los datos no se guardaron"
return render(request,"datospersonales/paciente_list.html",{'personalpaciente':pacientes,'datoFacult':auxL,'informacion':info})
# Shows the list of patients queued for vital-signs checks
@login_required(login_url='logins')
def cola_enfermeria_list(request):
cola=Cola_Enfermeria.objects.order_by('hora')
return render(request,"enfermeriaapp/cola_enfermeria_list.html",{'cola':cola})
# View that manually removes a patient from the vital-signs queue
@login_required(login_url='logins')
def cola_enfermeria_borrar(request,pk):
cola=Cola_Enfermeria.objects.order_by('hora')
info = ""
if request.method == "GET":
data = {'idPaciente':Paciente.objects.filter(codigoPaciente = pk)
}
form = ColaEnfermeriaForm(data)
existe = Cola_Enfermeria.objects.filter(idPaciente = pk)
if existe:
if form.is_valid():
existe.delete()
info = "Datos eliminados exitosamente"
return render(request,"enfermeriaapp/cola_enfermeria_list.html",{'cola':cola})
else:
form=ColaEnfermeriaForm()
info = "Ocurrio un error no se pudo eliminar el paciente de la cola"
else:
info="El paciente no existe en la cola"
return render(request,"enfermeriaapp/cola_enfermeria_list.html",{'cola':cola})
# Shows the list of patients queued for consultation
@login_required(login_url='logins')
def cola_consulta_list(request):
cursor = connection.cursor()
cursor.execute('SELECT distinct(p.nit) as codigo, p.nombrePrimero as nombre,p.nombreSegundo as nombreSegundo, p.apellidoPrimero as apellido,c.hora,c.idDoctor_id as doctor FROM datospersonalesapp_paciente as p, enfermeriaapp_cola_consulta as c WHERE p.nit = c.nit')
cursor2 = connection.cursor()
cursor2.execute('SELECT distinct(p.nit) as codigo, p.nombrePrimero as nombre,p.nombreSegundo as nombreSegundo, p.apellidoPrimero as apellido,c.hora,c.idDoctor_id as doctor FROM nuevoingresoapp_expediente_provisional as p, enfermeriaapp_cola_consulta as c WHERE p.nit = c.nit')
cola = cursor.fetchall()
cola += cursor2.fetchall()
#cola=Cola_Consulta.objects.order_by('hora')
return render(request,"enfermeriaapp/cola_consulta_list.html",{'cola':cola})
|
chennan47/OSF-Offline
|
osfoffline/exceptions/tray_icon_exceptions.py
|
Python
|
apache-2.0
| 73
| 0
|
__author__ = 'himanshu'
# Tray
|
Icon
class TrayIcon(Exception):
|
pass
|
spiceqa/virt-test
|
qemu/tests/live_snapshot_chain.py
|
Python
|
gpl-2.0
| 6,291
| 0.000159
|
from vi
|
rttest import storage
from v
|
irttest import qemu_storage
from virttest import data_dir
from autotest.client.shared import error
import re
import logging
import time
@error.context_aware
def run_live_snapshot_chain(test, params, env):
"""
live_snapshot chain test:
Will test snapshot as following steps:
1. Boot up guest with base image
2. Do pre snapshot operates(option)
3. Do live snapshot
4. Do post snapshot operates(option)
5. Check the base and snapshot images(option)
:param test: Kvm test object
:param params: Dictionary with the test parameters
:param env: Dictionary with test environment.
"""
def get_base_image(snapshot_chain, snapshot_file):
try:
index = snapshot_chain.index(snapshot_file)
except ValueError:
index = -1
if index > 0:
base_image = snapshot_chain[index - 1]
else:
base_image = None
return base_image
def do_operate(params, key_word):
operate_cmd = params.get(key_word)
timeout = int(params.get("operate_timeout", "60"))
for cmd in re.findall("{(.+?)}", operate_cmd):
if re.match("shell:", cmd):
cmd = cmd[6:]
session.cmd(cmd, timeout=timeout)
elif re.match("shell_no_reply:", cmd):
cmd = cmd[15:]
session.sendline(cmd)
time.sleep(timeout)
elif re.match("monitor:", cmd):
cmd = cmd[8:]
vm.monitor.send_args_cmd(cmd)
vm = env.get_vm(params["main_vm"])
vm.verify_alive()
timeout = int(params.get("login_timeout", 360))
snapshot_chain = params.get("snapshot_chain")
file_create_cmd = params.get("file_create_cmd")
file_check_cmd = params.get("file_check_cmd")
file_dir = params.get("file_dir")
dir_create_cmd = params.get("dir_create_cmd")
md5_cmd = params.get("md5_cmd")
snapshot_chain = re.split("\s+", snapshot_chain)
session = vm.wait_for_login(timeout=timeout)
md5_value = {}
files_in_guest = {}
for index, image in enumerate(snapshot_chain):
image_params = params.object_params(image)
if image_params.get("file_create"):
session.cmd(dir_create_cmd % file_dir)
if index > 0:
snapshot_file = storage.get_image_filename(image_params,
data_dir.get_data_dir())
base_image = get_base_image(snapshot_chain, image)
base_image_params = params.object_params(base_image)
base_file = storage.get_image_filename(base_image_params,
data_dir.get_data_dir())
snapshot_format = image_params.get("image_format")
error.context("Do pre snapshot operates", logging.info)
if image_params.get("pre_snapshot_cmd"):
do_operate(image_params, "pre_snapshot_cmd")
error.context("Do live snapshot ", logging.info)
vm.live_snapshot(base_file, snapshot_file, snapshot_format)
error.context("Do post snapshot operates", logging.info)
if image_params.get("post_snapshot_cmd"):
do_operate(image_params, "post_snapshot_cmd")
md5 = ""
if image_params.get("file_create"):
session.cmd(file_create_cmd % image)
md5 = session.cmd_output(md5_cmd % image)
md5_value[image] = md5_value[base_image].copy()
md5_value[image].update({image: md5})
elif index == 0:
md5 = ""
if params.get("file_create"):
session.cmd(file_create_cmd % image)
md5 = session.cmd_output(md5_cmd % image)
md5_value[image] = {image: md5}
if image_params.get("check_alive_cmd"):
session.cmd(image_params.get("check_alive_cmd"))
if image_params.get("file_create"):
files_check = session.cmd(file_check_cmd % file_dir)
files_in_guest[image] = files_check
session.close()
error.context("Do base files check", logging.info)
snapshot_chain_backward = snapshot_chain[:]
snapshot_chain_backward.reverse()
for index, image in enumerate(snapshot_chain_backward):
image_params = params.object_params(image)
if image_params.get("check_base_image"):
vm.destroy()
vm.create(params=image_params)
vm.verify_alive()
session = vm.wait_for_login(timeout=timeout)
if image_params.get("file_create"):
for file in md5_value[image]:
md5 = session.cmd_output(md5_cmd % file)
if md5 != md5_value[image][file]:
error_message = "File %s in image %s changed " %\
(file, image)
error_message += "from '%s' to '%s'(md5)" %\
(md5_value[image][file], md5)
raise error.TestFail(error_message)
files_check = session.cmd(file_check_cmd % file_dir)
if files_check != files_in_guest[image]:
error_message = "Files in image %s is not as expect:" %\
image
error_message += "Before shut down: %s" %\
files_in_guest[image]
error_message += "Now: %s" % files_check
raise error.TestFail(error_message)
if image_params.get("image_check"):
image = qemu_storage.QemuImg(
image_params, data_dir.get_data_dir(), image)
image.check_image(image_params, data_dir.get_data_dir())
session.close()
error.context("Remove snapshot images", logging.info)
if vm.is_alive():
vm.destroy()
if params.get("remove_snapshot_images"):
for index, image in enumerate(snapshot_chain):
image_params = params.object_params(image)
if index != 0:
image = qemu_storage.QemuImg(
image_params, data_dir.get_data_dir(), image)
image.remove()
|
weigj/django-multidb
|
tests/regressiontests/fixtures_regress/models.py
|
Python
|
bsd-3-clause
| 5,996
| 0.002668
|
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
import os
class Animal(models.Model):
name = models.CharField(max_length=150)
latin_name = models.CharField(max_length=150)
count = models.IntegerField()
def __unicode__(self):
        return self.name
def animal_pre_save_check(signal, sender, instance, **kwargs):
"A signal that is used to check the type of data loaded from fixtures"
print 'Count = %s (%s)' % (instance.count, type(instance.count))
class Plant(models.Model):
name = models.CharField(max_length=150)
clas
|
s Meta:
|
# For testing when upper case letter in app name; regression for #4057
db_table = "Fixtures_regress_plant"
class Stuff(models.Model):
name = models.CharField(max_length=20, null=True)
owner = models.ForeignKey(User, null=True)
def __unicode__(self):
# Oracle doesn't distinguish between None and the empty string.
# This hack makes the test case pass using Oracle.
name = self.name
if settings.DATABASE_ENGINE == 'oracle' and name == u'':
name = None
return unicode(name) + u' is owned by ' + unicode(self.owner)
class Absolute(models.Model):
name = models.CharField(max_length=40)
load_count = 0
def __init__(self, *args, **kwargs):
super(Absolute, self).__init__(*args, **kwargs)
Absolute.load_count += 1
class Parent(models.Model):
name = models.CharField(max_length=10)
class Child(Parent):
data = models.CharField(max_length=10)
# Models to regression check #7572
class Channel(models.Model):
name = models.CharField(max_length=255)
class Article(models.Model):
title = models.CharField(max_length=255)
channels = models.ManyToManyField(Channel)
class Meta:
ordering = ('id',)
__test__ = {'API_TESTS':"""
>>> from django.core import management
# Load a fixture that uses PK=1
>>> management.call_command('loaddata', 'sequence', verbosity=0)
# Create a new animal. Without a sequence reset, this new object
# will take a PK of 1 (on Postgres), and the save will fail.
# This is a regression test for ticket #3790.
>>> animal = Animal(name='Platypus', latin_name='Ornithorhynchus anatinus', count=2)
>>> animal.save()
###############################################
# Regression test for ticket #4558 -- pretty printing of XML fixtures
# doesn't affect parsing of None values.
# Load a pretty-printed XML fixture with Nulls.
>>> management.call_command('loaddata', 'pretty.xml', verbosity=0)
>>> Stuff.objects.all()
[<Stuff: None is owned by None>]
###############################################
# Regression test for ticket #6436 --
# os.path.join will throw away the initial parts of a path if it encounters
# an absolute path. This means that if a fixture is specified as an absolute path,
# we need to make sure we don't discover the absolute path in every fixture directory.
>>> load_absolute_path = os.path.join(os.path.dirname(__file__), 'fixtures', 'absolute.json')
>>> management.call_command('loaddata', load_absolute_path, verbosity=0)
>>> Absolute.load_count
1
###############################################
# Test for ticket #4371 -- fixture loading fails silently in testcases
# Validate that error conditions are caught correctly
# redirect stderr for the next few tests...
>>> import sys
>>> savestderr = sys.stderr
>>> sys.stderr = sys.stdout
# Loading data of an unknown format should fail
>>> management.call_command('loaddata', 'bad_fixture1.unkn', verbosity=0)
Problem installing fixture 'bad_fixture1': unkn is not a known serialization format.
# Loading a fixture file with invalid data using explicit filename
>>> management.call_command('loaddata', 'bad_fixture2.xml', verbosity=0)
No fixture data found for 'bad_fixture2'. (File format may be invalid.)
# Loading a fixture file with invalid data without file extension
>>> management.call_command('loaddata', 'bad_fixture2', verbosity=0)
No fixture data found for 'bad_fixture2'. (File format may be invalid.)
# Loading a fixture file with no data returns an error
>>> management.call_command('loaddata', 'empty', verbosity=0)
No fixture data found for 'empty'. (File format may be invalid.)
# If any of the fixtures contain an error, loading is aborted
# (Regression for #9011 - error message is correct)
>>> management.call_command('loaddata', 'bad_fixture2', 'animal', verbosity=0)
No fixture data found for 'bad_fixture2'. (File format may be invalid.)
>>> sys.stderr = savestderr
###############################################
# Test for ticket #7565 -- PostgreSQL sequence resetting checks shouldn't
# ascend to parent models when inheritance is used (since they are treated
# individually).
>>> management.call_command('loaddata', 'model-inheritance.json', verbosity=0)
###############################################
# Test for ticket #7572 -- MySQL has a problem if the same connection is
# used to create tables, load data, and then query over that data.
# To compensate, we close the connection after running loaddata.
# This ensures that a new connection is opened when test queries are issued.
>>> management.call_command('loaddata', 'big-fixture.json', verbosity=0)
>>> articles = Article.objects.exclude(id=9)
>>> articles.values_list('id', flat=True)
[1, 2, 3, 4, 5, 6, 7, 8]
# Just for good measure, run the same query again. Under the influence of
# ticket #7572, this will give a different result to the previous call.
>>> articles.values_list('id', flat=True)
[1, 2, 3, 4, 5, 6, 7, 8]
###############################################
# Test for ticket #8298 - Field values should be coerced into the correct type
# by the deserializer, not as part of the database write.
>>> models.signals.pre_save.connect(animal_pre_save_check)
>>> management.call_command('loaddata', 'animal.xml', verbosity=0)
Count = 42 (<type 'int'>)
>>> models.signals.pre_save.disconnect(animal_pre_save_check)
"""}
|
sveetch/PO-Projects
|
po_projects/crumbs.py
|
Python
|
mit
| 697
| 0.004304
|
from autobreadcrumbs import site
from django.utils.translation import ugett
|
ext_lazy
site.update({
'po_projects:project-index': ugettext_lazy('PO Projects'),
'po_projects:project-create': ugettext_lazy('Create a new project'),
'po_projects:project-details': ugettext_lazy('<small class="subhead">Project</small> {{ project.name }}'),
'po_projects:project-update': ugettext_lazy('Settings'),
'po_projects:project-download': None,
'
|
po_projects:catalog-details': ugettext_lazy('<small class="subhead">Catalog</small> {{ catalog.get_locale_name }}'),
'po_projects:catalog-messages-edit': ugettext_lazy('Edit messages'),
'po_projects:catalog-messages-download': None,
})
|
UCSD-CCAL/ccal
|
ccal/conda_is_installed.py
|
Python
|
mit
| 184
| 0.005435
|
from os.path import isdir
def conda_is_installed(
|
conda_directory_path):
return all(
(isdir("{}/{}".format(conda_directory_path, na
|
me)) for name in ("bin", "lib"))
)
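# e.g. conda_is_installed("/opt/conda") is True only when both bin/ and lib/ exist
# under that directory (the path here is illustrative)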
|
neutrinog/Comperio
|
comperio/accounts/forms.py
|
Python
|
bsd-3-clause
| 8,980
| 0.010468
|
from django import forms
from django.core import validators
from comperio.accounts.models import cUser, Settings, cGroup
from django.core.validators import email_re
import random, datetime, sha
MIN_PASSWORD_LENGTH = 6
class LoginForm(forms.Form):
"""account login form"""
username = forms.CharField(widget=forms.TextInput(attrs={'class':'span-5', 'placeholder':'username', 'tabindex':'1'}), help_text="username or email")
password = forms.CharField(widget=forms.PasswordInput(attrs={'class':'span-5 char_count','placeholder':'password', 'tabindex':'2'}))
class RegistrationForm(forms.Form):
"""user registration form"""
def check_consent(val):
"""check if the user has agreed to the consent form"""
return val
username = forms.CharField(widget=forms.TextInput(attrs={'class':'span-5', 'placeholder':'username'}), max_length=30)
email = forms.CharField(widget=forms.TextInput(attrs={'class':'span-5','placeholder':'email'}), max_length=60, validators=[validators.validate_email])
password1 = forms.CharField(widget=forms.PasswordInput(attrs={'class':'span-5 char_count','placeholder':'password'}), max_length=60, validators=[validators.MinLengthValidator(MIN_PASSWORD_LENGTH)])
password2 = forms.CharField(widget=forms.PasswordInput(attrs={'class':'span-5 char_count','placeholder':'verify password'}), max_length=60, validators=[validators.MinLengthValidator(MIN_PASSWORD_LENGTH)])
consent = forms.BooleanField(widget=forms.CheckboxInput() , label="I have read and understood the above consent form")
honeypot = forms.CharField(widget=forms.HiddenInput(), required=False)
def isValidHuman(self, new_data):
"""check if the user is human"""
return new_data['honeypot'] == ""
def isValidUsername(self, new_data):
"""check if the username is valid"""
if not cUser.objects.filter(username=new_data['username']):
return True
return False
def isValidEmail(self, new_data):
"""check if the email is unique"""
# TODO: email is ok if same
if not cUser.objects.filter(email=new_data['email']):
return True
return False
# TODO: display specific error messages on the form
# TODO: form is not passing field errors.
def isValidPassword(self, new_data):
"""
check if the passwords match
"""
if len(new_data['password1']) < MIN_PASSWORD_LENGTH or len(new_data['password2']) < MIN_PASSWORD_LENGTH:
return False
return True
def PasswordsMatch(self, new_data):
"""check if the passwords match"""
if new_data['password1'] == new_data['password2']:
return True
return False
def save(self, new_data):
"""create a new inactive user from the form data"""
# make sure email is unique
if new_data['consent'] == False:
raise forms.ValidationError(u'You must agree to the consent form')
try:
duplicate = cUser.objects.get(email=new_data['email'])
except cUser.DoesNotExist:
# make sure we have a valid email
if email_re.search(new_data['email']):
# Build the activation key for their account
salt = sha.new(str(random.random())).hexdigest()[:5]
activation_key = sha.new(salt+new_data['username']).hexdigest()
key_expires = datetime.datetime.today() + datetime.timedelta(2)
u = cUser.objects.create(username=new_data['username'],
email=new_data['email'],
activation_key=activation_key,
key_expires=key_expires,
)
u.set_password(new_data['password1'])
u.is_active=False
u.save()
return u
# invalid email
raise forms.ValidationError(u'invalid email')
# duplciate user or bad email
raise forms.ValidationError(u'email already in use')
return None
class EditAccountForm(forms.Form):
"""user registration form"""
username = forms.CharField(widget=forms.TextInput(attrs={'class':'span-5', 'placeholder':'username'}), max_length=30)
email = forms.CharField(widget=forms.TextInput(attrs={'class':'span-5','placeholder':'email'}), max_length=60, validators=[validators.validate_email])
password1 = forms.CharField(widget=forms.PasswordInput(attrs={'class':'span-5 char_count','placeholder':'password'}), max_length=60, validators=[validators.MinLengthValidator(MIN_PASSWORD_LENGTH)], required=False)
password2 = forms.CharField(widget=forms.PasswordInput(attrs={'class':'span-5 char_count','placeholder':'verify password'}), max_length=60, validators=[validators.MinLengthValidator(MIN_PASSWORD_LENGTH)], required=False)
def isValidUsername(self, new_data):
"""check if the username is valid"""
# TODO: username ok if same
if not cUser.objects.filter(username=new_data['username']):
return True
return False
# TODO: display specific error messages on the form
# TODO: form is not passing field errors.
def isValidPassword(self, new_data):
"""
check if the passwords match
"""
if new_data['password1'] != '':
if len(new_data['password1']) < MIN_PASSWORD_LENGTH or len(new_data['password2']) < MIN_PASSWORD_LENGTH:
return False
return True
def isValidEmail(self, new_data):
"""check if the email is unique"""
# TODO: email is ok if same
if not cUser.objects.filter(email=new_data['email']):
return True
return False
def PasswordsMatch(self, new_data):
"""check if the passwords match"""
if new_data['password1'] == new_data['password2']:
return True
return False
def update(self, request, u):
"""update an exi
|
sting user from the form data"""
# make sure email is unique
new_data = request.POST.copy()
if u.email != new_data['email']:
try:
duplicate = cUser.objects.get(email=new_data['email'])
raise forms.ValidationEr
|
ror(u'email is not available')
except cUser.DoesNotExist:
u.email = new_data['email']
if u.username != new_data['username']:
try:
duplicate = cUser.objects.get(username=new_data['username'])
raise forms.ValidationError(u'username is not available')
except cUser.DoesNotExist:
u.username = new_data['username']
if new_data['password1'] != '':
u.set_password(new_data['password1'])
u.save()
class CreateGroupForm(forms.Form):
"""create a new user group"""
title = forms.CharField(widget=forms.TextInput(attrs={'class':'span-10 title',}), max_length=100)
description = forms.CharField(widget=forms.Textarea(attrs={'class':'span-10 description-textarea',}), max_length=1000, required=False)
#
#
# Hierarchical: only managers can send invites (individual and mass)
# overview page of students/minions
# can view code quality and comment ratio.
#
# Peer: all members can send invites, only managers can send mass invites
#
#
type = forms.CharField(widget=forms.Select(choices=cGroup.types), required=False)
visibility = forms.CharField(widget=forms.Select(choices=cGroup.visibility_types), required=False)
open_registration = forms.CharField(widget=forms.CheckboxInput(), help_text="Open registration allows anyone to request group membership")
class SettingsForm(forms.ModelForm):
"""profile settings form"""
class Meta:
model = Settings
exclude = ("user",)
# TODO: Should we allow users
|
concefly/indent_system
|
db_test.py
|
Python
|
gpl-3.0
| 598
| 0.078595
|
# -*- coding:utf-8 -*-
import datetime
import xml.etree.ElementTree as et
import pony.orm as orm
import sys
import os
pjoin = os.path.join
__dir__ = os.path.abspath(os.path.dirname(__file__))
sys.path.append(__dir__)
from server import *
dat = dict(
code = 'concefly',
last_login = datetime.datetime.now(),
user_type = 'admin',
is_active
|
= True,
date_joined = datetime.datetime.now(),
balance = 10000,
point_member =
|
10000,
point_xzl = 10000,
point_jhs = 10000,
point_nlb = 10000,
point_nlt = 10000
)
with orm.db_session:
User(**dat)
|
Metronus/metronus
|
Metronus-Project/metronus_app/model/goalEvolution.py
|
Python
|
mpl-2.0
| 791
| 0.001264
|
from django.db import models
from metronus_app.model.actor import Actor
from metronus_app.model.task import Task
class GoalEvolution(models.Model):
"""
Each time the goal or the price per unit/hour from a task is changed, a new entry is created in the log
Maybe should have been named TaskLog, but...
"""
task_id =
|
models.ForeignKey(Task)
registryDate = models.DateTimeField(auto_now=True)
actor_id = models.ForeignKey(Actor)
production_goal = models.FloatField(blank=True, null=True)
goal_description = models.
|
CharField(blank=True, max_length=100, default="")
price_per_unit = models.FloatField(null=True, blank=True)
price_per_hour = models.FloatField(null=True, blank=True)
def __unicode__(self):
return self.production_goal
|
misli/cmsplugin-survey
|
cmsplugin_survey/fields.py
|
Python
|
bsd-3-clause
| 744
| 0
|
from __future__ import unicode_literals
import re
from django import forms
from django.core.validators import RegexValidator
from django.db import models
from django.utils.translation import ugettext_lazy as _
class ColorInput(forms.TextInput):
input_type = 'color'
class ColorField(models.CharField):
default_validators = [RegexValidator(
re.compile(
|
'^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$'),
_('Enter a valid hex color.'),
'invalid',
)]
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 10
super(ColorField, self).
|
__init__(*args, **kwargs)
def formfield(self, **kwargs):
kwargs['widget'] = ColorInput
return super(ColorField, self).formfield(**kwargs)
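# Hedged usage sketch (model and field names are illustrative, not from this plugin):
#   class SurveyTheme(models.Model):
#       accent = ColorField(default='#ffcc00')
# The RegexValidator above accepts 3- or 6-digit hex values such as '#fff' or '#ffcc00'.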
|
yuhangwang/MirrorAI
|
test/dataset/directional/label_image/test_3.py
|
Python
|
mit
| 224
| 0
|
from MirrorAI.dataset.directional.label_image import label_image
import numpy
def test():
d = numpy.array([1, 1, 0])
|
answer = label_image(d, target=0)
solut
|
ion = [0, 0, 1]
assert (answer == solution).all()
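# i.e. for d = [1, 1, 0] and target=0, label_image marks positions equal to the target
# with 1 and all others with 0 (inferred from the assertion above)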
|
VillageAlliance/django-cms
|
cms/templatetags/cms_tags.py
|
Python
|
bsd-3-clause
| 14,804
| 0.003445
|
# -*- coding: utf-8 -*-
from classytags.arguments import Argument, MultiValueArgument
from classytags.core import Options, Tag
from classytags.helpers import InclusionTag
from classytags.parser import Parser
from cms.models import Page, Placeholder as PlaceholderModel
from cms.plugin_rendering import render_plugins, render_placeholder
from cms.plugins.utils import get_plugins
from cms.utils import get_language_from_request
from cms.utils.moderator import get_cmsplugin_queryset, get_page_queryset
from cms.utils.placeholder import validate_placeholder_name
from django import template
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.cache import cache
from django.core.mail import mail_managers
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from itertools import chain
import re
register = template.Library()
def get_site_id(site):
if site:
if isinstance(site, Site):
site_id = site.id
elif isinstance(site, int) or (isinstance(site, basestring) and site.isdigit()):
site_id = int(site)
else:
site_id = settings.SITE_ID
else:
site_id = settings.SITE_ID
return site_id
def has_permission(page, request):
return page.has_change_permission(request)
register.filter(has_permission)
CLEAN_KEY_PATTERN = re.compile(r'[^a-zA-Z0-9_-]')
def _clean_key(key):
return CLEAN_KEY_PATTERN.sub('-', key)
def _get_cache_key(name, page_lookup, lang, site_id):
if
|
isinstance(page_lookup, Page):
page_key = str(page_lookup.pk)
else:
page_key = str(page_lookup)
page_key = _clean_key(pa
|
ge_key)
return name+'__page_lookup:'+page_key+'_site:'+str(site_id)+'_lang:'+str(lang)
def _get_page_by_untyped_arg(page_lookup, request, site_id):
"""
The `page_lookup` argument can be of any of the following types:
- Integer: interpreted as `pk` of the desired page
- String: interpreted as `reverse_id` of the desired page
- `dict`: a dictionary containing keyword arguments to find the desired page
(for instance: `{'pk': 1}`)
- `Page`: you can also pass a Page object directly, in which case there will be no database lookup.
- `None`: the current page will be used
"""
if page_lookup is None:
return request.current_page
if isinstance(page_lookup, Page):
return page_lookup
if isinstance(page_lookup, basestring):
page_lookup = {'reverse_id': page_lookup}
elif isinstance(page_lookup, (int, long)):
page_lookup = {'pk': page_lookup}
elif not isinstance(page_lookup, dict):
raise TypeError('The page_lookup argument can be either a Dictionary, Integer, Page, or String.')
page_lookup.update({'site': site_id})
try:
return get_page_queryset(request).get(**page_lookup)
except Page.DoesNotExist:
site = Site.objects.get_current()
subject = _('Page not found on %(domain)s') % {'domain':site.domain}
body = _("A template tag couldn't find the page with lookup arguments `%(page_lookup)s\n`. "
"The URL of the request was: http://%(host)s%(path)s") \
% {'page_lookup': repr(page_lookup), 'host': site.domain, 'path': request.path}
if settings.DEBUG:
raise Page.DoesNotExist(body)
else:
if settings.SEND_BROKEN_LINK_EMAILS:
mail_managers(subject, body, fail_silently=True)
return None
class PageUrl(InclusionTag):
template = 'cms/content.html'
name = 'page_url'
options = Options(
Argument('page_lookup'),
Argument('lang', required=False, default=None),
Argument('site', required=False, default=None),
)
def get_context(self, context, page_lookup, lang, site):
site_id = get_site_id(site)
request = context.get('request', False)
if not request:
return {'content': ''}
if request.current_page == "dummy":
return {'content': ''}
if lang is None:
lang = get_language_from_request(request)
cache_key = _get_cache_key('page_url', page_lookup, lang, site_id)+'_type:absolute_url'
url = cache.get(cache_key)
if not url:
page = _get_page_by_untyped_arg(page_lookup, request, site_id)
if page:
url = page.get_absolute_url(language=lang)
cache.set(cache_key, url, settings.CMS_CACHE_DURATIONS['content'])
if url:
return {'content': url}
return {'content': ''}
register.tag(PageUrl)
register.tag('page_id_url', PageUrl)
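# Hedged template usage sketch for the tag registered above (lookups are illustrative):
#   {% page_url 1 %}                    by pk
#   {% page_url "contact-page" "de" %}  by reverse_id, with an explicit language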
def _get_placeholder(current_page, page, context, name):
placeholder_cache = getattr(current_page, '_tmp_placeholders_cache', {})
if page.pk in placeholder_cache:
return placeholder_cache[page.pk].get(name, None)
placeholder_cache[page.pk] = {}
placeholders = page.placeholders.all()
for placeholder in placeholders:
placeholder_cache[page.pk][placeholder.slot] = placeholder
current_page._tmp_placeholders_cache = placeholder_cache
return placeholder_cache[page.pk].get(name, None)
def get_placeholder_content(context, request, current_page, name, inherit):
pages = [current_page]
if inherit:
pages = chain([current_page], current_page.get_cached_ancestors(ascending=True))
for page in pages:
placeholder = _get_placeholder(current_page, page, context, name)
if placeholder is None:
continue
if not get_plugins(request, placeholder):
continue
content = render_placeholder(placeholder, context, name)
if content:
return content
placeholder = _get_placeholder(current_page, current_page, context, name)
return render_placeholder(placeholder, context, name)
class PlaceholderParser(Parser):
def parse_blocks(self):
for bit in getattr(self.kwargs['extra_bits'], 'value', self.kwargs['extra_bits']):
if getattr(bit, 'value', bit.var.value) == 'or':
return super(PlaceholderParser, self).parse_blocks()
return
class PlaceholderOptions(Options):
def get_parser_class(self):
return PlaceholderParser
class Placeholder(Tag):
"""
This template node is used to output page content and
is also used in the admin to dynamically generate input fields.
eg: {% placeholder "placeholder_name" %}
{% placeholder "sidebar" inherit %}
{% placeholder "footer" inherit or %}
<a href="/about/">About us</a>
{% endplaceholder %}
Keyword arguments:
name -- the name of the placeholder
width -- additional width attribute (integer) which gets added to the plugin context
    (deprecated, use `{% with 320 as width %}{% placeholder "foo" %}{% endwith %}`)
inherit -- optional argument which if given will result in inheriting
the content of the placeholder with the same name on parent pages
or -- optional argument which if given will make the template tag a block
tag whose content is shown if the placeholder is empty
"""
name = 'placeholder'
options = PlaceholderOptions(
Argument('name', resolve=False),
MultiValueArgument('extra_bits', required=False, resolve=False),
blocks=[
('endplaceholder', 'nodelist'),
]
)
def render_tag(self, context, name, extra_bits, nodelist=None):
validate_placeholder_name(name)
width = None
inherit = False
for bit in extra_bits:
if bit == 'inherit':
inherit = True
elif bit.isdigit():
width = int(bit)
import warnings
warnings.warn(
"The width parameter for the placeholder tag is deprecated.",
DeprecationWarning
)
if not 'request' in context:
return ''
request = context['request']
if width:
context.update({'width': width})
page = request.current_page
if not page or page == 'dummy':
|
openstack/manila | manila/share/drivers/purestorage/flashblade.py | Python | apache-2.0 | 17,837 | 0
# Copyright 2021 Pure Storage Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Pure Storage FlashBlade Share Driver
"""
import functools
import platform
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import units
from manila import exception
from manila.i18n import _
from manila.share import driver
HAS_PURITY_FB = True
try:
import purity_fb
except ImportError:
purity_fb = None
LOG = logging.getLogger(__name__)
flashblade_connection_opts = [
cfg.HostAddressOpt(
"flashblade_mgmt_vip",
help="The name (or IP address) for the Pure Storage "
|
"FlashBlade storage system management VIP.",
),
cfg.HostAddres
|
sOpt(
"flashblade_data_vip",
help="The name (or IP address) for the Pure Storage "
"FlashBlade storage system data VIP.",
),
]
flashblade_auth_opts = [
cfg.StrOpt(
"flashblade_api",
help=("API token for an administrative user account"),
secret=True,
),
]
flashblade_extra_opts = [
cfg.BoolOpt(
"flashblade_eradicate",
default=True,
help="When enabled, all FlashBlade file systems and snapshots "
"will be eradicated at the time of deletion in Manila. "
"Data will NOT be recoverable after a delete with this "
"set to True! When disabled, file systems and snapshots "
"will go into pending eradication state and can be "
"recovered.)",
),
]
CONF = cfg.CONF
CONF.register_opts(flashblade_connection_opts)
CONF.register_opts(flashblade_auth_opts)
CONF.register_opts(flashblade_extra_opts)
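# Editor's sketch: how the options registered above are typically wired into a
# manila.conf backend stanza (section name, addresses and token are hypothetical;
# the share_driver path follows this module's location):
#
#     [flashblade1]
#     share_backend_name = flashblade1
#     share_driver = manila.share.drivers.purestorage.flashblade.FlashBladeShareDriver
#     flashblade_mgmt_vip = 10.1.1.10
#     flashblade_data_vip = 10.1.1.11
#     flashblade_api = <admin-api-token>
#     flashblade_eradicate = False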
def purity_fb_to_manila_exceptions(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except purity_fb.rest.ApiException as ex:
msg = _("Caught exception from purity_fb: %s") % ex
LOG.exception(msg)
raise exception.ShareBackendException(msg=msg)
return wrapper
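# Editor's note: an illustrative application of the decorator above (the method
# name is hypothetical; the wrapped call appears later in this driver). Any
# purity_fb.rest.ApiException raised inside is logged and re-raised as a manila
# ShareBackendException:
#
#     @purity_fb_to_manila_exceptions
#     def _list_file_system(self, name):
#         return self._sys.file_systems.list_file_systems(names=[name])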
class FlashBladeShareDriver(driver.ShareDriver):
"""Version hisotry:
1.0.0 - Initial version
2.0.0 - Xena release
3.0.0 - Yoga release
"""
VERSION = "3.0" # driver version
USER_AGENT_BASE = "OpenStack Manila"
def __init__(self, *args, **kwargs):
super(FlashBladeShareDriver, self).__init__(False, *args, **kwargs)
self.configuration.append_config_values(flashblade_connection_opts)
self.configuration.append_config_values(flashblade_auth_opts)
self.configuration.append_config_values(flashblade_extra_opts)
self._user_agent = "%(base)s %(class)s/%(version)s (%(platform)s)" % {
"base": self.USER_AGENT_BASE,
"class": self.__class__.__name__,
"version": self.VERSION,
"platform": platform.platform(),
}
def do_setup(self, context):
"""Driver initialization"""
if purity_fb is None:
msg = _(
"Missing 'purity_fb' python module, ensure the library"
" is installed and available."
)
raise exception.ManilaException(message=msg)
self.api = self._safe_get_from_config_or_fail("flashblade_api")
self.management_address = self._safe_get_from_config_or_fail(
"flashblade_mgmt_vip"
)
self.data_address = self._safe_get_from_config_or_fail(
"flashblade_data_vip"
)
self._sys = purity_fb.PurityFb(self.management_address)
self._sys.disable_verify_ssl()
try:
self._sys.login(self.api)
self._sys._api_client.user_agent = self._user_agent
except purity_fb.rest.ApiException as ex:
msg = _("Exception when logging into the array: %s\n") % ex
LOG.exception(msg)
raise exception.ManilaException(message=msg)
backend_name = self.configuration.safe_get("share_backend_name")
self._backend_name = backend_name or self.__class__.__name__
LOG.debug("setup complete")
def _update_share_stats(self, data=None):
"""Retrieve stats info from share group."""
(
free_capacity_bytes,
physical_capacity_bytes,
provisioned_cap_bytes,
data_reduction,
) = self._get_available_capacity()
reserved_share_percentage = self.configuration.safe_get(
"reserved_share_percentage"
)
if reserved_share_percentage is None:
reserved_share_percentage = 0
reserved_share_from_snapshot_percentage = self.configuration.safe_get(
"reserved_share_from_snapshot_percentage"
)
if reserved_share_from_snapshot_percentage is None:
reserved_share_from_snapshot_percentage = reserved_share_percentage
data = dict(
share_backend_name=self._backend_name,
vendor_name="PURE STORAGE",
driver_version=self.VERSION,
storage_protocol="NFS",
data_reduction=data_reduction,
reserved_percentage=reserved_share_percentage,
reserved_snapshot_percentage=(
reserved_share_from_snapshot_percentage),
total_capacity_gb=float(physical_capacity_bytes) / units.Gi,
free_capacity_gb=float(free_capacity_bytes) / units.Gi,
provisioned_capacity_gb=float(provisioned_cap_bytes) / units.Gi,
snapshot_support=True,
create_share_from_snapshot_support=False,
mount_snapshot_support=False,
revert_to_snapshot_support=True,
thin_provisioning=True,
)
super(FlashBladeShareDriver, self)._update_share_stats(data)
def _get_available_capacity(self):
space = self._sys.arrays.list_arrays_space()
array_space = space.items[0]
data_reduction = array_space.space.data_reduction
physical_capacity_bytes = array_space.capacity
used_capacity_bytes = array_space.space.total_physical
free_capacity_bytes = physical_capacity_bytes - used_capacity_bytes
provisioned_capacity_bytes = array_space.space.unique
return (
free_capacity_bytes,
physical_capacity_bytes,
provisioned_capacity_bytes,
data_reduction,
)
def _safe_get_from_config_or_fail(self, config_parameter):
config_value = self.configuration.safe_get(config_parameter)
if not config_value:
reason = _(
"%(config_parameter)s configuration parameter "
"must be specified"
) % {"config_parameter": config_parameter}
LOG.exception(reason)
raise exception.BadConfigurationException(reason=reason)
return config_value
def _make_source_name(self, snapshot):
base_name = CONF.share_name_template + "-manila"
return base_name % snapshot["share_id"]
def _make_share_name(self, manila_share):
base_name = CONF.share_name_template + "-manila"
return base_name % manila_share["id"]
def _get_full_nfs_export_path(self, export_path):
subnet_ip = self.data_address
return "{subnet_ip}:/{export_path}".format(
subnet_ip=subnet_ip, export_path=export_path
)
def _get_flashblade_filesystem_by_name(self, name):
filesys = []
filesys.append(name)
try:
res = self._sys.file_systems.list_file_systems(names=filesys)
except purity_fb.rest.ApiException as ex:
msg = _("Share not found on FlashBlade: %s\n") %
hr567/seating-chart | SeatingChart/API/SeatingChart.py | Python | gpl-3.0 | 2,072 | 0
from .RuleEditor import *
class SeatingChart:
def __init__(self, m, n):
self.M, self.N = m, n
self._pos = list(range(len(self)))
self.names = None
self.rule_editor = RuleEditor(m, n)
self.maintain()
def __len__(self) -> int:
"""Return the number of students in class"""
return self.M * self.N
def __getitem__(self, i: int) -> list:
"""Return the students in row i"""
return self._pos[i * self.N: (i + 1) * self.N]
def __repr__(self) -> str:
s = ''
for i in range(self.M):
for j in range(self.N):
s += str(self[i][j]) + ' '
s = s.lstrip()
s += '\n'
return s
def __str__(self) -> str:
s = ''
        for i in range(self.M):
for j in range(self.N):
s += str(self[i][j]).rjust(4)
s += '\n'
return s
def index(self, i: int) -> tuple:
"""Return the position of student i"""
i = int(i)
_real_pos = self._pos.index(i)
return _real_pos // self.N, _real_pos % self.N
def get_name(self, i: int, j: int) -> str:
"""Return the number/name of student who seat at (i, j)"""
return self.names[self[i][j]] if self.names else str(self[i][j])
def maintain(self):
"""Shuffle the seat"""
random.shuffle(self._pos)
try:
self.rule_editor.adjust_seat_list(self._pos)
except FutureWarning:
print('Use random chart')
def load(self, file_name: str):
"""Read the name list from file"""
with open(file_name) as file:
names = file.read().split()
names.insert(0, 'Empty Seat')
if len(names) >= len(self):
self.names = names[:len(self)]
else:
print("Length of the list is less than", len(self))
def save(self, file_name: str):
"""Save the seating chart to file"""
with open(file_name, 'w') as file:
file.write(str(self))
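# Editor's sketch: typical interactive use of SeatingChart (file names and
# dimensions are hypothetical). Kept as comments because this module is meant
# to be imported as part of the package:
#
#     chart = SeatingChart(6, 8)      # 6 rows x 8 columns
#     chart.load("names.txt")         # optional: attach a name list
#     chart.maintain()                # reshuffle, honouring RuleEditor rules
#     print(chart)                    # seat numbers laid out row by row
#     chart.save("chart.txt")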
gilestrolab/pyrem | src/pyrem/univariate.py | Python | gpl-3.0 | 18,418 | 0.008579
r"""
==================================================
Feature computation for univariate time series
==================================================
This sub-module provides routines for computing features on univariate time series.
Many functions are improved versions of PyEEG [PYEEG]_ functions. Be careful,
some functions will give different results compared to PyEEG as the maths have been changed to match original definitions.
Have a look at the documentation notes/ source code to know more.
Here a list of the functions that were reimplemented:
* Approximate entropy :func:`~pyrem.univariate.ap_entropy` [RIC00]_
* Fisher information :func:`~pyrem.univariate.fisher_info` [PYEEG]_
* Higuchi fractal dimension :func:`~pyrem.univariate.hfd` [HIG88]_
* Hjorth parameters :func:`~pyrem.univariate.hjorth` [HJO70]_
* Petrosian fractal dimension :func:`~pyrem.univariate.pfd` [PET95]_
* Sample entropy :func:`~pyrem.univariate.samp_entropy` [RIC00]_
* Singular value decomposition entropy :func:`~pyrem.univariate.svd_entropy` [PYEEG]_
* Spectral entropy :func:`~pyrem.univariate.spectral_entropy` [PYEEG]_
.. [PET95] A. Petrosian, Kolmogorov complexity of finite sequences and recognition of different preictal EEG patterns, in ,
Proceedings of the Eighth IEEE Symposium on Computer-Based Medical Systems, 1995, 1995, pp. 212-217.
.. [PYEEG] F. S. Bao, X. Liu, and C. Zhang, PyEEG: An Open Source Python Module for EEG/MEG Feature Extraction,
Computational Intelligence and Neuroscience, vol. 2011, p. e406391, Mar. 2011.
.. [HJO70] B. Hjorth, EEG analysis based on time domain properties,
Electroencephalography and Clinical Neurophysiology, vol. 29, no. 3, pp. 306-310, Sep. 1970.
.. [COS05] M. Costa, A. L. Goldberger, and C.-K. Peng, "Multiscale entropy analysis of biological signals," Phys. Rev. E, vol. 71, no. 2, p. 021906, Feb. 2005.
.. [RIC00] J. S. Richman and J. R. Moorman, "Physiological time-series analysis using approximate entropy and sample entropy,"
American Journal of Physiology - Heart and Circulatory Physiology, vol. 278, no. 6, pp. H2039-H2049, Jun. 2000.
.. [HIG88] T. Higuchi, "Approach to an irregular time series on the basis of the fractal theory," Physica D: Nonlinear Phenomena, vol. 31, no. 2, pp. 277-283, Jun. 1988.
"""
__author__ = 'quentin'
import numpy as np
def _embed_seq(X,tau,de):
N =len(X)
if de * tau > N:
raise ValueError("Cannot build such a matrix, because D * Tau > N")
if tau<1:
raise ValueError("Tau has to be at least 1")
Y=np.zeros((de, N - (de - 1) * tau))
for i in range(de):
Y[i] = X[i *tau : i*tau + Y.shape[1] ]
return Y.T
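# Editor's note: _embed_seq builds the delay-embedding matrix used by the
# estimators below; row k is (X[k], X[k + tau], ..., X[k + (de - 1) * tau]).
# A tiny worked example (values computed by hand):
#
#     >>> _embed_seq(np.arange(6.0), tau=2, de=2)
#     array([[0., 2.],
#            [1., 3.],
#            [2., 4.],
#            [3., 5.]])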
def _make_cmp(X, M, R, in_range_i, in_range_j):
#Then we make Cmp
N = len(X)
Emp = _embed_seq(X, 1, M + 1)
inrange_cmp = np.abs(Emp[in_range_i,-1] - Emp[in_range_j,-1]) <= R
in_range_cmp_i = in_range_i[inrange_cmp]
Cmp = np.bincount(in_range_cmp_i, minlength=N-M)
in_range_cmp_j = in_range_j[inrange_cmp]
Cmp += np.bincount(in_range_cmp_j, minlength=N-M)
    return Cmp.astype(np.float64)
def _coarse_grainning(a, tau):
"""
Coarse grainning for multiscale (sample) entropy.
"""
if tau ==1:
return a
    length_out = a.size // tau
n_dropped = a.size % tau
mat = a[0:a.size - n_dropped].reshape((tau, length_out))
return np.mean(mat, axis=0)
def _make_cm(X,M,R):
N = len(X)
# we pregenerate all indices
i_idx,j_idx = np.triu_indices(N - M)
# We start by making Cm
Em = _embed_seq(X, 1, M)
dif = np.abs(Em[i_idx] - Em[j_idx])
max_dist = np.max(dif, 1)
inrange_cm = max_dist <= R
in_range_i = i_idx[inrange_cm]
in_range_j = j_idx[inrange_cm]
Cm = np.bincount(in_range_i, minlength=N-M+1)
Cm += np.bincount(in_range_j, minlength=N-M+1)
inrange_last = np.max(np.abs(Em[:-1] - Em[-1]),1) <= R
Cm[inrange_last] += 1
# all matches + self match
Cm[-1] += np.sum(inrange_last) + 1
    return Cm.astype(np.float64), in_range_i, in_range_j
def pfd(a):
r"""
Compute Petrosian Fractal Dimension of a time series [PET95]_.
It is defined by:
.. math::
\frac{log(N)}{log(N) + log(\frac{N}{N+0.4N_{\delta}})}
.. note::
**Difference with PyEEG:**
        Results differ from [PYEEG]_, which implemented an apparently erroneous formula:
.. math::
\frac{log(N)}{log(N) + log(\frac{N}{N}+0.4N_{\delta})}
Where:
:math:`N` is the length of the time series, and
:math:`N_{\delta}` is the number of sign changes.
:param a: a one dimensional floating-point array representing a time series.
:type a: :class:`~numpy.ndarray` or :class:`~pyrem.time_series.Signal`
:return: the Petrosian Fractal Dimension; a scalar.
:rtype: float
Example:
>>> import pyrem as pr
>>> import numpy as np
>>> # generate white noise:
>>> noise = np.random.normal(size=int(1e4))
    >>> pr.univariate.pfd(noise)
"""
diff = np.diff(a)
# x[i] * x[i-1] for i in t0 -> tmax
prod = diff[1:-1] * diff[0:-2]
# Number of sign changes in derivative of the signal
N_delta = np.sum(prod < 0)
n = len(a)
return np.log(n)/(np.log(n)+np.log(n/(n+0.4*N_delta)))
def hjorth(a):
r"""
Compute Hjorth parameters [HJO70]_.
.. math::
Activity = m_0 = \sigma_{a}^2
.. math::
Complexity = m_2 = \sigma_{d}/ \sigma_{a}
.. math::
Morbidity = m_4 = \frac{\sigma_{dd}/ \sigma_{d}}{m_2}
Where:
    :math:`\sigma_{x}^2` is the mean power of a signal :math:`x`. That is, its variance, if its mean is zero.
:math:`a`, :math:`d` and :math:`dd` represent the original signal, its first and second derivatives, respectively.
.. note::
**Difference with PyEEG:**
        Results differ from [PYEEG]_, which appears to use a non-normalised (by the length of the signal) definition of the activity:
.. math::
\sigma_{a}^2 = \sum{\mathbf{x}[i]^2}
As opposed to
        .. math::
\sigma_{a}^2 = \frac{1}{n}\sum{\mathbf{x}[i]^2}
:param a: a one dimensional floating-point array representing a time series.
:type a: :class:`~numpy.ndarray` or :class:`~pyrem.time_series.Signal`
    :return: activity, morbidity and complexity
:rtype: tuple(float, float, float)
Example:
>>> import pyrem as pr
>>> import numpy as np
>>> # generate white noise:
>>> noise = np.random.normal(size=int(1e4))
    >>> activity, morbidity, complexity = pr.univariate.hjorth(noise)
"""
first_deriv = np.diff(a)
second_deriv = np.diff(a,2)
var_zero = np.mean(a ** 2)
var_d1 = np.mean(first_deriv ** 2)
var_d2 = np.mean(second_deriv ** 2)
activity = var_zero
morbidity = np.sqrt(var_d1 / var_zero)
complexity = np.sqrt(var_d2 / var_d1) / morbidity
return activity, morbidity, complexity
def svd_entropy(a, tau, de):
r"""
Compute the Singular Value Decomposition entropy of a signal with embedding dimension "de" and delay "tau" [PYEEG]_.
.. note::
**Difference with PyEEG:**
The result differs from PyEEG implementation because :math:`log_2` is used (as opposed to natural logarithm in PyEEG code),
according to the definition in their paper [PYEEG]_ (eq. 9):
.. math::
H_{SVD} = -\sum{\bar\sigma{}_i log_2 \bar\sigma{}_i}
:param a: a one dimensional floating-point array representing a time series.
:type a: :class:`~numpy.ndarray` or :class:`~pyrem.time_series.Signal`
:param tau: the delay
:type tau: int
:param de: the embedding dimension
:type de: int
:return: the SVD entropy, a scalar
:rtype: float
"""
mat = _embed_seq(a, tau, de)
W = np.linalg.svd(mat, compute_uv = False)
W /= sum(W) # normalize singular values
return -1*sum(W * np.log2(W))
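# Editor's note: a quick qualitative check of svd_entropy (illustrative). A
# sinusoid embeds onto a low-dimensional manifold, so only a couple of singular
# values dominate and its SVD entropy stays well below that of white noise of
# the same length at the same (tau, de):
#
#     >>> t = np.linspace(0, 100, 10000)
#     >>> svd_entropy(np.sin(t), tau=4, de=10) < svd_entropy(
#     ...     np.random.normal(size=10000), tau=4, de=10)
#     True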
def fisher_info(a, tau, de):
r"""
Compute the Fisher information of a signal with embedding dimension "de" and delay "tau" [PYEEG]_.
Vectorised (i.e. faster) version of the eponymous PyEEG functi
ryfeus/lambda-packs | Keras_tensorflow_nightly/source2.7/tensorboard/version.py | Python | mit | 744 | 0
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains the version string."""
VERSION = '1.8.0'
kgsn1763/deep-learning-from-scratch | ch06/weight_init_compare.py | Python | mit | 1,963 | 0.00429
#!/usr/bin/env python
# coding: utf-8
import os
import sys
sys.path.append(os.pardir)  # settings for importing files from the parent directory
import numpy as np
import matplotlib.pyplot as plt
from dataset.mnist import load_mnist
from common.util import smooth_curve
from common.multi_layer_net import MultiLayerNet
from common.optimizer import SGD
# 0: Load the MNIST data ==========
(x_train, t_train), (x_test, t_test) = load_mnist(normalize=True)
train_size = x_train.shape[0]
batch_size = 128
max_iterations = 2000
# 1: Experiment setup ==========
weight_init_types = {'std=0.01': 0.01, 'Xavier': 'sigmoid', 'He': 'relu'}
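# Editor's note: per this book's MultiLayerNet, the three settings above select
# (approximately) these weight scales for a layer with n_in inputs; the exact
# helper lives in common/multi_layer_net.py and is assumed here, not shown:
#
#     std=0.01 : W = 0.01 * np.random.randn(n_in, n_out)
#     Xavier   : W = np.random.randn(n_in, n_out) * np.sqrt(1.0 / n_in)
#     He       : W = np.random.randn(n_in, n_out) * np.sqrt(2.0 / n_in)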
optimizer = SGD(lr=0.01)
networks = {}
train_loss = {}
for key, weight_type in weight_init_types.items():
networks[key] = MultiLayerNet(input_size=784, hidden_size_list=[100, 100, 100, 100],
output_size=10, weight_init_std=weight_type)
train_loss[key] = []
# 2: Start training ==========
for i in range(max_iterations):
batch_mask = np.random.choice(train_size, batch_size)
x_batch = x_train[batch_mask]
t_batch = t_train[batch_mask]
for key in weight_init_types.keys():
grads = networks[key].gradient(x_batch, t_batch)
optimizer.update(networks[key].params, grads)
loss = networks[key].loss(x_batch, t_batch)
        train_loss[key].append(loss)
if i % 100 == 0:
print("===========" + "iteration:" + str(i) + "===========")
for key in weight_init_types.keys():
loss = networks[key].loss(x_batch, t_batch)
print(key + ":" + str(loss))
# 3: Plot the graph ==========
markers = {'std=0.01': 'o', 'Xavier': 's', 'He': 'D'}
x = np.arange(max_iterations)
for key in weight_init_types.keys():
    plt.plot(x, smooth_curve(train_loss[key]), marker=markers[key], markevery=100, label=key)
plt.xlabel("iterations")
plt.ylabel("loss")
plt.ylim(0, 2.5)
plt.legend()
plt.show()
MrYsLab/pymata-aio | examples/blink.py | Python | agpl-3.0 | 1,345 | 0
#!/usr/bin/python
"""
Turns on an LED on for one second, then off for one second, repeatedly.
Most Arduinos have an on-board LED you can control. On the Uno and
Leonardo, it is attached to digital pin 13. If you're unsure what
pin the on-board LED is connected to on your Arduino model, check
the documentation at http://www.arduino.cc
"""
from pymata_aio.pymata3 import PyMata3
from pymata_aio.constants import Constants
# Arduino LED is on pin 13
BOARD_LED = 13
# If you are having problems connecting, you may
# wish to add some time to the arduino_wait parameter.
# replace:
# board = PyMata3()
# with:
# board = PyMata3(arduino_wait=5)
# adjust the arduino_wait value to meet the needs
# of your computer
# instantiate PyMata3
board = PyMata3()
def setup():
"""
Set the Arduino BOARD_LED pin as an output
:return:
"""
    board.set_pin_mode(BOARD_LED, Constants.OUTPUT)
def loop():
"""
Toggle the LED by alternating the values written
to the LED pin. Wait 1 second between writes.
Also note the use of board.sleep and not
time.sleep.
:return:
"""
print("LED On
|
")
board.digital_write(BOARD_LED, 1)
board.sleep(1.0)
print("LED Off")
board.digital_write(BOARD_LED, 0)
board.sleep(1.0)
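# Editor's note: board.sleep() is preferred over time.sleep() here because it
# yields to pymata-aio's asyncio event loop while waiting, so Firmata traffic
# keeps being serviced during the delay instead of being blocked.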
if __name__ == "__main__":
setup()
while True:
loop()
bd-j/hmc | convergence.py | Python | gpl-2.0 | 1,867 | 0.009106
import numpy as np
def gr_indicators(chain, alpha=0.05):
"""Calculate the Gelman Rubin indicator of convergence. Also,
calculate the interval based indicator presented in Brooks &
Gelman 1998
"""
nw, nstep, ndim = chain.shape
# mean within each chain
mean = chain.mean(axis=1)
# variance within each chain
var = chain.var(axis=1, ddof=1)
# mean over chains of the variance within each chain
W = var.mean(axis=0)
    # variance over chains of the mean within each chain, multiplied by nstep
B = nstep * mean.var(axis=0, ddof=1)
# estimate of true variance: weighted sum of variances
sigmasq = (1 - 1/nstep) * W + B/nstep
# accounting for sampling variability
V = sigmasq + B/(nw*nstep)
R = V / W
#Now do the interval based method
p = [100.0*(alpha/2), 100.0*(1-alpha/2)]
Wp = np.percentile(chain, p, axis=1)
Wp = Wp[1,...] - Wp[0,...]
Bp = np.percentile(chain.reshape(nw*nstep, ndim), p, axis=0)
Bp = Bp[1,...] - Bp[0,...]
Rint = Bp/Wp.mean(axis=0)
return R, Rint
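# Editor's sketch: expected use of gr_indicators on an (nwalkers, nstep, ndim)
# chain (synthetic data, values illustrative). R near 1 in every dimension
# suggests the chains have mixed; values well above ~1.1 usually signal
# non-convergence:
#
#     >>> chain = np.random.normal(size=(8, 1000, 3))
#     >>> R, Rint = gr_indicators(chain, alpha=0.05)
#     >>> R.shape, Rint.shape
#     ((3,), (3,))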
def correlation_time(chain, window=None, c=10, fast=False):
    from emcee.autocorr import integrated_time
nw, nstep, ndim = chain.shape
x = np.mean(chain, axis=0)
m = 0
if window is None:
for m in np.arange(10, nstep):
tau = integrated_time(x, axis=0, fast=fast,
window=m)
if np.all(tau * c < m) and np.all(tau > 0):
break
window = m
else:
tau = integrated_time(x, axis=0, fast=fast,
                              window=window)
if m == (nstep-1) or (np.any(tau < 0)):
raise(ValueError)
return tau, window
def raftery_lewis(chain, q, tol=None, p = 0.95):
pass
def heidelberg_welch(chain, alpha):
pass
def geweke(chain):
pass
simon-r/SerialPhotoMerge | imgmerge/mergeProcedureVirtual.py | Python | gpl-3.0 | 2,752 | 0.00109
# Serial Photo Merge
# Copyright (C) 2017 Simone Riva mail: simone.rva {at} gmail {dot} com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import sys
import scipy.ndimage as ndimage
from imgmerge.readImg import ReadImageBasic
from imgmerge.image import Image
from imgmerge.readImgFactory import ReadImageFarctory
#import matplotlib.pyplot as plt
def get_dtype(color_bits):
if color_bits == 8:
return np.uint8
elif color_bits == 16:
return np.uint16
class MergeProcedureVirtual(object):
def __init__(self):
self._img_list = None
self._resimg = None
self._refimage = None
self._images_iterator = None
self._read_img_factory = ReadImageFarctory()
def set_images_iterator(self, img_itr):
self._images_iterator = img_itr
self._images_iterator.read_image_factory = self.read_image_factory
def get_images_iterator(self):
return self._images_iterator
images_iterator = property(get_images_iterator, set_images_iterator)
def set_images_list(self, img_list):
self._img_list = img_list
def get_images_list(self):
return self._img_list
images_list = property(get_images_list, set_images_list)
def set_reference_image(self, file_name):
self._refimage = file_name
def get_reference_image(self):
return self._refimage
reference_image = property(get_reference_image, set_reference_image)
def get_read_image_factory(self):
return self._read_img_factory
def set_read_image_factory(self, rif):
self._read_img_factory = rif
if self.images_iterator:
self.images_iterator.read_image_factory = rif
read_image_factory = property(
        get_read_image_factory, set_read_image_factory)
def execute(self):
        raise NotImplementedError(
            " %s : is virtual and must be overridden." % sys._getframe().f_code.co_name)
def get_resulting_image(self):
return self._resimg
def set_resulting_image(self, resarr):
self._resimg = resarr
resulting_image = property(get_resulting_image, set_resulting_image)
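# Editor's sketch: a concrete procedure only needs to override execute() and
# set resulting_image. The subclass below is hypothetical, and the "pixels"
# attribute used to read data from each Image is an assumption about the
# imgmerge API, shown purely for illustration:
#
#     class MergeProcedureMean(MergeProcedureVirtual):
#         def execute(self):
#             frames = [np.asarray(img.pixels, dtype=np.float64)
#                       for img in self.images_iterator]
#             merged = Image()
#             merged.pixels = np.mean(frames, axis=0).astype(get_dtype(16))
#             self.resulting_image = merged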
nfredrik/pyModelStuff | samples/populations/test/test_filter.py | Python | bsd-3-clause | 247 | 0.004049
cases = [
    ('pmt.py -s 1 -n 20 populations, first without state filter',
'pmt.py -s 1 -n 20 populations'),
('pmt.py -s 2 -n 20 populations filter3, state filter limits population to 3',
     'pmt.py -s 2 -n 20 populations filter3')
]
surru/Three-Musketeers-Game | multiagent/main.py | Python | mit | 113 | 0.026549
import view
try:
view.main()
except:
    print('Invalid List Format')
view.terminate()