| repo_name (string, len 5–100) | path (string, len 4–231) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, len 0–8.16k) | middle (string, len 3–512) | suffix (string, len 0–8.17k) |
|---|---|---|---|---|---|---|---|---|
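Each row is a fill-in-the-middle (FIM) sample: concatenating the `prefix`, `middle`, and `suffix` cells recovers the original source span, and the bare `|` lines below mark those cell boundaries. A minimal sketch of loading and reassembling one record follows; the hub ID `org/python-fim` and the use of the `datasets` library are illustrative assumptions, since the dataset's actual name is not given on this page.

```python
# Minimal sketch, assuming the columns listed in the header above.
# "org/python-fim" is a hypothetical hub ID, not the dataset's real name.
from datasets import load_dataset

ds = load_dataset("org/python-fim", split="train")
row = ds[0]

# A FIM record reassembles into the original file content:
source = row["prefix"] + row["middle"] + row["suffix"]
print(row["repo_name"], row["path"], row["license"], row["size"], row["score"])
print(source)
```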
sigdeletras/dxf2gmlcatastro | ejemplo/catastroqgis.py | Python | gpl-3.0 | 383 | 0.007853 |
# -*- coding: utf-8 -*-
import dxf2gmlcatastro
# working directory
path = '/carpeta/archivos/'
# define files
dxf = 'parcelacad.dxf'
gml = 'catastrogml.gml'
# define variables
|
dxffile = path + dxf
gmlfile = path + gml
src = '25830'
|
# create the GML file
dxf2gmlcatastro.crea_gml(dxffile, gmlfile, src)
# add the GML layer to QGIS
layer = iface.addVectorLayer(gmlfile, "gmlcatastro", "ogr")
|
GrognardsFromHell/TemplePlus | tpdatasrc/tpgamefiles/rules/char_class/class029_loremaster.py | Python | mit | 2,110 | 0.030806 |
from toee import *
import char_class_utils
###################################################
def GetConditionName():
return "Loremaster"
|
def GetSpellCasterConditionName():
return "Loremaster Spellcasting"
def GetCategory():
return "Core 3.5 Ed Prestige Classes"
def GetClassDefinitionFlags():
return CDF_CoreClass
def GetClassHelpTopic():
return "TAG_LOREMASTERS"
classEnum = stat_level_loremaster
###################################################
|
class_feats = {
}
class_skills = (skill_alchemy, skill_appraise, skill_concentration, skill_decipher_script, skill_gather_information, skill_handle_animal, skill_heal, skill_knowledge_all, skill_perform, skill_profession, skill_spellcraft, skill_use_magic_device)
def IsEnabled():
return 0
def GetHitDieType():
return 4
def GetSkillPtsPerLevel():
return 4
def GetBabProgression():
return base_attack_bonus_non_martial
def IsFortSaveFavored():
return 0
def IsRefSaveFavored():
return 0
def IsWillSaveFavored():
return 1
def GetSpellListType():
return spell_list_type_any
def IsClassSkill(skillEnum):
return char_class_utils.IsClassSkill(class_skills, skillEnum)
def IsClassFeat(featEnum):
return char_class_utils.IsClassFeat(class_feats, featEnum)
def GetClassFeats():
return class_feats
def IsAlignmentCompatible( alignment):
return 1
def LoremasterFeatPrereq(obj):
numFeats = 0
loremasterFeats = (feat_empower_spell, feat_enlarge_spell, feat_extend_spell, feat_heighten_spell, feat_maximize_spell, feat_silent_spell, feat_quicken_spell , feat_still_spell, feat_widen_spell, feat_persistent_spell, feat_scribe_scroll, feat_brew_potion, feat_craft_magic_arms_and_armor, feat_craft_rod, feat_craft_staff, feat_craft_wand, feat_craft_wondrous_item)
for p in loremasterFeats:
if obj.has_feat(p):
numFeats = numFeats + 1
if (numFeats >= 3):
return 1
return 0
def ObjMeetsPrereqs( obj ):
return 0 # WIP
if (not LoremasterFeatPrereq(obj)):
return 0
if (obj.stat_level_get(stat_level) < 7): # in lieu of Knowledge ranks
return 0
# todo check seven divination spells... bah..
return 1
|
kalev/anaconda | scripts/getlangnames.py | Python | gpl-2.0 | 1,386 | 0.002886 |
#
# getlangnames.py
#
# Copyright (C) 2007 Red Hat, Inc. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
#
import sys
sys.path.append("..")
import localeinfo
import gettext
localeInfo = localeinfo.get("en_US.UTF-8")
names = {}
for k in localeInfo.keys():
found = False
for l in localeinfo.expandLangs(k):
try:
f = open("../po/%s.gmo" % (l,), "rb")
except (OSError, IOError):
continue
cat = gettext.GNUTranslations(f)
cat.set_output_charset("utf-8")
|
names[localeInfo[k][0]] = cat.lgettext(localeInfo[k][0])
found = True
break
if not found:
names[localeInfo[k][0]] = localeInfo[k][0]
nameList = sorted(names.keys())
for k in nameList:
print("%s\t%s" % (k, names[k]))
|
tupes/School | CS175/Asn2/Reversi/model.py | Python | gpl-3.0 | 5,899 | 0.003051 |
class ReversiLogic(object):
def __init__(self):
self.turn = 'b'
self.board = Board()
# Check if the current self.turn can play. If not, change to the other
# player. If he can't play either, return None (game over).
def get_whose_turn(self):
if self.can_play():
return self.turn
else:
self.turn = switch(self.turn)
if self.can_play():
return self.turn
return None
# Iterate through every square on the board until a legal move is
# found. If none is found, the current self.turn can't play.
def can_play(self):
for row in range(8):
for col in range(8):
if self.legal_attacks(row, col):
return True
return False
# Get a list of all of the attack lines created by playing this square.
# If it's empty, the move is invalid. Otherwise, change the square and
# flip the pieces on the attack lines. Return changed squares as a list of
# Square objects to be graphically changed.
def make_move(self, row, col):
directions = self.legal_attacks(row, col)
if not directions: return False
self.board.squares[get_key(row, col)] = self.turn
self.turn = switch(self.turn)
return self.board.flip(row, col, directions)
# If there's already a piece on this square, it can't be a legal move.
# Otherwise, return a (possibly empty) list of all of the attack lines.
def legal_attacks(self, row, col):
if self.board.squares[get_key(row, col)]: return False
return self.board.attacks(row, col, self.turn)
# Return the number of the given player's pieces on the board.
def count_pieces(self, color):
pieces = 0
for row in range(8):
for col in range(8):
if self.board.squares[get_key(row, col)] == color:
pieces += 1
return pieces
# Stores a dictionary of each square's color, initialized to None.
# Each key is a concatenated string in row+col format. See get_key().
class Board(object):
def __init__(self):
self.squares = {}
for row in range(8):
for col in range(8):
key = get_key(row, col)
self.squares[key] = None
# Create the starting pieces.
self.squares['33'] = 'w'
self.squares['34'] = 'b'
self.squares['43'] = 'b'
self.squares['44'] = 'w'
steps = [-1, 0, 1]
steps = [(r_step, c_step) for r_step in steps for c_step in steps]
steps.remove((0, 0))
# Steps is a list of tuples, representing all possible directions from
# a given square. Tuple is in (row_step, col_step) format.
self.steps = steps
def attacks(self, row, col, color):
attack_lines = []
opponent = switch(color)
# Check in every adjacent square for the opponent's color.
for direction in self.steps:
row_step = direction[0]
col_step = direction[1]
# Use a try statement because some adjacent squares will be
# off the board, resulting in a key error.
try:
key = get_key(row + row_step, col + col_step)
# If adjacent square contains the opponent, continue in that
# direction to determine if it meets up with a player's piece.
if self.squares[key] == opponent:
row_index = row
col_index = col
while 1:
row_index += row_step
col_index += col_step
key = get_key(row_index, col_index)
# Check to see if there's a piece on this square.
if self.squares[key]:
# Now check if the piece is one of the players.
if self.squares[key] != opponent:
# We have found an attack line.
attack_lines.append(direction)
# Break from this direction to try others.
break
# Found an empty square. Move on to the next direction
else: break
# If we check a square not on the board, just move on to the next.
except KeyError: continue
return attack_lines
def flip(self, row, col, directions):
# target is the color we'll be changing to.
target = self.squares[get_key(row, col)]
# squares is the list of squares that need to be graphically updated.
squares = []
# Each direction is an attack line.
for direction in directions:
row_index = row
col_index = col
# Continue flipping pieces in this direction until target is found
while 1:
row_index += direction[0]
col_index += direction[1]
key = get_key(row_index, col_index)
if self.squares[key] == target: break
# Flip piece.
self.squares[key] = target
# Add this square to list that must be graphically updated.
squares.append(Square(row_index, col_index))
# The played square must be graphically updated too.
squares.append(Square(row, col))
return squares
# Simple data storage object to return to the main function.
# Each square returned must be updated.
class Square(object):
def __init__(self, row, col):
self.row = str(row)
|
self.col = str(col)
# UTILITY FUNCTIONS
def get_key(row, col):
return str(row) + str(col)
def switch(color):
if color == 'b': return 'w'
|
elif color == 'w': return 'b'
|
cloudify-cosmo/cloudify-manager | rest-service/manager_rest/test/endpoints/test_filters.py | Python | apache-2.0 | 22,691 | 0 |
from cloudify.models_states import VisibilityState
from cloudify_rest_client.exceptions import CloudifyClientError
from manager_rest.test import base_test
from manager_rest.utils import get_formatted_timestamp
from manager_rest.manager_exceptions import BadFilterRule
from manager_rest.storage import models, get_storage_manager
from manager_rest.constants import AttrsOperator, LabelsOperator
from manager_rest.rest.filters_utils import (FilterRule,
create_filter_rules_list)
FILTER_ID = 'filter'
LEGAL_FILTER_RULES = [
FilterRule('a', ['b'], LabelsOperator.ANY_OF, 'label'),
FilterRule('a', ['b@c#d& ,:'], LabelsOperator.ANY_OF, 'label'),
FilterRule('e', ['f', 'g*%'], LabelsOperator.ANY_OF, 'label'),
FilterRule('c', ['d'], LabelsOperator.NOT_ANY_OF, 'label'),
FilterRule('h', ['i', 'j'], LabelsOperator.NOT_ANY_OF, 'label'),
FilterRule('k', [], LabelsOperator.IS_NULL, 'label'),
FilterRule('l', [], LabelsOperator.IS_NOT_NULL, 'label'),
FilterRule('created_by', ['user'], AttrsOperator.ANY_OF, 'attribute'),
FilterRule('created_by', ['user', 'admin'], AttrsOperator.ANY_OF,
'attribute'),
FilterRule('created_by', ['user'], AttrsOperator.NOT_ANY_OF, 'attribute'),
FilterRule('created_by', ['user', 'admin'], AttrsOperator.NOT_ANY_OF,
'attribute'),
FilterRule('created_by', ['user'], AttrsOperator.CONTAINS, 'attribute'),
FilterRule('created_by', ['user', 'admin'], AttrsOperator.CONTAINS,
'attribute'),
FilterRule('created_by', ['user'], AttrsOperator.NOT_CONTAINS,
'attribute'),
FilterRule('created_by', ['user', 'admin'], AttrsOperator.NOT_CONTAINS,
'attribute'),
FilterRule('created_by', ['user'], AttrsOperator.STARTS_WITH, 'attribute'),
FilterRule('created_by', ['user', 'admin'], AttrsOperator.STARTS_WITH,
'attribute'),
FilterRule('created_by', ['user'], AttrsOperator.ENDS_WITH, 'attribute'),
FilterRule('created_by', ['user', 'admin'], AttrsOperator.ENDS_WITH,
'attribute'),
FilterRule('created_by', [], AttrsOperator.IS_NOT_EMPTY, 'attribute'),
]
BLUEPRINT_SPECIFIC_FILTER_RULES = [
FilterRule('state', ['uploaded'], AttrsOperator.ANY_OF, 'attribute'),
FilterRule('state', ['uploaded', 'invalid'], AttrsOperator.ANY_OF,
'attribute'),
FilterRule('state', ['uploaded'], AttrsOperator.NOT_ANY_OF, 'attribute'),
FilterRule('state', ['uploaded', 'invalid'], AttrsOperator.NOT_ANY_OF,
'attribute'),
FilterRule('state', ['uploaded'], AttrsOperator.CONTAINS, 'attribute'),
|
FilterRule('state', ['uploaded', 'invalid'], AttrsOperator.CONTAINS,
'attribute'),
FilterRule('state', ['uploaded'], AttrsOperator.NOT_CONTAINS, 'attribute'),
|
FilterRule('state', ['uploaded', 'invalid'], AttrsOperator.NOT_CONTAINS,
'attribute'),
FilterRule('state', ['uploaded'], AttrsOperator.STARTS_WITH, 'attribute'),
FilterRule('state', ['uploaded', 'invalid'], AttrsOperator.STARTS_WITH,
'attribute'),
FilterRule('state', ['uploaded'], AttrsOperator.ENDS_WITH, 'attribute'),
FilterRule('state', ['uploaded', 'invalid'], AttrsOperator.ENDS_WITH,
'attribute'),
]
class FiltersFunctionalityBaseCase(base_test.BaseServerTestCase):
__test__ = False
LABELS = [{'a': 'b'}, {'a': 'z'}, {'c': 'd'}]
LABELS_2 = [{'a': 'b'}, {'c': 'z'}, {'e': 'f'}]
LABELS_3 = [{'g': 'f'}]
def setUp(self, resource_model):
super().setUp()
self.resource_model = resource_model
def _test_labels_filters_applied(self,
res_1_id,
res_2_id,
res_3_id,
res_4_id):
self.assert_filters_applied([('a', ['b'], LabelsOperator.ANY_OF,
'label')],
{res_1_id, res_2_id}, self.resource_model)
self.assert_filters_applied([('c', ['z'], LabelsOperator.NOT_ANY_OF,
'label')],
{res_1_id}, self.resource_model)
self.assert_filters_applied([('a', ['y', 'z'], LabelsOperator.ANY_OF,
'label'),
('c', ['d'], LabelsOperator.ANY_OF,
'label')],
{res_1_id}, self.resource_model)
self.assert_filters_applied([('a', ['b'], LabelsOperator.ANY_OF,
'label'),
('e', [], LabelsOperator.IS_NOT_NULL,
'label')],
{res_2_id}, self.resource_model)
self.assert_filters_applied([('a', ['b'], LabelsOperator.ANY_OF,
'label'),
('e', [], LabelsOperator.IS_NULL,
'label')],
{res_1_id}, self.resource_model)
self.assert_filters_applied([('a', [], LabelsOperator.IS_NULL,
'label')], {res_3_id, res_4_id},
self.resource_model)
self.assert_filters_applied([('c', ['z'], LabelsOperator.IS_NOT,
'label')],
{res_1_id, res_3_id, res_4_id},
self.resource_model)
def test_filter_rule_not_dictionary_fails(self):
with self.assertRaisesRegex(BadFilterRule, 'not a dictionary'):
create_filter_rules_list(['a'], self.resource_model)
def test_filter_rule_missing_entry_fails(self):
with self.assertRaisesRegex(BadFilterRule, 'missing'):
create_filter_rules_list([{'key': 'key1'}], self.resource_model)
def test_filter_rule_key_not_text_type_fails(self):
with self.assertRaisesRegex(BadFilterRule, 'must be a string'):
err_filter_rule = {'key': 1, 'values': ['b'],
'operator': LabelsOperator.ANY_OF,
'type': 'label'}
create_filter_rules_list([err_filter_rule], self.resource_model)
def test_filter_rule_value_not_list_fails(self):
with self.assertRaisesRegex(BadFilterRule, 'must be a list'):
err_filter_rule = {'key': 'a', 'values': 'b',
'operator': LabelsOperator.ANY_OF,
'type': 'label'}
create_filter_rules_list([err_filter_rule], self.resource_model)
def test_parse_filter_rules_fails(self):
err_filter_rules_params = [
(('a', ['b'], 'bad_operator', 'label'),
'operator for filtering by labels must be one of'),
(('a', ['b'], LabelsOperator.IS_NULL, 'label'),
'list must be empty if the operator'),
(('a', ['b'], LabelsOperator.IS_NOT_NULL, 'label'),
'list must be empty if the operator'),
(('a', [], LabelsOperator.ANY_OF, 'label'),
'list must include at least one item if the operator'),
(('blueprint_id', ['b'], 'bad_operator', 'attribute'),
'The operator for filtering by attributes must be'),
(('bad_attribute', ['dep1'], LabelsOperator.ANY_OF, 'attribute'),
'Allowed attributes to filter deployments|blueprints by are'),
(('a', ['b'], LabelsOperator.ANY_OF, 'bad_type'),
'Filter rule type must be one of'),
(('bad_attribute', ['dep1'], LabelsOperator.ANY_OF, 'bad_type'),
'Filter rule type must be one of')
]
for params, err_msg in err_filter_rules_params:
with self.assertRaisesRegex(BadFilterRule, err_msg):
create_filter_rules_list([FilterRule(*params)],
self.resource_model)
def test_key_and_value_validation_fails(self):
|
kfdm/django-simplestats | quickstats/urls.py | Python | mit | 1,550 | 0.006452 |
from . import views
from django.urls import path
urlpatterns = [
path("", views.PublicWidgets.as_view(), n
|
ame="home"),
path("create/widget", views.WidgetCreate.as_view(), name="widget-create"),
path("subscription/<pk>/delete", views.S
|
ubscriptionDelete.as_view(), name="subscription-delete"),
path("user/<username>/subscriptions", views.SubscriptionListView.as_view(), name="subscriptions"),
path("user/<username>/widgets", views.UserWidgets.as_view(), name="widget-user"),
# Widget Views
path("widget", views.WidgetListView.as_view(), name="widget-list"),
path("widget/<pk>", views.WidgetDetailView.as_view(), name="widget-detail"),
path("widget/<pk>/delete", views.WidgetDelete.as_view(), name="widget-delete"),
path("widget/<pk>/subscribe", views.WidgetSubscription.as_view(), name="widget-subscribe"),
path("widget/<pk>/unsubscribe", views.WidgetUnsubscribe.as_view(), name="widget-unsubscribe"),
path("widget/<pk>/update", views.WidgetUpdate.as_view(), name="widget-update"),
path("widget/<pk>/increment", views.StreakIncrement.as_view(), name="streak-increment"),
# Filtered List Views
path("filter/<type>", views.FilterList.as_view(), name="widget-type"),
# Misc Views
path("recent/waypoints", views.WaypointList.as_view(), name="waypoint-list"),
path("recent/samples", views.SampleList.as_view(), name="sample-list"),
path("recent/scrapes", views.ScrapeList.as_view(), name="scrape-list"),
path("recent/shares", views.ShareList.as_view(), name="share-list"),
]
|
fxia22/ASM_xf | PythonD/site_python/twisted/internet/stdio.py | Python | gpl-2.0 | 3,287 | 0.003347 |
# Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Standard input/out/err support.
API Stability: semi-stable
Future Plans: support for stderr, perhaps
Maintainer: U{Itamar Shtull-Trauring<mailto:twisted@itamarst.org>}
"""
# system imports
|
import sys, os, select, errno
# Sibling Imports
import abstract, fdesc, protocol
from main import CONNECTION_LOST
_stdio_in_use = 0
class StandardIOWriter(abstract.FileDescriptor):
connected = 1
ic = 0
def __init__(self):
abstract.FileDescriptor.__init__(self)
self.fileno = sys.__stdout__.fileno
fdesc.setNonBlocking(self.fileno())
|
def writeSomeData(self, data):
try:
return os.write(self.fileno(), data)
except IOError, io:
if io.args[0] == errno.EAGAIN:
return 0
elif io.args[0] == errno.EPERM:
return 0
return CONNECTION_LOST
except OSError, ose:
if ose.errno == errno.EPIPE:
return CONNECTION_LOST
if ose.errno == errno.EAGAIN:
return 0
raise
def connectionLost(self, reason):
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fileno())
class StandardIO(abstract.FileDescriptor):
"""I can connect Standard IO to a twisted.protocol
I act as a selectable for sys.stdin, and provide a write method that writes
to stdout.
"""
def __init__(self, protocol):
"""Create me with a protocol.
This will fail if a StandardIO has already been instantiated.
"""
abstract.FileDescriptor.__init__(self)
global _stdio_in_use
if _stdio_in_use:
raise RuntimeError, "Standard IO already in use."
_stdio_in_use = 1
self.fileno = sys.__stdin__.fileno
fdesc.setNonBlocking(self.fileno())
self.protocol = protocol
self.startReading()
self.writer = StandardIOWriter()
self.protocol.makeConnection(self)
def write(self, data):
"""Write some data to standard output.
"""
self.writer.write(data)
def doRead(self):
"""Some data's readable from standard input.
"""
return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
def closeStdin(self):
"""Close standard input.
"""
self.writer.loseConnection()
def connectionLost(self, reason):
"""The connection was lost.
"""
self.protocol.connectionLost()
|
MTgeophysics/mtpy | legacy/modem_data_to_phase_tensor.py | Python | gpl-3.0 | 919 | 0.007617 |
#!/bin/env python
# -*- coding: utf-8 -*-
"""
Description:
Compute Phase Tensors from ModEM Dat File and output to CSV files
Usage Examples:
|
python scripts/modem_data_to_phase_tensor.py examples/data/ModEM_files/Modular_MPI_NLCG_028.dat [OutDir]
|
python scripts/modem_data_to_phase_tensor.py /e/MTPY2_Outputs/GA_UA_edited_10s-10000s_modem_inputs/ModEM_Data.dat [OutDir]
Developer: fei.zhang@ga.gov.au
LastUpdate: 08/09/2017
LastUpdate: 05/12/2017 FZ moved the function into the module mtpy.modeling.modem.Data
LastUpdate: 21/02/2018 Added command for running the script
"""
import sys, os
from mtpy.modeling.modem import Data
from mtpy.mtpy_globals import NEW_TEMP_DIR
if __name__ == "__main__":
file_dat = sys.argv[1]
if len(sys.argv)>2:
outdir = sys.argv[2]
else:
outdir=NEW_TEMP_DIR
obj = Data()
obj.compute_phase_tensor(file_dat, outdir)
|
minhphung171093/GreenERP_V9 | openerp/addons/account/report/account_invoice_report.py | Python | gpl-3.0 | 9,959 | 0.004719 |
# -*- coding: utf-8 -*-
from openerp import tools
from openerp import models, fields, api
class AccountInvoiceReport(models.Model):
_name = "account.invoice.report"
_description = "Invoices Statistics"
_auto = False
_rec_name = 'date'
@api.multi
@api.depends('currency_id', 'date', 'price_total', 'price_average', 'residual')
def _compute_amounts_in_user_currency(self):
"""Compute the amounts in the currency of the user
"""
context = dict(self._context or {})
user_currency_id = self.env.user.company_id.currency_id
currency_rate_id = self.env['res.currency.rate'].search([
('rate', '=', 1),
'|', ('company_id', '=', self.env.user.company_id.id), ('company_id', '=', False)], limit=1)
base_currency_id = currency_rate_id.currency_id
ctx = context.copy()
for record in self:
ctx['date'] = record.date
record.user_currency_price_total = base_currency_id.with_context(ctx).compute(record.price_total, user_currency_id)
record.user_currency_price_average = base_currency_id.with_context(ctx).compute(record.price_average, user_currency_id)
record.user_currency_residual = base_currency_id.with_context(ctx).compute(record.residual, user_currency_id)
date = fields.Date(readonly=True)
product_id = fields.Many2one('product.product', string='Product', readonly=True)
product_qty = fields.Float(string='Product Quantity', readonly=True)
uom_name = fields.Char(string='Reference Unit of Measure', readonly=True)
payment_term_id = fields.Many2one('account.payment.term', string='Payment Term', oldname='payment_term', readonly=True)
fiscal_position_id = fields.Many2one('account.fiscal.position', oldname='fiscal_position', string='Fiscal Position', readonly=True)
currency_id = fields.Many2one('res.currency', string='Currency', readonly=True)
categ_id = fields.Many2one('product.category', string='Product Category', readonly=True)
journal_id = fields.Many2one('account.journal', string='Journal', readonly=True)
partner_id = fields.Many2one('res.partner', string='Partner', readonly=True)
commercial_partner_id = fields.Many2one('res.partner', string='Partner Company', help="Commercial Entity")
company_id = fields.Many2one('res.company', string='Company', readonly=True)
user_id = fields.Many2one('res.users', string='Salesperson', readonly=True)
price_total = fields.Float(string='Total Without Tax', readonly=True)
user_currency_price_total = fields.Float(string="Total Without Tax", compute='_compute_amounts_in_user_currency', digits=0)
price_average = fields.Float(string='Average Price', readonly=True, group_operator="avg")
user_currency_price_average = fields.Float(string="Average Price", compute='_compute_amounts_in_user_currency', digits=0)
currency_rate = fields.Float(string='Currency Rate', readonly=True)
nbr = fields.Integer(string='# of Invoices', readonly=True) # TDE FIXME master: rename into nbr_lines
type = fields.Selection([
('out_invoice', 'Customer Invoice'),
('in_invoice', 'Vendor Bill'),
('out_refund', 'Customer Refund'),
('in_refund', 'Vendor Refund'),
], readonly=True)
state = fields.Selection([
('draft', 'Draft'),
('proforma', 'Pro-forma'),
('proforma2', 'Pro-forma'),
('open', 'Open'),
('paid', 'Done'),
('cancel', 'Cancelled')
], string='Invoice Status', readonly=True)
date_due = fields.Date(string='Due Date', readonly=True)
account_id = fields.Many2one('account.account', string='Account', readonly=True, domain=[('deprecated', '=', False)])
account_line_id = fields.Many2one('account.account', string='Account Line', readonly=True, domain=[('deprecated', '=', False)])
partner_bank_id = fields.Many2one('res.partner.bank', string='Bank Account', readonly=True)
residual = fields.Float(string='Total Residual', readonly=True)
user_currency_residual = fields.Float(string="Total Residual", compute='_compute_amounts_in_user_currency', digits=0)
country_id = fields.Many2one('res.country', string='Country of the Partner Company')
_order = 'date desc'
_depends = {
'account.invoice': [
'account_id', 'amount_total_company_signed', 'commercial_partner_id', 'company_id',
'currency_id', 'date_due', 'date_invoice', 'fiscal_position_id',
'journal_id', 'partner_bank_id', 'partner_id', 'payment_term_id',
'residual', 'state', 'type', 'user_id',
],
'account.invoice.line': [
'account_id', 'invoice_id', 'price_subtotal', 'product_id',
'quantity', 'uom_id', 'account_analytic_id',
],
'product.product': ['product_tmpl_id'],
'product.template': ['categ_id'],
'product.uom': ['category_id', 'factor', 'name', 'uom_type'],
'res.currency.rate': ['currency_id', 'name'],
'res.partner': ['country_id'],
}
def _select(self):
select_str = """
SELECT sub.id, sub.date, sub.product_id, sub.partner_id, sub.country_id, sub.account_analytic_id,
sub.payment_term_id, sub.uom_name, sub.currency_id, sub.journal_id,
sub.fiscal_position_id, sub.user_id, sub.company_id, sub.nbr, sub.type, sub.state,
sub.categ_id, sub.date_due, sub.account_id, sub.account_line_id, sub.partner_bank_id,
sub.product_qty, sub.price_total as price_total, sub.price_average as price_average,
COALESCE(cr.rate, 1) as currency_rate, sub.residual as residual, sub.commercial_partner_id as commercial_partner_id
"""
return select_str
def _sub_select(self):
select_str = """
SELECT min(ail.id) AS id,
|
ai.date_invoice AS date,
ail.product_id, ai.partner_id, ai.payment_term_id, ail.account_analytic_id,
u2.name AS uom_name,
ai.currency_id, ai.journal_id, ai.fiscal_position_id, ai.user_id, ai.company_id,
count(ail.*) AS nbr,
|
ai.type, ai.state, pt.categ_id, ai.date_due, ai.account_id, ail.account_id AS account_line_id,
ai.partner_bank_id,
SUM(CASE
WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
THEN (- ail.quantity) / u.factor * u2.factor
ELSE ail.quantity / u.factor * u2.factor
END) AS product_qty,
SUM(ail.price_subtotal_signed) AS price_total,
SUM(ail.price_subtotal_signed) / CASE
WHEN SUM(ail.quantity / u.factor * u2.factor) <> 0::numeric
THEN CASE
WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
THEN SUM((- ail.quantity) / u.factor * u2.factor)
ELSE SUM(ail.quantity / u.factor * u2.factor)
END
ELSE 1::numeric
END AS price_average,
ai.residual_company_signed / (SELECT count(*) FROM account_invoice_line l where invoice_id = ai.id) *
count(*) AS residual,
ai.commercial_partner_id as commercial_partner_id,
partner.country_id
"""
return select_str
def _from(self):
from_str = """
FROM account_invoice_line ail
JOIN account_invoice ai ON ai.id = ail.invoice_id
JOIN res_partner partner ON ai.commercial_partner_id = partner.id
LEFT JOIN product_product pr ON pr.id = ail.product_id
left JOIN product_template pt ON pt.id = pr.product_tmpl_id
LEFT JOIN product_uom u ON u.id = ail.uom_id
|
erigones/esdc-ce | pdns/migrations/0001_initial.py | Python | apache-2.0 | 9,038 | 0.003098 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Domain',
fields=[
('id', models.AutoField(help_text=b'This field is used to easily manage the domains with this number as an unique handle.', serialize=False, primary_key=True)),
('name', models.CharField(help_text=b'This field is the actual domain name. This is the field that powerDNS matches to when it gets a request. The domain name should be in the format of: domainname.TLD.', unique=True, max_length=255, verbose_name='Name')),
('master', models.CharField(default=None, max_length=128, null=True, verbose_name='Master', help_text=b'This describes the master nameserver from which this domain should be slaved.')),
('last_check', models.IntegerField(default=None, help_text=b'Last time this domain was checked for freshness.', null=True, verbose_name='Last check')),
('type', models.CharField(help_text=b'Type of the domain.', max_length=6, verbose_name='Type', db_index=True, choices=[(b'MASTER', b'MASTER'), (b'SLAVE', b'SLAVE'), (b'NATIVE', b'NATIVE'), (b'SUPERSLAVE', b'SUPERSLAVE')])),
('notified_serial', models.IntegerField(default=None, help_text=b'The last notified serial of a master domain. This is updated from the SOA record of the domain.', null=True, verbose_name='Notified serial')),
('account', models.CharField(default=None, max_length=40, null=True, verbose_name='Account', help_text=b'Determine if a certain host is a supermaster for a certain domain name.')),
('user', models.IntegerField(default=None, help_text=b'Field representing the user ID responsible for the domain. Added by Erigones.', null=True, verbose_name='User', db_index=True)),
('desc', models.CharField(help_text=b'Added by Erigones.', max_length=128, verbose_name='Description', blank=True)),
('access', models.SmallIntegerField(default=3, help_text=b'Added by Erigones.', verbose_name='Access', choices=[(1, 'Public'), (3, 'Private')])),
('created', models.DateTimeField(auto_now_add=True, help_text=b'Added by Erigones.', null=True, verbose_name='Created')),
('changed', models.DateTimeField(auto_now=True, help_text=b'Added by Erigones.', null=True, verbose_name='Last changed')),
('dc_bound', models.IntegerField(default=None, help_text=b'Datacenter ID used for DC-bound DNS records. Added by Erigones.', null=True, verbose_name='Datacenter')),
],
options={
'db_table': 'domains',
'verbose_name': 'Domain',
'verbose_name_plural': 'Domains',
},
),
migrations.CreateModel(
name='Record',
fields=[
('id', models.AutoField(help_text=b'This field is used to easily manage the records with this number as an unique handle.', serialize=False, primary_key=True)),
('name', models.CharField(default=None, max_length=255, help_text=b'What URI the dns-server should pick up on. For example www.test.com.', null=True, verbose_name='Name', db_index=True)),
('type', models.CharField(default=None, choices=[(b'SOA', b'SOA'), (b'NS', b'NS'), (b'MX', b'MX'), (b'A', b'A'), (b'AAAA', b'AAAA'), (b'CNAME', b'CNAME'), (b'TXT', b'TXT'), (b'PTR', b'PTR'), (b'SRV', b'SRV'), (b'SPF', b'SPF'), (b'HINFO', b'HINFO'), (b'NAPTR', b'NAPTR'), (b'SSHFP', b'SSHFP'), (b'RP', b'RP'), (b'LOC', b'LOC'), (b'KEY', b'KEY'), (b'CERT', b'CERT'), (b'TLSA', b'TLSA')], max_length=6, help_text=b'The ASCII representation of the qtype of this record.', null=True, verbose_name='Type')),
('content', models.CharField(default=None, max_length=65535, null=True, verbose_name='Content', help_text=b'Is the answer of the DNS-query and the content depend on the type field.')),
('ttl', models.IntegerField(default=None, help_text=b'How long the DNS-client are allowed to remember this record. Also known as Time To Live (TTL) This value is in seconds.', null=True, verbose_name='TTL')),
('prio', models.IntegerField(default=None, help_text=b'This field sets the priority of an MX-field.', null=True, verbose_name='Priority')),
('change_date', models.IntegerField(default=None, help_text=b'Timestamp for the last update.', null=True, verbose_name='Changed')),
('disabled', models.BooleanField(default=False, help_text=b'If set to true, this record is hidden from DNS clients, but can still be modified from the REST API.', verbose_name='Disabled?')),
('ordername', models.CharField(default=None, max_length=255, null=True, verbose_name='Ordername')),
('auth', models.BooleanField(default=True, verbose_name='Auth')),
('domain', models.ForeignKey(db_constraint=False, db_column=b'domain_id', default=None, to='pdns.Domain', help_text=b'This field binds the current record to the unique handle(the id-field) in the domains-table.', null=True)),
],
options={
'db_table': 'records',
'verbose_name': 'Record',
'verbose_name_plural': 'Records',
},
),
migrations.AlterIndexTogether(
name='record',
index_together=set([('name', 'type')]),
),
# Update domains table
migrations.RunSQL("""
|
ALTER TABLE domains ADD CONSTRAINT c_lowercase_name CHECK (((name)::TEXT = lower((name)::TEXT)));
ALTER TABLE domains ALTER COLUMN "access" SET DEFAULT 3;
ALTER TABLE domains ALTER COLUMN "desc" SET DEFAULT '';
ALTER TABLE domains ALTER COLUMN "user" SET DEFAULT 1;
GRANT ALL ON domains TO pdns;
GRANT ALL ON domains_id_seq TO pdns;
|
"""),
# Update records table
migrations.RunSQL("""
ALTER TABLE records ADD CONSTRAINT c_lowercase_name CHECK (((name)::TEXT = lower((name)::TEXT)));
ALTER TABLE records ADD CONSTRAINT domain_exists FOREIGN KEY(domain_id) REFERENCES domains(id) ON DELETE CASCADE;
ALTER TABLE records ALTER COLUMN "disabled" SET DEFAULT false;
ALTER TABLE records ALTER COLUMN "auth" SET DEFAULT false;
CREATE INDEX recordorder ON records (domain_id, ordername text_pattern_ops);
GRANT ALL ON records TO pdns;
GRANT ALL ON records_id_seq TO pdns;
"""),
# Create other PowerDNS tables
migrations.RunSQL("""
CREATE TABLE supermasters (
ip INET NOT NULL,
nameserver VARCHAR(255) NOT NULL,
account VARCHAR(40) NOT NULL,
PRIMARY KEY(ip, nameserver)
);
GRANT ALL ON supermasters TO pdns;
CREATE TABLE comments (
id SERIAL PRIMARY KEY,
domain_id INT NOT NULL,
name VARCHAR(255) NOT NULL,
type VARCHAR(10) NOT NULL,
modified_at INT NOT NULL,
account VARCHAR(40) DEFAULT NULL,
comment VARCHAR(65535) NOT NULL,
CONSTRAINT domain_exists
FOREIGN KEY(domain_id) REFERENCES domains(id)
ON DELETE CASCADE,
CONSTRAINT c_lowercase_name CHECK (((name)::TEXT = LOWER((name)::TEXT)))
);
CREATE INDEX comments_domain_id_idx ON comments (domain_id);
CREATE INDEX comments_name_type_idx ON comments (name, type);
CREATE INDEX comments_order_idx ON comments (domain_id, modified_at);
GRANT ALL ON comments TO pdns;
GRANT ALL ON comments_id_seq TO pdns;
CREATE TABLE domainmetadata (
id SERIAL PRIMARY KEY,
domain_id INT REFERENCES domains(id) ON DELETE CASCADE,
kind VARCHAR(32),
content TEXT
);
CREATE INDEX domainidmetaindex ON domainmetadata(domain_id);
GRANT ALL ON domainmetadata TO pdns;
GRANT ALL ON domainmetadata_id_seq TO pdns;
CR
|
hzlf/openbroadcast | website/djangoratings/forms.py | Python | gpl-3.0 | 100 | 0.02 |
|
from django import forms
__all__ = ('RatingField',)
|
class RatingField(forms.ChoiceField):
pass
|
passalis/sef | examples/supervised_reduction.py | Python | mit | 2,019 | 0.005448 |
# License: MIT License https://github.com/passalis/sef/blob/master/LICENSE.txt
from __future__ import absolute_import, division, print_function, unicode_literals
import numpy as np
import sklearn
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sef_dr.classification import evaluate_svm
from sef_dr.datasets import load_mnist
from sef_dr.linear import LinearSEF
def supervised_reduction(method=None):
# Load data and init seeds
train_data, train_labels, test_data, test_labels = load_mnist(dataset_path='data')
np.random.seed(1)
sklearn.utils.check_random_state(1)
n_train = 5000
n_classes = len(np.unique(train_labels))
if method == 'lda':
proj = LinearDiscriminantAnalysis(n_components=n_classes - 1)
proj.fit(train_data[:n_train, :], train_labels[:n_train])
elif method == 's-lda':
proj = LinearSEF(train_data.shape[1], output_dimensionality=(n_classes - 1))
proj.cuda()
|
loss = proj.fit(data=train_data[:n_train, :], target_labels=train_labels[:n_train], epochs=50,
target='supervised', batch_size=128, regularizer_weight=1, learning_rate=0.001, verbose=True)
elif method == 's-lda-2x':
# SEF output dimensions are not limited
proj = LinearSEF(train_data.shape[1], output_dimensionality=2 * (n_classes - 1))
|
proj.cuda()
loss = proj.fit(data=train_data[:n_train, :], target_labels=train_labels[:n_train], epochs=50,
target='supervised', batch_size=128, regularizer_weight=1, learning_rate=0.001, verbose=True)
acc = evaluate_svm(proj.transform(train_data[:n_train, :]), train_labels[:n_train],
proj.transform(test_data), test_labels)
print("Method: ", method, " Test accuracy: ", 100 * acc, " %")
if __name__ == '__main__':
print("LDA: ")
supervised_reduction('lda')
print("S-LDA: ")
supervised_reduction('s-lda')
print("S-LDA (2x): ")
supervised_reduction('s-lda-2x')
|
neutrons/FastGR | addie/processing/mantid/master_table/table_tree_handler.py | Python | mit | 66,023 | 0.001287 |
from __future__ import (absolute_import, division, print_function)
from collections import OrderedDict
import numpy as np
import os
import pickle
from qtpy.QtWidgets import QDialog, QTreeWidgetItem, QTableWidgetItem, QMenu, QFileDialog, QApplication
from addie.utilities import load_ui
from qtpy import QtCore, QtGui
from addie.utilities.file_handler import FileHandler
from addie.processing.mantid.master_table.tree_definition import TREE_DICT, COLUMN_DEFAULT_WIDTH, CONFIG_FILE
from addie.processing.mantid.master_table.tree_definition import h1_COLUMNS_WIDTH, h2_COLUMNS_WIDTH, h3_COLUMNS_WIDTH
from addie.processing.mantid.master_table.table_row_handler import TableRowHandler
from addie.processing.mantid.master_table.table_plot_handler import TablePlotHandler
from addie.processing.mantid.master_table.selection_handler import CellsHandler, RowsHandler
from addie.processing.mantid.master_table.master_table_loader import TableFileLoader
from addie.processing.mantid.master_table.master_table_exporter import TableFileExporter
from addie.widgets.filedialog import get_save_file
try:
from addie.processing.mantid.master_table.import_from_database.oncat_authentication_handler import OncatAuthenticationHandler
|
import pyoncat # noqa
ONCAT_ENABLED = True
except ImportError:
print('pyoncat module not found. Functionality disabled')
ONCAT_ENABLED = False
class TableInitialization:
default_width = COLUMN_DEFAULT_WIDTH
table_headers = {}
table_width = {}
def __init__(self, main_window=None):
self.main_window = main_window
|
# self.parent = parent
# self.parent_ui = parent.processing_ui
self.tree_dict = TREE_DICT
def init_master_table(self):
# set h1, h2 and h3 headers
self.init_headers()
self.init_table_header(
table_ui=self.main_window.processing_ui.h1_table,
list_items=self.table_headers['h1'])
self.init_table_header(
table_ui=self.main_window.processing_ui.h2_table,
list_items=self.table_headers['h2'])
self.init_table_header(
table_ui=self.main_window.processing_ui.h3_table,
list_items=self.table_headers['h3'])
# set h1, h2 and h3 width
self.init_table_dimensions()
self.init_table_col_width(
table_width=self.table_width['h1'],
table_ui=self.main_window.processing_ui.h1_table)
self.init_table_col_width(
table_width=self.table_width['h2'],
table_ui=self.main_window.processing_ui.h2_table)
self.init_table_col_width(
table_width=self.table_width['h3'],
table_ui=self.main_window.processing_ui.h3_table)
self.h1_header_table = self.main_window.processing_ui.h1_table.horizontalHeader()
self.h2_header_table = self.main_window.processing_ui.h2_table.horizontalHeader()
self.h3_header_table = self.main_window.processing_ui.h3_table.horizontalHeader()
self.make_tree_of_column_references()
self.save_parameters()
def init_signals(self):
self.main_window.h1_header_table.sectionResized.connect(
self.main_window.resizing_h1)
self.main_window.h2_header_table.sectionResized.connect(
self.main_window.resizing_h2)
self.main_window.h3_header_table.sectionResized.connect(
self.main_window.resizing_h3)
self.main_window.processing_ui.h1_table.horizontalScrollBar(
).valueChanged.connect(self.main_window.scroll_h1_table)
self.main_window.processing_ui.h2_table.horizontalScrollBar(
).valueChanged.connect(self.main_window.scroll_h2_table)
self.main_window.processing_ui.h3_table.horizontalScrollBar(
).valueChanged.connect(self.main_window.scroll_h3_table)
def save_parameters(self):
self.main_window.h1_header_table = self.h1_header_table
self.main_window.h2_header_table = self.h2_header_table
self.main_window.h3_header_table = self.h3_header_table
self.main_window.table_columns_links = self.table_columns_links
self.main_window.table_width = self.table_width
self.main_window.table_headers = self.table_headers
self.main_window.tree_dict = self.tree_dict
def make_tree_of_column_references(self):
"""
table_columns_links = {'h1': [], 'h2': [], 'h3': []}
h1 = [0, 1, 2] # number of h1 columns
h2 = [[0], [1,2,3], [4]] link of h2 columns with h1
h3 = [ [[0]], [[1,2], [3,4], [5]], [[6,7,8]] ]
:return:
None
"""
h1 = []
h2 = []
h3 = []
h2_index = 0
h3_index = 0
td = self.tree_dict
for h1_index, _key_h1 in enumerate(td.keys()):
h1.append(h1_index)
if td[_key_h1]['children']:
_h2 = []
_h3_h2 = []
for _key_h2 in td[_key_h1]['children']:
if td[_key_h1]['children'][_key_h2]['children']:
_h3_h3 = []
for _key_h3 in td[_key_h1]['children'][_key_h2]['children']:
_h3_h3.append(h3_index)
h3_index += 1
_h3_h2.append(_h3_h3)
else:
# h2 does not have any h3 children
_h3_h2.append([h3_index])
h3_index += 1
_h2.append(h2_index)
h2_index += 1
h3.append(_h3_h2)
h2.append(_h2)
else:
# h1 does not have any h2 children
h2.append([h2_index])
h3.append([[h3_index]])
h2_index += 1
h3_index += 1
self.table_columns_links = {'h1': h1,
'h2': h2,
'h3': h3,
}
def init_table_col_width(self, table_width=[], table_ui=None):
for _col in np.arange(table_ui.columnCount()):
table_ui.setColumnWidth(_col, table_width[_col])
def init_table_dimensions(self):
table_width = {'h1': [], 'h2': [], 'h3': []}
# Trying manual input of table dimensions
table_width['h3'] = h3_COLUMNS_WIDTH
table_width['h2'] = h2_COLUMNS_WIDTH
table_width['h1'] = h1_COLUMNS_WIDTH
self.table_width = table_width
def init_headers(self):
td = self.tree_dict
table_headers = {'h1': [], 'h2': [], 'h3': []}
for _key_h1 in td.keys():
table_headers['h1'].append(td[_key_h1]['name'])
if td[_key_h1]['children']:
for _key_h2 in td[_key_h1]['children'].keys():
table_headers['h2'].append(
td[_key_h1]['children'][_key_h2]['name'])
if td[_key_h1]['children'][_key_h2]['children']:
for _key_h3 in td[_key_h1]['children'][_key_h2]['children'].keys(
):
table_headers['h3'].append(
td[_key_h1]['children'][_key_h2]['children'][_key_h3]['name'])
else:
table_headers['h3'].append('')
else:
table_headers['h2'].append('')
table_headers['h3'].append('')
self.table_headers = table_headers
def init_table_header(self, table_ui=None, list_items=None):
table_ui.setColumnCount(len(list_items))
for _index, _text in enumerate(list_items):
item = QTableWidgetItem(_text)
table_ui.setHorizontalHeaderItem(_index, item)
class TableTreeHandler:
def __init__(self, parent=None):
if parent.table_tree_ui is None:
parent.table_tree_ui = TableTree(parent=parent)
if parent.table_tree_ui_position:
parent.table_tree_ui.move(parent.table_tree_ui_position)
parent.table_tree_ui.show()
else:
parent.table_tree_ui.activateWindow()
|
bnewbold/diffoscope | tests/comparators/test_debian.py | Python | gpl-3.0 | 5,786 | 0.004842 |
# -*- coding: utf-8 -*-
#
# diffoscope: in-depth comparison of files, archives, and directories
#
# Copyright © 2015 Jérémy Bobbio <lunar@debian.org>
#
# diffoscope is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# diffoscope is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with diffoscope. If not, see <http://www.gnu.org/licenses/>.
import os.path
import shutil
import pytest
from diffoscope.comparators import specialize
from diffoscope.comparators.binary import FilesystemFile, NonExistingFile
from diffoscope.comparators.debian import DotChangesFile, DotDscFile
from diffoscope.config import Config
from diffoscope.presenters.text import output_text
TEST_DOT_CHANGES_FILE1_PATH = os.path.join(os.path.dirname(__file__), '../data/test1.changes')
TEST_DOT_CHANGES_FILE2_PATH = os.path.join(os.path.dirname(__file__), '../data/test2.changes')
TEST_DEB_FILE1_PATH = os.path.join(os.path.dirname(__file__), '../data/test1.deb')
TEST_DEB_FILE2_PATH = os.path.join(os.path.dirname(__file__), '../data/test2.deb')
@pytest.fixture
def dot_changes1(tmpdir):
tmpdir.mkdir('a')
dot_changes_path = str(tmpdir.join('a/test_1.changes'))
shutil.copy(TEST_DOT_CHANGES_FILE1_PATH, dot_changes_path)
shutil.copy(TEST_DEB_FILE1_PATH, str(tmpdir.join('a/test_1_all.deb')))
return specialize(FilesystemFile(dot_changes_path))
@pytest.fixture
def dot_changes2(tmpdir):
tmpdir.mkdir('b')
dot_changes_path = str(tmpdir.join('b/test_1.changes'))
shutil.copy(TEST_DOT_CHANGES_FILE2_PATH, dot_changes_path)
shutil.copy(TEST_DEB_FILE2_PATH, str(tmpdir.join('b/test_1_all.deb')))
return specialize(FilesystemFile(dot_changes_path))
def test_dot_changes_identification(dot_changes1):
assert isinstance(dot_changes1, DotChangesFile)
def test_dot_changes_invalid(tmpdir):
tmpdir.mkdir('a')
dot_changes_path = str(tmpdir.join('a/test_1.changes'))
shutil.copy(TEST_DOT_CHANGES_FILE1_PATH, dot_changes_path)
# we don't copy the referenced .deb
identified = specialize(FilesystemFile(dot_changes_path))
assert not isinstance(identified, DotChangesFile)
def test_dot_changes_no_differences(dot_changes1):
difference = dot_changes1.compare(dot_changes1)
assert difference is None
@pytest.fixture
def dot_changes_differences(dot_changes1, dot_changes2):
difference = dot_changes1.compare(dot_changes2)
output_text(difference, print_func=print)
return difference.details
def test_dot_changes_description(dot_changes_differences):
assert dot_changes_differences[0]
expected_diff = open(os.path.join(os.path.dirname(__file__), '../data/dot_changes_description_expected_diff')).read()
assert dot_changes_differences[0].unified_diff == expected_diff
def test_dot_changes_internal_diff(dot_changes_differences):
assert dot_changes_differences[2].source1 == 'test_1_all.deb'
def test_dot_changes_compare_non_existing(monkeypatch, dot_changes1):
monkeypatch.setattr(Config.general, 'new_file', True)
difference = dot_changes1.compare(NonExistingFile('/nonexisting', dot_changes1))
output_text(difference, print_func=print)
assert difference.source2 == '/nonexisting'
assert difference.details[-1].source2 == '/dev/null'
TEST_DOT_DSC_FILE1_PATH = os.path.join(os.path.dirname(__file__), '../data/test1.dsc')
TEST_DOT_DSC_FILE2_PATH = os.path.join(os.path.dirname(__file__), '../data/test2.dsc')
TEST_DEB_SRC1_PATH = os.path.join(os.path.dirname(__file__), '../data/test1.debsrc.tar.gz')
TEST_DEB_SRC2_PATH = os.path.join(os.path.dirname(__file__), '../data/test2.debsrc.tar.gz')
@pytest.fixture
def dot_dsc1(tmpdir):
tmpdir.mkdir('a')
dot_dsc_path = str(tmpdir.join('a/test_1.dsc'))
shutil.copy(TEST_DOT_DSC_FILE1_PATH, dot_dsc_path)
shutil.copy(TEST_DEB_SRC1_PATH, str(tmpdir.join('a/test_1.tar.gz')))
return specialize(FilesystemFile(dot_dsc_path))
@pytest.fixture
def dot_dsc2(tmpdir):
tmpdir.mkdir('b')
dot_dsc_path = str(tmpdir.join('b/test_1.dsc'))
shutil.copy(TEST_DOT_DSC_FILE2_PATH, dot_dsc_path)
shutil.copy(TEST_DEB_SRC2_PATH, str(tmpdir.join('b/test_1.tar.gz')))
return specialize(FilesystemFile(dot_dsc_path))
def test_dot_dsc_identification(dot_dsc1):
assert isinstance(dot_dsc1, DotDscFile)
def test_dot_dsc_invalid(tmpdir, dot_dsc2):
tmpdir.mkdir('a')
dot_dsc_path = str(tmpdir.join('a/test_1.dsc'))
shutil.copy(TEST_DOT_CHANGES_FILE1_PATH, dot_dsc_path)
# we don't copy the referenced .tar.gz
identified = specialize(FilesystemFile(dot_dsc_path))
assert not isinstance(identified, DotDscFile)
def test_dot_dsc_no_differences(dot_dsc1):
difference = dot_dsc1.compare(dot_dsc1)
assert difference is None
@pytest.fixture
def dot_dsc_differences(dot_dsc1, dot_dsc2):
difference = dot_dsc1.compare(dot_dsc2)
output_text(difference, print_func=print)
return difference.details
def test_dot_dsc_internal_diff(dot_dsc_differences):
assert dot_dsc_differences[1].source1 == 'test_1.tar.gz'
|
def test_dot_dsc_compare_non_existing(monkeypatch, dot_dsc1):
monkeypatch.setattr(Config.general, 'new_file', True)
difference = dot_dsc1.compare(NonExistingFile('/nonexisting', dot_dsc1))
output_text(difference, print_func=print)
|
assert difference.source2 == '/nonexisting'
assert difference.details[-1].source2 == '/dev/null'
|
mtskelton/huawei-4g-stats | stats/migrations/0001_initial.py | Python | mit | 718 | 0.001393 |
# -*- coding: utf-8 -*-
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Stat',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('date', models.DateTimeField(auto_now_add=True)),
|
('up', models.BigIntegerField()),
('down', models.BigIntegerField()),
('live_time', models.BigIntegerField()),
],
options={
},
bases=(models.Model,),
),
]
|
benley/Mathics | mathics/builtin/files.py | Python | gpl-3.0 | 138,763 | 0.000649 |
# -*- coding: utf8 -*-
"""
File Operations
"""
from __future__ import with_statement
import os
import io
import shutil
import hashlib
import zlib
import base64
import tempfile
import time
import struct
import sympy
import mpmath
import math
from mathics.core.expression import (Expression, Real, Complex, String, Symbol,
from_python, Integer, BoxError,
valid_context_name)
from mathics.builtin.base import (Builtin, Predefined, BinaryOperator,
PrefixOperator)
from mathics.settings import ROOT_DIR
INITIAL_DIR = os.getcwd()
HOME_DIR = os.path.expanduser('~')
SYS_ROOT_DIR = '/' if os.name == 'posix' else '\\'
TMP_DIR = tempfile.gettempdir()
DIRECTORY_STACK = [INITIAL_DIR]
INPUT_VAR = ""
INPUTFILE_VAR = ""
PATH_VAR = [HOME_DIR, os.path.join(ROOT_DIR, 'data'),
os.path.join(ROOT_DIR, 'packages')]
def path_search(filename):
# For names of the form "name`", search for name.mx and name.m
if filename[-1] == '`':
filename = filename[:-1].replace('`', os.path.sep)
for ext in ['.mx', '.m']:
result = path_search(filename + ext)
if result is not None:
filename = None
break
if filename is not None:
result = None
for p in PATH_VAR + ['']:
path = os.path.join(p, filename)
if os.path.exists(path):
result = path
break
# If FindFile resolves to a dir, search within for Kernel/init.m and ini
|
t.m
if result is not None and os.path.isdir(result):
for ext in [os.path.join('Kernel', 'init.m'), 'init.m']:
tmp = os.path.join(result, ext)
if os.path.isfile(tmp):
|
return tmp
return result
def count():
n = 0
while True:
yield n
n += 1
NSTREAMS = count() # use next(NSTREAMS)
STREAMS = []
def _channel_to_stream(channel, mode='r'):
if isinstance(channel, String):
name = channel.get_string_value()
opener = mathics_open(name, mode)
opener.__enter__()
n = opener.n
if mode in ['r', 'rb']:
head = 'InputStream'
elif mode in ['w', 'a', 'wb', 'ab']:
head = 'OutputStream'
else:
raise ValueError("Unknown format {0}".format(mode))
return Expression(head, channel, Integer(n))
elif channel.has_form('InputStream', 2):
return channel
elif channel.has_form('OutputStream', 2):
return channel
else:
return None
def _lookup_stream(n=None):
if n is None:
return None
elif n is not None:
try:
return STREAMS[n]
except IndexError:
return None
class mathics_open:
def __init__(self, name, mode='r'):
self.name = name
self.mode = mode
if mode not in ['r', 'w', 'a', 'rb', 'wb', 'ab']:
raise ValueError("Can't handle mode {0}".format(mode))
def __enter__(self):
# find path
path = path_search(self.name)
if path is None and self.mode in ['w', 'a', 'wb', 'ab']:
path = self.name
if path is None:
raise IOError
# determine encoding
encoding = 'utf-8' if 'b' not in self.mode else None
# open the stream
stream = io.open(path, self.mode, encoding=encoding)
# build the Expression
n = next(NSTREAMS)
if self.mode in ['r', 'rb']:
self.expr = Expression(
'InputStream', String(path), Integer(n))
elif self.mode in ['w', 'a', 'wb', 'ab']:
self.expr = Expression(
'OutputStream', String(path), Integer(n))
else:
raise IOError
STREAMS.append(stream)
self.n = n
return stream
def __exit__(self, type, value, traceback):
strm = STREAMS[self.n]
if strm is not None:
strm.close()
STREAMS[self.n] = None
class InitialDirectory(Predefined):
"""
<dl>
<dt>'$InitialDirectory'
<dd>returns the directory from which \Mathics was started.
</dl>
>> $InitialDirectory
= ...
"""
name = '$InitialDirectory'
def evaluate(self, evaluation):
global INITIAL_DIR
return String(INITIAL_DIR)
class InstallationDirectory(Predefined):
"""
<dl>
<dt>'$InstallationDirectory'
<dd>returns the directory in which \Mathics was installed.
</dl>
>> $InstallationDirectory
= ...
"""
name = '$InstallationDirectory'
def evaluate(self, evaluation):
global ROOT_DIR
return String(ROOT_DIR)
class HomeDirectory(Predefined):
"""
<dl>
<dt>'$HomeDirectory'
<dd>returns the users HOME directory.
</dl>
>> $HomeDirectory
= ...
"""
name = '$HomeDirectory'
attributes = ('Protected',)
def evaluate(self, evaluation):
global HOME_DIR
return String(HOME_DIR)
class RootDirectory(Predefined):
"""
<dl>
<dt>'$RootDirectory'
<dd>returns the system root directory.
</dl>
>> $RootDirectory
= ...
"""
name = '$RootDirectory'
attributes = ('Protected',)
def evaluate(self, evaluation):
global SYS_ROOT_DIR
return String(SYS_ROOT_DIR)
class TemporaryDirectory(Predefined):
"""
<dl>
<dt>'$TemporaryDirectory'
<dd>returns the directory used for temporary files.
</dl>
>> $TemporaryDirectory
= ...
"""
name = '$TemporaryDirectory'
def evaluate(self, evaluation):
return String(TMP_DIR)
class Input(Predefined):
"""
<dl>
<dt>'$Input'
<dd>is the name of the stream from which input is currently being read.
</dl>
>> $Input
=
"""
attributes = ('Protected', 'ReadProtected')
name = '$Input'
def evaluate(self, evaluation):
global INPUT_VAR
return String(INPUT_VAR)
class InputFileName(Predefined):
"""
<dl>
<dt>'$InputFileName'
<dd>is the name of the file from which input is currently being read.
</dl>
While in interactive mode, '$InputFileName' is "".
>> $InputFileName
=
"""
name = '$InputFileName'
def evaluate(self, evaluation):
global INPUTFILE_VAR
return String(INPUTFILE_VAR)
class PathnameSeparator(Predefined):
"""
<dl>
<dt>'$PathnameSeparator'
<dd>returns a string for the seperator in paths.
</dl>
>> $PathnameSeparator
= ...
"""
name = '$PathnameSeparator'
def evaluate(self, evaluation):
return String(os.sep)
class Path(Predefined):
"""
<dl>
<dt>'$Path'
<dd>returns the list of directories to search when looking for a file.
</dl>
>> $Path
= ...
"""
attributes = ('Protected',)
name = '$Path'
def evaluate(self, evaluation):
return Expression('List', *[String(p) for p in PATH_VAR])
class OperatingSystem(Predefined):
"""
<dl>
<dt>'$OperatingSystem'
<dd>gives the type of operating system running Mathics.
</dl>
>> $OperatingSystem
= ...
"""
attributes = ('Locked', 'Protected')
name = '$OperatingSystem'
def evaluate(self, evaluation):
if os.name == 'posix':
return String('Unix')
elif os.name == 'nt':
return String('Windows')
elif os.name == 'os2':
return String('MacOSX')
else:
return String('Unknown')
class EndOfFile(Builtin):
"""
<dl>
<dt>'EndOfFile'
<dd>is returned by 'Read' when the end of an input stream is reached.
</dl>
"""
# TODO: Improve docs for these Read[] arguments.
class Byte(Builtin):
"""
<dl>
<dt>'Byte'
<dd>is a data type for 'Read'.
</dl>
"""
class Character(Builtin):
"""
<dl>
<dt>'Character'
<dd>is a data type for 'Read'.
</dl>
"""
class Expression_(Builtin):
"""
<dl>
<dt>'Expression'
<dd>is a data type for 'Read'.
</dl>
|
CINPLA/expipe-dev | python-neo/doc/source/images/generate_diagram.py | Python | gpl-3.0 | 7,653 | 0.000523 |
# -*- coding: utf-8 -*-
"""
This generate diagram in .png and .svg from neo.core
Author: sgarcia
"""
from datetime import datetime
import numpy as np
import quantities as pq
from matplotlib import pyplot
from matplotlib.patches import Rectangle, ArrowStyle, FancyArrowPatch
from matplotlib.font_manager import FontProperties
from neo.test.generate_datasets import fake_neo
line_heigth = .22
fontsize = 10.5
left_text_shift = .1
dpi = 100
def get_rect_height(name, obj):
'''
calculate rectangle height
'''
nlines = 1.5
nlines += len(getattr(obj, '_all_attrs', []))
nlines += len(getattr(obj, '_single_child_objects', []))
nlines += len(getattr(obj, '_multi_child_objects', []))
nlines += len(getattr(obj, '_multi_parent_objects', []))
return nlines*line_heigth
def annotate(ax, coord1, coord2, connectionstyle, color, alpha):
arrowprops = dict(arrowstyle='fancy',
#~ patchB=p,
shrinkA=.3, shrinkB=.3,
fc=color, ec=color,
connectionstyle=connectionstyle,
alpha=alpha)
bbox = dict(boxstyle="square", fc="w")
a = ax.annotate('', coord1, coord2,
#xycoords="figure fraction",
#textcoords="figure fraction",
ha="right", va="center",
size=fontsize,
arrowprops=arrowprops,
bbox=bbox)
a.set_zorder(-4)
def calc_coordinates(pos, height):
x = pos[0]
y = pos[1] + height - line_heigth*.5
    return x, y
def generate_diagram(filename, rect_pos, rect_width, figsize):
rw = rect_width
fig = pyplot.figure(figsize=figsize)
ax = fig.add_axes([0, 0, 1, 1])
all_h = {}
objs = {}
for name in rect_pos:
objs[name] = fake_neo(name)
all_h[name] = get_rect_height(name, objs[name])
# draw connections
color = ['c', 'm', 'y']
alpha = [1., 1., 0.3]
for name, pos in rect_pos.items():
obj = objs[name]
relationships = [getattr(obj, '_single_child_objects', []),
getattr(obj, '_multi_child_objects', []),
getattr(obj, '_child_properties', [])]
for r in range(3):
for ch_name in relationships[r]:
x1, y1 = calc_coordinates(rect_pos[ch_name], all_h[ch_name])
x2, y2 = calc_coordinates(pos, all_h[name])
if r in [0, 2]:
x2 += rect_width
connectionstyle = "arc3,rad=-0.2"
                elif y2 >= y1:
connectionstyle = "arc3,rad=0.7"
else:
connectionstyle = "arc3,rad=-0.7"
                annotate(
                    ax=ax, coord1=(x1, y1), coord2=(x2, y2),
connectionstyle=connectionstyle,
color=color[r], alpha=alpha[r])
# draw boxes
for name, pos in rect_pos.items():
htotal = all_h[name]
obj = objs[name]
allrelationship = (list(getattr(obj, '_child_containers', [])) +
list(getattr(obj, '_multi_parent_containers', [])))
rect = Rectangle(pos, rect_width, htotal,
facecolor='w', edgecolor='k', linewidth=2.)
ax.add_patch(rect)
# title green
pos2 = pos[0], pos[1]+htotal - line_heigth*1.5
rect = Rectangle(pos2, rect_width, line_heigth*1.5,
facecolor='g', edgecolor='k', alpha=.5, linewidth=2.)
ax.add_patch(rect)
# single relationship
relationship = getattr(obj, '_single_child_objects', [])
pos2 = pos[1] + htotal - line_heigth*(1.5+len(relationship))
rect_height = len(relationship)*line_heigth
rect = Rectangle((pos[0], pos2), rect_width, rect_height,
facecolor='c', edgecolor='k', alpha=.5)
ax.add_patch(rect)
# multi relationship
relationship = (list(getattr(obj, '_multi_child_objects', [])) +
list(getattr(obj, '_multi_parent_containers', [])))
pos2 = (pos[1]+htotal - line_heigth*(1.5+len(relationship)) -
rect_height)
rect_height = len(relationship)*line_heigth
rect = Rectangle((pos[0], pos2), rect_width, rect_height,
facecolor='m', edgecolor='k', alpha=.5)
ax.add_patch(rect)
# necessary attr
pos2 = (pos[1]+htotal -
line_heigth*(1.5+len(allrelationship) +
len(obj._necessary_attrs)))
rect = Rectangle((pos[0], pos2), rect_width,
line_heigth*len(obj._necessary_attrs),
facecolor='r', edgecolor='k', alpha=.5)
ax.add_patch(rect)
# name
if hasattr(obj, '_quantity_attr'):
post = '* '
else:
post = ''
ax.text(pos[0]+rect_width/2., pos[1]+htotal - line_heigth*1.5/2.,
name+post,
horizontalalignment='center', verticalalignment='center',
fontsize=fontsize+2,
fontproperties=FontProperties(weight='bold'),
)
#relationship
for i, relat in enumerate(allrelationship):
ax.text(pos[0]+left_text_shift, pos[1]+htotal - line_heigth*(i+2),
relat+': list',
horizontalalignment='left', verticalalignment='center',
fontsize=fontsize,
)
# attributes
for i, attr in enumerate(obj._all_attrs):
attrname, attrtype = attr[0], attr[1]
t1 = attrname
if (hasattr(obj, '_quantity_attr') and
obj._quantity_attr == attrname):
t1 = attrname+'(object itself)'
else:
t1 = attrname
if attrtype == pq.Quantity:
if attr[2] == 0:
t2 = 'Quantity scalar'
else:
t2 = 'Quantity %dD' % attr[2]
elif attrtype == np.ndarray:
t2 = "np.ndarray %dD dt='%s'" % (attr[2], attr[3].kind)
elif attrtype == datetime:
t2 = 'datetime'
else:
t2 = attrtype.__name__
t = t1+' : '+t2
ax.text(pos[0]+left_text_shift,
pos[1]+htotal - line_heigth*(i+len(allrelationship)+2),
t,
horizontalalignment='left', verticalalignment='center',
fontsize=fontsize,
)
xlim, ylim = figsize
ax.set_xlim(0, xlim)
ax.set_ylim(0, ylim)
ax.set_xticks([])
ax.set_yticks([])
fig.savefig(filename, dpi=dpi)
def generate_diagram_simple():
figsize = (18, 12)
rw = rect_width = 3.
bf = blank_fact = 1.2
rect_pos = {'Block': (.5+rw*bf*0, 4),
'Segment': (.5+rw*bf*1, .5),
'Event': (.5+rw*bf*4, 3.0),
'Epoch': (.5+rw*bf*4, 1.0),
'ChannelIndex': (.5+rw*bf*1, 7.5),
'Unit': (.5+rw*bf*2., 9.9),
'SpikeTrain': (.5+rw*bf*3, 7.5),
'IrregularlySampledSignal': (.5+rw*bf*3, 0.5),
'AnalogSignal': (.5+rw*bf*3, 4.9),
}
generate_diagram('simple_generated_diagram.svg',
rect_pos, rect_width, figsize)
generate_diagram('simple_generated_diagram.png',
rect_pos, rect_width, figsize)
if __name__ == '__main__':
generate_diagram_simple()
pyplot.show()
|
CLCMacTeam/AutoPkgBESEngine
|
Code/BESTemplater.py
|
Python
|
gpl-2.0
| 2,795
| 0.003936
|
#!/usr/local/autopkg/python
# encoding: utf-8
#
# Copyright 2015 The Pennsylvania State University.
#
"""
BESTemplater.py
Created by Matt Hansen (mah60@psu.edu) on 2015-04-30.
AutoPkg Processor for importing tasks using the BigFix RESTAPI
Updated by Rusty Myers (rzm102@psu.edu) on 2020-02-21.
Work in progress. Does not support Python3.
"""
from __future__ import absolute_import
import os
import sys
from autopkglib import Processor, ProcessorError
__all__ = ["BESTemplater"]
class BESTemplater(Processor):
"""AutoPkg Processor for rendering tasks from templates"""
description = "Generates BigFix XML to install application."
input_variables = {
"template_name": {
"required": True,
"description":
"Name of template file."
},
}
output_variables = {
"bes_file": {
"description":
"The resulting BES task rendered from the template."
},
}
__doc__ = description
def main(self):
"""BESImporter Main Method"""
# http://stackoverflow.com/a/14150750/2626090
uppath = lambda _path, n: os.sep.join(_path.split(os.sep)[:-n])
try:
from jinja2 import Environment, ChoiceLoader, FileSystemLoader
except ImportError as err:
raise ProcessorError("jinja2 module is not installed: %s" % err)
# Assign variables
template_name = self.env.get("template_name")
name = self.env.get("NAME")
version = self.env.get("version")
RECIPE_DIR = self.env.get("RECIPE_DIR")
BES_TEMPLATES = self.env.get("BES_TEMPLATES")
jinja_env = Environment(loader = ChoiceLoader([
FileSystemLoader(os.getcwd()),
FileSystemLoader('templates'),
FileSystemLoader(os.path.join(RECIPE_DIR, 'templates')),
FileSystemLoader(os.path.join(uppath(RECIPE_DIR, 1), 'templates')),
FileSystemLoader(os.path.join(uppath(RECIPE_DIR, 2), 'Templates')),
FileSystemLoader(BES_TEMPLATES)
]))
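        # jinja2's ChoiceLoader tries each loader above in order and renders
        # the first matching template, so a recipe-local templates/ directory
        # takes precedence over the shared BES_TEMPLATES path.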
        template_task = jinja_env.get_template(template_name)
# print jinja_env.list_templates()
rendered_task = template_task.render(**self.env)
        # Write Final BES File to Disk
outputfile_handle = open("%s/Deploy %s %s.bes" %
(self.env.get("RECIPE_CACHE_DIR"),
name, version), "wb")
outputfile_handle.write(rendered_task)
outputfile_handle.close()
self.env['bes_file'] = outputfile_handle.name
self.output("Output BES File: '%s'" % self.env.get("bes_file"))
if __name__ == "__main__":
    processor = BESTemplater()
processor.execute_shell()
|
brianspeir/Vanilla
|
vendor/bootstrap-vz/common/phases.py
|
Python
|
bsd-3-clause
| 1,251
| 0.005596
|
from base import Phase
preparation = Phase('Preparation', 'Initializing connections, fetching data etc.')
volume_creation = Phase('Volume creation', 'Creating the volume to bootstrap onto')
volume_preparation = Phase('Volume preparation', 'Formatting the bootstrap volume')
volume_mounting = Phase('Volume mounting', 'Mounting bootstrap volume')
os_installation = Phase('OS installation', 'Installing the operating system')
package_installation = Phase('Package installation', 'Installing software')
system_modification = Phase('System modification', 'Modifying configuration files, adding resources, etc.')
system_cleaning = Phase('System cleaning', 'Removing sensitive data, temporary files and other leftovers')
volume_unmounting = Phase('Volume unmounting', 'Unmounting the bootstrap volume')
image_registration = Phase('Image registration', 'Uploading/Registering with the provider')
cleaning = Phase('Cleaning', 'Removing temporary files')
order = [preparation,
volume_creation,
volume_preparation,
volume_mounting,
os_installation,
package_installation,
system_modification,
system_cleaning,
volume_unmounting,
image_registration,
cleaning,
]
|
dominicrodger/django-tinycontent
|
tinycontent/migrations/0001_initial.py
|
Python
|
bsd-3-clause
| 570
| 0.001754
|
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='TinyContent',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(unique=True, max_length=100)),
('content', models.TextField()),
],
options={
'verbose_name': 'Content block',
},
),
]
|
UnrememberMe/pants
|
src/python/pants/engine/isolated_process.py
|
Python
|
apache-2.0
| 2,261
| 0.008403
|
# coding=utf-8
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
from pants.engine.fs import EMPTY_SNAPSHOT
from pants.engine.rules import RootRule, rule
from pants.engine.selectors import Select
from pants.util.objects import datatype
logger = logging.getLogger(__name__)
class ExecuteProcessRequest(datatype('ExecuteProcessRequest', ['argv', 'env', 'input_files_digest', 'digest_length'])):
"""Request for execution with args and snapshots to extract."""
@classmethod
def create_from_snapshot(cls, argv, env, snapshot):
return ExecuteProcessRequest(
argv=argv,
env=env,
input_files_digest=snapshot.fingerprint,
digest_length=snapshot.digest_length,
)
@classmethod
def create_with_empty_snapshot(cls, argv, env):
return cls.create_from_snapshot(argv, env, EMPTY_SNAPSHOT)
def __new__(cls, argv, env, input_files_digest, digest_length):
"""
:param args: Arguments to the process being run.
:param env: A tuple of environment variables and values.
"""
if not isinstance(argv, tuple):
raise ValueError('argv must be a tuple.')
if not isinstance(env, tuple):
raise ValueError('env must be a tuple.')
if not isinstance(input_files_digest, str):
raise ValueError('input_files_digest must be a str.')
if not isinstance(digest_length, int):
raise ValueError('digest_length must be an int.')
if digest_length < 0:
raise ValueError('digest_length must be >= 0.')
return super(ExecuteProcessRequest, cls).__new__(cls, argv, env, input_files_digest, digest_length)
class ExecuteProcessResult(datatype('ExecuteProcessResult', ['stdout', 'stderr', 'exit_code'])):
pass
def create_process_rules():
"""Intrinsically replaced on the rust side."""
return [execute_process_noop, RootRule(ExecuteProcessRequest)]
@rule(ExecuteProcessResult, [Select(ExecuteProcessRequest)])
def execute_process_noop(*args):
raise Exception('This task is replaced intrinsically, and should never run.')
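# Hedged usage sketch (not part of pants itself): constructing a request for
# a trivial `echo` invocation with no input files, via the classmethod above.
def _example_echo_request():
  return ExecuteProcessRequest.create_with_empty_snapshot(
    argv=('/bin/echo', 'hello'),
    env=())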
|
Jozhogg/iris
|
lib/iris/tests/unit/fileformats/grib/test_GribWrapper.py
|
Python
|
lgpl-3.0
| 6,115
| 0
|
# (C) British Crown Copyright 2014, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Unit tests for the `iris.fileformats.grib.GribWrapper` class.
"""
from __future__ import (absolute_import, division, print_function)
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
from biggus import NumpyArrayAdapter
import mock
import numpy as np
from iris.fileformats.grib import GribWrapper, GribDataProxy
_message_length = 1000
def _mock_grib_get_long(grib_message, key):
lookup = dict(totalLength=_message_length,
numberOfValues=200,
jPointsAreConsecutive=0,
Ni=20,
Nj=10)
try:
result = lookup[key]
except KeyError:
msg = 'Mock grib_get_long unknown key: {!r}'.format(key)
raise AttributeError(msg)
return result
def _mock_grib_get_string(grib_message, key):
return grib_message
def _mock_grib_get_native_type(grib_message, key):
result = int
if key == 'gridType':
result = str
return result
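# The mocks above give every message a fixed totalLength of 1000 bytes and a
# regular Ni=20 x Nj=10 grid (200 values), which is why the tests below
# expect proxy shapes of (10, 20) for regular grids and (200,) for reduced
# ones, with offsets advancing in _message_length steps.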
class Test_deferred(tests.IrisTest):
def setUp(self):
confirm_patch = mock.patch(
'iris.fileformats.grib.GribWrapper._confirm_in_scope')
compute_patch = mock.patch(
'iris.fileformats.grib.GribWrapper._compute_extra_keys')
long_patch = mock.patch('gribapi.grib_get_long', _mock_grib_get_long)
string_patch = mock.patch('gribapi.grib_get_string',
_mock_grib_get_string)
native_patch = mock.patch('gribapi.grib_get_native_type',
_mock_grib_get_native_type)
confirm_patch.start()
compute_patch.start()
long_patch.start()
string_patch.start()
native_patch.start()
self.addCleanup(confirm_patch.stop)
self.addCleanup(compute_patch.stop)
self.addCleanup(long_patch.stop)
self.addCleanup(string_patch.stop)
self.addCleanup(native_patch.stop)
def test_regular_sequential(self):
tell_tale = np.arange(1, 5) * _message_length
grib_fh = mock.Mock(tell=mock.Mock(side_effect=tell_tale))
auto_regularise = False
grib_message = 'regular_ll'
for i, _ in enumerate(tell_tale):
gw = GribWrapper(grib_message, grib_fh, auto_regularise)
self.assertIsInstance(gw._data, NumpyArrayAdapter)
proxy = gw._data.concrete
self.assertIsInstance(proxy, GribDataProxy)
self.assertEqual(proxy.shape, (10, 20))
self.assertEqual(proxy.dtype, np.float)
self.assertIs(proxy.fill_value, np.nan)
self.assertEqual(proxy.path, grib_fh.name)
self.assertEqual(proxy.offset, _message_length * i)
self.assertEqual(proxy.regularise, auto_regularise)
def test_regular_mixed(self):
tell_tale = np.arange(1, 5) * _message_length
expected = tell_tale - _message_length
grib_fh = mock.Mock(tell=mock.Mock(side_effect=tell_tale))
auto_regularise = False
grib_message = 'regular_ll'
for offset in expected:
gw = GribWrapper(grib_message, grib_fh, auto_regularise)
self.assertIsInstance(gw._data, NumpyArrayAdapter)
proxy = gw._data.concrete
self.assertIsInstance(proxy, GribDataProxy)
self.assertEqual(proxy.shape, (10, 20))
self.assertEqual(proxy.dtype, np.float)
self.assertIs(proxy.fill_value, np.nan)
self.assertEqual(proxy.path, grib_fh.name)
self.assertEqual(proxy.offset, offset)
self.assertEqual(proxy.regularise, auto_regularise)
def test_reduced_sequential(self):
tell_tale = np.arange(1, 5) * _message_length
grib_fh = mock.Mock(tell=mock.Mock(side_effect=tell_tale))
auto_regularise = False
grib_message = 'reduced_gg'
for i, _ in enumerate(tell_tale):
gw = GribWrapper(grib_message, grib_fh, auto_regularise)
self.assertIsInstance(gw._data, NumpyArrayAdapter)
proxy = gw._data.concrete
self.assertIsInstance(proxy, GribDataProxy)
self.assertEqual(proxy.shape, (200,))
self.assertEqual(proxy.dtype, np.float)
self.assertIs(proxy.fill_value, np.nan)
self.assertEqual(proxy.path, grib_fh.name)
self.assertEqual(proxy.offset, _message_length * i)
self.assertEqual(proxy.regularise, auto_regularise)
def test_reduced_mixed(self):
tell_tale = np.arange(1, 5) * _message_length
expected = tell_tale - _message_length
grib_fh = mock.Mock(tell=mock.Mock(side_effect=tell_tale))
auto_regularise = False
grib_message = 'reduced_gg'
for offset in expected:
gw = GribWrapper(grib_message, grib_fh, auto_regularise)
self.assertIsInstance(gw._data, NumpyArrayAdapter)
proxy = gw._data.concrete
self.assertIsInstance(proxy, GribDataProxy)
self.assertEqual(proxy.shape, (200,))
self.assertEqual(proxy.dtype, np.float)
self.assertIs(proxy.fill_value, np.nan)
self.assertEqual(proxy.path, grib_fh.name)
self.assertEqual(proxy.offset, offset)
self.assertEqual(proxy.regularise, auto_regularise)
if __name__ == '__main__':
tests.main()
|
cherokee/webserver
|
admin/Handler.py
|
Python
|
gpl-2.0
| 1,775
| 0.007324
|
# -*- coding: utf-8 -*-
#
# Cherokee-admin
#
# Authors:
# Alvaro Lopez Ortega <alvaro@alobbs.com>
#
# Copyright (C) 2001-2014 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import CTK
import Cherokee
import validations
URL_APPLY = '/plugin/handler/apply'
NOTE_DOCUMENT_ROOT = N_('Allows to specify an alternative Document Root path.')
class PluginHandler (CTK.Plugin):
def __init__ (self, key, **kwargs):
CTK.Plugin.__init__ (self, key)
self.show_document_root = kwargs.pop('show_document_root', True)
self.key_rule = '!'.join(self.key.split('!')[:-1])
def AddCommon (self):
if self.show_document_root:
table = CTK.PropsTable()
table.Add (_('Document Root'), CTK.TextCfg('%s!document_root'%(self.key_rule), True), _(NOTE_DOCUMENT_ROOT))
submit = CTK.Submitter (URL_APPLY)
submit += CTK.Indenter (table)
self += submit
# Publish
VALS = [("%s!document_root"%(self.key_rule), validations.is_dev_null_or_local_dir_exists)]
CTK.publish ('^%s'%(URL_APPLY), CTK.cfg_apply_post, validation=VALS, method="POST")
|
GaloMALDONADO/motorg
|
motorog/filterKinematics.py
|
Python
|
gpl-3.0
| 2,952
| 0.013211
|
import numpy as np
import pinocchio as se3  # assumed import: se3.differentiate is used below
from bmtools.filters import filtfilt_butter
def derivatefilterQ(model, q, t, cutoff, fs, filter_order=4):
'''
Differentiate data using backwards finite differences and
butterworth low pass filter. Takes care of quaternions
Inputs:
- Q : numpy matrix double x=time, y=coordinates(model.nq)
- t : numpy vector of time
- cutoff : cutoff frequency
- fs : sampling frequency
Output
- V : numpy matrix double x=time, y=coordinates(model.nv)
'''
# Debug
q = q.squeeze()
t = t.squeeze()
if type(q) != np.matrixlib.defmatrix.matrix:
q = np.matrix(q).squeeze()
if type(t) != np.ndarray:
t = np.array(t).squeeze()
assert type(q) == np.matrixlib.defmatrix.matrix
assert type(t) == np.ndarray
assert q.shape[0] == len(t)
assert q.shape[1] == model.nq
# Differentiate
dq = np.empty((len(t), model.nv))
for frame in xrange(0,len(t)-1):
# Backward differences
        dt = np.float64(t[frame+1]-t[frame])
        q1 = q[frame,:]
q2 = q[frame+1,:]
diff = se3.differentiate(model, q1, q2)/dt
dq[frame,:] = diff.A1
dq[-1,:] = dq[-2,:]
# Filter
dq_prime = np.empty((len(t), model.nv))
for i in xrange(model.nv):
filtered = filtfilt_butter(dq[:,i], cutoff, fs, filter_order)
dq_prime[:,i] = filtered
return np.matrix(dq_prime)
def derivatefilterV(model, dq, t, cutoff, fs, filter_order=4):
'''
Differentiate data using backwards finite differences and
butterworth low pass filter
Inputs:
- dq : numpy matrix double x=time, y=coordinates(model.nv)
- t : numpy vector of time
- cutoff : cutoff frequency
- fs : sampling frequency
Output
- ddq : numpy matrix double x=time, y=coordinates(model.nv)
'''
# Debug
dq = dq.squeeze()
t = t.squeeze()
if type(dq) != np.matrixlib.defmatrix.matrix:
dq = np.matrix(dq).squeeze()
if type(t) != np.ndarray:
t = np.array(t).squeeze()
assert type(dq) == np.matrixlib.defmatrix.matrix
assert type(t) == np.ndarray
assert dq.shape[0] == len(t)
assert dq.shape[1] == model.nv
# Differentiate
ddq = np.empty((len(t), model.nv))
for frame in xrange(0,len(t)-1):
# Backward differences
dt = np.float64(t[frame+1]-t[frame])
diff = (dq[frame+1,:] - dq[frame,:])/np.float64(dt)
ddq[frame,:] = diff.A1
ddq[-1,:] = ddq[-2,:]
# Filter
ddq_prime = np.empty((len(t), model.nv))
for i in xrange(model.nv):
filtered = filtfilt_butter(ddq[:,i], cutoff, fs, filter_order)
ddq_prime[:,i] = filtered
return np.matrix(ddq_prime)
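# Hedged sketch (illustration only, not part of the original module): the
# differentiate-then-filter pattern shared by both functions above, applied
# to a plain 1-D numpy signal.
def _derivatefilter1d_sketch(x, t, cutoff, fs, filter_order=4):
    dx = np.empty(len(t))
    for k in xrange(0, len(t)-1):
        # backward-style finite difference, as in the loops above
        dx[k] = (x[k+1] - x[k])/np.float64(t[k+1] - t[k])
    dx[-1] = dx[-2]  # repeat the last sample, as above
    return filtfilt_butter(dx, cutoff, fs, filter_order)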
|
Jumpscale/play7
|
gogsclient/gogsclient.py
|
Python
|
apache-2.0
| 4,050
| 0.010123
|
import requests
import json
from JumpScale import j
from mongoengine import *
ENTRY_POINT = ""
TOKEN = ""
class ModelGogsRepo(j.core.models.getBaseModel()):
    name = StringField(default='')
    description = StringField(default='')
    private = BooleanField(default=False)
    readme = StringField(default='Default')
    gitignores = StringField(default='Python')
    auto_init = BooleanField(default=True)
def endpoint(resource):
C='%s/%s' % (ENTRY_POINT, resource)
if C.find("?")==-1:
C+='?token=%s'%TOKEN
else:
C+='&token=%s'%TOKEN
return C
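# e.g. endpoint('user/repos') -> '<ENTRY_POINT>/user/repos?token=<TOKEN>';
# a resource that already carries a query string gets '&token=...' instead.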
def perform_post(resource, data):
headers = {'Content-Type': 'application/json'}
return requests.post(endpoint(resource), data, headers=headers)
def perform_delete(resource):
return requests.delete(endpoint(resource))
def perform_get(resource):
r = requests.get(endpoint(resource))
    print r.json()
curlexample='''
curl -H "Authorization: token b9d3768004daf48b4b6f963ab94ca47515444074" http://192.168.99.100:3001/api/v1/user/repos
curl http://192.168.99.100:3001/api/v1/user/repos?token=b9d3768004daf48b4b6f963ab94ca47515444074
'''
class GOGSClient():
    def __init__(self):
        # these are the module-level settings used by endpoint(); without
        # the global declaration the assignments would only bind locals
        global ENTRY_POINT, TOKEN
        ENTRY_POINT = 'http://192.168.99.100:3001/api/v1/'
        TOKEN = "b9d3768004daf48b4b6f963ab94ca47515444074"
def repos_list(self):
return perform_get("user/repos")
    def repo_create(self,name,description,private):
        model = ModelGogsRepo(name=name,description=description,private=private)
        # 'user/repos' is the create-repo resource (see curlexample above);
        # to_json() assumes the base model is a mongoengine Document
        return perform_post("user/repos", model.to_json())
people = [
{
'firstname': 'John',
'lastname': 'Doe',
'role': ['author'],
'location': {'address': '422 South Gay Street', 'city': 'Auburn'},
'born': 'Thu, 27 Aug 1970 14:37:13 GMT'
},
{
'firstname': 'Serena',
'lastname': 'Love',
'role': ['author'],
'location': {'address': '363 Brannan St', 'city': 'San Francisco'},
'born': 'Wed, 25 Feb 1987 17:00:00 GMT'
},
    {
        'firstname': 'Mark',
'lastname': 'Green',
'role': ['copy', 'author'],
'location': {'address': '4925 Lacross Road', 'city': 'New York'},
'born': 'Sat, 23 Feb 1985 12:00:00 GMT'
},
{
'firstname': 'Julia',
'lastname': 'Red',
'role': ['copy'],
        'location': {'address': '98 Yatch Road', 'city': 'San Francisco'},
'born': 'Sun, 20 Jul 1980 11:00:00 GMT'
},
{
'firstname': 'Anne',
'lastname': 'White',
'role': ['contributor', 'copy'],
'location': {'address': '32 Joseph Street', 'city': 'Ashfield'},
'born': 'Fri, 25 Sep 1970 10:00:00 GMT'
},
]
r = perform_post('people', json.dumps(people))
print "'people' posted", r.status_code
valids = []
if r.status_code == 201:
response = r.json()
if response['_status'] == 'OK':
for person in response['_items']:
if person['_status'] == "OK":
valids.append(person['_id'])
return valids
# def post_works(ids):
# works = []
# for i in range(28):
# works.append(
# {
# 'title': 'Book Title #%d' % i,
# 'description': 'Description #%d' % i,
# 'owner': random.choice(ids),
# }
# )
# r = perform_post('works', json.dumps(works))
# print "'works' posted", r.status_code
# def delete():
# r = perform_delete('people')
# print "'people' deleted", r.status_code
# r = perform_delete('works')
# print "'works' deleted", r.status_code
cl = GOGSClient()
print cl.repos_list()
|
tinkerinestudio/Tinkerine-Suite
|
TinkerineSuite/Cura/cura_sf/skeinforge_application/skeinforge_plugins/craft_plugins/stretch.py
|
Python
|
agpl-3.0
| 21,092
| 0.022331
|
"""
This page is in the table of contents.
Stretch is a very important Skeinforge plugin that allows you to partially compensate for the fact that extruded holes are smaller than they should be. It stretches the threads to partially compensate for filament shrinkage when extruded.
The stretch manual page is at:
http://fabmetheus.crsndoo.com/wiki/index.php/Skeinforge_Stretch
Extruded holes are smaller than the model because while printing an arc the head is depositing filament on both sides of the arc, but on the inside of the arc you actually need less material than on the outside. You can read more about this on the RepRap ArcCompensation page:
http://reprap.org/bin/view/Main/ArcCompensation
In general, stretch will widen holes and push corners out. In practice the filament contraction will not be identical to the algorithm, so even once the optimal parameters are determined, the stretch script will not be able to eliminate the inaccuracies caused by contraction, but it should reduce them.
All the defaults assume that the thread sequence choice setting in fill is the edge being extruded first, then the loops, then the infill. If the thread sequence choice is different, the optimal thread parameters will also be different. In general, if the infill is extruded first, the infill would have to be stretched more so that even after the filament shrinkage, it would still be long enough to connect to the loop or edge.
Holes should be made with the correct area for their radius. In other words, for example if your modeling program approximates a hole of radius one (area = pi) by making a square with the points at [(1,0), (0,1), (-1,0), (0,-1)] (area = 2), the radius should be increased by sqrt(pi/2). This can be done in fabmetheus xml by writing:
radiusAreal='True'
in the attributes of the object or any parent of that object. In other modeling programs, you'll have to do this manually or make a script. If area compensation is not done, then changing the stretch parameters to overcompensate for too small hole areas will lead to incorrect compensation in other shapes.
==Operation==
The default 'Activate Stretch' checkbox is off. When it is on, the functions described below will work, when it is off, the functions will not be called.
==Settings==
===Loop Stretch Over Perimeter Width===
Default is 0.1.
Defines the ratio of the maximum amount the loop aka inner shell threads will be stretched compared to the edge width, in general this value should be the same as the 'Perimeter Outside Stretch Over Perimeter Width' setting.
===Path Stretch Over Perimeter Width===
Default is zero.
Defines the ratio of the maximum amount the threads which are not loops, like the infill threads, will be stretched compared to the edge width.
===Perimeter===
====Perimeter Inside Stretch Over Perimeter Width====
Default is 0.32.
Defines the ratio of the maximum amount the inside edge thread will be stretched compared to the edge width, this is the most important setting in stretch. The higher the value the more it will stretch the edge and the wider holes will be. If the value is too small, the holes could be drilled out after fabrication, if the value is too high, the holes would be too wide and the part would have to junked.
====Perimeter Outside Stretch Over Perimeter Width====
Default is 0.1.
Defines the ratio of the maximum amount the outside edge thread will be stretched compared to the edge width, in general this value should be around a third of the 'Perimeter Inside Stretch Over Perimeter Width' setting.
===Stretch from Distance over Perimeter Width===
Default is two.
The stretch algorithm works by checking at each turning point on the extrusion path what the direction of the thread is at a distance of 'Stretch from Distance over Perimeter Width' times the edge width, on both sides, and moves the thread in the opposite direction. So it takes the current turning-point, goes "Stretch from Distance over Perimeter Width" * "Perimeter Width" ahead, reads the direction at that point. Then it goes the same distance back, reads the direction at that other point. It then moves the thread in the opposite direction, away from the center of the arc formed by these 2 points+directions.
The magnitude of the stretch increases with:
the amount that the direction of the two threads is similar and
by the '..Stretch Over Perimeter Width' ratio.
==Examples==
The following examples stretch the file Screw Holder Bottom.stl. The examples are run in a terminal in the folder which contains Screw Holder Bottom.stl and stretch.py.
> python stretch.py
This brings up the stretch dialog.
> python stretch.py Screw Holder Bottom.stl
The stretch tool is parsing the file:
Screw Holder Bottom.stl
..
The stretch tool has created the file:
.. Screw Holder Bottom_stretch.gcode
"""
from __future__ import absolute_import
from fabmetheus_utilities.fabmetheus_tools import fabmetheus_interpret
from fabmetheus_utilities import archive
from fabmetheus_utilities import euclidean
from fabmetheus_utilities import gcodec
from fabmetheus_utilities import settings
from skeinforge_application.skeinforge_utilities import skeinforge_craft
from skeinforge_application.skeinforge_utilities import skeinforge_polyfile
from skeinforge_application.skeinforge_utilities import skeinforge_profile
import sys
__author__ = 'Enrique Perez (perez_enrique@yahoo.com)'
__date__ = '$Date: 2008/21/04 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
#maybe speed up feedRate option
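# Hedged sketch (illustration only, not part of the plugin): the geometric
# idea from the module docstring -- sample the thread direction before and
# after a turning point and push that point away from the centre of the
# implied arc. The real skein-based implementation is in StretchSkein below.
def _stretch_point_sketch(before, point, after, strength):
    'Return point pushed away from the arc centre, as an (x, y) tuple.'
    import math
    dx1, dy1 = point[0] - before[0], point[1] - before[1]
    dx2, dy2 = after[0] - point[0], after[1] - point[1]
    bx, by = dx2 - dx1, dy2 - dy1  # change of direction points into the turn
    norm = math.hypot(bx, by)
    if norm == 0.0:
        return point  # straight thread: nothing to stretch
    return (point[0] - strength * bx / norm,
            point[1] - strength * by / norm)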
def getCraftedText( fileName, gcodeText, stretchRepository = None ):
"Stretch a gcode linear move text."
return getCraftedTextFromText( archive.getTextIfEmpty(fileName, gcodeText), stretchRepository )
def getCraftedTextFromText( gcodeText, stretchRepository = None ):
"Stretch a gcode linear move text."
if gcodec.isProcedureDoneOrFileIsEmpty( gcodeText, 'stretch'):
return gcodeText
if stretchRepository == None:
stretchRepository = settings.getReadRepository( StretchRepository() )
if not stretchRepository.activateStretch.value:
return gcodeText
return StretchSkein().getCraftedGcode( gcodeText, stretchRepository )
def getNewRepository():
'Get new repository.'
return StretchRepository()
def writeOutput(fileName, shouldAnalyze=True):
"Stretch a gcode linear move file. Chain stretch the gcode if it is not already stretched."
skeinforge_craft.writeChainTextWithNounMessage(fileName, 'stretch', shouldAnalyze)
class LineIteratorBackward(object):
"Backward line iterator class."
def __init__( self, isLoop, lineIndex, lines ):
self.firstLineIndex = None
self.isLoop = isLoop
self.lineIndex = lineIndex
self.lines = lines
def getIndexBeforeNextDeactivate(self):
"Get index two lines before the deactivate command."
for lineIndex in xrange( self.lineIndex + 1, len(self.lines) ):
line = self.lines[lineIndex]
splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line)
firstWord = gcodec.getFirstWord(splitLine)
if firstWord == 'M103':
return lineIndex - 2
print('This should never happen in stretch, no deactivate command was found for this thread.')
raise StopIteration, "You've reached the end of the line."
def getNext(self):
"Get next line going backward or raise exception."
while self.lineIndex > 3:
if self.lineIndex == self.firstLineIndex:
raise StopIteration, "You've reached the end of the line."
if self.firstLineIndex == None:
self.firstLineIndex = self.lineIndex
nextLineIndex = self.lineIndex - 1
line = self.lines[self.lineIndex]
splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line)
firstWord = gcodec.getFirstWord(splitLine)
if firstWord == 'M103':
if self.isLoop:
nextLineIndex = self.getIndexBeforeNextDeactivate()
else:
raise StopIteration, "You've reached the end of the line."
if firstWord == 'G1':
if self.isBeforeExtrusion():
if self.isLoop:
nextLineIndex = self.getIndexBeforeNextDeactivate()
else:
raise StopIteration, "You've reached the end of the line."
else:
self.lineIndex = nextLineIndex
return lin
|
MjAbuz/flask
|
flask/ctx.py
|
Python
|
bsd-3-clause
| 14,399
| 0.000625
|
# -*- coding: utf-8 -*-
"""
flask.ctx
~~~~~~~~~
Implements the objects required to keep the context.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import with_statement
import sys
from functools import update_wrapper
from werkzeug.exceptions import HTTPException
from .globals import _request_ctx_stack, _app_ctx_stack
from .module import blueprint_is_module
from .signals import appcontext_pushed, appcontext_popped
class _AppCtxGlobals(object):
"""A plain object."""
def get(self, name, default=None):
return self.__dict__.get(name, default)
def __contains__(self, item):
return item in self.__dict__
def __iter__(self):
return iter(self.__dict__)
def __repr__(self):
top = _app_ctx_stack.top
if top is not None:
return '<flask.g of %r>' % top.app.name
return object.__repr__(self)
def after_this_request(f):
"""Executes a function after this request. This is useful to modify
response objects. The function is passed the response object and has
to return the same or a new one.
Example::
@app.route('/')
def index():
@after_this_request
def add_header(response):
response.headers['X-Foo'] = 'Parachute'
return response
return 'Hello World!'
This is more useful if a function other than the view function wants to
modify a response. For instance think of a decorator that wants to add
some headers without converting the return value into a response object.
.. versionadded:: 0.9
"""
_request_ctx_stack.top._after_request_functions.append(f)
return f
def copy_current_request_context(f):
"""A helper function that decorates a function to retain the current
request context. This is useful when working with greenlets. The moment
the function is decorated a copy of the request context is created and
then pushed when the function is called.
Example::
import gevent
from flask import copy_current_request_context
@app.route('/')
def index():
@copy_current_request_context
def do_some_work():
# do some work here, it can access flask.request like you
# would otherwise in the view function.
...
gevent.spawn(do_some_work)
return 'Regular response'
.. versionadded:: 0.10
"""
top = _request_ctx_stack.top
if top is None:
raise RuntimeError('This decorator can only be used at local scopes '
'when a request context is on the stack. For instance within '
'view functions.')
reqctx = top.copy()
def wrapper(*args, **kwargs):
with reqctx:
return f(*args, **kwargs)
return update_wrapper(wrapper, f)
def has_request_context():
"""If you have code that wants to test if a request context is there or
not this function can be used. For instance, you may want to take advantage
of request information if the request object is available, but fail
silently if it is unavailable.
::
class User(db.Model):
def __init__(self, username, remote_addr=None):
self.username = username
if remote_addr is None and has_request_context():
remote_addr = request.remote_addr
self.remote_addr = remote_addr
Alternatively you can also just test any of the context bound objects
    (such as :class:`request` or :class:`g`) for truthiness::
class User(db.Model):
def __init__(self, username, remote_addr=None):
self.username = username
if remote_addr is None and request:
remote_addr = request.remote_addr
self.remote_addr = remote_addr
.. versionadded:: 0.7
"""
return _request_ctx_stack.top is not None
def has_app_context():
"""Works like :func:`has_request_context` but for the application
context. You can also just do a boolean check on the
:data:`current_app` object instead.
.. versionadded:: 0.9
"""
return _app_ctx_stack.top is not None
class AppContext(object):
"""The application context binds an application object implicitly
to the current thread or greenlet, similar to how the
:class:`RequestContext` binds request information. The application
context is also implicitly created if a request context is created
but the application is not on top of the individual application
context.
"""
def __init__(self, app):
self.app = app
self.url_adapter = app.create_url_adapter(None)
self.g = app.app_ctx_globals_class()
# Like request context, app contexts can be pushed multiple times
# but there a basic "refcount" is enough to track them.
self._refcnt = 0
def push(self):
"""Binds the app context to the current context."""
self._refcnt += 1
if hasattr(sys, 'exc_clear'):
sys.exc_clear()
_app_ctx_stack.push(self)
appcontext_pushed.send(self.app)
def pop(self, exc=None):
"""Pops the app context."""
self._refcnt -= 1
if self._refcnt <= 0:
if exc is None:
exc = sys.exc_info()[1]
self.app.do_teardown_appcontext(exc)
rv = _app_ctx_stack.pop()
assert rv is self, 'Popped wrong app context. (%r instead of %r)' \
% (rv, self)
appcontext_popped.send(self.app)
def __enter__(self):
self.push()
return self
def __exit__(self, exc_type, exc_value, tb):
self.pop(exc_value)
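def _app_context_usage_sketch(app):
    """Hedged sketch (not part of Flask itself): AppContext is normally
    entered through the with-statement protocol defined above, e.g. in
    scripts that need :data:`current_app` or :data:`g` outside a request.
    ``app`` is assumed to be a :class:`Flask` instance."""
    with app.app_context():
        from .globals import g
        g.example_value = 42  # bound to this application context only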
class RequestContext(object):
"""The request context contains all request relevant information. It is
created at the beginning of the request and pushed to the
`_request_ctx_stack` and removed at the end of it. It will create the
URL adapter and request object for the WSGI environment provided.
Do not attempt to use this class directly, instead use
:meth:`~flask.Flask.test_request_context` and
:meth:`~flask.Flask.request_context` to create this object.
When the request context is popped, it will evaluate all the
functions registered on the application for teardown execution
(:meth:`~flask.Flask.teardown_request`).
The request context is automatically popped at the end of the request
for you. In debug mode the request context is kept around if
exceptions happen so that interactive debuggers have a chance to
introspect the data. With 0.4 this can also be forced for requests
that did not fail and outside of `DEBUG` mode. By setting
``'flask._preserve_context'`` to `True` on the WSGI environment the
context will not pop itself at the end of the request. This is used by
the :meth:`~flask.Flask.test_client` for example to implement the
deferred cleanup functionality.
You might find this helpful for unittests where you need the
information from the context local around for a little longer. Make
sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in
that situation, otherwise your unittests will leak memory.
"""
def __init__(self, app, environ, request=None):
self.app = app
if request is None:
request = app.request_class(environ)
self.request = request
self.url_adapter = app.create_url_adapter(self.request)
self.flashes = None
self.session = None
# Request contexts can be pushed multiple times and interleaved with
# other request contexts. Now only if the last level is popped we
# get rid of them. Additionally if an application context is missing
# one is created implicitly so for each level we add this information
self._implicit_app_ctx_stack = []
# indicator if the context was preserved. Next time another context
# is pushed the preserved context is popped.
self.
|
zack3241/incubator-airflow
|
airflow/sensors/base_sensor_operator.py
|
Python
|
apache-2.0
| 2,492
| 0
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from time import sleep
from airflow.exceptions import AirflowException, AirflowSensorTimeout, \
AirflowSkipException
from airflow.models import BaseOperator
from airflow.utils import timezone
from airflow.utils.decorators import apply_defaults
class BaseSensorOperator(BaseOperator):
"""
    Sensor operators are derived from this class and inherit these attributes.
    Sensor operators keep executing at a time interval and succeed when
    a criterion is met and fail if and when they time out.
:param soft_fail: Set to true to mark the task as SKIPPED on failure
:type soft_fail: bool
:param poke_interval: Time in seconds that the job should wait in
        between each try
    :type poke_interval: int
:param timeout: Time, in seconds before the task times out and fails.
:type timeout: int
"""
ui_color = '#e6f1f2'
@apply_defaults
def __init__(self,
poke_interval=60,
timeout=60 * 60 * 24 * 7,
soft_fail=False,
*args,
**kwargs):
super(BaseSensorOperator, self).__init__(*args, **kwargs)
self.poke_interval = poke_interval
self.soft_fail = soft_fail
self.timeout = timeout
def poke(self, context):
"""
Function that the sensors defined while deriving this class should
override.
"""
raise AirflowException('Override me.')
def execute(self, context):
started_at = timezone.utcnow()
while not self.poke(context):
if (timezone.utcnow() - started_at).total_seconds() > self.timeout:
if self.soft_fail:
raise AirflowSkipException('Snap. Time is OUT.')
else:
raise AirflowSensorTimeout('Snap. Time is OUT.')
sleep(self.poke_interval)
self.log.info("Success criteria met. Exiting.")
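# Hedged sketch (not part of Airflow itself): a minimal sensor built on the
# contract above -- override poke() to return True once the criteria is met;
# execute() supplies the retry loop and timeout. The path is illustrative.
class _FileExistsSensorSketch(BaseSensorOperator):
    def poke(self, context):
        import os.path
        return os.path.exists('/tmp/_trigger_file')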
|
Bismarrck/pymatgen
|
pymatgen/optimization/__init__.py
|
Python
|
mit
| 229
| 0.026201
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
|
TravelModellingGroup/TMGToolbox
|
TMGToolbox/src/XTMF_internal/import_function_batch_file.py
|
Python
|
gpl-3.0
| 2,153
| 0.010218
|
'''
Copyright 2021 Travel Modelling Group, Department of Civil Engineering, University of Toronto
This file is part of the TMG Toolbox.
The TMG Toolbox is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The TMG Toolbox is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with the TMG Toolbox. If not, see <http://www.gnu.org/licenses/>.
'''
#---METADATA---------------------
'''
Import VDF Batch File
Authors: JamesVaughan
Latest revision by: JamesVaughan
This tool will allow XTMF to be able to execute a vdf batch file into
an EMME Databank.
'''
#---VERSION HISTORY
'''
0.0.1 Created on 2021-01-20 by JamesVaughan
'''
import inro.modeller as _m
import traceback as _traceback
_MODELLER = _m.Modeller() #Instantiate Modeller once.
process = _MODELLER.tool('inro.emme.data.function.function_transaction')
class CopyScenario(_m.Tool()):
version = '0.0.1'
batch_file = _m.Attribute(str)
scenario_number = _m.Attribute(int)
def page(self):
pb = _m.ToolPageBuilder(self, title="Import VDF Batch File",
runnable=False,
description="Cannot be called from Modeller.",
branding_text="XTMF")
return pb.render()
def run(self):
pass
def __call__(self, batch_file, scenario_number):
try:
project = _MODELLER.emmebank
scenario = project.scenario(str(scenario_number))
process(transaction_file=batch_file,
scenario=scenario,
throw_on_error = True)
except Exception as e:
raise Exception(_traceback.format_exc())
|
ScottWales/rose
|
lib/python/rose/config_editor/data_helper.py
|
Python
|
gpl-3.0
| 21,509
| 0.000372
|
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# (C) British Crown Copyright 2012-5 Met Office.
#
# This file is part of Rose, a framework for meteorological suites.
#
# Rose is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rose is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Rose. If not, see <http://www.gnu.org/licenses/>.
#-----------------------------------------------------------------------------
import re
import rose.config
import rose.config_editor
REC_ELEMENT_SECTION = re.compile(r"^(.*)\((.+)\)$")
class ConfigDataHelper(object):
def __init__(self, data, util):
self.data = data
self.util = util
def get_config_has_unsaved_changes(self, config_name):
"""Return True if there are unsaved changes for config_name."""
config_data = self.data.config[config_name]
variables = config_data.vars.get_all(skip_latent=True)
save_vars = config_data.vars.get_all(save=True, skip_latent=True)
sections = config_data.sections.get_all(skip_latent=True)
save_sections = config_data.sections.get_all(save=True,
skip_latent=True)
now_set = set([v.to_hashable() for v in variables])
save_set = set([v.to_hashable() for v in save_vars])
now_sect_set = set([s.to_hashable() for s in sections])
save_sect_set = set([s.to_hashable() for s in save_sections])
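        # Any added, removed or edited variable or section ends up in one of
        # the symmetric differences below: e.g. editing a value changes the
        # variable's hashable form, leaving it in now_set but not save_set.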
return (config_name not in self.data.saved_config_names or
now_set ^ save_set or
now_sect_set ^ save_sect_set)
def get_config_meta_flag(self, config_name, from_this_config_obj=None):
"""Return the metadata id flag."""
for section, option in [
[rose.CONFIG_SECT_TOP, rose.CONFIG_OPT_META_TYPE],
[rose.CONFIG_SECT_TOP, rose.CONFIG_OPT_PROJECT]]:
if from_this_config_obj is not None:
type_node = from_this_config_obj.get(
[section, option], no_ignore=True)
if type_node is not None and type_node.value:
return type_node.value
continue
id_ = self.util.get_id_from_section_option(section, option)
var = self.get_variable_by_id(id_, config_name)
if var is not None:
return var.value
return None
def is_ns_sub_data(self, ns):
"""Return whether a namespace is mentioned in summary data."""
ns_meta = self.data.namespace_meta_lookup.get(ns, {})
return ns_meta.get("has_sub_data", False)
def is_ns_content(self, ns):
"""Return whether a namespace has any existing content."""
config_name = self.util.split_full_ns(self.data, ns)[0]
for section in self.get_sections_from_namespace(ns):
if section in self.data.config[config_name].sections.now:
return True
return self.is_ns_sub_data(ns)
def get_metadata_for_config_id(self, node_id, config_name):
"""Retrieve the corresponding metadata for a variable."""
config_data = self.data.config[config_name]
meta_config = config_data.meta
meta_data = {}
if not node_id:
return {'id': node_id}
return rose.macro.get_metadata_for_config_id(node_id, meta_config)
def get_variable_by_id(self, var_id, config_name, save=False,
latent=False):
"""Return the matching variable or None."""
sect, opt = self.util.get_section_option_from_id(var_id)
return self.data.config[config_name].vars.get_var(
sect, opt, save, skip_latent=not latent)
#------------------ Data model helper functions ------------------------------
def get_data_for_namespace(self, ns, from_saved=False):
"""Return a list of vars and a list of latent vars for this ns."""
config_name = self.util.split_full_ns(self.data, ns)[0]
config_data = self.data.config[config_name]
allowed_sections = self.get_sections_from_namespace(ns)
variables = []
latents = []
if from_saved:
var_map = config_data.vars.save
latent_var_map = config_data.vars.latent_save
else:
var_map = config_data.vars.now
latent_var_map = config_data.vars.latent
for section in allowed_sections:
variables.extend(var_map.get(section, []))
latents.extend(latent_var_map.get(section, []))
ns_vars = [v for v in variables if v.metadata.get('full_ns') == ns]
ns_latents = [v for v in latents if v.metadata.get('full_ns') == ns]
return ns_vars, ns_latents
def get_macro_info_for_namespace(self, ns):
"""Return some information for custom macros for this namespace."""
        config_name = self.util.split_full_ns(self.data, ns)[0]
config_data = self.data.config[config_name]
ns_macros_text = self.data.namespace_meta_lookup.get(ns, {}).get(
rose.META_PROP_MACRO, "")
if not ns_macros_text:
return {}
ns_macros = rose.variable.array_split(ns_macros_text,
only_this_delim=",")
module_prefix = self.get_macro_module_prefix(config_name)
for i, ns_macro in enumerate(ns_macros):
ns_macros[i] = module_prefix + ns_macro
ns_macro_info = {}
macro_tuples = rose.macro.get_macro_class_methods(config_data.macros)
for module_name, class_name, method_name, docstring in macro_tuples:
this_macro_name = ".".join([module_name, class_name])
this_macro_method_name = ".".join([this_macro_name, method_name])
this_info = (method_name, docstring)
if this_macro_name in ns_macros:
key = this_macro_name.replace(module_prefix, "", 1)
ns_macro_info.update({key: this_info})
elif this_macro_method_name in ns_macros:
key = this_macro_method_name.replace(module_prefix, "", 1)
ns_macro_info.update({key: this_info})
return ns_macro_info
def get_section_data_for_namespace(self, ns):
"""Return real and latent lists of Section objects for this ns."""
allowed_sections = (
self.data.helper.get_sections_from_namespace(ns))
config_name = self.util.split_full_ns(self.data, ns)[0]
config_data = self.data.config[config_name]
real_sections = []
for section, sect_data in config_data.sections.now.items():
if section in allowed_sections:
real_sections.append(sect_data)
latent_sections = []
for section, sect_data in config_data.sections.latent.items():
if section in allowed_sections:
latent_sections.append(sect_data)
return real_sections, latent_sections
def get_sub_data_for_namespace(self, ns, from_saved=False):
"""Return any sections/variables below this namespace."""
sub_data = {"sections": {
|
}, "variables": {}}
config_name = self.util.split_full_ns(self.data, ns)[0]
config_data = self.data.config[config_name]
for sect, sect_data in config_data.sections.now.items():
sect_ns = sect_data.metadata["full_ns"]
if sect_ns.startswith(ns):
sub_data["sections"].update({sect: sect_data})
for sect, variables in config_data.vars.now.items():
for variable in variables:
if variable.metadata["full_ns"].startswith(ns):
sub_data["variables"].setdefault(sect,
|
skilstak/code-dot-org-python
|
codestudio/artist.py
|
Python
|
unlicense
| 10,452
| 0.00995
|
"""Artist puzzles from <http://code.org> built on `tkinter` only.
Artist is similar to the great `turtle` standard module for teaching
programming but builds on a foundation of puzzle and solution (which
`turtle` does not):
- Subset of basic Python turtle commands (all needed for puzzles).
- Puzzles created by students with Artist can be checked against
a known solution saved as JSON.
- New puzzles can be created with Artist by simply `artist.save()` and
creating challenge stub programs for students to complete that `load()`
the saved challenge.
- Artist has only `move_*`, `turn_*`, and `jump_*` and always uses
verbs to begin method and function names.
- Artist methods correspond one-to-one with those from <http://code.org>
for easier porting by students.
- Artist supports sprite animation and theming (e.g. zombie, turtle, etc.).
- Artist includes sound and sound theming as well.
- Artist can be made to be very slow or very, very fast unlike `turtle`
- Artist metaphor matches 'canvas' metaphor used in all graphics coding.
- Artist draws lines individually instead of updating a single line with
new coordinates so that the artists drawn `lines` can be checked to
see if the line was drawn forward or backward and give credit for that
specific line segment. This allows set() to isolate the essential lines
when checking solutions without throwing out an otherwise good solution
that was drawn in a different way. This is critical for code.org puzzles
since often there is more than one way to retrace drawn lines to get
to a new position.
"""
import os
import json
import math
import random
from .tkcanvas import Canvas
from .gamegrids import XYGrid,xy,slope,bearing,length
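# Hedged sketch (illustration only, not from the original file): why
# per-segment lines matter, as the docstring explains above -- a segment
# drawn in either direction can be normalised before set comparison.
def _normalise_segment_sketch(p1, p2):
    'Return a direction-independent key for a line segment of (x, y) pairs.'
    return (min(p1, p2), max(p1, p2))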
class Artist():
start_direction = 0
startx = 0
starty = 0
color = 'black'
width = 7
speed = 'normal'
resources = os.path.join(os.path.dirname(__file__),'res','artist')
def __init__(self,proto=None):
"""In most cases you want Artist.from_json() instead."""
self.grid = None
self.solution = None
# aggregate
if proto:
self.canvas = proto.canvas
self.puzzle = proto.puzzle
self.log = proto.log
self.uid = proto.uid
self.type = proto.type
            self.theme = proto.theme
self.x = proto.x
self.y = proto.y
self.direction = proto.start_direction
self.startx = proto.startx
self.starty = proto.starty
self.lastx = proto.lastx
self.lasty = proto.lasty
self.last_direction = proto.direction
self.sprite = proto.sprite
else:
self.canvas = Canvas()
self.puzzle = []
self.log = []
self.uid = None
self.type = 'artist'
self.theme = 'default'
self.x = self.startx
self.y = self.starty
self.direction = self.start_direction
self.lastx = self.x
self.lasty = self.y
self.last_direction = self.direction
self.sprite = None
self._lines_to_draw = [] # drawing cache
@property
def title(self):
return self.canvas.title
@title.setter
def title(self,new):
self._title = new
if not new:
if self.uid:
self.canvas.title = self.uid
else:
if self.uid:
self.canvas.title = new + ' [' + self.uid + ']'
else:
self.canvas.title = new
@title.deleter
def title(self):
self.canvas.title = self.uid
def config(self,conf):
"""Sets attributes based dictionary (usually after JSON load)."""
for key in conf:
if key in ('startx','starty','start_direction'):
setattr(__class__,key,conf[key])
if key in ('puzzle','uid','title','type','theme'):
setattr(self,key,conf[key])
def pen_color(self,color):
"""Just to be compatible with 'Show Code' JavaScript"""
self.color = color
@classmethod
def from_json(cls,json_):
if type(json_) is str:
json_ = json.loads(json_)
instance = cls()
instance.config(json_)
return instance
def setup(self):
self.title = self._title # for missing uid
self.direction = self.start_direction
self.x = self.startx
self.y = self.starty
self.grid = XYGrid().init(400,400,0)
self.draw_lines(self.puzzle, color='lightgrey', speed='fastest')
self.solution = XYGrid(self.grid)
self.grid = XYGrid().init(400,400,0) # wipe
strip = os.path.join(self.resources,self.theme,
'sprite_strip180_70x50.gif')
self.sprite = self.canvas.create_sprite(strip)
self.sprite.move(self.startx,self.starty,self.start_direction)
def check(self):
if self.grid == self.solution:
return self.good_job()
else:
if self._close_enough():
return self.good_job()
else:
return self.try_again()
def _close_enough(self):
for y in range(400):
for x in range(400):
if self.solution[x][y] and not self.grid.ping(x,y):
return False
if self.grid[x][y] and not self.solution.ping(x,y):
return False
return True
def show_check(self):
canvas = Canvas()
for y in range(-200,201):
for x in range(-200,201):
if not self.solution[x][y] and not self.grid[x][y]:
pass
elif self.solution[x][y] == self.grid[x][y]:
canvas.poke(x,-y,'lightgreen')
elif self.solution[x][y]:
canvas.poke(x,-y,'red')
elif self.grid[x][y]:
canvas.poke(x,-y,'orange')
self.wait()
def show_solution(self):
canvas = Canvas()
for y in range(-200,201):
for x in range(-200,201):
if self.grid[x][y]:
canvas.poke(x,-y,'black')
self.wait()
def show_lines(self):
canvas = Canvas()
for y in range(-200,201):
for x in range(-200,201):
if self.grid[x][y]:
canvas.poke(x,-y,'black')
self.wait()
def show_wrong(self):
canvas = Canvas()
for y in range(-200,201):
for x in range(-200,201):
if self.grid[x][y] and self.grid[x][y] != self.solution[x][y]:
canvas.poke(x,-y,'black')
self.wait()
def save(self,name=None,fname=None):
name = name if name else self.uid
if os.path.isdir('puzzles'):
fname = os.path.join('puzzles', name + '.json')
assert not os.path.isfile(fname), '{} exists'.format(name)
else:
fname = name + '.json'
with open(fname,'w') as f:
f.write(json.dumps({
"uid": self.uid,
"type": self.type,
"title": self._title,
"startx": self.startx,
"starty": self.starty,
"start_direction": self.start_direction,
"puzzle": self.log
}))
def try_again(self,message='Nope. Try again.'):
# TODO replace with a canvas splash window graphic
print(message)
self.canvas.exit_on_click()
def good_job(self,message='Perfect! Congrats!'):
# TODO replace with a canvas splash window graphic
print(message)
self.canvas.exit_on_click()
def wait_for_click(self):
return self.good_job('Beautiful!')
wait = wait_for_click
def clear(self):
self._lines_to_draw = []
self.log = []
def draw_lines(self,lines,color=None,speed=None):
self.grid.draw_lines(lines,1)
if speed:
self.canvas.speed = speed
else:
self.canvas.speed = se
|
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-2.4.3/Lib/email/Message.py
|
Python
|
mit
| 31,201
| 0.000609
|
# Copyright (C) 2001-2006 Python Software Foundation
# Author: Barry Warsaw
# Contact: email-sig@python.org
"""Basic message object for the email package object model."""
import re
import uu
import binascii
import warnings
from cStringIO import StringIO
# Intrapackage imports
from email import Utils
from email import Errors
from email import Charset
SEMISPACE = '; '
# Regular expression used to split header parameters. BAW: this may be too
# simple. It isn't strictly RFC 2045 (section 5.1) compliant, but it catches
# most headers found in the wild.  We may eventually need a full-fledged
# parser.
paramre = re.compile(r'\s*;\s*')
# Regular expression that matches `special' characters in parameters, the
# existence of which forces quoting of the parameter value.
tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]')
# Helper functions
def _formatparam(param, value=None, quote=True):
"""Convenience function to format and return a key=value pair.
This will quote the value if needed or if quote is true.
"""
if value is not None and len(value) > 0:
# A tuple is used for RFC 2231 encoded parameter values where items
# are (charset, language, value). charset is a string, not a Charset
# instance.
if isinstance(value, tuple):
# Encode as per RFC 2231
param += '*'
value = Utils.encode_rfc2231(value[2], value[0], value[1])
# BAW: Please check this. I think that if quote is set it should
# force quoting even if not necessary.
if quote or tspecials.search(value):
return '%s="%s"' % (param, Utils.quote(value))
else:
return '%s=%s' % (param, value)
else:
return param
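# Hedged usage sketch for _formatparam (values are illustrative):
#   _formatparam('charset', 'us-ascii')        -> 'charset="us-ascii"'
#   _formatparam('charset', 'us-ascii', False) -> 'charset=us-ascii'
#   _formatparam('filename', ('utf-8', '', 'na me.txt'))
#       -> 'filename*="utf-8\'\'na%20me.txt"' (RFC 2231 encoding)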
def _parseparam(s):
plist = []
while s[:1] == ';':
s = s[1:]
end = s.find(';')
while end > 0 and s.count('"', 0, end) % 2:
end = s.find(';', end + 1)
if end < 0:
end = len(s)
f = s[:end]
if '=' in f:
i = f.index('=')
f = f[:i].strip().lower() + '=' + f[i+1:].strip()
plist.append(f.strip())
s = s[end:]
return plist
def _unquotevalue(value):
# This is different than Utils.collapse_rfc2231_value() because it doesn't
# try to convert the value to a unicode. Message.get_param() and
# Message.get_params() are both currently defined to return the tuple in
# the face of RFC 2231 parameters.
if isinstance(value, tuple):
return value[0], value[1], Utils.unquote(value[2])
else:
return Utils.unquote(value)
class Message:
"""Basic message object.
A message object is defined as something that has a bunch of RFC 2822
headers and a payload. It may optionally have an envelope header
(a.k.a. Unix-From or From_ header). If the message is a container (i.e. a
multipart or a message/rfc822), then the payload is a list of Message
objects, otherwise it is a string.
Message objects implement part of the `mapping' interface, which assumes
    there is exactly one occurrence of the header per message.  Some headers
do in fact appear multiple times (e.g. Received) and for those headers,
you must use the explicit API to set or get all the headers. Not all of
the mapping methods are implemented.
"""
def __init__(self):
self._headers = []
self._unixfrom = None
self._payload = None
self._charset = None
# Defaults for multipart messages
self.preamble = self.epilogue = None
self.defects = []
# Default content type
self._default_type = 'text/plain'
def __str__(self):
"""Return the entire formatted message as a string.
This includes the headers, body, and envelope header.
"""
return self.as_string(unixfrom=True)
def as_string(self, unixfrom=False):
"""Return the entire formatted message as a string.
Optional `unixfrom' when True, means include the Unix From_ envelope
header.
This is a convenience method and may not generate the message exactly
as you intend because by default it mangles lines that begin with
"From ". For more flexibility, use the flatten() method of a
Generator instance.
"""
from email.Generator import Generator
fp = StringIO()
g = Generator(fp)
g.flatten(self, unixfrom=unixfrom)
return fp.getvalue()
def is_multipart(self):
"""Return True if the message consists of multiple parts."""
return isinstance(self._payload, list)
#
# Unix From_ line
#
def set_unixfrom(self, unixfrom):
self._unixfrom = unixfrom
def get_unixfrom(self):
return self._unixfrom
#
# Payload manipulation.
#
def attach(self, payload):
"""Add the given payload to the current payload.
The current payload will always be a list of objects after this method
is called. If you want to set the payload to a scalar object, use
set_payload() instead.
"""
if self._payload is None:
self._payload = [payload]
else:
self._payload.append(payload)
def get_payload(self, i=None, decode=False):
"""Return a reference to the payload.
The payload will either be a list object or a string. If you mutate
the list object, you modify the message's payload in place. Optional
i returns that index into the payload.
Optional decode is a flag indicating whether the payload should be
decoded or not, according to the Content-Transfer-Encoding header
(default is False).
When True and the message is not a multipart, the payload will be
decoded if this header's value is `quoted-printable' or `base64'. If
some other encoding is used, or the header is missing, or if the
payload has bogus data (i.e. bogus base64 or uuencoded data), the
payload is returned as-is.
If the message is a multipart and the decode flag is True, then None
is returned.
"""
if i is None:
payload = self._payload
elif not isinstance(self._payload, list):
raise TypeError('Expected list, got %s' % type(self._payload))
else:
payload = self._payload[i]
if decode:
if self.is_multipart():
return None
cte = self.get('content-transfer-encoding', '').lower()
if cte == 'quoted-printable':
return Utils._qdecode(payload)
elif cte == 'base64':
try:
return Utils._bdecode(payload)
except binascii.Error:
# Incorrect padding
return payload
elif cte in ('x-uuencode', 'uuencode', 'uue', 'x-uue'):
sfp = StringIO()
try:
                uu.decode(StringIO(payload+'\n'), sfp, quiet=True)
payload = sfp.getvalue()
except uu.Error:
# Some decoding problem
return payload
# Everything else, including encodings with 8bit or 7bit are returned
# unchanged.
return payload
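    # Hedged usage sketch for get_payload (illustrative):
    #   msg.get_payload()            -> the raw payload (string, or a list
    #                                   of sub-messages for multiparts)
    #   msg.get_payload(decode=True) -> decoded string for base64 or
    #                                   quoted-printable parts, None for
    #                                   multiparts, raw payload on bogus data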
    def set_payload(self, payload, charset=None):
"""Set the payload to the given value.
Optional charset sets the message's default character set. See
set_charset() for details.
"""
self._payload = payload
if charset is not None:
self.set_charset(charset)
def set_charset(self, charset):
"""Set the charset of the payload to a given character set.
charset can be a Charset instance, a string naming a character set, or
None. If it is a string it will be converted to a Charset instance.
If charset is None, the charset parameter will be removed from the
Content-Type field. Anything else will generate a TypeError.
        The message will be assumed to be of type text/* encoded with charset.input_charset.
|
tongwang01/tensorflow
|
tensorflow/python/ops/array_ops.py
|
Python
|
apache-2.0
| 86,939
| 0.004911
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""## Casting
TensorFlow provides several operations that you can use to cast tensor data
types in your graph.
@@string_to_number
@@to_double
@@to_float
@@to_bfloat16
@@to_int32
@@to_int64
@@cast
@@bitcast
@@saturate_cast
## Shapes and Shaping
TensorFlow provides several operations that you can use to determine the shape
of a tensor and change the shape of a tensor.
@@shape
@@shape_n
@@size
@@rank
@@reshape
@@squeeze
@@expand_dims
@@meshgrid
## Slicing and Joining
TensorFlow provides several operations to slice or extract parts of a tensor,
or join multiple tensors together.
@@slice
@@strided_slice
@@split
@@tile
@@pad
@@concat
@@stack
@@pack
@@unstack
@@unpack
@@reverse_sequence
@@reverse
@@transpose
@@extract_image_patches
@@space_to_batch_nd
@@space_to_batch
@@required_space_to_batch_paddings
@@batch_to_space_nd
@@batch_to_space
@@space_to_depth
@@depth_to_space
@@gather
@@gather_nd
@@unique_with_counts
@@scatter_nd
@@dynamic_partition
@@dynamic_stitch
@@boolean_mask
@@one_hot
@@sequence_mask
@@dequantize
@@quantize_v2
@@quantized_concat
@@setdiff1d
## Fake quantization
Operations used to help train for better quantization accuracy.
@@fake_quant_with_min_max_args
@@fake_quant_with_min_max_args_gradient
@@fake_quant_with_min_max_vars
@@fake_quant_with_min_max_vars_gradient
@@fake_quant_with_min_max_vars_per_channel
@@fake_quant_with_min_max_vars_per_channel_gradient
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import numpy as np
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
# 'Constant' gets imported in the module 'array_ops'.
from tensorflow.python.framework.constant_op import constant
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gen_math_ops
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_array_ops import *
# pylint: enable=wildcard-import
# Used for slicing to specify a new 1 size dimension
newaxis = None
# We override the 'slice' for the "slice" op, so we keep python's
# existing 'slice' for later use in this module.
_baseslice = slice
# Aliases for some automatically-generated names.
listdiff = gen_array_ops.list_diff
def setdiff1d(x, y, index_dtype=dtypes.int32, name=None):
"""Returns the difference between the `x` and `y` treated as sets.
Args:
x: Set of values not assumed to be unique.
y: Set of values not assumed to be unique.
index_dtype: Output index type (`tf.int32`, `tf.int64`) default: `tf.int32`
name: A name for the operation (optional).
Returns:
A `Tensor` the same type as `x` and `y`
    A `Tensor` of type `index_dtype` representing the positions in `x` of the kept values.
"""
return gen_array_ops.list_diff(x, y, index_dtype, name)
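# Hedged usage sketch (illustrative values):
#   x = [1, 2, 3, 4, 5], y = [1, 3]
#   out, idx = tf.setdiff1d(x, y)   # out = [2, 4, 5], idx = [1, 3, 4]
# idx holds the positions in x of the values kept in out.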
def shape(input, name=None, out_type=dtypes.int32):
# pylint: disable=redefined-builtin
"""Returns the shape of a tensor.
This operation returns a 1-D integer tensor representing the shape of `input`.
For example:
```python
# 't' is [[[1, 1, 1], [2, 2, 2]], [[3, 3, 3], [4, 4, 4]]]
shape(t) ==> [2, 2, 3]
```
Args:
input: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
out_type: (Optional) The specified output type of the operation
(`int32` or `int64`). Defaults to `tf.int32`.
Returns:
A `Tensor` of type `out_type`.
"""
return shape_internal(input, name, optimize=True, out_type=out_type)
def shape_internal(input, name=None, optimize=True, out_type=dtypes.int32):
# pylint: disable=redefined-builtin
"""Returns the shape of a tensor.
Args:
input: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
optimize: if true, encode the shape as a constant when possible.
out_type: (Optional) The specified output type of the operation
(`int32` or `int64`). Defaults to tf.int32.
Returns:
A `Tensor` of type `out_type`.
"""
with ops.name_scope(name, "Shape", [input]) as name:
if isinstance(
input, (sparse_tensor.SparseTensor, sparse_tensor.SparseTensorValue)):
return gen_math_ops.cast(input.shape, out_type)
else:
input_tensor = ops.convert_to_tensor(input)
input_shape = input_tensor.get_shape()
if optimize and input_shape.is_fully_defined():
return constant(input_shape.as_list(), out_type, name=name)
return gen_array_ops.shape(input, name=name, out_type=out_type)
def size(input, name=None, out_type=dtypes.int32):
# pylint: disable=redefined-builtin
"""Returns the size of a tensor.
This operation returns an integer representing the number of elements in
`input`.
For example:
```python
  # 't' is [[[1, 1, 1], [2, 2, 2]], [[3, 3, 3], [4, 4, 4]]]
size(t) ==> 12
```
Args:
input: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
out_type: (Optional) The specified output type of the operation
(`int32` or `int64`). Defaults to tf.int32.
Returns:
    A `Tensor` of type `out_type`.
"""
return size_internal(input, name, optimize=True, out_type=out_type)
def size_internal(input, name=None, optimize=True, out_type=dtypes.int32):
# pylint: disable=redefined-builtin,protected-access
"""Returns the size of a tensor.
Args:
input: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
optimize: if true, encode the size as a constant when possible.
out_type: (Optional) The specified output type of the operation
(`int32` or `int64`). Defaults to tf.int32.
Returns:
A `Tensor` of type `out_type`.
"""
with ops.name_scope(name, "Size", [input]) as name:
if isinstance(
input, (sparse_tensor.SparseTensor, sparse_tensor.SparseTensorValue)):
return gen_math_ops._prod(
gen_math_ops.cast(input.shape, out_type), 0, name=name)
else:
input_tensor = ops.convert_to_tensor(input)
input_shape = input_tensor.get_shape()
if optimize and input_shape.is_fully_defined():
return constant(input_shape.num_elements(), out_type, name=name)
return gen_array_ops.size(input, name=name, out_type=out_type)
def rank(input, name=None):
# pylint: disable=redefined-builtin
"""Returns the rank of a tensor.
This operation returns an integer representing the rank of `input`.
For example:
```python
# 't' is [[[1, 1, 1], [2, 2, 2]], [[3, 3, 3], [4, 4, 4]]]
# shape of tensor 't' is [2, 2, 3]
rank(t) ==> 3
```
**Note**: The rank of a tensor is not the same as the rank of a matrix. The
rank of a tensor is the number of indices required to uniquely select each
element of the tensor. Rank is also known as "order", "degree", or "ndims."
Args:
input: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `int32`.
"""
return rank_internal(input, name, optimize=True)
def rank_internal(input, name=None, optimize=True):
# pylint: disable=redefined-builtin
"""Returns the rank of a tensor.
Args:
input: A `Tensor` or `SparseTensor`.
name: A name for the operation (optional).
    optimize: if true, encode the rank as a constant when possible.
|
alibozorgkhan/utils
|
utils/fileoo.py
|
Python
|
mit
| 1,696
| 0
|
# -*- coding: utf-8 -*-
import csv
import os
import gzip
class File:
def read(self, path, **kwargs):
path = os.path.join(kwargs.get('root_path', ''), path)
content_type = kwargs.get('content_type', 'txt')
if content_type == 'txt':
            with open(path, 'r') as f:
content = f.read()
yield content
elif content_type == 'gz':
with gzip.open(path, 'r') as f:
content = f.read()
                yield content
elif content_type == 'csv':
with open(path, 'rU') as f:
reader = csv.reader(f)
for line in reader:
yield line
else:
raise Exception('Bad file type')
def write(self, path, content, **kwargs):
path = os.path.join(kwargs.get('root_path', ''), path)
content_type = kwargs.get('content_type', 'txt')
if content_type == 'txt':
            with open(path, 'wb') as f:
f.write(content)
elif content_type == 'gz':
with gzip.open(path, 'w') as f:
f.write(content)
elif content_type == 'csv':
with open(path, 'wb') as f:
writer = csv.writer(f)
for c in content:
if c['type'] == 'single':
writer.writerow(c['data'])
elif c['type'] == 'multi':
writer.writerows(c['data'])
else:
raise Exception('Row type must be specified')
else:
raise Exception('Bad file type')
def exists(self, path):
return os.path.exists(path)
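# Hedged usage sketch (assumes a local 'data.csv' exists):
if __name__ == '__main__':
    f = File()
    for row in f.read('data.csv', content_type='csv'):
        print row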
|
unioslo/cerebrum
|
Cerebrum/modules/virthome/base.py
|
Python
|
gpl-2.0
| 29,202
| 0.001609
|
# -*- encoding: utf-8 -*-
#
# Copyright 2013-2021 University of Oslo, Norway
#
# This file is part of Cerebrum.
#
# Cerebrum is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Cerebrum is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cerebrum; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
This file contains workflows and common operations related to virthome
accounts and groups. It's a generalization of some of the bofhd-commands, so
that they can be used by other applications.
NOTICE: The classes here won't check permissions, that needs to be done by the
the caller!
TODO: This is the new home for all virthome-related bofh-commands. All changes
should be done here, and called from bofhd_virthome_cmds. This class should
stay generic enough to be used outside of bofhd.
"""
import sys
import re
import six
import cereconf
from Cerebrum.group.GroupRoles import GroupRoles
from Cerebrum.Utils import Factory
from Cerebrum.Errors import CerebrumError, NotFoundError
from mx.DateTime import DateTimeDelta
from Cerebrum.modules.bofhd.auth import BofhdAuthOpSet, BofhdAuthOpTarget, BofhdAuthRole
from Cerebrum.modules.virthome.bofhd_auth import BofhdVirtHomeAuth
from Cerebrum.modules.virthome.VirtAccount import BaseVirtHomeAccount, FEDAccount, VirtAccount
# TODO: Is this okay? Should we just have one class instead? There's no real
# reason to keep two classes...
# They have been split up here in order to separate between methods that
# should be visible (imported) in other modules. All the methods in
# VirthomeUtils should only be used here. However, they DO need to be
# visible elsewhere until all functionality have been migrated from
# bofhd_virthome_cmds. It'll be easy to merge the classes later. I hope.
class VirthomeBase:
""" The outer access layer: Methods here are workflows such as creating
accounts, creating groups, disabling groups, creating events...
This is the only class that _should_ be imported (visible) to other
modules.
"""
def __init__(self, db):
""" NOTE: This class does not commit any changes to the db. That must
be done from the calling environment.
@type db: Cerebrum.database.Database
@param db: A database connection.
"""
self.db = db
self.co = Factory.get('Constants')(db)
self.clconst = Factory.get('CLConstants')(db)
self.account_class = Factory.get('Account')
self.group_class = Factory.get('Group')
self.vhutils = VirthomeUtils(db)
# TODO: Should owners be restricted to the FEDAccount class? How about the
# Webapp-account, or other su accs?
def group_create(self, group_name, description, creator, owner, url=None, forward=None):
""" This method creates a new VirtHome group.
NOTE: Some group name formats are reserved for specific applications!
This method WILL allow creation of reserved group names.
@type group_name: str
@param group_name: The name of the new group
@type description: str
@param description: The group description
@type creator: self.account_class
@param creator: The account object of the creator of this group.
@type owner: self.account_class
@param owner: The account object of the owner of this group.
@type url: str
@param url: A url resource associated with the group
@type forward: str
@param forward: A url resource to an external app that uses this group
"""
gr = self.group_class(self.db)
if self.vhutils.group_exists(group_name):
raise CerebrumError("Group name '%s' already exists" % group_name)
# TBD: Verify owner.np_type is FEDaccount? Must it be?
try:
gr.populate(creator.entity_id, group_name, description)
gr.write_db()
gr.set_group_resource(url)
except (ValueError, AssertionError):
raise CerebrumError(str(sys.exc_info()[1]))
forward = self.vhutils.whitelist_url(forward)
if forward:
gr.populate_trait(self.co.trait_group_forward, strval=forward)
for spread in getattr(cereconf, "BOFHD_NEW_GROUP_SPREADS", ()):
gr.add_spread(self.co.human2constant(spread, self.co.Spread))
gr.write_db()
roles = GroupRoles(self.db)
roles.add_admin_to_group(owner.entity_id, gr.entity_id)
return gr
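    # Hedged usage sketch (names are illustrative; requires an open Cerebrum
    # database connection plus existing creator and owner account objects):
    #   vh = VirthomeBase(db)
    #   gr = vh.group_create('demo-group', 'A demo group', creator_acc, owner_acc)
    #   db.commit()  # the class itself never commits (see __init__ note)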
def group_invite_user(self, inviter, group, email, timeout=3):
""" This method sets up an event that will allow a user to join a
group.
@type inviter: self.account_class
@param inviter: The account that is setting up the invitation
@type group: self.group_class
@param group_name: The group that should be joined
@type email: str
        @param email: The email address of the user that is invited
@type timeout: int
@param timeout: The number of days until the confirmation key expires.
@rtype: dict
@return: A dictionary with the following keys:
'confirmation_key': <str> The code that is used to confirm the invitation
'match_user': <str> A username, if a user exists with that email-address
'match_user_email': An e-mailaddress. Not sure why?
"""
ac = self.account_class(self.db)
assert hasattr(inviter, 'entity_id')
assert hasattr(group, 'entity_id')
timeout = DateTimeDelta(int(timeout))
if timeout.day < 1:
raise CerebrumError('Timeout too short (%d)' % timeout.day)
if (timeout > cereconf.MAX_INVITE_PERIOD):
raise CerebrumError("Timeout too long (%d)" % timeout.day)
ret = {'confirmation_key': self.vhutils.setup_event_request(
group.entity_id,
self.clconst.va_group_invitation,
params={'inviter_id': inviter.entity_id,
'group_id': group.entity_id,
'invitee_mail': email,
'timeout': timeout.day,},
change_by=inviter.entity_id)}
# check if e-mail matches a valid username
try:
ac.find_by_name(email)
ret['match_user'] = ac.account_name
if ac.np_type in (self.co.fedaccount_type, self.co.virtaccount_type):
ret['match_user_email'] = ac.get_email_address()
except NotFoundError:
pass
return ret
def group_disable(self, group):
"""This method removes all members and auth data related to a group,
effectively disabling it without actually 'nuking' it.
@type group_name: str
@param group_name: The name of the group that should be joined
@rtype: str
@return: The name of the group that was disabled, nice for feedback.
"""
assert hasattr(group, 'entity_id')
# Yank all the spreads
for row in group.get_spread():
group.delete_spread(row["spread"])
# Remove all members
for membership in group.search_members(group_id=group.entity_id,
member_filter_expired=False):
group.remove_member(membership["member_id"])
group.write_db()
# Clean up the permissions (granted ON the group and TO the group)
self.vhutils.remove_auth_targets(group.entity_id)
self.vhutils.
|
razzius/PyClassLessons
|
instructors/course-2015/functions_gens_and_ducks/examples/in_class/parsetext_trade_2015.py
|
Python
|
mit
| 2,835
| 0.018342
|
# -*- coding: utf-8 -*-
#above helps to declare what encoding we want to use in the module
#note this is copied from the first json lab
#above is used to set the encoding for this module, (unfortunately didn't help that much)
#used for seeing our data in nicest string format possible
import pprint
#again idiom for reading in a file, relative path given
with open('../pres_on_trade.txt', 'r') as fp:
all_text = fp.read()
#str.split() will split groups of character
|
s on any white space, easy... nice
#sorted built-in function will only sort alphbetically here
all_words = sorted(all_text.split())
#begin preparation of words for a reasonable word frequency count
#we need to change our words from str to unicode
#unicode_words = [unicode(word) for word in all_words if unicode(word)]
#list comprehensions won't work because we get errors,
#let's do a try: except: block
unicode_words = []
for word in all_words:
try:
unicode_words.append(unicode(word))
except UnicodeDecodeError:
pass
#awesome list comprehension, they take iterables and return lists
#this will clean our words of unwanted punctuation and change to all lowercase
all_words = [word.strip("?.\'-,().").lower() for word in unicode_words]
#print all_words
#help(''.strip)
#reminder on dictionary syntax - setting the key and value
#dict_name[key] = value
#word_freq_dc['word'] = 18
#using dict.get method to check for existence and build word_freq dictionary
word_freq_dc = {}
for word in all_words:
times = word_freq_dc.get(word, 0)
times += 1
word_freq_dc[word] = times
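#the same count can also be written with collections.defaultdict (illustrative):
#from collections import defaultdict
#word_freq_dc = defaultdict(int)
#for word in all_words:
#    word_freq_dc[word] += 1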
#the easy way :) if you knew about it or where to look
from collections import Counter
#help(Counter)
counter = Counter(all_words)
#can use slice method on a sequence, this gets first 40 of type list
#that is: Counter.most_common() returns a list, a list is considerd one kind of sequence
print(counter.most_common()[:40])
#end line character for clarity when printing
print '\n'
#to be sure
counter_for_dc = Counter(word_freq_dc)
counter_from_before = Counter(all_words)
print counter_for_dc == counter_from_before
#going further with a generator expression
non_small_words = (word for word in all_words
if len(word) > 4 and
                   word != 'usa' and
word not in
['applause', 'laughter', 'there', 'these', 'those'])
recounter = Counter(non_small_words)
print(recounter.most_common()[:40])
#below is work we did to figure out the proper procedure to
#count words using a dictionary
#pprint.pprint(word_freq_dc)
#for k, v in word_freq_dc.iteritems():
# tupled_word_freq.append((k, v))
#tupled_word_freq = zip(word_freq_dc.itervalues(), word_freq_dc.iterkeys())
#print(tupled_word_freq)
#print sorted(tupled_word_freq)
#help(word_freq_dc.get)
|
bsmr-eve/Pyfa
|
eos/effects/shipheavymissileexpdmgpiratecruiser.py
|
Python
|
gpl-3.0
| 309
| 0.006472
|
# shipHeavyMissileExpDmgPirateCruiser
#
# Used by:
# Ship: Gnosis
type = "passive"
def handler(fit, ship, context):
fit.modules.filteredChargeBoost(lambda mod: mod.charge.requiresSkill("Heavy Missiles"),
"explosiveDamage", ship.getModifiedItemAttr("shipBonusRole7"))
|
pbombnz/IRCBot
|
modules/whatPulse.py
|
Python
|
gpl-2.0
| 4,737
| 0.004222
|
import urllib.request
import json
from modules import userDatabase
def get_what_pulse_url(param: str):
return "http://api.whatpulse.org/user.php?user=" + str(param) + "&formatted=yes&format=json"
# noinspection PyUnusedLocal
def on_channel_pm(irc, user_mask, user, channel, message):
command = message.split()
if command[0].lower() == '!setwhatpulse' or command[0].lower() == '!setwp':
if len(command) != 2:
irc.send_private_message(channel, "USAGE: !setw[hat]p[ulse] (WhatPulse ID/WhatPulse Username)")
return
if command[1].isdigit():
irc.send_private_message(channel, "USAGE: !setw[hat]p[ulse] (WhatPulse ID/WhatPulse Username)")
return
irc.send_private_message(channel, "3SUCCESS: Your WhatPulse ID has been changed.")
irc.user_info[user.lower()]['whatpulse'] = str(command[1])
userDatabase.save_user_database(irc)
elif command[0].lower() == '!whatpulse' or command[0].lower() == '!wp':
param = str()
if len(command) == 1:
param = str(irc.userData[user.lower()]['whatpulse'])
if irc.userData[user.lower()]['whatpulse'] == "":
irc.send_private_message(channel, "5ERROR: You have not set your WhatPulse ID yet.")
irc.send_private_message(channel,
"USAGE: !w[hat]p[ulse] (WhatPulse ID/WhatPulse Username/IRC Nickname
|
)")
irc.send_private_message(channel, "USAGE: !setw[hat]p[ulse] (WhatPulse ID)")
return
elif len(command) == 2:
command = message.split(' ', 1)
param = str(command[1])
        if command[1].lower() in irc.user_info:
            if irc.user_info[command[1].lower()]['whatpulse'] != "":
param = str(irc.userData[command[1].lower()]['whatpulse'])
try:
response = urllib.request.urlopen(get_what_pulse_url(param))
html_source = response.read().decode('utf-8')
response.close()
except IOError:
irc.send_private_message(channel, "5ERROR: The WhatPulse service is currently unavailable.")
return
try:
whatpulse_info = json.loads(html_source)
except ValueError:
irc.send_private_message(channel, '5ERROR: An unknown WhatPulse Username/ID was given.')
return
if 'error' in whatpulse_info:
irc.send_private_message(channel, '5ERROR: An unknown WhatPulse Username/ID was given.')
return
account_name = whatpulse_info['AccountName'] # Username
user_id = whatpulse_info['UserID'] # ID
country = whatpulse_info['Country'] # User's Country
joined_date = whatpulse_info['DateJoined'] # Date Joined
last_pulse_date = whatpulse_info['LastPulse'] # Last Pulsed
pulses = whatpulse_info['Pulses'] # Pulses
total_key_count = whatpulse_info['Keys'] # Total Key Count
total_mouse_clicks = whatpulse_info['Clicks'] # Total Mouse Clicks
avg_kpp = whatpulse_info['AvKeysPerPulse'] # Average Keys Per Pulse
avg_cpp = whatpulse_info['AvClicksPerPulse'] # Average Clicks Per Pulse
avg_kps = whatpulse_info['AvKPS'] # Average Keys Per Second
avg_cps = whatpulse_info['AvCPS'] # Average Clicks Per Second
# Ranks
clicks_rank = whatpulse_info['Ranks']['Clicks']
keys_rank = whatpulse_info['Ranks']['Keys']
uptime_rank = whatpulse_info['Ranks']['Uptime']
irc.send_private_message(channel,
"\u000310WhatPulse:\u0003 {0}(ID:{1}) \u000310Country:\u0003 {2} "
"\u000310Date Joined:\u0003 {3} \u000310LastPulsed:\u0003 {4} "
"\u000310Pulses:\u0003 {5} \u000310Keys:\u0003 {6} \u000310Clicks:\u0003 {7} "
"\u000310AvKeysPerPulse:\u0003 {8} \u000310AvClicksPerPulse:\u0003 {9} "
"\u000310AvKeyPerSecond:\u0003 {10} \u000310AvClicksPerSecond:\u0003 {11} "
"\u000310Rank: Clicks:\u0003 {12} \u000310Keys:\u0003 {13} "
"\u000310Uptime:\u0003 {14}".format(
str(account_name), str(user_id), str(country), str(joined_date),
str(last_pulse_date),
str(pulses), str(total_key_count), str(total_mouse_clicks), str(avg_kpp),
str(avg_cpp),
str(avg_kps), str(avg_cps), str(clicks_rank), str(keys_rank), str(uptime_rank)))
|
ContinuumIO/blaze
|
blaze/index.py
|
Python
|
bsd-3-clause
| 1,644
| 0.001217
|
from .dispatch import dispatch
from .compatibility import basestring
from blaze.expr.literal import BoundSymbol, data as bz_data
@dispatch(object, (basestring, list, tuple))
def create_index(t, column_name_or_names, name=None):
"""Create an index on a column.
Parameters
----------
    t : table-like
    name : str
        The name of the index to create
    column_name_or_names : string, list, tuple
        A column name to index on, or a list or tuple for a composite index
--------
>>> # Using SQLite
>>> from blaze import SQL
    >>> # create a table called 'tb', in memory
>>> sql = SQL('sqlite:///:memory:', 'tb',
... schema='{id: int64, value: float64, categ: string}')
>>> dta = [(1, 2.0, 'a'), (2, 3.0, 'b'), (3, 4.0, 'c')]
>>> sql.extend(dta)
>>> # create an index on the 'id' column (for SQL we must provide a name)
>>> sql.table.indexes
set()
>>> create_index(sql, 'id', name='id_index')
>>> sql.table.indexes
{Index('id_index', Column('id', BigInteger(), table=<tb>, nullable=False))}
"""
raise NotImplementedError("create_index not implemented for type %r" %
type(t).__name__)
@dispatch(BoundSymbol, (basestring, list, tuple))
def create_index(dta, column_name_or_names, name=None, **kwargs):
return create_index(dta.data, column_name_or_names, name=name, **kwargs)
@dispatch(basestring, (basestring, list, tuple))
def create_index(uri, column_name_or_names, name=None, **kwargs):
dta = bz_data(uri, **kwargs)
create_index(dta, column_name_or_names, name=name)
return dta
|
jsha/letsencrypt
|
certbot-apache/certbot_apache/parser.py
|
Python
|
apache-2.0
| 27,616
| 0.000036
|
"""ApacheParser is a member object of the ApacheConfigurator class."""
import copy
import fnmatch
import logging
import os
import re
import subprocess
import sys
import six
from certbot import errors
from certbot_apache import constants
logger = logging.getLogger(__name__)
class ApacheParser(object):
"""Class handles the fine details of parsing the Apache Configuration.
.. todo:: Make parsing general... remove sites-available etc...
:ivar str root: Normalized absolute path to the server root
directory. Without trailing slash.
:ivar set modules: All module names that are currently enabled.
:ivar dict loc: Location to place directives, root - configuration origin,
default - user config file, name - NameVirtualHost,
"""
arg_var_interpreter = re.compile(r"\$\{[^ \}]*}")
fnmatch_chars = set(["*", "?", "\\", "[", "]"])
def __init__(self, aug, root, vhostroot=None, version=(2, 4),
configurator=None):
# Note: Order is important here.
# Needed for calling save() with reverter functionality that resides in
# AugeasConfigurator superclass of ApacheConfigurator. This resolves
# issues with aug.load() after adding new files / defines to parse tree
self.configurator = configurator
# This uses the binary, so it can be done first.
# https://httpd.apache.org/docs/2.4/mod/core.html#define
# https://httpd.apache.org/docs/2.4/mod/core.html#ifdefine
# This only handles invocation parameters and Define directives!
self.parser_paths = {}
self.variables = {}
if version >= (2, 4):
self.update_runtime_variables()
self.aug = aug
# Find configuration root and make sure augeas can parse it.
self.root = os.path.abspath(root)
self.loc = {"root": self._find_config_root()}
self.parse_file(self.loc["root"])
# This problem has been fixed in Augeas 1.0
self.standardize_excl()
# Temporarily set modules to be empty, so that find_dirs can work
# https://httpd.apache.org/docs/2.4/mod/core.html#ifmodule
# This needs to come before locations are set.
self.modules = set()
self.init_modules()
# Set up rest of locations
self.loc.update(self._set_locations())
self.existing_paths = copy.deepcopy(self.parser_paths)
# Must also attempt to parse additional virtual host root
if vhostroot:
self.parse_file(os.path.abspath(vhostroot) + "/" +
constants.os_constant("vhost_files"))
# check to see if there were unparsed define statements
if version < (2, 4):
if self.find_dir("Define", exclude=False):
raise errors.PluginError("Error parsing runtime variables")
def add_include(self, main_config, inc_path):
"""Add Include for a new configuration file if one does not exist
:param str main_config: file path to main Apache config file
:param str inc_path: path of file to include
"""
if len(self.find_dir(case_i("Include"), inc_path)) == 0:
logger.debug("Adding Include %s to %s",
inc_path, get_aug_path(main_config))
self.add_dir(
get_aug_path(main_config),
"Include", inc_path)
# Add new path to parser paths
new_dir = os.path.dirname(inc_path)
new_file = os.path.basename(inc_path)
if new_dir in self.existing_paths.keys():
# Add to existing path
self.existing_paths[new_dir].append(new_file)
else:
# Create a new path
self.existing_paths[new_dir] = [new_file]
def init_modules(self):
"""Iterates on the configuration until no new modules are loaded.
..todo:: This should be attempted to be done with a binary to avoid
the iteration issue. Else... parse and enable mods at same time.
"""
# Since modules are being initiated... clear existing set.
self.modules = set()
matches = self.find_dir("LoadModule")
iterator = iter(matches)
# Make sure prev_size != cur_size for do: while: iteration
prev_size = -1
while len(self.modules) != prev_size:
prev_size = len(self.modules)
for match_name, match_filename in six.moves.zip(
iterator, iterator):
mod_name = self.get_arg(match_name)
mod_filename = self.get_arg(match_filename)
if mod_name and mod_filename:
self.modules.add(mod_name)
self.modules.add(os.path.basename(mod_filename)[:-2] + "c")
else:
logger.debug("Could not read LoadModule directive from " +
"Augeas path: {0}".format(match_name[6:]))
def update_runtime_variables(self):
""""
.. note:: Compile time variables (apache2ctl -V) are not used within
the dynamic configuration files. These should not be parsed or
interpreted.
.. todo:: Create separate compile time variables...
simply for arg_get()
"""
stdout = self._get_runtime_cfg()
variables = dict()
matches = re.compile(r"Define: ([^ \n]*)").findall(stdout)
try:
matches.remove("DUMP_RUN_CFG")
except ValueError:
return
for match in matches:
if match.count("=") > 1:
logger.error("Unexpected number of equal signs in "
"runtime config dump.")
                raise errors.PluginError(
                    "Error parsing Apache runtime variables")
parts = match.partition("=")
variables[parts[0]] = parts[2]
self.variables = variables
def _get_runtime_cfg(self): # pylint: disable=no-self-use
"""Get runtime configuration info.
:returns: stdout from DUMP_RUN_CFG
"""
try:
proc = subprocess.Popen(
constants.os_constant("define_cmd"),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True)
stdout, stderr = proc.communicate()
except (OSError, ValueError):
logger.error(
"Error running command %s for runtime parameters!%s",
constants.os_constant("define_cmd"), os.linesep)
raise errors.MisconfigurationError(
"Error accessing loaded Apache parameters: %s",
constants.os_constant("define_cmd"))
# Small errors that do not impede
if proc.returncode != 0:
logger.warning("Error in checking parameter list: %s", stderr)
raise errors.MisconfigurationError(
"Apache is unable to check whether or not the module is "
"loaded because Apache is misconfigured.")
return stdout
def filter_args_num(self, matches, args): # pylint: disable=no-self-use
"""Filter out directives with specific number of arguments.
This function makes the assumption that all related arguments are given
in order. Thus /files/apache/directive[5]/arg[2] must come immediately
after /files/apache/directive[5]/arg[1]. Runs in 1 linear pass.
:param string matches: Matches of all directives with arg nodes
:param int args: Number of args you would like to filter
:returns: List of directives that contain # of arguments.
(arg is stripped off)
"""
filtered = []
if args == 1:
for i in range(len(matches)):
if matches[i].endswith("/arg"):
filtered.append(matches[i][:-4])
else:
for i in range(len(matches)):
if matches[i].endswith("/arg[%d]" % args):
# Make sure we don't cause an IndexError (end of list)
|
mesoscloud/events
|
0.5.6/docker.py
|
Python
|
mit
| 1,100
| 0.002727
|
"""docker"""
import http.client
import json
import socket
__all__ = ['HTTPConnection', 'HTTPError', 'get']
class HTTPConnection(http.client.HTTPConnection):
def __init__(self):
http.client.HTTPConnection.__init__(self, 'localhost')
def connect(self):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect('/var/run/docker.sock')
self.sock = sock
class HTTPError(Exception):
def __init__(self, status, reason):
self.status = status
        self.reason = reason
def get(path, async_=False):
    # 'async' became a reserved word in Python 3.7, so the flag is renamed
    # to 'async_' here to keep the module importable on current interpreters.
    conn = HTTPConnection()
    try:
        conn.request('GET', path)
        resp = conn.getresponse()
        if resp.status != 200:
            raise HTTPError(resp.status, resp.reason)
    except Exception:
        conn.close()
        raise
    try:
        if async_:
            return resp
        elif resp.headers.get('Content-Type') == 'application/json':
            return json.loads(resp.read().decode('utf-8'))
        else:
            return resp.read()
    finally:
        if not async_:
            conn.close()
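# Hedged usage sketch (requires a reachable Docker daemon socket):
if __name__ == '__main__':
    for c in get('/containers/json'):
        print(c['Id'][:12], c['Image'])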
|
frederick623/wat
|
date_transform.py
|
Python
|
apache-2.0
| 2,296
| 0.037021
|
import csv
import os
import sys
import traceback
import sqlite3
import fnmatch
import decimal
import datetime
def valid_dt(dt):
try:
datetime.datetime.strptime(dt, "%m/%d/%Y")
return True
except:
return False
def adapt_decimal(d):
return str(d)
def convert_decimal(s):
return decimal.Decimal(s)
def db_cur(source = ":memory:"):
# Register the adapter
sqlite3.register_adapter(decimal.Decimal, adapt_decimal)
# Register the converter
sqlite3.register_converter("DECTEXT", convert_decimal)
conn = sqlite3.connect(source, detect_types=sqlite3.PARSE_DECLTYPES)
#conn.row_factory = sqlite3.Row
cur = conn.cursor()
return conn, cur
def question_marks(st):
question_marks = '?'
for i in range(0, len(st.split(','))-1):
question_marks = question_marks + ",?"
return question_marks
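# e.g. question_marks("id,name,value") -> "?,?,?" (illustrative)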
def create_tbl(cur, tbl_name, header, arr = [], index_arr = []):
cur.execute("""select count(*) FROM sqlite_master WHERE type='table' AND name = '%s' """ % (tbl_name))
tbl_exists = cur.fetchone()
if tbl_exists[0] == 0:
print "CREATE TABLE " + tbl_name + " (" + header.replace("id,", "id PRIMARY KEY,") + " );"
cur.execute("CREATE TABLE " + tbl_name + " (" + header.replace("id,", "id PRIMARY KEY,") + " );")
for index in index_arr:
cur.execute("CREATE INDEX " + tbl_name + "_" + index + " ON " + tbl_name + " (" + index + ");")
if arr != []:
cur.executemany("INSERT INTO " + tbl_name + " VALUES ("+question_marks(header)+")", arr)
return
def csv_to_arr(csv_file, start=1, has_header=True):
arr = []
with open(csv_file, 'rU') as f:
reader = csv.reader(f)
arr = list(reader)
arr = zip(*arr)
arr = [[(datetime.datetime.strptime(y, "%m/%d/%Y").date().strftime("%Y-%m-%d") if valid_dt(y) else y) for y in x] for x in arr if any(x)]
arr = zip(*arr)
header = ""
if has_header:
        header = ','.join(arr[0])
arr = arr[start:]
return header, arr
else:
return arr[start:]
return
def arr_to_csv(file_name, header, data_arr):
csv_file = open(file_name, 'wb')
wr = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
wr.writerow(header.split(','))
for data_row in data_arr:
line = []
for ele in data_row:
line.append(str(ele))
wr.writerow(line)
csv_file.close()
return
conn, cur = db_cur()
header, arr = csv_to_arr("tmp\\20160914
|
.csv")
print arr[0]
|
winhamwr/django-attendance
|
django_attendance/models.py
|
Python
|
bsd-3-clause
| 1,202
| 0.000832
|
import datetime
from django.db import models
from django.utils.translation import ugettext, ugettext_lazy as _
from django.contrib.auth.models import User
from django.contrib import admin
from schedule.models import Occurrence
from django_attendance.conf import settings as attendance_settings
class EventAttendance(models.Model):
'''
The attendance status of a user for a specific Occurrence of an event.
'''
occurrence = models.OneToOneField(Occurrence)
    attendees = models.ManyToManyField(User)
class Meta:
verbose_name = _('attendance')
verbose_name_plural = _('attendances')
def __unicode__(self):
return "Attendance for %s-%s" % (self.occurrence.title,
self.occurrence.start)
def duration(self):
"""
Get the duration of this event in hours, taking the HOUR_MULTIPLIER in
to account.
"""
delta = self.occurrence.end - self.occurrence.start
real_hours = delta.days * 24 + delta.seconds / (60.0 * 60.0)
adjusted_hours = attendance_settings.HOUR_MULTIPLIER * real_hours
return adjusted_hours
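        # Worked example (illustrative): a 2-day, 3-hour occurrence gives
        # real_hours = 2 * 24 + 3 = 51.0; with HOUR_MULTIPLIER = 0.5 the
        # method returns 25.5.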
admin.site.register(EventAttendance)
|
sandow-digital/django-filebrowser-no-grappelli-sandow
|
filebrowser/views.py
|
Python
|
bsd-3-clause
| 19,288
| 0.005081
|
# coding: utf-8
# general imports
import os, re
from time import gmtime, strftime
# django imports
from django.shortcuts import render_to_response, HttpResponse
from django.template import RequestContext as Context
from django.http import HttpResponseRedirect
from django.contrib.admin.views.decorators import staff_member_required
from django.views.decorators.cache import never_cache
from django.utils.translation import ugettext as _
from django.conf import settings
from django import forms
from django.core.urlresolvers import reverse
from django.core.exceptions import ImproperlyConfigured
from django.dispatch import Signal
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.utils.encoding import smart_str
try:
# django SVN
from django.views.decorators.csrf import csrf_exempt
except:
# django 1.1
from django.contrib.csrf.middleware import csrf_exempt
# filebrowser imports
from filebrowser.settings import *
from filebrowser.functions import path_to_url, sort_by_attr, get_path, get_file, get_version_path, get_breadcrumbs, get_filterdate, get_settings_var, handle_file_upload, convert_filename
from filebrowser.templatetags.fb_tags import query_helper
from filebrowser.base import FileObject
from filebrowser.decorators import flash_login_required
# Precompile regular expressions
filter_re = []
for exp in EXCLUDE:
filter_re.append(re.compile(exp))
for k,v in VERSIONS.iteritems():
exp = (r'_%s.(%s)') % (k, '|'.join(EXTENSION_LIST))
filter_re.append(re.compile(exp))
def browse(request):
"""
Browse Files/Directories.
"""
# Redirect for images to use business photos
if request.GET.get('type') == 'image':
redirect_url = "/admin/business/photo/"
query_string = request.GET.urlencode()
return HttpResponseRedirect("/admin/business/photo/?%s" % (query_string))
# QUERY / PATH CHECK
query = request.GET.copy()
path = get_path(query.get('dir', ''))
directory = get_path('')
if path is None:
msg = _('The requested Folder does not exist.')
request.user.message_set.create(message=msg)
if directory is None:
# The DIRECTORY does not exist, raise an error to prevent eternal redirecting.
raise ImproperlyConfigured, _("Error finding Upload-Folder. Maybe it does not exist?")
redirect_url = reverse("fb_browse") + query_helper(query, "", "dir")
return HttpResponseRedirect(redirect_url)
abs_path = os.path.join(MEDIA_ROOT, DIRECTORY, path)
# INITIAL VARIABLES
results_var = {'results_total': 0, 'results_current': 0, 'delete_total': 0, 'images_total': 0, 'select_total': 0 }
counter = {}
for k,v in EXTENSIONS.iteritems():
counter[k] = 0
dir_list = os.listdir(abs_path)
files = []
for file in dir_list:
        # EXCLUDE FILES MATCHING VERSIONS_PREFIX OR ANY OF THE EXCLUDE PATTERNS
filtered = file.startswith('.')
        for re_prefix in filter_re:
if re_prefix.search(file):
filtered = True
if filtered:
continue
results_var['results_total'] += 1
# CREATE FILEOBJECT
fileobject = FileObject(os.path.join(DIRECTORY, path, file))
# FILTER / SEARCH
append = False
if fileobject.filetype == request.GET.get('filter_type', fileobject.filetype) and get_filterdate(request.GET.get('filter_date', ''), fileobject.date):
append = True
if request.GET.get('q') and not re.compile(request.GET.get('q').lower(), re.M).search(file.lower()):
append = False
# APPEND FILE_LIST
if append:
try:
# COUNTER/RESULTS
if fileobject.filetype == 'Image':
results_var['images_total'] += 1
if fileobject.filetype != 'Folder':
results_var['delete_total'] += 1
elif fileobject.filetype == 'Folder' and fileobject.is_empty:
results_var['delete_total'] += 1
if query.get('type') and query.get('type') in SELECT_FORMATS and fileobject.filetype in SELECT_FORMATS[query.get('type')]:
results_var['select_total'] += 1
elif not query.get('type'):
results_var['select_total'] += 1
except OSError:
# Ignore items that have problems
continue
else:
files.append(fileobject)
results_var['results_current'] += 1
# COUNTER/RESULTS
if fileobject.filetype:
counter[fileobject.filetype] += 1
# SORTING
query['o'] = request.GET.get('o', DEFAULT_SORTING_BY)
query['ot'] = request.GET.get('ot', DEFAULT_SORTING_ORDER)
files = sort_by_attr(files, request.GET.get('o', DEFAULT_SORTING_BY))
if not request.GET.get('ot') and DEFAULT_SORTING_ORDER == "desc" or request.GET.get('ot') == "desc":
files.reverse()
p = Paginator(files, LIST_PER_PAGE)
try:
page_nr = request.GET.get('p', '1')
except:
page_nr = 1
try:
page = p.page(page_nr)
except (EmptyPage, InvalidPage):
page = p.page(p.num_pages)
return render_to_response('filebrowser/index.html', {
'dir': path,
'p': p,
'page': page,
'results_var': results_var,
'counter': counter,
'query': query,
'title': _(u'FileBrowser'),
'settings_var': get_settings_var(),
'breadcrumbs': get_breadcrumbs(query, path),
'breadcrumbs_title': ""
}, context_instance=Context(request))
browse = staff_member_required(never_cache(browse))
# mkdir signals
filebrowser_pre_createdir = Signal(providing_args=["path", "dirname"])
filebrowser_post_createdir = Signal(providing_args=["path", "dirname"])
def mkdir(request):
"""
Make Directory.
"""
from filebrowser.forms import MakeDirForm
# QUERY / PATH CHECK
query = request.GET
path = get_path(query.get('dir', ''))
if path is None:
msg = _('The requested Folder does not exist.')
request.user.message_set.create(message=msg)
return HttpResponseRedirect(reverse("fb_browse"))
abs_path = os.path.join(MEDIA_ROOT, DIRECTORY, path)
if request.method == 'POST':
form = MakeDirForm(abs_path, request.POST)
if form.is_valid():
server_path = os.path.join(abs_path, form.cleaned_data['dir_name'])
try:
# PRE CREATE SIGNAL
filebrowser_pre_createdir.send(sender=request, path=path, dirname=form.cleaned_data['dir_name'])
# CREATE FOLDER
os.mkdir(server_path)
os.chmod(server_path, 0775)
# POST CREATE SIGNAL
filebrowser_post_createdir.send(sender=request, path=path, dirname=form.cleaned_data['dir_name'])
# MESSAGE & REDIRECT
msg = _('The Folder %s was successfully created.') % (form.cleaned_data['dir_name'])
request.user.message_set.create(message=msg)
# on redirect, sort by date desc to see the new directory on top of the list
# remove filter in order to actually _see_ the new folder
# remove pagination
redirect_url = reverse("fb_browse") + query_helper(query, "ot=desc,o=date", "ot,o,filter_type,filter_date,q,p")
return HttpResponseRedirect(redirect_url)
except OSError, (errno, strerror):
if errno == 13:
form.errors['dir_name'] = forms.util.ErrorList([_('Permission denied.')])
else:
form.errors['dir_name'] = forms.util.ErrorList([_('Error creating folder.')])
else:
form = MakeDirForm(abs_path)
return render_to_response('filebrowser/makedir.html', {
'form': form,
'query': query,
'title': _(u'New Folder'),
'settings_var': get_settings_var(),
'breadcrumbs': get_breadcrumbs(query,
|
AAorris/search
|
src/bing/runtime.py
|
Python
|
mit
| 523
| 0.003824
|
"""
Provides a more structured call to the nodejs bing
"""
import os
import sys
import argparse
import subprocess
def main():
parser = argparse.ArgumentParser(description="Use Bing's API services")
parser.add_argument("query", nargs="*", help="Query string")
parser.add_argument("-s", "--service", default="Web", choices=["Web", "Image", "News"])
args = parser.parse_args()
query = "%20".join(args.query)
sys.stdout.write("%s %s\n" %
|
(args.service, query))
if __name__ == '__main__':
main()
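# Example invocation (illustrative):
#   $ python runtime.py -s Image cute cats
#   Image cute%20cats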
|
brettcs/diffoscope
|
tests/comparators/test_macho.py
|
Python
|
gpl-3.0
| 2,299
| 0.003486
|
# -*- coding: utf-8 -*-
#
# diffoscope: in-depth comparison of files, archives, and directories
#
# Copyright © 2015 Jérémy Bobbio <lunar@debian.org>
# Copyright © 2015 Clemens Lang <cal@macports.org>
#
# diffoscope is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# diffoscope is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with diffoscope. If not, see <https://www.gnu.org/licenses/>.
import pytest
import os.path
from diffoscope.config import Config
from diffoscope.comparators.macho import MachoFile
from diffoscope.comparators.missing_file import MissingFile
from utils.data import data, load_fixture
from utils.tools import skip_unless_tools_exist
obj1 = load_fixture('test1.macho')
obj2 = load_fixture('test2.macho')
def test_obj_identification(obj1):
assert isinstance(obj1, MachoFile)
def test_obj_no_differences(obj1):
difference = obj1.compare(obj1)
assert difference is None
@pytest.fixture
def obj_differences(obj1, obj2):
return obj1.compare(obj2).details
@skip_unless_tools_exist('otool', 'lipo')
def test_obj_compare_non_existing(monkeypatch, obj1):
monkeypatch.setattr(Config(), 'new_file', True)
difference = obj1.compare(MissingFile('/nonexisting', obj1))
assert difference.source2 == '/nonexisting'
assert len(difference.details) > 0
@skip_unless_tools_exist('otool', 'lipo')
def test_diff(obj_differences):
assert len(obj_differences) == 4
l = ['macho_expected_diff_arch', 'macho_expected_diff_headers', 'macho_expected_diff_loadcommands', 'macho_expected_diff_disassembly']
for idx, diff in enumerate(obj_differences):
with open(os.path.join(os.path.dirname(__file__), '../data', l[idx]), 'w') as f:
print(diff.unified_diff, file=f)
expected_diff = open(data('macho_expected_diff')).read()
assert obj_differences[0].unified_diff == expected_diff
|
vonderborch/CS355
|
sps - working/sps.py
|
Python
|
mit
| 9,305
| 0.013649
|
#!/usr/bin/python3
import re
import sys
######## Global Variables
# Stacks
dictionary = []
execution = []
operand = []
# Sept 21, 2011 -- fixed the handling of }{ -- each brace should be a separate token
# A regular expression that matches postscript each different kind of postscript token
pattern = '/?[a-zA-Z][a-zA-Z0-9_]*|[-]?[0-9]+|[}{]|%.*|[^\t\n ]'
######## Debug Functions
def Debug(*s):
print(s)
sys.exit(1)
return
######## Common Functions
#### Is Variable
def isNumber(x):
    # The original 'type(x) is int == False' chains as
    # '(type(x) is int) and (int == False)', which is always False.
    return isinstance(x, (int, float))
def isBool(x):
    return isinstance(x, bool)
#### Pop Operands
def PopNumber(s,v):
x = OperandPop()
if isNumber(x) == False:
Debug("%s in operation %s is not a number or doesn't exist!",v,s)
return x;
def PopBoolean(s,v):
x = OperandPop()
if isBool(x) == False:
Debug("%s in operation %s is not a boolean or doesn't exist!",v,s)
return x;
def PopTwoNumbers(s):
return (PopNumber(s,"x1"),PopNumber(s,"x2"));
def PopTwoBooleans(s):
return (PopBoolean(s,"x1"),PopBoolean(s,"x2"));
#### Command Logic
def CommandLogic(t):
if t == "add":
_ADD()
return True
elif t == "sub":
_SUB()
return True
elif t == "mul":
_MUL()
return True
elif t == "div":
_DIV()
return True
elif t == "eq":
_EQ()
return True
elif t == "gt":
_LT()
return True
elif t == "lt":
_GT()
return True
elif t == "and":
_AND()
return True
elif t == "or":
_OR()
return True
elif t == "not":
_NOT()
return True
elif t == "if":
_IF()
return True
elif t == "ifelse":
_IFELSE()
return True
return False
######## SPS Functions
#### Number Operators
def _ADD():
x1,x2 = PopTwoNumbers("_ADD")
return OperandPush(x1 + x2)
def _SUB():
x1,x2 = PopTwoNumbers("_SUB")
return OperandPush(x1 - x2)
def _MUL():
x1,x2 = PopTwoNumbers("_MUL")
return OperandPush(x1 * x2)
def _DIV():
x1,x2 = PopTwoNumbers("_DIV")
return OperandPush(x1 / x2)
def _EQ():
x1,x2 = PopTwoNumbers("_EQ")
return OperandPush(x1 == x2)
def _LT():
x1,x2 = PopTwoNumbers("_LT")
return OperandPush(x1 < x2)
def _GT():
x1,x2 = PopTwoNumbers("_GT")
return OperandPush(x1 > x2)
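# Stack-order note (illustrative): the program "2 5 gt" should test 2 > 5.
# PopTwoNumbers pops the top of the stack first, so x1 = 5 and x2 = 2;
# mapping the "gt" token to _LT computes 5 < 2, which equals (2 > 5).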
#### Boolean Operators
def _AND():
x1,x2 = PopTwoBooleans("_AND")
return OperandPush(x1 and x2)
def _OR():
x1,x2 = PopTwoBooleans("_OR")
return OperandPush(x1 or x2)
def _NOT():
return OperandPush(not PopBoolean("_OR","x"))
#### Sequencing Operators
def _IF():
t == ""
while t != "{":
CommandLogic(OperandPush(ExecutionPop()))
if OperandPop() == True:
OperandPush(ExecutionPop())
ExecutionPop()
return True
def _IFELSE():
t == ""
while t != "{":
CommandLogic(OperandPush(ExecutionPop()))
if OperandPop() == True:
OperandPush(ExecutionPop())
ExecutionPop()
else:
ExecutionPop()
ExecutionPop()
ExecutionPop()
OperandPush(ExecutionPop())
ExecutionPop()
return True
#### Stack Operators
def _DUP():
t = OperandPop()
OperandPush(t)
OperandPush(t)
return True
def _EXCH():
t = OperandPop()
OperandPushPosition(t, len(operand) - 1)
return True
def _POP():
return OperandPop()
#### Dictionary Creation
def _DICTZ():
t = {}
DictionaryPush(t)
return True
#### DICTIONARY MANIPULATION
def _BEGIN():
if len(dictionary) < 1:
Debug("No item on the Dictionary Stack for _BEGIN!")
return False
DictionaryPush(OperandPop())
return
def _END():
return DictionaryPop()
#### Name Defination
def _DEF():
t1 = OperandPop()
t2 = OperandPop()
    if not isinstance(t1, str):
Debug("T1 in operation _DEF is not a string!")
return False
t = [t1,t2]
DictionaryPush(t)
return True
#### Stack Printing
def _STACK():
for i in operand:
print(i)
return True
def _EQUALS():
t = OperandPop()
print(t)
return True
######## Stack Control
def DictionaryPushItem(t,value):
    # Store the binding in the dictionary on top of the stack; the original
    # indexed one past the end of the list and tried to append to a dict.
    dictionary[-1][t] = value
def DictionaryPush(t):
dictionary.append(t)
def DictionaryPop():
return dictionary.pop()
def ExecutionPush(t):
execution.append(t)
def ExecutionPop():
    if len(execution) > 0:
        return execution.pop()
    else:
        return None  # an empty execution stack ends the interpreter loop
def OperandPush(t):
operand.append(t)
return t
def OperandPushPosition(t,p):
operand.insert(p,t)
return t
def OperandPop():
return operand.pop()
######## File Reader
# Given a string, return the tokens it contains
def parse(s):
tokens = re.findall(pattern, s)
return tokens
# Given an open file, return the tokens it contains
def parseFile(f):
tokens = parse(''.join(f.readlines()))
return tokens
######## Interpretor
def InterpretorMain(L):
for w in L:
ExecutionPush(w)
InterpretorLoop()
return
def InterpretorLoop():
word = ExecutionPop()
if word == None:
return
elif isBool(word) == True or isNumber(word) == True:
OperandPush(word)
InterpretorLoop()
else:
Interpretor(word)
InterpretorLoop()
def Interpretor(w):
if w.startswith('/') == True:
t = w.split('/')
word = ExecutionPop()
if isBool(word) == True or isNumber(word) == True:
            DictionaryPushItem(t[1], word)  # 'x' was undefined; use the popped value
else:
tL = ["{"]
i = 1
while i > 0:
word = ExecutionPop()
if word == "{":
i = i + 1
elif word == "}":
i = i - 1
tL.append(word)
while word != "def":
word = ExecutionPop()
DictionaryPushItem(t[1], tL)
elif w == "add":
_ADD()
elif w == "sub":
_SUB()
elif w == "mul":
_MUL()
elif w == "div":
_DIV()
elif w == "eq":
_EQ()
elif w == "gt":
_LT()
elif w == "lt":
_GT()
elif w == "and":
_AND()
elif w == "or":
_OR()
elif w == "not":
_NOT()
elif w == "if":
_IF()
elif w == "ifelse":
_IFELSE()
elif w == "dup":
_DUP()
elif w == "exch":
_EXCH()
elif w == "pop":
_POP()
elif w == "dictz":
_DICTZ()
elif w == "begin":
_BEGIN()
elif w == "end":
_END()
elif w == "stack":
_STACK()
elif w == "=":
_EQUALS()
else:
		wordInterpretor(w)
return 0
def wordInterpretor(w):
	L = dictionary[-1].get(w)
for word in L:
if isBool(word) == True or isNumber(word) == True:
OperandPush(word)
else:
if w.startswith('/') == True:
t = w.split('/')
word = L.pop()
if isBool(word) == True or isNumber(word) == True:
				DictionaryPushItem(t[1], word)
else:
tL = ["{"]
i = 1
while i > 0:
word = ExecutionPop()
if word == "{":
i = i + 1
elif word == "}":
i = i - 1
tL.append(word)
while word != "def":
word = L.pop()
DictionaryPushItem(t[1], tL)
elif w == "add":
_ADD()
elif w == "sub":
_SUB()
elif w == "mul":
_MUL()
elif w == "div":
_DIV()
elif w == "eq":
_EQ()
elif w == "gt":
_LT()
elif w == "lt":
_GT()
elif w == "and":
_AND()
			elif w == "or":
				_OR()
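# --- Added illustration (not from the original file): a minimal, self-contained
# sketch of the operand-stack discipline the interpreter above is built on,
# evaluating the postfix program "3 4 add"; the names mirror the file but are
# redefined here so the snippet runs on its own.
operand_demo = []
def demo_push(t):
    operand_demo.append(t)
    return t
def demo_pop():
    return operand_demo.pop()
for token in ["3", "4", "add"]:
    if token == "add":
        x2, x1 = demo_pop(), demo_pop()
        demo_push(x1 + x2)
    else:
        demo_push(int(token))
print(operand_demo)  # [7]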
|
SELO77/seloPython
|
3.X/ex/strFormat.py
|
Python
|
mit
| 217
| 0.004608
|
# testStr = "Hello {name}, How long have you bean?. I'm {myName}"
#
# testStr = testStr.format(name="Leo", myName="Serim")
#
# print(testStr)
limit = None
hello = str(limit)  # str(limit, "") raises TypeError: the two-argument form expects a bytes-like object
print(hello)
# print( "4" in "3.5")
|
leechy9/Vigilante
|
vigilante.py
|
Python
|
gpl-3.0
| 5,186
| 0.016969
|
'''
Reads scrambled Vigenere Cipher text from stdin and attempts to decrypt it.
Written for Python 2.7.
Copyright (C) 2014 leechy9
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import string
from collections import defaultdict, Counter
# Average letter frequencies found in English (from Wikipedia)
letter_frequencies = [
('A', 0.08167),
('B', 0.01492),
('C', 0.02782),
('D', 0.04253),
('E', 0.12702),
('F', 0.02228),
('G', 0.02015),
('H', 0.06094),
('I', 0.06966),
('J', 0.00153),
('K', 0.00772),
('L', 0.04025),
('M', 0.02406),
('N', 0.06749),
('O', 0.07507),
('P', 0.01929),
('Q', 0.00095),
('R', 0.05987),
('S', 0.06327),
('T', 0.09056),
('U', 0.02758),
('V', 0.00978),
('W', 0.02360),
('X', 0.00150),
('Y', 0.01974),
('Z', 0.00074),
]
def all_substrings(text, size):
'''Returns a list of all substrings with length of size in text'''
length = len(text)
return [text[i:i+size] for i in range(length) if i+size<=length]
def rotate_letter(letter, shift):
'''Rotates the letter clockwise by the given shift value.'''
rotated_letter = ord(letter) + shift
if rotated_letter < ord('A'):
rotated_letter = ord('Z') - (ord('A') - rotated_letter) + 1
if rotated_letter > ord('Z'):
rotated_letter = ord('A') + (rotated_letter - ord('Z')) - 1
return chr(rotated_letter)
def calculate_distances(string_list):
'''
Takes in a list of strings and tells how far away matching elements are from
one another. ['a','a'] has a distance of 1.
Returns list((string, distance)).
'''
distances = []
length = len(string_list)
for i in range(length):
for x in range(length):
if string_list[i] == string_list[x] and i != x and i-x > 0:
distances.append((string_list[i], i-x))
return distances
def mod_counts(numbers, max_mod):
'''
Takes in a list(int). Calculates how many of the int%x==0 for 1<x<max_mod .
Returns a defaultdict{mod, count} showing how many of the int%mod==0 .
'''
counts = defaultdict(int)
for i in range(2, max_mod):
for num in numbers:
if num%i == 0:
counts[i] += 1
return counts
def find_shift_value(shift_column):
'''
  Takes in a list of letters. Finds the most common occurrences.
  Uses these common occurrences to estimate how much the text was shifted.
Returns a probable integer shift value.
'''
count = Counter(shift_column)
# Ensure counts of 0 appear
  unfound_letters = [l for l in string.uppercase if l not in count]
for l in unfound_letters: count[l] = 0
total_letters = 0.0
for l,c in count.most_common(): total_letters += c
# Try to find the smallest difference between actual and expected frequencies
differences = defaultdict(float)
# Try shifting through every combination
for r in range(len(letter_frequencies)):
for l,f in letter_frequencies:
rotated = rotate_letter(l, r)
      differences[r] += abs(f - count[rotated]/total_letters)
# The smallest difference is most likely the shift value
smallest = 0
for s,d in differences.iteritems():
if differences[s] < differences[smallest]:
smallest = s
return smallest
def vigenere_shift(text, shift_values):
'''Rotates text by the shift_values given. Returns shifted value.'''
key_length = len(shift_values)
text = text.upper()
shifted_letters = []
for i in range(len(text)):
rotated_letter = rotate_letter(text[i], -shift_values[i%key_length])
shifted_letters.append(rotated_letter)
return ''.join(shifted_letters)
def main():
'''Main method'''
cipher_text = raw_input('Enter the cipher text to decrypt: \n')
print('Calculating...\n')
substrings = all_substrings(cipher_text, 3)
distances = calculate_distances(substrings)
counts = mod_counts(zip(*distances)[1], 20)
counts = [x for x in counts.iteritems()]
counts.sort(key=lambda x: -x[1])
# counts[x][0] should now contain key sizes from most to least probable
key_size = counts[0][0]
# Split letters by the key length and find the most common occurrences
shift_values = []
for i in range(key_size):
shift_column = [cipher_text[x] for x in range(len(cipher_text)) if x%key_size==i]
shift_values.append(find_shift_value(shift_column))
decrypted_text = vigenere_shift(cipher_text, shift_values)
print('\nExpected key: ')
print(''.join([rotate_letter('A', c) for c in shift_values]))
print('\nAlternative key (different starting shift sometimes encountered): ')
print(''.join([rotate_letter('Z', c) for c in shift_values]))
print('\nDecrypted text: ')
print(decrypted_text)
# Call main method
if __name__ == '__main__':
main()
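# --- Added illustration (not part of vigilante.py): the rotate/unrotate
# symmetry the decryption relies on, with a local copy of rotate_letter so
# the snippet runs on its own; the plaintext is made up.
def _demo_rotate(letter, shift):
    rotated = ord(letter) + shift
    if rotated < ord('A'):
        rotated = ord('Z') - (ord('A') - rotated) + 1
    if rotated > ord('Z'):
        rotated = ord('A') + (rotated - ord('Z')) - 1
    return chr(rotated)
ciphered = ''.join(_demo_rotate(c, 3) for c in 'ATTACK')
print(ciphered)                                        # DWWDFN
print(''.join(_demo_rotate(c, -3) for c in ciphered))  # ATTACK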
|
plasmodic/hippodraw
|
examples/run_examples.py
|
Python
|
gpl-2.0
| 1,052
| 0.004753
|
"""
Run example scripts.
@author J. Chiang <jchiang@slac.stanford.edu>
@author Paul F. Kunz <Paul_Kunz@slac.stanford.edu>
"""
#$Id: run_examples.py,v 1.13 2006/10/03 20:02:20 pfkeb Exp $
import sys
from load_hippo import app, canvas
prompt = 1
scripts = []
scripts.append('static_vs_dynamic')
scripts.append('loglog')
scripts.append('datareps')
scripts.append('append_ntuple')
scripts.append('cut_multi_displays')
scripts.append('cuts_one_display')
scripts.append('cuts_complex')
scripts.append('function_ntuple')
scripts.append('fitting')
scripts.append('fitting2')
scripts.append('simple_xyplot')
scripts.append('mainpage')
scripts.append('fft')
scripts.append('displays')
def prompt(prompt = None):
if (prompt):
sys.stderr.write(prompt)
else:
sys.stderr.write("Hit return to continue: ")
x = sys.stdin.readline()
return x
print "Hit return to run named script"
for name in scripts :
prompt("Run %s: " % name)
canvas.clear()
command = 'import ' + name
exec(command)
print "All done. Enjoy!"
|
ivandm/django-geopositionmap
|
geopositionmap/geoWidgets.py
|
Python
|
mit
| 2,620
| 0.003817
|
# -*- coding: ISO-8859-1 -*-
"""
Form Widget classes specific to the geoSite admin site.
"""
# A class that corresponds to an HTML form widget,
# e.g. <input type="text"> or <textarea>.
# This handles rendering of the widget as HTML.
import json
from django.template.loader import render_to_string
from .conf import settings
from django.utils import six
from django import forms
from django.forms import widgets, MultiWidget, Media
from django.utils.html import conditional_escape, format_html, format_html_join
from django.forms.util import flatatt, to_current_timezone
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.utils.safestring import mark_safe
from django.templatetags.static import static
from . import LatLng
# widget class used by the forms.geoFields LatLngField field
class LatLngTextInputWidget(forms.MultiWidget):
def __init__(self, attrs=None):
widgets = (
forms.TextInput(),
forms.TextInput(),
)
super(LatLngTextInputWidget, self).__init__(widgets, attrs)
def decompress(self, value):
if isinstance(value, six.text_type):
return value.rsplit(',')
if value:
return [value.lat, value.lng]
        return [None, None]
def format_output(self, rendered_widgets):
return render_to_string('geopositionmap/widgets/geopositionmap.html', {
'latitude': {
'html': rendered_widgets[0],
'label': _("latitude"),
},
'longitude': {
'html': rendered_widgets[1],
'label': _("longitude"),
},
'config': {
'map_widget_height': settings.GEOPOSITIONMAP_MAP_WIDGET_HEIGHT,
'map_options': json.dumps(settings.GEOPOSITIONMAP_MAP_OPTIONS),
'marker_options': json.dumps(settings.GEOPOSITIONMAP_MARKER_OPTIONS),
'google_view': json.dumps(settings.GEOPOSITIONMAP_GOOGLE_VIEW),
'osm_view': json.dumps(settings.GEOPOSITIONMAP_OSM_VIEW),
}
})
class Media:
#extend = False
css = {
'all': (
'geopositionmap/geopositionmap.css',
'//cdn.leafletjs.com/leaflet-0.7.3/leaflet.css',
)
}
js = (
'//maps.google.com/maps/api/js?sensor=false',
'//cdn.leafletjs.com/leaflet-0.7.3/leaflet.js',
'geopositionmap/geopositionmap.js',
)
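# --- Added illustration (not part of the widget module): the three branches
# of decompress() above, reproduced standalone; LatLng is stubbed since only
# .lat/.lng are touched, the coordinates are made up, and the six package is
# assumed to be installed (the module above already depends on it).
import six
class _DemoLatLng(object):
    def __init__(self, lat, lng):
        self.lat, self.lng = lat, lng
def _demo_decompress(value):
    if isinstance(value, six.text_type):
        return value.rsplit(',')
    if value:
        return [value.lat, value.lng]
    return [None, None]
print(_demo_decompress(u"12.49,41.89"))             # ['12.49', '41.89']
print(_demo_decompress(_DemoLatLng(12.49, 41.89)))  # [12.49, 41.89]
print(_demo_decompress(None))                       # [None, None]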
|
whosken/reversedict
|
reversedict/__init__.py
|
Python
|
mit
| 1,266
| 0.014218
|
import elastic
import nlp
def lookup(description, synonyms=None):
'''
Look up words by their definitions
using the indexed terms and their synonyms.
'''
description = nlp.correct(description)
query = {'bool':{'must':get_definition_query(description)}}
synonym_query = get_synonym_query(description, synonyms)
if synonym_query:
query['bool']['should'] = synonym_query
query['bool']['minimum_should_match'] = 0
query['bool']['boost'] = 1.2
return search(query)
def search(query):
print 'searching', query
results = elastic.client.search(index=elastic.SEARCH_INDEX, body={'query':query})
return list(parse_results(results))
def get_definition_query(description, synonyms=None):
query = {'match':{'definitions':{'query':unicode(description),
'cutoff_frequency':0.001}}}
return query
def get_synonym_query(description, synonyms=None):
tokens = nlp.tokenize(description) + (synonyms or [])
if not tokens:
return None
return {'match':{'synonyms':{'query':tokens, 'operator':'or'}}}
def parse_results(results):
print 'found', results['hits'].get('total')
return (h['_source']['doc'] for h in results['hits'].get('hits',[]))
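# --- Added illustration (not part of the module): the dict lookup() assembles
# when synonyms match, written out once for orientation; the description and
# token values below are made up.
_example_query = {
    'bool': {
        'must': {'match': {'definitions': {'query': u'fear of spiders',
                                           'cutoff_frequency': 0.001}}},
        'should': {'match': {'synonyms': {'query': ['fear', 'spiders'],
                                          'operator': 'or'}}},
        'minimum_should_match': 0,
        'boost': 1.2,
    }
}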
|
carsongee/edx-platform
|
common/djangoapps/edxmako/__init__.py
|
Python
|
agpl-3.0
| 676
| 0.001479
|
# Copyright (c) 2008 Mikeal Rogers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
LOOKUP = {}
from .paths import add_lookup, lookup_template, clear_lookups
|
muneebalam/scrapenhl
|
scrapenhl/scrape_season.py
|
Python
|
mit
| 13,272
| 0.008137
|
import scrapenhl_globals
import scrape_game
def scrape_games(season, games, force_overwrite = False, pause = 1, marker = 10):
"""
Scrapes the specified games.
Parameters
-----------
season : int
The season of the game. 2007-08 would be 2007.
games : iterable of ints (e.g. list)
        The game id. This can range from 20001 to 21230 for regular season, and 30111 to 30417 for playoffs.
The preseason, all-star game, Olympics, and World Cup also have game IDs that can be provided.
force_overwrite : bool
        If True, will overwrite previously scraped raw html files. If False, will not scrape if files already found.
pause : float or int
The time to pause between requests to the NHL API. Defaults to 1 second
marker : float or int
The number of times to print progress. 10 will print every 10%; 20 every 5%.
"""
import time
import datetime
starttime = time.time()
games = sorted(list(games))
marker_i = [len(games)//marker * i for i in range(marker)]
marker_i[-1] = len(games) - 1
marker_i_set = set(marker_i)
for i in range(len(games)):
game = games[i]
newscrape = scrape_game.scrape_game(season, game, force_overwrite)
if newscrape: #only sleep if had to scrape a new game
time.sleep(pause)
if i in marker_i_set:
print('Done through', season, game, ' ~ ', round((marker_i.index(i)) * 100/marker), '% in',
str(datetime.timedelta(seconds = time.time() - starttime)))
print('Done scraping games in', season)
def scrape_season(season, startgame = None, endgame = None, force_overwrite = False, pause = 1):
"""
Scrapes games for the specified season.
Parameters
-----------
season : int
The season of the game. 2007-08 would be 2007.
startgame : int
The game id at which scraping will start. For example, midway through a season, this can be the last game
scraped.
This can range from 20001 to 21230 for regular season, and 30111 to 30417 for playoffs.
The preseason, all-star game, Olympics, and World Cup also have game IDs that can be provided.
force_overwrite : bool
        If True, will overwrite previously scraped raw html files. If False, will not scrape if files already found.
pause : float or int
The time to pause between requests to the NHL API. Defaults to 1 second
"""
if season != 2012:
games = [20000 + x for x in range(1, 1231)]
else:
games = [20000 + x for x in range(1, 721)]
for round in range(1, 5):
for series in range(1, 8//round + 1):
for game in range(1, 8):
games.append(int('30{0:d}{1:d}{2:d}'.format(round, series, game)))
if startgame is not None:
games = [g for g in games if g >= startgame]
if endgame is not None:
games = [g for g in games if g <= endgame]
scrape_games(season, games, force_overwrite, pause, 10)
def get_team_pbplog_filename(season, team):
return '{0:s}Team logs/{2:s}{1:d}_pbp.feather'.format(scrapenhl_globals.SAVE_FOLDER, season, team)
def get_team_toilog_filename(season, team):
return '{0:s}Team logs/{2:s}{1:d}_toi.feather'.format(scrapenhl_globals.SAVE_FOLDER, season, team)
def update_teamlogs(season, force_overwrite = False):
import os
import feather
import pandas as pd
import os.path
basic_gamelog = scrapenhl_globals.get_quick_gamelog_file()
teams = {x for x in \
basic_gamelog.query('Season == {0:d}'.format(season))['Home'].drop_duplicates()} | \
{x for x in \
basic_gamelog.query('Season == {0:d}'.format(season))['Away'].drop_duplicates()}
temp = basic_gamelog
### List files in correct format
allfiles = os.listdir(scrapenhl_globals.get_season_folder(season))
pbpfiles = {int(x[:5]): x for x in allfiles if x[-12:] == '_parsed.zlib'}
toifiles = {int(x[:5]): x for x in allfiles if x[-19:] == '_shifts_parsed.zlib'}
for team in teams:
teamgames = {int(g) for g in basic_gamelog.query('Season == {0:d} & (Home == "{1:s}" | Away == "{1:s}")'.format(
season, team))['Game'].values}
current_pbp = None
games_already_done = set()
if os.path.exists(get_team_pbplog_filename(season, team)):
current_pbp = feather.read_dataframe(get_team_pbplog_filename(season, team))
games_already_done = {x for x in current_pbp.Game}
dflist = []
if not force_overwrite and current_pbp is not None:
dflist.append(current_pbp)
teamgames = {int(g) for g in teamgames if g not in games_already_done}
### TODO do I need to flip any columns?
#if force_overwrite:
for game in teamgames:
try:
df = pd.read_hdf(scrape_game.get_parsed_save_filename(season, game))
df = df.assign(Game = game)
if df is not None:
dflist.append(df)
except FileNotFoundError:
pass
if len(dflist) > 0:
new_pbp = pd.concat(dflist)
for col in new_pbp.columns:
if new_pbp[col].dtype == 'object':
new_pbp[col] = new_pbp[col].astype(str)
feather.write_dataframe(new_pbp, get_team_pbplog_filename(season, team))
current_toi = None
games_already_done = set()
if os.path.exists(get_team_toilog_filename(season, team)):
current_toi = feather.read_dataframe(get_team_toilog_filename(season, team))
games_already_done = {x for x in current_toi.Game}
### TODO issues here
dflist = []
if not force_overwrite:
dflist.append(current_toi)
teamgames = {g for g in teamgames if g not in games_already_done}
#if force_overwrite:
for game in teamgames:
try:
df = pd.read_hdf(scrape_game.get_parsed_shifts_save_filename(season, game))
df = df.assign(Game = game)
cols_to_replace = {col for col in df.columns if str.isdigit(col[-1]) if col[:3] != team}
df.rename(columns = {col: 'Opp' + col[3:] for col in cols_to_replace}, inplace = True)
if df is not None:
dflist.append(df)
except FileNotFoundError:
pass
import pandas as pd
dflist = [df for df in dflist if df is not None]
if len(dflist) > 0:
new_toi = pd.concat(dflist)
for col in new_toi.columns:
if new_toi[col].dtype == 'object':
new_toi[col] = new_toi[col].astype(str)
feather.write_dataframe(new_toi, get_team_toilog_filename(season, team))
def get_team_toilog(season, team):
import feather
return feather.read_dataframe(get_team_toilog_filename(season, team))
def get_team_pbplog(season, team):
import feather
return feather.read_dataframe(get_team_pbplog_filename(season, team))
def get_season_schedule_url(season):
return 'https://statsapi.web.nhl.com/api/v1/schedule?startDate={0:d}-09-01&endDate={1:d}-06-25'.format(season,
season + 1)
def parse_games(season, games, force_overwrite = False, marker = 10):
"""
Parses the specified games.
Parameters
-----------
season : int
The season of the game. 2007-08 would be 2007.
games : iterable of ints (e.g. list)
The game id. This can range from 20001 to 21230 for regular season, and 30111 to 30417 for playoffs.
The preseason, all-star game, Olympics, and World Cup also have game IDs that can be provided.
force_overwrite : bool
        If True, will overwrite previously parsed files. If False, will not parse if files already found.
marker : float or int
The number of times to print progress. 10 will print every 10%; 20 every 5%.
"""
import time
import datetime
starttime = time.time()
games = sorted(list(games))
marker_i =
|
Thortoise/Super-Snake
|
Blender/animation_nodes-master/nodes/vector/combine_vector.py
|
Python
|
gpl-3.0
| 729
| 0.002743
|
import bpy
from ... base_types.node import AnimationNode
class CombineVectorNode(bpy.types.Node, AnimationNode):
bl_idname = "an_CombineVectorNode"
bl_label = "Combine Vector"
dynamicLabelType = "HIDDEN_ONLY"
def create(self):
self.newInput("Float", "X", "x")
self.newInput("Float", "Y", "y")
self.newInput("Float", "Z", "z")
self.newOutput("Vector", "Vector", "vector")
def drawLabel(self):
label = "<X, Y, Z>"
for axis in "XYZ":
if self.inputs[axis].isUnlinked:
label = label.replace(axis, str(round(self.inputs[axis].value, 4)))
return label
def getExecutionCode(self):
        return "vector = Vector((x, y, z))"
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractRumorsBlock.py
|
Python
|
bsd-3-clause
| 393
| 0.022901
|
def extractRumorsBlock(item):
"""
"""
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or 'preview' in item['title'].lower():
return None
if "Rumor's Block" in item['tags'] and 'chapter' in item['title'].lower():
return buildReleaseMessageWithType(item, "Rumor's Block", vol, chp, frag=frag, postfix=postfix, tl_type='oel')
	return False
|
kamcpp/tensorflow
|
tensorflow/contrib/rnn/python/ops/lstm_ops.py
|
Python
|
apache-2.0
| 23,693
| 0.005149
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""LSTM Block Cell ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from tensorflow.contrib.rnn.python.ops import fused_rnn_cell
from tensorflow.contrib.util import loader
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import rnn_cell
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import resource_loader
_lstm_ops_so = loader.load_op_library(
resource_loader.get_path_to_datafile("_lstm_ops.so"))
# pylint: disable=invalid-name
def _lstm_block_cell(x,
cs_prev,
h_prev,
w,
b,
wci=None,
wcf=None,
wco=None,
forget_bias=None,
cell_clip=None,
use_peephole=None,
name=None):
r"""Computes the LSTM cell forward propagation for 1 time step.
This implementation uses 1 weight matrix and 1 bias vector, and there's an
optional peephole connection.
This kernel op implements the following mathematical equations:
```python
xh = [x, h_prev]
[i, f, ci, o] = xh * w + b
f = f + forget_bias
if not use_peephole:
wci = wcf = wco = 0
i = sigmoid(cs_prev * wci + i)
f = sigmoid(cs_prev * wcf + f)
ci = tanh(ci)
cs = ci .* i + cs_prev .* f
cs = clip(cs, cell_clip)
  o = sigmoid(cs * wco + o)
co = tanh(cs)
h = co .* o
```
Args:
x: A `Tensor`. Must be one of the following types: `float32`.
The input to the LSTM cell, shape (batch_size, num_inputs).
cs_prev: A `Tensor`. Must have the same type as `x`.
Value of the cell state at previous time step.
h_prev: A `Tensor`. Must have the same type as `x`.
Output of the previous cell at previous time step.
w: A `Tensor`. Must have the same type as `x`. The weight matrix.
b: A `Tensor`. Must have the same type as `x`. The bias vector.
wci: A `Tensor`. Must have the same type as `x`.
The weight matrix for input gate peephole connection.
wcf: A `Tensor`. Must have the same type as `x`.
The weight matrix for forget gate peephole connection.
wco: A `Tensor`. Must have the same type as `x`.
The weight matrix for output gate peephole connection.
forget_bias: An optional `float`. Defaults to `1`. The forget gate bias.
cell_clip: An optional `float`. Defaults to `3`.
Value to clip the 'cs' value to.
use_peephole: An optional `bool`. Defaults to `False`.
Whether to use peephole weights.
name: A name for the operation (optional).
Returns:
A tuple of `Tensor` objects (i, cs, f, o, ci, co, h).
i: A `Tensor`. Has the same type as `x`. The input gate.
cs: A `Tensor`. Has the same type as `x`. The cell state before the tanh.
f: A `Tensor`. Has the same type as `x`. The forget gate.
o: A `Tensor`. Has the same type as `x`. The output gate.
ci: A `Tensor`. Has the same type as `x`. The cell input.
co: A `Tensor`. Has the same type as `x`. The cell after the tanh.
h: A `Tensor`. Has the same type as `x`. The output h vector.
Raises:
ValueError: If cell_size is None.
"""
if wci is None:
cell_size = cs_prev.get_shape().with_rank(2)[1].value
if cell_size is None:
raise ValueError("cell_size from `cs_prev` should not be None.")
wci = array_ops.constant(0, dtype=dtypes.float32, shape=[cell_size])
wco = wci
wcf = wci
# pylint: disable=protected-access
return _lstm_ops_so.lstm_block_cell(
x=x,
      cs_prev=cs_prev,
h_prev=h_prev,
w=w,
wci=wci,
wco=wco,
wcf=wcf,
b=b,
forget_bias=forget_bias,
cell_clip=cell_clip,
use_peephole=use_peephole,
name=name)
# pylint: enable=protected-access
def _block_lstm(seq_len_max,
x,
w,
b,
cs_prev=None,
h_prev=None,
wci=None,
wcf=None,
wco=None,
forget_bias=None,
cell_clip=None,
use_peephole=None,
name=None):
r"""TODO(williamchan): add doc.
Args:
seq_len_max: A `Tensor` of type `int64`.
x: A list of at least 1 `Tensor` objects of the same type in: `float32`.
w: A `Tensor`. Must have the same type as `x`.
b: A `Tensor`. Must have the same type as `x`.
cs_prev: A `Tensor`. Must have the same type as `x`.
h_prev: A `Tensor`. Must have the same type as `x`.
wci: A `Tensor`. Must have the same type as `x`.
wcf: A `Tensor`. Must have the same type as `x`.
wco: A `Tensor`. Must have the same type as `x`.
forget_bias: An optional `float`. Defaults to `1`.
cell_clip: An optional `float`. Defaults to `3`.
use_peephole: An optional `bool`. Defaults to `False`.
name: A name for the operation (optional).
Returns:
A tuple of `Tensor` objects (i, cs, f, o, ci, co, h).
i: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
cs: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
f: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
o: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
ci: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
co: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
h: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
Raises:
ValueError: If `b` does not have a valid shape.
"""
batch_size = x[0].get_shape().with_rank(2)[0].value
cell_size4 = b.get_shape().with_rank(1)[0].value
if cell_size4 is None:
raise ValueError("`b` shape must not be None.")
cell_size = cell_size4 / 4
zero_state = None
if cs_prev is None or h_prev is None:
zero_state = array_ops.constant(
0, dtype=dtypes.float32, shape=[batch_size, cell_size])
if cs_prev is None:
cs_prev = zero_state
if h_prev is None:
h_prev = zero_state
if wci is None:
wci = array_ops.constant(0, dtype=dtypes.float32, shape=[cell_size])
wco = wci
wcf = wci
# pylint: disable=protected-access
i, cs, f, o, ci, co, h = _lstm_ops_so.block_lstm(
seq_len_max=seq_len_max,
x=array_ops.pack(x),
cs_prev=cs_prev,
h_prev=h_prev,
w=w,
wci=wci,
wco=wco,
wcf=wcf,
b=b,
forget_bias=forget_bias,
cell_clip=cell_clip,
name=name,
use_peephole=use_peephole)
return array_ops.unpack(i), array_ops.unpack(cs), array_ops.unpack(
f), array_ops.unpack(o), array_ops.unpack(ci), array_ops.unpack(
co), array_ops.unpack(h)
# pylint: enable=protected-access
# pylint: enable=invalid-name
_lstm_block_cell_grad_outputs = ["cs_prev_grad", "dicfo"]
ops.RegisterShape("LSTMBlockCell")(common_shapes.call_cpp_shape_fn)
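# --- Added illustration (not TensorFlow code): the docstring equations of
# _lstm_block_cell, stepped once in numpy with peepholes disabled; all shapes
# and values are made up, and numpy is assumed to be available.
import numpy as np
def _demo_sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))
num_inputs, cell_size = 3, 2
x = np.ones((1, num_inputs))
cs_prev = np.zeros((1, cell_size))
h_prev = np.zeros((1, cell_size))
w = np.full((num_inputs + cell_size, 4 * cell_size), 0.1)
b = np.zeros(4 * cell_size)
xh = np.concatenate([x, h_prev], axis=1)
i, f, ci, o = np.split(xh.dot(w) + b, 4, axis=1)
f = f + 1.0                                    # forget_bias; wci = wcf = wco = 0
i, f, ci = _demo_sigmoid(i), _demo_sigmoid(f), np.tanh(ci)
cs = ci * i + cs_prev * f
o = _demo_sigmoid(o)                           # wco is zero, so no cs term here
h = np.tanh(cs) * o
print(cs, h)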
|
PuzzleboxIO/orbit-python
|
Puzzlebox/Orbit/__init__.py
|
Python
|
agpl-3.0
| 229
| 0
|
# -*- coding: utf-8 -*-
__doc__ = """\
Copyright Puzzlebox Productions, LLC (2010)
This code is released under the GNU Public License (GPL) version 2
For more information please refer to http://www.gnu.org/copyleft/gpl.html
"""
|
ArseniyK/Sunflower
|
application/gui/input_dialog.py
|
Python
|
gpl-3.0
| 60,030
| 0.023755
|
import os
import gtk
import time
import locale
import fnmatch
import user
from plugin_base.provider import FileType, Support as ProviderSupport
from common import get_user_directory, UserDirectory
from widgets.completion_entry import PathCompletionEntry
from queue import OperationQueue
# constants
class OverwriteOption:
RENAME = 0
NEW_NAME = 1
APPLY_TO_ALL = 2
class InputDialog:
"""Simple input dialog
This class can be extended with additional custom controls
by accessing locally stored objects. Initially this dialog
contains single label and text entry, along with two buttons.
"""
def __init__(self, application):
self._dialog = gtk.Dialog(parent=application)
self._application = application
self._dialog.set_default_size(340, 10)
self._dialog.set_resizable(True)
self._dialog.set_skip_taskbar_hint(True)
self._dialog.set_modal(True)
self._dialog.set_transient_for(application)
self._dialog.set_wmclass('Sunflower', 'Sunflower')
self._dialog.vbox.set_spacing(0)
self._container = gtk.VBox(False, 0)
self._container.set_border_width(5)
# create interface
vbox = gtk.VBox(False, 0)
self._label = gtk.Label('Label')
self._label.set_alignment(0, 0.5)
self._entry = gtk.Entry()
self._entry.connect('activate', self._confirm_entry)
button_ok = gtk.Button(stock=gtk.STOCK_OK)
button_ok.connect('clicked', self._confirm_entry)
button_ok.set_can_default(True)
button_cancel = gtk.Button(stock=gtk.STOCK_CANCEL)
# pack interface
vbox.pack_start(self._label, False, False, 0)
vbox.pack_start(self._entry, False, False, 0)
self._container.pack_start(vbox, False, False, 0)
self._dialog.add_action_widget(button_cancel, gtk.RESPONSE_CANCEL)
self._dialog.action_area.pack_end(button_ok, False, False, 0)
self._dialog.set_default_response(gtk.RESPONSE_OK)
self._dialog.vbox.pack_start(self._container, True, True, 0)
self._dialog.show_all()
def _confirm_entry(self, widget, data=None):
"""Enable user to confirm by pressing Enter"""
if self._entry.get_text() != '':
self._dialog.response(gtk.RESPONSE_OK)
def set_title(self, title_text):
"""Set dialog title"""
self._dialog.set_title(title_text)
def set_label(self, label_text):
"""Provide an easy way to set label text"""
self._label.set_text(label_text)
def set_text(self, entry_text):
"""Set main entry text"""
self._entry.set_text(entry_text)
def set_password(self):
"""Set field as password input"""
self._entry.set_property('caps-lock-warning', True)
self._entry.set_visibility(False)
def get_response(self):
"""Return value and self-destruct
This method returns tuple with response code and
input text.
"""
code = self._dialog.run()
result = self._entry.get_text()
self._dialog.destroy()
return code, result
class LinkDialog(InputDialog):
"""Input dialog for creating symbolic or hard links"""
def __init__(self, application):
InputDialog.__init__(self, application)
self.set_title(_('Create link'))
self.set_label(_('Enter new link name:'))
self._container.set_spacing(5)
# create user interface
vbox_original_path = gtk.VBox(False, 0)
hbox_original_path = gtk.HBox(False, 5)
label_original_path = gtk.Label(_('Original path:'))
label_original_path.set_alignment(0, 0.5)
self._entry_original_path = gtk.Entry()
# create checkbox
self._checkbox_hard_link = gtk.CheckButton(_('Create hard link'))
# create browse button
button_browse = gtk.Button(_('Browse'))
button_browse.connect('clicked', self._browse_original_path)
# pack interface
hbox_original_path.pack_start(self._entry_original_path, True, True, 0)
hbox_original_path.pack_start(button_browse, False, False, 0)
vbox_original_path.pack_start(label_original_path, False, False, 0)
vbox_original_path.pack_start(hbox_original_path, False, False, 0)
self._container.pack_start(vbox_original_path, False, False, 0)
self._container.pack_start(self._checkbox_hard_link, False, False, 0)
# show all widgets
self._container.show_all()
def _browse_original_path(self, widget, data=None):
"""Show file selection dialog"""
dialog = gtk.FileChooserDialog(
title=_('Select original path'),
parent=self._application,
action=gtk.FILE_CHOOSER_ACTION_OPEN,
buttons=(
gtk.STOCK_CANCEL,
gtk.RESPONSE_REJECT,
gtk.STOCK_OK,
gtk.RESPONSE_ACCEPT
)
)
response = dialog.run()
if response == gtk.RESPONSE_ACCEPT:
self._entry_original_path.set_text(dialog.get_filename())
# if link name is empty, add original path name
if self._entry.get_text() == '':
self._entry.set_text(os.path.basename(dialog.get_filename()))
dialog.destroy()
def set_original_path(self, path):
"""Set original path where link point to"""
if path is not None:
self._entry_original_path.set_text(path)
def set_hard_link(self, hard_link=True):
"""Set hard link option state"""
if self._checkbox_hard_link.is_sensitive():
self._checkbox_hard_link.set_active(hard_link)
def set_hard_link_supported(self, supported):
"""Set checkbox state for hard link"""
self._checkbox_hard_link.set_sensitive(supported)
def get_response(self):
"""Return value and self-destruct"""
code = self._dialog.run()
original_path = self._entry_original_path.get_text()
link_name = self._entry.get_text()
hard_link = self._checkbox_hard_link.get_active()
self._dialog.destroy()
return code, original_path, link_name, hard_link
class CreateDialog(InputDialog):
"""Generic create file/directory dialog"""
def __init__(self, application):
InputDialog.__init__(self, application)
self._permission_updating = False
self._mode = 0644
self._dialog_size = None
self._container.set_spacing(5)
# create advanced options expander
expander = gtk.Expander(_('Advanced options'))
expander.connect('activate', self._expander_event)
expander.set_border_width(0)
self._advanced = gtk.VBox(False, 5)
self._advanced.set_border_width(5)
table = gtk.Table(4, 4, False)
# create widgets
label = gtk.Label(_('User:'))
label.set_alignment(0, 0.5)
table.attach(label, 0, 1, 0, 1)
label = gtk.Label(_('Group:'))
label.set_alignment(0, 0.5)
table.attach(label, 0, 1, 1, 2)
label = gtk.Label(_('Others:'))
label.set_alignment(0, 0.5)
table.attach(label, 0, 1, 2, 3)
# owner checkboxes
self._permission_owner_read = gtk.CheckButton(_('Read'))
self._permission_owner_read.connect('toggled', self._update_octal, (1 << 2) * 100)
table.attach(self._permission_owner_read, 1, 2, 0, 1)
self._permission_owner_write = gtk.CheckButton(_('Write'))
self._permission_owner_write.connect('toggled', self._update_octal, (1 << 1) * 100)
table.attach(self._permission_owner_write, 2, 3, 0, 1)
self._permission_owner_execute = gtk.CheckButton(_('Execute'))
self._permission_owner_execute.connect('toggled', self._update_octal, (1 << 0) * 100)
table.attach(self._permission_owner_execute, 3, 4, 0, 1)
# group checkboxes
self._permission_group_read = gtk.CheckButton(_('Read'))
self._permission_group_read.connect('toggled', self._update_octal, (1 << 2) * 10)
		table.attach(self._permission_group_read, 1, 2, 1, 2)
self._permission_group_write = gtk.CheckButton(_('Write'))
self._permission_group_write.connect('toggled', self._update_octal, (1 << 1) * 10)
table.attach(self._permission_group_write, 2, 3, 1, 2)
self._permission_group_execute = gtk.CheckButton(_('Execute'))
self._permission_group_execute.connect('toggled', self._update_octal, (1 << 0) * 10)
table.attach(self._permission_group_execute, 3, 4, 1, 2)
# others checkboxes
self._permission_others_read = gtk.CheckButton(_('Read'))
self._permission_others_read.connect('toggled', self._update_octal, (1 << 2))
table.attach(self._permission_others_read, 1, 2, 2, 3)
self._permission_others_write = gtk.CheckButton(_('Write'))
self._permission_others_write.connect('toggled', self._update_octal, (1 << 1))
table.attach(self._permission_others_write, 2, 3, 2, 3)
self._permission_others_execute = gtk.CheckButton(_('Execute'))
		self._permission_others_execute.connect('toggled', self._update_octal, (1 << 0))
|
kiip/statsite
|
tests/unit/test_collector.py
|
Python
|
bsd-3-clause
| 2,419
| 0.000827
|
"""
Contains tests for the collector base class.
"""
import pytest
from tests.base import TestBase
from statsite.metrics import Counter, KeyValue, Timer
from statsite.collector import Collector
class TestCollector(TestBase):
def test_stores_aggregator(self):
"""
Tests that collectors will store aggregator objects.
"""
agg = object()
assert agg is Collector(agg).aggregator
    def test_parse_metrics_succeeds(self):
"""
Tests that parsing metrics succeeds and returns an array
of proper metric objects.
"""
message = "\n".join(["k:1|kv", "j:27|ms"])
results = Collector(None)._parse_metrics(message)
assert isinstance(results[0], KeyValue)
assert isinstance(results[1], Timer)
def test_parse_metrics_suppress_error(self):
"""
        Tests that parsing metrics will suppress errors if requested.
"""
message = "k:1|nope"
results = Collector(None)._parse_metrics(message)
assert 0 == len(results)
def test_parse_metrics_keeps_good_metrics(self, aggregator):
"""
Tests that parse_metrics will keep the good metrics in the face
of an error.
"""
message = "\n".join(["k::1|c",
"j:2|nope",
"k:2|ms"])
results = Collector(aggregator)._parse_metrics(message)
assert [Timer("k", 2)] == results
def test_parse_metrics_ignores_blank_lines(self, aggregator):
"""
Tests that parse_metrics will properly ignore blank lines.
"""
message = "\n".join(["", "k:2|ms"])
assert [Timer("k", 2)] == Collector(aggregator)._parse_metrics(message)
def test_add_metrics(self, aggregator):
"""
Tests that add_metrics successfully adds an array of metrics to
the configured aggregator.
"""
now = 17
metrics = [KeyValue("k", 1, now), Counter("j", 2)]
Collector(aggregator)._add_metrics(metrics)
assert metrics == aggregator.metrics
def test_set_aggregator(self, aggregator):
"""
Tests that setting an aggregator properly works.
"""
coll = Collector(aggregator)
new_agg = object()
assert aggregator is coll.aggregator
coll.set_aggregator(new_agg)
assert new_agg is coll.aggregator
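# --- Added illustration (not part of the test module): the statsd-style
# "key:value|type" lines these tests feed to _parse_metrics, split by hand
# to show the three fields involved.
for _line in "k:1|kv\nj:27|ms".split("\n"):
    _key, _rest = _line.split(":", 1)
    _value, _kind = _rest.split("|", 1)
    print(_key, _value, _kind)  # k 1 kv, then j 27 ms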
|
SCORE42/nodeshot
|
nodeshot/community/participation/serializers.py
|
Python
|
gpl-3.0
| 2,237
| 0.001788
|
from django.contrib.auth import get_user_model
User = get_user_model()
from rest_framework import serializers
from nodeshot.core.base.serializers import ModelValidationSerializer
from nodeshot.community.profiles.serializers import ProfileRelationSerializer
from .models import Comment, Vote, Rating, NodeRatingCount
__all__ = ['CommentSerializer',
'RatingSerializer',
'CommentRelationSerializer',
'VoteSerializer',
'ParticipationSerializer']
class AutoNodeMixin(object):
"""
automatically adds node to validated_data
    the node info is taken from views that extend NodeRelationViewMixin
"""
def validate(self, data):
data['node'] = self.context['view'].node
        return super(AutoNodeMixin, self).validate(data)
class CommentSerializer(AutoNodeMixin, ModelValidationSerializer):
""" Comment serializer """
node = serializers.ReadOnlyField(source='node.name')
username = serializers.ReadOnlyField(source='user.username')
class Meta:
model = Comment
fields = ('node', 'username', 'text', 'added')
read_only_fields = ('added',)
class CommentRelationSerializer(serializers.ModelSerializer):
""" display user info """
user = ProfileRelationSerializer()
class Meta:
model = Comment
fields = ('user', 'text', 'added',)
class RatingSerializer(AutoNodeMixin, ModelValidationSerializer):
""" Rating serializer """
node = serializers.ReadOnlyField(source='node.name')
username = serializers.ReadOnlyField(source='user.username')
class Meta:
model = Rating
fields = ('node', 'username', 'value',)
read_only_fields = ('added',)
class VoteSerializer(AutoNodeMixin, ModelValidationSerializer):
node = serializers.ReadOnlyField(source='node.name')
username = serializers.ReadOnlyField(source='user.username')
class Meta:
model = Vote
fields = ('node', 'username', 'vote',)
read_only_fields = ('added',)
class ParticipationSerializer(serializers.ModelSerializer):
class Meta:
model = NodeRatingCount
fields = ('likes', 'dislikes', 'rating_count',
'rating_avg', 'comment_count')
|
deo1/deo1
|
Legacy/PythonTutorial/18InnerClasses.py
|
Python
|
mit
| 630
| 0.006349
|
__author__ = 'jbowman'
# https://pythonspot.com/inner-classes/
class Human:
def __init__(self, name):
self.name = name
self.head = self.Head()
def addhead(self):
self.head2 = self.Head()
class Head:
def __init__(self):
self.brain = self.Brain()
def talk(self):
            return 'talking...'
class Brain:
def think(self):
return 'thinking...'
if __name__ == '__main__': # execute only if run as a script directly
joey = Human('Joey')
print(joey.name)
print(joey.head.talk())
print(joey.head.brain.think())
|
PabloVallejo/docker-django
|
manage.py
|
Python
|
mit
| 249
| 0
|
#!/usr/bin/env python3.5
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJ
|
ANGO_SETTINGS_MODULE", "app.settings")
    from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
zhiwliu/openshift-ansible
|
roles/openshift_aws/filter_plugins/openshift_aws_filters.py
|
Python
|
apache-2.0
| 2,484
| 0.003221
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Custom filters for use in openshift_aws
'''
from ansible import errors
class FilterModule(object):
''' Custom ansible filters for use by openshift_aws role'''
@staticmethod
def scale_groups_serial(scale_group_info, upgrade=False):
''' This function will determine what the deployment serial should be and return it
Search through the tags and find the deployment_serial tag. Once found,
determine if an increment is needed during an upgrade.
if upgrade is true then increment the serial and return it
else return the serial
'''
if scale_group_info == []:
return 1
scale_group_info = scale_group_info[0]
if not isinstance(scale_group_info, dict):
raise errors.AnsibleFilterError("|filter plugin failed: Expected scale_group_info to be a dict")
serial = None
for tag in scale_group_info['tags']:
if tag['key'] == 'deployment_serial':
serial = int(tag['value'])
if upgrade:
serial += 1
break
else:
raise errors.AnsibleFilterError("|filter plugin failed: deployment_serial tag was not found")
return serial
@staticmethod
def scale_groups_match_capacity(scale_group_info):
''' This function will verify that the scale group instance count matches
the scale group desired capacity
'''
for scale_group in scale_group_info:
if scale_group['desired_capacity'] != len(scale_group['instances']):
return False
return True
@staticmethod
def build_instance_tags(clusterid):
''' This function will return a dictionary of the instance tags.
The main desire to have this inside of a filter_plugin is that we
need to build the following key.
{"kubernetes.io/cluster/{{ openshift_aws_clusterid }}": "{{ openshift_aws_clusterid}}"}
'''
        tags = {'clusterid': clusterid,
                'kubernetes.io/cluster/{}'.format(clusterid): clusterid}
return tags
def filters(self):
''' returns a mapping of filters to methods '''
return {'build_instance_tags': self.build_instance_tags,
'scale_groups_match_capacity': self.scale_groups_match_capacity,
'scale_groups_serial': self.scale_groups_serial}
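# --- Added usage sketch (not part of the plugin): the filter methods can be
# exercised directly; the cluster id and tag values below are made up, and
# running this standalone assumes the ansible import at the top resolves.
_fm = FilterModule()
print(_fm.filters()['build_instance_tags']('mycluster'))
# {'clusterid': 'mycluster', 'kubernetes.io/cluster/mycluster': 'mycluster'}
print(_fm.filters()['scale_groups_serial'](
    [{'tags': [{'key': 'deployment_serial', 'value': '3'}]}], upgrade=True))  # 4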
|
pathespe/MarkerBot
|
tests/resources/session_6.py
|
Python
|
mit
| 2,838
| 0.015856
|
def get_attendance_records(file_path):
attendance_file = open(file_path,'r')
lines = attendance_file.readlines()
attendance_file.close()
header = lines[0]
attendance_records = lines[1:]
return attendance_records
def convert_attendance_record_to_bools(sessions):
sessions_bool = []
for session in sessions:
if session == 'Yes':
sessions_bool.append(1)
else:
sessions_bool.append(0)
return sessions_bool
def session_attendance(file_path):
number_of_sessions = 9
session_attendance = {u'Session_0':0, u'Session_1':0, u'Session_2':0, u'Session_3':0, u'Session_4':0, u'Session_5':0, u'Session_6':0, u'Session_7':0, u'Session_8':0}
attendee_consistency = {u'0_Sessions':0, u'1_Sessions':0, u'2_Sessions':0, u'3_Sessions':0, u'4_Sessions':0, u'5_Sessions':0, u'6_Sessions':0, u'7_Sessions':0, u'8_Sessions':0, u'9_Sessions':0}
attendance_records = get_attendance_records(file_path)
for record in attendance_records:
record = record.strip('\n').split(',') # convert record from a string to a list
sessions = convert_attendance_record_to_bools(record[2:])
number_of_sessions = len(sessions)
number_of_sessions_attended = str(sum(sessions))+'_Sessions'
# add record to attendee_consitency dictionary
attendee_consistency[number_of_sessions_attended] += 1
# add record to session attendance dictionary
for i in range(number_of_sessions):
key = u'Session_'+ str(i)
session_attendance[key] += sessions[i]
return {
u"by_attendee" : attendee_consistency,
u"by_session" : session_attendance
}
# print session_attendance('attendance.csv')
import string
import collections
from operator import itemgetter
IGNORE = {
'a', 'also', 'an', 'and', 'are', 'as', 'be', 'by', 'can', 'do', 'for', 'from',
    'have', 'in', 'is', 'it', 'just', 'more', 'not', 'of', 'on', 'or', 'our',
'over', 'than', 'that', 'the', 'their', 'these', 'they', 'this', 'those',
'to', 'up', 'we', 'with'
}
def build_word_counter(file_path):
with open(file_path, 'r') as f:
speech = f.read()
chars_to_remove = list(string.punctuation) + ['\n'] + list(string.digits)
for char in chars_to_remove:
speech = speech.replace(char, '')
    return collections.Counter(w.lower() for w in speech.split() if w.lower() not in IGNORE)
def common_words(file_path):
word_counter = build_word_counter(file_path)
return sorted(w.decode('utf-8') for w in word_counter if word_counter[w] > 10)
def most_used_words(file_path):
word_counter = build_word_counter(file_path)
word_counter_sorted = sorted(word_counter.most_common(20), key=itemgetter(1,0))
return [word.decode('utf-8') for word, _ in word_counter_sorted]
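# --- Added usage sketch (not part of the original session script): the
# attendance helper above on an in-memory record, so no CSV file is needed;
# the Yes/No values are made up.
_sessions = ['Yes', 'No', 'Yes', 'Yes']
_bools = convert_attendance_record_to_bools(_sessions)
print(_bools)       # [1, 0, 1, 1]
print(sum(_bools))  # 3 sessions attended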
|
xorpaul/shinken
|
shinken/objects/checkmodulation.py
|
Python
|
agpl-3.0
| 4,462
| 0.00381
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2012:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# Gregory Starck, g.starck@gmail.com
# Hartmut Goebel, h.goebel@goebel-consult.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
import time
from item import Item, Items
from shinken.property import BoolProp, IntegerProp, StringProp, ListProp
from shinken.util import to_name_if_possible
from shinken.log import logger
class CheckModulation(Item):
id = 1 # zero is always special in database, so we do not take risk here
my_type = 'checkmodulation'
properties = Item.properties.copy()
properties.update({
'checkmodulation_name': StringProp(fill_brok=['full_status']),
'check_command': StringProp(fill_brok=['full_status']),
'check_period' : StringProp(brok_transformation=to_name_if_possible, fill_brok=['full_status']),
})
running_properties = Item.running_properties.copy()
_special_properties = ('check_period',)
macros = {}
# For debugging purpose only (nice name)
def get_name(self):
return self.checkmodulation_name
# Will look at if our check_period is ok, and give our check_command if we got it
def get_check_command(self, t_to_go):
if not self.check_period or self.check_period.is_time_valid(t_to_go):
return self.check_command
return None
# Should have all properties, or a void check_period
def is_correct(self):
state = True
cls = self.__class__
# Raised all previously saw errors like unknown commands or timeperiods
if self.configuration_errors != []:
state = False
for err in self.configuration_errors:
logger.error("[item::%s] %s" % (self.get_name(), err))
for prop, entry in cls.properties.items():
if prop not in cls._special_properties:
if not hasattr(self, prop) and entry.required:
logger.warning("[checkmodulation::%s] %s property not set" % (self.get_name(), prop))
state = False # Bad boy...
# Ok now we manage special cases...
# Service part
if not hasattr(self, 'check_command'):
logger.warning("[checkmodulation::%s] do not have any check_command defined" % self.get_name())
state = False
else:
if self.check_command is None:
logger.warning("[checkmodulation::%s] a check_command is missing" % self.get_name())
state = False
if not self.check_command.is_valid():
logger.warning("[checkmodulation::%s] a check_command is invalid" % self.get_name())
state = False
# Ok just put None as check_period, means 24x7
if not hasattr(self, 'check_period'):
self.check_period = None
return state
# In the scheduler we need to relink the commandCall with
# the real commands
def late_linkify_cw_by_commands(self, commands):
if self.check_command:
self.check_command.late_linkify_with_command(commands)
class CheckModulations(Items):
name_property = "checkmodulation_name"
    inner_class = CheckModulation
def linkify(self, timeperiods, commands):
self.linkify_with_timeperiods(timeperiods, 'check_period')
self.linkify_one_command_with_commands(commands, 'check_command')
def new_inner_member(self, name=None, params={}):
if name is None:
name = CheckModulation.id
params['checkmodulation_name'] = name
#print "Asking a new inner checkmodulation from name %s with params %s" % (name, params)
cw = CheckModulation(params)
self.items[cw.id] = cw
|
wasserfeder/lomap
|
lomap/algorithms/__init__.py
|
Python
|
gpl-2.0
| 765
| 0.003922
|
# Copyright (C) 2012-2015, Alphan Ulusoy (alphan@bu.edu)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
krafczyk/spack
|
lib/spack/spack/test/cmd/test_compiler_cmd.py
|
Python
|
lgpl-2.1
| 3,159
| 0
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import pytest
import llnl.util.filesystem
import spack.cmd.compiler
import spack.compilers
import spack.spec
import spack.util.pattern
from spack.version import Version
test_version = '4.5-spacktest'
@pytest.fixture()
def mock_compiler_dir(tmpdir):
"""Return a directory containing a fake, but detectable compiler."""
tmpdir.ensure('bin', dir=True)
    bin_dir = tmpdir.join('bin')
gcc_path = bin_dir.join('gcc')
gxx_path = bin_dir.join('g++')
gfortran_path = bin_dir.join('gfortran')
gcc_path.write("""\
#!/bin/sh
for arg in "$@"; do
if [ "$arg" = -dumpversion ]; then
echo '%s'
fi
done
""" % test_version)
# Create some mock compilers in the temporary directory
llnl.util.filesystem.set_executable(str(gcc_path))
gcc_path.copy(gxx_path, mode=True)
gcc_path.copy(gfortran_path, mode=True)
return str(tmpdir)
@pytest.mark.usefixtures('config', 'mock_packages')
class TestCompilerCommand(object):
def test_compiler_remove(self):
args = spack.util.pattern.Bunch(
all=True, compiler_spec='gcc@4.5.0', add_paths=[], scope=None
)
spack.cmd.compiler.compiler_remove(args)
compilers = spack.compilers.all_compiler_specs()
assert spack.spec.CompilerSpec("gcc@4.5.0") not in compilers
def test_compiler_add(self, mock_compiler_dir):
# Compilers available by default.
old_compilers = set(spack.compilers.all_compiler_specs())
args = spack.util.pattern.Bunch(
all=None,
compiler_spec=None,
add_paths=[mock_compiler_dir],
scope=None
)
spack.cmd.compiler.compiler_find(args)
# Ensure new compiler is in there
new_compilers = set(spack.compilers.all_compiler_specs())
new_compiler = new_compilers - old_compilers
assert any(c.version == Version(test_version) for c in new_compiler)
|
0x00ach/zer0m0n
|
signatures/network_irc.py
|
Python
|
gpl-3.0
| 1,128
| 0.000887
|
# Copyright (C) 2013 Claudio "nex" Guarnieri (@botherder)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
|
by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from lib.cuckoo.common.abstracts import Signature
class NetworkIRC(Signature):
name = "network_irc"
description = "Connects to an IRC server, possibly part of a botnet"
severity = 3
categories = ["irc"]
authors = ["nex"]
minimum = "0.6"
def run(self):
if "irc" in self.results["network"]:
if len(self.results["network"]["irc"]) > 0:
return True
return False
|
joeedh/gameblendjs
|
blender/dm/node_types.py
|
Python
|
mit
| 4,508
| 0.00488
|
from . import utils
import bpy
from bpy.types import NodeTree, Node, NodeSocket
# Implementation of custom nodes from Python
# Derived from the NodeTree base type, similar to Menu, Operator, Panel, etc.
class GameGraph(NodeTree):
# Description string
'''A custom node tree type that will show up in the node editor header'''
# Optional identifier string. If not explicitly defined, the python class name is used.
    bl_idname = 'GameGraphType'
# Label for nice name display
bl_label = 'Game Graph'
# Icon identifier
bl_icon = 'NODETREE'
# Custom socket types
class ReadySocket(NodeSocket):
    def __new__(cls, *args, **kwargs):
        # __new__ must return the new instance; the original version set
        # self.link_limit here and implicitly returned None
        instance = super(ReadySocket, cls).__new__(cls)
        print("set link limit!")
        return instance
# Description string
'''Custom node socket type'''
# Optional identifier string. If not explicitly defined, the python class name is used.
bl_idname = 'ReadySocket'
# Label for nice name display
bl_label = 'Then Socket'
    link_limit = 500
# Optional function for drawing the socket input value
def draw(self, context, layout, node, text):
if self.is_output or self.is_linked:
layout.label(text)
else:
layout.label(text)
            # layout.prop(self, "myEnumProperty", text=text)
# Socket color
def draw_color(self, context, node):
return (1.0, 0.4, 0.216, 0.5)
# Mix-in class for all custom nodes in this tree type.
# Defines a poll function to enable instantiation.
class GameGraphNode:
@classmethod
def poll(cls, ntree):
print("type", ntree.bl_idname)
return ntree.bl_idname == 'GameGraphType'
# Derived from the Node base type.
class MyCustomNode(Node, GameGraphNode):
# === Basics ===
# Description string
'''A custom node'''
# Optional identifier string. If not explicitly defined, the python class name is used.
bl_idname = 'CustomNodeType'
# Label for nice name display
bl_label = 'Custom Node'
# Icon identifier
bl_icon = 'SOUND'
# === Custom Properties ===
# These work just like custom properties in ID data blocks
# Extensive information can be found under
# http://wiki.blender.org/index.php/Doc:2.6/Manual/Extensions/Python/Properties
myStringProperty = bpy.props.StringProperty()
myFloatProperty = bpy.props.FloatProperty(default=3.1415926)
# === Optional Functions ===
# Initialization function, called when a new node is created.
# This is the most common place to create the sockets for a node, as shown below.
# NOTE: this is not the same as the standard __init__ function in Python, which is
# a purely internal Python method and unknown to the node system!
def init(self, context):
self.inputs.new('CustomSocketType', "Hello")
self.inputs.new('NodeSocketFloat', "World")
self.inputs.new('NodeSocketVector', "!")
self.outputs.new('NodeSocketColor', "How")
self.outputs.new('NodeSocketColor', "are")
self.outputs.new('NodeSocketFloat', "you")
# Copy function to initialize a copied node from an existing one.
def copy(self, node):
print("Copying from node ", node)
# Free function to clean up on removal.
def free(self):
print("Removing node ", self, ", Goodbye!")
# Additional buttons displayed on the node.
def draw_buttons(self, context, layout):
layout.label("Node settings")
layout.prop(self, "myFloatProperty")
# Detail buttons in the sidebar.
# If this function is not defined, the draw_buttons function is used instead
def draw_buttons_ext(self, context, layout):
layout.prop(self, "myFloatProperty")
# myStringProperty button will only be visible in the sidebar
layout.prop(self, "myStringProperty")
# Optional: custom label
# Explicit user label overrides this, but here we can define a label dynamically
def draw_label(self):
return "I am a custom node"
def register():
bpy.utils.register_class(MyCustomTree)
bpy.utils.register_class(MyCustomSocket)
bpy.utils.register_class(MyCustomNode)
nodeitems_utils.register_node_categories("CUSTOM_NODES", node_categories)
def unregister():
nodeitems_utils.unregister_node_categories("CUSTOM_NODES")
bpy.utils.unregister_class(MyCustomTree)
bpy.utils.unregister_class(MyCustomSocket)
bpy.utils.unregister_class(MyCustomNode)
bpy_classes = utils.Registrar([
GameGraph,
ReadySocket
])
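# Note: the register()/unregister() helpers above come from Blender's node
# template and still reference names (MyCustomTree, MyCustomSocket,
# node_categories) that this module never defines, so they would fail if
# called. A minimal sketch -- an assumption about the intent, not upstream
# code -- registering the classes that are actually defined here:
def register_defined_classes():
    bpy.utils.register_class(GameGraph)
    bpy.utils.register_class(ReadySocket)
    bpy.utils.register_class(MyCustomNode)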
|
arthurdejong/python-stdnum
|
stdnum/lu/__init__.py
|
Python
|
lgpl-2.1
| 954
| 0
|
# __init__.py - collection of Luxembourgian numbers
# coding: utf-8
#
# Copyright (C) 2012 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of Luxembourgian numbers."""
# provide vat as an alias
from stdnum.lu import tva as vat # noqa: F401
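# A short usage sketch: like other python-stdnum modules, stdnum.lu.tva
# exposes compact(), validate() and is_valid(), so the alias above lets
# callers write vat.is_valid(...) as well as tva.is_valid(...). The number
# below is a placeholder, not a known-valid example.
from stdnum.lu import vat
print(vat.is_valid('12345613'))  # True or False depending on the check digits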
|
chipaca/snapcraft
|
snapcraft/internal/db/datastore.py
|
Python
|
gpl-3.0
| 4,171
| 0
|
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2020 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import pathlib
from typing import Any, Dict, List, Type
import tinydb
import yaml
import snapcraft
from . import errors, migration
logger = logging.getLogger(__name__)
class _YAMLStorage(tinydb.Storage):
"""Provide YAML-ba
|
cked storage for TinyDB."""
def __init__(self, path: str):
self.path = pathlib.Path(path)
        logger.debug(f"_YAMLStorage init {self.path}")
def read(self) -> Dict[str, Any]:
"""Read database from file."""
logger.debug(f"_YAMLStorage read: {self.path}")
try:
with self.path.open() as fd:
db_data = yaml.safe_load(fd)
except FileNotFoundError:
return dict()
if not isinstance(db_data, dict) or any(
[not isinstance(k, str) for k in db_data.keys()]
):
raise RuntimeError(
f"Invalid datastore contents for {str(self.path)}: {db_data!r}"
)
return db_data
def write(self, data) -> None:
"""Write database (data) to file."""
logger.debug(f"_YAMLStorage write: {self.path} data={data!r}")
self.path.write_text(yaml.dump(data))
def close(self):
"""Nothing to do since we do not keep <path> open."""
logger.debug(f"_YAMLStorage close: {self.path}")
class _YAMLStorageReadOnly(_YAMLStorage):
def write(self, data) -> None:
"""Ignore any writes in read-only mode."""
class Datastore:
"""Datastore class, providing context manager for TinyDB.
Manages migrations and storage requirements. If migrations
do not indicate support for current datastore version,
SnapcraftDatastoreVersionUnsupported will be raised. In that
event, some basic fallback mode can be utilized by re-opening
datastore in read-only mode."""
def __init__(
self,
*,
path: pathlib.Path,
migrations: List[Type[migration.Migration]],
read_only: bool = False,
snapcraft_version: str = snapcraft.__version__,
) -> None:
self.path = path
self._snapcraft_version = snapcraft_version
if read_only:
storage_class = _YAMLStorageReadOnly
else:
storage_class = _YAMLStorage
self.db = tinydb.TinyDB(str(path), storage=storage_class)
logger.debug(f"Datastore init: {self.path} read_only={read_only}")
# Force the datastore to be read by making a query, otherwise it is
# only read on the first access.
_ = self.db.tables()
# Nothing left to do if opening in read-only mode.
if read_only:
return
current_version: int = 0
supported_version: int = 0
for migration_class in migrations:
current_version = migration_class(
db=self.db, snapcraft_version=self._snapcraft_version
).apply()
supported_version = migration_class.SCHEMA_VERSION
if current_version > supported_version:
raise errors.SnapcraftDatastoreVersionUnsupported(
path=self.path,
current_version=current_version,
supported_version=supported_version,
)
def __enter__(self) -> tinydb.TinyDB:
return self.db
def __exit__(self, exc_value, exc_type, exc_traceback) -> None:
self.close()
def close(self) -> None:
"""Close database."""
self.db.close()
logger.debug(f"Datastore close: {self.path}")
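# A usage sketch reflecting the class docstring above: open the datastore as
# a context manager and fall back to read-only mode when the on-disk schema
# is newer than this code supports. The path and migrations list are whatever
# the caller would normally supply; nothing here goes beyond what this module
# itself defines.
def open_datastore(path: pathlib.Path, migrations):
    try:
        return Datastore(path=path, migrations=migrations)
    except errors.SnapcraftDatastoreVersionUnsupported:
        # Basic fallback mode described in the class docstring.
        return Datastore(path=path, migrations=migrations, read_only=True)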
|
mzdaniel/oh-mainline
|
vendor/packages/Django/tests/regressiontests/fixtures_regress/models.py
|
Python
|
agpl-3.0
| 5,411
| 0.000185
|
from django.db import models, DEFAULT_DB_ALIAS, connection
from django.contrib.auth.models import User
from django.conf import settings
class Animal(models.Model):
name = models.CharField(max_length=150)
latin_name = models.CharField(max_length=150)
count = models.IntegerField()
weight = models.FloatField()
# use a non-default name for the default manager
specimens = models.Manager()
def __unicode__(self):
return self.name
class Plant(models.Model):
name = models.CharField(max_length=150)
class Meta:
# For testing when upper case letter in app name; regression for #4057
db_table = "Fixtures_regress_plant"
class Stuff(models.Model):
name = models.CharField(max_length=20, null=True)
owner = models.ForeignKey(User, null=True)
def __unicode__(self):
return unicode(self.name) + u' is owned by ' + unicode(self.owner)
class Absolute(models.Model):
name = models.CharField(max_length=40)
load_count = 0
def __init__(self, *args, **kwargs):
super(Absolute, self).__init__(*args, **kwargs)
Absolute.load_count += 1
class Parent(models.Model):
name = models.CharField(max_length=10)
class Meta:
ordering = ('id',)
class Child(Parent):
data = models.CharField(max_length=10)
# Models to regression test #7572
class Channel(models.Model):
name = models.CharField(max_length=255)
class Article(models.Model):
title = models.CharField(max_length=255)
channels = models.ManyToManyField(Channel)
class Meta:
ordering = ('id',)
# Models to regression test #11428
class Widget(models.Model):
name = models.CharField(max_length=255)
class Meta:
ordering = ('name',)
    def __unicode__(self):
return self.name
class WidgetProxy(Widget):
class Meta:
proxy = True
# Check for forward references in FKs and M2Ms with natural keys
class TestManager(models.Manager):
def get_by_natural_key(self, key):
return self.get(name=key)
class Store(models.Model):
objects = TestManager()
name = models.CharField(max_length=255)
class Meta:
ordering = ('name',)
def __unicode__(self):
return self.name
def natural_key(self):
return (self.name,)
class Person(models.Model):
objects = TestManager()
name = models.CharField(max_length=255)
class Meta:
ordering = ('name',)
def __unicode__(self):
return self.name
# Person doesn't actually have a dependency on store, but we need to define
# one to test the behaviour of the dependency resolution algorithm.
def natural_key(self):
return (self.name,)
natural_key.dependencies = ['fixtures_regress.store']
class Book(models.Model):
name = models.CharField(max_length=255)
author = models.ForeignKey(Person)
stores = models.ManyToManyField(Store)
class Meta:
ordering = ('name',)
def __unicode__(self):
return u'%s by %s (available at %s)' % (
self.name,
self.author.name,
', '.join(s.name for s in self.stores.all())
)
class NKManager(models.Manager):
def get_by_natural_key(self, data):
return self.get(data=data)
class NKChild(Parent):
data = models.CharField(max_length=10, unique=True)
objects = NKManager()
def natural_key(self):
return self.data
def __unicode__(self):
return u'NKChild %s:%s' % (self.name, self.data)
class RefToNKChild(models.Model):
text = models.CharField(max_length=10)
nk_fk = models.ForeignKey(NKChild, related_name='ref_fks')
nk_m2m = models.ManyToManyField(NKChild, related_name='ref_m2ms')
def __unicode__(self):
return u'%s: Reference to %s [%s]' % (
self.text,
self.nk_fk,
', '.join(str(o) for o in self.nk_m2m.all())
)
# Some models with pathological circular dependencies
class Circle1(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return self.name
natural_key.dependencies = ['fixtures_regress.circle2']
class Circle2(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return self.name
natural_key.dependencies = ['fixtures_regress.circle1']
class Circle3(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return self.name
natural_key.dependencies = ['fixtures_regress.circle3']
class Circle4(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return self.name
natural_key.dependencies = ['fixtures_regress.circle5']
class Circle5(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return self.name
natural_key.dependencies = ['fixtures_regress.circle6']
class Circle6(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return self.name
natural_key.dependencies = ['fixtures_regress.circle4']
class ExternalDependency(models.Model):
name = models.CharField(max_length=255)
def natural_key(self):
return self.name
natural_key.dependencies = ['fixtures_regress.book']
# Model for regression test of #11101
class Thingy(models.Model):
name = models.CharField(max_length=255)
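# A brief sketch of what the natural_key hooks above enable in this era of
# Django (note the Python 2 idioms): serializing with natural keys emits
# ('name',) tuples instead of primary keys, and get_by_natural_key()
# resolves them again at load time. Illustrative only, not part of the
# test models.
from django.core import serializers

def dump_people_with_natural_keys():
    return serializers.serialize('json', Person.objects.all(),
                                 use_natural_keys=True)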
|
sheanmassey/django-audit-trails
|
django_audit/apps.py
|
Python
|
unlicense
| 89
| 0
|
from django.apps import AppConfig
class AuditorConfig(AppConfig):
name = 'auditor'
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractFeelinthedarkWordpressCom.py
|
Python
|
bsd-3-clause
| 566
| 0.033569
|
def extractFeelinthedarkWordpressCom(item):
'''
Parser for 'feelinthedark.wordpress.com'
'''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
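# A sketch of the item shape this parser expects -- a dict carrying 'title'
# and 'tags' keys, as read above. The values are invented for illustration;
# with them, the 'PRC' tagmap entry would match and the helper functions
# (defined elsewhere in this project) would build the release message.
sample_item = {
    'title': 'PRC Chapter 12',
    'tags': ['PRC'],
}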
|
jomyhuang/sdwle
|
SDWLE/cards_copy/minions/warlock.py
|
Python
|
mit
| 8,778
| 0.005354
|
from SDWLE.cards.base import MinionCard
from SDWLE.cards.heroes import Jaraxxus
from SDWLE.cards.weapons.warlock import BloodFury
from SDWLE.constants import CHARACTER_CLASS, CARD_RARITY, MINION_TYPE
from SDWLE.game_objects import Minion
from SDWLE.tags.action import Summon, Kill, Damage, Discard, DestroyManaCrystal, Give, Equip, \
Remove, Heal, ReplaceHeroWithMinion
from SDWLE.tags.base import Effect, Aura, Deathrattle, Battlecry, Buff, ActionTag
from SDWLE.tags.card_source import HandSource
from SDWLE.tags.condition import IsType, MinionCountIs, Not, OwnersTurn, IsHero, And, Adjacent, IsMinion
from SDWLE.tags.event import TurnEnded, CharacterDamaged, DidDamage, Damaged
from SDWLE.tags.selector import MinionSelector, PlayerSelector, \
SelfSelector, BothPlayer, HeroSelector, CharacterSelector, RandomPicker, Attribute, EventValue, CardSelector, \
FriendlyPlayer
from SDWLE.tags.status import ChangeHealth, ManaChange, ChangeAttack, Immune
class FlameImp(MinionCard):
def __init__(self):
super().__init__("Flame Imp", 1, CHARACTER_CLASS.WARLOCK, CARD_RARITY.COMMON, minion_type=MINION_TYPE.DEMON,
battlecry=Battlecry(Damage(3), HeroSelector()))
def create_minion(self, player):
return Minion(3, 2)
class PitLord(MinionCard):
def __init__(self):
super().__init__("Pit Lord", 4, CHARACTER_CLASS.WARLOCK, CARD_RARITY.EPIC, minion_type=MINION_TYPE.DEMON,
battlecry=Battlecry(Damage(5), HeroSelector()))
def create_minion(self, player):
return Minion(5, 6)
class Voidwalker(MinionCard):
def __init__(self):
super().__init__("Voidwalker", 1, CHARACTER_CLASS.WARLOCK, CARD_RARITY.FREE, minion_type=MINION_TYPE.DEMON)
def create_minion(self, player):
return Minion(1, 3, taunt=True)
class DreadInfernal(MinionCard):
def __init__(self):
super().__init__("Dread Infernal", 6, CHARACTER_CLASS.WARLOCK, CARD_RARITY.COMMON,
minion_type=MINION_TYPE.DEMON,
battlecry=Battlecry(Damage(1), CharacterSelector(players=BothPlayer())))
def create_minion(self, player):
return Minion(6, 6)
class Felguard(MinionCard):
def __init__(self):
super().__init__("Felguard", 3, CHARACTER_CLASS.WARLOCK, CARD_RARITY.RARE, minion_type=MINION_TYPE.DEMON,
battlecry=Battlecry(DestroyManaCrystal(), PlayerSelector()))
def create_minion(self, player):
return Minion(3, 5, taunt=True)
class Doomguard(MinionCard):
def __init__(self):
super().__init__("Doomguard", 5, CHARACTER_CLASS.WARLOCK, CARD_RARITY.RARE, minion_type=MINION_TYPE.DEMON,
battlecry=Battlecry(Discard(amount=2), PlayerSelector()))
def create_minion(self, player):
return Minion(5, 7, charge=True)
class Succubus(MinionCard):
def __init__(self):
super().__init__("Succubus", 2, CHARACTER_CLASS.WARLOCK, CARD_RARITY.FREE, minion_type=MINION_TYPE.DEMON,
battlecry=Battlecry(Discard(), PlayerSelector()))
def create_minion(self, player):
return Minion(4, 3)
class SummoningPortal(MinionCard):
def __init__(self):
super().__init__("Summoning Portal", 4, CHARACTER_CLASS.WARLOCK, CARD_RARITY.COMMON)
def create_minion(self, player):
return Minion(0, 4, auras=[Aura(ManaChange(-2, 1, minimum=1), CardSelector(condition=IsMinion()))])
class BloodImp(MinionCard):
def __init__(self):
super().__init__("Blood Imp", 1, CHARACTER_CLASS.WARLOCK, CARD_RARITY.COMMON, minion_type=MINION_TYPE.DEMON)
def create_minion(self, player):
return Minion(0, 1, stealth=True,
effects=[Effect(TurnEnded(), ActionTag(Give(ChangeHealth(1)),
MinionSelector(picker=RandomPicker())))])
class LordJaraxxus(MinionCard):
def __init__(self):
super().__init__("Lord Jaraxxus", 9, CHARACTER_CLASS.WARLOCK, CARD_RARITY.LEGENDARY,
minion_type=MINION_TYPE.DEMON,
battlecry=(Battlecry(ReplaceHeroWithMinion(Jaraxxus()), HeroSelector()),
Battlecry(Remove(), SelfSelector()),
Battlecry(Equip(BloodFury()), PlayerSelector())))
def create_minion(self, player):
return Minion(3, 15)
class Infernal(MinionCard):
def __init__(self):
super().__init__("Infernal", 6, CHARACTER_CLASS.WARLOCK, CARD_RARITY.COMMON, False,
minion_type=MINION_TYPE.DEMON)
def create_minion(self, player):
return Minion(6, 6)
class VoidTerror(MinionCard):
def __init__(self):
super().__init__("Void Terror", 3, CHARACTER_CLASS.WARLOCK, CARD_RARITY.RARE, minion_type=MINION_TYPE.DEMON,
battlecry=(Battlecry(
Give([Buff(ChangeHealth(Attribute("health", MinionSelector(Adjacent())))),
Buff(ChangeAttack(Attribute("attack", MinionSelector(Adjacent()))))]),
SelfSelector()), Battlecry(Kill(), MinionSelector(Adjacent()))))
def create_minion(self, player):
return Minion(3, 3)
class Voidcaller(MinionCard):
def __init__(self):
super().__init__("Voidcal
|
ler", 4, CHARACTER_CLASS.WARLOCK, CARD_RARITY.COMMON, minion_type=MINION_TYPE.DEMON)
def create_minion(self, player):
return Minion(3, 4, deathrattle=Deathrattle(Summon(HandSource(FriendlyPlayer(), [IsType(MINION_TYPE.DEMON)])),
PlayerSelector()))
class AnimaGolem(MinionCard):
def __init__(self):
super().__init__("Anima Golem", 6, CHARACTER_CLASS.WARLOCK, CARD_RARITY.EPIC, minion_type=MINION_TYPE.MECH)
def create_minion(self, player):
return Minion(9, 9, effects=[Effect(TurnEnded(MinionCountIs(1), BothPlayer()),
ActionTag(Kill(), SelfSelector()))])
class Imp(MinionCard):
def __init__(self):
super().__init__("Imp", 1, CHARACTER_CLASS.WARLOCK, CARD_RARITY.COMMON, False, minion_type=MINION_TYPE.DEMON,
ref_name="Imp (warlock)")
def create_minion(self, player):
return Minion(1, 1)
class WorthlessImp(MinionCard):
def __init__(self):
super().__init__("Worthless Imp", 1, CHARACTER_CLASS.WARLOCK, CARD_RARITY.COMMON, False, MINION_TYPE.DEMON)
def create_minion(self, p):
return Minion(1, 1)
class FelCannon(MinionCard):
def __init__(self):
super().__init__("Fel Cannon", 4, CHARACTER_CLASS.WARLOCK, CARD_RARITY.RARE, minion_type=MINION_TYPE.MECH)
def create_minion(self, player):
return Minion(3, 5, effects=[Effect(TurnEnded(), ActionTag(Damage(2),
MinionSelector(Not(IsType(MINION_TYPE.MECH, True)),
BothPlayer(), RandomPicker())))])
class MalGanis(MinionCard):
def __init__(self):
super().__init__("Mal'Ganis", 9, CHARACTER_CLASS.WARLOCK, CARD_RARITY.LEGENDARY, minion_type=MINION_TYPE.DEMON)
def create_minion(self, player):
return Minion(9, 7, auras=[Aura(ChangeHealth(2), MinionSelector(IsType(MINION_TYPE.DEMON))),
Aura(ChangeAttack(2), MinionSelector(IsType(MINION_TYPE.DEMON))),
Aura(Immune(), HeroSelector())])
class FloatingWatcher(MinionCard):
def __init__(self):
super().__init__("Floating Watcher", 5, CHARACTER_CLASS.WARLOCK, CARD_RARITY.COMMON,
minion_type=MINION_TYPE.DEMON)
def create_minion(self, player):
return Minion(4, 4, effects=[Effect(CharacterDamaged(And(IsHero(), OwnersTurn())),
ActionTag(Give([Buff(ChangeAttack(2)), Buff(ChangeHealth(2))]),
SelfSelector()))])
class MistressOfPain(MinionCard):
def __init__(self):
su
|
openstack/octavia
|
octavia/tests/unit/controller/worker/v1/flows/test_amphora_flows.py
|
Python
|
apache-2.0
| 20,294
| 0
|
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from unittest import mock
from oslo_config import cfg
from oslo_config import fixture as oslo_fixture
from oslo_utils import uuidutils
from taskflow.patterns import linear_flow as flow
from octavia.common import constants
from octavia.common import data_models
from octavia.controller.worker.v1.flows import amphora_flows
import octavia.tests.unit.base as base
AUTH_VERSION = '2'
# NOTE: We patch the get_network_driver for all the calls so we don't
# inadvertently make real calls.
@mock.patch('octavia.common.utils.get_network_driver')
class TestAmphoraFlows(base.TestCase):
def setUp(self):
super().setUp()
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
self.conf.config(
group="controller_worker",
amphora_driver='amphora_haproxy_rest_driver')
self.conf.config(group="nova", enable_anti_affinity=False)
self.AmpFlow = amphora_flows.AmphoraFlows()
self.amp1 = data_models.Amphora(id=1)
        self.amp2 = data_models.Amphora(id=2)
self.amp3 = data_models.Amphora(id=3, status=constants.DELETED)
self.amp4 = data_models.Amphora(id=uuidutils.generate_uuid())
self.lb = data_models.LoadBalancer(
id=4, amphorae=[self.amp1, self.amp2, self.amp3])
def test_get_create_amphora_flow(self, mock_get_net_driver):
amp_flow = self.AmpFlow.get_create_amphora_flow()
self.assertIsInstance(amp_flow, flow.Flow)
self.assertIn(constants.AMPHORA, amp_flow.provides)
self.assertIn(constants.AMPHORA_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_ID, amp_flow.provides)
self.assertIn(constants.SERVER_PEM, amp_flow.provides)
self.assertEqual(5, len(amp_flow.provides))
self.assertEqual(4, len(amp_flow.requires))
def test_get_create_amphora_flow_cert(self, mock_get_net_driver):
self.AmpFlow = amphora_flows.AmphoraFlows()
amp_flow = self.AmpFlow.get_create_amphora_flow()
self.assertIsInstance(amp_flow, flow.Flow)
self.assertIn(constants.AMPHORA, amp_flow.provides)
self.assertIn(constants.AMPHORA_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_ID, amp_flow.provides)
self.assertEqual(5, len(amp_flow.provides))
self.assertEqual(4, len(amp_flow.requires))
def test_get_create_amphora_for_lb_flow(self, mock_get_net_driver):
amp_flow = self.AmpFlow._get_create_amp_for_lb_subflow(
'SOMEPREFIX', constants.ROLE_STANDALONE)
self.assertIsInstance(amp_flow, flow.Flow)
self.assertIn(constants.LOADBALANCER_ID, amp_flow.requires)
self.assertIn(constants.AMPHORA, amp_flow.provides)
self.assertIn(constants.AMPHORA_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_OBJ, amp_flow.provides)
self.assertIn(constants.SERVER_PEM, amp_flow.provides)
self.assertEqual(5, len(amp_flow.provides))
self.assertEqual(5, len(amp_flow.requires))
def test_get_cert_create_amphora_for_lb_flow(self, mock_get_net_driver):
self.AmpFlow = amphora_flows.AmphoraFlows()
amp_flow = self.AmpFlow._get_create_amp_for_lb_subflow(
'SOMEPREFIX', constants.ROLE_STANDALONE)
self.assertIsInstance(amp_flow, flow.Flow)
self.assertIn(constants.LOADBALANCER_ID, amp_flow.requires)
self.assertIn(constants.AMPHORA, amp_flow.provides)
self.assertIn(constants.AMPHORA_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_OBJ, amp_flow.provides)
self.assertIn(constants.SERVER_PEM, amp_flow.provides)
self.assertEqual(5, len(amp_flow.provides))
self.assertEqual(5, len(amp_flow.requires))
def test_get_cert_master_create_amphora_for_lb_flow(
self, mock_get_net_driver):
self.AmpFlow = amphora_flows.AmphoraFlows()
amp_flow = self.AmpFlow._get_create_amp_for_lb_subflow(
'SOMEPREFIX', constants.ROLE_MASTER)
self.assertIsInstance(amp_flow, flow.Flow)
self.assertIn(constants.LOADBALANCER_ID, amp_flow.requires)
self.assertIn(constants.AMPHORA, amp_flow.provides)
self.assertIn(constants.AMPHORA_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_OBJ, amp_flow.provides)
self.assertIn(constants.SERVER_PEM, amp_flow.provides)
self.assertEqual(5, len(amp_flow.provides))
self.assertEqual(5, len(amp_flow.requires))
def test_get_cert_master_rest_anti_affinity_create_amphora_for_lb_flow(
self, mock_get_net_driver):
self.conf.config(group="nova", enable_anti_affinity=True)
self.AmpFlow = amphora_flows.AmphoraFlows()
amp_flow = self.AmpFlow._get_create_amp_for_lb_subflow(
'SOMEPREFIX', constants.ROLE_MASTER)
self.assertIsInstance(amp_flow, flow.Flow)
self.assertIn(constants.AMPHORA_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_OBJ, amp_flow.provides)
self.assertIn(constants.SERVER_PEM, amp_flow.provides)
self.assertEqual(5, len(amp_flow.provides))
self.assertEqual(5, len(amp_flow.requires))
self.conf.config(group="nova", enable_anti_affinity=False)
def test_get_cert_backup_create_amphora_for_lb_flow(
self, mock_get_net_driver):
self.AmpFlow = amphora_flows.AmphoraFlows()
amp_flow = self.AmpFlow._get_create_amp_for_lb_subflow(
'SOMEPREFIX', constants.ROLE_BACKUP)
self.assertIsInstance(amp_flow, flow.Flow)
self.assertIn(constants.LOADBALANCER_ID, amp_flow.requires)
self.assertIn(constants.AMPHORA, amp_flow.provides)
self.assertIn(constants.AMPHORA_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_OBJ, amp_flow.provides)
self.assertIn(constants.SERVER_PEM, amp_flow.provides)
self.assertEqual(5, len(amp_flow.provides))
self.assertEqual(5, len(amp_flow.requires))
def test_get_cert_bogus_create_amphora_for_lb_flow(
self, mock_get_net_driver):
self.AmpFlow = amphora_flows.AmphoraFlows()
amp_flow = self.AmpFlow._get_create_amp_for_lb_subflow(
'SOMEPREFIX', 'BOGUS_ROLE')
self.assertIsInstance(amp_flow, flow.Flow)
self.assertIn(constants.LOADBALANCER_ID, amp_flow.requires)
self.assertIn(constants.AMPHORA, amp_flow.provides)
self.assertIn(constants.AMPHORA_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_ID, amp_flow.provides)
self.assertIn(constants.COMPUTE_OBJ, amp_flow.provides)
self.assertIn(constants.SERVER_PEM, amp_flow.provides)
self.assertEqual(5, len(amp_flow.provides))
self.assertEqual(5, len(amp_flow.requires))
def test_get_cert_backup_rest_anti_affinity_create_amphora_for_lb_flow(
self, mock_get_net_driver):
self.conf.config(group="nova", enable_anti_affinity=True)
self.AmpFlow = amphora_flows.AmphoraFlows()
amp_flow = self.AmpFlow._get_create_amp_for_lb_subflow(
'SOMEPREFIX', constants.ROLE_BACKUP)
self.assertIsInstance(amp_flow, flow.Flow)
self.assertIn(constants.AMPHORA_ID, amp_flow.provides)
self.assertIn(consta
|
cbears/octoform
|
ocr/findBarcode.py
|
Python
|
gpl-3.0
| 15,096
| 0.036235
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Code128 Barcode Detection & Analysis
(c) Charles Shiflett 2011
Finds Code128 barcodes in documents scanned in Grayscale at 300 dpi.
Usage:
Each page of the PDF must be converted to a grayscale PNG image, and should
be ordered as follows:
1001/1001-001.png
1001/1001-002.png
1001/1001-003.png
.
.
.
1099/1099-001.png
1099/1099-002.png
This program will find & enhance barcodes in those pages, and save its
progress to a file of the same name, except with an extension of barcode.png.
"""
DEBUG=False
from PIL import Image
from PIL import ImageOps
import PIL.ImageDraw as draw
from glob import glob
import os
import re
import pdb
import sys
import numpy
import scipy.signal as ss
import math
import scipy.ndimage.interpolation
import scipy.weave
import logging
log = logging.getLogger('findBarcodes')
if DEBUG:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
import filter
unAliasFilter = numpy.array( [ [ 0, 1, 0], [1, 4, 1], [ 0, 1, 0] ], numpy.int )
if DEBUG:
def debugger(type, value, tb):
pdb.pm()
sys.excepthook = debugger
sys.setrecursionlimit(32768)
filWidth= 102 # / 25
filHeight= 110 # / 30
def calcBarLength(length):
if length < 6:
return 1
elif length < 10:
return 2
elif length < 13:
return 3
else:
return 4
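# A quick worked example of the thresholds above: pixel run lengths are
# bucketed into Code 128 bar widths of one to four modules (values chosen
# to hit each branch).
assert calcBarLength(4) == 1    # < 6
assert calcBarLength(8) == 2    # < 10
assert calcBarLength(11) == 3   # < 13
assert calcBarLength(20) == 4   # 13 or more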
def convolve( im, filt, reshape ):
height, stride = im.shape
fh,fw = filt.shape
im = im.reshape( height * stride )
filt = filt.reshape( fh*fw )
newIm = numpy.zeros ( (height * stride), numpy.int )
code = """
int sum=0, pos;
int ys=0, fys=0;
for (int y=0; y < (height-(fh/2)); y++) {
for (int x=0; x < (stride-(fw/2)); x++) {
fys=sum=0;
pos=ys+x;
int th = ((height-y) < fh ) ? height-y : fh;
int tw = ((stride-x) < fw ) ? stride-x : fw;
for (int fy=0; fy < th; fy++) {
for (int fx=0; fx < tw; fx++) {
sum+=im[pos+fx]*filt[fys+fx];
}
fys+=fw;
pos+=stride;
}
newIm[ys+x] = sum;
}
ys+=stride;
}
"""
scipy.weave.inline(code,['height','stride','fh','fw','im','filt','newIm'])
if reshape:
return newIm.reshape(height,stride )
else:
return newIm
class barImage (object):
def __init__ ( self, im ):
self.im = numpy.array ( im.getdata() )
self.stride, self.height = im.size
self.im = self.im.reshape(self.height,self.stride)
# Note: im is indexed as [y][x] not...
def printImg( self, l=[], offset=0):
l = [ (i[1], i[2]) for i in l ]
print l
for y in range( 0, self.height-1):
output = []
for x in range( 5+offset, self.stride-1):
if x > 115+offset:
continue
i = self.im[y][x]
if (x,y) in l:
output.append("B")
elif i < 20:
output.append(".")
elif i < 64:
output.append("+")
elif i < 128:
output.append("*")
elif i < 196:
output.append("x")
else:
output.append("X")
print "%03d" % y, "".join(output)
print " 56789 123456789 123456789 123456789 123456789 123456789 123456789 123456789 123456789 123456789"
def applyFilter ( self, f, reshape=True ):
value = 0
filt = getattr( self, f, False)
if type(filt) == type(False):
filt = numpy.array( getattr(filter, f, False), dtype=numpy.int )
      setattr( self, f, filt )
if type(filt) == type(False):
raise ValueError("Error: filter %s was not found in filter.py" % f)
return convolve( self.im, filt, reshape )
def findBarcode( self ):
results = self.applyFilter("scaledFilter", reshape=False)
    list = [ (x[1], int(x[0] % self.stride), int(x[0] / self.stride)) for x in enumerate(results) if x[1] > 1000 ]
list.sort(reverse=True)
return list[0:20]
def unAlias(s):
"Remove dithering. "
#s.im= ss.convolve2d( s.im, unAliasFilter, mode="same" )
s.im=convolve( s.im, unAliasFilter, reshape=True )
s.im=numpy.piecewise(s.im, [ s.im > 1000 ], [255, 0])
return
""" Convolve operator does the following:
for y in range(1, s.height-1):
for x in range(1, s.stride-1):
if s.im[y][x-1] == s.im[y][x+1] == s.im[y+1][x] == s.im[y-1][x]:
s.im[y][x] = s.im[y][x+1]
return
"""
def bw( self, whitePoint=64):
self.im=numpy.piecewise(self.im, [self.im < whitePoint, self.im >= whitePoint], [255, 0])
#self.im=self.vApplyBW( self.im, whitePoint )
def virtualLine(self, x1, y1, x2, y2, ox=0, oy=0):
totalLength = math.sqrt(math.pow(x2-x1,2) + math.pow(y2-y1,2))
if totalLength < 300:
return []
if x1 < x2:
sx,sy,ex,ey=(x1,y1,x2,y2)
else:
sx,sy,ex,ey=(x2,y2,x1,y1)
xgain = float(ex-sx)/totalLength
ygain = float(ey-sy)/totalLength
if ex - sx < 150:
# Skip vertical codes, save them for the next run.
return []
if sx < 1 or (ex+ox) >= self.stride or sx > self.stride:
return []
if not (1< sy <self.height) or not (1< sy+ygain*totalLength <self.height):
return []
#slope = float(h2-h1)/(w2-w1)
newLine = numpy.zeros( shape=(totalLength), dtype=int )
code = """
float x=sx, y=sy;
for ( int i=1; i < int(totalLength); i++ ) {
int top = stride*int(y) + int(x),
bot = stride*int(y+1) + int(x);
float xr = x-int(x),
xl = 1-xr,
yt = y-int(y),
yb = 1-yt;
newLine[i]= im[top]*xr*yt +
im[top-1]*xl*yt +
im[bot]*xr*yb +
im[bot-1]*xl*yb;
x+=xgain;
y+=ygain;
}
"""
stride, im = self.stride, self.im
scipy.weave.inline(code,['im', 'stride', \
'newLine', 'totalLength', 'ygain', 'xgain', 'sx', 'sy'])
if DEBUG:
log.debug( "".join(
[ chr( 0x2e + int(x/6.07142857142857142857) ) for x in list(newLine) ] ) )
return newLine
def checkLineCharacteristics( self, line ):
whiteCount= blackCount= 0
if 300 < len(line) < 475:
for i in line:
if int(i) < 128:
whiteCount+=1
else:
blackCount+=1
if whiteCount >= 18:
return False
if blackCount > 1:
whiteCount=0
blackCount=0
else:
return False
return True
def getValidPoint ( self, point, possible ):
for endpoint in possible:
#print point, endpoint
found = True
for i in range ( 8, 50, 10 ):
if not found:
continue
#print point, endpoint, i
line = self.virtualLine(point[0]+2, point[1]+i, endpoint[0], endpoint[1]+i)
if not self.checkLineCharacteristics(line):
found = False
#print "False"
#print "True"
if found:
return endpoint
return False
def getValidPair ( self, l, r ):
"""Returns the first pair that is a barcode and is located at the top
edges of a barcode. """
if not l or not r:
return False
l.sort( key=lambda x: x[1] )
r.sort( key=lambda x: x[1] )
if l[0][1] > r[0][1]:
r.sort( key=lambda x: x[0], reverse=True )
res = self.getValidPoint( l[0], r )
if not res:
return self.getValidPair( l[1:], r)
return l[0], res
else:
l.sort( key=lambda x: x[0], reverse=False )
res = self.getValidPoint( r[0], l )
if not res:
return self.getValidPair( l, r[1:] )
return res, r[0]
def removeNeighbors ( self, l, rev ):
l.sort( key= lambda x: x[0], reverse=rev )
restart = False
sizeOfArray = len(l)-1
for i in range (1, sizeOfArray):
for j in range(i, sizeOfArray):
if abs( l[i-1][1] - l[j][1] ) < 5:
restart = True
l[j] = False
if restart==True:
return self.removeNeighbors ([ x for x in l if x], rev)
return l
def getCode ( self, barcode ):
"""
Return a single code from a code 128 barcode.
"""
code=[]
start = False
trend = 1
for pos, c in enumerate(barcode):
if (p
|
Rio517/pledgeservice
|
lib/stripe/http_client.py
|
Python
|
apache-2.0
| 10,566
| 0
|
import os
import sys
import textwrap
import warnings
from stripe import error, util
# - Requests is the preferred HTTP library
# - Google App Engine has urlfetch
# - Use Pycurl if it's there (at least it verifies SSL certs)
# - Fall back to urllib2 with a warning if needed
try:
import urllib2
except ImportError:
pass
try:
import pycurl
except ImportError:
pycurl = None
try:
import requests
except ImportError:
requests = None
else:
try:
# Require version 0.8.8, but don't want to depend on distutils
version = requests.__version__
major, minor, patch = [int(i) for i in version.split('.')]
except Exception:
# Probably some new-fangled version, so it should support verify
pass
else:
if (major, minor, patch) < (0, 8, 8):
sys.stderr.write(
'Warning: the Stripe library requires that your Python '
'"requests" library be newer than version 0.8.8, but your '
'"requests" library is version %s. Stripe will fall back to '
'an alternate HTTP library so everything should work. We '
'recommend upgrading your "requests" library. If you have any '
'questions, please contact support@stripe.com. (HINT: running '
'"pip install -U requests" should upgrade your requests '
'library to the latest version.)' % (version,))
requests = None
try:
from google.appengine.api import urlfetch
except ImportError:
urlfetch = None
def new_default_http_client(*args, **kwargs):
if urlfetch:
impl = UrlFetchClient
elif requests:
impl = RequestsClient
elif pycurl:
impl = PycurlClient
else:
impl = Urllib2Client
warnings.warn(
"Warning: the Stripe library is falling back to urllib2/urllib "
"because neither requests nor pycurl are installed. "
"urllib2's SSL implementation doesn't verify server "
"certificates. For improved security, we suggest installing "
"requests.")
return impl(*args, **kwargs)
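# A usage sketch of the factory above: whichever transport is selected, every
# client exposes the same request() signature returning (content, status).
# The URL and header values below are placeholders, not real credentials.
def _example_request():
    client = new_default_http_client(verify_ssl_certs=True)
    return client.request('get', 'https://api.stripe.com/v1/charges',
                          headers={'Authorization': 'Bearer sk_test_placeholder'})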
class HTTPClient(object):
def __init__(self, verify_ssl_certs=True):
self._verify_ssl_certs = verify_ssl_certs
def request(self, method, url, headers, post_data=None):
raise NotImplementedError(
'HTTPClient subclasses must implement `request`')
class RequestsClient(HTTPClient):
name = 'requests'
def request(self, method, url, headers, post_data=None):
kwargs = {}
if self._verify_ssl_certs:
kwargs['verify'] = os.path.join(
os.path.dirname(__file__), 'data/ca-certificates.crt')
else:
kwargs['verify'] = False
try:
try:
result = requests.request(method,
url,
headers=headers,
data=post_data,
timeout=80,
**kwargs)
except TypeError, e:
raise TypeError(
'Warning: It looks like your installed version of the '
'"requests" library is not compatible with Stripe\'s '
'usage thereof. (HINT: The most likely cause is that '
'your "requests" library is out of date. You can fix '
'that by running "pip install -U requests".) The '
'underlying error was: %s' % (e,))
# This causes the content to actually be read, which could cause
# e.g. a socket timeout. TODO: The other fetch methods probably
            # are susceptible to the same and should be updated.
content = result.content
status_code = result.status_code
except Exception, e:
# Would catch just requests.exceptions.RequestException, but can
# also raise ValueError, RuntimeError, etc.
self._handle_request_error(e)
return content, status_code
def _handle_request_error(self, e):
if isinstance(e, requests.exceptions.RequestException):
msg = ("Unexpected error communicating with Stripe. "
"If this problem persists, let us know at "
"support@stripe.com.")
err = "%s: %s" % (type(e).__name__, str(e))
else:
msg = ("Unexpected error communicating with Stripe. "
"It looks like there's probably a configuration "
"issue locally. If this problem persists, let us "
"know at support@stripe.com.")
err = "A %s was raised" % (type(e).__name__,)
if str(e):
err += " with error message %s" % (str(e),)
else:
err += " with no error message"
msg = textwrap.fill(msg) + "\n\n(Network error: %s)" % (err,)
raise error.APIConnectionError(msg)
class UrlFetchClient(HTTPClient):
name = 'urlfetch'
def request(self, method, url, headers, post_data=None):
try:
result = urlfetch.fetch(
url=url,
method=method,
headers=headers,
# Google App Engine doesn't let us specify our own cert bundle.
# However, that's ok because the CA bundle they use recognizes
# api.stripe.com.
validate_certificate=self._verify_ssl_certs,
# GAE requests time out after 60 seconds, so make sure we leave
                # some time for the application to handle a slow Stripe response.
deadline=55,
payload=post_data
)
except urlfetch.Error, e:
self._handle_request_error(e, url)
return result.content, result.status_code
def _handle_request_error(self, e, url):
if isinstance(e, urlfetch.InvalidURLError):
msg = ("The Stripe library attempted to fetch an "
"invalid URL (%r). This is likely due to a bug "
"in the Stripe Python bindings. Please let us know "
"at support@stripe.com." % (url,))
elif isinstance(e, urlfetch.DownloadError):
msg = "There was a problem retrieving data from Stripe."
elif isinstance(e, urlfetch.ResponseTooLargeError):
            msg = ("There was a problem receiving all of your data from "
"Stripe. This is likely due to a bug in Stripe. "
"Please let us know at support@stripe.com.")
else:
msg = ("Unexpected error communicating with Stripe. If this "
"problem persists, let us know at support@stripe.com.")
msg = textwrap.fill(msg) + "\n\n(Network error: " + str(e) + ")"
raise error.APIConnectionError(msg)
class PycurlClient(HTTPClient):
name = 'pycurl'
def request(self, method, url, headers, post_data=None):
s = util.StringIO.StringIO()
curl = pycurl.Curl()
if method == 'get':
curl.setopt(pycurl.HTTPGET, 1)
elif method == 'post':
curl.setopt(pycurl.POST, 1)
curl.setopt(pycurl.POSTFIELDS, post_data)
else:
curl.setopt(pycurl.CUSTOMREQUEST, method.upper())
# pycurl doesn't like unicode URLs
curl.setopt(pycurl.URL, util.utf8(url))
curl.setopt(pycurl.WRITEFUNCTION, s.write)
curl.setopt(pycurl.NOSIGNAL, 1)
curl.setopt(pycurl.CONNECTTIMEOUT, 30)
curl.setopt(pycurl.TIMEOUT, 80)
curl.setopt(pycurl.HTTPHEADER, ['%s: %s' % (k, v)
for k, v in headers.iteritems()])
if self._verify_ssl_certs:
curl.setopt(pycurl.CAINFO, os.path.join(
os.path.dirname(__file__), 'data/ca-certificates.crt'))
else:
curl.setopt(pycurl.SSL_VERIFYHOST, False)
try:
curl.perform()
except pycurl.error, e:
self._handle_request_error(e)
|
setphen/Donsol
|
tests/test_card.py
|
Python
|
mit
| 860
| 0.005814
|
from game.models import (Card,
HEART,
SPADE,
CLUB,
DIAMOND)
def test_card_creation_without_name():
c = Card('heart', 5)
    assert c.name == 'heart'
assert c.suit == 'heart'
assert c.value == 5
def test_card_creation_with_name():
c = Card('spade', 5, name='jabberwock')
assert c.name == 'jabberwock'
assert c.suit == 'spade'
assert c.value == 5
def test_card_spade_is_monster():
c = Card(SPADE, 5)
assert c.is_monster() == True
def test_card_club_is_monster():
c = Card(CLUB, 5)
assert c.is_monster() == True
def test_card_heart_is_not_monster():
c = Card(HEART, 5)
assert c.is_monster() == False
def test_card_diamond_is_not_monster():
c = Card(DIAMOND, 5)
assert c.is_monster() == False
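# The four is_monster checks above share one pattern; a parametrized sketch
# covering the same cases (assumes pytest, which test functions in this
# style already imply):
import pytest

@pytest.mark.parametrize('suit, expected', [
    (SPADE, True),
    (CLUB, True),
    (HEART, False),
    (DIAMOND, False),
])
def test_card_is_monster_parametrized(suit, expected):
    assert Card(suit, 5).is_monster() == expected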
|
omartrinidad/schiffsdiebe
|
schiffsdiebe/datasets/__init__.py
|
Python
|
mit
| 2,327
| 0.008595
|
import numpy as np
import os
__location__ = os.path.realpath(
os.path.join(os.getcwd(),
os.path.dirname(__file__))
) + "/"
class Dataset():
"""
    I want to create an object similar to the one existing in R: the data frame.
"""
def __init__(self, training_file, test_file, header=False):
"""
        header: whether the first row contains the attribute names; defaults
        to False, in which case the headers are autogenerated.
"""
    # training set
self.training = np.genfromtxt(
training_file, dtype=float,
delimiter=',', skip_header = 1
)[:,0:-1]
    # training labels set
self.training_labels = np.genfromtxt(
training_file, dtype=float,
delimiter=',', skip_header = 1
)[:,-1:].ravel()
# test set
self.test = np.genfromtxt(
test_file, dtype=float,
            delimiter=',', skip_header = 1
)[:,0:-1]
# test labels set
self.test_labels = np.genfromtxt(
test_file, dtype=float,
delimiter=',', skip_header = 1
)[:,-1:].ravel()
# attributes
if header:
self.attributes = np.genfromtxt(
test_file, dtype=str,
delimiter=',', max_rows = 1
)
else:
ncols = range(self.training_labels.shape[0])
self.attributes = ["column {}".format(x) for x in ncols]
    def dummy(self):
        """
        ToDo: generate dummy data (a small subset to practice with)
"""
pass
class Examples(object):
"""
"""
def __init__(self):
"""
"""
pass
def spam(self):
"""
Spam dataset
"""
training_ds = __location__ + "data/spam_training.csv"
test_ds = __location__ + "data/spam_test.csv"
return Dataset(training_ds, test_ds)
def gene_expression(self):
"""
Gene expression dataset
"""
training_ds = __location__ + "data/gene_expression_training.csv"
test_ds = __location__ + "data/gene_expression_test.csv"
return Dataset(training_ds, test_ds, header=True)
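# A usage sketch for the loaders above (assumes the CSV files shipped under
# data/ are present):
if __name__ == '__main__':
    spam = Examples().spam()
    print(spam.training.shape)
    print(spam.training_labels.shape)
    print(spam.attributes[:5])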
|
Galarius/gann-square
|
gann.py
|
Python
|
mit
| 3,346
| 0.003586
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys, getopt
from datetime import datetime
import math
from gann import *
def print_usage():
print """
classic Gann square: gann.py -o <output file name> -s <square size>
Gann square based on date: gann.py -o <output file name> -a <base date> -b <final date> -m <path to list of dates to mark>
Gann sub square based on date: gann.py -o <output file name> -a <base date> -b <final date> -m <path to list of dates to mark> -r "<left>;<bottom>;<right>;<up>"
input date format: "dd/MM/yyyy"
"""
def main(argv):
cell_size = 30
date_format = "%d/%m/%Y"
# --------------------------------------
output_file_name = ''
marks_file_name = ''
square_size = -1
date_a = None
date_b = None
left, bot, right, up = 0, 0, 0, 0
# --------------------------------------
try:
opts, args = getopt.getopt(argv, "ho:s:a:b:m:r:", ["ofile=", "size=", "a_date=", "b_date=", "mfile=", "rect="])
except getopt.GetoptError:
print_usage()
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print_usage()
sys.exit()
elif opt in ("-o", "--ofile"):
output_file_name = arg
elif opt in ("-s", "--size"):
square_size = int(arg)
elif opt in ("-a", "--a_date"):
            date_a = datetime.strptime(arg, date_format)
elif opt in ("-b", "--b_date"):
date_b = datetime.strptime(arg, date_format)
elif opt in ("-m", "--mfile"):
            marks_file_name = arg
elif opt in ("-r", "--rect"):
rect = arg.split(';')
try:
left, bot, right, up = int(rect[0]), int(rect[1]), int(rect[2]), int(rect[3])
except ValueError as e:
print 'Failed to parse range!'
if output_file_name == '':
print_usage()
sys.exit(2)
if square_size != -1:
# classic Gann square
# Info
print "Cells: %i" % (square_size * square_size)
print "Square size: %i" % square_size
print "Cell size: %i" % cell_size
print "Building..."
stream = open(output_file_name, 'w')
create_gann_square_classic(square_size, cell_size, stream)
stream.close()
elif date_a and date_b:
# date based Gann square
delta = date_b - date_a
square_size = int(math.ceil(math.sqrt(delta.days)))
if square_size % 2 == 0:
square_size += 1
# Info
print "Cells: %i" % (square_size * square_size)
print "Square size: %i" % square_size
print "Cell size: %i" % cell_size
# Process
print "Loading data..."
marks = load_marks(marks_file_name)
print "Building..."
stream = open(output_file_name, 'w')
if (left != 0 or bot != 0 or right != 0 or up != 0) and left < right and bot < up:
create_gann_sub_square_dates((left, bot, right+1, up+1), cell_size, date_a, marks, stream)
else:
create_gann_square_dates(square_size, cell_size, date_a, marks, stream)
stream.close()
else:
print_usage()
sys.exit(2)
print "Done. See {0}".format(output_file_name)
if __name__ == "__main__":
main(sys.argv[1:])
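# A worked example of the sizing rule used above: the square needs one cell
# per day between the two dates, so its side is ceil(sqrt(days)), bumped to
# the next odd number (presumably so the base date lands in a centre cell).
# The dates here are arbitrary.
def example_square_size():
    a = datetime.strptime('01/01/2000', '%d/%m/%Y')
    b = datetime.strptime('01/01/2001', '%d/%m/%Y')
    size = int(math.ceil(math.sqrt((b - a).days)))  # 366 days -> 20
    if size % 2 == 0:
        size += 1                                   # -> 21
    return size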
|
goodwinnk/intellij-community
|
python/testData/pyi/inspections/hiddenPyiImports/HiddenPyiImports.py
|
Python
|
apache-2.0
| 347
| 0.048991
|
from m1 import <error descr="Cannot find reference 'foo' in 'm1.pyi'">foo</error>
from m1 import <error descr="Cannot find reference 'bar' in 'm1.pyi'">bar</error>
from m1 import bar_imported
from m1 import <error descr="Cannot find reference 'm2' in 'm1.pyi'">m2</error>
from m1 import m2_imported
print(foo, bar, bar_imported, m2, m2_imported)
|
ofayans/freeipa
|
ipaplatform/setup.py
|
Python
|
gpl-3.0
| 1,334
| 0
|
#!/usr/bin/python2
# Copyright (C) 2014 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""FreeIPA platform
FreeIPA is a server for identity, policy, and audit.
"""
from os.path import abspath, dirname
import sys
if __name__ == '__main__':
# include ../ for ipasetup.py
sys.path.append(dirname(dirname(abspath(__file__))))
from ipasetup import ipasetup # noqa: E402
ipasetup(
name="ipaplatform",
doc=__doc__,
package_dir={'ipaplatform': ''},
packages=[
"ipaplatform",
"ipaplatform.base",
"ipaplatform.fedora",
"ipaplatform.redhat",
"ipaplatform.rhel"
],
)
|
undertherain/benchmarker
|
benchmarker/__profile__.py
|
Python
|
mpl-2.0
| 301
| 0
|
"""
This is a helper module to profile the whole package.
In Python 3.7, profiling modules from the command line will be supported
and this module will no longer be needed.
"""
import cProfile
from .__main__ import main
if __name__ == "__main__":
cProfile.run("main()", filename=".nemchmarker.cprofile")
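# A companion sketch for reading the dump written above (the filename,
# including its spelling, is copied verbatim from the cProfile.run call):
import pstats

def print_top(n=10):
    stats = pstats.Stats(".nemchmarker.cprofile")
    stats.sort_stats("cumulative").print_stats(n)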
|
wfn/stem
|
test/unit/version.py
|
Python
|
lgpl-3.0
| 9,206
| 0.002716
|
"""
Unit tests for the stem.version.Version parsing and class.
"""
import unittest
import stem.util.system
import stem.version
from stem.version import Version
from test import mocking
TOR_VERSION_OUTPUT = """Mar 22 23:09:37.088 [notice] Tor v0.2.2.35 \
(git-73ff13ab3cc9570d). This is experimental software. Do not rely on it for \
strong anonymity. (Running on Linux i686)
Tor version 0.2.2.35 (git-73ff13ab3cc9570d)."""
class TestVersion(unittest.TestCase):
def tearDown(self):
mocking.revert_mocking()
def test_get_system_tor_version(self):
# Clear the version cache both before and after the test. Without this
# prior results short circuit the system call, and future calls will
# provide this mocked value.
stem.version.VERSION_CACHE = {}
def _mock_call(command):
if command == "tor --version":
return TOR_VERSION_OUTPUT.splitlines()
else:
raise ValueError("stem.util.system.call received an unexpected command: %s" % command)
mocking.mock(stem.util.system.call, _mock_call)
version = stem.version.get_system_tor_version()
self.assert_versions_match(version, 0, 2, 2, 35, None, "git-73ff13ab3cc9570d")
self.assertEqual("73ff13ab3cc9570d", version.git_commit)
stem.version.VERSION_CACHE = {}
def test_parsing(self):
"""
Tests parsing by the Version class constructor.
"""
    # valid versions with various numbers of components in the version
version = Version("0.1.2.3-tag")
self.assert_versions_match(version, 0, 1, 2, 3, "tag", None)
version = Version("0.1.2.3")
self.assert_versions_match(version, 0, 1, 2, 3, None, None)
version = Version("0.1.2-tag")
self.assert_versions_match(version, 0, 1, 2, None, "tag", None)
version = Version("0.1.2")
self.assert_versions_match(version, 0, 1, 2, None, None, None)
# checks an empty tag
version = Version("0.1.2.3-")
self.assert_versions_match(version, 0, 1, 2, 3, "", None)
version = Version("0.1.2-")
self.assert_versions_match(version, 0, 1, 2, None, "", None)
    # check with extra information
version = Version("0.1.2.3-tag (git-73ff13ab3cc9570d)")
self.assert_versions_match(version, 0, 1, 2, 3, "tag", "git-73ff13ab3cc9570d")
self.assertEqual("73ff13ab3cc9570d", version.git_commit)
version = Version("0.1.2.3-tag ()")
self.assert_versions_match(version, 0, 1, 2, 3, "tag", "")
version = Version("0.1.2 (git-73ff13ab3cc9570d)")
self.assert_versions_match(version, 0, 1, 2, None, None, "git-73ff13ab3cc9570d")
# checks invalid version strings
self.assertRaises(ValueError, stem.version.Version, "")
self.assertRaises(ValueError, stem.version.Version, "1.2.3.4nodash")
self.assertRaises(ValueError, stem.version.Version, "1.2.3.a")
self.assertRaises(ValueError, stem.version.Version, "1.2.a.4")
self.assertRaises(ValueError, stem.version.Version, "1x2x3x4")
self.assertRaises(ValueError, stem.version.Version, "12.3")
self.assertRaises(ValueError, stem.version.Version, "1.-2.3")
def test_comparison(self):
"""
    Tests comparison between Version instances.
"""
# check for basic incrementing in each portion
self.assert_version_is_greater("1.1.2.3-tag", "0.1.2.3-tag")
self.assert_version_is_greater("0.2.2.3-tag", "0.1.2.3-tag")
self.assert_version_is_greater("0.1.3.3-tag", "0.1.2.3-tag")
self.assert_version_is_greater("0.1.2.4-tag", "0.1.2.3-tag")
self.assert_version_is_greater("0.1.2.3-ugg", "0.1.2.3-tag")
self.assert_version_is_equal("0.1.2.3-tag", "0.1.2.3-tag")
# check with common tags
self.assert_version_is_greater("0.1.2.3-beta", "0.1.2.3-alpha")
self.assert_version_is_greater("0.1.2.3-rc", "0.1.2.3-beta")
# checks that a missing patch level equals zero
self.assert_version_is_equal("0.1.2", "0.1.2.0")
self.assert_version_is_equal("0.1.2-tag", "0.1.2.0-tag")
# checks for missing patch or status
self.assert_version_is_greater("0.1.2.3-tag", "0.1.2.3")
self.assert_version_is_greater("0.1.2.3-tag", "0.1.2-tag")
self.assert_version_is_greater("0.1.2.3-tag", "0.1.2")
self.assert_version_is_equal("0.1.2.3", "0.1.2.3")
self.assert_version_is_equal("0.1.2", "0.1.2")
def test_nonversion_comparison(self):
"""
Checks that we can be compared with other types.
    In Python 3 only equality comparisons work; greater-than and less-than
comparisons result in a TypeError.
"""
test_version = Version("0.1.2.3")
self.assertNotEqual(test_version, None)
self.assertNotEqual(test_version, 5)
def test_string(self):
"""
Tests the Version -> string conversion.
"""
# checks conversion with various numbers of arguments
self.assert_string_matches("0.1.2.3-tag")
self.assert_string_matches("0.1.2.3")
self.assert_string_matches("0.1.2")
def test_requirements_greater_than(self):
"""
Checks a VersionRequirements with a single greater_than rule.
"""
requirements = stem.version._VersionRequirements()
requirements.greater_than(Version("0.2.2.36"))
self.assertTrue(Version("0.2.2.36") >= requirements)
self.assertTrue(Version("0.2.2.37") >= requirements)
self.assertTrue(Version("0.2.3.36") >= requirements)
self.assertFalse(Version("0.2.2.35") >= requirements)
self.assertFalse(Version("0.2.1.38") >= requirements)
requirements = stem.version._VersionRequirements()
requirements.greater_than(Version("0.2.2.36"), False)
self.assertFalse(Version("0.2.2.35") >= requirements)
self.assertFalse(Version("0.2.2.36") >= requirements)
self.assertTrue(Version("0.2.2.37") >= requirements)
def test_requirements_less_than(self):
"""
Checks a VersionRequirements with a single less_than rule.
"""
requirements = stem.version._VersionRequirements()
requirements.less_than(Version("0.2.2.36"))
self.assertTrue(Version("0.2.2.36") >= requirements)
self.assertTrue(Version("0.2.2.35") >= requirements)
self.assertTrue(Version("0.2.1.38") >= requirements)
self.assertFalse(Version("0.2.2.37") >= requirements)
self.assertFalse(Version("0.2.3.36") >= requirements)
requirements = stem.version._VersionRequirements()
    requirements.less_than(Version("0.2.2.36"), False)
self.assertFalse(Version("0.2.2.37") >= requirements)
self.assertFalse(Version("0.2.2.36") >= requirements)
self.assertTrue(Version("0.2.2.35") >= requirements)
def test_requirements_in_range(self):
"""
    Checks a VersionRequirements with a single in_range rule.
"""
requirements = stem.version._VersionRequirements()
requirements.in_range(Version("0.2.2.36"), Version("0.2.2.38"))
self.assertFalse(Version("0.2.2.35") >= requirements)
self.assertTrue(Version("0.2.2.36") >= requirements)
self.assertTrue(Version("0.2.2.37") >= requirements)
self.assertFalse(Version("0.2.2.38") >= requirements)
# rule for 'anything in the 0.2.2.x series'
requirements = stem.version._VersionRequirements()
requirements.in_range(Version("0.2.2.0"), Version("0.2.3.0"))
for index in xrange(0, 100):
self.assertTrue(Version("0.2.2.%i" % index) >= requirements)
def test_requirements_multiple_rules(self):
"""
Checks a VersionRequirements is the logical 'or' when it has multiple rules.
"""
# rule to say 'anything but the 0.2.2.x series'
requirements = stem.version._VersionRequirements()
requirements.greater_than(Version("0.2.3.0"))
requirements.less_than(Version("0.2.2.0"), False)
self.assertTrue(Version("0.2.3.0") >= requirements)
self.assertFalse(Version("0.2.2.0") >= requirements)
for index in xrange(0, 100):
self.assertFalse(Version("0.2.2.%i" % index) >= requirements)
def assert_versions_match(self, version, major, minor, micro, patch, status, extra):
"""
Asserts that the values for a types.Version instance match the given
values.
"""
self.assertEqual(major, version.major)
self.assertEqual(minor, version.minor)
self.assertEqual(micro, version.micro)
self.assertEqual(patch, version.patch)
self.
|
andrewisakov/taximaster_x
|
snmp/settings.py
|
Python
|
unlicense
| 379
| 0
|
#!/usr/bin/python3
import os
# import logging
import logger as logger_
WS_SERVER = 'ws://127.0.0.1:4055/ws'
WS_TIMEOUT = 10
APP_DIR = os.path.dirname(__file__)
# SQL_DIR = os.path.join(APP_DIR, 'sql')
logger = logger_.rotating_log(os.path.join(
    APP_DIR, 'kts_snmp.log'), 'kts_snmp_log')
SNMP_IP = '192.168.222.179'
SNMP_PORT = 11162
ROUTER_URL = 'http://127.0.0.1/snmp'
|
VA3SFA/rpi_hw_demo
|
pcd8544/IP.py
|
Python
|
gpl-2.0
| 2,974
| 0.003026
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2013-2015 Pervasive Displays, Inc.
# Copyright 2015, Syed Faisal Akber
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language
# governing permissions and limitations under the License.
import sys
import os
from datetime import datetime
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
import socket
import fcntl
import struct
import Adafruit_Nokia_LCD as LCD
import Adafruit_GPIO.SPI as SPI
def get_ip_address(ifname):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
s.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
# Raspberry Pi hardware SPI config:
DC = 23
RST = 24
SPI_PORT = 0
SPI_DEVICE = 0
WHITE = 1
BLACK = 0
# fonts are in different places on Raspbian/Angstrom so search
possible_fonts = [
'/usr/share/fonts/truetype/ttf-dejavu/DejaVuSansMono-Bold.ttf', # R.Pi
    '/usr/share/fonts/truetype/freefont/FreeMono.ttf',             # R.Pi
'/usr/share/fonts/truetype/LiberationMono-Bold.ttf', # B.B
    '/usr/share/fonts/truetype/DejaVuSansMono-Bold.ttf',           # B.B
    '/usr/share/fonts/TTF/FreeMonoBold.ttf',                       # Arch
'/usr/share/fonts/TTF/DejaVuSans-Bold.ttf' # Arch
]
FONT_FILE = ''
for f in possible_fonts:
if os.path.exists(f):
FONT_FILE = f
break
if '' == FONT_FILE:
    raise RuntimeError('no font file found')
FONT_SIZE = 10
def main():
"""main program - draw and display a test image"""
now = datetime.today()
# Hardware SPI usage:
disp = LCD.PCD8544(DC, RST, spi=SPI.SpiDev(SPI_PORT, SPI_DEVICE, max_speed_hz=4000000))
# Initialize library.
disp.begin(contrast=60)
# Clear display.
disp.clear()
disp.display()
# initially set all white background
image = Image.new('1', (84,48), WHITE)
# prepare for drawing
draw = ImageDraw.Draw(image)
width, height = image.size
# font = ImageFont.truetype(FONT_FILE, FONT_SIZE)
font = ImageFont.load_default()
ethaddr = get_ip_address('eth0')
draw.rectangle((0, 0, width, height), fill=WHITE, outline=WHITE)
draw.text((0, 0), '{c:s}'.format(c=ethaddr), fill=BLACK, font=font)
draw.text((5, 10), '{h:02d}:{m:02d}:{s:02d}'.format(h=now.hour, m=now.minute, s=now.second), fill=BLACK, font=font)
# Display image.
disp.image(image)
disp.display()
# main
if "__main__" == __name__:
main()
|
zyphrus/fetch-django
|
fetcher/views.py
|
Python
|
mit
| 1,510
| 0.009272
|
from django.shortcuts import render
from django.conf import settings
from django.http import HttpResponseRedirect, HttpResponse
from django.urls import reverse
from django.views.decorators.csrf import csrf_exempt
from fetcher import api
import requests
import json
def index(request):
return render(request, 'fetcher/index.html',
{'result': api.status()})
def status(request):
if request.method == 'GET' and request.META.get('CONTENT_TYPE') == 'application/json':
return HttpResponse(json.dumps(api.status()), content_type='application/json')
else:
return HttpResponseRedirect(reverse('fetcher:index'))
def log(request):
    if request.method == 'GET' and request.META.get('CONTENT_TYPE') == 'application/json':
return HttpResponse(json.dumps(api.log()), content_type='application/json')
else:
return HttpResponseRedirect(reverse('fetcher:index'))
@csrf_exempt
def force_fetch(request):
if request.method == 'POST' and request.META.get('CONTENT_TYPE') == 'application/json':
return HttpResponse(json.dumps(api.force_fetch()), content_type='application/json')
else:
return HttpResponseRedirect(reverse('fetcher:index'))
@csrf_exempt
def force_sort(request):
if request.method == 'POST' and request.META.get('CONTENT_TYPE') == 'application/json':
return HttpResponse(json.dumps(api.force_sort()), content_type='application/json')
else:
return HttpResponseRedirect(reverse('fetcher:index'))
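Each view above repeats the same method/content-type gate. A sketch of how that could be factored into a decorator (illustrative only; json_api is not part of the original app, and the wrapped function is assumed to return a JSON-serializable payload):
from functools import wraps

def json_api(method):
    # Hypothetical helper: apply the shared gate, JSON-encode the payload,
    # and fall back to the index redirect otherwise.
    def decorator(func):
        @wraps(func)
        def wrapper(request, *args, **kwargs):
            if (request.method == method and
                    request.META.get('CONTENT_TYPE') == 'application/json'):
                payload = func(request, *args, **kwargs)
                return HttpResponse(json.dumps(payload),
                                    content_type='application/json')
            return HttpResponseRedirect(reverse('fetcher:index'))
        return wrapper
    return decorator
With it, status could shrink to a @json_api('GET')-decorated function returning api.status(); the POST views would keep their @csrf_exempt on top.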
|
mupi/tecsaladeaula
|
core/templatetags/is_assistant_or_coordinator.py
|
Python
|
agpl-3.0
| 179
| 0
|
from django import template
register = template.Library()
@register.filter()
def is_assistant_or_coordinator(user, course):
    return course.is_assistant_or_coordinator(user)
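Typical use from a template (illustrative; the template is not part of this file, and the load name follows the templatetag module's filename):
{% load is_assistant_or_coordinator %}
{% if request.user|is_assistant_or_coordinator:course %}
    ...
{% endif %}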
|
IPyandy/pnil
|
eapi.py
|
Python
|
bsd-3-clause
| 12,608
| 0.000635
|
#!/usr/bin/env python
# Python Network Interface Library
#
# Author: Yandy Ramirez
# Twitter: @IPyandy (https://twitter.com/IPyandy)
# Site: http://ipyandy.net
# Code Verion: 0.0.1
#
# ----------------------------------------------------------------
'''
Library to make Arista Networks eAPI calls; further documentation to come
'''
# ----------------------------------------------------------------
from __future__ import print_function
import sys
if sys.version_info > (2, 7, 2) and sys.version_info < (3, 0):
# ----------------------------------------------------------------
# IMPORTS IF TEST PASS
# ----------------------------------------------------------------
from jsonrpclib import Server
from FindRoutes import StandardRoutes
import re
# ----------------------------------------------------------------
# ARISTA EAPI CLASS
# ----------------------------------------------------------------
class AristaEapi(object):
"""docstring for arista"""
# ----------------------------------------------------------------
# PRIVATE MEMBERS
# ----------------------------------------------------------------
def __init__(self):
super(AristaEapi, self).__init__()
self._host = None
self._username = None
self._password = None
self._switch = None
self._name = None
self._version_info = None
self._connected = False
def __connectToSwitch(self):
try:
return Server('https://{0}:{1}@{2}/command-api\
'.format(self._username, self._password, self._host))
except:
print('There was an error trying to connect')
# run CMD
def _runCMD(self, cli):
if self._connected:
return self._switch.runCmds(1, cli)
else:
self.connect()
return self._switch.runCmds(1, cli)
# run non JSON CMD
def _runCMDText(self, cli):
if self._connected:
return self._switch.runCmds(1, cli, 'text')
else:
self.connect()
return self._switch.runCmds(1, cli, 'text')
@classmethod
def createDataDict(cls, key, value):
return {key: value}
# ----------------------------------------------------------------
# INITIALIZE AND SETUP EAPI
# ----------------------------------------------------------------
# creates connection to switch
def connect(self):
try:
self._switch = self.__connectToSwitch()
self._connected = True
return self._switch
        except Exception as err:
            print('Could not connect, error: {0}'.format(err))
def setLogin(self, username, password):
self._username = username
self._password = password
def initialize(self, host, name):
self._host = host
self._name = name
# ----------------------------------------------------------------
# FIND BASIC INFORMATION, INTERFACES, HOSTNAME, MAC...
# ----------------------------------------------------------------
def getHost(self):
return self.createDataDict('host', self._host)
def _getVersionList(self):
        # gets version and converts to a list of values
# this allows comparisons between software versions
# by calling int(on an index)
        # checks if self._version_info is not empty
if not self._version_info:
self.getVersionInfo()
version_list = self._version_info['version'].split('.')
return version_list
# getVersionInfo created to streamline the calling of "show version"
    # a lot of code repeated it; this way, only one
# call is needed
def getVersionInfo(self):
''' returns a 'show version' output as a dictionary '''
        # normally returns a list containing a dictionary.
version_info = self._runCMD(['show version'])
self._version_info = version_info[0]
# returns only dict of relevant information
def getName(self):
return self.createDataDict('name', self._name)
def getVersion(self):
''' Returns the device running code version as a string '''
        # checks if self._version_info is not empty
if not self._version_info:
self.getVersionInfo()
return self.createDataDict('version', self._version_info['version'])
# function returns a dictionary of the interfaces and their status
def getInterfacesStatus(self, mOptions=None):
response = self._runCMD(['show interfaces status'])[
0]['interfaceStatuses']
        if mOptions:
            for keys in response:
                for key in response[keys]:
                    print(response[keys][key])
return response
return response
def getInterfaces(self):
interfaces = self.getInterfacesStatus().keys()
        return self.createDataDict('interfaces', interfaces)
def getPlatform(self):
if not self._version_info:
self.getVersionInfo()
return self.createDataDict('platform', self._version_info['modelName'])
def getSerialNumber(self):
if not self._version_info:
self.getVersionInfo()
serial = self._version_info['serialNumber']
serial_number = self.createDataDict('serial_number', serial)
if serial_number['serial_number'] == '':
non_serial = {'serial_number': 'not_found'}
return non_serial
else:
return serial_number
def getUptime(self):
output = self._runCMDText(['show uptime'])[0]['output']
# gets uptime if output is in H:M or (|) in "number Mins|Days"
up_split = re.split(r"up\s+?", output)
        uptime = re.match(
            r'(^(\d{1,3}:\d{1,3})|^(\d{1,3})\s\w+)', up_split[1]).group(0)
        return self.createDataDict('uptime', uptime)
def getCPU(self):
output = self._runCMDText(['show processes top once'])[0]['output']
cpu = re.search(r"\d+\.\d*%(?=us)", output).group(0)
return self.createDataDict('cpu_usage', cpu)
def getHostname(self):
        ''' Returns the device's non-FQDN hostname '''
        version_int = self._getVersionList()
        if int(version_int[0]) >= 4 and int(version_int[1]) >= 13:
output = self._runCMD(['show hostname'])[0]['hostname']
return self.createDataDict('hostname', output)
else:
output = self._runCMDText(
['show lldp local-info'])[0]['output']
            host = re.search(
                r"(?<=System Name: \").*?(?=\.)", output).group(0)
            return self.createDataDict('hostname', host)
def getFQDN(self):
'''
Returns the device's FQDN hostname.domain.suffix
        has not been added to main.py yet, waiting to make sure
        there's support across platforms
'''
version_int = self._getVersionList()
        if int(version_int[0]) >= 4 and int(version_int[1]) >= 13:
output = self._runCMD(["show hostname"])[0]['fqdn']
return self.createDataDict('fqdn', output)
else:
output = self._runCMDText(
['show lldp local-info'])[0]['output']
            fqdn = re.search(
                r"(?<=System Name: \").*?(?=\")", output).group(0)
            return self.createDataDict('fqdn', fqdn)
def getAAA(self):
aaa = self._runCMD(['enable', 'show aaa'])[1]['users']
return aaa
def getFreeMem(self):
        # checks if self._version_info is not empty
if not self._version_info:
self.getVersionInfo()
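The record is cut off inside getFreeMem, but the API shown above is enough for a minimal driving sketch (the host, name, and credentials below are placeholders, not values from the source):
if __name__ == '__main__':
    eapi = AristaEapi()
    eapi.initialize('10.0.0.1', 'lab-switch')  # placeholder host/name
    eapi.setLogin('admin', 'password')         # placeholder credentials
    eapi.connect()
    print(eapi.getVersion())
    print(eapi.getHostname())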
|
asherwunk/objconfig
|
tests/writer/test_json_writer.py
|
Python
|
mit
| 1,712
| 0.014019
|
"""
Test objconfig.writer.Json
"""
from objconfig.writer import Json as JsonWriter
from objconfig.reader import Json as JsonReader
from objconfig.writer import AbstractWriter
from objconfig.writer import WriterInterface
import os
def test_emptyinstantiation_json():
writer = JsonWriter()
assert isinstance(writer, AbstractWriter), "Json not child of AbstractWriter"
assert isinstance(writer, WriterInterface), "Json not child of WriterInterface"
def test_render_json():
writer = JsonWriter()
conf = {
"webhost" : "www.example.com",
"database" : {
"adapter" : "pdo_mysql",
"params" : {
"host" : "db.example.com",
"username" : "dbuser",
"password" : "secret",
"dbname" : "dbproduction"
}
}
}
jsoncontents = writer.toString(conf)
reader = JsonReader()
compconf = reader.fromString(jsoncontents)
assert conf == compconf, "Json improperly rendered"
def test_render_tofile_json():
writer = JsonWriter()
conf = {
"webhost" : "www.example.com",
"database" : {
"adapter" : "pdo_mysql",
"params" : {
"host" : "db.example.com",
"username" : "dbuser",
"password" : "secret",
"dbname" : "dbproduction"
}
}
}
    writer.toFile(os.path.join(os.path.dirname(os.path.realpath(__file__)), "test.json"), conf)
reader = JsonReader()
compconf = reader.fromFile(os.path.join(os.path.dirname(os.path.realpath(__file__)), "test.json"))
    os.remove(os.path.join(os.path.dirname(os.path.realpath(__file__)), "test.json"))
assert conf == compconf, "Json improperly rendered in file"
|
DailyActie/Surrogate-Model
|
01-codes/scipy-master/scipy/integrate/tests/test_quadpack.py
|
Python
|
mit
| 13,179
| 0.000531
|
from __future__ import division, print_function, absolute_import
import math
import sys
import numpy as np
from numpy import sqrt, cos, sin, arctan, exp, log, pi, Inf
from numpy.testing import (assert_, TestCase, run_module_suite, dec,
assert_allclose, assert_array_less, assert_almost_equal)
from scipy._lib.six import xrange
from scipy.integrate import quad, dblquad, tplquad, nquad
try:
import ctypes
import ctypes.util
_ctypes_missing = False
except ImportError:
_ctypes_missing = True
try:
import scipy.integrate._test_multivariate as clib_test
_ctypes_multivariate_fail = False
except:
_ctypes_multivariate_fail = True
def assert_quad(value_and_err, tabled_value, errTol=1.5e-8):
value, err = value_and_err
assert_allclose(value, tabled_value, atol=err, rtol=0)
if errTol is not None:
assert_array_less(err, errTol)
class TestCtypesQuad(TestCase):
@dec.skipif(_ctypes_missing, msg="Ctypes library could not be found")
def setUp(self):
if sys.platform == 'win32':
if sys.version_info < (3, 5):
file = ctypes.util.find_msvcrt()
else:
file = 'api-ms-win-crt-math-l1-1-0.dll'
elif sys.platform == 'darwin':
file = 'libm.dylib'
else:
file = 'libm.so'
try:
self.lib = ctypes.CDLL(file)
except OSError:
# This test doesn't work on some Linux platforms (Fedora for
# example) that put an ld script in libm.so - see gh-5370
self.skipTest("Ctypes can't import libm.so")
restype = ctypes.c_double
argtypes = (ctypes.c_double,)
for name in ['sin', 'cos', 'tan']:
func = getattr(self.lib, name)
func.restype = restype
func.argtypes = argtypes
@dec.skipif(_ctypes_missing, msg="Ctypes library could not be found")
def test_typical(self):
assert_quad(quad(self.lib.sin, 0, 5), quad(math.sin, 0, 5)[0])
assert_quad(quad(self.lib.cos, 0, 5), quad(math.cos, 0, 5)[0])
assert_quad(quad(self.lib.tan, 0, 1), quad(math.tan, 0, 1)[0])
# @dec.skipif(_ctypes_missing, msg="Ctypes library could not be found")
# This doesn't seem to always work. Need a better way to figure out
# whether the fast path is called.
@dec.knownfailureif(True, msg="Unreliable test, see ticket 1684.")
def test_improvement(self):
import time
start = time.time()
for i in xrange(100):
quad(self.lib.sin, 0, 100)
fast = time.time() - start
start = time.time()
for i in xrange(100):
quad(math.sin, 0, 100)
slow = time.time() - start
assert_(fast < 0.5 * slow, (fast, slow))
class TestMultivariateCtypesQuad(TestCase):
@dec.skipif(_ctypes_missing or _ctypes_multivariate_fail,
msg="Compiled test functions not loaded")
def setUp(self):
self.lib = ctypes.CDLL(clib_test.__file__)
restype = ctypes.c_double
argtypes = (ctypes.c_int, ctypes.c_double)
for name in ['_multivariate_typical', '_multivariate_indefinite',
'_multivariate_sin']:
func = getattr(self.lib, name)
func.restype = restype
func.argtypes = argtypes
@dec.skipif(_ctypes_missing or _ctypes_multivariate_fail,
msg="Compiled test functions not loaded")
def test_typical(self):
# 1) Typical function with two extra arguments:
assert_quad(quad(self.lib._multivariate_typical, 0, pi, (2, 1.8)),
0.30614353532540296487)
@dec.skipif(_ctypes_missing or _ctypes_multivariate_fail,
msg="Compiled test functions not loaded")
def test_indefinite(self):
# 2) Infinite integration limits --- Euler's constant
assert_quad(quad(self.lib._multivariate_indefinite, 0, Inf),
0.577215664901532860606512)
@dec.skipif(_ctypes_missing or _ctypes_multivariate_fail,
msg="Compiled test functions not loaded")
def test_threadsafety(self):
# Ensure multivariate ctypes are threadsafe
def threadsafety(y):
return y + quad(self.lib._multivariate_sin, 0, 1)[0]
assert_quad(quad(threadsafety, 0, 1), 0.9596976941318602)
@dec.skipif(_ctypes_missing or _ctypes_multivariate_fail,
msg="Compiled test functions not loaded")
def test_improvement(self):
def myfunc(x): # Euler's constant integrand
return -exp(-x) * log(x)
import time
start = time.time()
for i in xrange(20):
quad(self.lib._multivariate_indefinite, 0, 100)
fast = time.time() - start
start = time.time()
for i in xrange(20):
quad(myfunc, 0, 100)
slow = time.time() - start
# 2+ times faster speeds generated by nontrivial ctypes
# function (single variable)
assert_(fast < 0.5 * slow, (fast, slow))
class TestQuad(TestCase):
def test_typical(self):
# 1) Typical function with two extra arguments:
def myfunc(x, n, z): # Bessel function integrand
return cos(n * x - z * sin(x)) / pi
assert_quad(quad(myfunc, 0, pi, (2, 1.8)), 0.30614353532540296487)
def test_indefinite(self):
# 2) Infinite integration limits --- Euler's constant
def myfunc(x): # Euler's constant integrand
return -exp(-x) * log(x)
assert_quad(quad(myfunc, 0, Inf), 0.577215664901532860606512)
def test_singular(self):
# 3) Singular points in region of integration.
def myfunc(x):
if 0 < x < 2.5:
return sin(x)
elif 2.5 <= x <= 5.0:
return exp(-x)
else:
return 0.0
assert_quad(quad(myfunc, 0, 10, points=[2.5, 5.0]),
1 - cos(2.5) + exp(-2.5) - exp(-5.0))
def test_sine_weighted_finite(self):
# 4) Sine weighted integral (finite limits)
def myfunc(x, a):
return exp(a * (x - 1))
ome = 2.0 ** 3.4
assert_quad(quad(myfunc, 0, 1, args=20, weight='sin', wvar=ome),
(20 * sin(ome) - ome * cos(ome) + ome * exp(-20)) / (20 ** 2 + ome ** 2))
def test_sine_weighted_infinite(self):
# 5) Sine weighted integral (infinite limits)
def myfunc(x, a):
return exp(-x * a)
a = 4.0
ome = 3.0
assert_quad(quad(myfunc, 0, Inf, args=a, weight='sin', wvar=ome),
ome / (a ** 2 + ome ** 2))
def test_cosine_weighted_infinite(self):
# 6) Cosine weighted integral (negative infinite limits)
def myfunc(x, a):
return exp(x * a)
a = 2.5
ome = 2.3
assert_quad(quad(myfunc, -Inf, 0, args=a, weight='cos', wvar=ome),
a / (a ** 2 + ome ** 2))
def test_algebraic_log_weight(self):
# 6) Algebraic-logarithmic weight.
def myfunc(x, a):
return 1 / (1 + x + 2 ** (-a))
a = 1.5
assert_quad(quad(myfunc, -1, 1, args=a, weight='alg',
wvar=(-0.5, -0.5)),
pi / sqrt((1 + 2 ** (-a)) ** 2 - 1))
def test_cauchypv_weight(self):
        # 7) Cauchy principal value weighting w(x) = 1/(x-c)
def myfunc(x, a):
return 2.0 ** (-a) / ((x - 1) ** 2 + 4.0 ** (-a))
a = 0.4
tabledValue = ((2.0 ** (-0.4) * log(1.5) -
2.0 ** (-1.4) * log((4.0 ** (-a) + 16) / (4.0 ** (-a) + 1)) -
                        arctan(2.0 ** (a + 2)) -
arctan(2.0 ** a)) /
(4.0 ** (-a) + 1))
assert_quad(quad(myfunc, 0, 5, args=0.4, weight='cauchy', wvar=2.0),
tabledValue, errTol=1.9e-8)
def test_double_integral(self):
# 8) Double Integral test
def simpfunc(y, x): # Note order of arguments.
return x + y
a, b = 1.0, 2.0
assert_quad(dblquad(simpfunc, a, b, lambda x: x, lambda x: 2 * x),
|
tiborsimko/invenio-base
|
setup.py
|
Python
|
mit
| 2,590
| 0
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Base package for building Invenio application factories."""
import os
from setuptools import find_packages, setup
readme = open('README.rst').read()
history = open('CHANGES.rst').read()
# Get the version string. Cannot be done with import!
g = {}
with open(os.path.join('invenio_base', 'version.py'), 'rt') as fp:
exec(fp.read(), g)
version = g['__version__']
tests_require = [
'check-manifest>=0.25',
'coverage>=4.0',
'isort>=4.2.2',
'mock>=1.3.0',
'pydocstyle>=2.0.0',
    'pytest-cov>=1.8.0',
'pytest-pep8>=1.0.6',
'pytest>=2.8.0',
'invenio-config>=1.0.0',
]
extras_require = {
'docs': [
'Sphinx>=1.4.2',
],
'tests': tests_require,
}
extras_require['all'] = []
for reqs in extras_require.values():
    extras_require['all'].extend(reqs)
setup_requires = [
'pytest-runner>=2.6.2',
]
install_requires = [
'blinker>=1.4',
'cookiecutter>=1.2.1',
'Flask>=0.11.1',
]
packages = find_packages()
setup(
name='invenio-base',
version=version,
description=__doc__,
long_description=readme + '\n\n' + history,
keywords='invenio',
license='MIT',
author='CERN',
author_email='info@inveniosoftware.org',
url='https://github.com/inveniosoftware/invenio-base',
packages=packages,
zip_safe=False,
include_package_data=True,
platforms='any',
entry_points={
'console_scripts': [
'inveniomanage = invenio_base.__main__:cli',
],
'flask.commands': [
'instance = invenio_base.cli:instance',
],
},
extras_require=extras_require,
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_require,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Development Status :: 5 - Production/Stable',
],
)
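With the extras wired together above, a development install that pulls in the docs and test dependencies is the usual pip invocation (assuming a checkout of the repository):
pip install -e .[all]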
|
smmribeiro/intellij-community
|
python/testData/intentions/PyConvertTypeCommentToVariableAnnotationIntentionTest/simpleForLoop.py
|
Python
|
apache-2.0
| 50
| 0.02
|
for x in undefined(): # ty<caret>pe: int
    pass
|
asweigart/moosegesture
|
tests/demoGestureApp.py
|
Python
|
bsd-3-clause
| 3,638
| 0.004948
|
"""
MooseGesture Test application
Al Sweigart al@coffeeghost.net
http://coffeeghost.net/2011/05/09/moosegesture-python-mouse-gestures-module
Run the app and then draw by dragging the mouse. When you release the mouse
button, the gesture you drew will be identified.
This script requires the MooseGesture library, which you can download from here:
http://coffeeghost.net/moosegesture.py
And also requires Pygame:
http://pygame.org
Copyright 2011, BSD-license.
"""
import pygame, sys, os
from pygame.locals import *
sys.path.append(os.path.abspath('..'))
import moosegesture
# setup constants
WINDOWWIDTH = 600
WINDOWHEIGHT = 600
FPS = 40
TEXTCOLOR = (255, 255, 255) # white
BACKGROUNDCOLOR = (0, 0, 0)# black
POINTSCOLOR = (255, 0, 0) # red
LINECOLOR = (255, 165, 0) # orange
CARDINALCOLOR = (0, 255, 0) # green
DIAGONALCOLOR = (0, 0, 255) # blue
# set up pygame, the window, and the mouse cursor
pygame.init()
mainClock = pygame.time.Clock()
windowSurface = pygame.display.set_mode((WINDOWWIDTH, WINDOWHEIGHT))
pygame.display.set_caption('Mouse Gesture Test')
points = []
mouseDown = False
font = pygame.font.SysFont(None, 24)
strokeText = ''
while True: # main loop
for event in pygame.event.get():
# handle all pygame events
if event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE):
pygame.quit()
sys.exit()
if event.type == MOUSEBUTTONDOWN:
# on mouse down, erase the previous line and start drawing a new one
mouseDown = True
if len(points) > 2:
startx, starty = points[0][0], points[0][1]
for i in range(len(points)):
points[i] = (points[i][0] - startx, points[i][1] - starty)
points = []
            strokeText = ''
if event.type == MOUSEBUTTONUP:
# try to identify the gesture when the mouse dragging stops
mouseDown = False
strokes = moosegesture.getGesture(points)
segments = moosegesture.getSegments(points)
            strokeText = ' '.join(strokes)
textobj = font.render(strokeText, 1, (255,255,255))
textrect = textobj.get_rect()
textrect.topleft = (10, WINDOWHEIGHT - 30)
if event.type == MOUSEMOTION and mouseDown:
# draw the line if the mouse is dragging
points.append( (event.pos[0], event.pos[1]) )
# Draw the window.
windowSurface.fill(BACKGROUNDCOLOR)
if strokeText:
# draw the identified strokes of the last line
windowSurface.blit(textobj, textrect)
# draw points
for x, y in points:
pygame.draw.circle(windowSurface, POINTSCOLOR, (x, y), 2)
if mouseDown:
# draw strokes as unidentified while dragging the mouse
if len(points) > 1:
pygame.draw.lines(windowSurface, LINECOLOR, False, points)
else:
# draw the identified strokes
segNum = 0
curColor = LINECOLOR
for p in range(len(points)-1):
if segNum < len(segments) and segments[segNum][0] == p:
# start of new stroke
if strokes[segNum] in [2, 4, 6, 8]:
curColor = CARDINALCOLOR
elif strokes[segNum] in [1, 3, 7, 9]:
curColor = DIAGONALCOLOR
pygame.draw.line(windowSurface, curColor, points[p], points[p+1])
if segNum < len(segments) and segments[segNum][1] == p:
# end of a stroke
curColor = LINECOLOR
segNum += 1
pygame.display.update()
mainClock.tick(FPS)
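The identification step can be exercised without pygame; a minimal sketch (the points are made up, and the exact stroke encoding returned depends on the moosegesture version):
# A straight left-to-right drag should identify as a single rightward stroke.
sample = [(0, 0), (10, 0), (20, 0), (30, 0), (40, 0)]
print(moosegesture.getGesture(sample))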
|
MungoRae/home-assistant
|
homeassistant/components/switch/neato.py
|
Python
|
apache-2.0
| 4,019
| 0
|
"""
Support for Neato Connected Vacuums switches.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.neato/
"""
import logging
import requests
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.components.neato import NEATO_ROBOTS, NEATO_LOGIN
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['neato']
SWITCH_TYPE_CLEAN = 'clean'
SWITCH_TYPE_SCHEDULE = 'schedule'
SWITCH_TYPES = {
SWITCH_TYPE_CLEAN: ['Clean'],
SWITCH_TYPE_SCHEDULE: ['Schedule']
}
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the Neato switches."""
dev = []
for robot in hass.data[NEATO_ROBOTS]:
for type_name in SWITCH_TYPES:
dev.append(NeatoConnectedSwitch(hass, robot, type_name))
_LOGGER.debug("Adding switches %s", dev)
add_devices(dev)
class NeatoConnectedSwitch(ToggleEntity):
"""Neato Connected Switches."""
def __init__(self, hass, robot, switch_type):
"""Initialize the Neato Connected switches."""
self.type = switch_type
self.robot = robot
self.neato = hass.data[NEATO_LOGIN]
self._robot_name = '{} {}'.format(
self.robot.name, SWITCH_TYPES[self.type][0])
try:
            self._state = self.robot.state
        except (requests.exceptions.ConnectionError,
                requests.exceptions.HTTPError) as ex:
_LOGGER.warning("Neato connection error: %s", ex)
self._state = None
self._schedule_state = None
self._clean_state = None
def update(self):
"""Update the states of Neato switches."""
_LOGGER.debug("Running switch update")
self.neato.update_robots()
try:
self._state = self.robot.state
except (requests.exceptions.ConnectionError,
requests.exceptions.HTTPError) as ex:
_LOGGER.warning("Neato connection error: %s", ex)
self._state = None
return
_LOGGER.debug('self._state=%s', self._state)
if self.type == SWITCH_TYPE_CLEAN:
if (self.robot.state['action'] == 1 or
self.robot.state['action'] == 2 or
self.robot.state['action'] == 3 and
self.robot.state['state'] == 2):
self._clean_state = STATE_ON
else:
self._clean_state = STATE_OFF
_LOGGER.debug("Clean state: %s", self._clean_state)
if self.type == SWITCH_TYPE_SCHEDULE:
_LOGGER.debug("State: %s", self._state)
if self.robot.schedule_enabled:
self._schedule_state = STATE_ON
else:
self._schedule_state = STATE_OFF
_LOGGER.debug("Shedule state: %s", self._schedule_state)
@property
def name(self):
"""Return the name of the switch."""
return self._robot_name
@property
def available(self):
"""Return True if entity is available."""
return self._state
@property
def is_on(self):
"""Return true if switch is on."""
if self.type == SWITCH_TYPE_CLEAN:
if self._clean_state == STATE_ON:
return True
return False
elif self.type == SWITCH_TYPE_SCHEDULE:
if self._schedule_state == STATE_ON:
return True
return False
def turn_on(self, **kwargs):
"""Turn the switch on."""
if self.type == SWITCH_TYPE_CLEAN:
self.robot.start_cleaning()
elif self.type == SWITCH_TYPE_SCHEDULE:
self.robot.enable_schedule()
def turn_off(self, **kwargs):
"""Turn the switch off."""
if self.type == SWITCH_TYPE_CLEAN:
self.robot.pause_cleaning()
self.robot.send_to_base()
elif self.type == SWITCH_TYPE_SCHEDULE:
self.robot.disable_schedule()
|
gooddata/openstack-nova
|
api-guide/source/conf.py
|
Python
|
apache-2.0
| 9,312
| 0.000966
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Compute API documentation build configuration file
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['openstackdocstheme']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Compute API Guide'
bug_tag = u'api-guide'
repository_name = 'openstack/nova'
bug_project = 'nova'
# Must set this variable to include year, month, day, hours, and minutes.
html_last_updated_fmt = '%Y-%m-%d %H:%M'
copyright = u'2015, OpenStack contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2.1.0'
# The full version, including alpha/beta/rc tags.
release = '2.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'openstackdocs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = [openstackdocstheme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%Y-%m-%d %H:%M'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'compute-api-guide'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'ComputeAPI.tex', u'Compute API Documentation',
u'OpenStack contributors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'computeapi', u'Compute API Documentation',
[u'OpenStack contributors'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source star
|
jlaunonen/kirppu
|
kirppu/migrations/0007_auto_misc_adjustments.py
|
Python
|
mit
| 1,699
| 0.002354
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
        ('kirppu', '0006_item_lost_property'),
    ]

    operations = [
migrations.AlterField(
model_name='clerk',
name='access_key',
field=models.CharField(null=True, validators=[django.core.validators.RegexValidator(b'^[0-9a-fA-F]{14}$', message=b'Must be 14 hex chars.')], max_length=128, blank=True, help_text='Access code assigned to the clerk.', unique=True, verbose_name='Access key value'),
preserve_default=True,
),
migrations.AlterField(
model_name='item',
name='itemtype',
field=models.CharField(default=b'other', max_length=24, choices=[(b'manga-finnish', 'Manga (Finnish)'), (b'manga-english', 'Manga (English)'), (b'manga-other', 'Manga (other language)'), (b'book', 'Book'), (b'magazine', 'Magazine'), (b'movie-tv', 'Movie/TV-series'), (b'game', 'Game'), (b'figurine-plushie', 'Figurine/Plushie'), (b'clothing', 'Clothing'), (b'other', 'Other')]),
preserve_default=True,
),
migrations.AlterField(
model_name='uitext',
name='identifier',
field=models.CharField(help_text='Identifier of the text item', unique=True, max_length=16, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='uitext',
name='text',
field=models.CharField(help_text='Text item in UI', max_length=16384),
preserve_default=True,
),
]
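Applying the migration is standard Django (the project's manage.py is assumed):
python manage.py migrate kirppu 0007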
|