repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
ner0x652/RElief | dololi/dololi.py | Python | mit | 6,472 | 0.008189 | import lief
import sys
import os
import traceback
import configparser
import struct
from collections import OrderedDict
# Opcodes
X86_PUSH_BYTE = 0x6a
X86_32_PUSH_DWORD = 0x68
x86_32_CALL = [0xff, 0x15]
X86_64_CALL = [0xff, 0xd0]
X86_64_MOV_R9 = [0x49, 0xb9 | ]
X86_64_MOV_R8 = [0x49, 0xb8]
X86_64_MOV_RDX = [0x48, 0xba]
X86_64_MOV_RCX = [0x48, 0xb9]
X86_64_MOV_RAX = [0x48, 0xc7, 0xc0]
def get_config(conf_file="dololi.conf"):
assert os.path.isfile(conf_file)
conf = configparser.ConfigParser()
conf.read(conf_file)
return conf
def is_dll(pe_file):
return pe_file.hea | der.has_characteristic(lief.PE.HEADER_CHARACTERISTICS.DLL)
def get_pe_type(arch):
assert arch == "32" or arch == "64"
if arch == "32":
return lief.PE.PE_TYPE.PE32
else:
return lief.PE.PE_TYPE.PE32_PLUS
def is_64_bits(pe_type):
return pe_type == lief.PE.PE_TYPE.PE32_PLUS
def get_reg_by_argn(argn):
return {
"1": "r9",
"2": "r8",
"3": "rdx",
"4": "rcx",
"5": "rax"
}[argn]
def get_opcodes_by_reg(reg):
return {
"r9" : X86_64_MOV_R9,
"r8" : X86_64_MOV_R8,
"rdx": X86_64_MOV_RDX,
"rcx": X86_64_MOV_RCX,
"rax": X86_64_MOV_RAX
}[reg]
def dololi(arch, conf, out_file_name):
code_rva = int(conf["DEFAULT"].get("CODE_RVA"))
data_rva = int(conf["DEFAULT"].get("DATA_RVA"))
pe_type = get_pe_type(arch)
is_64bits = is_64_bits(pe_type)
pe_loader = lief.PE.Binary("dololi", pe_type)
code_cnt,\
reg_size,\
pack_fmt = ([], 8, "<Q") if is_64bits else ([], 4, "<I")
data_cnt = ""
data_off = 0
reg_cnt = 1
func_num = 0
funcs = OrderedDict()
# Parse CODE and DATA contents from config file
for k, v in conf["CODE"].items():
if k.endswith("_byte"):
value = int(v)
value = struct.pack("<B", value)
code_cnt.extend([X86_PUSH_BYTE, value[0]])
elif k.endswith("_word"):
value = int(v)
value = struct.pack("<H", value)
code_cnt.extend([X86_32_PUSH_DWORD, value[0], value[1], 0x0, 0x0])
elif k.endswith("_dword") or k.endswith("_qword"):
reg_size, pack_fmt = {"dword":(4, "<I"), "qword":(8, "<Q")}[k.split('_')[-1]]
if v.lower().endswith("_data"):
data_key = v.lower().rstrip("_data")
assert "str" in data_key.lower(), "Data should contain arrays or strings"
data_value = conf["DATA"][data_key] + '\0'
data_cnt += data_value
addr = struct.pack(pack_fmt, pe_loader.optional_header.imagebase + data_rva + data_off)
if is_64bits:
code_cnt.extend(get_opcodes_by_reg(get_reg_by_argn(str(reg_cnt))))
reg_cnt = (reg_cnt % 4) + 1
if reg_size < 8:
addr += bytes("\x00" * (8 - reg_size), 'ascii')
code_cnt.extend(list(addr))
else:
code_cnt.extend([X86_32_PUSH_DWORD])
code_cnt.extend(list(addr))
data_off += len(data_value)
else:
value = int(v)
value = struct.pack(pack_fmt, value)
if is_64bits:
code_cnt.extend(get_opcodes_by_reg(get_reg_by_argn(str(reg_cnt))))
reg_cnt = (reg_cnt % 4) + 1
if reg_size < 8:
value += [0x0] * (8 - reg_size)
code_cnt.extend(list(value))
else:
code_cnt.extend([X86_32_PUSH_DWORD])
code_cnt.extend(list(value))
elif k.endswith("_func"):
assert len(v.split(';')) == 2, "DLL name;Export function name"
dll_name, export_name = v.strip("\r\n").split(';')
dll = pe_loader.add_library(dll_name)
dll.add_entry(export_name)
func_num_str = "".join(["FUNC_", str(func_num)])
if is_64bits:
code_cnt.extend(get_opcodes_by_reg(get_reg_by_argn("5")))
reg_cnt = (reg_cnt % 4) + 1
else:
code_cnt.extend(x86_32_CALL)
for i in range(4):
code_cnt.append(func_num_str)
if is_64bits:
code_cnt.extend(X86_64_CALL)
if dll_name not in funcs:
funcs[dll_name] = set()
funcs[dll_name].add((export_name, func_num_str))
func_num += 1
else:
# code_rva and data_rva from DEFAULT section
pass
# Add function addresses
for k, v in funcs.items():
for f in v:
func_addr = pe_loader.predict_function_rva(k, f[0])
offset = code_rva if func_num == 1 else 0 # dirty hack to adjust function address
addr = struct.pack(pack_fmt, pe_loader.optional_header.imagebase + data_rva - offset + func_addr)
# TO DO, number of bytes should be adjusted automatically
for i in range(4):
code_cnt[code_cnt.index(f[1])] = addr[i]
# set .text section fields
text_sect = lief.PE.Section(".text")
text_sect.virtual_address = code_rva
text_sect.content = code_cnt
text_sect = pe_loader.add_section(text_sect, lief.PE.SECTION_TYPES.TEXT)
# set .data section fields
data_sect = lief.PE.Section(".data")
data_sect.virtual_address = data_rva
data_sect.content = list(map(ord, data_cnt))
data_sect = pe_loader.add_section(data_sect, lief.PE.SECTION_TYPES.DATA)
pe_loader.optional_header.addressof_entrypoint = text_sect.virtual_address
builder = lief.PE.Builder(pe_loader)
builder.build_imports(True)
builder.build()
builder.write(out_file_name)
print("{0} was successfully created!".format(out_file_name))
if __name__ == "__main__":
assert len(sys.argv) > 1, "Usage: {0} <32|64> [Output file name]".format(sys.argv[0])
if sys.argv[1] not in ("32", "64"):
print("Use 32 to build x86_32 bit or 64 for x86_64 bit loader")
sys.exit(1)
dololi(sys.argv[1], get_config(), "dololi.exe" if len(sys.argv) < 3 else sys.argv[2])
|
N6UDP/cslbot | cslbot/commands/bard.py | Python | gpl-2.0 | 1,191 | 0.001679 | # Copyright (C) 2013-2015 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Fox Wilson
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from ..helpers.textutils import gen_shakespeare
from ..helpers.command import Command
@Command(['bard', 'shakespeare'])
def cmd(send, msg, _):
"""Translates something into Shakespearese.
| Syntax: {command} <something>
"""
if not msg:
send("Forsooth! What shalt I translate?")
| return
send(gen_shakespeare(msg))
|
kubow/HAC | Multimedia/Database/TkTableWrapper.py | Python | unlicense | 25,265 | 0.004235 | # Copyright (c) 2008, Guilherme Polo
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
This contains a wrapper class for the tktable widget as well a class for using
tcl arrays that are, in some instances, required by tktable.
"""
__author__ = "Guilherme Polo <ggpolo@gmail.com>"
__all__ = ["ArrayVar", "Table"]
import os
try:
import tk as tk
except ImportError:
import tkinter as tk
print('using small tkinter')
def _setup_master(master):
if master is None:
if tk._support_default_root:
master = tk._default_root or tk.Tk()
else:
raise RuntimeError("No master specified and tk is "
"configured to not support default master")
return master
class ArrayVar(tk.Variable):
"""Class for handling Tcl arrays.
An array is actually an associative array in Tcl, so this class supports
some dict operations.
"""
def __init__(self, master=None, name=None):
# tk.Variable.__init__ is not called on purpose! I don't wanna
# see an ugly _default value in the pretty array.
self._master = _setup_master(master)
self._tk = self._master.tk
if name:
self._name = name
else:
self._name = 'PY_VAR%s' % id(self)
def __del__(self):
if bool(self._tk.call('info', 'exists', self._name)):
self._tk.globalunsetvar(self._name)
def __len__(self):
return int(self._tk.call('array', 'size', str(self)))
def __getitem__(self, key):
return self.get(key)
def __setitem__(self, key, value):
self.set(**{str(key): value})
def names(self):
return self._tk.call('array', 'names', self._name)
def get(self, key=None):
if key is None:
flatten_pairs = self._tk.call('array', 'get', str(self))
return dict(zip(flatten_pairs[::2], flatten_pairs[1::2]))
return self._tk.globalgetvar(str(self), str(key))
def set(self, **kw):
self._tk.call('array', 'set', str(self), tk._flatten(kw.items()))
def unset(self, pattern=None):
"""Unsets all of the elements in the array. If pattern is given, only
the elements that match pattern are unset. """
self._tk.call('array', 'unset', str(self), pattern)
_TKTABLE_LOADED = False
class Table(tk.Widget):
"""Create and manipulate tables."""
_switches = ('holddimensions', 'holdselection', 'holdtags', 'holdwindows',
'keeptitles', '-')
_tabsubst_format = ('%c', '%C', '%i', '%r', '%s', '%S', '%W')
_tabsubst_commands = ('browsecommand', 'browsecmd', 'command',
'selectioncommand', 'selcmd',
'validatecommand', 'valcmd')
def __init__(self, master=None, **kw):
master = _setup_master(master)
global _TKTABLE_LOADED
if not _TKTABLE_LOADED:
tktable_lib = os.environ.get('TKTABLE_LIBRARY')
if tktable_lib:
master.tk.eval('global auto_path; '
'lappend auto_path {%s}' % tktable_lib)
master.tk.call('package', 'require', 'Tktable')
_TKTABLE_LOADED = True
tk.Widget.__init__(self, master, 'table', kw)
def _options(self, cnf, kw=None):
if kw:
cnf = tk._cnfmerge((cnf, kw))
else:
cnf = tk._cnfmerge(cnf)
res = ()
for k, v in cnf.iteritems():
if callable(v):
if k in self._tabsubst_commands:
v = "%s %s" % (self._register(v, self._tabsubst),
' '.join(self._tabsubst_format))
else:
v = self._register(v)
res += ('-%s' % k, v)
return res
def _tabsubst(self, *args):
if len(args) != len(self._tabsubst_format):
return args
tk = self.tk
c, C, i, r, s, S, W = args
e = tk.Event()
e.widget = self
e.c = tk.getint(c)
e.i = tk.getint(i)
e.r = tk.getint(r)
e.C = "%d,%d" % (e.r, e.c)
e.s = s
e.S = S
try:
e.W = self._nametowidget(W)
except KeyError:
e.W = None
return (e,)
def _handle_switches(self, args):
args = args or ()
return tuple(('-%s' % x) for x in args if x in self._switches)
def activate(self, index):
"""Set the active cell to the one indicated by index."""
self.tk.call(self._w, 'activate', index)
def bbox(self, first, last=None):
"""Return the bounding box for the specified cell (range) as a
4-tuple of x, y, width and height in pixels. It clips the box to
the visible portion, if any, otherwise an empty tuple is returned."""
return self._getints(self.tk.ca | ll(self._w, 'bbox', first, last)) or ()
def clear(self, option, first=None, last=None):
"""This is a convenience routine to clear certain state information
managed by the table. first and last represent valid table indices.
If neither are specified, then the command operates on the whole
table."""
self.tk.call(self._w, 'clear', option, first, last)
def clear_cache(self, first=None, last=None):
"""Clear th | e specified section of the cache, if the table has been
keeping one."""
self.clear('cache', first, last)
def clear_sizes(self, first=None, last=None):
"""Clear the specified row and column areas of specific height/width
dimensions. When just one index is specified, for example 2,0, that
is interpreted as row 2 and column 0."""
self.clear('sizes', first, last)
def clear_tags(self, first=None, last=None):
"""Clear the specified area of tags (all row, column and cell tags)."""
self.clear('tags', first, last)
def clear_all(self, first=None, last=None):
"""Perform all of the above clear functions on the specified area."""
self.clear('all', first, last)
def curselection(self, value=None):
"""With no arguments, it returns the sorted indices of the currently
selected cells. Otherwise it sets all the selected cells to the given
value if there is an associated ArrayVar and the state is not
disabled."""
result = self.tk.call(self._w, 'curselection', value)
if value is None:
return result
def curvalue(self, value=None):
"""If no value is given, the value of the cell being edited (indexed
by active) is returned, else it is set to the given value. """
return self.tk.call(self._w, 'curvalue', value)
def delete_active(self, index1, index2=None):
"""Deletes text from the active cell. If only one index is given,
it deletes the character after that index, otherwise it |
Onyo/vtex-client | vtex_client/card.py | Python | gpl-3.0 | 424 | 0.002358 | # -*- | coding: utf-8 -*-
from __future__ import unicode_literals
from .base import BaseClient
class CardClient(BaseClient):
def get_information(self, numbers):
"""Get a card information from the first six numbers of the card.
:param numbers: first six numbers of the card
:returns: card information
"""
return self._make_request("api/pvt/bins?code={}".fo | rmat(numbers), 'get')
|
RenZ0/php-show-controller | engine/delta.py | Python | gpl-2.0 | 14,328 | 0.006003 | #!/usr/bin/python
# -*- coding:utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Copyright (C) 2011 Laurent Pierru <renzo@imaginux.com>
# Thanks to Stéphane Bonhomme <stephane@exselt.com>
# Thanks to Simon Newton <nomis52@gmail.com>
# Thanks to Heikki Junnila
"""Php-Show-Controller. Generates values and sends data to OLA."""
import sys
import time
import array
import com_sql
from threading import Thread
from ola.ClientWrapper import ClientWrapper
###
class DmxSender(Thread):
def __init__(self):
Thread.__init__(self)
self.Terminated = False
# self.term=term
## Stop the thread if called
def stop(self):
self.Terminated = True
def run(self):
'''Wrapper, Framerate'''
print "THREAD"
self._wrapper = ClientWrapper()
self._activesender = True
self.univloglevel = 0
self.base = com_sql.ComSql()
# SQL Framerate
try:
engine = self.base.requete_sql("SELECT * FROM dmx_engine WHERE id=1") #setting
for e in range(len(engine)):
freq_ms = engine[e]['freq_ms']
except:
print "default to 40 fps"
freq_ms = 25
# FOR TEST
# freq_ms = 500
self._tick_interval = int(freq_ms) # in milliseconds
print "freq_ms"
print self._tick_interval
# list of scens to play
self.scen_ids=[]
# dict to store each scenari instance
self.my_scens={}
# SQL | Universes
try:
prefs = self.base.requete_sql("SELECT * FROM dmx_preferences WHERE id=1") #setting
for p in range(len(prefs)):
self.univ_qty = prefs[p]['univ_qty']
except:
print "default | to 1 universe"
self.univ_qty = 1
print "univ_qty"
print self.univ_qty
# array to store full frame
self.WholeDmxFrame = [0] * 512 * self.univ_qty
# send the first one
self.SendDmxFrame()
self._wrapper.Run()
def BlackOut(self):
'''Reset all values to zero'''
self.WholeDmxFrame = [0] * 512 * self.univ_qty
def AssignChannels(self,offset,values):
'''Assign channels values according to address'''
self.WholeDmxFrame[offset:offset+len(values)] = values
def SendDmxFrame(self):
'''Ask frame for each scenari and make the whole frame, repeated every tick_interval'''
if self._activesender:
# Schedule an event to run in the future
if self.univloglevel > 0:
print "Schedule next"
self._wrapper.AddEvent(self._tick_interval, self.SendDmxFrame)
if self.univloglevel > 1:
print "before sending %s" % time.time()
# send data to universes
# print "SPLIT"
SplittedFrame = self.USplit(self.WholeDmxFrame,512)
u=1
for FramePart in SplittedFrame:
UniverseFrame = list(FramePart)
if self.univloglevel > 0:
print "FRAME_FOR_UNIV %s" % u
# print UniverseFrame
try:
data = array.array('B', UniverseFrame)
self._wrapper.Client().SendDmx(u, data)
except:
print "Dmx frame not sent. Reset all."
self.ResetAll()
u += 1
if self.univloglevel > 1:
print "before computing %s" % time.time()
#for each scenari in list
for scenarid in self.scen_ids:
try:
# create scenari instance if needed
if not self.my_scens.has_key(scenarid):
scen=PlayScenari(scenarid, self._tick_interval)
# store instance in dict, only once
self.my_scens[scenarid]=scen
print self.my_scens
# for each instance, compute frame
scen=self.my_scens[scenarid]
scen.ComputeNextFrame()
# print "calling %s" % scen.new_frame
# add partial frame to full one
self.AssignChannels(scen.patch, scen.new_frame)
# print "FRAME"
# print self.WholeDmxFrame
except:
print "NOT STARTED"
if self.univloglevel > 1:
print "after computing %s" % time.time()
print "---"
def ChangeUnivLogLevel(self):
self.univloglevel+=1
if self.univloglevel > 2:
self.univloglevel=0
return False
else:
return True
def ChangeLogLevel(self, scenarid):
if self.my_scens.has_key(scenarid):
# set loglevel for this instance
scen=self.my_scens[scenarid]
print scen
scen.loglevel+=1
if scen.loglevel > 2:
scen.loglevel=0
return False
else:
return True
def HaltDmxSender(self):
if self._activesender == True:
self._activesender = False
return True
def ResumeDmxSender(self):
if self._activesender == False:
self._activesender = True
self.SendDmxFrame()
return True
def CloseDmxSender(self):
self._wrapper.Stop()
def StartScenari(self, scenarid):
if not scenarid in self.scen_ids:
# add id into list
self.scen_ids.append(scenarid)
return True
def StopScenari(self, scenarid):
if scenarid in self.scen_ids:
# remove id into list
self.scen_ids.remove(scenarid)
return True
def StatusScenari(self, scenarid):
if scenarid in self.scen_ids:
return True
def ResetScenari(self, scenarid):
if self.my_scens.has_key(scenarid):
# remove instance for this id
self.my_scens.pop(scenarid)
return True
def StopAll(self):
self.scen_ids=[]
def ResetAll(self):
self.my_scens={}
def USplit(self, l, n):
return zip(*(l[i::n] for i in range(n)))
###
class PlayScenari:
def __init__(self, scenari, tickint):
'''Each instance if for only one scenari'''
self.scenari = scenari
self.tick_interval = tickint
self.loglevel = 0
self._activescenari = True
self.base = com_sql.ComSql()
self.GetFixtureDetails()
self.current_i = -1
self.GetNextStep()
self.ChangeStep()
def GetFixtureDetails(self):
'''Fixture patch (define address), universe'''
# SQL Scen infos
scendet = self.base.requete_sql("SELECT * FROM dmx_scensum WHERE id=%s", str(self.scenari)) #scen
for i in range(len(scendet)):
# SQL Fixture infos
fixtdet = self.base.requete_sql("SELECT * FROM dmx_fixture WHERE id=%s", str(scendet[i]['id_fixture'])) #fixt
for j in range(len(fixtdet)):
self.patch = fixtdet[j]['patch']
self.patch_after = fixtdet[j]['patch_after']
self.universe = fixtdet[j]['univ']
print "patch, patch_after, univ"
print self.patch, self.patch_after, self.universe
# change patch to meet universe zone
if self.universe > 1:
self.patch = self.patch + (512 * (self.universe-1))
# fill zeros if splitted fixture
self.pafter=""
for i in range(self.pa |
ESOedX/edx-platform | openedx/core/djangoapps/video_pipeline/config/waffle.py | Python | agpl-3.0 | 720 | 0.002778 | """
This module contains configuration settings via waffle flags
for the Video Pipeline app.
"""
from __future__ import absolute_import
from openedx.core.djan | goapps.waffle_utils import WaffleFlagNamespace, CourseWaffleFlag
# Videos Namespace
WAFFLE_NAME | SPACE = 'videos'
# Waffle flag telling whether youtube is deprecated.
DEPRECATE_YOUTUBE = 'deprecate_youtube'
def waffle_flags():
"""
Returns the namespaced, cached, audited Waffle flags dictionary for Videos.
"""
namespace = WaffleFlagNamespace(name=WAFFLE_NAMESPACE, log_prefix=u'Videos: ')
return {
DEPRECATE_YOUTUBE: CourseWaffleFlag(
waffle_namespace=namespace,
flag_name=DEPRECATE_YOUTUBE
)
}
|
schmit/intro-python-course | lectures/code/classes_stocks.py | Python | mit | 351 | 0.002849 | class Stock():
| def __init__(self, name, symbol, prices=[]):
self.name = name
self.symbol = symbol
self.prices = prices
def high_price(self):
if len(self.prices) == 0:
return 'MISSING PRICES'
return max(self.prices)
ap | ple = Stock('Apple', 'APPL', [500.43, 570.60])
print apple.high_price()
|
sjorsng/osint-combiner | shodanfunctions.py | Python | mit | 6,072 | 0.002635 | from base import dict_add_source_prefix
from base import add_institution_field
from base import get_institutions
from base import dict_clean_empty
from base import convert_file
import configparser
import shodan
import json
import sys
import os
def get_new_shodan_api_object():
"""Returns initialised Shodan API object"""
config = configparser.ConfigParser()
config.read(os.path.dirname(os.path.realpath(__file__)) + "/config.ini")
key = (config['osint_sources']['SHODAN_API_KEY'])
return shodan.Shodan(key)
def shodan_to_es_convert(input_dict, institutions):
"""Returns dict ready to be used by the Elastic Stack."""
try:
# set ip and ip_int
ip_int = input_dict['ip']
del input_dict['ip']
input_dict['ip'] = input_dict['ip_str']
del input_dict['ip_str']
input_dict['ip_int'] = ip_int
except KeyError:
try:
input_dict['ip'] = input_dict['ip_str']
del input_dict['ip_str']
except KeyError:
print(input_dict)
print('Missing required \'ip\' field in the element above. Exiting now...')
sys.exit(1)
# if present, convert ssl.cert.serial to string
try:
input_dict['ssl']['cert']['serial'] = str(input_dict['ssl']['cert']['serial'])
except KeyError:
pass
# if present, convert ssl.dhparams.generator to string
try:
input_dict['ssl']['dhparams']['generator'] = str(input_dict['ssl']['dhparams']['generator'])
except (KeyError, TypeError):
pass
try:
# rename_shodan.modules to protocols (used as prefix per banner for combining multiple banners into 1 IP)
input_dict['protocols'] = input_dict['_shodan']['module']
# the rest of the data in _shodan is irrelevant
del input_dict['_shodan']
except KeyError:
pass
# asn to int
try:
input_dict['asn'] = int((input_dict['asn'])[2:])
except KeyError:
pass
try:
# rename location.country_name to location.country
input_dict['location']['country'] = input_dict['location']['country_name']
del input_dict['location']['country_name']
# rename latitude and longitude for geoip
input_dict['location']['geo'] = {}
input_dict['location']['geo']['lat'] = input_dict['location']['latitude']
input_dict['location']['geo']['lon'] = input_dict['location']['longitude']
del input_dict['location']['latitude']
del input_dict['location']['longitude']
except KeyError:
pass
# Limit the number of fields
input_dict = limit_nr_of_elements(input_dict)
# prefix non-nested fields with 'shodan'
input_dict = dict_add_source_prefix(input_dict, 'shodan', str(input_dict['protocols']))
# If institutions are given, add institution field based on 'ip' field
if institutions is not None:
input_dict = add_institution_field(input_dict, institutions)
return input_dict
def limit_nr_of_elements(input_dict):
"""Converts some of the JSON elements containing (too) many nested elements to 1 string element.
This prevents Elasticsearch from making too many fields, so it is still manageable in Kibana.
"""
try:
input_dict['http']['components'] = str(
input_dict['http']['components'])
except KeyError:
pass
try:
input_dict['elastic'] = str(
input_dict['elastic'])
except KeyError:
pass
try:
input_dict['opts']['minecraft'] = str(
input_dict['opts']['minecraft'])
except KeyError:
pass
return input_dict
def to_file_shodan(queries, path_output_file, should_convert, should_add_institutions):
"""Makes a Shodan API call with each given query and writes results to output file
:param queries: Collection of strings which present Shodan queries
:param path_output_file: String which points to existing output file
:param should_convert: Boolean if results should be converted
:param should_add_institutions: boolean if an institution field should be added when converting
"""
api = get_new_shodan_api_object()
nr_total_results = 0
failed_queries = set()
for query in queries:
print | ('\"' + query + '\"')
results = 0
with open(path_output_file, "a") as output_file:
try:
for banner in api.search_cursor(query):
| banner = dict_clean_empty(banner)
output_file.write(json.dumps(banner) + '\n')
results += 1
print('\r' + str(results) + ' results written...', end='')
print("")
except shodan.APIError as e:
print('Error: ', e)
failed_queries.add(failed_queries)
nr_total_results += results
# Print failed queries if present
if not failed_queries == set():
print('Failed queries: ', failed_queries)
print(str(nr_total_results) + ' total results written in ' + path_output_file)
if should_convert:
institutions = None
if should_add_institutions:
institutions = get_institutions()
convert_file(path_output_file, 'shodan', institutions)
def get_input_choice():
"""Returns input_choice represented as integer"""
items = ['1', '2', '3', '4']
input_choice = '0'
while input_choice not in items:
input_choice = input("Console input[1], CIDR file input[2], csv file input[3] or query file input[4]?")
return int(input_choice)
def get_user_input_console_queries():
"""Returns a non empty set of query strings"""
queries = set()
done = False
print('Enter Shodan queries, one at a time. Enter \'4\' when done.')
while not done:
query = ''
while query is '':
query = input("Query:")
if query is '4':
if queries != set():
done = True
else:
queries.add(query)
return queries
|
mganeva/mantid | scripts/Muon/GUI/Common/load_run_widget/load_run_model.py | Python | gpl-3.0 | 2,348 | 0.001278 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from __future__ import (absolute_import, division, print_function)
from Muon.GUI.Common.muon_load_data import MuonLoadData
import Muon.GUI.Common.utilities.load_utils as load_utils
class LoadRunWidgetModel(object):
"""Stores info on all currently loaded workspaces"""
def __init__(self, loaded_data_store=MuonLoadData(), context=None):
# Used with load thread
self._filenames = []
self._loaded_data_store = loaded_data_store
self._context = context
self._current_run = None
def remove_previous_data(self):
self._loaded_data_store.remove_last_added_data()
# Used with load thread
def loadData(self, filenames):
self._filenames = filenames
# Used with load thread
def execute(self):
failed_files = []
for filename in self._filenames:
try:
| ws, run, filename = load_utils.load_workspace_from_filename(filename)
except Exception as error:
failed_files += [(filename, error)]
continue
self._loade | d_data_store.remove_data(run=[run])
self._loaded_data_store.add_data(run=[run], workspace=ws, filename=filename, instrument=self._context.instrument)
if failed_files:
message = load_utils.exception_message_for_failed_files(failed_files)
raise ValueError(message)
# This is needed to work with thread model
def output(self):
pass
def cancel(self):
pass
def clear_loaded_data(self):
self._loaded_data_store.clear()
@property
def current_run(self):
return self._current_run
@current_run.setter
def current_run(self, run):
self._current_run = run
@property
def loaded_filenames(self):
return self._loaded_data_store.get_parameter("filename")
@property
def loaded_workspaces(self):
return self._loaded_data_store.get_parameter("workspace")
@property
def loaded_runs(self):
return self._loaded_data_store.get_parameter("run")
|
RyanEggert/space-lander | rocket_control/printvals.py | Python | mit | 3,261 | 0.005826 | import time
from usb_vendor import PIC_USB
import traceback
# Product IDs: Master PIC is 0x0004, Rocket PIC is 0x0005, Barge PIC is 0x0006
comms = PIC_USB(0x0005)
def main():
print("START")
loop_time = .2 # How often to run the main loop, in seconds
while True:
start_time = time.clock()
# print(chr(27) + "[2J")
# quad_info()
try:
# debug_uart_buffers()
# debug_uart_status()
rocket_info()
endstops()
# debug_oc_status()
except Exception, e:
print "Error occurred. {}".format(e)
traceback.print_exc()
print "Retrying..."
comms = PIC_USB(0x0005)
while (time.clock() - start_time) < loop_time:
pass
def rocket_info():
info = comms.get_rocket_info()
print "Rocket Tilt {} | Rocket Speed {} | Throttle {} | Motor Speed {} | Motor Thrust {} | Stepper Spe | ed {} | Tilt Angle {} | Tilt Direction {} | Rocket State {}".format(
info["tilt"],
info["speed"],
info["throttle"],
info["motor_speed"],
info["motor_thrust"],
info["stepper_speed"],
info["tilt_ang"],
| info["tilt_dir"],
info["rocket_state"],
)
def debug_uart_buffers():
info = comms.debug_uart_buffers()
rx = info["rx"]
tx = info["tx"]
print "TX_head {} | TX_tail {} | TX_count {} || RX_head {} | RX_tail {} | RX_count {}".format(
tx["head"],
tx["tail"],
tx["count"],
rx["head"],
rx["tail"],
rx["count"],
)
def debug_uart_status():
info = comms.debug_uart_status()
uart1 = info["uart1"]
uart2 = info["uart2"]
print "[UART1] URXDA: {} | OERR {} | FERR {} || PERR {} | RIDLE {} | ADDEN {}".format(
uart1["URXDA"],
uart1["OERR"],
uart1["FERR"],
uart1["PERR"],
uart1["RIDLE"],
uart1["ADDEN"]
)
print "[UART2] URXDA: {} | OERR {} | FERR {} || PERR {} | RIDLE {} | ADDEN {}".format(
uart2["URXDA"],
uart2["OERR"],
uart2["FERR"],
uart2["PERR"],
uart2["RIDLE"],
uart2["ADDEN"]
)
def debug_oc_status():
info = comms.debug_oc_status()
print "DC_OCM0 {} | DC_OCM1 {} | DC_OCM2 {} | DC_OCTSEL {} | DC_OCFLT {}".format(
info["DC_OCM0"],
info["DC_OCM1"],
info["DC_CM2"],
info["DC_OCTSEL"],
info["DC_OCFLT"],
)
print "ST_OCM0 {} | ST_OCM1 {} | ST_OCM2 {} | ST_OCTSEL {} | ST_OCFLT {}".format(
info["ST_OCM0"],
info["ST_OCM1"],
info["ST_CM2"],
info["ST_OCTSEL"],
info["ST_OCFLT"]
)
def current_state():
info = comms.get_state()
print "Current State {}".format(
info["state"],
)
def quad_info():
info = comms.get_quad_info()
print "Quad Counter {} | Overflow {}".format(
info["counter"],
info["overflow"],
)
def endstops():
"""
Reads the system's endstops.
"""
info = comms.get_limit_sw_info()
print("Y_BOT {} | Y_TOP {} | X_L {} | X_R {} | BARGE {} ".format(
info["Y_BOT"],
info["Y_TOP"],
info["X_L"],
info["X_R"],
info["BARGE"])
)
if __name__ == '__main__':
main() |
iTeam-org/iteam-site | iTeam/events/forms.py | Python | agpl-3.0 | 3,827 | 0.001573 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Adrien Chardon
# @Date: 2014-08-21 18:54:29
# @Last Modified by: Adrien Chardon
# @Last Modified time: 2014-12-04 19:41:49
# This file is part of iTeam.org.
# Copyright (C) 2014 Adrien Chardon (Nodraak).
#
# iTeam.org is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# iTeam.org is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with iTeam.org. If not, see <http://www.gnu.org/licenses/>.
from django import forms
from django.conf import settings
class EventForm(forms.Form):
    """Creation/edition form for an event.

    Fields cover the title, place, start date, optional illustration image,
    type/draft flags, body text and an optional attached file.  ``clean()``
    enforces the upload size limits from ``settings`` and rejects forbidden
    words in the title.
    """
    title = forms.CharField(
        label='Titre',
        widget=forms.TextInput(
            attrs={
                'autofocus': '',
                'placeholder': 'Titre'
            }
        )
    )
    place = forms.CharField(
        label='Lieu',
        widget=forms.TextInput(
            attrs={
                'placeholder': 'Lieu'
            }
        ),
        required=False
    )
    date_start = forms.DateTimeField(
        label='Date de debut',
        widget=forms.DateTimeInput(
            attrs={
                'placeholder': 'Date de début : jj/mm/aaaa hh:mm'
            },
            # BUG FIX: was '%d/%m/%Y %H:%m' -- '%m' is the month; the
            # placeholder promises hh:mm, i.e. minutes ('%M').
            format='%d/%m/%Y %H:%M',
        )
    )
    image = forms.ImageField(
        required=False
    )
    type = forms.ChoiceField(
        label='Type d\'événement',
        widget=forms.RadioSelect,
        choices=settings.EVENTS_MODEL_TYPES,
        initial='O',
        required=False,
    )
    is_draft = forms.ChoiceField(
        label='Status de l\'événement',
        widget=forms.RadioSelect,
        choices=settings.MODEL_IS_DRAFT,
        initial='1',
        required=False,
    )
    text = forms.CharField(
        label='Texte',
        widget=forms.Textarea(
            attrs={
                'placeholder': 'Texte',
                'rows': '15'
            }
        )
    )
    file = forms.FileField(
        label=u'Fichier attaché',
        allow_empty_file=True,
        required=False,
    )

    def clean(self):
        """Validate cross-field constraints.

        Oversized image/file uploads are reported via ``self._errors`` and
        removed from ``cleaned_data``; a title containing any configured
        forbidden word is rejected.
        """
        cleaned_data = super(EventForm, self).clean()
        img = cleaned_data.get('image')
        file = cleaned_data.get('file')
        if img and img.size > settings.SIZE_MAX_IMG:
            msg = (
                u'Fichier trop lourd (%d Ko / %d Ko max). Pour ne pas saturer le serveur, merci '
                u'de réduire la résolution de l\'image.') % (img.size/1024, settings.SIZE_MAX_IMG/1024)
            self._errors['image'] = self.error_class([msg])
            if 'image' in cleaned_data:
                del cleaned_data['image']
        if file and file.size > settings.SIZE_MAX_FILE:
            # BUG FIX: the reported size used ``img.size`` although this
            # branch validates the attached *file*.
            msg = (
                u'Fichier trop lourd (%d Ko / %d Ko max). Pour ne pas saturer le serveur, merci '
                u'de réduire la taille du fichier.') % (file.size/1024, settings.SIZE_MAX_FILE/1024)
            self._errors['file'] = self.error_class([msg])
            if 'file' in cleaned_data:
                del cleaned_data['file']
        bad_word = False
        title = cleaned_data.get('title')
        # BUG FIX: guard against ``title`` being absent (its own field
        # validation failed); ``word in None`` would raise TypeError.
        if title:
            for word in settings.FORBIDDEN_WORDS:
                bad_word = bad_word or (word in title)
        if bad_word:
            msg = ('Erreur, un mot interdit a été utilisé. Regardez les sources ou contacter le dev.')
            self._errors['title'] = self.error_class([msg])
        return cleaned_data
|
maximumG/exscript | tests/Exscript/emulators/IOSEmulatorTest.py | Python | mit | 1,094 | 0.003656 | from __future__ import absolute_import
import sys
import unittest
import re
import os.path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
from .VirtualDeviceTest import VirtualDeviceTest
from Exscript.emulators import IOSEmulator
from Exscript.emulators.iosemu import ios | banner
class IOSEmulatorTest(VirtualDeviceTest):
    """Run the VirtualDeviceTest suite against the Cisco IOS emulator."""
    CORRELATE = IOSEmulator
    cls = IOSEmulator
    # Expected banner/prompt strings for a host named 'myhost'.
    banner = iosbanner % ('myhost', 'myhost', 'myhost')
    prompt = 'myhost#'
    userprompt = 'Username: '
    passwdprompt = 'Password: '

    def testAddCommand(self):
        # Run the generic checks first, then verify the IOS-specific
        # built-in 'show version' response on a login-less emulator.
        VirtualDeviceTest.testAddCommand(self)
        cs = self.cls('myhost',
                      strict=True,
                      echo=False,
                      login_type=self.cls.LOGIN_TYPE_NONE)
        response = cs.do('show version')
        self.assertTrue(response.startswith(
            'Cisco Internetwork Operating'), response)
def suite():
    """Build a TestSuite with every test from IOSEmulatorTest."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(IOSEmulatorTest)
# Allow running this test module directly with verbose output.
if __name__ == '__main__':
    unittest.TextTestRunner(verbosity=2).run(suite())
|
kernelci/kernelci-backend | app/models/test_group.py | Python | lgpl-2.1 | 7,454 | 0 | # Copyright (C) Collabora Limited 2018,2019
# Author: Michal Galka <michal.galka@collabora.com>
# Author: Guillaume Tucker <guillaume.tucker@collabora.com>
# Author: Ana Guerrero Lopez <ana.guerrero@collabora.com>
#
# Copyright (C) Linaro Limited 2019
# Author: Matt Hart <matthew.hart@linaro.org>
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""The model that represent a test group document in the database."""
import copy
import types
import models
import models.base as modb
# pylint: disable=invalid-name
# pylint: disable=too-many-instance-attributes
class TestGroupDocument(modb.BaseDocument):
    """Model for a test group document.
    A test group is a document that can store test cases results, and nested
    test groups.

    Identity fields (_name, _lab_name, _build_id, _version, _id,
    _created_on) are write-once: their setters raise AttributeError on a
    second assignment.
    """
    def __init__(self, name, lab_name):
        """The test group document.
        :param name: The name given to this test group.
        :type name: string
        :param lab_name: The name of the lab running this test group.
        :type lab_name: str
        """
        self._name = name
        self._lab_name = lab_name
        self._build_id = None
        self._version = None
        self._id = None
        self._created_on = None
        # Free-form metadata describing the build/boot under test; all
        # default to None and are typically filled in via from_json().
        self.arch = None
        self.board_instance = None
        self.boot_log = None
        self.boot_log_html = None
        self.boot_result_description = None
        self.build_environment = None
        self.compiler = None
        self.compiler_version = None
        self.compiler_version_full = None
        self.cross_compile = None
        self.defconfig = None
        self.defconfig_full = None
        self.device_type = None
        self.dtb = None
        self.endian = None
        self.file_server_resource = None
        self.git_branch = None
        self.git_commit = None
        self.git_describe = None
        self.git_url = None
        self.index = None
        self.initrd = None
        self.initrd_info = None
        self.job = None
        self.job_id = None
        self.kernel = None
        self.kernel_image = None
        self.mach = None
        self.modules = None
        self.plan_variant = None
        self.test_cases = []
        self.parent_id = None
        self.sub_groups = []
        self.time = -1
        self.warnings = 0
    @property
    def collection(self):
        # MongoDB collection this document type is stored in.
        return models.TEST_GROUP_COLLECTION
    @property
    def name(self):
        """The name of the test group."""
        return self._name
    @property
    def lab_name(self):
        """The name of the test lab."""
        return self._lab_name
    @property
    def build_id(self):
        """The ID of the build."""
        return self._build_id
    @build_id.setter
    def build_id(self, value):
        """Set the ID of the build."""
        if self._build_id:
            raise AttributeError("Build ID already set")
        self._build_id = value
    @property
    def version(self):
        """The schema version of this test group."""
        return self._version
    @version.setter
    def version(self, value):
        """Set the schema version of this test group."""
        if self._version:
            raise AttributeError("Schema version already set")
        self._version = value
    @property
    def id(self):
        """The ID of the test group as registered in the database."""
        return self._id
    @id.setter
    def id(self, value):
        """Set the test group ID."""
        if self._id:
            raise AttributeError("ID already set")
        self._id = value
    @property
    def created_on(self):
        """The creation date of this test group."""
        return self._created_on
    @created_on.setter
    def created_on(self, value):
        """Set the creation date of this test group."""
        if self._created_on:
            raise AttributeError("Creation date already set")
        self._created_on = value
    def to_dict(self):
        # Serialize to a plain dict keyed by the schema constants from
        # ``models``; ID_KEY is included only once the document has an id.
        test_group = {
            models.ARCHITECTURE_KEY: self.arch,
            models.BOARD_INSTANCE_KEY: self.board_instance,
            models.BOOT_LOG_KEY: self.boot_log,
            models.BOOT_LOG_HTML_KEY: self.boot_log_html,
            models.BOOT_RESULT_DESC_KEY: self.boot_result_description,
            models.BUILD_ENVIRONMENT_KEY: self.build_environment,
            models.BUILD_ID_KEY: self.build_id,
            models.COMPILER_KEY: self.compiler,
            models.COMPILER_VERSION_FULL_KEY: self.compiler_version_full,
            models.COMPILER_VERSION_KEY: self.compiler_version,
            models.CROSS_COMPILE_KEY: self.cross_compile,
            models.CREATED_KEY: self.created_on,
            models.DEFCONFIG_FULL_KEY: self.defconfig_full or self.defconfig,
            models.DEFCONFIG_KEY: self.defconfig,
            models.DEVICE_TYPE_KEY: self.device_type,
            models.DTB_KEY: self.dtb,
            models.ENDIANNESS_KEY: self.endian,
            models.FILE_SERVER_RESOURCE_KEY: self.file_server_resource,
            models.GIT_BRANCH_KEY: self.git_branch,
            models.GIT_COMMIT_KEY: self.git_commit,
            models.GIT_DESCRIBE_KEY: self.git_describe,
            models.GIT_URL_KEY: self.git_url,
            models.INDEX_KEY: self.index,
            models.INITRD_KEY: self.initrd,
            models.INITRD_INFO_KEY: self.initrd_info,
            models.JOB_ID_KEY: self.job_id,
            models.JOB_KEY: self.job,
            models.KERNEL_KEY: self.kernel,
            models.KERNEL_IMAGE_KEY: self.kernel_image,
            models.LAB_NAME_KEY: self.lab_name,
            models.MACH_KEY: self.mach,
            models.MODULES_KEY: self.modules,
            models.NAME_KEY: self.name,
            models.PLAN_VARIANT_KEY: self.plan_variant,
            models.TEST_CASES_KEY: self.test_cases,
            models.PARENT_ID_KEY: self.parent_id,
            models.SUB_GROUPS_KEY: self.sub_groups,
            models.TIME_KEY: self.time,
            models.VERSION_KEY: self.version,
            models.WARNINGS_KEY: self.warnings,
        }
        if self.id:
            test_group[models.ID_KEY] = self.id
        return test_group
    @staticmethod
    def from_json(json_obj):
        # Build a TestGroupDocument from a JSON-derived dict, or return
        # None when the input is not a dict or lacks mandatory keys.
        # NOTE: ``types.DictionaryType`` and ``iteritems`` are Python 2
        # only -- this module predates a py3 port.
        test_group = None
        if isinstance(json_obj, types.DictionaryType):
            local_obj = copy.deepcopy(json_obj)
            doc_pop = local_obj.pop
            try:
                name = doc_pop(models.NAME_KEY)
                lab_name = doc_pop(models.LAB_NAME_KEY)
                test_group = TestGroupDocument(name, lab_name)
                for key, val in local_obj.iteritems():
                    setattr(test_group, key, val)
            except KeyError:
                # Missing mandatory key? Return None.
                test_group = None
        return test_group
|
RescueTime/cwmon-mysql | tests/test_cwmon_mysql.py | Python | bsd-2-clause | 1,999 | 0 | # -*- encoding: utf-8 -*-
"""Tests for the monitoring CLI.
.. danger:: Y | ou **must** pass the `--dry-run` flag in all tests. Failure to do
so will result in flooding AWS CloudWatch with bogus stats.
"""
from click.testing import CliRunner
from cwmon.cli import cwmon
def _run_mysql_metric(name, *args):
    """Invoke ``cwmon mysql <name> <args...>`` in dry-run mode.

    ``--dry-run`` is always passed so no stats ever reach CloudWatch.
    Returns the click ``Result`` of the invocation.
    """
    argv = ['--dry-run', 'mysql']
    if name:
        argv.append(name)
    argv.extend(args)
    return CliRunner().invoke(cwmon, argv)
def test_mysql_registered_correctly():
    """The bare ``mysql`` group prints its usage text and exits cleanly."""
    outcome = _run_mysql_metric('')
    assert outcome.exit_code == 0
    assert outcome.output.startswith('Usage')
def test_deadlocks():
    """The ``deadlocks`` metric command succeeds in dry-run mode."""
    assert _run_mysql_metric('deadlocks').exit_code == 0
def test_uptime():
    """The ``uptime`` metric command succeeds in dry-run mode."""
    assert _run_mysql_metric('uptime').exit_code == 0
def test_running_threads():
    """The ``running_threads`` metric command succeeds in dry-run mode."""
    assert _run_mysql_metric('running_threads').exit_code == 0
def test_questions():
    """The ``questions`` metric command succeeds in dry-run mode."""
    assert _run_mysql_metric('questions').exit_code == 0
def test_slow_queries():
    """The ``slow_queries`` metric command succeeds in dry-run mode."""
    assert _run_mysql_metric('slow_queries').exit_code == 0
def test_open_files():
    """The ``open_files`` metric command succeeds in dry-run mode."""
    assert _run_mysql_metric('open_files').exit_code == 0
def test_open_tables():
    """The ``open_tables`` metric command succeeds in dry-run mode."""
    assert _run_mysql_metric('open_tables').exit_code == 0
def test_seconds_behind_master():
    """The ``seconds_behind_master`` (slave lag) command succeeds in dry-run mode."""
    assert _run_mysql_metric('seconds_behind_master').exit_code == 0
|
mansonul/events | events/contrib/plugins/form_elements/fields/select_multiple/apps.py | Python | mit | 516 | 0 | __title__ = 'fobi.contrib.plugins.form_elements.fields.select_multiple.apps'
__author_ | _ = 'Artur Barseghyan <artur.barseghyan@gmail.com>'
__copyright__ = '2014-2017 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
__all__ = ('Config',)
# AppConfig only exists in Django >= 1.7; on older Django the import fails
# and the app simply goes without an explicit configuration class.
try:
    from django.apps import AppConfig
    class Config(AppConfig):
        """Config."""
        name = 'fobi.contrib.plugins.form_elements.fields.select_multiple'
        label = 'fobi_contrib_plugins_form_elements_fields_select_multiple'
except ImportError:
    pass
|
craffel/librosa | tests/test_output.py | Python | isc | 3,760 | 0.000798 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
'''Tests for output functions'''
# Disable cache
import os
# Drop any user-configured cache dir so librosa runs uncached and the tests
# compute fresh, deterministic results.
try:
    os.environ.pop('LIBROSA_CACHE_DIR')
except KeyError:  # narrowed from a bare except: pop() only raises KeyError
    pass
import librosa
import numpy as np
import tempfile
from nose.tools import raises, eq_
def test_write_wav():
    # Round-trip test: write audio to a temp wav and reload it; the reloaded
    # signal must match (after normalization when norm=True).  Yields one
    # sub-test per (mono, norm) combination (nose generator style).
    def __test(mono, norm):
        y, sr = librosa.load('data/test1_22050.wav', sr=None, mono=mono)
        _, tfname = tempfile.mkstemp()
        librosa.output.write_wav(tfname, y, sr, norm=norm)
        y_2, sr2 = librosa.load(tfname, sr=None, mono=mono)
        os.unlink(tfname)
        librosa.util.valid_audio(y_2, mono=mono)
        assert np.allclose(sr2, sr)
        if norm:
            # Writing with norm=True rescales, so compare both sides normalized.
            assert np.allclose(librosa.util.normalize(y, axis=-1),
                               librosa.util.normalize(y_2, axis=-1),
                               rtol=1e-3, atol=1e-4)
        else:
            assert np.allclose(y, y_2, rtol=1e-3, atol=1e-4)
    for mono in [False, True]:
        for norm in [False, True]:
            yield __test, mono, norm
def test_times_csv():
    # Write times (optionally with annotations) to CSV and parse the file
    # back, checking values line by line.  Mismatched annotation lengths
    # must raise ParameterError (wrapped via the nose ``raises`` decorator).
    def __test(times, annotations, sep):
        _, tfname = tempfile.mkstemp()
        # Dump to disk
        librosa.output.times_csv(tfname, times, annotations=annotations,
                                 delimiter=sep)
        # Load it back
        with open(tfname, 'r') as fdesc:
            lines = [line for line in fdesc]
        # Remove the file
        os.unlink(tfname)
        for i, line in enumerate(lines):
            if annotations is None:
                t_in = line.strip()
            else:
                t_in, ann_in = line.strip().split(sep, 2)
            t_in = float(t_in)
            assert np.allclose(times[i], t_in, atol=1e-3, rtol=1e-3)
            if annotations is not None:
                eq_(str(annotations[i]), ann_in)
    __test_fail = raises(librosa.ParameterError)(__test)
    for times in [[], np.linspace(0, 10, 20)]:
        for annotations in [None, ['abcde'[q] for q in np.random.randint(0, 5,
                                   size=len(times))], list('abcde')]:
            for sep in [',', '\t', ' ']:
                if annotations is not None and len(annotations) != len(times):
                    yield __test_fail, times, annotations, sep
                else:
                    yield __test, times, annotations, sep
def test_annotation():
    # Same scheme as test_times_csv but with (start, end) interval rows:
    # write annotation intervals, re-parse the file and compare per line.
    def __test(times, annotations, sep):
        _, tfname = tempfile.mkstemp()
        # Dump to disk
        librosa.output.annotation(tfname, times, annotations=annotations,
                                  delimiter=sep)
        # Load it back
        with open(tfname, 'r') as fdesc:
            lines = [line for line in fdesc]
        # Remove the file
        os.unlink(tfname)
        for i, line in enumerate(lines):
            if annotations is None:
                t_in1, t_in2 = line.strip().split(sep, 2)
            else:
                t_in1, t_in2, ann_in = line.strip().split(sep, 3)
            t_in1 = float(t_in1)
            t_in2 = float(t_in2)
            assert np.allclose(times[i], [t_in1, t_in2],
                               atol=1e-3, rtol=1e-3)
            if annotations is not None:
                eq_(str(annotations[i]), ann_in)
    __test_fail = raises(librosa.ParameterError)(__test)
    times = np.random.randn(20, 2)
    for annotations in [None, ['abcde'[q] for q in np.random.randint(0, 5,
                               size=len(times))], list('abcde')]:
        for sep in [',', '\t', ' ']:
            if annotations is not None and len(annotations) != len(times):
                yield __test_fail, times, annotations, sep
            else:
                yield __test, times, annotations, sep
|
privacyidea/privacyidea | tests/smtpmock.py | Python | agpl-3.0 | 7,934 | 0.000378 | # -*- coding: utf-8 -*-
"""
2016-01-20 Cornelius Kölbel <cornelius@privacyidea.org>
Support STARTTLS mock
2015-01-30 Cornelius Kölbel <cornelius@privacyidea.org>
Change responses.py to be able to run with SMTP
Original responses.py is:
Copyright 2013 Dropbox, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import (
absolute_import, print_function, division, unicode_literals
)
import six
import smtplib
try:
from inspect import formatargspec, getfullargspec as getargspec
except ImportError:
from inspect import formatargspec, getargspec
from collections import namedtuple, Sequence, Sized
from functools import update_wrapper
from smtplib import SMTPException
Call = namedtuple('Call', ['request', 'response'])
_wrapper_template = """\
def wrapper%(signature)s:
with smtpmock:
return func%(funcargs)s
"""
def get_wrapped(func, wrapper_template, evaldict):
    # Preserve the argspec for the wrapped function so that testing
    # tools such as pytest can continue to use their fixture injection.
    # The wrapper is generated by exec-ing ``wrapper_template`` with the
    # original signature spliced in, then rebound as a method if needed.
    args = getargspec(func)
    # NOTE(review): ``values`` is computed but never used.
    values = args.args[-len(args.defaults):] if args.defaults else None
    signature = formatargspec(*args)
    is_bound_method = hasattr(func, '__self__')
    if is_bound_method:
        args.args = args.args[1:]     # Omit 'self'
    callargs = formatargspec(*args, formatvalue=lambda v: '=' + v)
    ctx = {'signature': signature, 'funcargs': callargs}
    six.exec_(wrapper_template % ctx, evaldict)
    wrapper = evaldict['wrapper']
    update_wrapper(wrapper, func)
    if is_bound_method:
        # Re-bind the generated wrapper to the original instance.
        wrapper = wrapper.__get__(func.__self__, type(func.__self__))
    return wrapper
class CallList(Sequence, Sized):
    """An ordered, sequence-like record of mocked SMTP calls."""

    def __init__(self):
        # Backing store; each entry is a ``Call`` namedtuple.
        self._calls = []

    def __iter__(self):
        """Iterate over the recorded calls in order."""
        return iter(self._calls)

    def __len__(self):
        """Number of calls recorded so far."""
        return len(self._calls)

    def __getitem__(self, index):
        """Return the recorded call at position *index*."""
        return self._calls[index]

    def setdata(self, request, response):
        """Record one request/response pair."""
        self._calls.append(Call(request, response))

    def reset(self):
        """Forget every recorded call."""
        self._calls = []
class SmtpMock(object):
    """Monkey-patches smtplib.SMTP so no real mail is ever sent.

    Usable as a context manager (``with smtpmock:``) or as a decorator via
    ``activate``.  ``setdata`` configures the canned responses; the last
    message handed to sendmail is captured in ``sent_message``.
    """
    def __init__(self):
        self._calls = CallList()
        self.sent_message = None
        self.smtp_ssl = False
        self.reset()
    def reset(self):
        # Clear both the configured canned data and the call log.
        self._request_data = {}
        self._calls.reset()
    def get_smtp_ssl(self):
        return self.smtp_ssl
    def setdata(self, response=None, authenticated=True,
                config=None, exception=False, support_tls=True):
        # Configure the behaviour of the patched SMTP methods:
        # ``exception`` makes starttls() raise; ``support_tls=False`` makes
        # it report no TLS support; ``authenticated=False`` simulates a 530.
        if response is None:
            response = {}
        config = config or {}
        self.support_tls = support_tls
        self.exception = exception
        self._request_data = {
            'response': response,
            'authenticated': authenticated,
            'config': config,
            'recipient': config.get("MAILTO")
        }
    def get_sent_message(self):
        return self.sent_message
    @property
    def calls(self):
        return self._calls
    def __enter__(self):
        self.start()
    def __exit__(self, *args):
        self.stop()
        self.reset()
    def activate(self, func):
        # Decorator form: wrap ``func`` so it runs inside this mock context.
        evaldict = {'smtpmock': self, 'func': func}
        return get_wrapped(func, _wrapper_template, evaldict)
    def _on_request(self, SMTP_instance, sender, recipient, msg):
        # mangle request packet
        response = self._request_data.get("response")
        if not self._request_data.get("authenticated"):
            response = {self._request_data.get("recipient"):
                        (530, "Authorization required (#5.7.1)")}
        return response
    def _on_login(self, SMTP_instance, username, password):
        # mangle request packet
        if self._request_data.get("authenticated"):
            response = (235, "Authentication successful.")
        else:
            response = (535, "authentication failed (#5.7.1)")
        return {self._request_data.get("recipient"): response}
    # def _on_init(self, SMTP_instance, host, port=25, timeout=3):
    def _on_init(self, *args, **kwargs):
        # Replacement for SMTP.__init__: records connection parameters
        # without opening a socket.
        SMTP_instance = args[0]
        host = args[1]
        if isinstance(SMTP_instance, smtplib.SMTP_SSL):
            # in case we need sth. to do with SMTL_SSL
            self.smtp_ssl = True
        # mangle request packet
        self.timeout = kwargs.get("timeout", 10)
        self.port = kwargs.get("port", 25)
        self.esmtp_features = {}
        return None
    @staticmethod
    def _on_debuglevel(SMTP_instance, level):
        return None
    @staticmethod
    def _on_quit(SMTP_instance):
        return None
    def _on_starttls(self, SMTP_instance):
        # Honour the failure modes configured through setdata().
        if self.exception:
            raise SMTPException("MOCK TLS ERROR")
        if not self.support_tls:
            raise SMTPException("The SMTP Server does not support TLS.")
        return None
    def start(self):
        # Install all smtplib.SMTP patches.  Import of ``mock`` is local so
        # the library is only required when the mock is actually used.
        import mock
        def unbound_on_send(SMTP, sender, recipient, msg, *a, **kwargs):
            self.sent_message = msg
            return self._on_request(SMTP, sender, recipient, msg, *a, **kwargs)
        self._patcher = mock.patch('smtplib.SMTP.sendmail',
                                   unbound_on_send)
        self._patcher.start()
        def unbound_on_login(SMTP, username, password, *a, **kwargs):
            return self._on_login(SMTP, username, password, *a, **kwargs)
        self._patcher2 = mock.patch('smtplib.SMTP.login',
                                    unbound_on_login)
        self._patcher2.start()
        def unbound_on_init(SMTP, server, *a, **kwargs):
            return self._on_init(SMTP, server, *a, **kwargs)
        self._patcher3 = mock.patch('smtplib.SMTP.__init__',
                                    unbound_on_init)
        self._patcher3.start()
        def unbound_on_debuglevel(SMTP, level, *a, **kwargs):
            return self._on_debuglevel(SMTP, level, *a, **kwargs)
        self._patcher4 = mock.patch('smtplib.SMTP.debuglevel',
                                    unbound_on_debuglevel)
        self._patcher4.start()
        def unbound_on_quit(SMTP, *a, **kwargs):
            return self._on_quit(SMTP, *a, **kwargs)
        def unbound_on_starttls(SMTP, *a, **kwargs):
            return self._on_starttls(SMTP, *a, **kwargs)
        self._patcher5 = mock.patch('smtplib.SMTP.quit',
                                    unbound_on_quit)
        self._patcher5.start()
        def unbound_on_empty(SMTP, *a, **kwargs):
            return None
        self._patcher6 = mock.patch('smtplib.SMTP.ehlo',
                                    unbound_on_empty)
        self._patcher6.start()
        self._patcher7 = mock.patch('smtplib.SMTP.close',
                                    unbound_on_empty)
        self._patcher7.start()
        self._patcher8 = mock.patch('smtplib.SMTP.starttls',
                                    unbound_on_starttls)
        self._patcher8.start()
    def stop(self):
        # Remove every patch installed by start(); must mirror its order.
        self._patcher.stop()
        self._patcher2.stop()
        self._patcher3.stop()
        self._patcher4.stop()
        self._patcher5.stop()
        self._patcher6.stop()
        self._patcher7.stop()
        self._patcher8.stop()
# expose default mock namespace
# A module-level singleton mirrors the ``responses`` library API: every
# public attribute of the default SmtpMock instance is re-exported from the
# module itself (e.g. ``smtpmock.setdata(...)``).
mock = _default_mock = SmtpMock()
__all__ = []
for __attr in (a for a in dir(_default_mock) if not a.startswith('_')):
    __all__.append(__attr)
    globals()[__attr] = getattr(_default_mock, __attr)
|
sajuptpm/neutron-ipam | neutron/tests/unit/vmware/test_nsx_utils.py | Python | apache-2.0 | 15,554 | 0 | # Copyright (c) 2013 VMware.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from neutron.db import api as db_api
from neutron.openstack.common import uuidutils
from neutron.plugins.vmware.api_client import exception as api_exc
from neutron.plugins.vmware.common import exceptions as nsx_exc
from neutron.plugins.vmware.common import nsx_utils
from neutron.plugins.vmware.common import utils
from neutron.plugins.vmware import nsxlib
from neutron.tests import base
from neutron.tests.unit.vmware import nsx_method
from neutron.tests.unit.vmware.nsxlib import base as nsx_base
class NsxUtilsTestCase(base.BaseTestCase):
    def _mock_port_mapping_db_calls(self, ret_value):
        # Mock relevant db calls
        # This will allow for avoiding setting up the plugin
        # for creating db entries
        # The *port*-mapping getter returns ``ret_value`` unchanged.
        # NOTE(review): patches are started but never stopped here --
        # presumably cleaned up by the base test class; confirm.
        mock.patch(nsx_method('get_nsx_switch_and_port_id',
                              module_name='dbexts.db'),
                   return_value=ret_value).start()
        mock.patch(nsx_method('add_neutron_nsx_port_mapping',
                              module_name='dbexts.db')).start()
        mock.patch(nsx_method('delete_neutron_nsx_port_mapping',
                              module_name='dbexts.db')).start()
    def _mock_network_mapping_db_calls(self, ret_value):
        # Mock relevant db calls
        # This will allow for avoiding setting up the plugin
        # for creating db entries
        # The *network*-mapping getter returns ``ret_value`` unchanged.
        mock.patch(nsx_method('get_nsx_switch_ids',
                              module_name='dbexts.db'),
                   return_value=ret_value).start()
        mock.patch(nsx_method('add_neutron_nsx_network_mapping',
                              module_name='dbexts.db')).start()
    def _mock_router_mapping_db_calls(self, ret_value):
        # Mock relevant db calls
        # This will allow for avoiding setting up the plugin
        # for creating db entries
        # The *router*-mapping getter returns ``ret_value`` unchanged.
        mock.patch(nsx_method('get_nsx_router_id',
                              module_name='dbexts.db'),
                   return_value=ret_value).start()
        mock.patch(nsx_method('add_neutron_nsx_router_mapping',
                              module_name='dbexts.db')).start()
    def _verify_get_nsx_switch_and_port_id(self, exp_ls_uuid, exp_lp_uuid):
        # The nsxlib and db calls are mocked, therefore the cluster
        # and the neutron_port_id parameters can be set to None
        ls_uuid, lp_uuid = nsx_utils.get_nsx_switch_and_port_id(
            db_api.get_session(), None, None)
        self.assertEqual(exp_ls_uuid, ls_uuid)
        self.assertEqual(exp_lp_uuid, lp_uuid)
    def _verify_get_nsx_switch_ids(self, exp_ls_uuids):
        # The nsxlib and db calls are mocked, therefore the cluster
        # and the neutron_router_id parameters can be set to None
        # Order-insensitive comparison: consume expected uuids one by one
        # (mutates the caller's list) and require all to be matched.
        ls_uuids = nsx_utils.get_nsx_switch_ids(
            db_api.get_session(), None, None)
        for ls_uuid in ls_uuids or []:
            self.assertIn(ls_uuid, exp_ls_uuids)
            exp_ls_uuids.remove(ls_uuid)
        self.assertFalse(exp_ls_uuids)
    def _verify_get_nsx_router_id(self, exp_lr_uuid):
        # The nsxlib and db calls are mocked, therefore the cluster
        # and the neutron_router_id parameters can be set to None
        lr_uuid = nsx_utils.get_nsx_router_id(db_api.get_session(), None, None)
        self.assertEqual(exp_lr_uuid, lr_uuid)
    def test_get_nsx_switch_and_port_id_from_db_mappings(self):
        # This test is representative of the 'standard' case in which both the
        # switch and the port mappings were stored in the neutron db
        exp_ls_uuid = uuidutils.generate_uuid()
        exp_lp_uuid = uuidutils.generate_uuid()
        ret_value = exp_ls_uuid, exp_lp_uuid
        self._mock_port_mapping_db_calls(ret_value)
        self._verify_get_nsx_switch_and_port_id(exp_ls_uuid, exp_lp_uuid)
    def test_get_nsx_switch_and_port_id_only_port_db_mapping(self):
        # This test is representative of the case in which a port with a nsx
        # db mapping in the havana db was upgraded to icehouse
        # The missing switch uuid must be recovered from the NSX backend
        # (mocked query_lswitch_lports).
        exp_ls_uuid = uuidutils.generate_uuid()
        exp_lp_uuid = uuidutils.generate_uuid()
        ret_value = None, exp_lp_uuid
        self._mock_port_mapping_db_calls(ret_value)
        with mock.patch(nsx_method('query_lswitch_lports',
                                   module_name='nsxlib.switch'),
                        return_value=[{'uuid': exp_lp_uuid,
                                       '_relations': {
                                           'LogicalSwitchConfig': {
                                               'uuid': exp_ls_uuid}
                                       }}]):
            self._verify_get_nsx_switch_and_port_id(exp_ls_uuid, exp_lp_uuid)
    def test_get_nsx_switch_and_port_id_no_db_mapping(self):
        # This test is representative of the case where db mappings where not
        # found for a given port identifier
        # Both uuids must then come from the (mocked) backend query.
        exp_ls_uuid = uuidutils.generate_uuid()
        exp_lp_uuid = uuidutils.generate_uuid()
        ret_value = None, None
        self._mock_port_mapping_db_calls(ret_value)
        with mock.patch(nsx_method('query_lswitch_lports',
                                   module_name='nsxlib.switch'),
                        return_value=[{'uuid': exp_lp_uuid,
                                       '_relations': {
                                           'LogicalSwitchConfig': {
                                               'uuid': exp_ls_uuid}
                                       }}]):
            self._verify_get_nsx_switch_and_port_id(exp_ls_uuid, exp_lp_uuid)
    def test_get_nsx_switch_and_port_id_no_mappings_returns_none(self):
        # This test verifies that the function return (None, None) if the
        # mappings are not found both in the db and the backend
        ret_value = None, None
        self._mock_port_mapping_db_calls(ret_value)
        with mock.patch(nsx_method('query_lswitch_lports',
                                   module_name='nsxlib.switch'),
                        return_value=[]):
            self._verify_get_nsx_switch_and_port_id(None, None)
    def test_get_nsx_switch_ids_from_db_mappings(self):
        # This test is representative of the 'standard' case in which the
        # lswitch mappings were stored in the neutron db
        exp_ls_uuids = [uuidutils.generate_uuid()]
        self._mock_network_mapping_db_calls(exp_ls_uuids)
        self._verify_get_nsx_switch_ids(exp_ls_uuids)
    def test_get_nsx_switch_ids_no_db_mapping(self):
        # This test is representative of the case where db mappings where not
        # found for a given network identifier
        # The switch uuids must then come from the (mocked) backend query.
        exp_ls_uuids = [uuidutils.generate_uuid()]
        self._mock_network_mapping_db_calls(None)
        with mock.patch(nsx_method('get_lswitches',
                                   module_name='nsxlib.switch'),
                        return_value=[{'uuid': uuid}
                                      for uuid in exp_ls_uuids]):
            self._verify_get_nsx_switch_ids(exp_ls_uuids)
    def test_get_nsx_switch_ids_no_mapping_returns_None(self):
        # This test verifies that the function returns None if the mappings
        # are not found both in the db and in the backend
        self._mock_network_mapping_db_calls(None)
        with mock.patch(nsx_method('get_lswitches',
                                   module_name='nsxlib.switch'),
                        return_value=[]):
            self._verify_get_nsx_switch_ids(None)
def test_get_nsx_router_id_from_db_mappings(self):
# This test is representative of the 'standard' case in which the
# router mapping was stored in th |
spreeker/democracygame | external_apps/docutils-snapshot/test/functional/tests/standalone_rst_s5_html_1.py | Python | bsd-3-clause | 2,406 | 0.001247 | exec(open('functional/tests/_standalone_rst_defaults.py').read())
# Source and destination file names:
# (``settings_overrides`` itself is defined by the exec'd defaults script.)
test_source = 'standalone_rst_s5_html.txt'
test_destination = 'standalone_rst_s5_html_1.html'
# Keyword parameters passed to publish_file:
writer_name = 's5_html'
# Settings:
settings_overrides['theme'] = 'small-black'
# Extra functional tests.
# Prefix all names with '_' to avoid confusing `docutils.core.publish_file`.
import filecmp as _filecmp
def _test_more(expected_dir, output_dir, test_case, parameters):
    """Compare ``ui/<theme>`` directories.

    Fails the given ``test_case`` with actionable shell commands (diff, mv,
    svn commit) when the generated theme files differ from the expected
    ones or when either side contains files the other lacks.
    """
    theme = settings_overrides.get('theme', 'default')
    expected = '%s/%s/%s' % (expected_dir, 'ui', theme)
    output = '%s/%s/%s' % (output_dir, 'ui', theme)
    differences, uniques = _compare_directories(expected, output)
    parts = []
    if differences:
        parts.append('The following files differ from the expected output:')
        parts.extend(differences)
        # Map each differing output path back to its expected counterpart.
        expected = [path.replace('functional/output/', 'functional/expected/')
                    for path in differences]
        parts.append('Please compare the expected and actual output files:')
        parts.extend(['  diff %s %s' % tup
                      for tup in zip(expected, differences)])
        parts.append('If the actual output is correct, please replace the '
                     'expected output files:')
        parts.extend(['  mv %s %s' % tup
                      for tup in zip(differences, expected)])
        parts.append('and check them in to Subversion:')
        parts.extend(['  svn commit -m "<comment>" %s' % path
                      for path in expected])
    if uniques:
        parts.append('The following paths are unique:')
        parts.extend(uniques)
    test_case.assert_(not parts, '\n'.join(parts))
def _compare_directories(expected, output):
    """Recursively diff two directory trees, ignoring VCS bookkeeping dirs.

    Returns ``(differences, uniques)``: output-side paths whose content
    differs, and paths existing on only one side.
    """
    cmp_result = _filecmp.dircmp(expected, output, ['.svn', 'CVS'])
    differences = ['%s/%s' % (output, name) for name in cmp_result.diff_files]
    uniques = ['%s/%s' % (expected, name) for name in cmp_result.left_only]
    uniques += ['%s/%s' % (output, name) for name in cmp_result.right_only]
    for child in cmp_result.common_dirs:
        sub_diffs, sub_uniques = _compare_directories(
            '%s/%s' % (expected, child), '%s/%s' % (output, child))
        differences += sub_diffs
        uniques += sub_uniques
    return differences, uniques
|
itucsdb1517/itucsdb1517 | classes/operations/position_operations.py | Python | gpl-3.0 | 3,765 | 0.00425 | import psycopg2 as dbapi2
from classes.position import Position
from classes.model_config import dsn, connection
class position_operations:
    """CRUD helpers for the ``position`` table (psycopg2/PostgreSQL).

    Each call opens its own connection via the module-level ``dsn`` and
    closes it in ``finally``; mutating methods return one of the strings
    'success', 'integrityerror' or 'databaseerror'.
    """
    def __init__(self):
        # NOTE(review): ``last_key`` appears to be unused.
        self.last_key=None
    def get_positions(self):
        """Return all non-deleted positions as (key, Position) pairs."""
        positions=[]
        global connection
        try:
            connection = dbapi2.connect(dsn)
            cursor = connection.cursor()
            statement = """SELECT objectid, name FROM position WHERE position.deleted = 0 ORDER BY objectid"""
            cursor.execute(statement)
            positions = [(key, Position(key,name,0)) for key, name in cursor]
            cursor.close()
        except dbapi2.DatabaseError:
            if connection:
                connection.rollback()
        finally:
            if connection:
                connection.close()
        return positions
    def add_position(self,Position):
        """Insert a new position row; only the name is stored."""
        global connection
        try:
            connection = dbapi2.connect(dsn)
            cursor = connection.cursor()
            cursor.execute("""INSERT INTO position (name) VALUES (%s)""",(Position.name,))
            cursor.close()
            connection.commit()
            result = 'success'
        except dbapi2.IntegrityError:
            result = 'integrityerror'
            if connection:
                connection.rollback()
        except dbapi2.DatabaseError:
            result = 'databaseerror'
            if connection:
                connection.rollback()
        finally:
            if connection:
                connection.close()
        return result
    def get_position(self, key):
        """Fetch one non-deleted position by its objectid.

        NOTE(review): if the row does not exist, ``fetchone()`` returns
        None and the unpacking raises TypeError (not caught here).
        """
        global connection
        try:
            connection = dbapi2.connect(dsn)
            cursor = connection.cursor()
            statement = """SELECT objectid, name FROM position where (objectid=%s and deleted=0)"""
            cursor.execute(statement, (key,))
            id,name=cursor.fetchone()
            cursor.close()
        except dbapi2.DatabaseError:
            if connection:
                connection.rollback()
        finally:
            if connection:
                connection.close()
        return Position(id, name, 0)
    def update_position(self, key, name):
        """Rename the position identified by ``key``."""
        global connection
        try:
            connection = dbapi2.connect(dsn)
            cursor = connection.cursor()
            statement = """update position set (name) = (%s) where (objectid=(%s))"""
            cursor.execute(statement, (name, key,))
            connection.commit()
            cursor.close()
            result = 'success'
        except dbapi2.IntegrityError:
            result = 'integrityerror'
            if connection:
                connection.rollback()
        except dbapi2.DatabaseError:
            result = 'databaseerror'
            if connection:
                connection.rollback()
        finally:
            if connection:
                connection.close()
        return result
    def delete_position(self,key):
        """Hard-delete the position identified by ``key``.

        NOTE(review): the commented statement suggests a soft delete was
        intended (readers filter on ``deleted = 0``) -- confirm which
        semantics the application expects.
        """
        global connection
        try:
            connection = dbapi2.connect(dsn)
            cursor = connection.cursor()
            # statement = """update position set deleted = 1 where (objectid=(%s))"""
            statement = """delete from position where (objectid=(%s))"""
            cursor.execute(statement, (key,))
            connection.commit()
            cursor.close()
            result = 'success'
        except dbapi2.IntegrityError:
            result = 'integrityerror'
            if connection:
                connection.rollback()
        except dbapi2.DatabaseError:
            result = 'databaseerror'
            if connection:
                connection.rollback()
        finally:
            if connection:
                connection.close()
        return result
burgerdev/hostload | visualization/xor2.py | Python | mit | 1,409 | 0.00071 | # -*- coding: utf-8 -*-
"""
Created on Thu Apr 21 22:48:37 2016
@author: burger
"""
import numpy as np
from matplotlib import pyplot as plt
def sigma(x, a=1, b=0):
    """Logistic sigmoid of the affine map ``a*x + b``."""
    z = a * x + b
    return 1.0 / (1.0 + np.exp(-z))
# Four 2-D inputs approximating the XOR corner points (slightly perturbed).
x = np.asarray([[0.0, .1], [0, 1], [.9, .05], [.9, .95]])
# One distinct marker per data point.
markers = 'v<>^'
# Project each 2-D point onto the diagonal direction (.5, .5).
a = .5*np.ones((2,))
proj = np.dot(x, a)
def trafo(x, y):
    """Map the two coordinates through differently-scaled sigmoids."""
    return sigma(x, a=2, b=-2), sigma(y, a=5, b=0)
# Dense 1-D parameter line used for the projection curve and the grid image.
proj_line = np.arange(-50, 50, .02)
proj_transformed_x, proj_transformed_y = trafo(proj_line, proj_line)
proj_x, proj_y = trafo(proj, proj)
# Midpoints of the two XOR classes (corrupted-extraction fix below at the
# plt.plot calls; logic otherwise unchanged).
a = (x[0] + x[3])/2
b = (x[1] + x[2])/2
c = (a + b)/2  # NOTE(review): unused below
# Slope of the line through the transformed extreme points.
m = (proj_y[3] - proj_y[0])/(proj_x[3] - proj_x[0])
X = np.mean(proj_x) + proj_line
Y = np.mean(proj_y) + m*proj_line
plt.figure()
# NOTE(review): plt.hold() was removed in matplotlib >= 3.0; fine for the
# 2016-era matplotlib this script targeted, but drop it when upgrading.
plt.hold(True)
ms = 10
for i in range(len(x)):
    plt.plot(x[i, 0], x[i, 1], 'g'+markers[i], MarkerSize=ms)
    plt.plot(proj[i], proj[i], 'b'+markers[i], MarkerSize=ms)
    plt.plot(proj_x[i], proj_y[i], 'r'+markers[i], MarkerSize=ms)
dots = 3
plt.plot(proj_line, proj_line, 'k.', MarkerSize=dots)
plt.plot(proj_transformed_x, proj_transformed_y, 'r.', MarkerSize=dots)
plt.plot(X, Y, 'k')
# Draw the image of the axis-aligned grid under the sigmoid map; ``x`` is
# deliberately reused here as the scalar grid offset.
for x in proj_line[::4]:
    a, b = trafo(proj_line, x*np.ones_like(proj_line))
    plt.plot(a, b, 'k')
    a, b = trafo(x*np.ones_like(proj_line), proj_line)
    plt.plot(a, b, 'k')
    #plot(proj_line, y*np.ones_like(proj_line), 'k')
plt.xlim([-.05, 1.05])
plt.ylim([-.05, 1.05])
plt.show()
Tesora-Release/tesora-trove | trove/guestagent/strategies/restore/db2_impl.py | Python | apache-2.0 | 2,104 | 0 | # Copyright 2016 IBM Corp
# All Rights Reserved.
#
# Licensed under | the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific | language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from trove.common import cfg
from trove.common.i18n import _
from trove.common import utils
from trove.guestagent.common import operating_system
from trove.guestagent.datastore.db2 import service
from trove.guestagent.datastore.db2 import system
from trove.guestagent.strategies.restore import base
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
DB2_DBPATH = CONF.db2.mount_point
DB2_BACKUP_DIR = DB2_DBPATH + "/backup"
class DB2Backup(base.RestoreRunner):
    """Restore strategy implementation for DB2."""
    __strategy_name__ = 'db2backup'
    base_restore_cmd = 'sudo tar xPf -'

    def __init__(self, *args, **kwargs):
        super(DB2Backup, self).__init__(*args, **kwargs)
        # Wire up the DB2 service helpers used during the restore steps.
        status = service.DB2AppStatus()
        self.appStatus = status
        self.app = service.DB2App(status)
        self.admin = service.DB2Admin()
        # Backup archives are untarred into the shared backup directory.
        self.restore_location = DB2_BACKUP_DIR

    def post_restore(self):
        """Run a DB2 RESTORE for every database found in the untarred dir."""
        out, err = utils.execute_with_timeout(system.GET_DB_NAMES,
                                              shell=True)
        for db_name in out.split():
            service.run_command(system.RESTORE_DB % {'dbname': db_name,
                                                     'dir': DB2_BACKUP_DIR})
        LOG.info(_("Cleaning out restore location post: %s."), DB2_BACKUP_DIR)
        operating_system.remove(DB2_BACKUP_DIR, force=True, as_root=True)
jmeagher/playground | kubernetes/locust/tasks/tasks.py | Python | apache-2.0 | 5,021 | 0.009361 | import datetime
import functools
import glob
import os
import random
import sys
import time
import traceback

import requests

from locust import HttpLocust, TaskSet, task
# (connect, read) timeout in seconds for every HTTP call.
timeout = (0.5, 1.0)
# Glob of corpus files whose lines become document bodies.
corpus_glob = os.environ["CORPUS_GLOB"]
all_files = glob.glob(corpus_glob)
test_index = os.environ.get("TEST_INDEX", "load_test")
# Query mix spans term, phrase, field and boolean forms.
simple_queries = [
    'romeo', 'thou', 'laertes', 'to be',
    'random:10', 'my_id:100',
    'random:50 AND data_type:medium_cardinality_insert'
]
# Optional Elasticsearch endpoint for recording per-task results.
monitoring_es = os.environ.get("MONITORING_ES_URL", None)
print "Monitoring_es = '%s'" % monitoring_es  # NB: Python 2 print statement
def monitored(func):
    """Decorator for task methods that return ``(success, reason)``.

    Times the wrapped call and, when MONITORING_ES_URL is configured, posts
    a result document to that Elasticsearch endpoint. Only the boolean
    success flag is returned to the caller.
    """
    @functools.wraps(func)  # preserve __name__/__doc__ for introspection
    def wrapper(*arg, **kw):
        timestamp = datetime.datetime.utcnow().isoformat()
        start = time.time()
        success = False
        reason = ""
        try:
            success, reason = func(*arg, **kw)
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
            # can still stop the load generator.
            success = False
            reason = "Something unknown in the wrapper: " + str(traceback.format_exc())
        end = time.time()
        if monitoring_es:
            requests.request('POST', monitoring_es, timeout=(10, 30), json={
                "timestamp": timestamp,
                "results_timestamp": datetime.datetime.utcnow().isoformat(),
                "response_time": (end - start),
                "test": func.__name__,
                "success": success,
                "reason": reason,
            })
        return success
    return wrapper
class MixedElasticSearchLoad(TaskSet):
    """Mixed insert/search workload run by each simulated locust user."""

    def on_start(self):
        """Pick one corpus file at random; its lines become document text."""
        corpus_file = random.choice(all_files)
        raw = open(corpus_file).readlines()
        # Cleanup the output: strip whitespace, drop near-empty lines
        self.corpus = [l.strip() for l in raw if len(l) > 2]

    @task(100)
    @monitored
    def low_cardinality_insert(self):
        # ~100 distinct ids -> mostly document overwrites
        return self._do_insert(random.randint(0, 100), "low_cardinality_insert")

    @task(100)
    @monitored
    def medium_cardinality_insert(self):
        return self._do_insert(random.randint(0, 10000), "medium_cardinality_insert")

    @task(1000)
    @monitored
    def high_cardinality_insert(self):
        # ~10M distinct ids -> mostly fresh documents
        return self._do_insert(random.randint(0, 10000000), "high_cardinality_insert")

    @task(100)
    @monitored
    def simple_search(self):
        return self._do_search("simple_search", simple_search=random.choice(simple_queries))

    def _do_search(self, name, simple_search=None, post_search=None):
        """Issue a query-string or body search; return (success, reason)."""
        if simple_search:
            url = "/%s/_search?q=%s" % (test_index, simple_search)
        else:
            url = "/%s/_search" % test_index
        with self.client.get(url, name=name,
                             json=post_search, timeout=timeout,
                             catch_response=True) as response:
            return self._do_validation(response)

    def _do_insert(self, id, name):
        """PUT one document keyed by (name, id); return (success, reason)."""
        line = random.choice(self.corpus)
        data = {
            "timestamp": datetime.datetime.utcnow().isoformat(),
            "line": line,
            "my_id": id,
            "random": random.randint(0, 1000),
            "data_type": name,
        }
        with self.client.put(
                "/%s/test/%s-%s" % (test_index, name, id),
                json=data, name=name, timeout=timeout,
                catch_response=True) as response:
            return self._do_validation(response,
                                       expected_response_codes=[200, 201])

    def _do_validation(self, response, expected_response_codes=[200]):
        """Check status code and shard results; return (success, reason)."""
        successful_request = False
        failure_reason = ""
        try:
            response.raise_for_status()
            try:
                out = response.json()
            except ValueError:
                failure_reason = "Didn't get json as a response, status: %s text: %s" % (response.status_code, response.text)
                response.failure(failure_reason)
                # BUG FIX: previously execution fell through to the shard
                # checks with ``out`` unbound; the resulting NameError
                # replaced this reason with a generic "Unknown exception".
                return (successful_request, failure_reason)
            if not response.status_code in expected_response_codes:
                failure_reason = ("Expected response code %s but got %s instead" %
                                  (expected_response_codes, response.status_code))
                response.failure(failure_reason)
            elif not "_shards" in out:
                failure_reason = ("No _shards information is available")
                response.failure(failure_reason)
            else:
                shards = out["_shards"]
                successful = shards.get("successful", -1)
                failed = shards.get("failed", 999)
                if successful <= 0:
                    failure_reason = ("Expected successful shards, but got %s instead" % str(successful))
                    response.failure(failure_reason)
                elif failed > 0:
                    failure_reason = ("Expected no failed shards, but got %s instead" % str(failed))
                    response.failure(failure_reason)
                else:
                    successful_request = True
                    response.success()
        except Exception:
            # Narrowed from a bare ``except:`` so Ctrl-C can stop the run.
            failure_reason = ("Unknown exception: " + str(traceback.format_exc()))
            response.failure(failure_reason)
        return (successful_request, failure_reason)
class ElasticSearchUser(HttpLocust):
    # Locust entry point: each simulated user runs the mixed task set,
    # pausing 900-1000 ms between tasks.
    task_set = MixedElasticSearchLoad
    min_wait = 900
    max_wait = 1000
|
LABSN/expyfun | expyfun/io/_wav.py | Python | bsd-3-clause | 3,494 | 0 | # -*- coding: utf-8 -*-
"""WAV file IO functions
"""
import numpy as np
from scipy.io import wavfile
from os import path as op
import warnings
from .._utils import verbose_dec, logger, _has_scipy_version
@verbose_dec
def read_wav(fname, verbose=None):
    """Read a WAV file, returning float64 data scaled into [-1, +1].

    Parameters
    ----------
    fname : str
        Filename to load.
    verbose : bool, str, int, or None
        If not None, override default verbose level.

    Returns
    -------
    data : array
        (n_channels, n_samples) float64 array. Integer-typed files
        (the common case) are automatically rescaled to [-1, +1].
    fs : int
        The wav sample rate.
    """
    fs, samples = wavfile.read(fname)
    # wavfile yields (n_samples, n_channels); transpose and force 2D.
    samples = np.atleast_2d(samples.T)
    orig_dtype = samples.dtype
    norm = _get_dtype_norm(orig_dtype)
    samples = np.ascontiguousarray(samples.astype(np.float64) / norm)
    _print_wav_info('Read', samples, orig_dtype)
    return samples, fs
@verbose_dec
def write_wav(fname, data, fs, dtype=np.int16, overwrite=False, verbose=None):
    """Write a WAV file

    Parameters
    ----------
    fname : str
        Filename to save as.
    data : array
        The data to save.
    fs : int
        The sample rate of the data.
    dtype : numpy dtype
        The output format to use. np.int16 is standard for many wav files,
        but np.float32 or np.float64 has higher dynamic range.
    overwrite : bool
        If True, overwrite the file if necessary.
    verbose : bool, str, int, or None
        If not None, override default verbose level.
    """
    # (The ``def`` line above was corrupted in extraction; restored.)
    if not overwrite and op.isfile(fname):
        raise IOError('File {} exists, overwrite=True must be '
                      'used'.format(op.basename(fname)))
    # Sample rates must be integral for the WAV header.
    if not np.dtype(type(fs)).kind == 'i':
        fs = int(fs)
        warnings.warn('Warning: sampling rate is being cast to integer and '
                      'may be truncated.')
    data = np.atleast_2d(data)
    if np.dtype(dtype).kind not in ['i', 'f']:
        raise TypeError('dtype must be integer or float')
    if np.dtype(dtype).kind == 'f':
        # Float WAV output requires scipy >= 0.13.
        if not _has_scipy_version('0.13'):
            raise RuntimeError('cannot write float datatype unless '
                               'scipy >= 0.13 is installed')
    elif np.dtype(dtype).itemsize == 8:
        raise RuntimeError('Writing 64-bit integers is not supported')
    # Float input saved as integers must already be normalized to [-1, 1].
    if np.dtype(data.dtype).kind == 'f':
        if np.dtype(dtype).kind == 'i' and np.max(np.abs(data)) > 1.:
            raise ValueError('Data must be between -1 and +1 when saving '
                             'with an integer dtype')
    _print_wav_info('Writing', data, dtype)
    max_val = _get_dtype_norm(dtype)
    data = (data * max_val).astype(dtype)
    # Transpose back to wavfile's (n_samples, n_channels) layout.
    wavfile.write(fname, fs, data.T)
def _print_wav_info(pre, data, dtype):
    """Log a one-line summary of a WAV read/write operation."""
    n_channels, n_samples = data.shape
    plural = 's' if n_channels != 1 else ''
    logger.info('{0} WAV file with {1} channel{3} and {2} samples '
                '(format {4})'.format(pre, n_channels, n_samples,
                                      plural, dtype))
def _get_dtype_norm(dtype):
"""Helper to get normalization factor for a given datatype"""
if np.dtype(dtype).kind == 'i':
info = np.iinfo(dtype)
maxval = min(-info.min, info.max)
else: # == 'f'
maxval = 1.0
return maxval
|
inveniosoftware/invenio-records-rest | invenio_records_rest/schemas/fields/marshmallow_contrib.py | Python | mit | 4,465 | 0 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2018-2019 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Contributed Marshmallow fields."""
from __future__ import absolute_import, print_function
import functools
from inspect import isfunction, ismethod
from marshmallow import fields, utils
# Recommended way of maintaining compatibility with python 2 for getargspec
try:
from inspect import getfullargspec as getargspec
except ImportError:
from inspect import getargspec
def _get_func_args(func):
"""Get a list of the arguments a function or method has."""
if isinstance(func, functools.partial):
return _get_func_args(func.func)
if isfunction(func) or ismethod(func):
return list(getargspec(func).args)
if callable(func):
return list(getargspec(func.__call__).args)
class Function(fields.Function):
    """Enhanced marshmallow Function field.
    The main difference between the original marshmallow.fields.Function is for
    the ``deserialize`` function, which can now also point to a three-argument
    function, with the third argument being the original data that was passed
    to ``Schema.load``. The following example better demonstrates how this
    works:
    .. code-block:: python
        def serialize_foo(obj, context):
            return {'serialize_args': {'obj': obj, 'context': context}}
        def deserialize_foo(value, context, data):
            return {'deserialize_args': {
                'value': value, 'context': context, 'data': data}}
        class FooSchema(marshmallow.Schema):
            foo = Function(serialize_foo, deserialize_foo)
        FooSchema().dump({'foo': 42}).data
        {'foo': {
            'serialize_args': {
                'obj': {'foo': 42},
                'context': {}  # no context was passed
            }
        }}
        FooSchema().load({'foo': 42}).data
        {'foo': {
            'deserialize_args': {
                'value': 42,
                'context': {},  # no context was passed
                'data': {'foo': 42},
            }
        }}
    """

    def _deserialize(self, value, attr, data, **kwargs):
        """Deserialize ``value`` via the user-supplied function, if any."""
        if self.deserialize_func:
            return self._call_or_raise(
                self.deserialize_func, value, attr, data)
        return value

    def _call_or_raise(self, func, value, attr, data=None):
        """Call ``func`` with as many of (value, context, data) as it takes.

        (The three-argument branch below was corrupted in extraction and
        has been restored.)
        """
        func_args_len = len(_get_func_args(func))
        if func_args_len > 2:
            # Three-arg deserializers also receive the original input data.
            return func(value, self.parent.context, data)
        elif func_args_len > 1:
            return func(value, self.parent.context)
        else:
            return func(value)
class Method(fields.Method):
    """Enhanced marshmallow Method field.
    The main difference between the original marshmallow.fields.Method is for
    the ``deserialize`` method, which can now also point to a two-argument
    method, with the second argument being the original data that was passed to
    ``Schema.load``. The following example better demonstrates how this works:
    .. code-block:: python
        class BarSchema(marshmallow.Schema):
            bar = Method('serialize_bar', 'deserialize_bar')
            # Exactly the same behavior as in ``marshmallow.fields.Method``
            def serialize_bar(self, obj):
                return {'serialize_args': {'obj': obj}}
            def deserialize_bar(self, value, data):
                return {'deserialize_args': {'value': value, 'data': data}}
        BarSchema().dump({'bar': 42}).data
        {'bar': {
            'serialize_args': {
                'obj': {'bar': 42}
            }
        }}
        BarSchema().load({'bar': 42}).data
        {'bar': {
            'deserialize_args': {
                'data': {'bar': 42},
                'value': 42
            }
        }}
    """
    def _deserialize(self, value, attr, data, **kwargs):
        # Dispatch to the schema method named by ``deserialize_method_name``,
        # passing the raw input ``data`` too when the method accepts it.
        if self.deserialize_method_name:
            try:
                method = utils.callable_or_raise(
                    getattr(self.parent, self.deserialize_method_name, None)
                )
                # Bound methods count ``self``, so > 2 means (self, value, data).
                method_args_len = len(_get_func_args(method))
                if method_args_len > 2:
                    return method(value, data)
                return method(value)
            except AttributeError:
                # Missing/invalid method: fall through and return the value
                # unchanged rather than failing the whole load.
                pass
        return value
|
theQRL/QRL | setup.py | Python | mit | 1,072 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This file was generated with PyScaffold 3.0.2.
PyScaffold helps you to put up the scaffold of your new Python project.
Learn more under: http://pyscaffold.org/
"""
import sys
from setuptools import setup
import versioneer
# Add here console scripts and other entry points in ini-style format.
# (The qrl_measure and qrl_walletd lines were corrupted in extraction
# and have been restored.)
entry_points = """
[console_scripts]
start_qrl = qrl.main:main
qrl_start = qrl.main:main
qrl = qrl.cli:main
qrl_grpc_proxy = qrl.grpcProxy:main
qrl_measure = qrl.measure:main
qrl_walletd = qrl.daemon.walletd:main
qrl_generate_genesis = qrl.tools.generate_genesis:main
"""
def setup_package():
    """Configure and invoke setuptools for the QRL package."""
    # Pull in sphinx as a setup requirement only for doc-building commands.
    doc_commands = {'build_sphinx', 'upload_docs'}
    extra = ['sphinx'] if doc_commands.intersection(sys.argv) else []
    setup(setup_requires=['pyscaffold>=3.0a0,<3.1a0'] + extra,
          entry_points=entry_points,
          version=versioneer.get_version(),
          cmdclass=versioneer.get_cmdclass(),
          use_pyscaffold=True)
use_pyscaffold=True)
if __name__ == "__main__":
setup_package()
|
rabipanda/tensorflow | tensorflow/contrib/kfac/python/ops/layer_collection_lib.py | Python | apache-2.0 | 1,738 | 0.000575 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
# |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WAR | RANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Registry for layers and their parameters/variables.
This represents the collection of all layers in the approximate Fisher
information matrix to which a particular FisherBlock may belong. That is, we
might have several layer collections for one TF graph (if we have multiple K-FAC
optimizers being used, for example.)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,line-too-long,wildcard-import
from tensorflow.contrib.kfac.python.ops.layer_collection import *
from tensorflow.python.util.all_util import remove_undocumented
# pylint: enable=unused-import,line-too-long,wildcard-import
# Names re-exported from ``layer_collection`` that should remain part of
# this module's documented public API.
_allowed_symbols = [
    "LayerParametersDict",
    "LayerCollection",
    "APPROX_KRONECKER_NAME",
    "APPROX_DIAGONAL_NAME",
    "APPROX_FULL_NAME",
    "VARIABLE_SCOPE",
    "APPROX_KRONECKER_INDEP_NAME",
    "APPROX_KRONECKER_SERIES_1_NAME",
    "APPROX_KRONECKER_SERIES_2_NAME"
]
# Hide every other wildcard-imported name from the public namespace.
remove_undocumented(__name__, allowed_exception_list=_allowed_symbols)
|
jawilson/home-assistant | homeassistant/components/nest/events.py | Python | apache-2.0 | 2,037 | 0 | """Library from Pub/sub messages, events and device triggers."""
from google_nest_sdm.camera_traits import (
CameraMotionTrait,
CameraPersonTrait,
CameraSoundTrait,
)
from google_nest_sdm.doorbell_traits import DoorbellChimeTrait
from google_nest_sdm.event import (
CameraMotionEvent,
CameraPersonEvent,
CameraSoundEvent,
DoorbellChimeEvent,
)
NEST_EVENT = "nest_event"
# The nest_event namespace will fire events that are triggered from messages
# received via the Pub/Sub subscriber.
#
# An example event payload:
#
# {
# "event_type": "nest_event"
# "data": {
# "device_id": "my-device-id",
# "type": "camera_motion",
# "timestamp": "2021-10-24T19:42:43.304000+00:00",
# "nest_event_id": "KcO1HIR9sPKQ2bqby_vTcCcEov..."
# },
# ...
# }
#
# The nest_event_id corresponds to the event id in the SDM API used to retrieve
# snapshots.
#
# The following event types are fired:
EVENT_DOORBELL_CHIME = "doorbell_chime"
EVENT_CAMERA_MOTION = "camera_motion"
EVENT_CAMERA_PERSON = "camera_person"
# Restored: this constant's name was corrupted in extraction.
EVENT_CAMERA_SOUND = "camera_sound"
# Mapping of supported device traits to home assistant event types. Devices
# that support these traits will generate Pub/Sub event messages in
# the EVENT_NAME_MAP.
DEVICE_TRAIT_TRIGGER_MAP = {
    DoorbellChimeTrait.NAME: EVENT_DOORBELL_CHIME,
    CameraMotionTrait.NAME: EVENT_CAMERA_MOTION,
    CameraPersonTrait.NAME: EVENT_CAMERA_PERSON,
    CameraSoundTrait.NAME: EVENT_CAMERA_SOUND,
}
# Mapping of incoming SDM Pub/Sub event message types to the home assistant
# event type to fire.
EVENT_NAME_MAP = {
DoorbellChimeEvent.NAME: EVENT_DOORBELL_CHIME,
CameraMotionEvent.NAME: EVENT_CAMERA_MOTION,
CameraPersonEvent.NAME: EVENT_CAMERA_PERSON,
CameraSoundEvent.NAME: EVENT_CAMERA_SOUND,
}
# Names for event types shown in the media source
MEDIA_SOURCE_EVENT_TITLE_MAP = {
DoorbellChimeEvent.NAME: "Doorbell",
CameraMotionEvent.NAME: "Motion",
CameraPersonEvent.NAME: "Person",
CameraSoundEvent.NAME: "Sound",
}
|
JKrehl/Electrons | Electrons/Tomography/Kernels/__init__.py | Python | isc | 157 | 0 | from . | Kernel import Kernel
from .RayKernel | import RayKernel
from .FresnelKernel import FresnelKernel
__all__ = [s for s in dir() if not s.startswith('_')]
|
Arelle/Arelle | arelle/ViewWinXml.py | Python | apache-2.0 | 1,337 | 0.004488 | '''
Created on Feb 6, 2011
@author: Mark V Systems Limited
(c) Copyright 2011 Mark V Systems Limited, All rights reserved.
'''
from tkinter import *
try:
from tkinter.ttk import *
except ImportError:
from ttk import *
from arelle.CntlrWinTooltip import ToolTip
import io
from arelle import (XmlUtil, ViewWinList)
def viewXml(modelXbrl, tabWin, tabTitle, xmlDoc):
    """Open a new tab listing ``xmlDoc`` serialized as XML text."""
    modelXbrl.modelManager.showStatus(_("viewing xml"))
    view = ViewXml(modelXbrl, tabWin, tabTitle)
    view.view(xmlDoc)
    # Restored: this call was corrupted ("view.co | ntextMenu()") in extraction.
    menu = view.contextMenu()
    view.menuAddSaveClipboard()
    menu.add_command(label=_("Validate"), underline=0, command=view.validate)
class ViewXml(ViewWinList.ViewList):
    """List-based tab view that renders an XML document line by line."""

    def __init__(self, modelXbrl, tabWin, tabTitle):
        super(ViewXml, self).__init__(modelXbrl, tabWin, tabTitle, True)

    def view(self, xmlDoc):
        """Serialize ``xmlDoc`` as UTF-8 XML and insert each line in the list."""
        fh = io.StringIO()
        XmlUtil.writexml(fh, xmlDoc, encoding="utf-8")
        for line in fh.getvalue().split("\n"):
            self.listBox.insert(END, line)
        fh.close()

    def validate(self):
        """Run full model validation, reporting exceptions via the model log."""
        try:
            from arelle import Validate
            # Removed unused ``import traceback``; exc_info=True already
            # captures the traceback below.
            Validate.validate(self.modelXbrl)
        except Exception as err:
            # Message fixed: "\s" was a bogus (non-)escape in the template.
            self.modelXbrl.exception("exception", _("Validation exception: %(error)s"), error=err, exc_info=True)
|
all-of-us/raw-data-repository | tests/dao_tests/test_ghost_check_dao.py | Python | bsd-3-clause | 1,959 | 0.003063 | from datetime import datetime, timedelta
from rdr_service.dao.ghost_check_dao import GhostCheckDao
from tests.helpers.unittest_base import BaseTestCase
class GhostCheckDaoTest(BaseTestCase):
    """Unit tests for GhostCheckDao.get_participants_needing_checked."""

    def test_loads_only_vibrent(self):
        """We might accidentally start flagging CE participants as ghosts if they're returned"""
        # (Two corrupted call lines from extraction restored below.)
        vibrent_participant = self.data_generator.create_database_participant(participantOrigin='vibrent')
        self.data_generator.create_database_participant(participantOrigin='careevolution')
        self.data_generator.create_database_participant(participantOrigin='anotherplatform')
        participants = GhostCheckDao.get_participants_needing_checked(
            session=self.data_generator.session,
            earliest_signup_time=datetime.now() - timedelta(weeks=1)
        )
        self.assertEqual(1, len(participants), 'Should only be the Vibrent participant')
        self.assertEqual(vibrent_participant.participantId, participants[0].participantId)

    def test_ghost_flag_returned(self):
        """Ensure we get back the ghost data field"""
        ghost_participant = self.data_generator.create_database_participant(
            participantOrigin='vibrent',
            isGhostId=True
        )
        self.data_generator.create_database_participant(
            participantOrigin='vibrent',
            isGhostId=None
        )
        self.data_generator.create_database_participant(
            participantOrigin='vibrent',
            isGhostId=False
        )
        results = GhostCheckDao.get_participants_needing_checked(
            session=self.data_generator.session,
            earliest_signup_time=datetime.now() - timedelta(weeks=1)
        )
        # Only the explicitly-flagged participant should read as a ghost;
        # both None and False must come back falsy.
        for participant in results:
            if participant.participantId == ghost_participant.participantId:
                self.assertTrue(participant.isGhostId)
            else:
                self.assertFalse(participant.isGhostId)
|
dennisguse/VoIP.py | Defines/SIGNALS.py | Python | gpl-3.0 | 1,227 | 0.008965 | from PyQt4.QtCore import *
import logging
#Call Signals
CALL_CONNECT = 'connectCall'
CALL_HANGUP = 'hangupCurrentCall'
CALL_NUMBER = 'callNumber'
CALL_INCOMING = 'inCallSig'
CALL_INCOMING_CANCELED = 'inCallCanceled'
CALL_OUTGOING_CANCELED = 'outCallCanceled'
CALL_SHOW_VIDEO = 'callShowVideo'
CALL_SIGNAL_LEVEL_CHANGE = 'callSignalLevelChange'
CALL_SIGNAL_LEVEL_REQUEST = 'callSignalLevelRequest'
CALL_ESTABLISHED = 'callEstablished'
CALL_RETRY_VIDEO = 'retryVideo'
#Presence Signals
BUDDY_STATE_CHANGED = 'buddyStateChanged'
OWN_ONLINE_STATE_CHANGED = 'ownOnlineStateChanged'
#Modules Signals
MODULE_LOAD = 'module_load'
MODULE_ACTIVATE = 'module_activate'
MODULE_DISMISS = 'module_dismiss'
#Register State Signals
REGISTER_REQUEST_INITIAL_STATE = 'regInitialState'
REGISTER_STATE_CHANGE = 'regStateChanged'
REGISTER = 'register'
#Misc
CLOSE = 'close'
STARTUP_ERROR = "startup_error"
def emit(sender, signalName, param1=None, param2=None):
    """Log and emit a Qt signal with up to two positional parameters.

    BUG FIX: the presence checks now use ``is not None`` so falsy values
    (0, '', False) are still forwarded; previously they were dropped.
    (The def line and first branch were also corrupted in extraction.)
    """
    logging.getLogger("SIGNALS").info(sender.__class__.__name__ + " " + signalName)
    if param2 is not None:
        sender.emit(SIGNAL(signalName), param1, param2)
    elif param1 is not None:
        sender.emit(SIGNAL(signalName), param1)
    else:
        sender.emit(SIGNAL(signalName))
|
chosak/fdic-call-reports | bin/download_all_reports.py | Python | mit | 3,207 | 0.000624 | import logging, os, time
from contextlib import contextmanager
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.support.ui import Select
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
SELENIUM_SERVER = os.environ.get(
'SELENIUM_SERVER',
'http://localhost:4444/wd/hub'
)
SELENIUM_BROWSER = os.environ.get(
'SELENIUM_BROWSER',
'CHROME'
)
def run():
    '''
    Uses a web browser to download all available FDIC call report data from
    the FFIEC CDR public bulk-download page.
    Uses Selenium which must be running using, e.g.
        java -jar selenium-server-standalone-2.43.1.jar
    By default the script tries to connect to a server running locally on
    port 4444, but this may be overridden through use of the SELENIUM_SERVER
    environment variable.
    The standalone Selenium server tries to use the default system browser.
    To use a different browser like Chrome, add this command-line option to
    the java call:
        -Dwebdriver.chrome.driver=/path/to/chromedriver
    Also set the SELENIUM_BROWSER environment variable to CHROME.
    This program triggers downloads in tab-delimited format which get
    saved to the default browser download location.
    '''
    with selenium_driver() as driver:
        count = 0
        while True:
            # Re-navigate each iteration: each download resets the page state.
            logger.info('navigating to data download page')
            driver.get('https://cdr.ffiec.gov/public/PWS/DownloadBulkData.aspx')
            logger.info('setting download type to single period')
            dl_type = Select(driver.find_element_by_id('ListBox1'))
            dl_type.select_by_value('ReportingSeriesSinglePeriod')
            time.sleep(3)
            logger.info('finding available reporting periods')
            periods = Select(driver.find_element_by_id('DatesDropDownList'))
            if not count:
                logger.info('{} available reporting periods: {}'.format(
                    len(periods.options),
                    ', '.join([period.text for period in periods.options])
                ))
            if count == len(periods.options):
                break
            period = periods.options[count]
            logger.info('downloading data for period {}'.format(period.text))
            # Restored: these two lines were corrupted in extraction.
            periods.select_by_index(count)
            time.sleep(3)
            submit_button = driver.find_element_by_id('Download_0')
            submit_button.click()
            time.sleep(3)
            count += 1
        logger.info('waiting for last download to finish')
        time.sleep(30)
@contextmanager
def selenium_driver():
    """Yield a configured remote WebDriver, always quitting it on exit."""
    logger.info('connecting to local Selenium server at {}'.format(
        SELENIUM_SERVER
    ))
    remote = webdriver.Remote(
        SELENIUM_SERVER,
        desired_capabilities=getattr(DesiredCapabilities, SELENIUM_BROWSER),
    )
    try:
        remote.implicitly_wait(10)
        remote.set_page_load_timeout(10)
        yield remote
    finally:
        logger.info('disconnecting from local Selenium server')
        remote.quit()
if __name__ == '__main__':
run()
|
djkonro/client-python | kubernetes/client/models/v1beta1_network_policy_ingress_rule.py | Python | apache-2.0 | 5,351 | 0.001121 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1NetworkPolicyIngressRule(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self, _from=None, ports=None):
        """
        V1beta1NetworkPolicyIngressRule - a model defined in Swagger
        :param dict swaggerTypes: The key is attribute name
        and the value is attribute type.
        :param dict attributeMap: The key is attribute name
        and the value is json key in definition.
        """
        self.swagger_types = {
            '_from': 'list[V1beta1NetworkPolicyPeer]',
            'ports': 'list[V1beta1NetworkPolicyPort]'
        }
        self.attribute_map = {
            '_from': 'from',
            'ports': 'ports'
        }
        # ``self.__from`` is name-mangled to
        # ``_V1beta1NetworkPolicyIngressRule__from``; the ``_from`` property
        # below reads and writes this same mangled attribute.
        self.__from = _from
        self._ports = ports
    @property
    def _from(self):
        """
        Gets the _from of this V1beta1NetworkPolicyIngressRule.
        List of sources which should be able to access the pods selected for this rule. Items in this list are combined using a logical OR operation. If this field is empty or missing, this rule matches all sources (traffic not restricted by source). If this field is present and contains at least one item, this rule allows traffic only if the traffic matches at least one item in the from list.
        :return: The _from of this V1beta1NetworkPolicyIngressRule.
        :rtype: list[V1beta1NetworkPolicyPeer]
        """
        return self.__from
    @_from.setter
    def _from(self, _from):
        """
        Sets the _from of this V1beta1NetworkPolicyIngressRule.
        List of sources which should be able to access the pods selected for this rule. Items in this list are combined using a logical OR operation. If this field is empty or missing, this rule matches all sources (traffic not restricted by source). If this field is present and contains at least one item, this rule allows traffic only if the traffic matches at least one item in the from list.
        :param _from: The _from of this V1beta1NetworkPolicyIngressRule.
        :type: list[V1beta1NetworkPolicyPeer]
        """
        self.__from = _from
    @property
    def ports(self):
        """
        Gets the ports of this V1beta1NetworkPolicyIngressRule.
        List of ports which should be made accessible on the pods selected for this rule. Each item in this list is combined using a logical OR. If this field is empty or missing, this rule matches all ports (traffic not restricted by port). If this field is present and contains at least one item, then this rule allows traffic only if the traffic matches at least one port in the list.
        :return: The ports of this V1beta1NetworkPolicyIngressRule.
        :rtype: list[V1beta1NetworkPolicyPort]
        """
        return self._ports
    @ports.setter
    def ports(self, ports):
        """
        Sets the ports of this V1beta1NetworkPolicyIngressRule.
        List of ports which should be made accessible on the pods selected for this rule. Each item in this list is combined using a logical OR. If this field is empty or missing, this rule matches all ports (traffic not restricted by port). If this field is present and contains at least one item, then this rule allows traffic only if the traffic matches at least one port in the list.
        :param ports: The ports of this V1beta1NetworkPolicyIngressRule.
        :type: list[V1beta1NetworkPolicyPort]
        """
        self._ports = ports
    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        # Walk the declared swagger attributes, recursively converting any
        # nested model objects (anything with a to_dict) as we go.
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())
    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()
    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, V1beta1NetworkPolicyIngressRule):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
prheenan/BioModel | EnergyLandscapes/Lifetime_Dudko2008/Python/TestExamples/Examples/Example_Dudko_Fit.py | Python | gpl-2.0 | 889 | 0.008999 | # force floating point division. Can still use integer with //
from __future__ import division
# This file is used for importing the common utilities classes.
import numpy as np
import matplotlib.pyplot as plt
import sys
sys.path.append("../../../../../")
# Restored: the package name was corrupted ("EnergyLand | scapes") here.
from EnergyLandscapes.Lifetime_Dudko2008.Python.TestExamples.Util import \
    Example_Data
def PlotFit(data, BaseName):
    # Save the histogram and lifetime/fit figures for one dataset, using
    # BaseName as the common output-path prefix.
    fig = Example_Data.PlotHistograms(data)
    fig.savefig(BaseName + "_Histogram.png")
    fig = Example_Data.PlotLifetimesAndFit(data)
    fig.savefig(BaseName + "_Lifetimes.png")
def run():
    """Reproduce figures 1 and 2 from Dudko 2008 and save them under ../Out."""
    figures = [
        # (probability-data loader, output base name)
        (Example_Data.Dudko2008Fig1_Probabilities, "../Out/Dudko2008_Fig1"),
        (Example_Data.Dudko2008Fig2_Probabilities, "../Out/Dudko2008_Fig2"),
    ]
    for load_data, base_name in figures:
        PlotFit(load_data(), base_name)
# Entry point: regenerate the Dudko 2008 example figures when run as a script.
if __name__ == "__main__":
    run()
|
RealTimeWeb/wikisite | MoinMoin/i18n/tools/check_i18n.py | Python | apache-2.0 | 12,365 | 0.001617 | #! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
"""check_i18n - compare texts in the source with the language files
Searches in the MoinMoin sources for calls of _() and tries to extract
the parameter. Then it checks the language modules if those parameters
are in the dictionary.
Usage: check_i18n.py [lang ...]
Without arguments, checks all languages in i18n or the specified
languages. Look into MoinMoin.i18n.__init__ for availeable language
names.
The script will run from the moin root directory, where the MoinMoin
package lives, or from MoinMoin/i18n where this script lives.
TextFinder class based on code by Seo Sanghyeon and the python compiler
package.
TODO: fix it for the changed i18n stuff of moin 1.6
@copyright: 2003 Florian Festi, Nir Soffer, Thomas Waldmann
@license: GNU GPL, see COPYING for details.
"""
# Encoding used for all human-readable report output.
output_encoding = 'utf-8'
# These lead to crashes (MemoryError - due to missing codecs?)
#blacklist_files = ["ja.py", "zh.py", "zh_tw.py"]
#blacklist_langs = ["ja", "zh", "zh-tw"]
# If you have cjkcodecs installed, use this:
# Language files / language codes to skip entirely during checking.
blacklist_files = []
blacklist_langs = []
import sys, os, compiler
from compiler.ast import Name, Const, CallFunc, Getattr
# NOTE: this module targets Python 2 -- it uses the `compiler` package and
# print statements, both removed in Python 3.
class TextFinder:
    """ Walk through AST tree and collect text from gettext calls
    Find all calls to gettext function in the source tree and collect
    the texts in a dict. Use compiler to create an abstract syntax tree
    from each source file, then find the nodes for gettext function
    call, and get the text from the call.
    Localized texts are used usually translated during runtime by
    gettext functions and appear in the source as
    _('text...'). TextFinder class finds calls to the '_' function in
    any namespace, or your preferred gettext function.
    Note that TextFinder will only retrieve text from function calls
    with a constant argument like _('text'). Calls like _('text' % locals()),
    _('text 1' + 'text 2') are marked as bad call in the report, and the
    text is not retrieved into the dictionary.
    Note also that texts in source can appear several times in the same
    file or different files, but they will only appear once in the
    dictionary that this tool creates.
    The dictionary value for each text is a dictionary of filenames each
    containing a list of (best guess) lines numbers containing the text.
    """
    def __init__(self, name='_'):
        """ Init with the gettext function name or '_'"""
        self._name = name # getText function name
        self._dictionary = {} # Unique texts in the found texts
        self._found = 0 # All good calls including duplicates
        self._bad = 0 # Bad calls: _('%s' % var) or _('a' + 'b')
    def setFilename(self, filename):
        """Remember the filename we are parsing"""
        self._filename = filename
    def visitModule(self, node):
        """ Start the search from the top node of a module
        This is the entry point into the search. When compiler.walk is
        called it calls this method with the module node.
        This is the place to initialize module specific data.
        """
        self._visited = {} # init node cache - we will visit each node once
        self._lineno = 'NA' # init line number
        # Start walking in the module node
        self.walk(node)
    def walk(self, node):
        """ Walk through all nodes """
        if node in self._visited:
            # We visited this node already
            return
        self._visited[node] = 1
        # parseNode() returns true for a handled gettext call, in which case
        # its children need no separate visit.
        if not self.parseNode(node):
            for child in node.getChildNodes():
                self.walk(child)
    def parseNode(self, node):
        """ Parse function call nodes and collect text """
        # Get the current line number. Since not all nodes have a line number
        # we save the last line number - it should be close to the gettext call
        if node.lineno is not None:
            self._lineno = node.lineno
        if node.__class__ == CallFunc and node.args:
            child = node.node
            klass = child.__class__
            if (# Standard call _('text')
                (klass == Name and child.name == self._name) or
                # A call to an object attribute: object._('text')
                (klass == Getattr and child.attrname == self._name)):
                if node.args[0].__class__ == Const:
                    # Good call with a constant _('text')
                    self.addText(node.args[0].value)
                else:
                    self.addBadCall(node)
                return 1
        return 0
    def addText(self, text):
        """ Add text to dictionary and count found texts.
        Note that number of texts in dictionary could be different from
        the number of texts found, because some texts appear several
        times in the code.
        Each text value is a dictionary of filenames that contain the
        text and each filename value is the list of line numbers with
        the text. Missing line numbers are recorded as 'NA'.
        self._lineno is the last line number we checked. It may be the line
        number of the text, or near it.
        """
        self._found = self._found + 1
        # Create key for this text if needed
        if text not in self._dictionary:
            self._dictionary[text] = {}
        # Create key for this filename if needed
        textInfo = self._dictionary[text]
        if self._filename not in textInfo:
            textInfo[self._filename] = [self._lineno]
        else:
            textInfo[self._filename].append(self._lineno)
    def addBadCall(self, node):
        """Called when a bad call like _('a' + 'b') is found"""
        self._bad = self._bad + 1
        print
        print "<!> Warning: non-constant _ call:"
        print "  `%s`" % str(node)
        print "  `%s`:%s" % (self._filename, self._lineno)
    # Accessors
    def dictionary(self):
        return self._dictionary
    def bad(self):
        return self._bad
    def found(self):
        return self._found
def visit(path, visitor):
    """Parse the Python source file at *path* and walk its AST with *visitor*.

    Uses the Python 2 `compiler` package (removed in Python 3).
    """
    visitor.setFilename(path)
    tree = compiler.parseFile(path)
    compiler.walk(tree, visitor)
# MoinMoin specific stuff follows
class Report:
    """Language status report

    Compares the texts found in the source (sourceDict) against one
    language module's translation dict, recording missing and unused keys.
    """
    def __init__(self, lang, sourceDict):
        self.__lang = lang                # language code, e.g. 'de'
        self.__sourceDict = sourceDict    # texts found in the source tree
        self.__langDict = None            # loaded translation dict, see loadLanguage()
        self.__missing = {}               # in source, not translated
        self.__unused = {}                # translated, no longer in source
        self.__error = None               # human-readable load failure, if any
        self.__ready = 0
        self.create()
    def loadLanguage(self):
        # Import the language module's `text` dict, e.g. MoinMoin.i18n.de.text.
        filename = i18n.filename(self.__lang)
        self.__langDict = pysupport.importName("MoinMoin.i18n." + filename, "text")
    def create(self):
        """Compare language text dict against source dict"""
        self.loadLanguage()
        if not self.__langDict:
            self.__error = "Language %s not found!" % self.__lang
            self.__ready = 1
            return
        # Collect missing texts
        for text in self.__sourceDict:
            if text not in self.__langDict:
                self.__missing[text] = self.__sourceDict[text]
        # Collect unused texts
        for text in self.__langDict:
            if text not in self.__sourceDict:
                self.__unused[text] = self.__langDict[text]
        self.__ready = 1
    def summary(self):
        """Return summary dict"""
        summary = {
            'name': i18n.languages[self.__lang][i18n.ENAME].encode(output_encoding),
            'maintainer': i18n.languages[self.__lang][i18n.MAINTAINER],
            'total': len(self.__langDict),
            'missing': len(self.__missing),
            'unused': len(self.__unused),
            'error': self.__error
        }
        return summary
    def missing(self):
        return self.__missing
    def unused(self):
        return self.__unused
if __name__ == '__main__':
import time
# Check that we run from the root directory where MoinMoin package lives
# or from the i18n directory when this script lives
if os.path.exists('MoinMoin/__init__.py'):
# Running from the root directory
Moin |
wazaahhh/patchwork | parseUserNames.py | Python | mit | 4,245 | 0.021673 | import numpy as np
import urllib
import simplejson
import time
from datetime import datetime
import pandas
from scipy import stats as S
'''
url = "http://reporobot.jlord.us/data"
urllib.urlretrieve(url, filename="usernames.json")
'''
# Local checkout root; all data files are read from / written to here.
rootdir = "/home/ubuntu/github/patchwork/"
# List of pull-request records (dicts) downloaded from reporobot (see above).
dic = simplejson.loads(open(rootdir + "usernames.json",'rb').read())
#df2015 = pandas.io.parsers.read_csv("results-20150721-150046.csv")
def parseUsers(save=True):
    """Flatten the global `dic` pull-request records into parallel lists.

    Each record contributes its user name (key "username" or "user"), its
    ISO time string, the corresponding POSIX timestamp and its PR number.

    :param save: when True, also dump the result as JSON to
        ``rootdir + "userList.json"``.
    :returns: dict with keys 'userList', 'timeList', 'prNumList',
        'timestampList'.
    """
    userList = []
    timeList = []
    timestampList = []
    prNumList = []
    for ix in dic:
        # `key in dict` replaces dict.has_key(), which was removed in Python 3
        # (and works on Python 2 as well).
        if "username" in ix:
            userList.append(ix['username'])
        elif "user" in ix:
            userList.append(ix['user'])
        # NOTE(review): a record with neither name key still appends to the
        # other three lists, so the lists can drift out of step -- confirm
        # this is intended.
        timeList.append(ix['time'])
        timestampList.append(time.mktime(
            datetime.strptime(ix['time'], '%Y-%m-%dT%H:%M:%SZ').timetuple()))
        prNumList.append(ix['prNum'])
    output = {'userList': userList,
              'timeList': timeList,
              'prNumList': prNumList,
              'timestampList': timestampList
              }
    if save:
        # Context manager closes the file deterministically; text mode so the
        # JSON str writes correctly on both Python 2 and 3 (the old code used
        # 'wb' and never closed the handle).
        with open(rootdir + "userList.json", 'w') as outfile:
            outfile.write(simplejson.dumps(output))
    return output
def exportUserNames(outdic, subsample=0.05):
    """Write a random subsample of user names to users.txt as 'name','name',...

    :param outdic: dict with a 'userList' sequence (as built by parseUsers).
    :param subsample: fraction of the user list to sample (with replacement,
        since np.random.randint can repeat indices).
    """
    users = np.array(outdic['userList'])
    index = np.random.randint(0, len(users), int(subsample * len(users)))
    # Context manager + text mode: the original left the handle open and used
    # 'wb', which breaks str writes on Python 3.
    with open("users.txt", 'w') as f:
        for u in users[index]:
            f.write("'" + u + "'")
            f.write(",")
def build_main_df(sampling_resol="1D"):
    '''
    Main DataFrame (df):
    This pandas dataframe contains all timestamped events related to users
    identified as having taken part to AstroWeek 2014. Repositories related
    to events are also provided

    Returns (df, df2014, df2015, resampled) where `resampled` holds
    per-interval event/actor/repo counts at resolution *sampling_resol*.
    '''
    #Parse .csv files and create a timestamp column to merge 2014 and 2015 datasets
    # 2014 dump: timestamps are "%Y-%m-%d %H:%M:%S" strings.
    df2014 = pandas.io.parsers.read_csv(rootdir+"events_2014.csv")
    df2014['timestamp'] = np.array([datetime.strptime(dt,"%Y-%m-%d %H:%M:%S") for dt in df2014['created_at']])
    # Normalize 2014 column names to the 2015 schema before concatenation.
    df2014.rename(columns={'actor_attributes_login':'actor'}, inplace=True)
    df2014.rename(columns={'repository_name':'repo'}, inplace=True)
    df2014.rename(columns={'repository_url':'repo_url'}, inplace=True)
    df2014.rename(columns={'repository_created_at':'repo_created_at'}, inplace=True)
    # 2015 dump: timestamps are POSIX seconds.
    # NOTE(review): map() is fine on Python 2 (returns a list) but yields an
    # iterator on Python 3 -- this assignment assumes Python 2 semantics.
    df2015 = pandas.io.parsers.read_csv(rootdir+"events_2015.csv")
    df2015['timestamp'] = map(datetime.fromtimestamp,df2015['created_at'])
    df2015.rename(columns={'actor_login':'actor'}, inplace=True)
    df2015.rename(columns={'repo_name':'repo'}, inplace=True)
    df = pandas.concat([df2014,df2015])
    df.index = df['timestamp']
    #df2014['repo_created_at'] = np.array([datetime.strptime(dt,"%Y-%m-%d %H:%M:%S") for dt in df2014['repo_created_at']])
    t_resol = sampling_resol
    event_types = np.unique(df.type.values)
    event_dic = {}
    # NOTE(review): resample(..., how=...) was deprecated/removed in modern
    # pandas; this code targets the old (<0.18) resample API.
    event_dic['all'] = df.type.resample(t_resol,how='count')
    event_count = df.type.resample(t_resol,how='count')
    for e in event_types:
        event_dic[e] = df[df['type']==e].type.resample(t_resol,how='count')
        # Pad per-type series so every type spans the same intervals as the
        # overall event count.
        if len(event_dic[e]) < len(event_count):
            event_dic[e] = fill_ommitted_resample(event_dic[e],event_count)
        #print e,len(event_dic[e])
    resampled = {"activity" :
                {'events' : event_count,
                'actors' : df.actor.resample(t_resol,how=countUnique),
                'repos' : df.repo.resample(t_resol,how=countUnique)
                },
                'event_types' : event_dic
                }
    return df,df2014,df2015,resampled
def countUnique(array):
    """Number of distinct values in *array* (used as a resample aggregator)."""
    distinct = set(array)
    return len(distinct)
def fill_ommitted_resample(df,ref_df):
    """Pad series *df* with zero entries so it covers *ref_df*'s index range.

    Leading intervals present in ref_df but before df's first index, and
    trailing intervals after df's last index, are inserted with value 0.
    NOTE(review): Series.set_value() was deprecated in pandas 0.21 and later
    removed; this code targets old pandas.
    """
    # Prepend zeros for every reference interval before df's first entry.
    i=0
    while ref_df.index[i] < df.index[0]:
        #print i , ref_df.index[i],df.index[0] , ref_df.index[i] < df.index[0]
        df = df.set_value(ref_df.index[i], 0)
        i+=1
    df = df.sort_index()
    # Append zeros for every reference interval after df's last entry,
    # walking backwards from the end of both indexes.
    i=-1
    while ref_df.index[i] > df.index[i]:
        #print i,ref_df.index[-i] > df.index[-1]
        df = df.set_value(ref_df.index[i], 0)
        i-=1
    df = df.sort_index()
    return df
if __name__ == '__main__':
    # Placeholder entry point. Parenthesized print is valid on both
    # Python 2 (single argument) and Python 3; the bare statement form
    # was a SyntaxError on Python 3.
    print("blah")
caktus/rapidpro-python | temba_client/utils.py | Python | bsd-3-clause | 935 | 0.00107 | from __future__ import absolute_import, unicode_literals
import datetime
import pytz
imp | ort six
# Strict ISO 8601 layouts used by the RapidPro API (UTC, 'T' separator).
ISO8601_DATE_FORMAT = '%Y-%m-%d'
ISO8601_DATETIME_FORMAT = ISO8601_DATE_FORMAT + 'T' + '%H:%M:%S'
def parse_iso8601(value):
    """
    Parses a UTC ISO8601 string into an aware datetime (None for empty input)
    """
    if not value:
        return None
    if 'T' not in value:  # date only
        fmt = ISO8601_DATE_FORMAT
    else:
        pieces = [ISO8601_DATETIME_FORMAT]
        if '.' in value:
            # optional microseconds; some values from RapidPro omit them
            pieces.append('.%f')
        if 'Z' in value:
            # optional zero-offset marker
            pieces.append('Z')
        fmt = ''.join(pieces)
    parsed = datetime.datetime.strptime(value, fmt)
    return parsed.replace(tzinfo=pytz.utc)
def format_iso8601(value):
    """
    Formats an aware datetime as a UTC ISO8601 string with microseconds
    """
    as_utc = value.astimezone(pytz.UTC)
    formatted = as_utc.strftime(ISO8601_DATETIME_FORMAT + '.%f')
    return six.text_type(formatted)
|
TangentMicroServices/ProjectService | api/tests/tests.py | Python | gpl-2.0 | 6,753 | 0.025766 | from django.test import TestCase, Client
from django.conf import settings
from django.db import IntegrityError
from django.contrib.auth.models import User
from api.models import Project, Task, Resource
from tokenauth.authbackends import TokenAuthBackend
from datetime import date
import requests
import responses
import json
def mock_auth_success(user=None):
    """Register a mocked 200 response for the user-service /users/me/ endpoint.

    With no user a fixed TEST payload is returned; otherwise the given
    user's username and pk are serialized into the response body.
    """
    if user is None:
        response_string = '{"username": "TEST"}'
    else:
        response_string = json.dumps({
            "username": user.username,
            "id": user.pk
        })
    url = '{0}/api/v1/users/me/' . format(settings.USERSERVICE_BASE_URL)
    responses.add(responses.GET, url, body=response_string,
                  status=200, content_type='application/json')
def mock_auth_failure():
    """Register a mocked 401 (empty-body) response for /users/me/."""
    endpoint = '{0}/api/v1/users/me/' . format(settings.USERSERVICE_BASE_URL)
    responses.add(responses.GET, endpoint, body='',
                  status=401, content_type='application/json')
class ProjectModelTestCase(TestCase):
    """Unit tests for the Project model helpers."""
    def test_project_unicode(self):
        # __unicode__ should render the project's title.
        project = Project.quick_create(title="Test")
        assert project.__unicode__() == 'Test'
    def test_quick_create(self):
        project = Project.quick_create()
        assert isinstance(project, Project), 'Project instance is created'
class ResourceModelTestCase(TestCase):
    """Unit tests for the Resource model helpers and constraints."""
    def test_resource_quick_create(self):
        resource = Resource.quick_create()
        assert isinstance(resource, Resource)
    def test_resource_quick_create_with_details(self):
        # Extra kwargs are forwarded to the created Resource.
        project = Project.quick_create(title="TEST")
        extra_data = {
            "rate": 100
        }
        resource = Resource.quick_create(project=project, **extra_data)
        assert resource.project.title == 'TEST', 'Expect project is explicitly set'
        assert resource.rate == 100.00, 'Expect rate to be set by kwargs'
    def test_project_user_unique_together(self):
        # (project, user) must be unique together at the database level.
        project = Project.quick_create()
        start_date = date.today()
        Resource.objects.create(project=project, user=1, start_date=start_date)
        Resource.objects.create(project=project, user=2, start_date=start_date)
        try:
            Resource.objects.create(project=project, user=2)
            self.fail("Should not be able to add the same project and user twice")
        except IntegrityError:
            pass
class TaskModelTestCase(TestCase):
    """Unit tests for the Task model helpers."""
    def test_quick_create(self):
        task = Task.quick_create()
        assert isinstance(task, Task), 'Task instance is created'
class ProjectEndpointTestCase(TestCase):
    """Integration tests for the /api/v1/projects/ list and detail endpoints.

    Authentication is mocked via the `responses` helpers above. setUp seeds
    six projects; the plain user is a resource on P3/P4, the admin on P1/P2.
    """
    def setUp(self):
        self.c = Client(Authorization='Token 123')
        self.joe_admin = User.objects.create_superuser(username="admin", password="test", email="joe@soap.com")
        self.joe_soap = User.objects.create_user(username="joe", password="test")
        self.joe_soap.save()
        ## setup a bunch of Projects
        p1 = Project.quick_create(title="P1", description="Search me", is_billable=True)
        p2 = Project.quick_create(title="P2", is_billable=True)
        p3 = Project.quick_create(title="P3", is_active=False)
        p4 = Project.quick_create(title="P4", user=self.joe_soap.pk)
        p5 = Project.quick_create(title="P5", user=self.joe_soap.pk)
        p6 = Project.quick_create(title="P6")
        Resource.quick_create(user=self.joe_soap.pk, project=p4)
        Resource.quick_create(user=self.joe_soap.pk, project=p3)
        Resource.quick_create(user=self.joe_admin.pk, project=p1)
        Resource.quick_create(user=self.joe_admin.pk, project=p2)
    @responses.activate
    def test_get_projects_list_requires_auth(self):
        mock_auth_failure()
        response = self.c.get("/api/v1/projects/")
        assert response.status_code == 403, 'Expect permission denied'
    @responses.activate
    def test_get_project_list(self):
        # A plain user only sees projects they are a resource on.
        mock_auth_success(self.joe_soap)
        #self.c.logout()
        #login_result = self.c.login(username="joe", password="test")
        response = self.c.get("/api/v1/projects/")
        assert response.status_code == 200, 'Expect 200 OK'
        assert len(json.loads(response.content)) == 2, 'Expect 2 projects back'
    @responses.activate
    def test_get_project_list_admin_gets_all_projects(self):
        mock_auth_success(self.joe_admin)
        #self.c.logout()
        #login_result = self.c.login(username="joe", password="test")
        response = self.c.get("/api/v1/projects/")
        assert response.status_code == 200, 'Expect 200 OK'
        assert len(json.loads(response.content)) == 6, 'Expect 6 projects back'
    @responses.activate
    def test_get_project_list_filter_on_active(self):
        mock_auth_success(self.joe_admin)
        response = self.c.get("/api/v1/projects/?is_active=False")
        titles = [project.get("title") for project in json.loads(response.content)]
        expected_titles = ['P3']
        assert titles == expected_titles, 'Expect results to be filtered on is_active=False'
    @responses.activate
    def test_get_project_list_filter_on_billable(self):
        mock_auth_success(self.joe_admin)
        response = self.c.get("/api/v1/projects/?is_billable=True")
        titles = [project.get("title") for project in json.loads(response.content)]
        expected_titles = ['P1', 'P2']
        assert titles == expected_titles, 'Expect results to be filtered on is_billable=True'
    @responses.activate
    def test_get_project_list_search_title(self):
        mock_auth_success(self.joe_admin)
        response = self.c.get("/api/v1/projects/?search=P1")
        titles = [project.get("title") for project in json.loads(response.content)]
        expected_titles = ['P1']
        assert titles == expected_titles, 'Expect search to return matching title'
    @responses.activate
    def test_get_project_list_search_description(self):
        # Search also matches the description field ("Search me" on P1).
        mock_auth_success(self.joe_admin)
        response = self.c.get("/api/v1/projects/?search=Search")
        titles = [project.get("title") for project in json.loads(response.content)]
        expected_titles = ['P1']
        assert titles == expected_titles, 'Expect search to return matching description'
    @responses.activate
    def test_get_project_orders_by_title(self):
        mock_auth_success(self.joe_admin)
        response = self.c.get("/api/v1/projects/?ordering=title")
        titles = [project.get("title") for project in json.loads(response.content)]
        expected_titles = ['P1', 'P2', 'P3', 'P4', 'P5', 'P6']
        assert titles == expected_titles, 'Expect search results ordered by title'
    @responses.activate
    def test_get_project(self):
        project = Project.quick_create()
        mock_auth_success(self.joe_admin)
        response = self.c.get("/api/v1/projects/{0}/" . format (project.pk))
        expected_fields = ['pk', 'title', 'description', 'start_date', 'end_date', 'is_billable', 'is_active', 'task_set', 'resource_set']
        for field in expected_fields:
            assert response.data.get(field, "NOTSET") != "NOTSET", 'Assert field {0} is returned in the response' . format (field)
        assert response.status_code == 200, 'Expect 200 OK'
class TaskEndpointTestCase(TestCase):
def setUp(self):
self.c = Client(Authorization='Token 123')
# create some Tasks
|
fgaudin/aemanager | core/views.py | Python | agpl-3.0 | 39,044 | 0.004175 | import logging
from decimal import Decimal
from django.shortcuts import render_to_response, redirect, get_object_or_404
from django.template.context import RequestContext, Context
from django.utils.translation import ugettext_lazy as _, ugettext
from core.forms import UserForm, PasswordForm, ResendActivationEmailForm, \
ContactUsForm
from autoentrepreneur.forms import UserProfileForm
from contact.forms import AddressForm
from django.db.transaction import commit_on_success
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.utils import simplejson
from accounts.models import Expense, Invoice, INVOICE_STATE_PAID, \
PAYMENT_TYPE_BANK_CARD, InvoiceRow
from core.decorators import settings_required, disabled_for_demo
from autoentrepreneur.models import AUTOENTREPRENEUR_ACTIVITY_PRODUCT_SALE_BIC, \
Subscription, SUBSCRIPTION_STATE_NOT_PAID, SUBSCRIPTION_STATE_PAID, \
SUBSCRIPTION_STATE_TRIAL, UserProfile
from project.models import Proposal, Project, PROJECT_STATE_FINISHED, \
PROPOSAL_STATE_BALANCED, ROW_CATEGORY_SERVICE, ProposalRow, VAT_RATES_19_6
from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth.models import User
from autoentrepreneur.decorators import subscription_required
from django.contrib.auth import logout
from django.core.mail import mail_admins
from announcement.models import Announcement
from contact.models import Contact, CONTACT_TYPE_COMPANY, Address
from django.contrib.sites.models import Site
from django.http import HttpResponse, HttpResponseNotFound
from django.core.mail.message import EmailMessage
from django.template import loader
from django.contrib.admin.views.decorators import staff_member_required
import time
import datetime
import urllib, urllib2
from django.conf import settings
from registration.models import RegistrationProfile
from django.utils.encoding import smart_str
import os
from django.db.models.aggregates import Sum
@settings_required
@subscription_required
def index(request):
if settings.DEMO:
messages.warning(request, _("You're currently using the demo version. Data are reset every %i hours. Stroke links are disabled features.") % (settings.DEMO_RESET_DELAY))
user = request.user
profile = user.get_profile()
if not Proposal.objects.filter(owner=user).count():
messages.info(request, _('How-to : create a customer, a project, a proposal and finally an invoice'))
last_subscription = profile.get_last_subscription()
delay_before_end_of_subscription = last_subscription.expiration_date - datetime.date.today()
if delay_before_end_of_subscription.days <= 30:
subscription_message = ''
subscription_state = last_subscription.state
if subscription_state == SUBSCRIPTION_STATE_TRIAL:
subscription_message = _("Your trial ends in %(days)d days, if you want to keep using %(site_name)s don't forget to subscribe") % {'days': delay_before_end_of_subscription.days,
'site_name': Site.objects.get_current().name}
elif subscription_state == SUBSCRIPTION_STATE_PAID:
subscription_message = _("Your subscription ends in %(days)d days, if you want to keep using %(site_name)s don't forget to renew it") % {'days': delay_before_end_of_subscription.days,
'site_name': Site.objects.get_current().name}
if subscription_message:
messages.warning(request, subscription_message)
today = datetime.date.today()
one_year_back = datetime.date(today.year - 1, today.month, today.day)
first_year = True
if one_year_back.year >= profile.creation_date.year:
first_year = False
service_paid = 0
service_waiting = 0
service_to_be_invoiced = 0
service_limit = 0
service_paid_previous_year = 0
service_limit_previous_year = 0
service_remaining = 0
paid = Invoice.objects.get_paid_sales(owner=user)
if not first_year:
paid_previous_year = Invoice.objects.get_paid_sales(owner=user,
reference_date=datetime.date(one_year_back.year, 12, 31))
waiting = Invoice.objects.get_waiting_payments(owner=user)
to_be_invoiced = Invoice.objects.get_to_be_ | invoiced(owner=user)
limit = profile.get_sales_limit()
remaining = limit - | paid - waiting - to_be_invoiced
sales_limit = profile.get_sales_limit()
sales_limit2 = profile.get_sales_limit2()
if user.get_profile().activity == AUTOENTREPRENEUR_ACTIVITY_PRODUCT_SALE_BIC:
service_waiting = Invoice.objects.get_waiting_service_payments(owner=user)
service_to_be_invoiced = Invoice.objects.get_service_to_be_invoiced(owner=user)
service_limit = profile.get_service_sales_limit()
service_paid = Invoice.objects.get_paid_service_sales(owner=user)
service_remaining = service_limit - service_paid - service_waiting - service_to_be_invoiced
if not first_year:
limit_previous_year = profile.get_sales_limit(year=one_year_back.year)
if user.get_profile().activity == AUTOENTREPRENEUR_ACTIVITY_PRODUCT_SALE_BIC:
service_limit_previous_year = profile.get_service_sales_limit(year=one_year_back.year)
service_paid_previous_year = Invoice.objects.get_paid_service_sales(owner=user, year=one_year_back.year)
if not first_year and paid_previous_year > limit_previous_year:
messages.warning(request, _('You will leave the Auto-entrepreneur status at the end of the current year.'))
if profile.creation_help:
messages.warning(request, _('You lose tax rates associated with creation help for overrunning sales.'))
if profile.freeing_tax_payment:
messages.warning(request, _('You lose freeing tax payment.'))
else:
if paid > sales_limit:
if paid > sales_limit2:
messages.warning(request, _('You will leave the Auto-entrepreneur status at the end of the current year.'))
else:
messages.warning(request, _('You will leave the Auto-entrepreneur status at the end of the next year.'))
if profile.creation_help:
messages.warning(request, _('You lose tax rates associated with creation help for overrunning sales.'))
if profile.freeing_tax_payment:
messages.warning(request, _('You lose freeing tax payment.'))
elif remaining < 0:
if remaining < (sales_limit - sales_limit2):
messages.warning(request, _('Attention, you will leave the Auto-entrepreneur status at the end of the current year if all your proposals and invoices are paid before the end of the year.'))
else:
messages.warning(request, _('Attention, you will leave the Auto-entrepreneur status at the end of the next year if all your proposals and invoices are paid before the end of the year.'))
if profile.creation_help:
messages.warning(request, _('Attention, you will lose tax rates associated with creation help for overrunning sales if all your proposals and invoices are paid before the end of the year.'))
if profile.freeing_tax_payment:
messages.warning(request, _('Attention, you will lose freeing tax payment if all your proposals and invoices are paid before the end of the year.'))
if remaining < (sales_limit - sales_limit2):
messages.warning(request, _('You have to declare VAT from the first month of overrun.'))
late_invoices = Invoice.objects.get_late_invoices(owner=user)
invoices_to_send = Invoice.objects.get_invoices_to_send(owner=user)
potential = Proposal.objects.get_potential_sales(owner=user)
duration = Proposal.objects.get_potential_duration(owner=user)
proposals_to_send = Proposal.objects.get_propos |
hs634/algorithms | python/strings/shortest_distance_between_words_1.py | Python | mit | 799 | 0.010013 | """
Given a list of words and two words word1 and word2, return the shortest distance between these two words in the list.
For example, Assume that words = ["practice", "makes", "perfect", "coding", "makes"].
Given word1 = "coding", word2 = "practice", return 3. Given word1 = "makes", word2 = "coding", return 1.
"""
class Solution(object):
    """Shortest index distance between two words in a word list."""
    def shortest(self, lst, word1, word2):
        """Return the smallest index distance between word1 and word2 in lst.

        Tracks the most recent position of each word in a single pass
        (O(n) time, O(1) space). If either word never occurs, len(lst) is
        returned, matching the original behaviour.
        """
        best = len(lst)
        last1, last2 = -1, -1
        for i, word in enumerate(lst):
            if word == word1:
                last1 = i
            if word == word2:
                last2 = i
            # Only compare once both words have been seen at least once.
            if last1 != -1 and last2 != -1:
                best = min(best, abs(last1 - last2))
        return best
print Solution().shortest(["practice", "makes", "perfect", "coding", "makes"], "practice", "perfect") |
Kev/helhack | src/ability/Effect.py | Python | gpl-3.0 | 839 | 0.001192 | # Effect.py - Base class for all ability effects
# Copyright Stefan Brus 2007.
#
# This file is part of HelHack.
#
# HelHack is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# HelHack is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class Effect:
    """ Base class for all ability effects

    Concrete effects should subclass this; no shared behaviour exists yet.
    """
    # TODO: design the effect interface (application, duration, stacking)
|
TheCamusean/DLRCev3 | scripts/slam/mapping_BASE_10668.py | Python | mit | 5,371 | 0.059393 | import numpy as np
import cv2
from math import pi
def points2mapa(landmarks,pos_rob,mapa,P, delete_countdown): #mapa = (x,y, Px,Py, updt)
    """Match (bearing, range) observations against the map.

    Converts each landmark to world coordinates using the robot pose
    pos_rob = (x, y, heading_deg). An observation within 10 units of an
    existing map point marks that point as re-observed (updt flag = 1);
    otherwise its index is collected in new_points2add for later insertion.
    Returns (mapa, delete_countdown, new_points2add).
    """
    new_points2add = []
    landmarks = np.array(landmarks)
    for i in range(0,landmarks.shape[0]):
        # Polar measurement (bearing, range) -> world-frame coordinates.
        x_mapa = pos_rob[0] + landmarks[i,1]*np.cos((pos_rob[2])*pi/180+landmarks[i,0])
        y_mapa = pos_rob[1] + landmarks[i,1]*np.sin((pos_rob[2])*pi/180+landmarks[i,0])
        new = 1
        mapa_ar = np.array(mapa)
        # Every 5th update, clear all re-observation flags (column 4).
        if delete_countdown ==5 and mapa_ar.size > 0:
            mapa_ar[:,4] = np.zeros([mapa_ar.shape[0]])
            mapa = list(mapa_ar)
            delete_countdown = 0
        else:
            delete_countdown = 0
        # NOTE(review): delete_countdown is reset to 0 in BOTH branches above
        # and incremented once per landmark below, so it can never accumulate
        # to 5 across calls when landmarks are present -- confirm intended.
        # Find the nearest existing map point to this observation.
        sh_dist = 10000;
        p_already = 0
        for j in range(0, mapa_ar.shape[0]):
            distance = np.sqrt(np.power((x_mapa-mapa_ar[j,0]),2) + np.power((y_mapa - mapa_ar[j,1]),2))
            if sh_dist > distance:
                sh_dist = distance
                p_already = j
        print("shortest distance:", sh_dist)
        if sh_dist < 10:
            # Close enough: treat as a re-observation of an existing point.
            mapa = np.array(mapa)
            mapa[p_already,4] = 1
            mapa = mapa.tolist()
            new = 0
        if new ==1:
            new_points2add.append(i)
        delete_countdown +=1
    return mapa , delete_countdown , new_points2add
def cam2rob(BB_legos, H):
    """Convert lego bounding boxes in image space to robot-frame (angle, range).

    :param BB_legos: iterable of boxes (x_min, y_min, x_max, y_max) in pixels.
    :param H: homography; its inverse maps image points to the ground plane.
    :returns: np.array of [angle_rad, distance] rows.
    """
    ####cam [[[ 270.03048706 448.53890991]]]
    pixel_size = 0.653947100514
    # CENTER OF THE CAMERA
    cam= np.array([242.54,474.87])
    cam2rob_dist = 25
    Lego_list = []
    for box in BB_legos:
        # Use the bottom-center of the box as the lego's ground contact point.
        y = box[3]
        x = box[0] + (box[2]-box[0])/2
        input_vector=np.array([[[x,y]]],dtype=np.float32)
        output_vector=cv2.perspectiveTransform(input_vector,np.linalg.inv(H))
        # NOTE(review): the first distance_x/distance_y formulas are dead --
        # each is immediately overwritten by an empirical linear recalibration
        # on the next line. Presumably the second fits were tuned later.
        distance_x = (-output_vector[0,0,1]+cam[1])*pixel_size +cam2rob_dist
        distance_x = -0.28*output_vector[0,0,1] +160
        distance_y = -(output_vector[0,0,0] - cam[0])*pixel_size
        distance_y = -(output_vector[0,0,0] - cam[0]) *(0.35-0.00022*output_vector[0,0,0])
        print("data: ", distance_x,distance_y,box[3],box[1])
        angle = np.arctan2(distance_y,distance_x)
        distance = np.sqrt(np.power(distance_x,2) + np.power(distance_y,2))
        print("Distance ", distance, " angle: ", angle)
        if distance < 1000:
            # NOTE(review): distance is halved here -- confirm the units/scale.
            Lego_list.append([angle,distance/2])
        print("angle" , angle*180/pi)
    Lego_list = np.array(Lego_list)
    return Lego_list
def mapa2grid(mapa):
    """Stamp obstacle squares into the occupancy grid and place one target.

    Each row (x, y, ...) of *mapa* is used as the top-left corner of a
    28x28 obstacle block (cells set to 1). A single target cell (value 2)
    is then placed on a random free cell, sampled uniformly in [0, 1000).
    NOTE(review): *mapa* serves both as the corner list and as the grid
    being written into -- confirm callers pass a grid whose leading rows
    encode the obstacle corners.
    """
    obstacle_size = 28
    obstacle_cells = obstacle_size
    # Obstacles position
    obs_pos = mapa
    n_obs = obs_pos.shape[0]
    for i in range(0, n_obs):
        mapa[obs_pos[i, 0]:obs_pos[i, 0] + obstacle_cells,
             obs_pos[i, 1]:obs_pos[i, 1] + obstacle_cells] = np.ones([obstacle_cells, obstacle_cells])
    # BUGFIX: the loop condition used to be `while(target_on == 1)` right
    # after `target_on = 0`, so the body never ran and no target was ever
    # placed. The body clearly intends to loop *until* a free cell is found
    # (it sets target_on = 1 to terminate), so the condition must be == 0.
    target_on = 0
    while target_on == 0:
        tar_pos = np.random.randint(1000, size=[2])
        if mapa[tar_pos[0], tar_pos[1]] == 0:
            mapa[tar_pos[0], tar_pos[1]] = 2
            target_on = 1
    return mapa
def delete_in_mapa(mapa,robot_trajectory):
    """Drop map points that sat un-refreshed inside the sensor field of view.

    For every recorded robot pose, a point that lies within the FOV wedge
    (range 29..60, bearing arctan2(29,20)..arctan2(29,-20) relative to the
    heading) AND still has its update flag (column 4) at 0 is considered
    stale. All other points are kept.
    NOTE(review): despite its name, eliminate_index collects the indices to
    KEEP -- mapa is re-indexed by it below.
    """
    robot_trajectory_ar = np.array(robot_trajectory)
    min_dist = 29
    max_dist = 60
    min_angle = np.arctan2(29,20)
    max_angle = np.arctan2(29,-20)
    mapa_ar = np.array(mapa)
    eliminate_index = []
    for i in range(0, robot_trajectory_ar.shape[0]):
        for j in range(0, mapa_ar.shape[0]):
            # Point position relative to this pose; bearing relative to the
            # robot heading (pose angle is stored in degrees).
            x = mapa_ar[j,0] - robot_trajectory_ar[i,0]
            y = mapa_ar[j,1] - robot_trajectory_ar[i,1]
            distance = np.sqrt(np.power(x,2)+np.power(y,2))
            angle = np.arctan2(y,x) - robot_trajectory_ar[i,2]*pi/180
            if (distance > min_dist and distance< max_dist and angle > min_angle and angle< max_angle) and mapa_ar[j,4] == 0 :
                pass
            elif j not in eliminate_index:
                eliminate_index.append(j)
    print("j: ",eliminate_index)
    eliminate_index = np.array(eliminate_index)
    mapa = np.array(mapa)
    if mapa.size:
        mapa = mapa[eliminate_index,:]
    mapa= mapa.tolist()
    #mapa = mapa[eliminate_index]
    return mapa
def add_points_in_mapa(landmarks, new_points2add, mapa, P, pos_rob, index):
    """Append newly observed landmarks to the map (in-place) and return it.

    Each map entry is [x, y, Px, Py, flag]; flag 1 marks a fresh landmark,
    flag 5 marks the specially tracked landmark selected by *index*
    (1000 is the sentinel for "no selection").
    """
    landmarks = np.array(landmarks)
    heading_rad = pos_rob[2] * pi / 180

    def world_xy(k):
        # Polar (bearing, range) measurement -> world-frame coordinates.
        bearing = heading_rad + landmarks[k, 0]
        rng = landmarks[k, 1]
        return pos_rob[0] + rng * np.cos(bearing), pos_rob[1] + rng * np.sin(bearing)

    for k in new_points2add:
        wx, wy = world_xy(k)
        mapa.append(np.array([wx, wy, P[0, 0], P[1, 1], 1]))
    if index != 1000:
        print("grrrrr")
        wx, wy = world_xy(index)
        mapa.append(np.array([wx, wy, P[0, 0], P[1, 1], 5]))
    return mapa
def create_fake_lego_measurements(real_rob_pos, mapa):
    """Simulate (bearing, range) detections for map points inside the FOV.

    A point is reported when its range lies in (29, 60) and its bearing
    relative to the robot heading lies in (0, arctan2(29, -15)).
    """
    range_min, range_max = 29, 60
    bearing_min = 0  # np.arctan2(29, 15) in an earlier tuning
    bearing_max = np.arctan2(29, -15)
    heading_rad = real_rob_pos[2] * pi / 180
    visible = []
    for point in np.array(mapa):
        dx = point[0] - real_rob_pos[0]
        dy = point[1] - real_rob_pos[1]
        rng = np.sqrt(np.power(dx, 2) + np.power(dy, 2))
        bearing = np.arctan2(dy, dx) - heading_rad
        if range_min < rng < range_max and bearing_min < bearing < bearing_max:
            visible.append(np.array([bearing, rng]))
    return visible
def update_mapa(mapa, landmark_rob, pos_rob, P, delete_countdown, robot_trajectory, index):
    """One map-maintenance step: fold fresh observations into the map,
    record the current pose, and periodically prune stale points."""
    mapa, delete_countdown, fresh_ids = points2mapa(
        landmark_rob, pos_rob, mapa, P, delete_countdown)
    robot_trajectory.append(pos_rob)
    mapa = add_points_in_mapa(landmark_rob, fresh_ids, mapa, P, pos_rob, index)
    # Every time the countdown hits 5, drop map points that should have been
    # re-observed along the recorded trajectory but were not.
    if delete_countdown == 5:
        mapa = delete_in_mapa(mapa, robot_trajectory)
        robot_trajectory = []
    return mapa, delete_countdown, robot_trajectory
|
grnet/synnefo | snf-astakos-app/astakos/im/settings.py | Python | gpl-3.0 | 11,849 | 0 | # Copyright (C) 2010-2017 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licens | es/>.
from logging import INFO
from django.conf import settings
from synnefo.lib import parse_base_url
from astakos.api.services import astakos_services as vanilla_astakos_services
from synnefo.lib import join_urls
from synnefo.lib.services import fill_endpoints
from copy import deepcopy
BASE_URL = getattr(settings, 'ASTAKOS_BASE_URL',
| 'https://accounts.example.synnefo.org')
BASE_HOST, BASE_PATH = parse_base_url(BASE_URL)
astakos_services = deepcopy(vanilla_astakos_services)
fill_endpoints(astakos_services, BASE_URL)
ACCOUNTS_PREFIX = astakos_services['astakos_account']['prefix']
VIEWS_PREFIX = astakos_services['astakos_ui']['prefix']
KEYSTONE_PREFIX = astakos_services['astakos_identity']['prefix']
WEBLOGIN_PREFIX = astakos_services['astakos_weblogin']['prefix']
ADMIN_PREFIX = astakos_services['astakos_admin']['prefix']
KEYSTONE_ROOT_URL = join_urls(BASE_URL, KEYSTONE_PREFIX)
# Set the expiration time of newly created auth tokens
# to be this many hours after their creation time.
AUTH_TOKEN_DURATION = getattr(settings, 'ASTAKOS_AUTH_TOKEN_DURATION', 30 * 24)
DEFAULT_USER_LEVEL = getattr(settings, 'ASTAKOS_DEFAULT_USER_LEVEL', 4)
INVITATIONS_PER_LEVEL = getattr(settings, 'ASTAKOS_INVITATIONS_PER_LEVEL', {
0: 100,
1: 2,
2: 0,
3: 0,
4: 0
})
ADMINS = tuple(getattr(settings, 'ADMINS', ()))
MANAGERS = tuple(getattr(settings, 'MANAGERS', ()))
HELPDESK = tuple(getattr(settings, 'HELPDESK', ()))
# For convenience, Astakos groups the notifications in three categories and
# let the user define the recipients for these categories.
# - ACCOUNT_NOTIFICATIONS_RECIPIENTS receive notifications for 'account pending
# moderation' and 'account activated' actions.
# - FEEDBACK_NOTIFICATIONS_RECIPIENTS receive feedback notifications
# - PROJECT_NOTIFICATIONS_RECIPIENTS receive notifications for 'project
# creation' and 'project modification' actions.
ACCOUNT_NOTIFICATIONS_RECIPIENTS = tuple(set(tuple(
getattr(settings, 'ACCOUNT_NOTIFICATIONS_RECIPIENTS',
HELPDESK + MANAGERS + ADMINS))))
FEEDBACK_NOTIFICATIONS_RECIPIENTS = tuple(set(tuple(
getattr(settings, 'FEEDBACK_NOTIFICATIONS_RECIPIENTS',
HELPDESK))))
PROJECT_NOTIFICATIONS_RECIPIENTS = tuple(set(tuple(
getattr(settings, 'PROJECT_NOTIFICATIONS_RECIPIENTS',
HELPDESK + MANAGERS))))
# Using the following settings, one can explicitly specify the recipients for a
# specific notification. By default, these settings are not exposed to the
# config file.
ACCOUNT_PENDING_MODERATION_RECIPIENTS = tuple(set(tuple(
getattr(settings, 'ACCOUNT_PENDING_MODERATION_RECIPIENTS',
ACCOUNT_NOTIFICATIONS_RECIPIENTS))))
ACCOUNT_ACTIVATED_RECIPIENTS = tuple(set(tuple(
getattr(settings, 'ACCOUNT_ACTIVATED_RECIPIENTS',
ACCOUNT_NOTIFICATIONS_RECIPIENTS))))
PROJECT_CREATION_RECIPIENTS = tuple(set(tuple(
getattr(settings, 'PROJECT_CREATION_RECIPIENTS',
PROJECT_NOTIFICATIONS_RECIPIENTS))))
PROJECT_MODIFICATION_RECIPIENTS = tuple(set(tuple(
getattr(settings, 'PROJECT_MODIFICATION_RECIPIENTS',
PROJECT_NOTIFICATIONS_RECIPIENTS))))
CONTACT_EMAIL = settings.CONTACT_EMAIL
SERVER_EMAIL = settings.SERVER_EMAIL
SECRET_KEY = settings.SECRET_KEY
SESSION_ENGINE = settings.SESSION_ENGINE
# Identity Management enabled modules
# Supported modules are: 'local', 'twitter' and 'shibboleth'
IM_MODULES = getattr(settings, 'ASTAKOS_IM_MODULES', ['local'])
# Force user profile verification
FORCE_PROFILE_UPDATE = getattr(settings, 'ASTAKOS_FORCE_PROFILE_UPDATE', False)
# Enable invitations
INVITATIONS_ENABLED = getattr(settings, 'ASTAKOS_INVITATIONS_ENABLED', False)
COOKIE_NAME = getattr(settings, 'ASTAKOS_COOKIE_NAME', '_pithos2_a')
COOKIE_DOMAIN = getattr(settings, 'ASTAKOS_COOKIE_DOMAIN', None)
COOKIE_SECURE = getattr(settings, 'ASTAKOS_COOKIE_SECURE', True)
IM_STATIC_URL = getattr(settings, 'ASTAKOS_IM_STATIC_URL', '/static/im/')
# If set to False and invitations not enabled newly created user
# will be automatically accepted
MODERATION_ENABLED = getattr(settings, 'ASTAKOS_MODERATION_ENABLED', True)
# Set recaptcha keys
RECAPTCHA_PUBLIC_KEY = getattr(settings, 'ASTAKOS_RECAPTCHA_PUBLIC_KEY', '')
RECAPTCHA_PRIVATE_KEY = getattr(settings, 'ASTAKOS_RECAPTCHA_PRIVATE_KEY', '')
RECAPTCHA_OPTIONS = getattr(settings, 'ASTAKOS_RECAPTCHA_OPTIONS',
{'theme': 'custom',
'custom_theme_widget': 'okeanos_recaptcha'})
RECAPTCHA_USE_SSL = getattr(settings, 'ASTAKOS_RECAPTCHA_USE_SSL', True)
RECAPTCHA_ENABLED = getattr(settings, 'ASTAKOS_RECAPTCHA_ENABLED', False)
# Set where the user should be redirected after logout
LOGOUT_NEXT = getattr(settings, 'ASTAKOS_LOGOUT_NEXT', '')
# Set user email patterns that are automatically activated
RE_USER_EMAIL_PATTERNS = getattr(
settings, 'ASTAKOS_RE_USER_EMAIL_PATTERNS', [])
# Messages to display on login page header
# e.g. {'warning':
# 'This warning message will be displayed on the top of login page'}
LOGIN_MESSAGES = getattr(settings, 'ASTAKOS_LOGIN_MESSAGES', [])
# Messages to display on login page header
# e.g. {'warning':
# 'This warning message will be displayed on the top of signup page'}
SIGNUP_MESSAGES = getattr(settings, 'ASTAKOS_SIGNUP_MESSAGES', [])
# Messages to display on login page header
# e.g. {'warning':
# 'This warning message will be displayed on the top of profile page'}
PROFILE_MESSAGES = getattr(settings, 'ASTAKOS_PROFILE_MESSAGES', [])
# Messages to display on all pages
# e.g. {'warning':
# 'This warning message will be displayed on the top of every page'}
GLOBAL_MESSAGES = getattr(settings, 'ASTAKOS_GLOBAL_MESSAGES', [])
# messages to display as extra actions in account forms
# e.g. {'https://www.myhomepage.com': 'Back to <service_name>'}
PROFILE_EXTRA_LINKS = getattr(settings, 'ASTAKOS_PROFILE_EXTRA_LINKS', {})
# The number of unsuccessful login requests per minute allowed
# for a specific user
RATELIMIT_RETRIES_ALLOWED = getattr(
settings, 'ASTAKOS_RATELIMIT_RETRIES_ALLOWED', 3)
# If False the email change mechanism is disabled
EMAILCHANGE_ENABLED = getattr(settings, 'ASTAKOS_EMAILCHANGE_ENABLED', False)
# Set the expiration time (in days) of email change requests
EMAILCHANGE_ACTIVATION_DAYS = getattr(
settings, 'ASTAKOS_EMAILCHANGE_ACTIVATION_DAYS', 10)
# Set the astakos main functions logging severity (None to disable)
LOGGING_LEVEL = getattr(settings, 'ASTAKOS_LOGGING_LEVEL', INFO)
# Set how many objects should be displayed per page
PAGINATE_BY = getattr(settings, 'ASTAKOS_PAGINATE_BY', 50)
# Set how many objects should be displayed per page in show all projects page
PAGINATE_BY_ALL = getattr(settings, 'ASTAKOS_PAGINATE_BY_ALL', 50)
# Enforce token renewal on password change/reset
NEWPASSWD_INVALIDATE_TOKEN = getattr(
settings, 'ASTAKOS_NEWPASSWD_INVALIDATE_TOKEN', True)
# Interval at which to update the user's available quota in astakos usage
# profile view
USAGE_UPDATE_INTERVAL = getattr(settings, 'ASTAKOS_USAGE_UPDATE_INTERVAL',
5000)
# Permit local account migration
ENABLE_LOCAL_ACCOUNT_MIGRATION = getattr(
settings, 'ASTAKOS_ENABLE_LOCAL_ACCOUNT_MIGRATION', True)
# Migrate eppn identifiers to remote id
SHIBBOLETH_MIGRATE_EPPN = getattr(settings, 'ASTAKOS_SHIBBOLETH_MIGRATE_EPPN',
False)
# Migrate eppn identifiers to remote id
SHIBBOLETH_MIGRATE_EPPN = getattr(settings, 'ASTAKOS |
Xunius/txt2evernote | lib/geeknote/gnsync.py | Python | gpl-3.0 | 9,526 | 0.002729 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import functools
import glob
import logging
import os
import re
import string
import unicodedata

from geeknote import GeekNote
from storage import Storage
from editor import Editor
import tools
# set default logger (write log to file in the user's home directory)
def_logpath = os.path.join(os.getenv('USERPROFILE') or os.getenv('HOME'), 'GeekNoteSync.log')
formatter = logging.Formatter('%(asctime)-15s : %(message)s')
handler = logging.FileHandler(def_logpath)
handler.setFormatter(formatter)
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(handler)
# determine if this is a narrow build or wide build (or py3k)
try:
    unichr(0x10000)
    MAX_CHAR = 0x110000
except ValueError:
    # Narrow build: code points above the BMP are not representable here.
    MAX_CHAR = 0x9999
# http://stackoverflow.com/a/93029
# Pre-compute every non-printable "Cc" (control) character once, so that
# remove_control_characters() below is a single regex substitution.
CONTROL_CHARS = ''.join(c for c in (unichr(i) for i in xrange(MAX_CHAR)) \
        if c not in string.printable and unicodedata.category(c) == 'Cc')
CONTROL_CHARS_RE = re.compile('[%s]' % re.escape(CONTROL_CHARS))
def remove_control_characters(s):
    """Return *s* with every Unicode Cc control character removed."""
    return CONTROL_CHARS_RE.sub('', s)
def log(func):
    """Decorator: run *func*, logging any exception instead of propagating it.

    On error the wrapper logs the message and returns None, so decorated
    calls are best-effort.  ``functools.wraps`` preserves the wrapped
    function's name/docstring for debugging.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:  # "as" form works on py2.6+ and py3
            logger.error("%s", str(e))
    return wrapper
@log
def reset_logpath(logpath):
    """
    Reset logpath to path from command line
    """
    global logger

    # No path given on the command line: keep the default handler.
    if not logpath:
        return

    # remove temporary log file if it's empty
    if os.path.isfile(def_logpath):
        if os.path.getsize(def_logpath) == 0:
            os.remove(def_logpath)

    # save previous handlers
    handlers = logger.handlers

    # remove old handlers
    for handler in handlers:
        logger.removeHandler(handler)

    # try to set new file handler (errors are swallowed/logged by @log)
    handler = logging.FileHandler(logpath)
    handler.setFormatter(formatter)
    logger.addHandler(handler)
class GNSync:
    """Synchronise a directory of text/markdown files with an Evernote notebook.

    Local files are pushed to Evernote; when *twoway* is set, notes that are
    newer than their local counterpart are written back to disk as well.
    """

    notebook_name = None
    path = None
    mask = None
    twoway = None
    notebook_guid = None
    all_set = False  # becomes True only after __init__ validated everything

    @log
    def __init__(self, notebook_name, path, mask, format, twoway=False):
        # check auth
        if not Storage().getUserToken():
            raise Exception("Auth error. There is not any oAuthToken.")

        # set path
        if not path:
            raise Exception("Path to sync directories does not select.")
        if not os.path.exists(path):
            raise Exception("Path to sync directories does not exist.")
        self.path = path

        # set mask (default: every file)
        if not mask:
            mask = "*.*"
        self.mask = mask

        # set format
        if not format:
            format = "plain"
        self.format = format

        if format == "markdown":
            self.extension = ".md"
        else:
            self.extension = ".txt"

        self.twoway = twoway

        logger.info('Sync Start')

        # set notebook
        self.notebook_guid,\
        self.notebook_name = self._get_notebook(notebook_name, path)

        # all is Ok
        self.all_set = True

    @log
    def sync(self):
        """
        Synchronize files to notes
        """
        if not self.all_set:
            return

        files = self._get_files()
        notes = self._get_notes()

        # Push new/updated local files to Evernote.
        for f in files:
            has_note = False
            for n in notes:
                if f['name'] == n.title:
                    has_note = True
                    if f['mtime'] > n.updated:
                        self._update_note(f, n)
                        break

            if not has_note:
                self._create_note(f)

        # Pull new/updated notes back into local files.
        if self.twoway:
            for n in notes:
                has_file = False
                for f in files:
                    if f['name'] == n.title:
                        has_file = True
                        if f['mtime'] < n.updated:
                            self._update_file(f, n)
                            break

                if not has_file:
                    self._create_file(n)

        logger.info('Sync Complete')

    @log
    def _update_note(self, file_note, note):
        """
        Updates note from file
        """
        content = self._get_file_content(file_note['path'])

        result = GeekNote().updateNote(
            guid=note.guid,
            title=note.title,
            content=content,
            notebook=self.notebook_guid)

        if result:
            logger.info('Note "{0}" was updated'.format(note.title))
        else:
            raise Exception('Note "{0}" was not updated'.format(note.title))

        return result

    @log
    def _update_file(self, file_note, note):
        """
        Updates file from note
        """
        GeekNote().loadNoteContent(note)
        content = Editor.ENMLtoText(note.content)
        # Context manager closes the handle deterministically (the previous
        # code leaked it until garbage collection).
        with open(file_note['path'], "w") as f:
            f.write(content)

    @log
    def _create_note(self, file_note):
        """
        Creates note from file
        """
        content = self._get_file_content(file_note['path'])

        if content is None:
            return

        result = GeekNote().createNote(
            title=file_note['name'],
            content=content,
            notebook=self.notebook_guid,
            created=file_note['mtime'])

        if result:
            logger.info('Note "{0}" was created'.format(file_note['name']))
        else:
            raise Exception('Note "{0}" was not' \
                            ' created'.format(file_note['name']))

        return result

    @log
    def _create_file(self, note):
        """
        Creates file from note
        """
        GeekNote().loadNoteContent(note)
        content = Editor.ENMLtoText(note.content)
        path = os.path.join(self.path, note.title + self.extension)
        with open(path, "w") as f:
            f.write(content)
        return True

    @log
    def _get_file_content(self, path):
        """
        Get file content.
        """
        with open(path, "r") as f:
            content = f.read()

        # strip unprintable characters
        content = remove_control_characters(content.decode('utf-8')).encode('utf-8')
        content = Editor.textToENML(content=content, raise_ex=True, format=self.format)

        if content is None:
            logger.warning("File {0}. Content must be " \
                           "an UTF-8 encode.".format(path))
            return None

        return content

    @log
    def _get_notebook(self, notebook_name, path):
        """
        Get notebook guid and name.
        Takes default notebook if notebook's name does not select.
        """
        notebooks = GeekNote().findNotebooks()

        if not notebook_name:
            # Default to the directory's own name.
            notebook_name = os.path.basename(os.path.realpath(path))

        notebook = [item for item in notebooks if item.name == notebook_name]
        guid = None
        if notebook:
            guid = notebook[0].guid

        if not guid:
            notebook = GeekNote().createNotebook(notebook_name)

            if notebook:
                logger.info('Notebook "{0}" was' \
                            ' created'.format(notebook_name))
            else:
                raise Exception('Notebook "{0}" was' \
                                ' not created'.format(notebook_name))

            guid = notebook.guid

        return (guid, notebook_name)

    @log
    def _get_files(self):
        """
        Get files by self.mask from self.path dir.
        """
        file_paths = glob.glob(os.path.join(self.path, self.mask))

        files = []
        for f in file_paths:
            if os.path.isfile(f):
                file_name = os.path.basename(f)
                file_name = os.path.splitext(file_name)[0]
                mtime = int(os.path.getmtime(f) * 1000)
                files.append({'path': f, 'name': file_name, 'mtime': mtime})

        return files

    @log
    def _get_notes(self):
        """
        Get notes from evernote.
        """
        keywords = 'notebook:"{0}"'.format(tools.strip(self.notebook_name))
        return GeekNote().findNotes(keywords, 10000).notes
def main():
try:
parser = argparse.ArgumentParser()
parser.add_argument('--path', '-p', action='sto |
kickstandproject/sarlacc | sarlacc/tests/asterisk/agi/test_wait_for_digit.py | Python | apache-2.0 | 2,080 | 0 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2013 PolyBeacon, Inc.
#
# Author: Paul Belanger <paul.belanger@polybeacon.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cStringIO
import mock
from sarlacc.tests.asterisk.agi import test
class TestCase(test.TestCase):
@mock.patch('sys.stdin', cStringIO.StringIO("200 result=-1"))
def test_wait_for_digit_failure(self):
with mock.patch(
'sys.stdout', new_callable=cStringIO.StringIO) as mock_stdout:
res, dtmf = self.agi.wait_for_digit(timeout='2000')
self.assertEqual(mock_stdout.getvalue(), 'WAIT FOR DIGIT 2000\n')
self.assertFalse(res)
| self.assertEqual(dtmf, '')
@mock.patch('sys.stdin', cStringIO.StringIO("200 result=51"))
def test_wait_for_digit_success(self):
with mock.patch(
'sys.stdout', new_callable=cStringIO.StringIO) as mock_stdout:
res, dtmf = self.agi.wait_for_digit(timeout='25000')
self.assertEqual(mock_stdout.getvalue(), 'WAIT FOR DIGIT 25000\n')
self.as | sertTrue(res)
self.assertEqual(dtmf, '3')
@mock.patch('sys.stdin', cStringIO.StringIO("200 result=0"))
def test_wait_for_digit_timeout(self):
with mock.patch(
'sys.stdout', new_callable=cStringIO.StringIO) as mock_stdout:
res, dtmf = self.agi.wait_for_digit(timeout='5000')
self.assertEqual(mock_stdout.getvalue(), 'WAIT FOR DIGIT 5000\n')
self.assertTrue(res)
self.assertEqual(dtmf, '')
|
mattcongy/itshop | docker-images/taigav2/taiga-back/taiga/projects/api.py | Python | mit | 32,383 | 0.001328 | # -*- coding: utf-8 -*-
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import uuid
from easy_thumbnails.source_generators import pil_image
from dateutil.relativedelta import relativedelta
from django.apps import apps
from django.conf import settings
from django.http import Http404
from django.utils.translation import ugettext as _
from django.utils import timezone
from django_pglocks import advisory_lock
from taiga.base import filters
from taiga.base import exceptions as exc
from taiga.base import response
from taiga.base.api import ModelCrudViewSet, ModelListViewSet
from taiga.base.api.mixins import BlockedByProjectMixin, BlockeableSaveMixin, BlockeableDeleteMixin
from taiga.base.api.perm | issions import | AllowAnyPermission
from taiga.base.api.utils import get_object_or_404
from taiga.base.decorators import list_route
from taiga.base.decorators import detail_route
from taiga.base.utils.slug import slugify_uniquely
from taiga.permissions import services as permissions_services
from taiga.projects.epics.models import Epic
from taiga.projects.history.mixins import HistoryResourceMixin
from taiga.projects.issues.models import Issue
from taiga.projects.likes.mixins.viewsets import LikedResourceMixin, FansViewSetMixin
from taiga.projects.notifications.mixins import WatchersViewSetMixin
from taiga.projects.notifications.choices import NotifyLevel
from taiga.projects.mixins.on_destroy import MoveOnDestroyMixin
from taiga.projects.mixins.ordering import BulkUpdateOrderMixin
from taiga.projects.tasks.models import Task
from taiga.projects.tagging.api import TagsColorsResourceMixin
from taiga.projects.userstories.models import UserStory, RolePoints
from . import filters as project_filters
from . import models
from . import permissions
from . import serializers
from . import validators
from . import services
from . import utils as project_utils
from . import throttling
######################################################
# Project
######################################################
class ProjectViewSet(LikedResourceMixin, HistoryResourceMixin,
BlockeableSaveMixin, BlockeableDeleteMixin,
TagsColorsResourceMixin, ModelCrudViewSet):
validator_class = validators.ProjectValidator
queryset = models.Project.objects.all()
permission_classes = (permissions.ProjectPermission, )
filter_backends = (project_filters.UserOrderFilterBackend,
project_filters.QFilterBackend,
project_filters.CanViewProjectObjFilterBackend,
project_filters.DiscoverModeFilterBackend)
filter_fields = (("member", "members"),
"is_looking_for_people",
"is_featured",
"is_backlog_activated",
"is_kanban_activated")
ordering = ("name", "id")
order_by_fields = ("total_fans",
"total_fans_last_week",
"total_fans_last_month",
"total_fans_last_year",
"total_activity",
"total_activity_last_week",
"total_activity_last_month",
"total_activity_last_year")
    def is_blocked(self, obj):
        """A project is considered blocked when it carries any blocked_code."""
        return obj.blocked_code is not None
    def _get_order_by_field_name(self):
        """Return the field name of a *descending* ``order_by`` query param.

        NOTE(review): ascending values (no leading "-") fall through and the
        method implicitly returns None — presumably only the descending form
        should trigger the period filtering in get_queryset; confirm intent.
        """
        order_by_query_param = project_filters.CanViewProjectObjFilterBackend.order_by_query_param
        order_by = self.request.QUERY_PARAMS.get(order_by_query_param, None)
        if order_by is not None and order_by.startswith("-"):
            return order_by[1:]
def get_queryset(self):
qs = super().get_queryset()
qs = qs.select_related("owner")
if self.request.QUERY_PARAMS.get('discover_mode', False):
qs = project_utils.attach_members(qs)
qs = project_utils.attach_notify_policies(qs)
qs = project_utils.attach_is_fan(qs, user=self.request.user)
qs = project_utils.attach_my_role_permissions(qs, user=self.request.user)
qs = project_utils.attach_my_role_permissions(qs, user=self.request.user)
qs = project_utils.attach_closed_milestones(qs)
else:
qs = project_utils.attach_extra_info(qs, user=self.request.user)
# If filtering an activity period we must exclude the activities not updated recently enough
now = timezone.now()
order_by_field_name = self._get_order_by_field_name()
if order_by_field_name == "total_fans_last_week":
qs = qs.filter(totals_updated_datetime__gte=now - relativedelta(weeks=1))
elif order_by_field_name == "total_fans_last_month":
qs = qs.filter(totals_updated_datetime__gte=now - relativedelta(months=1))
elif order_by_field_name == "total_fans_last_year":
qs = qs.filter(totals_updated_datetime__gte=now - relativedelta(years=1))
elif order_by_field_name == "total_activity_last_week":
qs = qs.filter(totals_updated_datetime__gte=now - relativedelta(weeks=1))
elif order_by_field_name == "total_activity_last_month":
qs = qs.filter(totals_updated_datetime__gte=now - relativedelta(months=1))
elif order_by_field_name == "total_activity_last_year":
qs = qs.filter(totals_updated_datetime__gte=now - relativedelta(years=1))
return qs
    def retrieve(self, request, *args, **kwargs):
        """Fetch one project; the ``by_slug`` action looks it up by slug
        instead of the primary key."""
        if self.action == "by_slug":
            self.lookup_field = "slug"
        return super().retrieve(request, *args, **kwargs)
def get_serializer_class(self):
if self.action == "list":
return serializers.ProjectSerializer
return serializers.ProjectDetailSerializer
    @detail_route(methods=["POST"])
    def change_logo(self, request, *args, **kwargs):
        """
        Change logo to this project.

        Expects the uploaded image in the ``logo`` multipart field; the
        upload is validated as a decodable image before being saved.
        """
        self.object = get_object_or_404(self.get_queryset(), **kwargs)
        self.check_permissions(request, "change_logo", self.object)
        logo = request.FILES.get('logo', None)
        if not logo:
            raise exc.WrongArguments(_("Incomplete arguments"))
        try:
            # Reject anything PIL cannot open as an image.
            pil_image(logo)
        except Exception:
            raise exc.WrongArguments(_("Invalid image format"))
        self.pre_conditions_on_save(self.object)
        self.object.logo = logo
        self.object.save(update_fields=["logo"])
        serializer = self.get_serializer(self.object)
        return response.Ok(serializer.data)
    @detail_route(methods=["POST"])
    def remove_logo(self, request, *args, **kwargs):
        """
        Remove the logo of a project.
        """
        self.object = get_object_or_404(self.get_queryset(), **kwargs)
        self.check_permissions(request, "remove_logo", self.object)
        self.pre_conditions_on_save(self.object)
        # Clearing the field is enough; only "logo" is persisted.
        self.object.logo = None
        self.object.save(update_fields=["logo"])
        serializer = self.get_serializer(self.object)
        return response.Ok(serializer.data)
@detail_route(methods=["POST"])
def watch(self, request, pk=None):
project = self.get_object()
self.check_permissions(request, "watch", project)
self.pre_conditions_on_save(project)
notify_level = request.DATA. |
jcarreiro/jmc-python | imp/dice.py | Python | mit | 1,405 | 0.0121 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import io
import string
import random
# Simple recursive descent parser for dice rolls, e.g. '3d6+1d8+4'.
#
# roll := die {('+' | '-') die} ('+' | '-') modifier
# die := number 'd' number
# modifier := number
class StringBuf(object):
    """A string wrapped with a cursor: single-character reads with pushback."""

    def __init__(self, s):
        self.s = s
        self.pos = 0

    def peek(self):
        """Return the character at the cursor without consuming it."""
        return self.s[self.pos]

    def getc(self):
        """Consume and return the next character."""
        ch = self.s[self.pos]
        self.pos += 1
        return ch

    def ungetc(self):
        """Push the last character back (move the cursor one step left)."""
        self.pos -= 1

    def tell(self):
        """Current cursor offset into the string."""
        return self.pos
class Symbol(object):
    """A lexical token: its category, position in the input, and literal value.

    The original ``__init__`` was a syntax error (missing colon and body);
    it now stores its three arguments.
    """
    NUMBER = 0
    D = 1
    PLUS = 2
    MINUS = 3

    def __init__(self, type_, pos, value):
        self.type_ = type_   # one of NUMBER / D / PLUS / MINUS
        self.pos = pos       # offset of the token's first character
        self.value = value   # the literal text of the token
def next_symbol(s):
    """Scan the next token from StringBuf *s*.

    Returns a ``(symbol_type, position, literal)`` tuple, where
    ``symbol_type`` is one of the ``Symbol`` categories.  Raises ValueError
    on unrecognized input.  Reading past the end of the input raises
    IndexError from ``StringBuf.getc``; callers are expected to handle it.

    The original body was an unfinished stub (empty branches, a dangling
    tuple, and a TypeError-raising ``'...' + s.tell()`` error message).
    """
    c = s.getc()
    while c in string.whitespace:
        c = s.getc()
    pos = s.tell() - 1  # position of the token's first character
    if c in string.digits:
        # start of a number: accumulate consecutive digits
        literal = c
        try:
            c = s.getc()
            while c in string.digits:
                literal += c
                c = s.getc()
            s.ungetc()
        except IndexError:
            pass  # the number ran to the end of the input
        return (Symbol.NUMBER, pos, literal)
    elif c == 'd':
        # die indicator
        return (Symbol.D, pos, c)
    elif c == '+':
        # plus sign
        return (Symbol.PLUS, pos, c)
    elif c == '-':
        # minus sign
        return (Symbol.MINUS, pos, c)
    else:
        # unrecognized input
        raise ValueError('Syntax error at position %d' % s.tell())
|
LinuxCircle/tea5767 | wstester.py | Python | mit | 1,082 | 0.008318 | import websocket
try:
import thread
except ImportError: #T | ODO use Threading instead of _thread in python3
import _thread as thread
import time
import sys
def on_message(ws, message):
    """Callback: print every message received from the server."""
    print(message)
def on_error(ws, error):
    """Callback: print any websocket error."""
    print(error)
def on_close(ws):
    """Callback: announce that the connection was closed."""
    print("### closed ###")
def on_open(ws):
    """Callback: once connected, send three greetings from a worker thread,
    then close the socket."""
    def run(*args):
        for i in range(3):
            # Send the message, then sleep so the thread does not exit
            # (and the socket is not closed) before delivery.
            ws.send("Hello %d" % i)
            time.sleep(1)
        time.sleep(1)
        ws.close()
        print("Thread terminating...")
    thread.start_new_thread(run, ())
if __name__ == "__main__":
    websocket.enableTrace(True)
    # Target host comes from argv[1]; default to the public echo server.
    if len(sys.argv) < 2:
        host = "ws://echo.websocket.org/"
    else:
        host = sys.argv[1]
    ws = websocket.WebSocketApp(host,
                                on_message = on_message,
                                on_error = on_error,
                                on_close = on_close)
    ws.on_open = on_open
    # Blocks until the connection is closed.
    ws.run_forever()
emacsmirror/stgit | stgit/lib/git/base.py | Python | gpl-2.0 | 852 | 0.00939 | class Immutable:
"""Base class for immutable objects.
Immutable objects cannot be modified once created. Any modification methods will
return a new object, leaving the original object as it was.
The reason for this is that we want to be able to represent git objects, which are
immutable, and want to be able t | o create new git objects that are just slight
modifications of other git | objects. (Such as, for example, modifying the commit
message of a commit object while leaving the rest of it intact. This involves
creating a whole new commit object that's exactly like the old one except for the
commit message.)
The ``Immutable`` class does not actually enforce immutability--subclasses are
responsible for enforcing immutability. Thus inheriting ``Immutable`` just serves as
documentation.
"""
|
Kozea/Dyko | kalamar/site.py | Python | gpl-3.0 | 6,799 | 0.000441 | # -*- coding: utf-8 -*-
# This file is part of Dyko
# Copyright © 2008-2010 Kozea
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Kalamar. If not, see <http://www.gnu.org/licenses/>.
"""
Site
====
Site class. Create one for each independent site with its own configuration.
"""
import logging
from .request import normalize, make_request, And, Condition, Or, Not
from .query import QueryFilter, QuerySelect, QueryChain, QueryOrder, QueryRange,\
QueryDistinct, QueryAggregate
from .access_point import DEFAULT_PARAMETER
def _translate_request(request, aliases):
    """Translate high-level ``request`` to low-level using ``aliases``.

    Recurses through And/Or/Not combinators; Condition leaves are rewritten
    in place (their property name is replaced by an alias) and ``aliases``
    is extended with a generated "__"-prefixed alias when the property was
    not selected yet.  Both arguments are mutated.
    """
    if isinstance(request, And):
        return And(*(_translate_request(req, aliases)
                     for req in request.sub_requests))
    elif isinstance(request, Or):
        return Or(*(_translate_request(req, aliases)
                    for req in request.sub_requests))
    elif isinstance(request, Not):
        return Not(_translate_request(request.sub_request, aliases))
    elif isinstance(request, Condition):
        name = repr(request.property)
        if name in aliases:
            # The complete path has already been selected,
            # Let's use the alias instead !
            new_name = aliases.get(name, name)
            request.property.name = new_name
            request.property.child_property = None
            return request
        elif name in aliases.values():
            # Already a low-level alias; nothing to rewrite.
            return request
        elif ".".join(name.split(".")[:-1] + ["*"]) in aliases:
            # Covered by a wildcard selection on the parent path.
            return request
        else:
            # Not selected at all: register a hidden alias for filtering.
            new_name = "__%s" % name.replace(".", "_")
            aliases[name] = new_name
            request.property.name = new_name
            request.property.child_property = None
            return request
def _delegate_to_ | acces_point(method_name, first_arg_is_a_request=False):
"""Create a function delegating ``method_name`` to an access point."""
if first_arg_is_a_request:
def wrapper(self, access_point_name, request=None, *args, **kwargs):
"""Call ``access_point.method_name(request, *args, **kwargs)``."""
access_point = self.access_points[access_point_name]
request = normalize(access_point.properties, request)
return getattr(access_point, method_name)(request, *args, **kwargs)
else:
def wrapper(self, access_point_name, *args, **kwargs):
"""Call ``access_point.method_name(*args, **kwargs)``."""
access_point = self.access_points[access_point_name]
return getattr(access_point, method_name)(*args, **kwargs)
# Redefining documentation and name of the wrappers
# pylint: disable=W0622
wrapper.__name__ = method_name
wrapper.__doc__ = \
"Call :meth:`kalamar.access_point.AccessPoint.%s`." % method_name
# pylint: enable=W0622
return wrapper
class Site(object):
    """Kalamar site: a named registry of access points sharing one logger."""
    def __init__(self):
        self.access_points = {}
        self.logger = logging.getLogger("dyko")
        try:
            from logging import NullHandler
        except ImportError:
            # Python < 2.7: provide a minimal stand-in for NullHandler.
            class NullHandler(logging.Handler):
                def emit(self, record):
                    pass
        # A no-op handler silences "no handlers could be found" warnings
        # for applications that do not configure logging themselves.
        self.logger.addHandler(NullHandler())
    def register(self, name, access_point):
        """Add an access point to this site.

        :param name: Identifier string of the added access point.
        :param access_point: Concrete subclass of :class:`AccessPoint`.

        :raises RuntimeError: if ``name`` is already registered.
        """
        if name in self.access_points:
            raise RuntimeError(
                "Site already has an access point named %r." % name)
        self.access_points[name] = access_point
        access_point.bind(self, name)
    def view(self, access_point_name, aliases=None, request=None, order_by=None,
             select_range=None, distinct=False, aggregate=None, query=None):
        """Call :meth:`kalamar.access_point.AccessPoint.view`.

        If ``alias`` and ``request`` are given, a query is created from them.
        The query is then validated and then passed to the ``view`` method of
        the acess point called ``access_point_name``.
        """
        access_point = self.access_points[access_point_name]
        if aliases is None:
            aliases = {"": "*"}
        if query is None:
            # Build the low-level query chain:
            # select -> filter -> distinct -> order -> aggregate -> range.
            # Add dummy selects to be able to filter on those
            chain = []
            aliases = dict(((value, key) for key, value in aliases.items()))
            request = make_request(request)
            request = _translate_request(request, aliases)
            aliases = dict(((value, key) for key, value in aliases.items()))
            chain.append(QuerySelect(aliases))
            chain.append(QueryFilter(request))
            if distinct:
                chain.append(QueryDistinct())
            if order_by is not None:
                chain.append(QueryOrder(order_by))
            if aggregate is not None:
                chain.append(QueryAggregate(aggregate))
            if select_range is not None:
                # Accept either a (start, stop) iterable or a single stop value.
                if hasattr(select_range, "__iter__"):
                    select_range = slice(*select_range)
                else:
                    select_range = slice(select_range)
                chain.append(QueryRange(select_range))
            query = QueryChain(chain)
        query.validate(access_point.properties)
        for line in access_point.view(query):
            # Hide the "__"-prefixed helper properties added only for filtering.
            for prop_name in [name for name in line if name.startswith("__")]:
                line.pop(prop_name)
            yield line
    def from_repr(self, access_point_name, repr, default=DEFAULT_PARAMETER):
        """
        Return an item of ``access_point_name`` from the ``repr`` string.

        ``repr`` should have been generated with item.__repr__()
        """
        access_point = self.access_points[access_point_name]
        return access_point.loader_from_reference_repr(repr)(None)[0]
    # CRUD-style operations are thin delegations to the named access point.
    create = _delegate_to_acces_point("create")
    delete = _delegate_to_acces_point("delete")
    delete_many = _delegate_to_acces_point("delete_many", True)
    open = _delegate_to_acces_point("open", True)
    search = _delegate_to_acces_point("search", True)
    save = _delegate_to_acces_point("save")
|
libo/Enigma2 | lib/python/Screens/NetworkSetup.py | Python | gpl-2.0 | 55,103 | 0.032648 | from Screen import Screen
from Screens.MessageBox import MessageBox
from Screens.InputBox import InputBox
from Screens.Standby import *
from Screens.VirtualKeyBoard import VirtualKeyBoard
from Screens.HelpMenu import HelpableScreen
from Components.Network import iNetwork
from Components.Sources.StaticText import StaticText
from Components.Sources.Boolean import Boolean
from Components.Sources.List import List
from Components.Label import Label,MultiColorLabel
from Components.Pixmap import Pixmap,MultiPixmap
from Components.MenuList import MenuList
from Components.config import config, ConfigYesNo, ConfigIP, NoSave, ConfigText, ConfigPassword, ConfigSelection, getConfigListEntry, ConfigNothing
from Components.ConfigList import ConfigListScreen
from Components.PluginComponent import plugins
from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmapAlphaTest
from Components.ActionMap import ActionMap, NumberActionMap, HelpableActionMap
from Tools.Directories import resolveFilename, SCOPE_PLUGINS, SCOPE_CURRENT_SKIN
from Tools.LoadPixmap import LoadPixmap
from Plugins.Plugin import PluginDescriptor
from enigma import eTimer, ePoint, eSize, RT_HALIGN_LEFT, eListboxPythonMultiContent, gFont
from os import path as os_path, system as os_system, unlink
from re import compile as re_compile, search as re_search
class NetworkAdapterSelection(Screen,HelpableScreen):
	def __init__(self, session):
		# Selection screen listing every network adapter; offers edit
		# (OK/green), set-default (yellow, only with >= 2 configured
		# adapters) and the network wizard (blue, when installed).
		Screen.__init__(self, session)
		HelpableScreen.__init__(self)
		self.wlan_errortext = _("No working wireless network adapter found.\nPlease verify that you have attached a compatible WLAN device and your network is configured correctly.")
		self.lan_errortext = _("No working local network adapter found.\nPlease verify that you have attached a network cable and your network is configured correctly.")
		self.oktext = _("Press OK on your remote control to continue.")
		self.edittext = _("Press OK to edit the settings.")
		self.defaulttext = _("Press yellow to set this interface as default interface.")
		self.restartLanRef = None
		self["key_red"] = StaticText(_("Close"))
		self["key_green"] = StaticText(_("Select"))
		self["key_yellow"] = StaticText("")
		self["key_blue"] = StaticText("")
		self["introduction"] = StaticText(self.edittext)
		# (friendly name, interface name) pairs for every known adapter.
		self.adapters = [(iNetwork.getFriendlyAdapterName(x),x) for x in iNetwork.getAdapterList()]
		if not self.adapters:
			# No adapter found at all: show an explanatory message box.
			self.onFirstExecBegin.append(self.NetworkFallback)
		self["OkCancelActions"] = HelpableActionMap(self, "OkCancelActions",
			{
			"cancel": (self.close, _("exit network interface list")),
			"ok": (self.okbuttonClick, _("select interface")),
			})
		self["ColorActions"] = HelpableActionMap(self, "ColorActions",
			{
			"red": (self.close, _("exit network interface list")),
			"green": (self.okbuttonClick, _("select interface")),
			"blue": (self.openNetworkWizard, _("Use the Networkwizard to configure selected network adapter")),
			})
		self["DefaultInterfaceAction"] = HelpableActionMap(self, "ColorActions",
			{
			"yellow": (self.setDefaultInterface, [_("Set interface as default Interface"),_("* Only available if more than one interface is active.")] ),
			})
		self.list = []
		self["list"] = List(self.list)
		self.updateList()
		if len(self.adapters) == 1:
			# Exactly one adapter: jump straight into its setup dialog.
			self.onFirstExecBegin.append(self.okbuttonClick)
		self.onClose.append(self.cleanup)
def buildInterfaceList(self,iface,name,default,active ):
divpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/div-h.png"))
defaultpng = None
activepng = None |
description = None
interfacepng = None
if iface in iNetwork.lan_interfaces:
if active is True:
interfacepng = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/network_wired-active.png"))
elif active is False:
interfacepng = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/network_wired-inactive.png"))
else:
| interfacepng = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/network_wired.png"))
elif iface in iNetwork.wlan_interfaces:
if active is True:
interfacepng = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/network_wireless-active.png"))
elif active is False:
interfacepng = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/network_wireless-inactive.png"))
else:
interfacepng = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/network_wireless.png"))
num_configured_if = len(iNetwork.getConfiguredAdapters())
if num_configured_if >= 2:
if default is True:
defaultpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/buttons/button_blue.png"))
elif default is False:
defaultpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/buttons/button_blue_off.png"))
if active is True:
activepng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/lock_on.png"))
elif active is False:
activepng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/lock_error.png"))
description = iNetwork.getFriendlyAdapterDescription(iface)
return((iface, name, description, interfacepng, defaultpng, activepng, divpng))
def updateList(self):
self.list = []
default_gw = None
num_configured_if = len(iNetwork.getConfiguredAdapters())
if num_configured_if >= 2:
self["key_yellow"].setText(_("Default"))
self["introduction"].setText(self.defaulttext)
self["DefaultInterfaceAction"].setEnabled(True)
else:
self["key_yellow"].setText("")
self["introduction"].setText(self.edittext)
self["DefaultInterfaceAction"].setEnabled(False)
if num_configured_if < 2 and os_path.exists("/etc/default_gw"):
unlink("/etc/default_gw")
if os_path.exists("/etc/default_gw"):
fp = file('/etc/default_gw', 'r')
result = fp.read()
fp.close()
default_gw = result
if len(self.adapters) == 0: # no interface available => display only eth0
self.list.append(self.buildInterfaceList("eth0",iNetwork.getFriendlyAdapterName('eth0'),True,True ))
else:
for x in self.adapters:
if x[1] == default_gw:
default_int = True
else:
default_int = False
if iNetwork.getAdapterAttribute(x[1], 'up') is True:
active_int = True
else:
active_int = False
self.list.append(self.buildInterfaceList(x[1],_(x[0]),default_int,active_int ))
if os_path.exists(resolveFilename(SCOPE_PLUGINS, "SystemPlugins/NetworkWizard/networkwizard.xml")):
self["key_blue"].setText(_("NetworkWizard"))
self["list"].setList(self.list)
def setDefaultInterface(self):
selection = self["list"].getCurrent()
num_if = len(self.list)
old_default_gw = None
num_configured_if = len(iNetwork.getConfiguredAdapters())
if os_path.exists("/etc/default_gw"):
fp = open('/etc/default_gw', 'r')
old_default_gw = fp.read()
fp.close()
if num_configured_if > 1 and (not old_default_gw or old_default_gw != selection[0]):
fp = open('/etc/default_gw', 'w+')
fp.write(selection[0])
fp.close()
self.restartLan()
elif old_default_gw and num_configured_if < 2:
unlink("/etc/default_gw")
self.restartLan()
def okbuttonClick(self):
selection = self["list"].getCurrent()
if selection is not None:
self.session.openWithCallback(self.AdapterSetupClosed, AdapterSetupConfiguration, selection[0])
def AdapterSetupClosed(self, *ret):
if len(self.adapters) == 1:
self.close()
else:
self.updateList()
def NetworkFallback(self):
if 'wlan0' in iNetwork.configuredNetworkAdapters:
self.session.openWithCallback(self.ErrorMessageClosed, MessageBox, self.wlan_errortext, type = MessageBox.TYPE_INFO,timeout = 10)
if 'ath0' in iNetwork.configuredNetworkAdapters:
self.session.openWithCallback(self.ErrorMessageClosed, MessageBox, self.wlan_errortext, type = MessageBox.TYPE_INFO,timeout = 10)
else:
self.session.openWithCallback(self.ErrorMessageClosed, MessageBox, self.lan_errortext, type = MessageBox.TYPE_INFO,timeout = 10)
def ErrorMessageClosed(self, *ret):
if 'wlan0' in iNet |
Julian/clogs | clogs/tests/test_git.py | Python | mit | 559 | 0 | from __future__ import absolute_import |
import unittest
from git import Repo
import mock
from clogs import git
class TestGitClogger(unittest.TestCase):
    """Unit tests for clogs.git.GitClogger with git.Repo mocked out."""

    def setUp(self):
        super(TestGitClogger, self).setUp()
        repo_patch = mock.patch("clogs.git.git.Repo", mock.Mock(spec=Repo))
        with repo_patch:
            self.g = git.GitClogger(directory="foo")
        self.repo = self.g.repo

    def test_init(self):
        """The directory handed to the constructor is stored verbatim."""
        self.assertEqual("foo", self.g.directory)

    def test_commit_id(self):
        """commit_id mirrors the HEAD commit hash of the wrapped repo."""
        self.assertEqual(self.repo.head.commit.hexsha, self.g.commit_id)
|
momm3/WelcomeBot | welcomebot/Lib/site-packages/requests/adapters.py | Python | mit | 20,601 | 0.001262 | # -*- coding: utf-8 -*-
"""
requests.adapters
~~~~~~~~~~~~~~~~~
This module contains the transport adapters that Requests uses to define
and maintain connections.
"""
import os.path
import socket
from urllib3.poolmanager import PoolManager, proxy_from_url
from urllib3.response import HTTPResponse
from urllib3.util import Timeout as TimeoutSauce
from urllib3.util.retry import Retry
from urllib3.exceptions import ClosedPoolError
from urllib3.exceptions import ConnectTimeoutError
from urllib3.exceptions import HTTPError as _HTTPError
from urllib3.exceptions import MaxRetryError
from urllib3.exceptions import NewConnectionError
from urllib3.exceptions import ProxyError as _ProxyError
from urllib3.exceptions import ProtocolError
from urllib3.exceptions import ReadTimeoutError
from urllib3.exceptions import SSLError as _SSLError
from urllib3.exceptions import ResponseError
from .models import Response
from .compat import urlparse, basestring
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
prepend_scheme_if_needed, get_auth_from_url, urldefragauth,
select_proxy)
from .structures import CaseInsensitiveDict
from .cookies import extract_cookies_to_jar
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
ProxyError, RetryError, InvalidSchema)
from .auth import _basic_auth_str
try:
    from urllib3.contrib.socks import SOCKSProxyManager
except ImportError:
    # SOCKS support is optional; this stub defers the failure until a
    # SOCKS proxy is actually requested.
    def SOCKSProxyManager(*args, **kwargs):
        raise InvalidSchema("Missing dependencies for SOCKS support.")
# Defaults for HTTPAdapter's urllib3 connection-pool configuration.
DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0
DEFAULT_POOL_TIMEOUT = None
class BaseAdapter(object):
    """Abstract base class for Requests transport adapters.

    Concrete adapters must implement :meth:`send` and :meth:`close`.
    """

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def send(self, request, stream=False, timeout=None, verify=True,
             cert=None, proxies=None):
        """Send a PreparedRequest and return a Response.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Stream the response content instead of
            downloading it immediately.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a
            ``(connect timeout, read timeout)`` tuple.
        :type timeout: float or tuple
        :param verify: (optional) Either a boolean controlling TLS
            certificate verification, or a path to a CA bundle to use.
        :param cert: (optional) Any user-provided SSL certificate to be
            trusted.
        :param proxies: (optional) The proxies dictionary to apply to the
            request.
        """
        raise NotImplementedError

    def close(self):
        """Release any resources held by the adapter."""
        raise NotImplementedError
class HTTPAdapter(BaseAdapter):
"""The built-in HTTP Adapter for urllib3.
Provides a general-case interface for Requests sessions to contact HTTP and
HTTPS urls by implementing the Transport Adapter interface. This class will
usually be created by the :class:`Session <Session>` class under the
covers.
:param pool_connections: The number of urllib3 connection pools to cache.
:param pool_maxsize: The maximum number of connections to save in the pool.
:param max_retries: The maximum number of retries each connection
should attempt. Note, this applies only to failed DNS lookups, socket
connections and connection timeouts, never to requests where data has
made it to the server. By default, Requests does not retry failed
connections. If you need granular control over the conditions under
which we retry a request, import urllib3's ``Retry`` class and pass
that instead.
:param pool_block: Whether the connection pool should block for connections.
Usage::
>>> import requests
>>> s = requests.Session()
>>> a = requests.adapters.HTTPAdapter(max_retries=3)
>>> s.mount('http://', a)
"""
__attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
'_pool_block']
def __init__(self, pool_connections=DEFAULT_POOLSIZE,
pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
pool_block=DEFAULT_POOLBLOCK):
if max_retries == DEFAULT_RETRIES:
self.max_retries = Retry(0, read=False)
else:
self.max_retries = Retry.from_int(max_retries)
self.config = {}
self.proxy_manager = {}
super(HTTPAdapter, self).__init__()
self._pool_connections = pool_connections
self._pool_maxsize = pool_maxsize
self._pool_block = pool_block
self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
def __getstate__(self):
return dict((attr, getattr(self, attr, None)) for attr in
self.__attrs__)
def __setstate__(self, state):
# Can't handle by adding 'proxy_manager' to self.__attrs__ because
# self.poolmanager uses a lambda function, which isn't pickleable.
self.proxy_manager = {}
self.config = {}
for attr, value in state.items():
setattr(self, attr, value)
self.init_poolmanager(self._pool_connections, self._pool_maxsize,
block=self._pool_block)
def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
"""Initializes a urllib3 PoolManager.
This method should not be called from user code, and is only
exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param connections: The number of urllib3 connection pools to cache.
:param maxsize: The maximum number of connections to save in the pool.
:param block: Block when no free connections are available.
:param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
"""
# save these values for pickling
self._pool_connections = connections
self._pool_maxsize = maxsize
self._pool_block = block
self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
block=block, strict=True, **pool_kwargs)
def proxy_manager_for(self, proxy, **proxy_kwargs):
"""Return urllib3 ProxyManager for the given proxy.
This method should not be called from user code, and is only
exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
:param proxy: The proxy to return a urllib3 ProxyManager for.
:param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
:returns: ProxyManager
:rtype: urllib3.ProxyManager
"""
if proxy in self.proxy_manager:
manager = self.proxy_manager[proxy]
elif proxy.lower().startswith('socks'):
username, password = get_auth_from_url(proxy)
manager = self.proxy_manager[proxy] = SOCKSProxyManager(
proxy,
username=username,
password=password,
num_pools=self._pool_connections,
maxsize=self._pool_maxsize,
block=self._pool_block,
**proxy_kwargs
)
else:
proxy_headers = self.proxy_headers(proxy)
manager = self.proxy_manager[proxy] = proxy_from_url(
proxy,
proxy_headers=proxy_headers,
num_pools=self._pool_connections,
maxsize=self._pool_maxsize,
block=self._pool_block,
**proxy_kwargs)
return manager
def cert_verify(self, conn, url, verify, cert):
"""Verify a SSL certificate. This method should not be called from user
code, and is only exposed for use when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAda |
AutorestCI/azure-sdk-for-python | azure-graphrbac/azure/graphrbac/models/domain.py | Python | mit | 1,683 | 0 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License | .txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Do | main(Model):
"""Active Directory Domain information.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar authentication_type: the type of the authentication into the domain.
:vartype authentication_type: str
:ivar is_default: if this is the default domain in the tenant.
:vartype is_default: bool
:ivar is_verified: if this domain's ownership is verified.
:vartype is_verified: bool
:param name: the domain name.
:type name: str
"""
_validation = {
'authentication_type': {'readonly': True},
'is_default': {'readonly': True},
'is_verified': {'readonly': True},
'name': {'required': True},
}
_attribute_map = {
'authentication_type': {'key': 'authenticationType', 'type': 'str'},
'is_default': {'key': 'isDefault', 'type': 'bool'},
'is_verified': {'key': 'isVerified', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(self, name):
self.authentication_type = None
self.is_default = None
self.is_verified = None
self.name = name
|
richardcornish/smsweather | emojiweather/utils/utils.py | Python | bsd-3-clause | 596 | 0 | from django.contrib.gis.geoip2 import GeoIP2
from geoip2.errors impor | t GeoIP2Error
from ipware import get_client_ip
def get_location_from_ip(request):
    """Best-effort "City, Country" string for the requesting client.

    Resolves the client IP (via django-ipware) against the GeoIP2 city
    database.  Returns ``None`` when the IP cannot be determined or the
    lookup fails; a missing city or country part is omitted gracefully.
    """
    client_ip, is_routable = get_client_ip(request)
    if client_ip is None:
        return None
    g = GeoIP2()
    try:
        record = g.city(client_ip)
    except GeoIP2Error:
        return None
    if record:
        city = record.get('city') or ''
        # BUGFIX: django's GeoIP2.city() exposes the country under the
        # 'country_name' key (plus 'country_code'); there is no plain
        # 'country' key, so the previous lookup always yielded ''.
        country = record.get('country_name') or ''
        delimeter = ', ' if city and country else ''
        return f'{city}{delimeter}{country}'
    return None
|
malishevg/edugraph | common/djangoapps/lang_pref/views.py | Python | agpl-3.0 | 657 | 0 | """
Views for accessing language preferences
"""
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, HttpResponseBadRequest
from user_api.models import UserPreference
from lang_pref impo | rt LANGUAGE_KEY
@login_required
def set_language(request):
    """Store the POSTed ``language`` value as the user's language preference.

    Responds with a JSON success payload, or HTTP 400 when no language
    was supplied.
    """
    lang_pref = request.POST.get('language', None)
    if not lang_pref:
        return HttpResponseBadRequest('no language provided')
    UserPreference.set_preference(request.user, LANGUAGE_KEY, lang_pref)
    return HttpResponse('{"success": true}')
|
abalakh/robottelo | tests/foreman/cli/test_repository_set.py | Python | gpl-3.0 | 11,864 | 0 | # pylint: disable=invalid-name
"""Tests for cli repository set"""
from robottelo.cli.factory import make_org
from robottelo.cli.product import Product
from robottelo.cli.repository_set import RepositorySet
from robottelo.cli.subscription import Subscription
from robottelo import manifests
from robottelo.constants import PRDS, REPOSET
from robottelo.ssh import upload_file
from robottelo.test import CLITestCase
class TestRepositorySet(CLITestCase):
"""Repository Set CLI tests."""
def test_repositoryset_available_repositories(self):
"""@Test: List available repositories for repository-set
@Feature: Repository-set
@Assert: List of available repositories is displayed, with
valid amount of enabled repositories
"""
rhel_product_name = PRDS['rhel']
rhel_repo_set = REPOSET['rhva6']
# Clone manifest and upload it
org = make_org()
manifest = manifests.clone()
upload_file(manifest, remote_file=manifest)
Subscription.upload({
u'file': manifest,
u'organization-id': org['id'],
})
# No repos should be enabled by default
result = RepositorySet.available_repositories({
u'name': rhel_repo_set,
u'organization-id': org['id'],
u'product': rhel_product_name,
})
self.assertEqual(
sum(int(repo['enabled'] == u'true') for repo in result),
0
)
# Enable repo from Repository Set
RepositorySet.enable({
u'basearch': 'x86_64',
u'name': rhel_repo_set,
u'organization-id': org['id'],
u'product': rhel_product_name,
u'releasever': '6Server',
})
# Only 1 repo should be enabled
result = RepositorySet.available_repositories({
u'name': rhel_repo_set,
u'organization': org['name'],
u'product': rhel_product_name,
})
self.assertEqual(
sum(int(repo['enabled'] == u'true') for repo in result),
1
)
# Enable one more repo
RepositorySet.enable({
u'basearch': 'i386',
u'name': rhel_repo_set,
u'organization-id': org['id'],
u'product': rhel_product_name,
u'releasever': '6Server',
})
# 2 repos should be enabled
result = RepositorySet.available_repositories({
u'name': rhel_repo_set,
u'organization-label': org['label'],
u'product': rhel_product_name,
})
self.assertEqual(
sum(int(repo['enabled'] == u'true') for repo in result),
2
)
# Disable one repo
RepositorySet.disable({
u'basearch': 'i386',
u'name': rhel_repo_set,
u'organization-id': org['id'],
u'product': rhel_product_name,
u'releasever': '6Server',
})
# There should remain only 1 enabled repo
result = RepositorySet.available_repositories({
u'name': rhel_repo_set,
u'organization-id': org['id'],
u'product': rhel_product_name,
})
self.assertEqual(
sum(int(repo['enabled'] == u'true') for repo in result),
1
)
# Disable the last enabled repo
RepositorySet.disable({
u'basearch': 'x86_64',
u'name': rhel_repo_set,
u'organization-id': org['id'],
u'product': rhel_product_name,
u'releasever': '6Server',
})
# There should be no enabled repos
result = RepositorySet.available_repositories({
u'name': rhel_repo_set,
u'organization-id': org['id'],
u'product': rhel_product_name,
})
self.assertEqual(
sum(int(repo['enabled'] == u'true') for repo in result),
0
)
def test_repositoryset_enable_by_name(self):
"""@Test: Enable repo from reposet by names of reposet, org and product
@Feature: Repository-set
@Assert: Repository was enabled
"""
org = make_org()
manifest = manifests.clone()
upload_file(manifest, remote_file=manifest)
Subscription.upload({
u'file': manifest,
u'organization-id': org['id'],
})
RepositorySet.enable({
u'basearch': 'x86_64',
u'name': REPOSET['rhva6'],
u'organization': org['name'],
u'product': PRDS['rhel'],
u'releasever': '6Server',
})
result = RepositorySet.available_repositories({
u'name': REPOSET['rhva6'],
u'organization': org['name'],
u'product': PRDS['rhel'],
})
enabled = [
repo['enabled']
for repo
in result
if repo['arch'] == 'x86_64' and repo['release'] == '6Server'
][0]
self.assertEqual(enabled, 'true')
def test_repositoryset_enable_by_label(self):
"""@Test: Enable repo from reposet by org label, reposet and product
names
@Feature: Repository-set
@Assert: Repository was enabled
"""
org = make_org()
manifest = manifests.clone()
upload_file(manifest, remote_file=manifest)
Subscription.upload({
u'file': manifest,
u'organization-id': org['id'],
})
RepositorySet.enable({
u'basearch': 'x86_64',
u'name': REPOSET['rhva6'],
u'organization-label': org['label'],
u'product': PRDS['rhel'],
u'releasever': '6Server',
})
result = RepositorySet.available_repositories({
u'name': REPOSET['rhva6'],
u'organization-label': org['label'],
u'product': PRDS['rhel'],
})
enabled = [
repo['enabled']
for repo
in result
if repo['arch'] == 'x86_64' and repo['release'] == '6Server'
][0]
self.assertEqual(enabled, 'true')
def test_repositoryset_enable_by_id(self):
"""@Test: Enable repo from reposet by IDs of reposet, org and product
@Feature: Repository-set
@Assert: Repository was enabled
"""
org = make_org()
manifest = manifests.clone()
upload_file(manifest, remote_file=manifest)
Subscription.upload({
u'file': manifest,
u'organization-id': org['id'],
})
product_id = Product.info({
u'name': PRDS['rhel'],
u'organization-id': org['id'],
})['id']
reposet_id = RepositorySet.info({
u'name': REPOSET['rhva6'],
u'organization-id': org['id'],
u'product-id': product_id,
})['id']
RepositorySet.enable({
u'basearch': 'x86_64',
u'id': reposet_id,
u'organization-id': org['id'],
u'product-id': product_id,
u'releasever': '6Server',
})
result = RepositorySet.available_repositories({
u'id': reposet_id,
u'orga | nization-id': org['id'],
u'product-id': product_id,
})
enabled = [
repo['enabled']
for repo
in result
if repo['arch'] == 'x86_64' and repo['release'] == '6Server'
][0]
self.assertEqual(enabled, 'true')
def test_repositoryset_disable_by_name(self):
| """@Test: Disable repo from reposet by names of reposet, org and
product
@Feature: Repository-set
@Assert: Repository was disabled
"""
org = make_org()
manifest = manifests.clone()
upload_file(manifest, remote_file=manifest)
Subscription.upload({
u'file': manifest,
u'organization-id': org['id'],
})
RepositorySet.enable({
u'basearch': 'x86_64',
u'name': REPOSET['rhva6'],
u'organization': org['name'],
u'pr |
nisse3000/pymatgen | pymatgen/io/qchem_io/sets.py | Python | mit | 7,003 | 0.000857 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import logging
from pymatgen.core import Molecule
from pymatgen.io.qchem_io.inputs import QCInput
from pymatgen.io.qchem_io.utils import lower_and_check_unique
# Classes for reading/manipulating/writing QChem ouput file | s.
__author__ = "Samuel Blau, Br | andon Wood, Shyam Dwaraknath"
__copyright__ = "Copyright 2018, The Materials Project"
__version__ = "0.1"
logger = logging.getLogger(__name__)
class QChemDictSet(QCInput):
    """
    Build a QCInput given all the various input parameters. Can be extended
    by standard implementations below.
    """

    def __init__(self,
                 molecule,
                 job_type,
                 basis_set,
                 scf_algorithm,
                 dft_rung=4,
                 pcm_dielectric=None,
                 max_scf_cycles=200,
                 geom_opt_max_cycles=200,
                 overwrite_inputs=None):
        """
        Args:
            molecule (Pymatgen molecule object)
            job_type (str): QChem job type, e.g. "opt", "sp" or "freq".
            basis_set (str)
            scf_algorithm (str)
            dft_rung (int): Functional selector, 1 through 5.
            pcm_dielectric (str): Solvent dielectric constant; when given,
                the PCM implicit solvation model is switched on.
            max_scf_cycles (int)
            geom_opt_max_cycles (int): Only used for "opt" jobs.
            overwrite_inputs (dict): This is dictionary of QChem input sections to add or overwrite variables,
                the available sections are currently rem, pcm, and solvent. So the accepted keys are rem, pcm, or solvent
                and the value is a dictionary of key value pairs relevant to the section. An example would be adding a
                new variable to the rem section that sets symmetry to false.
                ex. overwrite_inputs = {"rem": {"symmetry": "false"}}
                ***It should be noted that if something like basis is added to the rem dict it will overwrite
                the default basis.***
        """
        self.molecule = molecule
        self.job_type = job_type
        self.basis_set = basis_set
        self.scf_algorithm = scf_algorithm
        self.dft_rung = dft_rung
        self.pcm_dielectric = pcm_dielectric
        self.max_scf_cycles = max_scf_cycles
        self.geom_opt_max_cycles = geom_opt_max_cycles
        self.overwrite_inputs = overwrite_inputs
        pcm_defaults = {
            "heavypoints": "194",
            "hpoints": "194",
            "radii": "uff",
            "theory": "cpcm",
            "vdwscale": "1.1"
        }
        mypcm = {}
        mysolvent = {}
        myrem = {}
        myrem["job_type"] = job_type
        myrem["basis"] = self.basis_set
        myrem["max_scf_cycles"] = self.max_scf_cycles
        myrem["gen_scfman"] = "true"
        myrem["scf_algorithm"] = self.scf_algorithm
        # Map the requested "rung" to a concrete functional/dispersion combo.
        if self.dft_rung == 1:
            myrem["exchange"] = "B3LYP"
        elif self.dft_rung == 2:
            myrem["method"] = "B97-D3"
            myrem["dft_D"] = "D3_BJ"
        elif self.dft_rung == 3:
            myrem["method"] = "B97M-rV"
        elif self.dft_rung == 4:
            myrem["method"] = "wb97xd"
        elif self.dft_rung == 5:
            myrem["method"] = "wB97M-V"
        else:
            raise ValueError("dft_rung should be between 1 and 5!")
        if self.job_type.lower() == "opt":
            myrem["geom_opt_max_cycles"] = self.geom_opt_max_cycles
        # Idiom fix: compare to None by identity, not "!= None".
        if self.pcm_dielectric is not None:
            # Copy the defaults so overwrite_inputs can never mutate them.
            mypcm = dict(pcm_defaults)
            mysolvent["dielectric"] = self.pcm_dielectric
            myrem["solvent_method"] = 'pcm'
        if self.overwrite_inputs:
            for sec, sec_dict in self.overwrite_inputs.items():
                # Keys are lower-cased and duplicate-checked before they
                # override (or extend) the defaults assembled above.
                if sec == "rem":
                    myrem.update(lower_and_check_unique(sec_dict))
                if sec == "pcm":
                    mypcm.update(lower_and_check_unique(sec_dict))
                if sec == "solvent":
                    mysolvent.update(lower_and_check_unique(sec_dict))
        super(QChemDictSet, self).__init__(
            self.molecule, rem=myrem, pcm=mypcm, solvent=mysolvent)
class OptSet(QChemDictSet):
    """
    QChemDictSet for a geometry optimization.
    """

    def __init__(self,
                 molecule,
                 dft_rung=4,
                 basis_set="6-311++G*",
                 pcm_dielectric=None,
                 scf_algorithm="diis",
                 max_scf_cycles=200,
                 geom_opt_max_cycles=200,
                 overwrite_inputs=None):
        # The pre-assignments of basis_set/scf_algorithm/max_scf_cycles/
        # geom_opt_max_cycles that used to live here were redundant:
        # QChemDictSet.__init__ sets the same attributes from the same
        # arguments.
        super(OptSet, self).__init__(
            molecule=molecule,
            job_type="opt",
            dft_rung=dft_rung,
            pcm_dielectric=pcm_dielectric,
            basis_set=basis_set,
            scf_algorithm=scf_algorithm,
            max_scf_cycles=max_scf_cycles,
            geom_opt_max_cycles=geom_opt_max_cycles,
            overwrite_inputs=overwrite_inputs)
class SinglePointSet(QChemDictSet):
    """
    QChemDictSet for a single point calculation.
    """

    def __init__(self,
                 molecule,
                 dft_rung=4,
                 basis_set="6-311++G*",
                 pcm_dielectric=None,
                 scf_algorithm="diis",
                 max_scf_cycles=200,
                 overwrite_inputs=None):
        # Redundant pre-assignments of basis_set/scf_algorithm/
        # max_scf_cycles removed: QChemDictSet.__init__ sets the same
        # attributes from the same arguments.
        super(SinglePointSet, self).__init__(
            molecule=molecule,
            job_type="sp",
            dft_rung=dft_rung,
            pcm_dielectric=pcm_dielectric,
            basis_set=basis_set,
            scf_algorithm=scf_algorithm,
            max_scf_cycles=max_scf_cycles,
            overwrite_inputs=overwrite_inputs)
class FreqSet(QChemDictSet):
    """
    QChemDictSet for a frequency calculation.

    (The docstring previously said "single point calculation" -- a
    copy-paste error; this set submits job_type="freq".)
    """

    def __init__(self,
                 molecule,
                 dft_rung=4,
                 basis_set="6-311++G*",
                 pcm_dielectric=None,
                 scf_algorithm="diis",
                 max_scf_cycles=200,
                 overwrite_inputs=None):
        # Redundant pre-assignments removed: QChemDictSet.__init__ sets the
        # same attributes from the same arguments.
        super(FreqSet, self).__init__(
            molecule=molecule,
            job_type="freq",
            dft_rung=dft_rung,
            pcm_dielectric=pcm_dielectric,
            basis_set=basis_set,
            scf_algorithm=scf_algorithm,
            max_scf_cycles=max_scf_cycles,
            overwrite_inputs=overwrite_inputs)
|
tasleson/targetd | targetd/nfs.py | Python | gpl-3.0 | 10,459 | 0.000191 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import os.path
import re
import shlex
import logging as log
from targetd.utils import invoke
class Export(object):
    """One NFS export entry: host, path and its option set."""

    # Boolean export options, represented as individual bits so a whole
    # option set fits in a single integer.
    SECURE = 0x00000001
    RW = 0x00000002
    RO = 0x00000004
    SYNC = 0x00000008
    ASYNC = 0x00000010
    NO_WDELAY = 0x00000020
    NOHIDE = 0x00000040
    CROSSMNT = 0x00000080
    NO_SUBTREE_CHECK = 0x00000100
    INSECURE_LOCKS = 0x00000200
    ROOT_SQUASH = 0x00000400
    NO_ROOT_SQUASH = 0x00000800
    ALL_SQUASH = 0x00001000
    WDELAY = 0x00002000
    HIDE = 0x00004000
    INSECURE = 0x00008000
    NO_ALL_SQUASH = 0x00010000

    # Mutually exclusive bit pairs, with the error text raised when both
    # bits of a pair are present.
    _conflicting = (((RW | RO), "Both RO & RW set"),
                    ((INSECURE | SECURE), "Both INSECURE & SECURE set"),
                    ((SYNC | ASYNC), "Both SYNC & ASYNC set"),
                    ((HIDE | NOHIDE), "Both HIDE & NOHIDE set"),
                    ((WDELAY | NO_WDELAY), "Both WDELAY & NO_WDELAY set"),
                    ((ROOT_SQUASH | NO_ROOT_SQUASH),
                     "Only one option of ROOT_SQUASH, NO_ROOT_SQUASH, "
                     "can be specified"))

    # Spelling of each boolean option as it appears in an exports file.
    bool_option = {
        "secure": SECURE,
        "rw": RW,
        "ro": RO,
        "sync": SYNC,
        "async": ASYNC,
        "no_wdelay": NO_WDELAY,
        "nohide": NOHIDE,
        "crossmnt": CROSSMNT,
        "no_subtree_check": NO_SUBTREE_CHECK,
        "insecure_locks": INSECURE_LOCKS,
        "root_squash": ROOT_SQUASH,
        "all_squash": ALL_SQUASH,
        "wdelay": WDELAY,
        "hide": HIDE,
        "insecure": INSECURE,
        "no_root_squash": NO_ROOT_SQUASH,
        "no_all_squash": NO_ALL_SQUASH
    }

    # Options that carry a value ("key=value").  NOTE(review): the mapping
    # values look like intended value types, but _validate_key_pairs only
    # checks key membership and never enforces them.
    key_pair = dict(
        mountpoint=str,
        mp=str,
        fsid=None,
        refer=str,
        replicas=str,
        anonuid=int,
        anongid=int,
        sec=str)

    # "<path> <host>(<options>)" line shape used when parsing export lists.
    export_regex = r"([\/a-zA-Z0-9\.\-_]+)[\s]+(.+)\((.+)\)"
    # Three-digit octal escapes (e.g. "\040" for a space) in export paths.
    octal_nums_regex = r"""\\([0-7][0-7][0-7])"""
@staticmethod
def _validate_options(options):
for e in Export._conflicting:
if (options & (e[0])) == e[0]:
raise ValueError(e[1])
return options
@staticmethod
def _validate_key_pairs(kp):
if kp:
if isinstance(kp, dict):
for k, v in kp.items():
if k not in Export.key_pair:
raise ValueError('option %s not valid' % k)
return kp
else:
raise ValueError('key_value_options domain is None or dict')
else:
return {}
def __init__(self, host, path, bit_wise_options=0, key_value_options=None):
if host == '<world>':
self.host = '*'
else:
self.host = host
self.path = path
self.options = Export._validate_options(bit_wise_options)
self.key_value_options = Export._validate_key_pairs(key_value_options)
@staticmethod
def _parse_opt(options_string):
bits = 0
pairs = {}
if len(options_string):
options = options_string.split(',')
for o in options:
if '=' in o:
# We have a key=value
key, value = o.split('=')
pairs[key] = value
else:
bits |= Export.bool_option[o]
return bits, pairs
@staticmethod
def _override(combined, test, opt_a, opt_b):
if test & opt_a:
combined &= ~opt_b
if test & opt_b:
combined &= ~opt_a
return combined
@staticmethod
def parse_opt(global_options, specific_options=None):
gbit, gpairs = Export._parse_opt(global_options)
if specific_options is None:
return gbit, gpairs
sbit, spairs = Export._parse_opt(specific_options)
Export._validate_options(gbit)
Export._validate_options(sbit)
# Remove global options which are overridden by specific
culled = gbit | sbit
culled = Export._override(culled, sbit, Export.RO, Export.RW)
culled = Export._override(culled, sbit, Export.INSECURE, Export.SECURE)
culled = Export._override(culled, sbit, Export.SYNC, Export.ASYNC)
culled = Export._override(culled, sbit, Export.HIDE, Export.NOHIDE)
culled = Export._override(culled, sbit, Export.WDELAY, Export.NO_WDELAY)
culled = Export._override(culled, sbit,
Export.ROOT_SQUASH,
Export.NO_ROOT_SQUASH)
gpairs.update(spairs)
return culled, gpairs
@staticmethod
def parse_export(tokens):
rc = []
try:
global_options = ''
options = ''
if len(tokens) >= 1:
path = tokens[0]
if len(tokens) > 1:
for t in tokens[1:]:
# Handle global options
if t[0] == '-' and not global_options:
global_options = t[1:]
continue
# Check for a host or a host with an options group
if '(' and ')' in t:
if t[0] != '(':
host, options = t[:-1].split('(')
else:
host = '*'
options = t[1:-1]
else:
host = t
rc.append(
Export(host, path,
*Export.parse_opt(global_options, options)))
else:
rc.append(Export('*', path))
except Exception as e:
log.error("parse_export: %s" % str(e))
return None
return rc
@staticmethod
def parse_exports_file(f):
rc = []
with open(f, "r") as e_f:
for line in e_f:
exp = Export.parse_export(
shlex.split(Export._chr_encode(line), '#'))
if exp:
rc.extend(exp)
return rc
@staticmethod
def parse_exportfs_output(export_text):
rc = []
pattern = re.compile(Export.export_regex)
for m in re.finditer(pattern, export_text):
rc.append(
Export(m.group(2), m.group(1), *Export.parse_opt(m.group(3))))
return rc
def options_list(self):
rc = []
for k, v in self.bool_option.items():
if self.options & v:
rc.append(k)
for k, v in self.key_value_options.items():
rc.append('%s=%s' % (k, v))
return rc
def options_string(self):
return ','.join(self.options_list())
@staticmethod
def _double_quote_space(s):
if ' ' in s:
return '"%s"' % s
return s
def __repr__(self):
return "%s %s(%s)" % (Export._double_quote_space(self.path).ljust(50),
self.host, self.options_string())
def export_file_format(self):
r | eturn "%s %s(%s)\n" % (Export._double_quote_space(self.path),
| self.host, self.options_string())
@staticmethod
def _chr_encode(s):
# Replace octal values, the export path can contain \nnn in the
# export name.
p = re.compile(Export.octal_nums_regex)
for m in re.finditer(p, s):
s = s.replace('\\' + m.group(1) |
santisiri/popego | envs/ALPHA-POPEGO/lib/python2.5/site-packages/SQLAlchemy-0.4.0-py2.5.egg/sqlalchemy/__init__.py | Python | bsd-3-clause | 1,324 | 0.001511 | # __init__.py
# Copyright (C) 2005, 2006, 2007 Michael Bayer mike_mp@zzzcomputing.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import inspect
from sqlalchemy.types import \
BLOB, BOOLEAN, CHAR, CLOB, DATE, DATETIME, DECIMAL, FLOAT, INT, \
NCHAR, SMALLINT, TEXT, TIME, TIMESTAMP, VARCHAR, \
Binary, Boolean, Date, DateTime, Float, Integer, Interval, Numeric, \
PickleType, | SmallInteger, String, Time, Unicode
from sqlalchemy.sql import \
func, modifier, text, literal, literal_column, null, alias, | \
and_, or_, not_, \
select, subquery, union, union_all, insert, update, delete, \
join, outerjoin, \
bindparam, outparam, asc, desc, \
except_, except_all, exists, intersect, intersect_all, \
between, case, cast, distinct, extract
from sqlalchemy.schema import \
MetaData, ThreadLocalMetaData, Table, Column, ForeignKey, \
Sequence, Index, ForeignKeyConstraint, PrimaryKeyConstraint, \
CheckConstraint, UniqueConstraint, Constraint, \
PassiveDefault, ColumnDefault
from sqlalchemy.engine import create_engine, engine_from_config
__all__ = [ name for name, obj in locals().items()
if not (name.startswith('_') or inspect.ismodule(obj)) ]
__version__ = '0.4.0'
|
animesh0353/codelibrary | python/pow.py | Python | unlicense | 198 | 0.005051 | def pow(a, b, mod):
res = 1
while b > 0:
if b & 1 != 0:
| res = res * a % mod
a = | a * a % mod
b >>= 1
return res
print(1024 == pow(2, 10, 1000000007))
|
chengduoZH/Paddle | python/paddle/fluid/contrib/slim/nas/lock.py | Python | apache-2.0 | 1,215 | 0 | # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is d | istributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
__All__ = ['lock', 'unlock']
if os.name == 'nt':
def lock(fil | e):
raise NotImplementedError('Windows is not supported.')
def unlock(file):
raise NotImplementedError('Windows is not supported.')
elif os.name == 'posix':
from fcntl import flock, LOCK_EX, LOCK_UN
def lock(file):
"""Lock the file in local file system."""
flock(file.fileno(), LOCK_EX)
def unlock(file):
"""Unlock the file in local file system."""
flock(file.fileno(), LOCK_UN)
else:
raise RuntimeError("File Locker only support NT and Posix platforms!")
|
prithvinambiar/tensor_network | tensor_network/__init__.py | Python | mit | 166 | 0 | # - | *- coding: utf-8 -*-
"""Top-level package for tensor_network."""
__author__ = """Prithvi Nambiar"""
__email__ = 'prithvinambiar@gmail.com'
__version__ = '0.1.0 | '
|
odlgroup/odl | examples/tomo/backends/astra_performance_cuda_parallel_2d_cg.py | Python | mpl-2.0 | 2,904 | 0 | """Performance example of running native ASTRA vs using ODL for reconstruction.
In this example, a 512x512 image is reconstructed using the Conjugate Gradient
Least Squares method on the GPU.
In general, ASTRA is faster than ODL since it does not need to perform any
copies and all arithmetic is performed on the GPU. Despite this, ODL is not
much slower. In this example, the | overhead is about 60 %, depending on | the
hardware used.
"""
import astra
import numpy as np
import matplotlib.pyplot as plt
import scipy.misc
import odl
from odl.util.testutils import timer
# Common geometry parameters
domain_size = np.array([512, 512])
n_angles = 180
det_size = 362
niter = 50
phantom = np.rot90(scipy.misc.ascent().astype('float'), -1)
# --- ASTRA ---
# Define ASTRA geometry
vol_geom = astra.create_vol_geom(domain_size[0], domain_size[1])
proj_geom = astra.create_proj_geom('parallel',
np.linalg.norm(domain_size) / det_size,
det_size,
np.linspace(0, np.pi, n_angles))
# Create ASTRA projector
proj_id = astra.create_projector('cuda', proj_geom, vol_geom)
# Create sinogram
sinogram_id, sinogram = astra.create_sino(phantom, proj_id)
# Create a data object for the reconstruction
rec_id = astra.data2d.create('-vol', vol_geom)
# Set up the parameters for a reconstruction algorithm using the CUDA backend
cfg = astra.astra_dict('CGLS_CUDA')
cfg['ReconstructionDataId'] = rec_id
cfg['ProjectionDataId'] = sinogram_id
cfg['ProjectorId'] = proj_id
# Create the algorithm object from the configuration structure
alg_id = astra.algorithm.create(cfg)
with timer('ASTRA Run'):
# Run the algorithm
astra.algorithm.run(alg_id, niter)
# Get the result
rec = astra.data2d.get(rec_id)
# Clean up.
astra.algorithm.delete(alg_id)
astra.data2d.delete(rec_id)
astra.data2d.delete(sinogram_id)
astra.projector.delete(proj_id)
# --- ODL ---
# Create reconstruction space
reco_space = odl.uniform_discr(-domain_size / 2, domain_size / 2, domain_size)
# Create geometry
geometry = odl.tomo.parallel_beam_geometry(reco_space, n_angles, det_size)
# Create ray transform
ray_trafo = odl.tomo.RayTransform(reco_space, geometry, impl='astra_cuda')
# Create sinogram
data = ray_trafo(phantom)
# Solve with CGLS (aka CGN)
x = reco_space.zero()
with timer('ODL Run'):
odl.solvers.conjugate_gradient_normal(ray_trafo, x, data, niter=niter)
# Display results for comparison
plt.figure('Phantom')
plt.imshow(phantom.T, origin='lower', cmap='bone')
plt.figure('ASTRA Sinogram')
plt.imshow(sinogram.T, origin='lower', cmap='bone')
plt.figure('ASTRA Reconstruction')
plt.imshow(rec.T, origin='lower', cmap='bone')
plt.figure('ODL Sinogram')
plt.imshow(data.asarray().T, origin='lower', cmap='bone')
plt.figure('ODL Reconstruction')
plt.imshow(x.asarray().T, origin='lower', cmap='bone')
plt.show()
|
0atman/flask-admin | examples/quickstart/second.py | Python | bsd-3-clause | 272 | 0.003676 | from flask import Flas | k
from flask.ext.admin import Admin, BaseView, expose
class MyView(BaseView):
@expose('/')
def index(self):
return self.render('index.html')
app = Flask(__name__)
admin = Admin(app)
admin.add_view(MyView(name='Hello | '))
app.run()
|
whymirror/unholy | python/Kernel.py | Python | mit | 281 | 0.042705 | class Proc:
def __init__(self, code):
self.code = | code
def call(self, *args):
return eval(self.code, dict(zip(self.code.co_varnames, args)))
def proc(func):
return Proc(func)
def puts(*args):
for x in args: print x
if no | t args: print
class BasicObject:
pass
|
gammu/python-gammu | examples/batteryinfo.py | Python | gpl-2.0 | 1,237 | 0 | #!/usr/bin/env python
# vim: expandtab sw=4 ts=4 sts=4:
#
# Copyright © 2003 - 2018 Michal Čihař <michal@cihar.com>
#
# This file is part of python-gammu <https://wammu.eu/python-gammu/>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# |
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
import sys
import gammu
state_machine = gammu.StateMachine()
if len(sys.argv) > 2:
state_machine.ReadConfig(Filename= | sys.argv[1])
del sys.argv[1]
else:
state_machine.ReadConfig()
state_machine.Init()
status = state_machine.GetBatteryCharge()
for x in status:
if status[x] != -1:
print(f"{x:20}: {status[x]}")
|
rodynnz/python-tfstate | src/tfstate/exceptions.py | Python | lgpl-3.0 | 144 | 0 | # -*- coding: utf-8 -*-
clas | s InvalidResource(E | xception):
"""
Raised when try to create a Resource with invalid data
"""
pass
|
dsweet04/rekall | rekall-core/rekall/plugins/darwin/networking.py | Python | gpl-2.0 | 13,974 | 0 | # Rekall Memory Forensics
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
__author__ = (
"Michael Cohen <scudette@google.com>",
"Adam Sindelar <adam.sindelar@gmail.com>")
from rekall import obj
from rekall import plugin
from rekall_lib import registry
from rekall.plugins.darwin import common
class DarwinUnpListCollector(common.AbstractDarwinProducer):
"""Walks the global list of sockets in uipc_usrreq."""
name = "unp_sockets"
type_name = "socket"
def collect(self):
for head_const in ["_unp_dhead", "_unp_shead"]:
lhead = self.session.profile.get_constant_object(
head_const,
target="unp_head")
for unp in lhead.lh_first.walk_list("unp_link.le_next"):
yield [unp.unp_socket]
class DarwinSocketsFromHandles(common.AbstractDarwinProducer):
"""Looks up handles that point to a socket and collects the socket."""
name = "open_sockets"
type_name = "socket"
def collect(self):
for fileproc in self.session.plugins.collect("fileproc"):
if fileproc.fg_type == "DTYPE_SOCKET":
yield [fileproc.autocast_fg_data()]
class DarwinNetstat(common.AbstractDarwinCommand):
"""Prints all open sockets we know about, from any source.
Netstat will display even connections that lsof doesn't know about, because
they were either recovered from an allocation zone, or found through a
secondary mechanism (like system call handler cache).
On the other hand, netstat doesn't know the file descriptor or, really, the
process that owns the connection (although it does know the PID of the last
process to access the socket.)
Netstat will also tell you, in the style of psxview, if a socket was only
found using some of the methods available.
"""
name = "netstat"
@classmethod
def methods(cls):
"""Return the names of available socket enumeration methods."""
# Find all the producers that collect procs and inherit from
# AbstractDarwinCachedProducer.
methods = []
for subclass in common.AbstractDarwinProducer.classes.itervalues():
# We look for a plugin which is a producer and a darwin command.
if (issubclass(subclass, common.AbstractDarwinCommand) and
issubclass(subclass, plugin.Producer) and
subclass.type_name == "socket"):
methods.append(subclass.name)
methods.sort()
return methods
@registry.classproperty
@registry.memoize
def table_header(cls): # pylint: disable=no-self-argument
header = [dict(name="socket", type="socket", width=60)]
for method in cls.methods():
header.append(dict(name=method, width=12))
return plugin.PluginHeader(*header)
def collect(self):
methods = self.methods()
for socket in sorted(self.session.plugins.collect("socket"),
key=lambda socket: socket.last_pid):
row = [socket]
for method in methods:
row.append(method in socket.obj_producers)
yield row
class DarwinGetArpListHead(common.AbstractDarwinParameterHook):
"""
One version of arp_init looks like this:
void
arp_init(void)
{
VERIFY(!arpinit_done);
LIST_INIT(&llinfo_arp); // <-- This is the global we want.
llinfo_arp_zone = zinit(sizeof (struct llinfo_arp),
LLINFO_ARP_ZONE_MAX * sizeof (struct llinfo_arp), 0,
LLINFO_ARP_ZONE_NAME);
if (llinfo_arp_zone == NULL)
panic("%s: failed allocating llinfo_arp_zone", __func__);
zone_change(lli | nfo_arp_zone, Z_EXPAND, TRUE);
zone_change(llinfo_arp_zone, Z_CALLERACCT, FALSE);
arpinit_done = 1;
}
Disassembled, the first few instructions look like this:
0x0 55 | PUSH RBP
0x1 4889e5 MOV RBP, RSP
0x4 803d65e9400001 CMP BYTE [RIP+0x40e965], 0x1
0xb 7518 JNZ 0xff80090a7f95
0xd 488d3dee802900 LEA RDI, [RIP+0x2980ee]
0x14 488d35f5802900 LEA RSI, [RIP+0x2980f5]
0x1b baf3000000 MOV EDX, 0xf3
# This is a call to kernel!panic (later kernel!assfail):
0x20 e80b6c1400 CALL 0xff80091eeba0
# This is where it starts initializing the linked list:
0x25 48c70548e94000000000 MOV QWORD [RIP+0x40e948], 0x0
00
0x30 488d0d0e812900 LEA RCX, [RIP+0x29810e]
"""
name = "disassembled_llinfo_arp"
PANIC_FUNCTIONS = (u"__kernel__!_panic", u"__kernel__!_assfail")
def calculate(self):
resolver = self.session.address_resolver
arp_init = resolver.get_constant_object("__kernel__!_arp_init",
target="Function")
instructions = iter(arp_init.Decompose(20))
# Walk down to the CALL mnemonic and use the address resolver to
# see if it calls one of the panic functions.
for instruction in instructions:
# Keep spinning until we get to the first CALL.
if instruction.mnemonic != "CALL":
continue
# This is absolute:
target = instruction.operands[0].value
_, names = resolver.get_nearest_constant_by_address(target)
if not names:
return obj.NoneObject("Could not find CALL in arp_init.")
if names[0] not in self.PANIC_FUNCTIONS:
return obj.NoneObject(
"CALL was to %r, which is not on the PANIC list."
% names)
# We verified it's the right CALL. MOV should be right after it,
# so let's just grab it.
mov_instruction = next(instructions)
if mov_instruction.mnemonic != "MOV":
return obj.NoneObject("arp_init code changed.")
offset = (mov_instruction.operands[0].disp
+ mov_instruction.address
+ mov_instruction.size)
address = self.session.profile.Object(type_name="address",
offset=offset)
llinfo_arp = self.session.profile.Object(
type_name="llinfo_arp",
offset=address.v())
if llinfo_arp.isvalid:
return llinfo_arp.obj_offset
return obj.NoneObject("llinfo_arp didn't validate.")
class DarwinArp(common.AbstractDarwinProducer):
"""Show information about arp tables."""
name = "arp"
type_name = "rtentry"
def collect(self):
llinfo_arp = self.session.address_resolver.get_constant_object(
"__kernel__!_llinfo_arp",
target="Pointer",
target_args=dict(target="llinfo_arp"))
if not llinfo_arp:
# Must not have it in the profile. Try asking the session hook
# for the address.
offset = self.session.GetParameter("disassembled_llinfo_arp")
if not offset:
self.session.logging.error(
"Could not find the address of llinfo_arp.")
return
llinfo_arp = self.session.profile.Object(
type_name="llinfo_arp", offset=offset)
for arp_hit in llinfo_arp.walk_list("la_le.le_next"):
yield [arp_hit.la_rt]
class DarwinRoute(c |
django-cratis/cratis | tests/_markers.py | Python | bsd-2-clause | 131 | 0.007634 | import pytest
slow = pytest.mark.skipif(
no | t pytes | t.config.getoption("--runslow"),
reason="need --runslow option to run"
) |
TheTimmy/spack | var/spack/repos/builtin/packages/isl/package.py | Python | lgpl-2.1 | 1,805 | 0.000554 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the term | s and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Les | ser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Isl(AutotoolsPackage):
"""isl (Integer Set Library) is a thread-safe C library for manipulating
sets and relations of integer points bounded by affine constraints."""
homepage = "http://isl.gforge.inria.fr"
url = "http://isl.gforge.inria.fr/isl-0.18.tar.bz2"
version('0.18', '11436d6b205e516635b666090b94ab32')
version('0.14', 'acd347243fca5609e3df37dba47fd0bb')
depends_on('gmp')
def configure_args(self):
return [
'--with-gmp-prefix={0}'.format(self.spec['gmp'].prefix)
]
|
lmr/avocado-vt | virttest/qemu_monitor.py | Python | gpl-2.0 | 92,751 | 0.000442 | """
Interfaces to the QEMU monitor.
:copyright: 2008-2010 Red Hat Inc.
"""
from __future__ import division
import socket
import time
import threading
import logging
import select
import re
import os
import six
try:
import json
except ImportError:
logging.warning("Could not import json module. "
"QMP monitor functionality disabled.")
from . import passfd_setup
from . import utils_misc
from . import cartesian_config
from . import data_dir
class MonitorError(Exception):
pass
class MonitorConnectError(MonitorError):
def __init__(self, monitor_name):
MonitorError.__init__(self)
self.monitor_name = monitor_name
def __str__(self):
return "Could not connect to monitor | '%s'" % self.monitor_name
class MonitorSocketError | (MonitorError):
def __init__(self, msg, e):
Exception.__init__(self, msg, e)
self.msg = msg
self.e = e
def __str__(self):
return "%s (%s)" % (self.msg, self.e)
class MonitorLockError(MonitorError):
pass
class MonitorProtocolError(MonitorError):
pass
class MonitorNotSupportedError(MonitorError):
pass
class MonitorNotSupportedCmdError(MonitorNotSupportedError):
def __init__(self, monitor, cmd):
MonitorError.__init__(self)
self.monitor = monitor
self.cmd = cmd
def __str__(self):
return ("Not supported cmd '%s' in monitor '%s'" %
(self.cmd, self.monitor))
class MonitorNotSupportedMigCapError(MonitorNotSupportedError):
pass
class QMPCmdError(MonitorError):
def __init__(self, cmd, qmp_args, data):
MonitorError.__init__(self, cmd, qmp_args, data)
self.cmd = cmd
self.qmp_args = qmp_args
self.data = data
def __str__(self):
return ("QMP command %r failed (arguments: %r, "
"error message: %r)" % (self.cmd, self.qmp_args, self.data))
class QMPEventError(MonitorError):
def __init__(self, cmd, qmp_event, vm_name, name):
MonitorError.__init__(self, cmd, qmp_event, vm_name, name)
self.cmd = cmd
self.qmp_event = qmp_event
self.name = name
self.vm_name = vm_name
def __str__(self):
return ("QMP event %s not received after %s (monitor '%s.%s')"
% (self.qmp_event, self.cmd, self.vm_name, self.name))
def get_monitor_filename(vm, monitor_name):
"""
Return the filename corresponding to a given monitor name.
:param vm: The VM object which has the monitor.
:param monitor_name: The monitor name.
:return: The string of socket file name for qemu monitor.
"""
return os.path.join(data_dir.get_tmp_dir(),
"monitor-%s-%s" % (monitor_name, vm.instance))
def get_monitor_filenames(vm):
"""
Return a list of all monitor filenames (as specified in the VM's
params).
:param vm: The VM object which has the monitors.
"""
return [get_monitor_filename(vm, m) for m in vm.params.objects("monitors")]
def create_monitor(vm, monitor_name, monitor_params):
"""
Create monitor object and connect to the monitor socket.
:param vm: The VM object which has the monitor.
:param monitor_name: The name of this monitor object.
:param monitor_params: The dict for creating this monitor object.
"""
MonitorClass = HumanMonitor
if monitor_params.get("monitor_type") == "qmp":
if not utils_misc.qemu_has_option("qmp", vm.qemu_binary):
# Add a "human" monitor on non-qmp version of qemu.
logging.warn("QMP monitor is unsupported by %s,"
" creating human monitor instead." % vm.qemu_version)
else:
MonitorClass = QMPMonitor
monitor_filename = get_monitor_filename(vm, monitor_name)
logging.info("Connecting to monitor '<%s> %s'", MonitorClass, monitor_name)
monitor = MonitorClass(vm, monitor_name, monitor_filename)
monitor.verify_responsive()
return monitor
def wait_for_create_monitor(vm, monitor_name, monitor_params, timeout):
"""
Wait for the progress of creating monitor object. This function will
retry to create the Monitor object until timeout.
:param vm: The VM object which has the monitor.
:param monitor_name: The name of this monitor object.
:param monitor_params: The dict for creating this monitor object.
:param timeout: Time to wait for creating this monitor object.
"""
# Wait for monitor connection to succeed
end_time = time.time() + timeout
while time.time() < end_time:
try:
return create_monitor(vm, monitor_name, monitor_params)
except MonitorError as e:
logging.warn(e)
time.sleep(1)
else:
raise MonitorConnectError(monitor_name)
class VM(object):
"""
Dummy class to represent "vm.name" for pickling to avoid circular deps
"""
def __init__(self, name):
self.name = name
class Monitor:
"""
Common code for monitor classes.
"""
ACQUIRE_LOCK_TIMEOUT = 20
DATA_AVAILABLE_TIMEOUT = 0
CONNECT_TIMEOUT = 60
def __init__(self, vm, name, filename, suppress_exceptions=False):
"""
Initialize the instance.
:param vm: The VM which this monitor belongs to.
:param name: Monitor identifier (a string)
:param filename: Monitor socket filename
:raise MonitorConnectError: Raised if the connection fails
"""
self.vm = VM(vm.name)
self.name = name
self.filename = filename
self._lock = threading.RLock()
self._log_lock = threading.RLock()
self._socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self._socket.settimeout(self.CONNECT_TIMEOUT)
self._passfd = None
self._supported_cmds = []
self.debug_log = False
self.log_file = "%s-%s.log" % (name, vm.name)
self.open_log_files = {}
try:
self._socket.connect(filename)
except socket.error as details:
raise MonitorConnectError("Could not connect to monitor socket: %s"
% details)
def __del__(self):
# Automatically close the connection when the instance is garbage
# collected
self._close_sock()
if not self._acquire_lock(lock=self._log_lock):
raise MonitorLockError("Could not acquire exclusive lock to access"
" %s " % self.open_log_files)
try:
del_logs = []
for log in self.open_log_files:
self.open_log_files[log].close()
del_logs.append(log)
for log in del_logs:
self.open_log_files.pop(log)
finally:
self._log_lock.release()
# The following two functions are defined to make sure the state is set
# exclusively by the constructor call as specified in __getinitargs__().
def __getstate__(self):
pass
def __setstate__(self, state):
pass
def __getinitargs__(self):
"""
Unsafe way to allow pickling of this object
The monitor compounds of several unpickable objects like locks,
sockets and files. During unpickling this makes the Montior object
to re-connect and create new locks, which only works well when
the original object (pickled one) was already destroyed. If not
than this new object won't be able to connect to the already opened
resources and will be crippled. Anyway it's sufficient for our use
case, but don't tell you were not warned.
"""
# The Monitor object is usually part of VM. Let's avoid the circular
# dependency by creating fake VM object which only contains `vm.name`,
# which is in reality the only information required by Monitor object
# at this time.
# Always ignore errors during unpickle as exceptions during "__init__"
# would cause the whole unpickle operation to fail, leaving us without
# any representation whatsoever.
return VM(self.v |
lukecwik/incubator-beam | learning/katas/python/test_helper.py | Python | apache-2.0 | 6,995 | 0.001287 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
def get_file_text(path):
""" Returns file text by path"""
file_io = open(path, "r")
text = file_io.read()
file_io.close()
return text
def get_file_output(encoding="utf-8", path=sys.argv[-1], arg_string=""):
"""
Returns answer file output
:param encoding: to decode output in python3
:param path: path of file to execute
:return: list of strings
"""
import subprocess
proc = subprocess.Popen([sys.executable, path], stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
if arg_string:
for arg in arg_string.split("\n"):
proc.stdin.write(bytearray(str(arg) + "\n", encoding))
proc.stdin.flush()
return list(map(lambda x: str(x.decode(encoding)), proc.communicate()[0].splitlines()))
def test_file_importable():
""" Tests there is no obvious syntax errors"""
path = sys.argv[-1]
if not path.endswith(".py"):
import os
parent = os.path.abspath(os.path.join(path, os.pardir))
python_files = [f for f in os.listdir(parent) if os.path.isfile(os.path.join(parent, f)) and f.endswith(".py")]
for python_file in python_files:
if python_file == "tests.py":
continue
check_importable_path(os.path.join(parent, python_file))
return
check_importable_path(path)
def check_importable_path(path):
""" Checks that file is importable.
Reports failure otherwise.
"""
saved_input = patch_input()
try:
import_file(path)
except:
failed("The file contains syntax errors", test_file_importable.__name__)
return
finally:
revert_input(saved_input)
passed(test_file_importable.__name__)
def patch_input():
def mock_fun(_m=""):
return "mock"
if sys.version_info[0] == 3:
import builtins
save_input = builtins.input
builtins.input = mock_fun
return save_input
elif sys.version_info[0] == 2:
import __builtin__
save_input = __builtin__.raw_input
__builtin__.raw_input = mock_fun
__builtin__.input = mock_fun
return save_input
def revert_input(saved_input):
if sys.version_info[0] == 3:
import builtins
builtins.input = saved_input
elif sys.version_info[0] == 2:
import __builtin__
__builtin__.raw_input = saved_input
__builtin__.input = saved_input
def import_file(path):
""" Returns imported file """
if sys.version_info[0] == 2 or sys.version_info[1] < 3:
import imp
return imp.load_source("tmp", path)
elif sys.version_info[0] == 3:
import importlib.machinery
return importlib.machinery.SourceFileLoader("tmp", path).load_module("tmp")
def import_task_file():
""" Returns imported file.
Imports file from which check action was run
"""
path = sys.argv[-1]
return import_file(path)
def test_is_not_empty():
"""
Checks that file is not empty
"""
path = sys.argv[-1]
file_text = get_file_text(path)
if len(file_text) > 0:
passed()
else:
failed("The file is empty. Please, reload the task and try again.")
def test_text_equals(text, error_text):
"""
Checks that answer equals text.
"""
path = sys.argv[-1]
file_text = get_file_text(path)
if file_text.strip() == text:
passed()
else:
failed(error_text)
def test_answer_placeholders_text_deleted(
error_text="Solution has empty answer prompt(s)."):
"""
Checks that all answer placeholders are not empty
"""
windows = get_answer_placeholders()
for window in windows:
if len(window) == 0:
failed(error_text)
return
passed()
def set_congratulation_message(message):
""" Overrides default 'Congratulations!' message """
print("#educational_plugin CONGRATS_MESSAGE " + message)
def failed(message="Please, reload the task and try again.", name=None):
    """Report a test failure to the educational plugin.

    When *name* is omitted, the calling function's name is used.
    """
    reported = name if name else sys._getframe().f_back.f_code.co_name
    # NOTE: " FAILED + " (plus sign included) is the exact marker the plugin
    # parses -- do not "fix" the string.
    print("#educational_plugin " + reported + " FAILED + " + message)
def passed(name=None):
    """Report a test success to the educational plugin.

    When *name* is omitted, the calling function's name is used.
    """
    reported = name if name else sys._getframe().f_back.f_code.co_name
    print("#educational_plugin " + reported + " test OK")
def get_answer_placeholders():
    """Return the text of all answer placeholders for the current task.

    Placeholders are read from '<task>_windows' next to the task file
    (path taken from sys.argv[-1]); each placeholder starts with the
    '#educational_plugin_window = ' prefix and may span multiple lines.

    Fix: the windows file is now opened with a context manager, so the
    handle is closed even if parsing raises (the original leaked it).
    """
    prefix = "#educational_plugin_window = "
    path = sys.argv[-1]
    import os
    file_name_without_extension = os.path.splitext(path)[0]
    windows_path = file_name_without_extension + "_windows"
    windows = []
    window_text = ""
    first = True
    with open(windows_path, "r") as f:
        for line in f:
            if line.startswith(prefix):
                if not first:
                    # A new marker closes the previous placeholder.
                    windows.append(window_text.strip())
                else:
                    first = False
                window_text = line[len(prefix):]
            else:
                # Continuation line of the current placeholder.
                window_text += line
    if window_text:
        windows.append(window_text.strip())
    return windows
def check_samples(samples=()):
    """Check script output for every sample.

    A sample is a two-element sequence: [input, expected_output].
    Samples of any other length are skipped silently.
    """
    for sample in samples:
        if len(sample) != 2:
            continue
        sample_input, expected = sample
        actual = "\n".join(get_file_output(arg_string=str(sample_input)))
        if actual != expected:
            failed(
                "Test from samples failed: \n \n"
                "Input:\n{}"
                "\n \n"
                "Expected:\n{}"
                "\n \n"
                "Your result:\n{}".format(str.strip(sample_input), str.strip(expected), actual))
            return
    set_congratulation_message("All test from samples passed. Now we are checking your solution on Stepik server.")
    passed()
def run_common_tests(error_text="Please, reload file and try again"):
    """Run the standard sanity checks for a task file.

    Note: *error_text* is accepted for API compatibility but each check
    uses its own message.
    """
    for check in (test_is_not_empty,
                  test_answer_placeholders_text_deleted,
                  test_file_importable):
        check()
|
StephenPower/CollectorCity-Market-Place | stores/apps/store_admin/views.py | Python | apache-2.0 | 46,221 | 0.009281 | import logging
import datetime
import urllib
from django.core.mail import send_mail, EmailMessage
from django.core.urlresolvers import reverse
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.conf import settings
from django.db.models import Q
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseServerError
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from django.utils.translation import ugettext as _
from auctions.models import AuctionSession
from for_sale.models import Item
from core.decorators import shop_admin_required
from market.models import MarketCategory, MarketSubCategory
from django.utils import simplejson
PAGE_ITEMS = 10
@shop_admin_required
def back_to_site(request):
    """Remember where the admin was, then jump back to the storefront."""
    referer = request.META.get('HTTP_REFERER', '/admin')
    request.session['admin_checkpoint'] = referer
    target = request.session.get("shop_checkpoint", "/")
    return HttpResponseRedirect(target)
@shop_admin_required
def redirect_admin(request):
    """Remember where the shopper was, then jump into the admin."""
    referer = request.META.get('HTTP_REFERER', '/')
    request.session['shop_checkpoint'] = referer
    target = request.session.get("admin_checkpoint", "/admin")
    return HttpResponseRedirect(target)
@shop_admin_required
def home_admin(request):
    """Render the admin dashboard landing page (no extra context)."""
    context = {}
    return render_to_response('store_admin/home_admin.html', context,
                              RequestContext(request))
@shop_admin_required
def customers_overview(request):
    """Dashboard tab: five latest open sales plus marketplace wish-list items."""
    from sell.models import Sell
    from market_buy.models import WishListItem

    recent_sells = (Sell.objects
                    .filter(shop=request.shop, closed=False)
                    .order_by("-date_time")[:5])
    items = WishListItem.objects.filter(marketplace=request.shop.marketplace)
    context = {
        'sells': recent_sells,
        'wishlistitems': items,
    }
    return render_to_response('store_admin/customers/overview.html',
                              context, RequestContext(request))
@shop_admin_required
def customers_profiles(request):
    """Render the customer profiles page (static template, no context)."""
    context = {}
    return render_to_response('store_admin/customers/profiles.html',
                              context, RequestContext(request))
@shop_admin_required
def customers_sold_items(request):
    """List this shop's sales with optional filtering, ordering and paging.

    GET parameters:
      filter_by -- date window (for_date_today/week/month/year), payment
                   state (payment_pending/paid/failed), shipping state
                   (shipping_pending/dispatched/fullfilled) or 'user'.
      q_user    -- with filter_by=user: free-text match against bidder
                   username/first/last name.
      show      -- 'open' (default) or 'close', selects Sell.closed.
      order_by  -- oldest/newest/username/-username/total/-total; anything
                   else falls back to newest sale date.
      page      -- 1-based page number.

    Fix: the page-number parse used a bare ``except:``; it is narrowed to
    the exceptions ``int()`` can actually raise.
    """
    from sell.models import Sell

    filter_by = request.GET.get('filter_by', '')
    order_by = request.GET.get('order_by', '')
    show = request.GET.get('show', '')
    filter_params = {'order_by': order_by,
                     'filter_by': filter_by,
                     'show': show, }
    q_user = ''
    shop = request.shop
    sell_list = Sell.objects.filter(shop=shop)

    # --- filtering -------------------------------------------------------
    if filter_by == 'for_date_today':
        d = datetime.datetime.now()
        date_from = datetime.datetime(d.year, d.month, d.day)
        date_to = date_from + datetime.timedelta(1)
        sell_list = sell_list.filter(date_time__range=(date_from, date_to))
    elif filter_by == 'for_date_week':
        # From the most recent Monday up to now.
        d = datetime.datetime.now()
        delta = d.weekday()
        date_from = d - datetime.timedelta(delta)
        sell_list = sell_list.filter(date_time__range=(date_from, d))
    elif filter_by == 'for_date_month':
        sell_list = sell_list.filter(date_time__month=datetime.datetime.now().date().month)
    elif filter_by == 'for_date_year':
        sell_list = sell_list.filter(date_time__year=datetime.datetime.now().date().year)
    elif filter_by == 'payment_pending':
        sell_list = sell_list.filter(payment__state_actual__state='PE')
    elif filter_by == 'payment_paid':
        sell_list = sell_list.filter(payment__state_actual__state='PA')
    elif filter_by == 'payment_failed':
        sell_list = sell_list.filter(payment__state_actual__state='FA')
    elif filter_by == 'shipping_pending':
        sell_list = sell_list.filter(shipping__state_actual__state='PE')
    elif filter_by == 'shipping_dispatched':
        sell_list = sell_list.filter(shipping__state_actual__state='DI')
    elif filter_by == 'shipping_fullfilled':
        sell_list = sell_list.filter(shipping__state_actual__state='FU')
    elif filter_by == 'user':
        q_user = request.GET.get('q_user', '')
        f = Q(bidder__username__icontains=q_user) | Q(bidder__first_name__icontains=q_user) | Q(bidder__last_name__icontains=q_user)
        sell_list = sell_list.filter(f)

    # --- open/closed -----------------------------------------------------
    if show == 'open':
        sell_list = sell_list.filter(closed=False)
    elif show == 'close':
        sell_list = sell_list.filter(closed=True)
    else:
        # Default view: open sales only.
        sell_list = sell_list.filter(closed=False)

    # --- ordering --------------------------------------------------------
    if order_by == 'oldest':
        sell_list = sell_list.order_by("id")
    elif order_by == 'newest':
        sell_list = sell_list.order_by("-id")
    elif order_by == 'username':
        sell_list = sell_list.order_by("bidder__username")
    elif order_by == '-username':
        sell_list = sell_list.order_by("-bidder__username")
    elif order_by == 'total':
        sell_list = sell_list.order_by("total")
    elif order_by == '-total':
        sell_list = sell_list.order_by("-total")
    else:
        sell_list = sell_list.order_by("-date_time")

    # --- paging ----------------------------------------------------------
    # NOTE(review): page size is 5 although module-level PAGE_ITEMS = 10
    # exists -- confirm which is intended.
    pager = Paginator(sell_list, 5)
    try:
        page = int(request.GET.get('page', '1'))
    except (TypeError, ValueError):
        page = 1
    try:
        sells = pager.page(page)
    except (EmptyPage, InvalidPage):
        # Out-of-range page numbers fall back to the last page.
        sells = pager.page(pager.num_pages)
    paged = (pager.num_pages > 1)

    params = {
        'sells': sells,
        'pages': pager.page_range,
        'paged': paged,
        'filter_params': filter_params,
        'q_user': q_user,
    }
    return render_to_response('store_admin/customers/sold_items.html', params, RequestContext(request))
@shop_admin_required
def customers_payments(request):
    """Render the customer payments page (static template, no context)."""
    context = {}
    return render_to_response('store_admin/customers/payments.html',
                              context, RequestContext(request))
@shop_admin_required
def customers_shipments(request):
    """Render the customer shipments page (static template, no context)."""
    context = {}
    return render_to_response('store_admin/customers/shipments.html',
                              context, RequestContext(request))
@shop_admin_required
def customers_wish_lists(request):
    """Show marketplace wish-list items: searchable (POST) or sortable (GET)."""
    from market_buy.models import WishListItem

    items = WishListItem.objects.filter(marketplace=request.shop.marketplace)
    search_text = ''
    if request.method == "POST":
        search_text = request.POST.get("search_text")
        items = items.filter(description__contains=search_text)
    else:
        # Map sort_by values to ORM ordering expressions; unknown values
        # leave the queryset unordered, matching the original behavior.
        ordering = {
            "oldest": "id",
            "newest": "-id",
            "category": "category__name",
            "username": "posted_by__username",
            "price": "ideal_price",
            "-price": "-ideal_price",
        }
        sort = request.GET.get('sort_by', 'oldest')
        if sort in ordering:
            items = items.order_by(ordering[sort])
    return render_to_response('store_admin/customers/wish_lists.html',
                              {
                                  'wishlistitems': items,
                                  'search_text': search_text,
                              },
                              RequestContext(request))
@shop_admin_required
def customers_send_notification(request, id):
from market_buy.models import WishListItem
wishitem = get_object_or_404(WishListItem, pk=id)
shop = request.shop
subject = "Notification from %s" % shop.name_shop()
the_wish = "Hi %s, you have post an item in the wish list of %s on %s. You have specified the following information about your wish item: \n\n- Description: %s\n- Ideal Price: $%s\n- Category: %s\n- Subcategory: %s" % (wishitem.posted_by.get_full_name() or wishitem.posted_by.username , wishitem.marketplace, wishitem.posted_on, wishitem.description, wishitem.ideal_price, wishitem.category.name, wishitem.subcategory.name)
the_message = "%s from %s has found an item that appears to match the it |
googleapis/python-binary-authorization | samples/generated_samples/binaryauthorization_v1beta1_generated_binauthz_management_service_v1_beta1_get_attestor_async.py | Python | apache-2.0 | 1,601 | 0.002498 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC |
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language g | overning permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for GetAttestor
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-binaryauthorization
# [START binaryauthorization_v1beta1_generated_BinauthzManagementServiceV1Beta1_GetAttestor_async]
from google.cloud import binaryauthorization_v1beta1
async def sample_get_attestor():
    """Fetch a single Attestor by resource name and print it.

    Generated sample: requires application-default credentials with access
    to the Binary Authorization API; replace "name_value" with a real
    attestor resource name before running.
    """
    # Create a client
    client = binaryauthorization_v1beta1.BinauthzManagementServiceV1Beta1AsyncClient()

    # Initialize request argument(s)
    request = binaryauthorization_v1beta1.GetAttestorRequest(
        name="name_value",
    )

    # Make the request
    response = await client.get_attestor(request=request)

    # Handle the response
    print(response)
# [END binaryauthorization_v1beta1_generated_BinauthzManagementServiceV1Beta1_GetAttestor_async]
|
Tatsh-ansible/ansible | lib/ansible/modules/messaging/rabbitmq_parameter.py | Python | gpl-3.0 | 4,458 | 0.002019 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Chatham Financial <oss@chathamfinancial.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: rabbitmq_parameter
short_description: Adds or removes parameters to RabbitMQ
description:
- Manage dynamic, cluster-wide parameters for RabbitMQ
version_added: "1.1"
author: '"Chris Hoffman (@chrishoffman)"'
options:
component:
description:
- Name of the component of which the parameter is being set
required: true
default: null
name:
description:
- Name of the parameter being set
required: true
default: null
value:
description:
- Value of the parameter, as a JSON term
required: false
default: null
vhost:
description:
- vhost to apply access privileges.
required: false
default: /
node:
description:
- erlang node name of the rabbit we wish to configure
required: false
default: rabbit
version_added: "1.2"
state:
description:
- Specify if user is to be added or removed
required: false
default: present
choices: [ 'present', 'absent']
'''
EXAMPLES = """
# Set the federation parameter 'local_username' to a value of 'guest' (in quotes)
- rabbitmq_parameter:
component: federation
name: local-username
value: '"guest"'
state: present
"""
import json
from ansible.module_utils.basic import AnsibleModule
class RabbitMqParameter(object):
    """Thin wrapper around ``rabbitmqctl`` for one cluster-wide parameter."""

    def __init__(self, module, component, name, value, vhost, node):
        self.module = module
        self.component = component
        self.name = name
        self.value = value
        self.vhost = vhost
        self.node = node
        # Value currently configured on the broker, filled in by get().
        self._value = None
        self._rabbitmqctl = module.get_bin_path('rabbitmqctl', True)

    def _exec(self, args, run_in_check_mode=False):
        """Run rabbitmqctl with *args*; a no-op in check mode unless allowed."""
        if self.module.check_mode and not run_in_check_mode:
            return list()
        cmd = [self._rabbitmqctl, '-q', '-n', self.node] + args
        rc, out, err = self.module.run_command(cmd, check_rc=True)
        return out.splitlines()

    def get(self):
        """Return True (and cache the broker's value) if the parameter exists."""
        lines = self._exec(['list_parameters', '-p', self.vhost], True)
        for line in lines:
            component, name, value = line.split('\t')
            if (component, name) == (self.component, self.name):
                self._value = json.loads(value)
                return True
        return False

    def set(self):
        """Create or update the parameter on the broker."""
        self._exec(['set_parameter',
                    '-p',
                    self.vhost,
                    self.component,
                    self.name,
                    json.dumps(self.value)])

    def delete(self):
        """Remove the parameter from the broker."""
        self._exec(['clear_parameter', '-p', self.vhost, self.component, self.name])

    def has_modifications(self):
        """True when the desired value differs from the broker's value."""
        return self.value != self._value
def main():
    """Ansible entry point: converge the requested parameter state."""
    arg_spec = dict(
        component=dict(required=True),
        name=dict(required=True),
        value=dict(default=None),
        vhost=dict(default='/'),
        state=dict(default='present', choices=['present', 'absent']),
        node=dict(default='rabbit')
    )
    module = AnsibleModule(
        argument_spec=arg_spec,
        supports_check_mode=True
    )

    component = module.params['component']
    name = module.params['name']
    value = module.params['value']
    if isinstance(value, str):
        # Parameter values are supplied as JSON terms.
        value = json.loads(value)
    vhost = module.params['vhost']
    state = module.params['state']
    node = module.params['node']

    param = RabbitMqParameter(module, component, name, value, vhost, node)

    changed = False
    exists = param.get()
    if exists and state == 'absent':
        param.delete()
        changed = True
    elif exists and param.has_modifications():
        # state can only be 'present' here; update the differing value.
        param.set()
        changed = True
    elif not exists and state == 'present':
        param.set()
        changed = True

    module.exit_json(changed=changed, component=component, name=name, vhost=vhost, state=state)
if __name__ == '__main__':
main()
|
mykonosbiennale/mykonosbiennale.github.io | filmfestival/models.py | Python | apache-2.0 | 13,267 | 0.011231 | from django.db import models
from django_countries.fields import CountryField
from django.utils.text import slugify
from django.core.urlresolvers import reverse
from phonenumber_field.modelfields import PhoneNumberField
import os,datetime
from uuid import uuid4
from imagekit.models import ImageSpecField
from imagekit.processors import ResizeToFill, ResizeToFit
from sorl.thumbnail import ImageField
from festival.models import Project
from material.models import Album
def path_and_rename(instance, filename):
    """Upload-path callback delegating to document_path with poster naming.

    Fix: the original called document_path() but dropped its return value,
    so the storage backend received None as the upload path.
    """
    return document_path(instance, filename, 'poster', 'images')
def poster_path(instance, filename):
    # Upload-path callback for film posters.
    # NOTE(review): the early return below makes everything after it
    # unreachable, so posters currently keep their original filename.
    # The slug-based naming that follows looks like the intended behavior;
    # confirm whether the early return is a leftover debugging shortcut.
    return filename
    base, ext = os.path.splitext(filename)
    # get filename
    slug = slugify(instance.title+'-'+instance.dir_by)
    filename = 'mykonos-biennale-2015-film-festival-{}-{}{}'.format(slug,'poster', ext)
    return os.path.join('images', filename)
def location_image_path(instance, filename):
    """Build the upload path for a Location image (slugged name, under images/)."""
    ext = os.path.splitext(filename)[1]
    new_name = 'mykonos-biennale-2015-{}-{}{}'.format(
        'location', slugify(instance.name), ext)
    return os.path.join('images', new_name)
def image_path(instance, filename):
    """Build the upload path for a film-related image, numbered per film."""
    ext = os.path.splitext(filename)[1]
    slug = slugify('-'.join([instance.film.title, instance.title, instance.film.dir_by]))
    index = instance.film.filmfestival_image_related.count()
    new_name = 'mykonos-biennale-2015-film-festival-{}-{}-{}{}'.format(
        slug, instance.image_type, index, ext)
    return os.path.join('images', new_name)
def headshot_path(instance, filename):
    """Upload-path callback for headshots.

    Fix: the original dropped document_path()'s return value (path was None).
    """
    return document_path(instance, filename, 'headshot', 'images')
def screenshot_path(instance, filename):
    """Upload-path callback for screenshots.

    Fix: the original dropped document_path()'s return value (path was None).
    """
    return document_path(instance, filename, 'screenshot', 'images')
def still_path(instance, filename):
    """Upload-path callback for film stills.

    Fix: the original dropped document_path()'s return value (path was None).
    """
    return document_path(instance, filename, 'still', 'images')
def document_path(instance, filename, prefix='document', path='documents'):
    """Build '<path>/mykonos-biennale-2015-film-festival-<slug>-<prefix><id><ext>'.

    Fix: the format string had only three placeholders for four arguments,
    so the file extension was silently dropped from the generated name.
    """
    base, ext = os.path.splitext(filename)
    slug = slugify(instance.film.title + '-' + instance.film.dir_by)
    filename = 'mykonos-biennale-2015-film-festival-{}-{}{}{}'.format(
        slug, prefix, instance.id, ext)
    return os.path.join(path, filename)
class Award(models.Model):
    # A festival award; granted to films through the Reward join model.
    name = models.CharField(max_length=200)
    # NOTE(review): slug is not auto-generated in a save() override here,
    # unlike Location/Program/Day -- it must be set manually.
    slug = models.SlugField(max_length=200)
    image = ImageField (upload_to='awards', max_length=256, blank=True)
    description = models.TextField()
    def __unicode__(self):
        return "{} {}".format(self.name, self.description)
class Reward(models.Model):
    # Join model: which film received which award, with citation text.
    film = models.ForeignKey('Film')
    award = models.ForeignKey('Award')
    description = models.TextField()
    def __unicode__(self):
        return "{} {}".format(self.film.title, self.award)
class Location(models.Model):
    # A festival venue; slug is regenerated from name on every save.
    name = models.CharField(max_length=200)
    slug = models.SlugField(max_length=200)
    image = ImageField (upload_to=location_image_path, max_length=256, blank=True)
    address = models.TextField(blank=True, default='')
    url = models.URLField(blank=True, default='')
    embeded_map = models.TextField(blank=True, default='')  # raw embed HTML for a map widget
    def __unicode__(self):
        return self.name
    def get_absolute_url(self):
        # Reverse the 'location' URL pattern by slug.
        return reverse('location', args=[self.slug])
    def save(self, *args, **kwargs):
        self.slug = slugify(self.name)
        super(Location, self).save(*args, **kwargs)
class Program(models.Model):
    # A festival program (a named series of Days); slug derives from title.
    title = models.CharField(max_length=200)
    slug = models.SlugField(max_length=200)
    def __unicode__(self):
        return self.title
    def get_absolute_url(self):
        # Reverse the 'program' URL pattern by slug.
        return reverse('program', args=[self.slug])
    def save(self, *args, **kwargs):
        self.slug = slugify(self.title)
        super(Program, self).save(*args, **kwargs)
class Day(models.Model):
    # One festival day within a Program; aggregates its Screenings.
    class Meta:
        ordering = ['date']
    program = models.ForeignKey(Program)
    date = models.DateField()
    slug = models.SlugField(max_length=200)
    runtime = models.IntegerField(default=0)  # total minutes; maintained by build_timetable()
    start_time = models.TimeField(default="21:00")
    def number_of_films(self):
        # Number of screenings scheduled for this day.
        return self.screening_set.count()
    def __unicode__(self):
        return "{} {}".format(self.program, self.date)
    def save(self, *args, **kwargs):
        # Slug derives from "<program> <date>" and is regenerated each save.
        self.slug = slugify(str(self))
        super(Day, self).save(*args, **kwargs)
    def build_timetable(self):
        # Chain all screenings back-to-back from the day's start_time and
        # accumulate the total runtime. (Python 2 file: print statement.)
        previous_screening = None
        runtime = 0
        count = 10
        for screening in self.screening_set.all():
            runtime += screening.film.runtime
            screening.schedule(previous_screening)
            screening.save()
            previous_screening = screening
            # Debug/export line: day, slot index, film slug, projection URL.
            print "%02d-%03d-%s %s" % ( self.date.day, count, slugify(screening.film.title), screening.film.projection_copy_url)
            count +=10
        self.runtime = runtime
        self.save()
    def first_screening(self):
        # Earliest screening of the day, or None.
        try:
            return self.screening_set.first()
        except Screening.DoesNotExist:
            pass
    def last_screening(self):
        # Latest screening of the day, or None.
        try:
            return self.screening_set.last()
        except Screening.DoesNotExist:
            pass
class Screening(models.Model):
    # A single film slot on a Day; start_time is computed, not user-entered.
    class Meta:
        ordering = ('id',)
        get_latest_by ="start_time"
    # NOTE(review): null=None is falsy, i.e. equivalent to null=False --
    # confirm whether null=True was intended alongside blank=True.
    day = models.ForeignKey(Day, blank=True, null=None)
    pause = models.IntegerField(default=3)  # minutes of gap after the previous film
    film = models.ForeignKey('Film')
    location = models.ForeignKey('Location', blank=True, null=True)
    slug = models.SlugField(max_length=200)
    start_time = models.DateTimeField(blank=True, default=None)
    def __unicode__(self):
        return "{} {} {}".format(self.day.program, self.start_time, self.film.title)
    def schedule(self, previous_screening=None):
        # Start right after the previous screening (its runtime + pause),
        # or at the day's configured start_time when first in the day.
        if previous_screening:
            self.start_time = previous_screening.start_time
            self.start_time += datetime.timedelta(minutes=previous_screening.film.runtime+previous_screening.pause)
        else:
            self.start_time = datetime.datetime.combine(self.day.date, self.day.start_time)
    def save(self, *args, **kwargs):
        self.slug = slugify(str(self))
        # Only auto-schedule on first save; later saves keep the slot.
        if self.id is None:
            self.schedule(self.day.last_screening())
        super(Screening, self).save(*args, **kwargs)
class Fi | lm(models.Model):
class Meta:
ordering = ['ref']
DRAMATIC_NIGHTS = 'Dramatic Nights'
VIDEO_GRAFITTI = 'Video Grafitti'
DANCE = 'Dance'
DOCUMENTARY = 'Documentary'
FILM_TYPES_CHOICES = (
(DRAMATIC_NIGHTS, 'Dramatic Nights'),
(VIDEO_GRAFITTI, 'Video Graffiti'),
(DANCE, 'Dance'),
(DOCUMENTARY, 'Documentary'),
)
SELECTED = 'SELECTED'
UNDECIDED = 'UNDECIDED'
OUT = 'OUT'
ENTRY_STATUS_CHOICES = (
(SELECTED, 'Selected'),
(UNDECIDED, 'Undecided'),
(OUT, 'Out'),
)
WITHOUTABOX = 'WITHOUTABOX'
FILMFREEWAY = 'UNDECIDED'
OTHER = 'OUT'
FILM_SOURCE_CHOICES = (
(WITHOUTABOX, 'Withoutabox'),
(FILMFREEWAY, 'Filmfreeway'),
(OTHER, 'Other'),
)
ref = models.CharField(max_length=30)
source = models.CharField(max_length=30,
choices=FILM_SOURCE_CHOICES,
default=WITHOUTABOX)
title = models.CharField(max_length=200)
original_title = models.CharField(max_length=200,blank=True, default='')
slug = models.SlugField(max_length=200)
dir_by = models.CharField(max_length=128)
sub_by = models.CharField(max_length=128, default='')
contact_email = models.EmailField(blank=True, default='')
contact_phone = PhoneNumberField(blank=True, default='')
posted_on_facebook = models.BooleanField(default=False)
subtitles = models.BooleanField(default=False)
language= models.CharField(max_length=128,blank=True, default='')
actors = models.TextField(blank=True, default='')
year = models.CharField(max_length=4)
runtime = models.IntegerField()
country = models.TextField(blank=True, default='')
#coming = models |
ramusus/django-vkontakte-places | vkontakte_places/factories.py | Python | bsd-3-clause | 328 | 0 | from m | odels import City, Country
import factory
class CityFactory(factory.DjangoModelFactory):
    # remote_id is assigned sequentially: 1, 2, 3, ... (Sequence n starts at 0).
    remote_id = factory.Sequence(lambda n: n + 1)
    class Meta:
        model = City
class CountryFactory(factory.DjangoModelFactory):
    # remote_id is assigned sequentially: 1, 2, 3, ... (Sequence n starts at 0).
    remote_id = factory.Sequence(lambda n: n + 1)
    class Meta:
        model = Country
|
ohsu-computational-biology/server | ga4gh/datamodel/genotype_phenotype_featureset.py | Python | apache-2.0 | 12,563 | 0.00008 | """
Module responsible for translating g2p data into GA4GH native
objects.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import bisect
import rdflib
from rdflib import RDF
import ga4gh.protocol as protocol
import ga4gh.datamodel.sequenceAnnotations as sequenceAnnotations
import ga4gh.datamodel.genotype_phenotype as g2p
# annotation keys
TYPE = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#type'
LABEL = 'http://www.w3.org/2000/01/rdf-schema#label'
HAS_QUALITY = 'http://purl.obolibrary.org/obo/BFO_0000159'
FALDO_LOCATION = "http://biohackathon.org/resource/faldo#location"
FALDO_BEGIN = "http://biohackathon.org/resource/faldo#begin"
FALDO_END = "http://biohackathon.org/resource/faldo#end"
FALDO_POSITION = "http://biohackathon.org/resource/faldo#position"
FALDO_REFERENCE = "http://biohackathon.org/resource/faldo#reference"
MEMBER_OF = 'http://purl.obolibrary.org/obo/RO_0002350'
ASSOCIATION = "http://purl.org/oban/association"
HAS_SUBJECT = "http://purl.org/oban/association_has_subject"
class PhenotypeAssociationFeatureSet(
g2p.G2PUtility, sequenceAnnotations.Gff3DbFeatureSet):
"""
An rdf object store. The cancer genome database
[Clinical Genomics Knowledge Base]
(http://nif-crawler.neuinfo.org/monarch/ttl/cgd.ttl),
published by the Monarch project, was the source of Evidence.
"""
    def __init__(self, parentContainer, localId):
        # Delegates to Gff3DbFeatureSet; RDF state is initialized later by
        # populateFromRow()/populateFromFile().
        super(PhenotypeAssociationFeatureSet, self).__init__(
            parentContainer, localId)
# mimic featureset
    def populateFromRow(self, row):
        """
        Populates the instance variables of this FeatureSet from the specified
        DB row. The row is keyed by bytes (b'dataUrl').
        """
        self._dbFilePath = row[b'dataUrl']
        self.populateFromFile(self._dbFilePath)
    def populateFromFile(self, dataUrl):
        """
        Populates the instance variables of this FeatureSet from the specified
        data URL.
        Initialize dataset, using the passed dict of sources
        [{source,format}] see rdflib.parse() for more
        If path is set, this backend will load itself
        """
        self._dbFilePath = dataUrl
        # initialize graph
        self._rdfGraph = rdflib.ConjunctiveGraph()
        # save the path
        self._dataUrl = dataUrl
        # Load every *.ttl file under the data URL into the graph.
        self._scanDataFiles(self._dataUrl, ['*.ttl'])

        # extract version: read owl:versionInfo from the CGD dataset node.
        cgdTTL = rdflib.URIRef("http://data.monarchinitiative.org/ttl/cgd.ttl")
        versionInfo = rdflib.URIRef(
            u'http://www.w3.org/2002/07/owl#versionInfo')
        self._version = None
        for _, _, obj in self._rdfGraph.triples((cgdTTL, versionInfo, None)):
            self._version = obj.toPython()
        # setup location cache (locationMap[build][chrom][begin][end])
        self._initializeLocationCache()
# mimic featureset
    def getFeature(self, compoundId):
        """
        find a feature and return ga4gh representation, use compoundId as
        featureId. compoundId.featureId is the 'native' RDF URI of the
        feature; the returned Feature's id is the full compound id string.
        """
        feature = self._getFeatureById(compoundId.featureId)
        feature.id = str(compoundId)
        return feature
    def _getFeatureById(self, featureId):
        """
        find a feature and return ga4gh representation, use 'native' id as
        featureId (the feature's RDF URI).
        """
        featureRef = rdflib.URIRef(featureId)
        featureDetails = self._detailTuples([featureRef])
        # Group the detail tuples into predicate -> [objects].
        feature = {}
        for detail in featureDetails:
            feature[detail['predicate']] = []
        for detail in featureDetails:
            feature[detail['predicate']].append(detail['object'])
        pbFeature = protocol.Feature()

        term = protocol.OntologyTerm()
        # Schema for feature only supports one type of `type`
        # here we default to first OBO defined
        for featureType in feature[TYPE]:
            if "obolibrary" in featureType:
                term.term = self._featureTypeLabel(featureType)
                term.id = featureType
                pbFeature.feature_type.MergeFrom(term)
                break

        pbFeature.id = featureId
        # Schema for feature only supports one type of `name` `symbol`
        # here we default to shortest for symbol and longest for name
        feature[LABEL].sort(key=len)
        pbFeature.gene_symbol = feature[LABEL][0]
        pbFeature.name = feature[LABEL][-1]

        # Copy every predicate/object pair into the attributes map.
        pbFeature.attributes.MergeFrom(protocol.Attributes())
        for key in feature:
            for val in feature[key]:
                pbFeature.attributes.vals[key].values.add().string_value = val

        # Fill genomic coordinates when the location cache knows this feature.
        if featureId in self._locationMap:
            location = self._locationMap[featureId]
            pbFeature.reference_name = location["chromosome"]
            pbFeature.start = location["begin"]
            pbFeature.end = location["end"]

        return pbFeature
# mimic featureset
    def getFeatures(self, referenceName=None, start=None, end=None,
                    pageToken=None, pageSize=None,
                    featureTypes=None, parentId=None,
                    name=None, geneSymbol=None, numFeatures=10):
        # Generator yielding (feature, nextPageToken) pairs for the search.
        # NOTE(review): featureTypes/parentId/pageSize/numFeatures are
        # currently unused; paging assumes a stable iteration order of the
        # featureIds set between requests -- confirm.
        # query to do search
        query = self._filterSearchFeaturesRequest(
            referenceName, geneSymbol, name, start, end)
        featuresResults = self._rdfGraph.query(query)
        featureIds = set()
        for row in featuresResults.bindings:
            featureIds.add(row['feature'].toPython())
        featuresCount = len(featureIds)
        if pageToken:
            nextPageToken = int(pageToken)
        else:
            nextPageToken = 0
        for idx, featureId in enumerate(featureIds):
            # Skip features already delivered on previous pages.
            if idx < nextPageToken:
                continue
            feature = self._getFeatureById(featureId)
            # get _getFeatureById returns native id, cast to compound
            feature.id = self.getCompoundIdForFeatureId(feature.id)
            if nextPageToken < featuresCount - 1:
                nextPageToken += 1
            else:
                nextPageToken = None
            yield feature, (
                str(nextPageToken)
                if nextPageToken is not None else None)
    def _baseQuery(self):
        # SPARQL skeleton selecting features that are subjects of OBAN
        # associations; the "#%FILTER%" placeholder is replaced (or removed)
        # by _filterSearchFeaturesRequest before execution.
        return """
        PREFIX OBAN: <http://purl.org/oban/>
        PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
        SELECT DISTINCT
            ?feature
            ?feature_label
        WHERE {
            ?association a OBAN:association .
            ?association OBAN:association_has_subject ?feature .
            ?feature rdfs:label ?feature_label .
            #%FILTER%
        }
        """
def _filterSearchFeaturesRequest(self, reference_name, gene_symbol, name,
start, end):
"""
formulate a sparql query string based on parameters
"""
filters = []
query = self._baseQuery()
filters = []
location = self._findLocation(reference_name, start, end)
if location:
filters.append("?feature = <{}>".format(location))
if gene_symbol:
filters.append('regex(?feature_label, "{}")')
if name:
filters.append(
'regex(?feature_label, "{}")'.format(name))
# apply filters
filter = "FILTER ({})".format(' && '.join(filters))
if len(filters) == 0:
filter = ""
query = query.replace("#%FILTER%", filter)
return query
def _findLocation(self, reference_name, start, end):
"""
return a location key form the locationMap
"""
try:
# TODO - sequenceAnnotations does not have build?
return self._locationMap['hg19'][reference_name][start][end]
except:
return None
def _initializeLocationCache(self):
"""
CGD uses Faldo ontology for locations, it's a bit complicated.
This function sets up an in memory cache of all locations, which
can be queried via:
locationMap[build][chromosome][begin][end] = location["_id"]
"""
# cache of locations
self._locationMap = {}
locationMap = self._locationMap
|
JWageM/planet-wars | bots/smt/kb.py | Python | mit | 19,880 | 0.002968 | import sys
import numpy as np
import scipy.optimize as opt
class Symbol(object):
    """
    A single unit in the boolean SAT problem: either an atomic boolean
    variable (Boolean) or a constraint over integer variables (Constraint).
    """
    pass
class Boolean(Symbol):
    """An atomic boolean variable identified by name; ~b yields its negation.

    Fix: __eq__ previously tested isinstance(other, self.__class__), which
    made a Boolean compare equal to its own negation (the _NegBoolean
    subclass shares the name), and made equality asymmetric. Equality now
    also requires the exact same class.
    """
    def __init__(self, name):
        self.__name = name

    def name(self):
        return self.__name

    def __invert__(self):
        # type: () -> Boolean
        """
        :return: the negation of this symbol
        """
        return _NegBoolean(self)

    def __eq__(self, other):
        # Same concrete class AND same name (excludes _NegBoolean wrappers).
        return type(other) is type(self) and self.name() == other.name()

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        return hash(self.name())

    def __repr__(self):
        return self.name()
class _NegBoolean(Boolean):
    """The negation of an atomic Boolean; double negation returns the original.

    Fix: __hash__ called hash(self.name(), False) -- hash() takes a single
    argument, so hashing any negated symbol raised TypeError. It now hashes
    the tuple (name, False), keeping negated symbols distinct from their
    positive counterparts.
    """
    def __init__(self, symbol):
        self.__symbol = symbol

    def name(self):
        return self.__symbol.name()

    def __invert__(self):
        # ~~b is b itself.
        return self.__symbol

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return self.name() == other.name()
        return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        return hash((self.name(), False))

    def __repr__(self):
        return '~' + self.name()
class Constraint(Symbol):
    """Base class for integer (in)equality constraints.

    Normalizes "left OP right" so all variable terms are clustered on the
    left (one Integer per distinct name, with accumulated multiplier) and
    the right-hand side is a single Constant.
    """
    def __init__(self, left, right):
        self._left = left
        self._right = right
        # Move everything to the left so the right-hand side is a Constant.
        if not isinstance(self._right, Constant):
            self._left = Sum(self._left, - self._right)
            self._right = Constant(0)
        # Cluster the symbols on the left: name -> accumulated multiplier;
        # the None key accumulates the constant part.
        symbols = {None: 0}
        self.cluster(self._left, symbols)
        # create new left and right
        self._right = Constant(self._right.value() - symbols[None])
        nwterms = []
        for name, mult in symbols.iteritems():  # NOTE: Python 2 dict API
            if name is not None:
                nwterms.append(Integer(name, mult))
        self._left = Sum(*nwterms)
    def cluster(self, term, symbols):
        # Recursively fold *term* into the symbols dict (see __init__).
        if isinstance(term, Constant):
            symbols[None] += term.value()
            return
        if isinstance(term, Integer):
            if term.name() not in symbols:
                symbols[term.name()] = 0
            symbols[term.name()] += term.mult()
            return
        if isinstance(term, Sum):
            for subterm in term.terms():
                self.cluster(subterm, symbols)
            return
        raise ValueError('Encountered element {} of type {}. Arithmetic expressions should contain only KB objects or integers.'.format(term, term.__class__))
    def symbol(self):
        # Overridden by subclasses with the comparison operator string.
        return '?'
    def __repr__(self):
        return '[' + str(self._left) + ' ' + self.symbol() + ' ' + str(self._right) + ']'
    def symbols(self):
        '''
        Returns a list of all integer symbols appearing in this constraint
        :return:
        '''
        return union(self._left.symbols(), self._right.symbols())
class GT(Constraint):
    # Strict greater-than constraint: left > right.
    def __init__(self, left, right):
        super(GT, self).__init__(left, right)
    def symbol(self):
        return '>'
    def __invert__(self):
        # Logical negation: not (a > b) is (a <= b).
        return LEQ(self._left, self._right)
    def canonical(self):
        """
        Convert to a LEQ relation (a > b becomes b <= a - 1, integer domain).
        """
        return LEQ(self._right, self._left - 1)
class GEQ(Constraint):
    # Greater-or-equal constraint: left >= right.
    def __init__(self, left, right):
        super(GEQ, self).__init__(left, right)
    def symbol(self):
        return '>='
    def __invert__(self):
        # Logical negation: not (a >= b) is (a < b).
        return LT(self._left, self._right)
    def canonical(self):
        """
        Convert to a LEQ relation (a >= b becomes b <= a).
        """
        return LEQ(self._right, self._left)
class LT(Constraint):
    # Strict less-than constraint: left < right.
    def __init__(self, left, right):
        super(LT, self).__init__(left, right)
    def symbol(self):
        return '<'
    def __invert__(self):
        # Logical negation: not (a < b) is (a >= b).
        return GEQ(self._left, self._right)
    def canonical(self):
        """
        Convert to a LEQ relation (a < b becomes a <= b - 1, integer domain).
        """
        return LEQ(self._left, self._right - 1)
class LEQ(Constraint):
    # Less-or-equal constraint: left <= right. This is the canonical form
    # all other inequalities convert to.
    def __init__(self, left, right):
        super(LEQ, self).__init__(left, right)
    def symbol(self):
        return '<='
    def __invert__(self):
        # Logical negation: not (a <= b) is (a > b).
        return GT(self._left, self._right)
    def canonical(self):
        """
        Convert to a LEQ relation (already canonical).
        """
        return self
class EQ(Constraint):
    # Equality constraint: left == right. Deliberately has no __invert__:
    # NEQ would make the LP relaxation non-convex (see the commented-out
    # NEQ class below).
    def __init__(self, left, right):
        super(EQ, self).__init__(left, right)
    def symbol(self):
        return '=='
    def canonical(self):
        """
        The canonical form of an EQ relation is itself.
        """
        return self
# Not used, as it makes the LP problem nonconvex
#
# class NEQ(Constraint):
# def __init__(self, left, right):
# super(NEQ, self).__init__(left, right)
#
# def symbol(self):
# return '!='
#
# def __invert__(self):
# return EQ(self._left, self._right)
class IntSymbol:
"""
A symbolic expression representing an integer: either an atomic symbol like 'x', a constant
like 15 or a compound expression like 'x + 15 - y'
"""
def __lt__(self, other):
other = self.check(other)
return LT(self, other)
def __gt__(self, other):
other = self.check(other)
return GT(self, other)
def __le__(self, other):
other = self.check(other)
return LEQ(self, other)
def __ge__(self, other):
other = self.check(other)
return GEQ(self, other)
def __eq__(self, other):
other = self.check(other)
return EQ(self, other)
# def __ne__(self, other):
# other = self.check(other)
# return NEQ(self, other)
def __add__(self, other):
other = self.check(other)
return Sum(self, other)
__radd__ = __add__
def __sub__(self, other):
other = self.check(other)
return Sum(self, - other)
__rub__ = __sub__
def check(self, other):
if not isinstance(other, IntSymbol):
if isinstance(other, int):
return Constant(other)
raise ValueError('You can only use KB objects or ints in comparisons. Encountered: {} {}'.format(other, other.__class__))
return other
class Sum(IntSymbol):
def __init__(self, *terms):
self.__terms = terms
for term in self.__terms:
if isinstance(term, int):
raise ValueError('Unwrapped int {}, {}'.format(term, term.__class__))
self.__name = ''
for i, term in enumerate(terms):
self.__name += ('' if i == 0 else ' + ') + str(term)
def name(self):
return self.__name
def terms(self):
return self.__terms
def allterms(self):
return self.__terms
def __neg__(self):
neg_terms = []
for term in self.__terms:
neg_terms.append(- term)
return Sum(*neg_terms)
def __hash__(self):
return hash(self.name())
def __repr__(self):
return self.__name
def symbols(self):
'''
Returns a set of all integer symbols appearing in this constraint
:return:
'''
return union(*[term.symbols() for term in self.__terms])
c | lass Integer(IntSymbol):
def __init__(self, name, mult = 1):
"""
:rtype: object
"""
self.__name = name
self.__mult = mult
def name(self):
return self.__name
def mult(self):
return self.__mult
def __neg__(self):
return Integer(self.name(), - self.__mult) |
def __hash__(self):
return hash(self.name())
def __mul__(self, other):
if not isinstance(other, int):
raise ValueError('Can only multiply number symbol by int.')
return Integer(self.__name, other)
__rmul__ = __mul__
def __repr__(self):
if self.__mult == 1:
return self.name()
if self.__mult == -1:
return '(-{})'.format(self.name())
if self.__mult < 0:
return '({}{})'.format(self.__mult, self.name())
return ' |
LBatsoft/python3-webapp | www/coroweb.py | Python | gpl-3.0 | 6,284 | 0.003819 | # !/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = "pwxc"
import asyncio, os, inspect, logging, functools
from urllib import parse
from aiohttp import web
from apis import APIError
def get(path):
"""
Define decorator @get('/path')
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kw):
return func(*args, **kw)
wrapper.__method__ = 'GET'
wrapper.__route__ = path
return wrapper
return decorator
def post(path):
"""
Define decorator @post('/path')
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kw):
return func(*args, **kw)
wrapper.__method__ = 'POST'
wrapper.__route__ = path
return wrapper
return decorator
def get_required_kw_args(fn):
args = []
params = inspect.signature(fn).parameters
for name, param in params.items():
if param.kind == inspect.Parameter.KEYWORD_ONLY and param.default == inspect.Parameter.empty:
args.append(name)
return tuple(args)
def get_named_kw_args(fn):
args = []
params = inspect.signature(fn).parameters
for name, param in params.items():
if param.kind == inspect.Parameter.KEYWORD_ONLY:
args.append(name)
return tuple(args)
def has_named_kw_args(fn):
params = inspect.signature(fn).parameters
for name, param in params.items():
if param.kind == inspect.Parameter.KEYWORD_ONLY:
return True
def has_var_kw_arg(fn):
params = inspect.signature(fn).parameters
for name, param in params.items():
if param.kind == inspect.Parameter.VAR_KEYWORD:
return True
def has_request_arg(fn):
sig = inspect.signature(fn)
params = sig.parameters
found = False
for name, param in params.items():
if name == 'request':
found = True
continue
if found and (param.kind != inspect.Parameter.VAR_POSITIONAL and param.kind != inspect.Parameter.KEYWORD_ONLY and param.kind != inspect.Parameter.VAR_KEYWORD):
raise ValueError('request parameter must be the last named parameter in function: %s %s' % (fn.__name__, str(sig)))
return found
class RequestHandler(object):
def __init__(self, app, fn):
self._app = app
self._func = fn
self._has_request_arg = has_request_arg(fn)
self._has_var_kw_arg = has_var_kw_arg(fn)
self._has_named_kw_args = has_named_kw_args(fn)
self._named_kw_args = get_named_kw_args(fn)
self._required_kw_args = get_required_kw_args(fn)
@asyncio.coroutine
def __call__(self, request):
kw = None
if self._has_var_kw_arg or self._has_named_kw_args or self._required_kw_args:
if request.method == 'POST':
if not request.content_type:
return web.HTTPBadRequest('Missing Content-Type.')
ct = request.content_type.lower()
if ct.startswith('application/json'):
params = yield from request.json()
if not isinstance(params, dict):
return web.HTTPBadRequest('JSON body must be object.')
kw = params
elif ct.startswith('application/x-www-form-urlencoded') or ct.startswith('multipart/form-data'):
params = yield from request.post()
kw = dict(**params)
else:
return web.HTTPBadRequest('Unsupported Content-Type: %s' % request.content_type)
if request.method == 'GET':
qs = request.query_string
if qs:
kw = dict()
for k, v in parse.parse_qs(qs, True).items():
kw[k] = v[0]
if kw is None:
kw = dict(**request.match_info)
else:
if not self._has_var_kw_arg and self._named_kw_args:
# remove all unamed kw:
copy = dict()
for name in self._named_kw_args:
if name in kw:
copy[name] = kw[name]
kw = copy
# check named arg:
for k, v in request.match_info.items():
if k in kw:
logging.warning('Duplicate arg name in named arg and kw args: %s' % k)
kw[k] = v
if self._has_request_arg:
kw['request'] = request
# check required kw:
if self._required_kw_args:
for name in self._required_kw_args:
if not name in kw:
return web.HTTPBadRequest('Missing argument: %s' % name)
logging.info('call with args: %s' % str(kw))
try:
r = yield from self._func(**kw)
return r
except APIError as e:
return dict(error=e.error, data=e.data, message=e.message)
def add_static(app):
path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'static')
app.router.add_static('/static/', path)
logging.info('add static %s => %s' % ('/static/', path))
def add_route(app, fn):
method = getattr(fn, '__method__', None)
path = getattr(fn, '__route__', None)
if path is None or method is None:
raise ValueError('@get or @post not defined in %s.' % str(fn))
if not asyncio.iscoroutinefunction(fn) and not inspect.isgeneratorfunction(fn):
fn = asyncio.coroutine(fn)
logging.info('add route %s %s => %s(%s)' % (method, path, fn.__name__, ', '.join(inspect.signature(fn).parameters.keys())))
app.router.add_route(method, path, RequestHandler(app, fn))
def add_routes(app, module_name):
n = module_name.rfind('.')
if n == (-1):
mod = __import__(module_nam | e, globals(), locals())
else:
name = module_name[n+1:]
mod = getattr(__import__(module_n | ame[:n], globals(), locals(), [name]), name)
for attr in dir(mod):
if attr.startswith('_'):
continue
fn = getattr(mod, attr)
if callable(fn):
method = getattr(fn, '__method__', None)
path = getattr(fn, '__route__', None)
if method and path:
add_route(app, fn) |
youtube/cobalt | third_party/llvm-project/lldb/packages/Python/lldbsuite/test/functionalities/data-formatter/data-formatter-advanced/TestDataFormatterAdv.py | Python | bsd-3-clause | 11,243 | 0.00169 | """
Test lldb data formatter subsystem.
"""
from __future__ import print_function
import os
import time
import lldb
from lldbsuite.test.lldbtest import *
import lldbsuite.test.lldbutil as lldbutil
class AdvDataFormatterTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number to break at.
self.line = line_number('main.cpp', '// Set break point at this line.')
def test_with_run_command(self):
"""Test that that file and class static variables display correctly."""
self.build()
self.runCmd("file " + self.getBuildArtifact("a.out"), CURRENT_EXECUTABLE_SET)
lldbutil.run_break_set_by_file_and_line(
self, "main.cpp", self.line, num_expected_locations=1, loc_exact=True)
self.runCmd("run", RUN_SUCCEEDED)
# The stop reason of the thread should be breakpoint.
self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
substrs=['stopped',
'stop reason = breakpoint'])
# This is the function to remove the custom formats in order to have a
# clean slate for the next test case.
def cleanup():
self.runCmd('type format clear', check=False)
self.runCmd('type summary clear', check=False)
self.runCmd(
"settings set target.max-children-count 256",
check=False)
# Execute the cleanup function during test case tear down.
self.addTearDownHook(cleanup)
self.runCmd("type summary add --summary-string \"pippo\" \"i_am_cool\"")
self.runCmd(
"type summary add --summary-string \"pluto\" -x \"i_am_cool[a-z]*\"")
self.expect("frame variable cool_boy",
substrs=['pippo'])
self.expect("frame variable cooler_boy",
substrs=['pluto'])
self.runCmd("type summary delete i_am_cool")
self.expect("frame variable cool_boy",
substrs=['pluto'])
self.runCmd("type summary clear")
self.runCmd(
"type summary add --summary-string \"${var[]}\" -x \"int \\[[0-9]\\]")
self.expect("frame variable int_array",
substrs=['1,2,3,4,5'])
# this will fail if we don't do [] as regex correctly
self.run | Cmd(
'type summary add --summary-string "${var[].integer}" "i_am_cool[]')
self.expect("frame variable cool_array",
| substrs=['1,1,1,1,6'])
self.runCmd("type summary clear")
self.runCmd(
"type summary add --summary-string \"${var[1-0]%x}\" \"int\"")
self.expect("frame variable iAmInt",
substrs=['01'])
self.runCmd(
"type summary add --summary-string \"${var[0-1]%x}\" \"int\"")
self.expect("frame variable iAmInt",
substrs=['01'])
self.runCmd("type summary clear")
self.runCmd("type summary add --summary-string \"${var[0-1]%x}\" int")
self.runCmd(
"type summary add --summary-string \"${var[0-31]%x}\" float")
self.expect("frame variable *pointer",
substrs=['0x',
'2'])
# check fix for <rdar://problem/11338654> LLDB crashes when using a
# "type summary" that uses bitfields with no format
self.runCmd("type summary add --summary-string \"${var[0-1]}\" int")
self.expect("frame variable iAmInt",
substrs=['9 1'])
self.expect("frame variable cool_array[3].floating",
substrs=['0x'])
self.runCmd(
"type summary add --summary-string \"low bits are ${*var[0-1]} tgt is ${*var}\" \"int *\"")
self.expect("frame variable pointer",
substrs=['low bits are',
'tgt is 6'])
self.expect(
"frame variable int_array --summary-string \"${*var[0-1]}\"",
substrs=['3'])
self.runCmd("type summary clear")
self.runCmd(
'type summary add --summary-string \"${var[0-1]}\" -x \"int \[[0-9]\]\"')
self.expect("frame variable int_array",
substrs=['1,2'])
self.runCmd(
'type summary add --summary-string \"${var[0-1]}\" "int []"')
self.expect("frame variable int_array",
substrs=['1,2'])
self.runCmd("type summary clear")
self.runCmd("type summary add -c -x \"i_am_cool \[[0-9]\]\"")
self.runCmd("type summary add -c i_am_cool")
self.expect("frame variable cool_array",
substrs=['[0]',
'[1]',
'[2]',
'[3]',
'[4]',
'integer',
'character',
'floating'])
self.runCmd(
"type summary add --summary-string \"int = ${*var.int_pointer}, float = ${*var.float_pointer}\" IWrapPointers")
self.expect("frame variable wrapper",
substrs=['int = 4',
'float = 1.1'])
self.runCmd(
"type summary add --summary-string \"low bits = ${*var.int_pointer[2]}\" IWrapPointers -p")
self.expect("frame variable wrapper",
substrs=['low bits = 1'])
self.expect("frame variable *wrap_pointer",
substrs=['low bits = 1'])
self.runCmd("type summary clear")
self.expect(
"frame variable int_array --summary-string \"${var[0][0-2]%hex}\"",
substrs=[
'0x',
'7'])
self.runCmd("type summary clear")
self.runCmd(
"type summary add --summary-string \"${*var[].x[0-3]%hex} is a bitfield on a set of integers\" -x \"SimpleWithPointers \[[0-9]\]\"")
self.expect(
"frame variable couple --summary-string \"${*var.sp.x[0-2]} are low bits of integer ${*var.sp.x}. If I pretend it is an array I get ${var.sp.x[0-5]}\"",
substrs=[
'1 are low bits of integer 9.',
'If I pretend it is an array I get [9,'])
# if the summary has an error, we still display the value
self.expect(
"frame variable couple --summary-string \"${*var.sp.foo[0-2]\"",
substrs=[
'(Couple) couple = {',
'x = 0x',
'y = 0x',
'z = 0x',
's = 0x'])
self.runCmd(
"type summary add --summary-string \"${*var.sp.x[0-2]} are low bits of integer ${*var.sp.x}. If I pretend it is an array I get ${var.sp.x[0-5]}\" Couple")
self.expect("frame variable sparray",
substrs=['[0x0000000f,0x0000000c,0x00000009]'])
# check that we can format a variable in a summary even if a format is
# defined for its datatype
self.runCmd("type format add -f hex int")
self.runCmd(
"type summary add --summary-string \"x=${var.x%d}\" Simple")
self.expect("frame variable a_simple_object",
substrs=['x=3'])
self.expect("frame variable a_simple_object", matching=False,
substrs=['0x0'])
# now check that the default is applied if we do not hand out a format
self.runCmd("type summary add --summary-string \"x=${var.x}\" Simple")
self.expect("frame variable a_simple_object", matching=False,
substrs=['x=3'])
self.expect("frame variable a_simple_object", matching=True,
substrs=['x=0x00000003'])
# check that we can correctly cap the number of children shown
self.runCmd("settings set target.max-children-count 5")
self.expect('frame variable a_long_guy', matching=True,
substrs=['a_1',
'b_1',
'c_1' |
mila-udem/fuel | fuel/config_parser.py | Python | mit | 7,045 | 0 | """Module level configuration.
Fuel allows module-wide configuration values to be set using a YAML_
configuration file and `environment variables`_. Environment variables
override the configuration file which in its turn overrides the defaults.
The configuration is read from ``~/.fuelrc`` if it exists. A custom
configuration file can be used by setting the ``FUEL_CONFIG`` environment
variable. A configuration file is of the form:
.. code-block:: yaml
data_path: /home/user/datasets
Which could be overwritten by using environment variables:
.. code-block:: bash
$ FUEL_DATA_PATH=/home/users/other_datasets python
This data path is a sequence of paths separated by an os-specific
delimiter (':' for Linux and OSX, ';' for Windows).
If a setting is not configured and does not provide a default, a
:class:`~.ConfigurationError` is raised when it is
accessed.
Configuration values can be accessed as attributes of
:const:`fuel.config`.
>>> from fuel import config
>>> print(config.data_path) # doctest: +SKIP
'~/datasets'
The following configurations are supported:
.. option:: data_path
The path where dataset files are stored. Can also be set using the
environment variable ``FUEL_DATA_PATH``. Expected to be a sequence
of paths separated by an os-specific delimiter (':' for Linux and
OSX, ';' for Windows).
.. todo::
Implement this.
.. option:: floatX
The default :class:`~numpy.dtype` to use for floating point numbers. The
default value is ``float64``. A lower value can save memory.
.. option:: local_data_path
The local path where the dataset is going to be copied. This is a useful
option for slow network file systems. The dataset is copied once to a
local directory and reused later. Currently, caching is implemented
for :class:`H5PYDataset` and therefore for the majority of builtin
datasets. In order to use caching with your own dataset refer to the
caching documentation: :func:`cache_file`.
.. option:: extra_downloaders
A list of package names which, like fuel.downloaders, define an
`all_downloaders` attribute listing available downloaders. By default,
an empty list.
.. option:: extra_converters
A list of package names which, like fuel.converters, define an
`all_converters` attribute listing available converters. By default,
an empty list.
.. _YAML: http://yaml.org/
.. _environment variables:
https://en.wikipedia.org/wiki/Environment_variable
"""
import logging
import os
import six
import yaml
from .exceptions import ConfigurationError
logger = logging.getLogger(__name__)
NOT_SET = object()
def extra_downloader_converter(value):
"""Parses extra_{downloader,converter} arguments.
Parameters
----------
value : iterable or str
If the value is a string, it is split into a list using spaces
as delimiters. Otherwise, it is returned as is.
"""
if isinstance(value, six.string_types):
value = value.split(" ")
return value
def multiple_paths_parser(value):
"""Parses data_path argument.
Parameters
----------
value : str
a string of data paths separated by ":".
Returns
-------
value : list
a list of strings indicating each data paths.
"""
if isinstance(value, six.string_types):
value = value.split(os.path.pathsep)
return value
class Configuration(object):
def __init__(self):
self.config = {}
def load_yaml(self):
if 'FUEL_CONFIG' in os.environ:
yaml_file = os.environ['FUEL_CONFIG']
else:
yaml_file = os.path.expanduser('~/.fuelrc')
if os.path.isfile(yaml_file):
with open(yaml_file) as f:
for key, value in yaml.safe_load(f).items():
if key not in self.config:
raise ValueError("Unrecognized config in YAML: {}"
.format(key))
self.config[key]['yaml'] = value
def __getattr__(self, key):
if key == 'config' or key not in self.config:
raise AttributeError
config_setting = self.config[key]
if 'value' in config_setting:
value = config_setting['value']
elif ('env_var' in config_setting and
config_setting['env_var'] in os.environ):
value = os.environ[config_setting['env_var']]
elif 'yaml' in config_setting:
value = config_setting['yaml']
elif 'default' in config_setting:
value = config_setting['default']
else:
raise ConfigurationError("Configuration not set and no default "
"provided: {}.".format(key))
return config_setting['type'](value)
def __setattr__(self, key, value):
if key != 'config' and key in self.config:
self.config[key]['value'] = value
else:
super(Configuration, self).__setattr__(key, value)
def add_config(self, key, type_, default=NOT_SET, env_var=None):
"""Add a configuration setting.
Parameters
----------
key : str
The name of the configuration setting. This must be a valid
Python attribute name i.e. alphanumeric with underscores.
type : function
A function such as ``float``, ``int`` or ``str`` which takes
the configuration value and returns an object of the correct
type. Note that the values retrieved from environment
variables are always strings, while those retrieved from the
YAML file might already be parsed. Hence, the function provided
here must accept both types of input.
default : object, optional
The default configuration to return if not | set. By default none
is set and an error is raised instead.
env_var : str, optional
The environment variable name that holds this configuration
value. If not given, | this configuration can only be set in the
YAML configuration file.
"""
self.config[key] = {'type': type_}
if env_var is not None:
self.config[key]['env_var'] = env_var
if default is not NOT_SET:
self.config[key]['default'] = default
config = Configuration()
# Define configuration options
config.add_config('data_path', type_=multiple_paths_parser,
env_var='FUEL_DATA_PATH')
config.add_config('local_data_path', type_=str,
env_var='FUEL_LOCAL_DATA_PATH', default="")
config.add_config('default_seed', type_=int, default=1)
config.add_config('extra_downloaders', type_=extra_downloader_converter,
default=[], env_var='FUEL_EXTRA_DOWNLOADERS')
config.add_config('extra_converters', type_=extra_downloader_converter,
default=[], env_var='FUEL_EXTRA_CONVERTERS')
config.add_config('floatX', type_=str, env_var='FUEL_FLOATX')
config.load_yaml()
|
LICEF/edx-platform | common/lib/xmodule/xmodule/modulestore/tests/persistent_factories.py | Python | agpl-3.0 | 3,589 | 0.003622 | """Provides factories for Split."""
from xmodule.modulestore import ModuleStoreEnum
from xmodule.course_module import CourseDescriptor
from xmodule.x_module import XModuleDescriptor
import factory
from factory.helpers import lazy_attribute
from opaque_keys.edx.keys import UsageKey
# Factories don't have __init__ methods, and are self documenting
# pylint: disable=W0232, C0111
class SplitFactory(factory.Factory):
"""
Abstracted superclass which defines modulestore so that there's no dependency on django
if the caller passes modulestore in kwargs
"""
@lazy_attribute
def modulestore(self):
# Delayed import so that we only depend on django if the caller
# hasn't provided their own modulestore
from xmodule.modulestore.django import modulestore
return modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.split)
class PersistentCourseFactory(SplitFactory):
"""
Create a new course (not a new version of a course, but a whole new index entry).
keywords: any xblock field plus (note, the below are filtered out; so, if they
become legitimate xblock fields, they won't be settable via this factory)
* org: defaults to textX
* master_branch: (optional) defaults to ModuleStoreEnum.BranchName.draft
* user_id: (optional) defaults to 'test_user'
* display_name (xblock field): will default to 'Robot Super Course' unless provided
"""
FACTORY_FOR = CourseDescriptor
# pylint: disable=W0613
@classmethod
def _create(cls, target_class, course='999', run='run', org='testX', user_id=ModuleStoreEnum.UserID.test,
master_branch=ModuleStoreEnum.BranchName.draft, **kwargs):
modulestore = kwargs.pop('modulestore')
root_block_id = kwargs.pop('root_block_id', 'course')
# Write the data to the mongo datastore
new_course = modulestore.create_course(
org, course, run, user_id, fields=kwargs,
master_branch=master_branch, root_block_id=root_block_id
)
return new_course
@classmethod
def _build(cls, target_class, *args, **kwargs):
raise NotImplementedError()
class ItemFactory(SplitFactory):
FACTORY_FOR | = XModuleDescriptor
display_name = factory.LazyAttributeSequence(lambda o, n: "{} {}".format(o.category, n))
# pylint: disable=W0613
@classmethod
def _create(cls, target_class, parent_location, category='ch | apter',
user_id=ModuleStoreEnum.UserID.test, definition_locator=None, force=False,
continue_version=False, **kwargs):
"""
passes *kwargs* as the new item's field values:
:param parent_location: (required) the location of the course & possibly parent
:param category: (defaults to 'chapter')
:param definition_locator (optional): the DescriptorLocator for the definition this uses or branches
"""
modulestore = kwargs.pop('modulestore')
if isinstance(parent_location, UsageKey):
return modulestore.create_child(
user_id, parent_location, category, defintion_locator=definition_locator,
force=force, continue_version=continue_version, **kwargs
)
else:
return modulestore.create_item(
user_id, parent_location, category, defintion_locator=definition_locator,
force=force, continue_version=continue_version, **kwargs
)
@classmethod
def _build(cls, target_class, *args, **kwargs):
raise NotImplementedError()
|
yaunj/hogments | hogments/hog.py | Python | bsd-2-clause | 4,940 | 0.001014 | #!/usr/bin/env python
# coding: utf-8
#
# License: BSD; see LICENSE for more details.
from pygments.lexer import RegexLexer, include, bygroups
import pygments.token as t
class SnortLexer(RegexLexer):
name = 'Snort'
aliases = ['snort', 'hog']
filenames = ['*.rules']
tokens = {
'root': [
(r'#.*$', t.Comment),
(r'(\$\w+)', t.Name.Variable),
(r'\b(any|(\d{1,3}\.){3}\d{1,3}(/\d+)?)', t.Name.Variable),
(r'^\s*(log|pass|alert|activate|dynamic|drop|reject|sdrop|'
r'ruletype|var|portvar|ipvar)',
t.Keyword.Type),
(r'\b(metadata)(?:\s*:)', t.Keyword, 'metadata'),
(r'\b(reference)(?:\s*:)', t.Keyword, 'reference'),
(r'\b(msg|reference|gid|sid|rev|classtype|priority|metadata|'
r'content|http_encode|uricontent|urilen|isdataat|pcre|pkt_data|'
r'file_data|base64_decode|base64_data|byte_test|byte_jump|'
r'byte_extract|ftp_bounce|pcre|asn1|cvs|dce_iface|dce_opnum|'
r'dce_stub_data|sip_method|sip_stat_code|sip_header|sip_body|'
r'gtp_type|gtp_info|gtp_version|ssl_version|ssl_state|nocase|'
r'rawbytes|depth|offset|distance|within|http_client_body|'
r'http_cookie|http_raw_cookie|http_header|http_raw_header|'
r'http_method|http_uri|http_raw_uri|http_stat_code|'
r'http_stat_msg|fast_pattern|fragoffset|fragbits|'
r'ttl|tos|id|ipopts|dsize|flags|flow|flowbits|seq|ack|window|'
r'itype|icode|icmp_id|icmp_seq|rpc|ip_proto|sameip|'
r'stream_reassemble|stream_size|logto|session|resp|react|tag|'
r'activates|activated_by|replace|detection_filter|treshold)'
r'(?:\s*:)',
t.Keyword),
(r'\b(tc | p|udp|icmp|ip)', t.Keyword.Constant),
(r'\b(hex|dec|oct|string|type|output|any|engine|soid|service|'
r'norm|raw|relative|bytes|big|little|align|invalid-entry|'
r'enable|disable|client|server|both|either|printable|binary|'
r'all|session|host|packets|seconds|bytes|src|dst|track|by_src|'
r'by_dst|uri|header|cookie|utf8|double_encode|non_ascii|'
r'uenc | ode|bare_byte|ascii|iis_encode|bitstring_overflow|'
r'double_overflow|oversize_length|absolute_offset|'
r'relative_offset|rr|eol|nop|ts|sec|esec|lsrr|lsrre|'
r'ssrr|satid|to_client|to_server|from_client|from_server|'
r'established|not_established|stateless|no_stream|only_stream|'
r'no_frag|only_frag|set|setx|unset|toggle|isset|isnotset|'
r'noalert|limit|treshold|count|str_offset|str_depth|tagged)',
t.Name.Attribute),
(r'(<-|->|<>)', t.Operator),
(ur'”', t.String, 'fancy-string'),
(ur'“', t.String, 'fancy-string'),
(r'"', t.String, 'dq-string'),
(r'\'', t.String, 'sq-string'),
(r'(\d+)', t.Number),
(r';', t.Punctuation),
(r'\\', t.String.Escape),
(r'\s+', t.Whitespace),
],
'hex': [
(r'\|([a-fA-F0-9 ]+)\|', t.Number.Hex),
],
'dq-string': [
include('hex'),
(r'([^"])', t.String),
(r'"', t.String, '#pop')
],
'sq-string': [
include('hex'),
(r'([^\'])', t.String),
(r'\'', t.String, '#pop')
],
'fancy-string': [
include('hex'),
(ur'([^”])', t.String),
(ur'”', t.String, '#pop')
],
'metadata': [
(r'\s', t.Whitespace),
(r'([\w_-]+)(\s+)([\w_-]+)',
bygroups(t.Name.Variable, t.Whitespace, t.Name.Attribute)),
(r';', t.Punctuation, '#pop'),
],
'reference': [
(r'(\w+)(,)(?:\s*)([^;]+)',
bygroups(t.Name.Variable, t.Punctuation, t.Name.Attribute)),
(r';', t.Punctuation, '#pop')
]
}
if __name__ == '__main__':
from pygments import highlight
from pygments.formatters import Terminal256Formatter
from sys import argv
if len(argv) > 1:
import io
for arg in argv[1:]:
input = io.open(arg, 'r')
code = input.read(-1)
print("Highlighting " + arg)
print(highlight(code, SnortLexer(encoding='chardet'),
Terminal256Formatter(encoding='utf-8')))
else:
code = """
alert tcp $HOME_NET any -> 192.168.1.0/24 111 (content:"|00 01 86 a5|"; msg: "mountd access";)
alert tcp any any -> any 21 (content:"site exec"; content:"%"; msg:"site exec buffer overflow attempt";)
alert tcp !192.168.1.0/24 any -> 192.168.1.0/24 111 (content: "|00 01 86 a5|"; msg: "external mountd access";)
"""
print(highlight(code, SnortLexer(), Terminal256Formatter()))
|
lzw120/django | django/core/files/base.py | Python | bsd-3-clause | 4,071 | 0.000737 | import os
from io import BytesIO
from django.utils.encoding import smart_str, smart_unicode
from django.core.files.utils import FileProxyMixin
class File(FileProxyMixin):
DEFAULT_CHUNK_SIZE = 64 * 2**10
def __init__(self, file, name=None):
self.file = file
if name is None:
name = getattr(file, 'name', None)
self.name = name
self.mode = getattr(file, 'mode', None)
def __str__(self):
return smart_str(self.name or '')
def __unicode__(self):
return smart_unicode(self.name or u'')
def __repr__(self):
| return "<%s: %s>" % (self.__class__.__name__, self or "None")
def __nonzero__(self):
return bool(self.name)
def __len__(self):
return self.size
def _get_size(self):
if not hasattr(self, '_size'):
if hasattr(self.file, 'size'):
self._size = self.file.size
elif hasattr(self.file, 'name') and os.path.exists(self.file.name):
self._size = os.path.getsize(self.file.name)
elif hasattr(sel | f.file, 'tell') and hasattr(self.file, 'seek'):
pos = self.file.tell()
self.file.seek(0, os.SEEK_END)
self._size = self.file.tell()
self.file.seek(pos)
else:
raise AttributeError("Unable to determine the file's size.")
return self._size
def _set_size(self, size):
self._size = size
size = property(_get_size, _set_size)
def _get_closed(self):
return not self.file or self.file.closed
closed = property(_get_closed)
def chunks(self, chunk_size=None):
"""
Read the file and yield chucks of ``chunk_size`` bytes (defaults to
``UploadedFile.DEFAULT_CHUNK_SIZE``).
"""
if not chunk_size:
chunk_size = self.DEFAULT_CHUNK_SIZE
if hasattr(self, 'seek'):
self.seek(0)
while True:
data = self.read(chunk_size)
if not data:
break
yield data
def multiple_chunks(self, chunk_size=None):
"""
Returns ``True`` if you can expect multiple chunks.
NB: If a particular file representation is in memory, subclasses should
always return ``False`` -- there's no good reason to read from memory in
chunks.
"""
if not chunk_size:
chunk_size = self.DEFAULT_CHUNK_SIZE
return self.size > chunk_size
def __iter__(self):
# Iterate over this file-like object by newlines
buffer_ = None
for chunk in self.chunks():
chunk_buffer = BytesIO(chunk)
for line in chunk_buffer:
if buffer_:
line = buffer_ + line
buffer_ = None
# If this is the end of a line, yield
# otherwise, wait for the next round
if line[-1] in ('\n', '\r'):
yield line
else:
buffer_ = line
if buffer_ is not None:
yield buffer_
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
self.close()
def open(self, mode=None):
if not self.closed:
self.seek(0)
elif self.name and os.path.exists(self.name):
self.file = open(self.name, mode or self.mode)
else:
raise ValueError("The file cannot be reopened.")
def close(self):
self.file.close()
class ContentFile(File):
"""
A File-like object that takes just raw content, rather than an actual file.
"""
def __init__(self, content, name=None):
content = content or ''
super(ContentFile, self).__init__(BytesIO(content), name=name)
self.size = len(content)
def __str__(self):
return 'Raw content'
def __nonzero__(self):
return True
def open(self, mode=None):
self.seek(0)
def close(self):
pass
|
kaisero/fxosREST | fxos.py | Python | gpl-3.0 | 14,707 | 0.002108 | import json
import requests
import copy
from requests.exceptions import ConnectionError
from distutils.version import LooseVersion
requests.packages.urllib3.disable_warnings()
HEADERS = {
'Content-Type': 'application/json',
'Accept': 'application/json',
'User-Agent': 'fxosREST'
}
class FXOSApiException(Exception):
pass
class FXOSAuthException(Exception):
pass
class FXOS(object):
def __init__(self, hostname, username, password, protocol='https', base_url='/api', auth_url='/login', logger=None,
verify_cert=False, timeout=30):
self.logger = logger
self.hostname = hostname
self.username = username
self.password = password
self.protocol = protocol
self.base_url = base_url
self.auth_url = auth_url
self.verify_cert = verify_cert
| self.timeout = timeout
self.headers = copy.copy(HEADERS)
se | lf.headers['token'] = self._login()
def _login(self):
try:
request_url = '{0}{1}'.format(self._url(), self.auth_url)
request_headers = copy.copy(HEADERS)
request_headers['USERNAME'] = self.username
request_headers['PASSWORD'] = self.password
response = requests.post(request_url, headers=request_headers, verify=self.verify_cert)
payload = response.json()
if 'token' not in payload:
raise FXOSApiException('Could not retrieve token from {0}.'.format(request_url))
if response.status_code == 400:
if '551' in response.content:
raise FXOSAuthException('FX-OS API Authentication to {0} failed.'.format(self.hostname))
if '552' in response.content:
raise FXOSAuthException('FX-OS API Authorization to {0} failed'.format(self.hostname))
return payload['token']
except ConnectionError:
self.logger.error(
'Could not connect to {0}. Max retries exceeded with url: {1}'.format(self.hostname, request_url))
except FXOSApiException as exc:
self.logger.error(exc.message)
except Exception as exc:
self.logger.exception(exc.message)
def _url(self):
return '{0}://{1}{2}'.format(self.protocol, self.hostname, self.base_url)
def _delete(self, request, headers=None):
url = '{0}{1}'.format(self._url(), request)
headers = self.headers if headers is None else headers
response = requests.delete(url, headers=headers, verify=self.verify_cert, timeout=self.timeout)
return self._validate(response)
def _get(self, request, headers=None):
url = '{0}{1}'.format(self._url(), request)
headers = self.headers if headers is None else headers
response = requests.get(url, headers=headers, verify=self.verify_cert, timeout=self.timeout)
return self._validate(response)
def _patch(self, request, data, headers=None):
url = '{0}{1}'.format(self._url(), request)
headers = self.headers if headers is None else headers
response = requests.patch(url, data=json.dumps(data), headers=headers, verify=self.verify_cert,
timeout=self.timeout)
return self._validate(response)
def _put(self, request, data, headers=None):
url = '{0}{1}'.format(self._url(), request)
headers = self.headers if headers is None else headers
response = requests.put(url, data=json.dumps(data), headers=headers, verify=self.verify_cert,
timeout=self.timeout)
return self._validate(response)
def _post(self, request, data=False, headers=None):
url = '{0}{1}'.format(self._url(), request)
headers = self.headers if headers is None else headers
if data:
response = requests.post(url, data=json.dumps(data), headers=headers, verify=self.verify_cert,
timeout=self.timeout)
else:
response = requests.post(url, headers=headers, verify=self.verify_cert, timeout=self.timeout)
return self._validate(response)
def _validate(self, response):
try:
if response.status_code == 400:
if '552' in response.content:
raise FXOSAuthException('FX-OS API Authorization to {0} failed'.format(self.hostname))
if '101' in response.content:
raise FXOSApiException(
'Request {0} failed. Error communicating with FX-OS API backend.'.format(response.request))
raise FXOSApiException('Request {0} failed with response code {1}. Eror message: {2}\nDetails: {3}'
.format(response.url, response.status_code, response.reason,
response.content))
except FXOSAuthException as exc:
self.headers['token'] = self._login()
self.logger.error(exc.message)
except FXOSApiException as exc:
self.logger.error(exc.message)
finally:
return response
def get_physical_interface(self, id=None):
request = '/ports/ep' if id is None else '/ports/ep/{0}'.format(id.replace('/', '_API_SLASH_'))
return self._get(request)
def get_portchannel_interface(self, id=None):
request = '/ports/pc' if id is None else '/ports/pc/{0}'.format(id)
return self._get(request)
def set_portchannel_interface(self, data):
request = '/ports/pc'
return self._post(request, data)
def update_portchannel_interface(self, data):
request = '/ports/pc'
return self._patch(request, data)
def delete_portchannel_interface(self, data):
request = '/ports/pc'
return self._delete(request, data)
def get_slot(self, id=None):
request = '/slot' if id is None else '/slot/{0}'.format(id)
return self._get(request)
def update_slot(self, id=None):
request = '/slot' if id is None else '/slot/{0}'.format(id)
return self._patch(request)
def get_app(self, id=None):
request = '/app' if id is None else '/app/{0}'.format(id)
return self._get(request)
def update_app(self, data):
request = '/app'
return self._patch(request, data)
def delete_app(self, data):
request = '/app'
return self._delete(request, data)
def get_app_instance(self, slot_id=None, app_id=None):
request = '?classId=smAppInstance' if app_id is None else '/slot/{0}/app-inst/{1}'.format(slot_id, app_id)
return self._get(request)
def update_app_instance(self, slot_id, app_id, data):
request = '/slot/{0}/app-inst/{1}'.format(slot_id, app_id)
return self._patch(request, data)
def delete_app_instance(self, slot_id, app_id):
request = '/slot/{0}/app-inst/{1}'.format(slot_id, app_id)
return self._delete(request)
def set_download_app(self, data):
request = '/sys/app-catalogue'
return self._post(request, data)
def update_download_app(self, data, overwrite=False):
request = '/sys/app-catalogue'
if overwrite:
return self._put(request, data)
return self._patch(request, data)
def get_download_app(self):
request = '/sys/app-catalogue'
return self._get(request)
def get_logical_device(self, id=None):
request = '/ld' if id is None else '/ld/{0}'.format(id)
return self._get(request)
def set_logical_device(self, data):
request = '/ld'
return self._post(request, data)
def delete_logical_device(self, id):
request = '/ld/{0}'.format(id)
return self._delete(request)
def get_firmware_packages(self, id=None):
request = '/sys/firmware/distrib' if id is None else '/sys/firmware/distrib/{0}'.format(id)
return self._get(request)
def get_firmware_kernel(self):
request = '/sys/firmware/version/kernel'
return self._get(request)
def get_firmware_system(self):
|
runn1ng/python-trezor | tests/test_msg_signtx.py | Python | lgpl-3.0 | 49,207 | 0.007153 | import unittest
import common
import binascii
import itertools
import trezorlib.messages_pb2 as proto
import trezorlib.types_pb2 as proto_types
from trezorlib.client import CallException
from trezorlib.tx_api import TXAPITestnet
class TestMsgSigntx(common.TrezorTest):
def test_one_one_fee(self):
self.setup_mnemonic_nopin_nopassphrase()
# tx: d5f65ee80147b4bcc70b75e4bbf2d7382021b871bd8867ef8fa525ef50864882
# input 0: 0.0039 BTC
inp1 = proto_types.TxInputType(address_n=[0], # 14LmW5k4ssUrtbAB4255zdqv3b4w1TuX9e
# amount=390000,
prev_hash=binascii.unhexlify('d5f65ee80147b4bcc70b75e4bbf2d7382021b871bd8867ef8fa525ef50864882'),
prev_index=0,
)
out1 = proto_types.TxOutputType(address='1MJ2tj2ThBE62zXbBYA5ZaN3fdve5CPAz1',
amount=390000 - 10000,
script_type=proto_types.PAYTOADDRESS,
)
with self.client:
self.client.set_expected_responses([
proto.TxRequest(request_type=proto_types.TXINPUT, details=proto_types.TxRequestDetailsType(request_index=0)),
proto.TxRequest(request_type=proto_types.TXMETA, details=proto_types.TxRequestDetailsType(tx_hash=binascii.unhexlify("d5f65ee80147b4bcc70b75e4bbf2d7382021b871bd8867ef8fa525ef50864882"))),
proto.TxRequest(request_type=proto_types.TXINPUT, details=proto_types.TxRequestDetailsType(request_index=0, tx_hash=binascii.unhexlify("d5f65ee80147b4bcc70b75e4bbf2d7382021b871bd8867ef8fa525ef50864882"))),
proto.TxRequest(request_type=proto_types.TXINPUT, details=proto_types.TxRequestDetailsType(request_index=1, tx_hash=binascii.unhexlify("d5f65ee80147b4bcc70b75e4bbf2d7382021b871bd8867ef8fa525ef50864882"))),
proto.TxRequest(request_type=proto_types.TXOUTPUT, details=proto_types.TxRequestDetailsType(request_index=0, tx_hash=binascii.unhexlify("d5f65ee80147b4bcc70b75e4bbf2d7382021b871bd8867ef8fa525ef50864882"))),
proto.TxRequest(request_type=proto_types.TXOUTPUT, details=proto_types.TxRequestDetailsType(request_index=0)),
proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput),
proto.ButtonRequest(code=proto_types.ButtonRequest_SignTx),
proto.TxRequest(request_type=proto_types.TXINPUT, details=proto_types.TxRequestDetailsType(request_index=0)),
proto.TxRequest(request_type=proto_types.TXOUTPUT, details=proto_types.TxRequestDetailsType(request_ind | ex=0)),
| proto.TxRequest(request_type=proto_types.TXOUTPUT, details=proto_types.TxRequestDetailsType(request_index=0)),
proto.TxRequest(request_type=proto_types.TXFINISHED),
])
(signatures, serialized_tx) = self.client.sign_tx('Bitcoin', [inp1, ], [out1, ])
# Accepted by network: tx fd79435246dee76b2f159d2db08032d666c95adc544de64c8c49f474df4a7fee
self.assertEqual(binascii.hexlify(serialized_tx), '010000000182488650ef25a58fef6788bd71b8212038d7f2bbe4750bc7bcb44701e85ef6d5000000006b4830450221009a0b7be0d4ed3146ee262b42202841834698bb3ee39c24e7437df208b8b7077102202b79ab1e7736219387dffe8d615bbdba87e11477104b867ef47afed1a5ede7810121023230848585885f63803a0a8aecdd6538792d5c539215c91698e315bf0253b43dffffffff0160cc0500000000001976a914de9b2a8da088824e8fe51debea566617d851537888ac00000000')
def test_testnet_one_two_fee(self):
self.setup_mnemonic_nopin_nopassphrase()
# tx: 6f90f3c7cbec2258b0971056ef3fe34128dbde30daa9c0639a898f9977299d54
# input 1: 10.00000000 BTC
inp1 = proto_types.TxInputType(address_n=[0], # mirio8q3gtv7fhdnmb3TpZ4EuafdzSs7zL
# amount=1000000000,
prev_hash=binascii.unhexlify('6f90f3c7cbec2258b0971056ef3fe34128dbde30daa9c0639a898f9977299d54'),
prev_index=1,
)
out1 = proto_types.TxOutputType(address='mfiGQVPcRcaEvQPYDErR34DcCovtxYvUUV',
amount=1000000000 - 500000000 - 10000000,
script_type=proto_types.PAYTOADDRESS,
)
out2 = proto_types.TxOutputType(address_n=[2],
amount=500000000,
script_type=proto_types.PAYTOADDRESS,
)
with self.client:
self.client.set_tx_api(TXAPITestnet())
self.client.set_expected_responses([
proto.TxRequest(request_type=proto_types.TXINPUT, details=proto_types.TxRequestDetailsType(request_index=0)),
proto.TxRequest(request_type=proto_types.TXMETA, details=proto_types.TxRequestDetailsType(tx_hash=binascii.unhexlify("6f90f3c7cbec2258b0971056ef3fe34128dbde30daa9c0639a898f9977299d54"))),
proto.TxRequest(request_type=proto_types.TXINPUT, details=proto_types.TxRequestDetailsType(request_index=0, tx_hash=binascii.unhexlify("6f90f3c7cbec2258b0971056ef3fe34128dbde30daa9c0639a898f9977299d54"))),
proto.TxRequest(request_type=proto_types.TXINPUT, details=proto_types.TxRequestDetailsType(request_index=1, tx_hash=binascii.unhexlify("6f90f3c7cbec2258b0971056ef3fe34128dbde30daa9c0639a898f9977299d54"))),
proto.TxRequest(request_type=proto_types.TXOUTPUT, details=proto_types.TxRequestDetailsType(request_index=0, tx_hash=binascii.unhexlify("6f90f3c7cbec2258b0971056ef3fe34128dbde30daa9c0639a898f9977299d54"))),
proto.TxRequest(request_type=proto_types.TXOUTPUT, details=proto_types.TxRequestDetailsType(request_index=1, tx_hash=binascii.unhexlify("6f90f3c7cbec2258b0971056ef3fe34128dbde30daa9c0639a898f9977299d54"))),
proto.TxRequest(request_type=proto_types.TXOUTPUT, details=proto_types.TxRequestDetailsType(request_index=0)),
proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput),
proto.TxRequest(request_type=proto_types.TXOUTPUT, details=proto_types.TxRequestDetailsType(request_index=1)),
proto.ButtonRequest(code=proto_types.ButtonRequest_SignTx),
proto.TxRequest(request_type=proto_types.TXINPUT, details=proto_types.TxRequestDetailsType(request_index=0)),
proto.TxRequest(request_type=proto_types.TXOUTPUT, details=proto_types.TxRequestDetailsType(request_index=0)),
proto.TxRequest(request_type=proto_types.TXOUTPUT, details=proto_types.TxRequestDetailsType(request_index=1)),
proto.TxRequest(request_type=proto_types.TXOUTPUT, details=proto_types.TxRequestDetailsType(request_index=0)),
proto.TxRequest(request_type=proto_types.TXOUTPUT, details=proto_types.TxRequestDetailsType(request_index=1)),
proto.TxRequest(request_type=proto_types.TXFINISHED),
])
(signatures, serialized_tx) = self.client.sign_tx('Testnet', [inp1, ], [out1, out2])
self.assertEqual(binascii.hexlify(serialized_tx), '0100000001549d2977998f899a63c0a9da30dedb2841e33fef561097b05822eccbc7f3906f010000006b4830450221009c2d30385519fdb13dce13d5ac038be07d7b2dad0b0f7b2c1c339d7255bcf553022056a2f5bceab3cd0ffed4d388387e631f419d67ff9ce7798e3d7dfe6a6d6ec4bd0121023230848585885f63803a0a8aecdd6538792d5c539215c91698e315bf0253b43dffffffff0280ce341d000000001976a9140223b1a09138753c9cb0baf95a0a62c82711567a88ac0065cd1d000000001976a9142db345c36563122e2fd0f5485fb7ea9bbf7cb5a288ac00000000')
def test_testnet_fee_too_high(self):
self.setup_mnemonic_nopin_nopassphrase()
# tx: 6f90f3c7cbec2258b0971056ef3fe34128dbde30daa9c0639a898f9977299d54
# input 1: 10.00000000 BTC
inp1 = proto_types.TxInputType(address_n=[0], # mirio8q3gtv7fhdnmb3TpZ4EuafdzSs7zL
# amount=1000000000,
prev_hash=binascii.unhexlify('6f90f3c7cbec2258b0971056ef3fe34128dbde30daa9c0639a898f9977299d54'),
prev_index=1,
)
out1 = proto_t |
whip112/Whip112 | kuma/wiki/tests/test_models.py | Python | mpl-2.0 | 75,272 | 0.000292 | from cStringIO import StringIO
from datetime import date, datetime, timedelta
import json
import time
from xml.sax.saxutils import escape
import mo | ck
f | rom nose.tools import eq_, ok_
from nose.plugins.attrib import attr
from nose import SkipTest
from django.conf import settings
from django.core.exceptions import ValidationError
from constance import config
from waffle.models import Switch
from kuma.core.exceptions import ProgrammingError
from kuma.core.tests import override_constance_settings, KumaTestCase
from kuma.users.tests import UserTestCase
from . import (document, revision, doc_rev, normalize_html,
create_template_test_users, create_topical_parents_docs)
from .. import tasks
from ..constants import REDIRECT_CONTENT
from ..exceptions import (PageMoveError,
DocumentRenderedContentNotAvailable,
DocumentRenderingInProgress)
from ..jobs import DocumentZoneStackJob
from ..models import (Document, Revision, RevisionIP, DocumentZone,
TaggedDocument)
def _objects_eq(manager, list_):
"""Assert that the objects contained by `manager` are those in `list_`."""
eq_(set(manager.all()), set(list_))
def redirect_rev(title, redirect_to):
return revision(
document=document(title=title, save=True),
content='REDIRECT [[%s]]' % redirect_to,
is_approved=True,
save=True)
class DocumentTests(UserTestCase):
"""Tests for the Document model"""
@attr('bug875349')
def test_json_data(self):
# Set up a doc with tags
doc, rev = doc_rev('Sample document')
doc.save()
expected_tags = sorted(['foo', 'bar', 'baz'])
expected_review_tags = sorted(['tech', 'editorial'])
doc.tags.set(*expected_tags)
doc.current_revision.review_tags.set(*expected_review_tags)
# Create a translation with some tags
de_doc = document(parent=doc, locale='de', save=True)
revision(document=de_doc, save=True)
expected_l10n_tags = ['inprogress']
de_doc.current_revision.localization_tags.set(*expected_l10n_tags)
de_doc.tags.set(*expected_tags)
de_doc.current_revision.review_tags.set(*expected_review_tags)
# Ensure the doc's json field is empty at first
eq_(None, doc.json)
# Get JSON data for the doc, and ensure the doc's json field is now
# properly populated.
data = doc.get_json_data()
eq_(json.dumps(data), doc.json)
# Load up another copy of the doc from the DB, and check json
saved_doc = Document.objects.get(pk=doc.pk)
eq_(json.dumps(data), saved_doc.json)
# Check the fields stored in JSON of the English doc
# (the fields are created in build_json_data in models.py)
eq_(doc.title, data['title'])
eq_(doc.title, data['label'])
eq_(doc.get_absolute_url(), data['url'])
eq_(doc.id, data['id'])
eq_(doc.slug, data['slug'])
result_tags = sorted([str(x) for x in data['tags']])
eq_(expected_tags, result_tags)
result_review_tags = sorted([str(x) for x in data['review_tags']])
eq_(expected_review_tags, result_review_tags)
eq_(doc.locale, data['locale'])
eq_(doc.current_revision.summary, data['summary'])
eq_(doc.modified.isoformat(), data['modified'])
eq_(doc.current_revision.created.isoformat(), data['last_edit'])
# Check fields of translated doc
ok_('translations' in data)
eq_(de_doc.locale, data['translations'][0]['locale'])
result_l10n_tags = sorted([str(x) for x
in data['translations'][0]['localization_tags']])
eq_(expected_l10n_tags, result_l10n_tags)
result_tags = sorted([str(x) for x in data['translations'][0]['tags']])
eq_(expected_tags, result_tags)
result_review_tags = sorted([str(x) for x
in data['translations'][0]['review_tags']])
eq_(expected_review_tags, result_review_tags)
eq_(de_doc.current_revision.summary, data['translations'][0]['summary'])
eq_(de_doc.title, data['translations'][0]['title'])
def test_document_is_template(self):
"""is_template stays in sync with the title"""
d = document(title='test')
d.save()
assert not d.is_template
d.slug = 'Template:test'
d.save()
assert d.is_template
d.slug = 'Back-to-document'
d.save()
assert not d.is_template
def test_error_on_delete(self):
"""Ensure error-on-delete is only thrown when waffle switch active"""
switch = Switch.objects.create(name='wiki_error_on_delete')
for active in (True, False):
switch.active = active
switch.save()
d = document()
d.save()
try:
d.delete()
if active:
self.fail('Exception on delete when active')
except Exception:
if not active:
self.fail('No exception on delete when not active')
def test_delete_tagged_document(self):
"""Make sure deleting a tagged doc deletes its tag relationships."""
# TODO: Move to wherever the tests for TaggableMixin are.
# This works because Django's delete() sees the `tags` many-to-many
# field (actually a manager) and follows the reference.
d = document()
d.save()
d.tags.add('grape')
eq_(1, TaggedDocument.objects.count())
d.delete()
eq_(0, TaggedDocument.objects.count())
def _test_m2m_inheritance(self, enum_class, attr, direct_attr):
"""Test a descriptor's handling of parent delegation."""
parent = document()
child = document(parent=parent, title='Some Other Title')
e1 = enum_class(item_id=1)
parent.save()
# Make sure child sees stuff set on parent:
getattr(parent, attr).add(e1)
_objects_eq(getattr(child, attr), [e1])
# Make sure parent sees stuff set on child:
child.save()
e2 = enum_class(item_id=2)
getattr(child, attr).add(e2)
_objects_eq(getattr(parent, attr), [e1, e2])
# Assert the data are attached to the parent, not the child:
_objects_eq(getattr(parent, direct_attr), [e1, e2])
_objects_eq(getattr(child, direct_attr), [])
def test_category_inheritance(self):
"""A document's categories must always be those of its parent."""
some_category = Document.CATEGORIES[1][0]
other_category = Document.CATEGORIES[0][0]
# Notice if somebody ever changes the default on the category field,
# which would invalidate our test:
assert some_category != document().category
parent = document(category=some_category)
parent.save()
child = document(parent=parent, locale='de')
child.save()
# Make sure child sees stuff set on parent:
eq_(some_category, child.category)
# Child'd category should revert to parent's on save:
child.category = other_category
child.save()
eq_(some_category, child.category)
# Changing the parent category should change the child's:
parent.category = other_category
parent.save()
eq_(other_category,
parent.translations.get(locale=child.locale).category)
def _test_int_sets_and_descriptors(self, enum_class, attr):
"""Test our lightweight int sets & descriptors' getting and setting."""
d = document()
d.save()
_objects_eq(getattr(d, attr), [])
i1 = enum_class(item_id=1)
getattr(d, attr).add(i1)
_objects_eq(getattr(d, attr), [i1])
i2 = enum_class(item_id=2)
getattr(d, attr).add(i2)
_objects_eq(getattr(d, attr), [i1, i2])
def test_only_localizable_allowed_children(self):
"""You can't have children for a non-localizable document."""
# Make English rev:
en_doc = document(is_localizable=F |
nimbusproject/epumgmt | src/python/tests/test_epumgmt_defaults_cloudinitd_load.py | Python | apache-2.0 | 3,064 | 0.001958 | import os
import types
import tempfile
import ConfigParser
from cloudinitd.user_api import CloudInitD
import epumgmt.defaults.cloudinitd_load
import epumgmt.main.em_args as em_args
from epumgmt.api.exceptions imp | ort ProgrammingError, IncompatibleEnvironment
from epumgmt.defaults.runlogs import DefaultRunlogs
from epumgmt.defaults.parameters import DefaultParameters
from mocks.common import FakeCommon
from mocks.modules import FakeModules
from mocks.modules import make_fake_scp_command_str
from mocks.remote_svc_adapter import FakeRemoteSvcAdapter
class TestCloudinitdLoad:
def setup(self):
"""
Build a fake test environment, with the sleep | ers cloudinit.d plan.
We can grab all logged messages from c.log.transcript.
"""
self.test_run_name = "TESTRUN"
self.config = ConfigParser.RawConfigParser()
self.config.add_section("events")
self.runlogdir = tempfile.mkdtemp()
self.config.set("events", "runlogdir", self.runlogdir)
self.vmlogdir = tempfile.mkdtemp()
self.config.set("events", "vmlogdir", self.vmlogdir)
self.optdict = {}
self.optdict[em_args.NAME.name] = self.test_run_name
self.params = DefaultParameters(self.config, None)
self.params.optdict = self.optdict
remote_svc_adapter = FakeRemoteSvcAdapter()
self.common = FakeCommon()
self.modules = FakeModules(remote_svc_adapter=remote_svc_adapter)
# Note that we monkey-patch the get_scp_command_str function
# to prepend "echo" to it. That way we can still allow the
# command to be run, but we can still see how it actually gets
# constructed
runlogs = DefaultRunlogs(self.params, self.common)
runlogs.validate()
self.modules.runlogs = runlogs
new_get_scp = make_fake_scp_command_str(runlogs, runlogs.get_scp_command_str)
self.modules.runlogs.get_scp_command_str = types.MethodType(new_get_scp, self.modules.runlogs)
self.test_dir = os.path.dirname(__file__)
self.test_db_dir = tempfile.mkdtemp()
self.test_cd_config = os.path.join(self.test_dir, "configs/main.conf")
self.cloudinitd = CloudInitD(self.test_db_dir, self.test_cd_config, self.test_run_name)
def test_get_cloudinitd_service(self):
from epumgmt.defaults.cloudinitd_load import get_cloudinitd_service
try:
get_cloudinitd_service(None, None)
except ProgrammingError:
no_cloudinitd_programming_error = True
assert no_cloudinitd_programming_error
try:
get_cloudinitd_service(self.cloudinitd, None)
except ProgrammingError:
no_service_name_programming_error = True
assert no_service_name_programming_error
nonexistant_svc = "notreal"
try:
service = get_cloudinitd_service(self.cloudinitd, nonexistant_svc)
except IncompatibleEnvironment:
no_service_incompatible_env = True
assert no_service_incompatible_env
|
eunchong/build | scripts/slave/recipe_modules/commit_position/api.py | Python | bsd-3-clause | 2,897 | 0.009665 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this so | urce code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
from recipe_engine import recipe_a | pi
class CommitPositionApi(recipe_api.RecipeApi):
"""Recipe module providing commit position parsing and manipulation."""
RE_COMMIT_POSITION = re.compile('(?P<branch>.+)@{#(?P<revision>\d+)}')
COMMIT_POS_STR = '^Cr-Commit-Position: refs/heads/master@{#%d}'
@classmethod
def parse(cls, value):
match = cls.RE_COMMIT_POSITION.match(value)
if not match:
raise ValueError("Invalid commit position (%s)" % (value,))
return match.group('branch'), int(match.group('revision'))
@classmethod
def parse_branch(cls, value):
branch, _ = cls.parse(value)
return branch
@classmethod
def parse_revision(cls, value):
_, revision = cls.parse(value)
return revision
@classmethod
def construct(cls, branch, value):
value = int(value)
return '%(branch)s@{#%(value)d}' % {
'branch': branch,
'value': value,
}
def chromium_hash_from_commit_position(self, commit_pos):
"""Resolve a commit position in the chromium repo to its commit hash."""
try:
int_pos = int(commit_pos)
except ValueError:
raise self.m.step.StepFailure('Invalid commit position (%s).'
% (commit_pos,))
step_result = self.m.git('log', '--format=hash:%H', '--grep',
self.COMMIT_POS_STR % int_pos, '-1',
'origin/master',
stdout=self.m.raw_io.output(),
name='resolving commit_pos ' + str(commit_pos))
try:
result_line = [line for line in step_result.stdout.splitlines()
if line.startswith('hash:')][0]
result = result_line.split(':')[1]
int(result, 16)
return result
except (IndexError, ValueError): # pragma: no cover
raise self.m.step.StepFailure(
'Could not parse commit hash from git log output' +
step_result.stdout)
def chromium_commit_position_from_hash(self, sha):
"""Resolve a chromium commit hash to its commit position."""
try:
assert int(sha, 16)
sha = str(sha) # Unicode would break the step when passed in the name
except (AssertionError, ValueError):
raise self.m.step.StepFailure('Invalid commit hash: ' + sha)
step_result = self.m.git('footers', '--position', sha,
stdout=self.m.raw_io.output(),
name='resolving hash ' + sha)
try:
result = int(self.parse_revision(str(step_result.stdout)))
except ValueError:
raise self.m.step.StepFailure(
'Could not parse commit position from git output: ' +
(step_result.stdout or ''))
return result
|
glebkuznetsov/nucleic | nucleic/seq/__init__.py | Python | mit | 337 | 0.002967 | '''
nucleic.seq
Modules related to the creation and manupulation of DNA and RNA sequence
strings, including compact encoding schemes.
'''
from barcode import BarcodeGenerator
|
import binary
from manip | import reverse, complement, reverse_complement
# Space efficient abbreviations
r = reverse
c = complement
rc = reverse_complement
|
liuzzfnst/tp-libvirt | libguestfs/tests/guestmount.py | Python | gpl-2.0 | 2,688 | 0 | import logging
import os
from autotest.client.shared import error, utils
from virttest import data_dir, utils_test
def umount_fs(mountpoint):
if os.path.ismount(mountpoint):
result = utils.run("umount -l %s" % mountpoint, ignore_status=True)
if result.exit_status:
logging.debug("Umount %s failed", mountpoint)
return False
logging.debug("Umount %s successfully", mountpoint)
return True
def run(test, params, env):
"""
Test libguestfs tool guestmount.
"""
vm_name = params.get("main_vm")
vm = env.get_vm(vm_name)
start_vm = "yes" == params.get("start_vm", "no")
if vm.is_alive() and not start_vm:
vm.destroy()
elif vm.is_dead() and start_vm:
vm.start()
# Create a file to vm with guestmount
content = "This is file for guestmount test."
path = params.get("gm_tempfile", "/home/gm_tmp")
mountpoint = os.path.join(data_dir.get_tmp_dir(), "mountpoint")
status_error = "yes" == params.get("status_error", "yes")
readonly = "no" == params.get("gm_readonly", "no")
special_mount = "yes" == params.g | et("gm_mount", "no")
vt = utils_test.libguestfs.VirtTools(vm, params)
vm_ref = params.get("gm_vm_ref")
is_disk = "yes" == params.get("gm_is_disk", "no")
# Automatically get disk if no disk specified.
if is_disk and vm_ref is None:
vm_ref = utils_test.libguestfs.get_primary_disk(vm)
if special_mount:
# Get root filesystem before test
params['libvirt_domain'] = params.get("main_vm | ")
params['gf_inspector'] = True
gf = utils_test.libguestfs.GuestfishTools(params)
roots, rootfs = gf.get_root()
gf.close_session()
if roots is False:
raise error.TestError("Can not get root filesystem "
"in guestfish before test")
logging.info("Root filesystem is:%s", rootfs)
params['special_mountpoints'] = [rootfs]
writes, writeo = vt.write_file_with_guestmount(mountpoint, path, content,
vm_ref)
if umount_fs(mountpoint) is False:
logging.error("Umount vm's filesystem failed.")
if status_error:
if writes:
if readonly:
raise error.TestFail("Write file to readonly mounted "
"filesystem successfully.Not expected.")
else:
raise error.TestFail("Write file with guestmount "
"successfully.Not expected.")
else:
if not writes:
raise error.TestFail("Write file to mounted filesystem failed.")
|
dyf102/Gomoku-online | client/controller/chat_controller.py | Python | apache-2.0 | 3,149 | 0.000953 |
# !/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import logging
from PyQt4.QtCore import SIGNAL, QString
from basecontroller import BaseController, singleton
# from user_controller import UserController
# sys.path.append('../')
from util.util import print_trace_exception, log_callback
SEND_MSG_ID = 'SEND_MSG'
GET_MSG_ID = 'GET_MSG'
JOIN_CHAT_ROOM_ID = 'JOIN_CHAT_ROOM'
SERVICE_NAME = 'ChatService'
@singleton
class ChatController(BaseController):
def __init__(self):
BaseController.__init__(self, SERVICE_NAME)
self.c.register(GET_MSG_ID, self.get_msg_cb)
self.c.register(SEND_MSG_ID, self.send_msg_cb)
self.c.register(JOIN_CHAT_ROOM_ID, self.join_chat_room_cb)
self.chat_token_pool = {} # to avoid unnecessary update
# self.user_controller = UserController()
self.counter = 0
def send_msg(self, cid, uid, msg, username):
print('called send msg')
client = self.get_client()
req = {
'cid': cid,
'uid': uid,
'msg': msg,
'username': username
}
client.send(service_name=SERVICE_NAME, method=SEND_MSG_ID, msg=req)
def add_polling_msg_task(self, cid, uid):
client = self.get_client()
client.set_periodic_task(self.get_msg, (cid, uid), self.get_msg_cb, GET_MSG_ID)
@log_callback
def send_msg_cb(self, data):
| # logging.debug('send_msg %s', data)
self.emit(SIGNAL("send_msg_callback(int)"), data['code'])
def get_msg(self, cid, uid):
logging.debug('Call get msg %d %d', cid, uid)
client = self.get_client()
req = {
'cid': cid,
'uid': uid
}
| client.send(service_name=SERVICE_NAME, method=GET_MSG_ID, msg=req)
@log_callback
def get_msg_cb(self, data):
if data and data.get('code') == 200:
token = data.get('token')
cid = data.get('cid')
if cid is None:
logging.debug("received %s", data)
if token != self.chat_token_pool.get(cid):
self.chat_token_pool[cid] = token
self.emit(SIGNAL('clear'))
for item in data.get(u'data'):
text = '{}: {} at{}'.format(item.get('username'),
item.get('msg'),
item.get('time'))
self.emit(SIGNAL("showRoomTextWithRGB(QString,int,int,int)"), text, 0, 0, 0)
def join_chat_room(self, cid, uid):
client = self.get_client()
req = {
'cid': cid,
'uid': uid,
# '-----': self.counter,
}
self.counter += 1
client.send(service_name=SERVICE_NAME, method=JOIN_CHAT_ROOM_ID, msg=req)
@log_callback
def join_chat_room_cb(self, data):
code = data.get('code')
if code == 200:
cid = data.get('cid')
uid = data.get('uid')
self.add_polling_msg_task(cid=cid, uid=uid)
else:
self.emit(SIGNAL("error_msg(QString)"), QString(data['code']))
|
dashea/redhat-upgrade-tool | redhat_upgrade_tool/upgrade.py | Python | gpl-2.0 | 9,234 | 0.003357 | # upgrade.py - test the upgrade transaction using RPM
#
# Copyright (C) 2012 Red Hat Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Will Woods <wwoods@redhat.com>
# For the sake of simplicity, we don't bother with yum here.
import rpm
from rpm._rpm import ts as TransactionSetCore
import os, tempfile
from threading import Thread
import logging
log = logging.getLogger(__package__+'.upgrade')
from . import _
from .util import df, hrsize
class TransactionSet(TransactionSetCore):
    """Thin wrapper around rpm's low-level transaction set.

    Re-exposes the private flag/color attributes under public names and
    raises TransactionError instead of returning rpm's numeric status.
    """

    flags = TransactionSetCore._flags
    vsflags = TransactionSetCore._vsflags
    color = TransactionSetCore._color

    def run(self, callback, data, probfilter):
        """Run the transaction; raise TransactionError on reported problems."""
        log.debug('ts.run()')
        rv = TransactionSetCore.run(self, callback, data, probfilter)
        problems = self.problems()
        # Only raise when rpm both returned non-OK *and* recorded problems.
        if rv != rpm.RPMRC_OK and problems:
            raise TransactionError(problems)
        return rv

    def check(self, *args, **kwargs):
        """Run dependency checks; return conflict/requires problems only."""
        TransactionSetCore.check(self, *args, **kwargs)
        # NOTE: rpm.TransactionSet throws out all problems but these
        return [p for p in self.problems()
                if p.type in (rpm.RPMPROB_CONFLICT, rpm.RPMPROB_REQUIRES)]

    def add_install(self, path, key=None, upgrade=False):
        """Read the package header from *path* and add it to the transaction.

        Raises rpm.error if the header can't be read or the add fails.
        """
        log.debug('add_install(%s, %s, upgrade=%s)', path, key, upgrade)
        if key is None:
            key = path
        with open(path) as fileobj:
            retval, header = self.hdrFromFdno(fileobj)
            if retval != rpm.RPMRC_OK:
                raise rpm.error("error reading package header")
            if not self.addInstall(header, key, upgrade):
                raise rpm.error("adding package to transaction failed")

    def __del__(self):
        # Make sure the rpm database handle is released with the ts.
        self.closeDB()
# Human-readable (translated) descriptions for each rpm problem type code.
probtypes = { rpm.RPMPROB_NEW_FILE_CONFLICT : _('file conflicts'),
              rpm.RPMPROB_FILE_CONFLICT : _('file conflicts'),
              rpm.RPMPROB_OLDPACKAGE: _('older package(s)'),
              rpm.RPMPROB_DISKSPACE: _('insufficient disk space'),
              rpm.RPMPROB_DISKNODES: _('insufficient disk inodes'),
              rpm.RPMPROB_CONFLICT: _('package conflicts'),
              rpm.RPMPROB_PKG_INSTALLED: _('package already installed'),
              rpm.RPMPROB_REQUIRES: _('broken dependencies'),
              rpm.RPMPROB_BADARCH: _('package for incorrect arch'),
              rpm.RPMPROB_BADOS: _('package for incorrect os'),
            }
# --- stuff for doing useful summaries of big sets of problems

probattrs = ('type', 'pkgNEVR', 'altNEVR', 'key', '_str', '_num')


def prob2dict(p):
    """Return a plain dict snapshot of an rpm problem object's fields."""
    return dict((name, getattr(p, name)) for name in probattrs)
class ProblemSummary(object):
    """Base class that condenses all problems of one rpm problem type.

    Subclasses override get_details()/format_details() to turn many raw
    rpm problems into a short human-readable report.
    """

    def __init__(self, probtype, problems):
        self.type = probtype
        # Keep only the problems matching this summary's type.
        self.problems = [p for p in problems if p.type == self.type]
        self.desc = probtypes.get(probtype)
        self.details = self.get_details()

    def get_details(self):
        """Return structured detail data, or None when there is none."""
        return None

    def format_details(self):
        """Return a list of detail strings; subclasses must implement."""
        raise NotImplementedError

    def _log_probs(self):
        # Debug aid: dump each problem's raw fields next to its str() form.
        for p in self.problems:
            log.debug('%s -> "%s"', prob2dict(p), p)

    def __str__(self):
        if self.details:
            return "\n  ".join([self.desc+':'] + self.format_details())
        else:
            return self.desc
class DiskspaceProblemSummary(ProblemSummary):
    """Summary for RPMPROB_DISKSPACE problems, grouped per mount point."""

    def get_details(self):
        """Map each affected mount point to the largest shortfall reported."""
        shortfall = dict()
        for prob in self.problems:
            mountpoint, needed = prob._str, prob._num
            if needed > shortfall.get(mountpoint, 0):
                shortfall[mountpoint] = needed
        return shortfall

    def format_details(self):
        """Render one human-readable line per mount point."""
        return [_("%s needs %s more free space") % (mountpoint, hrsize(needed))
                for (mountpoint, needed) in self.details.iteritems()]
class DepProblemSummary(ProblemSummary):
    """Summary for RPMPROB_REQUIRES problems (broken dependencies)."""

    def get_details(self):
        """Return {brokenpkg: {otherpkg: set(requirements), ...}, ...}."""
        self._log_probs()
        pkgprobs = dict()
        # pkgprobs['installedpkg'] = {'otherpkg1': [req1, req2, ...], ...}
        for p in self.problems:
            # NOTE: p._num is a header reference if p.pkgNEVR is installed
            thispkg, otherpkg, req = p.altNEVR, p.pkgNEVR, p._str
            if thispkg not in pkgprobs:
                pkgprobs[thispkg] = {}
            if otherpkg not in pkgprobs[thispkg]:
                pkgprobs[thispkg][otherpkg] = set()
            pkgprobs[thispkg][otherpkg].add(req)
        return pkgprobs

    def format_details(self):
        """One line per broken package listing the packages it requires."""
        # Iterating the inner dict yields its keys (the required packages).
        return [_("%s requires %s") % (pkg, ", ".join(pkgprob))
                for (pkg, pkgprob) in self.details.iteritems()]
# If there is no handler for a type of problem, just return the
# rpmProblemString result for the problems


class GenericProblemSummary(ProblemSummary):
    """Fallback summary: one rpmProblemString line per problem."""

    def format_details(self):
        """Stringify each problem using rpm's own formatting."""
        details = []
        for prob in self.problems:
            details.append(str(prob))
        return details
# Dispatch table: rpm problem type -> specialized summary class.
# Types without an entry fall back to GenericProblemSummary.
probsummary = { rpm.RPMPROB_DISKSPACE: DiskspaceProblemSummary,
                rpm.RPMPROB_REQUIRES: DepProblemSummary,
              }
def summarize_problems(problems):
    """Build one ProblemSummary per distinct problem type in *problems*."""
    seen_types = set(p.type for p in problems)
    # Look up the specialized summarizer for each type, falling back to
    # the generic one, and instantiate it over the full problem list.
    return [probsummary.get(ptype, GenericProblemSummary)(ptype, problems)
            for ptype in seen_types]
class TransactionError(Exception):
    """Raised when an rpm transaction check/run reports problems.

    Carries both the raw rpm problem list and per-type summaries.
    """

    def __init__(self, problems):
        self.problems = problems
        self.summaries = summarize_problems(problems)
def pipelogger(pipe, level=logging.INFO):
    """Read rpm's output from *pipe* and forward it to our logger.

    Lines prefixed with 'D: ' are rpm debug chatter and go to DEBUG;
    everything else is logged at *level*. Runs until the writer closes
    the pipe (EOF).
    """
    logger = logging.getLogger(__package__+".rpm")
    logger.info("opening pipe")
    with open(pipe, 'r') as pipe_file:
        for line in pipe_file:
            if line.startswith('D: '):
                logger.debug(line[3:].rstrip())
            else:
                logger.log(level, line.rstrip())
    logger.info("got EOF")
    logger.info("exiting")
# Map Python logging levels to rpm's log verbosity levels.
logging_to_rpm = {
    logging.DEBUG: rpm.RPMLOG_DEBUG,
    logging.INFO: rpm.RPMLOG_INFO,
    logging.WARNING: rpm.RPMLOG_WARNING,
    logging.ERROR: rpm.RPMLOG_ERR,
    logging.CRITICAL: rpm.RPMLOG_CRIT,
}
class RPMUpgrade(object):
    def __init__(self, root='/', logpipe=True, rpmloglevel=logging.INFO):
        """Set rpm verbosity and optionally open a pipe for rpm's output.

        root: filesystem root for the rpm transaction.
        logpipe: if True, route rpm scriptlet output through our logger.
        rpmloglevel: Python logging level, translated via logging_to_rpm.
        """
        self.root = root
        self.ts = None
        self.logpipe = None
        rpm.setVerbosity(logging_to_rpm[rpmloglevel])
        if logpipe:
            self.logpipe = self.openpipe()
    def setup_transaction(self, pkgfiles, check_fatal=False):
        """Populate, check and order the upgrade transaction.

        pkgfiles: iterable of package file paths to add (as upgrades).
        check_fatal: if True, raise TransactionError on check problems;
            otherwise problems are logged and the error is *returned*
            (not raised) so the caller can decide what to do.
        """
        log.debug("starting")
        # initialize a transaction set
        self.ts = TransactionSet(self.root, rpm._RPMVSF_NOSIGNATURES)
        if self.logpipe:
            self.ts.scriptFd = self.logpipe.fileno()
        # populate the transaction set
        for pkg in pkgfiles:
            try:
                self.ts.add_install(pkg, upgrade=True)
            except rpm.error as e:
                # A bad package is logged and skipped, not fatal here.
                log.warn('error adding pkg: %s', e)
                # TODO: error callback
        log.debug('ts.check()')
        problems = self.ts.check() or []
        if problems:
            log.info("problems with transaction check:")
            for p in problems:
                log.info(p)
            if check_fatal:
                raise TransactionError(problems=problems)
        log.debug('ts.order()')
        self.ts.order()
        log.debug('ts.clean()')
        self.ts.clean()
        log.debug('transaction is ready')
        # Non-fatal path: hand the problems back instead of raising.
        if problems:
            return TransactionError(problems=problems)
def openpipe(self):
log.debug("creating log pipe")
pipefile = tempfile.mktemp(prefix='rpm-log-pipe.')
os.mkfifo(pipefile, 0600)
log.debug("starting logging thread")
pipethread = Thread(target=pipelogger, name='pipelogger',
args=(pipefile,))
pipethread |
eduNEXT/edunext-platform | cms/djangoapps/contentstore/tests/test_course_create_rerun.py | Python | agpl-3.0 | 8,047 | 0.001491 | """
Test view handler for rerun (and eventually create)
"""
import datetime
import ddt
from django.test import override_settings
from django.test.client import RequestFactory
from django.urls import reverse
from opaque_keys.edx.keys import CourseKey
from organizations.api import add_organization, get_course_organizations, get_organization_by_short_name
from organizations.exceptions import InvalidOrganizationException
from cms.djangoapps.contentstore.tests.utils import AjaxEnabledTestClient, parse_json
from common.djangoapps.student.roles import CourseInstructorRole, CourseStaffRole
from common.djangoapps.student.tests.factories import UserFactory
from xmodule.course_module import CourseFields
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
@ddt.ddt
class TestCourseListing(ModuleStoreTestCase):
    """
    Unit tests for getting the list of courses for a logged in user
    """
    def setUp(self):
        """
        Add a user and a course
        """
        super().setUp()
        # create and log in a staff user.
        # create and log in a non-staff user
        self.user = UserFactory()
        self.factory = RequestFactory()
        self.client = AjaxEnabledTestClient()
        self.client.login(username=self.user.username, password='test')
        self.course_create_rerun_url = reverse('course_handler')
        # Source-course dates: a 30-day run, enrollment opens a week early
        # and closes two weeks before the course ends.
        self.course_start = datetime.datetime.utcnow()
        self.course_end = self.course_start + datetime.timedelta(days=30)
        self.enrollment_start = self.course_start - datetime.timedelta(days=7)
        self.enrollment_end = self.course_end - datetime.timedelta(days=14)
        source_course = CourseFactory.create(
            org='origin',
            number='the_beginning',
            run='first',
            display_name='the one and only',
            start=self.course_start,
            end=self.course_end,
            enrollment_start=self.enrollment_start,
            enrollment_end=self.enrollment_end
        )
        self.source_course_key = source_course.id
        # The user needs instructor/staff access on the source course to rerun it.
        for role in [CourseInstructorRole, CourseStaffRole]:
            role(self.source_course_key).add_users(self.user)

    def tearDown(self):
        """
        Reverse the setup
        """
        self.client.logout()
        ModuleStoreTestCase.tearDown(self)

    def test_rerun(self):
        """
        Just testing the functionality the view handler adds over the tasks tested in test_clone_course
        """
        add_organization({
            'name': 'Test Organization',
            'short_name': self.source_course_key.org,
            'description': 'Testing Organization Description',
        })
        response = self.client.ajax_post(self.course_create_rerun_url, {
            'source_course_key': str(self.source_course_key),
            'org': self.source_course_key.org, 'course': self.source_course_key.course, 'run': 'copy',
            'display_name': 'not the same old name',
        })
        self.assertEqual(response.status_code, 200)
        data = parse_json(response)
        dest_course_key = CourseKey.from_string(data['destination_course_key'])
        self.assertEqual(dest_course_key.run, 'copy')
        source_course = self.store.get_course(self.source_course_key)
        dest_course = self.store.get_course(dest_course_key)
        # A rerun resets start/enrollment dates but keeps the end date.
        self.assertEqual(dest_course.start, CourseFields.start.default)
        self.assertEqual(dest_course.end, source_course.end)
        self.assertEqual(dest_course.enrollment_start, None)
        self.assertEqual(dest_course.enrollment_end, None)
        # The rerun is linked to the same organization as the source.
        course_orgs = get_course_organizations(dest_course_key)
        self.assertEqual(len(course_orgs), 1)
        self.assertEqual(course_orgs[0]['short_name'], self.source_course_key.org)

    @ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
    def test_newly_created_course_has_web_certs_enabled(self, store):
        """
        Tests newly created course has web certs enabled by default.
        """
        with modulestore().default_store(store):
            response = self.client.ajax_post(self.course_create_rerun_url, {
                'org': 'orgX',
                'number': 'CS101',
                'display_name': 'Course with web certs enabled',
                'run': '2015_T2'
            })
            self.assertEqual(response.status_code, 200)
            data = parse_json(response)
            new_course_key = CourseKey.from_string(data['course_key'])
            course = self.store.get_course(new_course_key)
            self.assertTrue(course.cert_html_view_enabled)

    @ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
    def test_course_creation_for_unknown_organization_relaxed(self, store):
        """
        Tests that when ORGANIZATIONS_AUTOCREATE is True,
        creating a course-run with an unknown org slug will create an organization
        and organization-course linkage in the system.
        """
        with self.assertRaises(InvalidOrganizationException):
            get_organization_by_short_name("orgX")
        with modulestore().default_store(store):
            response = self.client.ajax_post(self.course_create_rerun_url, {
                'org': 'orgX',
                'number': 'CS101',
                'display_name': 'Course with web certs enabled',
                'run': '2015_T2'
            })
            self.assertEqual(response.status_code, 200)
            # Creating the course-run should have auto-created the org.
            self.assertIsNotNone(get_organization_by_short_name("orgX"))
            data = parse_json(response)
            new_course_key = CourseKey.from_string(data['course_key'])
            course_orgs = get_course_organizations(new_course_key)
            self.assertEqual(len(course_orgs), 1)
            self.assertEqual(course_orgs[0]['short_name'], 'orgX')

    @ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
    @override_settings(ORGANIZATIONS_AUTOCREATE=False)
    def test_course_creation_for_unknown_organization_strict(self, store):
        """
        Tests that when ORGANIZATIONS_AUTOCREATE is False,
        creating a course-run with an unknown org slug will raise a validation error.
        """
        with modulestore().default_store(store):
            response = self.client.ajax_post(self.course_create_rerun_url, {
                'org': 'orgX',
                'number': 'CS101',
                'display_name': 'Course with web certs enabled',
                'run': '2015_T2'
            })
            self.assertEqual(response.status_code, 400)
            # The unknown org must NOT have been created as a side effect.
            with self.assertRaises(InvalidOrganizationException):
                get_organization_by_short_name("orgX")
            data = parse_json(response)
            self.assertIn('Organization you selected does not exist in the system', data['error'])

    @ddt.data(True, False)
    def test_course_creation_for_known_organization(self, organizations_autocreate):
        """
        Tests course creation workflow when course organization exist in system.
        """
        add_organization({
            'name': 'Test Organization',
            'short_name': 'orgX',
            'description': 'Testing Organization Description',
        })
        # Creation should succeed regardless of the autocreate setting,
        # because the org already exists.
        with override_settings(ORGANIZATIONS_AUTOCREATE=organizations_autocreate):
            response = self.client.ajax_post(self.course_create_rerun_url, {
                'org': 'orgX',
                'number': 'CS101',
                'display_name': 'Course with web certs enabled',
                'run': '2015_T2'
            })
            self.assertEqual(response.status_code, 200)
            data = parse_json(response)
            new_course_key = CourseKey.from_string(data['course_key'])
            course_orgs = get_course_organizations(new_course_key)
            self.assertEqual(len(course_orgs), 1)
            self.assertEqual(course_orgs[0]['short_name'], 'orgX')
|
Rondineli/django-sso | django_sso/accounts/migrations/0003_auto__add_field_user_is_admin.py | Python | gpl-2.0 | 5,064 | 0.008294 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add the ``is_admin`` flag to accounts.User."""

    def forwards(self, orm):
        # Adding field 'User.is_admin'
        db.add_column(u'accounts_user', 'is_admin',
                      self.gf('django.db.models.fields.BooleanField')(default=False),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'User.is_admin'
        db.delete_column(u'accounts_user', 'is_admin')

    # Frozen ORM snapshot used by South at migration time.
    # Auto-generated; do not edit by hand.
    models = {
        u'accounts.group': {
            'Meta': {'object_name': 'Group'},
            'creation_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modification_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
            'perspectives': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['perspectives.Perspective']", 'symmetrical': 'False'}),
            'role': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['accounts.User']", 'symmetrical': 'False'})
        },
        u'accounts.user': {
            'Meta': {'object_name': 'User'},
            'creation_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'modification_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'phone': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
            'team': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'})
        },
        u'companies.company': {
            'Meta': {'object_name': 'Company'},
            'creation_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'modification_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
        },
        u'dashboards.dashboard': {
            'Meta': {'unique_together': "(('name', 'company'),)", 'object_name': 'Dashboard'},
            'company': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['companies.Company']"}),
            'creation_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modification_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
            'source': ('django.db.models.fields.TextField', [], {})
        },
        u'perspectives.perspective': {
            'Meta': {'unique_together': "(('name', 'company'),)", 'object_name': 'Perspective'},
            'company': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['companies.Company']"}),
            'creation_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'dashboards': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['dashboards.Dashboard']", 'symmetrical': 'False'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modification_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        }
    }

    complete_apps = ['accounts']
jparyani/pycapnp | examples/addressbook.py | Python | bsd-2-clause | 1,591 | 0 | #!/usr/bin/env python3
import capnp # noqa: F401
import addressbook_capnp
def writeAddressBook(file):
    """Build a sample two-person AddressBook message and write it to *file*.

    The message is serialized with capnp's standard framing via write();
    *file* should be opened for binary writing.
    """
    addresses = addressbook_capnp.AddressBook.new_message()
    people = addresses.init("people", 2)

    # First person: Alice, a student with a single mobile number.
    alice = people[0]
    alice.id = 123
    alice.name = "Alice"
    alice.email = "alice@example.com"
    alicePhones = alice.init("phones", 1)
    alicePhones[0].number = "555-1212"
    alicePhones[0].type = "mobile"
    alice.employment.school = "MIT"

    # Second person: Bob, unemployed, with home and work numbers.
    bob = people[1]
    bob.id = 456
    bob.name = "Bob"
    bob.email = "bob@example.com"
    bobPhones = bob.init("phones", 2)
    bobPhones[0].number = "555-4567"
    bobPhones[0].type = "home"
    bobPhones[1].number = "555-7654"
    bobPhones[1].type = "work"
    # Assigning None selects a Void union member (here: 'unemployed').
    bob.employment.unemployed = None

    addresses.write(file)
def printAddressBook(file):
    """Read an AddressBook message from *file* and pretty-print each person."""
    addresses = addressbook_capnp.AddressBook.read(file)

    for person in addresses.people:
        print(person.name, ":", person.email)
        for phone in person.phones:
            print(phone.type, ":", phone.number)

        # which() reports the currently-set member of the employment union.
        which = person.employment.which()
        print(which)
        if which == "unemployed":
            print("unemployed")
        elif which == "employer":
            print("employer:", person.employment.employer)
        elif which == "school":
            print("student at:", person.employment.school)
        elif which == "selfEmployed":
            print("self employed")
        print()
if __name__ == "__main__":
    # Cap'n Proto serialization is binary, so open the files in binary
    # mode; use context managers so the write handle is flushed and
    # closed before the file is reopened for reading.
    with open("example", "wb") as f:
        writeAddressBook(f)
    with open("example", "rb") as f:
        printAddressBook(f)
|
QISKit/qiskit-sdk-py | qiskit/transpiler/passes/depth.py | Python | apache-2.0 | 812 | 0 | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of | this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
""" An analysis pass for calculating the depth of a DAG circuit.
"""
from qiskit.transpiler.basepasses impor | t AnalysisPass
class Depth(AnalysisPass):
    """Analysis pass that records the depth of a DAG circuit.

    The computed depth is stored under the ``'depth'`` key of the
    pass manager's property set.
    """

    def run(self, dag):
        """Compute ``dag.depth()`` and stash it in the property set."""
        circuit_depth = dag.depth()
        self.property_set['depth'] = circuit_depth
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.