repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
pytest-dev/pluggy | testing/test_helpers.py | Python | mit | 1,664 | 0 | from pluggy._hooks import varnames
from pluggy._manager import _formatdef
def test_varnames() -> None:
def f(x) -> None:
i = 3 # noqa
class A:
def f(self, y) -> None:
pass
class B:
def __call__(self, z) -> None:
pass
assert varnames(f) == (("x",), ())
assert varnames(A().f) == (("y",), ())
assert varnames(B()) == (("z",), ())
def test_varnames_default() -> None:
def f(x, y=3) -> None:
pass
assert varnames(f) == (("x",), ("y",))
def test_varnames_class() -> None:
class C:
def __init__(self, x) -> None:
pass
| class D:
pass
class E:
def __init__(self, x) -> None:
pass
class F:
pass
assert varnam | es(C) == (("x",), ())
assert varnames(D) == ((), ())
assert varnames(E) == (("x",), ())
assert varnames(F) == ((), ())
def test_varnames_keyword_only() -> None:
def f1(x, *, y) -> None:
pass
def f2(x, *, y=3) -> None:
pass
def f3(x=1, *, y=3) -> None:
pass
assert varnames(f1) == (("x",), ())
assert varnames(f2) == (("x",), ())
assert varnames(f3) == ((), ("x",))
def test_formatdef() -> None:
def function1():
pass
assert _formatdef(function1) == "function1()"
def function2(arg1):
pass
assert _formatdef(function2) == "function2(arg1)"
def function3(arg1, arg2="qwe"):
pass
assert _formatdef(function3) == "function3(arg1, arg2='qwe')"
def function4(arg1, *args, **kwargs):
pass
assert _formatdef(function4) == "function4(arg1, *args, **kwargs)"
|
allotria/intellij-community | python/testData/intentions/convertVariadicParamEmptySubscription.py | Python | apache-2.0 | 45 | 0 | def foo(**k | wargs):
doSomething | (kwargs[])
|
ucshadow/Fish_bot | even more.py | Python | lgpl-3.0 | 5,779 | 0.00173 | import pyscreenshot as img
import time
import msvcrt
import win32api
import win32con
import numpy as np
import tkinter as tk
import threading
import pyautogui
from win32api import GetKeyState
from ctypes import windll
class App(tk.Tk):
def __init__(self, *args, **kwargs):
tk.Tk.__init__(self, *args, **kwargs)
self.cursor = 0
self.color = 0
self.color_now = 0
self.title('SHADOW')
self.geometry('600x300')
self.x1 = tk.Label(self, text='first', width=20, anchor='w', fg='green')
self.x2 = tk.Label(self, text='1', width=20, anchor='w', fg='magenta')
self.b1 = tk.Button(text="Hello", command=self.b1_cb)
self.x1.grid(row=0, column=0)
self.x2.grid(row=0, column=1)
self.b1.grid(row=0, column=3)
self.e1 = tk.Label(self, text='bobbler color', width=20, anchor='w', fg='green')
self.e2 = tk.Canvas(self, width=100, height=20, bg='yellow', relief='ridge')
self.b2 = tk.Button(text='Get Color', command=self.color_thread)
self.e1.grid(row=1, column=0)
self.e2.grid(row=1, column=1)
self.b2.grid(row=1, column=3)
self.s1 = tk.Label(self, text='press to start', width=20, anchor='w', fg='green')
self.s2 = tk.Canvas(self, width=100, height=20, bg='yellow', relief='ridge')
self.b3 = tk.Button(text='Start Scan', command=self.loop_thread)
self.s1.grid(row=2, column=0)
self.s2.grid(row=2, column=1)
self.b3.grid(row=2, column=3)
def set_text(self, message):
self.label.config(text=message)
def b1_cb(self):
# tk.messagebox.showinfo(str(self.rgb))
print('bl_cl')
def get_cursor_possition(self):
cursor = win32api.GetCursorPos()
self.cursor = cursor
# print('cursor pos is ', self.cursor)
return cursor
def press_for_color(self):
# print('ACTIVATED')
while True:
if self.key_down(17):
pos = self.get_cursor_possition()
hdc = windll.user32.GetDC(0)
rgb = windll.gdi32.GetPixel(hdc, pos[0], pos[1])
r = rgb & 0xff
g = (rgb >> 8) & 0xff
b = (rgb >> 16) & 0xff
self.color = (r, g, b)
# print('color is ', self.color)
to_hex = ('#%02x%02x%02x' % (r, g, b))
self.e2.config(bg=to_hex)
self.b2.config(relief='raised')
return 0
time.sleep(0.1)
def key_down(self, key):
state = GetKeyState(key)
if (state != 0) and (state != 1):
return True
else:
return False
def color_thread(self):
self.b2.config(relief='sunken')
th = threading.Thread(target=self.press_for_color)
th.daemon = True
th.start()
def loop_thread(self):
th = threading.Thread(target=self.start_scan)
th.daemon = True
th.start()
def wait_thread(self):
th = threading.Thread(target=self.waiting_for_fish)
th.daemon = True
th.start()
def update_s2(self, c):
self.s2.after(1000, lambda: self.s2.config(bg='#%02x%02x%02x' % c))
def start_scan(self):
# print(self.cursor, self.color)
print('starting scan in 2')
time.sleep(2)
prev_position = [0, 0]
while True:
pyautogui.press('3')
# time.sleep(1)
color = self.color
image = img.grab()
for x in range(250, 1500, 2): # change to fishing area
for y in range(200, 650, 2):
color_now = image.getpixel((x, y))
if np.allclose(list(color_now), list(color), atol=10):
print('found color in position', x, y)
'''self.update_s2(color_now)
self.color_now = color_now
time.sleep(1)
win32api.SetCu | rsorPos((x, y))
print('match!')
self.after(2000)'''
if abs(x - prev_position[0] >= 10) and abs(y - prev_position[2] >= 10):
prev_position[0] = x
prev_position[1] = y
win32api.SetCursorPos((x, | y))
return self.wait_thread()
print('scan Finished with no match...')
def waiting_for_fish(self):
time.clock()
tolerance = t = 5
while True:
splash = (156, 150, 135)
density = []
image = img.grab()
colors = set()
cursor_position = win32api.GetCursorPos()
x1, y1 = cursor_position[0], cursor_position[1]
a = (x1 - 50, x1)
b = (y1 - 25, y1 + 25)
# time.clock()
for x in range(a[0], a[1]):
for y in range(b[0], b[1]):
# self.after(1, win32api.SetCursorPos((x, y)))
colors.add(image.getpixel((x, y)))
for i in colors:
if abs(splash[0] - i[0] <= t):
if abs(splash[1] - i[1] <= t):
if abs(splash[2] - i[2] <= t):
density.append(i)
print('density length is', len(density))
if len(density) > 100:
pyautogui.rightClick()
return self.start_scan()
#print(time.clock())
#print(colors)
#print(len(colors))
time.sleep(0.5)
if time.clock() > 18:
return self.start_scan()
return self.start_scan()
app = App()
app.mainloop()
|
khchine5/xl | lino_xl/lib/finan/fixtures/unused_demo.py | Python | bsd-2-clause | 2,045 | 0.008802 | # -*- coding: UTF-8 -*-
# Copyright 2009-2013 Luc Saffre
# License: BSD (see file COPYING for details)
#import time
#from datetime import date
#from dateutil import parser as dateparser
#from lino.projects.finan import models as finan
#~ import decimal
from decimal import Decimal
from django.conf import settings
from lino.api import dd, rt
from lino.utils import Cycler
from lino.utils.instantiator import Instantiator, i2d
from lino.core.utils import resolve_model
partner_model = settings.SITE.partners_app_label + '.Partner'
Partner = dd.resolve_model(partner_model)
REQUEST = None
def objects():
ledger = dd.resolve_app('ledger')
finan = dd.resolve_app('finan')
#~ partners = dd.resolve_app('partners')
#~ contacts = dd.resolve_app('contacts')
MODEL = finan.BankStatement
vt = ledger.VoucherTypes.get_for_model(MODEL)
JOURNALS = Cycler(vt.get_journals())
PARTNERS = Cycler(Partner.objects.order_by('name'))
USERS = Cycler(settings.SITE.user_model.objects.all())
AMOUNTS = Cycler([Decimal(x) for x in
"2.50 6.80 9.95 14.50 20 29.90 39.90 39.90 99.95 199.95 599.95 1599.99".split()])
ITEMCOUNT = Cycler(1, 3, 10)
for i in range(2):
jnl = JOURNALS.pop()
voucher = MODEL(journal=jnl,
user=USERS.pop(),
date=settings.SITE.demo_date(-30 + i))
yield voucher
ACCOUNTS = Cycler(jnl.get_allowed_accounts())
for j in range(ITEMCOUNT.pop()):
item = voucher.add_voucher_item(
partner=PART | NERS.pop(),
account=ACCOUNTS.pop(),
amount=AMOUNT | S.pop()
)
#~ item.total_incl_changed(REQUEST)
#~ item.before_ui_save(REQUEST)
#~ if item.total_incl:
#~ print "20121208 ok", item
#~ else:
#~ if item.product.price:
#~ raise Exception("20121208")
yield item
voucher.register(REQUEST)
yield voucher
|
JackDanger/sentry | src/sentry/utils/performance/sqlquerycount.py | Python | bsd-3-clause | 3,221 | 0.00031 | from __future__ import absolute_import
import logging
import six
import threading
from collections import defaultdict
from sentry.debug.utils.patch_context import PatchContext
DEFAULT_MAX_QUERIES = 25
DEFAULT_M | AX_DUPES = 3
class State(threading.local):
def __init__(self):
self.count = 0
self.query_hashes = defaultdict(int)
def record_query(self, sql):
self.count += 1
self.query_hashes[has | h(sql)] += 1
def count_dupes(self):
return sum(1 for n in six.itervalues(self.query_hashes) if n > 1)
class CursorWrapper(object):
def __init__(self, cursor, connection, state):
self.cursor = cursor
self.connection = connection
self._state = state
def execute(self, sql, params=()):
try:
return self.cursor.execute(sql, params)
finally:
self._state.record_query(sql)
def executemany(self, sql, paramlist):
try:
return self.cursor.executemany(sql, paramlist)
finally:
self._state.record_query(sql)
def __getattr__(self, attr):
if attr in self.__dict__:
return self.__dict__[attr]
else:
return getattr(self.cursor, attr)
def __iter__(self):
return iter(self.cursor)
def get_cursor_wrapper(state):
def cursor(func, self, *args, **kwargs):
result = func(self, *args, **kwargs)
return CursorWrapper(result, self, state)
return cursor
class SqlQueryCountMonitor(object):
def __init__(self, context, max_queries=DEFAULT_MAX_QUERIES,
max_dupes=DEFAULT_MAX_DUPES, logger=None, **kwargs):
self.context = context
self.max_queries = max_queries
self.max_dupes = max_dupes
self.logger = logger or logging.getLogger(__name__)
self.state = State()
self._cursor = get_cursor_wrapper(self.state)
self._patcher = PatchContext('django.db.backends.BaseDatabaseWrapper.cursor', self._cursor)
def __enter__(self):
self.start()
return self
def __exit__(self, *args, **kwargs):
self.stop()
def start(self):
self._patcher.patch()
def stop(self):
self._patcher.unpatch()
num_dupes = self.state.count_dupes()
if self.state.count > self.max_queries:
self.log_max_queries(num_dupes)
if num_dupes > self.max_dupes:
self.log_max_dupes(num_dupes)
def log_max_dupes(self, num_dupes):
state = self.state
context = {
'stack': True,
'data': {
'query_count': state.count,
'num_dupes': num_dupes,
}
}
self.logger.warning('%d duplicate queries executed in %s',
num_dupes, self.context, extra=context)
def log_max_queries(self, num_dupes):
state = self.state
context = {
'stack': True,
'data': {
'query_count': state.count,
'num_dupes': num_dupes,
}
}
self.logger.warning('%d queries executed in %s',
state.count, self.context, extra=context)
|
fulfilio/nereid-webshop | party.py | Python | bsd-3-clause | 3,061 | 0 | # -*- coding: utf-8 -*-
import os
import logging
from wtforms import TextField, validators
from trytond.pool import PoolMeta, Pool
from trytond.modules.nereid.party import AddressForm
from trytond.config import config
from nereid import request, current_app, current_user
from trytond.modules.nereid_checkout.i18n import _
__metaclass__ = PoolMeta
__all__ = ['Address']
geoip = None
try:
from pygeoip import GeoIP
except ImportError:
logging.error("pygeoip is not installed")
else:
path = os.environ.get(
'GEOIP_DATA_PATH', config.get('nereid_webshop', 'geoip_data_path')
)
if path:
geoip = GeoIP(path)
class WebshopAddressForm(AddressForm):
"""Custom address form for webshop
"""
phone = TextField(_('Phone'), [validators.DataRequired(), ])
def get_default_country(self):
"""Get the default country based on geoip data.
"""
if not geoip or not request.remote_addr:
return None
Country = Pool().get('country.country')
try:
current_app.logger.debug(
"GeoIP lookup for remote address: %s" % request.remote_addr
)
country, = Country.search([
('code', '=', geoip.country_code_by_addr(request.remote_addr))
])
except ValueError:
return None
return country
def __init__(self, formdata=None, obj=None, prefix='', **kwargs):
# While choices can be assigned after the form is constructed, default
# cannot be. The form's data is picked from the first available of
# formdata, obj data, and kwargs.
# Once the data has been resolved, changing the default won't do
# anything.
default_country = self.get_default_country()
if default_country:
kwargs.setdefault('country', default_country.id)
super(WebshopAddressForm, self).__init__(
formdata, obj, prefix, **kwargs
)
class Address:
__name__ = 'party.address'
@classmethod
def get_address_form(cls, address=None):
"""
Return an initialised Address form that can be validated and used to
create/update addresses
:param address: If an active record is provided it is used to autofill
the form.
"""
if address:
form = WebshopAddressForm(
request.form,
name=a | ddress.name,
street=address.street,
streetbis=address.streetbis,
zip=address.zip,
city=address.city,
country=address.country and address.country.id,
subdivision=address.subdi | vision and address.subdivision.id,
email=address.party.email,
phone=address.phone
)
else:
address_name = "" if current_user.is_anonymous else \
current_user.display_name
form = WebshopAddressForm(request.form, name=address_name)
return form
|
Sibert-Aerts/c2p | src/main.py | Python | mit | 2,477 | 0.004441 | import sys
import c2p
import traceback
from antlr4 import * # type: ignore
from antlr4.error.ErrorListener import ErrorListener # type: ignore
from c2p.grammar.antlr.SmallCLexer import SmallCLexer
from c2p.grammar.antlr.SmallCParser import SmallCParser
from c2p.grammar.ast.visitor import ASTVisitor
from c2p.grammar.ast.visualize import Visualizer
from c2p.codegen.environment import Environment
from c2p.codegen.error import PositionalError, warn
from c2p.error_listener import ParserSyntaxErrorListener
def to_file(filename, text):
with open(filename, 'w') as f:
f.write(text)
def run(argv):
if len(argv) < 2:
sys.exit('Supply a C code file to compile.')
try:
inputStream = FileStream(argv[1])
except FileNotFoundError as e:
sys.exit(e)
parser = SmallCParser(CommonTokenStream(SmallCLexer(inputStream)))
parser.removeErrorListeners()
parser.addErrorListener(ParserSyntaxErrorListener())
try:
tree = parser.program()
except PositionalError as e:
sys.exit(e.pretty_print(inputStream))
action = ''
try:
action = 'generating the AST'
ast = ASTVisitor().visit(tree)
action = 'rendering the AST to DOT'
dotFileName = 'AST.dot'
to_file(dotFileName, Visualizer().make_dot(ast))
print('AST generation successful. Output written to \'{}\''.format(dotFileName))
action = 'compiling the AST to code'
code = ast.to_code(Environment()).code
codeText = '\n'.join(op.emit() for op in code) + '\n'
action = 'writing the code to a file'
codeFileName = 'code.p'
to_file(codeFileName, codeText)
print('Code generation successful. Output written to \'{}\''.format(codeFileName))
except PositionalError as e:
sys.exit(e.pretty_print(inputStream))
except ValueError as e:
# Don't print a gigantic stack trace each time.
exceptiondata = traceback.format_exc().splitlines()
print('Encountered {0} while {1} | :'.format(e.__class__.__name__, action))
print(e)
[print(l) for l in exceptiondata[-3:-1]]
except NotImplementedError as e:
exceptiondata = traceback.format_exc().splitlines()
print('Encountered {0} while {1}:'.format(e.__class__.__name__, action))
print(e)
if isinstance(e, NotImplementedError):
[print(l) for l in | exceptiondata[-3:-1]]
if __name__ == '__main__':
run(sys.argv)
|
Laika-ETS/shiba | shiba_ws/src/shiba_teleop/script/teleop.py | Python | mit | 533 | 0.011257 | #!/usr/bin/env python
PACKAGE_NAME = 'shiba_teleop'
import roslib
roslib.load_manifest(PACKAGE_NAME)
import rospy
from geometry_msgs.msg import Twist
from sensor_msgs.msg import Joy
import rospkg
FORWARD = 1
BACKWARDS = 2
SPINNING = 3
STOPPED = 4
linear_increment = 0.3
max_linear_vel = 1.0
min_linear_vel = -1.0
default_linear_vel = 0.1
angular_increment = 0.1
max_angular_vel = 0.6
spin_speed = 1.0
last_joy_message = | None
linear_vel = 0.0
angular_vel = 0.0
last_angular_acceleration = 0
rotating = False
state = STOPPED
| |
sbidoul/pip | src/pip/_internal/metadata/__init__.py | Python | mit | 2,036 | 0.000982 | from typing import List, Optional
from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel
__all__ = [
"BaseDistribution",
"BaseEnvironment",
"FilesystemWheel",
"MemoryWheel",
"Wheel",
"get_default_environment",
"get_environment",
"get_wheel_distribution",
]
def get_default_environment() -> BaseEnvironment:
"""Get the default representation for the current environment.
This returns an Environment instance from the chosen backend. The default
Environment instance should be built from ``sys.path`` and may use caching
to share instance state accorss calls.
"""
from .pkg_resources import Environment
return Environment.default()
def get_environment(paths: Optional[List[str]]) -> BaseEnvironment:
"""Get a representation of the environment specified by ``paths``.
This returns an Environment instance from the chosen backend based on the
given import paths. The backend must build a fresh instance representing
the state of installed distributions when this function is called.
"""
from .pkg_resources import Environment
return Environment.from_paths(paths)
def get_directory_distribution(directory: str) -> BaseDistribution:
"""Get the distribution metadata representation in the specified directory.
This returns a Distribution instance from the chosen backend based on
the given on-disk ``.dist-info`` directory.
"""
from .pkg_resources import Distribution
return Distribution.from_d | irectory(directory)
def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistribution:
"""Get the representation of the specified wheel's distribution metadata.
This returns a Distribution instance from the chosen backend based | on
the given wheel's ``.dist-info`` directory.
:param canonical_name: Normalized project name of the given wheel.
"""
from .pkg_resources import Distribution
return Distribution.from_wheel(wheel, canonical_name)
|
nsnam/ns-3-dev-git | src/propagation/bindings/modulegen__gcc_LP64.py | Python | gpl-2.0 | 277,968 | 0.014196 | from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
root_module = Module('ns.propagation', cpp_namespace='::ns3')
return root_module
def register_types(module):
root_module = module.get_root()
## propagation-environment.h (module 'propagation'): ns3::EnvironmentType [enumeration]
module.add_enum('EnvironmentType', ['UrbanEnvironment', 'SubUrbanEnvironment', 'OpenAreasEnvironment'])
## propagation-environment.h (module 'propagation'): ns3::CitySize [enumeration]
module.add_enum('CitySize', ['SmallCity', 'MediumCity', 'LargeCity'])
## log.h (module 'core'): ns3::LogLevel [enumeration]
module.add_enum('LogLevel', ['LOG_NONE', 'LOG_ERROR', 'LOG_LEVEL_ERROR', 'LOG_WARN', 'LOG_LEVEL_WARN', 'LOG_DEBUG', 'LOG_LEVEL_DEBUG', 'LOG_INFO', 'LOG_LEVEL_INFO', 'LOG_FUNCTION', 'LOG_LEVEL_FUNCTION', 'LOG_LOGIC', 'LOG_LEVEL_LOGIC', 'LOG_ALL', 'LOG_LEVEL_ALL', 'LOG_PREFIX_FUNC', 'LOG_PREFIX_TIME', 'LOG_PREFIX_NODE', 'LOG_PREFIX_LEVEL', 'LOG_PREFIX_ALL'], import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
typehandlers.add_type_alias('std::list< ns3::AttributeConstructionList::Item > const_iterator', 'ns3::AttributeConstructionList::CIterator')
typehandlers.add_type_alias('std::list< ns3::AttributeConstructionList::Item > const_iterator*', 'ns3::AttributeConstructionList::CIterator*')
typehandlers.add_type_alias('std::list< ns3::AttributeConstructionList::Item > const_iterator&', 'ns3::AttributeConstructionList::CIterator&')
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeAccessor> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeChecker> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeChecker'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeValue> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeValue'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::CallbackImplBase> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::EventImpl> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::EventImpl'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Hash::Implementation> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::TraceSourceAccessor> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor'])
## event-id.h (module 'core'): ns3::EventId [class]
module.add_class('EventId', import_from_module='ns.core')
## event-id.h (module 'core'): ns3::EventId::UID [enumeration]
module.add_enum('UID', ['INVALID', 'NOW', 'DESTROY', 'RESERVED', 'VALID'], outer_class=root_module['ns3::EventId'], import_from_module='ns.core')
## hash.h (module 'core'): ns3::Hasher [class]
module.add_class('Hasher', import_from_module='ns.core')
## log.h (module 'core'): ns3::LogComponent [class]
module.add_class('LogComponent', import_from_module='ns.core')
typehandlers.add_type_alias('std::map< std::string, ns3::LogComponent * >', 'ns3::LogComponent::ComponentList')
typehandlers.add_type_alias('std::map< std::string, ns3::LogComponent * >*', 'ns3::LogComponent::ComponentList*')
typehandlers.add_type_alias('std::map< std::string, ns3::LogComponent * >&', 'ns3::LogComponent::ComponentList&')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## log.h (module 'core'): ns3::ParameterLogger [class]
module.add_class('ParameterLogger', import_from_module='ns.core')
## propagation-cache.h (module 'propagation'): ns3::PropagationCache<ns3::JakesProcess> [class]
module.add_class('PropagationCache', template_parameters=['ns3::JakesProcess'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'), parent=root_module['ns3::ObjectBase'], template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'])
## nstime.h (module 'core'): ns3::Time [class]
module.add_class('Time', import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST', 'AUTO'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
typehandlers.add_type_alias('void ( * ) ( ns3::Time )', 'ns3::Time::TracedCallback')
typehandlers.add_type_alias('void ( * ) ( ns3::Time )*', 'ns3::Time::TracedCallback*')
typehandlers.add_type_alias('void ( * ) ( ns3::Time )&', 'ns3::Time::TracedCallback&')
## nstime.h (module 'core'): ns3::TimeWithUnit [class]
module.add_class('TimeWithUnit', import_from_module='ns.core')
## type-i | d.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::SupportLevel [enumeration]
| module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
typehandlers.add_type_alias('uint32_t', 'ns3::TypeId::hash_t')
typehandlers.add_type_alias('uint32_t*', 'ns3::TypeId::hash_t*')
typehandlers.add_type_alias('uint32_t&', 'ns3::TypeId::hash_t&')
## vector.h (module 'core'): ns3::Vector2D [class]
module.add_class('Vector2D', import_from_module='ns.core')
## vector.h (module 'core'): ns3::Vector3D [class]
module.add_class('Vector3D', import_from_module='ns.core')
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## int64x64-128.h (module 'core'): ns3::int64x64_t [class]
module.add_class('int64x64_t', import_from_module='ns.cor |
gsnbng/erpnext | erpnext/education/doctype/student/student_dashboard.py | Python | agpl-3.0 | 845 | 0.042604 | from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on the attendance of this Student'),
'fieldname': 'student',
'non_standard_fieldnames': {
'Bank Ac | count': 'party'
},
'transactions': [
{
'label': _('Admission'),
'items': ['Program Enrollment', 'Course Enrollment']
},
{
'label': _('Student Activity'),
'items': ['Student Log', 'Student Group', ]
},
{
'label': _('Assessment'),
'items': ['Assessment Result']
},
{
'label': _('Student LMS Activity'),
'items': ['C | ourse Activity', 'Quiz Activity' ]
},
{
'label': _('Attendance'),
'items': ['Student Attendance', 'Student Leave Application']
},
{
'label': _('Fee'),
'items': ['Fees', 'Bank Account']
}
]
}
|
mason-bially/windows-installer | packages/_Ghostview/_Ghostview.py | Python | mit | 169 | 0.017751 | '''
@author: KyleLevien
'''
from ..defaultpackage.package import Package
class _Ghostview(Package):
def __init__(self):
Package. | __init__(self)
| |
facebook/infer | infer/tests/testlock.py | Python | mit | 578 | 0 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import sys
import fcntl
import subprocess
TESTS_DIR = os.path.dirname(os.path.realpath(__file__))
LOCKFILE = os.path.join(TESTS_DIR, 't | estlock.mutex')
args = sys.argv[1:]
with open(LOCKFILE, 'r') as lockfile:
fd = lockfile.fileno()
fcntl.flock(fd, fcntl.LOCK_ | EX)
try:
subprocess.call(args)
finally:
fcntl.flock(fd, fcntl.LOCK_UN)
|
ozturkemre/programming-challanges | 15-CollatzConjecture/CollatzConjecture.py | Python | mit | 409 | 0.03423 | try:
entry=int(input("Enter the number: \n"))
i=entry
x=entry
#biggest number
status=[]
while(i!=1):
#end
status.append(i)
if i%2==0:
i=int(i/2)
| else:
i=3*i+1
if(i>x):
x=i
status.append(i)
print(*status)
print("The biggest number: {}".format(x))
except V | alueError:
print("I said number!!!") |
rabramley/microMedicalSpirometerXmlToCsv | correctIds.py | Python | mit | 3,291 | 0.013066 | import csv, sys, getopt, argparse, os
import dbSettings
COLUMN_LOCAL_ID = 'LocalId'
COLUMN_UHL_SYSTEM_NUMBER = 'Uhl System Number'
COLUMN_NHS_NUMBER = 'NHS Number'
COLUMN_TITLE = 'Title'
COLUMN_LAST_NAME = 'Last Name'
COLUMN_FIRST_NAME = 'First Name'
COLUMN_DOB = 'Date of Birth'
COLUMN_GENDER = 'Gender'
COLUMN_ADDRESS_1 = 'Address 1'
COLUMN_ADDRESS_2 = 'Address 2'
COLUMN_ADDRESS_3 = 'Address 3'
COLUMN_ADDRESS_4 = 'Address 4'
COLUMN_POST_CODE = 'Post Code'
def main():
parser = argparse.ArgumentParser()
parser.add_argument('infile', nargs='?')
parser.add_argument("-o", "--outputfilename", nargs='?', help="Output filename", default="uhlSystemNumbers.csv")
parser.add_argument("-c", "--idColumn", required=True, help="ID Column Na | me")
args = parser.parse_args()
with open(args.outputfilename, 'w') as outputFile:
fieldnames = [
COLUMN_LOCAL_ID,
COLUMN_UHL_SYSTEM_NUMBER,
COLUMN_NHS_NUMBER,
COLUMN_TITLE,
COLUMN_LAST_NAME,
COLUMN_FIRST_NAME,
COLUMN_DOB,
COLUMN_GENDER,
COLUMN_ADDRESS_1,
COLUMN_ADDRESS_2,
| COLUMN_ADDRESS_3,
COLUMN_ADDRESS_4,
COLUMN_POST_CODE]
output = csv.DictWriter(outputFile, fieldnames=fieldnames)
output.writeheader()
with pymssql.connect(dbSettings.DB_SERVER_NAME, dbSettings.DB_USERNAME, dbSettings.DB_PASSWORD, dbSettings.DB_DATABASE) as conn:
with open(args.infile, 'r') as infile:
spamreader = csv.DictReader(infile, delimiter=',', quotechar='"')
for row in spamreader:
if row[args.idColumn]:
details = getUhlSystemNumber(conn, row[args.idColumn])
output.writerow({
COLUMN_LOCAL_ID : row[args.idColumn],
COLUMN_UHL_SYSTEM_NUMBER : '' if details['main_pat_id'] is None else details['main_pat_id'].upper(),
COLUMN_NHS_NUMBER : '' if details['nhs_number'] is None else details['nhs_number'].replace(' ', ''),
COLUMN_TITLE : '' if details['title'] is None else details['title'].title(),
COLUMN_LAST_NAME : '' if details['last_name'] is None else details['last_name'].title(),
COLUMN_FIRST_NAME : '' if details['first_name'] is None else details['first_name'].title(),
COLUMN_DOB : '' if details['dob'] is None else details['dob'].strftime('%Y-%m-%d'),
COLUMN_GENDER : '' if details['gender'] is None else details['gender'],
COLUMN_ADDRESS_1 : '' if details['pat_addr1'] is None else details['pat_addr1'].title(),
COLUMN_ADDRESS_2 : '' if details['pat_addr2'] is None else details['pat_addr2'].title(),
COLUMN_ADDRESS_3 : '' if details['pat_addr3'] is None else details['pat_addr3'].title(),
COLUMN_ADDRESS_4 : '' if details['pat_addr4'] is None else details['pat_addr4'].title(),
COLUMN_POST_CODE : '' if details['postcode'] is None else details['postcode'].upper()
})
def getUhlSystemNumber(pmiConnection, identifier):
if (identifier.strip() == ""):
return None
with pmiConnection.cursor(as_dict=True) as cursor:
cursor.execute('SELECT * FROM [PMIS_LIVE].[dbo].[UHL_PMI_QUERY_BY_ID] (%s)', identifier)
return cursor.fetchone()
if __name__ == "__main__":
main()
|
peragro/peragro-at | src/damn_at/analyzers/__init__.py | Python | bsd-3-clause | 21 | 0 | """
Analy | zers
"""
| |
PeachyPrinter/peachyinstaller | windows/test/run_all_tests.py | Python | apache-2.0 | 423 | 0 | import unittest
import os
import sys
print "Running all | tests"
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src', ))
loader = unittest.TestL | oader()
suite = loader.discover(os.path.dirname(__file__), pattern='test*.py')
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(suite)
problems = len(result.errors) + len(result.failures)
print("\nProblems: %s\n" % problems)
exit(problems)
|
antoinecarme/pyaf | tests/artificial/transf_Anscombe/trend_ConstantTrend/cycle_7/ar_12/test_artificial_32_Anscombe_ConstantTrend_7_12_100.py | Python | bsd-3-clause | 268 | 0.085821 | import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as ar | t
art.process_dataset(N = 32 , FREQ = 'D | ', seed = 0, trendtype = "ConstantTrend", cycle_length = 7, transform = "Anscombe", sigma = 0.0, exog_count = 100, ar_order = 12); |
ManchesterBioinference/BranchedGP | notebooks/Hematopoiesis.py | Python | apache-2.0 | 4,055 | 0.002466 | # ---
# jupyter:
# anaconda-cloud: {}
# jupytext:
# cell_metadata_filter: -all
# formats: ipynb,py:percent
# notebook_metadata_filter: all
# text_representation:
# extension: .py
# format_name: percent
# format_version: '1.3'
# jupytext_version: 1.6.0
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# language_info:
# codemirror_mode:
# name: ipython
# version: 3
# file_extension: .py
# mimetype: text/x-python
# name: python
# nbconvert_exporter: python
# pygments_lexer: ipython3
# | version: 3.8.5
# ---
# %% [markdown]
# Branching GP Regression on hematopoietic data
# --
#
# *Alexis Boukouvalas, 2017*
#
# **Note:** this notebook i | s automatically generated by [Jupytext](https://jupytext.readthedocs.io/en/latest/index.html), see the README for instructions on working with it.
#
# test change
#
# Branching GP regression with Gaussian noise on the hematopoiesis data described in the paper "BGP: Gaussian processes for identifying branching dynamics in single cell data".
#
# This notebook shows how to build a BGP model and plot the posterior model fit and posterior branching times.
# %%
import time
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
import BranchedGP
plt.style.use("ggplot")
# %matplotlib inline
# %% [markdown]
# ### Read the hematopoiesis data. This has been simplified to a small subset of 23 genes found to be branching.
# We have also performed Monocle2 (version 2.1) - DDRTree on this data. The results loaded include the Monocle estimated pseudotime, branching assignment (state) and the DDRTree latent dimensions.
# %%
Y = pd.read_csv("singlecelldata/hematoData.csv", index_col=[0])
monocle = pd.read_csv("singlecelldata/hematoMonocle.csv", index_col=[0])
# %%
Y.head()
# %%
monocle.head()
# %%
# Plot Monocle DDRTree space
genelist = ["FLT3", "KLF1", "MPO"]
f, ax = plt.subplots(1, len(genelist), figsize=(10, 5), sharex=True, sharey=True)
for ig, g in enumerate(genelist):
y = Y[g].values
yt = np.log(1 + y / y.max())
yt = yt / yt.max()
h = ax[ig].scatter(
monocle["DDRTreeDim1"],
monocle["DDRTreeDim2"],
c=yt,
s=50,
alpha=1.0,
vmin=0,
vmax=1,
)
ax[ig].set_title(g)
# %%
def PlotGene(label, X, Y, s=3, alpha=1.0, ax=None):
    """Scatter-plot X against Y, one series per unique value in `label`.

    When `ax` is not supplied a new figure/axes pair is created. Returns
    (figure, axes); the figure is None if `ax` was passed in.
    """
    fig = None
    if ax is None:
        fig, ax = plt.subplots(1, 1, figsize=(5, 5))
    for lab in np.unique(label):
        mask = (label == lab).flatten()
        ax.scatter(X[mask], Y[mask], s=s, alpha=alpha, label=int(np.round(lab)))
    return fig, ax
# %% [markdown]
# ### Fit BGP model
# Notice the cell assignment uncertainty is higher for cells close to the branching point.
#
# %%
def FitGene(g, ns=20):  # for quick results subsample data
    """Fit a branching GP to gene `g` and plot the posterior fit.

    g:  gene name (a column of the module-level expression matrix Y)
    ns: subsampling stride over cells (every ns-th cell is used)

    Returns (d, fig, ax): the fit result from
    BranchedGP.FitBranchingModel.FitModel plus the figure/axes from
    PlotBGPFit. Prints the fit wall time and annotates the figure with
    the log Bayes factor of branching vs. not branching.
    """
    t = time.time()
    Bsearch = list(np.linspace(0.05, 0.95, 5)) + [
        1.1
    ]  # set of candidate branching points; 1.1 lies past the [0, 1]
    # pseudotime range -- presumably the "no branching" candidate (confirm)
    GPy = (Y[g].iloc[::ns].values - Y[g].iloc[::ns].values.mean())[
        :, None
    ]  # remove mean from gene expression data
    GPt = monocle["StretchedPseudotime"].values[::ns]
    globalBranching = monocle["State"].values[::ns].astype(int)
    d = BranchedGP.FitBranchingModel.FitModel(Bsearch, GPt, GPy, globalBranching)
    print(g, "BGP inference completed in %.1f seconds." % (time.time() - t))
    # plot BGP
    fig, ax = BranchedGP.VBHelperFunctions.PlotBGPFit(
        GPy, GPt, Bsearch, d, figsize=(10, 10)
    )
    # overplot data (mean-subtracted expression vs. pseudotime, by state)
    f, a = PlotGene(
        monocle["State"].values,
        monocle["StretchedPseudotime"].values,
        Y[g].values - Y[g].iloc[::ns].values.mean(),
        ax=ax[0],
        s=10,
        alpha=0.5,
    )
    # Calculate Bayes factor of branching vs non-branching
    bf = BranchedGP.VBHelperFunctions.CalculateBranchingEvidence(d)["logBayesFactor"]
    fig.suptitle("%s log Bayes factor of branching %.1f" % (g, bf))
    return d, fig, ax
d, fig, ax = FitGene("MPO")
# %%
d_c, fig_c, ax_c = FitGene("CTSG")
# %%
|
sdpython/pyquickhelper | _unittests/ut_sphinxext/test_downloadlink_extension.py | Python | mit | 3,427 | 0 | """
@brief test log(time=4s)
@author Xavier Dupre
"""
import sys
import os
import unittest
import warnings
import sphinx
from pyquickhelper.pycode import get_temp_folder, ExtTestCase
from pyquickhelper.helpgen import rst2html
from pyquickhelper.sphinxext import process_downloadlink_role
from pyquickhelper.texthelper import compare_module_version
from docutils.parsers.rst.roles import register_canonical_role
class TestDownloadlinkExtension(ExtTestCase):
def test_post_parse_sn(self):
register_canonical_role("downloadlink", process_downloadlink_role)
def get_name(self):
this = os.path.dirname(__file__)
name = "test_rst_builder.py"
dest = os.path.join(this, name)
return dest.replace("\\", "/")
@unittest.skipIf(compare_module_version(sphinx.__version__, '1.8') < 0,
reason="DownloadFiles not available in 1.7")
def test_downloadlink_rst(self):
name = self.get_name()
content = """
:downloadlink:`rst::http://f.html`
:downloadlink:`rst::{0}`
:downloadlink:`{0} <rst::{0}>`
""".replace(" ", "").format(name)
out = rst2html(content,
writer="rst", keep_warnings=True,
directives=None)
out = out.replace("\n", " ")
self.assertNotIn('Unknown interpreted text role', out)
self.assertIn(
':downloadlink:`test_rst_builder.py', out)
self.assertNotIn("test_rst_builder.py>`test_rst_builder.py", out)
temp = get_temp_folder(__file__, "temp_downloadlink_rst")
with open(os.path.join(temp, "out.rst"), "w", encoding="utf8") as f:
f.write(out)
@unittest.skipIf(compare_module_version(sphinx.__version__, '1.8') < 0,
reason="DownloadFiles not available in 1.7")
def test_downlo | adlink_md(self):
name = self.get_name()
content = """
:downloadlink:`gggg <md::{0}>`
""".replace(" | ", "").format(name)
out = rst2html(content,
writer="md", keep_warnings=True,
directives=None)
self.assertIn("test_rst_builder.py", out)
self.assertNotIn('Unknown interpreted text role', out)
temp = get_temp_folder(__file__, "temp_downloadlink_rst")
with open(os.path.join(temp, "out.rst"), "w", encoding="utf8") as f:
f.write(out)
@unittest.skipIf(compare_module_version(sphinx.__version__, '1.8') < 0,
reason="DownloadFiles not available in 1.7")
def test_downloadlink_html(self):
name = self.get_name()
content = """
:downloadlink:`html::{0}`
""".replace(" ", "").format(name)
out = rst2html(content,
writer="html", keep_warnings=True,
directives=None)
self.assertNotIn("Unable to find 'html:test_rst_builder.py'", out)
self.assertNotIn('Unknown interpreted text role', out)
self.assertIn("test_rst_builder.py", out)
temp = get_temp_folder(__file__, "temp_downloadlink_rst")
with open(os.path.join(temp, "out.rst"), "w", encoding="utf8") as f:
f.write(out)
if __name__ == "__main__":
unittest.main()
|
Kloudless/kloudless-python | kloudless/resources.py | Python | mit | 35,586 | 0.000169 | from .util import to_datetime, to_iso
from .http import request
from .exceptions import KloudlessException as KException
from . import config
import inspect
import json
import requests
import six
import warnings
class BaseResource(dict):
# {'key': (serializer, deserializer)}
_serializers = {
'created': (to_iso, to_datetime),
'modified': (to_iso, to_datetime),
'expiration': (to_iso, to_datetime),
'expiry': (to_iso, to_datetime),
'token_expiry': (to_iso, to_datetime),
'refresh_token_expiry': (to_iso, to_datetime),
}
_path_segment = None
_parent_resource_class = None
# requests.Session's connection pool could cause failures due to the lack
# of keep-alives causing the connection to drop unexpectedly.
# Use `requests` to be safe, but alter if better performance is preferable.
_api_session = requests
def __init__(self, id=None, parent_resource=None, configuration=None):
if not configuration:
configuration = {}
self._configuration = config.merge(configuration)
self['id'] = id
# Saved state, as returned by the Kloudless API.
self._previous_data = {}
# Keys that used to be present that no longer are post-save.
# Useful for more helpful error messages.
| self._removed_keys = set()
self._parent_resource = parent_resource
if self._parent_resource_class is not None:
if self._parent_resource is None:
raise KException(
"A %s object or ID must be specif | ied as this "
"%s object's parent." %
(self._parent_resource_class,
self.__class__.__name__))
    def populate(self, data):
        """Replace this resource's contents with `data` from the API.

        data: Response from Kloudless with data on this object.

        Keys that disappear relative to the current state are recorded
        in `_removed_keys` so later lookups can raise a helpful error
        (see __getitem__). Fields with registered serializers are
        deserialized, nested structures become resource objects, and
        the saved state in `_previous_data` is refreshed.
        """
        removed = set(self.keys()) - set(data.keys())
        self._removed_keys |= removed
        # Preserve the ID across the clear() below.
        id = self['id']
        self.clear()
        # Deserialize known fields (e.g. ISO timestamps -> datetime).
        for k, v in data.items():
            if k in self._serializers:
                data[k] = self._serializers[k][1](v)
        # Bypass __setitem__ and convert nested dicts/lists to resources.
        for k, v in six.iteritems(data):
            super(BaseResource, self).__setitem__(
                k, self.__class__.create_from_data(
                    v, parent_resource=self._parent_resource,
                    configuration=self._configuration))
        if 'id' not in self:
            self['id'] = id
        # Update our state.
        self._previous_data = self.serialize(self)
    @classmethod
    def create_from_data(cls, data, parent_resource=None, configuration=None):
        """Recursively convert an API response into resource objects.

        Lists are converted element-wise. Plain dicts become the most
        specific resource class found in the module-level `resources`
        registry (keyed by '<api>_<type>', falling back to '<type>',
        then to `cls`). Anything else is returned unchanged.
        """
        if isinstance(data, list):
            return [cls.create_from_data(
                d, parent_resource=parent_resource,
                configuration=configuration) for d in data]
        elif isinstance(data, dict) and not isinstance(data, BaseResource):
            # Copy so deserialization does not mutate the caller's dict.
            data = data.copy()
            klass = cls
            data_type = None
            if data.get('api') and data.get('type'):
                data_type = data['api'] + '_' + data['type']
            if data_type in resources:
                klass = resources[data_type]
            elif data.get('type') in resources:
                klass = resources[data['type']]
            instance = klass(id=data.get('id'),
                             parent_resource=parent_resource,
                             configuration=configuration)
            instance.populate(data)
            return instance
        else:
            return data
    @classmethod
    def serialize(cls, resource_data):
        """
        Converts values in the BaseResource object into primitive types.
        This helps convert the entire object to JSON.

        resource_data: Either the resource object, or a dict with the data
            to populate the resource.

        Nested resources are serialized recursively; fields with a
        registered serializer (e.g. datetimes) are converted back to
        their wire format via the first element of the serializer pair.
        """
        serialized = {}
        for k, v in six.iteritems(resource_data):
            if isinstance(v, BaseResource):
                serialized[k] = v.serialize(v)
            elif k in cls._serializers:
                serialized[k] = cls._serializers[k][0](v)
            else:
                serialized[k] = v
        return serialized
    @classmethod
    def list_path(cls, parent_resource):
        """Return the API path listing this resource type (abstract)."""
        raise NotImplementedError("Subclasses must implement list_path.")
    def detail_path(self):
        """Return the API path for this specific resource instance.

        Raises KException when the resource has no ID yet.
        """
        if not self['id']:
            raise KException("The detail_path cannot be obtained since the ID "
                             "is unknown.")
        return "%s/%s" % (self.list_path(self._parent_resource), self['id'])
# Getter/Setter methods
    def __setattr__(self, k, v):
        # Private names ('_'-prefixed) and already-existing instance
        # attributes are stored on the object; every other assignment
        # becomes a dict item so resource fields stay in the mapping.
        if k[0] == '_' or k in self.__dict__:
            return super(BaseResource, self).__setattr__(k, v)
        else:
            self[k] = v
    def __getattr__(self, k):
        # Only invoked when normal attribute lookup fails: map public
        # names to dict items, re-raising KeyError as AttributeError.
        if k[0] == '_':
            raise AttributeError(k)
        try:
            return self[k]
        except KeyError as e:
            raise AttributeError(*e.args)
    def __setitem__(self, k, v):
        # Currently identical to dict.__setitem__ (no extra behavior).
        super(BaseResource, self).__setitem__(k, v)
    def __getitem__(self, k):
        try:
            return super(BaseResource, self).__getitem__(k)
        except KeyError:
            # Give a more helpful error when the key existed before a
            # populate() refresh removed it (see _removed_keys).
            if k in self._removed_keys:
                raise KeyError(
                    "%r. The key %s was previously present but no longer is. "
                    "This is due to the object being updated with new "
                    "information returned from the Kloudless API, probably "
                    "due to the object being saved. Here are the current "
                    "attributes of this object: %s" %
                    (k, k, ', '.join(self.keys())))
            else:
                raise
    def __delitem__(self, k):
        # Deletion is deliberately disallowed; callers must assign None
        # to clear a field instead.
        raise TypeError(
            "Items cannot be deleted. Please set them to None instead if you "
            "wish to clear them.")
class AnnotatedList(list):
    """
    Given a deserialized response of all(), the objects returned by the API
    will be made iterable, and the other attributes will become attributes
    of this AnnotatedList object.
    """
    def __init__(self, all_data):
        """
        all_data: either a plain list of objects, or a dict containing a
        list under 'objects'/'permissions'/'properties' plus metadata
        keys (which become attributes of this object).

        Raises KException when a dict response contains no such list.
        """
        if isinstance(all_data, list):
            # Bug fix: __init__ must return None -- the old
            # `return all_data` raised TypeError at instantiation.
            # Initialize the list contents in place instead.
            list.__init__(self, all_data)
            return
        objects = None
        for k, v in six.iteritems(all_data):
            if k in ['objects', 'permissions', 'properties'] and isinstance(v, list):
                objects = v
            else:
                # Metadata (e.g. counts, cursors) becomes attributes.
                setattr(self, k, v)
        if objects is None:
            raise KException("No lists were found!")
        list.__init__(self, objects)
def allow_proxy(func):
    """Decorator: flag `func` as callable through a resource proxy."""
    setattr(func, 'allow_proxy', True)
    return func
class ListMixin(object):
    """Adds a class-level all() for listing resources of this type."""
    @classmethod
    @allow_proxy
    def all(cls, parent_resource=None, configuration=None,
            headers=None, **params):
        """GET the list endpoint and return an AnnotatedList of resources.

        Extra keyword arguments are forwarded as query parameters.
        """
        response = request(cls._api_session.get,
                           cls.list_path(parent_resource),
                           configuration=configuration,
                           headers=headers, params=params)
        data = cls.create_from_data(
            response.json(), parent_resource=parent_resource,
            configuration=configuration)
        return AnnotatedList(data)
class RetrieveMixin(object):
    """Adds retrieve() and refresh() for fetching a single resource."""
    @classmethod
    @allow_proxy
    def retrieve(cls, id, parent_resource=None, configuration=None,
                 headers=None, **params):
        """GET one resource by ID and return it as a populated instance."""
        instance = cls(id=id, parent_resource=parent_resource,
                       configuration=configuration)
        response = request(cls._api_session.get, instance.detail_path(),
                           configuration=configuration,
                           headers=headers, params=params)
        instance.populate(response.json())
        return instance
    def refresh(self, headers=None):
        """
        Retrieves and sets new metadata for the resource.
        """
        response = request(self._api_session.get, self.detail_path(),
                           configuration=self._configuration,
                           headers=headers)
        self.populate(response.json())
class ReadMixin(RetrieveMixin, ListMixin):
    """Read-only access: combines retrieve()/refresh() with all()."""
    pass
|
gavinmcgimpsey/deckofcards | spades/settings.py | Python | mit | 1,928 | 0 | # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
DEBUG = True
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'REPLACE_ME_BEFORE_PRODUCTION'
ALLOWED_HOSTS = ['.spades.com']
AUTH_USER_MODEL = 'deck.User'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'deck',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware. | common.CommonMiddleware',
'django.middleware.cs | rf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'spades.urls'
WSGI_APPLICATION = 'spades.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'database'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'MST'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
MEDIA_ROOT = BASE_DIR+'/media/'
MEDIA_URL = '/media/'
|
TGM-HIT/eqep-api | eqep/shakemap/display/plotstyle.py | Python | mit | 898 | 0 | from abc import *
class PlotStyle(metaclass=ABCMeta):
    """Abstract base for all ShakeMap plot formats/styles.

    A style is a simple decorator, so styles may be nested arbitrarily
    deep; the innermost style is applied first:

    >>> style = Style1(Style2(Style3()))

    .. warning::
        Some styles cancel each other out when nested in the wrong
        order, and some must be applied before any data is plotted.

    Attributes
    ----------
    style : PlotStyle
        the inner decorated style
    """

    @abstractmethod
    def apply(self, plot):
        """Apply this style's formatting to the given plot.

        Parameters
        ----------
        plot : Figure
            the plot to apply the styling to
        """
        pass
|
Bluscream/Discord-Selfbot | cogs/utils/checks.py | Python | gpl-3.0 | 7,337 | 0.002181 | import | json
import time
import git
import discord
import os
import aiohttp
from cogs.utils.dataIO import dataIO
from urllib.parse import quote as uriquote
try:
from lxml import etree
except ImportError:
from bs4 import BeautifulSoup
from urllib.parse import parse_qs, quote_plus
#from cogs.utils import common
# @common.deprecation_warn()
def load_config():
    """Read and return the main bot configuration dict."""
    with open('settings/config.json', 'r') as config_file:
        return json.loads(config_file.read())
# @common.deprecation_warn()
def load_opt | ional_config():
with open('settings/optional_config.json', 'r') as f:
return json.load(f)
# @common.deprecation_warn()
def load_moderation():
    """Read and return the moderation settings dict."""
    with open('settings/moderation.json', 'r') as settings_file:
        return json.loads(settings_file.read())
# @common.deprecation_warn()
def load_notify_config():
    """Read and return the notification settings dict."""
    with open('settings/notify.json', 'r') as settings_file:
        return json.loads(settings_file.read())
# @common.deprecation_warn()
def load_log_config():
    """Read and return the logging settings dict."""
    with open('settings/log.json', 'r') as settings_file:
        return json.loads(settings_file.read())
def has_passed(oldtime):
    """20-second rate-limit helper.

    Returns False while still inside the 20s window after `oldtime`,
    otherwise the current timestamp.
    """
    within_window = time.time() - 20.0 < oldtime
    return False if within_window else time.time()
def set_status(bot):
    """Translate the configured default_status string to a discord.Status.

    Unknown values fall through to invisible.
    """
    if bot.default_status == 'idle':
        return discord.Status.idle
    if bot.default_status == 'dnd':
        return discord.Status.dnd
    return discord.Status.invisible
def user_post(key_users, user):
    """Per-user rate limiter for keyword notifications.

    key_users maps user -> [last_post_time, cooldown_seconds] (presumably;
    inferred from usage -- confirm against callers).
    Returns (allowed, [timestamp, cooldown]); when allowed, the fresh
    timestamp is also persisted to settings/log.json.
    """
    if time.time() - float(key_users[user][0]) < float(key_users[user][1]):
        # Still inside the cooldown window for this user.
        return False, [time.time(), key_users[user][1]]
    else:
        log = dataIO.load_json("settings/log.json")
        now = time.time()
        log["keyusers"][user] = [now, key_users[user][1]]
        dataIO.save_json("settings/log.json", log)
        return True, [now, key_users[user][1]]
def gc_clear(gc_time):
    """Hourly throttle helper.

    Returns a fresh timestamp once an hour has elapsed since `gc_time`,
    otherwise False.
    """
    one_hour_ago = time.time() - 3600.0
    return False if one_hour_ago < gc_time else time.time()
def game_time_check(oldtime, interval):
    """Return a new timestamp if `interval` seconds have elapsed since
    `oldtime`, otherwise False."""
    elapsed_enough = time.time() - float(interval) >= oldtime
    return time.time() if elapsed_enough else False
def avatar_time_check(oldtime, interval):
    """Return a fresh timestamp once `interval` seconds have passed
    since `oldtime`, else False."""
    if time.time() - float(interval) >= oldtime:
        return time.time()
    return False
def update_bot(message):
    """Compare the local git checkout against origin and summarize updates.

    Returns False when the remote reports 'up to date'/'fast-forward'
    and `message` is truthy; otherwise returns a discord.Embed listing
    up to 4 recent upstream commits (when `message` is False the count
    is fixed at 4).
    """
    g = git.cmd.Git(working_dir=os.getcwd())
    branch = g.execute(["git", "rev-parse", "--abbrev-ref", "HEAD"])
    g.execute(["git", "fetch", "origin", branch])
    update = g.execute(["git", "remote", "show", "origin"])
    if ('up to date' in update or 'fast-forward' in update) and message:
        return False
    else:
        if message is False:
            version = 4
        else:
            # Number of upstream commits the local branch is behind.
            version = g.execute(["git", "rev-list", "--right-only", "--count", "{0}...origin/{0}".format(branch)])
        version = description = str(int(version))
        if int(version) > 4:
            # Cap the embed at 4 commits.
            version = "4"
        commits = g.execute(["git", "rev-list", "--max-count={0}".format(version), "origin/{0}".format(branch)])
        commits = commits.split('\n')
        em = discord.Embed(color=0x24292E, title='Latest changes for the selfbot:', description='{0} release(s) behind.'.format(description))
        for i in range(int(version)):
            i = i - 1  # Change i to i -1 to let the formatters below work
            title = g.execute(["git", "log", "--format=%ar", "-n", "1", commits[i]])
            field = g.execute(["git", "log", "--pretty=oneline", "--abbrev-commit", "--shortstat", commits[i], "^{0}".format(commits[i + 1])])
            field = field[8:].strip()
            link = 'https://github.com/appu1232/Discord-Selfbot/commit/%s' % commits[i]
            em.add_field(name=title, value='{0}\n[Code changes]({1})'.format(field, link), inline=False)
        em.set_thumbnail(url='https://image.flaticon.com/icons/png/512/25/25231.png')
        em.set_footer(text='Full project: https://github.com/appu1232/Discord-Selfbot')
        return em
def cmd_prefix_len():
    """Length of the configured command prefix string."""
    return len(load_config()['cmd_prefix'])
def embed_perms(message):
    """Whether the message author may post embeds in its channel.

    Defaults to True when permissions cannot be determined (e.g. the
    author object has no permissions_in, as in DMs).
    """
    try:
        return message.author.permissions_in(message.channel).embed_links
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed while keeping the permissive default.
        return True
def get_user(message, user):
    """Resolve `user` to a guild member.

    Resolution order: first mention in the message, then exact
    name/nickname lookup, then numeric ID lookup. Returns None when no
    member matches.
    """
    try:
        member = message.mentions[0]
    except IndexError:
        # Was a bare `except:`; only an empty mentions list is expected.
        member = message.guild.get_member_named(user)
    if not member:
        try:
            member = message.guild.get_member(int(user))
        except ValueError:
            # `user` is not a numeric ID.
            pass
    return member or None
def find_channel(channel_list, text):
    """Find a channel in `channel_list` by ID, mention ('<#id>'), or name.

    Returns the matching channel or None.
    """
    if text.isdigit():
        return discord.utils.get(channel_list, id=int(text))
    if text.startswith("<#") and text.endswith(">"):
        # Bug fix: channel ids are ints; the stripped mention was
        # previously compared as a string and could never match.
        stripped = text.replace("<", "").replace(">", "").replace("#", "")
        return discord.utils.get(channel_list, id=int(stripped))
    return discord.utils.get(channel_list, name=text)
async def get_google_entries(query):
    """Scrape Google web search results for `query`.

    Returns (entries, root): the extracted result URLs and the parsed
    HTML root. On a non-200 response, falls back to the Google Custom
    Search API and returns (None, first_result_link) instead.
    """
    url = 'https://www.google.com/search?q={}'.format(uriquote(query))
    params = {
        'safe': 'off',
        'lr': 'lang_en',
        'h1': 'en'  # NOTE(review): Google's language param is 'hl'; 'h1' looks like a typo -- confirm
    }
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Win64; x64)'
    }
    entries = []
    async with aiohttp.ClientSession() as session:
        async with session.get(url, params=params, headers=headers) as resp:
            if resp.status != 200:
                # Scrape blocked or failed: use the Custom Search API keys
                # from the optional config instead.
                config = load_optional_config()
                async with session.get("https://www.googleapis.com/customsearch/v1?q=" + quote_plus(query) + "&start=" + '1' + "&key=" + config['google_api_key'] + "&cx=" + config['custom_search_engine']) as resp:
                    result = json.loads(await resp.text())
                    return None, result['items'][0]['link']
            try:
                # lxml path (see the import fallback at module top).
                root = etree.fromstring(await resp.text(), etree.HTMLParser())
                search_nodes = root.findall(".//div[@class='g']")
                for node in search_nodes:
                    url_node = node.find('.//h3/a')
                    if url_node is None:
                        continue
                    url = url_node.attrib['href']
                    if not url.startswith('/url?'):
                        continue
                    # Result links look like /url?q=<target>&...; pull out q.
                    url = parse_qs(url[5:])['q'][0]
                    entries.append(url)
            except NameError:
                # etree was not importable at module load: parse with
                # BeautifulSoup instead.
                root = BeautifulSoup(await resp.text(), 'html.parser')
                for result in root.find_all("div", class_='g'):
                    url_node = result.find('h3')
                    if url_node:
                        for link in url_node.find_all('a', href=True):
                            url = link['href']
                            if not url.startswith('/url?'):
                                continue
                            url = parse_qs(url[5:])['q'][0]
                            entries.append(url)
    return entries, root
def attach_perms(message):
    """Whether the message author may attach files in its channel."""
    channel_perms = message.author.permissions_in(message.channel)
    return channel_perms.attach_files
def parse_prefix(bot, text):
    """Substitute '[c]' -> command prefix and '[b]' -> bot prefix in `text`."""
    cmd = bot.cmd_prefix
    if type(cmd) is list:
        # Multiple prefixes configured: use the first one.
        cmd = cmd[0]
    return text.replace("[c]", cmd).replace("[b]", bot.bot_prefix)
|
esthermm/enco | enco_sale_order_ext/models/sale_order.py | Python | gpl-3.0 | 492 | 0.016327 | # -*- coding: utf-8 -*-
# (c) Manuel Guil
# © 2016 Esther Martín - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import models, fields, api
class SaleOrder(models.Model):
_inherit = "sale.order"
period_ack = fields.Char(string='ACK Period', size=6, required=True)
#@api.one
| def action_button_confirm(self, cr, uid, ids, context=None):
return super(SaleOrder, self).action_button_confirm(cr, uid, ids, c | ontext)
|
YoshikawaMasashi/magenta | magenta/pipelines/lead_sheet_pipelines.py | Python | apache-2.0 | 2,760 | 0.003623 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Data processing pipelines for lead sheets."""
# internal imports
import tensorflow as tf
from magenta.music import chord_symbols_lib
from magenta.music import events_lib
from magenta.music import lead_sheets_lib
from magenta.pipelines import pipeline
from magenta.pipelines import statistics
from magenta.protobuf import music_pb2
class LeadSheet | Extractor(pipeline.Pipeline):
"""Extracts lead sheet fragments from a quantized NoteSequence."""
def _ | _init__(self, min_bars=7, max_steps=512, min_unique_pitches=5,
gap_bars=1.0, ignore_polyphonic_notes=False, filter_drums=True,
require_chords=True, all_transpositions=True, name=None):
super(LeadSheetExtractor, self).__init__(
input_type=music_pb2.NoteSequence,
output_type=lead_sheets_lib.LeadSheet,
name=name)
self._min_bars = min_bars
self._max_steps = max_steps
self._min_unique_pitches = min_unique_pitches
self._gap_bars = gap_bars
self._ignore_polyphonic_notes = ignore_polyphonic_notes
self._filter_drums = filter_drums
self._require_chords = require_chords
self._all_transpositions = all_transpositions
def transform(self, quantized_sequence):
try:
lead_sheets, stats = lead_sheets_lib.extract_lead_sheet_fragments(
quantized_sequence,
min_bars=self._min_bars,
max_steps_truncate=self._max_steps,
min_unique_pitches=self._min_unique_pitches,
gap_bars=self._gap_bars,
ignore_polyphonic_notes=self._ignore_polyphonic_notes,
filter_drums=self._filter_drums,
require_chords=self._require_chords,
all_transpositions=self._all_transpositions)
except events_lib.NonIntegerStepsPerBarException as detail:
tf.logging.warning('Skipped sequence: %s', detail)
lead_sheets = []
stats = [statistics.Counter('non_integer_steps_per_bar', 1)]
except chord_symbols_lib.ChordSymbolException as detail:
tf.logging.warning('Skipped sequence: %s', detail)
lead_sheets = []
stats = [statistics.Counter('chord_symbol_exception', 1)]
self._set_stats(stats)
return lead_sheets
|
pstjohn/cobrapy | cobra/core/Solution.py | Python | gpl-2.0 | 1,232 | 0 | class Solution(object):
"""Stores the solution from optimizing a cobra.Model. This is
used to provide a single interface to results from different
solvers that store their values in different w | ays.
f: The objective value
solver: A string indicating which solver package was used.
x: List or Array of the values from the primal.
x_dict: A dictionary of reaction ids that maps to the primal values.
y: List or Array of the values from the dual.
y_dict: A dictionary of reaction ids that maps to the dual values.
"""
def __init__(self, f, x=None,
x_dict=None, y=None, y_dic | t=None,
solver=None, the_time=0, status='NA'):
self.solver = solver
self.f = f
self.x = x
self.x_dict = x_dict
self.status = status
self.y = y
self.y_dict = y_dict
    def dress_results(self, model):
        """.. warning :: deprecated

        Kept only for backward compatibility; emits a DeprecationWarning-style
        message and does nothing else.
        """
        from warnings import warn
        warn("unnecessary to call this deprecated function")
    def __repr__(self):
        # Show the solver status when there is no objective value,
        # otherwise the objective value itself.
        if self.f is None:
            return "<Solution '%s' at 0x%x>" % (self.status, id(self))
        return "<Solution %.2f at 0x%x>" % (self.f, id(self))
|
raccoongang/edx-platform | lms/djangoapps/ccx/tests/test_field_override_performance.py | Python | agpl-3.0 | 10,116 | 0.002274 | # coding=UTF-8
"""
Performance tests for field overrides.
"""
import itertools
from datetime import datetime
import ddt
import mock
from ccx_keys.locator import CCXLocator
from courseware.field_overrides import OverrideFieldData
from courseware.testutils import FieldOverrideTestMixin
from courseware.views.views import progress
from django.conf import settings
from django.core.cache import caches
from django.test.client import RequestFactory
from django.test.utils import override_settings
from lms.djangoapps.ccx.tests.factories import CcxFactory
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
from opaque_keys.edx.keys import CourseKey
from openedx.core.djangoapps.content.block_structure.api import get_course_in_cache
from openedx.core.djangoapps.waffle_utils.testutils import WAFFLE_TABLES
from pytz import UTC
from request_cache.middleware import RequestCache
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
from xblock.core import XBlock
from xmodule.modulestore.tests.django_utils import (
TEST_DATA_MONGO_MODULESTORE,
TEST_DATA_SPLIT_MODULESTORE,
ModuleStoreTestCase
)
from xmodule.modulestore.tests.factories import CourseFactory, check_mongo_calls, check_sum_of_calls
from xmodule.modulestore.tests.utils import ProceduralCourseTestMixin
QUERY_COUNT_TABLE_BLACKLIST = WAFFLE_TABLES
@attr(shard=3)
@mock.patch.dict(
'django.conf.settings.FEATURES',
{
'ENABLE_XBLOCK_VIEW_ENDPOINT': True,
}
)
@ddt.ddt
cl | ass FieldOverridePerformanceTestCase(FieldOverrideTestMixin, ProceduralCourseTestMixin, ModuleStoreTestCase):
"""
Base class for instrumenting SQL queries and Mongo reads for field override
providers.
"""
__test__ = False
# Tell Django to clean out all databases, not | just default
multi_db = True
# TEST_DATA must be overridden by subclasses
TEST_DATA = None
def setUp(self):
"""
Create a test client, course, and user.
"""
super(FieldOverridePerformanceTestCase, self).setUp()
self.request_factory = RequestFactory()
self.student = UserFactory.create()
self.request = self.request_factory.get("foo")
self.request.session = {}
self.request.user = self.student
patcher = mock.patch('edxmako.request_context.get_current_request', return_value=self.request)
patcher.start()
self.addCleanup(patcher.stop)
self.course = None
self.ccx = None
def setup_course(self, size, enable_ccx, view_as_ccx):
"""
Build a gradable course where each node has `size` children.
"""
grading_policy = {
"GRADER": [
{
"drop_count": 2,
"min_count": 12,
"short_label": "HW",
"type": "Homework",
"weight": 0.15
},
{
"drop_count": 2,
"min_count": 12,
"type": "Lab",
"weight": 0.15
},
{
"drop_count": 0,
"min_count": 1,
"short_label": "Midterm",
"type": "Midterm Exam",
"weight": 0.3
},
{
"drop_count": 0,
"min_count": 1,
"short_label": "Final",
"type": "Final Exam",
"weight": 0.4
}
],
"GRADE_CUTOFFS": {
"Pass": 0.5
}
}
self.course = CourseFactory.create(
graded=True,
start=datetime.now(UTC),
grading_policy=grading_policy,
enable_ccx=enable_ccx,
)
self.populate_course(size)
course_key = self.course.id
if enable_ccx:
self.ccx = CcxFactory.create(course_id=self.course.id)
if view_as_ccx:
course_key = CCXLocator.from_course_locator(self.course.id, self.ccx.id)
CourseEnrollment.enroll(
self.student,
course_key
)
return CourseKey.from_string(unicode(course_key))
def grade_course(self, course_key):
"""
Renders the progress page for the given course.
"""
return progress(
self.request,
course_id=unicode(course_key),
student_id=self.student.id
)
def assertMongoCallCount(self, calls):
"""
Assert that mongodb is queried ``calls`` times in the surrounded
context.
"""
return check_mongo_calls(calls)
def assertXBlockInstantiations(self, instantiations):
"""
Assert that exactly ``instantiations`` XBlocks are instantiated in
the surrounded context.
"""
return check_sum_of_calls(XBlock, ['__init__'], instantiations, instantiations, include_arguments=False)
def instrument_course_progress_render(
self, course_width, enable_ccx, view_as_ccx,
sql_queries, mongo_reads,
):
"""
Renders the progress page, instrumenting Mongo reads and SQL queries.
"""
course_key = self.setup_course(course_width, enable_ccx, view_as_ccx)
# Switch to published-only mode to simulate the LMS
with self.settings(MODULESTORE_BRANCH='published-only'):
# Clear all caches before measuring
for cache in settings.CACHES:
caches[cache].clear()
# Refill the metadata inheritance cache
get_course_in_cache(course_key)
# We clear the request cache to simulate a new request in the LMS.
RequestCache.clear_request_cache()
# Reset the list of provider classes, so that our django settings changes
# can actually take affect.
OverrideFieldData.provider_classes = None
with self.assertNumQueries(sql_queries, using='default', table_blacklist=QUERY_COUNT_TABLE_BLACKLIST):
with self.assertNumQueries(0, using='student_module_history'):
with self.assertMongoCallCount(mongo_reads):
with self.assertXBlockInstantiations(1):
self.grade_course(course_key)
@ddt.data(*itertools.product(('no_overrides', 'ccx'), range(1, 4), (True, False), (True, False)))
@ddt.unpack
@override_settings(
XBLOCK_FIELD_DATA_WRAPPERS=[],
MODULESTORE_FIELD_OVERRIDE_PROVIDERS=[],
ENABLE_ENTERPRISE_INTEGRATION=False,
)
def test_field_overrides(self, overrides, course_width, enable_ccx, view_as_ccx):
"""
Test without any field overrides.
"""
providers = {
'no_overrides': (),
'ccx': ('ccx.overrides.CustomCoursesForEdxOverrideProvider',)
}
if overrides == 'no_overrides' and view_as_ccx:
raise SkipTest("Can't view a ccx course if field overrides are disabled.")
if not enable_ccx and view_as_ccx:
raise SkipTest("Can't view a ccx course if ccx is disabled on the course")
if self.MODULESTORE == TEST_DATA_MONGO_MODULESTORE and view_as_ccx:
raise SkipTest("Can't use a MongoModulestore test as a CCX course")
with self.settings(
XBLOCK_FIELD_DATA_WRAPPERS=['lms.djangoapps.courseware.field_overrides:OverrideModulestoreFieldData.wrap'],
MODULESTORE_FIELD_OVERRIDE_PROVIDERS=providers[overrides],
):
sql_queries, mongo_reads = self.TEST_DATA[
(overrides, course_width, enable_ccx, view_as_ccx)
]
self.instrument_course_progress_render(
course_width, enable_ccx, view_as_ccx, sql_queries, mongo_reads,
)
class TestFieldOverrideMongoPerformance(FieldOverridePerformanceTestCase):
"""
Test cases for instrumenting field overrides against the Mongo modulestore.
"""
MODULESTORE = TEST_DATA_MONGO_MODULEST |
jameswatt2008/jameswatt2008.github.io | python/Python基础/截图和代码/函数-下/11-递归.py | Python | gpl-2.0 | 419 | 0.019093 | #4! = 4*3*2*1
#5! = 5*4*3*2*1
'''
i = 1
result = 1
while i<=4:
result = result * i
i+=1
print(result)
'''
#5! => 5*4!
#4! => 4*3!
'''
def xxx(num):
num * xxxx(num-1)
def xx(num):
num * xxx(num-1)
def getNums(num):
num * xx(num-1)
getNums(4)
'''
def getNums(num):
    """Return num! (factorial), computed recursively.

    The base case returns 1 so that getNums(0) == 1 (the original
    returned ``num`` here, which wrongly gave 0 for an input of 0).
    """
    if num > 1:
        return num * getNums(num - 1)
    return 1

result = getNums(4)
print(result)
|
mit-ll/CATAN | catan-services_1.0/catan/db/__init__.py | Python | bsd-3-clause | 37,288 | 0.00775 | """
This file contains all of CATAN database operations
(c) 2015 Massachusetts Institute of Technology
"""
import os
import logging
logger = logging.getLogger(__name__)
import sqlite3
import inspect
import json
import time
import re
import base64
import struct
import socket
import SocketServer
import multiprocessing
from multiprocessing import Process, Lock
# CATAN
from catan.data import NodeMessage
from catan.comms import TxClient, TxServer
import catan.globals as G
import catan.metrics
db_mutex = Lock()
class DBSchema:
    """Base class for CATAN database tables.

    Subclasses declare their columns as class attributes (``None`` until
    populated) plus a ``_SQL_CREATE`` statement; this base class builds
    parameterized INSERT statements from those attributes.
    """

    def _get_sql_insert(self, **kargs):
        """Build a parameterized INSERT statement for this table.

        Combines the explicit column/value pairs in ``kargs`` with every
        non-private, non-None instance attribute.

        Returns:
            tuple: (sql_string, value_list) for parameterized execution.
        """
        keys = []
        values = []
        value_list = []
        for k, v in kargs.items():
            # Escape single quotes in column names by doubling them.
            k = str(k).replace("'", "''")
            keys.append(k)
            values.append("?")
            value_list.append(v)
        for k, v in self.__dict__.items():
            # Private/bookkeeping attributes are not columns.
            if k.startswith("_"):
                continue
            # Skip None values (unset columns).
            if v is None:
                continue
            # Escape single quotes by doubling, matching the kargs loop
            # above.  (The original replaced "'" with "'", a no-op, so
            # attribute-derived column names were never escaped.)
            k = str(k).replace("'", "''")
            keys.append(k)
            values.append("?")
            value_list.append(v)
        keys = "(" + ",".join(keys) + ")"
        values = "(" + ",".join(values) + ")"
        # By convention the table name is the subclass name.
        return ("INSERT INTO %s %s VALUES %s" % (
            self.__class__.__name__,
            keys,
            values),
            value_list)

    def sql_insert(self, person_id, submission_id, origin_node_id):
        """Default INSERT for person-keyed tables."""
        return self._get_sql_insert(person_id=person_id,
                                    submission_id=submission_id,
                                    origin_node_id=origin_node_id)

    def get_fields(self):
        """
        Return just our list of fields (non-private, non-callable names).
        """
        fields = []
        for k in dir(self):
            if k.startswith("_") or callable(getattr(self, k)):
                continue
            fields.append(k)
        return fields

    def has_entries(self):
        """
        Return True if any of our fields are populated, False otherwise.
        """
        fields = self.get_fields()
        for f in fields:
            if getattr(self, f) is not None:
                return True
        return False
class db_node_info(DBSchema):
    """Per-node GPS position and visited status."""
    # Columns; None means "not provided" and is skipped on INSERT.
    node_id = None
    gps_latitude = None
    gps_longitude = None
    gps_altitude = None
    gps_sat_count = None
    visited = None

    def sql_insert(self, node_id, visited, timestamp):
        """Build an INSERT for this node's status.

        Uses the caller-supplied ``timestamp``.  (The original ignored the
        parameter and always stored ``time.time()`` instead.)
        """
        return self._get_sql_insert(node_id=node_id,
                                    visited=visited,
                                    timestamp=timestamp)

    _SQL_CREATE = '''CREATE TABLE db_node_info
    (node_id int, gps_latitude real, gps_longitude real, gps_altitude real, gps_sat_count int, visited int, timestamp real)'''
class db_catan_identity(DBSchema):
    """Maps a person id to an identity submission."""

    def sql_insert(self, person_id, submission_id, timestamp=None):
        """Build an INSERT for an identity record.

        ``timestamp`` defaults to the time of the call.  (The original
        used ``timestamp=time.time()`` as the default, which Python
        evaluates once at import time, so every defaulted row shared the
        module load time.)
        """
        if timestamp is None:
            timestamp = time.time()
        return self._get_sql_insert(person_id=person_id,
                                    submission_id=submission_id,
                                    timestamp=timestamp)

    _SQL_CREATE = '''CREATE TABLE db_catan_identity
    (person_id int, submission_id int, timestamp real)'''
class db_submitter_info(DBSchema):
    """Details about who submitted a record and from where."""
    # Columns; None means "not provided" and is skipped on INSERT.
    submitter_id = None
    imsi = None
    cookie = None
    gps_latitude = None
    gps_longitude = None
    gps_accuracy = None

    def sql_insert(self, submission_id, origin_node_id, origin_person_id, timestamp=None):
        """Build an INSERT for a submission record.

        ``timestamp`` defaults to the time of the call.  (The original
        used ``timestamp=time.time()`` as the default, evaluated only once
        at import time.)  ``origin_person_id`` is stored in the
        ``submitter_id`` column.
        """
        if timestamp is None:
            timestamp = time.time()
        return self._get_sql_insert(submission_id=submission_id,
                                    origin_node_id=origin_node_id,
                                    submitter_id=origin_person_id,
                                    timestamp=timestamp)

    _SQL_CREATE = '''CREATE TABLE db_submitter_info
    (submission_id int, origin_node_id int,
    submitter_id int,
    imsi int,
    cookie text,
    gps_latitude real,
    gps_longitude real,
    gps_accuracy real,
    timestamp real)'''
class db_person_bio(DBSchema):
    """Biographical details for a person record."""
    # Columns; None means "not provided" and is skipped on INSERT.
    name_family = None
    name_given = None
    age = None
    sex = None
    _SQL_CREATE = '''CREATE TABLE db_person_bio
    (person_id int, origin_node_id int, submission_id int,
    name_family text,
    name_given text,
    age text,
    sex text)'''
class db_person_description(DBSchema):
    """Free-text description of a person."""
    # Column; None means "not provided" and is skipped on INSERT.
    person_description = None
    _SQL_CREATE = '''CREATE TABLE db_person_description
    (person_id int, origin_node_id int, submission_id int,
    person_description text)'''
class db_person_contact(DBSchema):
    """Contact and address details for a person record."""
    # Columns; None means "not provided" and is skipped on INSERT.
    street = None
    neighborhood = None
    city = None
    state = None  # the original declared this attribute twice; once suffices
    zip = None
    country = None
    phone = None
    email = None
    _SQL_CREATE = '''CREATE TABLE db_person_contact
    (person_id int, origin_node_id int, submission_id int,
    street text,
    neighborhood text,
    city text,
    zip text,
    state text,
    country text,
    phone text,
    email text)'''
class db_pictures(DBSchema):
    """Raw picture blobs keyed by picture id and origin node."""
    # Column holding the binary image data.
    picture_data = None

    def sql_insert(self, picture_id, origin_node_id):
        """Build an INSERT for a picture row.

        The blob stays a ``?`` placeholder so the caller binds the binary
        data separately.  NOTE(review): the two ids are interpolated
        directly into the SQL string, which is only safe while they are
        integers -- prefer binding them as parameters too.
        """
        keys = "(picture_id, origin_node_id, picture_data)"
        values = "('%s','%s',?)" % (picture_id, origin_node_id)
        return "INSERT INTO %s %s VALUES %s" % (self.__class__.__name__,
                                                keys, values)

    _SQL_CREATE = '''CREATE TABLE db_pictures
    (picture_id int, origin_node_id int,
    picture_data blob)'''
class db_person_pictures(DBSchema):
    """Associates a person/submission with a stored picture."""
    # Column; None means "not provided" and is skipped on INSERT.
    picture_description = None
    def sql_insert(self, person_id, submission_id, origin_node_id, picture_id):
        """Build an INSERT linking a person record to a picture id."""
        return self._get_sql_insert(person_id=person_id,
                                    submission_id=submission_id,
                                    origin_node_id=origin_node_id,
                                    picture_id=picture_id)
    # NOTE(review): submission_id and picture_id are declared without a
    # column type here, unlike the other tables -- SQLite permits this,
    # but confirm it is intentional.
    _SQL_CREATE = '''CREATE TABLE db_person_pictures
    (person_id int, origin_node_id int, submission_id,
    picture_id,
    picture_description text)'''
class db_person_links(DBSchema):
    """Relationship between two person records."""
    # Columns; None means "not provided" and is skipped on INSERT.
    person_id2 = None
    relationship = None
    _SQL_CREATE = '''CREATE TABLE db_person_links
    (person_id int, origin_node_id int, submission_id int,
    person_id2 int,
    relationship text)'''
class db_person_messages(DBSchema):
    """Status/message posted for a person, with optional location."""
    # Columns; None means "not provided" and is skipped on INSERT.
    person_message = None
    status = None
    status_location = None
    status_gps_latitude = None
    status_gps_longitude = None
    status_gps_accuracy = None
    _SQL_CREATE = '''CREATE TABLE db_person_messages
    (person_id int, origin_node_id int, submission_id int,
    status text,
    status_location text,
    status_gps_latitude real,
    status_gps_longitude real,
    status_gps_accuracy real,
    person_message text)'''
class db_services(DBSchema):
    """Service advertised by a person/submission."""
    # Columns; None means "not provided" and is skipped on INSERT.
    service_type = None
    service_subtype = None
    def sql_insert(self, person_id, submission_id, origin_node_id, service_id):
        """Build an INSERT for a service record keyed by service_id."""
        return self._get_sql_insert(person_id=person_id,
                                    submission_id=submission_id,
                                    origin_node_id=origin_node_id,
                                    service_id=service_id)
    _SQL_CREATE = '''CREATE TABLE db_services
    (person_id int,
    origin_node_id int,
    submission_id int,
    service_id int,
    service_type int,
    service_subtype int)'''
class db_service_status(DBSchema):
service_status = None
service_comments = None
def |
airbnb/airflow | airflow/secrets/__init__.py | Python | apache-2.0 | 1,267 | 0.000789 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file | except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the Li | cense is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Secrets framework provides means of getting connection objects from various sources, e.g. the following:
* Environment variables
* Metastore database
* AWS SSM Parameter store
"""
__all__ = ['BaseSecretsBackend', 'DEFAULT_SECRETS_SEARCH_PATH']
from airflow.secrets.base_secrets import BaseSecretsBackend
# Dotted import paths of the built-in secrets backends; environment
# variables are listed before the metastore.
DEFAULT_SECRETS_SEARCH_PATH = [
    "airflow.secrets.environment_variables.EnvironmentVariablesBackend",
    "airflow.secrets.metastore.MetastoreBackend",
]
|
james-d-mitchell/libsemigroups-python-bindings | semigroups/semiring.py | Python | gpl-3.0 | 28,988 | 0.000276 | '''
This module contains classes for semirings.
'''
# pylint: disable = no-member, protected-access, invalid-name
# pylint: disable = too-few-public-methods
class SemiringABC:
    r"""Abstract base class for semirings.

    A *semiring* is a set :math:`R` with two binary operations :math:`+`
    and :math:`\times` such that :math:`(R, +)` is a commutative monoid
    with identity 0, :math:`(R\backslash\{0\}, \times)` is a monoid with
    identity 1, multiplication distributes over addition on both sides,
    and multiplication by 0 annihilates :math:`R` (for all
    :math:`a \in R`, :math:`a \cdot 0 = 0 \cdot a = 0`).

    Concrete semirings subclass this and share the infinity bounds set
    up here.

    Returns:
        None

    Raises:
        TypeError: If any argument is given.
    """

    def __init__(self):
        # Cache the unbounded elements used by the tropical semirings.
        self._plus_infinity = float('inf')
        self._minus_infinity = -self._plus_infinity
class Integers(SemiringABC):
    """The ring of integers under ordinary addition and multiplication.

    Returns:
        None

    Raises:
        TypeError: If any argument is given.

    Examples:
        >>> from semigroups import Integers
        >>> Integers().plus(3, 4)
        7
        >>> Integers().prod(3, 4)
        12
    """

    @staticmethod
    def plus(x, y):
        """Return the integer sum ``x + y``, the additive operation.

        Args:
            x (int): One of the integers to be added.
            y (int): The other of the integers to be added.

        Returns:
            int: x + y

        Raises:
            TypeError: If x and y are not both ints.

        Examples:
            >>> from semigroups import Integers
            >>> Integers().plus(2, -5)
            -3
        """
        if not isinstance(x, int) or not isinstance(y, int):
            raise TypeError
        return x + y

    @staticmethod
    def prod(x, y):
        """Return the integer product ``x * y``, the multiplicative
        operation.

        Args:
            x (int): One of the integers to be multiplied.
            y (int): The other of the integers to be multiplied.

        Returns:
            int: x * y

        Raises:
            TypeError: If x and y are not both ints.

        Examples:
            >>> from semigroups import Integers
            >>> Integers().prod(-13, 2)
            -26
        """
        if not isinstance(x, int) or not isinstance(y, int):
            raise TypeError
        return x * y

    @staticmethod
    def zero():
        """Return 0, the additive identity of the integers.

        Examples:
            >>> from semigroups import Integers
            >>> Integers().zero()
            0
        """
        return 0

    @staticmethod
    def one():
        """Return 1, the multiplicative identity of the integers.

        Examples:
            >>> from semigroups import Integers
            >>> Integers().one()
            1
        """
        return 1
class MaxPlusSemiring(SemiringABC):
    r"""The tropical max-plus semiring.

    Comprises :math:`\mathbb{Z}\cup\{-\infty\}`, with the maximum of two
    elements as the additive operation and integer addition as the
    multiplicative operation.  Minus infinity is smaller than every
    integer, and adding it to any element yields minus infinity.

    Returns:
        None

    Raises:
        TypeError: If any argument is given.

    Examples:
        >>> from semigroups import MaxPlusSemiring
        >>> MaxPlusSemiring().plus(-float('inf'), -20)
        -20
        >>> MaxPlusSemiring().prod(-float('inf'), -20)
        -inf
    """

    @staticmethod
    def _is_element(value):
        # Valid elements are integers together with minus infinity.
        return isinstance(value, int) or value == -float('inf')

    @staticmethod
    def plus(x, y):
        """Return ``max(x, y)``, the additive operation of the semiring.

        Args:
            x (int or float): One of the elements to be added.
            y (int or float): The other of the elements to be added.

        Returns:
            int or float: The maximum of x and y.

        Raises:
            TypeError: If x and y are not both ints or minus infinity.

        Examples:
            >>> from semigroups import MaxPlusSemiring
            >>> MaxPlusSemiring().plus(7, -20)
            7
        """
        if not (MaxPlusSemiring._is_element(x)
                and MaxPlusSemiring._is_element(y)):
            raise TypeError
        return max(x, y)

    @staticmethod
    def prod(x, y):
        """Return ``x + y``, the multiplicative operation of the semiring.

        If either input is minus infinity the result is minus infinity.

        Args:
            x (int or float): One of the elements to be multiplied.
            y (int or float): The other of the elements to be multiplied.

        Returns:
            int or float: x + y

        Raises:
            TypeError: If x and y are not both ints or minus infinity.

        Examples:
            >>> from semigroups import MaxPlusSemiring
            >>> MaxPlusSemiring().prod(7, -20)
            -13
        """
        if not (MaxPlusSemiring._is_element(x)
                and MaxPlusSemiring._is_element(y)):
            raise TypeError
        return x + y

    @staticmethod
    def zero():
        """Return minus infinity, the additive identity of the semiring.

        Examples:
            >>> from semigroups import MaxPlusSemiring
            >>> MaxPlusSemiring().zero()
            -inf
        """
        return -float('inf')

    @staticmethod
    def one():
        """Return 0, the multiplicative identity of the semiring.

        Examples:
            >>> from semigroups import MaxPlusSemiring
            >>> MaxPlusSemiring().one()
            0
        """
        return 0
class MinPlusSemiring(SemiringABC):
r'''
The *min plus semiring* is a semiring comprising the set
:math:`\mathbb{Z}\cup\{\infty\}`, together with an operation which
returns the maximum of two elements, as the additive operation and addition
as the multiplicative operation.
*Plus infinity* is a defined as greater than all integers, and the integer
sum of plus infinity and any element of the max plus semiring is plus
infinity.
Returns:
None
Raises:
| TypeError: If any argument is given.
Examples:
>>> from semigroups import MinPlusSemiring
>>> MinPlusSemiring().plus(3, float('inf'))
3
>>> MinPlusSemiring().prod(3, float('inf'))
inf
'''
@staticmethod
def plus(x, y):
'''
| A function to find the minimum of two elements of the min plus
semiring, since this is the additive operation of the min plus
semiring.
Args:
x (int or float): One of the elements to be added.
y (int or float): The other of the elements to be added.
Returns:
int float: The minimum of x and y.
Rais |
AllMyChanges/allmychanges.com | allmychanges/management/commands/dev_extract_version.py | Python | bsd-2-clause | 846 | 0.00237 | # coding: utf-8
from optparse import make_option
from django.core.management.base import BaseCommand
from twiggy_goodies.django import LogMixin
from allmychanges.vcs_extractor import choose_version_extractor
class Command(LogMixin, BaseCommand):
    # Django management command (Python 2 era: optparse option_list and a
    # print statement in handle()).
    help = u"""Command to test VCS log extractors' second step — version extraction."""
    option_list = BaseCommand.option_list + (
        make_option('--pdb',
                    action='store_true',
                    dest='pdb',
                    default=False,
                    help='Stop before extraction'),
    )
    def handle(self, *args, **options):
        # Default to the current directory when no path argument is given.
        path = args[0] if args else '.'
        get_version = choose_version_extractor(path)
        if options.get('pdb'):
            # Deliberate debugging hook, only reached when --pdb was passed.
            import pdb; pdb.set_trace()  # DEBUG
        print get_version(path, use_threads=False)
|
GoogleCloudPlatform/cloud-foundation-toolkit | dm/tests/unit/test_deployment.py | Python | apache-2.0 | 2,633 | 0.00076 | from six import PY2
from apitools.base.py.exceptions import HttpNotFoundError
import jinja2
import pytest
from ruamel.yaml import YAML
from cloud_foundation_toolkit.deployment import Config
from cloud_foundation_toolkit.deployment import ConfigGraph
from cloud_foundation_toolkit.deployment import Deployment
if PY2:
import mock
else:
import unittest.mock as mock
class Message():
    """Lightweight stand-in for a Deployment Manager API message: every
    keyword argument becomes an attribute of the instance."""

    def __init__(self, **kwargs):
        # Plain loop instead of the original list comprehension, which
        # built and discarded a throwaway list purely for side effects.
        for key, value in kwargs.items():
            setattr(self, key, value)
@pytest.fixture
def args():
    # NOTE(review): ``Args`` is not defined or imported anywhere in this
    # module, so using this fixture raises NameError.  Confirm where Args
    # should come from (it may have been removed in a refactor).
    return Args()
def test_config(configs):
    # The rendered config should match the pre-rendered jinja fixture.
    c = Config(configs.files['my-networks.yaml'].path)
    assert c.as_string == configs.files['my-networks.yaml'].jinja
def test_config_list(configs):
    # Building a graph from all fixture configs should yield levels
    # (lists) containing Config objects.
    config_paths = [v.path for k, v in configs.files.items()]
    config_list = ConfigGraph(config_paths)
    for level in config_list:
        assert isinstance(level, list)
        for c in level:
            assert isinstance(c, Config)
def test_deployment_object(configs):
    # A Deployment wraps a Config and exposes the config's name.
    config = Config(configs.files['my-networks.yaml'].path)
    deployment = Deployment(config)
    assert deployment.config['name'] == 'my-networks'
def test_deployment_get(configs):
    config = Config(configs.files['my-networks.yaml'].path)
    deployment = Deployment(config)
    # Stub the DM API Get call so get() receives a fake message.
    with mock.patch.object(deployment.client.deployments, 'Get') as m:
        m.return_value = Message(
            name='my-networks',
            fingerprint='abcdefgh'
        )
        d = deployment.get()
        assert d is not None
        # get() should also record the result as the current deployment.
        assert deployment.current == d
def test_deployment_get_doesnt_exist(configs):
    config = Config(configs.files['my-networks.yaml'].path)
    deployment = Deployment(config)
    # When the lookup helper reports no deployment, get() returns None.
    with mock.patch('cloud_foundation_toolkit.deployment.get_deployment') as m:
        m.return_value = None
        d = deployment.get()
        assert d is None
        assert deployment.current == d
def test_deployment_create(configs):
    config = Config(configs.files['my-networks.yaml'].path)
    # Patch every collaborator so create() runs without the real API.
    patches = {
        'client': mock.DEFAULT,
        'wait': mock.DEFAULT,
        'get': mock.DEFAULT,
        'print_resources_and_outputs': mock.DEFAULT
    }
    with mock.patch.multiple(Deployment, **patches) as mocks:
        deployment = Deployment(config)
        mocks['client'].deployments.Insert.return_value = Message(
            name='my-network-prod',
            fingerprint='abcdefgh'
        )
        mocks['client'].deployments.Get.return_value = Message(
            name='my-network-prod',
            fingerprint='abcdefgh'
        )
        d = deployment.create()
        # create() should record the new deployment as current.
        assert deployment.current == d
tylertian/Openstack | openstack F/nova/nova/virt/vmwareapi/io_util.py | Python | apache-2.0 | 5,907 | 0.000508 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utility classes for defining the time saving transfer of data from the reader
to the writer using a LightQueue as a Pipe between the reader and the writer.
"""
from eventlet import event
from eventlet import greenthread
from eventlet import queue

from nova import exception
from nova.openstack.common import log as logging

LOG = logging.getLogger(__name__)

# Poll intervals (seconds) for the I/O copy loop and Glance status checks.
IO_THREAD_SLEEP_TIME = .01
GLANCE_POLL_INTERVAL = 5
class ThreadSafePipe(queue.LightQueue):
    """The pipe to hold the data which the reader writes to and the writer
    reads from."""
    def __init__(self, maxsize, transfer_size):
        queue.LightQueue.__init__(self, maxsize)
        # Total number of bytes expected through the pipe.
        self.transfer_size = transfer_size
        # Number of bytes handed out to the writer so far.
        self.transferred = 0
    def read(self, chunk_size):
        """Read data from the pipe.

        ``chunk_size`` is ignored: the chunks written to the pipe by the
        reader are already the size the writer asks for.
        """
        if self.transferred < self.transfer_size:
            data_item = self.get()
            self.transferred += len(data_item)
            return data_item
        else:
            # Transfer complete: signal EOF with an empty string.
            return ""
    def write(self, data):
        """Put a data item in the pipe."""
        self.put(data)
    def close(self):
        """A place-holder to maintain consistency."""
        pass
class GlanceWriteThread(object):
    """Ensures that image data is written to in the glance client and that
    it is in correct ('active') state."""
    def __init__(self, context, input, image_service, image_id,
                 image_meta=None):
        if not image_meta:
            image_meta = {}
        self.context = context
        self.input = input
        self.image_service = image_service
        self.image_id = image_id
        self.image_meta = image_meta
        self._running = False
    def start(self):
        # Spawn a greenthread that uploads the data and then polls the
        # image status; callers wait on the returned event.
        self.done = event.Event()
        def _inner():
            """Function to do the image data transfer through an update
            and thereon checks if the state is 'active'."""
            self.image_service.update(self.context,
                                      self.image_id,
                                      self.image_meta,
                                      data=self.input)
            self._running = True
            while self._running:
                try:
                    image_meta = self.image_service.show(self.context,
                                                         self.image_id)
                    image_status = image_meta.get("status")
                    if image_status == "active":
                        self.stop()
                        self.done.send(True)
                    # If the state is killed, then raise an exception.
                    elif image_status == "killed":
                        self.stop()
                        msg = (_("Glance image %s is in killed state") %
                               self.image_id)
                        LOG.error(msg)
                        self.done.send_exception(exception.NovaException(msg))
                    elif image_status in ["saving", "queued"]:
                        # Still uploading/queued: poll again later.
                        greenthread.sleep(GLANCE_POLL_INTERVAL)
                    else:
                        # Any other status is unexpected; fail the wait.
                        self.stop()
                        msg = _("Glance image "
                                "%(image_id)s is in unknown state "
                                "- %(state)s") % {
                                "image_id": self.image_id,
                                "state": image_status}
                        LOG.error(msg)
                        self.done.send_exception(exception.NovaException(msg))
                except Exception, exc:
                    self.stop()
                    self.done.send_exception(exc)
        greenthread.spawn(_inner)
        return self.done
    def stop(self):
        self._running = False
    def wait(self):
        return self.done.wait()
    def close(self):
        pass
class IOThread(object):
    """Class that reads chunks from the input file and writes them to the
    output file till the transfer is completely done."""
    def __init__(self, input, output):
        self.input = input
        self.output = output
        self._running = False
        self.got_exception = False
    def start(self):
        # Spawn a greenthread that pumps data until EOF; callers wait on
        # the returned event.
        self.done = event.Event()
        def _inner():
            """Read data from the input and write the same to the output
            until the transfer completes."""
            self._running = True
            while self._running:
                try:
                    data = self.input.read(None)
                    if not data:
                        self.stop()
                        self.done.send(True)
                    # NOTE(review): after stop() above, this iteration
                    # still writes the empty chunk -- confirm the output
                    # tolerates a trailing empty write.
                    self.output.write(data)
                    greenthread.sleep(IO_THREAD_SLEEP_TIME)
                except Exception, exc:
                    self.stop()
                    LOG.exception(exc)
                    self.done.send_exception(exc)
        greenthread.spawn(_inner)
        return self.done
    def stop(self):
        self._running = False
    def wait(self):
        return self.done.wait()
|
syed/PerfKitBenchmarker | perfkitbenchmarker/openstack/utils.py | Python | apache-2.0 | 6,884 | 0.001453 | # Copyright 2015 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import os
from perfkitbenchmarker import flags
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.vm_util import POLL_INTERVAL
FLAGS = flags.FLAGS
flags.DEFINE_string('openstack_auth_url',
os.environ.get('OS_AUTH_URL', 'http://localhost:5000'),
('Url for Keystone authentication service, defaults to '
'$OS_AUTH_URL. Required for discovery of other OpenStack '
'service URLs.'))
flags.DEFINE_string('openstack_username',
os.getenv('OS_USERNAME', 'admin'),
'OpenStack login username, defaults to $OS_USERNAME.')
flags.DEFINE_string('openstack_tenant',
os.getenv('OS_TENANT_NAME', 'admin'),
'OpenStack tenant name, defaults to $OS_TENANT_NAME.')
flags.DEFINE_string('openstack_password_file',
os.getenv('OPENSTACK_PASSWORD_FILE',
'~/.config/openstack-password.txt'),
'Path to file containing the openstack password, '
'defaults to $OPENSTACK_PASSWORD_FILE. Alternatively, '
'setting the password itself in $OS_PASSWORD is also '
'supported.')
flags.DEFINE_string('openstack_nova_endpoint_type',
os.getenv('NOVA_ENDPOINT_TYPE', 'publicURL'),
'OpenStack Nova endpoint type, '
'defaults to $NOVA_ENDPOINT_TYPE.')
class KeystoneAuth(object):
    """
    Usage example:
    auth = KeystoneAuth(auth_url, auth_tenant, auth_user, auth_password)
    token = auth.get_token()
    tenant_id = auth.get_tenant_id()
    token and tenant_id are required to use all OpenStack python clients
    """
    def __init__(self, url, tenant, user, password):
        self.__url = url
        self.__tenant = tenant
        self.__user = user
        self.__password = password
        self.__connection = None
        # NOTE(review): __session is never assigned anywhere in this
        # class, so get_token() below always passes None -- confirm this
        # is acceptable to the keystoneclient API.
        self.__session = None
    def GetConnection(self):
        # Authenticate lazily on first use and cache the connection.
        if self.__connection is None:
            self.__authenticate()
        return self.__connection
    def __authenticate(self):
        import keystoneclient.v2_0.client as ksclient
        self.__connection = ksclient.Client(
            auth_url=self.__url,
            username=self.__user,
            password=self.__password,
            tenant=self.__tenant)
        self.__connection.authenticate()
    def get_token(self):
        return self.GetConnection().get_token(self.__session)
    def get_tenant_id(self):
        # Fetch a raw token purely to extract the tenant id from it.
        raw_token = self.GetConnection().get_raw_token_from_identity_service(
            auth_url=self.__url,
            username=self.__user,
            password=self.__password,
            tenant_name=self.__tenant
        )
        return raw_token['token']['tenant']['id']
class NovaClient(object):
    # Thin wrapper around novaclient: attribute access falls through to
    # the wrapped client when not found on this object.
    def __getattribute__(self, item):
        try:
            return super(NovaClient, self).__getattribute__(item)
        except AttributeError:
            # Delegate unknown attributes to the underlying novaclient.
            return self.__client.__getattribute__(item)
    def GetPassword(self):
        # For compatibility with Nova CLI, use 'OS'-prefixed environment value
        # if present. Also support reading the password from a file.
        error_msg = ('No OpenStack password specified. '
                     'Either set the environment variable OS_PASSWORD to the '
                     'admin password, or provide the name of a file '
                     'containing the password using the OPENSTACK_PASSWORD_FILE '
                     'environment variable or --openstack_password_file flag.')
        password = os.getenv('OS_PASSWORD')
        if password is not None:
            return password
        try:
            with open(os.path.expanduser(FLAGS.openstack_password_file)) as pwfile:
                password = pwfile.readline().rstrip()
                return password
        except IOError as e:
            raise Exception(error_msg + ' ' + str(e))
        # NOTE(review): unreachable -- the try block above always returns
        # or raises before reaching this statement.
        raise Exception(error_msg)
    def __init__(self):
        from novaclient import client as noclient
        self.url = FLAGS.openstack_auth_url
        self.user = FLAGS.openstack_username
        self.tenant = FLAGS.openstack_tenant
        self.endpoint_type = FLAGS.openstack_nova_endpoint_type
        self.password = self.GetPassword()
        self.__auth = KeystoneAuth(self.url, self.tenant,
                                   self.user, self.password)
        self.__client = noclient.Client('2',
                                        auth_url=self.url,
                                        username=self.user,
                                        auth_token=self.__auth.get_token(),
                                        tenant_id=self.__auth.get_tenant_id(),
                                        endpoint_type=self.endpoint_type,
                                        )
    def reconnect(self):
        # Re-authenticate and rebuild the wrapped client (e.g. after an
        # Unauthorized error).  Mirrors the construction in __init__.
        from novaclient import client as noclient
        self.__auth = KeystoneAuth(self.url, self.tenant, self.user,
                                   self.password)
        self.__client = noclient.Client('2',
                                        auth_url=self.url,
                                        username=self.user,
                                        auth_token=self.__auth.get_token(),
                                        tenant_id=self.__auth.get_tenant_id(),
                                        endpoint_type=self.endpoint_type,
                                        )
class AuthException(Exception):
    """Wrapper for NovaClient auth exceptions."""
    # Raised by retry_authorization() so its Retry decorator can
    # distinguish auth failures (which trigger a reconnect) from others.
    pass
def retry_authorization(max_retries=1, poll_interval=POLL_INTERVAL):
    # Decorator factory: retries the wrapped call when novaclient raises
    # Unauthorized, reconnecting the shared client between attempts.
    def decored(function):
        @vm_util.Retry(max_retries=max_retries,
                       poll_interval=poll_interval,
                       retryable_exceptions=AuthException,
                       log_errors=False)
        @functools.wraps(function)
        def decor(*args, **kwargs):
            from novaclient.exceptions import Unauthorized
            try:
                return function(*args, **kwargs)
            except Unauthorized as e:
                # NOTE(review): NovaClient.instance is not assigned in
                # this module -- presumably set up elsewhere; confirm
                # before relying on this retry path.
                NovaClient.instance.reconnect()
                raise AuthException(str(e))
        return decor
    return decored
|
buffer/thug | tests/functional/test_jquery.py | Python | gpl-2.0 | 8,746 | 0.003659 | import os
import logging
from thug.ThugAPI.ThugAPI import ThugAPI
log = logging.getLogger("Thug")
class TestJQuerySamples(object):
cwd_path = os.path.dirname(os.path.realpath(__file__))
jquery_path = os.path.join(cwd_path, os.pardir, "samples/jQuery")
def do_perform_test(self, caplog, sample, expected):
thug = ThugAPI()
thug.set_useragent('win7ie90')
thug.set_events('click,storage')
thug.disable_cert_logging()
thug.set_file_logging()
thug.set_json_logging()
thug.set_features_logging()
thug.set_ssl_verify()
thug.get_ssl_verify()
thug.log_init(sample)
thug.run_local(sample)
records = [r.message for r in caplog.records]
matches = 0
for e in expected:
for record in records:
if e in record:
matches += 1
assert matches >= len(expected)
    # Each test below loads one local jQuery sample through Thug and
    # checks that the expected strings show up in the captured logs.
    def test_jquery_1(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-1.html")
        expected = ["[Window] Alert Text: Ready"]
        self.do_perform_test(caplog, sample, expected)
    def test_jquery_2(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-2.html")
        expected = ['<a class="foobar" href="http://www.google.com" id="myId">jQuery</a>']
        self.do_perform_test(caplog, sample, expected)
    def test_jquery_3(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-3.html")
        expected = ['<div class="notMe">',
                    '<div class="myClass" foo="bar">div class="myClass"</div>',
                    '<span class="myClass" foo="bar">span class="myClass"</span>']
        self.do_perform_test(caplog, sample, expected)
    def test_jquery_4(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-4.html")
        expected = ['<div foo="bar" id="notMe" name="whoa">Aieeee</div>']
        self.do_perform_test(caplog, sample, expected)
    def test_jquery_5(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-5.html")
        expected = ['<div class="myClass" foo="bar" name="whoa">Aieeee</div>']
        self.do_perform_test(caplog, sample, expected)
    def test_jquery_6(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-6.html")
        expected = ['<div class="myClass"><p>Just a modified p</p></div>',
                    '<div class="myClass"><foo>Just a foo</foo></div>']
        self.do_perform_test(caplog, sample, expected)
    def test_jquery_7(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-7.html")
        expected = ["<h3>New text for the third h3</h3>"]
        self.do_perform_test(caplog, sample, expected)
    def test_jquery_8(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-8.html")
        expected = ["<h3>New text for the first h1</h3>",
                    "<h3>New text for the third h3</h3>"]
        self.do_perform_test(caplog, sample, expected)
    def test_jquery_9(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-9.html")
        expected = ['<p>Yet another p</p><div class="container1">',
                    '<div class="inner1">Hello<p>Just a p</p></div>',
                    '<div class="inner2">Goodbye<p>Just another p</p></div>']
        self.do_perform_test(caplog, sample, expected)
    def test_jquery_10(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-10.html")
        expected = ["<ul><li>list item</li></ul>"]
        self.do_perform_test(caplog, sample, expected)
    def test_jquery_11(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-11.html")
        expected = ['<div id="target"><td>Hello World</td></div>']
        self.do_perform_test(caplog, sample, expected)
    def test_jquery_12(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-12.html")
        expected = ["[Window] Alert Text: 2",
                    "[Window] Alert Text: Foo"]
        self.do_perform_test(caplog, sample, expected)
    def test_jquery_14(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-14.html")
        expected = ["[Window] Alert Text: 1",
                    "[Window] Alert Text: child"]
        self.do_perform_test(caplog, sample, expected)
    def test_jquery_15(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-15.html")
        expected = ["[Window] Alert Text: parent"]
        self.do_perform_test(caplog, sample, expected)
    def test_jquery_16(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-16.html")
        expected = ["[Window] Alert Text: child",
                    "[Window] Alert Text: parent",
                    "[Window] Alert Text: grandparent"]
        self.do_perform_test(caplog, sample, expected)
    # The "disabled_" prefix keeps the next two tests out of pytest
    # collection (pytest only collects names starting with "test").
    def disabled_test_jquery_17(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-17.html")
        expected = ["[Window] Alert Text: child",
                    "[Window] Alert Text: parent"]
        self.do_perform_test(caplog, sample, expected)
    def disabled_test_jquery_18(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-18.html")
        expected = ["[Window] Alert Text: child"]
        self.do_perform_test(caplog, sample, expected)
    def test_jquery_19(self, caplog):
        sample = os.path.join(self.jquery_path, "test-jquery-19.html")
        expected = ["[Window] Alert Text: child"]
        self.do_perform_test(caplog, sample, expected)
def test_jquery_20(self, caplog):
sample = os.path.join(self.jquery_path, "test-jquery-20.html")
expected = ["[Window] Alert Text: parent",
"[Window] Alert Text: surrogateParent1",
"[Window] Alert Text: surrogateParent2"]
self.do_perform_test(caplog, sample, expected)
def test_jquery_21(self, caplog):
sample = os.path.join(self.jquery_path, "test-jquery-21.html")
expected = ["[Window] Alert Text: child",
"[Window] Alert Text: parent",
"[Window] Alert Text: surrogateParent1",
"[Window] Alert Text: surrogateParent2"]
self.do_perform_test(caplog, sample, expected)
def test_jquery_22(self, caplog):
sample = os.path.join(self.jquery_path, "test-jquery-22.html")
expected = ["[Window] Alert Text: surrogateParent1"]
self.do_perform_test(caplog, sample, expected)
def test_jquery_24(self, caplog):
sample = os.path.join(self.jquery_path, "test-jquery-24.html")
expected = ["[Window] Alert Text: surrogateParent1",
"[Window] Alert Text: surrogateParent2"]
self.do_perform_test(caplog, sample, expected)
def test_jquery_25(self, caplog):
sample = os.path.join(self.jquery_path, "test-jquery-25.html")
expected = ["[Window] Alert Text: surrogateParent1"]
self.do_perform_test(caplog, sample, expected)
def test_jquery_26(self, caplog):
sample = os.path.join(self.jquery_path, "test-jquery-26.html")
expected = ["[Window] Alert Text: surrogateParent2"]
self.do_perform_test(caplog, sample, expected)
def test_jquery_27(self, caplog):
sample = os.path.join(self.jquery_path, "test-jquery-27.html")
expected = ["[Window] Alert Text: parent",
"[Window] Alert Text: surrogateParent1"]
self.do_perform_test(caplog, sample, expected)
def test_jquery_28(self, caplog):
sample = os.path.join(self.jquery_path, "test-jquery-28.html")
expected = ["[Window] Alert Text: surrogateParent1"]
self.do_perform_test(caplog, sample, expected)
def test_jquery_29(self, caplog):
sample = os.path.join(self.jquery_path, "test-jquery-29.html")
expected = ["[Window] Alert Text: parent"]
self.do_perform_test(caplog, sample, expected)
def test_jquery_32(self, ca |
Python3WebSpider/ProxyPool | proxypool/crawlers/public/xicidaili.py | Python | mit | 969 | 0 | from pyquery import PyQuery as pq
from proxypool.schemas.proxy import Proxy
from proxypool.crawlers.base import BaseCrawler
from loguru import logger
BASE_URL = 'https://www.xicidaili.com/'
class XicidailiCrawler(BaseCrawler):
"""
xididaili crawler, https://www.xicidaili.com/
"""
urls = [BASE_URL]
| ignore = True
def parse(self, html):
"""
parse html file to get proxies
:return:
"""
doc = pq(html)
items = doc('#ip_list tr:contains(高匿)').items()
for item in items:
country = item.find('td.country').text()
if not country or country.strip() != '高匿':
| continue
host = item.find('td:nth-child(2)').text()
port = int(item.find('td:nth-child(3)').text())
yield Proxy(host=host, port=port)
if __name__ == '__main__':
crawler = XicidailiCrawler()
for proxy in crawler.crawl():
print(proxy)
|
Affirm/suds-jurko | tests/test_wsse.py | Python | lgpl-3.0 | 1,605 | 0.000623 | # -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# L | icense, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more detai | ls at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jurko Gospodnetić ( jurko.gospodnetic@pke.hr )
"""
Implemented using the 'pytest' testing framework.
"""
if __name__ == "__main__":
import __init__
__init__.runUsingPyTest(globals())
from suds.wsse import UsernameToken
class TestUsernameToken:
username_token = None
def setup(self):
self.username_token = UsernameToken(
username=b"foouser",
password=b"barpasswd",
)
def test_setnonce_null(self):
self.setup()
self.username_token.setnonce()
assert self.username_token.nonce != None
def test_setnonce_text(self):
self.setup()
self.username_token.setnonce(b"affirm")
assert self.username_token.nonce == b"affirm"
|
nyrocron/tracking-server | tracker/migrations/0001_initial.py | Python | mit | 2,159 | 0.00139 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='TrackedPosition',
fields=[
('id', models.AutoField(serialize=False, auto_created=True, verbose_name='ID', primary_key=True)),
('time', models.DateTimeField()),
('latitude', models.FloatField()),
('longitude', models.FloatField()),
('altitude', models.FloatField()),
('accuracy', models.FloatField()),
],
options={
'ordering': ['id'],
},
bases=(models.Model,),
),
migrations.CreateModel(
name='TrackingKey',
fields=[
| ('key', models.CharField(max_length=32, primary_key=True, serialize=False)),
('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='TrackingSession',
fields=[
('id', models.AutoField(serialize=False, auto_created=True, verbose_name='ID', primary_key=True)),
('start_t | ime', models.DateTimeField()),
('end_time', models.DateTimeField(null=True, blank=True)),
('active', models.BooleanField(default=True)),
('viewkey', models.CharField(max_length=32)),
('is_cleaned', models.BooleanField(default=False)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='trackedposition',
name='session',
field=models.ForeignKey(to='tracker.TrackingSession'),
preserve_default=True,
),
]
|
chadgates/locmaster | mylocation/migrations/0005_auto_20160216_1419.py | Python | bsd-3-clause | 1,700 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-02-16 14:19
from __future__ import unicod | e_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mylocation', '0004_delete_portterminal'),
]
operations = [
migrations.RenameField(
model_name='worldborder',
old_name='geom',
new_name='mpoly',
),
migrations.AlterField(
model_name='worldborder',
name='fips',
field=models.CharField(ma | x_length=2, verbose_name='FIPS Code'),
),
migrations.AlterField(
model_name='worldborder',
name='iso2',
field=models.CharField(max_length=2, verbose_name='2 Digit ISO'),
),
migrations.AlterField(
model_name='worldborder',
name='iso3',
field=models.CharField(max_length=3, verbose_name='3 Digit ISO'),
),
migrations.AlterField(
model_name='worldborder',
name='pop2005',
field=models.IntegerField(verbose_name='Population 2005'),
),
migrations.AlterField(
model_name='worldborder',
name='region',
field=models.IntegerField(verbose_name='Region Code'),
),
migrations.AlterField(
model_name='worldborder',
name='subregion',
field=models.IntegerField(verbose_name='Sub-Region Code'),
),
migrations.AlterField(
model_name='worldborder',
name='un',
field=models.IntegerField(verbose_name='United Nations Code'),
),
]
|
fcaneto/can_i_beat_redis | gen_test.py | Python | mit | 506 | 0.005929 | import time
import random
import uuid
def timefunc(f):
def f_timer(*args, **kwargs):
start = time.time()
result = f(*args, **kwargs | )
end = time.time()
return result
return f_timer
with open("write.test", 'w') as sample:
for _ in range(10000):
sample.write("%s %s\n" % (random.randint(1000000, 9999999), random.randint(1, 99)))
# redis_server = redis.StrictRedis(host='localhost', port=6379, db=0)
# pipe = redis_server.pipe | line(transaction=False)
|
pjdelport/django | django/core/management/sql.py | Python | bsd-3-clause | 7,970 | 0.003764 | from __future__ import unicode_literals
import codecs
import os
import re
from django.conf import settings
from django.core.management.base import CommandError
from django.db import models
from django.db.models import get_models
def sql_create(app, style, connection):
"Returns a list of the CREATE TABLE SQL statements for the given app."
if connection.settings_dict['ENGINE'] == 'django.db.backends.dummy':
# This must be the "dummy" database backend, which means the user
# hasn't set ENGINE for the database.
raise CommandError("Django doesn't know which syntax to use for your SQL statements,\n" +
"because you haven't specified the ENGINE setting for the database.\n" +
"Edit your settings file and change DATBASES['default']['ENGINE'] to something like\n" +
"'django.db.backends.postgresql' or 'django.db.backends.mysql'.")
# Get installed models, so we generate REFERENCES right.
# We trim models from the current app so that the sqlreset command does not
# generate invalid SQL (leaving models out of known_models is harmless, so
# we can be conservative).
app_models = models.get_models(app, include_auto_created=True)
final_output = []
tables = connection.introspection.table_names()
known_models = set([model for model in connection.introspection.installed_models(tables) if model not in app_models])
pending_references = {}
for model in app_models:
output, references = connection.creation.sql_create_model(model, style, known_models)
final_output.extend(output)
for refto, refs in references.items():
pending_references.setdefault(refto, []).extend(refs)
if refto in known_models:
final_output.extend(connection.creation.sql_for_pending_references(refto, style, pending_references))
final_output.extend(connection.creation.sql_for_pending_references(model, style, pending_references))
# Keep track of the fact that we've created the table for this model.
known_models.add(model)
# Handle references to tables that are from other apps
# but don't exist physically.
not_installed_models = set(pending_references.keys())
if not_installed_models:
alter_sql = []
for model in not_installed_models:
alter_sql.extend(['-- ' + sql for sql in
connection.creation.sql_for_pending_references(model, style, pending_references)])
if alter_sql:
final_output.append('-- The following references should be added but depend on non-existent tables:')
final_output.extend(alter_sql)
return final_output
def sql_delete(app, style, connection):
"Returns a list of the DROP TABLE SQL statements for the given app."
# This should work even if a connection isn't available
try:
cursor = connection.cursor()
except:
cursor = None
# Figure out which tables already exist
if cursor:
table_names = connection.introspection.table_names(cursor)
else:
table_names = []
output = []
# Output DROP TABLE statements for standard application tables.
to_delete = set()
references_to_delete = {}
app_models = models.get_models(app, include_auto_created=True)
for model in app_models:
if cursor and connection.introspection.table_name_converter(model._meta.db_table) in table_names:
# The table exists, so it needs to be dropped
opts = model._meta
for f in opts.local_fields:
if f.rel and f.rel.to not in to_delete:
references_to_delete.setdefault(f.rel.to, []).append((model, f))
to_delete.add(model)
for model in app_models:
if connection.introspection.table_name_converter(model._meta.db_table) in table_names:
output.extend(connection.creation.sql_destroy_model(model, references_to_delete, style))
# Close database connection explicitly, in case this output is being piped
# directly into a database client, to avoid locking issues.
if cursor:
cursor.close()
connection.close()
return output[::-1] # Reverse it, to deal with table dependencies.
def sql_flush(style, connection, only_django=False, reset_sequences=True):
"""
Returns a list of the SQL statements used to flush the database.
If only_django is True, then only table names that hav | e associated Django
models and are in INSTALLED_APPS will be included.
"""
if only_django:
tables = connection.introspection.django_table_names(only_existing=True) |
else:
tables = connection.introspection.table_names()
seqs = connection.introspection.sequence_list() if reset_sequences else ()
statements = connection.ops.sql_flush(style, tables, seqs)
return statements
def sql_custom(app, style, connection):
"Returns a list of the custom table modifying SQL statements for the given app."
output = []
app_models = get_models(app)
for model in app_models:
output.extend(custom_sql_for_model(model, style, connection))
return output
def sql_indexes(app, style, connection):
"Returns a list of the CREATE INDEX SQL statements for all models in the given app."
output = []
for model in models.get_models(app):
output.extend(connection.creation.sql_indexes_for_model(model, style))
return output
def sql_all(app, style, connection):
"Returns a list of CREATE TABLE SQL, initial-data inserts, and CREATE INDEX SQL for the given module."
return sql_create(app, style, connection) + sql_custom(app, style, connection) + sql_indexes(app, style, connection)
def _split_statements(content):
comment_re = re.compile(r"^((?:'[^']*'|[^'])*?)--.*$")
statements = []
statement = ""
for line in content.split("\n"):
cleaned_line = comment_re.sub(r"\1", line).strip()
if not cleaned_line:
continue
statement += cleaned_line
if statement.endswith(";"):
statements.append(statement)
statement = ""
return statements
def custom_sql_for_model(model, style, connection):
opts = model._meta
app_dir = os.path.normpath(os.path.join(os.path.dirname(models.get_app(model._meta.app_label).__file__), 'sql'))
output = []
# Post-creation SQL should come before any initial SQL data is loaded.
# However, this should not be done for models that are unmanaged or
# for fields that are part of a parent model (via model inheritance).
if opts.managed:
post_sql_fields = [f for f in opts.local_fields if hasattr(f, 'post_create_sql')]
for f in post_sql_fields:
output.extend(f.post_create_sql(style, model._meta.db_table))
# Find custom SQL, if it's available.
backend_name = connection.settings_dict['ENGINE'].split('.')[-1]
sql_files = [os.path.join(app_dir, "%s.%s.sql" % (opts.object_name.lower(), backend_name)),
os.path.join(app_dir, "%s.sql" % opts.object_name.lower())]
for sql_file in sql_files:
if os.path.exists(sql_file):
with codecs.open(sql_file, 'U', encoding=settings.FILE_CHARSET) as fp:
# Some backends can't execute more than one SQL statement at a time,
# so split into separate statements.
output.extend(_split_statements(fp.read()))
return output
def emit_post_sync_signal(created_models, verbosity, interactive, db):
# Emit the post_sync signal for every application.
for app in models.get_apps():
app_name = app.__name__.split('.')[-2]
if verbosity >= 2:
print("Running post-sync handlers for application %s" % app_name)
models.signals.post_syncdb.send(sender=app, app=app,
created_models=created_models, verbosity=verbosity,
interactive=interactive, db=db)
|
pyxll/pyxll-examples | multiprocessing/running_object_table.py | Python | unlicense | 5,066 | 0.001184 | """
Example code to show how to use the 'Running Object Table' to find the right
Excel Application object to use from a child process.
When spawning a child process (or processes) from Excel sometimes it's
useful to be able to get a reference to the Excel Applciation object
corresponding to the parent process (to write back to a worksheet for
example).
The Excel application object can be obtained by using
win32com.client.Dispatch("Excel.Application") but this isn't guarenteed
to be the correct instance of Excel if there are multiple Excel applications
running.
This example shows how to solve this problem using the 'Running Object Table'.
References:
https://msdn.microsoft.com/en-us/library/windows/desktop/ms695276%28v=vs.85%29.aspx
https://msdn.microsoft.com/en-us/library/windows/desktop/ms684004(v=vs.85).aspx
"""
from pyxll import xl_func, get_active_object
from multiprocessing import Process
import win32com.client
import pythoncom
import logging
import time
import os
IID_Workbook = pythoncom.MakeIID("{000208DA-0000-0000-C000-000000000046}")
def get_xl_app(parent=None):
"""
Return an Excel Application instance.
Unlike using win32com.client.Dispatch("Excel.Application") the
Application returned will always be the one that corresponds
to the parent process.
"""
# Get the window handle set by the parent process
parent_hwnd = os.environ["PYXLL_EXCEL_HWND"]
# Iterate over the running object table looking for the Excel Workbook
# object from the parent process' Application object.
context = pythoncom.CreateBindCtx(0)
for moniker in pythoncom.GetRunningObjectTable():
try:
# Workbook implements IOleWindow so only consider objects implementing that
window = moniker.BindToObject(context, None, pythoncom.IID_IOleWindow)
disp = window.QueryInterface(pythoncom.IID_IDispatch)
# Get a win32com Dispatch object from the PyIDispatch object as it's
# easier to work with.
obj = win32com.client.Dispatch(disp)
except pythoncom.com_error:
# Skip any objects we're not interested in
continue
# Check the object we've found is a Workbook
if getattr(obj, "CLSID", None) == IID_Workbook:
# Get the Application from the Workbook and if its window matches return it.
xl_app = obj.Application
if str(xl_app.Hwnd) == parent_hwnd:
return xl_app
# This can happen if the parent process has terminated without terminating
# the child process.
raise RuntimeError("Parent Excel application not found")
def _subprocess_func(target_address, logfile):
"""
This function is | run in a child process of the main Excel process.
"""
# Initialize logging (since we're now running outside of Excel this isn't done for us).
logging.basicConfig(filename=logfile, level=logging.INFO)
log = logging.getLogger(__name__)
log.info("Child process %d starting" % os.getpid())
try:
# Get the Excel Application corresponding to the parent process
| xl_app = get_xl_app()
# Write to the target cell in the parent Excel.
cell = xl_app.Range(target_address)
message = "Child process %d is running..." % os.getpid()
cell.Value = message
# Run for a few seconds updating the value periodically
for i in range(300):
message = message[1:] + message[0]
# When setting a value in Excel it may fail if the user is also
# interacting with the sheet.
try:
cell.Value = message
except:
log.warn("Error setting cell value", exc_info=True)
time.sleep(0.2)
cell.Value = "Child process %d has terminated" % os.getpid()
except Exception:
log.error("An error occured in the child process", exc_info=True)
raise
@xl_func("string target_address: string")
def start_subprocess(target_address):
"""
Start a sub-process that will write back to a cell.
:param target_address: address of cell to write to from the child process.
"""
# Get the window handle of the Excel process so the sub-process can
# find the right Excel Application instance.
xl_app = win32com.client.Dispatch(get_active_object()).Application
os.environ["PYXLL_EXCEL_HWND"] = str(xl_app.Hwnd)
# Get the log file name for the subproces to log to.
root = logging.getLogger()
logfile = None
for handler in root.handlers:
if isinstance(handler, logging.FileHandler):
logfile = handler.baseFilename
break
# Start the subprocess that will write back to the target cell.
# It's a daemon process so that it doesn't stop the main Excel process
# from terminating even if it's still running.
process = Process(target=_subprocess_func, args=(target_address, logfile))
process.daemon = True
process.start()
return "Child process %d started" % process.pid
|
nextgis/nextgisweb_log | nextgisweb_log/log_handler.py | Python | gpl-2.0 | 998 | 0.003006 | import logging
from .model import LogEntry, LogLevels
class NGWLogHandler(logging.Handler):
"""
Simple standard log handler for nextgisweb_lo | g
"""
def __init__(self, level=LogLevels.default_value, component=None, group=None):
logging.Handler.__init__(self, level=level)
self.component = component
self.group = group
def emit(self, record):
self.format(record)
if record.exc_info:
record. | exc_text = logging._defaultFormatter.formatException(record.exc_info)
else:
record.exc_text = None
# Insert log record:
log_entry = LogEntry()
log_entry.component = self.component
log_entry.group = self.group
log_entry.message_level = record.levelno
log_entry.message_level_name = record.levelname
log_entry.message_name = record.name
log_entry.message_text = record.msg
log_entry.exc_info = record.exc_text
log_entry.persist()
|
TobbeTripitaka/src | user/zhiguang/Mfdlsrtm.py | Python | gpl-2.0 | 715 | 0.029371 | #!/usr/bin/python
'F | inite difference RTM as a linear operator'
import os, sys, tempfile, subprocess
import rsf.prog, rsf.path
# Madagascar bin directory
bindir=os.path.join(rsf.prog.RSFROOT,'bin')
# Madagascar | DATAPATH
datapath=rsf.path.datapath().rstrip('/')
# Madagascar commands
cp=os.path.join(bindir,'sfcp')
rtm=os.path.join(bindir,'sfmpifdlsrtm')
# Random files for input and output
inpd,inpfile=tempfile.mkstemp(dir=datapath)
outd,outfile=tempfile.mkstemp(dir=datapath)
p=subprocess.Popen([cp],stdout=inpd, close_fds=True)
p.wait()
run='ibrun tacc_affinity %s input=%s output=%s %s' %(rtm, inpfile, outfile,' '.join(sys.argv[1:]))
print run
os.system(run)
p=subprocess.Popen([cp],stdin=outd)
p.wait
|
riccardocagnasso/useless | setup.py | Python | mit | 576 | 0 | from setuptools import setup, find_ | packages
print(find_packages('src'))
setup(
name="Useles2s",
version='0.1',
author='Riccardo Cagnasso',
author_email="riccardo@phascode.org",
description='Useless is useless. Oh yeah, and parses bit and pieces' +
| 'of ELF and PE dynamic libraries',
license="MIT",
packages=find_packages('src'),
package_dir={'useless': 'src/useless/',
'useless.elf': 'src/useless/elf/'},
scripts=['src/usls.py'],
install_requires=[
'cached_property',
'prettytable'])
|
ArneBab/gamification-engine | gengine/metadata.py | Python | mit | 1,091 | 0.021082 | from sqlalchemy.orm.session import Session, sessionmaker
import transaction
from sqlalchemy.orm.scoping import scoped_session
from zope.sqlalchemy.datamanager import ZopeTransactionExtension
from sqlalchemy.ext.declarative.api import declarative_base
class MySession(Session):
"""This allow us to use the flask-admin sqla extension, which uses DBSession.commit() rather than transaction.commit()"""
def commit(self,*args,**kw):
transaction.commit(*args,**kw)
def rollback(self,*args,**kw):
transaction.abort(*args,**kw)
DBSession=None
def init_session(override_session=None):
global DBSession
if override_session:
DBSession = override_session
else:
DBSession = scoped_session | (sessionmaker(e | xtension=ZopeTransactionExtension(), class_=MySession))
Base=None
def init_declarative_base(override_base=None):
global Base
if override_base:
Base=override_base
else:
Base = declarative_base()
def init_db(engine):
DBSession.configure(bind=engine)
Base.metadata.bind = engine
|
monetate/sqlalchemy | lib/sqlalchemy/sql/sqltypes.py | Python | mit | 111,751 | 0 | # sql/sqltypes.py
# Copyright (C) 2005-2021 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""SQL specific types.
"""
import codecs
import datetime as dt
import decimal
import json
from . import coercions
from . import elements
from . import operators
from . import roles
from . import type_api
from .base import _bind_or_error
from .base import NO_ARG
from .base import SchemaEventTarget
from .elements import _NONE_NAME
from .elements import quoted_name
from .elements import Slice
from .elements import TypeCoerce as type_coerce # noqa
from .traversals import HasCacheKey
from .traversals import InternalTraversal
from .type_api import Emulated
from .type_api import NativeForEmulated # noqa
from .type_api import to_instance
from .type_api import TypeDecorator
from .type_api import TypeEngine
from .type_api import Variant
from .. import event
from .. import exc
from .. import inspection
from .. import processors
from .. import util
from ..util import compat
from ..util import langhelpers
from ..util import OrderedDict
from ..util import pickle
class _LookupExpressionAdapter(object):
"""Mixin expression adaptations based on lookup tables.
These rules are currently used by the numeric, integer and date types
which have detailed cross-expression coercion rules.
"""
@property
def _expression_adaptations(self):
raise NotImplementedError()
class Comparator(TypeEngine.Comparator):
_blank_dict = util.immutabledict()
def _adapt_expression(self, op, other_comparator):
othertype = other_comparator.type._type_affinity
lookup = self.type._expression_adaptations.get(
op, self._blank_dict
).get(othertype, self.type)
if lookup is othertype:
return (op, other_comparator.type)
elif lookup is self.type._type_affinity:
return (op, self.type)
else:
return (op, to_instance(lookup))
comparator_factory = Comparator
class Concatenable(object):
"""A mixin that marks a type as supporting 'concatenation',
typically strings."""
class Comparator(TypeEngine.Comparator):
def _adapt_expression(self, op, other_comparator):
if op is operators.add and isinstance(
other_comparator,
(Concatenable.Comparator, NullType.Comparator),
):
return operators.concat_op, self.expr.type
else:
return super(Concatenable.Comparator, self)._adapt_expression(
op, other_comparator
)
comparator_factory = Comparator
class Indexable(object):
"""A mixin that marks a type as supporting indexing operations,
such as array or JSON structures.
.. versionadded:: 1.1.0
"""
class Comparator(TypeEngine.Comparator):
def _setup_getitem(self, index):
raise NotImplementedError()
def __getitem__(self, index):
(
adjusted_op,
adjusted_right_expr,
result_type,
) = self._setup_getitem(index)
return self.operate(
adjusted_op, adjusted_right_expr, result_type=result_type
)
comparator_factory = Comparator
class String(Concatenable, TypeEngine):
"""The base for all string and character types.
In SQL, corresponds to VARCHAR. Can also take Python unicode objects
and encode to the database's encoding in bind params (and the reverse for
result sets.)
The `length` field is usually required when the `String` type is
used within a CREATE TABLE statement, as VARCHAR requires a length
on most databases.
"""
__visit_name__ = "string"
RETURNS_UNICODE = util.symbol(
"RETURNS_UNICODE",
"""Indicates that the DBAPI returns Python Unicode for VARCHAR,
NVARCHAR, and other character-based datatypes in all cases.
This is the default value for
:attr:`.DefaultDialect.returns_unicode_strings` under Python 3.
.. versionadded:: 1.4
""",
)
RETURNS_BYTES = util.symbol(
"RETURNS_BYTES",
"""Indicates that the DBAPI returns byte objects under Python 3
or non-Unicode string objects under Python 2 for VARCHAR, NVARCHAR,
and other character-based datatypes in all cases.
This may be applied to the
:attr:`.DefaultDialect.returns_unicode_strings` attribute.
.. versionadded:: 1.4
""",
)
RETURNS_CONDITIONAL = util.symbol(
"RETURNS_CONDITIONAL",
"""Indicates that the DBAPI may return Unicode or bytestrings for
VARCHAR, NVARCHAR, and other character-based datatypes, and that
SQLAlchemy's default String datatype will need to test on a per-row
basis for Unicode or bytes.
This may be applied to the
:attr:`.DefaultDialect.returns_unicode_strings` attribute.
.. versionadded:: 1.4
""",
)
RETURNS_UNKNOWN = util.symbol(
"RETURNS_UNKNOWN",
"""Indicates that the dialect should test on first c | onnect what the
string-returning behavior of character-based datatypes is.
This is the default value for DefaultDialect.unicode_returns under
Python 2.
This may be applied to the
:attr:`.DefaultDialect.returns_unicode_strings` attribute under
Python 2 only. The value is disallowed under Python 3.
| .. versionadded:: 1.4
.. deprecated:: 1.4 This value will be removed in SQLAlchemy 2.0.
""",
)
@util.deprecated_params(
convert_unicode=(
"1.3",
"The :paramref:`.String.convert_unicode` parameter is deprecated "
"and will be removed in a future release. All modern DBAPIs "
"now support Python Unicode directly and this parameter is "
"unnecessary.",
),
unicode_error=(
"1.3",
"The :paramref:`.String.unicode_errors` parameter is deprecated "
"and will be removed in a future release. This parameter is "
"unnecessary for modern Python DBAPIs and degrades performance "
"significantly.",
),
)
def __init__(
self,
length=None,
collation=None,
convert_unicode=False,
unicode_error=None,
_warn_on_bytestring=False,
_expect_unicode=False,
):
"""
Create a string-holding type.
:param length: optional, a length for the column for use in
DDL and CAST expressions. May be safely omitted if no ``CREATE
TABLE`` will be issued. Certain databases may require a
``length`` for use in DDL, and will raise an exception when
the ``CREATE TABLE`` DDL is issued if a ``VARCHAR``
with no length is included. Whether the value is
interpreted as bytes or characters is database specific.
:param collation: Optional, a column-level collation for
use in DDL and CAST expressions. Renders using the
COLLATE keyword supported by SQLite, MySQL, and PostgreSQL.
E.g.::
>>> from sqlalchemy import cast, select, String
>>> print(select(cast('some string', String(collation='utf8'))))
SELECT CAST(:param_1 AS VARCHAR COLLATE utf8) AS anon_1
:param convert_unicode: When set to ``True``, the
:class:`.String` type will assume that
input is to be passed as Python Unicode objects under Python 2,
and results returned as Python Unicode objects.
In the rare circumstance that the DBAPI does not support
Python unicode under Python 2, SQLAlchemy will use its own
encoder/decoder functionality on strings, referring to the
value of the :paramref:`_sa.create_engine.encoding` parameter
parameter passed to :func:`_sa.create_engine` |
DarkFenX/Phobos | miner/fsd_lite.py | Python | gpl-3.0 | 3,930 | 0.002545 | #===============================================================================
# Copyright (C) 2014-2019 Anton Vorobyov
#
# This file is part of Phobos.
#
# Phobos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Phobos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Phobos. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
import json
import re
import sqlite3
from miner.base import BaseMiner
from util import cachedproperty
class FsdLiteMiner(BaseMiner):
"""Class, which fetches data from FSDLite format static cache files."""
name = 'fsd_lite'
def __init__(self, resbrowser, translator):
self._resbrowser = resbrowser
self._translator = translator
def contname_iter(self):
for container_name in sorted(self._contname_respath_map):
yield container_name
def get_data(self, container_name, language=None, verbose=False, **kwargs):
try:
resource_path = self._contname_respath_map[container_name]
except KeyError:
self._container_not_found(container_name)
else:
rows = {}
file_path = self._resbrowser.get_file_info(resource_path).file_abspath
with sqlite3.connect(file_path) as dbconn:
c = dbconn.cursor()
c.execute(u'select key, value from cache')
for sqlite_row in c:
key = sqlite_row[0]
value = sqlite_row[1]
row = json.loads(value)
rows[key] = row
self._translator.translate_container(rows, language, verbose=verbose)
return rows
    @cachedproperty
    def _contname_respath_map(self):
        """
        Map between container names and resource path names to static cache files.
        Format: {container path: resource path to static cache}

        Computed once per instance (cachedproperty): scans every resource
        path, keeps only those that look like static-data files AND are
        readable SQLite databases with a 'cache' table.
        """
        contname_respath_map = {}
        for resource_path in self._resbrowser.respath_iter():
            # Filter by resource file path first
            container_name = self.__get_container_name(resource_path)
            if container_name is None:
                continue
            # Now, check if it's actually sqlite database and if it has cache table
            if not self.__check_cache(resource_path):
                continue
            contname_respath_map[container_name] = resource_path
        return contname_respath_map
def __get_container_name(self, resource_path):
"""
Validate resource path and return stripped resource
name if path is valid, return None otherwise.
"""
m = re.match(r'^res:/staticdata/(?P<fname>.+).static$', resource_path)
if not m:
retur | n None
| return m.group('fname')
def __check_cache(self, resource_path):
"""Check if file is actually SQLite database and has cache table."""
file_path = self._resbrowser.get_file_info(resource_path).file_abspath
try:
dbconn = sqlite3.connect(file_path)
c = dbconn.cursor()
c.execute('select count(*) from sqlite_master where type = \'table\' and name = \'cache\'')
except KeyboardInterrupt:
raise
except:
has_cache = False
else:
has_cache = False
for row in c:
has_cache = bool(row[0])
return has_cache
|
jucimarjr/IPC_2017-1 | lista05/lista05_lista02_questao55.py | Python | apache-2.0 | 1,372 | 0.002208 | # encoding: utf-8
# --------------------------------------------------------------------------
# Introdução a Programação de Computadores - IPC
# Universidade do Estado do Amazonas - UEA
# Prof. Jucimar Jr
# ULISSES ANTONIO ANTONINO DA COSTA - 1515090555
# TIAGO FERREIRA ARANHA - 1715310047
# VÍTOR SIMÕES AZEVEDO - 1715310025
# VICTOR HUGO DE OLIVEIRA CARREIRA - 1715310063
# REINALDO VARGAS - 1715310054
#
# 55) Criar um algoritmo que leia o valor de N, imprima a seqüência a seguir e o resultado.
# N! / 0! – (N-1)! / 2! + (N-2)! / 4! – (N-3)! / 6! + ... 0! / (2N)!
# --------------------------------------------------------------------------
# Reads N and evaluates the alternating series
#   N!/0! - (N-1)!/2! + (N-2)!/4! - ... 0!/(2N)!
# printing both the rendered sequence and its value.
numero1 = int(input("Informe inteiro: "))  # N: numerator argument, counts down
numero2 = 0       # denominator argument: 0, 2, 4, ..., 2N
operacao = 1      # alternating sign: +1 / -1
resultado = 0
sequencia = ""
while numero1 >= 0:
    # fatorial1 = numero1!
    i = 1
    fatorial1 = 1
    while i <= numero1:
        fatorial1 = fatorial1 * i
        i += 1
    # fatorial2 = numero2!  (fixed garbled 'i = 1 |' line)
    i = 1
    fatorial2 = 1
    while i <= numero2:
        fatorial2 = fatorial2 * i
        i += 1
    resultado = resultado + (fatorial1 / fatorial2) * operacao
    if operacao == 1:
        sequencia = sequencia + " + " + str(numero1) + "!/" + str(numero2) + "!"
    else:
        sequencia = sequencia + " - " + str(numero1) + "!/" + str(numero2) + "!"
    numero1 -= 1
    numero2 += 2
    operacao *= -1
print("Sequência: %s" % sequencia)
print("Resultado: %5.2f\n" % resultado)
|
0xkag/tornado | tornado/iostream.py | Python | apache-2.0 | 59,061 | 0.000203 | #!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utility classes to write to and read from non-blocking files and sockets.
Contents:
* `BaseIOStream`: Generic interface for reading and writing.
* `IOStream`: Implementation of BaseIOStream using non-blocking sockets.
* `SSLIOStream`: SSL-aware version of IOStream.
* `PipeIOStream`: Pipe-based IOStream implementation.
"""
from __future__ import absolute_import, division, print_function, with_statement
import collections
import errno
import numbers
import os
import socket
import sys
import re
from tornado.concurrent import TracebackFuture
from tornado import ioloop
from tornado.log import gen_log, app_log
from tornado.netutil import ssl_wrap_socket, ssl_match_hostname, SSLCertificateError
from tornado import stack_context
from tornado.util import errno_from_exception
try:
from tornado.platform.posix import _set_nonblocking
except ImportError:
_set_nonblocking = None
try:
import ssl
except ImportError:
# ssl is not available on Google App Engine
ssl = None
# These errnos indicate that a non-blocking operation must be retried
# at a later time. On most platforms they're the same value, but on
# some they differ.
_ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN)
if hasattr(errno, "WSAEWOULDBLOCK"):
_ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,)
# These errnos indicate that a connection has been abruptly terminated.
# They should be caught and handled less noisily than other errors.
_ERRNO_CONNRESET = (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE,
errno.ETIMEDOUT)
if hasattr(errno, "WSAECONNRESET"):
_ERRNO_CONNRESET += (errno.WSAECONNRESET, errno.WSAECONNABORTED, errno.WSAETIMEDOUT)
# More non-portable errnos:
_ERRNO_INPROGRESS = (errno.EINPROGRESS,)
if hasattr(errno, "WSAEINPROGRESS"):
_ERRNO_INPROGRESS += (errno.WSAEINPROGRESS,)
#######################################################
class StreamClosedError(IOError):
    """Raised by `IOStream` operations once the stream has been closed.

    Because the close callback is queued to run *after* any other pending
    stream callbacks (letting buffered data drain first), this error may be
    observed before the close callback itself fires.
    """
class UnsatisfiableReadError(Exception):
    """Raised when a bounded read can never be satisfied.

    ``read_until`` and ``read_until_regex`` raise this when their
    ``max_bytes`` limit is exceeded before the condition matches.
    """
class StreamBufferFullError(Exception):
    """Raised by `IOStream` methods when the internal buffer has filled up."""
class BaseIOStream(object):
"""A utility class to write to and read from a non-blocking file or socket.
We support a non-blocking ``write()`` and a family of ``read_*()`` methods.
All of the methods take an optional ``callback`` argument and return a
`.Future` only if no callback is given. When the operation completes,
the callback will be run or the `.Future` will resolve with the data
read (or ``None`` for ``write()``). All outstanding ``Futures`` will
resolve with a `StreamClosedError` when the stream is closed; users
of the callback interface will be notified via
`.BaseIOStream.set_close_callback` instead.
When a stream is closed due to an error, the IOStream's ``error``
attribute contains the exception object.
Subclasses must implement `fileno`, `close_fd`, `write_to_fd`,
`read_from_fd`, and optionally `get_fd_error`.
"""
def __init__(self, io_loop=None, max_buffer_size=None,
read_chunk_size=None, max_write_buffer_size=None):
"""`BaseIOStream` constructor.
:arg io_loop: The `.IOLoop` to use; defaults to `.IOLoop.current`.
:arg max_buffer_size: Maximum amount of incoming data to buffer;
defaults to 100MB.
:arg read_chunk_size: Amount of data to read at one time from the
underlying transport; defaults to 64KB.
:arg max_write_buffer_size: Amount of outgoing data to buffer;
defaults to unlimited.
.. versionchanged:: 4.0
Add the ``max_write_buffer_size`` parameter. Changed def | ault
``read_chunk_size`` to 64KB.
"""
self.io_loop = io_loop or ioloop.IOLoop.current()
self.max_buffer_size = max_buffer_size or 104857600
# A chunk size that is too close to max_buffer_size can cause
# spurious failures.
self.read_chunk_ | size = min(read_chunk_size or 65536,
self.max_buffer_size // 2)
self.max_write_buffer_size = max_write_buffer_size
self.error = None
self._read_buffer = collections.deque()
self._write_buffer = collections.deque()
self._read_buffer_size = 0
self._write_buffer_size = 0
self._write_buffer_frozen = False
self._read_delimiter = None
self._read_regex = None
self._read_max_bytes = None
self._read_bytes = None
self._read_partial = False
self._read_until_close = False
self._read_callback = None
self._read_future = None
self._streaming_callback = None
self._write_callback = None
self._write_future = None
self._close_callback = None
self._connect_callback = None
self._connect_future = None
self._connecting = False
self._state = None
self._pending_callbacks = 0
self._closed = False
def fileno(self):
"""Returns the file descriptor for this stream."""
raise NotImplementedError()
def close_fd(self):
"""Closes the file underlying this stream.
``close_fd`` is called by `BaseIOStream` and should not be called
elsewhere; other users should call `close` instead.
"""
raise NotImplementedError()
def write_to_fd(self, data):
"""Attempts to write ``data`` to the underlying file.
Returns the number of bytes written.
"""
raise NotImplementedError()
def read_from_fd(self):
"""Attempts to read from the underlying file.
Returns ``None`` if there was nothing to read (the socket
returned `~errno.EWOULDBLOCK` or equivalent), otherwise
returns the data. When possible, should return no more than
``self.read_chunk_size`` bytes at a time.
"""
raise NotImplementedError()
def get_fd_error(self):
"""Returns information about any error on the underlying file.
This method is called after the `.IOLoop` has signaled an error on the
file descriptor, and should return an Exception (such as `socket.error`
with additional information, or None if no such information is
available.
"""
return None
def read_until_regex(self, regex, callback=None, max_bytes=None):
"""Asynchronously read until we have matched the given regex.
The result includes the data that matches the regex and anything
that came before it. If a callback is given, it will be run
with the data as an argument; if not, this method returns a
`.Future`.
If ``max_bytes`` is not None, the connection will be closed
if more than ``max_bytes`` bytes have been read and the regex is
not satisfied.
.. versionchanged:: 4.0
Added the ``max_bytes`` argument. The ``callback`` argument is
now optional and a `.Future` will be returned if it is omitted.
"""
future = self._set_read_callback(callback)
self._read_regex |
pacoqueen/cican | utils/mapa.py | Python | gpl-3.0 | 6,423 | 0.009199 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# TODO: PORASQUI: BUG: Cerrar la ventana no detiene el GMapCatcher y se queda
# como un proceso de fondo en espera bloqueante... Ni atiende al Ctrl+C
# siquiera.
import sys, os.path
dirfichero = os.path.realpath(os.path.dirname(__file__))
if os.path.realpath(os.path.curdir) == dirfichero:
os.chdir("..")
if ("utils" in os.listdir(os.path.curdir)
and os.path.abspath(os.path.curdir) not in sys.path):
sys.path.insert(0, ".")
from utils.googlemaps import GoogleMaps, GoogleMapsError
try:
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
# raise ImportError # XXX: Solo para probar... BORRAR DESPUÉS
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
import osmgpsmap # Third dependency. No está en el árbol local.
from utils.mapviewer import DummyLayer, imdir
OSMGPSMAP = True
except ImportError, msg:
OSMGPSMAP = False
os.chdir(os.path.abspath(
os.path.join(dirfichero, "..", "utils", "gmapcatcher")))
import maps as gmc
os.chdir(os.path.join(dirfichero, ".."))
import gtk
APIFILENAME = "gg_api_key.txt"
class Mapa():
def __init__(self, apifile = None):
if not apifile:
mydir = os.path.dirname(os.path.abspath(__file__))
apifile = os.path.join(mydir, APIFILENAME)
fapi = open(apifile)
self.__ggapi = fapi.read()
fapi.close()
self.ggmap = GoogleMaps(self.__ggapi)
self.init_mapa()
def init_mapa(self):
if OSMGPSMAP:
self.osm = osmgpsmap.GpsMap()
self.osm.layer_add(osmgpsmap.GpsMapOsd(show_dpad = True,
show_zoom = True))
self.osm.layer_add(
DummyLayer())
self.osm.connect('button_release_event', self.map_clicked)
self.osm.set_zoom(13) # Zoom por defecto
else:
logging_path = conf_path = None | # Es la conf. por defecto. Ver
# utils/gmapatcher/map.py para más detalles.
gmc.mapLogging.init_logging(logging_path)
gmc.log.info("Starting %s version %s." % (gmc.NAME, gmc.VERSION))
self.gmcw = gmc.MainWindow(config_path = conf_path)
self.gmcw.do_zoom(4) # Zoom por defecto.
# | TODO: PORASQUI: Hacer un traslado de escala entre el zoom de
# GMC que va -creo- desde -2 (cerca) a más de 10 (lejos) al de
# OSM, que va al contrario y 13 es cerca. Ver las "constantes"
# definidas en cada caso (MAX_ZOOM_no_sé_qué en GMC).
self.osm = self.gmcw.container
def map_clicked(self, osm, event):
if OSMGPSMAP:
lat, lon = self.osm.get_event_location(event).get_degrees()
else:
lat, lon = 0, 0 # PORASQUI
if event.button == 1:
#self.latlon_entry.set_text(
# 'Map Centre: latitude %s longitude %s' % (
# self.osm.props.latitude,
# self.osm.props.longitude
# )
#)
pass
elif event.button == 2:
if OSMGPSMAP:
self.osm.gps_add(lat, lon, heading = osmgpsmap.INVALID);
else:
pass # PORASQUI
elif event.button == 3:
if OSMGPSMAP:
pb = gtk.gdk.pixbuf_new_from_file_at_size(
os.path.join(imdir, "poi.png"), 24,24)
self.osm.image_add(lat,lon,pb)
else:
pass # PORASQUI
def centrar_mapa(self, lat, lon, zoom = None, track = True, flag = False):
"""
@param track Indica si se debe marcar el punto con un círculo y el
"track" de recorrido.
@param flag Indica si se debe marcar con una bandera el punto.
"""
if lat == None:
raise ValueError, "Mapa.centrar_mapa -> Latitud incorrecta"
if lon == None:
raise ValueError, "Mapa.centrar_mapa -> Longitud incorrecta"
if zoom is None:
if OSMGPSMAP:
self.osm.set_center(lat, lon)
else:
self.gmcw.confirm_clicked(None, None, lat, lon)
else:
if OSMGPSMAP:
self.osm.set_center_and_zoom(lat, lon, zoom)
else:
self.gmcw.confirm_clicked(None, None, lat, lon)
self.gmcw.do_zoom(zoom)
if track:
if OSMGPSMAP:
self.osm.gps_add(lat, lon, heading = osmgpsmap.INVALID);
else:
self.gmcw.confirm_clicked(None, None, lat, lon)
# PORASQUI: No support for the moment...
if flag:
if OSMGPSMAP:
pb = gtk.gdk.pixbuf_new_from_file_at_size(
os.path.join(imdir, "poi.png"), 24, 24)
self.osm.image_add(lat, lon, pb)
else:
self.gmcw.confirm_clicked(None, None, lat, lon)
# PORASQUI: No support for the moment...
def put_mapa(self, container):
#m = self.wids['mapa_container']
m = container
m.add(self.osm)
m.show_all()
if not OSMGPSMAP: # Hay que ocultar algunas cosillas...
for w in (self.gmcw.export_panel,
self.gmcw.top_panel,
self.gmcw.status_bar):
try:
w.set_visible(False)
except AttributeError:
w.set_property("visible", False)
    @property
    def zoom(self):
        """Current zoom level of the map (scale depends on the backend in use)."""
        if OSMGPSMAP:
            return self.osm.props.zoom
        else:
            return self.gmcw.get_zoom()
    def get_latlon(self, direccion):
        """
        Return the latitude and longitude (as floats) for the given
        address. If Google Maps cannot resolve it, return (None, None).
        """
        try:
            res = self.ggmap.address_to_latlng(direccion)
        except GoogleMapsError:
            # Unresolvable address: signal with a (None, None) pair instead
            # of propagating the API error.
            res = (None, None)
        return res
def test():
w = gtk.Window()
m = Mapa()
m.put_mapa(w)
#w.show_all()
w.connect("destroy", lambda *a, **kw: gtk.main_quit())
gtk.main()
if __name__ == "__main__":
test()
|
twistedretard/LaserSimulatedSecurityTurret | src/streaming/server.py | Python | mit | 272 | 0.007353 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import subprocess
# Pipe the UDP stream received on port 5001 straight into mplayer
# (blocks until the pipeline exits; exit status lands in `running`).
running = os.system("nc -u -l -p 5001 | mplayer -cache 1024 -")
#subprocess.check_call('/opt/vc/bin/raspivid -n -w 800 -h 600 -fps 24 -t 0 -o - | socat - udp-sendto:' + '129.16.194.248' + ':5001')
|
divio/django-shop | shop/cascade/segmentation.py | Python | bsd-3-clause | 317 | 0.003155 | from cmsp | lugin_cascade.segmentation.mixins import EmulateUserModelMixin, EmulateUserAdminMixin
from shop.admin.customer import CustomerProxy


class EmulateCustomerModelMixin(EmulateUserModelMixin):
    # Swap the emulated user model for the shop's customer proxy.
    UserModel = CustomerProxy


class EmulateCustomerAdminMixin(EmulateUserAdminMixin):
    # Admin-side counterpart: emulate customers instead of plain users.
    UserModel = CustomerProxy
|
zhaochl/python-utils | verify_code/Imaging-1.1.7/build/lib.linux-x86_64-2.7/ImtImagePlugin.py | Python | apache-2.0 | 2,203 | 0.003177 | #
# The Python Imaging Library.
# $Id$
#
# IM Tools support for PIL
#
# history:
# 1996-05-27 fl Created (read 8-bit images only)
# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.2)
#
# Copyright (c) Secret Labs AB 1997-2001.
# Copyright (c) Fredrik Lundh 1996-2001.
#
# See the README file for information on usage and redistribution.
#
__version__ = "0.2"
import re
import Image, ImageFile
#
# --------------------------------------------------------------------
field = re.compile(r"([a-z]*) ([^ \r\n]*)")
##
# Image plugin for IM Tools images.
class ImtImageFile(ImageFile.ImageFile):
format = "IMT"
format_description = "IM Tools"
def _open(self):
# Quick rejection: if there's not a LF among the first
# 100 bytes, this is (probably) not a text header.
if not "\n" in self.fp.read(100):
raise SyntaxError, "not an IM file"
self.fp.seek(0)
xsize = ysize = 0
while 1:
s = self.fp.read(1)
if not s:
break
if s == chr(12):
# image data begins
self.tile = [("raw", (0,0)+self.size,
self.fp.tell(),
(self.mode, 0, 1))]
break
else:
# read key/value pair
# FIXME: dangerous, may read whole file
s = s + self.fp.readline()
if len(s) == 1 or len(s) > 100:
break
if s[0] == "*":
continue # comment
m = field.match(s)
if not m:
break
k, v = m.group(1,2)
| if k == "width":
xsize = int(v)
self.size = xsize, ysize
| elif k == "height":
ysize = int(v)
self.size = xsize, ysize
elif k == "pixel" and v == "n8":
self.mode = "L"
#
# --------------------------------------------------------------------
Image.register_open("IMT", ImtImageFile)
#
# no extension registered (".im" is simply too common)
|
Nahnja/PyIoC | pyioc.py | Python | mit | 730 | 0.005479 | from functools import partial
from types import MethodType
class IoCList(list):
    """A minimal inversion-of-control container backed by a plain list.

    Items are classes. Indexing with a class returns the registration that
    matches it most directly: an exact registration wins, otherwise the
    registered subclass whose MRO reaches the requested class soonest.
    """

    def __getitem__(self, cls):
        """Return the best-matching registered class for *cls*.

        Raises IndexError when nothing registered subclasses *cls*.
        """
        return sorted(
            [item for item in self if cls in item.mro()],
            key=lambda item: item.mro().index(cls)
        )[0]

    def inject_dependency(self, **kwargs):
        """Decorator factory: pre-bind keyword args to container-built instances.

        Each keyword maps a parameter name to a class; the matching
        registration is instantiated once, at decoration time.
        """
        def decorator(fun):
            keywords = dict([(name, self[cls]()) for name, cls in kwargs.items()])
            # make this work with methods not just plain functions
            # -> partials don't have __get__, functions do, so wrap the
            #    partial in a plain function
            def method(*args, **kwargs):
                return partial(fun, **keywords)(*args, **kwargs)
            return method
        return decorator
|
fluo-io/fluo-deploy | lib/muchos/config/ec2.py | Python | apache-2.0 | 9,189 | 0 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import glob
import json
from sys import exit
import os
from .base import SERVICES
from .base import BaseConfig
from .decorators import (
ansible_play_var,
default,
)
from ..util import get_ephemeral_devices, get_arch
class Ec2DeployConfig(BaseConfig):
def __init__(
self,
deploy_path,
config_path,
hosts_path,
checksums_path,
templates_path,
cluster_name,
):
super(Ec2DeployConfig, self).__init__(
deploy_path,
config_path,
hosts_path,
checksums_path,
templates_path,
cluster_name,
)
self.sg_name = cluster_name + "-group"
self.ephemeral_root = "ephemeral"
self.cluster_template_d = None
self.metrics_drive_root = "media-" + self.ephemeral_root
self.init_template(templates_path)
def verify_config(self, action):
self._verify_config(action)
def verify_launch(self):
self.verify_instance_type(self.get("ec2", "default_instance_type"))
self.verify_instance_type(self.get("ec2", "worker_instance_type"))
def init_nodes(self):
self.node_d = {}
for (hostname, value) in self.items("nodes"):
if hostname in self.node_d:
exit(
"Hostname {0} already exists twice in nodes".format(
hostname
)
)
service_list = []
for service in value.split(","):
if service in SERVICES:
service_list.append(service)
else:
exit(
"Unknown service '{}' declared for node {}".format(
service, hostname
)
)
self.node_d[hostname] = service_list
def default_ephemeral_devices(self):
return get_ephemeral_devices(self.get("ec2", "default_instance_type"))
def worker_ephemeral_devices(self):
return get_ephemeral_devices(self.get("ec2", "worker_instance_type"))
def max_ephemeral(self):
return max(
(
len(self.default_ephemeral_devices()),
len(self.worker_ephemeral_devices()),
)
)
def node_type_map(self):
if self.cluster_template_d:
return self.cluster_template_d["devices"]
node_types = {}
node_list = [
("default", self.default_ephemeral_devices()),
("worker", self.worker_ephemeral_devices()),
]
for (ntype, devices) in node_list:
node_types[ntype] = {
"mounts": self.mounts(len(devices)),
"devices": devices,
}
return node_types
def mount_root(self):
return "/media/" + self.ephemeral_root
@ansible_play_var
@default("ext3")
def fstype(self):
return self.get("ec2", "fstype")
@ansible_play_var
@default("no")
def force_format(self):
return self.get("ec2", "force_format")
def data_dirs_common(self, nodeType):
return self.node_type_map()[nodeType]["mounts"]
def metrics_drive_ids(self):
drive_ids = []
for i in range(0, self.max_ephemeral()):
drive_ids.append(self.metrics_drive_root + str(i))
return drive_ids
def shutdown_delay_minutes(self):
return self.get("ec2", "shutdown_delay_minutes")
def verify_instance_type(self, instance_type):
if not self.cluster_template_d:
if get_arch(instance_type) == "pvm":
exit(
"ERROR - Configuration contains instance type '{0}' "
"that uses pvm architecture."
| "Only hvm architecture is supported!".format(instance_type)
)
def insta | nce_tags(self):
retd = {}
if self.has_option("ec2", "instance_tags"):
value = self.get("ec2", "instance_tags")
if value:
for kv in value.split(","):
(key, val) = kv.split(":")
retd[key] = val
return retd
def init_template(self, templates_path):
if self.has_option("ec2", "cluster_template"):
template_id = self.get("ec2", "cluster_template")
template_path = os.path.join(templates_path, template_id)
if os.path.exists(template_path):
self.cluster_template_d = {"id": template_id}
self.load_template_ec2_requests(template_path)
self.load_template_device_map(template_path)
self.validate_template()
def load_template_ec2_requests(self, template_dir):
for json_path in glob.glob(os.path.join(template_dir, "*.json")):
service = os.path.basename(json_path).rsplit(".", 1)[0]
if service not in SERVICES:
exit(
"ERROR - Template '{0}' has unrecognized option '{1}'. "
"Must be one of {2}".format(
self.cluster_template_d["id"], service, str(SERVICES)
)
)
with open(json_path, "r") as json_file:
# load as string, so we can use string.Template
# to inject config values
self.cluster_template_d[service] = json_file.read()
def load_template_device_map(self, template_dir):
device_map_path = os.path.join(template_dir, "devices")
if not os.path.isfile(device_map_path):
exit(
"ERROR - template '{0}' is missing 'devices' config".format(
self.cluster_template_d["id"]
)
)
with open(device_map_path, "r") as json_file:
self.cluster_template_d["devices"] = json.load(json_file)
def validate_template(self):
if not self.cluster_template_d:
exit(
"ERROR - Template '{0}' is not defined!".format(
self.get("ec2", "cluster_template")
)
)
if "worker" not in self.cluster_template_d:
exit(
"ERROR - '{0}' template config is invalid. No 'worker' "
"launch request is defined".format(
self.cluster_template_d["id"]
)
)
if "worker" not in self.cluster_template_d["devices"]:
exit(
"ERROR - '{0}' template is invalid. The devices file must "
"have a 'worker' device map".format(
self.cluster_template_d["id"]
)
)
if "default" not in self.cluster_template_d["devices"]:
exit(
"ERROR - '{0}' template is invalid. The devices file must "
"have a 'default' device map".format(
self.cluster_template_d["id"]
)
)
# Validate the selected launch template for each host
worker_count = 0
for hostname in self.node_d:
# first service listed denotes the selected template
selected_ec2_request = self.node_d[hostname][0]
if "worker" == selected_ec2_request:
worker_count = worker_count + 1
else:
|
mitodl/ccxcon | webhooks/models.py | Python | agpl-3.0 | 992 | 0 | """
Webhook models.
"""
import uuid
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
class ActiveManager(models.Manager):
    """
    Manager which returns only enabled webhooks.
    """
    def get_queryset(self, *args, **kwargs):
        """
        Returns only enabled webhooks.
        """
        # pylint: disable=super-on-old-class
        qs = super(ActiveManager, self).get_queryset(*args, **kwargs)
        return qs.filter(enabled=True)
def get_uuid_hex():
    """Return a fresh UUID4 as a 32-character hex string.

    Kept as a named module-level function (rather than a lambda/method) so
    it can be serialized as a field default in migrations.
    """
    fresh = uuid.uuid4()
    return fresh.hex
@python_2_unicode_compatible
class Webhook(models.Model):
    """
    Represents possibly outgoing webhooks.

    ``objects`` is the plain default manager; ``active`` yields only
    enabled hooks.
    """
    # Target URL for this webhook.
    url = models.URLField()
    # Random hex token generated per hook; presumably used to authenticate
    # deliveries -- confirm against the sending code.
    secret = models.CharField(max_length=32, default=get_uuid_hex)
    # Soft on/off switch: disabled hooks stay in the DB but are filtered
    # out by the ``active`` manager.
    enabled = models.BooleanField(default=True)
    objects = models.Manager()
    active = ActiveManager()
    def __str__(self):
        return self.url
biomodels/BIOMD0000000199 | setup.py | Python | cc0-1.0 | 377 | 0.005305 | from setuptools import setup, find_packages
setup(name='BIOMD0000000199',
      version=20140916,
      description='BIOMD0000000199 from BioModels',
      url='http://www.ebi.ac.uk/biomodels-main/BIOMD0000000199',
      maintainer='Stanley Gu',
      # NOTE(review): 'maintainer_url' is not a standard setup() keyword and
      # looks like it should be maintainer_email; kept to preserve behavior.
      maintainer_url='stanleygu@gmail.com',
      packages=find_packages(),
      package_data={'': ['*.xml', 'README.md']},
      )
Pistachitos/Sick-Beard | sickbeard/history.py | Python | gpl-3.0 | 2,727 | 0.006234 | # Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import db
import datetime
from sickbeard.common import SNATCHED, SUBTITLED, Quality
dateFormat = "%Y%m%d%H%M%S"
def _logHistoryItem(action, showid, season, episode, quality, resource, provider):
    """Insert one row into the history table, stamped with the current time.

    :param action: composite status code describing what happened
    :param resource: file/release name associated with the event
    :param provider: provider or release-group string (or -1 when unknown)
    """
    logDate = datetime.datetime.today().strftime(dateFormat)
    myDB = db.DBConnection()
    myDB.action("INSERT INTO history (action, date, showid, season, episode, quality, resource, provider) VALUES (?,?,?,?,?,?,?,?)",
                [action, logDate, showid, season, episode, quality, resource, provider])
def logSnatch(searchResult):
    """Record a SNATCHED history row for every episode in *searchResult*."""
    for curEpObj in searchResult.episodes:
        showid = int(curEpObj.show.tvdbid)
        season = int(curEpObj.season)
        episode = int(curEpObj.episode)
        quality = searchResult.quality
        providerClass = searchResult.provider
        # idiom fix: identity comparison against None, not !=
        if providerClass is not None:
            provider = providerClass.name
        else:
            provider = "unknown"
        action = Quality.compositeStatus(SNATCHED, searchResult.quality)
        resource = searchResult.name
        _logHistoryItem(action, showid, season, episode, quality, resource, provider)
def logDownload(episode, filename, new_ep_quality, release_group=None):
    """Record a download event for *episode* in the history table."""
    show_id = int(episode.show.tvdbid)
    season_num = int(episode.season)
    episode_num = int(episode.episode)
    # Prefer the release group as the "provider"; fall back to -1 when the
    # group could not be determined.
    provider = release_group if release_group else -1
    _logHistoryItem(episode.status, show_id, season_num, episode_num,
                    new_ep_quality, filename, provider)
def logSubtitle(showid, season, episode, status, subtitleResult):
    """Record a SUBTITLED history row for one downloaded subtitle."""
    resource = subtitleResult.release or ''
    provider = subtitleResult.service
    # Keep only the quality component of the incoming composite status.
    _, quality = Quality.splitCompositeStatus(status)
    action = Quality.compositeStatus(SUBTITLED, quality)
    _logHistoryItem(action, showid, season, episode, quality, resource, provider)
pheanex/fail2ban | setup.py | Python | gpl-2.0 | 5,567 | 0.027124 | #!/usr/bin/python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-
# vi: set ft=python sts=4 ts=4 sw=4 noet :
# This file is part of Fail2Ban.
#
# Fail2Ban is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Fail2Ban is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Fail2Ban; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
__author__ = "Cyril Jaquier, Steven Hiscocks, Yaroslav Halchenko"
__copyright__ = "Copyright (c) 2004 Cyril Jaquier, 2008-2013 Fail2Ban Contributors"
__license__ = "GPL"
try:
import setuptools
from setuptools import setup
except ImportError:
setuptools = None
from distutils.core import setup
try:
# python 3.x
from distutils.command.build_py import build_py_2to3 as build_py
from distutils.command.build_scripts \
import build_scripts_2to3 as build_scripts
except ImportError:
# python 2.x
from distutils.command.build_py import build_py
from distutils.command.build_scripts import build_scripts
import os
from os.path import isfile, join, isdir
import sys
import warnings
from glob import glob
if setuptools and "test" in sys.argv:
import logging
logSys = logging.getLogger("fail2ban")
hdlr = logging.StreamHandler(sys.stdout)
fmt = logging.Formatter("%(asctime)-15s %(message)s")
hdlr.setFormatter(fmt)
logSys.addHandler(hdlr)
if set(["-q", "--quiet"]) & set(sys.argv):
logSys.setLevel(logging.CRITICAL)
warnings.simplefilter("ignore")
sys.warnoptions.append("ignore")
elif set(["-v", "--verbose"]) & set(sys.argv):
logSys.setLevel(logging.DEBUG)
else:
logSys.setLevel(logging.INFO)
elif "test" in sys.argv:
print("python distribute required to execute fail2ban tests")
print("")
longdesc = '''
Fail2Ban scans log files like /var/log/pwdfail or
/var/log/apache/error_log and bans IP that makes
too many password failures. It updates firewall rules
to reject the IP address or executes user defined
commands.'''
if setuptools:
setup_extra = {
'te | st_suite': "fail2ban.tests.utils.gatherTests",
'use_2to3': True,
}
else:
setup_extra = {}
data_files_extra = []
if os.path.exists('/var/run'):
# if we are on the system with /var/run -- we are to u | se it for having fail2ban/
# directory there for socket file etc
data_files_extra += [('/var/run/fail2ban', '')]
# Get version number, avoiding importing fail2ban.
# This is due to tests not functioning for python3 as 2to3 takes place later
exec(open(join("fail2ban", "version.py")).read())
setup(
name="fail2ban",
version=version,
description="Ban IPs that make too many password failures",
long_description=longdesc,
author="Cyril Jaquier & Fail2Ban Contributors",
author_email="cyril.jaquier@fail2ban.org",
url="http://www.fail2ban.org",
license="GPL",
platforms="Posix",
cmdclass={'build_py': build_py, 'build_scripts': build_scripts},
scripts=[
'bin/fail2ban-client',
'bin/fail2ban-server',
'bin/fail2ban-regex',
'bin/fail2ban-testcases',
],
packages=[
'fail2ban',
'fail2ban.client',
'fail2ban.server',
'fail2ban.tests',
'fail2ban.tests.action_d',
],
package_data={
'fail2ban.tests':
[join(w[0], f).replace("fail2ban/tests/", "", 1)
for w in os.walk('fail2ban/tests/files')
for f in w[2]] +
[join(w[0], f).replace("fail2ban/tests/", "", 1)
for w in os.walk('fail2ban/tests/config')
for f in w[2]] +
[join(w[0], f).replace("fail2ban/tests/", "", 1)
for w in os.walk('fail2ban/tests/action_d')
for f in w[2]]
},
data_files=[
('/etc/fail2ban',
glob("config/*.conf")
),
('/etc/fail2ban/filter.d',
glob("config/filter.d/*.conf")
),
('/etc/fail2ban/filter.d/ignorecommands',
glob("config/filter.d/ignorecommands/*")
),
('/etc/fail2ban/action.d',
glob("config/action.d/*.conf") +
glob("config/action.d/*.py")
),
('/etc/fail2ban/fail2ban.d',
''
),
('/etc/fail2ban/jail.d',
''
),
('/var/lib/fail2ban',
''
),
('/usr/share/doc/fail2ban',
['README.md', 'README.Solaris', 'DEVELOP', 'FILTERS',
'doc/run-rootless.txt']
)
] + data_files_extra,
**setup_extra
)
# Do some checks after installation
# Search for obsolete files.
obsoleteFiles = []
elements = {
"/etc/":
[
"fail2ban.conf"
],
"/usr/bin/":
[
"fail2ban.py"
],
"/usr/lib/fail2ban/":
[
"version.py",
"protocol.py"
]
}
for directory in elements:
for f in elements[directory]:
path = join(directory, f)
if isfile(path):
obsoleteFiles.append(path)
if obsoleteFiles:
print("")
print("Obsolete files from previous Fail2Ban versions were found on "
"your system.")
print("Please delete them:")
print("")
for f in obsoleteFiles:
print("\t" + f)
print("")
if isdir("/usr/lib/fail2ban"):
print("")
print("Fail2ban is not installed under /usr/lib anymore. The new "
"location is under /usr/share. Please remove the directory "
"/usr/lib/fail2ban and everything under this directory.")
print("")
# Update config file
if sys.argv[1] == "install":
print("")
print("Please do not forget to update your configuration files.")
print("They are in /etc/fail2ban/.")
print("")
|
odyssey4me/monitoring-scripts | check_virsh_domains.py | Python | apache-2.0 | 18,638 | 0.004346 | #!/usr/bin/python
#
# Script to determine the performance statistics and other information
# related to libvirt guests
# https://github.com/odyssey4me/monitoring-scripts
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import re
import sys
import socket
import libvirt
import argparse
import traceback
import jsonpickle
import subprocess
from xml.etree import ElementTree
# Version required for nagios
VERSION = 'check_virsh_domains v1.0'
# Convert the Domain State integer into the description
# http://libvirt.org/html/libvirt-libvirt.html#virDomainState
DOMAIN_STATES = {
0: 'None',
1: 'Running',
2: 'Blocked on resource',
3: 'Paused by user',
4: 'Being shut down',
5: 'Shut off',
6: 'Crashed',
7: 'Suspended by guest power management'
}
# Location of Zabbix Agent Configuration file
# TODO: This really should either be found, or be an optional argument
ZABBIX_CONF = '/opt/zabbix/etc/zabbix_agentd.conf'
# Location of the zabbix_sender binary
# TODO: This really should either be found, or be an optional argument
ZABBIX_SENDER = '/opt/zabbix/bin/zabbix_sender'
class Domain(object):
def __init__(self, vir_dom):
try:
# Get the domain's network interface device list
if_devices = self.get_if_devices(vir_dom)
# Get the domain's block device list
blk_devices = self.get_blk_devices(vir_dom)
# Get the domain's information
dom_info = vir_dom.info()
# Get the domain's memory stats
mem_stats = vir_dom.memoryStats()
# Get the domain's UUID
self.uuid = vir_dom.UUIDString()
# Compile the network interface stats for each network interface device
for if_num, if_dev in enumerate(if_devices):
# Get the interface stats
if_stats = vir_dom.interfaceStats(if_dev)
# Set class attributes using the interface index number (not the name)
setattr(self, 'if_%s_rx_bytes' % if_num, int(if_stats[0]))
setattr(self, 'if_%s_rx_packets' % if_num, int(if_stats[1]))
setattr(self, 'if_%s_rx_errors' % if_num, int(if_stats[2]))
setattr(self, 'if_%s_rx_drop' % if_num, int(if_stats[3]))
setattr(self, | 'if_%s_tx_bytes' % if_num, int(if_stats[4]))
setattr(self, 'if_%s_tx_packets' % if_num, int(if_stats[5]))
setattr(self, 'if_%s_tx_errors' % if_num, int(if_stats[6]))
setattr(self, 'if_%s_tx_drop' % if_num, int(if_stats[7]))
# Compile the block device stats for each block device
| for blk_dev in blk_devices:
#Get the block device stats
blk_stats = vir_dom.blockStats(blk_dev)
# Set class attributes using the device name
setattr(self, 'blk_%s_rd_req' % blk_dev, int(blk_stats[0]))
setattr(self, 'blk_%s_rd_bytes' % blk_dev, int(blk_stats[1]))
setattr(self, 'blk_%s_wr_req' % blk_dev, int(blk_stats[2]))
setattr(self, 'blk_%s_wr_bytes' % blk_dev, int(blk_stats[3]))
# Get the memory stats in kB and covert to B for consistency
self.mem_max_bytes = int(dom_info[1]) * 1024
self.mem_used_bytes = int(dom_info[2]) * 1024
# Get the number of vCPU's and the usage time in nanoseconds
self.cpu_count = int(dom_info[3])
self.cpu_time = int(dom_info[4])
# Get the state of the domain
self.state = DOMAIN_STATES[dom_info[0]]
# Note:
# To calculate %CPU utilization you need to have a time period. We're expecting that the
# %CPU calculation is done externally by a system that knows the time period between measurements.
#
# For reference:
# http://people.redhat.com/~rjones/virt-top/faq.html#calccpu
# cpu_time_diff = cpuTime_now - cpuTime_t_seconds_ago
# %CPU = 100 * cpu_time_diff / (t * host_cpus * 10^9)
# There may not be anything in mem_stats (support is limited), but let's add any values there may be
for key, value in mem_stats.iteritems():
value_bytes = int(value) * 1024
setattr(self, 'mem_%s' % key, value_bytes)
except OSError:
print 'Failed to get domain information'
def get_if_devices(self, vir_dom):
    """Return the domain's network interface device names.

    Parses the libvirt XML description of *vir_dom* and collects the
    ``dev`` attribute of every ``devices/interface/target`` element,
    preserving document order and skipping duplicates.
    """
    xml_root = ElementTree.fromstring(vir_dom.XMLDesc(0))
    names = []
    for target_node in xml_root.findall("devices/interface/target"):
        name = target_node.get("dev")
        if name not in names:
            names.append(name)
    return names
def get_blk_devices(self, vir_dom):
    """Return the domain's block device names.

    Parses the libvirt XML description of *vir_dom* and collects the
    ``dev`` attribute of every ``devices/disk/target`` element,
    preserving document order and skipping duplicates.
    """
    xml_root = ElementTree.fromstring(vir_dom.XMLDesc(0))
    names = []
    for target_node in xml_root.findall("devices/disk/target"):
        name = target_node.get("dev")
        if name not in names:
            names.append(name)
    return names
def health(self):
    """Evaluate this domain's health.

    Returns a dict with an integer 'errorlevel' (escalated via
    set_errorlevel) and a list of human-readable 'errors'.  Warning
    level (1) for any interface error/drop counter above zero; error
    level (2) when the domain is blocked or crashed.
    """
    report = {'errorlevel': 0, 'errors': []}
    for name, value in vars(self).items():
        # only if_N_{rx,tx}_errors / if_N_{rx,tx}_drop counters match
        if re.match('if_.*_errors', name) and value > 0:
            report['errors'].append('Domain has network interface errors.')
            report['errorlevel'] = set_errorlevel(report['errorlevel'], 1)
        if re.match('if_.*_drop', name) and value > 0:
            report['errors'].append('Domain has network interface drops.')
            report['errorlevel'] = set_errorlevel(report['errorlevel'], 1)
    # blocked/crashed domains are a hard error
    if self.state in ('Blocked on resource', 'Crashed'):
        report['errors'].append('Domain is %s!' % self.state)
        report['errorlevel'] = set_errorlevel(report['errorlevel'], 2)
    return report
def inventory(self):
    """Return the domain's static facts as a dict of strings
    (maximum memory, vCPU count, state and UUID)."""
    return {
        'mem_max_bytes': '%i' % self.mem_max_bytes,
        'cpu_count': '%i' % self.cpu_count,
        'state': '%s' % self.state,
        'uuid': '%s' % self.uuid,
    }
def perfdata(self):
    """Return the domain's performance counters.

    Collects every per-interface (``if_*``) and per-block-device
    (``blk_*``) attribute, plus used memory and cumulative CPU time.
    """
    metrics = {name: value
               for name, value in vars(self).items()
               if re.match('if_.*', name) or re.match('blk_.*', name)}
    metrics['mem_used_bytes'] = self.mem_used_bytes
    metrics['cpu_time'] = self.cpu_time
    return metrics
def parse_args():
ap = argparse.ArgumentParser()
ap.add_argument('-d', '--discovery', action='store_true', help='Only output discovery data')
ap.add_argument('-i', '--inventory', action='store_true', help= |
hryamzik/ansible | lib/ansible/modules/cloud/azure/azure_rm_sqlserver.py | Python | gpl-3.0 | 10,749 | 0.002233 | #!/usr/bin/python
#
# Copyright (c) 2017 Zim Kalinowski, <zikalino@microsoft.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_sqlserver
version_added: "2.5"
short_description: Manage SQL Server instance
description:
- Create, update and delete instance of SQL Server
options:
resource_group:
description:
- The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
required: True
name:
description:
- The name of the server.
required: True
location:
description:
- Resource location.
admin_username:
description:
- Administrator username for the server. Once created it cannot be changed.
admin_password:
description:
- The administrator login password (required for server creation).
version:
description:
- "The version of the server. For example '12.0'."
identity:
description:
- "The identity type. Set this to 'SystemAssigned' in order to automatically create and assign an Azure Active Directory principal for the resour
ce. Possible values include: 'SystemAssigned'"
state:
description:
- Assert the state of the SQL server. Use 'present' to create or update a server and
'absent' to delete a server.
default: present
choices:
- absent
- present
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Zim Kalinowski (@zikalino)"
'''
EXAMPLES = '''
- name: Create (or update) SQL Server
azure_rm_sqlserver:
resource_group: resource_group
name: server_name
location: westus
admin_username: mylogin
admin_password: Testpasswordxyz12!
'''
RETURN = '''
id:
description:
- Resource ID.
returned: always
type: str
sample: /subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/sqlcrudtest-7398/providers/Microsoft.Sql/servers/sqlcrudtest-4645
version:
description:
- The version of the server.
returned: always
type: str
sample: 12.0
state:
description:
- The state of the server.
returned: always
type: str
sample: state
fully_qualified_domain_name:
description:
- The fully qualified domain name of the server.
returned: always
type: str
sample: sqlcrudtest-4645.database.windows.net
'''
import time
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_operation import AzureOperationPoller
from azure.mgmt.sql import SqlManagementClient
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
class Actions:
    """Enumeration of the operations the module may perform on the
    SQL server resource."""
    NoAction = 0
    Create = 1
    Update = 2
    Delete = 3
class AzureRMServers(AzureRMModuleBase):
"""Configuration class for an Azure RM SQL Server resource"""
def __init__(self):
    """Declare the module's argument spec and initial working state.

    The spec mirrors the documented module options; check mode and
    tags support are delegated to the Azure base class.
    """
    self.module_arg_spec = dict(
        resource_group=dict(
            type='str',
            required=True
        ),
        name=dict(
            type='str',
            required=True
        ),
        location=dict(
            type='str',
            required=False
        ),
        admin_username=dict(
            type='str',
            required=False
        ),
        admin_password=dict(
            type='str',
            # keep the password out of Ansible logs
            no_log=True,
            required=False
        ),
        version=dict(
            type='str',
            required=False
        ),
        identity=dict(
            type='str',
            required=False
        ),
        state=dict(
            type='str',
            required=False,
            default='present',
            choices=['present', 'absent']
        )
    )
    # populated from module parameters inside exec_module()
    self.resource_group = None
    self.name = None
    self.parameters = dict()
    self.results = dict(changed=False)
    self.mgmt_client = None
    self.state = None
    self.to_do = Actions.NoAction
    super(AzureRMServers, self).__init__(derived_arg_spec=self.module_arg_spec,
                                         supports_check_mode=True,
                                         supports_tags=True)
def exec_module(self, **kwarg | s):
"""Main module execution method" | ""
for key in list(self.module_arg_spec.keys()) + ['tags']:
if hasattr(self, key):
setattr(self, key, kwargs[key])
elif kwargs[key] is not None:
if key == "location":
self.parameters.update({"location": kwargs[key]})
elif key == "admin_username":
self.parameters.update({"administrator_login": kwargs[key]})
elif key == "admin_password":
self.parameters.update({"administrator_login_password": kwargs[key]})
elif key == "version":
self.parameters.update({"version": kwargs[key]})
elif key == "identity":
self.parameters.update({"identity": {"type": kwargs[key]}})
old_response = None
response = None
results = dict()
self.mgmt_client = self.get_mgmt_svc_client(SqlManagementClient,
base_url=self._cloud_environment.endpoints.resource_manager)
resource_group = self.get_resource_group(self.resource_group)
if "location" not in self.parameters:
self.parameters["location"] = resource_group.location
old_response = self.get_sqlserver()
if not old_response:
self.log("SQL Server instance doesn't exist")
if self.state == 'absent':
self.log("Old instance didn't exist")
else:
self.to_do = Actions.Create
else:
self.log("SQL Server instance already exists")
if self.state == 'absent':
self.to_do = Actions.Delete
elif self.state == 'present':
self.log("Need to check if SQL Server instance has to be deleted or may be updated")
self.to_do = Actions.Update
if (self.to_do == Actions.Create) or (self.to_do == Actions.Update):
self.log("Need to Create / Update the SQL Server instance")
if self.check_mode:
self.results['changed'] = True
return self.results
response = self.create_update_sqlserver()
response.pop('administrator_login_password', None)
if not old_response:
self.results['changed'] = True
else:
self.results['changed'] = old_response.__ne__(response)
self.log("Creation / Update done")
elif self.to_do == Actions.Delete:
self.log("SQL Server instance deleted")
self.results['changed'] = True
if self.check_mode:
return self.results
self.delete_sqlserver()
# make sure instance is actually deleted, for some Azure resources, instance is hanging around
# for some time after deletion -- this should be really fixed in Azure
while self.get_sqlserver():
time.sleep(20)
else:
self.log("SQL Server instance unchanged")
self.results['changed'] = False
response = old_response
if response:
self.results["id"] = response["id"]
self.results["version"] = response["version"]
|
alex/sentry | sentry/templatetags/sentry_admin_helpers.py | Python | bsd-3-clause | 965 | 0 | """
sentry.templatetags.sentry_admin_helpers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import datetime
from django import template
from django.db.models import Sum
register = template.Library()
@register.filter
def with_event_counts(project_list):
from sentry.models import ProjectCountByMinute
results = dict(ProjectCountByMinute.objects.filter(
project__in=project_list,
date__ | gte=datetime.datetime.now() - datetime.timedelta(days=30),
).values_list('project').annotate(
total_events=Sum('times_seen'),
).values_list('project', 'total_events'))
for project in project_list:
avg = results.get(project.pk, 0) / 30.0
if avg < 5:
avg = '%.1f' % avg
if avg == '0.0':
avg = 0
else:
avg | = int(avg)
yield project, avg
|
rcbops/quantum-buildpackage | quantum/plugins/cisco/ucs/cisco_ucs_inventory_configuration.py | Python | apache-2.0 | 1,019 | 0 | """
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2011 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY | KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Sumit Naiksatam, Cisco Systems, Inc.
#
"""
import os
from quantum.common.config import find_config_file
from quantum.plugins.cisco.common import cisco_configparser as confp
CONF_FILE = find_config_file({'plugin': 'cisco'}, | None, "ucs_inventory.ini")
CP = confp.CiscoConfigParser(CONF_FILE)
INVENTORY = CP.walk(CP.dummy)
|
wojtask/CormenPy | test/test_chapter16/test_textbook16_2.py | Python | gpl-3.0 | 1,705 | 0.002346 | import random
from unittest import TestCase
from hamcrest import *
from array_util import get_random_array
from chapter16.textbook16_2 import fractional_knapsack
from datastructures.array import Array
from util import between
def part_item_value(item_partial_weight, item_total_weight, item_value):
return item_partial_weight / item_total_weight * ite | m_value
def fractional_knapsack_heuristic(w, v, W, i=1):
    """Compute a lower bound on the optimal fractional-knapsack value.

    For each item *i* (1-indexed Array) the recursion tries taking
    nothing, half, or all of the item -- each amount capped by the
    remaining capacity *W* -- and keeps the best total value.  The true
    optimum is at least this bound.
    """
    take_full = min(w[i], W)
    if i == w.length:
        # last item: take as much of it as still fits
        return part_item_value(take_full, w[i], v[i])
    take_half = min(w[i] // 2, W)
    candidates = (
        fractional_knapsack_heuristic(w, v, W, i + 1),
        part_item_value(take_half, w[i], v[i])
        + fractional_knapsack_heuristic(w, v, W - take_half, i + 1),
        part_item_value(take_full, w[i], v[i])
        + fractional_knapsack_heuristic(w, v, W - take_full, i + 1),
    )
    return max(candidates)
class TestTextbook16_2(TestCase):
    """Randomized test for the textbook fractional-knapsack algorithm."""
    def test_fractional_knapsack(self):
        # build a random instance: n items with random weights/values
        # and a random capacity
        n = random.randint(1, 10)
        weights, weights_list = get_random_array(min_size=n, max_size=n)
        values, values_list = get_random_array(min_size=n, max_size=n)
        max_weight = random.randint(1, n * 1000)
        actual_knapsack = fractional_knapsack(weights, values, max_weight)
        # feasibility: the packed weight must fit the capacity
        assert_that(sum(actual_knapsack), is_(less_than_or_equal_to(max_weight)))
        actual_knapsack_value = sum([part_item_value(actual_knapsack[i], weights[i], values[i]) for i in between(1, n)])
        # quality: the solution must beat the brute-force heuristic's
        # lower bound on the optimum
        knapsack_value_bound = fractional_knapsack_heuristic(Array(weights_list), Array(values_list), max_weight)
        assert_that(actual_knapsack_value, is_(greater_than_or_equal_to(knapsack_value_bound)))
|
sserrot/champion_relationships | venv/Lib/site-packages/notebook/tests/selenium/conftest.py | Python | mit | 4,894 | 0.000409 | import json
import nbformat
from nbformat.v4 import new_notebook, new_code_cell
import os
import pytest
import requests
from subprocess import Popen
import sys
from tempfile import mkstemp
from testpath.tempdir import TemporaryDirectory
import time
from urllib.parse import urljoin
from selenium.webdriver import Firefox, Remote, Chrome
fr | om .utils import Notebook
pjoin = os.path.join
def _wait_for_server(proc, info_file_path):
"""Wait 30 seconds for the notebook server to start"""
for i in range(300):
if proc.poll() is not None:
raise RuntimeError("Notebook server failed to start")
if os.path.exists(info_f | ile_path):
try:
with open(info_file_path) as f:
return json.load(f)
except ValueError:
# If the server is halfway through writing the file, we may
# get invalid JSON; it should be ready next iteration.
pass
time.sleep(0.1)
raise RuntimeError("Didn't find %s in 30 seconds", info_file_path)
@pytest.fixture(scope='session')
def notebook_server():
    """Session fixture: launch a real notebook server in a temp dir.

    Yields the server's info dict (url, token, ...) augmented with
    'nbdir', 'extra_env' and the 'popen' handle, then shuts the server
    down through its REST API when the session ends.
    """
    info = {}
    with TemporaryDirectory() as td:
        nbdir = info['nbdir'] = pjoin(td, 'notebooks')
        # pre-create non-ASCII subdirectories to exercise unicode path
        # handling in the server
        os.makedirs(pjoin(nbdir, u'sub ∂ir1', u'sub ∂ir 1a'))
        os.makedirs(pjoin(nbdir, u'sub ∂ir2', u'sub ∂ir 1b'))
        # isolate config/runtime dirs from the developer's environment
        info['extra_env'] = {
            'JUPYTER_CONFIG_DIR': pjoin(td, 'jupyter_config'),
            'JUPYTER_RUNTIME_DIR': pjoin(td, 'jupyter_runtime'),
            'IPYTHONDIR': pjoin(td, 'ipython'),
        }
        env = os.environ.copy()
        env.update(info['extra_env'])
        command = [sys.executable, '-m', 'notebook',
                   '--no-browser',
                   '--notebook-dir', nbdir,
                   # run with a base URL that would be escaped,
                   # to test that we don't double-escape URLs
                   '--NotebookApp.base_url=/a@b/',
                   ]
        print("command=", command)
        proc = info['popen'] = Popen(command, cwd=nbdir, env=env)
        # the server writes its connection info (url, token, ...) to a
        # runtime JSON file named after its pid
        info_file_path = pjoin(td, 'jupyter_runtime',
                               'nbserver-%i.json' % proc.pid)
        info.update(_wait_for_server(proc, info_file_path))
        print("Notebook server info:", info)
        yield info
        # Shut the server down
        requests.post(urljoin(info['url'], 'api/shutdown'),
                      headers={'Authorization': 'token '+info['token']})
def make_sauce_driver():
    """Create a Remote webdriver connected to a Sauce Labs tunnel.

    Intended for Travis CI runs; raises KeyError when the expected
    SAUCE_* / TRAVIS_* environment variables are missing.
    """
    username = os.environ["SAUCE_USERNAME"]
    access_key = os.environ["SAUCE_ACCESS_KEY"]
    env = os.environ
    caps = {
        "tunnel-identifier": env["TRAVIS_JOB_NUMBER"],
        "build": env["TRAVIS_BUILD_NUMBER"],
        "tags": [env['TRAVIS_PYTHON_VERSION'], 'CI'],
        "platform": "Windows 10",
        "browserName": env['JUPYTER_TEST_BROWSER'],
        "version": "latest",
    }
    if caps['browserName'] == 'firefox':
        # Attempt to work around issue where browser loses authentication
        caps['version'] = '57.0'
    hub_url = "%s:%s@localhost:4445" % (username, access_key)
    print("Connecting remote driver on Sauce Labs")
    return Remote(desired_capabilities=caps,
                  command_executor="http://%s/wd/hub" % hub_url)
@pytest.fixture(scope='session')
def selenium_driver():
    """Session fixture: choose a webdriver from the environment.

    Sauce Labs when SAUCE_USERNAME is set (CI), else local Chrome or
    Firefox per JUPYTER_TEST_BROWSER; the browser is quit at teardown.
    """
    if os.environ.get('SAUCE_USERNAME'):
        driver = make_sauce_driver()
    elif os.environ.get('JUPYTER_TEST_BROWSER') == 'chrome':
        driver = Chrome()
    else:
        driver = Firefox()
    yield driver
    # Teardown
    driver.quit()
@pytest.fixture(scope='module')
def authenticated_browser(selenium_driver, notebook_server):
    """Module fixture: the shared browser, authenticated against the
    running notebook server via the token query parameter."""
    # stash the server info on the driver so page objects can reach it
    selenium_driver.jupyter_server_info = notebook_server
    selenium_driver.get("{url}?token={token}".format(**notebook_server))
    return selenium_driver
@pytest.fixture
def notebook(authenticated_browser):
    """Open a fresh notebook for the test; afterwards switch the
    browser back to the original (tree) window."""
    tree_wh = authenticated_browser.current_window_handle
    yield Notebook.new_notebook(authenticated_browser)
    authenticated_browser.switch_to.window(tree_wh)
@pytest.fixture
def prefill_notebook(selenium_driver, notebook_server):
    """Factory fixture: write a notebook with the given cells into the
    running server's directory and open it in the browser.

    Plain strings in *cells* are coerced to code cells; nbformat cell
    objects are used as-is.
    """
    def inner(cells):
        # coerce plain strings to code cells
        cells = [new_code_cell(c) if isinstance(c, str) else c
                 for c in cells]
        nb = new_notebook(cells=cells)
        # create the file inside the server's notebook dir so it is
        # reachable by URL
        fd, path = mkstemp(dir=notebook_server['nbdir'], suffix='.ipynb')
        with open(fd, 'w', encoding='utf-8') as f:
            nbformat.write(nb, f)
        fname = os.path.basename(path)
        selenium_driver.get(
            "{url}notebooks/{}?token={token}".format(fname, **notebook_server)
        )
        return Notebook(selenium_driver)
    return inner
cklb/PyMoskito | pymoskito/mpl_settings.py | Python | bsd-3-clause | 1,697 | 0.000589 | import logging
import matplotlib as mpl
from .tools import get_figure_size
_ | logger = logging.getLogger("mpl_settings")
orig_settings = {**mpl.rcParams}
latex_settings = {
# change this if using contex, xetex or lualatex
"pgf.texsystem": "pdflatex",
# use LaTeX to write all text
"text.usetex": True,
'font.family': 'lmodern',
# blank entries should cause plots to inherit f | onts from the document
# "font.serif": [],
# "font.sans-serif": [],
# "font.monospace": [],
# "text.fontsize": 11,
"legend.fontsize": 9, # Make the legend/label fonts a little smaller
"xtick.labelsize": 9,
"ytick.labelsize": 9,
"figure.figsize": get_figure_size(1), # default fig size of 1\textwidth
"lines.linewidth": 0.5,
"axes.labelsize": 11, # LaTeX default is 10pt font.
"axes.linewidth": 0.5,
"axes.unicode_minus": False,
# subfig related
"figure.subplot.left": 0.1,
"figure.subplot.right": 0.95,
"figure.subplot.bottom": 0.125,
"figure.subplot.top": 0.95,
# the amount of width reserved for blank space between subplots
"figure.subplot.wspace": 0.4,
# the amount of height reserved for white space between subplots
"figure.subplot.hspace": 0.4,
# Patches are graphical objects that fill 2D space, like polygons or circles
"patch.linewidth": 0.5,
}
def enable_latex():
    """Switch matplotlib to LaTeX-based text rendering for export.

    Adds the ``lmodern`` and ``siunitx`` packages to the LaTeX
    preamble and applies the rc overrides from ``latex_settings``.
    Idempotent: calling it repeatedly no longer stacks duplicate
    preamble entries (the original appended unconditionally).
    Reversible via :func:`disable_latex`.
    """
    _logger.info("LaTeX export enabled")
    # NOTE(review): assumes the list-valued 'text.latex.preamble' of
    # matplotlib < 3.3 -- confirm before upgrading matplotlib.
    preamble = mpl.rcParams['text.latex.preamble']
    for package in (r'\usepackage{lmodern}', r'\usepackage{siunitx}'):
        if package not in preamble:
            preamble.append(package)
    mpl.rcParams.update(latex_settings)
def disable_latex():
    """Restore the rc settings captured at import time.

    Updates the live ``mpl.rcParams`` object in place.  The previous
    implementation rebound the module attribute
    (``mpl.rcParams = orig_settings``), which leaves every module that
    already holds a reference to the original RcParams instance --
    including matplotlib's own internals -- still using the LaTeX
    settings, so the restore silently had no effect.
    """
    _logger.info("LaTeX export disabled")
    mpl.rcParams.update(orig_settings)
|
rcoup/traveldash | traveldash/mine/views.py | Python | bsd-3-clause | 7,758 | 0.001547 | import json
from django.http import Http404, HttpResponse, HttpResponseRedirect
from django.template.response import TemplateResponse
from django.views.decorators.cache import cache_control
from django.views.decorators.vary import vary_on_cookie
from django.views.generic import DeleteView
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.contrib import messages
from django import forms
from django.forms.models import inlineformset_factory
from django.core.urlresolvers import reverse
from django.conf import settings
from django.contrib.gis.utils import GeoIP
from bootstrap.forms import BootstrapModelForm
from traveldash.mine.models import Dashboard, DashboardRoute, City
from traveldash.gtfs.models import Route, Stop
@vary_on_cookie
def home(request):
    """Landing page.

    Signed-in users who navigated here directly (no HTTP referer,
    after we have previously seen one from them) are redirected to
    their most recently viewed dashboard, falling back to the
    dashboard list.  Everyone else gets the marketing page with a
    random populated dashboard as an example.
    """
    if request.user.is_authenticated():
        # if user is signed in, take them to their most recent
        # dashboard unless they have a referer (ie. they didn't type it)
        referer = request.META.get('HTTP_REFERER')
        # if we've never seen a referer from this user, then don't do a
        # redirect
        if (not referer) and request.session.get('seen_http_referer', False):
            dash_pk = request.session.get('td_last_dashboard')
            # fallback: dashboard list
            redirect = reverse('traveldash.mine.views.dashboard_list')
            if dash_pk:
                try:
                    dashboard = Dashboard.objects.filter(user=request.user).get(pk=dash_pk)
                    redirect = dashboard.get_absolute_url()
                except Dashboard.DoesNotExist:
                    # stale session entry; fall through to the list view
                    pass
            return HttpResponseRedirect(redirect)
    # anonymous (or refered) visitors: show a random example dashboard
    example_dashboard = Dashboard.objects.exclude(routes__isnull=True).order_by('?')[0]
    return TemplateResponse(request, "mine/home.html", {'example_dashboard': example_dashboard})
@vary_on_cookie
def dashboard(request, pk):
    """Render a single dashboard page.

    Any visitor may view a dashboard; the template is told whether the
    visitor owns it.  Every view bumps the dashboard's last-viewed
    timestamp, and owners get it recorded in the session as their most
    recently used dashboard.
    """
    try:
        board = Dashboard.objects.get(pk=pk)
    except Dashboard.DoesNotExist:
        raise Http404
    board.touch()
    viewer_is_owner = (board.user == request.user)
    if viewer_is_owner:
        request.session['td_last_dashboard'] = board.pk
    return TemplateResponse(request, "mine/dashboard.html", {
        'dashboard': board,
        'is_owner': viewer_is_owner,
    })
@vary_on_cookie
@cache_control(must_revalidate=True)
def dashboard_update(request, pk):
    """JSON endpoint polled by the dashboard page for fresh data.

    Returns the dashboard's serialized state, or a 404 JSON error
    object when the dashboard does not exist.
    """
    try:
        board = Dashboard.objects.get(pk=pk)
    except Dashboard.DoesNotExist:
        body = json.dumps({"error": "dashboard-not-found"})
        return HttpResponse(body, status=404, content_type="application/json")
    return HttpResponse(json.dumps(board.as_json()),
                        content_type="application/json")
@login_required
def dashboard_list(request):
    """List the signed-in user's dashboards.

    The site's base URL (without trailing slash) is passed along so
    the template can show shareable absolute links.
    """
    base_url = request.build_absolute_uri('/')[:-1]
    context = {
        'dashboard_list': Dashboard.objects.filter(user=request.user),
        'base_url': base_url,
    }
    return TemplateResponse(request, "mine/dashboard_list.html", context)
class RouteForm(BootstrapModelForm):
    """Inline form for one DashboardRoute: a from/to stop pair plus
    the walking time at each end."""
    class Meta:
        model = DashboardRoute
        fields = ('id', 'from_stop', 'walk_time_start', 'to_stop', 'walk_time_end',)
        widgets = {
            # plain text inputs; the 'gtfsStop' class hooks them up to
            # the client-side stop picker
            'from_stop': forms.TextInput(attrs={'class': 'gtfsStop'}),
            'to_stop': forms.TextInput(attrs={'class': 'gtfsStop'}),
        }
    def clean(self):
        """Cross-field validation: reject stop pairs with no transport
        route between them."""
        cd = self.cleaned_data
        if ('from_stop' in cd) and ('to_stop' in cd):
            if not Route.objects.between_stops(cd['from_stop'], cd['to_stop']).exists():
                raise forms.ValidationError("No Transport routes between the stops you've selected")
        return cd
    def stop_json(self):
        """Serialize both selected stops as JSON for the client-side
        map/picker widgets."""
        return json.dumps({
            'from_stop': self._stop_info(self['from_stop'].value()),
            'to_stop': self._stop_info(self['to_stop'].value()),
        })
    def _stop_info(self, stop_id):
        # Return {id, name, location} for a stop id, or None when the
        # field is empty or the stop no longer exists.
        if not stop_id:
            return None
        try:
            stop = Stop.objects.get(pk=stop_id)
            return {'id': stop.pk, 'name': stop.name, 'location': stop.location.tuple}
        except Stop.DoesNotExist:
            return None
RouteFormSet = inlineformset_factory(Dashboard, DashboardRoute, form=RouteForm, extra=1)
class DashboardForm(BootstrapModelForm):
    """Form for a Dashboard's own fields; 'user' and 'last_viewed' are
    set programmatically, never from user input."""
    class Meta:
        model = Dashboard
        exclude = ('user', 'last_viewed',)
@login_required
def dashboard_create(request):
if request.method == "POST":
form = DashboardForm(request.POST)
if form.is_valid():
dashboard = form.save(commit=False)
dashboard.user = request.user
route_formset = RouteFormSet(request.POST, instance=dashboard)
if route_formset.is_valid():
dashboard.save()
route_formset.save()
messages.success(request, "Created!")
return HttpResponseRedirect(dashboard.get_absolute_url())
else:
route_formset = RouteFormSet(instance=Dashboard())
else:
# try to find the best city match
initial = {}
if request.user.dashboards.exists():
# Use existing city to start with
initial['city'] = request.user.dashboards.all()[0].city
else:
# try a GeoIP | lookup
geoip = GeoIP().geos(request.META['REMOTE_ADDR'])
if geoip:
initial['city'] = City.objects.distance(geoip).order_by('-distance')[0]
form = DashboardForm(initial=initial)
| route_formset = RouteFormSet(instance=Dashboard())
context = {
'form': form,
'route_formset': route_formset,
'title': 'New Dashboard',
'stopFusionTableId': settings.GTFS_STOP_FUSION_TABLE_ID,
'city_data': json.dumps(City.objects.get_map_info()),
}
return TemplateResponse(request, "mine/dashboard_form.html", context)
@login_required
def dashboard_edit(request, pk):
    """Edit an existing dashboard and its routes (owner only).

    Saving with every route removed deletes the dashboard itself.
    Raises Http404 when the dashboard does not exist or belongs to
    another user.
    """
    try:
        dashboard = Dashboard.objects.filter(user=request.user).get(pk=pk)
    except Dashboard.DoesNotExist:
        raise Http404
    if request.method == "POST":
        form = DashboardForm(request.POST, instance=dashboard)
        # Bind the formset unconditionally: previously it was only
        # created inside `if form.is_valid()`, so an invalid main form
        # raised UnboundLocalError when building the render context.
        route_formset = RouteFormSet(request.POST, instance=dashboard)
        if form.is_valid():
            # don't persist yet -- the route formset must validate too
            form.save(commit=False)
            if route_formset.is_valid():
                dashboard.save()
                route_formset.save()
                if dashboard.routes.count() == 0:
                    # an empty dashboard is useless; remove it
                    dashboard.delete()
                    messages.success(request, "Deleted empty dashboard")
                    return HttpResponseRedirect(reverse('traveldash.mine.views.dashboard_list'))
                else:
                    messages.success(request, "Saved")
                    return HttpResponseRedirect(dashboard.get_absolute_url())
    else:
        form = DashboardForm(instance=dashboard)
        route_formset = RouteFormSet(instance=dashboard)
    context = {
        'form': form,
        'route_formset': route_formset,
        'title': 'Edit Dashboard',
        'dashboard': dashboard,
        'stopFusionTableId': settings.GTFS_STOP_FUSION_TABLE_ID,
        'city_data': json.dumps(City.objects.get_map_info()),
    }
    return TemplateResponse(request, "mine/dashboard_form.html", context)
class DashboardDelete(DeleteView):
context_object_name = "dashboard"
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(DashboardDelete, self).dispatch(*args, **kwargs)
def get_queryset(self):
return Dashboard.objects.filter(user=self.request.user)
def get_success_url(self):
return reverse('traveldash.mine.views.dashboard_list')
|
anhstudios/swganh | data/scripts/templates/object/tangible/ship/crafted/weapon/shared_quick_shot_upgrade_mk4.py | Python | mit | 484 | 0.045455 | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXA | MPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/ship/crafted/weapon/shared_quick_shot_upgrade_mk4.iff"
result.attribute_template_id = 8
result.stfName("space_crafting_n","quick_shot_upgrade_mk4")
#### BEGIN MODIFICATIONS ####
#### END MO | DIFICATIONS ####
return result |
heartsucker/securedrop | securedrop/alembic/versions/2d0ce3ee5bdc_added_passphrase_hash_column_to_.py | Python | agpl-3.0 | 1,753 | 0.002282 | """added passphrase_hash column to journalists table
Revision ID: 2d0ce3ee5bdc
Revises: fccf57ceef02
Create Date: 2018-06-08 15:08:37.718268
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2d0ce3ee5bdc'
down_revision = 'fccf57ceef02'
branch_labels = None
depends_on = None
def upgrade():
op.add_column('journalists', sa.Column('passphrase_hash', sa.String(length=256), nullable=True))
def downgrade():
# sqlite has no `drop column` command, so we recreate the original table
# then | load it from a temp table
op.rename_table('journalists', 'journalists_tmp')
op.create_table( | 'journalists',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=255), nullable=False),
sa.Column('pw_salt', sa.Binary(), nullable=True),
sa.Column('pw_hash', sa.Binary(), nullable=True),
sa.Column('is_admin', sa.Boolean(), nullable=True),
sa.Column('otp_secret', sa.String(length=16), nullable=True),
sa.Column('is_totp', sa.Boolean(), nullable=True),
sa.Column('hotp_counter', sa.Integer(), nullable=True),
sa.Column('last_token', sa.String(length=6), nullable=True),
sa.Column('created_on', sa.DateTime(), nullable=True),
sa.Column('last_access', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('username')
)
conn = op.get_bind()
conn.execute('''
INSERT INTO journalists
SELECT id, username, pw_salt, pw_hash, is_admin, otp_secret, is_totp,
hotp_counter, last_token, created_on, last_access
FROM journalists_tmp
''')
op.drop_table('journalists_tmp')
|
Letractively/aha-gae | aha/widget/tests/test_fields.py | Python | bsd-3-clause | 5,382 | 0.026756 | # -*- coding: utf-8 -*-
from unittest import TestCase
import logging
log = logging.getLogger(__name__)
from formencode import validators, Invalid
from nose.tools import *
import formencode
from coregae.widget.field import *
class TestBasefield(TestCase):
def test_basefield(self):
"""
Test for functions of BaseFields
"""
c = {'id':'AB1234', 'size':20}
bf = TextField(name = 'foo', args = c)
body = bf.render_body()
| assert_true( 'name = "foo"' in body)
as | sert_true( 'id = "AB1234"' in body)
body = bf.render_body(value = 'VALUESTRING')
assert_true("VALUESTRING" in body)
bf.title = 'THETITLE'
assert_true("THETITLE" in bf.get_title())
def test_validate(self):
"""
Test for MediaHandler, validation
"""
v = validators
c = {'id':'AB1234', 'size':20}
bf = TextField(name = 'foo', args = c, validator = v.Int())
assert_equal(bf.validate('1')[0], 1)
assert_equal(bf.validate('A')[0], None)
# test for multiple validators
bf = TextField(name = 'foo', args = c,
validator = ( v.Int(), v.OneOf([1, 2, 3]) ))
assert_equal(bf.validate('1')[0], 1)
assert_equal(bf.validate(2)[0], 2)
assert_equal(bf.validate('A')[0], None)
assert_equal(bf.validate('4')[0], None)
assert_equal(bf.validate(10)[0], None)
bf = TextField(name = 'foo', args = c,
validator = ( v.Int(), v.OneOf([1, 2, 3]), ),
required = True)
r = bf.validate('')
assert_equal(r[0], None)
def test_fields(self):
"""
Test for functions of subclass of BaseField
"""
c = {'id':'AB1234', 'size':20}
tf = TextField(name = 'foo', args = c)
body = tf.render_body()
assert_true( 'name = "foo"' in body)
assert_true( 'id = "AB1234"' in body)
hf = HiddenField(name = 'foo', args = c, default = 'defoo')
body = hf.render_body(value = 'VALUESTRING')
assert_true("VALUESTRING" in body)
body = hf.render_body()
assert_true("defoo" in body)
rf = RadioField(name = 'foo', args = c,
values = (('vfoo', 'v1'), ('vbar', 'v2')))
body = rf.render_body()
for v in ('vfoo', 'vbar'):
assert_true(">%s<" % v in body)
for v in ('v1', 'v2'):
assert_true("value = '%s'" % v in body)
assert_true("checked" not in body)
body = rf.render_body(value = 'v2')
assert_true("checked" in body)
cg = CheckboxGroup(name = 'foo', args = c,
values = (('vfoo', 'v1'), ('vbar', 1)))
body = cg.render_body()
for v in ('vfoo', 'vbar'):
assert_true(">%s<" % v in body)
for v in ('v1', '1'):
assert_true('value = "%s"' % v in body)
for v in ('v1', '1'):
assert_true('name = "foo_%s"' % v in body)
assert_true("checked" not in body)
body = cg.render_body(value = 'v1')
assert_true("checked" in body)
body = cg.render_body(value = [1])
assert_true("checked" in body)
v = validators
cg2 = CheckboxGroup(name = 'foo', args = c,
values = (('vfoo', 'v1'), ('vbar', 'v2')),
validator = v.Int())
t = cg2.validate({'foo_v1':'1', 'foo_v2':'a', 'foo_g3':'b'})
assert_equal(t[0][1], None)
assert_true(isinstance(t[0][2], Invalid))
t = cg2.validate({'foo_v1':'1', 'foo_v2':'2', 'foo_g3':'b'})
assert_equal(sorted(t[0][1]), [1, 2])
assert_equal(t[0][2], None)
sf = SelectField(name = 'foo', args = c,
values = (('vfoo', 'v1'), ('vbar', 'v2')))
body = sf.render_body()
for v in ('vfoo', 'vbar'):
assert_true("> %s </option>" % v in body)
for v in ('v1', 'v2'):
assert_true('value = "%s"' % v in body)
assert_true("selected" not in body)
body = sf.render_body(value = 'v2')
assert_true("selected" in body)
cf = CheckboxField(name = 'foo', args = c)
body = cf.render_body()
assert_true('name = "foo"' in body)
cf = CheckboxField(name = 'foo')
body = cf.render_body(value = True)
assert_true("checked" in body)
tf = TextArea(name = 'foo', args = c)
body = tf.render_body()
assert_true('name = "foo"' in body)
body = tf.render_body(value = 'this is body<body>')
assert_true(">this is body<body><" in body)
rt = RichText(name = 'foo', args = c)
assert_equal(len(rt.get_objects()), 1)
assert_equal(len(rt.get_object_tag()), 1)
ff = FileField(name = 'foo')
body = ff.render_body()
assert_true('type = "file"' in body)
assert_false('disabled' in body)
body = ff.render_body('bar')
assert_true(ff.REPLACE_PREFIX+'foo' in body)
imgf = ImageField(name = 'foo')
body = imgf.render_body(value = 'path/to/image')
assert_true("path/to/image" in body)
tf = TextField(name = 'foo', args = c, default = 'bar')
body = tf.render_body()
assert_true( 'value = "bar"' in body)
|
jscn/django | tests/logging_tests/tests.py | Python | bsd-3-clause | 18,252 | 0.001041 | # -*- coding:utf-8 -*-
from __future__ import unicode_literals
import logging
import warnings
from admin_scripts.tests import AdminScriptTestCase
from django.conf import settings
from django.core import mail
from django.core.files.temp import NamedTemporaryFile
from django.db import connection
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import LoggingCaptureMixin, patch_logger
from django.utils.deprecation import RemovedInNextVersionWarning
from django.utils.log import (
DEFAULT_LOGGING, AdminEmailHandler, CallbackFilter, RequireDebugFalse,
RequireDebugTrue,
)
from .logconfig import MyEmailBackend
# logging config prior to using filter with mail_admins
OLD_LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
class LoggingFiltersTest(SimpleTestCase):
def test_require_debug_false_filter(self):
"""
Test the RequireDebugFalse filter class.
"""
filter_ = RequireDebugFalse()
with self.settings(DEBUG=True):
self.assertEqual(filter_.filter("record is not used"), False)
with self.settings(DEBUG=False):
self.assertEqual(filter_.filter("record is not used"), True)
def test_require_debug_true_filter(self):
"""
Test the RequireDebugTrue filter class.
"""
filter_ = RequireDebugTrue()
with self.settings(DEBUG=True):
self.assertEqual(filter_.filter("record is not used"), True)
with self.settings(DEBUG=False):
self.assertEqual(filter_.filter("record is not used"), False)
class SetupDefaultLoggingMixin(object):
@classme | thod
def setUpClass(cls):
super(SetupDefaultLoggingMixin, cls).setUpClass()
cls._logging = settings.LOGGING
logging.config.dictConfig(DEFAULT_LOGGING)
@classmethod
def tearDownClass(cls):
super(SetupDefaultLoggingMixin, cls).tearDownClass()
logging.config.dictConfig(cls._logging)
class DefaultLoggingTests(SetupDefaultLoggingMixin, LoggingCaptureMixin, SimpleTestCase):
def | test_django_logger(self):
"""
The 'django' base logger only output anything when DEBUG=True.
"""
self.logger.error("Hey, this is an error.")
self.assertEqual(self.logger_output.getvalue(), '')
with self.settings(DEBUG=True):
self.logger.error("Hey, this is an error.")
self.assertEqual(self.logger_output.getvalue(), 'Hey, this is an error.\n')
@override_settings(DEBUG=True)
def test_django_logger_warning(self):
self.logger.warning('warning')
self.assertEqual(self.logger_output.getvalue(), 'warning\n')
@override_settings(DEBUG=True)
def test_django_logger_info(self):
self.logger.info('info')
self.assertEqual(self.logger_output.getvalue(), 'info\n')
@override_settings(DEBUG=True)
def test_django_logger_debug(self):
self.logger.debug('debug')
self.assertEqual(self.logger_output.getvalue(), '')
@override_settings(DEBUG=True, ROOT_URLCONF='logging_tests.urls')
class HandlerLoggingTests(SetupDefaultLoggingMixin, LoggingCaptureMixin, SimpleTestCase):
def test_page_found_no_warning(self):
self.client.get('/innocent/')
self.assertEqual(self.logger_output.getvalue(), '')
def test_page_not_found_warning(self):
self.client.get('/does_not_exist/')
self.assertEqual(self.logger_output.getvalue(), 'Not Found: /does_not_exist/\n')
@override_settings(
DEBUG=True,
USE_I18N=True,
LANGUAGES=[('en', 'English')],
MIDDLEWARE_CLASSES=[
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
],
ROOT_URLCONF='logging_tests.urls_i18n',
)
class I18nLoggingTests(SetupDefaultLoggingMixin, LoggingCaptureMixin, SimpleTestCase):
def test_i18n_page_found_no_warning(self):
self.client.get('/exists/')
self.client.get('/en/exists/')
self.assertEqual(self.logger_output.getvalue(), '')
def test_i18n_page_not_found_warning(self):
self.client.get('/this_does_not/')
self.client.get('/en/nor_this/')
self.assertEqual(self.logger_output.getvalue(), 'Not Found: /this_does_not/\nNot Found: /en/nor_this/\n')
class WarningLoggerTests(SimpleTestCase):
"""
Tests that warnings output for RemovedInDjangoXXWarning (XX being the next
Django version) is enabled and captured to the logging system
"""
def setUp(self):
# If tests are invoke with "-Wall" (or any -W flag actually) then
# warning logging gets disabled (see configure_logging in django/utils/log.py).
# However, these tests expect warnings to be logged, so manually force warnings
# to the logs. Use getattr() here because the logging capture state is
# undocumented and (I assume) brittle.
self._old_capture_state = bool(getattr(logging, '_warnings_showwarning', False))
logging.captureWarnings(True)
def tearDown(self):
# Reset warnings state.
logging.captureWarnings(self._old_capture_state)
@override_settings(DEBUG=True)
def test_error_filter_still_raises(self):
with warnings.catch_warnings():
warnings.filterwarnings(
'error',
category=RemovedInNextVersionWarning
)
with self.assertRaises(RemovedInNextVersionWarning):
warnings.warn('Foo Deprecated', RemovedInNextVersionWarning)
class CallbackFilterTest(SimpleTestCase):
def test_sense(self):
f_false = CallbackFilter(lambda r: False)
f_true = CallbackFilter(lambda r: True)
self.assertEqual(f_false.filter("record"), False)
self.assertEqual(f_true.filter("record"), True)
def test_passes_on_record(self):
collector = []
def _callback(record):
collector.append(record)
return True
f = CallbackFilter(_callback)
f.filter("a record")
self.assertEqual(collector, ["a record"])
class AdminEmailHandlerTest(SimpleTestCase):
logger = logging.getLogger('django')
def get_admin_email_handler(self, logger):
# Ensure that AdminEmailHandler does not get filtered out
# even with DEBUG=True.
admin_email_handler = [
h for h in logger.handlers
if h.__class__.__name__ == "AdminEmailHandler"
][0]
return admin_email_handler
def test_fail_silently(self):
admin_email_handler = self.get_admin_email_handler(self.logger)
self.assertTrue(admin_email_handler.connection().fail_silently)
@override_settings(
ADMINS=[('whatever admin', 'admin@example.com')],
EMAIL_SUBJECT_PREFIX='-SuperAwesomeSubject-'
)
def test_accepts_args(self):
"""
Ensure that user-supplied arguments and the EMAIL_SUBJECT_PREFIX
setting are used to compose the email subject.
Refs #16736.
"""
message = "Custom message that says '%s' and '%s'"
token1 = 'ping'
token2 = 'pong'
admin_email_handler = self.get_admin_email_handler(self.logger)
# Backup then override original filters
orig_filters = admin_email_handler.filters
try:
admin_email_handler.filters = []
self.logger.error(message, token1, token2)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['admin@example.com'])
self.assertEqual(mail.outbox[0].subject,
"-SuperAwesomeSubject-ERROR: Custom message that says 'ping' and 'pong'")
finally:
# Restore original filters
admin_email_handler.filters = orig_filters
@override_settings(
|
adaptive-learning/robomission | backend/learn/tests/locustfile.py | Python | gpl-3.0 | 3,918 | 0.000766 | """Configuration for a load testing using Locust.
To start load testing, run `make server` and `make test-load`.
"""
import random
from json.decoder import JSONDecodeError
from django.urls import reverse
from locust import HttpLocust, TaskSet, task
class SolvingTaskBehavior(TaskSet):
"""Describes interaction of a simulated user with a single task.
The users requests a randomly choosen task,
then she does random number of edits and unsuccessful executions,
and finally she solves the task.
"""
SOLVE_PROBABILITY = 0.3
def on_start(self):
selected_task = random.choice(self.parent.task_names)
self.start_task(selected_task)
def start_task(self, task_name):
url = self.parent.action_urls['start_task']
data = {'task': task_name}
response = self.parent.post_with_cookies(url, data)
self.task_session_id = response.json()['task_session_id']
self.edit_program()
@task(1)
def run_program(self):
url = self.parent.action_urls['run_program']
solved = random.random() < self.SOLVE_PROBABILITY
data = {
'task-session-id': self.task_session_id,
'program': 'f',
'correct': solved}
self.parent.post_with_cookies(url, data)
if solved:
self.interrupt()
@task(5)
def edit_program(self):
url = self.parent.action_urls['edit_program']
data = {
'task-session-id': self.task_session_id,
'program': 'f'}
self.parent.post_with_cookies(url, data)
class UserBehavior(TaskSet):
"""Describes interaction of a simulated user with the server.
"""
tasks = [SolvingTaskBehavior]
def __init__(self, parent):
super().__init__(parent)
self.cookies = {}
self.action_urls = {}
self.task_names = None
def on_start(self):
"""Fill in cookies so that post request can be made later.
"""
response = self.visit_homepage()
self.save_cookies(response)
self.save_tasks()
self.save_action_urls()
def visit_homepage(self):
response = self.client.get('/')
return response
def save_tasks(self):
re | sponse = self.client.get('/learn/api/tasks/')
self.save_cookies(response)
self.task_names = [task['name'] for task in response.json()]
def save_action_urls(self):
"""The session and lazy user is created. Now tasks can be solved.
"""
user_response = self.client.get('/learn/api/users/current')
self.save_cookies(user_response)
| student_url = user_response.json()['student']
student_response = self.client.get(student_url)
self.save_cookies(student_response)
self.action_urls['start_task'] = student_response.json()['start_task']
self.action_urls['edit_program'] = student_response.json()['edit_program']
self.action_urls['run_program'] = student_response.json()['run_program']
def save_cookies(self, response):
"""Stores cookies for later usage.
"""
self.cookies.update(response.cookies.get_dict())
def post_with_cookies(self, url, data):
"""Post request with correctly set cookies and headers.
"""
csrf_token = self.cookies['csrftoken']
data['csrfmiddlewaretoken'] = csrf_token
headers = {'X-CSRFToken': csrf_token, 'Referer': self.client.base_url}
response = self.client.post(url, data, headers=headers, cookies=self.cookies)
self.save_cookies(response)
self.log_errors(response)
return response
@staticmethod
def log_errors(response):
if not response.ok:
with open('request_errors.log', 'a') as f:
f.writelines(response.text)
class WebsiteUser(HttpLocust):
task_set = UserBehavior
min_wait = 500
max_wait = 5000
|
HydrelioxGitHub/home-assistant | homeassistant/components/neato/switch.py | Python | apache-2.0 | 3,290 | 0 | """Support for Neato Connected Vacuums switches."""
import logging
from datetime import timedelta
import requests
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.components.neato import NEATO_ROBOTS, NEATO_LOGIN
_LOGGER = | logging.getLo | gger(__name__)
DEPENDENCIES = ['neato']
SCAN_INTERVAL = timedelta(minutes=10)
SWITCH_TYPE_SCHEDULE = 'schedule'
SWITCH_TYPES = {
SWITCH_TYPE_SCHEDULE: ['Schedule']
}
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Neato switches."""
dev = []
for robot in hass.data[NEATO_ROBOTS]:
for type_name in SWITCH_TYPES:
dev.append(NeatoConnectedSwitch(hass, robot, type_name))
_LOGGER.debug("Adding switches %s", dev)
add_entities(dev)
class NeatoConnectedSwitch(ToggleEntity):
"""Neato Connected Switches."""
def __init__(self, hass, robot, switch_type):
"""Initialize the Neato Connected switches."""
self.type = switch_type
self.robot = robot
self.neato = hass.data[NEATO_LOGIN]
self._robot_name = '{} {}'.format(
self.robot.name, SWITCH_TYPES[self.type][0])
try:
self._state = self.robot.state
except (requests.exceptions.ConnectionError,
requests.exceptions.HTTPError) as ex:
_LOGGER.warning("Neato connection error: %s", ex)
self._state = None
self._schedule_state = None
self._clean_state = None
self._robot_serial = self.robot.serial
def update(self):
"""Update the states of Neato switches."""
_LOGGER.debug("Running switch update")
self.neato.update_robots()
try:
self._state = self.robot.state
except (requests.exceptions.ConnectionError,
requests.exceptions.HTTPError) as ex:
_LOGGER.warning("Neato connection error: %s", ex)
self._state = None
return
_LOGGER.debug('self._state=%s', self._state)
if self.type == SWITCH_TYPE_SCHEDULE:
_LOGGER.debug("State: %s", self._state)
if self._state['details']['isScheduleEnabled']:
self._schedule_state = STATE_ON
else:
self._schedule_state = STATE_OFF
_LOGGER.debug("Schedule state: %s", self._schedule_state)
@property
def name(self):
"""Return the name of the switch."""
return self._robot_name
@property
def available(self):
"""Return True if entity is available."""
return self._state
@property
def unique_id(self):
"""Return a unique ID."""
return self._robot_serial
@property
def is_on(self):
"""Return true if switch is on."""
if self.type == SWITCH_TYPE_SCHEDULE:
if self._schedule_state == STATE_ON:
return True
return False
def turn_on(self, **kwargs):
"""Turn the switch on."""
if self.type == SWITCH_TYPE_SCHEDULE:
self.robot.enable_schedule()
def turn_off(self, **kwargs):
"""Turn the switch off."""
if self.type == SWITCH_TYPE_SCHEDULE:
self.robot.disable_schedule()
|
mvendra/mvtools | tests/convcygpath_test.py | Python | mit | 3,080 | 0.007143 | #!/usr/bin/env python3
import os
import shutil
import unittest
from unittest import mock
from unittest.mock import patch
import mvtools_test_fixture
import convcygpath
import get_platform
class ConvCygPathTest(unittest.TestCase):
def setUp(self):
v, r = self.delegate_setUp()
if not v:
self.tearDown()
self.fail(r)
def delegate_setUp(self):
v, r = mvtools_test_fixture.makeAndGetTestFolder("convcygpath_test")
if not v:
return v, r
self.test_base_dir = r[0]
self.test_dir = r[1]
return True, ""
def tearDown(self):
shutil.rmtree(self.test_base_dir)
def testConvertCygwinPathToWinPath(self):
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path(""), None)
with mock.patch("get_platform.getplat", return_value=get_platform.PLAT_CYGWIN):
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("/"), None)
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("\\"), None)
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("\\first"), None)
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("\\first\\second"), None)
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("/cigdrive"), "C:/cygwin/cigdrive")
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("/cigdrive/c"), "C:/cygwin/cigdrive/c")
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("/cigdrive/mp1"), "C:/cygwin/cigdrive/mp1")
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("/cygdrive/c"), "C:")
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("/cygdrive/mp1"), "MP1:")
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("/cygdrive/mp1/"), "MP1:")
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("/cygdrive/c/"), "C:")
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("/cygdrive/c/mp1"), "C:/mp1")
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("/cygdrive/c/mp1/first/second"), "C:/mp1/first/second")
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("/cygdrive/c/mp1/first/second/"), "C:/mp1/first/second")
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("/cygdrive/c/mp1/first/second/"), "C:/mp1/first/second")
with mock.patch("m | vtools_envvars.mvtools_envvar_read_cygwin_install_path", return_value=(False, "error-message")):
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("/home/user | /folder"), "C:/cygwin/home/user/folder")
with mock.patch("mvtools_envvars.mvtools_envvar_read_cygwin_install_path", return_value=(True, "D:/cygwin_custom_install_folder/cygwin")):
self.assertEqual(convcygpath.convert_cygwin_path_to_win_path("/home/user/folder"), "D:/cygwin_custom_install_folder/cygwin/home/user/folder")
if __name__ == '__main__':
unittest.main()
|
mpescimoro/stripp3r | lessonEntity.py | Python | gpl-3.0 | 1,603 | 0.007486 | __author__ = 'Paolo Bellagente'
# Documentation for this module.
#
# More details.
################################## DATABASE ##############################################
from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker
import datetime
## Database name
db_name = "testDatabase"
## Database user
db_uid = "root"
## Database user's password
db_passwd = ""
## Database host
db_host = "localhost"
##
# set the database connection engine
engine = create_engine('mysql+pymysql://'+db_uid+':'+db_passwd+'@'+db_host+'/'+db_name)
## Classe base per l'ereditarieta' delle tabelle
#
# Permette di istanziare una volta la classe base e riutilizzarla
Base = declarative_base()
class Lesson(Base):
__tablename__ = 'lessons'
id = Column(INTEGER, primary_key=True)
semesterStartDate = Column(DATE)
semesterEndDate = Column(DATE)
# lesson's | start hour
hour = Column(TIME)
# lesson's day of the week coded form 0 to 6 where 0 is monday and 6 is sunday.
day = Column(INTEGER)
subject = Column(VARCHAR(200))
rooms = Column(VARCHAR(30))
address = Column(VARCHAR(50))
| teacher = Column(VARCHAR(50))
def __init__(self):
self.teacher = ''
# persist the entity into the database
def persist(self):
Session = sessionmaker(bind=engine)
session = Session()
session.add(self)
session.commit()
session.close()
# todo: create new entity here
## Create the necesary tables into the databse
Base.metadata.create_all(engine)
|
PisiLinux-PyQt5Port/pisilinux-desktop-services | pds/quniqueapp.py | Python | gpl-2.0 | 2,838 | 0.002468 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Pardus Desktop Services
# Copyright (C) 2010, TUBITAK/UEKAE
# 2010 - Gökmen Göksel <gokmen:pardus.org.tr>
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
import sys
import signal
# PyQt5 Core Libraries
from PyQt5.QtNetwork import *
from PyQt5.QtWidgets import QApplication
from PyQt5.QtCore import QLocale
from PyQt5.QtCore import QIODevice
from PyQt5.QtCore import QTranslator
from PyQt5.QtCore import QLibraryInfo
class QUniqueApplication(QApplication):
def __init__(self, argv, catalog):
QApplication.__init__(self, argv)
self.aboutToQuit.connect(self.cleanup)
self.control = QLocalServer(self)
self.control.newConnection.connect(self.onControlConnect)
self.mainwindow = None
self.catalog = '%s-pds.socket' % catalog
self._init_translations()
self.readyToRun = self.control.listen(self.catalog)
if not self.readyToRun:
if self.sendToInstance('show-mainwindow'):
sys.exit()
else:
self.control.removeServer(self.catalog)
self.readyToRun = self.control.listen(self.catalog)
def _init_translations(self):
self.qTrans = QTranslator()
self.qTrans.load("qt_" + QLocale.system().name(),
QLibraryInfo.location(QLibraryInfo.TranslationsPath))
self.installTranslator(self.qTrans)
def setMainWindow(self, window):
self.mainwindow = window |
def exec_(self):
if self.readyToRun:
# Let Ctrl+C work ;)
signal.signal(signal.SIGINT, signal.SIG_DFL)
QApplication.exec_()
def cleanup(self):
self.control.removeServer(self.catalog)
def sendToInstance(self, data = ''):
socket = QLocalSocket()
socket.connectToServer(self.catalog, QIODevice.WriteOnly)
if socket.waitForConnected( 500 ):
if len(data) > 0:
sock | et.write(data)
socket.flush()
socket.close()
return True
return False
def onControlConnect(self):
self.socket = self.control.nextPendingConnection()
self.socket.readyRead.connect(self.onControlRequest)
def onControlRequest(self):
request = self.socket.readAll()
for cmd in request.split(' '):
self.parseCommand(cmd)
self.socket.flush()
self.socket.close()
self.socket.deleteLater()
def parseCommand(self, cmd):
if cmd == 'show-mainwindow':
if hasattr(self.mainwindow, 'show'):
self.mainwindow.show()
|
piedev/przepisnik | przepisnik/app.py | Python | apache-2.0 | 242 | 0 | #! | /usr/bin/env python
from mongoengine import connect
from flask import Flask
from api import blueprint as api
app = Flask(__name__)
app.register_blueprint(api)
connect('przepisnik001')
if __name__ == '__main__':
app.run(debug=True) | |
tomchristie/django | tests/gis_tests/tests.py | Python | bsd-3-clause | 3,659 | 0.00246 | import unittest
from django.core.exceptions import ImproperlyConfigured
from django.db import ProgrammingError
try:
from django.contrib.gis.db.backends.postgis.operations import PostGISOperations
HAS_POSTGRES = True
except ImportError:
HAS_POSTGRES = False
if HAS_POSTGRES:
class FakeConnection:
def __init__(self):
self.settings_dict = {
'NAME': 'test',
}
class FakePostGISOperations(PostGISOperations):
def __init__(self, version=None):
self.version = version
self.connection = FakeConnection()
def _get_postgis_func(self, func):
if func == 'postgis_lib_version':
if self.version is None:
raise ProgrammingError
else:
return self.version
elif func == 'version':
pass
else:
raise NotImplementedError('This function was | not expected to be called')
@unittest.skipUnless(HAS_POSTGRES, "The psycopg2 driver is needed for these tests")
class TestPostGISVersionCheck(unittest.TestCase):
"""
| The PostGIS version check parses correctly the version numbers
"""
def test_get_version(self):
expect = '1.0.0'
ops = FakePostGISOperations(expect)
actual = ops.postgis_lib_version()
self.assertEqual(expect, actual)
def test_version_classic_tuple(self):
expect = ('1.2.3', 1, 2, 3)
ops = FakePostGISOperations(expect[0])
actual = ops.postgis_version_tuple()
self.assertEqual(expect, actual)
def test_version_dev_tuple(self):
expect = ('1.2.3dev', 1, 2, 3)
ops = FakePostGISOperations(expect[0])
actual = ops.postgis_version_tuple()
self.assertEqual(expect, actual)
def test_version_loose_tuple(self):
expect = ('1.2.3b1.dev0', 1, 2, 3)
ops = FakePostGISOperations(expect[0])
actual = ops.postgis_version_tuple()
self.assertEqual(expect, actual)
def test_valid_version_numbers(self):
versions = [
('1.3.0', 1, 3, 0),
('2.1.1', 2, 1, 1),
('2.2.0dev', 2, 2, 0),
]
for version in versions:
with self.subTest(version=version):
ops = FakePostGISOperations(version[0])
actual = ops.spatial_version
self.assertEqual(version[1:], actual)
def test_no_version_number(self):
ops = FakePostGISOperations()
with self.assertRaises(ImproperlyConfigured):
ops.spatial_version
def test_version_dependent_funcs(self):
"""
Resolve names of functions renamed and deprecated in PostGIS 2.2.0
depending on PostGIS version.
Remove when dropping support for PostGIS 2.1.
"""
ops = FakePostGISOperations('2.2.0')
self.assertEqual(ops.spatial_function_name('DistanceSphere'), 'ST_DistanceSphere')
self.assertEqual(ops.spatial_function_name('DistanceSpheroid'), 'ST_DistanceSpheroid')
self.assertEqual(ops.spatial_function_name('LengthSpheroid'), 'ST_LengthSpheroid')
self.assertEqual(ops.spatial_function_name('MemSize'), 'ST_MemSize')
ops = FakePostGISOperations('2.1.0')
self.assertEqual(ops.spatial_function_name('DistanceSphere'), 'ST_distance_sphere')
self.assertEqual(ops.spatial_function_name('DistanceSpheroid'), 'ST_distance_spheroid')
self.assertEqual(ops.spatial_function_name('LengthSpheroid'), 'ST_length_spheroid')
self.assertEqual(ops.spatial_function_name('MemSize'), 'ST_mem_size')
|
jruere/concurrent-iterator | concurrent_iterator/utils.py | Python | lgpl-3.0 | 246 | 0 | # vim: set fileencoding=utf-8
from decorator import deco | rator |
@decorator
def check_open(f, self, *args, **kwargs):
if self.closed:
raise ValueError("%s operation on closed Consumer" % f.__name__)
return f(self, *args, **kwargs)
|
Anderson0026/mapproxy | mapproxy/test/system/test_xslt_featureinfo.py | Python | apache-2.0 | 10,434 | 0.007284 | # This file is part of the MapProxy project.
# Copyright (C) 2010 Omniscale <http://omniscale.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement, division
import os
from mapproxy.request.wms import WMS111FeatureInfoRequest, WMS130FeatureInfoRequest
from mapproxy.test.system import module_setup, module_teardown, SystemTest
from mapproxy.test.http import mock_httpd
from mapproxy.test.helper import strip_whitespace
from nose.tools import eq_
test_config = {}
xslt_input = """
<xsl:stylesheet version="1.0"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<xsl:template match="/">
<baz>
<foo><xsl:value-of select="/a/b/text()" /></foo>
</baz>
</xsl:template>
</xsl:stylesheet>""".strip()
xslt_input_html = """
<xsl:stylesheet version="1.0"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<xsl:template match="/">
<baz>
<foo><xsl:value-of select="/html/body/p" /></foo>
</baz>
</xsl:template>
</xsl:stylesheet>""".strip()
xslt_output = """
<xsl:stylesheet version="1.0"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<xsl:template match="/">
<bars>
<xsl:apply-templates/>
</bars>
</xsl:template>
<xsl:template match="foo">
<bar><xsl:value-of select="text()" /></bar>
</xsl:template>
</xsl:stylesheet>""".strip()
xslt_output_html = """
<xsl:stylesheet version="1.0"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<xsl:template match="/">
<html>
<body>
<h1>Bars</h1>
<xsl:apply-templates/>
</body>
</html>
</xsl:template>
<xsl:template match="foo">
<p><xsl:value-of select="text()" /></p>
</xsl:template>
</xsl:stylesheet>""".strip()
def setup_module():
module_setup(test_config, 'xslt_featureinfo.yaml')
with open(os.path.join(test_config['base_dir'], 'fi_in.xsl'), 'w') as f:
f.write(xslt_input)
with open(os.path.join(test_config['base_dir'], 'fi_in_html.xsl'), 'w') as f:
f.write(xslt_input_html)
with open(os.path.join(test_config['base_dir'], 'fi_out.xsl'), 'w') as f:
f.write(xslt_output)
with open(os.path.join(test_config['base_dir'], 'fi_out_html.xsl'), 'w') as f:
f.write(xslt_output_html)
def teardown_module():
module_teardown(test_config)
TESTSERVER_ADDRESS = 'localhost', 42423
class TestWMSXSLTFeatureInfo(SystemTest):
config = test_config
def setup(self):
SystemTest.setup(self)
self.common_fi_req = WMS111FeatureInfoRequest(url='/service?',
param=dict(x='10', y='20', width='200', height='200', layers='fi_layer',
format='image/png', query_layers='fi_layer', styles='',
bbox='1000,400,2000,1400', srs='EPSG:900913'))
def test_get_featureinfo(self):
fi_body = "<a><b>Bar</b></a>"
expected_req = ({'path': r'/service_a?LAYERs=a_one&SERVICE=WMS&FORMAT=image%2Fpng'
'&REQUEST=GetFeatureInfo&HEIGHT=200&CRS=EPSG%3A900913'
'&VERSION=1.3.0&BBOX=1000.0,400.0,2000.0,1400.0&styles='
'&WIDTH=200&QUERY_LAYERS=a_one&i=10&J=20&info_format=text/xml'},
{'body': fi_body, 'headers': {'content-type': 'text/xml; charset=UTF-8'}})
with mock_httpd(('localhost', 42423), [expected_req]):
resp = self.app.get(self.common_fi_req)
eq_(resp.content_type, 'application/vnd.ogc.gml')
eq_(strip_whitespace(resp.body), '<bars><bar>Bar</bar></bars>')
def test_get_featureinfo_130(self):
fi_body = "<a><b>Bar</b></a>"
expected_req = ({'path': r'/service_a?LAYERs=a_one&SERVICE=WMS&FORMAT=image%2Fpng'
'&REQUEST=GetFeatureInfo&HEIGHT=200&CRS=EPSG%3A900913'
'&VERSION=1.3.0&BBOX=1000.0,400.0,2000.0,1400.0&styles='
'&WIDTH=200&QUERY_LAYERS=a_one&i=10&J=20&info_format=text/xml'},
{'body': fi_body, 'headers': {'content-type': 'text/xml'}})
with mock_httpd(('localhost', 42423), [expected_req]):
req = WMS130FeatureInfoRequest(url='/service?').copy_with_request_params(self.common_fi_req)
resp = self.app.get(req)
eq_(resp.content_type, 'text/xml')
eq_(strip_whitespace(resp.body), '<bars><bar>Bar</bar></bars>')
def test_get_multiple_featureinfo(self):
fi_body1 = "<a><b>Bar1</b></a>"
fi_body2 = "<a><b>Bar2</b></a>"
fi_body3 = "<body><h1>Hello<p>Bar3"
ex | pected_req1 = ({'path': r'/service_a?LAYERs=a_one&SERVICE=WMS&FORMAT=image%2Fpng'
'&REQUEST=GetFeatureInfo&HEIGHT=200&CRS=EPSG%3A900913'
'&VERSION=1.3.0&BBOX=1000.0,400.0,2000.0,1400.0&styles='
'&WIDTH=200&QUERY_LAYERS=a_one&i=10&J=20&info_format=text/xml'},
| {'body': fi_body1, 'headers': {'content-type': 'text/xml'}})
expected_req2 = ({'path': r'/service_b?LAYERs=b_one&SERVICE=WMS&FORMAT=image%2Fpng'
'&REQUEST=GetFeatureInfo&HEIGHT=200&SRS=EPSG%3A900913'
'&VERSION=1.1.1&BBOX=1000.0,400.0,2000.0,1400.0&styles='
'&WIDTH=200&QUERY_LAYERS=b_one&X=10&Y=20&info_format=text/xml'},
{'body': fi_body2, 'headers': {'content-type': 'text/xml'}})
expected_req3 = ({'path': r'/service_d?LAYERs=d_one&SERVICE=WMS&FORMAT=image%2Fpng'
'&REQUEST=GetFeatureInfo&HEIGHT=200&SRS=EPSG%3A900913'
'&VERSION=1.1.1&BBOX=1000.0,400.0,2000.0,1400.0&styles='
'&WIDTH=200&QUERY_LAYERS=d_one&X=10&Y=20&info_format=text/html'},
{'body': fi_body3, 'headers': {'content-type': 'text/html'}})
with mock_httpd(('localhost', 42423), [expected_req1, expected_req2, expected_req3]):
self.common_fi_req.params['layers'] = 'fi_multi_layer'
self.common_fi_req.params['query_layers'] = 'fi_multi_layer'
resp = self.app.get(self.common_fi_req)
eq_(resp.content_type, 'application/vnd.ogc.gml')
eq_(strip_whitespace(resp.body),
'<bars><bar>Bar1</bar><bar>Bar2</bar><bar>Bar3</bar></bars>')
def test_get_multiple_featureinfo_html_out(self):
fi_body1 = "<a><b>Bar1</b></a>"
fi_body2 = "<a><b>Bar2</b></a>"
fi_body3 = "<body><h1>Hello<p>Bar3"
expected_req1 = ({'path': r'/service_a?LAYERs=a_one&SERVICE=WMS&FORMAT=image%2Fpng'
'&REQUEST=GetFeatureInfo&HEIGHT=200&CRS=EPSG%3A900913'
'&VERSION=1.3.0&BBOX=1000.0,400.0,2000.0,1400.0&styles='
'&WIDTH=200&QUERY_LAYERS=a_one&i=10&J=20&info_format=text/xml'},
{'body': fi_body1, 'headers': {'content-type': 'text/xml'}})
expected_req2 = ({'path': r'/service_b?LAYERs=b_one&SERVICE=WMS&FORMAT=image%2Fpng'
'&REQUEST=GetFeatureInfo&HEIGHT=200&SRS=EPSG%3A900913'
'&VERSION=1.1.1&BBOX=1000.0,400.0,2000.0,1400.0&styles='
'&WIDTH=200&QUERY_LAYERS=b_one&X=10&Y=20&info_format=text/xml'},
{'body': fi_body2, 'headers': {'content-type': 'text/xml'}})
expected_req3 = ({'path': r'/service_d?LAYERs=d_one&SERVICE=WMS&FORMAT=image%2Fpng'
'&REQUEST=GetFeatureInfo&HEIGHT=200&SRS=EPSG%3A900913'
|
martinrusev/amonone | amon/templatetags/__init__.py | Python | mit | 155 | 0.012903 | # from django.t | emplate.base import add_to_builtins
# from amon.settings import AUTOLOAD_TAGS
# for tag in AUTOLOAD_TAGS:
# add_to_builtin | s(tag)
|
google/shopping-markup | plugins/cloud_utils/__init__.py | Python | apache-2.0 | 1,168 | 0.000856 | # coding=utf-8
# Copyright 2020 Google LLC..
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing | , software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www. | apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
zhumengyuan/kallithea | kallithea/lib/vcs/backends/git/repository.py | Python | gpl-3.0 | 25,623 | 0.000702 | # -*- coding: utf-8 -*-
"""
vcs.backends.git.repository
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Git repository implementation.
:created_on: Apr 8, 2010
:copyright: (c) 2010-2011 by Marcin Kuzminski, Lukasz Balcerzak.
"""
import os
import re
import time
import urllib
import urllib2
import logging
import posixpath
import string
try:
# Python <=2.7
from pipes import quote
except ImportError:
# Python 3.3+
from shlex import quote
from dulwich.objects import Tag
from dulwich.repo import Repo, NotGitRepository
from dulwich.config import ConfigFile
from kallithea.lib.vcs import subprocessio
from kallithea.lib.vcs.backends.base import BaseRepository, CollectionGenerator
from kallithea.lib.vcs.conf import settings
from kallithea.lib.vcs.exceptions import (
BranchDoesNotExistError, ChangesetDoesNotExistError, EmptyRepositoryError,
RepositoryError, TagAlreadyExistError, TagDoesNotExistError
)
from kallithea.lib.vcs.utils import safe_unicode, makedate, date_fromtimestamp
from kallithea.lib.vcs.utils.lazy import LazyProperty
from kallithea.lib.vcs.utils.ordered_dict import OrderedDict
from kallithea.lib.vcs.utils.paths import abspath, get_user_home
from kallithea.lib.vcs.utils.hgcompat import (
hg_url, httpbasicauthhandler, httpdigestauthhandler
)
from .changeset import GitChangeset
from .inmemory import GitInMemoryChangeset
from .workdir import GitWorkdir
SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
log = logging.getLogger(__name__)
class GitRepository(BaseRepository):
"""
Git repository backend.
"""
DEFAULT_BRANCH_NAME = 'master'
scm = 'git'
def __init__(self, repo_path, create=False, src_url=None,
update_after_clone=False, bare=False):
self.path = abspath(repo_path)
repo = self._get_repo(create, src_url, update_after_clone, bare)
self.bare = repo.bare
@property
def _config_files(self):
return [
self.bare and abspath(self.path, 'config')
or abspath(self.path, '.git', 'config'),
abspath(get_user_home(), '.gitconfig'),
]
@property
def _repo(self):
return Repo(self.path)
@property
def head(self):
try:
return self._repo.head()
except KeyError:
return None
@property
def _empty(self):
"""
Checks if repository is empty ie. without any changesets
"""
try:
self.revisions[0]
except (KeyError, IndexError):
return True
return False
@LazyProperty
def revisions(self):
"""
Returns list of revisions' ids, in ascending order. Being lazy
attribute allows external tools to inject shas from cache.
"""
return self._get_all_revisions()
@classmethod
def _run_git_command(cls, cmd, **opts):
"""
Runs given ``cmd`` as git command and returns tuple
(stdout, stderr).
:param cmd: git command to be executed
:param opts: env options to pass into Subprocess command
"""
if '_bare' in opts:
_copts = []
del opts['_bare']
else:
_copts = ['-c', 'core.quotepath=false', ]
safe_call = False
if '_safe' in opts:
#no exc on failure
del opts['_safe']
safe_call = True
_str_cmd = False
if isinstance(cmd, basestring):
cmd = [cmd]
_str_cmd = True
gitenv = os.environ
# need to clean fix GIT_DIR !
if 'GIT_DIR' in gitenv:
del gitenv['GIT_DIR']
gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
_git_path = settings.GIT_EXECUTABLE_PATH
cmd = [_git_path] + _copts + cmd
if _str_cmd:
cmd = ' '.join(cmd)
try:
_opts = dict(
env=gitenv,
shell=True,
)
_opts.update(opts)
p = subprocessio.SubprocessIOChunker(cmd, **_opts)
except (EnvironmentError, OSError), err:
tb_err = ("Couldn't run git command (%s).\n"
"Original error was:%s\n" % (cmd, err))
log.error(tb_err)
if safe_call:
return '', err
else:
raise RepositoryError(tb_err)
return ''.join(p.output), ''.join(p.error)
def run_git_command(self, cmd):
opts = {}
if os.path.isdir(self.path):
opts['cwd'] = self.path
return self._run_git_command(cmd, **opts)
@classmethod
def _check_url(cls, url):
"""
Function will check given url and try to verify if it's a valid
link. Sometimes it may happened that git will issue basic
auth request that can cause whole API to hang when used from python
or other external calls.
On failures it'll raise urllib2.HTTPError, exception is also thrown
when the return code is non 200
"""
# check first if it's not an local url
if os.path.isdir(url) or url.startswith('file:'):
return True
if '+' in url[:url.find('://')]:
url = url[url.find('+') + 1:]
handlers = []
url_obj = hg_url(url)
test_uri, authinfo = url_obj.authinfo()
url_obj.passwd = '*****'
cleaned_uri = str(url_obj)
if not test_uri.endswith('info/refs'):
test_uri = test_uri.rstrip('/') + '/info/refs'
if authinfo:
#create a password manager
passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
passmgr.add_password(*authinfo)
handlers.extend((httpbasicauthhandler(passmgr),
httpdigestauthhandler(passmgr)))
o = urllib2.build_opener(*handlers)
o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
q = {"service": 'git-upload-pack'}
qs = '?%s' % urllib.urlencode(q)
cu = "%s%s" % (test_uri, qs)
req = urllib2.Request(cu, None, {})
try:
resp = o.open(req)
if resp.code != 200:
raise Exception('Return Code is not 200')
except Exception, e:
# means it cannot be cloned
raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
# now detect if it's proper git repo
gitdata = resp.read()
if not 'service=git-upload-pack' in gitdata:
raise urllib2.URLError(
"url [%s] does not look like an git" % (cleaned_uri))
return True
def _get_repo(self, create, src_url=None, update_after_clone=False,
bare=False):
if create and os.path.exists(self.path):
raise RepositoryError("Location already exist")
if src_url and not cre | ate:
raise Reposito | ryError("Create should be set to True if src_url is "
"given (clone operation creates repository)")
try:
if create and src_url:
GitRepository._check_url(src_url)
self.clone(src_url, update_after_clone, bare)
return Repo(self.path)
elif create:
os.makedirs(self.path)
if bare:
return Repo.init_bare(self.path)
else:
return Repo.init(self.path)
else:
return self._repo
except (NotGitRepository, OSError), err:
raise RepositoryError(err)
def _get_all_revisions(self):
# we must check if this repo is not empty, since later command
# fails if it is. And it's cheaper to ask than throw the subprocess
# errors
try:
self._repo.head()
except KeyError:
return []
rev_filter = settings.GIT_REV_FILTER
cmd = 'rev-list %s --reverse --date-order' % (rev_filter)
try:
so, se = self.run_git_command(cmd)
except RepositoryError:
# Can be raised for empty repositories
retu |
SaFi2266/odoo-rtl | website_rtl/models/ir_http.py | Python | agpl-3.0 | 1,637 | 0 | # -*- coding: utf-8 -*-
##############################################################################
#
# Odoo RTL support
# Copyright (C) 2014 Mohammed Barsi.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is di | stributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied w | arranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.http import request
from openerp.osv import orm
class ir_http(orm.AbstractModel):
_inherit = 'ir.http'
def _dispatch(self):
resp = super(ir_http, self)._dispatch()
if request.website:
langs = request.website.get_languages_dir()
dir = langs.get(request.context['lang'], None)
if dir is None:
request.website._get_languages_dir.clear_cache(request.website)
langs = request.website.get_languages_dir()
dir = langs.get(request.context['lang'], None)
if dir is None:
dir = 'ltr'
request.context['lang_dir'] = dir
return resp
|
bincyber/beesly | beesly/__init__.py | Python | gpl-3.0 | 910 | 0.003297 | import sys
import os.path
from beesly._logging import structured_log
from beesly.config import ConfigError, initialize_config
from beesly.views import app, rlimiter
def create_app():
"""
Initializes the Flask application.
"""
structured_log(level='info', msg="Starting beesly...")
try:
settings = initialize_config()
except ConfigError:
structured_log(level='cri | tical', msg="Failed to load configuration. Exiting...")
sys.exit(4)
# enable Swagger UI if running in DEV mode
if se | ttings["DEV"]:
app.static_folder = os.path.dirname(os.path.realpath(__file__)) + "/swagger-ui"
app.add_url_rule("/service/docs/<path:filename>", endpoint="/service/docs", view_func=app.send_static_file)
app.config.update(settings)
structured_log(level='info', msg="Successfully loaded configuration")
rlimiter.init_app(app)
return app
|
anshulgarg324/calender | api/google.py | Python | gpl-2.0 | 2,438 | 0.005332 | """
This module handles the Google Signup for a new user
"""
from oauth2client import client
from django.contrib.auth.models import User
from django.shortcuts import redirect
from djan | go.shortcuts import render
from rest_framework.decorators import api_view, permission_classes
from rest_fra | mework.views import APIView
from rest_framework import authentication, permissions
from rest_framework.authtoken.models import Token
from models import GoogleData
import httplib2
import json
flow = client.flow_from_clientsecrets(
'./client_secrets.json',
scope='https://www.googleapis.com/auth/drive.metadata.readonly https://www.googleapis.com/auth/calendar https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile',
redirect_uri='http://localhost:8000/oauth2callback'
)
flow.params['access_type'] = 'offline'
flow.params['prompt'] = 'consent'
@api_view(['GET'])
def google_signup(request):
"""
This method redirects the user to the different url which provides
the code
"""
auth_uri = flow.step1_get_authorize_url()
return redirect(auth_uri)
@api_view(['GET'])
def callback(request):
"""
This method sign up the user to the calender app and redirect to
the main page
"""
code = request.GET['code']
credentials = flow.step2_exchange(code)
http_auth = credentials.authorize(httplib2.Http())
user_dictionary = json.loads(credentials.to_json())
try:
email = str(user_dictionary['id_token']['email'])
username = User.objects.get(username=email)
except User.DoesNotExist:
username = None
if username is None:
user = User()
user.username = str(user_dictionary['id_token']['email'])
user.set_password(str(user_dictionary['refresh_token']))
user.save()
new_token = Token.objects.create(user=user)
new_token.save()
google_obj = GoogleData()
google_obj.refresh_token = str(user_dictionary['refresh_token'])
google_obj.user = user
google_obj.save()
return render(request, 'api/index.html',
{'key' : str(new_token.key),
'msg':"Signup Successful"})
else:
old_token = Token.objects.get(user=username)
return render(request, 'api/index.html',
{'msg': 'Email already exists',
'key' : str(old_token.key)})
|
PeerXu/reflector | server/app.py | Python | gpl-2.0 | 982 | 0.007128 | #!/usr/bin/env python2.7
from flask import Flask, make_response, render_template, request
import simplejson
app = Flask(__name__)
G = {
"cmds": [],
"outputs": {}
}
@app.route("/")
def index():
return render_template("index.html")
@app.route("/emit", methods=["POST"])
def emit():
G["cmds"].append(request.data)
return ''
@app.route( | "/cmd", methods=["GET"])
def cmd():
return '%s,%s' % (len(G["cmds"])-1, G["cmds"][-1])
@app.route("/result", methods=["POST"])
def result():
seq, data = request.data.split(',', 1)
seq = int(seq)
if seq < len(G["cmds"]) and seq not in G["outputs"]:
G["outputs"][int(seq)] = data
| return ''
@app.route("/display", methods=["GET", "POST"])
def display():
result = []
for k, v in enumerate(G["cmds"][-10:]):
result.append([k, G["cmds"][k], G["outputs"].get(k, "<not response yet>")])
return simplejson.dumps(result)
if __name__ == '__main__':
app.run(debug=True, port=4444)
|
MD-Studio/MDStudio | components/lie_echo/lie_echo/__main__.py | Python | apache-2.0 | 299 | 0.006689 | import sys
import os
modulepath = os.path.ab | spath(os.path.join(os.path.dirname(__file__), '../'))
if modulepath not in sys.path:
sys.path.insert(0, modulepath)
from mdstudio.runner import main
from lie_echo.application import EchoComponent
if __name__ == '__main__':
main(EchoCompone | nt)
|
rht/zulip | zerver/webhooks/lidarr/tests.py | Python | apache-2.0 | 4,458 | 0.001803 | from zerver.lib.test_classes import WebhookTestCase
class LidarrHookTests(WebhookTestCase):
STREAM_NAME = "lidarr"
URL_TEMPLATE = "/api/v1/external/lidarr?api_key={api_key}&stream={stream}"
WEBHOOK_DIR_NAME = "lidarr"
def test_lidarr_test(self) -> None:
"""
Tests if lidarr test payload is handled correctly
"""
expected_topic = "Lidarr - Test"
expected_message = "Lidarr webhook has been successfully configured."
self.check_webhook("lidarr_test", expected_topic, expected_message)
def test_lidarr_tracks_renamed(self) -> None:
"""
Tests if lidarr tracks renamed payload is handled correctly
"""
expected_topic = "Little Mix"
expected_message = "The artist Little Mix has had its tracks renamed."
self.check_webhook("lidarr_tracks_renamed", expected_topic, expected_message)
def test_lidarr_tracks_retagged(self) -> None:
"""
Tests if lidarr tracks retagged payload is handled correctly
"""
expected_topic = "Little Mix"
expected_message = "The artist Little Mix has had its tracks retagged."
self.check_webhook("lidarr_tracks_retagged", expected_topic, expected_message)
def test_lidarr_tracks_imported(self) -> None:
"""
Tests if lidarr tracks imported payload is handled correctly
"""
expected_topic = "UB40"
expected_message = """
The following tracks by UB40 have been imported:
* Cherry Oh Baby
* Keep On Moving
* Please Don't Make Me Cry
* Sweet Sensation
* Johnny Too Bad
* Red Red Wine
* Guilty
* She Caught the Train
* Version Girl
* Many Rivers to Cross
""".strip()
self.check_webhook("lidarr_tracks_imported", expected_topic, expected_message)
def test_lidarr_tracks_imported_upgrade(self) -> None:
"""
Tests if lidarr tracks imported upgrade payload is handled correctly
"""
expected_topic = "Little Mix"
expected_message = """
The following tracks by Little Mix have been imported due to upgrade:
* The National Manthem
* Woman Like Me
* Think About Us
* Strip
* Monster in Me
* Joan of Arc
* Love a Girl Right
* American Boy
* Told You So
* Wasabi
* More Than Words
* Motivate
* Notice
* The Cure
* Forget You Not
* Woman’s World
* The Cure (stripped)
* Only You
""".strip()
self.check_webhook("lidarr_tracks_imported_upgrade", expected_topic, expected_message)
def test_lidarr_album_grabbed(self) -> None:
"""
Tests if lidarr album grabbed payload is handled correctly
"""
expected_topic = "UB40"
expected_message = "The album Labour of Love by UB40 has been grabbed."
self.check_webhook("lidarr_album_grabbed", expected_topic, expected_message)
def test_lidarr_tracks_imported_over_limit(self) -> None:
"""
Tests if lidarr tracks imported over limit payload is handled correctly
"""
expected_topic = "Michael Jackson"
expected_message = """
The following tracks by Michael Jackson have been imported:
* Scream
* Billie Jean
* The Way You Make Me Feel
* They Don’t Care About Us
* Stranger in Moscow
* Black or White
* This Time Around
* Rock With You
* Earth Song
* She’s Out of My Life
* D.S.
* Bad
* Money
* I Just Can’t Stop Loving You
* Man in the Mirror
* Come Together
* Thriller
* You Are Not Alone
* Beat It
* Childhood (theme from “Free Willy 2”)
[and 10 more tracks(s)]
""".strip()
self.check_webhook("lidarr_tracks_imported_over_limit", expected_topic, expected_message)
def test_lidarr_tracks_imported_upgrade_over_limit(self) -> None:
"""
Tests if lidarr tracks imported upgrade over limit payload is handled correctly
"""
expected_topic = "Michael Jackson"
expected_message = """
The following tracks by Michael Jackson have been imported due to upgrade:
* Scream
* Billie Jean
* The Way You Make Me Feel
* They Don’t Care About Us
* Stranger in Moscow
* Black or White
* This Ti | me Around
* Rock With You
* Earth Song
* She’s Out of My Life
* D.S.
* Bad
* Money
* I Just Can’t Stop Loving You
* Man in the Mirror
* Come Together
* Thriller
* You Are Not Alone
* Beat It
* Childhood (theme from “Free Willy 2”)
[and 10 more tracks(s)]
""".strip() |
self.check_webhook(
"lidarr_tracks_imported_upgrade_over_limit", expected_topic, expected_message
)
|
gena/earthengine-api | python/ee/cli/eecli.py | Python | apache-2.0 | 2,385 | 0.007966 | #!/usr/bin/env python
"""Executable for the Earth Engine command line interface.
This executable starts a Python Cmd instance to receive and process command
line input entered by the user. If the executable is invoked with some
command line arguments, the Cmd is launched in the one-off mode, where
the provided arguments are processed as a single command after which the
program is terminated. Otherwise, this executable will launch the Cmd in the
interactive (looping) mode, where the user will be able to run multiple
commands as in a typical terminal program.
"""
from __future__ import print_function
import argparse
import sys
import ee
from ee.cli import commands
from ee.cli import utils
class CommandDispatcher(commands.Dispatcher):
name = 'main'
COMMANDS = [
commands.AuthenticateCommand,
commands.AclCommand,
commands.AssetCommand,
commands.CopyCommand,
commands.CreateCommand,
commands.ListCommand,
commands.SizeCommand,
commands.MoveCommand,
commands.RmCommand,
commands.TaskCommand,
commands.UploadCommand,
commands.UploadImageManifestCommand,
commands.UploadTableManifestCommand,
]
def main():
# Set the program name to 'earthengine' for proper help text display.
parser = argparse.ArgumentParser(
prog='earthengine', description='Earth Engine Command Line Interface.')
parser.add_argument(
'--ee_config', help='Path to the earthengine configuration file. '
'Defaults to "~/%s".' % utils.DEFAULT_EE_CONFIG_FILE_RELATIVE)
parser.add_argument(
'--service_account_file', help='Path to a service account credentials'
'file. Overrides any ee_config if specified.')
dispatcher = CommandDispatcher(parser)
# Print the list of commands if the user supplied no arguments at all.
if len(sys.argv) == 1:
parser.print_help()
return
args = parser.parse_args()
config = utils.CommandLineConfig(args.ee_config, args.service_account_file)
# Catch EEException errors, which wrap server-side Earth Engine
| # errors, and print the error message without the irrelevant local
# stack trace. (Individual commands may also catch EEException if
# they want to be able to continue despite errors.)
try:
dispatcher.run(args, config)
except ee.EEException as e:
print(e)
sys.exit(1)
if __na | me__ == '__main__':
main()
|
jnez71/lqRRT | demos/lqrrt_ros/behaviors/boat.py | Python | mit | 3,195 | 0.005008 | """
Constructs a planner that is good for boating around!
"""
from __future__ import division
import numpy as np
import numpy.linalg as npl
from params import *
import lqrrt
################################################# DYNAMICS
magic_rudder = 8000
focus = None
def dynamics(x, u, dt):
"""
Returns next state given last state x, wrench u, and timestep dt.
"""
# Rotation matrix (orientation, converts body to world)
R = np.array([
[np.cos(x[2]), -np.sin(x[2]), 0],
[np.sin(x[2]), np.cos(x[2]), 0],
[ 0, 0, 1]
])
# Construct drag coefficients based on our motion signs
D = np.copy(D_neg)
for i, v in enumerate(x[3:]):
if v >= 0:
D[i] = D_pos[i]
# Heading controller for staring at some focus point
if focus is not None:
vec = focus[:2] - x[:2]
ang = np.arctan2(vec[1], vec[0])
c = np.cos(x[2])
s = np.sin(x[2])
cg = np.cos(ang)
sg = np.sin(ang)
u[2] = magic_rudder*np.arctan2(sg*c - cg*s, cg*c + sg*s)
# Actuator saturation with even downscaling
thrusts = invB.dot(u)
ratios = thrust_max / np.clip(np.abs(thrusts), 1E-6, np.inf)
if np.any(ratios < 1):
| u = B | .dot(np.min(ratios) * thrusts)
# M*vdot + D*v = u and pdot = R*v
xdot = np.concatenate((R.dot(x[3:]), invM*(u - D*x[3:])))
# First-order integrate
xnext = x + xdot*dt
return xnext
################################################# POLICY
kp = np.diag([250, 250, 2500])
kd = np.diag([5, 5, 0.001])
S = np.diag([1, 1, 1, 1, 1, 1])
def lqr(x, u):
"""
Returns cost-to-go matrix S and policy matrix K given local state x and effort u.
"""
R = np.array([
[np.cos(x[2]), -np.sin(x[2]), 0],
[np.sin(x[2]), np.cos(x[2]), 0],
[ 0, 0, 1]
])
K = np.hstack((kp.dot(R.T), kd))
return (S, K)
################################################# HEURISTICS
goal_buffer = [real_tol[0], real_tol[1], real_tol[2], 10, 10, 6]
error_tol = np.copy(goal_buffer)
def gen_ss(seed, goal, buff=[ss_start]*4):
"""
Returns a sample space given a seed state, goal state, and buffer.
"""
return [(min([seed[0], goal[0]]) - buff[0], max([seed[0], goal[0]]) + buff[1]),
(min([seed[1], goal[1]]) - buff[2], max([seed[1], goal[1]]) + buff[3]),
(-np.pi, np.pi),
(-abs(velmax_neg[0]), velmax_pos[0]),
(-abs(velmax_neg[1]), velmax_pos[1]),
(-abs(velmax_neg[2]), velmax_pos[2])]
################################################# MAIN ATTRIBUTES
constraints = lqrrt.Constraints(nstates=nstates, ncontrols=ncontrols,
goal_buffer=goal_buffer, is_feasible=unset)
planner = lqrrt.Planner(dynamics, lqr, constraints,
horizon=horizon, dt=dt, FPR=FPR,
error_tol=error_tol, erf=unset,
min_time=basic_duration, max_time=basic_duration, max_nodes=max_nodes,
sys_time=unset, printing=False)
|
GoogleCloudPlatform/plspm-python | tests/test_regression_metric.py | Python | gpl-3.0 | 7,199 | 0.005556 | #!/usr/bin/python3
#
# Copyright (C) 2019 Google Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import pandas.testing as pt, pandas as pd, plspm.util as util, numpy.testing as npt, plspm.config as c, math, pytest
from plspm.plspm import Plspm
from plspm.scheme import Scheme
from plspm.mode import Mode
def satisfaction_path_matrix():
structure = c.Structure()
structure.add_path(["IMAG"], ["EXPE", "SAT", "LOY"])
structure.add_path(["EXPE"], ["QUAL", "VAL", "SAT"])
structure.add_path(["QUAL"], ["VAL", "SAT"])
structure.add_path(["VAL"], ["SAT"])
structure.add_path(["SAT"], ["LOY"])
return structure.path()
def test_plspm_satisfaction():
satisfaction = pd.read_csv("file:tests/data/satisfaction.csv", index_col=0)
config = c.Config(satisfaction_path_matrix(), scaled=False)
config.add_lv_with_columns_named("IMAG", Mode.A, satisfaction, "imag")
config.add_lv_with_columns_named("EXPE", Mode.A, satisfaction, "expe")
config.add_lv_with_columns_named("VAL", Mode.A, satisfaction, "val")
config.add_lv_with_columns_named("QUAL", Mode.A, satisfaction, "qual")
config.add_lv_with_columns_named("SAT", Mode.A, satisfaction, "sat")
config.add_lv_with_columns_named("LOY", Mode.A, satisfaction, "loy")
plspm_calc = Plspm(satisfaction, config)
expected_scores = pd.read_csv("file:tests/data/satisfaction.scores.csv")
npt.assert_allclose(util.sort_cols(expected_scores), util.sort_cols(plspm_calc.scores()))
expected_inner_model = pd.read_csv("file:tests/data/satisfaction.inner-model.csv", index_col=0)
actual_inner_model = plspm_calc.inner_model()
actual_inner_model = actual_inner_model[actual_inner_model['to'].isin(["SAT"])].drop(["to"], axis=1)
npt.assert_allclose(util.sort_cols(expected_inner_model).sort_index(),
util.sort_cols(actual_inner_model.set_index(["from"],drop=True)).sort_index())
expected_outer_model = pd.read_csv("file:tests/data/satisfaction.outer-model.csv", index_col=0).drop(["block"], axis=1)
pt.assert_index_equal(expected_outer_model.columns, plspm_calc.outer_model().columns)
npt.assert_allclose(
util.sort_cols(expected_outer_model.sort_index()),
util.sort_cols(plspm_calc.outer_model()).sort_index())
expected_crossloadings = pd.read_csv("file:tests/data/satisfaction.crossloadings.csv", index_col=0)
npt.assert_allclose(util.sort_cols(expected_crossloadings.drop(["block"], axis=1)).sort_index(),
util.sort_cols(plspm_calc.crossloadings()).sort_index())
expected_inner_summary = pd.read_csv("file:tests/data/satisfaction.inner-summary.csv", index_col=0)
npt.assert_allclose(util.sort_cols(expected_inner_summary.drop(["type"], axis=1)).sort_index(),
util.sort_cols(plspm_calc.inner_summary().drop(["type", "r_squared_adj"], axis=1)).sort_index())
pt.assert_series_equal(expected_inner_summary.loc[:, "type"].sort_index(),
plspm_calc.inner_summary().loc[:, "type"].sort_index())
expected_effects = pd.read_csv("file:tests/data/satisfaction.effects.csv", index_col=0)
pt.assert_frame_equal(expected_effects.loc[:, ["from", "to"]].sort_index(),
plspm_calc.effects().loc[:, ["from", "to"]].sort_index())
npt.assert_allclose(expected_effects.drop(["from", "to"], axis=1).sort_index(),
plspm_calc.effects().drop(["from", "to"], axis=1).sort_index())
expected_unidimensionality = pd.read_csv("file:tests/data/satisfaction_unidim.csv", index_col=0)
npt.assert_allclose(util.sort_cols(expected_unidimensionality.drop(["mode"], axis=1)).sort_index(),
util.sort_cols(plspm_calc.unidimensionality().drop(["mode"], axis=1)).sort_index())
assert math.isclose(0.609741624338411 | ,plspm_calc.goodness_of_fit() | )
plspm_calc_path = Plspm(satisfaction, config, Scheme.PATH)
expected_outer_model_path = util.sort_cols(
pd.read_csv("file:tests/data/satisfaction.outer-model-path.csv", index_col=0).drop(["block"],
axis=1)).sort_index()
npt.assert_allclose(expected_outer_model_path,
util.sort_cols(plspm_calc_path.outer_model()).sort_index())
plspm_calc_factorial = Plspm(satisfaction, config, Scheme.FACTORIAL)
expected_outer_model_factorial = util.sort_cols(
pd.read_csv("file:tests/data/satisfaction.outer-model-factorial.csv", index_col=0).drop(["block"],
axis=1)).sort_index()
npt.assert_allclose(expected_outer_model_factorial,
util.sort_cols(plspm_calc_factorial.outer_model()).sort_index())
def test_plspm_russa_mode_b():
    """Mode-B satisfaction model: inner summary must match the R reference output."""
    data = pd.read_csv("file:tests/data/satisfaction.csv", index_col=0)
    cfg = c.Config(satisfaction_path_matrix(), scaled=False)
    # Register every latent variable in Mode B; the manifest-variable column
    # prefix is simply the lower-cased LV name.
    for lv in ["QUAL", "VAL", "SAT", "LOY", "IMAG", "EXPE"]:
        cfg.add_lv_with_columns_named(lv, Mode.B, data, lv.lower())
    result = Plspm(data, cfg, Scheme.CENTROID)
    expected = pd.read_csv("file:tests/data/satisfaction.modeb.inner-summary.csv", index_col=0)
    actual = result.inner_summary()
    # Numeric columns must be close; the "type" column must match exactly.
    npt.assert_allclose(util.sort_cols(expected.drop(["type"], axis=1)).sort_index(),
                        util.sort_cols(actual.drop(["type", "r_squared_adj"], axis=1)).sort_index())
    pt.assert_series_equal(expected.loc[:, "type"].sort_index(),
                           actual.loc[:, "type"].sort_index())
def test_only_single_item_constructs():
    """Goodness-of-fit is undefined when every construct has a single indicator."""
    data = pd.read_csv("file:tests/data/satisfaction.csv", index_col=0)
    cfg = c.Config(satisfaction_path_matrix())
    # Each latent variable is measured by exactly one manifest variable,
    # named "<prefix>1" where the prefix is the lower-cased LV name.
    for lv in ["QUAL", "VAL", "SAT", "LOY", "IMAG", "EXPE"]:
        cfg.add_lv(lv, Mode.A, c.MV(lv.lower() + "1"))
    calc = Plspm(data, cfg, Scheme.CENTROID)
    with pytest.raises(ValueError):
        calc.goodness_of_fit()
|
bdell/pyPWA | pythonPWA/utilities/dataSimulator.py | Python | mit | 4,102 | 0.023403 | from random import random
from pythonPWA.fileHandlers.gampReader import gampReader
from pythonPWA.model.intensity import intensity
class dataSimulator(object):
    """Monte-Carlo accept/reject simulator for partial-wave-analysis gamp events.

    For each input event the model intensity is evaluated; an event is kept
    with probability intensity / max(intensity) and the survivors are written
    to the raw output gamp file.
    """

    def __init__(self,
                 mass=1010.,
                 waves=None,
                 resonances=None,
                 normint=None,
                 productionAmplitudes=None,
                 alphaList=None,
                 beamPolarization=.4):
        """
        Args:
            mass: mass-bin value at which the intensity is evaluated.
            waves: list of partial waves (defaults to a fresh empty list).
            resonances: list of resonances (defaults to a fresh empty list).
            normint: normalization integral object.
            productionAmplitudes: production amplitudes (defaults to a fresh
                empty list).
            alphaList: per-event alpha angles; its length determines how many
                events are processed (defaults to a fresh empty list).
            beamPolarization: beam polarization fraction.
        """
        # Use None sentinels instead of mutable default arguments so that
        # each instance gets its own lists (the original defaults of [] were
        # shared across all instances).
        self.mass = mass
        self.waves = [] if waves is None else waves
        self.resonances = [] if resonances is None else resonances
        self.normint = normint
        self.productionAmplitudes = [] if productionAmplitudes is None else productionAmplitudes
        self.alphaList = [] if alphaList is None else alphaList
        self.beamPolarization = beamPolarization
        self.intensity = intensity(resonances=self.resonances,
                                   waves=self.waves,
                                   productionAmplitudes=self.productionAmplitudes,
                                   normint=self.normint,
                                   alphaList=self.alphaList,
                                   beamPolarization=self.beamPolarization)

    def execute(self, inputGampFile, outputRawGampFile, outputAccGampFile, inputPfFile):
        """Run the accept/reject simulation.

        Args:
            inputGampFile: open gamp file holding the generated events.
            outputRawGampFile: open file receiving the accepted ("raw") events.
            outputAccGampFile: open file for acceptance-filtered events; the
                acceptance filter is currently disabled, so this file is only
                closed, never written to.
            inputPfFile: acceptance-flag file; currently unused because the
                filter that consumed it is disabled.
        """
        igreader = gampReader(gampFile=inputGampFile)
        inputGampEvents = igreader.readGamp()

        # Evaluate the model intensity for every event and track the maximum,
        # which normalizes the acceptance probabilities to [0, 1].
        iList = [self.intensity.calculate(self.mass, event)
                 for event in range(len(self.alphaList))]
        iMax = max(iList) if iList else 0.

        # Accept event n with probability i_n / i_max. Guard against iMax == 0
        # (no events, or all intensities zero), which previously raised
        # ZeroDivisionError.
        rawGampEvents = []
        if iMax > 0.:
            for n, i in enumerate(iList):
                if i / iMax > random():  # random() is uniform on [0.0, 1.0)
                    inputGampEvents[n].raw = True
                    rawGampEvents.append(inputGampEvents[n])

        for rawGamp in rawGampEvents:
            rawGamp.writeGamp(outputRawGampFile)

        # NOTE(review): an acceptance filter that read inputPfFile and wrote
        # outputAccGampFile used to live here but was commented out upstream;
        # the output file is still closed to preserve the external behavior.
        outputRawGampFile.close()
        outputAccGampFile.close()
|
openstack/tempest | tempest/api/compute/floating_ips/test_list_floating_ips_negative.py | Python | apache-2.0 | 1,663 | 0 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute.floating_ips import base
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest.lib import exceptions as lib_exc
CONF = config.CONF
class FloatingIPDetailsNegativeTestJSON(base.BaseFloatingIPsTest):
    """Negative tests of floating ip detail

    Negative tests of floating ip detail with compute microversion less
    than 2.36.
    """

    max_microversion = '2.35'

    @decorators.attr(type=['negative'])
    @decorators.idempotent_id('7ab18834-4a4b-4f28-a2c5-440579866695')
    def test_get_nonexistent_floating_ip_details(self):
        """Test getting non existent floating ip should fail"""
        # Fabricate an id that cannot exist: a random UUID when neutron backs
        # the service, otherwise a random integer above the fixture range.
        missing_id = (data_utils.rand_uuid()
                      if CONF.service_available.neutron
                      else data_utils.rand_int_id(start=999))
        self.assertRaises(lib_exc.NotFound,
                          self.client.show_floating_ip, missing_id)
|
TissueMAPS/TmLibrary | tmlib/tools/base.py | Python | agpl-3.0 | 24,061 | 0.000249 | # TmLibrary - TissueMAPS library for distibuted image analysis routines.
# Copyright (C) 2016 Markus D. Herrmann, University of Zurich and Robin Hafen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'''Base classes for data analysis tools.'''
import re
import logging
import inspect
import importlib
import simplejson
import numpy as np
import pandas as pd
import collections
from abc import ABCMeta
from abc import abstractmethod
from abc import abstractproperty
from sqlalchemy import func
from sqlalchemy.dialects.postgresql import FLOAT
from psycopg2.extras import execute_values
from psycopg2.sql import SQL, Identifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import SGDClassifier
from sklearn.svm import SVC
from sklearn.preprocessing import RobustScaler
from sklearn.model_selection import GridSearchCV, KFold
from sklearn.cluster import KMeans
from tmlib import cfg
import tmlib.models as tm
from tmlib.config import DEFAULT_LIB, IMPLEMENTED_LIBS
from tmlib.utils import (
same_docstring_as, autocreate_directory_property, assert_type,
create_partitions
)
logger = logging.getLogger(__name__)
_register = {}
class _ToolMeta(ABCMeta):
    '''Meta class for :class:`Tool <tmlib.tools.base.Tool>`.'''

    def __init__(cls, cls_name, cls_bases, cls_args):
        # A class is abstract only if it declares __abstract__ = True in its
        # own namespace; an inherited flag is deliberately ignored so that
        # concrete subclasses of an abstract tool still get registered.
        if not vars(cls).get('__abstract__', False):
            # Concrete tools must advertise themselves in the UI, so both
            # attributes are mandatory (inherited values are acceptable).
            for attr in ('__icon__', '__description__'):
                if not hasattr(cls, attr):
                    raise AttributeError(
                        'Tool class "%s" must implement attribute "%s".' % (
                            cls_name, attr
                        )
                    )
            logger.debug('registering tool %s', cls.__name__)
            _register[cls_name] = cls
        return super(_ToolMeta, cls).__init__(cls_name, cls_bases, cls_args)

    def __call__(cls, *args, **kwargs):
        # Plain pass-through; kept so instantiation hooks can be added later.
        return super(_ToolMeta, cls).__call__(*args, **kwargs)
class Tool(object):
'''Abstract base class for data analysis tools.
Tools use the
`Pandas DataFrame <http://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.html>`_ data container.
This is compatible with standard machine learning libries,
such as `Scikit-Learn <http://scikit-learn.org/stable/>`_
`Caffe <http://caffe.berkeleyvision.org/>`_ or `Keras <https://keras.io/>`_.
'''
__metaclass__ = _ToolMeta
__abstract__ = True
    def __init__(self, experiment_id):
        '''
        Parameters
        ----------
        experiment_id: int
            ID of the experiment for which the tool request is made
        '''
        # Stored for later use by the data-access methods, which open
        # experiment-scoped database connections keyed by this ID.
        self.experiment_id = experiment_id
    def load_feature_values(self, mapobject_type_name, feature_names,
            mapobject_ids=None):
        '''Loads values for each given feature of the given mapobject type.

        Parameters
        ----------
        mapobject_type_name: str
            name of the selected
            :class:`MapobjectType <tmlib.models.mapobject.MapobjectType>`
        feature_names: List[str]
            name of each selected
            :class:`Feature <tmlib.models.feature.Feature>`
        mapobject_ids: List[int], optional
            ID of each :class:`Mapobject <tmlib.models.mapobject.Mapobject>`
            for which values should be selected; if ``None`` values for
            all objects will be loaded (default: ``None``)

        Returns
        -------
        pandas.DataFrame
            dataframe where columns are features and rows are mapobjects
            indexable by their ID
        '''
        logger.info(
            'load feature values for objects of type "%s"', mapobject_type_name
        )
        logger.debug(
            'load values for features: "%s"', '", "'.join(feature_names)
        )
        if mapobject_ids is not None:
            logger.debug('load values for %d objects', len(mapobject_ids))
        else:
            logger.debug('load values for all objects')
        # FIXME: Use ExperimentSession
        with tm.utils.ExperimentConnection(self.experiment_id) as conn:
            # Step 1: resolve the requested feature names (and the mapobject
            # type) to database ids, since stored feature values are keyed by
            # feature id rather than by name.
            conn.execute('''
                SELECT t.id AS mapobject_type_id, f.id AS feature_id, f.name
                FROM features AS f
                JOIN mapobject_types AS t ON t.id = f.mapobject_type_id
                WHERE f.name = ANY(%(feature_names)s)
                AND t.name = %(mapobject_type_name)s;
            ''', {
                'feature_names': feature_names,
                'mapobject_type_name': mapobject_type_name
            })
            records = conn.fetchall()
            # All rows were filtered to the same mapobject type, so the id can
            # be taken from the first record.
            mapobject_type_id = records[0].mapobject_type_id
            # Map feature id -> feature name; str() because hstore keys are
            # strings.
            feature_map = {str(r.feature_id): r.name for r in records}
            # Step 2: fetch the values. They live in an hstore column, and
            # slice() extracts only the entries for the requested feature ids.
            sql = '''
                SELECT
                    v.mapobject_id, v.tpoint,
                    slice(v.values, %(feature_ids)s) AS values
                FROM feature_values AS v
                JOIN mapobjects AS m
                ON m.id = v.mapobject_id AND m.partition_key = v.partition_key
                WHERE m.mapobject_type_id = %(mapobject_type_id)s
            '''
            if mapobject_ids is not None:
                # Optionally restrict the selection to the given objects.
                sql += '''
                    AND m.id = ANY(%(mapobject_ids)s)
                '''
            conn.execute(sql, {
                'feature_ids': feature_map.keys(),
                'mapobject_type_id': mapobject_type_id,
                'mapobject_ids': mapobject_ids
            })
            records = conn.fetchall()
            values = list()
            index = list()
            for r in records:
                values.append(r.values)
                index.append((r.mapobject_id, r.tpoint))
            # Rows are uniquely identified by the combination of object id and
            # time point.
            index = pd.MultiIndex.from_tuples(
                index, names=['mapobject_id', 'tpoint']
            )
            # TODO: This probably creates a copy in memory. Can we avoid this?
            df = pd.DataFrame(values, index=index).astype(float)
            # Rename the feature-id columns back to the requested names.
            # (dict.iteritems: this module targets Python 2.)
            column_map = {i: name for i, name in feature_map.iteritems()}
            df.rename(columns=column_map, inplace=True)
            # TODO: How shall we deal with NaN values? Ideally we would expose
            # the option to users to either filter rows (mapobjects) or columns
            # (columns).
            null_indices = self.identify_features_with_null_values(df)
            for name, count in null_indices:
                if count > 0:
                    logger.warn('feature "%s" contains %d null values', name, count)
            return df
def calculate_extrema(self, mapobject_type_name, feature_name):
'''Calculates minimum and maximum values of a given feature and
mapobject type.
Parameters
----------
mapobject_type_name: str
name of the selected
:class:`MapobjectType <tmlib.models.mapobject.MapobjectType>`
feature_names: List[str]
name of each selected
:class:`Feature <tmlib.models.feature.Feature>`
Returns
-------
Tuple[float]
| min and max
'''
logger.info(
'calculate min/max for objects of type "%s" and feature "%s"',
mapobject_type_name | , feature_name
)
with tm.utils.ExperimentSession(self.experiment_id) as session:
mapobject_type = session.query(tm.MapobjectType.i |
hail-is/hail | hail/python/hailtop/batch/docs/cookbook/files/run_gwas.py | Python | mit | 1,789 | 0.001677 | import argparse
import hail as hl
def run_gwas(vcf_file, phenotypes_file, output_file):
    """Run a simple GWAS with Hail: QC, PCA, then per-variant linear regression.

    Writes ``<output_file>.assoc`` (SNP / p-value table) and a PLINK export of
    the filtered matrix table.

    NOTE(review): writes an intermediate 'tmp.mt' into the current working
    directory and never cleans it up.
    """
    table = hl.import_table(phenotypes_file, impute=True).key_by('Sample')
    # Round-trip the VCF through disk so downstream stages read an on-disk
    # MatrixTable instead of re-running the import.
    hl.import_vcf(vcf_file).write('tmp.mt')
    mt = hl.read_matrix_table('tmp.mt')
    # Join per-sample phenotypes onto the matrix columns (keyed by sample id).
    mt = mt.annotate_cols(pheno=table[mt.s])
    # Sample QC: keep samples with mean depth >= 4 and call rate >= 97%.
    mt = hl.sample_qc(mt)
    mt = mt.filter_cols((mt.sample_qc.dp_stats.mean >= 4) & (mt.sample_qc.call_rate >= 0.97))
    # Genotype QC on allele balance: hom-ref <= 0.1, het within [0.25, 0.75],
    # hom-var >= 0.9.
    ab = mt.AD[1] / hl.sum(mt.AD)
    filter_condition_ab = ((mt.GT.is_hom_ref() & (ab <= 0.1))
                           | (mt.GT.is_het() & (ab >= 0.25) & (ab <= 0.75))
                           | (mt.GT.is_hom_var() & (ab >= 0.9)))
    mt = mt.filter_entries(filter_condition_ab)
    # Variant QC: drop rare variants (alt allele frequency <= 1%).
    mt = hl.variant_qc(mt)
    mt = mt.filter_rows(mt.variant_qc.AF[1] > 0.01)
    # First three principal components of the genotypes serve as ancestry
    # covariates in the regression below.
    eigenvalues, pcs, _ = hl.hwe_normalized_pca(mt.GT)
    mt = mt.annotate_cols(scores=pcs[mt.s].scores)
    # Per-variant linear regression of caffeine consumption on genotype,
    # adjusting for an intercept, sex and the first three PCs.
    gwas = hl.linear_regression_rows(
        y=mt.pheno.CaffeineConsumption,
        x=mt.GT.n_alt_alleles(),
        covariates=[1.0, mt.pheno.isFemale, mt.scores[0], mt.scores[1], mt.scores[2]])
    # Reduce to a SNP -> p-value table keyed by the variant string.
    gwas = gwas.select(SNP=hl.variant_str(gwas.locus, gwas.alleles), P=gwas.p_value)
    gwas = gwas.key_by(gwas.SNP)
    gwas = gwas.select(gwas.P)
    gwas.export(f'{output_file}.assoc', header=True)
    hl.export_plink(mt, output_file, fam_id=mt.s, ind_id=mt.s)
if __name__ == '__main__':
    # Command-line entry point: --vcf, --phenotypes and --output-file are
    # required; --cores optionally caps local parallelism.
    parser = argparse.ArgumentParser()
    parser.add_argument('--vcf', required=True)
    parser.add_argument('--phenotypes', required=True)
    parser.add_argument('--output-file', required=True)
    parser.add_argument('--cores', required=False)
    args = parser.parse_args()
    if args.cores:
        # Initialize Hail with a local master limited to the requested number
        # of cores before any Hail work runs.
        hl.init(master=f'local[{args.cores}]')
    run_gwas(args.vcf, args.phenotypes, args.output_file)
|
wsricardo/mcestudos | treinamento-webScraping/raspa/geo03.py | Python | gpl-3.0 | 602 | 0.008306 | from geolocation.main import GoogleMaps
google_maps = GoogleMaps(api_key='AIzaSyBCksh0B58c_C6k_Epm2k1ZQb-YF6kA6SE')
lat = -2.500044
lng = -44.288093
location = google_maps.search(lat=lat, lng=lng)
my_location = location.first()
if my_l | ocation.cit | y: print(my_location.city.decode('utf-8'))
if my_location.route: print(my_location.route.decode('utf-8'))
if my_location.street_number: print(my_location.street_number)
if my_location.postal_code: print(my_location.postal_code)
print(my_location.country)
print(my_location.country_shortcut)
print(my_location.formatted_address)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.